#!/usr/bin/python
# -*- coding: utf-8 -*-
# #*** <License> ************************************************************#
# This module is part of the repository CNDB.
#
# This module is licensed under the terms of the BSD 3-Clause License
# <http://www.c-tanzer.at/license/bsd_3c.html>.
# #*** </License> ***********************************************************#

from _TFL.pyk import pyk

from rsclib.HTML_Parse import tag, Page_Tree
from rsclib.autosuper import autosuper

from spider.common import Interface, Inet4, Inet6, unroutable
from spider.common import WLAN_Config
from spider.luci import Version_Mixin

class Status (Page_Tree, Version_Mixin) :
    url = 'cgi-bin/luci/freifunk/status/status'
    retries = 2
    timeout = 10
    html_charset = 'utf-8' # force utf-8 encoding

    wl_names = dict \
        ( ssid    = 'ssid'
        , _bsiid  = 'bssid'
        , channel = 'channel'
        , mode    = 'mode'
        )

    def parse (self) :
        root = self.tree.getroot ()
        self.wlans = []
        self.routes = {}
        for div in root.findall (".//%s" % tag ("div")) :
            id = div.get ('id')
            if id == 'cbi-wireless' :
                wlan_div = div
            elif id == 'cbi-routes' :
                route_div = div
            self.try_get_version (div)
        for d in self.tbl_iter (wlan_div) :
            for k, newkey in pyk.iteritems (self.wl_names) :
                if k in d :
                    d [newkey] = d [k]
            wl = WLAN_Config (** d)
            self.wlans.append (wl)
        for d in self.tbl_iter (route_div) :
            iface = d.get ('iface')
            gw = d.get ('gateway')
            if iface and gw :
                self.routes [iface] = gw
        self.set_version (root)
    # end def parse

    def tbl_iter (self, div) :
        tbl = div.find (".//%s" % tag ("table"))
        assert tbl.get ('class') == 'cbi-section-table'
        d = {}
        for tr in tbl :
            if 'cbi-section-table-row' not in tr.get ('class').split () :
                continue
            for input in tr.findall (".//%s" % tag ('input')) :
                name = input.get ('id').split ('.') [-1]
                val = input.get ('value')
                d [name] = val
            if not d :
                continue
            yield d
    # end def tbl_iter
# end class Status

class Table_Iter (Page_Tree) :

    def table_iter (self) :
        root = self.tree.getroot ()
        for div in root.findall (".//%s" % tag ("div")) :
            if div.get ('id') == 'maincontent' :
                break
        tbl = div.find (".//%s" % tag ("table"))
        if tbl is None :
            return
        for tr in tbl :
            if tr [0].tag == tag ('th') :
                continue
            yield (self.tree.get_text (x) for x in tr)
    # end def table_iter
# end class Table_Iter

class OLSR_Connections (Table_Iter) :
    url = 'cgi-bin/luci/freifunk/olsr/'
    retries = 2
    timeout = 10
    html_charset = 'utf-8' # force utf-8 encoding

    def parse (self) :
        self.neighbors = {}
        for l in self.table_iter () :
            neighbor, ip, lq, nlq, etx = l
            lq, nlq, etx = (float (x) for x in (lq, nlq, etx))
            self.neighbors [neighbor] = [ip, lq, nlq, etx]
    # end def parse
# end class OLSR_Connections

class OLSR_Routes (Table_Iter) :
    url = 'cgi-bin/luci/freifunk/olsr/routes'
    retries = 2
    timeout = 10
    html_charset = 'utf-8' # force utf-8 encoding

    def parse (self) :
        self.iface_by_gw = {}
        for l in self.table_iter () :
            announced, gw, iface, metric, etx = l
            if gw in self.iface_by_gw :
                assert iface == self.iface_by_gw [gw]
            else :
                self.iface_by_gw [gw] = iface
    # end def parse
# end class OLSR_Routes

class OpenWRT (autosuper) :

    def __init__ (self, site, request) :
        self.site = site
        self.request = request
        if 'interfaces' in self.request or 'ips' in self.request :
            st = Status (site = site)
            conn = OLSR_Connections (site = site)
            route = OLSR_Routes (site = site)
            self.version = st.version
            assert len (st.wlans) <= 1
            interfaces = {}
            ips = {}
            count = 0
            for gw, ifname in pyk.iteritems (route.iface_by_gw) :
                ip, lq, nlq, etx = conn.neighbors [gw]
                i4 = Inet4 (ip, None, None, iface = ifname)
                ips [i4] = 1
                is_wlan = True
                if lq == nlq == etx == 1.0 :
                    is_wlan = False
                if ifname in interfaces :
                    iface = interfaces [ifname]
                    if not iface.is_wlan and is_wlan :
                        iface.is_wlan = True
                        iface.wlan_info = st.wlans [0]
                else :
                    iface = Interface (count, ifname, None)
                    iface.is_wlan = is_wlan
                    if is_wlan :
                        iface.wlan_info = st.wlans [0]
                    count += 1
                    interfaces [ifname] = iface
                if i4 not in iface.inet4 :
                    iface.append_inet4 (i4)
            wl_if = None
            for iface in pyk.itervalues (interfaces) :
                if iface.is_wlan :
                    if wl_if :
                        m = "Duplicate wlan: %s/%s" % (iface.name, wl_if.name)
                        raise ValueError (m)
                    wl_if = iface
            # check own ip
            n = 'unknown'
            i4 = Inet4 (self.request ['ip'], None, None, iface = n)
            if i4 not in ips :
                assert n not in interfaces
                iface = interfaces [n] = Interface (count, n, None)
                iface.append_inet4 (i4)
                iface.is_wlan = False
                if not wl_if and st.wlans :
                    iface.is_wlan = True
                    iface.wlan_info = st.wlans [0]
                ips [i4] = True
            self.request ['ips'] = ips
            self.request ['interfaces'] = interfaces
            self.request ['version'] = st.version
    # end def __init__
# end class OpenWRT
# UCF Senior Design 2017-18
# Group 38
from PIL import Image
import cv2
import imagehash
import math
import numpy as np

DIFF_THRES = 20
LIMIT = 2
RESIZE = 1000


def calc_hash(img):
    """
    Calculate the wavelet hash of the image
    img: (ndarray) image file
    """
    # resize image if width > 1000
    img = resize(img)
    return imagehash.whash(Image.fromarray(img))


def compare(hash1, hash2):
    """
    Calculate the difference between two images
    hash1: (array) first wavelet hash
    hash2: (array) second wavelet hash
    """
    return hash1 - hash2


def limit(img, std_hash, count):
    """
    Determine whether image should be removed from image dictionary in main.py
    img: (ndarray) image file
    std_hash: (array) wavelet hash of comparison standard
    count: (int) global count of images similar to comparison standard
    """
    # calculate hash for given image
    cmp_hash = calc_hash(img)
    # compare to standard
    diff = compare(std_hash, cmp_hash)
    # image is similar to standard
    if diff <= DIFF_THRES:
        # if LIMIT similar images have been seen already, remove image
        if count >= LIMIT:
            return 'remove'
    # non-similar image found
    else:
        # update comparison standard
        return 'update_std'
    # else continue reading images with same standard
    return 'continue'


def resize(img):
    """
    Resize an image
    img: (ndarray) RGB color image
    """
    # get dimensions of image
    width = np.shape(img)[1]
    height = np.shape(img)[0]
    # if width of image is greater than 1000, resize it to 1000
    if width > RESIZE:
        # keep resize proportional: scale height by the same factor as width
        scale = RESIZE / width
        resized_img = cv2.resize(
            img, (RESIZE, math.floor(height * scale)),
            interpolation=cv2.INTER_AREA)
        # return resized image
        return resized_img
    # if width of image is at most 1000, return image unresized
    return img


def set_standard(images, filename):
    """
    Set new comparison standard and update information
    images: (dictionary) dictionary containing all the image data
    filename: (String) name of the image file
    """
    return filename, calc_hash(images[filename]), 0
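A usage sketch of the helpers above (the file names are hypothetical; assumes OpenCV-readable images on disk and the imagehash package):

import cv2

# Hypothetical image paths -- substitute real files.
img_a = cv2.imread('photo_001.jpg')
img_b = cv2.imread('photo_002.jpg')

std_hash = calc_hash(img_a)              # wavelet hash of the comparison standard
action = limit(img_b, std_hash, count=0)
if action == 'remove':
    print('near-duplicate over the limit: drop photo_002.jpg')
elif action == 'update_std':
    print('distinct image: make it the new comparison standard')
else:
    print('similar but under the limit: keep reading')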
# Copyright 2018 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from .cli.cli import main
# TODO(hongyes): add more commands:
# kfp compile (migrate from dsl-compile)
# kfp experiment (manage experiments)
if __name__ == '__main__':
    main()
import time
from PyQt5 import QtGui, QtCore
from ui.room_item import Ui_Form
from PyQt5.QtWidgets import QWidget
class Room_Item(QWidget, Ui_Form):
    def __init__(self, parent=None, room_data=None):
        super(Room_Item, self).__init__(parent)
        self.setupUi(self)
        self.data = room_data
        self.setRoomInfo()

    def setRoomInfo(self):
        self.room_name.setText('{}({})'.format(self.data['naturalName'], self.data['roomName']))
        self.description.setText("<a style='color:#BCBCBC'>{}</a>".format(self.data['description']))
        timeStamp = int(self.data['creationDate']) / 1000
        timeArray = time.localtime(timeStamp)
        otherStyleTime = time.strftime("%Y-%m-%d", timeArray)
        self.create_time.setText("<a style='color:#BCBCBC'>{}</a>".format(otherStyleTime))
        members = len(self.data['owners']) + len(self.data['admins']) + len(self.data['members'])
        memberCounter = "<a style='color:#BCBCBC'>{}/{}</a>".format(members, ('∞' if self.data['maxUsers'] == 0 else self.data['maxUsers']))
        self.member.setText(memberCounter)
import asyncio
import re
import sys
import traceback
import toga
from toga import Key
from .keys import toga_to_winforms_key
from .libs import Threading, WinForms, shcore, user32, win_version
from .libs.proactor import WinformsProactorEventLoop
from .window import Window
class MainWindow(Window):
    def winforms_FormClosing(self, sender, event):
        if not self.interface.app._impl._is_exiting:
            event.Cancel = not self.interface.app.exit()


class App:
    _MAIN_WINDOW_CLASS = MainWindow

    def __init__(self, interface):
        self.interface = interface
        self.interface._impl = self

        # Winforms app exit is tightly bound to the close of the MainWindow.
        # The FormClosing message on MainWindow calls app.exit(), which
        # will then trigger the "on_exit" handler (which might abort the
        # close). However, if app.exit() succeeds, it will request the
        # Main Window to close... which calls app.exit().
        # So - we have a flag that is only ever set once a request has been
        # made to exit the native app. This flag can be used to shortcut any
        # window-level close handling.
        self._is_exiting = False

        self.loop = WinformsProactorEventLoop()
        asyncio.set_event_loop(self.loop)

    def create(self):
        self.native = WinForms.Application
        self.app_context = WinForms.ApplicationContext()

        # Check the version of windows and make sure we are setting the DPI mode
        # with the most up to date API
        # Windows Versioning Check Sources : https://www.lifewire.com/windows-version-numbers-2625171
        # and https://docs.microsoft.com/en-us/windows/release-information/
        if win_version.Major >= 6:  # Checks for Windows Vista or later
            # Represents Windows 8.1 up to Windows 10 before Build 1703 which should use
            # SetProcessDpiAwareness(True)
            if ((win_version.Major == 6 and win_version.Minor == 3) or
                    (win_version.Major == 10 and win_version.Build < 15063)):
                shcore.SetProcessDpiAwareness(True)
            # Represents Windows 10 Build 1703 and beyond which should use
            # SetProcessDpiAwarenessContext(-2)
            elif win_version.Major == 10 and win_version.Build >= 15063:
                user32.SetProcessDpiAwarenessContext(-2)
            # Any other version of windows should use SetProcessDPIAware()
            else:
                user32.SetProcessDPIAware()

        self.native.EnableVisualStyles()
        self.native.SetCompatibleTextRenderingDefault(False)

        self.interface.commands.add(
            toga.Command(
                lambda _: self.interface.about(),
                'About {}'.format(self.interface.name),
                group=toga.Group.HELP
            ),
            toga.Command(None, 'Preferences', group=toga.Group.FILE),
            # Quit should always be the last item, in a section on its own
            toga.Command(
                lambda _: self.interface.exit(),
                'Exit ' + self.interface.name,
                shortcut=Key.MOD_1 + 'q',
                group=toga.Group.FILE,
                section=sys.maxsize
            ),
            toga.Command(
                lambda _: self.interface.visit_homepage(),
                'Visit homepage',
                enabled=self.interface.home_page is not None,
                group=toga.Group.HELP
            )
        )
        self._create_app_commands()

        # Call user code to populate the main window
        self.interface.startup()
        self.create_menus()
        self.interface.icon.bind(self.interface.factory)
        self.interface.main_window._impl.set_app(self)

    def create_menus(self):
        self._menu_items = {}
        self._menu_groups = {}

        toga.Group.FILE.order = 0
        menubar = WinForms.MenuStrip()
        submenu = None
        for cmd in self.interface.commands:
            if cmd == toga.GROUP_BREAK:
                submenu = None
            elif cmd == toga.SECTION_BREAK:
                submenu.DropDownItems.Add('-')
            else:
                submenu = self._submenu(cmd.group, menubar)
                item = WinForms.ToolStripMenuItem(cmd.label)
                if cmd.action:
                    item.Click += cmd._impl.as_handler()
                item.Enabled = cmd.enabled
                if cmd.shortcut is not None:
                    shortcut_keys = toga_to_winforms_key(cmd.shortcut)
                    item.ShortcutKeys = shortcut_keys
                    item.ShowShortcutKeys = True
                cmd._impl.native.append(item)
                self._menu_items[item] = cmd
                submenu.DropDownItems.Add(item)
        self.interface.main_window._impl.native.Controls.Add(menubar)
        self.interface.main_window._impl.native.MainMenuStrip = menubar
        self.interface.main_window.content.refresh()

    def _submenu(self, group, menubar):
        try:
            return self._menu_groups[group]
        except KeyError:
            if group is None:
                submenu = menubar
            else:
                parent_menu = self._submenu(group.parent, menubar)
                submenu = WinForms.ToolStripMenuItem(group.label)
                # Top level menus are added in a different way to submenus
                if group.parent is None:
                    parent_menu.Items.Add(submenu)
                else:
                    parent_menu.DropDownItems.Add(submenu)
            self._menu_groups[group] = submenu
        return submenu

    def _create_app_commands(self):
        # No extra menus
        pass

    def open_document(self, fileURL):
        '''Add a new document to this app.'''
        print("STUB: If you want to handle opening documents, implement App.open_document(fileURL)")

    def winforms_thread_exception(self, sender, winforms_exc):
        # The PythonException returned by Winforms doesn't give us
        # easy access to the underlying Python stacktrace; so we
        # reconstruct it from the string message.
        # The Python message is helpfully included in square brackets,
        # as the context for the first line in the .net stack trace.
        # So, look for the closing bracket and the start of the Python.net
        # stack trace. Then, reconstruct the line breaks internal to the
        # remaining string.
        print("Traceback (most recent call last):")
        py_exc = winforms_exc.get_Exception()
        full_stack_trace = py_exc.StackTrace
        regex = re.compile(
            r"^\[(?:'(.*?)', )*(?:'(.*?)')\] (?:.*?) Python\.Runtime",
            re.DOTALL | re.UNICODE
        )

        stacktrace_relevant_lines = regex.findall(full_stack_trace)
        if len(stacktrace_relevant_lines) == 0:
            self.print_stack_trace(full_stack_trace)
        else:
            for lines in stacktrace_relevant_lines:
                for line in lines:
                    self.print_stack_trace(line)
        print(py_exc.Message)

    @classmethod
    def print_stack_trace(cls, stack_trace_line):
        for level in stack_trace_line.split("', '"):
            for line in level.split("\\n"):
                if line:
                    print(line)

    def run_app(self):
        try:
            self.create()
            self.native.ThreadException += self.winforms_thread_exception
            self.loop.run_forever(self.app_context)
        except:  # NOQA
            traceback.print_exc()

    def main_loop(self):
        thread = Threading.Thread(Threading.ThreadStart(self.run_app))
        thread.SetApartmentState(Threading.ApartmentState.STA)
        thread.Start()
        thread.Join()

    def show_about_dialog(self):
        message_parts = []
        if self.interface.name is not None:
            if self.interface.version is not None:
                message_parts.append(
                    "{name} v{version}".format(
                        name=self.interface.name,
                        version=self.interface.version,
                    )
                )
            else:
                message_parts.append(
                    "{name}".format(name=self.interface.name)
                )
        elif self.interface.version is not None:
            message_parts.append(
                "v{version}".format(version=self.interface.version)
            )

        if self.interface.author is not None:
            message_parts.append(
                "Author: {author}".format(author=self.interface.author)
            )
        if self.interface.description is not None:
            message_parts.append(
                "\n{description}".format(
                    description=self.interface.description
                )
            )
        self.interface.main_window.info_dialog(
            'About {}'.format(self.interface.name), "\n".join(message_parts)
        )

    def exit(self):
        self._is_exiting = True
        self.native.Exit()

    def set_main_window(self, window):
        self.app_context.MainForm = window._impl.native

    def set_on_exit(self, value):
        pass

    def current_window(self):
        self.interface.factory.not_implemented('App.current_window()')

    def enter_full_screen(self, windows):
        self.interface.factory.not_implemented('App.enter_full_screen()')

    def exit_full_screen(self, windows):
        self.interface.factory.not_implemented('App.exit_full_screen()')

    def set_cursor(self, value):
        self.interface.factory.not_implemented('App.set_cursor()')

    def show_cursor(self):
        self.interface.factory.not_implemented('App.show_cursor()')

    def hide_cursor(self):
        self.interface.factory.not_implemented('App.hide_cursor()')

    def add_background_task(self, handler):
        self.loop.call_soon(handler, self)


class DocumentApp(App):
    def _create_app_commands(self):
        self.interface.commands.add(
            toga.Command(
                lambda w: self.open_file,
                label='Open...',
                shortcut=Key.MOD_1 + 'o',
                group=toga.Group.FILE,
                section=0
            ),
        )

    def open_document(self, fileURL):
        """Open a new document in this app.

        Args:
            fileURL (str): The URL/path to the file to add as a document.
        """
        self.interface.factory.not_implemented('DocumentApp.open_document()')
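The traceback-reconstruction regex in winforms_thread_exception can be exercised standalone; the sample message below is synthetic, but shaped like the bracketed Python.NET traces the regex targets:

import re

# Invented .NET-style message: a bracketed list of quoted Python traceback
# chunks, followed by Python.Runtime frames.
sample = (
    "['Traceback (most recent call last):\\n', "
    "'  File \"app.py\", line 10, in startup\\n'] "
    "at Python.Runtime.Dispatcher.Dispatch(...)"
)

regex = re.compile(
    r"^\[(?:'(.*?)', )*(?:'(.*?)')\] (?:.*?) Python\.Runtime",
    re.DOTALL | re.UNICODE
)
for lines in regex.findall(sample):
    for line in lines:
        # Mirror print_stack_trace: split on the literal "\n" markers.
        for chunk in line.split("\\n"):
            if chunk:
                print(chunk)
# Expected output:
#   Traceback (most recent call last):
#     File "app.py", line 10, in startup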
# -*- coding: utf-8 -*-
"""
/***************************************************************************
SimplePhotogrammetryRoutePlanner
A QGIS plugin
A simple photogrammetry route planner.
Generated by Plugin Builder: http://g-sherman.github.io/Qgis-Plugin-Builder/
-------------------
begin : 2021-04-24
copyright : (C) 2021 by Xiangyong Luo
email : solo_lxy@126.com
git sha : $Format:%H$
***************************************************************************/
/***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************/
This script initializes the plugin, making it known to QGIS.
"""
__version__ = "0.4.0"
# noinspection PyPep8Naming
def classFactory(iface):  # pylint: disable=invalid-name
    """Load SimplePhotogrammetryRoutePlanner class from file SimplePhotogrammetryRoutePlanner.

    :param iface: A QGIS interface instance.
    :type iface: QgsInterface
    """
    #
    from .SimplePhotogrammetryRoutePlanner import SimplePhotogrammetryRoutePlanner
    return SimplePhotogrammetryRoutePlanner(iface)
# -*- coding: utf-8 -*-
"""
Created on Tue Jul 24 14:38:20 2018
dimension reduction with VarianceThreshold using sklearn.
Feature selector that removes all low-variance features.
@author: lenovo
"""
from sklearn.feature_selection import VarianceThreshold
import numpy as np
#
np.random.seed(1)
X = np.random.randn(100, 10)
X = np.hstack([X, np.zeros([100, 5])])
#
def featureSelection_variance(X, thrd):
    sel = VarianceThreshold(threshold=thrd)
    X_selected = sel.fit_transform(X)
    mask = sel.get_support()
    return X_selected, mask
X = [[0, 2, 0, 3], [0, 1, 4, 3], [0, 1, 1, 3]]
selector = VarianceThreshold()
selector.fit_transform(X)
selector.variances_
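A quick check of featureSelection_variance on data like the synthetic matrix built above (ten informative columns plus five all-zero columns); the threshold 0.1 is an arbitrary small cut-off:

X_demo = np.hstack([np.random.randn(100, 10), np.zeros([100, 5])])
X_sel, mask = featureSelection_variance(X_demo, 0.1)
print(X_sel.shape)  # expected (100, 10): the five zero-variance columns are dropped
print(mask)         # boolean mask over the 15 input columns: True for kept columns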
#!/usr/bin/env python
# coding=utf-8
from my_multi_main3 import main
import numpy as np
import argparse
import time
parser = argparse.ArgumentParser(description='PyTorch MNIST Example')
parser.add_argument('--batch-size', type=int, default=64, metavar='N',
                    help='input batch size for training (default: 64)')
parser.add_argument('--test-batch-size', type=int, default=1000, metavar='N',
                    help='input batch size for testing (default: 1000)')
parser.add_argument('--epochs', type=int, default=10, metavar='N',
                    help='number of epochs to train (default: 10)')
parser.add_argument('--lr', type=float, default=0.01, metavar='LR',
                    help='learning rate (default: 0.01)')
parser.add_argument('--momentum', type=float, default=0.5, metavar='M',
                    help='SGD momentum (default: 0.5)')
parser.add_argument('--no-cuda', action='store_true', default=False,
                    help='disables CUDA training')
parser.add_argument('--seed', type=int, default=1, metavar='S',
                    help='random seed (default: 1)')
parser.add_argument('--log-interval', type=int, default=10, metavar='N',
                    help='how many batches to wait before logging training status')
parser.add_argument('--save-model', action='store_true', default=False,
                    help='For Saving the current Model')
parser.add_argument('--norm-flag', type=bool, default=False,
                    help='Triggering the Layer Normalization flag for attention scores')
parser.add_argument('--gamma', type=float, default=None,
                    help='Controlling the sparsity of gfusedmax/sparsemax, the smaller, the more sparse')
parser.add_argument('--lam', type=float, default=1.0,
                    help='Lambda: Controlling the smoothness of gfusedmax, the larger, the smoother')
parser.add_argument('--max-type', type=str, default='softmax', choices=['softmax', 'sparsemax', 'gfusedmax'],
                    help='mapping function in attention')
parser.add_argument('--optim-type', type=str, default='SGD', choices=['SGD', 'Adam'],
                    help='optimizer type')
parser.add_argument('--head-cnt', type=int, default=2, metavar='S', choices=[1, 2, 4, 5, 10],
                    help='Number of heads for attention (default: 2)')
args = parser.parse_args()

hyperparameter_choices = {
    'lr': list(10**np.arange(-4, -1, 0.5)),
    'norm_flag': [True, False],
    'gamma': list(10**np.arange(-1, 3, 0.5)) + [None, ],
    'lam': list(10**np.arange(-2, 2, 0.5)),
    'max_type': ['softmax', 'sparsemax', 'gfusedmax'],
    # 'max_type':['sparsemax'],
    'optim_type': ['SGD', 'Adam'],
    'head_cnt': [1, 2, 4, 5, 10, 20]
}

param_num = 25
record = np.zeros([param_num, len(hyperparameter_choices) + 1])
record_name = 'record3_multi_%s.csv' % time.strftime('%Y-%m-%d_%H-%M-%S', time.localtime())
for n in range(param_num):
    for param_index, (k, v) in enumerate(hyperparameter_choices.items()):
        print(param_index, k)
        value_index = np.random.choice(len(v))
        if isinstance(v[value_index], str) or isinstance(v[value_index], bool) or v[value_index] is None:
            record[n, param_index] = value_index
        else:
            record[n, param_index] = v[value_index]
        setattr(args, k, v[value_index])
    record[n, -1] = main(args)
    np.savetxt(record_name, record, delimiter=',')
"""HDF5 related files.
This file contains a set of functions that related to read and write
HDF5 files.
Author: Yuhuang Hu
Email : duguyue100@gmail.com
"""
from __future__ import print_function, absolute_import
import h5py
from spiker import log
logger = log.get_logger("data-hdf5", log.DEBUG)
def init_hdf5(file_path, mode="w", cam_type="davis"):
    """Init HDF5 file object.

    # Parameters
    file_path : str
        absolute path for the HDF5 file.
    mode : str
        w : for writing
        r : for reading
    cam_type : str
        davis : for DAVIS camera
        dvs : for DVS camera

    # Returns
    dataset : h5py.File
        The file object of the given dataset
    """
    if mode == "w":
        dataset = h5py.File(file_path, mode=mode)
        dataset.create_group("dvs")
        dataset.create_group("extra")
        if cam_type == "davis":
            dataset.create_group("aps")
            dataset.create_group("imu")
    elif mode == "r":
        dataset = h5py.File(file_path, mode=mode)
    return dataset
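A round-trip sketch of init_hdf5 (the temp-file path is illustrative; assumes h5py is installed):

import os
import tempfile

path = os.path.join(tempfile.gettempdir(), "demo_recording.hdf5")

# Create a DAVIS-style container: dvs/extra plus aps/imu groups.
f = init_hdf5(path, mode="w", cam_type="davis")
print(sorted(f.keys()))   # expected: ['aps', 'dvs', 'extra', 'imu']
f.close()

# Reopen the same file read-only.
f = init_hdf5(path, mode="r")
print(f["dvs"])
f.close()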
# automatically generated by the FlatBuffers compiler, do not modify
# namespace: flatbuf
import flatbuffers
class FloatingPoint(object):
    __slots__ = ['_tab']

    @classmethod
    def GetRootAsFloatingPoint(cls, buf, offset):
        n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset)
        x = FloatingPoint()
        x.Init(buf, n + offset)
        return x

    # FloatingPoint
    def Init(self, buf, pos):
        self._tab = flatbuffers.table.Table(buf, pos)

    # FloatingPoint
    def Precision(self):
        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4))
        if o != 0:
            return self._tab.Get(flatbuffers.number_types.Int16Flags, o + self._tab.Pos)
        return 0

def FloatingPointStart(builder): builder.StartObject(1)
def FloatingPointAddPrecision(builder, precision): builder.PrependInt16Slot(0, precision, 0)
def FloatingPointEnd(builder): return builder.EndObject()
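A round-trip sketch with the generated helpers, assuming the flatbuffers runtime package is installed; the precision value 2 is an arbitrary stand-in for one of the schema's enum values:

import flatbuffers

builder = flatbuffers.Builder(0)
FloatingPointStart(builder)
FloatingPointAddPrecision(builder, 2)  # hypothetical precision enum value
fp = FloatingPointEnd(builder)
builder.Finish(fp)

# Decode the serialized buffer back into an accessor object.
buf = builder.Output()
decoded = FloatingPoint.GetRootAsFloatingPoint(buf, 0)
print(decoded.Precision())  # expected: 2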
"""[Scynced Lights]
Class attributes are "shared"
Instance attributes are not shared.
"""
def sub(x, y):
f
class Light:
pass
a = Light()
b = Ligth()
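The docstring's point about shared class attributes versus per-instance attributes can be demonstrated directly (attribute names are illustrative):

class SyncedLight:
    color = 'red'              # class attribute: shared by every instance

p = SyncedLight()
q = SyncedLight()
SyncedLight.color = 'green'
print(p.color, q.color)        # green green -- the change is visible everywhere

p.brightness = 0.5             # instance attribute: belongs to p alone
print(hasattr(q, 'brightness'))  # False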
#!/usr/bin/env python
# -*- encoding: utf-8 -*-
# vim: set et sw=4 ts=4 sts=4 ff=unix fenc=utf8:
# Author: Binux<i@binux.me>
# http://binux.me
# Created on 2012-11-14 17:09:50
from __future__ import unicode_literals, division, absolute_import
import time
import logging
from collections import deque
try:
    from UserDict import DictMixin
except ImportError:
    try:
        from collections.abc import Mapping as DictMixin  # Python 3.3+
    except ImportError:
        from collections import Mapping as DictMixin
import six
from six import iteritems
from six.moves import cPickle


class BaseCounter(object):
    def __init__(self):
        raise NotImplementedError

    def event(self, value=1):
        """Fire a event."""
        raise NotImplementedError

    def value(self, value):
        """Set counter value."""
        raise NotImplementedError

    @property
    def avg(self):
        """Get average value"""
        raise NotImplementedError

    @property
    def sum(self):
        """Get sum of counter"""
        raise NotImplementedError

    def empty(self):
        """Clear counter"""
        raise NotImplementedError


class TotalCounter(BaseCounter):
    """Total counter"""

    def __init__(self):
        self.cnt = 0

    def event(self, value=1):
        self.cnt += value

    def value(self, value):
        self.cnt = value

    @property
    def avg(self):
        return self.cnt

    @property
    def sum(self):
        return self.cnt

    def empty(self):
        return self.cnt == 0


class AverageWindowCounter(BaseCounter):
    """
    Record last N(window) value
    """

    def __init__(self, window_size=300):
        self.window_size = window_size
        self.values = deque(maxlen=window_size)

    def event(self, value=1):
        self.values.append(value)

    value = event

    @property
    def avg(self):
        return self.sum / len(self.values)

    @property
    def sum(self):
        return sum(self.values)

    def empty(self):
        if not self.values:
            return True


class TimebaseAverageWindowCounter(BaseCounter):
    """
    Record last window_size * window_interval seconds values.

    records will trim every window_interval seconds
    """

    def __init__(self, window_size=30, window_interval=10):
        self.max_window_size = window_size
        self.window_size = 0
        self.window_interval = window_interval
        self.values = deque(maxlen=window_size)
        self.times = deque(maxlen=window_size)

        self.cache_value = 0
        self.cache_start = None
        self._first_data_time = None

    def event(self, value=1):
        now = time.time()
        if self._first_data_time is None:
            self._first_data_time = now

        if self.cache_start is None:
            self.cache_value = value
            self.cache_start = now
        elif now - self.cache_start > self.window_interval:
            self.values.append(self.cache_value)
            self.times.append(self.cache_start)
            self.on_append(self.cache_value, self.cache_start)
            self.cache_value = value
            self.cache_start = now
        else:
            self.cache_value += value
        return self

    def value(self, value):
        self.cache_value = value

    def _trim_window(self):
        now = time.time()
        if self.cache_start and now - self.cache_start > self.window_interval:
            self.values.append(self.cache_value)
            self.times.append(self.cache_start)
            self.on_append(self.cache_value, self.cache_start)
            self.cache_value = 0
            self.cache_start = None

        if self.window_size != self.max_window_size and self._first_data_time is not None:
            time_passed = now - self._first_data_time
            self.window_size = min(self.max_window_size, time_passed / self.window_interval)
        window_limit = now - self.window_size * self.window_interval
        while self.times and self.times[0] < window_limit:
            self.times.popleft()
            self.values.popleft()

    @property
    def avg(self):
        sum = float(self.sum)
        if not self.window_size:
            return 0
        return sum / self.window_size / self.window_interval

    @property
    def sum(self):
        self._trim_window()
        return sum(self.values) + self.cache_value

    def empty(self):
        self._trim_window()
        if not self.values and not self.cache_start:
            return True

    def on_append(self, value, time):
        pass


class CounterValue(DictMixin):
    """
    A dict like value item for CounterManager.
    """

    def __init__(self, manager, keys):
        self.manager = manager
        self._keys = keys

    def __getitem__(self, key):
        if key == '__value__':
            key = self._keys
            return self.manager.counters[key]
        else:
            key = self._keys + (key, )

        available_keys = []
        for _key in self.manager.counters:
            if _key[:len(key)] == key:
                available_keys.append(_key)

        if len(available_keys) == 0:
            raise KeyError
        elif len(available_keys) == 1:
            if available_keys[0] == key:
                return self.manager.counters[key]
            else:
                return CounterValue(self.manager, key)
        else:
            return CounterValue(self.manager, key)

    def __len__(self):
        return len(self.keys())

    def __iter__(self):
        return iter(self.keys())

    def __contains__(self, key):
        return key in self.keys()

    def keys(self):
        result = set()
        for key in self.manager.counters:
            if key[:len(self._keys)] == self._keys:
                key = key[len(self._keys):]
                result.add(key[0] if key else '__value__')
        return result

    def to_dict(self, get_value=None):
        """Dump counters as a dict"""
        result = {}
        for key, value in iteritems(self):
            if isinstance(value, BaseCounter):
                if get_value is not None:
                    value = getattr(value, get_value)
                result[key] = value
            else:
                result[key] = value.to_dict(get_value)
        return result


class CounterManager(DictMixin):
    """
    A dict like counter manager.

    When using a tuple as event key, say: ('foo', 'bar'), You can visit counter
    with manager['foo']['bar']. Or get all counters which first element is 'foo'
    by manager['foo'].

    It's useful for a group of counters.
    """

    def __init__(self, cls=TimebaseAverageWindowCounter):
        """init manager with Counter cls"""
        self.cls = cls
        self.counters = {}

    def event(self, key, value=1):
        """Fire a event of a counter by counter key"""
        if isinstance(key, six.string_types):
            key = (key, )
        assert isinstance(key, tuple), "event key type error"
        if key not in self.counters:
            self.counters[key] = self.cls()
        self.counters[key].event(value)
        return self

    def value(self, key, value=1):
        """Set value of a counter by counter key"""
        if isinstance(key, six.string_types):
            key = (key, )
        assert isinstance(key, tuple), "event key type error"
        if key not in self.counters:
            self.counters[key] = self.cls()
        self.counters[key].value(value)
        return self

    def trim(self):
        """Clear not used counters"""
        for key, value in list(iteritems(self.counters)):
            if value.empty():
                del self.counters[key]

    def __getitem__(self, key):
        key = (key, )
        available_keys = []
        for _key in self.counters:
            if _key[:len(key)] == key:
                available_keys.append(_key)

        if len(available_keys) == 0:
            raise KeyError
        elif len(available_keys) == 1:
            if available_keys[0] == key:
                return self.counters[key]
            else:
                return CounterValue(self, key)
        else:
            return CounterValue(self, key)

    def __iter__(self):
        return iter(self.keys())

    def __len__(self):
        return len(self.keys())

    def keys(self):
        result = set()
        for key in self.counters:
            result.add(key[0] if key else ())
        return result

    def to_dict(self, get_value=None):
        """Dump counters as a dict"""
        self.trim()
        result = {}
        for key, value in iteritems(self):
            if isinstance(value, BaseCounter):
                if get_value is not None:
                    value = getattr(value, get_value)
                result[key] = value
            else:
                result[key] = value.to_dict(get_value)
        return result

    def dump(self, filename):
        """Dump counters to file"""
        try:
            with open(filename, 'wb') as fp:
                cPickle.dump(self.counters, fp)
        except Exception:
            logging.error("can't dump counter to file: %s" % filename)
            return False
        return True

    def load(self, filename):
        """Load counters from file"""
        try:
            with open(filename, 'rb') as fp:
                self.counters = cPickle.load(fp)
        except Exception:
            logging.debug("can't load counter from file: %s" % filename)
            return False
        return True
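A minimal usage sketch of CounterManager, using TotalCounter so the numbers are deterministic (assumes the classes above are importable):

manager = CounterManager(cls=TotalCounter)
manager.event(('fetch', 'success'), 3)
manager.event(('fetch', 'error'), 1)
manager.event(('fetch', 'success'))

# Tuple keys are browsable level by level, dict-style.
print(manager['fetch']['success'].sum)   # 4
print(manager.to_dict(get_value='sum'))  # e.g. {'fetch': {'success': 4, 'error': 1}}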
# coding=utf-8
# Copyright 2021 The HuggingFace Inc. team. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Feature extractor class for ViT."""
from typing import List, Optional, Union
import numpy as np
from PIL import Image
from ...feature_extraction_utils import BatchFeature, FeatureExtractionMixin
from ...file_utils import TensorType
from ...image_utils import IMAGENET_STANDARD_MEAN, IMAGENET_STANDARD_STD, ImageFeatureExtractionMixin, is_torch_tensor
from ...utils import logging
logger = logging.get_logger(__name__)
class ViTFeatureExtractor(FeatureExtractionMixin, ImageFeatureExtractionMixin):
    r"""
    Constructs a ViT feature extractor.

    This feature extractor inherits from :class:`~transformers.FeatureExtractionMixin` which contains most of the main
    methods. Users should refer to this superclass for more information regarding those methods.

    Args:
        do_resize (:obj:`bool`, `optional`, defaults to :obj:`True`):
            Whether to resize the input to a certain :obj:`size`.
        size (:obj:`int` or :obj:`Tuple(int)`, `optional`, defaults to 224):
            Resize the input to the given size. If a tuple is provided, it should be (width, height). If only an
            integer is provided, then the input will be resized to (size, size). Only has an effect if :obj:`do_resize`
            is set to :obj:`True`.
        resample (:obj:`int`, `optional`, defaults to :obj:`PIL.Image.BILINEAR`):
            An optional resampling filter. This can be one of :obj:`PIL.Image.NEAREST`, :obj:`PIL.Image.BOX`,
            :obj:`PIL.Image.BILINEAR`, :obj:`PIL.Image.HAMMING`, :obj:`PIL.Image.BICUBIC` or :obj:`PIL.Image.LANCZOS`.
            Only has an effect if :obj:`do_resize` is set to :obj:`True`.
        do_normalize (:obj:`bool`, `optional`, defaults to :obj:`True`):
            Whether or not to normalize the input with mean and standard deviation.
        image_mean (:obj:`List[int]`, defaults to :obj:`[0.5, 0.5, 0.5]`):
            The sequence of means for each channel, to be used when normalizing images.
        image_std (:obj:`List[int]`, defaults to :obj:`[0.5, 0.5, 0.5]`):
            The sequence of standard deviations for each channel, to be used when normalizing images.
    """

    model_input_names = ["pixel_values"]

    def __init__(
        self,
        do_resize=True,
        size=224,
        resample=Image.BILINEAR,
        do_normalize=True,
        image_mean=None,
        image_std=None,
        **kwargs
    ):
        super().__init__(**kwargs)
        self.do_resize = do_resize
        self.size = size
        self.resample = resample
        self.do_normalize = do_normalize
        self.image_mean = image_mean if image_mean is not None else IMAGENET_STANDARD_MEAN
        self.image_std = image_std if image_std is not None else IMAGENET_STANDARD_STD

    def __call__(
        self,
        images: Union[
            Image.Image, np.ndarray, "torch.Tensor", List[Image.Image], List[np.ndarray], List["torch.Tensor"]  # noqa
        ],
        return_tensors: Optional[Union[str, TensorType]] = None,
        **kwargs
    ) -> BatchFeature:
        """
        Main method to prepare for the model one or several image(s).

        .. warning::

           NumPy arrays and PyTorch tensors are converted to PIL images when resizing, so the most efficient is to pass
           PIL images.

        Args:
            images (:obj:`PIL.Image.Image`, :obj:`np.ndarray`, :obj:`torch.Tensor`, :obj:`List[PIL.Image.Image]`, :obj:`List[np.ndarray]`, :obj:`List[torch.Tensor]`):
                The image or batch of images to be prepared. Each image can be a PIL image, NumPy array or PyTorch
                tensor. In case of a NumPy array/PyTorch tensor, each image should be of shape (C, H, W), where C is a
                number of channels, H and W are image height and width.
            return_tensors (:obj:`str` or :class:`~transformers.file_utils.TensorType`, `optional`, defaults to :obj:`'np'`):
                If set, will return tensors of a particular framework. Acceptable values are:

                * :obj:`'tf'`: Return TensorFlow :obj:`tf.constant` objects.
                * :obj:`'pt'`: Return PyTorch :obj:`torch.Tensor` objects.
                * :obj:`'np'`: Return NumPy :obj:`np.ndarray` objects.
                * :obj:`'jax'`: Return JAX :obj:`jnp.ndarray` objects.

        Returns:
            :class:`~transformers.BatchFeature`: A :class:`~transformers.BatchFeature` with the following fields:

            - **pixel_values** -- Pixel values to be fed to a model, of shape (batch_size, num_channels, height,
              width).
        """
        # Input type checking for clearer error
        valid_images = False

        # Check that images has a valid type
        if isinstance(images, (Image.Image, np.ndarray)) or is_torch_tensor(images):
            valid_images = True
        elif isinstance(images, (list, tuple)):
            if len(images) == 0 or isinstance(images[0], (Image.Image, np.ndarray)) or is_torch_tensor(images[0]):
                valid_images = True

        if not valid_images:
            raise ValueError(
                "Images must be of type `PIL.Image.Image`, `np.ndarray` or `torch.Tensor` (single example),"
                "`List[PIL.Image.Image]`, `List[np.ndarray]` or `List[torch.Tensor]` (batch of examples)."
            )

        is_batched = bool(
            isinstance(images, (list, tuple))
            and (isinstance(images[0], (Image.Image, np.ndarray)) or is_torch_tensor(images[0]))
        )

        if not is_batched:
            images = [images]

        # transformations (resizing + normalization)
        if self.do_resize and self.size is not None:
            images = [self.resize(image=image, size=self.size, resample=self.resample) for image in images]
        if self.do_normalize:
            images = [self.normalize(image=image, mean=self.image_mean, std=self.image_std) for image in images]

        # return as BatchFeature
        data = {"pixel_values": images}
        encoded_inputs = BatchFeature(data=data, tensor_type=return_tensors)

        return encoded_inputs
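A usage sketch, assuming a transformers version that ships this class (plus PIL and numpy); the input image here is synthetic rather than a real photo:

import numpy as np
from PIL import Image

feature_extractor = ViTFeatureExtractor(size=224)

# Synthetic 3-channel image standing in for a real picture.
image = Image.fromarray(np.random.randint(0, 255, (300, 400, 3), dtype=np.uint8))

inputs = feature_extractor(images=image, return_tensors="np")
print(inputs["pixel_values"].shape)  # expected: (1, 3, 224, 224)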
#!/usr/bin/env python3
UNKNOWN = -1
def read_val():
    return int(input())


def read_row():
    return list(map(int, input().split()))


def read_grid():
    return [read_row() for _ in range(read_val())]


def make_blank_row(i):
    return [UNKNOWN] * i


def make_blank_grid(n):
    return [make_blank_row(i) for i in range(1, n + 1)]


def compute_max_path_sum(grid):
    memo = make_blank_grid(len(grid))

    def dfs(i, j):
        if i == len(grid):
            return 0
        if memo[i][j] == UNKNOWN:
            memo[i][j] = grid[i][j] + max(dfs(i + 1, j), dfs(i + 1, j + 1))
        return memo[i][j]

    return dfs(0, 0)


for t in range(read_val()):
    print(compute_max_path_sum(read_grid()))
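A worked example of compute_max_path_sum on a literal triangle, bypassing the stdin readers:

# Triangle:
#       1
#      2 3
#     4 1 6
# Best root-to-base path: 1 -> 3 -> 6 = 10.
triangle = [[1], [2, 3], [4, 1, 6]]
assert compute_max_path_sum(triangle) == 10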
import platform
# print(platform.system())
operating_system = platform.system().lower()
if operating_system == 'darwin':
    from .blender_utils_macos import get_installed_blender_versions
    operating_system_name = 'macos'
elif operating_system == 'linux':
    from .blender_utils_linux import get_installed_blender_versions
    operating_system_name = 'linux'
elif operating_system == 'windows':
    from .blender_utils_windows import get_installed_blender_versions
    operating_system_name = 'windows'
else:
    raise Exception("Unimplemented for OS {}".format(operating_system))

from .blender_utils_web import get_blender_version_download_links


def find_blender(version):
    # TODO: add fuzzy version matching, ie. '>=2.80', '~2.80', '<2.80', etc.
    installed_versions = get_installed_blender_versions()
    if version in installed_versions:
        return installed_versions[version]
    else:
        print("blender version '{}' not found; found {} version(s):".format(version, len(installed_versions)))
        for v, path in installed_versions.items():
            print(" {}: {}".format(v, path))
        print("searching web archive...")
        versions = get_blender_version_download_links(version, operating_system_name)
        print("found {} download(s) for blender version '{}', platform '{}':".format(len(versions), version, operating_system_name))
        for url in versions:
            print(" {}".format(url))


if __name__ == '__main__':
    for version, exec_path in get_installed_blender_versions().items():
        print("found blender {version}: {path}".format(version=version,
                                                       path=exec_path))
    blender = find_blender('2.80')
    if blender:
        print("Found blender: '{}'".format(blender))
    else:
        print("No matching blender version installed :(")
import functools
import random
from math import cos, pi
import cv2
import kornia
import numpy as np
import torch
from kornia.augmentation import ColorJitter
from data.util import read_img
from PIL import Image
from io import BytesIO
# Get a rough visualization of the above distribution. (Y-axis is meaningless, just spreads data)
from utils.util import opt_get
'''
if __name__ == '__main__':
import numpy as np
import matplotlib.pyplot as plt
data = np.asarray([get_rand() for _ in range(5000)])
plt.plot(data, np.random.uniform(size=(5000,)), 'x')
plt.show()
'''
def kornia_color_jitter_numpy(img, setting):
    if setting * 255 > 1:
        # I'm using Kornia's ColorJitter, which requires pytorch arrays in b,c,h,w format.
        img = torch.from_numpy(img).permute(2, 0, 1).unsqueeze(0)
        img = ColorJitter(setting, setting, setting, setting)(img)
        img = img.squeeze(0).permute(1, 2, 0).numpy()
    return img


# Performs image corruption on a list of images from a configurable set of corruption
# options.
class ImageCorruptor:
    def __init__(self, opt):
        self.opt = opt
        self.reset_random()
        self.blur_scale = opt['corruption_blur_scale'] if 'corruption_blur_scale' in opt.keys() else 1
        self.fixed_corruptions = opt['fixed_corruptions'] if 'fixed_corruptions' in opt.keys() else []
        self.num_corrupts = opt['num_corrupts_per_image'] if 'num_corrupts_per_image' in opt.keys() else 0
        self.cosine_bias = opt_get(opt, ['cosine_bias'], True)
        if self.num_corrupts == 0:
            return
        else:
            self.random_corruptions = opt['random_corruptions'] if 'random_corruptions' in opt.keys() else []

    def reset_random(self):
        if 'random_seed' in self.opt.keys():
            self.rand = random.Random(self.opt['random_seed'])
        else:
            self.rand = random.Random()

    # Feeds a random uniform through a cosine distribution to slightly bias corruptions towards "uncorrupted".
    # Return is on [0,1] with a bias towards 0.
    def get_rand(self):
        r = self.rand.random()
        if self.cosine_bias:
            return 1 - cos(r * pi / 2)
        else:
            return r

    def corrupt_images(self, imgs, return_entropy=False):
        if self.num_corrupts == 0 and not self.fixed_corruptions:
            if return_entropy:
                return imgs, []
            else:
                return imgs

        if self.num_corrupts == 0:
            augmentations = []
        else:
            augmentations = random.choices(self.random_corruptions, k=self.num_corrupts)

        # Sources of entropy
        corrupted_imgs = []
        entropy = []
        undo_fns = []
        applied_augs = augmentations + self.fixed_corruptions
        for img in imgs:
            for aug in augmentations:
                r = self.get_rand()
                img, undo_fn = self.apply_corruption(img, aug, r, applied_augs)
                if undo_fn is not None:
                    undo_fns.append(undo_fn)
            for aug in self.fixed_corruptions:
                r = self.get_rand()
                img, undo_fn = self.apply_corruption(img, aug, r, applied_augs)
                entropy.append(r)
                if undo_fn is not None:
                    undo_fns.append(undo_fn)
            # Apply undo_fns after all corruptions are finished, in same order.
            for ufn in undo_fns:
                img = ufn(img)
            corrupted_imgs.append(img)

        if return_entropy:
            return corrupted_imgs, entropy
        else:
            return corrupted_imgs

    def apply_corruption(self, img, aug, rand_val, applied_augmentations):
        undo_fn = None
        if 'color_quantization' in aug:
            # Color quantization
            quant_div = 2 ** (int(rand_val * 10 / 3) + 2)
            img = img * 255
            img = (img // quant_div) * quant_div
            img = img / 255
        elif 'color_jitter' in aug:
            lo_end = 0
            hi_end = .2
            setting = rand_val * (hi_end - lo_end) + lo_end
            img = kornia_color_jitter_numpy(img, setting)
        elif 'gaussian_blur' in aug:
            img = cv2.GaussianBlur(img, (0, 0), self.blur_scale * rand_val * 1.5)
        elif 'motion_blur' in aug:
            # Motion blur; the kernel size must be an int.
            intensity = int(self.blur_scale * rand_val * 3 + 1)
            angle = random.randint(0, 360)
            k = np.zeros((intensity, intensity), dtype=np.float32)
            k[(intensity - 1) // 2, :] = np.ones(intensity, dtype=np.float32)
            k = cv2.warpAffine(k, cv2.getRotationMatrix2D((intensity / 2 - 0.5, intensity / 2 - 0.5), angle, 1.0),
                               (intensity, intensity))
            k = k * (1.0 / np.sum(k))
            img = cv2.filter2D(img, -1, k)
        elif 'block_noise' in aug:
            # Large distortion blocks in part of an img, such as is used to mask out a face.
            pass
        elif 'lq_resampling' in aug:
            # Random mode interpolation HR->LR->HR
            if 'lq_resampling4x' == aug:
                scale = 4
            else:
                if rand_val < .3:
                    scale = 1
                elif rand_val < .7:
                    scale = 2
                else:
                    scale = 4
            if scale > 1:
                interpolation_modes = [cv2.INTER_NEAREST, cv2.INTER_CUBIC, cv2.INTER_LINEAR, cv2.INTER_LANCZOS4]
                mode = random.randint(0, 4) % len(interpolation_modes)
                # Downsample first, then upsample using the random mode.
                img = cv2.resize(img, dsize=(img.shape[1] // scale, img.shape[0] // scale), interpolation=mode)

                def lq_resampling_undo_fn(scale, img):
                    return cv2.resize(img, dsize=(img.shape[1] * scale, img.shape[0] * scale), interpolation=cv2.INTER_LINEAR)
                undo_fn = functools.partial(lq_resampling_undo_fn, scale)
        elif 'color_shift' in aug:
            # Color shift
            pass
        elif 'interlacing' in aug:
            # Interlacing distortion
            pass
        elif 'chromatic_aberration' in aug:
            # Chromatic aberration
            pass
        elif 'noise' in aug:
            # Random noise
            if 'noise-5' == aug:
                noise_intensity = 5 / 255.0
            else:
                noise_intensity = (rand_val * 6) / 255.0
            img += np.random.rand(*img.shape) * noise_intensity
        elif 'jpeg' in aug:
            if 'noise' not in applied_augmentations and 'noise-5' not in applied_augmentations:
                if aug == 'jpeg':
                    lo = 10
                    range = 20
                elif aug == 'jpeg-low':
                    lo = 15
                    range = 10
                elif aug == 'jpeg-medium':
                    lo = 23
                    range = 25
                elif aug == 'jpeg-broad':
                    lo = 15
                    range = 60
                elif aug == 'jpeg-normal':
                    lo = 47
                    range = 35
                else:
                    raise NotImplementedError("specified jpeg corruption doesn't exist")
                # JPEG compression
                qf = (int((1 - rand_val) * range) + lo)
                # Use PIL to perform a mock compression to a data buffer, then swap back to cv2.
                img = (img * 255).astype(np.uint8)
                img = Image.fromarray(img)
                buffer = BytesIO()
                img.save(buffer, "JPEG", quality=qf, optimize=True)
                buffer.seek(0)
                jpeg_img_bytes = np.asarray(bytearray(buffer.read()), dtype="uint8")
                img = read_img("buffer", jpeg_img_bytes, rgb=True)
        elif 'saturation' in aug:
            # Lightening / saturation
            saturation = rand_val * .3
            img = np.clip(img + saturation, a_max=1, a_min=0)
        elif 'greyscale' in aug:
            img = np.tile(np.mean(img, axis=2, keepdims=True), [1, 1, 3])
        elif 'none' not in aug:
            raise NotImplementedError("Augmentation doesn't exist")
        return img, undo_fn
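A minimal usage sketch; the option keys follow __init__ above, and the JPEG path relies on this repository's read_img helper (plus torch/kornia/cv2), so this is illustrative rather than standalone:

corruptor = ImageCorruptor({
    'random_seed': 42,
    'fixed_corruptions': ['jpeg-medium', 'gaussian_blur'],
    'num_corrupts_per_image': 0,
})

# Inputs are float HWC arrays in [0, 1].
img = np.random.rand(64, 64, 3)
(lq,) = corruptor.corrupt_images([img])
print(lq.shape, lq.dtype)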
# This test requires CPython3.5
print(b"%%" % ())
print(b"=%d=" % 1)
print(b"=%d=%d=" % (1, 2))
print(b"=%s=" % b"str")
print(b"=%r=" % b"str")
print("PASS") | [] |
#
# test_JpegCompression.py
#
import pytest
import albumentations as A
from .context import TfDataAugmentation as Tfda
from . import test_utils
from .test_utils import TestResult
@pytest.mark.parametrize(
    "quality_lower, quality_upper, expected, message", [
        # quality_lower
        (-1, 100, TestResult.Error,
         "quality_lower < min => Error"),
        (0, 100, TestResult.OK,
         "quality_lower == min => OK"),
        (100, 100, TestResult.OK,
         "quality_lower == max => OK"),
        (101, 100, TestResult.Error,
         "quality_lower > max => Error"),
        # quality_upper
        (0, -1, TestResult.Error,
         "quality_upper < min => Error"),
        (0, 0, TestResult.OK,
         "quality_upper == min => OK"),
        (0, 100, TestResult.OK,
         "quality_upper == max => OK"),
        (0, 101, TestResult.Error,
         "quality_upper > max => Error"),
        # Relation
        (50, 50, TestResult.OK,
         "quality_lower == quality_upper => OK"),
        (51, 50, TestResult.Error,
         "quality_lower > quality_upper => Error"),
    ])
def test_hue_shift_limit_value(
        quality_lower, quality_upper, expected, message):
    try:
        Tfda.JpegCompression(
            quality_lower=quality_lower,
            quality_upper=quality_upper)
        actual = TestResult.OK
    except ValueError:
        actual = TestResult.Error
    assert expected == actual, message


def test_call():
    quality_lower = 50
    quality_upper = 100
    tgt_jpeg = Tfda.JpegCompression(
        quality_lower=quality_lower,
        quality_upper=quality_upper,
        p=1.0)
    tgt_transform = \
        test_utils.make_tgt_transform(tgt_jpeg)
    image = test_utils.make_test_image()

    tgt_result = tgt_transform(image=image)
    actual_image = tgt_result['image']

    image_np = image.numpy()
    quality = float(tgt_jpeg.get_param('quality'))
    expected_image = A.image_compression(
        image_np, quality, image_type='.jpg')
    test_utils.partial_assert_array(
        expected_image, actual_image, 0.6, "image", eps=0.1)
import os
from torch.utils.data import DataLoader
from continuum.datasets import CIFAR10, InMemoryDataset
from continuum.datasets import MNIST
import torchvision
from continuum.scenarios import TransformationIncremental
import pytest
import numpy as np
from continuum.transforms.bg_swap import BackgroundSwap
DATA_PATH = os.environ.get("CONTINUUM_DATA_PATH")
# Uncomment for debugging via image output
# import matplotlib.pyplot as plt
def test_bg_swap_fast():
    """
    Fast test for background swap.
    """
    bg_x = np.ones(shape=[2, 5, 5, 3]) * -1
    bg_y = np.random.rand(2)

    fg = np.random.normal(loc=.5, scale=.1, size=[5, 5])

    bg = InMemoryDataset(bg_x, bg_y)
    bg_swap = BackgroundSwap(bg, input_dim=(5, 5), normalize_bg=None)

    spliced_1_channel = bg_swap(fg)[:, :, 0]

    assert np.array_equal((spliced_1_channel <= -1), (fg <= .5))


@pytest.mark.slow
def test_background_swap_numpy():
    """
    Test background swap on a single ndarray input.
    """
    mnist = MNIST(DATA_PATH, download=True, train=True)
    cifar = CIFAR10(DATA_PATH, download=True, train=True)

    bg_swap = BackgroundSwap(cifar, input_dim=(28, 28))

    im = mnist.get_data()[0][0]
    im = bg_swap(im)

    # Uncomment for debugging
    # plt.imshow(im, interpolation='nearest')
    # plt.show()


@pytest.mark.slow
def test_background_swap_torch():
    """
    Test background swap on a single tensor input.
    """
    cifar = CIFAR10(DATA_PATH, download=True, train=True)

    mnist = torchvision.datasets.MNIST(DATA_PATH, train=True, download=True,
                                       transform=torchvision.transforms.Compose([
                                           torchvision.transforms.ToTensor()
                                       ]))

    bg_swap = BackgroundSwap(cifar, input_dim=(28, 28))
    im = mnist[0][0]
    im = bg_swap(im)

    # Uncomment for debugging
    # plt.imshow(im.permute(1, 2, 0), interpolation='nearest')
    # plt.show()


@pytest.mark.slow
def test_background_tranformation():
    """
    Example code using TransformationIncremental to create a setting with 3 tasks.
    """
    cifar = CIFAR10(DATA_PATH, train=True)
    mnist = MNIST(DATA_PATH, download=False, train=True)
    nb_task = 3
    list_trsf = []
    for i in range(nb_task):
        list_trsf.append([torchvision.transforms.ToTensor(), BackgroundSwap(cifar, bg_label=i, input_dim=(28, 28)),
                          torchvision.transforms.ToPILImage()])
    scenario = TransformationIncremental(mnist, base_transformations=[torchvision.transforms.ToTensor()],
                                         incremental_transformations=list_trsf)
    folder = "tests/samples/background_trsf/"
    if not os.path.exists(folder):
        os.makedirs(folder)
    for task_id, task_data in enumerate(scenario):
        task_data.plot(path=folder, title=f"background_{task_id}.jpg", nb_samples=100, shape=[28, 28, 3])
        loader = DataLoader(task_data)
        _, _, _ = next(iter(loader))
# =========================================================================================
# Copyright 2015 Community Information Online Consortium (CIOC) and KCL Software Solutions
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# =========================================================================================
# std lib
import os
# jQuery and jQueryUI versions
JQUERY_VERSION = "1.6.2"
JQUERY_UI_VERSION = "1.8.16"
# formatting constants
DATE_TEXT_SIZE = 25
TEXT_SIZE = 85
TEXTAREA_COLS = 85
TEXTAREA_ROWS_SHORT = 2
TEXTAREA_ROWS_LONG = 4
TEXTAREA_ROWS_XLONG = 10
MAX_LENGTH_CHECKLIST_NOTES = 255
EMAIL_LENGTH = 60
# application running constants
_app_path = None
_config_file = None
_app_name = None
session_lock_dir = None
publish_dir = None
def update_cache_values():
# called from application init at startup
global _app_path, _config_file, _app_name, session_lock_dir, publish_dir
if _app_path is None:
_app_path = os.path.normpath(os.path.join(os.path.dirname(__file__), '..', '..', '..'))
_app_name = os.path.split(_app_path)[1]
_config_file = os.path.join(_app_path, '..', '..', 'config', _app_name + '.ini')
session_lock_dir = os.path.join(_app_path, 'python', 'session_lock')
publish_dir = os.path.join(_app_path, 'python', 'published_files')
try:
os.makedirs(session_lock_dir)
except os.error:
pass
try:
os.makedirs(publish_dir)
except os.error:
pass
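# Hedged usage sketch (added for illustration; names as defined above): the
# application's init code is expected to call update_cache_values() exactly
# once at startup, after which the module-level paths are populated, e.g.:
#
#   update_cache_values()
#   assert session_lock_dir is not None and publish_dir is not None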
| [[[850, 852], [1469, 1471], [1486, 1488], [1499, 1501], [1565, 1567], [1616, 1618], [1709, 1711], [1781, 1783], [1860, 1862], [1905, 1907], [1958, 1960], [1998, 2000]], [[885, 899]], [[910, 927]], [[963, 977]], [[983, 992]], [[998, 1011]], [[1017, 1036]], [[1041, 1059]], [[1064, 1083]], [[1089, 1115]], [[1122, 1134]], [[1173, 1182], [1430, 1439]], [[1190, 1202]], [[1210, 1219]], [[1227, 1243]], [[1251, 1262]], [[1276, 1295]], [[1457, 1466], [1579, 1588], [1629, 1638], [1722, 1731], [1794, 1803]], [[1553, 1562], [1662, 1671]], [[1601, 1613]], [[1690, 1706], [1872, 1888]], [[1767, 1778], [1970, 1981]]] |
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import Any, TYPE_CHECKING
from azure.core.configuration import Configuration
from azure.core.pipeline import policies
from azure.mgmt.core.policies import ARMHttpLoggingPolicy
from .._version import VERSION
if TYPE_CHECKING:
# pylint: disable=unused-import,ungrouped-imports
from azure.core.credentials_async import AsyncTokenCredential
class WebSiteManagementClientConfiguration(Configuration):
"""Configuration for WebSiteManagementClient.
Note that all parameters used to create this instance are saved as instance
attributes.
:param credential: Credential needed for the client to connect to Azure.
:type credential: ~azure.core.credentials_async.AsyncTokenCredential
:param subscription_id: Your Azure subscription ID. This is a GUID-formatted string (e.g. 00000000-0000-0000-0000-000000000000).
:type subscription_id: str
"""
def __init__(
self,
credential: "AsyncTokenCredential",
subscription_id: str,
**kwargs: Any
) -> None:
if credential is None:
raise ValueError("Parameter 'credential' must not be None.")
if subscription_id is None:
raise ValueError("Parameter 'subscription_id' must not be None.")
super(WebSiteManagementClientConfiguration, self).__init__(**kwargs)
self.credential = credential
self.subscription_id = subscription_id
self.api_version = "2015-08-01"
self.credential_scopes = kwargs.pop('credential_scopes', ['https://management.azure.com/.default'])
kwargs.setdefault('sdk_moniker', 'mgmt-web/{}'.format(VERSION))
self._configure(**kwargs)
def _configure(
self,
**kwargs: Any
) -> None:
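        # Each pipeline policy can be supplied explicitly via kwargs;
        # otherwise a default from azure.core.pipeline.policies (or
        # ARMHttpLoggingPolicy for HTTP logging) is constructed.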
self.user_agent_policy = kwargs.get('user_agent_policy') or policies.UserAgentPolicy(**kwargs)
self.headers_policy = kwargs.get('headers_policy') or policies.HeadersPolicy(**kwargs)
self.proxy_policy = kwargs.get('proxy_policy') or policies.ProxyPolicy(**kwargs)
self.logging_policy = kwargs.get('logging_policy') or policies.NetworkTraceLoggingPolicy(**kwargs)
self.http_logging_policy = kwargs.get('http_logging_policy') or ARMHttpLoggingPolicy(**kwargs)
self.retry_policy = kwargs.get('retry_policy') or policies.AsyncRetryPolicy(**kwargs)
self.custom_hook_policy = kwargs.get('custom_hook_policy') or policies.CustomHookPolicy(**kwargs)
self.redirect_policy = kwargs.get('redirect_policy') or policies.AsyncRedirectPolicy(**kwargs)
self.authentication_policy = kwargs.get('authentication_policy')
if self.credential and not self.authentication_policy:
self.authentication_policy = policies.AsyncBearerTokenCredentialPolicy(self.credential, *self.credential_scopes, **kwargs)
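# Hedged usage sketch (illustration only; the generated WebSiteManagementClient
# normally builds this configuration itself, and the credential import below is
# an assumed dependency):
#
#   from azure.identity.aio import DefaultAzureCredential
#   config = WebSiteManagementClientConfiguration(
#       credential=DefaultAzureCredential(),
#       subscription_id="00000000-0000-0000-0000-000000000000",
#   )
#   assert config.api_version == "2015-08-01"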
| [[[487, 490], [1484, 1487], [2190, 2193]], [[492, 505], [693, 706]], [[544, 557], [873, 886]], [[590, 598], [2277, 2285], [2374, 2382], [2465, 2473], [2558, 2566], [2764, 2772], [2870, 2878], [2970, 2978], [3186, 3194]], [[636, 656], [2675, 2695]], [[681, 688], [2093, 2100]], [[807, 827]], [[836, 872], [1735, 1771]]] |
import django.http
import unittest.mock
from .. import middleware
def get_response(req):
# dummy get_response, just return an empty response
return django.http.HttpResponse()
def test_leaves_remote_addr_alone_if_no_real_ip():
remote_addr = object()
request = unittest.mock.MagicMock()
request.META = {"REMOTE_ADDR": remote_addr}
middleware.XRealIPMiddleware(get_response)(request)
assert request.META["REMOTE_ADDR"] is remote_addr
def test_switches_out_x_real_ip_if_available():
remote_addr = object()
x_real_ip = object()
request = unittest.mock.MagicMock()
request.META = {"REMOTE_ADDR": remote_addr, "HTTP_X_REAL_IP": x_real_ip}
middleware.XRealIPMiddleware(get_response)(request)
assert request.META["REMOTE_ADDR"] is x_real_ip
assert request.META["HTTP_X_REAL_IP"] is x_real_ip
| [[[7, 18], [160, 166]], [[27, 40], [281, 289], [584, 592]], [[57, 67], [360, 370], [692, 702]], [[74, 86], [389, 401], [721, 733]], [[193, 236]], [[473, 513]]] |
#!/usr/bin/env python
import time
import RPi.GPIO as GPIO
GPIO.setmode(GPIO.BCM)      # use Broadcom (BCM) pin numbering
GPIO.setup(21, GPIO.OUT)    # configure GPIO 21 as an output
GPIO.output(21, GPIO.LOW)   # drive the pin low ...
time.sleep(3.00)            # ... hold it there for three seconds ...
GPIO.output(21, GPIO.HIGH)  # ... then drive it high again
GPIO.cleanup()              # release the GPIO resources on exit
| [[[30, 34], [137, 141]], [[43, 59], [62, 66], [75, 79], [85, 89], [100, 104], [110, 114], [126, 130], [155, 159], [171, 175], [182, 186]]] |
from direct.directnotify.DirectNotifyGlobal import directNotify
class Notifier:
def __init__(self, name):
"""
@param name: The name of the notifier. Be sure to add it to your config/Config.prc!
@type name: str
"""
self.notify = directNotify.newCategory(name)
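# Hedged usage sketch (category name is a placeholder): the wrapped
# DirectNotify category exposes the usual Panda3D logging methods.
#
#   notifier = Notifier("MyModule")
#   notifier.notify.info("initialised")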
| [[[51, 63], [274, 286]], [[72, 80]]] |
import numpy as np
def train_ml_squarer() -> None:
print("Training!")
def square() -> int:
"""Square a number...maybe"""
return np.random.randint(1, 100)
if __name__ == '__main__':
    train_ml_squarer() | [[[7, 18], [144, 146]], [[25, 41], [203, 219]], [[82, 88]]] |
"""
Platformer Game
"""
import arcade
# Constants
SCREEN_WIDTH = 1000
SCREEN_HEIGHT = 650
SCREEN_TITLE = "Platformer"
# Constants used to scale our sprites from their original size
CHARACTER_SCALING = 1
TILE_SCALING = 0.5
COIN_SCALING = 0.5
SPRITE_PIXEL_SIZE = 128
GRID_PIXEL_SIZE = SPRITE_PIXEL_SIZE * TILE_SCALING
# Movement speed of player, in pixels per frame
PLAYER_MOVEMENT_SPEED = 10
GRAVITY = 1
PLAYER_JUMP_SPEED = 20
class MyGame(arcade.Window):
"""
Main application class.
"""
def __init__(self):
# Call the parent class and set up the window
super().__init__(SCREEN_WIDTH, SCREEN_HEIGHT, SCREEN_TITLE)
# Our TileMap Object
self.tile_map = None
# Our Scene Object
self.scene = None
# Separate variable that holds the player sprite
self.player_sprite = None
# Our physics engine
self.physics_engine = None
# A Camera that can be used for scrolling the screen
self.camera = None
# A Camera that can be used to draw GUI elements
self.gui_camera = None
# Keep track of the score
self.score = 0
# Load sounds
self.collect_coin_sound = arcade.load_sound(":resources:sounds/coin1.wav")
self.jump_sound = arcade.load_sound(":resources:sounds/jump1.wav")
arcade.set_background_color(arcade.csscolor.CORNFLOWER_BLUE)
def setup(self):
"""Set up the game here. Call this function to restart the game."""
# Setup the Cameras
self.camera = arcade.Camera(self.width, self.height)
self.gui_camera = arcade.Camera(self.width, self.height)
# Name of map file to load
map_name = ":resources:tiled_maps/map.json"
# Layer specific options are defined based on Layer names in a dictionary
# Doing this will make the SpriteList for the platforms layer
# use spatial hashing for detection.
layer_options = {
"Platforms": {
"use_spatial_hash": True,
},
}
# Read in the tiled map
self.tile_map = arcade.load_tilemap(map_name, TILE_SCALING, layer_options)
# Initialize Scene with our TileMap, this will automatically add all layers
# from the map as SpriteLists in the scene in the proper order.
self.scene = arcade.Scene.from_tilemap(self.tile_map)
# Keep track of the score
self.score = 0
# Set up the player, specifically placing it at these coordinates.
image_source = ":resources:images/animated_characters/female_adventurer/femaleAdventurer_idle.png"
self.player_sprite = arcade.Sprite(image_source, CHARACTER_SCALING)
self.player_sprite.center_x = 128
self.player_sprite.center_y = 128
self.scene.add_sprite("Player", self.player_sprite)
# --- Other stuff
# Set the background color
if self.tile_map.background_color:
arcade.set_background_color(self.tile_map.background_color)
# Create the 'physics engine'
self.physics_engine = arcade.PhysicsEnginePlatformer(
self.player_sprite, gravity_constant=GRAVITY, walls=self.scene["Platforms"]
)
def on_draw(self):
"""Render the screen."""
# Clear the screen to the background color
arcade.start_render()
# Activate the game camera
self.camera.use()
# Draw our Scene
self.scene.draw()
# Activate the GUI camera before drawing GUI elements
self.gui_camera.use()
# Draw our score on the screen, scrolling it with the viewport
score_text = f"Score: {self.score}"
arcade.draw_text(
score_text,
10,
10,
arcade.csscolor.WHITE,
18,
)
def on_key_press(self, key, modifiers):
"""Called whenever a key is pressed."""
if key == arcade.key.UP or key == arcade.key.W:
if self.physics_engine.can_jump():
self.player_sprite.change_y = PLAYER_JUMP_SPEED
arcade.play_sound(self.jump_sound)
elif key == arcade.key.LEFT or key == arcade.key.A:
self.player_sprite.change_x = -PLAYER_MOVEMENT_SPEED
elif key == arcade.key.RIGHT or key == arcade.key.D:
self.player_sprite.change_x = PLAYER_MOVEMENT_SPEED
def on_key_release(self, key, modifiers):
"""Called when the user releases a key."""
if key == arcade.key.LEFT or key == arcade.key.A:
self.player_sprite.change_x = 0
elif key == arcade.key.RIGHT or key == arcade.key.D:
self.player_sprite.change_x = 0
def center_camera_to_player(self):
screen_center_x = self.player_sprite.center_x - (self.camera.viewport_width / 2)
screen_center_y = self.player_sprite.center_y - (
self.camera.viewport_height / 2
)
if screen_center_x < 0:
screen_center_x = 0
if screen_center_y < 0:
screen_center_y = 0
player_centered = screen_center_x, screen_center_y
self.camera.move_to(player_centered)
def on_update(self, delta_time):
"""Movement and game logic"""
# Move the player with the physics engine
self.physics_engine.update()
# See if we hit any coins
coin_hit_list = arcade.check_for_collision_with_list(
self.player_sprite, self.scene["Coins"]
)
# Loop through each coin we hit (if any) and remove it
for coin in coin_hit_list:
# Remove the coin
coin.remove_from_sprite_lists()
# Play a sound
arcade.play_sound(self.collect_coin_sound)
# Add one to the score
self.score += 1
# Position the camera
self.center_camera_to_player()
def main():
"""Main function"""
window = MyGame()
window.setup()
arcade.run()
if __name__ == "__main__":
main()
| [[[31, 37], [444, 450], [1215, 1221], [1290, 1296], [1348, 1354], [1376, 1382], [1558, 1564], [1623, 1629], [2125, 2131], [2362, 2368], [2673, 2679], [2981, 2987], [3110, 3116], [3357, 3363], [3710, 3716], [3796, 3802], [3957, 3963], [3981, 3987], [4122, 4128], [4177, 4183], [4203, 4209], [4302, 4308], [4329, 4335], [4524, 4530], [4550, 4556], [4628, 4634], [4655, 4661], [5410, 5416], [5722, 5728], [5981, 5987]], [[51, 63], [609, 621]], [[71, 84], [623, 636]], [[91, 103], [638, 650]], [[183, 200], [2701, 2718]], [[205, 217], [305, 317], [2155, 2167]], [[224, 236]], [[243, 260], [285, 302]], [[267, 282]], [[367, 388], [4260, 4281], [4385, 4406]], [[394, 401], [3191, 3198]], [[406, 423], [4088, 4105]], [[437, 443], [5949, 5955]], [[5904, 5908], [6027, 6031]]] |
#!/usr/bin/env python3
'''
lib/ycmd/start.py
Server bootstrap logic. Includes a utility class for normalizing parameters and
calculating default ones. Also includes a helper to set up the temporary
options file.
'''
import logging
import os
import tempfile
from ..process import (
FileHandles,
Process,
)
from ..util.fs import (
default_python_binary_path,
save_json_file,
)
from ..ycmd.constants import (
YCMD_LOG_SPOOL_OUTPUT,
YCMD_LOG_SPOOL_SIZE,
YCMD_DEFAULT_SERVER_CHECK_INTERVAL_SECONDS,
YCMD_DEFAULT_SERVER_IDLE_SUICIDE_SECONDS,
)
from ..ycmd.settings import (
get_default_settings_path,
generate_settings_data,
)
logger = logging.getLogger('sublime-ycmd.' + __name__)
class StartupParameters(object):
'''
Startup parameters for a ycmd server instance.
Should include all the necessary configuration for creating the ycmd
server process. Also calculates defaults for certain parameters.
'''
def __init__(self, ycmd_root_directory=None,
ycmd_settings_path=None,
working_directory=None,
python_binary_path=None,
server_idle_suicide_seconds=None,
server_check_interval_seconds=None):
self._ycmd_root_directory = None
self._ycmd_settings_path = None
self._working_directory = None
self._python_binary_path = None
self._server_idle_suicide_seconds = None
self._server_check_interval_seconds = None
# additional attributes, can be set via the properties
self._log_level = None
self._stdout_log_path = None
self._stderr_log_path = None
self._keep_logs = None
self.ycmd_root_directory = ycmd_root_directory
self.ycmd_settings_path = ycmd_settings_path
self.working_directory = working_directory
self.python_binary_path = python_binary_path
self.server_idle_suicide_seconds = server_idle_suicide_seconds
self.server_check_interval_seconds = server_check_interval_seconds
@property
def ycmd_root_directory(self):
if self._ycmd_root_directory is None:
logger.warning('no ycmd root directory has been set')
return self._ycmd_root_directory
@ycmd_root_directory.setter
def ycmd_root_directory(self, ycmd_root_directory):
if ycmd_root_directory is not None and \
not isinstance(ycmd_root_directory, str):
raise TypeError(ycmd_root_directory,)
self._ycmd_root_directory = ycmd_root_directory
@property
def ycmd_settings_path(self):
if self._ycmd_settings_path is None:
if self._ycmd_root_directory is not None:
return get_default_settings_path(self._ycmd_root_directory)
logger.warning('no ycmd root directory has been set')
return self._ycmd_settings_path
@ycmd_settings_path.setter
def ycmd_settings_path(self, ycmd_settings_path):
if ycmd_settings_path is not None and \
not isinstance(ycmd_settings_path, str):
raise TypeError(ycmd_settings_path,)
self._ycmd_settings_path = ycmd_settings_path
@property
def working_directory(self):
if self._working_directory is None:
return os.getcwd()
return self._working_directory
@working_directory.setter
def working_directory(self, working_directory):
if working_directory is not None and \
not isinstance(working_directory, str):
raise TypeError(working_directory,)
self._working_directory = working_directory
@property
def python_binary_path(self):
if self._python_binary_path is None:
return default_python_binary_path()
return self._python_binary_path
@python_binary_path.setter
def python_binary_path(self, python_binary_path):
if python_binary_path is not None and \
not isinstance(python_binary_path, str):
raise TypeError(python_binary_path,)
self._python_binary_path = python_binary_path
@property
def server_idle_suicide_seconds(self):
if self._server_idle_suicide_seconds is None:
return YCMD_DEFAULT_SERVER_IDLE_SUICIDE_SECONDS
return self._server_idle_suicide_seconds
@server_idle_suicide_seconds.setter
def server_idle_suicide_seconds(self, server_idle_suicide_seconds):
if server_idle_suicide_seconds is not None and \
not isinstance(server_idle_suicide_seconds, int):
raise TypeError(server_idle_suicide_seconds,)
self._server_idle_suicide_seconds = server_idle_suicide_seconds
@property
def server_check_interval_seconds(self):
if self._server_check_interval_seconds is None:
return YCMD_DEFAULT_SERVER_CHECK_INTERVAL_SECONDS
return self._server_check_interval_seconds
@server_check_interval_seconds.setter
def server_check_interval_seconds(self, server_check_interval_seconds):
if server_check_interval_seconds is not None and \
not isinstance(server_check_interval_seconds, int):
raise TypeError(server_check_interval_seconds,)
self._server_check_interval_seconds = server_check_interval_seconds
@property
def log_level(self):
return self._log_level
@log_level.setter
def log_level(self, log_level):
if log_level is not None and not isinstance(log_level, str):
raise TypeError('log level must be a str: %r' % (log_level))
if log_level is not None and not _is_valid_log_level(log_level):
logger.warning('log level unrecognized: %r', log_level)
# but fall through and do it anyway
self._log_level = log_level
@property
def stdout_log_path(self):
return self._stdout_log_path
@stdout_log_path.setter
def stdout_log_path(self, stdout_log_path):
if stdout_log_path is not None and \
not isinstance(stdout_log_path, str):
raise TypeError(
'stdout log path must be a str: %r' % (stdout_log_path)
)
self._stdout_log_path = stdout_log_path
@property
def stderr_log_path(self):
return self._stderr_log_path
@stderr_log_path.setter
def stderr_log_path(self, stderr_log_path):
if stderr_log_path is not None and \
not isinstance(stderr_log_path, str):
raise TypeError(
'stderr_log_path must be a str: %r' % (stderr_log_path)
)
self._stderr_log_path = stderr_log_path
@property
def keep_logs(self):
if self._keep_logs is None:
return False
return self._keep_logs
@keep_logs.setter
def keep_logs(self, keep_logs):
if keep_logs is not None and not isinstance(keep_logs, bool):
raise TypeError('keep-logs must be a bool: %r' % (keep_logs))
self._keep_logs = keep_logs
@property
def ycmd_module_directory(self):
if self._ycmd_root_directory is None:
logger.error('no ycmd root directory set')
raise AttributeError
return os.path.join(self._ycmd_root_directory, 'ycmd')
def copy(self):
'''
Creates a shallow-copy of the startup parameters.
'''
raw_attrs = [
'_ycmd_root_directory',
'_ycmd_settings_path',
'_working_directory',
'_python_binary_path',
'_server_idle_suicide_seconds',
'_server_check_interval_seconds',
'_log_level',
'_stdout_log_path',
'_stderr_log_path',
'_keep_logs',
]
result = StartupParameters()
for attr in raw_attrs:
attr_value = getattr(self, attr)
setattr(result, attr, attr_value)
return result
def __iter__(self):
''' Dictionary-compatible iterator. '''
return iter((
('ycmd_root_directory', self.ycmd_root_directory),
('ycmd_settings_path', self.ycmd_settings_path),
('working_directory', self.working_directory),
('python_binary_path', self.python_binary_path),
('server_idle_suicide_seconds', self.server_idle_suicide_seconds),
(
'server_check_interval_seconds',
self.server_check_interval_seconds,
),
('ycmd_module_directory', self.ycmd_module_directory),
('log_level', self.log_level),
('stdout_log_path', self.stdout_log_path),
('stderr_log_path', self.stderr_log_path),
('keep_logs', self.keep_logs),
))
def __str__(self):
return (
'ycmd path, default settings path, '
'python binary path, working directory: '
'%(ycmd_root_directory)s, %(ycmd_settings_path)s, '
'%(python_binary_path)s, %(working_directory)s' %
(dict(self))
)
def __repr__(self):
return '%s(%r)' % (StartupParameters, dict(self))
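# Hedged usage sketch (path is a placeholder): construct the parameters,
# optionally set the extra logging attributes, then hand the instance to the
# process helpers further below.
#
#   params = StartupParameters('/path/to/ycmd')
#   params.log_level = 'debug'
#   print(dict(params))   # __iter__ above makes the instance dict-compatible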
def to_startup_parameters(ycmd_root_directory,
ycmd_settings_path=None,
working_directory=None,
python_binary_path=None,
server_idle_suicide_seconds=None,
server_check_interval_seconds=None):
'''
Internal convenience function. Receives the raw arguments to starting a
ycmd server and returns a `StartupParameters` instance from it.
If the first argument is already `StartupParameters`, it is returned as-is,
and the remaining parameters are ignored.
Otherwise, a `StartupParameters` instance is constructed with all the given
parameters and returned.
'''
if isinstance(ycmd_root_directory, StartupParameters):
# great, already in the desired state
# check if other params are provided and issue a warning
# (they get ignored in that case)
if ycmd_settings_path is not None:
logger.warning(
'ycmd settings path will be ignored: %s', ycmd_settings_path,
)
if working_directory is not None:
logger.warning(
'working directory will be ignored: %s', working_directory,
)
if python_binary_path is not None:
logger.warning(
'python binary path will be ignored: %s', python_binary_path,
)
if server_idle_suicide_seconds is not None:
logger.warning(
'server idle suicide seconds will be ignored: %s',
server_idle_suicide_seconds,
)
if server_check_interval_seconds is not None:
logger.warning(
'server check interval seconds will be ignored: %s',
server_check_interval_seconds,
)
return ycmd_root_directory
# else, generate them
logger.warning('[DEPRECATED] to startup parameters', stack_info=True)
logger.debug(
'generating startup parameters with root: %s', ycmd_root_directory,
)
return StartupParameters(
ycmd_root_directory,
ycmd_settings_path=ycmd_settings_path,
working_directory=working_directory,
python_binary_path=python_binary_path,
server_idle_suicide_seconds=server_idle_suicide_seconds,
server_check_interval_seconds=server_check_interval_seconds,
)
def check_startup_parameters(startup_parameters):
'''
Performs quick, non-blocking validation on startup parameters to catch type
mismatches or empty configurations. Raises an exception or returns `None`.
This is meant to be run on the main thread to catch common startup errors
before initializing the server off-thread. It isn't strictly necessary, but
produces nicer error messages when the plugin is not configured correctly.
NOTE : This does not check the file system for things like missing files,
as that can be a blocking operation.
'''
if not isinstance(startup_parameters, StartupParameters):
raise TypeError(
'startup parameters must be StartupParameters: %r' %
(startup_parameters)
)
ycmd_root_directory = startup_parameters.ycmd_root_directory
if not ycmd_root_directory:
raise RuntimeError('no ycmd root directory has been set')
ycmd_settings_path = startup_parameters.ycmd_settings_path
if not ycmd_settings_path:
raise RuntimeError('no ycmd default settings path has been set')
logger.debug(
'startup parameters seem to be filled in, '
'ready to attempt startup: %r', startup_parameters,
)
def write_ycmd_settings_file(ycmd_settings_path, ycmd_hmac_secret, out=None):
'''
Writes out a ycmd server settings file based on the template file
`ycmd_settings_path`. A uniquely-generated `ycmd_hmac_secret` must also be
supplied, as it needs to be written into this file.
The return value is the path to the settings file, as a `str`.
If `out` is omitted, a secure temporary file is created, and the returned
path should be passed via the options flag to ycmd.
If `out` is provided, it should be a path to an output file (`str`), or a
file-like handle (must support `.write`). This is not recommended for use
with ycmd, as it may be insecure.
'''
ycmd_settings_data = generate_settings_data(
ycmd_settings_path, ycmd_hmac_secret,
)
out_path = None
if out is None:
# no point using `with` for this, since we also use `delete=False`
temp_file_object = tempfile.NamedTemporaryFile(
prefix='ycmd_settings_', suffix='.json', delete=False,
)
temp_file_name = temp_file_object.name
temp_file_handle = temp_file_object.file # type: io.TextIOWrapper
out = temp_file_handle
out_path = temp_file_name
def flush():
temp_file_handle.flush()
def close():
temp_file_object.close()
else:
raise NotImplementedError('unimplemented: output to specific file')
if out_path is None and out is not None:
logger.error('failed to get path for output file: %r', out)
# fall through and write it out anyway
save_json_file(out, ycmd_settings_data)
flush()
close()
logger.debug('successfully wrote file: %s', out_path)
return out_path
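# Hedged usage sketch (the hmac secret below is a placeholder; real callers
# generate a random one and share it with the ycmd server out-of-band):
#
#   settings_path = write_ycmd_settings_file(
#       params.ycmd_settings_path, b'0123456789abcdef',
#   )
#   # settings_path is then passed to ycmd via its --options_file flag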
def prepare_ycmd_process(startup_parameters, ycmd_settings_tempfile_path,
ycmd_server_hostname, ycmd_server_port):
'''
Initializes and returns a `Process` handle, correctly configured to launch
a ycmd server process. It does not automatically start it though.
The `ycmd_settings_tempfile_path` should be created by (return value of)
`write_ycmd_settings_file`. The ycmd server process will read that file on
startup and then immediately delete it.
The `ycmd_server_hostname` and `ycmd_server_port` must also be provided to
instruct the server to listen on the given address.
'''
assert isinstance(startup_parameters, StartupParameters), \
'startup parameters must be StartupParameters: %r' % \
(startup_parameters)
assert isinstance(ycmd_settings_tempfile_path, str), \
'ycmd settings temporary file path must be a str: %r' % \
(ycmd_settings_tempfile_path)
# this may throw:
check_startup_parameters(startup_parameters)
working_directory = startup_parameters.working_directory
python_binary_path = startup_parameters.python_binary_path
server_idle_suicide_seconds = \
startup_parameters.server_idle_suicide_seconds
server_check_interval_seconds = \
startup_parameters.server_check_interval_seconds
ycmd_module_directory = startup_parameters.ycmd_module_directory
if YCMD_LOG_SPOOL_OUTPUT:
stdout_log_spool = \
tempfile.SpooledTemporaryFile(max_size=YCMD_LOG_SPOOL_SIZE)
stderr_log_spool = \
tempfile.SpooledTemporaryFile(max_size=YCMD_LOG_SPOOL_SIZE)
logger.debug(
'using temporary spools for stdout, stderr: %r, %r',
stdout_log_spool, stderr_log_spool,
)
stdout_handle = stdout_log_spool
stderr_handle = stderr_log_spool
else:
# explicitly close handles - don't inherit from this process
stdout_handle = FileHandles.DEVNULL
stderr_handle = FileHandles.DEVNULL
ycmd_process_handle = Process()
ycmd_process_handle.binary = python_binary_path
ycmd_process_handle.args.extend([
ycmd_module_directory,
'--host=%s' % (ycmd_server_hostname),
'--port=%s' % (ycmd_server_port),
'--idle_suicide_seconds=%s' % (server_idle_suicide_seconds),
'--check_interval_seconds=%s' % (server_check_interval_seconds),
'--options_file=%s' % (ycmd_settings_tempfile_path),
])
ycmd_process_handle.cwd = working_directory
ycmd_process_handle.filehandles.stdout = stdout_handle
ycmd_process_handle.filehandles.stderr = stderr_handle
if startup_parameters.log_level is not None:
add_ycmd_debug_args(
ycmd_process_handle,
log_level=startup_parameters.log_level,
stdout_file_name=startup_parameters.stdout_log_path,
stderr_file_name=startup_parameters.stderr_log_path,
keep_logfiles=startup_parameters.keep_logs,
)
return ycmd_process_handle
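# Hedged usage sketch (`start()` is assumed from the Process wrapper's API,
# which is defined elsewhere; prepare_ycmd_process itself never launches):
#
#   handle = prepare_ycmd_process(params, settings_path, '127.0.0.1', 8080)
#   handle.start()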
def add_ycmd_debug_args(ycmd_process_handle, log_level='info',
stdout_file_name=None, stderr_file_name=None,
keep_logfiles=False):
'''
Adds startup flags to `ycmd_process_handle` to enable logging output.
The `ycmd_process_handle` should be an instance of `Process`.
The `log_level` should be one of 'debug', 'info', 'warning', 'error', or
'critical'. Any `str` is accepted, this routine does not actually check it.
If `stdout_file_name` and `stderr_file_name` are provided, the server will
write log messages to the given files. The bulk of the logs will be on
stderr, with only a few startup messages appearing on stdout.
If `keep_logfiles` is `True`, then the server won't delete the log files
when it exits. Otherwise, the log files will be deleted when it shuts down.
'''
if not isinstance(ycmd_process_handle, Process):
raise TypeError(
'ycmd process handle must be a Process: %r' % (ycmd_process_handle)
)
assert isinstance(ycmd_process_handle, Process)
if ycmd_process_handle.alive():
raise ValueError(
'ycmd process is already started, cannot modify it: %r' %
(ycmd_process_handle)
)
if not _is_valid_log_level(log_level):
logger.warning('log level unrecognized: %r', log_level)
# but fall through and do it anyway
ycmd_debug_args = [
'--log=%s' % (log_level),
]
if stdout_file_name and stderr_file_name:
ycmd_debug_args.extend([
'--stdout=%s' % (stdout_file_name),
'--stderr=%s' % (stderr_file_name),
])
if keep_logfiles:
ycmd_debug_args.append(
'--keep_logfiles',
)
logger.debug('adding ycmd debug args: %r', ycmd_debug_args)
ycmd_process_handle.args.extend(ycmd_debug_args)
def _is_valid_log_level(log_level):
if not isinstance(log_level, str):
raise TypeError('log level must be a str: %r' % (log_level))
# these can be found by running `python /path/to/ycmd/ycmd --help`
recognized_log_levels = [
'debug',
'info',
'warning',
'error',
'critical',
]
return log_level in recognized_log_levels
| [[[225, 232], [674, 681]], [[240, 242], [3305, 3307], [7231, 7233]], [[250, 258], [13763, 13771], [16058, 16066], [16159, 16167]], [[288, 299], [16551, 16562], [16595, 16606]], [[305, 312], [16642, 16649], [18549, 18556], [18717, 18724]], [[344, 370], [3755, 3781]], [[376, 390], [14429, 14443]], [[429, 450], [15994, 16015]], [[456, 475], [16097, 16116], [16198, 16217]], [[481, 523], [4840, 4882]], [[529, 569], [4249, 4289]], [[607, 632], [2740, 2765]], [[638, 660], [13543, 13565]], [[665, 671], [2172, 2178], [2805, 2811], [5674, 5680], [7140, 7146], [10141, 10147], [10303, 10309], [10464, 10470], [10636, 10642], [10844, 10850], [11057, 11063], [11131, 11137], [12690, 12696], [14317, 14323], [14499, 14505], [16228, 16234], [18954, 18960], [19422, 19428]], [[728, 745], [7777, 7794], [9122, 9139], [9913, 9930], [11239, 11256], [12200, 12217], [15257, 15274]], [[9159, 9180]], [[11572, 11596], [15561, 15585]], [[12828, 12852]], [[14579, 14599]], [[17637, 17656], [17297, 17316]], [[19541, 19560], [5630, 5649], [18914, 18933]]] |
#!/usr/bin/env python
import serial
import sys
import struct
import pprint
import argparse
import code
pp = pprint.PrettyPrinter()
class ConsoleUI:
def opStart(self, name):
sys.stdout.write(name.ljust(40))
def opProgress(self, progress, total=-1):
if (total >= 0):
prstr = "0x%04x / 0x%04x" % (progress, total)
else:
prstr = "0x%04x" % (progress)
sys.stdout.write(prstr.ljust(20))
sys.stdout.write('\x08' * 20)
sys.stdout.flush()
def opEnd(self, result):
sys.stdout.write(result.ljust(20))
sys.stdout.write("\n")
class XFlash:
def __init__(self, serialport):
self.serial = serial.Serial(serialport, baudrate=115200)
def __del__(self):
try:
self.serial.close()
del self.serial
except:
pass
def cmd(self, cmd, argA=0, argB=0):
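        # Wire format: a single opcode byte followed by two little-endian
        # uint32 arguments (argA, argB), flushed to the device in one shot.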
buffer = struct.pack("<LL", argA, argB)
self.serial.write(bytes([cmd]))
self.serial.write(buffer)
self.serial.flush()
def flashPowerOn(self):
self.cmd(0x10)
def flashShutdown(self):
self.cmd(0x11)
def update(self):
try:
self.cmd(0xF0)
except:
pass
def flashInit(self):
self.cmd(0x03)
buffer = self.serial.read(4)
return struct.unpack("<L", buffer)[0]
def flashDeInit(self):
self.cmd(0x04)
def flashStatus(self):
self.cmd(0x05)
buffer = self.serial.read(2)
return struct.unpack("<H", buffer)[0]
def flashErase(self, block):
self.cmd(0x06, block)
# return self.flashStatus()
def flashReadBlock(self, block):
self.cmd(0x01, block, 528 * 32)
# for i in range(0, 32):
buffer = self.serial.read(528 * 32)
status = self.flashStatus()
return (status, buffer)
def flashWriteBlock(self, block, buffer):
self.cmd(0x02, block, len(buffer))
self.serial.write(buffer)
return self.flashStatus()
# def calcecc(data):
# assert len(data) == 0x210
# val = 0
# for i in range(0x1066):
# if not i & 31:
# v = ~struct.unpack("<L", data[i/8:i/8+4])[0]
# val ^= v & 1
# v >>= 1
# if val & 1:
# val ^= 0x6954559
# val >>= 1
#
# val = ~val
# return data[:-4] + struct.pack("<L", (val << 6) & 0xFFFFFFFF)
#
# def addecc(data, block = 0, off_8 = "\x00" * 4):
# res = ""
# while len(data):
# d = (data[:0x200] + "\x00" * 0x200)[:0x200]
# data = data[0x200:]
#
# d += struct.pack("<L4B4s4s", block / 32, 0, 0xFF, 0, 0, off_8, "\0\0\0\0")
# d = calcecc(d)
# block += 1
# res += d
# return res
def main(argv):
parser = argparse.ArgumentParser(description='XBox 360 NAND Flasher')
parser.add_argument('port', metavar='port', type=str,
help='serial port for comms (e.g. COM5 or /dev/ttyUSB0)')
subparsers = parser.add_subparsers(title='Operations', dest='action')
parser_read = subparsers.add_parser('read', help='Dumps an image from the NAND')
parser_read.add_argument('file', nargs=1, type=argparse.FileType('wb'), help='The file to dump the NAND to')
parser_read.add_argument('start', nargs='?', metavar='start', action='store', type=int, default=0,
help='The block to start the action from')
parser_read.add_argument('end', nargs='?', metavar='end', action='store', type=int, default=0x400,
help='The count of blocks to perform the action to')
parser_write = subparsers.add_parser('write', help='Writes an image into the NAND')
parser_write.add_argument('file', nargs=1, type=argparse.FileType('rb'), help='The image file to write to the NAND')
parser_write.add_argument('start', nargs='?', metavar='start', action='store', type=int, default=0,
help='The block to start the action from')
parser_write.add_argument('end', nargs='?', metavar='end', action='store', type=int, default=0x400,
help='The count of blocks to perform the action to')
# parser_erase = subparsers.add_parser('erase', help='Erases blocks in the NAND')
# parser_erase.add_argument('start', nargs='?', metavar='start', action='store', type=int, default=0,
# help='The block to start the action from')
# parser_erase.add_argument('end', nargs='?', metavar='end', action='store', type=int, default=0x400,
# help='The count of blocks to perform the action to')
#
# parser_update = subparsers.add_parser('update',
# help='Jumps into the bootloader of the NAND Flashing device for updating the firmware')
# parser_shutdown = subparsers.add_parser('shutdown', help='Shuts down the attached XBox 360')
# parser_poweron = subparsers.add_parser('powerup', help='Powers up the attached XBox 360')
arguments = parser.parse_args(argv[1:])
ui = ConsoleUI()
xf = XFlash(arguments.port)
if arguments.action in ('erase', 'write', 'read'):
try:
flash_config = xf.flashInit()
print("FlashConfig: 0x%08x" % (flash_config))
if flash_config <= 0:
raise Exception("FlashConfig invalid!")
except Exception as e:
print("Error!", e)
xf.flashDeInit()
return 1
try:
if arguments.action == 'erase':
# start = 0
# end = (options.flashsize * 1024) / 16
start = arguments.start
end = arguments.end
ui.opStart('Erase')
ui.opProgress(0, end)
for b in range(start, end):
status = xf.flashErase(b)
ui.opProgress(b + 1, end)
ui.opEnd('0x%04x blocks OK' % (end))
if arguments.action == 'read':
# start = 0
# end = (options.flashsize * 1024) / 16
start = arguments.start
end = arguments.end
ui.opStart('Read')
ui.opProgress(0, end)
for b in range(start, end):
(status, buffer) = xf.flashReadBlock(b)
ui.opProgress(b + 1, end)
arguments.file[0].write(buffer)
if arguments.action == 'write':
# start = 0
# end = (options.flashsize * 1024) / 16
start = arguments.start
end = arguments.end
blocksize = 528 * 32
ui.opStart('Write')
ui.opProgress(0, end)
for b in range(start, end):
buffer = arguments.file[0].read(blocksize)
if len(buffer) < blocksize:
                    buffer += (b'\xFF' * (blocksize - len(buffer)))  # pad with 0xFF; bytes literal, since the file is opened 'rb'
status = xf.flashWriteBlock(b, buffer)
ui.opProgress(b + 1, end)
#
# if arguments.action == 'update':
# xf.update()
#
# if arguments.action == 'powerup':
# xf.flashPowerOn()
#
# if arguments.action == 'shutdown':
# xf.flashShutdown()
except Exception as e:
raise e
finally:
xf.flashDeInit()
return 0
if __name__ == '__main__':
sys.exit(main(sys.argv))
| [[[29, 35], [693, 699]], [[43, 46], [7333, 7336], [7347, 7350], [188, 191], [416, 419], [458, 461], [496, 499], [553, 556], [596, 599]], [[54, 60], [924, 930], [1361, 1367], [1547, 1553]], [[68, 74], [109, 115]], [[82, 90], [2755, 2763], [3168, 3176], [3731, 3739]], [[98, 102]], [[104, 106]], [[140, 149], [5065, 5074]], [[627, 633], [5087, 5093]], [[2730, 2734], [7342, 7346]]] |
# Copyright (C) 2010-2011 Richard Lincoln
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to
# deal in the Software without restriction, including without limitation the
# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
# sell copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
from CIM16.IEC61968.Common.ActivityRecord import ActivityRecord
class ComplianceEvent(ActivityRecord):
"""Compliance events are used for reporting regulatory or contract compliance issues and/or variances. These might be created as a consequence of local business processes and associated rules. It is anticipated that this class will be customised extensively to meet local implementation needs. Use inherited 'category' to indicate that, for example, expected performance will not be met or reported as mandated.Compliance events are used for reporting regulatory or contract compliance issues and/or variances. These might be created as a consequence of local business processes and associated rules. It is anticipated that this class will be customised extensively to meet local implementation needs. Use inherited 'category' to indicate that, for example, expected performance will not be met or reported as mandated.
"""
def __init__(self, deadline='', *args, **kw_args):
"""Initialises a new 'ComplianceEvent' instance.
@param deadline: The deadline for compliance.
"""
#: The deadline for compliance.
self.deadline = deadline
super(ComplianceEvent, self).__init__(*args, **kw_args)
_attrs = ["deadline"]
_attr_types = {"deadline": str}
_defaults = {"deadline": ''}
_enums = {}
_refs = []
_many_refs = []
| [[[1149, 1163], [1187, 1201]], [[1171, 1186], [2305, 2320]]] |
import logging
from django.db.models.query_utils import Q
from django.shortcuts import get_object_or_404
from django.utils.decorators import method_decorator
from django_filters.rest_framework import DjangoFilterBackend
from drf_yasg import openapi
from drf_yasg.openapi import Parameter
from drf_yasg.utils import no_body, swagger_auto_schema
from notifications.signals import notify
from rest_framework import mixins, status, viewsets
from rest_framework.decorators import action
from rest_framework.decorators import parser_classes as dparser_classes
from rest_framework.parsers import FormParser, JSONParser, MultiPartParser
from rest_framework.permissions import IsAuthenticated
from rest_framework.response import Response
from rest_framework_extensions.mixins import DetailSerializerMixin, NestedViewSetMixin
from looking_for_group.mixins import AutoPermissionViewSetMixin, ParentObjectAutoPermissionViewSetMixin
from . import models, serializers
from .signals import player_kicked, player_left
logger = logging.getLogger("api")
parent_lookup_game__slug = Parameter(
name="parent_lookup_game__slug",
in_="path",
type="string",
format=openapi.FORMAT_SLUG,
description="Slug of related game object.",
)
parent_lookup_session__slug = Parameter(
name="parent_lookup_session__slug",
in_="path",
type="string",
format=openapi.FORMAT_SLUG,
description="Slug of related session object.",
)
parent_lookup_session__game__slug = Parameter(
name="parent_lookup_session__game__slug",
in_="path",
type="string",
format=openapi.FORMAT_SLUG,
description="Slug of related game object.",
)
@method_decorator(
name="list",
decorator=swagger_auto_schema(
operation_summary="List Games",
operation_description="Fetch a list of game records. **NOTE**: You will probably want to filter by status at least.",
),
)
@method_decorator(
name="create",
decorator=swagger_auto_schema(
operation_summary="Game: Create",
operation_description="Create a new game posting.",
request_body=serializers.GameDataSerializer,
responses={201: serializers.GameDataSerializer},
),
)
@method_decorator(
name="retrieve",
decorator=swagger_auto_schema(
operation_summary="Game: Details",
operation_description="Fetch the details for the given game. **NOTE**: If you are not a member of the game, only a subset of the available information will be displayed.",
responses={
200: serializers.GameDataSerializer,
403: "You are not authorized to view this game.",
},
),
)
@method_decorator(
name="update",
decorator=swagger_auto_schema(
operation_summary="Game: Update",
operation_description="Update the details of this game. (Only available to GM)",
request_body=serializers.GameDataSerializer,
responses={
200: serializers.GameDataSerializer,
403: "You are not the GM of this game.",
},
),
)
@method_decorator(
name="partial_update",
decorator=swagger_auto_schema(
operation_summary="Game: Update",
operation_description="Update the details of this game. (Only available to GM)",
request_body=serializers.GameDataSerializer,
responses={
200: serializers.GameDataSerializer,
403: "You are not the GM of this game.",
},
),
)
@method_decorator(
name="destroy",
decorator=swagger_auto_schema(
operation_summary="Game: Delete",
operation_description="Delete the given game. (Only available to GM.)",
request_body=no_body,
responses={204: "Game was deleted.", 403: "You are not the GM of this game."},
),
)
@method_decorator(
name="leave",
decorator=swagger_auto_schema(
operation_summary="Game: Leave",
operation_description="Leave the current game. (Players only.)",
request_body=no_body,
        responses={
204: "You have successfully left the game.",
400: "You are not a member of this game.",
403: "You are the GM and cannot leave.",
},
),
)
@method_decorator(
name="apply",
decorator=swagger_auto_schema(
operation_summary="Game: Apply",
operation_description="Apply to join this game.",
request_body=serializers.GameApplicationSerializer,
responses={
201: serializers.GameApplicationSerializer,
400: "You are already a member of this game.",
403: "You are not permitted to apply to this game either due to your access rights or the game's status.",
},
),
)
class GamePostingViewSet(
AutoPermissionViewSetMixin,
DetailSerializerMixin,
NestedViewSetMixin,
viewsets.ModelViewSet,
):
"""
A view set that allows the retrieval and manipulation of posted game data.
"""
permission_classes = (IsAuthenticated,)
parser_classes = [FormParser, MultiPartParser]
model = models.GamePosting
lookup_field = "slug"
lookup_url_kwarg = "slug"
serializer_class = serializers.GameDataListSerializer
serializer_detail_class = serializers.GameDataSerializer
filter_backends = [DjangoFilterBackend]
filterset_fields = [
"published_game",
"game_system",
"published_module",
"status",
"game_type",
"game_mode",
]
permission_type_map = {
**AutoPermissionViewSetMixin.permission_type_map,
"apply": "apply",
"leave": "leave",
}
def get_queryset(self):
gamer = self.request.user.gamerprofile
friends = gamer.friends.all()
communities = [f.id for f in gamer.communities.all()]
game_player_ids = [
obj.game.id
for obj in models.Player.objects.filter(gamer=gamer).select_related("game")
]
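        # Visibility rules: a game is listed if the requester is its GM, the
        # game is public, the GM is a friend sharing at community level, the
        # requester is already a player, or the game is shared with one of
        # the requester's communities.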
q_gm = Q(gm=gamer)
q_gm_is_friend = Q(gm__in=friends) & Q(privacy_level="community")
q_isplayer = Q(id__in=game_player_ids)
q_community = Q(communities__id__in=communities) & Q(privacy_level="community")
q_public = Q(privacy_level="public")
qs = models.GamePosting.objects.filter(
q_gm | q_public | q_gm_is_friend | q_isplayer | q_community
).distinct()
return qs
def create(self, request, *args, **kwargs):
self.serializer_class = serializers.GameDataSerializer
return super().create(request, *args, **kwargs)
def retrieve(self, request, *args, **kwargs):
if not request.user.has_perm("game.is_member", self.get_object()):
logger.debug(
"User is not a member of game, swtiching serializer to list view mode."
)
self.serializer_detail_class = serializers.GameDataListSerializer
return super().retrieve(request, *args, **kwargs)
@action(methods=["post"], detail=True, parser_classes=[FormParser, JSONParser])
def apply(self, request, *args, **kwargs):
obj = self.get_object()
logger.debug("Retrieved game object of {}".format(obj))
if request.user.has_perm("game.is_member", obj):
return Response(
data={"errors": "You are already in this game..."},
status=status.HTTP_400_BAD_REQUEST,
)
new_application = serializers.GameApplicationSerializer(
data=request.data, context={"request": request}
)
if not new_application.is_valid():
return Response(
data=new_application.errors, status=status.HTTP_400_BAD_REQUEST
)
app = models.GamePostingApplication.objects.create(
game=obj,
gamer=request.user.gamerprofile,
message=new_application.validated_data["message"],
status="pending",
)
notify.send(
request.user.gamerprofile,
recipient=obj.gm.user,
verb="submitted application",
action_object=app,
target=obj,
)
return Response(
data=serializers.GameApplicationSerializer(
app, context={"request": request}
).data,
status=status.HTTP_201_CREATED,
)
@action(methods=["post"], detail=True, parser_classes=[FormParser, JSONParser])
def leave(self, request, *args, **kwargs):
obj = self.get_object()
if request.user == obj.gm.user:
return Response(
data={"errors": "The GM cannot leave the game."},
status=status.HTTP_400_BAD_REQUEST,
)
player = models.Player.objects.get(gamer=request.user.gamerprofile, game=obj)
player_left.send(models.Player, player=player)
player.delete()
return Response(status=status.HTTP_204_NO_CONTENT)
@method_decorator(
name="list",
decorator=swagger_auto_schema(
operation_summary="Game: List Sessions",
operation_description="List the sessions for the given game.",
manual_parameters=[parent_lookup_game__slug],
),
)
@method_decorator(
name="retrieve",
decorator=swagger_auto_schema(
operation_summary="Game Session: Details",
operation_description="Get the details for the given session. **NOTE**: If the user is just a player, the GM notes and player details will not be included.",
manual_parameters=[parent_lookup_game__slug],
responses={
200: serializers.GameSessionGMSerializer,
403: "You are not a member of this game.",
},
),
)
@method_decorator(
name="update",
decorator=swagger_auto_schema(
operation_summary="Game Session: Update",
operation_description="Update details of the game session.",
manual_parameters=[parent_lookup_game__slug],
request_body=serializers.GameSessionGMSerializer,
responses={
200: serializers.GameSessionGMSerializer,
403: "You are not the GM of this game.",
},
),
)
@method_decorator(
name="partial_update",
decorator=swagger_auto_schema(
operation_summary="Game Session: Update",
operation_description="Update details of the game session.",
manual_parameters=[parent_lookup_game__slug],
request_body=serializers.GameSessionGMSerializer,
responses={
200: serializers.GameSessionGMSerializer,
403: "You are not the GM of this game.",
},
),
)
@method_decorator(
name="destroy",
decorator=swagger_auto_schema(
operation_summary="Game Session: Delete",
operation_description="Delete the game session.",
manual_parameters=[parent_lookup_game__slug],
request_body=serializers.GameSessionGMSerializer,
responses={
204: "Session was deleted.",
403: "You are not the GM of this game.",
},
),
)
@method_decorator(
name="cancel",
decorator=swagger_auto_schema(
operation_summary="Game Session: Cancel",
operation_description="Cancel the game session.",
manual_parameters=[parent_lookup_game__slug],
request_body=no_body,
responses={
200: serializers.GameSessionGMSerializer,
400: "This session is already canceled or complete.",
403: "You are not the GM of this game.",
},
),
)
@method_decorator(
name="uncancel",
decorator=swagger_auto_schema(
operation_summary="Game Session: Uncancel",
operation_description="Uncancel the game session.",
manual_parameters=[parent_lookup_game__slug],
request_body=no_body,
responses={
200: serializers.GameSessionGMSerializer,
400: "This session is not canceled.",
403: "You are not the GM of this game.",
},
),
)
@method_decorator(
name="complete",
decorator=swagger_auto_schema(
operation_summary="Game Session: Mark Complete",
operation_description="Mark the game session as complete.",
manual_parameters=[parent_lookup_game__slug],
request_body=no_body,
responses={
200: serializers.GameSessionGMSerializer,
400: "This session is already canceled or complete.",
403: "You are not the GM of this game.",
},
),
)
@method_decorator(
name="uncomplete",
decorator=swagger_auto_schema(
operation_summary="Game Session: Uncomplete",
operation_description="Undo the completion status of the session.",
manual_parameters=[parent_lookup_game__slug],
request_body=no_body,
responses={
200: serializers.GameSessionGMSerializer,
400: "This session isn't marked as complete.",
403: "You are not the GM of this game.",
},
),
)
@method_decorator(
name="reschedule",
decorator=swagger_auto_schema(
operation_summary="Game Session: Reschedule",
operation_description="Reschedule the game session to another date/time.",
manual_parameters=[parent_lookup_game__slug],
request_body=serializers.ScheduleSerializer,
responses={
200: serializers.GameSessionGMSerializer,
400: "Your date and time were invalid or the session is already marked as complete or canceled.",
403: "You are not the GM of this game.",
},
),
)
@method_decorator(
name="addlog",
decorator=swagger_auto_schema(
operation_summary="Game Session: Add Adventure Log",
operation_description="Add an adventure log to this session.",
manual_parameters=[parent_lookup_game__slug],
request_body=serializers.AdventureLogSerializer,
responses={
201: serializers.AdventureLogSerializer,
400: "This session already has an adventure log. You should update that instead.",
403: "You don't have permission to add an adventure log.",
},
),
)
class GameSessionViewSet(
ParentObjectAutoPermissionViewSetMixin,
NestedViewSetMixin,
mixins.ListModelMixin,
mixins.RetrieveModelMixin,
mixins.UpdateModelMixin,
mixins.DestroyModelMixin,
viewsets.GenericViewSet,
):
"""
    Views for viewing and managing game session data.
"""
model = models.GameSession
serializer_class = serializers.GameSessionSerializer
lookup_field = "slug"
lookup_url_kwarg = "slug"
parent_dependent_actions = [
"create",
"retrieve",
"update",
"partial_update",
"list",
"destroy",
"reschedule",
"cancel",
"uncancel",
"addlog",
"complete",
"uncomplete",
]
parent_lookup_field = "game"
parent_object_model = models.GamePosting
parent_object_lookup_field = "slug"
parent_object_url_kwarg = "parent_lookup_game__slug"
permission_type_map = {
**ParentObjectAutoPermissionViewSetMixin.permission_type_map,
"addlog": "view",
"reschedule": "change",
"cancel": "change",
"uncancel": "change",
"complete": "change",
"uncomplete": "change",
}
permission_type_map["list"] = "view"
def get_parent_game(self):
return get_object_or_404(
models.GamePosting, slug=self.kwargs["parent_lookup_game__slug"]
)
def get_queryset(self):
return self.model.objects.filter(
game__slug=self.kwargs["parent_lookup_game__slug"]
).order_by("-scheduled_time")
def dispatch(self, request, *args, **kwargs):
if (
request.user.is_authenticated
and request.user.gamerprofile == self.get_parent_game().gm
):
self.serializer_class = serializers.GameSessionGMSerializer
return super().dispatch(request, *args, **kwargs)
@action(methods=["post"], detail=True)
def reschedule(self, request, *args, **kwargs):
date_serializer = serializers.ScheduleSerializer(data=request.data)
if not date_serializer.is_valid():
return Response(
data=date_serializer.errors, status=status.HTTP_400_BAD_REQUEST
)
obj = self.get_object()
if obj.status in ["complete", "cancel"]:
return Response(
data={
"errors": "This session is already marked as {} and cannot be rescheduled.".format(
obj.get_status_display()
)
},
status=status.HTTP_400_BAD_REQUEST,
)
obj.move(date_serializer.validated_data["new_scheduled_time"])
return Response(
data=self.serializer_class(obj, context={"request": request}).data,
status=status.HTTP_200_OK,
)
@action(methods=["post"], detail=True)
def complete(self, request, *args, **kwargs):
obj = self.get_object()
if obj.status in ["complete", "cancel"]:
return Response(
data={
"errors": "This object is either already completed or canceled and cannot be toggled to complete."
},
status=status.HTTP_400_BAD_REQUEST,
)
obj.status = "complete"
obj.save()
return Response(
data=self.serializer_class(obj, context={"request": request}).data,
status=status.HTTP_200_OK,
)
@action(methods=["post"], detail=True)
def uncomplete(self, request, *args, **kwargs):
obj = self.get_object()
if obj.status != "complete":
return Response(
data={
"errors": "This object is not completed and so completion cannot be undone."
},
status=status.HTTP_400_BAD_REQUEST,
)
obj.status = "pending"
obj.save()
return Response(
data=self.serializer_class(obj, context={"request": request}).data,
status=status.HTTP_200_OK,
)
@action(methods=["post"], detail=True)
def cancel(self, request, *args, **kwargs):
obj = self.get_object()
if obj.status in ["complete", "cancel"]:
return Response(
data={"errors": "This session is already completed or canceled."},
status=status.HTTP_400_BAD_REQUEST,
)
obj.cancel()
return Response(
data=self.serializer_class(obj, context={"request": request}).data,
status=status.HTTP_200_OK,
)
@action(methods=["post"], detail=True)
def uncancel(self, request, *args, **kwargs):
obj = self.get_object()
if obj.status != "cancel":
return Response(
data={
"errors": "This session is not canceled and can't be changed this way."
},
status=status.HTTP_400_BAD_REQUEST,
)
obj.uncancel()
return Response(
data=self.serializer_class(obj, context={"request": request}).data,
status=status.HTTP_200_OK,
)
@action(methods=["post"], detail=True)
def addlog(self, request, *args, **kwargs):
"""
Create the adventure log for this session.
"""
session = self.get_object()
if hasattr(session, "adventurelog"):
return Response(
data={"errors": "This session already has an adventure log."},
status=status.HTTP_400_BAD_REQUEST,
)
log_serializer = serializers.AdventureLogSerializer(
session=session, data=request.data, context={"request": request}
)
if not log_serializer.is_valid():
return Response(
data=log_serializer.errors, status=status.HTTP_400_BAD_REQUEST
)
new_log = log_serializer.save()
return Response(
data=serializers.AdventureLogSerializer(
new_log, context={"request": request}
).data,
status=status.HTTP_201_CREATED,
)
@method_decorator(
name="retrieve",
decorator=swagger_auto_schema(
operation_summary="Adventure Log: Details",
operation_description="Fetch the details for a given adventure log.",
manual_parameters=[
parent_lookup_session__game__slug,
parent_lookup_session__slug,
],
responses={
200: serializers.AdventureLogSerializer,
403: "You are not a member of this game.",
},
),
)
@method_decorator(
name="update",
decorator=swagger_auto_schema(
operation_summary="Adventure Log: Update",
operation_description="Update the details for a given adventure log.",
manual_parameters=[
parent_lookup_session__game__slug,
parent_lookup_session__slug,
],
request_body=serializers.AdventureLogSerializer,
responses={
200: serializers.AdventureLogSerializer,
403: "You don't have permissions to edit this adventure log.",
},
),
)
@method_decorator(
name="partial_update",
decorator=swagger_auto_schema(
operation_summary="Adventure Log: Update",
operation_description="Update the details for a given adventure log.",
manual_parameters=[
parent_lookup_session__game__slug,
parent_lookup_session__slug,
],
request_body=serializers.AdventureLogSerializer,
responses={
200: serializers.AdventureLogSerializer,
403: "You don't have permissions to edit this adventure log.",
},
),
)
@method_decorator(
name="destroy",
decorator=swagger_auto_schema(
operation_summary="Adventure Log: Delete",
operation_description="Delete a given adventure log.",
manual_parameters=[
parent_lookup_session__game__slug,
parent_lookup_session__slug,
],
request_body=no_body,
responses={
204: "The adventure log was successfully deleted.",
403: "You don't have permissions to edit this adventure log.",
},
),
)
class AdventureLogViewSet(
ParentObjectAutoPermissionViewSetMixin,
NestedViewSetMixin,
mixins.RetrieveModelMixin,
mixins.UpdateModelMixin,
mixins.DestroyModelMixin,
viewsets.GenericViewSet,
):
"""
    Allows the manipulation of adventure logs.
"""
model = models.AdventureLog
parent_lookup_field = "session__game"
parent_object_model = models.GamePosting
parent_object_lookup_field = "slug"
parent_object_url_kwarg = "parent_lookup_session__game__slug"
serializer_class = serializers.AdventureLogSerializer
lookup_field = "slug"
lookup_url_kwarg = "slug"
permission_required = "game.is_member"
permission_type_map = {**ParentObjectAutoPermissionViewSetMixin.permission_type_map}
permission_type_map["list"] = "add"
parent_dependent_actions = [
"create",
"retrieve",
"update",
"partial_update",
"destroy",
]
def get_queryset(self):
return models.AdventureLog.objects.filter(
session__slug=self.kwargs["parent_lookup_session__slug"]
)
@method_decorator(
name="list",
decorator=swagger_auto_schema(
operation_summary="List Your Game Applications",
operation_description="Fetch a list of all your game applications.",
),
)
@method_decorator(
name="retrieve",
decorator=swagger_auto_schema(
operation_summary="Your Game Application: Details",
operation_description="Fetch the details of your game application.",
),
)
@method_decorator(
name="update",
decorator=swagger_auto_schema(
operation_summary="Your Game Application: Update",
operation_description="Update the details of your game application.",
),
)
@method_decorator(
name="partial_update",
decorator=swagger_auto_schema(
operation_summary="Your Game Application: Update",
operation_description="Update the details of your game application.",
),
)
@method_decorator(
name="destroy",
decorator=swagger_auto_schema(
operation_summary="Your Game Application: Withdraw",
operation_description="Withdraw your game application by deleting the record.",
),
)
class GameApplicationViewSet(
AutoPermissionViewSetMixin,
mixins.ListModelMixin,
mixins.RetrieveModelMixin,
mixins.UpdateModelMixin,
mixins.DestroyModelMixin,
viewsets.GenericViewSet,
):
"""
    View for an applicant to review, update, and withdraw their applications to games.
"""
permission_classes = (IsAuthenticated,)
serializer_class = serializers.GameApplicationSerializer
filter_backends = [DjangoFilterBackend]
filterset_fields = ["status"]
lookup_field = "slug"
lookup_url_kwarg = "slug"
permission_type_map = {**AutoPermissionViewSetMixin.permission_type_map}
def get_queryset(self):
logger.debug("Fetching gamerprofile from request...")
gamer = self.request.user.gamerprofile
logger.debug("Fetching game applications for gamer {}".format(gamer))
qs = models.GamePostingApplication.objects.filter(
gamer=self.request.user.gamerprofile
).order_by("-modified", "-created", "status")
logger.debug(
"Retrieved queryset of length {} for gamer {}".format(
qs.count(), self.request.user.gamerprofile
)
)
return qs
@method_decorator(
name="list",
decorator=swagger_auto_schema(
operation_summary="List Applicants for Game",
operation_description="List the applicants for the current game. (GM Only)",
manual_parameters=[parent_lookup_game__slug],
),
)
@method_decorator(
name="retrieve",
decorator=swagger_auto_schema(
operation_summary="Game Applicant: Details",
operation_description="Fetch details for a given game application. (GM Only)",
manual_parameters=[parent_lookup_game__slug],
        responses={
200: serializers.GameApplicationGMSerializer,
403: "You are not the GM for this game.",
},
),
)
@method_decorator(
name="approve",
decorator=swagger_auto_schema(
operation_summary="Game Applicant: Approve",
operation_description="Approve the game applicant and add as a player to game.",
request_body=no_body,
responses={
201: serializers.PlayerSerializer,
403: "You are not the GM of this game.",
},
),
)
@method_decorator(
name="reject",
decorator=swagger_auto_schema(
operation_summary="Game Applicant: Reject",
operation_description="Reject the game applicant.",
request_body=no_body,
responses={
200: serializers.GameApplicationGMSerializer,
403: "You are not the GM of this game.",
},
),
)
class GMGameApplicationViewSet(
ParentObjectAutoPermissionViewSetMixin,
NestedViewSetMixin,
mixins.ListModelMixin,
mixins.RetrieveModelMixin,
viewsets.GenericViewSet,
):
"""
View for a GM to review and approve applicants.
"""
permission_classes = (IsAuthenticated,)
serializer_class = serializers.GameApplicationGMSerializer
filter_backends = [DjangoFilterBackend]
filterset_fields = ["status"]
lookup_field = "slug"
lookup_url_kwarg = "slug"
parent_lookup_field = "game"
parent_object_lookup_field = "slug"
parent_object_model = models.GamePosting
parent_object_url_kwarg = "parent_lookup_game__slug"
parent_dependent_actions = ["list", "retrieve", "approve", "reject"]
permission_type_map = {
**ParentObjectAutoPermissionViewSetMixin.permission_type_map,
"approve": "approve",
"reject": "approve",
}
permission_type_map["retrieve"] = "approve"
permission_type_map["list"] = "approve"
def get_queryset(self):
return models.GamePostingApplication.objects.filter(
game__slug=self.kwargs["parent_lookup_game__slug"]
).exclude(status="new")
def get_parent_game(self):
return get_object_or_404(
models.GamePosting, slug=self.kwargs["parent_lookup_game__slug"]
)
@action(methods=["post"], detail=True)
def approve(self, request, *args, **kwargs):
"""
Approves the game application.
"""
obj = self.get_object()
obj.status = "approve"
player = models.Player.objects.create(game=obj.game, gamer=obj.gamer)
obj.save()
return Response(
data=serializers.PlayerSerializer(
                player, context={"request": request}
).data,
status=status.HTTP_201_CREATED,
)
@action(methods=["post"], detail=True)
def reject(self, request, *args, **kwargs):
"""
Rejects the game application.
"""
obj = self.get_object()
obj.status = "deny"
obj.save()
notify.send(
obj,
recipient=obj.gamer.user,
verb="Your player application was not accepted",
action_object=obj,
target=obj.game,
)
return Response(
data=serializers.GameApplicationSerializer(
obj, context={"request": request}
).data,
status=status.HTTP_200_OK,
)
@method_decorator(
name="list",
decorator=swagger_auto_schema(
operation_summary="Game: Player List",
operation_description="List players for a given game",
manual_parameters=[parent_lookup_game__slug],
),
)
@method_decorator(
name="retrieve",
decorator=swagger_auto_schema(
operation_summary="Player: Details",
operation_description="Details for a player record in a given game.",
manual_parameters=[parent_lookup_game__slug],
responses={
200: serializers.PlayerSerializer,
403: "You are not a member of this game.",
},
),
)
@method_decorator(
name="kick",
decorator=swagger_auto_schema(
operation_summary="Player: Kick from game",
operation_description="Kick the player out of the game.",
manual_parameters=[parent_lookup_game__slug],
request_body=no_body,
responses={
204: "Player was removed from the game.",
403: "You are not the GM of this game.",
},
),
)
class PlayerViewSet(
ParentObjectAutoPermissionViewSetMixin,
NestedViewSetMixin,
mixins.ListModelMixin,
mixins.RetrieveModelMixin,
viewsets.GenericViewSet,
):
"""
Provides views for players in a given game.
"""
permission_classes = (IsAuthenticated,)
serializer_class = serializers.PlayerSerializer
permission_required = "game.is_member"
lookup_field = "slug"
lookup_url_kwarg = "slug"
parent_lookup_field = "game"
parent_object_model = models.GamePosting
parent_object_lookup_field = "slug"
parent_object_url_kwarg = "parent_lookup_game__slug"
parent_dependent_actions = ["list", "retrieve"]
permission_type_map = {**ParentObjectAutoPermissionViewSetMixin.permission_type_map}
permission_type_map["list"] = "view"
def get_parent_game(self):
return get_object_or_404(
models.GamePosting, slug=self.kwargs["parent_lookup_game__slug"]
)
def get_queryset(self):
return models.Player.objects.filter(game=self.get_parent_game())
@action(methods=["post"], detail=True)
def kick(self, request, *args, **kwargs):
obj = self.get_object()
player_kicked.send(request.user, player=obj)
obj.delete()
return Response(status=status.HTTP_204_NO_CONTENT)
@method_decorator(
name="list",
decorator=swagger_auto_schema(
operation_summary="Game: List Characters",
operation_description="Fetch the list of characters for a given game.",
manual_parameters=[parent_lookup_game__slug],
),
)
@method_decorator(
name="retrieve",
decorator=swagger_auto_schema(
operation_summary="Game: Character Details",
operation_description="Fetch the details of a character for a given game.",
manual_parameters=[parent_lookup_game__slug],
responses={
200: serializers.CharacterSerializer,
403: "You are not a member of this game.",
},
),
)
@method_decorator(
name="update",
decorator=swagger_auto_schema(
operation_summary="Game: Update Character Details",
operation_description="Update the character for the given game.",
manual_parameters=[parent_lookup_game__slug],
request_body=serializers.CharacterSerializer,
responses={
200: serializers.CharacterSerializer,
403: "You are not the owner of this character or the GM of the game.",
},
),
)
@method_decorator(
name="partial_update",
decorator=swagger_auto_schema(
operation_summary="Game: Update Character Details",
operation_description="Update the character for the given game.",
manual_parameters=[parent_lookup_game__slug],
request_body=serializers.CharacterSerializer,
responses={
200: serializers.CharacterSerializer,
403: "You are not the owner of this character or the GM of the game.",
},
),
)
@method_decorator(
name="deactivate",
decorator=swagger_auto_schema(
operation_summary="Game: Deactivate Character",
operation_description="Mark the character as inactive.",
manual_parameters=[parent_lookup_game__slug],
request_body=no_body,
responses={
200: serializers.CharacterSerializer,
400: "This character is already inactive.",
403: "You are not the owner of this character or the GM of the game.",
},
),
)
@method_decorator(
name="reactivate",
decorator=swagger_auto_schema(
operation_summary="Game: Reactivate Character",
operation_description="Mark the character as active.",
manual_parameters=[parent_lookup_game__slug],
request_body=no_body,
responses={
200: serializers.CharacterSerializer,
400: "This character is already active.",
403: "You are not the owner of this character or the GM of the game.",
},
),
)
@method_decorator(
name="destroy",
decorator=swagger_auto_schema(
operation_summary="Game: Delete Character",
operation_description="Delete the character.",
manual_parameters=[parent_lookup_game__slug],
request_body=no_body,
responses={
204: "Character was deleted.",
403: "You are not the owner of this character.",
},
),
)
@method_decorator(
name="approve",
decorator=swagger_auto_schema(
operation_summary="Game: Approve Character",
operation_description="Mark the character as approved (GM Only).",
manual_parameters=[parent_lookup_game__slug],
request_body=no_body,
responses={
200: serializers.CharacterSerializer,
400: "This character is already approved.",
403: "You are not the GM of the game.",
},
),
)
@method_decorator(
name="reject",
decorator=swagger_auto_schema(
operation_summary="Game: Reject Character",
operation_description="Mark the character as rejected (GM Only).",
manual_parameters=[parent_lookup_game__slug],
request_body=no_body,
responses={
200: serializers.CharacterSerializer,
400: "This character is already rejected.",
403: "You are not the GM of the game.",
},
),
)
class CharacterViewSet(
ParentObjectAutoPermissionViewSetMixin, NestedViewSetMixin, viewsets.ModelViewSet
):
"""
Provides views for the characters in a game.
"""
permission_classes = (IsAuthenticated,)
parser_classes = [FormParser, MultiPartParser]
parent_object_lookup_field = "slug"
parent_object_url_kwarg = "parent_lookup_game__slug"
parent_lookup_field = "game"
parent_object_model = models.GamePosting
parent_dependent_actions = ["create", "list", "retrieve"]
serializer_class = serializers.CharacterSerializer
lookup_field = "slug"
lookup_url_kwarg = "slug"
filter_backends = [DjangoFilterBackend]
filterset_fields = ["status"]
parent_game = None
permission_type_map = {
**ParentObjectAutoPermissionViewSetMixin.permission_type_map,
"approve": "approve",
"reject": "approve",
"deactivate": "delete",
"reactivate": "delete",
}
permission_type_map["list"] = "gamelist"
def get_parent_game(self):
if not self.parent_game:
self.parent_game = get_object_or_404(
models.GamePosting, slug=self.kwargs["parent_lookup_game__slug"]
)
return self.parent_game
def get_queryset(self):
return models.Character.objects.filter(game=self.get_parent_game())
def create(self, request, *args, **kwargs):
if request.user.gamerprofile == self.get_parent_game().gm:
return Response(
data={"errors": "Only a player can create a character."},
status=status.HTTP_403_FORBIDDEN,
)
char_ser = serializers.CharacterSerializer(
data=request.data,
context={"request": request, "game": self.get_parent_game()},
)
if not char_ser.is_valid():
return Response(data=char_ser.errors, status=status.HTTP_400_BAD_REQUEST)
char_ser.save()
return Response(data=char_ser.data, status=status.HTTP_201_CREATED)
@action(methods=["post"], detail=True, parser_classes=[FormParser, JSONParser])
def approve(self, request, *args, **kwargs):
"""
Approves the proposed character.
"""
obj = self.get_object()
obj.status = "approved"
obj.save()
return Response(
data=self.serializer_class(obj, context={"request": request}).data,
status=status.HTTP_200_OK,
)
@action(methods=["post"], detail=True, parser_classes=[FormParser, JSONParser])
def reject(self, request, *args, **kwargs):
"""
Rejects the proposed character.
"""
obj = self.get_object()
obj.status = "rejected"
obj.save()
return Response(
data=self.serializer_class(obj, context={"request": request}).data,
status=status.HTTP_200_OK,
)
@action(methods=["post"], detail=True, parser_classes=[FormParser, JSONParser])
def deactivate(self, request, *args, **kwargs):
"""
Make a character inactive.
"""
obj = self.get_object()
obj.status = "inactive"
obj.save()
return Response(
data=self.serializer_class(obj, context={"request": request}).data,
status=status.HTTP_200_OK,
)
@action(methods=["post"], detail=True, parser_classes=[FormParser, JSONParser])
def reactivate(self, request, *args, **kwargs):
"""
Reactivate an inactive character.
"""
obj = self.get_object()
obj.status = "pending"
obj.save()
return Response(
data=self.serializer_class(obj, context={"request": request}).data,
status=status.HTTP_200_OK,
)
@method_decorator(
name="list",
decorator=swagger_auto_schema(
operation_summary="List Your Characters",
operation_description="Fetch a list of all of your characters.",
),
)
@method_decorator(
name="retrieve",
decorator=swagger_auto_schema(
operation_summary="Your Character: Details",
operation_description="Fetch the details of your character.",
),
)
@method_decorator(
name="update",
decorator=swagger_auto_schema(
operation_summary="Your Character: Update",
operation_description="Update the details of your character.",
),
)
@method_decorator(
name="partial_update",
decorator=swagger_auto_schema(
operation_summary="Your Character: Update",
operation_description="Update the details of your character.",
),
)
@method_decorator(
name="destroy",
decorator=swagger_auto_schema(
operation_summary="Your Character: Delete",
operation_description="Delete your character.",
request_body=no_body,
responses={204: "Character was deleted."},
),
)
@method_decorator(
name="deactivate",
decorator=swagger_auto_schema(
operation_summary="Your Character: Deactivate",
operation_description="Mark your character as inactive.",
request_body=no_body,
responses={
200: "Character was marked as inactive.",
400: "Character was already inactive.",
},
),
)
@method_decorator(
name="reactivate",
decorator=swagger_auto_schema(
operation_summary="Your Character: Reactivate",
operation_description="Mark your character as active.",
request_body=no_body,
responses={
200: "Character was marked as active.",
400: "Character was already active.",
},
),
)
class MyCharacterViewSet(
AutoPermissionViewSetMixin,
NestedViewSetMixin,
mixins.ListModelMixin,
mixins.RetrieveModelMixin,
mixins.UpdateModelMixin,
mixins.DestroyModelMixin,
viewsets.GenericViewSet,
):
"""
    Provides a view so that players can view all their characters in one place.
"""
serializer_class = serializers.CharacterSerializer
permission_classes = (IsAuthenticated,)
lookup_field = "slug"
lookup_url_kwarg = "slug"
filter_backends = [DjangoFilterBackend]
filterset_fields = ["status"]
permission_type_map = {
**AutoPermissionViewSetMixin.permission_type_map,
"deactivate": "delete",
"reactivate": "delete",
}
permission_type_map["retrieve"] = "delete"
parser_classes = [FormParser, MultiPartParser]
def get_queryset(self):
return models.Character.objects.filter(
player__gamer=self.request.user.gamerprofile
)
@action(methods=["post"], detail=True, parser_classes=[FormParser, JSONParser])
def deactivate(self, request, *args, **kwargs):
"""
Make a character inactive.
"""
obj = self.get_object()
obj.status = "inactive"
obj.save()
return Response(
data=self.serializer_class(obj, context={"request": request}).data,
status=status.HTTP_200_OK,
)
@action(methods=["post"], detail=True, parser_classes=[FormParser, JSONParser])
def reactivate(self, request, *args, **kwargs):
"""
Reactivate an inactive character.
"""
obj = self.get_object()
obj.status = "pending"
obj.save()
return Response(
data=self.serializer_class(obj, context={"request": request}).data,
status=status.HTTP_200_OK,
)
@dparser_classes([FormParser, JSONParser])
def destroy(self, request, *args, **kwargs):
self.parser_classes = [FormParser, JSONParser]
return super().destroy(request, *args, **kwargs)
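# A minimal routing sketch for the nested viewsets above, assuming the
# drf-extensions ExtendedDefaultRouter; the viewset names (GamePostingViewSet,
# GameSessionViewSet) and basenames are illustrative, not confirmed here:
#
#   from rest_framework_extensions.routers import ExtendedDefaultRouter
#
#   router = ExtendedDefaultRouter()
#   game_routes = router.register(
#       r"games", GamePostingViewSet, basename="api-game"
#   )
#   game_routes.register(
#       r"sessions",
#       GameSessionViewSet,
#       basename="api-session",
#       parents_query_lookups=["game__slug"],
#   )
#   # The "game__slug" lookup is what arrives in the viewsets above as
#   # kwargs["parent_lookup_game__slug"].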
| [
[
[
7,
14
],
[
1015,
1022
]
],
[
[
57,
58
],
[
5921,
5922
],
[
5958,
5959
],
[
5978,
5979
],
[
6028,
6029
],
[
6076,
6077
],
[
6113,
6114
],
[
6161,
6162
]
],
[
[
88,
105
],
[
15326,
15343
],
[
28403,
28420
],
[
31567,
31584
],
[
37176,
37193
]
],
[
[
142,
158
],
[
1647,
1663
],
[
1893,
1909
],
[
2187,
2203
],
[
2636,
2652
],
[
3035,
3051
],
[
3442,
3458
],
[
3764,
3780
],
[
4184,
4200
],
[
8882,
8898
],
[
9136,
9152
],
[
9631,
9647
],
[
10082,
10098
],
[
10541,
10557
],
[
10969,
10985
],
[
11447,
11463
],
[
11915,
11931
],
[
12412,
12428
],
[
12909,
12925
],
[
13487,
13503
],
[
20194,
20210
],
[
20674,
20690
],
[
21229,
21245
],
[
21792,
21808
],
[
23404,
23420
],
[
23618,
23634
],
[
23839,
23855
],
[
24058,
24074
],
[
24285,
24301
],
[
25724,
25740
],
[
25997,
26013
],
[
26417,
26433
],
[
26803,
26819
],
[
29667,
29683
],
[
29911,
29927
],
[
30305,
30321
],
[
32033,
32049
],
[
32298,
32314
],
[
32709,
32725
],
[
33197,
33213
],
[
33693,
33709
],
[
34204,
34220
],
[
34711,
34727
],
[
35120,
35136
],
[
35604,
35620
],
[
39844,
39860
],
[
40047,
40063
],
[
40254,
40270
],
[
40459,
40475
],
[
40672,
40688
],
[
40944,
40960
],
[
41319,
41335
]
],
[
[
201,
220
],
[
5247,
5266
],
[
24965,
24984
],
[
27557,
27576
],
[
36730,
36749
],
[
42192,
42211
]
],
[
[
242,
249
],
[
1162,
1169
],
[
1360,
1367
],
[
1573,
1580
]
],
[
[
279,
288
],
[
1068,
1077
],
[
1263,
1272
],
[
1470,
1479
]
],
[
[
316,
323
],
[
3658,
3665
],
[
3970,
3977
],
[
11224,
11231
],
[
11708,
11715
],
[
12189,
12196
],
[
12693,
12700
],
[
22127,
22134
],
[
26653,
26660
],
[
27008,
27015
],
[
30568,
30575
],
[
33965,
33972
],
[
34474,
34481
],
[
34966,
34973
],
[
35396,
35403
],
[
35878,
35885
],
[
40874,
40881
],
[
41163,
41170
],
[
41536,
41543
]
],
[
[
325,
344
],
[
1696,
1715
],
[
1944,
1963
],
[
2240,
2259
],
[
2687,
2706
],
[
3094,
3113
],
[
3494,
3513
],
[
3814,
3833
],
[
4234,
4253
],
[
8931,
8950
],
[
9189,
9208
],
[
9682,
9701
],
[
10141,
10160
],
[
10593,
10612
],
[
11020,
11039
],
[
11500,
11519
],
[
11968,
11987
],
[
12467,
12486
],
[
12964,
12983
],
[
13538,
13557
],
[
20247,
20266
],
[
20725,
20744
],
[
21288,
21307
],
[
21844,
21863
],
[
23453,
23472
],
[
23671,
23690
],
[
23890,
23909
],
[
24117,
24136
],
[
24337,
24356
],
[
25773,
25792
],
[
26050,
26069
],
[
26469,
26488
],
[
26854,
26873
],
[
29716,
29735
],
[
29964,
29983
],
[
30354,
30373
],
[
32082,
32101
],
[
32351,
32370
],
[
32760,
32779
],
[
33256,
33275
],
[
33748,
33767
],
[
34259,
34278
],
[
34763,
34782
],
[
35172,
35191
],
[
35655,
35674
],
[
39893,
39912
],
[
40100,
40119
],
[
40305,
40324
],
[
40518,
40537
],
[
40724,
40743
],
[
40999,
41018
],
[
41374,
41393
]
],
[
[
379,
385
],
[
7891,
7897
],
[
29265,
29271
]
],
[
[
413,
419
],
[
14159,
14165
],
[
14186,
14192
],
[
14217,
14223
],
[
14246,
14252
],
[
22414,
22420
],
[
22445,
22451
],
[
22474,
22480
],
[
24582,
24588
],
[
24609,
24615
],
[
24640,
24646
],
[
24669,
24675
],
[
27272,
27278
],
[
27299,
27305
],
[
30817,
30823
],
[
30844,
30850
],
[
41773,
41779
],
[
41800,
41806
],
[
41831,
41837
],
[
41860,
41866
]
],
[
[
421,
427
],
[
7309,
7315
],
[
7611,
7617
],
[
8255,
8261
],
[
8612,
8618
],
[
8851,
8857
],
[
16218,
16224
],
[
16610,
16616
],
[
16848,
16854
],
[
17266,
17272
],
[
17484,
17490
],
[
17870,
17876
],
[
18087,
18093
],
[
18425,
18431
],
[
18613,
18619
],
[
18990,
18996
],
[
19180,
19186
],
[
19589,
19595
],
[
19903,
19909
],
[
20156,
20162
],
[
28989,
28995
],
[
29634,
29640
],
[
32002,
32008
],
[
37669,
37675
],
[
37970,
37976
],
[
38074,
38080
],
[
38505,
38511
],
[
38939,
38945
],
[
39372,
39378
],
[
39811,
39817
],
[
43048,
43054
],
[
43487,
43493
]
],
[
[
429,
437
],
[
4801,
4809
],
[
14276,
14284
],
[
22504,
22512
],
[
24699,
24707
],
[
27330,
27338
],
[
30875,
30883
],
[
36173,
36181
],
[
41890,
41898
]
],
[
[
476,
482
],
[
6910,
6916
],
[
8296,
8302
],
[
15928,
15934
],
[
16884,
16890
],
[
17520,
17526
],
[
18123,
18129
],
[
18649,
18655
],
[
19216,
19222
],
[
28515,
28521
],
[
29030,
29036
],
[
31781,
31787
],
[
38105,
38111
],
[
38541,
38547
],
[
38975,
38981
],
[
39408,
39414
],
[
42651,
42657
],
[
43084,
43090
]
],
[
[
521,
554
],
[
43523,
43538
]
],
[
[
590,
600
],
[
4989,
4999
],
[
6964,
6974
],
[
8350,
8360
],
[
36330,
36340
],
[
38159,
38169
],
[
38595,
38605
],
[
39029,
39039
],
[
39462,
39472
],
[
42472,
42482
],
[
42705,
42715
],
[
43138,
43148
],
[
43540,
43550
],
[
43645,
43655
]
],
[
[
602,
612
],
[
6976,
6986
],
[
8362,
8372
],
[
38171,
38181
],
[
38607,
38617
],
[
39041,
39051
],
[
39474,
39484
],
[
42717,
42727
],
[
43150,
43160
],
[
43552,
43562
],
[
43657,
43667
]
],
[
[
614,
629
],
[
5001,
5016
],
[
36342,
36357
],
[
42484,
42499
]
],
[
[
669,
684
],
[
4949,
4964
],
[
24863,
24878
],
[
27453,
27468
],
[
30994,
31009
],
[
36290,
36305
],
[
42095,
42110
]
],
[
[
721,
729
],
[
7208,
7216
],
[
7549,
7557
],
[
8100,
8108
],
[
8513,
8521
],
[
8835,
8843
],
[
16156,
16164
],
[
16360,
16368
],
[
16739,
16747
],
[
17072,
17080
],
[
17375,
17383
],
[
17698,
17706
],
[
17978,
17986
],
[
18309,
18317
],
[
18504,
18512
],
[
18823,
18831
],
[
19071,
19079
],
[
19477,
19485
],
[
19842,
19850
],
[
20000,
20008
],
[
28840,
28848
],
[
29479,
29487
],
[
31986,
31994
],
[
37562,
37570
],
[
37932,
37940
],
[
38038,
38046
],
[
38396,
38404
],
[
38830,
38838
],
[
39263,
39271
],
[
39702,
39710
],
[
42939,
42947
],
[
43378,
43386
]
],
[
[
775,
796
],
[
4750,
4771
]
],
[
[
798,
816
],
[
4777,
4795
],
[
14135,
14153
],
[
22390,
22408
],
[
27248,
27266
],
[
30793,
30811
],
[
36153,
36171
],
[
41749,
41767
]
],
[
[
855,
881
],
[
4718,
4744
],
[
5474,
5500
],
[
24550,
24576
],
[
25105,
25131
],
[
41717,
41743
],
[
42285,
42311
]
],
[
[
883,
921
],
[
14091,
14129
],
[
14994,
15032
],
[
22346,
22384
],
[
23002,
23040
],
[
27204,
27242
],
[
27954,
27992
],
[
30749,
30787
],
[
31419,
31457
],
[
36113,
36151
],
[
36846,
36884
]
],
[
[
937,
943
],
[
5030,
5036
],
[
14373,
14379
],
[
14840,
14846
],
[
22603,
22609
],
[
22691,
22697
],
[
27767,
27773
],
[
31222,
31228
],
[
36515,
36521
],
[
5831,
5837
],
[
6200,
6206
],
[
7667,
7673
],
[
8672,
8678
],
[
8766,
8772
],
[
15357,
15363
],
[
23286,
23292
],
[
25382,
25388
],
[
28215,
28221
],
[
28434,
28440
],
[
28745,
28751
],
[
31598,
31604
],
[
31717,
31723
],
[
37211,
37217
],
[
37366,
37372
],
[
42545,
42551
]
],
[
[
945,
956
],
[
2088,
2099
],
[
2144,
2155
],
[
2521,
2532
],
[
2860,
2871
],
[
2929,
2940
],
[
3267,
3278
],
[
3336,
3347
],
[
4375,
4386
],
[
4451,
4462
],
[
5128,
5139
],
[
5193,
5204
],
[
9518,
9529
],
[
9897,
9908
],
[
9971,
9982
],
[
10356,
10367
],
[
10430,
10441
],
[
10797,
10808
],
[
11270,
11281
],
[
11754,
11765
],
[
12235,
12246
],
[
12739,
12750
],
[
13197,
13208
],
[
13266,
13277
],
[
13766,
13777
],
[
13839,
13850
],
[
14415,
14426
],
[
20562,
20573
],
[
21024,
21035
],
[
21097,
21108
],
[
21587,
21598
],
[
21660,
21671
],
[
22839,
22850
],
[
24904,
24915
],
[
26301,
26312
],
[
26699,
26710
],
[
27054,
27065
],
[
27494,
27505
],
[
30199,
30210
],
[
31035,
31046
],
[
32600,
32611
],
[
32990,
33001
],
[
33060,
33071
],
[
33486,
33497
],
[
33556,
33567
],
[
34011,
34022
],
[
34520,
34531
],
[
35442,
35453
],
[
35924,
35935
],
[
36619,
36630
],
[
42037,
42048
],
[
6427,
6438
],
[
6811,
6822
],
[
7378,
7389
],
[
8127,
8138
],
[
15828,
15839
],
[
16044,
16055
],
[
19658,
19669
],
[
20027,
20038
],
[
28867,
28878
],
[
29506,
29517
],
[
37729,
37740
]
],
[
[
978,
991
],
[
31905,
31918
]
],
[
[
993,
1004
],
[
8749,
8760
]
],
[
[
1006,
1012
],
[
6652,
6658
],
[
7076,
7082
],
[
25190,
25196
],
[
25299,
25305
],
[
25539,
25545
]
],
[
[
1041,
1065
],
[
9099,
9123
],
[
9454,
9478
],
[
9849,
9873
],
[
10308,
10332
],
[
10749,
10773
],
[
11176,
11200
],
[
11660,
11684
],
[
12141,
12165
],
[
12645,
12669
],
[
13149,
13173
],
[
13718,
13742
],
[
25960,
25984
],
[
26238,
26262
],
[
29874,
29898
],
[
30135,
30159
],
[
30520,
30544
],
[
32261,
32285
],
[
32536,
32560
],
[
32942,
32966
],
[
33438,
33462
],
[
33917,
33941
],
[
34426,
34450
],
[
34918,
34942
],
[
35348,
35372
],
[
35830,
35854
]
],
[
[
1233,
1260
],
[
20485,
20512
],
[
20963,
20990
],
[
21526,
21553
],
[
22066,
22093
]
],
[
[
1434,
1467
],
[
20438,
20471
],
[
20916,
20949
],
[
21479,
21512
],
[
22019,
22052
]
],
[
[
4694,
4712
]
],
[
[
14067,
14085
]
],
[
[
22321,
22340
]
],
[
[
24522,
24544
]
],
[
[
27174,
27198
]
],
[
[
30730,
30743
]
],
[
[
36091,
36107
]
],
[
[
41693,
41711
]
]
] |
"""Mypy style test cases for SQLAlchemy stubs and plugin."""
import os
import os.path
import sys
import pytest # type: ignore # no pytest in typeshed
from mypy.test.config import test_temp_dir
from mypy.test.data import DataDrivenTestCase, DataSuite
from mypy.test.helpers import assert_string_arrays_equal
from mypy.util import try_find_python2_interpreter
from mypy import api
this_file_dir = os.path.dirname(os.path.realpath(__file__))
prefix = os.path.dirname(this_file_dir)
inipath = os.path.abspath(os.path.join(prefix, 'test'))
# Locations of test data files such as test case descriptions (.test).
test_data_prefix = os.path.join(prefix, 'test', 'test-data')
class SQLDataSuite(DataSuite):
files = ['sqlalchemy-basics.test',
'sqlalchemy-sql-elements.test',
'sqlalchemy-sql-sqltypes.test',
'sqlalchemy-sql-selectable.test',
'sqlalchemy-sql-schema.test',
'sqlalchemy-plugin-features.test',
'sqlalchemy-plugin-query.test']
data_prefix = test_data_prefix
def run_case(self, testcase: DataDrivenTestCase) -> None:
assert testcase.old_cwd is not None, "test was not properly set up"
mypy_cmdline = [
'--show-traceback',
'--no-silence-site-packages',
'--config-file={}/sqlalchemy.ini'.format(inipath),
]
py2 = testcase.name.lower().endswith('python2')
if py2:
if try_find_python2_interpreter() is None:
pytest.skip()
return
mypy_cmdline.append('--py2')
else:
mypy_cmdline.append('--python-version={}'.format('.'.join(map(str,
sys.version_info[:2]))))
# Write the program to a file.
program_path = os.path.join(test_temp_dir, 'main.py')
mypy_cmdline.append(program_path)
with open(program_path, 'w') as file:
for s in testcase.input:
file.write('{}\n'.format(s))
output = []
# Type check the program.
out, err, returncode = api.run(mypy_cmdline)
# split lines, remove newlines, and remove directory of test case
for line in (out + err).splitlines():
if line.startswith(test_temp_dir + os.sep):
                output.append(
                    line[len(test_temp_dir + os.sep):].rstrip("\r\n").replace('.py', ''))
else:
output.append(line.rstrip("\r\n"))
# Remove temp file.
os.remove(program_path)
assert_string_arrays_equal(testcase.output, output,
'Invalid output ({}, line {})'.format(
testcase.file, testcase.line))
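# A minimal sketch of one data-driven case in the .test files listed above,
# using mypy's test-data format (the case name and expected message are
# illustrative only):
#
#   [case testColumnBasics]
#   from sqlalchemy import Column, Integer
#   col = Column(Integer)
#   [out]
#   main:3: error: <expected type-checker message>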
| [
[
[
69,
71
]
],
[
[
79,
86
],
[
401,
403
],
[
417,
419
],
[
454,
456
],
[
495,
497
],
[
511,
513
],
[
632,
634
],
[
1842,
1844
],
[
2325,
2327
],
[
2389,
2391
],
[
2627,
2629
]
],
[
[
94,
97
],
[
1754,
1757
]
],
[
[
106,
112
],
[
1509,
1515
]
],
[
[
184,
197
],
[
1855,
1868
],
[
2309,
2322
],
[
2373,
2386
]
],
[
[
225,
243
],
[
1088,
1106
]
],
[
[
245,
254
],
[
695,
704
]
],
[
[
285,
311
],
[
2659,
2685
]
],
[
[
334,
362
],
[
1453,
1481
]
],
[
[
380,
383
],
[
2136,
2139
]
],
[
[
385,
398
],
[
470,
483
]
],
[
[
445,
451
],
[
524,
530
],
[
645,
651
]
],
[
[
485,
492
],
[
1346,
1353
]
],
[
[
613,
629
],
[
1037,
1053
]
],
[
[
682,
694
]
]
] |
#!/usr/bin/env python
# Copyright (C) 2017 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Generates the text to paste into the email for announcing a new
# release of Gerrit. The text is generated based on a template that
# is filled with values either passed to the script or calculated
# at runtime.
#
# The script outputs a plain text file with the announcement text:
#
# release-announcement-gerrit-X.Y.txt
#
# and, if GPG is available, the announcement text wrapped with a
# signature:
#
# release-announcement-gerrit-X.Y.txt.asc
#
# Usage:
#
# ./tools/release-announcement.py -v 2.14.2 -p 2.14.1 \
# -s "This release fixes several bugs since 2.14.1"
#
# Parameters:
#
# --version (-v): The version of Gerrit being released.
#
# --previous (-p): The previous version of Gerrit. Optional. If
# specified, the generated text includes a link to the gitiles
# log of commits between the previous and new versions.
#
# --summary (-s): Short summary of the release. Optional. When
# specified, the summary is inserted in the introductory sentence
# of the generated text.
#
# Prerequisites:
#
# - The Jinja2 python library [1] must be installed.
#
# - For GPG signing to work, the python-gnupg library [2] must be
# installed, and the ~/.gnupg folder must exist.
#
# - The war file must have been installed to the local Maven repository
# using the `./tools/mvn/api.sh war_install` command.
#
# [1] http://jinja.pocoo.org/
# [2] http://pythonhosted.org/gnupg/
from __future__ import print_function
import argparse
import hashlib
import os
import sys
from gnupg import GPG
from jinja2 import Template
class Version:
def __init__(self, version):
self.version = version
parts = version.split('.')
if len(parts) > 2:
self.major = ".".join(parts[:2])
self.patch = version
else:
self.major = version
self.patch = None
def __str__(self):
return self.version
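# Illustrative behavior of the Version helper (values are examples):
#   Version("2.14.2") -> major == "2.14", patch == "2.14.2"
#   Version("2.15")   -> major == "2.15", patch is None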
def _main():
descr = 'Generate Gerrit release announcement email text'
parser = argparse.ArgumentParser(
description=descr,
formatter_class=argparse.ArgumentDefaultsHelpFormatter)
parser.add_argument('-v', '--version', dest='version',
required=True,
help='gerrit version to release')
parser.add_argument('-p', '--previous', dest='previous',
help='previous gerrit version (optional)')
parser.add_argument('-s', '--summary', dest='summary',
help='summary of the release content (optional)')
options = parser.parse_args()
summary = options.summary
if summary and not summary.endswith("."):
summary = summary + "."
data = {
"version": Version(options.version),
"previous": options.previous,
"summary": summary
}
war = os.path.join(
os.path.expanduser("~/.m2/repository/com/google/gerrit/gerrit-war/"),
"%(version)s/gerrit-war-%(version)s.war" % data)
if not os.path.isfile(war):
print("Could not find war file for Gerrit %s in local Maven repository"
% data["version"], file=sys.stderr)
sys.exit(1)
md5 = hashlib.md5()
sha1 = hashlib.sha1()
sha256 = hashlib.sha256()
    BUF_SIZE = 65536  # Read data in 64 KB chunks
with open(war, 'rb') as f:
while True:
d = f.read(BUF_SIZE)
if not d:
break
md5.update(d)
sha1.update(d)
sha256.update(d)
data["sha1"] = sha1.hexdigest()
data["sha256"] = sha256.hexdigest()
data["md5"] = md5.hexdigest()
template = Template(open("tools/release-announcement-template.txt").read())
output = template.render(data=data)
filename = "release-announcement-gerrit-%s.txt" % data["version"]
with open(filename, "w") as f:
f.write(output)
gpghome = os.path.abspath(os.path.expanduser("~/.gnupg"))
if not os.path.isdir(gpghome):
print("Skipping signing due to missing gnupg home folder")
else:
try:
gpg = GPG(homedir=gpghome)
except TypeError:
gpg = GPG(gnupghome=gpghome)
signed = gpg.sign(output)
filename = filename + ".asc"
with open(filename, "w") as f:
f.write(str(signed))
if __name__ == "__main__":
_main()
| [
[
[
2044,
2058
]
],
[
[
2066,
2074
],
[
2601,
2609
],
[
2677,
2685
]
],
[
[
2082,
2089
],
[
3763,
3770
],
[
3788,
3795
],
[
3816,
3823
]
],
[
[
2097,
2099
],
[
3421,
3423
],
[
3443,
3445
],
[
3581,
3583
],
[
4469,
4471
],
[
4485,
4487
],
[
4528,
4530
]
],
[
[
2107,
2110
],
[
3720,
3723
],
[
3740,
3743
]
],
[
[
2129,
2132
],
[
4660,
4663
],
[
4725,
4728
]
],
[
[
2152,
2160
],
[
4219,
4227
]
],
[
[
2169,
2176
],
[
3311,
3318
]
],
[
[
2517,
2522
],
[
4924,
4929
]
]
] |
# coding: utf-8
"""
An API to insert and retrieve metadata on cloud artifacts.
No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen) # noqa: E501
OpenAPI spec version: v1alpha1
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
import pprint
import re # noqa: F401
import six
class ApiArtifact(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
"""
Attributes:
swagger_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
swagger_types = {
'name': 'str',
'checksum': 'str',
'id': 'str',
'names': 'list[str]'
}
attribute_map = {
'name': 'name',
'checksum': 'checksum',
'id': 'id',
'names': 'names'
}
def __init__(self, name=None, checksum=None, id=None, names=None): # noqa: E501
"""ApiArtifact - a model defined in Swagger""" # noqa: E501
self._name = None
self._checksum = None
self._id = None
self._names = None
self.discriminator = None
if name is not None:
self.name = name
if checksum is not None:
self.checksum = checksum
if id is not None:
self.id = id
if names is not None:
self.names = names
@property
def name(self):
"""Gets the name of this ApiArtifact. # noqa: E501
Name of the artifact. This may be the path to a binary or jar file, or in the case of a container build, the name used to push the container image to Google Container Registry, as presented to `docker push`. This field is deprecated in favor of the plural `names` field; it continues to exist here to allow existing BuildProvenance serialized to json in google.devtools.containeranalysis.v1alpha1.BuildDetails.provenance_bytes to deserialize back into proto. # noqa: E501
:return: The name of this ApiArtifact. # noqa: E501
:rtype: str
"""
return self._name
@name.setter
def name(self, name):
"""Sets the name of this ApiArtifact.
Name of the artifact. This may be the path to a binary or jar file, or in the case of a container build, the name used to push the container image to Google Container Registry, as presented to `docker push`. This field is deprecated in favor of the plural `names` field; it continues to exist here to allow existing BuildProvenance serialized to json in google.devtools.containeranalysis.v1alpha1.BuildDetails.provenance_bytes to deserialize back into proto. # noqa: E501
:param name: The name of this ApiArtifact. # noqa: E501
:type: str
"""
self._name = name
@property
def checksum(self):
"""Gets the checksum of this ApiArtifact. # noqa: E501
Hash or checksum value of a binary, or Docker Registry 2.0 digest of a container. # noqa: E501
:return: The checksum of this ApiArtifact. # noqa: E501
:rtype: str
"""
return self._checksum
@checksum.setter
def checksum(self, checksum):
"""Sets the checksum of this ApiArtifact.
Hash or checksum value of a binary, or Docker Registry 2.0 digest of a container. # noqa: E501
:param checksum: The checksum of this ApiArtifact. # noqa: E501
:type: str
"""
self._checksum = checksum
@property
def id(self):
"""Gets the id of this ApiArtifact. # noqa: E501
:return: The id of this ApiArtifact. # noqa: E501
:rtype: str
"""
return self._id
@id.setter
def id(self, id):
"""Sets the id of this ApiArtifact.
:param id: The id of this ApiArtifact. # noqa: E501
:type: str
"""
self._id = id
@property
def names(self):
"""Gets the names of this ApiArtifact. # noqa: E501
Related artifact names. This may be the path to a binary or jar file, or in the case of a container build, the name used to push the container image to Google Container Registry, as presented to `docker push`. Note that a single Artifact ID can have multiple names, for example if two tags are applied to one image. # noqa: E501
:return: The names of this ApiArtifact. # noqa: E501
:rtype: list[str]
"""
return self._names
@names.setter
def names(self, names):
"""Sets the names of this ApiArtifact.
Related artifact names. This may be the path to a binary or jar file, or in the case of a container build, the name used to push the container image to Google Container Registry, as presented to `docker push`. Note that a single Artifact ID can have multiple names, for example if two tags are applied to one image. # noqa: E501
:param names: The names of this ApiArtifact. # noqa: E501
:type: list[str]
"""
self._names = names
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
if issubclass(ApiArtifact, dict):
for key, value in self.items():
result[key] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, ApiArtifact):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""Returns true if both objects are not equal"""
return not self == other
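# A minimal usage sketch for the generated model (field values below are
# illustrative, not taken from any real registry):
if __name__ == "__main__":
    artifact = ApiArtifact(
        name="gcr.io/my-project/my-image",
        checksum="sha256:deadbeef",
        id="artifact-1",
        names=["gcr.io/my-project/my-image:latest"],
    )
    print(artifact.to_dict())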
| [
[
[
328,
334
],
[
6261,
6267
]
],
[
[
342,
344
]
],
[
[
367,
370
],
[
5335,
5338
]
],
[
[
379,
390
],
[
6039,
6050
],
[
6501,
6512
]
]
] |
# Licensed under a 3-clause BSD style license - see LICENSE.rst
# -*- coding: utf-8 -*-
from __future__ import absolute_import, division, unicode_literals, print_function
import re
import os
import tempfile
import six
from .. import environment
from ..console import log
from .. import util
WIN = (os.name == "nt")
def _find_conda():
"""Find the conda executable robustly across conda versions.
Returns
-------
conda : str
Path to the conda executable.
Raises
------
IOError
If the executable cannot be found in either the CONDA_EXE environment
variable or in the PATH.
Notes
-----
In POSIX platforms in conda >= 4.4, conda can be set up as a bash function
rather than an executable. (This is to enable the syntax
``conda activate env-name``.) In this case, the environment variable
``CONDA_EXE`` contains the path to the conda executable. In other cases,
we use standard search for the appropriate name in the PATH.
See https://github.com/airspeed-velocity/asv/issues/645 for more details.
"""
if 'CONDA_EXE' in os.environ:
conda = os.environ['CONDA_EXE']
else:
conda = util.which('conda')
return conda
class Conda(environment.Environment):
"""
Manage an environment using conda.
Dependencies are installed using ``conda``. The benchmarked
project is installed using ``pip`` (since ``conda`` doesn't have a
method to install from an arbitrary ``setup.py``).
"""
tool_name = "conda"
_matches_cache = {}
def __init__(self, conf, python, requirements):
"""
Parameters
----------
conf : Config instance
python : str
Version of Python. Must be of the form "MAJOR.MINOR".
requirements : dict
Dictionary mapping a PyPI package name to a version
identifier string.
"""
self._python = python
self._requirements = requirements
self._conda_channels = conf.conda_channels
super(Conda, self).__init__(conf, python, requirements)
@classmethod
def matches(cls, python):
# Calling conda can take a long time, so remember the result
if python not in cls._matches_cache:
cls._matches_cache[python] = cls._matches(python)
return cls._matches_cache[python]
@classmethod
def _matches(cls, python):
if not re.match(r'^[0-9].*$', python):
# The python name should be a version number
return False
try:
conda = _find_conda()
except IOError:
return False
else:
# This directory never gets created, since we're just
# doing a dry run below. All it needs to be is something
# that doesn't already exist.
path = os.path.join(tempfile.gettempdir(), 'check')
# Check that the version number is valid
try:
util.check_call([
conda,
'create',
'--yes',
'-p',
path,
'python={0}'.format(python),
'--dry-run'], display_error=False, dots=False)
except util.ProcessError:
return False
else:
return True
def _setup(self):
try:
conda = _find_conda()
except IOError as e:
raise util.UserError(str(e))
log.info("Creating conda environment for {0}".format(self.name))
# create a temporary environment.yml file
# and use that to generate the env for benchmarking
env_file = tempfile.NamedTemporaryFile(mode='w', delete=False, suffix=".yml")
try:
env_file.write('name: {0}\n'
'channels:\n'.format(self.name))
env_file.writelines((' - %s\n' % ch for ch in self._conda_channels))
env_file.write('dependencies:\n'
' - python={0}\n'
' - wheel\n'
' - pip\n'.format(self._python))
# categorize & write dependencies based on pip vs. conda
conda_args, pip_args = self._get_requirements(conda)
env_file.writelines((' - %s\n' % s for s in conda_args))
if pip_args:
# and now specify the packages that are to be installed in
# the pip subsection
env_file.write(' - pip:\n')
env_file.writelines((' - %s\n' % s for s in pip_args))
env_file.close()
util.check_output([conda] + ['env', 'create', '-f', env_file.name,
'-p', self._path, '--force'])
except Exception as exc:
if os.path.isfile(env_file.name):
with open(env_file.name, 'r') as f:
text = f.read()
log.info("conda env create failed: in {} with:\n{}".format(self._path, text))
raise
finally:
os.unlink(env_file.name)
def _get_requirements(self, conda):
if self._requirements:
# retrieve and return all conda / pip dependencies
conda_args = []
pip_args = []
for key, val in six.iteritems(self._requirements):
if key.startswith('pip+'):
if val:
pip_args.append("{0}=={1}".format(key[4:], val))
else:
pip_args.append(key[4:])
else:
if val:
conda_args.append("{0}={1}".format(key, val))
else:
conda_args.append(key)
return conda_args, pip_args
else:
return [], []
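    # Illustrative mapping for the requirement syntax handled above
    # (inputs are examples): {"numpy": "1.16", "pip+flake8": ""} yields
    # conda_args == ["numpy=1.16"] and pip_args == ["flake8"]; the
    # "pip+" prefix routes a dependency to the pip section of the env file.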
def run(self, args, **kwargs):
log.debug("Running '{0}' in {1}".format(' '.join(args), self.name))
return self.run_executable('python', args, **kwargs)
def run_executable(self, executable, args, **kwargs):
# Conda doesn't guarantee that user site directories are excluded
kwargs["env"] = dict(kwargs.pop("env", os.environ),
PYTHONNOUSERSITE=str("True"))
return super(Conda, self).run_executable(executable, args, **kwargs)
| [
[
[
112,
127
]
],
[
[
129,
137
]
],
[
[
139,
155
]
],
[
[
157,
171
]
],
[
[
180,
182
],
[
2443,
2445
]
],
[
[
190,
192
],
[
304,
306
],
[
1117,
1119
],
[
1145,
1147
],
[
2865,
2867
],
[
4879,
4881
],
[
5139,
5141
],
[
6263,
6265
]
],
[
[
200,
208
],
[
2878,
2886
],
[
3726,
3734
]
],
[
[
217,
220
],
[
5382,
5385
]
],
[
[
237,
248
],
[
1246,
1257
]
],
[
[
271,
274
],
[
3531,
3534
],
[
5014,
5017
],
[
5954,
5957
]
],
[
[
290,
294
],
[
1195,
1199
],
[
2997,
3001
],
[
3288,
3292
],
[
3499,
3503
],
[
4693,
4697
]
],
[
[
297,
300
]
],
[
[
327,
338
],
[
2591,
2602
],
[
3438,
3449
]
],
[
[
1240,
1245
],
[
2063,
2068
],
[
6356,
6361
]
]
] |
"""Timer class based on the timeit.Timer class, but torch aware."""
import enum
import timeit
import textwrap
from typing import Any, Callable, Dict, List, NoReturn, Optional, Type, Union
import numpy as np
import torch
from torch.utils.benchmark.utils import common, cpp_jit
from torch.utils.benchmark.utils._stubs import TimerClass, TimeitModuleType
from torch.utils.benchmark.utils.valgrind_wrapper import timer_interface as valgrind_timer_interface
__all__ = ["Timer", "timer", "Language"]
if torch.has_cuda and torch.cuda.is_available():
def timer() -> float:
torch.cuda.synchronize()
return timeit.default_timer()
else:
timer = timeit.default_timer
class Language(enum.Enum):
PYTHON = 0
CPP = 1
class CPPTimer:
def __init__(
self,
stmt: str,
setup: str,
timer: Callable[[], float],
globals: Dict[str, Any],
) -> None:
if timer is not timeit.default_timer:
raise NotImplementedError(
"PyTorch was built with CUDA and a GPU is present; however "
"Timer does not yet support GPU measurements. If your "
"code is CPU only, pass `timer=timeit.default_timer` to the "
"Timer's constructor to indicate this. (Note that this will "
"produce incorrect results if the GPU is in fact used, as "
"Timer will not synchronize CUDA.)"
)
if globals:
raise ValueError("C++ timing does not support globals.")
self._stmt: str = textwrap.dedent(stmt)
self._setup: str = textwrap.dedent(setup)
self._timeit_module: Optional[TimeitModuleType] = None
def timeit(self, number: int) -> float:
if self._timeit_module is None:
self._timeit_module = cpp_jit.compile_timeit_template(
self._stmt,
self._setup,
)
return self._timeit_module.timeit(number)
class Timer(object):
"""Helper class for measuring execution time of PyTorch statements.
For a full tutorial on how to use this class, see:
https://pytorch.org/tutorials/recipes/recipes/benchmark.html
The PyTorch Timer is based on `timeit.Timer` (and in fact uses
`timeit.Timer` internally), but with several key differences:
1) Runtime aware:
Timer will perform warmups (important as some elements of PyTorch are
lazily initialized), set threadpool size so that comparisons are
apples-to-apples, and synchronize asynchronous CUDA functions when
necessary.
2) Focus on replicates:
When measuring code, and particularly complex kernels / models,
run-to-run variation is a significant confounding factor. It is
expected that all measurements should include replicates to quantify
noise and allow median computation, which is more robust than mean.
To that effect, this class deviates from the `timeit` API by
conceptually merging `timeit.Timer.repeat` and `timeit.Timer.autorange`.
(Exact algorithms are discussed in method docstrings.) The `timeit`
method is replicated for cases where an adaptive strategy is not
desired.
3) Optional metadata:
When defining a Timer, one can optionally specify `label`, `sub_label`,
`description`, and `env`. (Defined later) These fields are included in
the representation of result object and by the `Compare` class to group
and display results for comparison.
4) Instruction counts
In addition to wall times, Timer can run a statement under Callgrind
and report instructions executed.
Directly analogous to `timeit.Timer` constructor arguments:
`stmt`, `setup`, `timer`, `globals`
PyTorch Timer specific constructor arguments:
`label`, `sub_label`, `description`, `env`, `num_threads`
Args:
stmt: Code snippet to be run in a loop and timed.
setup: Optional setup code. Used to define variables used in `stmt`
timer:
Callable which returns the current time. If PyTorch was built
without CUDA or there is no GPU present, this defaults to
`timeit.default_timer`; otherwise it will synchronize CUDA before
measuring the time.
globals:
A dict which defines the global variables when `stmt` is being
executed. This is the other method for providing variables which
`stmt` needs.
label:
String which summarizes `stmt`. For instance, if `stmt` is
"torch.nn.functional.relu(torch.add(x, 1, out=out))"
one might set label to "ReLU(x + 1)" to improve readability.
sub_label:
Provide supplemental information to disambiguate measurements
with identical stmt or label. For instance, in our example
above sub_label might be "float" or "int", so that it is easy
to differentiate:
"ReLU(x + 1): (float)"
"ReLU(x + 1): (int)"
when printing Measurements or summarizing using `Compare`.
description:
String to distinguish measurements with identical label and
sub_label. The principal use of `description` is to signal to
`Compare` the columns of data. For instance one might set it
based on the input size to create a table of the form: ::
| n=1 | n=4 | ...
------------- ...
ReLU(x + 1): (float) | ... | ... | ...
ReLU(x + 1): (int) | ... | ... | ...
using `Compare`. It is also included when printing a Measurement.
env:
This tag indicates that otherwise identical tasks were run in
            different environments, and are therefore not equivalent, for
instance when A/B testing a change to a kernel. `Compare` will
treat Measurements with different `env` specification as distinct
when merging replicate runs.
num_threads:
The size of the PyTorch threadpool when executing `stmt`. Single
            threaded performance is important as both a key inference workload
and a good indicator of intrinsic algorithmic efficiency, so the
default is set to one. This is in contrast to the default PyTorch
threadpool size which tries to utilize all cores.
"""
_timer_cls: Type[TimerClass] = timeit.Timer
def __init__(
self,
stmt: str = "pass",
setup: str = "pass",
timer: Callable[[], float] = timer,
globals: Optional[Dict[str, Any]] = None,
label: Optional[str] = None,
sub_label: Optional[str] = None,
description: Optional[str] = None,
env: Optional[str] = None,
num_threads: int = 1,
language: Union[Language, str] = Language.PYTHON,
):
if not isinstance(stmt, str):
raise ValueError("Currently only a `str` stmt is supported.")
# We copy `globals` to prevent mutations from leaking.
# (For instance, `eval` adds the `__builtins__` key)
self._globals = dict(globals or {})
if language in (Language.PYTHON, "py", "python"):
# Include `torch` if not specified as a convenience feature.
self._globals.setdefault("torch", torch)
self._language: Language = Language.PYTHON
elif language in (Language.CPP, "cpp", "c++"):
assert self._timer_cls is timeit.Timer, "_timer_cls has already been swapped."
self._timer_cls = CPPTimer
setup = ("" if setup == "pass" else setup)
self._language = Language.CPP
else:
raise ValueError(f"Invalid language `{language}`.")
# Convenience adjustment so that multi-line code snippets defined in
# functions do not IndentationError (Python) or look odd (C++). The
# leading newline removal is for the initial newline that appears when
# defining block strings. For instance:
# textwrap.dedent("""
# print("This is a stmt")
# """)
# produces '\nprint("This is a stmt")\n'.
#
# Stripping this down to 'print("This is a stmt")' doesn't change
# what gets executed, but it makes __repr__'s nicer.
stmt = textwrap.dedent(stmt)
stmt = (stmt[1:] if stmt and stmt[0] == "\n" else stmt).rstrip()
setup = textwrap.dedent(setup)
setup = (setup[1:] if setup and setup[0] == "\n" else setup).rstrip()
self._timer = self._timer_cls(
stmt=stmt,
setup=setup,
timer=timer,
globals=valgrind_timer_interface.CopyIfCallgrind.unwrap_all(self._globals),
)
self._task_spec = common.TaskSpec(
stmt=stmt,
setup=setup,
label=label,
sub_label=sub_label,
description=description,
env=env,
num_threads=num_threads,
)
def timeit(self, number: int = 1000000) -> common.Measurement:
"""Mirrors the semantics of timeit.Timer.timeit().
Execute the main statement (`stmt`) `number` times.
https://docs.python.org/3/library/timeit.html#timeit.Timer.timeit
"""
with common.set_torch_threads(self._task_spec.num_threads):
# Warmup
self._timer.timeit(number=max(int(number // 100), 1))
return common.Measurement(
number_per_run=number,
raw_times=[self._timer.timeit(number=number)],
task_spec=self._task_spec
)
def repeat(self, repeat: int = -1, number: int = -1) -> None:
raise NotImplementedError("See `Timer.blocked_autorange.`")
def autorange(self, callback: Optional[Callable[[int, float], NoReturn]] = None) -> None:
raise NotImplementedError("See `Timer.blocked_autorange.`")
def _threaded_measurement_loop(
self,
number: int,
time_hook: Callable[[], float],
stop_hook: Callable[[List[float]], bool],
min_run_time: float,
max_run_time: Optional[float] = None,
callback: Optional[Callable[[int, float], NoReturn]] = None
) -> List[float]:
total_time = 0.0
can_stop = False
times: List[float] = []
with common.set_torch_threads(self._task_spec.num_threads):
while (total_time < min_run_time) or (not can_stop):
time_spent = time_hook()
times.append(time_spent)
total_time += time_spent
if callback:
callback(number, time_spent)
can_stop = stop_hook(times)
if max_run_time and total_time > max_run_time:
break
return times
def _estimate_block_size(self, min_run_time: float) -> int:
with common.set_torch_threads(self._task_spec.num_threads):
# Estimate the block size needed for measurement to be negligible
# compared to the inner loop. This also serves as a warmup.
overhead = np.median([self._timer.timeit(0) for _ in range(5)])
number = 1
while True:
time_taken = self._timer.timeit(number)
relative_overhead = overhead / time_taken
if relative_overhead <= 1e-4 and time_taken >= min_run_time / 1000:
break
if time_taken > min_run_time:
break
number *= 10
return number
def adaptive_autorange(
self,
threshold: float = 0.1,
*,
min_run_time: float = 0.01,
max_run_time: float = 10.0,
callback: Optional[Callable[[int, float], NoReturn]] = None,
) -> common.Measurement:
number = self._estimate_block_size(min_run_time=0.05)
def time_hook() -> float:
return self._timer.timeit(number)
def stop_hook(times: List[float]) -> bool:
if len(times) > 3:
return common.Measurement(
number_per_run=number,
raw_times=times,
task_spec=self._task_spec
).meets_confidence(threshold=threshold)
return False
times = self._threaded_measurement_loop(
number, time_hook, stop_hook, min_run_time, max_run_time, callback=callback)
return common.Measurement(
number_per_run=number,
raw_times=times,
task_spec=self._task_spec
)
def blocked_autorange(
self,
callback: Optional[Callable[[int, float], NoReturn]] = None,
min_run_time: float = 0.2,
) -> common.Measurement:
"""Measure many replicates while keeping timer overhead to a minimum.
At a high level, blocked_autorange executes the following pseudo-code::
`setup`
total_time = 0
while total_time < min_run_time
start = timer()
for _ in range(block_size):
`stmt`
total_time += (timer() - start)
Note the variable `block_size` in the inner loop. The choice of block
size is important to measurement quality, and must balance two
competing objectives:
1) A small block size results in more replicates and generally
better statistics.
2) A large block size better amortizes the cost of `timer`
invocation, and results in a less biased measurement. This is
important because CUDA synchronization time is non-trivial
(order single to low double digit microseconds) and would
otherwise bias the measurement.
blocked_autorange sets block_size by running a warmup period,
increasing block size until timer overhead is less than 0.1% of
the overall computation. This value is then used for the main
measurement loop.
Returns:
A `Measurement` object that contains measured runtimes and
repetition counts, and can be used to compute statistics.
(mean, median, etc.)
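Example (a minimal sketch, not part of the original docstring; assumes
`torch` is importable)::
timer = Timer(
stmt="x * x",
setup="import torch\nx = torch.ones((64, 64))",
)
m = timer.blocked_autorange(min_run_time=0.5)
print(m.median) # median time per run, in seconds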
"""
number = self._estimate_block_size(min_run_time)
def time_hook() -> float:
return self._timer.timeit(number)
def stop_hook(times: List[float]) -> bool:
return True
times = self._threaded_measurement_loop(
number, time_hook, stop_hook,
min_run_time=min_run_time,
callback=callback)
return common.Measurement(
number_per_run=number,
raw_times=times,
task_spec=self._task_spec
)
def collect_callgrind(
self,
number: int = 100,
collect_baseline: bool = True
) -> valgrind_timer_interface.CallgrindStats:
"""Collect instruction counts using Callgrind.
Unlike wall times, instruction counts are deterministic
(modulo non-determinism in the program itself and small amounts of
jitter from the Python interpreter.) This makes them ideal for detailed
performance analysis. This method runs `stmt` in a separate process
so that Valgrind can instrument the program. Performance is severely
degraded due to the instrumentation; however, this is ameliorated by
the fact that a small number of iterations is generally sufficient to
obtain good measurements.
In order to use this method, `valgrind`, `callgrind_control`, and
`callgrind_annotate` must be installed.
Because there is a process boundary between the caller (this process)
and the `stmt` execution, `globals` cannot contain arbitrary in-memory
data structures (unlike the timing methods). Instead, globals are
restricted to builtins, `nn.Module`s, and TorchScripted functions/modules
to reduce the surprise factor from serialization and subsequent
deserialization. The `GlobalsBridge` class provides more detail on this
subject. Take particular care with nn.Modules: they rely on pickle and
you may need to add an import to `setup` for them to transfer properly.
By default, a profile for an empty statement will be collected and
cached to indicate how many instructions are from the Python loop which
drives `stmt`.
Returns:
A `CallgrindStats` object which provides instruction counts and
some basic facilities for analyzing and manipulating results.
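Example (a sketch, not part of the original docstring; assumes the
Valgrind tools above are installed)::
stats = Timer(
stmt="torch.ones((8, 8)) + torch.ones((8, 8))",
setup="import torch",
).collect_callgrind(number=10)
print(stats.counts(denoise=True)) # total instruction count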
"""
if not isinstance(self._task_spec.stmt, str):
raise ValueError("`collect_callgrind` currently only supports string `stmt`")
# Check that the statement is valid. It doesn't guarantee success, but it's much
# simpler and quicker to raise an exception for a faulty `stmt` or `setup` in
# the parent process rather than the valgrind subprocess.
self._timer.timeit(1)
is_python = (self._language == Language.PYTHON)
assert is_python or not self._globals
return valgrind_timer_interface.wrapper_singleton().collect_callgrind(
task_spec=self._task_spec,
globals=self._globals,
number=number,
collect_baseline=collect_baseline and is_python,
is_python=is_python)
| [
[
[
75,
79
],
[
701,
705
]
],
[
[
87,
93
],
[
663,
669
],
[
6580,
6586
],
[
622,
628
],
[
937,
943
],
[
7642,
7648
]
],
[
[
101,
109
],
[
1562,
1570
],
[
1611,
1619
],
[
8488,
8496
],
[
8599,
8607
]
],
[
[
129,
132
],
[
892,
895
],
[
6763,
6766
]
],
[
[
134,
142
],
[
844,
852
],
[
6698,
6706
],
[
9971,
9979
],
[
10181,
10189
],
[
10221,
10229
],
[
10354,
10362
],
[
11949,
11957
],
[
12851,
12859
]
],
[
[
144,
148
],
[
882,
886
],
[
6753,
6757
]
],
[
[
150,
154
],
[
10404,
10408
],
[
10231,
10235
],
[
10482,
10486
],
[
12193,
12197
],
[
14603,
14607
]
],
[
[
156,
164
],
[
9994,
10002
],
[
10377,
10385
],
[
11972,
11980
],
[
12874,
12882
]
],
[
[
166,
174
],
[
1663,
1671
],
[
6744,
6752
],
[
6792,
6800
],
[
6833,
6841
],
[
6876,
6884
],
[
6911,
6919
],
[
9962,
9970
],
[
10303,
10311
],
[
10345,
10353
],
[
11940,
11948
],
[
12842,
12850
]
],
[
[
176,
180
],
[
6561,
6565
]
],
[
[
182,
187
],
[
6981,
6986
]
],
[
[
196,
207
],
[
11293,
11295
]
],
[
[
215,
220
],
[
502,
507
],
[
521,
526
],
[
582,
587
],
[
7486,
7491
]
],
[
[
261,
267
],
[
8937,
8943
],
[
9213,
9219
],
[
9452,
9458
],
[
9614,
9620
],
[
10512,
10518
],
[
11065,
11071
],
[
12000,
12006
],
[
12650,
12656
],
[
12937,
12943
],
[
14827,
14833
],
[
12269,
12275
]
],
[
[
269,
276
],
[
1816,
1823
]
],
[
[
324,
334
],
[
6566,
6576
]
],
[
[
336,
352
],
[
1672,
1688
]
],
[
[
410,
453
],
[
8833,
8857
],
[
15075,
15099
],
[
17374,
17398
]
],
[
[
456,
463
]
],
[
[
556,
561
],
[
6720,
6725
]
],
[
[
655,
660
],
[
6720,
6725
]
],
[
[
692,
700
],
[
7004,
7012
],
[
6987,
6995
],
[
7333,
7341
],
[
7532,
7540
],
[
7521,
7529
],
[
7575,
7583
],
[
7818,
7826
],
[
17296,
17304
]
],
[
[
748,
756
],
[
7725,
7733
]
],
[
[
1979,
1984
]
]
] |
from Ranger.src.Range.Cut import Cut
class Range(object):
"""
Class used to represent a range along some 1-D domain. The range
is represented by 2 cutpoints and can be unbounded by specifying an
aboveAll or belowAll Cut.
"""
def __init__(self, lowerCut, upperCut):
""" Instantiates a Range
Parameters
----------
lowerCut : Cut object
Specifies the lower cut for the range
upperCut : Cut object
Specifies the upper cut for the range
Raises
------
ValueError
If bound(s) are not Cut objects or lower > upper
"""
if not all(map(lambda x: isinstance(x, Cut), (lowerCut,upperCut))):
raise ValueError("Bounds must be Cut objects")
elif lowerCut > upperCut:
raise ValueError("Lower bound cannot be greater than upper bound")
self.lowerCut = lowerCut
self.upperCut = upperCut
def __repr__(self):
try:
return_str = '[' if self.isLowerBoundClosed() else '('
except TypeError:
return_str = '('
return_str += (str(self.lowerCut.point) if not self.lowerCut.belowAll \
else '')
return_str += ' , '
return_str += (str(self.upperCut.point) if not self.upperCut.aboveAll \
else '')
try:
return_str += ']' if self.isUpperBoundClosed() else ')'
except TypeError:
return_str += ')'
return return_str
def __hash__(self):
return (hash(self.lowerCut)*31 + hash(self.upperCut))
def __eq__(self, other):
if not isinstance(other, Range):
return False
else:
return ((self.lowerCut == other.lowerCut) and \
(self.upperCut == other.upperCut))
def __ne__(self, other):
return not self.__eq__(other)
def contains(self, val):
""" Returns true if the range contains the value
Parameters
----------
val : Comparable object of the appropriate type for the range
Value to query whether in the range
Raises
------
ValueError
If the value type not compatible with cutpoint type
Returns
-------
True if the range contains the value
"""
return (self.lowerCut < val and \
self.upperCut > val)
def containsAll(self, vals):
""" Returns True if the range contains all values in some
iterable
Parameters
----------
vals : Iterable of comparable object of appropriate type for range
Values to query against the range
Raises
------
ValueError
If there is a value type not compatible with the cutpoint type
Returns
-------
True if the range contains all values
"""
for val in vals:
if not self.contains(val):
return False
return True
def getDistanceFromPoint(self, val, distFunc = lambda x1, x2: abs(x1-x2)):
""" Returns the minimum distance of a Range from a Point, returning 0
if there is an overlap.
Note that both upper and lower bounds must be closed for this function
to work
Parameters
----------
val : comparable, compatible with cutpoint type
The value of the point where the distance is desired
distFunc : callable
Function that calculates the distance between two points in the
domain of the Range
Raises
------
TypeError
If the upper and/or lower bounds of this Range are not closed
or if the distFunc not compatible with the type
Returns
-------
The minimum distance between the Range and the Point. Returns 0
if there is an overlap
"""
if not all((self.isLowerBoundClosed(), self.isUpperBoundClosed())):
raise TypeError("Range is not closed")
if self.contains(val):
return 0.
else:
return min(distFunc(self.lowerCut.point, val),
distFunc(self.upperCut.point, val))
def getDistanceFromRange(self, other, distFunc = lambda x1,x2: abs(x1-x2)):
""" Returns the minimum distance of a Range from another Range, returning
0 if there is any overlap
Note that both Ranges must be closed for this function to work
Parameters
----------
other : Range, compatible with this Range's domain
The Range to compare to
distFunc : callable
Function that calculates the distance between two points in the
domain of the Range
Raises
------
TypeError
If the upper and/or lower bounds of this Range are not closed
or if the distFunc not compatible with the type
Returns
-------
Minimum distance between the ranges
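For example, for [1,3] and [7,9] with the default distFunc this
returns min(|1 - 9|, |7 - 3|) = 4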
"""
if not isinstance(other, Range):
raise TypeError("other is not a Range")
if not all((self.isLowerBoundClosed(), self.isUpperBoundClosed(),
other.isLowerBoundClosed(), other.isUpperBoundClosed())):
raise TypeError("Not all Ranges closed")
if self.isConnected(other):
return 0.
else:
return min(distFunc(self.lowerCut.point, other.upperCut.point),
distFunc(other.lowerCut.point, self.upperCut.point))
def hasLowerBound(self):
""" Returns True if the range has a lower endpoint (not unbounded
at the lower end)
Returns
-------
True if the range has a lower endpoint
"""
return (not self.lowerCut.belowAll)
def hasUpperBound(self):
""" Returns True if the range has an upper endpoint (not unbounded
at the upper end)
Returns
-------
True if the range has an upper endpoint
"""
return (not self.upperCut.aboveAll)
def lowerEndpoint(self):
""" Returns the lower endpoint of the range if it exists. Otherwise
raises a TypeError
Raises
------
TypeError
If the range is unbounded below
Returns
-------
The lower endpoint of the range
"""
if self.lowerCut.point is None:
raise TypeError("Range unbounded below")
else:
return self.lowerCut.point
def upperEndpoint(self):
""" Returns the upper endpoint of the range if it exists. Otherwise
raises a TypeError
Raises
------
TypeError
If the range is unbounded above
Returns
-------
The upper endpoint of the range
"""
if self.upperCut.point is None:
raise TypeError("Range unbounded above")
else:
return self.upperCut.point
def isLowerBoundClosed(self):
""" Returns whether the lower bound is closed (if there is a
lower bound)
Raises
------
TypeError
If the range is unbounded below
Returns
-------
True if the lower bound is closed
"""
if self.lowerCut.point is None:
raise TypeError("Range unbounded below")
else:
return self.lowerCut.below
def isUpperBoundClosed(self):
""" Returns whether the upper bound is closed (if there is an
upper bound)
Raises
------
TypeError
If the range is unbounded above
Returns
-------
True if the upper bound is closed
"""
if self.upperCut.point is None:
raise TypeError("Range unbounded above")
else:
return (not self.upperCut.below)
def isEmpty(self):
""" Returns True if the range is of form [v, v) or (v, v]
Returns
-------
True if the range is of the form [v,v) or (v,v]
"""
return self.lowerCut == self.upperCut
def encloses(self, other):
""" Returns True if the bounds of the other range do not extend
outside the bounds of this range
Examples:
[3,6] encloses [4,5]
(3,6) encloses (3,6)
[3,6] encloses [4,4]
(3,6] does not enclose [3,6]
[4,5] does not enclose (3,6)
Parameters
----------
other : A Range
The range to compare to
Raises
------
ValueError
If object passed in is not a Range
Returns
-------
True if the bounds of the other range do not extend outside
the bounds of this range
"""
if not isinstance(other, Range):
raise ValueError("Range required")
return ((self.lowerCut <= other.lowerCut) and \
(self.upperCut >= other.upperCut))
def isConnected(self, other):
""" Returns True if there is a (possibly empty) range that is
enclosed by both this range and other
Examples:
[2,4] and [5,7] are not connected
[2,4] and [3,5] are connected
[2,4] and [4,6] are connected
[3,5] and (5,10) are connected
Parameters
----------
other : A range
The range to compare to
Raises
------
ValueError
If object passed in is not a Range
Returns
-------
True if there is a (possibly empty) range that is enclosed by
both this range and other
"""
if not isinstance(other, Range):
raise ValueError("Range required")
return ((self.lowerCut <= other.upperCut) and \
(other.lowerCut <= self.upperCut))
def intersection(self, other):
""" Returns the maximal range enclosed by both this range and the
other range, if such a range exists
Examples:
Intersection of [1,5] and [3,7] is [3,5]
Intersection of [1,5] and [5,7] is [5,5]
Parameters
----------
other : A range
The range to compare to
Raises
------
ValueError
If object passed in is not a Range or if there is no intersection
Returns
-------
The intersection range
"""
if not isinstance(other, Range):
raise ValueError("Range required")
if ((self.lowerCut >= other.lowerCut) and \
(self.upperCut <= other.upperCut)):
return Range(self.lowerCut, self.upperCut)
elif ((self.lowerCut <= other.lowerCut) and \
(self.upperCut >= other.upperCut)):
return Range(other.lowerCut, other.upperCut)
else:
newLower = self.lowerCut if (self.lowerCut >= other.lowerCut) else \
other.lowerCut
newUpper = self.upperCut if (self.upperCut <= other.upperCut) else \
other.upperCut
return Range(newLower, newUpper)
def span(self, other):
""" Returns the minimal range that encloses both this range and
the other. Note that if the input ranges are not connected, the span can
contain values that are not contained within either input range
Examples:
Span of [1,3] and [5,7] is [1,7]
Parameters
----------
other : A range
A range to span with
Raises
------
ValueError
If object passed in is not a Range
Returns
-------
The minimal range enclosing both this range and the other range
"""
if ((self.lowerCut <= other.lowerCut) and \
(self.upperCut >= other.upperCut)):
return Range(self.lowerCut, self.upperCut)
elif ((self.lowerCut >= other.lowerCut) and \
(self.upperCut <= other.upperCut)):
return Range(other.lowerCut, other.upperCut)
else:
newLower = self.lowerCut if (self.lowerCut <= other.lowerCut) else \
other.lowerCut
newUpper = self.upperCut if (self.upperCut >= other.upperCut) else \
other.upperCut
return Range(newLower, newUpper)
##################
# Static methods #
##################
@staticmethod
def _validate_cutpoints(*pts):
if not all(map(lambda x: (hasattr(x, "__lt__") and \
hasattr(x, "__gt__")) or hasattr(x,'__cmp__'), pts)):
raise ValueError("Cutpoint type(s) not comparable")
if len(pts) == 2:
if not (issubclass(type(pts[0]),type(pts[1])) or \
issubclass(type(pts[1]),type(pts[0]))):
raise ValueError("Cutpoints are not compatible")
return True
@staticmethod
def _get_type(*pts):
if len(pts) == 1: return type(pts[0])
elif len(pts) == 2:
if issubclass(type(pts[0]),type(pts[1])):
return type(pts[1])
elif issubclass(type(pts[1]),type(pts[0])):
return type(pts[0])
else:
raise ValueError("Cutpoints are not compatible")
@staticmethod
def closed(lower, upper):
""" Creates a range including the endpoints (i.e. [lower, upper])
Parameters
----------
lower : comparable, of same type as or subclass of upper type
The lower bound
upper : comparable, of same type as or subclass of lower type
The upper bound
Raises
------
ValueError
If type(s) are not comparable or compatible
Returns
-------
A Range object [lower, upper]
"""
# Ensure cutpoints are of compatible, appropriate types
Range._validate_cutpoints(lower, upper)
theType = Range._get_type(lower,upper)
return Range(Cut.belowValue(lower, theType=theType),
Cut.aboveValue(upper, theType=theType))
@staticmethod
def closedOpen(lower, upper):
""" Creates a range including the lower endpoint (i.e. [lower, upper))
Parameters
----------
lower : comparable, of same type as or subclass of upper type
The lower bound
upper : comparable, of same type as or subclass of lower type
The upper bound
Raises
------
ValueError
If type(s) are not comparable or compatible
Returns
-------
A Range object [lower, upper)
"""
# Ensure cutpoints are of compatible, appropriate types
Range._validate_cutpoints(lower, upper)
theType = Range._get_type(lower,upper)
return Range(Cut.belowValue(lower, theType=theType),
Cut.belowValue(upper, theType=theType))
@staticmethod
def openClosed(lower, upper):
""" Creates a range including the upper (i.e. (lower, upper])
Parameters
----------
lower : comparable, of same type as or subclass of upper type
The lower bound
upper : comparable, of same type as or subclass of lower type
The upper bound
Raises
------
ValueError
If type(s) are not comparable or compatible
Returns
-------
A Range object (lower, upper]
"""
# Ensure cutpoints are of compatible, appropriate types
Range._validate_cutpoints(lower, upper)
theType = Range._get_type(lower,upper)
return Range(Cut.aboveValue(lower, theType=theType),
Cut.aboveValue(upper, theType=theType))
@staticmethod
def open(lower, upper):
""" Creates a range excluding the endpoints (i.e. (lower, upper))
Parameters
----------
lower : comparable, of same type as or subclass of upper type
The lower bound
upper : comparable, of same type as or subclass of lower type
The upper bound
Raises
------
ValueError
If type(s) are not comparable or compatible or if constructing
a range of type (v,v), which is invalid
Returns
-------
A Range object (lower, upper)
"""
# Ensure cutpoints are of compatible, appropriate types
Range._validate_cutpoints(lower, upper)
theType = Range._get_type(lower,upper)
if lower == upper:
raise TypeError("Range of type (v,v) is not valid")
return Range(Cut.aboveValue(lower, theType=theType),
Cut.belowValue(upper, theType=theType))
@staticmethod
def lessThan(val):
""" Makes range including all values less than some value
(i.e. (-inf, val))
Parameters
----------
val : comparable
The upper bound
Raises
------
ValueError
If type not comparable
Returns
-------
A Range object (-inf, val)
"""
Range._validate_cutpoints(val)
theType = Range._get_type(val)
return Range(Cut.belowAll(theType=theType),
Cut.belowValue(val, theType=theType))
@staticmethod
def atMost(val):
""" Makes range including all values less than or equal to
some value (i.e. (-inf, val])
Parameters
----------
val : comparable
The upper bound
Raises
------
ValueError
If type not comparable
Returns
-------
A Range object (-inf, val]
"""
Range._validate_cutpoints(val)
theType = Range._get_type(val)
return Range(Cut.belowAll(theType=theType),
Cut.aboveValue(val, theType=theType))
@staticmethod
def greaterThan(val):
""" Makes range including all values greater than
some value (i.e. (val, inf))
Parameters
----------
val : comparable
The lower bound
Raises
------
ValueError
If type not comparable
Returns
-------
A Range object (val, inf)
"""
Range._validate_cutpoints(val)
theType = Range._get_type(val)
return Range(Cut.aboveValue(val,theType=theType),
Cut.aboveAll(theType=theType))
@staticmethod
def atLeast(val):
""" Makes range including all values greater than or equal to
some value (i.e. [val, inf))
Parameters
----------
val : comparable
The lower bound
Raises
------
ValueError
If type not comparable
Returns
-------
A Range object [val, inf)
"""
Range._validate_cutpoints(val)
theType = Range._get_type(val)
return Range(Cut.belowValue(val, theType=theType),
Cut.aboveAll(theType=theType))
| [
[
[
33,
36
],
[
14187,
14190
],
[
14248,
14251
],
[
15031,
15034
],
[
15092,
15095
],
[
15866,
15869
],
[
15927,
15930
],
[
16869,
16872
],
[
16930,
16933
],
[
17460,
17463
],
[
17512,
17515
],
[
18050,
18053
],
[
18102,
18105
],
[
18634,
18637
],
[
18692,
18695
],
[
19225,
19228
],
[
19284,
19287
],
[
692,
695
]
],
[
[
44,
49
],
[
1655,
1660
],
[
5098,
5103
],
[
8894,
8899
],
[
9788,
9793
],
[
10564,
10569
],
[
10738,
10743
],
[
10897,
10902
],
[
11242,
11247
],
[
12046,
12051
],
[
12205,
12210
],
[
12500,
12505
],
[
14079,
14084
],
[
14137,
14142
],
[
14181,
14186
],
[
14923,
14928
],
[
14981,
14986
],
[
15025,
15030
],
[
15758,
15763
],
[
15816,
15821
],
[
15860,
15865
],
[
16662,
16667
],
[
16720,
16725
],
[
16863,
16868
],
[
17369,
17374
],
[
17418,
17423
],
[
17454,
17459
],
[
17959,
17964
],
[
18008,
18013
],
[
18044,
18049
],
[
18543,
18548
],
[
18592,
18597
],
[
18628,
18633
],
[
19134,
19139
],
[
19183,
19188
],
[
19219,
19224
]
]
] |
from pyrelational.data.data_manager import GenericDataManager
| [
[
[
43,
61
]
]
] |
from selfdrive.car import dbc_dict
from cereal import car
Ecu = car.CarParams.Ecu
class CarControllerParams:
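# BP/V suffixes follow the usual openpilot convention (an assumption
# here): BP values are vehicle-speed breakpoints and V values are the
# limits interpolated between them to bound the steering-angle rate.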
ANGLE_DELTA_BP = [0., 5., 15.]
ANGLE_DELTA_V = [5., .8, .15] # windup limit
ANGLE_DELTA_VU = [5., 3.5, 0.4] # unwind limit
LKAS_MAX_TORQUE = 1 # A value of 1 is easy to overpower
STEER_THRESHOLD = 1.0
class CAR:
XTRAIL = "NISSAN X-TRAIL 2017"
LEAF = "NISSAN LEAF 2018"
# Leaf with ADAS ECU found behind instrument cluster instead of glovebox
# Currently the only known difference between them is the inverted seatbelt signal.
LEAF_IC = "NISSAN LEAF 2018 Instrument Cluster"
ROGUE = "NISSAN ROGUE 2019"
ALTIMA = "NISSAN ALTIMA 2020"
FINGERPRINTS = {
CAR.XTRAIL: [
{
2: 5, 42: 6, 346: 6, 347: 5, 348: 8, 349: 7, 361: 8, 386: 8, 389: 8, 397: 8, 398: 8, 403: 8, 520: 2, 523: 6, 548: 8, 645: 8, 658: 8, 665: 8, 666: 8, 674: 2, 682: 8, 683: 8, 689: 8, 723: 8, 758: 3, 768: 2, 783: 3, 851: 8, 855: 8, 1041: 8, 1055: 2, 1104: 4, 1105: 6, 1107: 4, 1108: 8, 1111: 4, 1227: 8, 1228: 8, 1247: 4, 1266: 8, 1273: 7, 1342: 1, 1376: 6, 1401: 8, 1474: 2, 1497: 3, 1821: 8, 1823: 8, 1837: 8, 2015: 8, 2016: 8, 2024: 8
},
{
2: 5, 42: 6, 346: 6, 347: 5, 348: 8, 349: 7, 361: 8, 386: 8, 389: 8, 397: 8, 398: 8, 403: 8, 520: 2, 523: 6, 527: 1, 548: 8, 637: 4, 645: 8, 658: 8, 665: 8, 666: 8, 674: 2, 682: 8, 683: 8, 689: 8, 723: 8, 758: 3, 768: 6, 783: 3, 851: 8, 855: 8, 1041: 8, 1055: 2, 1104: 4, 1105: 6, 1107: 4, 1108: 8, 1111: 4, 1227: 8, 1228: 8, 1247: 4, 1266: 8, 1273: 7, 1342: 1, 1376: 6, 1401: 8, 1474: 8, 1497: 3, 1534: 6, 1792: 8, 1821: 8, 1823: 8, 1837: 8, 1872: 8, 1937: 8, 1953: 8, 1968: 8, 2015: 8, 2016: 8, 2024: 8
},
],
CAR.LEAF: [
{
2: 5, 42: 6, 264: 3, 361: 8, 372: 8, 384: 8, 389: 8, 403: 8, 459: 7, 460: 4, 470: 8, 520: 1, 569: 8, 581: 8, 634: 7, 640: 8, 644: 8, 645: 8, 646: 5, 658: 8, 682: 8, 683: 8, 689: 8, 724: 6, 758: 3, 761: 2, 783: 3, 852: 8, 853: 8, 856: 8, 861: 8, 944: 1, 976: 6, 1008: 7, 1011: 7, 1057: 3, 1227: 8, 1228: 8, 1261: 5, 1342: 1, 1354: 8, 1361: 8, 1459: 8, 1477: 8, 1497: 3, 1549: 8, 1573: 6, 1821: 8, 1837: 8, 1856: 8, 1859: 8, 1861: 8, 1864: 8, 1874: 8, 1888: 8, 1891: 8, 1893: 8, 1906: 8, 1947: 8, 1949: 8, 1979: 8, 1981: 8, 2016: 8, 2017: 8, 2021: 8, 643: 5, 1792: 8, 1872: 8, 1937: 8, 1953: 8, 1968: 8, 1988: 8, 2000: 8, 2001: 8, 2004: 8, 2005: 8, 2015: 8
},
# 2020 Leaf SV Plus
{
2: 5, 42: 8, 264: 3, 361: 8, 372: 8, 384: 8, 389: 8, 403: 8, 459: 7, 460: 4, 470: 8, 520: 1, 569: 8, 581: 8, 634: 7, 640: 8, 643: 5, 644: 8, 645: 8, 646: 5, 658: 8, 682: 8, 683: 8, 689: 8, 724: 6, 758: 3, 761: 2, 772: 8, 773: 6, 774: 7, 775: 8, 776: 6, 777: 7, 778: 6, 783: 3, 852: 8, 853: 8, 856: 8, 861: 8, 943: 8, 944: 1, 976: 6, 1008: 7, 1009: 8, 1010: 8, 1011: 7, 1012: 8, 1013: 8, 1019: 8, 1020: 8, 1021: 8, 1022: 8, 1057: 3, 1227: 8, 1228: 8, 1261: 5, 1342: 1, 1354: 8, 1361: 8, 1402: 8, 1459: 8, 1477: 8, 1497: 3, 1549: 8, 1573: 6, 1821: 8, 1837: 8
},
],
CAR.LEAF_IC: [
{
2: 5, 42: 6, 264: 3, 282: 8, 361: 8, 372: 8, 384: 8, 389: 8, 403: 8, 459: 7, 460: 4, 470: 8, 520: 1, 569: 8, 581: 8, 634: 7, 640: 8, 643: 5, 644: 8, 645: 8, 646: 5, 658: 8, 682: 8, 683: 8, 689: 8, 756: 5, 758: 3, 761: 2, 783: 3, 830: 2, 852: 8, 853: 8, 856: 8, 861: 8, 943: 8, 944: 1, 1001: 6, 1057: 3, 1227: 8, 1228: 8, 1229: 8, 1342: 1, 1354: 8, 1361: 8, 1459: 8, 1477: 8, 1497: 3, 1514: 6, 1549: 8, 1573: 6, 1792: 8, 1821: 8, 1822: 8, 1837: 8, 1838: 8, 1872: 8, 1937: 8, 1953: 8, 1968: 8, 1988: 8, 2000: 8, 2001: 8, 2004: 8, 2005: 8, 2015: 8, 2016: 8, 2017: 8
},
],
CAR.ROGUE: [
{
2: 5, 42: 6, 346: 6, 347: 5, 348: 8, 349: 7, 361: 8, 386: 8, 389: 8, 397: 8, 398: 8, 403: 8, 520: 2, 523: 6, 548: 8, 634: 7, 643: 5, 645: 8, 658: 8, 665: 8, 666: 8, 674: 2, 682: 8, 683: 8, 689: 8, 723: 8, 758: 3, 772: 8, 773: 6, 774: 7, 775: 8, 776: 6, 777: 7, 778: 6, 783: 3, 851: 8, 855: 8, 1041: 8, 1042: 8, 1055: 2, 1104: 4, 1105: 6, 1107: 4, 1108: 8, 1110: 7, 1111: 7, 1227: 8, 1228: 8, 1247: 4, 1266: 8, 1273: 7, 1342: 1, 1376: 6, 1401: 8, 1474: 2, 1497: 3, 1534: 7, 1792: 8, 1821: 8, 1823: 8, 1837: 8, 1839: 8, 1872: 8, 1937: 8, 1953: 8, 1968: 8, 1988: 8, 2000: 8, 2001: 8, 2004: 8, 2005: 8, 2015: 8, 2016: 8, 2017: 8, 2024: 8, 2025: 8
},
],
CAR.ALTIMA: [
{
2: 5, 42: 6, 346: 6, 347: 5, 348: 8, 349: 7, 361: 8, 386: 8, 389: 8, 397: 8, 398: 8, 403: 8, 438: 8, 451: 8, 517: 8, 520: 2, 522: 8, 523: 6, 539: 8, 541: 7, 542: 8, 543: 8, 544: 8, 545: 8, 546: 8, 547: 8, 548: 8, 570: 8, 576: 8, 577: 8, 582: 8, 583: 8, 584: 8, 586: 8, 587: 8, 588: 8, 589: 8, 590: 8, 591: 8, 592: 8, 600: 8, 601: 8, 610: 8, 611: 8, 612: 8, 614: 8, 615: 8, 616: 8, 617: 8, 622: 8, 623: 8, 634: 7, 638: 8, 645: 8, 648: 5, 654: 6, 658: 8, 659: 8, 660: 8, 661: 8, 665: 8, 666: 8, 674: 2, 675: 8, 676: 8, 682: 8, 683: 8, 684: 8, 685: 8, 686: 8, 687: 8, 689: 8, 690: 8, 703: 8, 708: 7, 709: 7, 711: 7, 712: 7, 713: 7, 714: 8, 715: 8, 716: 8, 717: 7, 718: 7, 719: 7, 720: 7, 723: 8, 726: 7, 727: 7, 728: 7, 735: 8, 746: 8, 748: 6, 749: 6, 750: 8, 758: 3, 772: 8, 773: 6, 774: 7, 775: 8, 776: 6, 777: 7, 778: 6, 779: 7, 781: 7, 782: 7, 783: 3, 851: 8, 855: 5, 1001: 6, 1041: 8, 1042: 8, 1055: 3, 1100: 7, 1104: 4, 1105: 6, 1107: 4, 1108: 8, 1110: 7, 1111: 7, 1144: 7, 1145: 7, 1227: 8, 1228: 8, 1229: 8, 1232: 8, 1247: 4, 1258: 8, 1259: 8, 1266: 8, 1273: 7, 1306: 1, 1314: 8, 1323: 8, 1324: 8, 1342: 1, 1376: 8, 1401: 8, 1454: 8, 1497: 3, 1514: 6, 1526: 8, 1527: 5, 1792: 8, 1821: 8, 1823: 8, 1837: 8, 1872: 8, 1937: 8, 1953: 8, 1968: 8, 1988: 8, 2000: 8, 2001: 8, 2004: 8, 2005: 8, 2015: 8, 2016: 8, 2017: 8, 2024: 8, 2025: 8
},
]
}
FW_VERSIONS = {
CAR.ALTIMA: {
(Ecu.fwdCamera, 0x707, None): [
b'284N86CA1D',
],
(Ecu.eps, 0x742, None): [
b'6CA2B\xa9A\x02\x02G8A89P90D6A\x00\x00\x01\x80',
],
(Ecu.engine, 0x7e0, None): [
b'237109HE2B',
],
(Ecu.gateway, 0x18dad0f1, None): [
b'284U29HE0A',
],
},
CAR.LEAF_IC: {
(Ecu.fwdCamera, 0x707, None): [
b'5SH1BDB\x04\x18\x00\x00\x00\x00\x00_-?\x04\x91\xf2\x00\x00\x00\x80',
b'5SK0ADB\x04\x18\x00\x00\x00\x00\x00_(5\x07\x9aQ\x00\x00\x00\x80',
],
(Ecu.esp, 0x740, None): [
b'476605SH1D',
b'476605SK2A',
],
(Ecu.eps, 0x742, None): [
b'5SH2A\x99A\x05\x02N123F\x15\x81\x00\x00\x00\x00\x00\x00\x00\x80',
b'5SK3A\x99A\x05\x02N123F\x15u\x00\x00\x00\x00\x00\x00\x00\x80',
],
(Ecu.gateway, 0x18dad0f1, None): [
b'284U25SH3A',
b'284U25SK2D',
],
},
CAR.XTRAIL: {
(Ecu.fwdCamera, 0x707, None): [
b'284N86FR2A',
],
(Ecu.esp, 0x740, None): [
b'6FU1BD\x11\x02\x00\x02e\x95e\x80iX#\x01\x00\x00\x00\x00\x00\x80',
b'6FU0AD\x11\x02\x00\x02e\x95e\x80iQ#\x01\x00\x00\x00\x00\x00\x80',
],
(Ecu.eps, 0x742, None): [
b'6FP2A\x99A\x05\x02N123F\x18\x02\x00\x00\x00\x00\x00\x00\x00\x80',
],
(Ecu.combinationMeter, 0x743, None): [
b'6FR2A\x18B\x05\x17\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x80',
],
(Ecu.engine, 0x7e0, None): [
b'6FU9B\xa0A\x06\x04\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x80',
b'6FR9A\xa0A\x06\x04\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x80',
],
(Ecu.gateway, 0x18dad0f1, None): [
b'284U26FR0E',
],
},
}
DBC = {
CAR.XTRAIL: dbc_dict('nissan_x_trail_2017', None),
CAR.LEAF: dbc_dict('nissan_leaf_2018', None),
CAR.LEAF_IC: dbc_dict('nissan_leaf_2018', None),
CAR.ROGUE: dbc_dict('nissan_x_trail_2017', None),
CAR.ALTIMA: dbc_dict('nissan_x_trail_2017', None),
}
| [
[
[
26,
34
],
[
7380,
7388
],
[
7431,
7439
],
[
7482,
7490
],
[
7531,
7539
],
[
7584,
7592
]
],
[
[
54,
57
],
[
64,
67
]
],
[
[
58,
61
],
[
5700,
5703
],
[
5764,
5767
],
[
5857,
5860
],
[
5918,
5921
],
[
6007,
6010
],
[
6201,
6204
],
[
6280,
6283
],
[
6462,
6465
],
[
6571,
6574
],
[
6635,
6638
],
[
6820,
6823
],
[
6931,
6934
],
[
7070,
7073
],
[
7288,
7291
]
],
[
[
90,
109
]
],
[
[
349,
352
],
[
709,
712
],
[
1708,
1711
],
[
3001,
3004
],
[
3605,
3608
],
[
4287,
4290
],
[
5681,
5684
],
[
5987,
5990
],
[
6552,
6555
],
[
7368,
7371
],
[
7421,
7424
],
[
7469,
7472
],
[
7520,
7523
],
[
7572,
7575
]
],
[
[
690,
702
]
],
[
[
5663,
5674
]
],
[
[
7358,
7361
]
]
] |
from .core import Core, Settings
class Download(Core):
host = 'https://artifacts.elastic.co/downloads/beats/elastic-agent/{endpoint}'
endpoint = Settings.download_endpoint
kwargs = {
'stream': True
}
def parse_response(self, response):
self.__logger.debug('Saving file to download path: {}'.format(Settings.download_path))
with open(Settings.download_path, 'wb+') as f:
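# Stream the raw, undecoded body in 1 KiB chunks so large agent
# bundles are written to disk without being held fully in memory.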
for chunk in response.raw.stream(1024, decode_content=False):
if chunk:
f.write(chunk)
self.__logger.debug('File saved successfully')
| [
[
[
18,
22
],
[
50,
54
]
],
[
[
24,
32
],
[
156,
164
],
[
338,
346
],
[
381,
389
]
],
[
[
41,
49
]
]
] |
from .single_stage import SingleStageDetector
from ..registry import DETECTORS
import torch.nn as nn
import torch
from .. import builder
import numpy as np
import cv2
from mmdet.core import bbox2roi, bbox2result, build_assigner, build_sampler
@DETECTORS.register_module
class CSP(SingleStageDetector):
def __init__(self,
backbone,
neck,
bbox_head,
refine_roi_extractor=None,
refine_head=None,
train_cfg=None,
test_cfg=None,
pretrained=None,
detached=True,
return_feature_maps=False):
super(CSP, self).__init__(backbone, neck, bbox_head, train_cfg,
test_cfg, pretrained)
if refine_head is not None:
self.refine_roi_extractor = builder.build_roi_extractor(
refine_roi_extractor)
self.refine_head = builder.build_head(refine_head)
self.return_feature_maps = return_feature_maps
self.train_cfg = train_cfg
self.test_cfg = test_cfg
self.detached = detached
def show_input_debug(self, img, classification_maps, scale_maps, offset_maps):
img_numpy = img.cpu().numpy().copy()[0]
# img_numpy = np.transpose(img_numpy, [1, 2, 0]) * [58.395, 57.12, 57.375] + [123.675, 116.28, 103.53]
img_numpy = np.transpose(img_numpy, [1, 2, 0]) + [102.9801, 115.9465, 122.7717]
img_numpy = img_numpy[:, :, ::-1]
img_numpy = img_numpy.astype(np.uint8)
strides = [8, 16, 32, 64, 128]
img_nows = []
for i, stride in enumerate(strides):
img_now = img_numpy.copy()
# cls_numpy = classification_maps[0][i].cpu().numpy().copy()[0][2]
cls_numpy = classification_maps[0][i].cpu().numpy().copy()[0][:80]
scale_numpy = scale_maps[0][i].cpu().numpy().copy()[0][0] * stride
offset_numpy = offset_maps[0][i].cpu().numpy().copy()[0][:2]
cs, ys, xs = cls_numpy.nonzero()
print(len(ys))
for c, x, y in zip(cs, xs, ys):
cv2.imshow(str(c), classification_maps[0][i].cpu().numpy().copy()[0][80+c])
realx = x
realy = y
height = scale_numpy[y, x]
realy = realy + 0.5 + offset_numpy[0][y, x]
realx = realx + 0.5 + offset_numpy[1][y, x]
realy = realy * stride
realx = realx * stride
top_y = int(realy - height/2)
top_x = int(realx)
down_y = int(realy + height/2)
down_x = int(realx)
top_left = (int(top_x - height * 0.1), int(top_y))
down_right = (int(down_x + height * 0.1), down_y)
cv2.rectangle(img_now, top_left, down_right, (255, 255, 5*int(c)), 2)
img_nows.append(img_now)
cv2.imshow(str(i) +'img', img_now)
cv2.waitKey(0)
def show_input_debug_caltech(self, img, classification_maps, scale_maps, offset_maps):
for j in range(img.shape[0]):
img_numpy = img.cpu().numpy().copy()[j]
img_numpy = np.transpose(img_numpy, [1, 2, 0]) * [58.395, 57.12, 57.375] + [123.675, 116.28, 103.53]
img_numpy = img_numpy[:, :, ::-1]
img_numpy = img_numpy.astype(np.uint8)
strides = [4]
img_nows = []
for i, stride in enumerate(strides):
img_now = img_numpy.copy()
cls_numpy = classification_maps[j][i].cpu().numpy().copy()[0][2]
ignore_numpy = classification_maps[j][i].cpu().numpy().copy()[0][1]
cv2.imshow('ignore', ignore_numpy)
scale_numpy = scale_maps[j][i].cpu().numpy().copy()[0][0] * stride
offset_numpy = offset_maps[j][i].cpu().numpy().copy()[0][:2]
ys, xs = cls_numpy.nonzero()
print(len(ys))
for x, y in zip(xs, ys):
# cv2.imshow(str(c), classification_maps[j][i].cpu().numpy().copy()[0][c])
realx = x
realy = y
height = scale_numpy[y, x]
realy = realy + 0.5 + offset_numpy[0][y, x]
realx = realx + 0.5 + offset_numpy[1][y, x]
realy = realy * stride
realx = realx * stride
top_y = int(realy - height/2)
top_x = int(realx)
down_y = int(realy + height/2)
down_x = int(realx)
top_left = (int(top_x - height * 0.1), int(top_y))
down_right = (int(down_x + height * 0.1), down_y)
cv2.rectangle(img_now, top_left, down_right, (255, 255, 125), 2)
img_nows.append(img_now)
cv2.imshow(str(i) +'img', img_now)
cv2.waitKey(0)
def show_input_debug_head(self, img, classification_maps, scale_maps, offset_maps):
for j in range(img.shape[0]):
img_numpy = img.cpu().numpy().copy()[j]
img_numpy = np.transpose(img_numpy, [1, 2, 0]) * [58.395, 57.12, 57.375] + [123.675, 116.28, 103.53]
img_numpy = img_numpy[:, :, ::-1]
img_numpy = img_numpy.astype(np.uint8)
strides = [4]
img_nows = []
for i, stride in enumerate(strides):
img_now = img_numpy.copy()
cls_numpy = classification_maps[j][i].cpu().numpy().copy()[0][2]
ignore_numpy = classification_maps[j][i].cpu().numpy().copy()[0][1]
cv2.imshow('ignore', ignore_numpy)
scale_numpy = scale_maps[j][i].exp().cpu().numpy().copy()[0][0] * stride
offset_numpy = offset_maps[j][i].cpu().numpy().copy()[0][:2]
ys, xs = cls_numpy.nonzero()
for x, y in zip(xs, ys):
# cv2.imshow(str(c), classification_maps[j][i].cpu().numpy().copy()[0][c])
realx = x
realy = y
height = scale_numpy[y, x]
realy = realy + 0.5 + offset_numpy[0][y, x]
realx = realx + 0.5 + offset_numpy[1][y, x]
realy = realy * stride
realx = realx * stride
top_y = int(realy)
top_x = int(realx)
down_y = int(realy + height)
down_x = int(realx)
top_left = (int(top_x - height * 0.41/2), int(top_y))
down_right = (int(down_x + height * 0.41/2), down_y)
cv2.rectangle(img_now, top_left, down_right, (255, 255, 125), 2)
img_nows.append(img_now)
cv2.imshow(str(i) +'img', img_now)
cv2.waitKey(0)
def show_mot_input_debug(self, img, classification_maps, scale_maps, offset_maps):
for j in range(img.shape[0]):
img_numpy = img.cpu().numpy().copy()[j]
img_numpy = np.transpose(img_numpy, [1, 2, 0]) * [58.395, 57.12, 57.375] + [123.675, 116.28, 103.53]
# img_numpy = np.transpose(img_numpy, [1, 2, 0]) + [102.9801, 115.9465, 122.7717]
img_numpy = img_numpy[:, :, ::-1]
img_numpy = img_numpy.astype(np.uint8)
strides = [4]
img_nows = []
for i, stride in enumerate(strides):
img_now = img_numpy.copy()
# cls_numpy = classification_maps[0][i].cpu().numpy().copy()[0][2]
cls_numpy = classification_maps[j][i].cpu().numpy().copy()[0][2]
instance_numpy = classification_maps[j][i].cpu().numpy().copy()[0][3]
scale_numpy = scale_maps[j][i].cpu().numpy().copy()[0][0] * stride
offset_numpy = offset_maps[j][i].cpu().numpy().copy()[0][:2]
ys, xs = cls_numpy.nonzero()
for x, y in zip(xs, ys):
c=0
cv2.imshow(str(c), classification_maps[j][i].cpu().numpy().copy()[0][2])
realx = x
realy = y
height = scale_numpy[y, x]
realy = realy + 0.5 + offset_numpy[0][y, x]
realx = realx + 0.5 + offset_numpy[1][y, x]
realy = realy * stride
realx = realx * stride
top_y = int(realy - height/2)
top_x = int(realx)
down_y = int(realy + height/2)
down_x = int(realx)
top_left = (int(top_x - height * 0.1), int(top_y))
down_right = (int(down_x + height * 0.1), down_y)
cv2.rectangle(img_now, top_left, down_right, (255, 255, 5*int(c)), 2)
instance = instance_numpy[y, x]
cv2.putText(img_now, str(instance), top_left, cv2.FONT_HERSHEY_COMPLEX, 1, 255)
img_nows.append(img_now)
cv2.imshow(str(i) +'img', img_now)
cv2.waitKey(0)
@property
def refine(self):
return hasattr(self, 'refine_head') and self.refine_head is not None
def forward_train(self,
img,
img_metas,
gt_bboxes,
gt_labels,
gt_bboxes_ignore=None,
classification_maps=None,
scale_maps=None,
offset_maps=None):
# for tracking data which batch is produced by dataset instead of data loader
if type(img) == list:
img=img[0]
img_metas=img_metas[0]
gt_bboxes=gt_bboxes[0]
gt_labels=gt_labels[0]
gt_bboxes_ignore = gt_bboxes_ignore[0]
classification_maps = classification_maps[0]
scale_maps = scale_maps[0]
offset_maps = offset_maps[0]
losses = dict()
x = self.extract_feat(img)
# self.show_input_debug(img, classification_maps, scale_maps, offset_maps)
# self.show_input_debug_caltech(img, classification_maps, scale_maps, offset_maps)
# self.show_mot_input_debug(img, classification_maps, scale_maps, offset_maps)
# self.show_input_debug_head(img, classification_maps, scale_maps, offset_maps)
outs = self.bbox_head(x)
loss_inputs = outs + (gt_bboxes, gt_labels, classification_maps, scale_maps, offset_maps, img_metas, self.train_cfg.csp_head if self.refine else self.train_cfg)
losses_bbox = self.bbox_head.loss(
*loss_inputs, gt_bboxes_ignore=gt_bboxes_ignore)
losses.update(losses_bbox)
if self.refine:
if self.detached:
x = tuple([i.detach() for i in x])
bbox_inputs = outs + (img_metas, self.train_cfg.csp_head, False)
bbox_list = self.bbox_head.get_bboxes(*bbox_inputs, no_strides=False) # no_strides to not upscale yet
bbox_list = [
bbox2result(det_bboxes, det_labels, self.bbox_head.num_classes)[0]
for det_bboxes, det_labels in bbox_list
]
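# Refinement stage: treat the CSP head's detections as proposals,
# assign them to ground truth, and sample positives/negatives for the
# refine head (the standard mmdet assigner/sampler flow).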
bbox_assigner = build_assigner(self.train_cfg.rcnn.assigner)
bbox_sampler = build_sampler(
self.train_cfg.rcnn.sampler, context=self)
num_imgs = img.size(0)
if gt_bboxes_ignore is None:
gt_bboxes_ignore = [None for _ in range(num_imgs)]
sampling_results = []
for i in range(num_imgs):
if bbox_list[i].shape[0] == 0 or gt_bboxes[i].shape[0] == 0:
continue
bbox = torch.tensor(bbox_list[i]).float().cuda()
assign_result = bbox_assigner.assign(
bbox, gt_bboxes[i], gt_bboxes_ignore[i],
gt_labels[i])
sampling_result = bbox_sampler.sample(
assign_result,
bbox,
gt_bboxes[i],
gt_labels[i])
sampling_results.append(sampling_result)
samp_list = [res.bboxes for res in sampling_results]
if len(samp_list) == 0:
losses.update(dict(loss_refine_cls=torch.tensor(0).float().cuda(), acc=torch.tensor(0).float().cuda()))
return losses
rois = bbox2roi(samp_list).float()
if self.refine_head.loss_opinion is not None:
pred_scores = torch.cat([torch.tensor(bbox[:, 4]).float().cuda() for bbox in bbox_list], dim=0)
pred_rois = bbox2roi([torch.tensor(bbox).float().cuda() for bbox in bbox_list])
pred_feats = self.refine_roi_extractor(
x, pred_rois)
pred_scores_refine = self.refine_head(pred_feats)
loss_opinion = self.refine_head.compute_opinion_loss(pred_scores, pred_scores_refine)
losses.update(loss_opinion)
bbox_feats = self.refine_roi_extractor(
x, rois)
cls_score = self.refine_head(bbox_feats)
bbox_targets = self.refine_head.get_target(
sampling_results, gt_bboxes, gt_labels, self.train_cfg.rcnn)
loss_refine = self.refine_head.loss(cls_score,
*bbox_targets[:2])
losses.update(dict(loss_refine_cls=loss_refine["loss_cls"], distL1=loss_refine["dist"]))
return losses
def simple_test_accuracy(self, img, img_meta):
gts = img_meta[0]["gts"]
x = self.extract_feat(img)
if self.detached:
x = (x[0].detach(),)
rois = bbox2roi(gts)
if rois.shape[0] == 0:
return 0, 0
roi_feats = self.refine_roi_extractor(
x, rois)
cls_score = self.refine_head.get_scores(roi_feats)
return (cls_score > 0.5).float().sum(), rois.size(0)
def simple_test(self, img, img_meta, rescale=False, return_id=False):
x = self.extract_feat(img)
outs = self.bbox_head(x)
bbox_inputs = outs + (img_meta, self.test_cfg.csp_head if self.refine else self.test_cfg, False) # TODO: handle rescaling
if self.return_feature_maps:
return self.bbox_head.get_bboxes_features(*bbox_inputs)
bbox_list = self.bbox_head.get_bboxes(*bbox_inputs, no_strides=False)
im_scale = img_meta[0]["scale_factor"]
if "id" in img_meta[0]:
img_id = img_meta[0]["id"]
else:
img_id = 0
if self.refine:
if self.detached:
x = (x[0].detach(),)
bbox_list = [
bbox2result(det_bboxes, det_labels, self.bbox_head.num_classes)[0]
for det_bboxes, det_labels in bbox_list
]
refine_cfg = self.test_cfg.get('rcnn', None)
bbox_list = [torch.tensor(bbox).float().cuda() for bbox in bbox_list]
rois = bbox2roi(bbox_list)
bbox_list = [bbox/im_scale for bbox in bbox_list]
if rois.shape[0] == 0:
cls_score = None
else:
roi_feats = self.refine_roi_extractor(
x, rois)
cls_score = self.refine_head.get_scores(roi_feats)
res_buffer = []
if cls_score is not None:
if refine_cfg is not None:
res_buffer = self.refine_head.suppress_boxes(rois, cls_score, img_meta, cfg=refine_cfg)
else:
res_buffer = self.refine_head.combine_scores(bbox_list, cls_score)
if return_id:
return res_buffer, img_id
return res_buffer
bbox_results = [
bbox2result(det_bboxes, det_labels, self.bbox_head.num_classes)
for det_bboxes, det_labels in bbox_list
]
if return_id:
return bbox_results[0], img_id
return bbox_results[0]
def foward_features(self, features):
bbox_list = self.bbox_head.get_bboxes(*features)
bbox_results = [
bbox2result(det_bboxes, det_labels, self.bbox_head.num_classes)
for det_bboxes, det_labels in bbox_list
]
return bbox_results[0]
| [
[
[
27,
46
],
[
317,
336
]
],
[
[
70,
79
],
[
281,
290
]
],
[
[
103,
114
]
],
[
[
122,
136
]
],
[
[
144,
149
],
[
11879,
11884
],
[
12464,
12469
],
[
12500,
12505
],
[
12698,
12703
],
[
12709,
12714
],
[
12818,
12823
],
[
15106,
15111
]
],
[
[
165,
172
],
[
901,
908
],
[
999,
1006
]
],
[
[
180,
191
],
[
1450,
1452
],
[
1597,
1599
],
[
3263,
3265
],
[
3439,
3441
],
[
5234,
5236
],
[
5410,
5412
],
[
7172,
7174
],
[
7442,
7444
]
],
[
[
199,
202
],
[
2194,
2197
],
[
2876,
2879
],
[
2999,
3002
],
[
3042,
3045
],
[
3774,
3777
],
[
4843,
4846
],
[
4969,
4972
],
[
5016,
5019
],
[
5745,
5748
],
[
6782,
6785
],
[
6908,
6911
],
[
6955,
6958
],
[
8136,
8139
],
[
8871,
8874
],
[
9013,
9016
],
[
9059,
9062
],
[
9154,
9157
],
[
9201,
9204
]
],
[
[
226,
234
],
[
12582,
12590
],
[
12808,
12816
],
[
13882,
13890
],
[
15182,
15190
]
],
[
[
236,
247
],
[
11210,
11221
],
[
14887,
14898
],
[
15964,
15975
],
[
16322,
16333
]
],
[
[
249,
263
],
[
11376,
11390
]
],
[
[
265,
278
],
[
11448,
11461
]
],
[
[
313,
316
],
[
710,
713
]
]
] |
# Simple demo of sending and receiving data with the RFM95 LoRa radio.
# Author: Tony DiCola
import board
import busio
import digitalio
import adafruit_rfm9x
# Define radio parameters.
RADIO_FREQ_MHZ = 915.0 # Frequency of the radio in MHz. Must match your
# module! Can be a value like 915.0, 433.0, etc.
# Define pins connected to the chip, use these if wiring up the breakout according to the guide:
CS = digitalio.DigitalInOut(board.D5)
RESET = digitalio.DigitalInOut(board.D6)
# Or uncomment and instead use these if using a Feather M0 RFM9x board and the appropriate
# CircuitPython build:
# CS = digitalio.DigitalInOut(board.RFM9X_CS)
# RESET = digitalio.DigitalInOut(board.RFM9X_RST)
# Define the onboard LED
LED = digitalio.DigitalInOut(board.D13)
LED.direction = digitalio.Direction.OUTPUT
# Initialize SPI bus.
spi = busio.SPI(board.SCK, MOSI=board.MOSI, MISO=board.MISO)
# Initialize RFM radio
rfm9x = adafruit_rfm9x.RFM9x(spi, CS, RESET, RADIO_FREQ_MHZ)
# Note that the radio is configured in LoRa mode so you can't control sync
# word, encryption, frequency deviation, or other settings!
# You can however adjust the transmit power (in dB). The default is 13 dB but
# high power radios like the RFM95 can go up to 23 dB:
rfm9x.tx_power = 23
# Send a packet. Note you can only send a packet up to 252 bytes in length.
# This is a limitation of the radio packet size, so if you need to send larger
# amounts of data you will need to break it into smaller send calls. Each send
# call will wait for the previous one to finish before continuing.
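# A minimal chunking sketch (hypothetical `large_payload`, not part of
# the original demo): split any oversized bytes object into packets of
# at most 252 bytes before sending.
# for i in range(0, len(large_payload), 252):
# rfm9x.send(large_payload[i:i + 252])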
rfm9x.send(bytes("Hello world!\r\n", "utf-8"))
print("Sent Hello World message!")
# Wait to receive packets. Note that this library can't receive data at a fast
# rate, in fact it can only receive and process one 252 byte packet at a time.
# This means you should only use this for low bandwidth scenarios, like sending
# and receiving a single message at a time.
print("Waiting for packets...")
while True:
packet = rfm9x.receive()
# Optionally change the receive timeout from its default of 0.5 seconds:
# packet = rfm9x.receive(timeout=5.0)
# If no packet was received during the timeout then None is returned.
if packet is None:
# Packet has not been received
LED.value = False
print("Received nothing! Listening again...")
else:
# Received a packet!
LED.value = True
# Print out the raw bytes of the packet:
print("Received (raw bytes): {0}".format(packet))
# And decode to ASCII text and print it too. Note that you always
# receive raw bytes and need to convert to a text format like ASCII
# if you intend to do string processing on your data. Make sure the
# sending side is sending ASCII data before you try to decode!
packet_text = str(packet, "ascii")
print("Received (ASCII): {0}".format(packet_text))
# Also read the RSSI (signal strength) of the last received message and
# print it.
rssi = rfm9x.last_rssi
print("Received signal strength: {0} dB".format(rssi))
| [
[
[
100,
105
],
[
436,
441
],
[
477,
482
],
[
752,
757
],
[
845,
850
],
[
861,
866
],
[
878,
883
]
],
[
[
113,
118
],
[
835,
840
]
],
[
[
126,
135
],
[
413,
422
],
[
454,
463
],
[
729,
738
],
[
779,
788
]
],
[
[
144,
158
],
[
921,
935
]
],
[
[
188,
202
],
[
958,
972
]
],
[
[
408,
410
],
[
947,
949
]
],
[
[
446,
451
],
[
951,
956
]
],
[
[
723,
726
],
[
763,
766
],
[
2272,
2275
],
[
2391,
2394
]
],
[
[
829,
832
],
[
942,
945
]
],
[
[
913,
918
],
[
1245,
1250
],
[
1569,
1574
],
[
1993,
1998
],
[
3031,
3036
]
],
[
[
1984,
1990
],
[
2209,
2215
],
[
2506,
2512
],
[
2840,
2846
]
],
[
[
2822,
2833
],
[
2902,
2913
]
],
[
[
3024,
3028
],
[
3103,
3107
]
]
] |
"""
Class to hold clinical outcome model.
Predicts probability of good outcome of patient(s) or group(s) of patients.
Call `calculate_outcome_for_all(args)` from outside of the object
Inputs
======
All inputs take np arrays (for multiple groups of patients).
mimic: proportion of patients with stroke mimic
ich: proportion of patients with intracerebral haemorrhage (ICH).
Or probability of a patient having an ICH, when using for a single patient.
nlvo: proportion of patients with non-large vessel occlusions (nLVO).
Or probability of a patient having an nLVO, when using for a single patient.
lvo: proportion of patients with large vessel occlusions (LVO).
Or probability of a patient having an LVO, when using for a single patient.
onset_to_needle: minutes from onset to thrombolysis
onset_to_puncture: minutes from onset to thrombectomy
nlvo_eligible_for_treatment: proportion of patients with nLVO suitable for
treatment with thrombolysis. Or probability of a patient with nLVO being
eligible for treatment.
lvo_eligible_for_treatment: proportion of patients with LVO suitable for
treatment with thrombolysis and/or thrombectomy. Or probability of a patient
with LVO being eligible for treatment.
Returns
=======
Probability of good outcome: The probability of having a good outcome (modified
Rankin Scale 0-1) for the patient or group of patients (np array).
References for decay of effect of thrombolysis and thrombectomy
===============================================================
Decay of effect of thrombolysis without image selection of patients taken from:
Emberson, Jonathan, Kennedy R. Lees, Patrick Lyden, Lisa Blackwell,
Gregory Albers, Erich Bluhmki, Thomas Brott, et al (2014). “Effect of Treatment
Delay, Age, and Stroke Severity on the Effects of Intravenous Thrombolysis with
Alteplase for Acute Ischaemic Stroke: A Meta-Analysis of Individual Patient
Data from Randomised Trials.” The Lancet 384: 1929–1935.
https://doi.org/10.1016/S0140-6736(14)60584-5.
* Time to no effect = 6.3hrs
Decay of effect of thrombectomy without image selection of patients taken from:
Fransen, Puck S. S., Olvert A. Berkhemer, Hester F. Lingsma, Debbie Beumer,
Lucie A. van den Berg, Albert J. Yoo, Wouter J. Schonewille, et al. (2016)
“Time to Reperfusion and Treatment Effect for Acute Ischemic Stroke: A
Randomized Clinical Trial.” JAMA Neurology 73: 190–96.
https://doi.org/10.1001/jamaneurol.2015.3886.
* Time to no effect = 8hrs
"""
import numpy as np
import pandas as pd
class Clinical_outcome:
def __init__(self):
"""Constructor for clinical outcome model
"""
self.name = "Clinical outcome model"
self.thrombectomy_time_no_effect = 8 * 60
self.thrombolysis_time_no_effect = 6.3 * 60
self.maximum_permitted_time_to_thrombectomy = 360
self.maximum_permitted_time_to_thrombolysis = 270
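# Usage sketch (illustrative values, not from the original module):
# model = Clinical_outcome()
# p_good = model.calculate_outcome_for_all(
# mimic=np.array([0.1]), ich=np.array([0.1]),
# nlvo=np.array([0.5]), lvo=np.array([0.3]),
# onset_to_needle=np.array([90.0]), onset_to_puncture=np.array([150.0]),
# nlvo_eligible_for_treatment=np.array([0.3]),
# lvo_eligible_for_treatment=np.array([0.3]),
# prop_thrombolysed_lvo_receiving_thrombectomy=np.array([0.85]))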
def calculate_outcome_for_all(self,
mimic,
ich,
nlvo,
lvo,
onset_to_needle,
onset_to_puncture,
nlvo_eligible_for_treatment,
lvo_eligible_for_treatment,
prop_thrombolysed_lvo_receiving_thrombectomy):
"""
Calculates the probability of good outcome for all patients admitted
with acute stroke.
Based on:
Holodinsky JK, Williamson TS, Demchuk AM, et al. Modeling Stroke Patient
Transport for All Patients With Suspected Large-Vessel Occlusion. JAMA
Neurol. 2018;75(12):1477-1486. doi:10.1001/jamaneurol.2018.2424
Sums outcomes for:
1) mimics
2) ICH
3) non-LVO
4) LVO treated with thrombolysis
5) LVO treated with thrombectomy (if thrombolysis not successful in a
drip and ship configuration)
arguments
---------
np arrays (each row is a given geographic area with different
characteristics)
mimic: proportion of patients with stroke mimic
ich: proportion of patients with ICH
nlvo: proportion of patients with non-lvo
lvo: proportion of patients with lvo
onset_to_needle: minutes from onset to thrombolysis
onset_to_puncture: minutes from onset to thrombectomy
nlvo_eligible_for_treatment: proportion of nlvo suitable for treatment
lvo_eligible_for_treatment: proportion of lvo suitable for treatment
returns
-------
probability of good outcome for all (np array)
"""
# Get outcomes
# ------------
outcomes = pd.DataFrame()
# Calculate good outcomes for mimics
outcomes['mimic'] = self._calculate_outcome_for_stroke_mimics(
mimic.shape)
# Calculate good outcomes for ich
outcomes['ich'] = self._calculate_outcome_for_ICH(mimic.shape)
# Calculate good outcomes for nlvo without treatment
outcomes['nlvo_base'] = \
np.full(nlvo.shape, 0.4622)
# Calculate good outcomes for nlvo with thrombolysis
outcomes['nlvo_add_ivt'] = \
self._calculate_thrombolysis_outcome_for_nlvo(onset_to_needle)
# Calculate good outcomes for lvo without treatment
outcomes['lvo_base'] = \
np.full(nlvo.shape, 0.1328)
# Calculate good outcomes for lvo with thrombolysis
outcomes['lvo_add_ivt'] = \
self._calculate_thrombolysis_outcome_for_lvo(onset_to_needle)
# Calculate good outcomes for lvo with thrombolysis
outcomes['lvo_add_et'] = \
self._calculate_thrombectomy_outcome_for_lvo(onset_to_puncture)
# Weight outcome results by proportion of patients
# ------------------------------------------------
# 'Results' are good outcomes
results = pd.DataFrame()
# Results for mimic
results['mimic'] = outcomes['mimic'] * mimic
# Results for ich
results['ich'] = outcomes['ich'] * ich
# Results for nlvo
results['nlvo_base'] = nlvo * outcomes['nlvo_base']
results['nlvo_ivt'] = \
nlvo * outcomes['nlvo_add_ivt'] * nlvo_eligible_for_treatment
# Results for lvo
results['lvo_base'] = lvo * outcomes['lvo_base']
results['lvo_ivt'] = \
lvo * outcomes['lvo_add_ivt'] * lvo_eligible_for_treatment
# Adjust thrombectomy/thrombolysis ratio for LVO
# Reduce thrombectomy treatment by LVO responding to IVT
lvo_receiving_et = ((lvo * lvo_eligible_for_treatment *
prop_thrombolysed_lvo_receiving_thrombectomy) -
results['lvo_ivt'])
results['lvo_et'] = lvo_receiving_et * outcomes['lvo_add_et']
p_good = results.sum(axis=1).values
return p_good
@staticmethod
def _calculate_outcome_for_ICH(array_shape):
"""
Calculates the probability of good outcome for patients with
intracranial haemorrhage (ICH).
Sets all values to 0.24
Based on Holodinsky et al. (2018) Drip-and-Ship vs. Mothership:
Modelling Stroke Patient Transport for All Suspected Large Vessel
Occlusion Patients. JAMA Neuro (in press)
arguments
---------
array size
returns
-------
probability of good outcome for ICH (np array)
"""
# Create an array of required length and set all values to 0.24
p_good = np.zeros(array_shape)
p_good[:] = 0.24
return p_good
@staticmethod
def _calculate_outcome_for_stroke_mimics(array_shape):
"""
Calculates the probability of good outcome for patients with stroke
mimic
Sets all values to 1
Based on Holodinsky et al. (2018) Drip-and-Ship vs. Mothership:
Modelling Stroke Patient Transport for All Suspected Large Vessel
Occlusion Patients. JAMA Neuro (in press)
arguments
---------
array size
returns
-------
probability of good outcome for stroke mimics (np array)
"""
# Create an array of required length and set all values to 1
p_good = np.zeros(array_shape)
p_good[:] = 1
return p_good
def _calculate_thrombectomy_outcome_for_lvo(self, onset_to_puncture):
"""
Calculates the probability of additional good outcome for LVO patients
receiving thrombectomy.
arguments
---------
onset_to_puncture : np array in minutes
returns
-------
probability of additional good outcome if given thrombectomy (np array)
"""
p_good_max = 0.5208
p_good_min = 0.1328
# Convert probability to odds
odds_good_max = p_good_max / (1 - p_good_max)
odds_good_min = p_good_min / (1 - p_good_min)
# Calculate fraction of effective time used
fraction_max_effect_time_used = \
onset_to_puncture / self.thrombectomy_time_no_effect
# Calculate odds of good outcome with treatment
odds_good = np.exp(np.log(odds_good_max) -
((np.log(odds_good_max) - np.log(odds_good_min))
* fraction_max_effect_time_used))
# Convert odds to probability
prob_good = odds_good / (1 + odds_good)
prob_good[prob_good < p_good_min] = p_good_min
# Calculate probability of additional good outcome
p_good_add = prob_good - p_good_min
# Set additional good outcomes to zero if past permitted treatment time
mask = onset_to_puncture > self.maximum_permitted_time_to_thrombectomy
p_good_add[mask] = 0
# Ensure no negative outcomes
mask = p_good_add < 0
p_good_add[mask] = 0
return p_good_add
def _calculate_thrombolysis_outcome_for_lvo(self, onset_to_needle):
"""
Calculates the probability of additional good outcome for LVO patients
receiving thrombolysis. Does not include baseline untreated good
outcomes.
arguments
---------
onset_to_needle : np array in minutes
returns
-------
probability of additional good outcome if given thrombolysis
(np array)
"""
p_good_max = 0.2441
p_good_min = 0.1328
# Convert probability to odds
odds_good_max = p_good_max / (1 - p_good_max)
odds_good_min = p_good_min / (1 - p_good_min)
# Calculate fraction of effective time used
fraction_max_effect_time_used = \
onset_to_needle / self.thrombolysis_time_no_effect
# Calculate odds of good outcome with treatment
odds_good = np.exp(np.log(odds_good_max) -
((np.log(odds_good_max) - np.log(odds_good_min))
* fraction_max_effect_time_used))
# Convert odds to probability
prob_good = odds_good / (1 + odds_good)
prob_good[prob_good < p_good_min] = p_good_min
# Calculate probability of additional good outcome
p_good_add = prob_good - p_good_min
# Set additional good outcomes to zero if past permitted treatment time
mask = onset_to_needle > self.maximum_permitted_time_to_thrombolysis
p_good_add[mask] = 0
# Ensure no negative outcomes
mask = p_good_add < 0
p_good_add[mask] = 0
# return outcome and proportion of treated who respond
return p_good_add
def _calculate_thrombolysis_outcome_for_nlvo(self, onset_to_needle):
"""
Calculates the probability of good outcome for non-LVO patients
receiving thrombolysis.
arguments
---------
onset_to_needle : np array in minutes
returns
-------
probability of good outcome if given thrombolysis (np array)
"""
p_good_max = 0.6444
p_good_min = 0.4622
# Convert probability to odds
odds_good_max = p_good_max / (1 - p_good_max)
odds_good_min = p_good_min / (1 - p_good_min)
# Calculate fraction of effective time used
fraction_max_effect_time_used = (onset_to_needle /
self.thrombolysis_time_no_effect)
# Calculate odds of good outcome with treatment
odds_good = np.exp(np.log(odds_good_max) -
((np.log(odds_good_max) - np.log(odds_good_min))
* fraction_max_effect_time_used))
# Convert odds to probability
prob_good = odds_good / (1 + odds_good)
prob_good[prob_good < p_good_min] = p_good_min
# Calculate probability of additional good outcome
p_good_add = prob_good - p_good_min
mask = onset_to_needle > self.maximum_permitted_time_to_thrombolysis
p_good_add[mask] = 0
# Ensure no negative outcomes
mask = p_good_add < 0
p_good_add[mask] = 0
# return outcome and proportion of treated who respond
return p_good_add
| [
[
[
2483,
2494
],
[
5194,
5196
],
[
5514,
5516
],
[
7799,
7801
],
[
8543,
8545
],
[
9489,
9491
],
[
9496,
9498
],
[
9535,
9537
],
[
9559,
9561
],
[
11185,
11187
],
[
11192,
11194
],
[
11231,
11233
],
[
11255,
11257
],
[
12857,
12859
],
[
12864,
12866
],
[
12903,
12905
],
[
12927,
12929
]
],
[
[
2502,
2514
],
[
4811,
4813
],
[
6086,
6088
]
],
[
[
2523,
2539
]
]
] |
# -*- coding: utf-8 -*-
from django.conf.urls.defaults import patterns, url
from djangopypi.feeds import ReleaseFeed
urlpatterns = patterns("djangopypi.views",
    url(r'^$', "root", name="djangopypi-root"),
    url(r'^packages/$', 'packages.index', name='djangopypi-package-index'),
    url(r'^simple/$', 'packages.simple_index',
        name='djangopypi-package-index-simple'),
    url(r'^search/$', 'packages.search', name='djangopypi-search'),
    url(r'^pypi/$', 'root', name='djangopypi-release-index'),
    url(r'^rss/$', ReleaseFeed(), name='djangopypi-rss'),
    url(r'^simple/(?P<package>[\w\d_\.\-]+)/$', 'packages.simple_details',
        name='djangopypi-package-simple'),
    url(r'^pypi/(?P<package>[\w\d_\.\-]+)/$', 'packages.details',
        name='djangopypi-package'),
    url(r'^pypi/(?P<package>[\w\d_\.\-]+)/rss/$', ReleaseFeed(),
        name='djangopypi-package-rss'),
    url(r'^pypi/(?P<package>[\w\d_\.\-]+)/doap.rdf$', 'packages.doap',
        name='djangopypi-package-doap'),
    url(r'^pypi/(?P<package>[\w\d_\.\-]+)/manage/$', 'packages.manage',
        name='djangopypi-package-manage'),
    url(r'^pypi/(?P<package>[\w\d_\.\-]+)/manage/versions/$',
        'packages.manage_versions',
        name='djangopypi-package-manage-versions'),
    url(r'^pypi/(?P<package>[\w\d_\.\-]+)/(?P<version>[\w\d_\.\-]+)/$',
        'releases.details', name='djangopypi-release'),
    url(r'^pypi/(?P<package>[\w\d_\.\-]+)/(?P<version>[\w\d_\.\-]+)/doap.rdf$',
        'releases.doap', name='djangopypi-release-doap'),
    url(r'^pypi/(?P<package>[\w\d_\.\-]+)/(?P<version>[\w\d_\.\-]+)/manage/$',
        'releases.manage', name='djangopypi-release-manage'),
    url(r'^pypi/(?P<package>[\w\d_\.\-]+)/(?P<version>[\w\d_\.\-]+)/metadata/$',
        'releases.manage_metadata', name='djangopypi-release-manage-metadata'),
    url(r'^pypi/(?P<package>[\w\d_\.\-]+)/(?P<version>[\w\d_\.\-]+)/files/$',
        'releases.manage_files', name='djangopypi-release-manage-files'),
    url(r'^pypi/(?P<package>[\w\d_\.\-]+)/(?P<version>[\w\d_\.\-]+)/files/upload/$',
        'releases.upload_file', name='djangopypi-release-upload-file'),
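    # Hedged usage note (added; not in the original urls.py): these named
    # patterns are meant to be reversed elsewhere in the project, e.g.
    #   from django.core.urlresolvers import reverse
    #   reverse('djangopypi-release',
    #           kwargs={'package': 'foo', 'version': '1.0'})
    # resolves to '/pypi/foo/1.0/' assuming this urlconf is mounted at the
    # site root.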
)
| [
[
[
62,
70
],
[
132,
140
]
],
[
[
72,
75
],
[
165,
168
],
[
213,
216
],
[
288,
291
],
[
375,
378
],
[
441,
444
],
[
503,
506
],
[
566,
569
],
[
688,
691
],
[
789,
792
],
[
898,
901
],
[
1009,
1012
],
[
1123,
1126
],
[
1269,
1272
],
[
1396,
1399
],
[
1533,
1536
],
[
1673,
1676
],
[
1833,
1836
],
[
1984,
1987
]
],
[
[
105,
116
],
[
518,
529
],
[
835,
846
]
],
[
[
118,
129
]
]
] |
import sys
from io import StringIO  # pandas.compat.StringIO was removed; use stdlib io

import pandas as pd
import numpy

# numpy.nan is no longer a valid threshold in current numpy releases; use
# sys.maxsize so full arrays are printed
numpy.set_printoptions(threshold=sys.maxsize)


def main():
    # Read the embedded earnings data into a DataFrame
    df = pd.read_csv(StringIO(earnings), sep=",", header=None,
                     names=['symbol', 'exchange', 'eps_pct_diff_surp',
                            'asof_date'])
    # Sort chronologically and preview
    df = df.sort_values(by=['asof_date'])
    print(df.head())
    print(len(df))
    df.to_csv('../../data/events/nyse_earnings_surprises_2013.csv', index=False)
    # Build a quoted, comma-separated list of the unique symbols, then strip
    # every space (removes both the separator spaces and the leading spaces
    # carried in from the raw data)
    myString = ', '.join('"{0}"'.format(s) for s in df.symbol.unique())
    myString = myString.replace(" ", "")
    print(myString)
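
# --- Hedged sketch (illustrative addition; not in the original script) ---
# The same read/clean pattern on a tiny inline sample; skipinitialspace=True
# is an alternative to stripping spaces after the fact.
def _demo():
    sample = 'AAA, NYSE, 1.00, 2013-01-02\nBBB, NYSE, -2.00, 2013-01-01'
    demo_df = pd.read_csv(StringIO(sample), sep=',', header=None,
                          names=['symbol', 'exchange', 'eps_pct_diff_surp',
                                 'asof_date'],
                          skipinitialspace=True)
    print(demo_df.sort_values(by=['asof_date']))
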
#earnings = 'CFN, NYSE, -21.82, 2013-02-09\nNDZ, NYSE, 30.77, 2013-01-29\nAZZ, NYSE, -1.64, 2013-01-10'
earnings = 'CFN, NYSE, -21.82, 2013-02-09\n NDZ, NYSE, 30.77, 2013-01-29\n AZZ, NYSE, -1.64, 2013-01-10\n CLC, NYSE, 2.86, 2013-01-17\n CMC, NYSE, 64.71, 2013-01-08\n FC, NYSE, 15.38, 2013-01-04\n FDO, NYSE, -6.76, 2013-01-04\n FUL, NYSE, 14.29, 2013-01-17\n LEN, NYSE, 30.23, 2013-01-16\n LNN, NYSE, 53.33, 2013-01-09\n MKC, NYSE, -3.48, 2013-01-25\n RT, NYSE, 0.00, 2013-01-10\n MSM, NYSE, 1.00, 2013-01-11\n RPM, NYSE, -4.76, 2013-01-09\n SVU, NYSE, -50.00, 2013-01-11\n TISI, NYSE, 10.00, 2013-01-08\n TXI, NYSE, -5.88, 2013-01-10\n UNF, NYSE, 15.79, 2013-01-04\n WOR, NYSE, 12.20, 2013-01-04\n GBX, NYSE, 12.90, 2013-01-10\n SJR, NYSE, 11.11, 2013-01-10\n OMN, NYSE, -50.00, 2013-01-23\n MON, NYSE, 67.57, 2013-01-09\n GPN, NYSE, 6.90, 2013-01-09\n AYI, NYSE, -13.75, 2013-01-09\n STZ, NYSE, 14.55, 2013-01-10\n SNX, NYSE, 11.54, 2013-01-11\n TAL, NYSE, 600.00, 2013-01-23\n IHS, NYSE, 12.35, 2013-01-09\n EDU, NYSE, -150.00, 2013-01-30\n SAR, NYSE, 28.57, 2013-01-15\n ZEP, NYSE, 11.11, 2013-01-08\n MG, NYSE, 0.00, 2013-01-09\n MOS, NYSE, 7.14, 2013-01-04\n ABT, NYSE, 1.33, 2013-01-24\n ABX, NYSE, 1.83, 2013-02-15\n AB, NYSE, 21.21, 2013-02-13\n TAP, NYSE, 7.81, 2013-02-15\n ACO, NYSE, -15.91, 2013-01-26\n ADM, NYSE, -26.83, 2013-02-05\n AEM, NYSE, -13.33, 2013-02-14\n AEP, NYSE, 11.11, 2013-02-16\n AES, NYSE, 6.67, 2013-02-28\n AET, NYSE, -2.08, 2013-02-01\n AFL, NYSE, 0.00, 2013-02-06\n AGCO, NYSE, 1.02, 2013-02-06\n HES, NYSE, -2.44, 2013-01-31\n AIG, NYSE, 322.22, 2013-02-22\n AIN, NYSE, -9.68, 2013-02-07\n AJG, NYSE, 2.63, 2013-01-30\n ALU, NYSE, 0.00, 2013-02-08\n MATX, NYSE, 24.14, 2013-02-08\n ALK, NYSE, -4.11, 2013-01-25\n ALX, NYSE, -11.52, 2013-02-27\n BEAM, NYSE, 0.00, 2013-02-02\n AME, NYSE, 2.08, 2013-01-25\n TWX, NYSE, 6.36, 2013-02-07\n AVD, NYSE, 11.43, 2013-03-01\n AMN, NYSE, 36.36, 2013-02-22\n AN, NYSE, 3.08, 2013-02-01\n AON, NYSE, 1.60, 2013-02-02\n AP, NYSE, 77.78, 2013-02-05\n APA, NYSE, -1.30, 2013-02-15\n APC, NYSE, 30.00, 2013-02-05\n APD, NYSE, 0.78, 2013-01-24\n APH, NYSE, 4.44, 2013-01-18\n ARG, NYSE, -3.70, 2013-01-25\n AAN, NYSE, -4.00, 2013-02-08\n ARW, NYSE, 13.89, 2013-02-08\n ASGN, NYSE, -25.00, 2013-02-15\n ASH, NYSE, -17.65, 2013-01-30\n ASR, NYSE, 56.88, 2013-02-26\n GAS, NYSE, -9.90, 2013-02-07\n ATO, NYSE, -5.13, 2013-02-07\n ATW, NYSE, 17.02, 2013-01-31\n AU, NYSE, -67.44, 2013-02-21\n AVP, NYSE, 37.04, 2013-02-13\n AVT, NYSE, 21.69, 2013-01-25\n AVY, NYSE, 10.20, 2013-01-31\n AXP, NYSE, 0.00, 2013-01-18\n B, NYSE, 7.84, 2013-02-23\n BA, NYSE, 7.56, 2013-01-31\n BAC, NYSE, 50.00, 2013-01-18\n BAX, NYSE, 0.00, 2013-01-25\n BC, NYSE, 122.22, 2013-01-25\n OMX, NYSE, 6.67, 2013-02-21\n BCE, NYSE, -2.99, 2013-02-08\n BCR, NYSE, 1.80, 2013-02-01\n BCS, NYSE, 40.74, 2013-02-13\n BDX, NYSE, 9.76, 2013-02-06\n BEN, NYSE, 1.68, 2013-02-02\n BGG, NYSE, 250.00, 2013-01-25\n BHE, NYSE, 10.00, 2013-02-05\n BHI, NYSE, 1.64, 2013-01-24\n BID, NYSE, 0.92, 2013-03-01\n BIO, NYSE, 15.67, 2013-02-27\n BK, NYSE, 0.00, 2013-01-16\n BKH, NYSE, 9.68, 2013-02-01\n WRB, NYSE, 28.00, 2013-01-29\n BLC, NYSE, 5.71, 2013-02-09\n BLL, NYSE, -3.03, 2013-02-01\n BLX, NYSE, 20.75, 2013-02-08\n BMI, NYSE, -11.36, 2013-02-07\n BMS, NYSE, 4.00, 2013-02-01\n BMY, NYSE, 9.30, 2013-01-25\n BOH, NYSE, 1.12, 2013-01-31\n BXS, NYSE, -25.00, 2013-01-24\n BPL, NYSE, 18.52, 2013-02-09\nBRK.A, NYSE, 175.73, 2013-03-02\n BRO, NYSE, 7.41, 2013-02-02\n BSX, NYSE, 63.64, 2013-01-30\n BT, NYSE, -89.22, 2013-02-02\n MTRN, NYSE, 17.14, 2013-03-01\n CACI, NYSE, 3.66, 2013-01-31\n CAT, NYSE, -13.10, 2013-01-29\n 
CB, NYSE, 10.00, 2013-01-30\n CBI, NYSE, 9.64, 2013-02-28\n CBM, NYSE, 100.00, 2013-02-07\n CBU, NYSE, -3.70, 2013-01-23\n CBT, NYSE, -28.57, 2013-01-31\n CCC, NYSE, 35.71, 2013-02-22\n CCE, NYSE, 4.65, 2013-02-08\n C, NYSE, -20.69, 2013-01-18\n CCK, NYSE, -7.27, 2013-01-31\n CCU, NYSE, -12.21, 2013-02-01\n CDE, NYSE, -15.15, 2013-02-22\n CDI, NYSE, 8.70, 2013-02-27\n CAH, NYSE, 9.41, 2013-02-06\n CFR, NYSE, 5.38, 2013-01-31\n CHD, NYSE, 0.00, 2013-02-06\n CKP, NYSE, -50.00, 2013-03-06\n CPK, NYSE, 18.60, 2013-03-08\n CI, NYSE, 6.08, 2013-02-08\n CIA, NYSE, -100.00, 2013-03-12\n CKH, NYSE, -93.55, 2013-02-28\n CL, NYSE, 0.71, 2013-02-01\n CLF, NYSE, -25.45, 2013-02-13\n CLH, NYSE, -25.00, 2013-02-21\n CLX, NYSE, 11.11, 2013-02-05\n CMA, NYSE, 7.81, 2013-01-17\n CMO, NYSE, -6.06, 2013-01-31\n CRK, NYSE, -77.42, 2013-02-12\n CMS, NYSE, 4.17, 2013-02-22\n CNA, NYSE, -150.00, 2013-02-12\n CNW, NYSE, -10.34, 2013-02-07\n CHG, NYSE, -4.12, 2013-02-27\n CNL, NYSE, 12.50, 2013-02-20\n COG, NYSE, 14.29, 2013-02-22\n COT, NYSE, -66.67, 2013-02-16\n CP, NYSE, -0.78, 2013-01-30\n CPF, NYSE, 11.54, 2013-02-01\n CQB, NYSE, -17.65, 2013-03-12\n CR, NYSE, -5.15, 2013-01-29\nCRD.B, NYSE, 52.38, 2013-02-14\n CRS, NYSE, 1.64, 2013-02-01\n CSC, NYSE, 22.22, 2013-02-06\n CSL, NYSE, 6.49, 2013-02-09\n CTB, NYSE, 35.29, 2013-02-26\n CTL, NYSE, -1.47, 2013-02-14\n CTS, NYSE, -21.74, 2013-01-29\n CUB, NYSE, -32.86, 2013-02-12\n CMI, NYSE, 14.94, 2013-02-07\n CUZ, NYSE, 40.00, 2013-02-14\n CVC, NYSE, -400.00, 2013-03-01\n CVH, NYSE, 35.82, 2013-02-07\n CW, NYSE, 4.40, 2013-02-21\n CWT, NYSE, 33.33, 2013-02-28\n CX, NYSE, -258.33, 2013-02-08\n CYN, NYSE, -13.00, 2013-01-25\n D, NYSE, 1.47, 2013-02-01\n DBD, NYSE, -8.16, 2013-02-13\n DCO, NYSE, -23.81, 2013-03-05\n DD, NYSE, 22.22, 2013-01-23\n CVA, NYSE, -13.04, 2013-02-07\n DHR, NYSE, 0.00, 2013-01-30\n DIS, NYSE, 2.60, 2013-02-06\n DLX, NYSE, 11.76, 2013-01-25\n DNB, NYSE, -1.24, 2013-02-12\n RRD, NYSE, 16.22, 2013-02-27\n DOV, NYSE, 1.87, 2013-01-25\n DOW, NYSE, -2.94, 2013-02-01\n DRE, NYSE, 0.00, 2013-01-31\n DHI, NYSE, 42.86, 2013-01-30\n UFS, NYSE, -7.09, 2013-02-02\n DTE, NYSE, 0.00, 2013-02-21\n DUK, NYSE, 7.69, 2013-02-14\n DVN, NYSE, 2.63, 2013-02-21\n DV, NYSE, 55.36, 2013-02-07\n EAT, NYSE, 0.00, 2013-01-23\n ECL, NYSE, 0.00, 2013-02-27\n ED, NYSE, -6.85, 2013-02-01\n EDE, NYSE, 27.78, 2013-02-15\n EFX, NYSE, 4.00, 2013-02-07\n EGN, NYSE, -15.58, 2013-01-24\n EGP, NYSE, 0.00, 2013-02-13\n ELY, NYSE, 2.00, 2013-01-31\n EMC, NYSE, 6.98, 2013-01-30\n EMR, NYSE, 0.00, 2013-02-06\n EOG, NYSE, 19.26, 2013-02-14\n EQT, NYSE, 14.29, 2013-01-25\n ESE, NYSE, -44.44, 2013-02-08\n ESV, NYSE, 7.87, 2013-02-21\n ETN, NYSE, -10.87, 2013-02-06\n ETR, NYSE, 21.99, 2013-02-09\n EXAR, NYSE, -14.29, 2013-01-24\n F, NYSE, 19.23, 2013-01-30\n OPY, NYSE, 115.79, 2013-02-02\n CLGX, NYSE, -3.12, 2013-02-22\n FNB, NYSE, 4.55, 2013-01-24\n FCF, NYSE, -18.18, 2013-01-31\n FBP, NYSE, -30.00, 2013-02-06\n FICO, NYSE, 6.94, 2013-01-31\n FLO, NYSE, 12.00, 2013-02-08\n FMC, NYSE, 0.00, 2013-02-07\n FOE, NYSE, -250.00, 2013-03-06\n S, NYSE, 4.35, 2013-02-08\n NEE, NYSE, 9.57, 2013-01-30\n FRT, NYSE, 0.91, 2013-02-13\n FRX, NYSE, -61.54, 2013-01-16\n FUN, NYSE, -433.33, 2013-02-20\n FUR, NYSE, -48.15, 2013-03-08\n GBL, NYSE, -28.72, 2013-02-06\n GVA, NYSE, -29.03, 2013-03-01\n BGC, NYSE, -3.45, 2013-02-26\n GD, NYSE, -26.84, 2013-01-24\n GE, NYSE, 2.33, 2013-01-19\n RHP, NYSE, -50.00, 2013-02-13\n AXLL, NYSE, 95.08, 2013-02-13\n GGG, NYSE, 13.33, 2013-01-29\n GHM, NYSE, -22.22, 
2013-02-02\n GIB, NYSE, -4.35, 2013-01-31\n GLT, NYSE, -25.71, 2013-02-08\n GLW, NYSE, 3.03, 2013-01-30\n GSK, NYSE, 8.33, 2013-02-07\n GLF, NYSE, -160.71, 2013-02-26\n GNI, NYSE, -14.44, 2013-01-30\n GPC, NYSE, 0.00, 2013-02-20\n GRA, NYSE, 4.72, 2013-02-07\n GTY, NYSE, -10.34, 2013-03-01\n GWW, NYSE, -7.28, 2013-01-25\n HAE, NYSE, 4.17, 2013-01-31\n HAL, NYSE, 3.28, 2013-01-26\n HAR, NYSE, -32.95, 2013-02-01\n HVT, NYSE, 30.43, 2013-02-26\n HRC, NYSE, 6.82, 2013-01-24\n HCC, NYSE, 43.75, 2013-02-13\n HCN, NYSE, 1.19, 2013-02-26\n HCP, NYSE, 1.41, 2013-02-13\n HOG, NYSE, 0.00, 2013-01-30\n HE, NYSE, 21.88, 2013-02-16\n HL, NYSE, -25.00, 2013-02-26\n HMA, NYSE, -5.00, 2013-02-15\n HMC, NYSE, -29.58, 2013-02-01\n HMN, NYSE, 91.43, 2013-02-06\n HFC, NYSE, -8.97, 2013-02-27\n HOT, NYSE, 7.69, 2013-02-08\n HP, NYSE, 8.53, 2013-02-01\n HLS, NYSE, 40.63, 2013-02-19\n HRS, NYSE, 4.17, 2013-01-30\n HSC, NYSE, -3.23, 2013-02-15\n HSY, NYSE, -1.33, 2013-02-01\n HUBB, NYSE, 0.00, 2013-01-25\n HUM, NYSE, 11.21, 2013-02-05\n HXL, NYSE, -5.26, 2013-01-24\n IBM, NYSE, 2.67, 2013-01-23\n IDA, NYSE, 10.00, 2013-02-22\n IEX, NYSE, 2.99, 2013-02-05\n IFF, NYSE, -1.19, 2013-02-08\n DIN, NYSE, 1.22, 2013-02-28\n INT, NYSE, 0.00, 2013-02-22\n IP, NYSE, 6.15, 2013-01-30\n IPG, NYSE, 3.70, 2013-02-23\n IO, NYSE, 30.77, 2013-02-14\n IR, NYSE, 8.57, 2013-02-02\n IRF, NYSE, 6.38, 2013-01-29\n ITW, NYSE, -1.11, 2013-01-30\n IVC, NYSE, -56.00, 2013-02-09\n JEC, NYSE, 0.00, 2013-01-24\n JNJ, NYSE, 1.71, 2013-01-23\n JNY, NYSE, 75.00, 2013-02-14\n K, NYSE, 3.08, 2013-02-06\n KAMN, NYSE, 0.00, 2013-02-26\n KDN, NYSE, 0.00, 2013-02-22\n KEX, NYSE, 9.30, 2013-01-31\n KEY, NYSE, -4.55, 2013-01-25\n KIM, NYSE, 6.45, 2013-02-06\n KMB, NYSE, 0.74, 2013-01-26\n KEM, NYSE, 53.33, 2013-02-01\n KMT, NYSE, -21.88, 2013-01-25\n KO, NYSE, 2.27, 2013-02-13\n KSU, NYSE, 10.98, 2013-01-23\n LDL, NYSE, -10.53, 2013-02-27\n LDR, NYSE, 10.42, 2013-02-12\n LEE, NYSE, 25.00, 2013-01-23\n LEG, NYSE, 10.34, 2013-02-05\n LLY, NYSE, 8.97, 2013-01-30\n LM, NYSE, 29.63, 2013-02-02\n LNC, NYSE, 3.77, 2013-02-07\n LPX, NYSE, -10.00, 2013-02-09\n LXU, NYSE, 145.00, 2013-03-01\n LTC, NYSE, -1.72, 2013-02-22\n L, NYSE, -37.93, 2013-02-12\n LUK, NYSE, 210.17, 2013-02-26\n LUV, NYSE, 28.57, 2013-01-25\n LUX, NYSE, 4.35, 2013-03-01\n MKL, NYSE, 314.07, 2013-02-05\n MAN, NYSE, 18.18, 2013-01-31\n MTW, NYSE, 12.50, 2013-02-01\n SM, NYSE, 95.65, 2013-02-21\n MAS, NYSE, 500.00, 2013-02-12\n MTZ, NYSE, 2.22, 2013-03-01\n MCD, NYSE, 3.76, 2013-01-24\n MDC, NYSE, 40.48, 2013-02-01\n MDP, NYSE, 1.14, 2013-01-25\n MDR, NYSE, 13.04, 2013-03-01\n MDU, NYSE, 2.56, 2013-02-05\n MED, NYSE, 12.00, 2013-03-08\n CVS, NYSE, 2.73, 2013-02-07\n MFC, NYSE, -12.50, 2013-02-08\n MGA, NYSE, 36.84, 2013-03-02\n MGM, NYSE, 0.00, 2013-02-21\n MLR, NYSE, -11.76, 2013-03-07\n MLI, NYSE, 14.29, 2013-02-06\n MMC, NYSE, 0.00, 2013-02-13\n MMM, NYSE, 0.00, 2013-01-25\n MSA, NYSE, 3.64, 2013-02-14\n MNR, NYSE, 38.46, 2013-02-08\n MO, NYSE, 1.85, 2013-02-01\n MOD, NYSE, -75.00, 2013-02-02\nMOG.A, NYSE, -8.54, 2013-01-26\n MHK, NYSE, 7.45, 2013-02-22\n MSI, NYSE, 7.61, 2013-01-24\n MCY, NYSE, -168.00, 2013-02-05\n MRK, NYSE, 2.47, 2013-02-02\n MRO, NYSE, -19.12, 2013-02-07\n POWR, NYSE, 18.18, 2013-03-08\n MTG, NYSE, -37.87, 2013-03-01\n MTB, NYSE, 2.76, 2013-01-17\n MTX, NYSE, 6.38, 2013-02-01\n MUR, NYSE, 59.23, 2013-01-31\n MYE, NYSE, -7.14, 2013-02-14\n NBL, NYSE, 54.21, 2013-02-08\n NBR, NYSE, 3.45, 2013-02-20\n NE, NYSE, -19.35, 2013-01-24\n NEM, NYSE, 13.27, 2013-02-22\n NFG, NYSE, 
6.58, 2013-02-08\n NHI, NYSE, 1.20, 2013-02-15\n NI, NYSE, 0.00, 2013-02-20\n NJR, NYSE, -17.48, 2013-02-08\n THC, NYSE, -24.64, 2013-02-27\n NNN, NYSE, 4.55, 2013-02-08\n NOC, NYSE, 18.39, 2013-01-31\n NPK, NYSE, -11.23, 2013-02-16\n NR, NYSE, 0.00, 2013-02-15\n NSC, NYSE, 9.24, 2013-01-23\n NUE, NYSE, 55.17, 2013-01-30\n NVR, NYSE, 8.22, 2013-01-25\n NWL, NYSE, 2.38, 2013-02-02\n NWN, NYSE, -4.55, 2013-03-02\n NYT, NYSE, 3.23, 2013-02-08\n OCR, NYSE, 1.18, 2013-02-20\n OGE, NYSE, 14.71, 2013-02-28\n OHI, NYSE, 3.57, 2013-02-12\n OI, NYSE, 8.11, 2013-01-31\n OII, NYSE, 2.78, 2013-02-14\n OKE, NYSE, 17.78, 2013-02-26\n OLN, NYSE, 2.94, 2013-01-29\n BRS, NYSE, 32.95, 2013-02-05\n OLP, NYSE, 0.00, 2013-03-15\n OMC, NYSE, 3.67, 2013-02-13\n OMI, NYSE, -12.77, 2013-02-12\n ORB, NYSE, 31.82, 2013-02-15\n ORI, NYSE, -28.57, 2013-01-25\n OSK, NYSE, 93.55, 2013-01-26\n OXY, NYSE, 10.24, 2013-02-01\n PHX, NYSE, -18.75, 2013-02-08\n FCFS, NYSE, 2.20, 2013-01-24\n PBI, NYSE, 7.69, 2013-02-01\n PCG, NYSE, 3.51, 2013-02-22\n PCL, NYSE, 68.97, 2013-01-29\n PCP, NYSE, -3.23, 2013-01-25\n TPC, NYSE, 0.00, 2013-02-22\n PDS, NYSE, 250.00, 2013-02-15\n PEG, NYSE, 5.13, 2013-02-22\n PEI, NYSE, 0.00, 2013-02-26\n PEP, NYSE, 3.81, 2013-02-15\n PFE, NYSE, 6.82, 2013-01-30\n PG, NYSE, 9.91, 2013-01-26\n PGR, NYSE, 0.00, 2013-01-19\n PH, NYSE, 6.25, 2013-01-19\n PHG, NYSE, -4.17, 2013-01-30\n PHM, NYSE, 9.68, 2013-02-01\n PKD, NYSE, -150.00, 2013-02-22\n PKY, NYSE, 17.39, 2013-02-12\n PNC, NYSE, 24.82, 2013-01-18\n PNM, NYSE, 18.18, 2013-03-02\n PNR, NYSE, 6.82, 2013-01-30\n PNW, NYSE, 41.18, 2013-02-23\n POM, NYSE, -5.00, 2013-03-02\n POT, NYSE, -11.86, 2013-02-01\n PPG, NYSE, -0.65, 2013-01-15\n PPL, NYSE, 6.52, 2013-02-15\n PRGO, NYSE, 3.82, 2013-02-02\n PL, NYSE, 11.36, 2013-02-07\n PSB, NYSE, 5.04, 2013-02-20\n CSH, NYSE, 12.61, 2013-01-25\n PWR, NYSE, 36.11, 2013-02-22\n PX, NYSE, 0.00, 2013-01-24\n KWR, NYSE, 26.32, 2013-03-07\n R, NYSE, 6.36, 2013-02-01\n RBC, NYSE, 2.70, 2013-02-05\n RDC, NYSE, 28.57, 2013-03-01\n HTSI, NYSE, -20.69, 2013-02-01\n RES, NYSE, 8.33, 2013-01-24\n RGS, NYSE, -76.92, 2013-02-01\n RGR, NYSE, 36.99, 2013-02-28\n RHI, NYSE, 2.44, 2013-01-30\n RJF, NYSE, 0.00, 2013-01-24\n RLI, NYSE, 102.27, 2013-01-24\n ROG, NYSE, -8.62, 2013-02-20\n ROK, NYSE, -2.38, 2013-01-31\n ROL, NYSE, -5.88, 2013-01-24\n ROP, NYSE, 1.37, 2013-01-29\n RTI, NYSE, 25.00, 2013-02-07\n RTN, NYSE, 23.08, 2013-01-25\n RYL, NYSE, 12.00, 2013-01-30\n BSAC, NYSE, -1.96, 2013-02-05\n T, NYSE, -6.38, 2013-01-25\n SCG, NYSE, 0.00, 2013-02-22\n SCHW, NYSE, 0.00, 2013-01-17\n SCL, NYSE, -5.56, 2013-02-20\n SMG, NYSE, 0.88, 2013-02-07\n SEE, NYSE, 17.24, 2013-02-20\n SF, NYSE, 5.17, 2013-02-26\n SFE, NYSE, -121.74, 2013-03-08\n SHW, NYSE, -0.87, 2013-02-01\n STC, NYSE, 29.27, 2013-02-15\n SJI, NYSE, -6.67, 2013-03-01\n JOE, NYSE, -1000.00, 2013-03-01\n SJW, NYSE, 72.22, 2013-02-20\n SLB, NYSE, 0.00, 2013-01-19\n HSH, NYSE, 29.17, 2013-02-01\n AOS, NYSE, 12.35, 2013-01-25\n SNA, NYSE, 4.38, 2013-02-08\n PII, NYSE, 0.81, 2013-01-30\n SNV, NYSE, 0.00, 2013-01-23\n SO, NYSE, 12.82, 2013-01-31\n SON, NYSE, 3.70, 2013-02-14\n SPA, NYSE, 30.00, 2013-02-06\n TRV, NYSE, 500.00, 2013-01-23\n SR, NYSE, 14.68, 2013-02-06\n NVE, NYSE, 0.00, 2013-02-23\n SCI, NYSE, 10.00, 2013-02-13\n SSP, NYSE, -3.85, 2013-02-27\n STT, NYSE, 11.00, 2013-01-19\n STI, NYSE, 6.56, 2013-01-19\n STJ, NYSE, 2.22, 2013-01-24\n STL, NYSE, 14.29, 2013-01-24\n STR, NYSE, 8.57, 2013-02-21\n STE, NYSE, 3.57, 2013-02-07\n SYK, NYSE, 0.88, 2013-01-24\n SUN, NYSE, 
-4.88, 2013-03-30\n SUP, NYSE, -61.54, 2013-03-02\n SWK, NYSE, 3.01, 2013-01-25\n SWN, NYSE, 2.33, 2013-02-21\n SWS, NYSE, 0.00, 2013-02-07\n SWX, NYSE, -2.44, 2013-02-27\n SWY, NYSE, 23.68, 2013-02-22\n SXI, NYSE, 1.10, 2013-02-02\n SYY, NYSE, 19.51, 2013-02-05\n TNC, NYSE, 6.90, 2013-02-20\n TCB, NYSE, -16.67, 2013-01-31\n TCO, NYSE, 5.15, 2013-02-14\n TDS, NYSE, -725.00, 2013-02-27\n TDW, NYSE, 38.64, 2013-02-02\n TDY, NYSE, 8.33, 2013-01-25\n TE, NYSE, 0.00, 2013-02-06\n TER, NYSE, 600.00, 2013-01-24\n TEVA, NYSE, -0.75, 2013-02-08\n TEX, NYSE, -51.28, 2013-02-20\n TFX, NYSE, 1.79, 2013-02-22\n TEN, NYSE, -2.94, 2013-02-01\n TKR, NYSE, 25.00, 2013-01-25\n TMK, NYSE, 1.53, 2013-02-05\n TMO, NYSE, 6.25, 2013-02-01\n TOT, NYSE, -1.12, 2013-02-14\n TM, NYSE, -44.72, 2013-02-06\n TR, NYSE, 37.50, 2013-02-14\n TRN, NYSE, 7.14, 2013-02-21\n TRP, NYSE, -15.09, 2013-02-13\n TRR, NYSE, 566.67, 2013-02-07\n TSO, NYSE, -2.90, 2013-02-07\n TSS, NYSE, -3.03, 2013-01-23\n TTI, NYSE, -21.05, 2013-03-01\n TXT, NYSE, -1.75, 2013-01-24\n TYL, NYSE, 10.71, 2013-02-07\n TSN, NYSE, 23.08, 2013-02-02\n UDR, NYSE, 2.94, 2013-02-06\n UFI, NYSE, -42.86, 2013-01-23\n UGI, NYSE, -15.89, 2013-02-01\n UAM, NYSE, 45.45, 2013-02-20\n UHS, NYSE, 9.89, 2013-03-01\n UHT, NYSE, 268.42, 2013-02-28\n UIL, NYSE, -9.68, 2013-02-22\n UNH, NYSE, 0.00, 2013-01-18\n KMPR, NYSE, -250.00, 2013-02-08\n UNM, NYSE, 5.13, 2013-02-06\n UNP, NYSE, 1.39, 2013-01-25\n UNT, NYSE, 2.06, 2013-02-20\n URS, NYSE, -1.04, 2013-02-26\n USG, NYSE, -67.86, 2013-02-07\n MUX, NYSE, -600.00, 2013-03-09\n USM, NYSE, -1100.00, 2013-02-27\n USPH, NYSE, 3.03, 2013-03-08\n UTL, NYSE, 3.13, 2013-01-31\n UTX, NYSE, 26.47, 2013-01-24\n VMI, NYSE, 8.48, 2013-02-13\n VAR, NYSE, 3.49, 2013-01-24\n VFC, NYSE, 1.32, 2013-02-16\n CBS, NYSE, -8.57, 2013-02-15\n VLO, NYSE, 57.98, 2013-01-30\n VMC, NYSE, -81.82, 2013-02-15\n VLY, NYSE, 0.00, 2013-01-31\n VNO, NYSE, 6.09, 2013-02-27\n VSH, NYSE, 37.50, 2013-02-06\n WTS, NYSE, 5.17, 2013-02-20\n WBS, NYSE, 6.12, 2013-01-19\n WEC, NYSE, 4.88, 2013-01-31\n WFC, NYSE, 3.41, 2013-01-14\n WG, NYSE, 57.14, 2013-03-07\n WGL, NYSE, 9.62, 2013-02-07\n WHR, NYSE, 3.15, 2013-02-01\n WMB, NYSE, -3.85, 2013-02-21\n WMK, NYSE, 20.29, 2013-03-06\n WNC, NYSE, 3.23, 2013-02-06\n TEG, NYSE, -5.32, 2013-03-01\n WR, NYSE, 80.00, 2013-03-01\n WRE, NYSE, 2.17, 2013-02-14\n WRI, NYSE, 4.44, 2013-02-15\n WPP, NYSE, -175.00, 2013-02-12\n WSO, NYSE, -12.77, 2013-02-15\n WST, NYSE, 8.93, 2013-02-22\n WWW, NYSE, 200.00, 2013-02-20\n WY, NYSE, 36.84, 2013-01-26\n X, NYSE, 45.33, 2013-01-30\n XL, NYSE, 138.24, 2013-02-08\n XOM, NYSE, 10.00, 2013-02-02\n XRX, NYSE, 7.14, 2013-01-25\n Y, NYSE, 54.64, 2013-02-22\n HRG, NYSE, -50.00, 2013-02-09\n CRY, NYSE, 33.33, 2013-02-15\n CHK, NYSE, 85.71, 2013-02-22\n DDR, NYSE, 0.00, 2013-02-13\n ELS, NYSE, 0.00, 2013-01-29\n ALG, NYSE, 37.93, 2013-03-07\n ETH, NYSE, 5.41, 2013-01-23\n ATR, NYSE, 0.00, 2013-02-08\n GGP, NYSE, 6.90, 2013-02-05\n MSL, NYSE, -10.00, 2013-01-30\n RCL, NYSE, 66.67, 2013-02-05\n CWEI, NYSE, -34.04, 2013-02-22\n HR, NYSE, 0.00, 2013-02-21\n RGA, NYSE, 35.56, 2013-02-01\n RIG, NYSE, 12.35, 2013-03-02\n SKT, NYSE, 2.22, 2013-02-13\n TWI, NYSE, -80.85, 2013-02-26\n BDN, NYSE, 17.86, 2013-02-07\n KGC, NYSE, -4.55, 2013-02-14\n YPF, NYSE, 26.67, 2013-03-13\n CPT, NYSE, 1.04, 2013-02-01\n SGY, NYSE, 67.27, 2013-02-26\n BFS, NYSE, -11.48, 2013-03-08\n BWA, NYSE, 3.57, 2013-02-15\n EQR, NYSE, 0.00, 2013-02-06\n CLP, NYSE, -81.25, 2013-02-08\n KOF, NYSE, -7.78, 2013-02-28\n OKS, NYSE, 3.13, 
2013-02-26\n SQM, NYSE, -15.63, 2013-03-06\n BYD, NYSE, -138.46, 2013-03-05\n CBL, NYSE, 8.77, 2013-02-06\n DECK, NYSE, 7.36, 2013-03-01\n IT, NYSE, 6.78, 2013-02-08\n GFI, NYSE, -36.36, 2013-02-15\n HST, NYSE, 8.11, 2013-02-22\n LXP, NYSE, 0.00, 2013-02-22\n OMG, NYSE, -533.33, 2013-02-20\n REG, NYSE, 8.62, 2013-01-31\n TUC, NYSE, -5.56, 2013-03-08\n AF, NYSE, 7.14, 2013-01-24\n BFR, NYSE, 13.33, 2013-02-09\n HHS, NYSE, 26.32, 2013-02-01\n MHO, NYSE, -3.45, 2013-02-01\n NFX, NYSE, -36.36, 2013-02-20\n SPG, NYSE, 13.93, 2013-02-05\n SU, NYSE, -14.20, 2013-02-06\n SUI, NYSE, -2.44, 2013-02-22\n TV, NYSE, 5.13, 2013-02-26\n CGI, NYSE, 0.00, 2013-01-24\n CYT, NYSE, 77.42, 2013-02-01\n EMN, NYSE, 0.00, 2013-02-01\n GRT, NYSE, 0.00, 2013-02-15\n MAA, NYSE, -1.74, 2013-02-07\n PLT, NYSE, 0.00, 2013-01-30\n BZH, NYSE, 24.27, 2013-01-29\n ELX, NYSE, 0.00, 2013-02-01\n AGM, NYSE, -5.41, 2013-03-19\n MLM, NYSE, -13.21, 2013-02-13\n AKS, NYSE, 14.29, 2013-01-30\n ALB, NYSE, 18.18, 2013-01-23\n VRX, NYSE, -4.00, 2013-03-01\n CBR, NYSE, 140.00, 2013-02-22\n MAC, NYSE, 3.45, 2013-02-07\n RKT, NYSE, 5.47, 2013-01-23\n RYN, NYSE, 3.51, 2013-01-25\n ADC, NYSE, 1.96, 2013-02-28\nBRK.B, NYSE, 0.88, 2013-03-02\n EXP, NYSE, 0.00, 2013-02-07\n GGB, NYSE, -66.67, 2013-02-22\n SSD, NYSE, -100.00, 2013-02-08\n ESS, NYSE, 4.02, 2013-02-01\n FR, NYSE, 0.00, 2013-02-21\n HIW, NYSE, 0.00, 2013-02-13\n IMAX, NYSE, 58.33, 2013-02-22\n AIV, NYSE, 4.00, 2013-02-08\n FCH, NYSE, 50.00, 2013-02-20\n ITGR, NYSE, 6.00, 2013-02-26\n GEO, NYSE, 7.32, 2013-02-22\n CLI, NYSE, 4.76, 2013-02-08\n DAR, NYSE, -20.00, 2013-02-28\n RS, NYSE, 9.28, 2013-02-22\n CPE, NYSE, -66.67, 2013-03-15\n KNX, NYSE, 4.76, 2013-01-31\n O, NYSE, 3.70, 2013-02-15\n PKX, NYSE, -15.35, 2013-03-02\n COF, NYSE, -12.35, 2013-01-18\n CYD, NYSE, -23.14, 2013-02-28\n IRS, NYSE, 57.50, 2013-02-20\n MCK, NYSE, -13.50, 2013-02-01\n SWC, NYSE, 116.67, 2013-02-28\n STM, NYSE, -22.22, 2013-01-31\n TEO, NYSE, 28.36, 2013-03-01\n TRK, NYSE, 400.00, 2013-03-07\n GFF, NYSE, 300.00, 2013-01-31\n LMT, NYSE, -0.56, 2013-01-25\n APU, NYSE, -13.89, 2013-02-01\n AGU, NYSE, 6.93, 2013-02-22\n LH, NYSE, -4.35, 2013-02-09\n DDD, NYSE, 0.00, 2013-02-26\n WEX, NYSE, 0.94, 2013-02-07\n AFG, NYSE, 3.08, 2013-02-12\n RMD, NYSE, 3.92, 2013-01-25\n WAB, NYSE, 2.29, 2013-02-20\n CIB, NYSE, 20.39, 2013-03-05\n CAM, NYSE, -1.04, 2013-02-01\n FCX, NYSE, 5.41, 2013-01-23\n RNR, NYSE, 70.27, 2013-02-06\n AVX, NYSE, -20.00, 2013-01-25\n RWT, NYSE, 85.19, 2013-02-22\n AXE, NYSE, 0.76, 2013-01-30\n CLB, NYSE, 3.54, 2013-01-31\n MD, NYSE, 1.54, 2013-02-01\n THG, NYSE, 6.25, 2013-02-07\n BAP, NYSE, 3.72, 2013-02-06\n DO, NYSE, 28.18, 2013-02-06\n RE, NYSE, 175.86, 2013-02-07\n DST, NYSE, 17.82, 2013-02-01\n EL, NYSE, 11.54, 2013-02-06\n ESC, NYSE, -34.88, 2013-03-01\n MIG, NYSE, -100.00, 2013-02-13\n WAT, NYSE, 0.63, 2013-01-23\n EME, NYSE, 11.48, 2013-02-27\n HIG, NYSE, 80.00, 2013-02-05\n ITT, NYSE, 2.63, 2013-02-28\n SPN, NYSE, 4.26, 2013-02-27\n SWM, NYSE, -9.18, 2013-02-07\n SCCO, NYSE, 0.00, 2013-02-02\n RCI, NYSE, 20.55, 2013-02-15\n EIX, NYSE, 66.04, 2013-02-27\n IRM, NYSE, -20.00, 2013-03-01\n REV, NYSE, -19.18, 2013-02-06\n SPH, NYSE, -17.46, 2013-02-08\n CCJ, NYSE, 46.34, 2013-02-09\n PGI, NYSE, -6.67, 2013-02-14\n CRR, NYSE, 2.30, 2013-02-01\n BVN, NYSE, -26.67, 2013-03-01\n FCN, NYSE, 11.67, 2013-03-01\n RPT, NYSE, 8.00, 2013-02-13\n TUP, NYSE, 1.79, 2013-01-30\n ASB, NYSE, 0.00, 2013-01-18\n GWR, NYSE, -2.47, 2013-02-13\n TBI, NYSE, 35.71, 2013-02-07\n FFG, NYSE, 24.00, 2013-02-08\n 
USNA, NYSE, 4.96, 2013-02-06\n CSV, NYSE, 4.35, 2013-02-26\n LVB, NYSE, 12.77, 2013-03-07\n ALR, NYSE, 6.25, 2013-02-16\n OCN, NYSE, -7.84, 2013-03-01\n PAA, NYSE, 42.03, 2013-02-07\n DNR, NYSE, 24.14, 2013-02-22\n HMY, NYSE, 50.00, 2013-02-05\n TGI, NYSE, 5.80, 2013-01-31\n PAG, NYSE, 7.55, 2013-02-07\n GEL, NYSE, -2.86, 2013-02-15\n IM, NYSE, 23.73, 2013-02-14\n LIN, NYSE, -21.92, 2013-03-01\n NUS, NYSE, 2.11, 2013-02-07\n CNI, NYSE, -0.70, 2013-01-23\n LAD, NYSE, 10.45, 2013-02-21\n NSP, NYSE, 4.44, 2013-02-09\n DEL, NYSE, -29.63, 2013-02-28\n DGX, NYSE, -3.81, 2013-01-24\n KRC, NYSE, 3.23, 2013-01-31\n MTH, NYSE, 50.00, 2013-02-01\n NCR, NYSE, 4.35, 2013-02-08\n OFG, NYSE, -50.00, 2013-02-08\n IVZ, NYSE, -4.26, 2013-02-01\n DX, NYSE, 9.68, 2013-02-21\n FBC, NYSE, 38.27, 2013-02-09\n ALV, NYSE, 9.85, 2013-02-01\n ARE, NYSE, 0.87, 2013-02-08\n BBT, NYSE, 2.86, 2013-01-18\n CGG, NYSE, -59.32, 2013-03-02\n BXP, NYSE, 2.42, 2013-01-30\n MS, NYSE, 73.08, 2013-01-19\n SRT, NYSE, 200.00, 2013-02-28\n HLX, NYSE, 162.86, 2013-02-21\n FLS, NYSE, 0.35, 2013-02-22\n MT, NYSE, -880.00, 2013-02-07\n PXD, NYSE, -2.35, 2013-02-14\n SLG, NYSE, 0.87, 2013-01-31\n NAT, NYSE, 0.00, 2013-02-12\n CSU, NYSE, -22.22, 2013-03-07\n DRQ, NYSE, 2.70, 2013-03-01\n FDP, NYSE, -100.00, 2013-02-20\n NLY, NYSE, 35.29, 2013-02-07\n TLM, NYSE, -300.00, 2013-02-18\n TSM, NYSE, 0.00, 2013-01-18\n YUM, NYSE, 2.47, 2013-02-05\n AMG, NYSE, 4.94, 2013-01-30\n EPR, NYSE, -4.40, 2013-02-27\n FE, NYSE, 1.27, 2013-02-26\n LFL, NYSE, -80.00, 2013-05-01\n MTD, NYSE, 8.44, 2013-02-07\n SID, NYSE, 57.14, 2013-03-29\n IN, NYSE, -18.18, 2013-03-12\n AI, NYSE, 9.91, 2013-02-07\n URI, NYSE, 23.30, 2013-01-24\n INGR, NYSE, 4.26, 2013-02-08\n RAS, NYSE, 153.85, 2013-02-14\n UNS, NYSE, 12.50, 2013-02-27\n ASI, NYSE, -17.95, 2013-03-07\n ANH, NYSE, 7.14, 2013-02-08\n OFC, NYSE, 4.08, 2013-02-09\n GPX, NYSE, 6.67, 2013-02-27\n WAC, NYSE, 11.32, 2013-03-19\n RBA, NYSE, -12.50, 2013-02-27\n WDR, NYSE, 5.17, 2013-01-30\n LHO, NYSE, 4.44, 2013-02-21\n LNT, NYSE, -1.72, 2013-02-15\n LVLT, NYSE, 11.11, 2013-02-13\n MFA, NYSE, 0.00, 2013-03-07\n OME, NYSE, 33.33, 2013-03-06\n EQY, NYSE, 7.14, 2013-02-21\n FII, NYSE, 10.00, 2013-01-25\n FMX, NYSE, 39.60, 2013-02-28\n LLL, NYSE, 6.13, 2013-01-31\n VTR, NYSE, 2.06, 2013-02-16\n WCN, NYSE, -7.69, 2013-02-15\n AVB, NYSE, -0.71, 2013-01-31\n GIL, NYSE, 6.67, 2013-02-07\n HZO, NYSE, 10.00, 2013-01-30\n AWR, NYSE, 43.24, 2013-03-01\n CLS, NYSE, 46.67, 2013-01-23\n EPD, NYSE, 7.58, 2013-02-01\n RSG, NYSE, -13.95, 2013-02-08\n WM, NYSE, -5.00, 2013-02-15\n AKR, NYSE, 3.57, 2013-02-06\n CVG, NYSE, 4.17, 2013-02-08\n RRC, NYSE, 228.57, 2013-02-27\n SAP, NYSE, -2.38, 2013-01-24\n CCI, NYSE, 57.14, 2013-01-24\n PQ, NYSE, -20.00, 2013-03-01\n WFT, NYSE, -94.44, 2013-02-27\n CAA, NYSE, 14.29, 2013-02-01\n ENB, NYSE, -6.67, 2013-02-16\n GMK, NYSE, -8.33, 2013-02-28\n MMR, NYSE, 75.00, 2013-01-19\n PB, NYSE, 1.19, 2013-01-26\n VIV, NYSE, -7.25, 2013-02-26\n AXL, NYSE, -111.76, 2013-02-09\n BP, NYSE, 19.05, 2013-02-06\n ETM, NYSE, 13.04, 2013-02-09\n HT, NYSE, 10.00, 2013-02-21\n BYI, NYSE, 5.26, 2013-02-01\n CEB, NYSE, 4.84, 2013-02-07\n INFY, NYSE, 5.56, 2013-01-12\n JLL, NYSE, -0.38, 2013-01-30\n AZN, NYSE, 24.64, 2013-02-01\n SFG, NYSE, 7.23, 2013-01-30\n TREX, NYSE, 27.78, 2013-02-20\n GS, NYSE, 61.38, 2013-01-17\n SYX, NYSE, -144.44, 2013-03-06\n WCC, NYSE, -2.75, 2013-02-01\n JNPR, NYSE, 26.67, 2013-01-25\n RDN, NYSE, -146.43, 2013-02-12\n RAI, NYSE, 4.11, 2013-02-13\n SKX, NYSE, 172.73, 2013-02-14\n WTM, NYSE, 
724.10, 2013-02-06\n NCI, NYSE, 29.17, 2013-02-15\n BLT, NYSE, -21.74, 2013-03-08\n BLK, NYSE, 5.88, 2013-01-18\n CIR, NYSE, 25.45, 2013-03-01\n PKG, NYSE, -1.61, 2013-01-23\n PKI, NYSE, 0.00, 2013-02-01\n UGP, NYSE, 38.10, 2013-02-21\n WWE, NYSE, 0.00, 2013-03-01\n SNN, NYSE, 2.86, 2013-02-08\n UPS, NYSE, -4.35, 2013-02-01\n XOXO, NYSE, 62.50, 2013-03-07\n SLF, NYSE, 36.36, 2013-02-14\n CDR, NYSE, 33.33, 2013-03-08\n RLH, NYSE, -21.43, 2013-03-01\n EW, NYSE, 16.88, 2013-02-05\n MET, NYSE, 5.93, 2013-02-13\n FBR, NYSE, -28.57, 2013-01-31\n VVC, NYSE, 23.81, 2013-02-15\n BAM, NYSE, 148.28, 2013-02-16\n NVS, NYSE, 0.00, 2013-01-24\n VGR, NYSE, -43.75, 2013-02-27\n BHLB, NYSE, 0.00, 2013-01-29\n CRL, NYSE, 6.67, 2013-02-14\n CYH, NYSE, 0.00, 2013-02-22\n MBT, NYSE, 65.71, 2013-03-20\n MTOR, NYSE, -375.00, 2013-01-31\n CNQ, NYSE, -29.55, 2013-03-08\n ERJ, NYSE, -25.27, 2013-03-13\n VZ, NYSE, -28.30, 2013-01-23\n EVC, NYSE, 12.50, 2013-02-28\n PBR, NYSE, 0.00, 2013-02-05\n XEL, NYSE, 3.57, 2013-02-01\n ALE, NYSE, 0.00, 2013-02-16\n HW, NYSE, -20.00, 2013-01-30\n POL, NYSE, 0.00, 2013-01-30\n UMC, NYSE, 0.00, 2013-02-07\n ASX, NYSE, 41.43, 2013-01-31\n COH, NYSE, -4.65, 2013-01-23\n CXW, NYSE, 7.32, 2013-02-14\n DVA, NYSE, 6.33, 2013-02-15\n EXC, NYSE, -1.54, 2013-02-08\n MCO, NYSE, 7.14, 2013-02-09\n BRFS, NYSE, 43.48, 2013-03-06\n TU, NYSE, -1.15, 2013-02-16\n WIT, NYSE, 0.00, 2013-01-18\n ERF, NYSE, 462.50, 2013-02-22\n GG, NYSE, -22.22, 2013-02-15\n HNT, NYSE, -2.70, 2013-01-31\n NXY, NYSE, -23.44, 2013-02-26\n NYCB, NYSE, -3.45, 2013-01-31\n SXT, NYSE, -8.33, 2013-02-08\n CPG, NYSE, -191.67, 2013-03-15\n AMX, NYSE, -40.00, 2013-02-13\n MPX, NYSE, -50.00, 2013-01-24\n OIS, NYSE, -5.82, 2013-02-20\n BH, NYSE, -35.35, 2013-01-26\n MMP, NYSE, 6.15, 2013-02-06\n PES, NYSE, 250.00, 2013-02-14\n ABB, NYSE, -18.75, 2013-02-15\n RDY, NYSE, -27.27, 2013-02-15\n KMR, NYSE, -19.23, 2013-02-22\n GEN, NYSE, -20.00, 2013-02-12\n ADS, NYSE, 2.38, 2013-02-01\n CVI, NYSE, 5.15, 2013-03-13\n FTI, NYSE, 0.00, 2013-02-13\n PRA, NYSE, 10.64, 2013-02-20\n STO, NYSE, 26.47, 2013-02-08\n BEL, NYSE, -266.67, 2013-02-21\n FIS, NYSE, -8.82, 2013-02-13\n COL, NYSE, 4.44, 2013-01-19\n KAI, NYSE, 7.32, 2013-02-27\n FRM, NYSE, 233.33, 2013-03-09\n ABC, NYSE, 0.00, 2013-01-25\n BG, NYSE, -76.15, 2013-02-08\n FRO, NYSE, 106.52, 2013-02-22\n ECA, NYSE, -3.12, 2013-02-15\n CS, NYSE, -54.76, 2013-02-08\n EEP, NYSE, -30.77, 2013-02-14\n CVX, NYSE, -1.65, 2013-02-02\n DB, NYSE, 280.49, 2013-02-01\n GXP, NYSE, 200.00, 2013-03-01\n JHX, NYSE, 371.43, 2013-02-28\n PFG, NYSE, 10.81, 2013-02-01\n PVR, NYSE, -227.78, 2013-02-21\n AAP, NYSE, 17.33, 2013-02-08\n KND, NYSE, 4.55, 2013-02-26\n WTW, NYSE, 9.09, 2013-02-14\n CNC, NYSE, 42.42, 2013-02-06\n PRU, NYSE, -2.87, 2013-02-07\n BCH, NYSE, 12.94, 2013-02-06\n NS, NYSE, -19.35, 2013-02-02\n ITUB, NYSE, -5.00, 2013-02-05\n SXL, NYSE, 20.88, 2013-02-21\n VALE, NYSE, -26.00, 2013-02-28\n TNP, NYSE, -128.57, 2013-04-20\n LCI, NYSE, 233.33, 2013-02-08\n AUO, NYSE, -122.73, 2013-02-07\n GTI, NYSE, 19.05, 2013-02-27\n HNR, NYSE, -127.27, 2013-05-04\n MWE, NYSE, -38.89, 2013-02-28\n NLS, NYSE, 4.55, 2013-03-05\n RGC, NYSE, 40.00, 2013-02-08\n SBS, NYSE, 48.25, 2013-03-22\n JAH, NYSE, 2.40, 2013-02-15\n NPO, NYSE, 110.71, 2013-02-08\n TRI, NYSE, 9.09, 2013-02-14\n CAE, NYSE, 12.50, 2013-02-14\n LF, NYSE, 971.43, 2013-02-07\n SNY, NYSE, 1.30, 2013-02-08\n WHG, NYSE, 15.91, 2013-02-08\n BANC, NYSE, -300.00, 2013-03-02\n GTN, NYSE, 4.35, 2013-02-21\n BAK, NYSE, -150.00, 2013-02-08\n COP, NYSE, 
1.42, 2013-01-31\n CNP, NYSE, 40.00, 2013-02-28\n EEQ, NYSE, -18.18, 2013-02-15\n MRH, NYSE, 60.26, 2013-02-08\n NGS, NYSE, 26.09, 2013-03-15\n NRP, NYSE, 34.88, 2013-02-14\n PXP, NYSE, -22.64, 2013-02-22\n XEC, NYSE, 9.26, 2013-02-20\n IAG, NYSE, -11.11, 2013-02-21\n TS, NYSE, -16.44, 2013-02-22\n EGO, NYSE, 6.67, 2013-02-23\n JNS, NYSE, 35.71, 2013-01-25\n PFS, NYSE, 7.41, 2013-02-02\n ENH, NYSE, 21.68, 2013-02-08\n IHG, NYSE, 5.56, 2013-02-20\n CNX, NYSE, 95.45, 2013-02-01\n AMT, NYSE, -17.07, 2013-02-27\n ABG, NYSE, 10.77, 2013-02-20\n LII, NYSE, 0.00, 2013-02-06\n SRE, NYSE, 11.34, 2013-02-27\n AEE, NYSE, -36.36, 2013-02-21\n PLD, NYSE, 0.00, 2013-02-07\n SAH, NYSE, 4.00, 2013-02-21\n GPI, NYSE, -17.50, 2013-02-20\n FIX, NYSE, -11.11, 2013-03-01\n MMS, NYSE, 12.50, 2013-02-08\n SRI, NYSE, -28.57, 2013-03-02\n RTEC, NYSE, 6.25, 2013-02-05\n NOV, NYSE, 3.47, 2013-02-02\n DF, NYSE, 33.33, 2013-02-14\n SAM, NYSE, 1.63, 2013-02-21\n RL, NYSE, 8.60, 2013-02-07\n FLR, NYSE, 132.35, 2013-02-21\n ALL, NYSE, 942.86, 2013-02-07\n ATI, NYSE, 5.88, 2013-01-24\n EE, NYSE, -14.29, 2013-02-20\n AIT, NYSE, 0.00, 2013-02-01\n CHH, NYSE, 9.76, 2013-02-12\n FMS, NYSE, 105.77, 2013-02-27\n BCO, NYSE, -7.69, 2013-02-02\n CBB, NYSE, -125.00, 2013-02-28\n MWW, NYSE, 0.00, 2013-02-08\n PSA, NYSE, 5.68, 2013-02-22\n E, NYSE, 2.83, 2013-02-16\n JPM, NYSE, 15.83, 2013-01-17\n USB, NYSE, 1.35, 2013-01-17\n HON, NYSE, 0.92, 2013-01-26\n ITG, NYSE, 100.00, 2013-02-01\n ARB, NYSE, 6.25, 2013-02-26\n APL, NYSE, 0.00, 2013-02-19\n AVA, NYSE, -42.22, 2013-02-21\n AXS, NYSE, 64.96, 2013-02-05\n CHT, NYSE, 5.26, 2013-01-31\n MOH, NYSE, 145.45, 2013-02-08\n CVD, NYSE, 2.82, 2013-01-25\n AHT, NYSE, 2.63, 2013-02-28\n GPK, NYSE, 12.50, 2013-02-08\n CNO, NYSE, 8.70, 2013-02-12\n AUQ, NYSE, -28.57, 2013-03-26\n JRN, NYSE, 34.62, 2013-03-08\nGRP.U, NYSE, -14.92, 2013-03-06\n NFP, NYSE, 11.43, 2013-02-15\n CRI, NYSE, 2.30, 2013-02-28\n FMD, NYSE, -20.00, 2013-02-08\n FPO, NYSE, 10.34, 2013-02-22\n TRQ, NYSE, -350.00, 2013-03-26\n WLL, NYSE, 9.21, 2013-02-28\n AEL, NYSE, 14.63, 2013-02-21\n AHL, NYSE, 87.60, 2013-02-08\n AUY, NYSE, -3.70, 2013-02-21\n CMP, NYSE, 0.00, 2013-02-07\n KRO, NYSE, -400.00, 2013-03-13\n TPX, NYSE, 9.09, 2013-01-25\n UTI, NYSE, 75.00, 2013-02-01\n PJC, NYSE, 31.34, 2013-01-31\n TRW, NYSE, 14.81, 2013-02-16\n AIZ, NYSE, 122.58, 2013-02-07\n HTH, NYSE, 62.50, 2013-03-16\n ETP, NYSE, 0.00, 2013-02-21\n SMI, NYSE, 500.00, 2013-02-07\n LSE, NYSE, -6.25, 2013-02-16\n BBD, NYSE, -2.63, 2013-01-29\n NRG, NYSE, 124.14, 2013-02-28\n HOS, NYSE, 29.17, 2013-02-07\n ABR, NYSE, 160.00, 2013-02-16\n FHN, NYSE, 0.00, 2013-01-19\n AGO, NYSE, 32.39, 2013-02-28\n HSP, NYSE, 1.85, 2013-02-14\n HNI, NYSE, -6.98, 2013-02-06\n GHL, NYSE, -32.43, 2013-01-24\n XPO, NYSE, -14.00, 2013-02-28\n CVO, NYSE, 23.08, 2013-02-28\n CHE, NYSE, 16.92, 2013-02-19\n GNW, NYSE, 30.77, 2013-02-06\n CBG, NYSE, 12.24, 2013-02-07\n SFL, NYSE, -26.67, 2013-02-26\n NEU, NYSE, -15.57, 2013-01-29\n GOL, NYSE, -109.09, 2013-03-26\n CAB, NYSE, 4.17, 2013-02-15\n LTM, NYSE, 1.82, 2013-02-22\n VVI, NYSE, 10.53, 2013-02-02\n WCG, NYSE, 0.00, 2013-02-14\n HEP, NYSE, -2.63, 2013-02-22\n DPZ, NYSE, 8.47, 2013-03-01\n BDC, NYSE, 9.86, 2013-02-08\n EGY, NYSE, -171.43, 2013-03-15\n LPL, NYSE, 2.63, 2013-02-22\n ENS, NYSE, 12.82, 2013-02-07\n BMR, NYSE, 5.88, 2013-02-06\n ACC, NYSE, 9.26, 2013-02-13\n KRG, NYSE, -9.09, 2013-02-08\n WLK, NYSE, 13.60, 2013-02-20\n EXR, NYSE, 4.65, 2013-02-22\n CNS, NYSE, 16.67, 2013-01-24\n IOC, NYSE, 264.29, 2013-02-28\n STON, 
NYSE, -233.33, 2013-03-16\n CPL, NYSE, 38.10, 2013-03-13\n TPGI, NYSE, -114.29, 2013-02-14\n SHO, NYSE, -3.33, 2013-02-20\n CUBE, NYSE, 5.00, 2013-02-22\n NRF, NYSE, 170.37, 2013-02-15\n BBW, NYSE, -68.29, 2013-02-15\n DLR, NYSE, 4.31, 2013-02-16\n NWE, NYSE, 2.63, 2013-02-15\n ORA, NYSE, 200.00, 2013-02-28\n NP, NYSE, 5.26, 2013-02-21\n SMA, NYSE, -21.05, 2013-02-22\n BBG, NYSE, 25.00, 2013-02-22\n BXC, NYSE, -163.16, 2013-02-14\n KNL, NYSE, 32.14, 2013-02-06\n LVS, NYSE, -8.47, 2013-01-31\n HLF, NYSE, 0.96, 2013-02-20\n MIC, NYSE, -20.41, 2013-02-21\n PHH, NYSE, -11.54, 2013-02-07\n CE, NYSE, 6.35, 2013-01-29\n EDR, NYSE, 0.00, 2013-02-20\n WTI, NYSE, 8.33, 2013-02-27\n ARC, NYSE, -100.00, 2013-03-01\n PBH, NYSE, 8.82, 2013-02-08\n HUN, NYSE, 0.00, 2013-02-13\n DLB, NYSE, 4.44, 2013-01-30\n DSX, NYSE, -33.33, 2013-03-15\n LAZ, NYSE, 84.85, 2013-02-08\n TGP, NYSE, 1.82, 2013-02-22\n TLP, NYSE, -43.48, 2013-03-13\n DRH, NYSE, 16.00, 2013-03-01\n HTGC, NYSE, 8.70, 2013-03-01\n KFN, NYSE, 5.26, 2013-02-06\n THS, NYSE, 0.00, 2013-02-22\n NSR, NYSE, -12.50, 2013-02-06\n WAL, NYSE, 0.00, 2013-01-25\n SLW, NYSE, 2.04, 2013-03-22\n MPW, NYSE, 0.00, 2013-02-08\nRDS.B, NYSE, 16.00, 2013-02-01\n GNK, NYSE, -24.71, 2013-02-21\n MFB, NYSE, 4.76, 2013-03-07\nRDS.A, NYSE, 9.95, 2013-02-01\n ITC, NYSE, 0.93, 2013-02-28\n FTK, NYSE, -158.82, 2013-03-14\n PIKE, NYSE, 168.00, 2013-02-06\n ALJ, NYSE, 0.00, 2013-03-07\n DRC, NYSE, -4.55, 2013-03-01\n STN, NYSE, 8.06, 2013-02-22\n SSW, NYSE, -6.90, 2013-03-06\n CF, NYSE, 3.41, 2013-02-20\n HPY, NYSE, 0.00, 2013-02-08\n ACCO, NYSE, 0.00, 2013-02-14\n ROC, NYSE, -6.25, 2013-02-20\n WPZ, NYSE, -28.57, 2013-02-20\n LCC, NYSE, 44.44, 2013-01-24\n GLP, NYSE, 58.82, 2013-03-15\n AMP, NYSE, 15.54, 2013-01-31\n DHT, NYSE, 108.33, 2013-01-30\n FNF, NYSE, 17.86, 2013-02-20\n NM, NYSE, 20.00, 2013-02-20\n CCO, NYSE, 25.00, 2013-02-20\n BWP, NYSE, 0.00, 2013-02-12\n ICE, NYSE, 5.14, 2013-02-07\n BKD, NYSE, -57.14, 2013-02-12\n AAV, NYSE, 350.00, 2013-03-28\n BAS, NYSE, -42.11, 2013-02-20\n CPA, NYSE, -9.87, 2013-02-07\n LYV, NYSE, -147.06, 2013-02-27\n WNR, NYSE, 5.84, 2013-03-01\n CMG, NYSE, 0.00, 2013-02-06\n RGP, NYSE, -180.00, 2013-02-21\n KOP, NYSE, 11.86, 2013-02-15\n UAL, NYSE, -7.41, 2013-01-25\n ETE, NYSE, -90.91, 2013-02-21\n RSO, NYSE, -17.65, 2013-03-05\n XCO, NYSE, 6.25, 2013-02-21\n PAC, NYSE, 41.18, 2013-02-28\n NYX, NYSE, 10.26, 2013-02-06\n TDG, NYSE, 51.65, 2013-02-05\n BMA, NYSE, 18.40, 2013-02-15\n THI, NYSE, -2.82, 2013-02-22\n BTE, NYSE, -40.48, 2013-03-08\n CNH, NYSE, 29.58, 2013-02-01\n GLA, NYSE, 67.44, 2013-02-14\n POR, NYSE, -9.52, 2013-02-23\n HIL, NYSE, -100.00, 2013-03-12\n HVB, NYSE, -20.00, 2013-02-01\n KS, NYSE, 0.00, 2013-02-14\n HK, NYSE, 0.00, 2013-03-01\n DCP, NYSE, 59.62, 2013-02-28\n DK, NYSE, 10.10, 2013-03-08\n CODI, NYSE, 14.81, 2013-03-07\n VG, NYSE, 25.00, 2013-02-14\n MA, NYSE, 1.46, 2013-02-01\n MWA, NYSE, -200.00, 2013-02-06\n KOG, NYSE, 14.29, 2013-03-01\n PWE, NYSE, -500.00, 2013-02-15\n PGTI, NYSE, 100.00, 2013-02-21\n AWH, NYSE, 16.23, 2013-02-14\n NSH, NYSE, -65.71, 2013-02-02\n WYN, NYSE, 5.00, 2013-02-07\n WNS, NYSE, 0.00, 2013-01-17\n AYR, NYSE, 36.84, 2013-02-22\n EVR, NYSE, 55.77, 2013-01-31\n HBI, NYSE, 7.00, 2013-02-06\n WU, NYSE, 20.00, 2013-02-13\n OC, NYSE, -31.25, 2013-02-21\n MR, NYSE, 2.08, 2013-02-26\n DAC, NYSE, -21.43, 2013-02-12\n AWI, NYSE, 3.03, 2013-02-20\n SUSS, NYSE, 444.44, 2013-02-28\n DEI, NYSE, 0.00, 2013-02-13\n OB, NYSE, -200.00, 2013-02-06\n SBH, NYSE, -5.88, 2013-02-08\n EBS, NYSE, -4.35, 
2013-03-08\n KBR, NYSE, 122.22, 2013-02-21\n AER, NYSE, 30.95, 2013-02-21\n NOA, NYSE, -11.11, 2013-02-06\n SPR, NYSE, -2.27, 2013-02-13\n ANW, NYSE, 0.00, 2013-02-28\n DCT, NYSE, 10.00, 2013-02-08\n SE, NYSE, -3.03, 2013-02-06\n TOO, NYSE, 16.67, 2013-02-22\n TSL, NYSE, -39.77, 2013-02-27\n TWC, NYSE, 1.95, 2013-02-01\n MVO, NYSE, -5.06, 2013-03-15\n CO, NYSE, 40.00, 2013-02-27\n EXK, NYSE, -45.83, 2013-03-13\n EIG, NYSE, -25.00, 2013-02-28\n HF, NYSE, 21.62, 2013-03-07\n CEL, NYSE, 34.78, 2013-03-05\n FIG, NYSE, 53.85, 2013-02-28\n NGLS, NYSE, 0.00, 2013-02-15\n TCAP, NYSE, 3.64, 2013-03-07\n GFA, NYSE, -483.33, 2013-03-12\n BR, NYSE, -5.56, 2013-02-08\n SCR, NYSE, 85.71, 2013-03-08\n CNK, NYSE, -12.82, 2013-02-21\n DAL, NYSE, 0.00, 2013-01-23\n ORN, NYSE, 250.00, 2013-03-01\n ACM, NYSE, 9.09, 2013-02-06\n JMP, NYSE, 62.50, 2013-02-14\n SLH, NYSE, 1.69, 2013-02-08\n CLR, NYSE, 16.85, 2013-02-28\n BGS, NYSE, -17.95, 2013-02-15\n STAR, NYSE, 12.50, 2013-02-27\n YGE, NYSE, -74.07, 2013-03-05\n DFS, NYSE, -9.40, 2013-03-06\n TEL, NYSE, 1.56, 2013-01-24\n BX, NYSE, 25.53, 2013-02-01\n SEP, NYSE, 8.11, 2013-02-06\n BZ, NYSE, -30.00, 2013-02-27\n PPO, NYSE, -28.26, 2013-02-21\n PRO, NYSE, 25.00, 2013-02-13\n WBC, NYSE, 13.68, 2013-02-16\n DHX, NYSE, 7.14, 2013-01-31\n PMC, NYSE, 13.79, 2013-02-08\n HGG, NYSE, 0.00, 2013-02-01\n OWW, NYSE, -14.29, 2013-02-15\n VR, NYSE, 35.58, 2013-02-01\n CXO, NYSE, -5.88, 2013-02-21\n G, NYSE, 4.76, 2013-02-08\n EJ, NYSE, 160.00, 2013-03-13\n WX, NYSE, 32.00, 2013-03-08\n CMLP, NYSE, -50.00, 2013-02-06\n VMW, NYSE, -5.56, 2013-01-29\n CZZ, NYSE, 63.64, 2013-02-08\n CGA, NYSE, -3.23, 2013-02-09\n TDC, NYSE, 5.71, 2013-02-08\n FLY, NYSE, 137.65, 2013-03-08\n DUF, NYSE, 6.25, 2013-02-26\n MAIN, NYSE, 12.00, 2013-03-08\n REN, NYSE, -50.00, 2013-03-08\n TGH, NYSE, 9.57, 2013-02-13\n DFT, NYSE, -5.00, 2013-02-07\n RF, NYSE, 10.00, 2013-01-23\n PZN, NYSE, -22.22, 2013-02-13\n LL, NYSE, 19.05, 2013-02-21\n NMM, NYSE, 0.00, 2013-01-25\n OZM, NYSE, 5.48, 2013-02-08\n ES, NYSE, -5.08, 2013-02-20\n MSCI, NYSE, -1.89, 2013-02-08\n ARR, NYSE, -18.52, 2013-02-23\n KW, NYSE, 275.00, 2013-03-13\n GTS, NYSE, -10.17, 2013-02-07\n FOR, NYSE, 222.22, 2013-02-14\n LRN, NYSE, 4.35, 2013-02-06\n TNK, NYSE, -125.00, 2013-02-22\n N, NYSE, 21.43, 2013-02-01\n DAN, NYSE, 5.56, 2013-02-22\n BIP, NYSE, 12.07, 2013-02-09\n CPN, NYSE, -500.00, 2013-02-14\n SOL, NYSE, 2.70, 2013-03-15\n PM, NYSE, 1.64, 2013-02-08\n HI, NYSE, 7.89, 2013-02-05\n V, NYSE, 2.25, 2013-02-07\n IPI, NYSE, 0.00, 2013-02-14\n AWK, NYSE, -14.29, 2013-02-27\n HTS, NYSE, 37.84, 2013-02-13\n DPS, NYSE, -4.71, 2013-02-14\n CFX, NYSE, 7.69, 2013-02-07\n WES, NYSE, -27.91, 2013-02-28\n SB, NYSE, -10.00, 2013-02-21\n LO, NYSE, 3.95, 2013-02-14\n LPS, NYSE, 10.45, 2013-02-08\n FF, NYSE, -31.82, 2013-03-19\n NNA, NYSE, 150.00, 2013-02-13\n EPB, NYSE, 14.55, 2013-01-17\n JBT, NYSE, 3.23, 2013-03-07\n DL, NYSE, 33.33, 2013-02-27\n RAX, NYSE, -4.55, 2013-02-13\n HCI, NYSE, 67.61, 2013-03-06\n EC, NYSE, -20.47, 2013-02-16\n CLW, NYSE, 10.53, 2013-02-21\n MJN, NYSE, 5.88, 2013-02-01\n EPC, NYSE, 1.85, 2013-02-01\n BPI, NYSE, -3.33, 2013-03-13\n RST, NYSE, 55.56, 2013-03-01\n DGI, NYSE, 92.31, 2013-02-27\n SWI, NYSE, 10.34, 2013-02-05\n CYS, NYSE, -46.15, 2013-02-07\n IVR, NYSE, 20.31, 2013-02-06\n BUD, NYSE, -5.08, 2013-02-28\n PMT, NYSE, -2.35, 2013-02-08\n STWD, NYSE, 15.38, 2013-02-28\n CFN, NYSE, -16.98, 2013-02-09\n SPB, NYSE, 71.43, 2013-02-07\n ARI, NYSE, -10.34, 2013-02-28\n CLNY, NYSE, -13.89, 2013-03-07\n ART, NYSE, 
300.00, 2013-02-15\n SEM, NYSE, 12.00, 2013-02-22\n BSBR, NYSE, 578.57, 2013-03-28\n DOLE, NYSE, -6100.00, 2013-03-13\n VSI, NYSE, 0.00, 2013-02-27\n TWO, NYSE, -15.15, 2013-02-07\n CVE, NYSE, -14.29, 2013-02-15\n H, NYSE, 81.82, 2013-02-14\n LEA, NYSE, 7.25, 2013-02-02\n CLD, NYSE, 8.00, 2013-02-14\n AOL, NYSE, 7.50, 2013-02-09\n CHSP, NYSE, 5.13, 2013-02-22\n PEB, NYSE, 0.00, 2013-02-22\n CIT, NYSE, 60.94, 2013-01-30\n KAR, NYSE, -4.55, 2013-02-21\n CIE, NYSE, -66.67, 2013-02-27\n TMH, NYSE, 8.33, 2013-02-06\n KRA, NYSE, -300.00, 2013-02-28\n SYA, NYSE, -29.41, 2013-02-05\n TRNO, NYSE, -162.50, 2013-02-16\n PDM, NYSE, -2.70, 2013-02-08\n GNRC, NYSE, 26.09, 2013-02-15\n ACW, NYSE, -2.17, 2013-03-07\n BALT, NYSE, -11.76, 2013-02-21\n ST, NYSE, 2.17, 2013-01-31\n SEMG, NYSE, 55.56, 2013-03-01\n CALX, NYSE, 20.00, 2013-02-06\n MXL, NYSE, -57.14, 2013-02-06\n STNG, NYSE, -60.00, 2013-02-26\n PRI, NYSE, -1.43, 2013-02-08\n SDRL, NYSE, -93.65, 2013-03-01\n CLDT, NYSE, 0.00, 2013-02-20\n EXL, NYSE, 0.00, 2013-02-28\n LYB, NYSE, -0.88, 2013-02-02\n PNG, NYSE, 7.14, 2013-02-07\n PLOW, NYSE, -25.00, 2013-03-12\n SIX, NYSE, 198.00, 2013-02-21\n NKA, NYSE, 1066.67, 2013-02-01\n RRTS, NYSE, 0.00, 2013-02-07\n JKS, NYSE, -332.48, 2013-04-11\n CODE, NYSE, -13.64, 2013-01-30\n FAF, NYSE, 44.64, 2013-02-22\n QEP, NYSE, 3.13, 2013-02-20\n OAS, NYSE, 6.52, 2013-02-26\n VPG, NYSE, 15.38, 2013-02-13\n HPP, NYSE, 9.52, 2013-03-07\n FN, NYSE, 9.09, 2013-02-05\n ECT, NYSE, 65.85, 2013-03-16\n QUAD, NYSE, -6.67, 2013-03-05\n KKR, NYSE, 54.84, 2013-02-08\n RLD, NYSE, 20.00, 2013-02-07\n AMRC, NYSE, 44.44, 2013-03-19\n GDOT, NYSE, 50.00, 2013-02-01\n AT, NYSE, -160.00, 2013-03-01\n ENV, NYSE, 0.00, 2013-02-15\n IL, NYSE, 200.00, 2013-02-22\n WSR, NYSE, -12.00, 2013-03-13\n SFUN, NYSE, 35.71, 2013-02-09\n COR, NYSE, 5.00, 2013-02-23\n VC, NYSE, 20.62, 2013-03-01\n CCSC, NYSE, -20.00, 2013-03-07\n CCG, NYSE, 0.00, 2013-02-27\n EFC, NYSE, -72.73, 2013-02-14\n TOWR, NYSE, 183.33, 2013-02-16\n CHMT, NYSE, -53.13, 2013-02-26\n HBM, NYSE, 200.00, 2013-02-21\n EXAM, NYSE, 55.56, 2013-02-28\n XUE, NYSE, 7.69, 2013-02-28\n CMRE, NYSE, 6.67, 2013-01-24\n NOAH, NYSE, 20.00, 2013-02-26\n IPHI, NYSE, -40.00, 2013-02-05\n BITA, NYSE, 33.33, 2013-03-08\n BAH, NYSE, 11.11, 2013-01-31\n GM, NYSE, -2.04, 2013-02-15\n TROX, NYSE, -60.00, 2013-02-21\n DANG, NYSE, 20.00, 2013-03-08\n YOKU, NYSE, 9.09, 2013-03-01\n FRC, NYSE, -16.44, 2013-01-17\n RFP, NYSE, 52.38, 2013-02-13\n ISS, NYSE, 15.38, 2013-03-09\n WD, NYSE, -14.29, 2013-03-07\n FLT, NYSE, 10.00, 2013-02-08\n GCAP, NYSE, -325.00, 2013-03-13\n FRF, NYSE, -25.93, 2013-03-29\n SWFT, NYSE, 46.15, 2013-01-24\n AG, NYSE, -10.34, 2013-02-27\n QRE, NYSE, -174.07, 2013-03-07\n AAT, NYSE, 11.76, 2013-02-20\n MCC, NYSE, 5.41, 2013-02-07\n NLSN, NYSE, 3.51, 2013-02-12\n AGRO, NYSE, -71.43, 2013-03-22\n BKU, NYSE, 27.08, 2013-01-30\n INXN, NYSE, -38.89, 2013-02-28\n NPTN, NYSE, 16.67, 2013-02-22\n INN, NYSE, 25.00, 2013-02-27\n KMI, NYSE, -5.88, 2013-01-17\n HCA, NYSE, 9.64, 2013-02-05\n MX, NYSE, 135.21, 2013-01-31\n HII, NYSE, 8.89, 2013-02-28\n QIHU, NYSE, 175.00, 2013-03-06\n APO, NYSE, 119.48, 2013-02-09\n GNC, NYSE, 8.70, 2013-02-15\n SDT, NYSE, 11.48, 2013-03-16\n UAN, NYSE, 16.67, 2013-02-28\n ARCO, NYSE, 5.00, 2013-03-09\n ELLI, NYSE, 36.36, 2013-02-15\n TMS, NYSE, -23.81, 2013-02-15\n SQNS, NYSE, -16.00, 2013-02-08\n STAG, NYSE, 17.24, 2013-02-21\n AL, NYSE, 8.33, 2013-03-01\n TLLP, NYSE, 10.42, 2013-02-12\n RENN, NYSE, 14.29, 2013-03-12\n NQ, NYSE, 800.00, 2013-03-07\n THR, NYSE, 
-14.29, 2013-02-08\n KOS, NYSE, 125.00, 2013-02-26\n RLJ, NYSE, 4.35, 2013-02-28\n NGL, NYSE, -7.41, 2013-02-16\n FENG, NYSE, 100.00, 2013-03-07\n LNKD, NYSE, 900.00, 2013-02-08\n NMFC, NYSE, 5.88, 2013-03-07\n ACTV, NYSE, 5.26, 2013-02-15\n TAOM, NYSE, 700.00, 2013-03-15\n RATE, NYSE, -60.00, 2013-02-13\n VHS, NYSE, -22.22, 2013-01-31\n MPC, NYSE, 8.13, 2013-01-31\n MITT, NYSE, -1.16, 2013-03-06\n OILT, NYSE, 0.00, 2013-03-07\n SXC, NYSE, 14.71, 2013-02-06\n AMTG, NYSE, -8.57, 2013-03-07\n AMID, NYSE, -2500.00, 2013-04-17\n WAIR, NYSE, -7.41, 2013-01-30\n PER, NYSE, -7.58, 2013-03-02\n PPP, NYSE, -44.44, 2013-02-22\n FNV, NYSE, -8.33, 2013-03-20\n FSM, NYSE, 16.67, 2013-03-21\n FBHS, NYSE, 4.55, 2013-02-01\n XLS, NYSE, 4.44, 2013-03-02\n XYL, NYSE, 2.17, 2013-02-08\n NDRO, NYSE, 4.76, 2013-03-19\n RNF, NYSE, -33.33, 2013-03-20\n VAC, NYSE, 25.53, 2013-02-22\n CHKR, NYSE, -7.25, 2013-03-16\n PACD, NYSE, 14.29, 2013-02-28\n INVN, NYSE, 0.00, 2013-01-24\n DLPH, NYSE, 3.45, 2013-02-06\n MN, NYSE, 0.00, 2013-02-14\n RRMS, NYSE, -25.00, 2013-03-01\n WPX, NYSE, -400.00, 2013-03-01\n LPI, NYSE, 0.00, 2013-03-13\n SN, NYSE, -80.00, 2013-03-07\n KORS, NYSE, 60.00, 2013-02-13\n BCEI, NYSE, -7.89, 2013-03-15\n BOXC, NYSE, 4.78, 2013-01-29\n PVG, NYSE, -25.00, 2013-03-06\n POST, NYSE, 30.43, 2013-02-08\n SLCA, NYSE, 32.26, 2013-02-27\n MTDR, NYSE, -116.67, 2013-03-14\n GWAY, NYSE, -200.00, 2013-02-13\n EPAM, NYSE, -10.81, 2013-02-28\n RNDY, NYSE, 5.56, 2013-03-01\n CPAC, NYSE, -13.33, 2013-02-21\n PRLB, NYSE, 7.69, 2013-02-14\n YELP, NYSE, -50.00, 2013-02-07\n NSM, NYSE, 7.58, 2013-03-08\n ALSN, NYSE, 257.14, 2013-02-20\n DWRE, NYSE, 350.00, 2013-02-15\n VNTV, NYSE, 16.13, 2013-02-21\n ET, NYSE, 34.78, 2013-02-22\n VIPS, NYSE, 1100.00, 2013-02-22\n VCRA, NYSE, -33.33, 2013-02-28\n RM, NYSE, -1.89, 2013-02-28\n BNNY, NYSE, 0.00, 2013-02-12\n MM, NYSE, 200.00, 2013-02-20\n RXN, NYSE, -15.00, 2013-02-12\n GLOG, NYSE, -20.00, 2013-02-28\n PBA, NYSE, 44.44, 2013-03-02\n RPAI, NYSE, 15.79, 2013-02-20\n OAK, NYSE, 63.33, 2013-02-15\n FET, NYSE, -3.45, 2013-02-15\n MRC, NYSE, 17.02, 2013-02-22\n PSX, NYSE, 21.18, 2013-01-31\n TUMI, NYSE, 0.00, 2013-03-21\n ACRE, NYSE, -38.10, 2013-04-02\n EVER, NYSE, 17.24, 2013-01-31\n PDH, NYSE, -13.79, 2013-02-07\n WMC, NYSE, 3.23, 2013-04-03\n WAGE, NYSE, 0.00, 2013-02-21\n HTA, NYSE, 0.00, 2013-02-21\n ALEX, NYSE, 42.86, 2013-02-20\n BKW, NYSE, 53.33, 2013-02-16\n EQM, NYSE, 51.22, 2013-01-25\n NOW, NYSE, 38.46, 2013-01-31\n EGL, NYSE, 18.46, 2013-03-13\n NGVC, NYSE, 25.00, 2013-02-01\n NTI, NYSE, -25.00, 2013-03-14\n AMRE, NYSE, 4.35, 2013-02-20\n GMED, NYSE, 15.79, 2013-02-28\n MANU, NYSE, -46.43, 2013-02-15\n HCLP, NYSE, -28.57, 2013-02-01\n ADT, NYSE, 4.76, 2013-01-31\n TRLA, NYSE, -20.00, 2013-02-13\n SRC, NYSE, 8.82, 2013-02-28\n NBHC, NYSE, -14.29, 2013-01-29\n BSMX, NYSE, -4.17, 2013-02-19\n HY, NYSE, 14.53, 2013-02-20\n SMLP, NYSE, 40.00, 2013-03-14\n DYN, NYSE, -1714.29, 2013-03-15\n LXFR, NYSE, 43.75, 2013-03-12\n LOCK, NYSE, 16.67, 2013-02-21\n JMI, NYSE, 97.78, 2013-03-22\n BERY, NYSE, -40.00, 2013-02-01\n FLTX, NYSE, 0.00, 2013-02-21\n ANFI, NYSE, 30.77, 2013-02-26\n SSTK, NYSE, -100.00, 2013-02-22\n SDLP, NYSE, 90.91, 2013-03-01\n MPLX, NYSE, -25.00, 2013-01-31\n WWAV, NYSE, 5.88, 2013-02-14\n SXE, NYSE, -4121.43, 2013-03-29\n DKL, NYSE, -5.56, 2013-03-06\n RKUS, NYSE, -20.00, 2013-02-13\n WGP, NYSE, 57.14, 2013-02-28\n PBF, NYSE, -92.31, 2013-03-01\n SBY, NYSE, 0.00, 2013-03-01\n RIOM, NYSE, 77.78, 2013-03-29\n BFAM, NYSE, -1186.36, 2013-03-27\n ZTS, 
NYSE, -79.41, 2013-03-29\n DDC, NYSE, -39.13, 2013-04-04\n ABM, NYSE, 18.18, 2013-03-05\n ANN, NYSE, 0.00, 2013-03-09\n BBY, NYSE, 5.81, 2013-03-02\n BF.B, NYSE, 4.29, 2013-03-07\n BKE, NYSE, 2.40, 2013-03-15\n BNS, NYSE, -3.17, 2013-03-06\n BRC, NYSE, -22.45, 2013-02-22\n CATO, NYSE, -3.57, 2013-03-22\n COO, NYSE, 2.50, 2013-03-08\n CPB, NYSE, 6.06, 2013-02-16\n CFI, NYSE, 10.34, 2013-02-28\n DCI, NYSE, -10.53, 2013-02-26\n DDS, NYSE, -1.03, 2013-02-26\n DE, NYSE, 17.02, 2013-02-14\n DY, NYSE, 50.00, 2013-02-27\n EV, NYSE, -3.85, 2013-02-21\n ENZ, NYSE, -133.33, 2013-03-13\n ESL, NYSE, 13.11, 2013-03-01\nFCE.A, NYSE, 9.09, 2013-03-28\n M, NYSE, 3.54, 2013-02-27\n GCO, NYSE, 1.41, 2013-03-09\n GPS, NYSE, 2.82, 2013-03-01\n HD, NYSE, 4.69, 2013-02-27\n HEI, NYSE, -12.50, 2013-02-21\n HNZ, NYSE, 10.00, 2013-02-28\n HOV, NYSE, -66.67, 2013-03-07\n HRB, NYSE, -633.33, 2013-03-08\n HRL, NYSE, -2.04, 2013-02-22\n HPQ, NYSE, 15.49, 2013-02-22\n JCP, NYSE, -926.32, 2013-02-28\n KR, NYSE, 25.71, 2013-03-08\n KSS, NYSE, 1.84, 2013-03-01\n LB, NYSE, 1.15, 2013-02-28\n LOW, NYSE, 13.04, 2013-02-26\n LZB, NYSE, 16.67, 2013-02-20\n MDT, NYSE, 2.20, 2013-02-20\n MEI, NYSE, 350.00, 2013-03-01\n MPR, NYSE, 0.00, 2013-03-22\n NAV, NYSE, 14.11, 2013-03-08\n JWN, NYSE, 4.48, 2013-02-22\n ODC, NYSE, -35.42, 2013-03-12\n OXM, NYSE, -5.80, 2013-04-03\n PBY, NYSE, -225.00, 2013-04-16\n PLL, NYSE, 8.96, 2013-02-28\n PNY, NYSE, 1.72, 2013-03-07\n PVH, NYSE, 6.67, 2013-03-28\n THO, NYSE, 0.00, 2013-03-08\n TIF, NYSE, 2.19, 2013-03-23\n TJX, NYSE, 1.23, 2013-02-28\n TOL, NYSE, -81.82, 2013-02-21\n TTC, NYSE, 23.26, 2013-02-22\n VAL, NYSE, -9.09, 2013-02-13\n JW.A, NYSE, 13.41, 2013-03-08\n WMT, NYSE, 6.37, 2013-02-22\n WSM, NYSE, 4.69, 2013-03-20\n FL, NYSE, -11.11, 2013-03-09\n CHS, NYSE, 0.00, 2013-03-01\n REX, NYSE, -800.00, 2013-03-29\n BKS, NYSE, -136.00, 2013-03-01\n CAL, NYSE, 75.00, 2013-03-16\n SIG, NYSE, 1.44, 2013-03-29\n ZLC, NYSE, -1.92, 2013-02-22\n AEO, NYSE, 0.00, 2013-03-07\n FGP, NYSE, -10.00, 2013-03-08\n BMO, NYSE, 1.37, 2013-02-27\n RY, NYSE, 0.75, 2013-03-01\n GEF, NYSE, -13.21, 2013-02-28\n MOV, NYSE, 70.83, 2013-03-22\n SKS, NYSE, 13.33, 2013-02-27\n TD, NYSE, 1.55, 2013-03-01\n ANF, NYSE, 14.51, 2013-02-23\n CIEN, NYSE, 116.00, 2013-03-08\n KMG, NYSE, -17.65, 2013-03-09\n IRET, NYSE, -5.88, 2013-03-13\n CM, NYSE, 0.00, 2013-03-01\nHEI.A, NYSE, -18.60, 2013-02-21\n UBA, NYSE, 13.04, 2013-03-07\n KFY, NYSE, 6.90, 2013-03-07\n TGT, NYSE, 12.24, 2013-02-28\n KKD, NYSE, 0.00, 2013-03-15\n NDZ, NYSE, 0.00, 2013-03-06\n MVC, NYSE, -20.00, 2013-03-08\n CBK, NYSE, 52.17, 2013-03-14\n SJM, NYSE, 7.30, 2013-02-16\n BIG, NYSE, 5.03, 2013-03-07\n IDT, NYSE, -7.14, 2013-03-08\n JOY, NYSE, 14.91, 2013-02-28\n SSI, NYSE, -5.93, 2013-03-13\n GME, NYSE, 3.35, 2013-03-29\n DKS, NYSE, -3.74, 2013-03-12\n A, NYSE, -5.97, 2013-02-15\n MTN, NYSE, -3.51, 2013-03-07\n GES, NYSE, 10.47, 2013-03-21\n CRM, NYSE, 66.67, 2013-03-01\n NWY, NYSE, 25.00, 2013-03-22\n PAY, NYSE, 8.11, 2013-03-06\n DSW, NYSE, -4.17, 2013-03-20\n NX, NYSE, -183.33, 2013-03-08\n AGX, NYSE, 15.00, 2013-04-11\n CMD, NYSE, -5.26, 2013-03-08\n DG, NYSE, 7.78, 2013-03-26\n EXPR, NYSE, 1.35, 2013-03-14\n P, NYSE, 0.00, 2013-03-07\n GWRE, NYSE, 181.82, 2013-02-27\n BLOX, NYSE, -20.00, 2013-02-22\n TLYS, NYSE, 6.67, 2013-03-21\n PANW, NYSE, -250.00, 2013-03-01\n WDAY, NYSE, 24.00, 2013-03-08\n RH, NYSE, 4.92, 2013-04-19\n AIR, NYSE, 4.55, 2013-03-20\n ATU, NYSE, -5.41, 2013-03-21\n AZO, NYSE, 0.84, 2013-02-27\n AZZ, NYSE, 2.04, 2013-04-09\n CAG, NYSE, 
-3.51, 2013-04-04\n CLC, NYSE, 2.17, 2013-03-21\n CMC, NYSE, -80.00, 2013-03-29\n KMX, NYSE, 0.00, 2013-04-11\n FC, NYSE, -27.27, 2013-04-05\n FDO, NYSE, -0.82, 2013-04-11\n FDX, NYSE, -10.87, 2013-03-21\n FUL, NYSE, -3.92, 2013-03-28\n GIS, NYSE, 12.28, 2013-03-21\n KBH, NYSE, 30.43, 2013-03-22\n LEN, NYSE, 100.00, 2013-03-21\n LNN, NYSE, 16.28, 2013-03-28\n LUB, NYSE, -100.00, 2013-03-21\n MKC, NYSE, 1.79, 2013-04-03\n RT, NYSE, 0.00, 2013-04-11\n MSM, NYSE, 0.00, 2013-04-11\n NKE, NYSE, 8.96, 2013-03-22\n ORCL, NYSE, -1.56, 2013-03-21\n PIR, NYSE, 0.00, 2013-04-12\n PKE, NYSE, -21.43, 2013-05-10\n RPM, NYSE, 16.67, 2013-04-05\n SVU, NYSE, -200.00, 2013-04-25\n TXI, NYSE, 25.00, 2013-03-28\n UNF, NYSE, 18.75, 2013-03-28\n WGO, NYSE, 37.50, 2013-03-29\n WOR, NYSE, 6.12, 2013-03-22\n JBL, NYSE, -2.17, 2013-03-21\n GBX, NYSE, 21.62, 2013-04-05\n DRI, NYSE, 0.99, 2013-03-23\n FDS, NYSE, -21.24, 2013-03-20\n SCS, NYSE, 0.00, 2013-03-28\n SJR, NYSE, 5.56, 2013-04-13\n RHT, NYSE, 19.05, 2013-03-28\n OMN, NYSE, -75.00, 2013-04-04\n MON, NYSE, 7.06, 2013-04-04\n GPN, NYSE, -1.14, 2013-04-03\n AYI, NYSE, 0.00, 2013-04-04\n CCL, NYSE, 100.00, 2013-03-16\n CUK, NYSE, 33.33, 2013-03-16\n STZ, NYSE, 4.44, 2013-04-11\n ACN, NYSE, 3.09, 2013-03-29\n SNX, NYSE, 1.15, 2013-03-28\n TAL, NYSE, 50.00, 2013-04-24\n IHS, NYSE, 11.90, 2013-03-22\n EDU, NYSE, 63.64, 2013-04-25\n KED, NYSE, -99.22, 2013-05-02\n CORR, NYSE, -9.09, 2013-05-11\n DFS, NYSE, 18.75, 2013-04-24\n ZEP, NYSE, 54.55, 2013-04-10\n MG, NYSE, -58.82, 2013-04-09\n MOS, NYSE, 5.62, 2013-03-28\n ABT, NYSE, 0.00, 2013-04-18\n ABX, NYSE, 6.98, 2013-04-25\n AB, NYSE, 8.57, 2013-05-02\n ACO, NYSE, -10.64, 2013-04-27\n ADM, NYSE, -5.88, 2013-05-01\n AEM, NYSE, -35.29, 2013-04-26\n AEP, NYSE, 0.00, 2013-04-27\n AES, NYSE, -14.29, 2013-05-10\n AET, NYSE, 8.70, 2013-05-01\n AFL, NYSE, 4.32, 2013-04-25\n AGCO, NYSE, 35.23, 2013-05-01\n HES, NYSE, 24.20, 2013-04-25\n AIG, NYSE, 52.27, 2013-05-03\n AIN, NYSE, 0.00, 2013-05-02\n AJG, NYSE, 33.33, 2013-05-01\n ALU, NYSE, -81.82, 2013-04-27\n MATX, NYSE, 31.25, 2013-05-07\n ALK, NYSE, 15.09, 2013-04-26\n ALX, NYSE, -2.56, 2013-05-07\n BEAM, NYSE, 18.52, 2013-05-03\n AME, NYSE, 3.92, 2013-04-26\n TWX, NYSE, 9.33, 2013-05-02\n AVD, NYSE, 47.50, 2013-05-03\n AMN, NYSE, 33.33, 2013-05-03\n AN, NYSE, 7.94, 2013-04-19\n AON, NYSE, 0.00, 2013-04-27\n APA, NYSE, -9.01, 2013-05-10\n APC, NYSE, 17.39, 2013-05-07\n APD, NYSE, 0.00, 2013-04-24\n APH, NYSE, 1.16, 2013-04-19\n ARG, NYSE, 0.88, 2013-05-03\n AAN, NYSE, -5.63, 2013-04-26\n ARW, NYSE, 3.49, 2013-05-02\n ASGN, NYSE, 94.44, 2013-04-25\n ASH, NYSE, 14.10, 2013-04-25\n ASR, NYSE, -13.25, 2013-04-23\n GAS, NYSE, -2.96, 2013-05-01\n ATO, NYSE, 1.63, 2013-05-02\n ATW, NYSE, 2.40, 2013-05-02\n AU, NYSE, -26.67, 2013-05-14\n AVP, NYSE, 85.71, 2013-05-01\n AVT, NYSE, 3.45, 2013-04-26\n AVY, NYSE, 3.51, 2013-04-25\n AXP, NYSE, 3.60, 2013-04-18\n B, NYSE, -11.11, 2013-04-27\n BA, NYSE, 17.69, 2013-04-25\n BAC, NYSE, -13.04, 2013-04-17\n BAX, NYSE, 0.96, 2013-04-19\n BC, NYSE, 22.58, 2013-04-26\n OMX, NYSE, -52.17, 2013-05-08\n BCE, NYSE, 10.00, 2013-05-10\n BCR, NYSE, 0.00, 2013-04-24\n BDX, NYSE, 6.67, 2013-05-03\n BEN, NYSE, 8.47, 2013-05-01\n BGG, NYSE, -17.59, 2013-04-20\n BHE, NYSE, 10.00, 2013-04-26\n BHI, NYSE, 4.84, 2013-04-20\n BID, NYSE, -175.00, 2013-05-10\n BIO, NYSE, -38.18, 2013-05-08\n BK, NYSE, 9.62, 2013-04-18\n BKH, NYSE, 19.18, 2013-05-03\n WRB, NYSE, 0.00, 2013-04-24\n BLC, NYSE, 6.67, 2013-04-26\n BLL, NYSE, -9.38, 2013-04-26\n BLX, NYSE, -21.82, 
2013-04-18\n BMI, NYSE, -58.33, 2013-04-17\n BMS, NYSE, -1.85, 2013-04-26\n BMY, NYSE, 0.00, 2013-04-26\n BOH, NYSE, -6.90, 2013-04-23\n BXS, NYSE, 4.76, 2013-04-23\n BPL, NYSE, 19.44, 2013-05-04\nBRK.A, NYSE, 197.70, 2013-05-04\n BRO, NYSE, 5.13, 2013-04-16\n BSX, NYSE, 0.00, 2013-04-26\n MTRN, NYSE, -2.94, 2013-04-26\n CAI, NYSE, -1.32, 2013-04-25\n CAT, NYSE, -2.24, 2013-04-23\n CB, NYSE, 12.44, 2013-04-23\n CBI, NYSE, 15.49, 2013-05-03\n CBM, NYSE, 85.00, 2013-05-04\n CBU, NYSE, -1.96, 2013-04-24\n CBT, NYSE, -7.25, 2013-05-01\n CCC, NYSE, 20.00, 2013-05-07\n CCE, NYSE, 2.63, 2013-04-26\n C, NYSE, 9.32, 2013-04-16\n CCK, NYSE, 4.17, 2013-04-18\n CDE, NYSE, -74.07, 2013-05-10\n CDI, NYSE, -40.91, 2013-05-03\n CAH, NYSE, 26.32, 2013-05-03\n CFR, NYSE, -4.21, 2013-04-25\n CHD, NYSE, 5.56, 2013-05-03\n CPK, NYSE, 14.93, 2013-05-03\n CI, NYSE, 20.28, 2013-05-03\n CIA, NYSE, 0.00, 2013-05-03\n CKH, NYSE, -156.12, 2013-04-30\n CL, NYSE, 0.00, 2013-04-26\n CLF, NYSE, 87.50, 2013-04-25\n CLH, NYSE, 25.81, 2013-05-02\n CLX, NYSE, -5.66, 2013-05-02\n CMA, NYSE, 4.48, 2013-04-17\n CMO, NYSE, 3.33, 2013-04-25\n CRK, NYSE, -11.36, 2013-04-30\n CMS, NYSE, 15.22, 2013-04-26\n CNA, NYSE, 21.13, 2013-05-01\n CNW, NYSE, -29.63, 2013-05-02\n CHG, NYSE, 19.00, 2013-05-10\n CNL, NYSE, -8.33, 2013-04-30\n COG, NYSE, -20.00, 2013-04-25\n COT, NYSE, -100.00, 2013-05-02\n CP, NYSE, 2.54, 2013-04-25\n CPF, NYSE, 105.00, 2013-04-27\n CQB, NYSE, 28.57, 2013-05-08\n CR, NYSE, -0.95, 2013-04-23\nCRD.B, NYSE, -29.17, 2013-05-09\n CRS, NYSE, -9.21, 2013-04-26\n CSC, NYSE, 32.29, 2013-05-16\n CSL, NYSE, 0.00, 2013-04-25\n CTB, NYSE, 31.82, 2013-05-10\n CTL, NYSE, 10.14, 2013-05-09\n CTS, NYSE, 16.67, 2013-04-24\n CUB, NYSE, 52.24, 2013-05-03\n CMI, NYSE, -22.58, 2013-05-01\n CUZ, NYSE, -8.33, 2013-05-09\n CVC, NYSE, -185.71, 2013-05-10\n CVH, NYSE, 26.58, 2013-05-02\n CW, NYSE, 28.21, 2013-05-02\n CWT, NYSE, -200.00, 2013-05-02\n CX, NYSE, -140.00, 2013-04-27\n CYN, NYSE, -2.17, 2013-04-19\n D, NYSE, -7.78, 2013-04-26\n DBD, NYSE, -125.00, 2013-05-01\n DCO, NYSE, -18.60, 2013-05-07\n DD, NYSE, 1.30, 2013-04-24\n CVA, NYSE, -61.54, 2013-04-18\n DHR, NYSE, -1.32, 2013-04-19\n DIS, NYSE, 2.60, 2013-05-08\n DLX, NYSE, 3.41, 2013-04-26\n DNB, NYSE, 2.26, 2013-05-03\n RRD, NYSE, 12.12, 2013-04-26\n DOV, NYSE, 1.85, 2013-04-18\n DOW, NYSE, 15.00, 2013-04-26\n DRE, NYSE, 0.00, 2013-04-25\n DHI, NYSE, 60.00, 2013-04-27\n UFS, NYSE, -35.37, 2013-04-26\n DTE, NYSE, 30.10, 2013-04-27\n DUK, NYSE, -1.92, 2013-05-04\n DVN, NYSE, 17.86, 2013-05-02\n DV, NYSE, 8.43, 2013-04-24\n EAT, NYSE, 4.35, 2013-04-24\n ECL, NYSE, 3.45, 2013-05-01\n ED, NYSE, 4.85, 2013-05-03\n EDE, NYSE, 11.11, 2013-04-26\n EFX, NYSE, 0.00, 2013-04-25\n EGN, NYSE, -7.32, 2013-04-30\n EGP, NYSE, -1.30, 2013-04-19\n ELP, NYSE, 0.00, 2013-05-17\n ELY, NYSE, 65.00, 2013-04-26\n EMC, NYSE, 3.23, 2013-04-25\n EMR, NYSE, -1.28, 2013-05-08\n EOG, NYSE, 59.29, 2013-05-07\n EQT, NYSE, 26.92, 2013-04-26\n ESE, NYSE, -17.65, 2013-05-08\n ESV, NYSE, 5.43, 2013-04-30\n ETN, NYSE, 6.33, 2013-04-30\n ETR, NYSE, 0.00, 2013-04-26\n EXAR, NYSE, 16.67, 2013-05-01\n F, NYSE, 7.89, 2013-04-25\n CLGX, NYSE, 8.11, 2013-04-25\n FNB, NYSE, -4.76, 2013-04-24\n FCF, NYSE, 0.00, 2013-04-24\n FBP, NYSE, -122.22, 2013-05-04\n FICO, NYSE, -9.38, 2013-04-25\n FLO, NYSE, 6.98, 2013-05-17\n FMC, NYSE, 1.85, 2013-05-01\n FOE, NYSE, 66.67, 2013-04-25\n S, NYSE, 38.24, 2013-04-25\n NEE, NYSE, 10.89, 2013-05-01\n FRT, NYSE, 0.88, 2013-05-02\n FRX, NYSE, 47.06, 2013-04-24\n FSS, NYSE, 20.00, 
2013-05-07\n FUN, NYSE, 24.32, 2013-05-09\n FUR, NYSE, 77.78, 2013-05-03\n GBL, NYSE, 17.86, 2013-05-08\n GVA, NYSE, -103.85, 2013-05-10\n BGC, NYSE, -319.23, 2013-05-01\n GD, NYSE, 8.00, 2013-04-25\n GE, NYSE, 11.43, 2013-04-20\n RHP, NYSE, 26.47, 2013-05-08\n AXLL, NYSE, -38.02, 2013-05-08\n GGG, NYSE, 15.07, 2013-04-25\n GHM, NYSE, 28.13, 2013-06-01\n GIB, NYSE, 14.58, 2013-05-01\n GLT, NYSE, 17.65, 2013-05-01\n GLW, NYSE, 15.38, 2013-04-25\n GSK, NYSE, 6.49, 2013-04-26\n GLF, NYSE, 175.00, 2013-04-30\n GNI, NYSE, -14.58, 2013-04-26\n GPC, NYSE, -6.06, 2013-04-20\n GRA, NYSE, 0.00, 2013-04-25\n GTY, NYSE, 0.00, 2013-05-03\n GWW, NYSE, 7.69, 2013-04-17\n HAE, NYSE, 4.35, 2013-05-02\n HAL, NYSE, 17.54, 2013-04-23\n HAR, NYSE, 25.40, 2013-05-03\n HVT, NYSE, 33.33, 2013-05-02\n HRC, NYSE, -2.00, 2013-04-25\n HCC, NYSE, 31.71, 2013-05-01\n HCN, NYSE, 1.11, 2013-05-08\n HCP, NYSE, 2.78, 2013-05-01\n HOG, NYSE, 2.06, 2013-04-26\n HE, NYSE, -12.82, 2013-05-09\n HL, NYSE, -66.67, 2013-05-11\n HMA, NYSE, 0.00, 2013-05-03\n HMC, NYSE, -28.57, 2013-04-27\n HMN, NYSE, 7.84, 2013-04-25\n HFC, NYSE, -7.91, 2013-05-08\n HOT, NYSE, 43.40, 2013-05-01\n HP, NYSE, 5.43, 2013-04-26\n HLS, NYSE, 14.29, 2013-04-26\n HRS, NYSE, 0.00, 2013-05-01\n HSC, NYSE, 50.00, 2013-05-10\n HSY, NYSE, 4.81, 2013-04-26\n HUBB, NYSE, -0.90, 2013-04-19\n HUM, NYSE, 51.12, 2013-05-02\n HXL, NYSE, 4.88, 2013-04-23\n IBM, NYSE, -1.96, 2013-04-19\n IDA, NYSE, 17.54, 2013-05-03\n IEX, NYSE, 4.23, 2013-04-23\n IFF, NYSE, 5.31, 2013-05-08\n DIN, NYSE, 12.87, 2013-05-03\n INT, NYSE, 14.06, 2013-05-01\n IP, NYSE, -12.16, 2013-05-03\n IPG, NYSE, -7.69, 2013-04-20\n IO, NYSE, -85.71, 2013-05-01\n IR, NYSE, 2.44, 2013-04-24\n IRF, NYSE, 27.50, 2013-04-30\n ITW, NYSE, 0.00, 2013-04-24\n JEC, NYSE, -2.44, 2013-04-30\n JNJ, NYSE, 2.13, 2013-04-17\n JNY, NYSE, 0.00, 2013-05-02\n K, NYSE, 0.00, 2013-05-03\n KAMN, NYSE, -2.94, 2013-04-30\n KDN, NYSE, 5.71, 2013-05-10\n KEX, NYSE, 2.15, 2013-04-25\n KEY, NYSE, 5.00, 2013-04-19\n KIM, NYSE, 3.13, 2013-05-01\n KMB, NYSE, 10.45, 2013-04-20\n KEM, NYSE, -133.33, 2013-05-10\n KMT, NYSE, -8.45, 2013-04-26\n KO, NYSE, 2.22, 2013-04-17\n KSU, NYSE, 2.30, 2013-04-20\n LDR, NYSE, -9.52, 2013-05-07\n LEG, NYSE, -13.16, 2013-04-26\n LLY, NYSE, 8.57, 2013-04-25\n LM, NYSE, -13.33, 2013-05-01\n LNC, NYSE, -7.27, 2013-05-02\n LPX, NYSE, 0.00, 2013-05-08\n LXU, NYSE, -110.53, 2013-05-07\n LTC, NYSE, -1.67, 2013-05-01\n L, NYSE, 1.19, 2013-04-30\n LUV, NYSE, 133.33, 2013-04-26\n LUX, NYSE, 7.14, 2013-05-02\n MKL, NYSE, 40.11, 2013-05-01\n MAN, NYSE, 40.00, 2013-04-20\n MTW, NYSE, -35.71, 2013-05-01\n SM, NYSE, 46.43, 2013-05-01\n MAS, NYSE, -7.14, 2013-04-30\n MTZ, NYSE, 12.50, 2013-05-03\n MCD, NYSE, -0.79, 2013-04-20\n MDC, NYSE, 73.08, 2013-05-03\n MDP, NYSE, 4.35, 2013-04-26\n MDR, NYSE, -40.00, 2013-05-09\n MDU, NYSE, 36.36, 2013-05-01\n MED, NYSE, 26.47, 2013-05-09\n CVS, NYSE, 5.06, 2013-05-02\n MFC, NYSE, 18.52, 2013-05-03\n MGA, NYSE, 13.57, 2013-05-11\n MGM, NYSE, 130.00, 2013-05-03\n MMC, NYSE, 4.29, 2013-05-03\n MMM, NYSE, -2.42, 2013-04-26\n MSA, NYSE, -20.31, 2013-04-25\n MNR, NYSE, -7.69, 2013-05-09\n MO, NYSE, 1.89, 2013-04-26\n MOD, NYSE, 5.88, 2013-05-31\nMOG.A, NYSE, -1.23, 2013-04-27\n MHK, NYSE, 3.57, 2013-05-03\n MSI, NYSE, -1.79, 2013-04-25\n MCY, NYSE, 46.81, 2013-04-30\n MRK, NYSE, 8.97, 2013-05-02\n MRO, NYSE, -28.17, 2013-05-08\n POWR, NYSE, 0.00, 2013-05-09\n MTG, NYSE, -60.00, 2013-05-01\n MTB, NYSE, 6.19, 2013-04-16\n MTX, NYSE, 0.00, 2013-04-26\n MUR, NYSE, 11.34, 2013-05-02\n MYE, 
NYSE, -11.11, 2013-04-25\n NBL, NYSE, 21.31, 2013-04-26\n NBR, NYSE, 13.79, 2013-04-24\n NE, NYSE, 3.51, 2013-04-18\n NEM, NYSE, -8.97, 2013-04-30\n NFG, NYSE, 7.37, 2013-05-03\n NHI, NYSE, 4.94, 2013-05-07\n NI, NYSE, -1.43, 2013-05-01\n NJR, NYSE, 3.16, 2013-05-03\n THC, NYSE, 17.86, 2013-05-01\n NNN, NYSE, 4.35, 2013-05-03\n NOC, NYSE, 12.14, 2013-04-25\n NR, NYSE, 5.88, 2013-04-26\n NSC, NYSE, 3.39, 2013-04-24\n NUE, NYSE, 4.00, 2013-04-19\n NVR, NYSE, -9.64, 2013-04-23\n NWL, NYSE, 9.38, 2013-05-04\n NWN, NYSE, -5.41, 2013-05-03\n NYT, NYSE, -20.00, 2013-04-26\n OCR, NYSE, 4.65, 2013-04-25\n OGE, NYSE, -32.35, 2013-05-03\n OHI, NYSE, 5.08, 2013-05-08\n OI, NYSE, 7.14, 2013-04-24\n OII, NYSE, 16.95, 2013-04-24\n OKE, NYSE, -6.90, 2013-05-01\n OLN, NYSE, 10.64, 2013-04-26\n BRS, NYSE, -1.94, 2013-05-23\n OMC, NYSE, 1.33, 2013-04-19\n OMI, NYSE, 4.76, 2013-04-24\n ORB, NYSE, 43.48, 2013-04-24\n ORI, NYSE, 600.00, 2013-04-26\n OSK, NYSE, 12.94, 2013-05-01\n OXY, NYSE, 7.64, 2013-04-26\n FCFS, NYSE, 0.00, 2013-04-18\n PBI, NYSE, 0.00, 2013-05-01\n PCG, NYSE, -10.00, 2013-05-03\n PCL, NYSE, 9.38, 2013-04-30\n PCP, NYSE, 1.81, 2013-05-10\n TPC, NYSE, 34.78, 2013-05-02\n PDS, NYSE, 14.29, 2013-04-26\n PEG, NYSE, 14.86, 2013-05-01\n PEI, NYSE, 4.76, 2013-04-23\n PEP, NYSE, 8.45, 2013-04-19\n PFE, NYSE, -1.82, 2013-05-01\n PG, NYSE, 3.13, 2013-04-25\n PGR, NYSE, -4.55, 2013-04-11\n PH, NYSE, 0.60, 2013-04-26\n PHM, NYSE, 31.25, 2013-04-26\n PKD, NYSE, 200.00, 2013-05-02\n PKY, NYSE, 15.38, 2013-05-07\n PNC, NYSE, 12.10, 2013-04-18\n PNM, NYSE, -10.00, 2013-05-07\n PNR, NYSE, 3.57, 2013-04-24\n PNW, NYSE, 175.00, 2013-05-04\n POM, NYSE, -4.00, 2013-05-04\n POT, NYSE, 3.28, 2013-04-26\n PPG, NYSE, 1.28, 2013-04-19\n PPL, NYSE, 0.00, 2013-05-03\n PRGO, NYSE, -1.39, 2013-05-08\n PL, NYSE, -4.30, 2013-05-07\n PSB, NYSE, 0.00, 2013-05-07\n WTR, NYSE, 7.41, 2013-05-02\n CSH, NYSE, 8.21, 2013-04-26\n PWR, NYSE, 24.14, 2013-05-03\n PX, NYSE, 0.00, 2013-04-25\n KWR, NYSE, 14.29, 2013-04-30\n R, NYSE, 1.28, 2013-04-24\n RBC, NYSE, -6.09, 2013-05-01\n RDC, NYSE, 5.77, 2013-05-02\n HTSI, NYSE, 11.67, 2013-05-03\n RES, NYSE, -33.33, 2013-04-25\n RGS, NYSE, -90.77, 2013-05-08\n RGR, NYSE, 15.38, 2013-04-30\n RHI, NYSE, -2.44, 2013-04-24\n RJF, NYSE, -9.33, 2013-04-25\n RLI, NYSE, -1.89, 2013-04-18\n ROG, NYSE, 0.00, 2013-05-01\n ROK, NYSE, 2.31, 2013-04-25\n ROL, NYSE, -5.88, 2013-04-25\n ROP, NYSE, 4.10, 2013-04-30\n RTI, NYSE, 20.00, 2013-05-01\n RTN, NYSE, 21.88, 2013-04-26\n RYL, NYSE, 43.33, 2013-04-25\n BSAC, NYSE, -21.74, 2013-04-26\n T, NYSE, 0.00, 2013-04-24\n SCG, NYSE, 7.77, 2013-04-26\n SCHW, NYSE, -6.25, 2013-04-16\n SCL, NYSE, -4.08, 2013-05-01\n SMG, NYSE, -19.60, 2013-05-07\n SEE, NYSE, -5.56, 2013-05-02\n SF, NYSE, 1.75, 2013-05-10\n SFE, NYSE, -46.15, 2013-04-26\n SHW, NYSE, 2.78, 2013-04-19\n SJI, NYSE, -8.43, 2013-05-04\n JOE, NYSE, -200.00, 2013-05-09\n SJW, NYSE, -12.50, 2013-04-25\n SLB, NYSE, 2.02, 2013-04-20\n HSH, NYSE, 9.38, 2013-05-03\n AOS, NYSE, 24.68, 2013-04-24\n SMP, NYSE, 31.25, 2013-05-04\n SNA, NYSE, 4.48, 2013-04-19\n PII, NYSE, 5.94, 2013-04-24\n SNV, NYSE, 0.00, 2013-04-24\n SO, NYSE, -3.92, 2013-04-25\n SON, NYSE, -5.66, 2013-04-19\n SPA, NYSE, -46.15, 2013-05-08\n TRV, NYSE, 14.93, 2013-04-24\n SR, NYSE, -3.36, 2013-05-01\n NVE, NYSE, 12.50, 2013-05-04\n SCI, NYSE, 21.74, 2013-04-25\n SSP, NYSE, 58.33, 2013-05-07\n STT, NYSE, 3.23, 2013-04-20\n STI, NYSE, 3.28, 2013-04-20\n STJ, NYSE, 0.00, 2013-04-18\n STL, NYSE, 7.14, 2013-04-23\n STR, NYSE, -2.38, 2013-05-01\n STE, 
NYSE, 6.06, 2013-05-08\n SYK, NYSE, 1.98, 2013-04-25\n SUN, NYSE, -7.32, 2013-05-09\n SUP, NYSE, 5.88, 2013-05-04\n SWK, NYSE, 7.29, 2013-04-26\n SWN, NYSE, 7.69, 2013-05-03\n SWX, NYSE, 0.61, 2013-05-04\n SWY, NYSE, -2.78, 2013-04-26\n SYY, NYSE, 16.67, 2013-05-07\n TAC, NYSE, -33.33, 2013-04-24\n TNC, NYSE, -17.14, 2013-04-23\n TCB, NYSE, -15.79, 2013-04-20\n TCO, NYSE, 7.14, 2013-04-26\n TDS, NYSE, 350.00, 2013-05-04\n TDW, NYSE, 55.74, 2013-05-22\n TDY, NYSE, 10.31, 2013-04-25\n TE, NYSE, 11.76, 2013-05-01\n TER, NYSE, 200.00, 2013-04-25\n TEVA, NYSE, 1.82, 2013-05-03\n TEX, NYSE, -17.86, 2013-04-25\n TFX, NYSE, 1.98, 2013-05-01\n TEN, NYSE, 10.77, 2013-04-30\n TKR, NYSE, 0.00, 2013-04-25\n TMK, NYSE, 1.46, 2013-04-24\n TMO, NYSE, 6.20, 2013-04-25\n TOT, NYSE, -2.38, 2013-04-27\n TM, NYSE, 80.67, 2013-05-09\n TR, NYSE, -11.76, 2013-04-25\n TRN, NYSE, 13.75, 2013-05-01\n TRP, NYSE, -8.93, 2013-04-27\n TSO, NYSE, 2.82, 2013-05-02\n TSS, NYSE, -2.94, 2013-04-24\n TTI, NYSE, -40.00, 2013-05-09\n TXT, NYSE, -14.89, 2013-04-18\n TYL, NYSE, 26.09, 2013-04-25\n TSN, NYSE, -21.74, 2013-05-07\n UDR, NYSE, 3.03, 2013-05-01\n UFI, NYSE, -43.75, 2013-04-25\n UAM, NYSE, 17.65, 2013-04-30\n UHS, NYSE, 5.17, 2013-04-25\n UIL, NYSE, 3.06, 2013-05-03\n UIS, NYSE, -145.61, 2013-04-24\n UNH, NYSE, 0.00, 2013-04-19\n KMPR, NYSE, 35.85, 2013-05-03\n UNM, NYSE, 2.56, 2013-05-02\n UNP, NYSE, 3.57, 2013-04-19\n UNT, NYSE, 6.98, 2013-05-08\n URS, NYSE, -14.29, 2013-05-08\n USG, NYSE, -88.89, 2013-04-25\n MUX, NYSE, -300.00, 2013-05-10\n USM, NYSE, 214.29, 2013-05-04\n USPH, NYSE, -3.12, 2013-05-10\n UTL, NYSE, -9.20, 2013-04-24\n UTX, NYSE, -1.54, 2013-04-24\n VMI, NYSE, 15.60, 2013-04-19\n VAR, NYSE, 2.97, 2013-04-25\n CBS, NYSE, 7.35, 2013-05-02\n VLO, NYSE, 16.83, 2013-05-01\n VMC, NYSE, -24.32, 2013-05-03\n VLY, NYSE, -11.11, 2013-04-25\n VNO, NYSE, -38.38, 2013-05-07\n VSH, NYSE, 63.64, 2013-05-01\n WTS, NYSE, -14.04, 2013-05-01\n WBS, NYSE, -2.22, 2013-04-16\n WEC, NYSE, 7.04, 2013-05-01\n WFC, NYSE, 5.75, 2013-04-13\n WG, NYSE, -2400.00, 2013-05-09\n WGL, NYSE, 19.05, 2013-05-02\n WHR, NYSE, 1.03, 2013-04-25\n WMB, NYSE, -8.33, 2013-05-08\n WNC, NYSE, 0.00, 2013-05-01\n TEG, NYSE, 10.69, 2013-05-02\n WR, NYSE, 33.33, 2013-05-09\n WRE, NYSE, -4.35, 2013-04-26\n WRI, NYSE, 4.35, 2013-05-01\n WPP, NYSE, 33.33, 2013-04-30\n WSO, NYSE, 18.18, 2013-04-19\n WST, NYSE, 1.16, 2013-05-03\n WWW, NYSE, 50.00, 2013-04-17\n WY, NYSE, 18.18, 2013-04-27\n X, NYSE, -84.21, 2013-05-01\n XL, NYSE, 38.81, 2013-05-03\n XOM, NYSE, 4.43, 2013-04-26\n XRX, NYSE, 12.50, 2013-04-24\n Y, NYSE, 53.96, 2013-05-07\n HRG, NYSE, 60.00, 2013-05-10\n CRY, NYSE, 28.57, 2013-05-01\n CHK, NYSE, 30.43, 2013-05-02\n DDR, NYSE, 0.00, 2013-05-01\n ELS, NYSE, 0.71, 2013-04-23\n ALG, NYSE, 5.56, 2013-05-02\n ETH, NYSE, -22.22, 2013-04-24\n ATR, NYSE, -3.03, 2013-04-26\n GGP, NYSE, 4.17, 2013-04-30\n MSL, NYSE, 3.70, 2013-05-01\n RCL, NYSE, 84.21, 2013-04-26\n CWEI, NYSE, -61.22, 2013-04-25\n HR, NYSE, 0.00, 2013-05-02\n RGA, NYSE, 2.48, 2013-04-26\n RIG, NYSE, -7.92, 2013-05-09\n SKT, NYSE, 2.44, 2013-05-01\n TWI, NYSE, -16.28, 2013-04-25\n BDN, NYSE, 2.94, 2013-04-25\n KGC, NYSE, 25.00, 2013-05-08\n CPT, NYSE, 2.11, 2013-05-03\n SGY, NYSE, 18.84, 2013-05-07\n BFS, NYSE, -24.49, 2013-05-01\n BWA, NYSE, 6.56, 2013-04-26\n EQR, NYSE, -1.54, 2013-05-01\n CLP, NYSE, 3.03, 2013-04-26\n KOF, NYSE, -16.24, 2013-04-25\n OKS, NYSE, -27.59, 2013-05-01\n SQM, NYSE, -6.45, 2013-05-29\n BYD, NYSE, 114.29, 2013-04-25\n CBL, NYSE, 3.92, 2013-04-30\n DECK, NYSE, 
133.33, 2013-04-26\n IT, NYSE, -2.50, 2013-05-03\n HST, NYSE, 21.74, 2013-05-04\n LXP, NYSE, 0.00, 2013-05-03\n REG, NYSE, 3.23, 2013-05-08\n TUC, NYSE, -24.00, 2013-05-03\n AF, NYSE, 7.69, 2013-04-18\n BFR, NYSE, -2.56, 2013-05-11\n HHS, NYSE, 10.00, 2013-04-26\n MHO, NYSE, 28.57, 2013-04-26\n NFX, NYSE, -2.17, 2013-04-24\n SPG, NYSE, 1.99, 2013-04-27\n SU, NYSE, -1.41, 2013-04-30\n SUI, NYSE, 2.20, 2013-04-26\n TV, NYSE, -22.50, 2013-04-26\n CGI, NYSE, -26.92, 2013-04-26\n CYT, NYSE, -12.79, 2013-04-19\n EMN, NYSE, 3.18, 2013-04-26\n GRT, NYSE, 14.29, 2013-04-25\n MAA, NYSE, 5.04, 2013-05-02\n PLT, NYSE, 4.62, 2013-05-08\n BZH, NYSE, 15.38, 2013-05-03\n ELX, NYSE, 114.29, 2013-05-03\n MLM, NYSE, -69.44, 2013-05-01\n AKS, NYSE, 41.67, 2013-04-24\n ALB, NYSE, -7.00, 2013-04-18\n VRX, NYSE, 1.56, 2013-05-03\n CBR, NYSE, 0.00, 2013-05-01\n MAC, NYSE, 8.86, 2013-05-02\n RKT, NYSE, 9.80, 2013-04-24\n RYN, NYSE, 27.42, 2013-04-26\n ADC, NYSE, -2.00, 2013-04-30\nBRK.B, NYSE, 52.31, 2013-05-04\n EXP, NYSE, 5.00, 2013-05-15\n GGB, NYSE, -66.67, 2013-05-08\n SSD, NYSE, -52.38, 2013-04-26\n ESS, NYSE, -0.53, 2013-05-02\n FR, NYSE, -7.69, 2013-04-26\n HIW, NYSE, -2.90, 2013-05-01\n IMAX, NYSE, 0.00, 2013-04-26\n AIV, NYSE, 2.13, 2013-05-03\n FCH, NYSE, 0.00, 2013-05-01\n ITGR, NYSE, 2.33, 2013-04-26\n NOK, NYSE, 33.33, 2013-04-19\n GEO, NYSE, -3.51, 2013-05-09\n CLI, NYSE, 0.00, 2013-04-26\n RS, NYSE, -5.22, 2013-04-26\n CPE, NYSE, 100.00, 2013-05-10\n KNX, NYSE, 0.00, 2013-04-25\n O, NYSE, 1.69, 2013-04-26\n COF, NYSE, 17.79, 2013-04-19\n IRS, NYSE, 10.34, 2013-05-18\n MCK, NYSE, -0.43, 2013-05-08\n SWC, NYSE, 200.00, 2013-04-30\n STM, NYSE, 23.53, 2013-04-23\n TEO, NYSE, 1.30, 2013-04-30\n TRK, NYSE, -400.00, 2013-05-02\n LMT, NYSE, 23.38, 2013-04-24\n APU, NYSE, -35.48, 2013-05-16\n AGU, NYSE, -12.15, 2013-05-10\n LH, NYSE, -1.69, 2013-04-20\n DDD, NYSE, -10.00, 2013-05-01\n AFG, NYSE, 10.84, 2013-05-09\n RMD, NYSE, 3.51, 2013-04-26\n WAB, NYSE, 3.60, 2013-04-25\n CIB, NYSE, 6.78, 2013-05-08\n CAM, NYSE, -5.41, 2013-04-26\n FCX, NYSE, 1.39, 2013-04-19\n RNR, NYSE, 34.25, 2013-05-02\n AVX, NYSE, 7.14, 2013-04-25\n RWT, NYSE, 46.81, 2013-05-03\n AXE, NYSE, -6.62, 2013-04-24\n CLB, NYSE, 6.09, 2013-04-18\n MD, NYSE, 0.92, 2013-05-03\n THG, NYSE, 30.69, 2013-04-30\n BAP, NYSE, -10.94, 2013-05-07\n DO, NYSE, 10.43, 2013-04-26\n RE, NYSE, 36.11, 2013-04-23\n DST, NYSE, -6.60, 2013-04-26\n EL, NYSE, 36.36, 2013-05-03\n ESC, NYSE, -57.14, 2013-05-03\n LXK, NYSE, -7.55, 2013-04-24\n MIG, NYSE, 7.69, 2013-05-01\n WAT, NYSE, -1.83, 2013-04-24\n EME, NYSE, 2.27, 2013-04-26\n HIG, NYSE, 10.84, 2013-04-30\n ITT, NYSE, 9.30, 2013-05-03\n SPN, NYSE, 0.00, 2013-04-26\n SWM, NYSE, 8.60, 2013-05-09\n SCCO, NYSE, -4.84, 2013-04-27\n RCI, NYSE, -1.27, 2013-04-23\n EIX, NYSE, 20.31, 2013-05-01\n IRM, NYSE, 0.00, 2013-05-02\n SPH, NYSE, -4.82, 2013-05-10\n CCJ, NYSE, 0.00, 2013-05-02\n PGI, NYSE, 0.00, 2013-04-19\n CRR, NYSE, -14.61, 2013-04-26\n BVN, NYSE, -40.30, 2013-04-30\n FCN, NYSE, 13.46, 2013-05-10\n RPT, NYSE, 6.90, 2013-04-24\n TUP, NYSE, 4.42, 2013-04-25\n ASB, NYSE, 8.00, 2013-04-19\n GWR, NYSE, -10.11, 2013-05-02\n TBI, NYSE, -50.00, 2013-04-25\n FFG, NYSE, 12.66, 2013-05-03\n USNA, NYSE, 14.29, 2013-04-24\n CSV, NYSE, -3.03, 2013-05-08\n LVB, NYSE, 10.53, 2013-05-09\n ALR, NYSE, 6.25, 2013-05-10\n OCN, NYSE, 0.00, 2013-05-03\n PAA, NYSE, 37.50, 2013-05-07\n DNR, NYSE, 13.79, 2013-05-03\n HMY, NYSE, -119.23, 2013-05-04\n TGI, NYSE, 5.66, 2013-05-02\n PAG, NYSE, 1.61, 2013-04-30\n GEL, NYSE, -17.65, 
2013-05-03\n IM, NYSE, 0.00, 2013-04-26\n NUS, NYSE, 13.92, 2013-05-03\n CNI, NYSE, -1.67, 2013-04-23\n LAD, NYSE, 16.67, 2013-04-25\n NSP, NYSE, 0.00, 2013-04-30\n DGX, NYSE, -14.42, 2013-04-18\n KRC, NYSE, 0.00, 2013-05-01\n MTH, NYSE, 32.00, 2013-04-25\n NCR, NYSE, 35.00, 2013-05-01\n OFG, NYSE, 2.78, 2013-04-26\n IVZ, NYSE, 10.64, 2013-05-01\n DX, NYSE, 9.68, 2013-05-02\n FBC, NYSE, -65.98, 2013-04-24\n ALV, NYSE, 1.57, 2013-04-27\n ARE, NYSE, 0.00, 2013-04-30\n BBT, NYSE, 2.99, 2013-04-19\n CGG, NYSE, 6.25, 2013-05-04\n BXP, NYSE, -0.83, 2013-05-01\n CBD, NYSE, -23.73, 2013-05-01\n MS, NYSE, 7.02, 2013-04-19\n SRT, NYSE, -314.29, 2013-05-10\n HLX, NYSE, 38.89, 2013-04-22\n FLS, NYSE, 3.61, 2013-04-25\n MT, NYSE, -400.00, 2013-05-11\n PXD, NYSE, 5.15, 2013-05-02\n SLG, NYSE, 0.83, 2013-04-24\n NAT, NYSE, -16.22, 2013-05-14\n CSU, NYSE, -36.36, 2013-05-07\n DRQ, NYSE, 22.50, 2013-05-04\n FDP, NYSE, -24.47, 2013-05-01\n NLY, NYSE, 30.56, 2013-05-02\n TLM, NYSE, -250.00, 2013-05-02\n TSM, NYSE, 13.04, 2013-04-19\n YUM, NYSE, 12.90, 2013-04-24\n AMG, NYSE, 12.38, 2013-05-01\n EPR, NYSE, -1.05, 2013-05-01\n FE, NYSE, 10.14, 2013-05-08\n LFL, NYSE, 80.00, 2013-05-15\n MTD, NYSE, 2.79, 2013-05-03\n SID, NYSE, -66.67, 2013-05-16\n IN, NYSE, -271.43, 2013-05-04\n CBZ, NYSE, 25.64, 2013-05-03\n URI, NYSE, 11.54, 2013-04-17\n INGR, NYSE, 6.82, 2013-05-03\n RAS, NYSE, 181.82, 2013-05-03\n UNS, NYSE, 35.00, 2013-04-30\n ASI, NYSE, 18.92, 2013-05-09\n ANH, NYSE, 15.38, 2013-04-30\n OFC, NYSE, 17.07, 2013-04-27\n GPX, NYSE, 0.00, 2013-05-03\n WAC, NYSE, 1427.27, 2013-05-10\n RBA, NYSE, -13.33, 2013-05-01\n WDR, NYSE, 1.61, 2013-04-24\n LHO, NYSE, 8.00, 2013-04-18\n LNT, NYSE, 18.03, 2013-05-04\n LVLT, NYSE, 7.14, 2013-04-26\n MFA, NYSE, -4.76, 2013-05-02\n OME, NYSE, 50.00, 2013-05-08\n EQY, NYSE, 6.90, 2013-05-02\n FII, NYSE, -2.38, 2013-04-26\n FMX, NYSE, -37.89, 2013-04-25\n LLL, NYSE, 3.63, 2013-04-26\n VTR, NYSE, 4.04, 2013-04-27\n WCN, NYSE, 20.00, 2013-05-02\n AVB, NYSE, 0.74, 2013-05-01\n GIL, NYSE, 5.36, 2013-05-03\n HZO, NYSE, -92.86, 2013-04-26\n AWR, NYSE, 38.00, 2013-05-11\n CLS, NYSE, 10.00, 2013-04-24\n EPD, NYSE, 16.67, 2013-05-01\n RSG, NYSE, 15.00, 2013-04-26\n WM, NYSE, -2.44, 2013-04-25\n AKR, NYSE, 3.33, 2013-04-24\n CVG, NYSE, 17.39, 2013-05-01\n RRC, NYSE, -38.89, 2013-04-26\n SAP, NYSE, 41.51, 2013-04-20\n CCI, NYSE, 0.00, 2013-04-25\n PQ, NYSE, 100.00, 2013-05-08\n WFT, NYSE, 0.00, 2013-05-03\n CAA, NYSE, 0.00, 2013-05-03\n ENB, NYSE, 13.21, 2013-05-09\n GMK, NYSE, 60.00, 2013-04-25\n MMR, NYSE, 0.00, 2013-05-07\n PB, NYSE, 2.38, 2013-04-25\n VIV, NYSE, -20.00, 2013-05-08\n AXL, NYSE, 53.33, 2013-05-04\n BP, NYSE, 33.33, 2013-05-01\n ETM, NYSE, 0.00, 2013-05-09\n HT, NYSE, 0.00, 2013-05-01\n BYI, NYSE, 10.71, 2013-04-25\n CEB, NYSE, 1.64, 2013-05-02\n INFY, NYSE, 5.41, 2013-04-13\n JLL, NYSE, 56.52, 2013-05-01\n AZN, NYSE, 5.22, 2013-04-26\n SFG, NYSE, 33.75, 2013-04-24\n TREX, NYSE, 14.68, 2013-05-04\n GS, NYSE, 11.43, 2013-04-17\n SYX, NYSE, -157.14, 2013-05-01\n WCC, NYSE, -4.27, 2013-04-19\n JNPR, NYSE, 33.33, 2013-04-24\n RDN, NYSE, 28.57, 2013-05-02\n RAI, NYSE, 4.35, 2013-04-24\n SKX, NYSE, -27.78, 2013-05-16\n WTM, NYSE, 178.02, 2013-04-30\n NCI, NYSE, 12.50, 2013-04-26\n BLT, NYSE, -17.39, 2013-05-08\n QTM, NYSE, -33.33, 2013-05-09\n BLK, NYSE, 1.67, 2013-04-17\n CIR, NYSE, 4.00, 2013-05-03\n MSO, NYSE, 12.50, 2013-05-01\n PKG, NYSE, 10.71, 2013-04-23\n PKI, NYSE, -25.00, 2013-04-26\n WWE, NYSE, -37.50, 2013-05-03\n SNN, NYSE, -2.11, 2013-05-03\n UPS, NYSE, 2.97, 
2013-04-26\n XOXO, NYSE, 16.67, 2013-05-10\n SLF, NYSE, 7.25, 2013-05-09\n CDR, NYSE, 9.09, 2013-05-10\n EW, NYSE, -5.26, 2013-04-24\n MET, NYSE, 13.85, 2013-05-01\n FBR, NYSE, -89.47, 2013-04-24\n VVC, NYSE, -7.58, 2013-05-02\n BAM, NYSE, 70.00, 2013-05-10\n NVS, NYSE, 4.00, 2013-04-25\n BHLB, NYSE, -1.82, 2013-04-30\n CRL, NYSE, -2.82, 2013-05-02\n CYH, NYSE, 3.57, 2013-04-30\n MBT, NYSE, -13.04, 2013-06-08\n MTOR, NYSE, 500.00, 2013-05-01\n CNQ, NYSE, -44.19, 2013-05-03\n ERJ, NYSE, -62.79, 2013-04-30\n VZ, NYSE, 3.03, 2013-04-19\n EVC, NYSE, 0.00, 2013-05-03\n PBR, NYSE, 0.00, 2013-04-27\n XEL, NYSE, 11.63, 2013-05-03\n ALE, NYSE, 10.67, 2013-05-09\n HW, NYSE, -30.00, 2013-05-01\n POL, NYSE, 14.81, 2013-05-02\n COH, NYSE, 3.70, 2013-04-24\n CXW, NYSE, 6.38, 2013-05-09\n DVA, NYSE, 3.37, 2013-05-08\n EXC, NYSE, 4.41, 2013-05-02\n MCO, NYSE, 11.49, 2013-05-04\n BRFS, NYSE, 23.53, 2013-04-30\n TU, NYSE, 3.77, 2013-05-10\n WIT, NYSE, 0.00, 2013-04-20\n ERF, NYSE, 100.00, 2013-05-11\n GG, NYSE, -35.00, 2013-05-03\n HNT, NYSE, 34.15, 2013-04-30\n NYCB, NYSE, 3.85, 2013-04-25\n SXT, NYSE, 3.33, 2013-04-19\n CPG, NYSE, -20.00, 2013-05-10\n AMX, NYSE, 16.67, 2013-04-20\n MPX, NYSE, 0.00, 2013-04-25\n OIS, NYSE, -2.70, 2013-04-25\n MMP, NYSE, 4.08, 2013-05-03\n PES, NYSE, 33.33, 2013-05-01\n ABB, NYSE, -12.12, 2013-04-25\n KMR, NYSE, -3.28, 2013-05-02\n GEN, NYSE, -41.18, 2013-05-07\n ADS, NYSE, -2.88, 2013-04-19\n CVI, NYSE, 25.00, 2013-05-03\n FTI, NYSE, -6.52, 2013-04-24\n PRA, NYSE, 27.63, 2013-05-07\n STO, NYSE, -16.46, 2013-05-03\n BEL, NYSE, 41.67, 2013-05-02\n FIS, NYSE, 1.64, 2013-05-01\n COL, NYSE, 0.86, 2013-04-20\n KAI, NYSE, 20.51, 2013-04-30\n ABC, NYSE, -2.25, 2013-04-26\n BG, NYSE, 18.56, 2013-04-26\n FRO, NYSE, 27.08, 2013-05-31\n ECA, NYSE, 150.00, 2013-04-24\n CIG, NYSE, 108.33, 2013-05-17\n EEP, NYSE, 16.67, 2013-05-01\n CVX, NYSE, 3.25, 2013-04-27\n GXP, NYSE, 41.67, 2013-05-10\n JHX, NYSE, -2.78, 2013-05-24\n PFG, NYSE, 5.33, 2013-04-26\n PVR, NYSE, 14.29, 2013-04-26\n AAP, NYSE, 2.48, 2013-05-24\n KND, NYSE, 36.11, 2013-05-02\n WTW, NYSE, 38.10, 2013-05-03\n CNC, NYSE, 5.00, 2013-04-24\n BCH, NYSE, 3.70, 2013-05-09\n NS, NYSE, -86.67, 2013-04-25\n ITUB, NYSE, -4.88, 2013-04-26\n SXL, NYSE, 26.74, 2013-05-09\n VALE, NYSE, 50.00, 2013-04-25\n TNP, NYSE, 150.00, 2013-05-25\n LCI, NYSE, 40.00, 2013-05-09\n GTI, NYSE, 50.00, 2013-04-26\n HNR, NYSE, -26.67, 2013-06-06\n MWE, NYSE, -90.00, 2013-05-09\n NLS, NYSE, 50.00, 2013-05-07\n RGC, NYSE, -7.14, 2013-05-01\n JAH, NYSE, 30.43, 2013-04-25\n NPO, NYSE, -23.29, 2013-05-03\n TRI, NYSE, 22.58, 2013-05-01\n CAE, NYSE, 10.53, 2013-05-17\n LF, NYSE, 28.57, 2013-05-02\n SNY, NYSE, -10.11, 2013-05-03\n BANC, NYSE, 400.00, 2013-05-09\n COP, NYSE, 0.00, 2013-04-26\n CNP, NYSE, -8.11, 2013-05-03\n EEQ, NYSE, -321.43, 2013-05-02\n MRH, NYSE, 32.58, 2013-04-25\n NGS, NYSE, 23.08, 2013-05-10\n NRP, NYSE, 4.88, 2013-05-07\n PXP, NYSE, 17.98, 2013-05-03\n XEC, NYSE, -0.93, 2013-05-08\n IAG, NYSE, 7.14, 2013-05-08\n EGO, NYSE, 0.00, 2013-05-03\n JNS, NYSE, -6.25, 2013-04-24\n PFS, NYSE, 14.81, 2013-04-27\n ENH, NYSE, 74.79, 2013-05-02\n CNX, NYSE, -5.00, 2013-04-26\n AMT, NYSE, -10.42, 2013-05-02\n ABG, NYSE, 13.43, 2013-04-25\n LII, NYSE, 22.22, 2013-04-23\n SRE, NYSE, -4.90, 2013-05-03\n AEE, NYSE, -21.43, 2013-05-03\n PLD, NYSE, 0.00, 2013-04-25\n SAH, NYSE, -2.38, 2013-04-24\n GPI, NYSE, 11.54, 2013-05-03\n FIX, NYSE, 800.00, 2013-05-02\n MMS, NYSE, 1.41, 2013-05-10\n SRI, NYSE, 50.00, 2013-05-10\n RTEC, NYSE, 50.00, 2013-05-03\n NOV, NYSE, 
-5.84, 2013-04-27\n DF, NYSE, 11.54, 2013-05-10\n SAM, NYSE, -17.74, 2013-05-02\n RL, NYSE, 8.46, 2013-05-24\n FLR, NYSE, 6.25, 2013-05-03\n ALL, NYSE, 2.27, 2013-05-02\n ATI, NYSE, 0.00, 2013-04-25\n EE, NYSE, 72.73, 2013-05-02\n AIT, NYSE, 0.00, 2013-05-03\n CHH, NYSE, -3.70, 2013-04-30\n FMS, NYSE, -17.78, 2013-05-01\n BCO, NYSE, 16.67, 2013-04-26\n CBB, NYSE, 133.33, 2013-05-10\n MWW, NYSE, 14.29, 2013-05-03\n PSA, NYSE, -3.09, 2013-05-10\n E, NYSE, 0.00, 2013-04-25\n JPM, NYSE, 15.22, 2013-04-13\n USB, NYSE, 0.00, 2013-04-17\n HON, NYSE, 6.14, 2013-04-20\n ITG, NYSE, 50.00, 2013-05-03\n ARB, NYSE, -15.49, 2013-05-08\n APL, NYSE, -28.95, 2013-04-30\n AVA, NYSE, 0.00, 2013-05-02\n AXS, NYSE, 85.71, 2013-04-26\n MOH, NYSE, 146.15, 2013-04-26\n CVD, NYSE, 4.17, 2013-05-02\n AHT, NYSE, 2.94, 2013-05-09\n GPK, NYSE, 25.00, 2013-04-26\n CNO, NYSE, 0.00, 2013-04-25\n AUQ, NYSE, -60.00, 2013-05-10\n NFP, NYSE, -5.45, 2013-05-04\n CRI, NYSE, 12.86, 2013-05-10\n FMD, NYSE, 27.27, 2013-04-30\n FPO, NYSE, 3.45, 2013-04-26\n TRQ, NYSE, -25.00, 2013-05-14\n WLL, NYSE, 2.17, 2013-04-25\n AEL, NYSE, 11.36, 2013-05-02\n AHL, NYSE, 0.95, 2013-04-25\n AUY, NYSE, -23.81, 2013-05-01\n CMP, NYSE, 24.32, 2013-04-30\n KRO, NYSE, -800.00, 2013-05-09\n TPX, NYSE, 3.33, 2013-05-03\n UTI, NYSE, -300.00, 2013-05-01\n PJC, NYSE, 9.09, 2013-04-18\n TRW, NYSE, 3.42, 2013-05-01\n AIZ, NYSE, -14.56, 2013-04-25\n HTH, NYSE, 11.43, 2013-05-07\n ETP, NYSE, 33.33, 2013-05-09\n LSE, NYSE, 0.00, 2013-05-09\n BBD, NYSE, 0.00, 2013-04-23\n NRG, NYSE, -37.04, 2013-05-08\n HOS, NYSE, 96.67, 2013-05-02\n ABR, NYSE, 84.62, 2013-05-04\n FHN, NYSE, 0.00, 2013-04-20\n AGO, NYSE, 86.11, 2013-05-10\n HSP, NYSE, 18.18, 2013-05-02\n HNI, NYSE, 250.00, 2013-04-18\n GHL, NYSE, -34.78, 2013-04-18\n XPO, NYSE, -16.44, 2013-05-08\n CVO, NYSE, -200.00, 2013-05-09\n CHE, NYSE, 9.92, 2013-04-19\n GNW, NYSE, 11.11, 2013-05-01\n CBG, NYSE, -5.88, 2013-04-26\n SFL, NYSE, -43.33, 2013-05-31\n NEU, NYSE, 3.28, 2013-04-25\n GOL, NYSE, -1200.00, 2013-05-14\n CAB, NYSE, 18.64, 2013-04-26\n LTM, NYSE, 3.08, 2013-04-26\n VVI, NYSE, 68.00, 2013-04-27\n WCG, NYSE, -8.70, 2013-05-04\n HEP, NYSE, -36.36, 2013-05-01\n DPZ, NYSE, 5.36, 2013-05-01\n BDC, NYSE, 6.33, 2013-05-03\n ENS, NYSE, 2.56, 2013-05-29\n BMR, NYSE, 7.89, 2013-05-02\n ACC, NYSE, -1.54, 2013-04-24\n KRG, NYSE, 27.27, 2013-05-03\n WLK, NYSE, 42.64, 2013-05-07\n EXR, NYSE, 4.55, 2013-04-30\n CNS, NYSE, 7.32, 2013-04-18\n IOC, NYSE, 161.54, 2013-05-14\n STON, NYSE, -150.00, 2013-05-08\n TTM, NYSE, 60.56, 2013-05-30\n CPL, NYSE, 7.69, 2013-05-11\n TPGI, NYSE, -460.00, 2013-05-07\n SHO, NYSE, 0.00, 2013-05-07\n CUBE, NYSE, 0.00, 2013-05-03\n NRF, NYSE, -51.35, 2013-05-04\n DLR, NYSE, -1.69, 2013-04-27\n MTL, NYSE, 100.00, 2013-06-19\n NWE, NYSE, 8.60, 2013-04-26\n ORA, NYSE, 550.00, 2013-05-08\n NP, NYSE, 7.25, 2013-05-09\n SMA, NYSE, -73.33, 2013-05-03\n BBG, NYSE, -2600.00, 2013-05-03\n BXC, NYSE, 35.29, 2013-05-02\n KNL, NYSE, 8.33, 2013-04-19\n LVS, NYSE, 7.58, 2013-05-02\n HLF, NYSE, 18.69, 2013-04-30\n MIC, NYSE, -89.09, 2013-04-30\n PHH, NYSE, -81.13, 2013-05-02\n CE, NYSE, 44.30, 2013-04-19\n EDR, NYSE, 0.00, 2013-04-30\n WTI, NYSE, 34.62, 2013-05-08\n ARC, NYSE, 0.00, 2013-05-08\n PBH, NYSE, 5.88, 2013-05-17\n HUN, NYSE, 18.75, 2013-05-01\n WEX, NYSE, 3.16, 2013-05-02\n DLB, NYSE, 14.29, 2013-04-26\n DSX, NYSE, 66.67, 2013-05-23\n LAZ, NYSE, -17.65, 2013-04-27\n TGP, NYSE, 14.29, 2013-05-10\n TLP, NYSE, 7.69, 2013-05-08\n DRH, NYSE, 55.56, 2013-05-11\n HTGC, NYSE, 8.00, 2013-05-03\n KFN, 
NYSE, 27.78, 2013-05-02\n THS, NYSE, 5.71, 2013-05-10\n NSR, NYSE, -8.86, 2013-05-03\n WAL, NYSE, 14.29, 2013-04-19\n SLW, NYSE, -9.76, 2013-05-11\n MPW, NYSE, -3.85, 2013-04-27\n GNK, NYSE, -2.75, 2013-05-02\n MFB, NYSE, 28.57, 2013-05-09\nRDS.A, NYSE, 21.74, 2013-05-03\n ITC, NYSE, -3.45, 2013-04-24\n FTK, NYSE, -11.76, 2013-05-10\n PIKE, NYSE, -20.00, 2013-05-07\n ALJ, NYSE, 63.27, 2013-05-09\n DRC, NYSE, 2.38, 2013-04-26\n STN, NYSE, 0.00, 2013-05-10\n SSW, NYSE, -8.70, 2013-04-30\n CF, NYSE, 0.50, 2013-05-09\n HPY, NYSE, 12.50, 2013-05-01\n ROC, NYSE, 1.49, 2013-05-01\n WPZ, NYSE, -57.58, 2013-05-01\n LCC, NYSE, 29.17, 2013-04-24\n GLP, NYSE, -7.27, 2013-05-10\n AMP, NYSE, 1.27, 2013-04-23\n DHT, NYSE, 58.33, 2013-04-30\n FNF, NYSE, 5.00, 2013-05-02\n NM, NYSE, 52.38, 2013-05-22\n CCO, NYSE, -57.14, 2013-05-03\n BWP, NYSE, 5.00, 2013-04-30\n ICE, NYSE, 2.53, 2013-05-02\n BKD, NYSE, 50.00, 2013-05-02\n BAS, NYSE, 12.00, 2013-04-25\n CPA, NYSE, 21.21, 2013-05-14\n LYV, NYSE, 8.33, 2013-05-08\n WNR, NYSE, -6.93, 2013-05-03\n CMG, NYSE, 9.81, 2013-04-19\n RGP, NYSE, -50.00, 2013-05-09\n KOP, NYSE, -16.92, 2013-05-04\n TX, NYSE, 40.43, 2013-05-01\n UAL, NYSE, 10.09, 2013-04-26\n ETE, NYSE, -27.03, 2013-05-09\n RSO, NYSE, -45.00, 2013-05-08\n XCO, NYSE, 62.50, 2013-05-01\n PAC, NYSE, 30.00, 2013-04-26\n NYX, NYSE, 1.79, 2013-05-01\n TDG, NYSE, 0.61, 2013-05-08\n BMA, NYSE, 11.68, 2013-05-09\n THI, NYSE, 1.67, 2013-05-09\n BTE, NYSE, -112.00, 2013-05-10\n CNH, NYSE, 41.49, 2013-05-01\n GLA, NYSE, -82.35, 2013-05-02\n POR, NYSE, 0.00, 2013-05-02\n HIL, NYSE, 50.00, 2013-05-03\n HVB, NYSE, 12.50, 2013-04-24\n KS, NYSE, -9.30, 2013-05-08\n HK, NYSE, -28.57, 2013-05-03\n DCP, NYSE, 3.28, 2013-05-07\n DK, NYSE, 7.56, 2013-05-09\n CODI, NYSE, 0.00, 2013-05-08\n MA, NYSE, 0.65, 2013-05-02\n MWA, NYSE, 150.00, 2013-05-01\n KOG, NYSE, -21.43, 2013-05-03\n PWE, NYSE, -150.00, 2013-05-03\n PGTI, NYSE, 100.00, 2013-05-02\n AWH, NYSE, 8.45, 2013-04-25\n NSH, NYSE, -29.73, 2013-04-25\n WYN, NYSE, 7.58, 2013-04-25\n WNS, NYSE, 15.38, 2013-04-18\n PGH, NYSE, 0.00, 2013-05-02\n AYR, NYSE, 34.48, 2013-05-03\n EVR, NYSE, -24.49, 2013-04-25\n HBI, NYSE, 2.00, 2013-04-24\n WU, NYSE, 12.12, 2013-05-01\n OC, NYSE, 45.00, 2013-04-25\n DAC, NYSE, 44.44, 2013-04-30\n AWI, NYSE, -43.59, 2013-04-30\n SUSS, NYSE, 0.00, 2013-05-09\n DEI, NYSE, 5.71, 2013-05-08\n OB, NYSE, 79.31, 2013-04-30\n SBH, NYSE, -7.69, 2013-05-03\n EBS, NYSE, -144.44, 2013-05-03\n KBR, NYSE, 25.53, 2013-04-26\n AER, NYSE, 23.40, 2013-05-08\n NOA, NYSE, -442.86, 2013-06-11\n SPR, NYSE, 29.79, 2013-05-03\n ANW, NYSE, -7.14, 2013-05-16\n DCT, NYSE, 10.00, 2013-05-03\n SE, NYSE, 6.25, 2013-05-04\n TOO, NYSE, -17.86, 2013-05-10\n TSL, NYSE, -27.78, 2013-05-30\n TWC, NYSE, 2.92, 2013-04-26\n MVO, NYSE, -13.92, 2013-05-09\n CO, NYSE, 150.00, 2013-06-19\n EXK, NYSE, -18.75, 2013-05-07\n EIG, NYSE, 22.22, 2013-05-09\n HF, NYSE, -50.00, 2013-05-02\n FIG, NYSE, 33.33, 2013-05-03\n NGLS, NYSE, -20.00, 2013-05-04\n TCAP, NYSE, -1.75, 2013-05-09\n GFA, NYSE, -211.11, 2013-05-14\n BR, NYSE, 18.18, 2013-05-08\n SCR, NYSE, 12.50, 2013-05-10\n CNK, NYSE, 12.00, 2013-05-08\n DAL, NYSE, 42.86, 2013-04-24\n ORN, NYSE, 42.86, 2013-05-03\n ACM, NYSE, 3.92, 2013-05-08\n SLH, NYSE, 5.00, 2013-05-08\n CLR, NYSE, 2.63, 2013-05-09\n BGS, NYSE, -5.13, 2013-04-19\n STAR, NYSE, 26.42, 2013-05-01\n YGE, NYSE, -40.00, 2013-05-31\n DFS, NYSE, 18.75, 2013-04-24\n TEL, NYSE, 7.04, 2013-04-25\n BX, NYSE, 1.85, 2013-04-19\n SEP, NYSE, 4.65, 2013-05-04\n BZ, NYSE, -77.78, 2013-05-03\n 
PPO, NYSE, -41.18, 2013-05-09\n PRO, NYSE, 100.00, 2013-05-03\n WBC, NYSE, 7.34, 2013-04-26\n DHX, NYSE, 0.00, 2013-04-24\n PMC, NYSE, 23.53, 2013-05-02\n HGG, NYSE, 3.33, 2013-05-21\n OWW, NYSE, -33.33, 2013-05-10\n VR, NYSE, 35.97, 2013-04-26\n CXO, NYSE, -27.50, 2013-05-02\n G, NYSE, 5.00, 2013-05-02\n EJ, NYSE, 89.47, 2013-05-16\n WX, NYSE, 11.11, 2013-05-14\n CMLP, NYSE, -92.86, 2013-05-08\n VMW, NYSE, 10.87, 2013-04-24\n CZZ, NYSE, -40.00, 2013-06-06\n CGA, NYSE, 6.67, 2013-05-14\n TDC, NYSE, -26.92, 2013-05-03\n FLY, NYSE, 61.73, 2013-05-03\n MAIN, NYSE, 2.04, 2013-05-10\n REN, NYSE, 100.00, 2013-05-07\n TGH, NYSE, -12.90, 2013-05-08\n DFT, NYSE, -5.00, 2013-05-08\n RF, NYSE, 15.00, 2013-04-24\n PZN, NYSE, 0.00, 2013-04-25\n LL, NYSE, 29.55, 2013-04-25\n NMM, NYSE, 0.00, 2013-04-26\n OZM, NYSE, 81.25, 2013-05-03\n ES, NYSE, 12.31, 2013-05-02\n MSCI, NYSE, 5.56, 2013-05-02\n ARR, NYSE, -21.74, 2013-05-03\n KW, NYSE, 62.50, 2013-05-08\n GTS, NYSE, 52.78, 2013-05-02\n FOR, NYSE, 450.00, 2013-05-09\n LRN, NYSE, 34.78, 2013-05-04\n TNK, NYSE, -100.00, 2013-05-10\n N, NYSE, -21.43, 2013-04-26\n DAN, NYSE, -33.33, 2013-04-26\n BIP, NYSE, 0.00, 2013-05-03\n CPN, NYSE, -6.67, 2013-05-03\n SOL, NYSE, -15.38, 2013-05-17\n PM, NYSE, -4.44, 2013-04-19\n V, NYSE, 6.08, 2013-05-02\n IPI, NYSE, 5.26, 2013-05-02\n AWK, NYSE, -5.88, 2013-05-08\n HTS, NYSE, -7.46, 2013-04-23\n DPS, NYSE, 12.77, 2013-04-25\n CFX, NYSE, 8.33, 2013-04-26\n WES, NYSE, -22.50, 2013-05-02\n SB, NYSE, 0.00, 2013-05-16\n LO, NYSE, 4.76, 2013-04-25\n LPS, NYSE, 0.00, 2013-04-25\n FF, NYSE, -6.90, 2013-05-08\n NNA, NYSE, 200.00, 2013-05-03\n EPB, NYSE, 7.41, 2013-04-18\n JBT, NYSE, -17.65, 2013-05-08\n DL, NYSE, -33.33, 2013-05-22\n RAX, NYSE, -5.00, 2013-05-09\n GSL, NYSE, -50.00, 2013-05-10\n HCI, NYSE, 66.06, 2013-05-03\n EC, NYSE, -18.58, 2013-05-04\n CLW, NYSE, -98.08, 2013-04-25\n MJN, NYSE, -1.16, 2013-04-26\n EPC, NYSE, 39.53, 2013-05-02\n BPI, NYSE, 0.00, 2013-05-07\n RST, NYSE, 25.00, 2013-05-09\n DGI, NYSE, 22.22, 2013-05-08\n SWI, NYSE, 6.25, 2013-05-01\n CYS, NYSE, -45.16, 2013-04-18\n IVR, NYSE, 1.59, 2013-05-02\n BUD, NYSE, 50.65, 2013-05-01\n SLD, NYSE, -66.67, 2013-05-15\n PMT, NYSE, 11.11, 2013-04-24\n STWD, NYSE, -20.93, 2013-05-09\n CFN, NYSE, 11.32, 2013-05-10\n SPB, NYSE, 7.32, 2013-05-01\n ARI, NYSE, 33.33, 2013-05-02\n CLNY, NYSE, -26.47, 2013-05-07\n ART, NYSE, -800.00, 2013-05-07\n SEM, NYSE, -11.11, 2013-05-03\n BSBR, NYSE, -71.43, 2013-04-26\n DOLE, NYSE, -50.00, 2013-05-03\n VSI, NYSE, 2.86, 2013-05-08\n TWO, NYSE, -9.38, 2013-05-08\n CVE, NYSE, -6.38, 2013-04-25\n H, NYSE, 12.50, 2013-05-02\n LEA, NYSE, 19.27, 2013-04-26\n SVN, NYSE, -81.82, 2013-05-14\n CLD, NYSE, -59.26, 2013-05-01\n AOL, NYSE, 6.25, 2013-05-09\n CHSP, NYSE, 25.00, 2013-05-08\n PEB, NYSE, 5.88, 2013-04-26\n CIT, NYSE, -8.99, 2013-04-24\n KAR, NYSE, -3.03, 2013-05-02\n CIE, NYSE, -15.38, 2013-05-01\n TMH, NYSE, 0.00, 2013-05-01\n KRA, NYSE, -75.00, 2013-05-02\n SYA, NYSE, 8.82, 2013-04-25\n TRNO, NYSE, -11.11, 2013-05-09\n PDM, NYSE, 0.00, 2013-05-03\n GNRC, NYSE, 23.47, 2013-05-03\n ACW, NYSE, -9.68, 2013-04-24\n BALT, NYSE, -9.52, 2013-05-02\n ST, NYSE, 4.35, 2013-04-24\n SEMG, NYSE, -15.00, 2013-05-09\n CALX, NYSE, 50.00, 2013-04-26\n MXL, NYSE, 33.33, 2013-05-01\n STNG, NYSE, 60.00, 2013-04-30\n PRI, NYSE, -4.35, 2013-05-08\n SDRL, NYSE, 16.95, 2013-05-29\n CLDT, NYSE, 7.50, 2013-05-08\n EXL, NYSE, 5.00, 2013-05-02\n LYB, NYSE, 9.09, 2013-04-27\n PNG, NYSE, 4.35, 2013-05-07\n PLOW, NYSE, 13.33, 2013-05-07\n SIX, NYSE, 19.61, 
2013-04-23\n NKA, NYSE, -140.00, 2013-05-10\n RRTS, NYSE, 3.57, 2013-05-02\n JKS, NYSE, 66.27, 2013-06-08\n CODE, NYSE, 7.69, 2013-05-01\n FAF, NYSE, -31.71, 2013-04-26\n QEP, NYSE, -6.67, 2013-05-01\n OAS, NYSE, 31.37, 2013-05-08\n HPP, NYSE, 18.18, 2013-05-07\n FN, NYSE, 3.70, 2013-04-30\n ECT, NYSE, 7.32, 2013-05-11\n QUAD, NYSE, -88.10, 2013-05-08\n KKR, NYSE, 4.76, 2013-04-26\n RLD, NYSE, 70.00, 2013-06-07\n AMRC, NYSE, -200.00, 2013-05-10\n GDOT, NYSE, 9.37, 2013-05-01\n AT, NYSE, 40.00, 2013-05-09\n ENV, NYSE, 0.00, 2013-05-17\n COR, NYSE, 0.00, 2013-04-25\n VC, NYSE, 75.65, 2013-05-10\n CCG, NYSE, 5.88, 2013-05-01\n EFC, NYSE, -32.00, 2013-05-07\n TOWR, NYSE, 255.56, 2013-05-03\n CHMT, NYSE, -21.05, 2013-05-03\n HBM, NYSE, 200.00, 2013-05-02\n EXAM, NYSE, 0.00, 2013-05-09\n XUE, NYSE, -25.00, 2013-05-17\n CMRE, NYSE, 26.09, 2013-04-25\n NOAH, NYSE, 112.50, 2013-05-07\n IPHI, NYSE, 18.18, 2013-05-02\n BITA, NYSE, 0.00, 2013-05-10\n BAH, NYSE, 11.43, 2013-05-23\n GM, NYSE, 19.64, 2013-05-03\n XNY, NYSE, 28.57, 2013-05-20\n TROX, NYSE, -181.25, 2013-05-09\n TRGP, NYSE, 52.38, 2013-05-04\n DANG, NYSE, 21.05, 2013-05-17\n YOKU, NYSE, 0.00, 2013-05-16\n FRC, NYSE, 0.00, 2013-04-16\n RFP, NYSE, 64.29, 2013-05-01\n ISS, NYSE, 50.00, 2013-05-18\n WD, NYSE, -45.65, 2013-05-09\n FLT, NYSE, 10.39, 2013-05-03\n GCAP, NYSE, -15.38, 2013-05-08\n FRF, NYSE, -27.27, 2013-05-14\n SWFT, NYSE, 23.53, 2013-04-23\n AG, NYSE, -8.00, 2013-05-16\n QRE, NYSE, 0.00, 2013-05-09\n AAT, NYSE, 8.57, 2013-05-01\n MCC, NYSE, -2.70, 2013-05-03\n NLSN, NYSE, 9.09, 2013-04-26\n AGRO, NYSE, -100.00, 2013-05-17\n BKU, NYSE, 4.44, 2013-04-25\n INXN, NYSE, -7.14, 2013-05-09\n NPTN, NYSE, 10.00, 2013-05-10\n INN, NYSE, 5.88, 2013-05-07\n KMI, NYSE, -12.50, 2013-04-18\n HCA, NYSE, -4.82, 2013-05-03\n MX, NYSE, 13.04, 2013-05-01\n HII, NYSE, 0.00, 2013-05-09\n QIHU, NYSE, 100.00, 2013-05-20\n APO, NYSE, 56.20, 2013-05-07\n GNC, NYSE, 1.39, 2013-04-27\n SDT, NYSE, 16.07, 2013-05-11\n UAN, NYSE, 4.26, 2013-05-02\n ARCO, NYSE, -142.86, 2013-05-01\n ELLI, NYSE, -16.67, 2013-05-01\n TMS, NYSE, -12.00, 2013-04-26\n SQNS, NYSE, 0.00, 2013-04-26\n STAG, NYSE, 3.13, 2013-05-07\n AL, NYSE, 5.13, 2013-05-10\n TLLP, NYSE, -14.89, 2013-05-07\n RENN, NYSE, 85.71, 2013-05-14\n NQ, NYSE, -16.67, 2013-05-16\n KOS, NYSE, -37.50, 2013-05-10\n RLJ, NYSE, 10.81, 2013-05-09\n NGL, NYSE, -62.86, 2013-06-15\n FENG, NYSE, 60.00, 2013-05-15\n LNKD, NYSE, 340.00, 2013-05-03\n NMFC, NYSE, -2.86, 2013-05-07\n ACTV, NYSE, 32.14, 2013-05-03\n FIO, NYSE, 20.00, 2013-04-25\n TAOM, NYSE, -25.00, 2013-05-24\n RATE, NYSE, 10.00, 2013-05-01\n VHS, NYSE, 8.33, 2013-05-01\n MPC, NYSE, 0.00, 2013-05-01\n MITT, NYSE, -9.64, 2013-05-07\n OILT, NYSE, 17.07, 2013-05-09\n SXC, NYSE, -40.00, 2013-04-26\n AMTG, NYSE, 14.06, 2013-05-07\n AMID, NYSE, -200.00, 2013-05-14\n WAIR, NYSE, 22.22, 2013-04-30\n PER, NYSE, -7.58, 2013-05-11\n PPP, NYSE, 260.00, 2013-05-09\n FSM, NYSE, -28.57, 2013-05-08\n FBHS, NYSE, 41.18, 2013-05-03\n XLS, NYSE, 73.91, 2013-05-04\n XYL, NYSE, -3.57, 2013-05-01\n GNE, NYSE, -550.00, 2013-05-08\n NDRO, NYSE, -8.11, 2013-05-04\n RNF, NYSE, -29.63, 2013-05-10\n VAC, NYSE, 10.20, 2013-04-26\n CHKR, NYSE, -2.90, 2013-05-10\n PACD, NYSE, 250.00, 2013-05-07\n INVN, NYSE, -13.33, 2013-05-03\n DLPH, NYSE, 11.46, 2013-05-02\n MN, NYSE, 0.00, 2013-05-02\n RRMS, NYSE, 51.28, 2013-05-10\n WPX, NYSE, -4.17, 2013-05-03\n LPI, NYSE, -15.38, 2013-05-10\n SN, NYSE, -82.61, 2013-05-08\n KORS, NYSE, 35.14, 2013-05-30\n BCEI, NYSE, -20.93, 2013-05-10\n BOXC, NYSE, 
2.56, 2013-04-23\n PVG, NYSE, -25.00, 2013-05-11\n POST, NYSE, -29.63, 2013-05-14\n SLCA, NYSE, -2.78, 2013-05-01\n MTDR, NYSE, 0.00, 2013-05-09\n GWAY, NYSE, -120.00, 2013-05-07\n EPAM, NYSE, -14.71, 2013-05-09\n RNDY, NYSE, -9.52, 2013-05-10\n PRLB, NYSE, 0.00, 2013-04-26\n YELP, NYSE, -40.00, 2013-05-02\n NSM, NYSE, 23.19, 2013-05-08\n ALSN, NYSE, 95.24, 2013-04-30\n DWRE, NYSE, -22.73, 2013-05-08\n VNTV, NYSE, 3.70, 2013-05-07\n ET, NYSE, 0.00, 2013-05-10\n VCRA, NYSE, -160.00, 2013-05-03\n RM, NYSE, -1.82, 2013-05-03\n BNNY, NYSE, 3.57, 2013-06-11\n MM, NYSE, 25.00, 2013-05-09\n RXN, NYSE, 0.00, 2013-05-22\n GLOG, NYSE, -16.67, 2013-05-16\n RPAI, NYSE, 9.52, 2013-05-07\n OAK, NYSE, 39.86, 2013-05-08\n FET, NYSE, 3.03, 2013-04-26\n MRC, NYSE, 4.65, 2013-05-03\n PSX, NYSE, 17.74, 2013-05-02\n TUMI, NYSE, 6.67, 2013-05-09\n ACRE, NYSE, -5.88, 2013-05-16\n EVER, NYSE, 13.79, 2013-04-25\n PDH, NYSE, -13.24, 2013-04-25\n ROYT, NYSE, 10.00, 2013-05-11\n WMC, NYSE, -2.15, 2013-05-16\n WAGE, NYSE, 35.71, 2013-05-10\n HTA, NYSE, 6.67, 2013-05-08\n ALEX, NYSE, -28.57, 2013-05-10\n BKW, NYSE, 0.00, 2013-04-27\n CNCO, NYSE, -88.24, 2013-05-31\n EQM, NYSE, 41.30, 2013-04-26\n NOW, NYSE, 0.00, 2013-04-25\n EGL, NYSE, -11.24, 2013-05-14\n NGVC, NYSE, 7.69, 2013-05-10\n NTI, NYSE, 3.51, 2013-05-14\n AMRE, NYSE, 4.00, 2013-05-08\n GMED, NYSE, 5.00, 2013-05-03\n MANU, NYSE, -25.00, 2013-05-03\n HCLP, NYSE, -23.08, 2013-05-15\n ADT, NYSE, -4.65, 2013-05-02\n TRLA, NYSE, -75.00, 2013-05-01\n SRC, NYSE, 19.44, 2013-05-09\n NBHC, NYSE, -50.00, 2013-04-30\n BSMX, NYSE, 30.43, 2013-04-27\n HY, NYSE, 67.05, 2013-05-02\n SMLP, NYSE, -10.71, 2013-05-14\n DYN, NYSE, -254.55, 2013-05-03\n LXFR, NYSE, 0.00, 2013-05-08\n LOCK, NYSE, 25.00, 2013-05-02\n JMI, NYSE, 224.44, 2013-05-08\n BERY, NYSE, 16.67, 2013-05-03\n FLTX, NYSE, 8.33, 2013-05-09\n ANFI, NYSE, 0.00, 2013-06-11\n SSTK, NYSE, 23.08, 2013-05-09\n RLGY, NYSE, -13.33, 2013-05-02\n SDLP, NYSE, 88.64, 2013-05-29\n MPLX, NYSE, -7.14, 2013-05-01\n WWAV, NYSE, 6.67, 2013-05-10\n SXE, NYSE, -44.44, 2013-05-09\n DKL, NYSE, 31.58, 2013-05-08\n SCM, NYSE, -8.82, 2013-05-10\n RKUS, NYSE, -100.00, 2013-05-07\n ALDW, NYSE, -1.32, 2013-05-08\n WGP, NYSE, 0.00, 2013-05-02\n ABBV, NYSE, 3.03, 2013-04-27\n PBF, NYSE, -54.72, 2013-05-03\n SBY, NYSE, -433.33, 2013-05-14\n RIOM, NYSE, 0.00, 2013-05-15\n USAC, NYSE, -30.00, 2013-05-10\n CVRR, NYSE, -2.56, 2013-05-03\n SXCP, NYSE, -9.76, 2013-04-26\n BFAM, NYSE, 81.82, 2013-05-10\n TPH, NYSE, 200.00, 2013-05-15\n ZTS, NYSE, 5.88, 2013-05-01\n BCC, NYSE, 146.15, 2013-04-23\n AGI, NYSE, 0.00, 2013-04-26\n APAM, NYSE, -11.32, 2013-05-02\n SSNI, NYSE, -1211.77, 2013-05-02\n MODN, NYSE, 0.00, 2013-05-08\n AVIV, NYSE, 150.00, 2013-05-08\n OAKS, NYSE, 509.09, 2013-05-04\n MRIN, NYSE, -7.50, 2013-05-09\n PF, NYSE, 17.24, 2013-05-16\n TMHC, NYSE, -66.67, 2013-05-16\n ARPI, NYSE, -600.00, 2013-06-25\n CSTM, NYSE, -105.08, 2013-06-18\n DDC, NYSE, -80.00, 2013-06-06\n ABM, NYSE, 9.09, 2013-06-04\n ANN, NYSE, 4.76, 2013-06-07\n BBY, NYSE, 28.00, 2013-05-22\n BF.B, NYSE, -2.17, 2013-06-06\n BKE, NYSE, -4.88, 2013-05-24\n NCS, NYSE, -21.74, 2013-06-05\n BNS, NYSE, -0.83, 2013-05-29\n BRC, NYSE, -6.78, 2013-05-17\n CATO, NYSE, 1.94, 2013-05-24\n COO, NYSE, 9.49, 2013-06-07\n CPB, NYSE, 10.71, 2013-05-21\n CFI, NYSE, 10.81, 2013-06-13\n DCI, NYSE, -4.17, 2013-05-18\n DDS, NYSE, 15.38, 2013-05-15\n DE, NYSE, 0.73, 2013-05-16\n DY, NYSE, 0.00, 2013-05-22\n EV, NYSE, 0.00, 2013-05-23\n ESL, NYSE, -11.81, 2013-05-31\n M, NYSE, 3.77, 2013-05-16\n 
GCO, NYSE, 11.90, 2013-06-01\n GPS, NYSE, 2.90, 2013-05-24\n HD, NYSE, 7.79, 2013-05-22\n HEI, NYSE, 10.00, 2013-05-23\n HOV, NYSE, 120.00, 2013-06-06\n HRB, NYSE, -1.93, 2013-06-13\n HRL, NYSE, 0.00, 2013-05-24\n HPQ, NYSE, 7.41, 2013-05-23\n JCP, NYSE, -12.93, 2013-05-17\n KR, NYSE, 4.55, 2013-06-21\n KSS, NYSE, 15.79, 2013-05-17\n LB, NYSE, 4.35, 2013-05-23\n LOW, NYSE, -3.92, 2013-05-23\n LZB, NYSE, 7.14, 2013-06-19\n MDT, NYSE, 6.80, 2013-05-22\n MEI, NYSE, 60.00, 2013-06-21\n MPR, NYSE, -33.33, 2013-06-07\n NAV, NYSE, -302.75, 2013-06-11\n JWN, NYSE, -3.95, 2013-05-17\n OXM, NYSE, 5.13, 2013-06-12\n PBY, NYSE, -85.71, 2013-06-11\n PLL, NYSE, 1.37, 2013-05-31\n PNY, NYSE, 0.00, 2013-06-08\n PVH, NYSE, 39.42, 2013-06-13\n THO, NYSE, -7.87, 2013-06-07\n TIF, NYSE, 32.08, 2013-05-29\n TJX, NYSE, 0.00, 2013-05-22\n TOL, NYSE, 0.00, 2013-05-23\n TTC, NYSE, 10.92, 2013-05-24\n VAL, NYSE, 2.25, 2013-05-15\n JW.A, NYSE, -16.47, 2013-06-19\n TGT, NYSE, 23.53, 2013-05-23\n WMT, NYSE, -0.87, 2013-05-17\n WSM, NYSE, 11.11, 2013-05-24\n FL, NYSE, 3.41, 2013-05-25\n CHS, NYSE, -11.11, 2013-05-30\n BKS, NYSE, 52.22, 2013-06-26\n CAL, NYSE, 45.45, 2013-05-30\n SIG, NYSE, 0.89, 2013-05-24\n ZLC, NYSE, 1200.00, 2013-05-23\n AEO, NYSE, 5.88, 2013-05-23\n FGP, NYSE, 15.69, 2013-06-07\n BMO, NYSE, -4.73, 2013-05-30\n RY, NYSE, -2.34, 2013-05-31\n GEF, NYSE, 1.45, 2013-06-06\n SKS, NYSE, 0.00, 2013-05-22\n TD, NYSE, 1.09, 2013-05-24\n ANF, NYSE, -80.00, 2013-05-25\n CIEN, NYSE, 20.00, 2013-06-07\n KMG, NYSE, 8.70, 2013-06-11\n IRET, NYSE, 11.76, 2013-07-02\n CM, NYSE, 0.00, 2013-05-31\n UBA, NYSE, 12.00, 2013-06-08\n KFY, NYSE, 3.23, 2013-06-18\n KKD, NYSE, 25.00, 2013-05-31\n MVC, NYSE, -37.50, 2013-06-11\n CBK, NYSE, 150.00, 2013-06-08\n SJM, NYSE, 12.17, 2013-06-07\n BIG, NYSE, 0.00, 2013-05-31\n JOY, NYSE, 11.61, 2013-05-31\n SSI, NYSE, -122.22, 2013-05-18\n GME, NYSE, 15.00, 2013-05-24\n DKS, NYSE, 0.00, 2013-05-22\n A, NYSE, 14.93, 2013-05-15\n MTN, NYSE, -3.62, 2013-06-07\n GES, NYSE, 75.00, 2013-05-31\n CRM, NYSE, -600.00, 2013-05-24\n NWY, NYSE, 128.57, 2013-05-24\n PAY, NYSE, -7.69, 2013-06-06\n DSW, NYSE, 11.11, 2013-05-30\n NX, NYSE, -300.00, 2013-06-08\n DG, NYSE, -1.39, 2013-06-05\n EXPR, NYSE, 5.56, 2013-05-31\n P, NYSE, 0.00, 2013-05-23\n GWRE, NYSE, 44.44, 2013-05-29\n BLOX, NYSE, 100.00, 2013-05-24\n TLYS, NYSE, 14.29, 2013-05-30\n PANW, NYSE, -900.00, 2013-05-31\n WDAY, NYSE, 13.04, 2013-05-23\n RH, NYSE, 50.00, 2013-06-14\n RALY, NYSE, 14.78, 2013-06-07\n AIR, NYSE, 13.64, 2013-07-26\n ATU, NYSE, -1.59, 2013-06-20\n AZO, NYSE, 0.69, 2013-05-22\n AZZ, NYSE, -8.20, 2013-06-29\n CAG, NYSE, 1.69, 2013-06-28\n CLC, NYSE, -1.49, 2013-06-20\n CMC, NYSE, -15.79, 2013-06-28\n FC, NYSE, 18.18, 2013-07-10\n FDO, NYSE, 1.94, 2013-07-11\n FDX, NYSE, 8.67, 2013-06-20\n FUL, NYSE, -5.63, 2013-06-27\n GIS, NYSE, -1.85, 2013-06-27\n KBH, NYSE, 20.00, 2013-06-28\n LEN, NYSE, 30.30, 2013-06-26\n LNN, NYSE, 12.92, 2013-06-27\n MKC, NYSE, 0.00, 2013-06-28\n RT, NYSE, -36.84, 2013-07-25\n MCS, NYSE, -6.25, 2013-07-26\n MSM, NYSE, 9.37, 2013-07-11\n NKE, NYSE, 2.70, 2013-06-28\n ORCL, NYSE, 0.00, 2013-06-21\n PIR, NYSE, 0.00, 2013-06-21\n PKE, NYSE, -13.79, 2013-06-27\n RAD, NYSE, 0.00, 2013-06-21\n RPM, NYSE, 7.46, 2013-07-23\n SVU, NYSE, 250.00, 2013-07-19\n TISI, NYSE, 0.00, 2013-08-07\n TXI, NYSE, 116.00, 2013-07-11\n UNF, NYSE, 2.88, 2013-06-27\n WGO, NYSE, 0.00, 2013-06-28\n WOR, NYSE, -7.46, 2013-06-28\n JBL, NYSE, 4.35, 2013-06-20\n GBX, NYSE, -5.66, 2013-07-03\n DRI, NYSE, -1.94, 2013-06-22\n FDS, 
NYSE, -1.71, 2013-06-19\n KMX, NYSE, 12.28, 2013-06-22\n SCS, NYSE, 0.00, 2013-06-20\n SJR, NYSE, 16.28, 2013-06-29\n RHT, NYSE, 9.09, 2013-06-20\n OMN, NYSE, 14.29, 2013-06-28\n MON, NYSE, 3.75, 2013-06-27\n GPN, NYSE, -3.92, 2013-07-26\n AYI, NYSE, 7.78, 2013-07-03\n CCL, NYSE, 50.00, 2013-06-26\n CUK, NYSE, 50.00, 2013-06-26\n STZ, NYSE, -7.32, 2013-07-03\n ACN, NYSE, 0.00, 2013-06-28\n SNX, NYSE, 0.00, 2013-06-26\n TAL, NYSE, 66.67, 2013-07-23\n IHS, NYSE, 1.45, 2013-06-21\n EDU, NYSE, 20.00, 2013-07-24\n ZEP, NYSE, -31.71, 2013-07-03\n MG, NYSE, -5.88, 2013-08-08\n MOS, NYSE, -0.88, 2013-07-16\n ABT, NYSE, 4.55, 2013-07-18\n ABX, NYSE, 17.86, 2013-08-02\n AB, NYSE, 7.89, 2013-08-01\n TAP, NYSE, 8.63, 2013-08-07\n ACO, NYSE, 1.79, 2013-07-27\n ADM, NYSE, 9.52, 2013-08-07\n AEM, NYSE, -85.71, 2013-07-25\n AEP, NYSE, -5.19, 2013-07-26\n AES, NYSE, 23.08, 2013-08-09\n AET, NYSE, 9.35, 2013-07-31\n AFL, NYSE, 6.58, 2013-07-31\n AGCO, NYSE, 18.78, 2013-08-01\n AGN, NYSE, 1.01, 2013-07-26\n HES, NYSE, 7.09, 2013-08-01\n AIG, NYSE, 31.76, 2013-08-02\n AIN, NYSE, -23.08, 2013-08-01\n AJG, NYSE, 5.80, 2013-07-31\n ALU, NYSE, 33.33, 2013-07-31\n MATX, NYSE, 6.82, 2013-08-08\n ALK, NYSE, -0.68, 2013-07-26\n BEAM, NYSE, 6.67, 2013-08-09\n AME, NYSE, 0.00, 2013-08-08\n TWX, NYSE, 10.67, 2013-08-08\n AVD, NYSE, -17.14, 2013-08-06\n AMN, NYSE, 20.00, 2013-08-02\n AN, NYSE, -1.35, 2013-07-19\n AON, NYSE, 0.91, 2013-07-27\n APA, NYSE, -0.50, 2013-08-02\n APC, NYSE, 16.67, 2013-07-30\n APD, NYSE, 0.00, 2013-07-24\n APH, NYSE, 1.06, 2013-07-19\n ARG, NYSE, -0.87, 2013-07-26\n AAN, NYSE, 0.00, 2013-07-25\n ARW, NYSE, 8.74, 2013-07-25\n ASGN, NYSE, 14.29, 2013-07-25\n ASH, NYSE, -8.29, 2013-07-26\n ASR, NYSE, 21.90, 2013-07-23\n GAS, NYSE, 51.85, 2013-08-01\n ATO, NYSE, 13.51, 2013-08-07\n ATW, NYSE, 0.74, 2013-08-01\n AVP, NYSE, 11.54, 2013-08-02\n AVT, NYSE, 3.16, 2013-08-08\n AVY, NYSE, 2.90, 2013-07-24\n AXP, NYSE, 4.96, 2013-07-18\n B, NYSE, 0.00, 2013-07-27\n BA, NYSE, 5.70, 2013-07-25\n BAC, NYSE, 28.00, 2013-07-18\n BAX, NYSE, 2.65, 2013-07-19\n BC, NYSE, 13.89, 2013-07-26\n OMX, NYSE, -33.33, 2013-08-07\n BCE, NYSE, -2.67, 2013-08-09\n BCR, NYSE, 2.90, 2013-07-24\n BDX, NYSE, 7.48, 2013-08-02\n BEN, NYSE, 1.18, 2013-07-30\n BGG, NYSE, 15.79, 2013-08-16\n BHE, NYSE, 10.71, 2013-07-26\n BHI, NYSE, -6.15, 2013-07-20\n BID, NYSE, -9.56, 2013-08-07\n BIO, NYSE, 7.14, 2013-08-07\n BK, NYSE, 6.90, 2013-07-18\n BKH, NYSE, -2.38, 2013-08-06\n WRB, NYSE, -2.99, 2013-07-23\n BLC, NYSE, 9.09, 2013-07-31\n BLL, NYSE, 1.19, 2013-07-26\n BLX, NYSE, 5.56, 2013-07-19\n BMI, NYSE, -20.00, 2013-07-19\n BMS, NYSE, 1.67, 2013-07-26\n BMY, NYSE, 0.00, 2013-07-26\n BOH, NYSE, 2.41, 2013-07-23\n BXS, NYSE, 10.00, 2013-07-23\n BPL, NYSE, -8.86, 2013-08-03\nBRK.A, NYSE, 176.30, 2013-08-03\n BRO, NYSE, 2.86, 2013-07-16\n BSX, NYSE, 12.50, 2013-07-26\n BT, NYSE, 6.17, 2013-07-26\n MTRN, NYSE, 7.50, 2013-07-27\n CAI, NYSE, -8.54, 2013-07-31\n CAT, NYSE, -15.20, 2013-07-25\n CB, NYSE, 19.27, 2013-07-24\n CBI, NYSE, 0.00, 2013-07-31\n CBM, NYSE, -64.29, 2013-08-02\n CBU, NYSE, 4.00, 2013-07-24\n CBT, NYSE, -4.35, 2013-08-01\n CCC, NYSE, 14.29, 2013-08-07\n CCE, NYSE, 2.67, 2013-07-26\n C, NYSE, 5.93, 2013-07-16\n CCK, NYSE, 3.23, 2013-07-18\n CCU, NYSE, 25.00, 2013-08-08\n CDE, NYSE, -1100.00, 2013-08-09\n CDI, NYSE, 6.25, 2013-08-02\n CAH, NYSE, 2.60, 2013-08-02\n CFR, NYSE, 0.00, 2013-07-25\n CHD, NYSE, 1.67, 2013-08-03\n CKP, NYSE, -15.38, 2013-08-07\n CPK, NYSE, -7.02, 2013-08-10\n CI, NYSE, 11.95, 2013-08-02\n CKH, NYSE, 
51.67, 2013-07-31\n CL, NYSE, 0.00, 2013-07-26\n CLF, NYSE, 85.25, 2013-07-26\n CLH, NYSE, -25.00, 2013-08-08\n CLX, NYSE, 2.99, 2013-08-02\n CMA, NYSE, 8.57, 2013-07-17\n CMO, NYSE, -15.63, 2013-07-25\n CRK, NYSE, -6.67, 2013-07-30\n CMS, NYSE, -14.71, 2013-07-26\n CNA, NYSE, 17.19, 2013-07-31\n CNW, NYSE, 13.56, 2013-08-01\n CNL, NYSE, -6.06, 2013-08-01\n COG, NYSE, 35.48, 2013-07-25\n COT, NYSE, -4.76, 2013-08-02\n CP, NYSE, -4.14, 2013-07-25\n CPF, NYSE, 25.93, 2013-07-26\n CQB, NYSE, 43.48, 2013-08-09\n CR, NYSE, 0.00, 2013-07-23\nCRD.B, NYSE, 42.86, 2013-08-06\n CRS, NYSE, 11.59, 2013-07-31\n CSC, NYSE, 42.19, 2013-08-07\n CSL, NYSE, -14.93, 2013-07-24\n CTB, NYSE, -38.20, 2013-08-09\n CTL, NYSE, 2.99, 2013-08-08\n CTS, NYSE, 33.33, 2013-07-23\n CUB, NYSE, 9.52, 2013-08-02\n CMI, NYSE, 11.11, 2013-07-31\n CUZ, NYSE, 9.09, 2013-07-30\n CVC, NYSE, 80.00, 2013-08-03\n CW, NYSE, 6.06, 2013-08-01\n CWT, NYSE, 0.00, 2013-08-01\n CX, NYSE, 0.00, 2013-07-26\n CYN, NYSE, 8.33, 2013-07-19\n D, NYSE, -4.62, 2013-08-07\n DBD, NYSE, 0.00, 2013-08-15\n DCO, NYSE, 30.77, 2013-08-06\n DD, NYSE, 0.79, 2013-07-24\n CVA, NYSE, 150.00, 2013-07-18\n DHR, NYSE, 2.35, 2013-07-19\n DIS, NYSE, 0.00, 2013-08-07\n DLX, NYSE, 10.34, 2013-07-26\n DNB, NYSE, 2.00, 2013-08-08\n RRD, NYSE, 4.65, 2013-07-30\n DOV, NYSE, 5.43, 2013-07-19\n DOW, NYSE, 1.59, 2013-07-26\n DRE, NYSE, 0.00, 2013-08-01\n DHI, NYSE, 23.53, 2013-07-26\n UFS, NYSE, -25.00, 2013-07-26\n DTE, NYSE, -21.52, 2013-07-27\n DUK, NYSE, -6.45, 2013-08-08\n DVN, NYSE, 28.72, 2013-08-08\n DV, NYSE, 31.71, 2013-08-09\n EAT, NYSE, 4.05, 2013-08-03\n ECL, NYSE, 2.38, 2013-07-31\n ED, NYSE, -5.26, 2013-08-02\n EDE, NYSE, 8.00, 2013-07-26\n EFX, NYSE, 2.22, 2013-07-25\n EGN, NYSE, 8.20, 2013-08-01\n EGP, NYSE, 2.56, 2013-07-19\n ELP, NYSE, 17.65, 2013-08-16\n ELY, NYSE, 20.00, 2013-07-26\n EMC, NYSE, 2.94, 2013-07-25\n EMR, NYSE, -2.02, 2013-08-07\n EOG, NYSE, 19.32, 2013-08-07\n EQT, NYSE, 3.64, 2013-07-26\n ESE, NYSE, -41.07, 2013-08-09\n ESV, NYSE, 3.33, 2013-07-30\n ETN, NYSE, -1.80, 2013-08-03\n ETR, NYSE, 3.06, 2013-07-31\n EXAR, NYSE, 14.29, 2013-07-25\n F, NYSE, 21.62, 2013-07-25\n CLGX, NYSE, 13.64, 2013-07-25\n FNB, NYSE, 0.00, 2013-07-24\n FCF, NYSE, -50.00, 2013-07-25\n FBP, NYSE, -11.11, 2013-07-25\n FICO, NYSE, 6.35, 2013-07-31\n FLO, NYSE, 4.35, 2013-08-14\n FMC, NYSE, 0.00, 2013-07-30\n FOE, NYSE, 27.27, 2013-08-01\n S, NYSE, 6.06, 2013-07-31\n NEE, NYSE, 13.18, 2013-07-31\n FRT, NYSE, 0.88, 2013-08-01\n FRX, NYSE, 300.00, 2013-07-24\n FSS, NYSE, 64.29, 2013-08-10\n FUN, NYSE, 2.41, 2013-08-09\n FUR, NYSE, -48.15, 2013-08-02\n GBL, NYSE, 17.20, 2013-08-07\n GVA, NYSE, -78.13, 2013-08-02\n BGC, NYSE, 23.21, 2013-08-01\n GD, NYSE, 11.73, 2013-07-25\n GE, NYSE, 0.00, 2013-07-20\n RHP, NYSE, -26.85, 2013-08-07\n AXLL, NYSE, 2.59, 2013-08-01\n GGG, NYSE, 9.52, 2013-07-25\n GHM, NYSE, 52.00, 2013-07-26\n GIB, NYSE, 10.71, 2013-08-01\n GLT, NYSE, 20.00, 2013-07-31\n GLW, NYSE, 3.23, 2013-07-31\n GSK, NYSE, -5.88, 2013-07-25\n GLF, NYSE, 25.71, 2013-07-23\n GPC, NYSE, 14.88, 2013-07-19\n GRA, NYSE, 2.75, 2013-07-26\n GTY, NYSE, 36.00, 2013-08-08\n GWW, NYSE, 2.71, 2013-07-18\n HAE, NYSE, 0.00, 2013-07-30\n HAL, NYSE, 1.39, 2013-07-23\n HAR, NYSE, 4.60, 2013-08-07\n HVT, NYSE, 31.25, 2013-08-01\n HRC, NYSE, 0.00, 2013-07-25\n HCC, NYSE, 21.69, 2013-07-31\n HCN, NYSE, 1.09, 2013-08-07\n HCP, NYSE, -2.70, 2013-07-31\n HOG, NYSE, 3.42, 2013-07-26\n HE, NYSE, 7.89, 2013-08-09\n HMA, NYSE, -46.15, 2013-08-10\n HMN, NYSE, 30.00, 2013-07-25\n HFC, NYSE, 
0.00, 2013-08-08\n HOT, NYSE, 8.22, 2013-07-26\n HP, NYSE, 6.67, 2013-07-27\n HLS, NYSE, 18.60, 2013-07-26\n HRS, NYSE, 23.68, 2013-07-31\n HSC, NYSE, -11.76, 2013-08-09\n HSY, NYSE, 1.41, 2013-07-26\n HUBB, NYSE, 5.38, 2013-07-19\n HUM, NYSE, 6.91, 2013-08-01\n HXL, NYSE, 2.13, 2013-07-23\n IBM, NYSE, 3.44, 2013-07-18\n IDA, NYSE, 33.82, 2013-08-02\n IEX, NYSE, 2.70, 2013-07-23\n IFF, NYSE, -3.39, 2013-08-07\n DIN, NYSE, 12.09, 2013-07-31\n INT, NYSE, 11.76, 2013-08-01\n IP, NYSE, -5.45, 2013-07-26\n IPG, NYSE, -14.29, 2013-07-20\n IO, NYSE, -100.00, 2013-08-08\n IR, NYSE, 5.56, 2013-07-20\n IRF, NYSE, 81.82, 2013-08-20\n ITW, NYSE, -0.92, 2013-07-24\n JEC, NYSE, -1.19, 2013-07-30\n JNJ, NYSE, 5.71, 2013-07-17\n JNY, NYSE, 116.67, 2013-08-01\n K, NYSE, 3.09, 2013-08-02\n KAMN, NYSE, 13.56, 2013-07-30\n KDN, NYSE, 10.53, 2013-07-26\n KEX, NYSE, 0.94, 2013-07-25\n KEY, NYSE, 5.00, 2013-07-19\n KIM, NYSE, 6.06, 2013-07-30\n KMB, NYSE, 1.44, 2013-07-23\n KEM, NYSE, -95.00, 2013-07-26\n KMT, NYSE, 4.11, 2013-07-26\n KO, NYSE, 0.00, 2013-07-17\n KSU, NYSE, 1.05, 2013-07-20\n LDR, NYSE, -19.64, 2013-08-06\n LEG, NYSE, 0.00, 2013-07-26\n LLY, NYSE, 13.73, 2013-07-25\n LM, NYSE, -1.45, 2013-07-26\n LNC, NYSE, 10.43, 2013-08-01\n LPX, NYSE, 32.26, 2013-08-07\n LXU, NYSE, 29.17, 2013-08-09\n LTC, NYSE, -3.39, 2013-08-09\n L, NYSE, -5.48, 2013-07-30\n LUV, NYSE, -2.56, 2013-07-26\n LUX, NYSE, -1.67, 2013-07-26\n MKL, NYSE, 7.46, 2013-08-08\n MAN, NYSE, 17.98, 2013-07-20\n MTW, NYSE, 25.00, 2013-07-30\n SM, NYSE, 0.00, 2013-07-31\n MAS, NYSE, 21.05, 2013-07-30\n MTZ, NYSE, 2.33, 2013-08-02\n MCD, NYSE, -1.43, 2013-07-23\n MDC, NYSE, 38.18, 2013-07-31\n MDP, NYSE, 5.63, 2013-07-26\n MDR, NYSE, -1966.67, 2013-08-06\n MDU, NYSE, -3.85, 2013-08-01\n MED, NYSE, 2.00, 2013-08-07\n CVS, NYSE, 1.04, 2013-08-07\n MFC, NYSE, -3.12, 2013-08-09\n MGA, NYSE, 11.25, 2013-08-10\n MGM, NYSE, 300.00, 2013-08-07\n MMC, NYSE, 2.94, 2013-08-08\n MMM, NYSE, 0.59, 2013-07-26\n MSA, NYSE, 0.00, 2013-07-25\n MNR, NYSE, -27.78, 2013-08-07\n MO, NYSE, -1.59, 2013-07-24\n MOD, NYSE, 145.45, 2013-08-02\nMOG.A, NYSE, 8.43, 2013-07-27\n MHK, NYSE, 10.84, 2013-08-02\n MSI, NYSE, 11.96, 2013-07-25\n MCY, NYSE, 3.28, 2013-07-30\n MRK, NYSE, 2.44, 2013-07-31\n MRO, NYSE, -5.63, 2013-08-07\n POWR, NYSE, 20.00, 2013-08-08\n MTG, NYSE, 118.75, 2013-07-24\n MTB, NYSE, 26.19, 2013-07-18\n MTX, NYSE, 8.62, 2013-07-26\n MUR, NYSE, 12.90, 2013-08-01\n MYE, NYSE, 19.05, 2013-07-19\n NBL, NYSE, -5.48, 2013-07-26\n NBR, NYSE, -11.11, 2013-07-24\n NE, NYSE, 12.50, 2013-07-18\n NEM, NYSE, -124.39, 2013-07-27\n NFG, NYSE, 6.15, 2013-08-09\n NHI, NYSE, -1.14, 2013-08-07\n NI, NYSE, -4.17, 2013-08-01\n NJR, NYSE, 15.00, 2013-08-08\n THC, NYSE, -4.35, 2013-08-07\n NNN, NYSE, 0.00, 2013-08-02\n NOC, NYSE, 20.59, 2013-07-25\n NR, NYSE, -5.26, 2013-07-26\n NSC, NYSE, -2.67, 2013-07-24\n NUE, NYSE, -10.00, 2013-07-19\n NVR, NYSE, -18.34, 2013-07-23\n NWL, NYSE, 2.04, 2013-07-27\n NWN, NYSE, -11.11, 2013-08-08\n NYT, NYSE, 16.67, 2013-08-02\n OCR, NYSE, 4.65, 2013-07-25\n OGE, NYSE, -2.13, 2013-08-09\n OHI, NYSE, 1.64, 2013-08-01\n OI, NYSE, 2.53, 2013-07-25\n OII, NYSE, 8.33, 2013-07-25\n OKE, NYSE, -225.93, 2013-07-31\n OLN, NYSE, 3.85, 2013-07-26\n BRS, NYSE, 1.01, 2013-08-06\n OMC, NYSE, 0.00, 2013-07-19\n OMI, NYSE, 0.00, 2013-07-30\n ORB, NYSE, 17.39, 2013-07-19\n ORI, NYSE, 1750.00, 2013-07-26\n OSK, NYSE, 53.21, 2013-07-31\n OXY, NYSE, -1.86, 2013-07-31\n FCFS, NYSE, 1.79, 2013-07-18\n PBI, NYSE, 15.56, 2013-07-31\n PCG, NYSE, 9.72, 2013-08-01\n 
PCL, NYSE, 21.74, 2013-07-30\n PCP, NYSE, -0.69, 2013-07-26\n TPC, NYSE, -11.11, 2013-08-10\n PEG, NYSE, 4.35, 2013-07-31\n PEI, NYSE, 7.69, 2013-07-24\n PEP, NYSE, 10.08, 2013-07-25\n PFE, NYSE, 3.70, 2013-07-31\n PG, NYSE, 2.60, 2013-08-02\n PGR, NYSE, -2.44, 2013-07-12\n PH, NYSE, -8.72, 2013-08-07\n PHM, NYSE, -10.34, 2013-07-26\n PKD, NYSE, 0.00, 2013-08-07\n PKY, NYSE, 0.00, 2013-08-06\n PNC, NYSE, 21.34, 2013-07-18\n PNM, NYSE, 15.15, 2013-08-03\n PNR, NYSE, 2.22, 2013-07-24\n PNW, NYSE, 3.51, 2013-08-03\n POM, NYSE, -8.33, 2013-08-08\n POT, NYSE, -10.98, 2013-07-26\n PPG, NYSE, 4.70, 2013-07-19\n PPL, NYSE, 0.00, 2013-08-02'
if __name__ == "__main__":
main()
| [[[7, 19], [135, 137]], [[46, 54], [147, 155]], [[62, 67], [68, 73], [101, 106]], [[118, 122], [129640, 129644]], [[681, 689], [156, 164]]] |
# Copyright (C) 2013 eNovance SAS <licensing@enovance.com>
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import mock
from oslo_config import cfg
from oslo_utils import fixture as utils_fixture
from oslo_utils import timeutils
from oslo_utils import uuidutils
from neutron.conf.services import metering_agent as metering_agent_config
from neutron.services.metering.agents import metering_agent
from neutron.tests import base
from neutron.tests import fake_notifier
_uuid = uuidutils.generate_uuid
TENANT_ID = _uuid()
LABEL_ID = _uuid()
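# Fixture payloads shaped like what the metering plugin sends over RPC:
# ROUTERS carries an empty rule list, ROUTERS_WITH_RULE a single rule dict.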
ROUTERS = [{'status': 'ACTIVE',
'name': 'router1',
'gw_port_id': None,
'admin_state_up': True,
'tenant_id': TENANT_ID,
'_metering_labels': [{'rules': [],
'id': LABEL_ID}],
'id': _uuid()}]
ROUTERS_WITH_RULE = [{'status': 'ACTIVE',
'name': 'router1',
'gw_port_id': None,
'admin_state_up': True,
'tenant_id': TENANT_ID,
'_metering_labels': [{'rule': {},
'id': LABEL_ID}],
'id': _uuid()}]
class TestMeteringOperations(base.BaseTestCase):
def setUp(self):
super(TestMeteringOperations, self).setUp()
metering_agent_config.register_metering_agent_opts()
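        # The no-op driver keeps these tests from touching real iptables state.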
self.noop_driver = ('neutron.services.metering.drivers.noop.'
'noop_driver.NoopMeteringDriver')
cfg.CONF.set_override('driver', 'noop')
cfg.CONF.set_override('measure_interval', 0)
cfg.CONF.set_override('report_interval', 0)
self.setup_notification_driver()
metering_rpc = ('neutron.services.metering.agents.metering_agent.'
'MeteringPluginRpc._get_sync_data_metering')
self.metering_rpc_patch = mock.patch(metering_rpc, return_value=[])
self.metering_rpc_patch.start()
self.driver_patch = mock.patch(self.noop_driver, spec=True)
self.driver_patch.start()
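        # Patch FixedIntervalLoopingCall so the agent's periodic measure/report
        # loops are never actually spawned during the tests.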
loopingcall_patch = mock.patch(
'oslo_service.loopingcall.FixedIntervalLoopingCall')
loopingcall_patch.start()
self.agent = metering_agent.MeteringAgent('my agent', cfg.CONF)
self.driver = self.agent.metering_driver
def test_add_metering_label(self):
self.agent.add_metering_label(None, ROUTERS)
self.assertEqual(1, self.driver.add_metering_label.call_count)
def test_remove_metering_label(self):
self.agent.remove_metering_label(None, ROUTERS)
self.assertEqual(1, self.driver.remove_metering_label.call_count)
def test_update_metering_label_rule(self):
self.agent.update_metering_label_rules(None, ROUTERS)
self.assertEqual(1, self.driver.update_metering_label_rules.call_count)
def test_add_metering_label_rule(self):
self.agent.add_metering_label_rule(None, ROUTERS_WITH_RULE)
self.assertEqual(1, self.driver.add_metering_label_rule.call_count)
def test_remove_metering_label_rule(self):
self.agent.remove_metering_label_rule(None, ROUTERS_WITH_RULE)
self.assertEqual(1, self.driver.remove_metering_label_rule.call_count)
def test_routers_updated(self):
self.agent.routers_updated(None, ROUTERS)
self.assertEqual(1, self.driver.update_routers.call_count)
def test_get_traffic_counters(self):
self.agent._get_traffic_counters(None, ROUTERS)
self.assertEqual(1, self.driver.get_traffic_counters.call_count)
def test_sync_router_namespaces(self):
self.agent._sync_router_namespaces(None, ROUTERS)
self.assertEqual(1, self.driver.sync_router_namespaces.call_count)
def test_notification_report(self):
self.agent.routers_updated(None, ROUTERS)
self.driver.get_traffic_counters.return_value = {LABEL_ID:
{'pkts': 88,
'bytes': 444}}
self.agent._metering_loop()
self.assertNotEqual(len(fake_notifier.NOTIFICATIONS), 0)
for n in fake_notifier.NOTIFICATIONS:
if n['event_type'] == 'l3.meter':
break
self.assertEqual('l3.meter', n['event_type'])
payload = n['payload']
self.assertEqual(TENANT_ID, payload['tenant_id'])
self.assertEqual(LABEL_ID, payload['label_id'])
self.assertEqual(88, payload['pkts'])
self.assertEqual(444, payload['bytes'])
def test_notification_report_interval(self):
measure_interval = 30
report_interval = 600
now = timeutils.utcnow()
time_fixture = self.useFixture(utils_fixture.TimeFixture(now))
self.agent.routers_updated(None, ROUTERS)
self.driver.get_traffic_counters.return_value = {LABEL_ID:
{'pkts': 889,
'bytes': 4440}}
cfg.CONF.set_override('measure_interval', measure_interval)
cfg.CONF.set_override('report_interval', report_interval)
for i in range(report_interval):
self.agent._metering_loop()
count = 0
if len(fake_notifier.NOTIFICATIONS) > 1:
for n in fake_notifier.NOTIFICATIONS:
if n['event_type'] == 'l3.meter':
# skip the first notification because the time is 0
count += 1
if count > 1:
break
time_fixture.advance_time_seconds(measure_interval)
self.assertEqual('l3.meter', n['event_type'])
payload = n['payload']
self.assertEqual(TENANT_ID, payload['tenant_id'])
self.assertEqual(LABEL_ID, payload['label_id'])
self.assertLess((payload['time'] - report_interval),
measure_interval, payload)
interval = (payload['last_update'] - payload['first_update']) \
- report_interval
self.assertLess(interval, measure_interval, payload)
def test_router_deleted(self):
label_id = _uuid()
self.driver.get_traffic_counters = mock.MagicMock()
self.driver.get_traffic_counters.return_value = {label_id:
{'pkts': 44,
'bytes': 222}}
self.agent._add_metering_info = mock.MagicMock()
self.agent.routers_updated(None, ROUTERS)
self.agent.router_deleted(None, ROUTERS[0]['id'])
self.assertEqual(1, self.agent._add_metering_info.call_count)
self.assertEqual(1, self.driver.remove_router.call_count)
self.agent._add_metering_info.assert_called_with(label_id, 44, 222)
@mock.patch('time.time')
def _test_purge_metering_info(self, current_timestamp, is_empty,
mock_time):
mock_time.return_value = current_timestamp
self.agent.metering_infos = {'fake': {'last_update': 1}}
self.config(report_interval=1)
self.agent._purge_metering_info()
self.assertEqual(0 if is_empty else 1, len(self.agent.metering_infos))
self.assertEqual(1, mock_time.call_count)
def test_purge_metering_info(self):
# 1 < 2 - 1 -> False
self._test_purge_metering_info(2, False)
def test_purge_metering_info_delete(self):
        # 1 < 3 - 1 -> True
self._test_purge_metering_info(3, True)
@mock.patch('time.time')
def _test_add_metering_info(self, expected_info, current_timestamp,
mock_time):
mock_time.return_value = current_timestamp
actual_info = self.agent._add_metering_info('fake_label_id', 1, 1)
self.assertEqual(1, len(self.agent.metering_infos))
self.assertEqual(expected_info, actual_info)
self.assertEqual(expected_info,
self.agent.metering_infos['fake_label_id'])
self.assertEqual(1, mock_time.call_count)
def test_add_metering_info_create(self):
expected_info = {'bytes': 1, 'pkts': 1, 'time': 0, 'first_update': 1,
'last_update': 1}
self._test_add_metering_info(expected_info, 1)
def test_add_metering_info_update(self):
expected_info = {'bytes': 1, 'pkts': 1, 'time': 0, 'first_update': 1,
'last_update': 1}
self.agent.metering_infos = {'fake_label_id': expected_info}
expected_info.update({'bytes': 2, 'pkts': 2, 'time': 1,
'last_update': 2})
self._test_add_metering_info(expected_info, 2)
def test_metering_agent_host_value(self):
expected_host = 'my agent'
self.assertEqual(expected_host, self.agent.host)
class TestMeteringDriver(base.BaseTestCase):
def setUp(self):
super(TestMeteringDriver, self).setUp()
metering_agent_config.register_metering_agent_opts()
cfg.CONF.set_override('driver', 'noop')
self.agent = metering_agent.MeteringAgent('my agent', cfg.CONF)
self.driver = mock.Mock()
self.agent.metering_driver = self.driver
def test_add_metering_label_with_bad_driver_impl(self):
del self.driver.add_metering_label
with mock.patch.object(metering_agent, 'LOG') as log:
self.agent.add_metering_label(None, ROUTERS)
log.exception.assert_called_with(mock.ANY,
{'driver': 'noop',
'func': 'add_metering_label'})
def test_add_metering_label_runtime_error(self):
self.driver.add_metering_label.side_effect = RuntimeError
with mock.patch.object(metering_agent, 'LOG') as log:
self.agent.add_metering_label(None, ROUTERS)
log.exception.assert_called_with(mock.ANY,
{'driver': 'noop',
'func':
'add_metering_label'})
def test_init_chain(self):
with mock.patch('oslo_service.'
'periodic_task.PeriodicTasks.__init__') as init:
metering_agent.MeteringAgent('my agent', cfg.CONF)
init.assert_called_once_with(cfg.CONF)
| [
[
[
614,
618
],
[
7400,
7404
],
[
8116,
8120
],
[
2401,
2405
],
[
2512,
2516
],
[
2615,
2619
],
[
6787,
6791
],
[
7054,
7058
],
[
9740,
9744
],
[
9919,
9923
],
[
10070,
10074
],
[
10355,
10359
],
[
10506,
10510
],
[
10748,
10752
]
],
[
[
643,
646
],
[
2035,
2038
],
[
2083,
2086
],
[
2136,
2139
],
[
2789,
2792
],
[
5565,
5568
],
[
5633,
5636
],
[
9605,
9608
],
[
9708,
9711
],
[
10901,
10904
],
[
10948,
10951
]
],
[
[
670,
694
],
[
5260,
5273
]
],
[
[
718,
727
],
[
5202,
5211
]
],
[
[
751,
760
],
[
976,
985
]
],
[
[
796,
835
],
[
1841,
1862
],
[
9543,
9564
]
],
[
[
881,
895
],
[
2748,
2762
],
[
9667,
9681
],
[
9937,
9951
],
[
10373,
10387
],
[
10860,
10874
]
],
[
[
922,
926
],
[
1739,
1743
],
[
9446,
9450
]
],
[
[
953,
966
],
[
4635,
4648
],
[
4685,
4698
],
[
5815,
5828
],
[
5874,
5887
]
],
[
[
968,
973
],
[
1013,
1018
],
[
1032,
1037
],
[
1324,
1329
],
[
1698,
1703
],
[
6736,
6741
]
],
[
[
1001,
1010
],
[
1196,
1205
],
[
1541,
1550
],
[
4894,
4903
],
[
6317,
6326
]
],
[
[
1021,
1029
],
[
1294,
1302
],
[
1658,
1666
],
[
4413,
4421
],
[
4952,
4960
],
[
5401,
5409
],
[
6375,
6383
]
],
[
[
1040,
1047
],
[
2932,
2939
],
[
3102,
3109
],
[
3286,
3293
],
[
3840,
3847
],
[
4005,
4012
],
[
4180,
4187
],
[
4346,
4353
],
[
5334,
5341
],
[
7113,
7120
],
[
7162,
7169
],
[
10016,
10023
],
[
10452,
10459
]
],
[
[
1335,
1352
],
[
3469,
3486
],
[
3664,
3681
]
],
[
[
1716,
1738
],
[
1795,
1817
]
],
[
[
9427,
9445
],
[
9501,
9519
]
]
] |
from PIL import ImageChops, Image as PILImage
from http.client import HTTPConnection
from time import sleep
from traceback import format_stack, print_exc
def Tint(image, color):
return ImageChops.blend(image, PILImage.new('RGB', image.size, color), 0.36)
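# Example usage (a sketch; the file path is a placeholder):
# img = PILImage.open("photo.jpg").convert("RGB")
# red_tinted = Tint(img, (255, 0, 0))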
def GetStatusCode(host, path="/"):
""" This function retreives the status code of a website by requesting
HEAD data from the host. This means that it only requests the headers.
If the host cannot be reached or something else goes wrong, it returns
None instead.
"""
try:
conn = HTTPConnection(host)
conn.request("HEAD", path)
return conn.getresponse().status
except Exception:
return None
def WaitOK(host, path="/"):
while GetStatusCode(host, path) != 200:
sleep(5)
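# Example (hypothetical host): block until the server answers HTTP 200,
# polling every five seconds.
# WaitOK("example.com")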
| [
[
[
16,
26
],
[
191,
201
]
],
[
[
28,
45
],
[
215,
223
]
],
[
[
70,
84
],
[
584,
598
]
],
[
[
102,
107
],
[
808,
813
]
],
[
[
130,
142
]
],
[
[
144,
153
]
],
[
[
160,
164
]
],
[
[
266,
279
],
[
766,
779
]
],
[
[
732,
738
]
]
] |
# Copyright 2016 Quantopian, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from abc import (
ABCMeta,
abstractmethod,
abstractproperty,
)
from numpy import concatenate
from lru import LRU
from pandas import isnull
from pandas.tslib import normalize_date
from toolz import sliding_window
from six import with_metaclass
from zipline.assets import Equity, Future
from zipline.assets.continuous_futures import ContinuousFuture
from zipline.lib._int64window import AdjustedArrayWindow as Int64Window
from zipline.lib._float64window import AdjustedArrayWindow as Float64Window
from zipline.lib.adjustment import Float64Multiply, Float64Add
from zipline.utils.cache import ExpiringCache
from zipline.utils.math_utils import number_of_decimal_places
from zipline.utils.memoize import lazyval
from zipline.utils.numpy_utils import float64_dtype
from zipline.utils.pandas_utils import find_in_sorted_index
# Default number of decimal places used for rounding asset prices.
DEFAULT_ASSET_PRICE_DECIMALS = 3
class HistoryCompatibleUSEquityAdjustmentReader(object):
def __init__(self, adjustment_reader):
self._adjustments_reader = adjustment_reader
def load_adjustments(self, columns, dts, assets):
"""
Returns
-------
adjustments : list[dict[int -> Adjustment]]
A list, where each element corresponds to the `columns`, of
mappings from index to adjustment objects to apply at that index.
"""
out = [None] * len(columns)
for i, column in enumerate(columns):
adjs = {}
for asset in assets:
adjs.update(self._get_adjustments_in_range(
asset, dts, column))
out[i] = adjs
return out
def _get_adjustments_in_range(self, asset, dts, field):
"""
Get the Float64Multiply objects to pass to an AdjustedArrayWindow.
For the use of AdjustedArrayWindow in the loader, which looks back
from current simulation time back to a window of data the dictionary is
structured with:
- the key into the dictionary for adjustments is the location of the
day from which the window is being viewed.
- the start of all multiply objects is always 0 (in each window all
adjustments are overlapping)
- the end of the multiply object is the location before the calendar
location of the adjustment action, making all days before the event
adjusted.
Parameters
----------
asset : Asset
The assets for which to get adjustments.
dts : iterable of datetime64-like
The dts for which adjustment data is needed.
field : str
OHLCV field for which to get the adjustments.
Returns
-------
out : dict[loc -> Float64Multiply]
The adjustments as a dict of loc -> Float64Multiply
"""
sid = int(asset)
start = normalize_date(dts[0])
end = normalize_date(dts[-1])
adjs = {}
if field != 'volume':
mergers = self._adjustments_reader.get_adjustments_for_sid(
'mergers', sid)
for m in mergers:
dt = m[0]
if start < dt <= end:
end_loc = dts.searchsorted(dt)
adj_loc = end_loc
mult = Float64Multiply(0,
end_loc - 1,
0,
0,
m[1])
try:
adjs[adj_loc].append(mult)
except KeyError:
adjs[adj_loc] = [mult]
divs = self._adjustments_reader.get_adjustments_for_sid(
'dividends', sid)
for d in divs:
dt = d[0]
if start < dt <= end:
end_loc = dts.searchsorted(dt)
adj_loc = end_loc
mult = Float64Multiply(0,
end_loc - 1,
0,
0,
d[1])
try:
adjs[adj_loc].append(mult)
except KeyError:
adjs[adj_loc] = [mult]
splits = self._adjustments_reader.get_adjustments_for_sid(
'splits', sid)
for s in splits:
dt = s[0]
if start < dt <= end:
if field == 'volume':
ratio = 1.0 / s[1]
else:
ratio = s[1]
end_loc = dts.searchsorted(dt)
adj_loc = end_loc
mult = Float64Multiply(0,
end_loc - 1,
0,
0,
ratio)
try:
adjs[adj_loc].append(mult)
except KeyError:
adjs[adj_loc] = [mult]
return adjs
class ContinuousFutureAdjustmentReader(object):
"""
Calculates adjustments for continuous futures, based on the
close and open of the contracts on the either side of each roll.
"""
def __init__(self,
trading_calendar,
asset_finder,
bar_reader,
roll_finders,
frequency):
self._trading_calendar = trading_calendar
self._asset_finder = asset_finder
self._bar_reader = bar_reader
self._roll_finders = roll_finders
self._frequency = frequency
def load_adjustments(self, columns, dts, assets):
"""
Returns
-------
adjustments : list[dict[int -> Adjustment]]
A list, where each element corresponds to the `columns`, of
mappings from index to adjustment objects to apply at that index.
"""
out = [None] * len(columns)
for i, column in enumerate(columns):
adjs = {}
for asset in assets:
adjs.update(self._get_adjustments_in_range(
asset, dts, column))
out[i] = adjs
return out
def _make_adjustment(self,
adjustment_type,
front_close,
back_close,
end_loc):
adj_base = back_close - front_close
if adjustment_type == 'mul':
adj_value = 1.0 + adj_base / front_close
adj_class = Float64Multiply
elif adjustment_type == 'add':
adj_value = adj_base
adj_class = Float64Add
return adj_class(0,
end_loc,
0,
0,
adj_value)
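    # Worked example (a sketch): front_close=10.0, back_close=11.0 gives
    # adj_base=1.0, so 'mul' yields a Float64Multiply with value 1.1 while
    # 'add' yields a Float64Add with value 1.0.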
def _get_adjustments_in_range(self, cf, dts, field):
if field == 'volume' or field == 'sid':
return {}
if cf.adjustment is None:
return {}
rf = self._roll_finders[cf.roll_style]
partitions = []
rolls = rf.get_rolls(cf.root_symbol, dts[0], dts[-1],
cf.offset)
tc = self._trading_calendar
adjs = {}
for front, back in sliding_window(2, rolls):
front_sid, roll_dt = front
back_sid = back[0]
dt = tc.previous_session_label(roll_dt)
if self._frequency == 'minute':
dt = tc.open_and_close_for_session(dt)[1]
roll_dt = tc.open_and_close_for_session(roll_dt)[0]
partitions.append((front_sid,
back_sid,
dt,
roll_dt))
for partition in partitions:
front_sid, back_sid, dt, roll_dt = partition
last_front_dt = self._bar_reader.get_last_traded_dt(
self._asset_finder.retrieve_asset(front_sid), dt)
last_back_dt = self._bar_reader.get_last_traded_dt(
self._asset_finder.retrieve_asset(back_sid), dt)
if isnull(last_front_dt) or isnull(last_back_dt):
continue
front_close = self._bar_reader.get_value(
front_sid, last_front_dt, 'close')
back_close = self._bar_reader.get_value(
back_sid, last_back_dt, 'close')
adj_loc = dts.searchsorted(roll_dt)
end_loc = adj_loc - 1
adj = self._make_adjustment(cf.adjustment,
front_close,
back_close,
end_loc)
try:
adjs[adj_loc].append(adj)
except KeyError:
adjs[adj_loc] = [adj]
return adjs
class SlidingWindow(object):
"""
Wrapper around an AdjustedArrayWindow which supports monotonically
increasing (by datetime) requests for a sized window of data.
Parameters
----------
window : AdjustedArrayWindow
Window of pricing data with prefetched values beyond the current
simulation dt.
cal_start : int
Index in the overall calendar at which the window starts.
"""
def __init__(self, window, size, cal_start, offset):
self.window = window
self.cal_start = cal_start
self.current = next(window)
self.offset = offset
self.most_recent_ix = self.cal_start + size
def get(self, end_ix):
"""
Returns
-------
out : A np.ndarray of the equity pricing up to end_ix after adjustments
and rounding have been applied.
"""
if self.most_recent_ix == end_ix:
return self.current
target = end_ix - self.cal_start - self.offset + 1
self.current = self.window.seek(target)
self.most_recent_ix = end_ix
return self.current
class HistoryLoader(with_metaclass(ABCMeta)):
"""
Loader for sliding history windows, with support for adjustments.
Parameters
----------
trading_calendar: TradingCalendar
Contains the grouping logic needed to assign minutes to periods.
reader : DailyBarReader, MinuteBarReader
Reader for pricing bars.
adjustment_reader : SQLiteAdjustmentReader
Reader for adjustment data.
"""
FIELDS = ('open', 'high', 'low', 'close', 'volume', 'sid')
def __init__(self, trading_calendar, reader, equity_adjustment_reader,
asset_finder,
roll_finders=None,
sid_cache_size=1000,
prefetch_length=0):
self.trading_calendar = trading_calendar
self._asset_finder = asset_finder
self._reader = reader
self._adjustment_readers = {}
if equity_adjustment_reader is not None:
self._adjustment_readers[Equity] = \
HistoryCompatibleUSEquityAdjustmentReader(
equity_adjustment_reader)
if roll_finders:
self._adjustment_readers[ContinuousFuture] =\
ContinuousFutureAdjustmentReader(trading_calendar,
asset_finder,
reader,
roll_finders,
self._frequency)
self._window_blocks = {
field: ExpiringCache(LRU(sid_cache_size))
for field in self.FIELDS
}
self._prefetch_length = prefetch_length
@abstractproperty
def _frequency(self):
pass
@abstractproperty
def _calendar(self):
pass
@abstractmethod
def _array(self, start, end, assets, field):
pass
def _decimal_places_for_asset(self, asset, reference_date):
if isinstance(asset, Future) and asset.tick_size:
return number_of_decimal_places(asset.tick_size)
elif isinstance(asset, ContinuousFuture):
# Tick size should be the same for all contracts of a continuous
# future, so arbitrarily get the contract with next upcoming auto
# close date.
oc = self._asset_finder.get_ordered_contracts(asset.root_symbol)
contract_sid = oc.contract_before_auto_close(reference_date.value)
if contract_sid is not None:
contract = self._asset_finder.retrieve_asset(contract_sid)
if contract.tick_size:
return number_of_decimal_places(contract.tick_size)
return DEFAULT_ASSET_PRICE_DECIMALS
def _ensure_sliding_windows(self, assets, dts, field,
is_perspective_after):
"""
Ensure that there is a Float64Multiply window for each asset that can
provide data for the given parameters.
If the corresponding window for the (assets, len(dts), field) does not
exist, then create a new one.
If a corresponding window does exist for (assets, len(dts), field), but
can not provide data for the current dts range, then create a new
one and replace the expired window.
Parameters
----------
assets : iterable of Assets
The assets in the window
dts : iterable of datetime64-like
The datetimes for which to fetch data.
Makes an assumption that all dts are present and contiguous,
in the calendar.
field : str
The OHLCV field for which to retrieve data.
is_perspective_after : bool
see: `PricingHistoryLoader.history`
Returns
-------
out : list of Float64Window with sufficient data so that each asset's
window can provide `get` for the index corresponding with the last
value in `dts`
"""
end = dts[-1]
size = len(dts)
asset_windows = {}
needed_assets = []
cal = self._calendar
assets = self._asset_finder.retrieve_all(assets)
end_ix = find_in_sorted_index(cal, end)
for asset in assets:
try:
window = self._window_blocks[field].get(
(asset, size, is_perspective_after), end)
except KeyError:
needed_assets.append(asset)
else:
if end_ix < window.most_recent_ix:
# Window needs reset. Requested end index occurs before the
# end index from the previous history call for this window.
# Grab new window instead of rewinding adjustments.
needed_assets.append(asset)
else:
asset_windows[asset] = window
if needed_assets:
offset = 0
start_ix = find_in_sorted_index(cal, dts[0])
prefetch_end_ix = min(end_ix + self._prefetch_length, len(cal) - 1)
prefetch_end = cal[prefetch_end_ix]
prefetch_dts = cal[start_ix:prefetch_end_ix + 1]
if is_perspective_after:
adj_end_ix = min(prefetch_end_ix + 1, len(cal) - 1)
adj_dts = cal[start_ix:adj_end_ix + 1]
else:
adj_dts = prefetch_dts
prefetch_len = len(prefetch_dts)
array = self._array(prefetch_dts, needed_assets, field)
if field == 'sid':
window_type = Int64Window
else:
window_type = Float64Window
view_kwargs = {}
if field == 'volume':
array = array.astype(float64_dtype)
for i, asset in enumerate(needed_assets):
adj_reader = None
try:
adj_reader = self._adjustment_readers[type(asset)]
except KeyError:
adj_reader = None
if adj_reader is not None:
adjs = adj_reader.load_adjustments(
[field], adj_dts, [asset])[0]
else:
adjs = {}
window = window_type(
array[:, i].reshape(prefetch_len, 1),
view_kwargs,
adjs,
offset,
size,
int(is_perspective_after),
self._decimal_places_for_asset(asset, dts[-1]),
)
sliding_window = SlidingWindow(window, size, start_ix, offset)
asset_windows[asset] = sliding_window
self._window_blocks[field].set(
(asset, size, is_perspective_after),
sliding_window,
prefetch_end)
return [asset_windows[asset] for asset in assets]
def history(self, assets, dts, field, is_perspective_after):
"""
A window of pricing data with adjustments applied assuming that the
end of the window is the day before the current simulation time.
Parameters
----------
assets : iterable of Assets
The assets in the window.
dts : iterable of datetime64-like
The datetimes for which to fetch data.
Makes an assumption that all dts are present and contiguous,
in the calendar.
field : str
The OHLCV field for which to retrieve data.
is_perspective_after : bool
True, if the window is being viewed immediately after the last dt
in the sliding window.
False, if the window is viewed on the last dt.
This flag is used for handling the case where the last dt in the
requested window immediately precedes a corporate action, e.g.:
- is_perspective_after is True
When the viewpoint is after the last dt in the window, as when a
daily history window is accessed from a simulation that uses a
minute data frequency, the history call to this loader will not
include the current simulation dt. At that point in time, the raw
data for the last day in the window will require adjustment, so the
most recent adjustment with respect to the simulation time is
applied to the last dt in the requested window.
An example equity which has a 0.5 split ratio dated for 05-27,
with the dts for a history call of 5 bars with a '1d' frequency at
05-27 9:31. Simulation frequency is 'minute'.
(In this case this function is called with 4 daily dts, and the
calling function is responsible for stitching back on the
'current' dt)
| | | | | last dt | <-- viewer is here |
| | 05-23 | 05-24 | 05-25 | 05-26 | 05-27 9:31 |
| raw | 10.10 | 10.20 | 10.30 | 10.40 | |
            | adj | 5.05 | 5.10 | 5.15 | 5.20 | |
The adjustment is applied to the last dt, 05-26, and all previous
dts.
- is_perspective_after is False, daily
When the viewpoint is the same point in time as the last dt in the
window, as when a daily history window is accessed from a
simulation that uses a daily data frequency, the history call will
include the current dt. At that point in time, the raw data for the
last day in the window will be post-adjustment, so no adjustment
is applied to the last dt.
An example equity which has a 0.5 split ratio dated for 05-27,
with the dts for a history call of 5 bars with a '1d' frequency at
05-27 0:00. Simulation frequency is 'daily'.
| | | | | | <-- viewer is here |
| | | | | | last dt |
| | 05-23 | 05-24 | 05-25 | 05-26 | 05-27 |
| raw | 10.10 | 10.20 | 10.30 | 10.40 | 5.25 |
| adj | 5.05 | 5.10 | 5.15 | 5.20 | 5.25 |
Adjustments are applied 05-23 through 05-26 but not to the last dt,
05-27
Returns
-------
out : np.ndarray with shape(len(days between start, end), len(assets))
"""
block = self._ensure_sliding_windows(assets,
dts,
field,
is_perspective_after)
end_ix = self._calendar.searchsorted(dts[-1])
return concatenate(
[window.get(end_ix) for window in block],
axis=1,
)
class DailyHistoryLoader(HistoryLoader):
@property
def _frequency(self):
return 'daily'
@property
def _calendar(self):
return self._reader.sessions
def _array(self, dts, assets, field):
return self._reader.load_raw_arrays(
[field],
dts[0],
dts[-1],
assets,
)[0]
class MinuteHistoryLoader(HistoryLoader):
@property
def _frequency(self):
return 'minute'
@lazyval
def _calendar(self):
mm = self.trading_calendar.all_minutes
start = mm.searchsorted(self._reader.first_trading_day)
end = mm.searchsorted(self._reader.last_available_dt, side='right')
return mm[start:end]
def _array(self, dts, assets, field):
return self._reader.load_raw_arrays(
[field],
dts[0],
dts[-1],
assets,
)[0]
| [
[
[
603,
610
],
[
10726,
10733
]
],
[
[
616,
630
],
[
12478,
12492
]
],
[
[
636,
652
],
[
12355,
12371
],
[
12417,
12433
]
],
[
[
675,
686
],
[
21502,
21513
]
],
[
[
703,
706
],
[
12233,
12236
]
],
[
[
726,
732
],
[
8849,
8855
],
[
8874,
8880
]
],
[
[
758,
772
],
[
3500,
3514
],
[
3537,
3551
]
],
[
[
791,
805
],
[
8003,
8017
]
],
[
[
823,
837
],
[
10711,
10725
]
],
[
[
866,
872
],
[
11652,
11658
]
],
[
[
874,
880
],
[
12649,
12655
]
],
[
[
927,
943
],
[
11831,
11847
],
[
12770,
12786
]
],
[
[
981,
1015
],
[
16235,
16246
]
],
[
[
1055,
1091
],
[
16295,
16308
]
],
[
[
1127,
1142
],
[
3923,
3938
],
[
4609,
4624
],
[
5396,
5411
],
[
7284,
7299
]
],
[
[
1144,
1154
],
[
7396,
7406
]
],
[
[
1187,
1200
],
[
12219,
12232
]
],
[
[
1238,
1262
],
[
12697,
12721
],
[
13308,
13332
]
],
[
[
1297,
1304
],
[
22081,
22088
]
],
[
[
1343,
1356
],
[
16410,
16423
]
],
[
[
1396,
1416
],
[
14855,
14875
],
[
15619,
15639
]
],
[
[
1485,
1513
],
[
13368,
13396
]
],
[
[
1526,
1567
],
[
11680,
11721
]
],
[
[
5769,
5801
],
[
11868,
11900
]
],
[
[
9573,
9586
],
[
17257,
17270
]
],
[
[
10697,
10710
],
[
21626,
21639
],
[
21994,
22007
]
],
[
[
21607,
21625
]
],
[
[
21974,
21993
]
]
] |
import requests
API_URL = 'https://secure.techfortesco.com/tescolabsapi/restservice.aspx'
class TescoLabsApi(object):
def __init__(self, url, developerkey, applicationkey):
self.url = url
self.developerkey = developerkey
self.applicationkey = applicationkey
res = requests.get(self.url,
params={'command': 'login',
'email': '', 'password': '',
'developerkey': self.developerkey,
'applicationkey': self.applicationkey,
})
self.sessionkey = res.json()['SessionKey']
def _command(self, command, **kwargs):
params = kwargs
params.update({'command': command, 'sessionkey': self.sessionkey})
res = requests.get(self.url, params=params)
return res
def listproductcategories(self):
return self._command('listproductcategories')
def listproductsincategory(self, category):
return self._command('listproductsincategory', category=category)
def listproductoffers(self):
return self._command('listproductoffers')
def productsearch(self, searchtext, page=1, extendedinfo=False):
return self._command('productsearch', searchtext=searchtext,
page=page, extendedinfo=extendedinfo)
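# Example usage (a sketch; the keys are placeholders, not real credentials):
# api = TescoLabsApi(API_URL, developerkey='YOUR_DEV_KEY',
#                    applicationkey='YOUR_APP_KEY')
# print(api.productsearch('milk').json())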
| [
[
[
7,
15
],
[
303,
311
],
[
835,
843
]
],
[
[
17,
24
]
],
[
[
99,
111
]
]
] |
# Generated by Django 3.2.12 on 2022-03-21 09:04
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('catalog', '0002_tag'),
]
operations = [
migrations.AddField(
model_name='item',
name='tags',
field=models.ManyToManyField(related_name='items', to='catalog.Tag', verbose_name='Теги'),
),
]
| [
[
[
72,
82
],
[
109,
119
],
[
221,
231
]
],
[
[
84,
90
],
[
316,
322
]
],
[
[
99,
108
]
]
] |
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Base task runner"""
import os
import subprocess
import threading
from pwd import getpwnam
from tempfile import NamedTemporaryFile
from typing import Optional, Union
from airflow.configuration import conf
from airflow.exceptions import AirflowConfigException
from airflow.models.taskinstance import load_error_file
from airflow.utils.configuration import tmp_configuration_copy
from airflow.utils.log.logging_mixin import LoggingMixin
from airflow.utils.net import get_hostname
from airflow.utils.platform import getuser
PYTHONPATH_VAR = 'PYTHONPATH'
class BaseTaskRunner(LoggingMixin):
"""
Runs Airflow task instances by invoking the `airflow tasks run` command with raw
mode enabled in a subprocess.
:param local_task_job: The local task job associated with running the
associated task instance.
:type local_task_job: airflow.jobs.local_task_job.LocalTaskJob
"""
def __init__(self, local_task_job):
# Pass task instance context into log handlers to setup the logger.
super().__init__(local_task_job.task_instance)
self._task_instance = local_task_job.task_instance
popen_prepend = []
if self._task_instance.run_as_user:
self.run_as_user = self._task_instance.run_as_user
else:
try:
self.run_as_user = conf.get('core', 'default_impersonation')
except AirflowConfigException:
self.run_as_user = None
# Add sudo commands to change user if we need to. Needed to handle SubDagOperator
# case using a SequentialExecutor.
self.log.debug("Planning to run as the %s user", self.run_as_user)
if self.run_as_user and (self.run_as_user != getuser()):
# We want to include any environment variables now, as we won't
# want to have to specify them in the sudo call - they would show
# up in `ps` that way! And run commands now, as the other user
# might not be able to run the cmds to get credentials
cfg_path = tmp_configuration_copy(chmod=0o600)
# Give ownership of file to user; only they can read and write
subprocess.call(['sudo', 'chown', self.run_as_user, cfg_path], close_fds=True)
# propagate PYTHONPATH environment variable
pythonpath_value = os.environ.get(PYTHONPATH_VAR, '')
popen_prepend = ['sudo', '-E', '-H', '-u', self.run_as_user]
if pythonpath_value:
popen_prepend.append(f'{PYTHONPATH_VAR}={pythonpath_value}')
else:
# Always provide a copy of the configuration file settings. Since
# we are running as the same user, and can pass through environment
# variables then we don't need to include those in the config copy
# - the runner can read/execute those values as it needs
cfg_path = tmp_configuration_copy(chmod=0o600)
self._error_file = NamedTemporaryFile(delete=True)
if self.run_as_user:
try:
os.chown(self._error_file.name, getpwnam(self.run_as_user).pw_uid, -1)
except KeyError:
# No user `run_as_user` found
pass
self._cfg_path = cfg_path
self._command = (
popen_prepend
+ self._task_instance.command_as_list(
raw=True,
pickle_id=local_task_job.pickle_id,
mark_success=local_task_job.mark_success,
job_id=local_task_job.id,
pool=local_task_job.pool,
cfg_path=cfg_path,
)
+ ["--error-file", self._error_file.name]
)
self.process = None
def deserialize_run_error(self) -> Optional[Union[str, Exception]]:
"""Return task runtime error if its written to provided error file."""
return load_error_file(self._error_file)
def _read_task_logs(self, stream):
while True:
line = stream.readline()
if isinstance(line, bytes):
line = line.decode('utf-8')
if not line:
break
self.log.info(
'Job %s: Subtask %s %s',
self._task_instance.job_id,
self._task_instance.task_id,
line.rstrip('\n'),
)
def run_command(self, run_with=None):
"""
Run the task command.
:param run_with: list of tokens to run the task command with e.g. ``['bash', '-c']``
:type run_with: list
:return: the process that was run
:rtype: subprocess.Popen
"""
run_with = run_with or []
full_cmd = run_with + self._command
self.log.info("Running on host: %s", get_hostname())
self.log.info('Running: %s', full_cmd)
proc = subprocess.Popen(
full_cmd,
stdout=subprocess.PIPE,
stderr=subprocess.STDOUT,
universal_newlines=True,
close_fds=True,
env=os.environ.copy(),
preexec_fn=os.setsid,
)
# Start daemon thread to read subprocess logging output
log_reader = threading.Thread(
target=self._read_task_logs,
args=(proc.stdout,),
)
log_reader.daemon = True
log_reader.start()
return proc
def start(self):
"""Start running the task instance in a subprocess."""
raise NotImplementedError()
def return_code(self) -> Optional[int]:
"""
:return: The return code associated with running the task instance or
None if the task is not yet done.
:rtype: int
"""
raise NotImplementedError()
def terminate(self) -> None:
"""Force kill the running task instance."""
raise NotImplementedError()
def on_finish(self) -> None:
"""A callback that should be called when this is done running."""
if self._cfg_path and os.path.isfile(self._cfg_path):
if self.run_as_user:
subprocess.call(['sudo', 'rm', self._cfg_path], close_fds=True)
else:
os.remove(self._cfg_path)
try:
self._error_file.close()
except FileNotFoundError:
# The subprocess has deleted this file before we do
# so we ignore
pass
| [
[
[
817,
819
],
[
3132,
3134
],
[
3853,
3855
],
[
5847,
5849
],
[
5889,
5891
],
[
6808,
6810
],
[
6987,
6989
]
],
[
[
827,
837
],
[
2965,
2975
],
[
5652,
5662
],
[
5711,
5721
],
[
5747,
5757
],
[
6889,
6899
]
],
[
[
845,
854
],
[
5996,
6005
]
],
[
[
871,
879
],
[
3885,
3893
]
],
[
[
901,
919
],
[
3759,
3777
]
],
[
[
939,
947
],
[
4559,
4567
],
[
6329,
6337
]
],
[
[
949,
954
],
[
4568,
4573
]
],
[
[
990,
994
],
[
2123,
2127
]
],
[
[
1026,
1048
],
[
2184,
2206
]
],
[
[
1089,
1104
],
[
4686,
4701
]
],
[
[
1145,
1167
],
[
2841,
2863
],
[
3695,
3717
]
],
[
[
1212,
1224
],
[
1365,
1377
]
],
[
[
1255,
1267
],
[
5573,
5585
]
],
[
[
1303,
1310
],
[
2510,
2517
]
],
[
[
1312,
1326
],
[
3147,
3161
],
[
3314,
3328
]
],
[
[
1350,
1364
]
]
] |
# =======================================================================================================================================
# VNU-HCM, University of Science
# Department of Computer Science, Faculty of Information Technology
# Authors: Nhut-Nam Le (Tich Phan Suy Rong)
# © 2020
import unittest
"""
Given two strings, return True if either of the strings appears at the very end of the other string, ignoring upper/lower case differences (in other words, the computation should not be "case sensitive"). Note: s.lower() returns the lowercase version of a string.
end_other('Hiabc', 'abc') → True
end_other('AbC', 'HiaBc') → True
end_other('abc', 'abXabc') → True
"""
def end_other(a, b):
a = a.lower()
b = b.lower()
return (b[(len(b) - len(a)):] == a, a[(len(a) - len(b)):] == b)[len(a) >= len(b)]
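# A more readable equivalent of the tuple-indexing trick above (a sketch):
# def end_other_readable(a, b):
#     a, b = a.lower(), b.lower()
#     return a.endswith(b) or b.endswith(a)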
class TestEndOther(unittest.TestCase):
def test_case_00(self):
self.assertEqual(end_other('Hiabc', 'abc'), True)
def test_case_01(self):
self.assertEqual(end_other('AbC', 'HiaBc'), True)
def test_case_02(self):
self.assertEqual(end_other('abc', 'abXabc'), True)
def test_case_03(self):
self.assertEqual(end_other('Hiabc', 'abcd'), False)
def test_case_04(self):
self.assertEqual(end_other('Hiabc', 'bc'), True)
def test_case_05(self):
self.assertEqual(end_other('Hiabcx', 'bc'), False)
def test_case_06(self):
self.assertEqual(end_other('abc', 'abc'), True)
def test_case_07(self):
self.assertEqual(end_other('xyz', '12xyz'), True)
def test_case_08(self):
self.assertEqual(end_other('yz', '12xz'), False)
def test_case_09(self):
self.assertEqual(end_other('Z', '12xz'), True)
def test_case_10(self):
self.assertEqual(end_other('12', '12'), True)
def test_case_11(self):
self.assertEqual(end_other('abcXYZ', 'abcDEF'), False)
def test_case_12(self):
self.assertEqual(end_other('ab', 'ab12'), False)
def test_case_13(self):
self.assertEqual(end_other('ab', '12ab'), True)
if __name__ == "__main__":
unittest.main()
| [
[
[
298,
306
],
[
848,
856
],
[
2114,
2122
]
],
[
[
688,
697
],
[
921,
930
],
[
1008,
1017
],
[
1095,
1104
],
[
1183,
1192
],
[
1272,
1281
],
[
1358,
1367
],
[
1446,
1455
],
[
1531,
1540
],
[
1618,
1627
],
[
1704,
1713
],
[
1788,
1797
],
[
1871,
1880
],
[
1963,
1972
],
[
2049,
2058
]
],
[
[
835,
847
]
]
] |
# ----------------------------------------------------------------------------
# pyglet
# Copyright (c) 2006-2008 Alex Holkner
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in
# the documentation and/or other materials provided with the
# distribution.
# * Neither the name of pyglet nor the names of its
# contributors may be used to endorse or promote products
# derived from this software without specific prior written
# permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
# COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
# ----------------------------------------------------------------------------
"""Document formats.
:since: pyglet 1.1
"""
| [] |
import os
from dotenv import load_dotenv
load_dotenv()
class Config:
SECRET_KEY = os.environ.get('SECRET_KEY')
SQLALCHEMY_DATABASE_URI = 'postgresql+psycopg2://fidel:fidel@localhost/blog'
UPLOADED_PHOTOS_DEST = 'app/static/photos'
QUOTES_URL = 'http://quotes.stormconsultancy.co.uk/random.json'
MAIL_SERVER = 'smtp.googlemail.com'
MAIL_PORT = 587
MAIL_USE_TLS = True
MAIL_USERNAME = os.environ.get("MAIL_USERNAME")
MAIL_PASSWORD = os.environ.get("MAIL_PASSWORD")
class ProdConfig(Config):
SQLALCHEMY_DATABASE_URI =os.environ.get('DATABASE_URL')
class DevConfig(Config):
#SQLALCHEMY_DATABASE_URI = 'postgresql+psycopg2://fidel:fidel@localhost/blog'
DEBUG = True
config_options = {
'development':DevConfig,
'production':ProdConfig
} | [
[
[
7,
9
],
[
87,
89
],
[
417,
419
],
[
469,
471
],
[
561,
563
]
],
[
[
29,
40
],
[
41,
52
]
],
[
[
62,
68
],
[
523,
529
],
[
614,
620
]
],
[
[
512,
522
],
[
781,
791
]
],
[
[
604,
613
],
[
757,
766
]
],
[
[
724,
738
]
]
] |
_TF_INCLUDE_PATH = "TF_INCLUDE_PATH"
_TF_LIB_PATH = "TF_LIB_PATH"
def _get_env_var_with_default(repository_ctx, env_var):
"""Returns evironment variable value."""
if env_var in repository_ctx.os.environ:
value = repository_ctx.os.environ[env_var]
return value
else:
fail("Environment variable '%s' was not set." % env_var)
def _get_tf_conf(repository_ctx):
"""Returns structure containing all required information about tensorflow
configuration on host platform.
"""
include_path = _get_env_var_with_default(repository_ctx, _TF_INCLUDE_PATH)
lib_path = _get_env_var_with_default(repository_ctx, _TF_LIB_PATH)
return struct(
include_path = include_path,
lib_path = lib_path
)
def _tensorflow_autoconf_impl(repository_ctx):
"""Implementation of tensorflow autoconf. rule."""
tf_conf = _get_tf_conf(repository_ctx)
print("Using %s=%s" % (_TF_INCLUDE_PATH, tf_conf.include_path))
print("Using %s=%s" % (_TF_LIB_PATH, tf_conf.lib_path))
repository_ctx.symlink(tf_conf.include_path, 'include')
repository_ctx.symlink(tf_conf.lib_path, 'lib')
repository_ctx.template('BUILD', Label("//third_party/tensorflow:tensorflow.BUILD"))
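# Typical WORKSPACE usage (a sketch; the load path and repository name are
# assumptions):
# load("//third_party/tensorflow:tensorflow_configure.bzl", "tensorflow_configure")
# tensorflow_configure(name = "local_config_tensorflow")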
tensorflow_configure = repository_rule(
implementation = _tensorflow_autoconf_impl,
environ = [
_TF_INCLUDE_PATH,
_TF_LIB_PATH
]
) | [
[
[
0,
16
],
[
1288,
1304
],
[
555,
571
],
[
887,
903
]
],
[
[
37,
49
],
[
1310,
1322
],
[
628,
640
],
[
953,
965
]
],
[
[
71,
96
],
[
513,
538
],
[
586,
611
]
],
[
[
347,
359
],
[
833,
845
]
],
[
[
725,
750
],
[
1243,
1268
]
],
[
[
1184,
1204
]
]
] |
import yaml
from os import path
from netmiko import ConnectHandler
home_dir = path.expanduser("~")
filename = path.join(home_dir, ".netmiko.yml")
with open(filename) as f:
yaml_out = yaml.safe_load(f)
cisco3 = yaml_out["cisco3"]
net_connect = ConnectHandler(**cisco3)
print()
print(net_connect.find_prompt())
print()
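# The script assumes ~/.netmiko.yml contains an entry like this (a sketch;
# all values are placeholders):
# cisco3:
#   device_type: cisco_ios
#   host: 10.10.10.3
#   username: admin
#   password: secret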
| [
[
[
7,
11
],
[
190,
194
]
],
[
[
27,
31
],
[
80,
84
],
[
112,
116
]
],
[
[
52,
66
],
[
251,
265
]
],
[
[
69,
77
],
[
122,
130
]
],
[
[
101,
109
],
[
159,
167
]
],
[
[
172,
173
],
[
205,
206
]
],
[
[
179,
187
],
[
218,
226
]
],
[
[
209,
215
],
[
268,
274
]
],
[
[
237,
248
],
[
291,
302
]
]
] |
# -*- coding: iso-8859-1 -*-
| [] |
# Copyright 1999-2017 Tencent Ltd.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from QcloudApi.modules import base
class Trade(base.Base):
requestHost = 'trade.api.qcloud.com'
| [
[
[
611,
615
],
[
630,
634
]
],
[
[
624,
629
]
]
] |
"""Support for ICS Calendar."""
import copy
import logging
from datetime import datetime, timedelta
from urllib.error import ContentTooShortError, HTTPError, URLError
from urllib.request import (
HTTPPasswordMgrWithDefaultRealm,
HTTPBasicAuthHandler,
HTTPDigestAuthHandler,
build_opener,
install_opener,
urlopen,
)
import voluptuous as vol
from homeassistant.components.calendar import (
ENTITY_ID_FORMAT,
PLATFORM_SCHEMA,
CalendarEventDevice,
calculate_offset,
is_offset_reached,
)
from homeassistant.const import CONF_NAME, CONF_PASSWORD, CONF_URL, CONF_USERNAME
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.entity import generate_entity_id
from homeassistant.util import Throttle
from .icalendarparser import ICalendarParser
VERSION = "2.0.0"
_LOGGER = logging.getLogger(__name__)
CONF_DEVICE_ID = "device_id"
CONF_CALENDARS = "calendars"
CONF_CALENDAR = "calendar"
CONF_INCLUDE_ALL_DAY = "includeAllDay"
CONF_PARSER = "parser"
OFFSET = "!!"
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
# pylint: disable=no-value-for-parameter
vol.Optional(CONF_CALENDARS, default=[]): vol.All(
cv.ensure_list,
vol.Schema(
[
vol.Schema(
{
vol.Required(CONF_URL): vol.Url(),
vol.Required(CONF_NAME): cv.string,
vol.Optional(
CONF_INCLUDE_ALL_DAY, default=False
): cv.boolean,
vol.Optional(CONF_USERNAME, default=""): cv.string,
vol.Optional(CONF_PASSWORD, default=""): cv.string,
vol.Optional(CONF_PARSER, default="icalevents"): cv.string,
}
)
]
),
)
}
)
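# Example configuration.yaml entry accepted by this schema (a sketch; the
# platform key name is an assumption and all values are placeholders):
# calendar:
#   - platform: ics_calendar
#     calendars:
#       - name: "Team"
#         url: "https://example.com/cal.ics"
#         includeAllDay: true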
MIN_TIME_BETWEEN_UPDATES = timedelta(minutes=15)
# MIN_TIME_BETWEEN_DOWNLOADS is smaller than MIN_TIME_BETWEEN_UPDATES so that
# it won't be skipped if an explicit update is called. Eventually, if these are
# configurable, we'll let end users worry about if they mean to have it happen
# that way.
MIN_TIME_BETWEEN_DOWNLOADS = timedelta(minutes=10)
def setup_platform(hass, config, add_entities, _=None):
"""Set up the ICS Calendar platform"""
_LOGGER.debug("Setting up ics calendars")
calendar_devices = []
for calendar in config.get(CONF_CALENDARS):
device_data = {
CONF_NAME: calendar.get(CONF_NAME),
CONF_URL: calendar.get(CONF_URL),
CONF_INCLUDE_ALL_DAY: calendar.get(CONF_INCLUDE_ALL_DAY),
CONF_USERNAME: calendar.get(CONF_USERNAME),
CONF_PASSWORD: calendar.get(CONF_PASSWORD),
CONF_PARSER: calendar.get(CONF_PARSER),
}
device_id = "{}".format(device_data[CONF_NAME])
entity_id = generate_entity_id(ENTITY_ID_FORMAT, device_id, hass=hass)
calendar_devices.append(ICSCalendarEventDevice(entity_id, device_data))
add_entities(calendar_devices)
class ICSCalendarEventDevice(CalendarEventDevice):
"""A device for getting the next Task from an ICS Calendar"""
def __init__(self, entity_id, device_data):
_LOGGER.debug("Initializing calendar: %s", device_data[CONF_NAME])
self.data = ICSCalendarData(device_data)
self.entity_id = entity_id
self._event = None
self._name = device_data[CONF_NAME]
self._offset_reached = False
self._last_call = None
self._last_event_list = None
@property
def device_state_attributes(self):
"""Return the calendar entity's state attributes."""
return {"offset_reached": self._offset_reached}
@property
def event(self):
"""Returns the current event for the calendar entity or None"""
return self._event
@property
def name(self):
"""Returns the name of the calendar entity"""
return self._name
async def async_get_events(self, hass, start_date, end_date):
"""Get all events in a specific time frame."""
if (
self._last_event_list is None
or self._last_call is None
or (datetime.now() - self._last_call) > MIN_TIME_BETWEEN_UPDATES
):
self._last_call = datetime.now()
self._last_event_list = await self.data.async_get_events(
hass, start_date, end_date
)
return self._last_event_list
def update(self):
"""Update event data."""
self.data.update()
event = copy.deepcopy(self.data.event)
if event is None:
self._event = event
return
event = calculate_offset(event, OFFSET)
self._offset_reached = is_offset_reached(event)
self._event = event
class ICSCalendarData:
"""Calss to use the calendar ICS client object to get next event."""
def __init__(self, device_data):
"""Set up how we are going to connect to the ICS Calendar"""
self.name = device_data[CONF_NAME]
self.url = device_data[CONF_URL]
self.include_all_day = device_data[CONF_INCLUDE_ALL_DAY]
self.parser = ICalendarParser.get_instance(device_data[CONF_PARSER])
self.event = None
self._calendar_data = None
self._last_download = None
if device_data[CONF_USERNAME] != "" and device_data[CONF_PASSWORD] != "":
passman = HTTPPasswordMgrWithDefaultRealm()
passman.add_password(
None, self.url, device_data[CONF_USERNAME], device_data[CONF_PASSWORD]
)
basic_auth_handler = HTTPBasicAuthHandler(passman)
digest_auth_handler = HTTPDigestAuthHandler(passman)
opener = build_opener(digest_auth_handler, basic_auth_handler)
install_opener(opener)
def _download_calendar(self):
if (
self._calendar_data is None
or self._last_download is None
or (datetime.now() - self._last_download) > MIN_TIME_BETWEEN_DOWNLOADS
):
self._last_download = datetime.now()
self._calendar_data = None
try:
with urlopen(self.url) as conn:
self._calendar_data = conn.read().decode().replace("\0", "")
except HTTPError as http_error:
_LOGGER.error(f"{self.name}: Failed to open url: {http_error.reason}")
except ContentTooShortError as content_too_short_error:
_LOGGER.error(
f"{self.name}: Could not download calendar data: {content_too_short_error.reason}"
)
except URLError as url_error:
_LOGGER.error(f"{self.name}: Failed to open url: {url_error.reason}")
        except Exception:
_LOGGER.error(f"{self.name}: Failed to open url!")
return
async def async_get_events(self, hass, start_date, end_date):
"""Get all events in a specific time frame."""
event_list = []
await hass.async_add_job(self._download_calendar)
try:
events = self.parser.get_event_list(
content=self._calendar_data,
start=start_date,
end=end_date,
include_all_day=self.include_all_day,
)
event_list = list(map(self.format_dates, events))
        except Exception:
_LOGGER.error(f"{self.name}: Failed to parse ICS!")
event_list = []
return event_list
@Throttle(MIN_TIME_BETWEEN_UPDATES)
def update(self):
"""Get the latest data."""
self._download_calendar()
try:
self.event = self.parser.get_current_event(
content=self._calendar_data, include_all_day=self.include_all_day
)
self.event["start"] = self.get_hass_date(
self.event["start"], self.event["all_day"]
)
self.event["end"] = self.get_hass_date(
self.event["end"], self.event["all_day"]
)
return True
        except Exception:
_LOGGER.error(f"{self.name}: Failed to parse ICS!")
return False
def format_dates(self, event):
event["start"] = self.get_date_formatted(event["start"], event["all_day"])
event["end"] = self.get_date_formatted(event["end"], event["all_day"])
return event
def get_date_formatted(self, dt, is_all_day):
"""Return the formatted date"""
# Note that all day events should have a time of 0, and the timezone
# must be local.
if is_all_day:
return dt.strftime("%Y-%m-%d")
return dt.strftime("%Y-%m-%dT%H:%M:%S.%f%z")
def get_hass_date(self, dt, is_all_day):
"""Return the wrapped and formatted date"""
if is_all_day:
return {"date": self.parser.get_date_formatted(dt, is_all_day)}
return {"dateTime": self.parser.get_date_formatted(dt, is_all_day)}
| [
[
[
39,
43
],
[
4662,
4666
]
],
[
[
51,
58
],
[
838,
845
]
],
[
[
80,
88
],
[
4282,
4290
],
[
4384,
4392
],
[
6088,
6096
],
[
6200,
6208
]
],
[
[
90,
99
],
[
1969,
1978
],
[
2270,
2279
]
],
[
[
125,
145
],
[
6550,
6570
]
],
[
[
147,
156
],
[
6419,
6428
]
],
[
[
158,
166
],
[
6770,
6778
]
],
[
[
200,
231
],
[
5534,
5565
]
],
[
[
237,
257
],
[
5736,
5756
]
],
[
[
263,
284
],
[
5800,
5821
]
],
[
[
290,
302
],
[
5852,
5864
]
],
[
[
308,
322
],
[
5918,
5932
]
],
[
[
328,
335
],
[
6292,
6299
]
],
[
[
347,
364
],
[
1135,
1138
],
[
1177,
1180
],
[
1226,
1229
],
[
1276,
1279
],
[
1342,
1345
],
[
1405,
1408
],
[
1469,
1472
],
[
1622,
1625
],
[
1702,
1705
],
[
1782,
1785
],
[
1366,
1369
]
],
[
[
417,
433
],
[
2970,
2986
]
],
[
[
439,
454
],
[
1048,
1063
]
],
[
[
460,
479
],
[
3157,
3176
]
],
[
[
485,
501
],
[
4786,
4802
]
],
[
[
507,
524
],
[
4849,
4866
]
],
[
[
560,
569
],
[
1418,
1427
],
[
2549,
2558
],
[
2573,
2582
],
[
2919,
2928
],
[
3357,
3366
],
[
3513,
3522
],
[
5139,
5148
]
],
[
[
571,
584
],
[
1715,
1728
],
[
2769,
2782
],
[
2797,
2810
],
[
5490,
5503
],
[
5674,
5687
]
],
[
[
586,
594
],
[
1355,
1363
],
[
2597,
2605
],
[
2620,
2628
],
[
5181,
5189
]
],
[
[
596,
609
],
[
1635,
1648
],
[
2713,
2726
],
[
2741,
2754
],
[
5453,
5466
],
[
5646,
5659
]
],
[
[
617,
662
],
[
1198,
1200
],
[
1430,
1432
],
[
1582,
1584
],
[
1663,
1665
],
[
1743,
1745
],
[
1831,
1833
]
],
[
[
704,
722
],
[
2951,
2969
]
],
[
[
754,
762
],
[
7627,
7635
]
],
[
[
792,
807
],
[
5278,
5293
]
],
[
[
809,
816
]
],
[
[
828,
835
],
[
2397,
2404
],
[
3302,
3309
],
[
6460,
6467
],
[
6615,
6622
],
[
6809,
6816
],
[
6915,
6922
],
[
7514,
7521
],
[
8220,
8227
]
],
[
[
867,
881
]
],
[
[
896,
910
],
[
1148,
1162
],
[
2496,
2510
]
],
[
[
925,
938
]
],
[
[
952,
972
],
[
1515,
1535
],
[
2643,
2663
],
[
2678,
2698
],
[
5234,
5254
]
],
[
[
991,
1002
],
[
1795,
1806
],
[
2825,
2836
],
[
2851,
2862
],
[
5319,
5330
]
],
[
[
1015,
1021
],
[
4810,
4816
]
],
[
[
1030,
1045
]
],
[
[
1942,
1966
],
[
7636,
7660
],
[
4318,
4342
]
],
[
[
2241,
2267
],
[
6128,
6154
]
],
[
[
2298,
2312
]
],
[
[
3134,
3156
],
[
3042,
3064
]
],
[
[
4910,
4925
],
[
3389,
3404
]
]
] |
# Copyright 2020 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""SSD dataset"""
from __future__ import division
import os
import json
import xml.etree.ElementTree as et
import numpy as np
import cv2
import mindspore.dataset as de
import mindspore.dataset.vision.c_transforms as C
from mindspore.mindrecord import FileWriter
from .config import config
from .box_utils import jaccard_numpy, ssd_bboxes_encode
def _rand(a=0., b=1.):
"""Generate random."""
return np.random.rand() * (b - a) + a
def get_imageId_from_fileName(filename, id_iter):
"""Get imageID from fileName if fileName is int, else return id_iter."""
filename = os.path.splitext(filename)[0]
if filename.isdigit():
return int(filename)
return id_iter
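# For example (a sketch): "000123.xml" -> os.path.splitext -> "000123", which
# is numeric, so 123 is returned; a name like "img_a.xml" is not, so the
# running id_iter is returned instead.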
def random_sample_crop(image, boxes):
"""Random Crop the image and boxes"""
height, width, _ = image.shape
min_iou = np.random.choice([None, 0.1, 0.3, 0.5, 0.7, 0.9])
if min_iou is None:
return image, boxes
# max trails (50)
for _ in range(50):
image_t = image
w = _rand(0.3, 1.0) * width
h = _rand(0.3, 1.0) * height
# aspect ratio constraint b/t .5 & 2
if h / w < 0.5 or h / w > 2:
continue
left = _rand() * (width - w)
top = _rand() * (height - h)
rect = np.array([int(top), int(left), int(top + h), int(left + w)])
overlap = jaccard_numpy(boxes, rect)
# dropout some boxes
drop_mask = overlap > 0
if not drop_mask.any():
continue
if overlap[drop_mask].min() < min_iou and overlap[drop_mask].max() > (min_iou + 0.2):
continue
image_t = image_t[rect[0]:rect[2], rect[1]:rect[3], :]
centers = (boxes[:, :2] + boxes[:, 2:4]) / 2.0
m1 = (rect[0] < centers[:, 0]) * (rect[1] < centers[:, 1])
m2 = (rect[2] > centers[:, 0]) * (rect[3] > centers[:, 1])
# mask in that both m1 and m2 are true
mask = m1 * m2 * drop_mask
# have any valid boxes? try again if not
if not mask.any():
continue
# take only matching gt boxes
boxes_t = boxes[mask, :].copy()
boxes_t[:, :2] = np.maximum(boxes_t[:, :2], rect[:2])
boxes_t[:, :2] -= rect[:2]
boxes_t[:, 2:4] = np.minimum(boxes_t[:, 2:4], rect[2:4])
boxes_t[:, 2:4] -= rect[:2]
return image_t, boxes_t
return image, boxes
def preprocess_fn(img_id, image, box, is_training):
"""Preprocess function for dataset."""
cv2.setNumThreads(2)
def _infer_data(image, input_shape):
img_h, img_w, _ = image.shape
input_h, input_w = input_shape
image = cv2.resize(image, (input_w, input_h))
# When the channels of image is 1
if len(image.shape) == 2:
image = np.expand_dims(image, axis=-1)
image = np.concatenate([image, image, image], axis=-1)
return img_id, image, np.array((img_h, img_w), np.float32)
def _data_aug(image, box, is_training, image_size=(300, 300)):
"""Data augmentation function."""
ih, iw, _ = image.shape
w, h = image_size
if not is_training:
return _infer_data(image, image_size)
# Random crop
box = box.astype(np.float32)
image, box = random_sample_crop(image, box)
ih, iw, _ = image.shape
# Resize image
image = cv2.resize(image, (w, h))
# Flip image or not
flip = _rand() < .5
if flip:
image = cv2.flip(image, 1, dst=None)
# When the channels of image is 1
if len(image.shape) == 2:
image = np.expand_dims(image, axis=-1)
image = np.concatenate([image, image, image], axis=-1)
box[:, [0, 2]] = box[:, [0, 2]] / ih
box[:, [1, 3]] = box[:, [1, 3]] / iw
if flip:
box[:, [1, 3]] = 1 - box[:, [3, 1]]
box, label, num_match = ssd_bboxes_encode(box)
return image, box, label, num_match
return _data_aug(image, box, is_training, image_size=config.img_shape)
def create_voc_label(is_training):
"""Get image path and annotation from VOC."""
voc_root = config.voc_root
cls_map = {name: i for i, name in enumerate(config.classes)}
sub_dir = 'train' if is_training else 'eval'
voc_dir = os.path.join(voc_root, sub_dir)
if not os.path.isdir(voc_dir):
raise ValueError(f'Cannot find {sub_dir} dataset path.')
image_dir = anno_dir = voc_dir
if os.path.isdir(os.path.join(voc_dir, 'Images')):
image_dir = os.path.join(voc_dir, 'Images')
if os.path.isdir(os.path.join(voc_dir, 'Annotations')):
anno_dir = os.path.join(voc_dir, 'Annotations')
if not is_training:
json_file = os.path.join(config.voc_root, config.voc_json)
file_dir = os.path.split(json_file)[0]
if not os.path.isdir(file_dir):
os.makedirs(file_dir)
json_dict = {"images": [], "type": "instances", "annotations": [],
"categories": []}
bnd_id = 1
image_files_dict = {}
image_anno_dict = {}
images = []
id_iter = 0
for anno_file in os.listdir(anno_dir):
print(anno_file)
if not anno_file.endswith('xml'):
continue
tree = et.parse(os.path.join(anno_dir, anno_file))
root_node = tree.getroot()
file_name = root_node.find('filename').text
img_id = get_imageId_from_fileName(file_name, id_iter)
id_iter += 1
image_path = os.path.join(image_dir, file_name)
print(image_path)
if not os.path.isfile(image_path):
print(f'Cannot find image {file_name} according to annotations.')
continue
labels = []
for obj in root_node.iter('object'):
cls_name = obj.find('name').text
if cls_name not in cls_map:
print(f'Label "{cls_name}" not in "{config.classes}"')
continue
bnd_box = obj.find('bndbox')
x_min = int(bnd_box.find('xmin').text) - 1
y_min = int(bnd_box.find('ymin').text) - 1
x_max = int(bnd_box.find('xmax').text) - 1
y_max = int(bnd_box.find('ymax').text) - 1
labels.append([y_min, x_min, y_max, x_max, cls_map[cls_name]])
if not is_training:
o_width = abs(x_max - x_min)
o_height = abs(y_max - y_min)
ann = {'area': o_width * o_height, 'iscrowd': 0, 'image_id': \
img_id, 'bbox': [x_min, y_min, o_width, o_height], \
'category_id': cls_map[cls_name], 'id': bnd_id, \
'ignore': 0, \
'segmentation': []}
json_dict['annotations'].append(ann)
bnd_id = bnd_id + 1
if labels:
images.append(img_id)
image_files_dict[img_id] = image_path
image_anno_dict[img_id] = np.array(labels)
if not is_training:
size = root_node.find("size")
width = int(size.find('width').text)
height = int(size.find('height').text)
image = {'file_name': file_name, 'height': height, 'width': width,
'id': img_id}
json_dict['images'].append(image)
if not is_training:
for cls_name, cid in cls_map.items():
cat = {'supercategory': 'none', 'id': cid, 'name': cls_name}
json_dict['categories'].append(cat)
        with open(json_file, 'w') as json_fp:
            json_fp.write(json.dumps(json_dict))
return images, image_files_dict, image_anno_dict
def create_coco_label(is_training):
"""Get image path and annotation from COCO."""
from pycocotools.coco import COCO
coco_root = config.coco_root
data_type = config.val_data_type
if is_training:
data_type = config.train_data_type
    # Classes to train or test on.
train_cls = config.classes
train_cls_dict = {}
for i, cls in enumerate(train_cls):
train_cls_dict[cls] = i
anno_json = os.path.join(coco_root, config.instances_set.format(data_type))
coco = COCO(anno_json)
    classes_dict = {}
    cat_ids = coco.loadCats(coco.getCatIds())
    for cat in cat_ids:
        classes_dict[cat["id"]] = cat["name"]
image_ids = coco.getImgIds()
images = []
image_path_dict = {}
image_anno_dict = {}
for img_id in image_ids:
image_info = coco.loadImgs(img_id)
file_name = image_info[0]["file_name"]
anno_ids = coco.getAnnIds(imgIds=img_id, iscrowd=None)
anno = coco.loadAnns(anno_ids)
image_path = os.path.join(coco_root, data_type, file_name)
annos = []
iscrowd = False
for label in anno:
bbox = label["bbox"]
            class_name = classes_dict[label["category_id"]]
iscrowd = iscrowd or label["iscrowd"]
if class_name in train_cls:
x_min, x_max = bbox[0], bbox[0] + bbox[2]
y_min, y_max = bbox[1], bbox[1] + bbox[3]
annos.append(list(map(round, [y_min, x_min, y_max, x_max])) + [train_cls_dict[class_name]])
if not is_training and iscrowd:
continue
if len(annos) >= 1:
images.append(img_id)
image_path_dict[img_id] = image_path
image_anno_dict[img_id] = np.array(annos)
return images, image_path_dict, image_anno_dict
def anno_parser(annos_str):
"""Parse annotation from string to list."""
annos = []
for anno_str in annos_str:
anno = list(map(int, anno_str.strip().split(',')))
annos.append(anno)
return annos
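# Example (a quick sanity check of the parsing above):
#   anno_parser(["10,20,30,40,1"]) -> [[10, 20, 30, 40, 1]]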
def filter_valid_data(image_dir, anno_path):
"""Filter valid image file, which both in image_dir and anno_path."""
images = []
image_path_dict = {}
image_anno_dict = {}
if not os.path.isdir(image_dir):
raise RuntimeError("Path given is not valid.")
if not os.path.isfile(anno_path):
raise RuntimeError("Annotation file is not valid.")
with open(anno_path, "rb") as f:
lines = f.readlines()
for img_id, line in enumerate(lines):
line_str = line.decode("utf-8").strip()
line_split = str(line_str).split(' ')
file_name = line_split[0]
image_path = os.path.join(image_dir, file_name)
if os.path.isfile(image_path):
images.append(img_id)
image_path_dict[img_id] = image_path
image_anno_dict[img_id] = anno_parser(line_split[1:])
return images, image_path_dict, image_anno_dict
def voc_data_to_mindrecord(mindrecord_dir, is_training, prefix="ssd.mindrecord", file_num=8):
"""Create MindRecord file by image_dir and anno_path."""
mindrecord_path = os.path.join(mindrecord_dir, prefix)
writer = FileWriter(mindrecord_path, file_num)
images, image_path_dict, image_anno_dict = create_voc_label(is_training)
ssd_json = {
"img_id": {"type": "int32", "shape": [1]},
"image": {"type": "bytes"},
"annotation": {"type": "int32", "shape": [-1, 5]},
}
writer.add_schema(ssd_json, "ssd_json")
for img_id in images:
image_path = image_path_dict[img_id]
with open(image_path, 'rb') as f:
img = f.read()
annos = np.array(image_anno_dict[img_id], dtype=np.int32)
img_id = np.array([img_id], dtype=np.int32)
row = {"img_id": img_id, "image": img, "annotation": annos}
writer.write_raw_data([row])
writer.commit()
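# Note: the "annotation" column's [-1, 5] shape matches the
# [y_min, x_min, y_max, x_max, class_id] rows emitted by create_voc_label.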
def data_to_mindrecord_byte_image(dataset="coco", is_training=True, prefix="ssd.mindrecord", file_num=8):
"""Create MindRecord file."""
mindrecord_dir = config.mindrecord_dir
mindrecord_path = os.path.join(mindrecord_dir, prefix)
writer = FileWriter(mindrecord_path, file_num)
if dataset == "coco":
images, image_path_dict, image_anno_dict = create_coco_label(is_training)
else:
images, image_path_dict, image_anno_dict = filter_valid_data(config.image_dir, config.anno_path)
ssd_json = {
"img_id": {"type": "int32", "shape": [1]},
"image": {"type": "bytes"},
"annotation": {"type": "int32", "shape": [-1, 5]},
}
writer.add_schema(ssd_json, "ssd_json")
for img_id in images:
image_path = image_path_dict[img_id]
with open(image_path, 'rb') as f:
img = f.read()
annos = np.array(image_anno_dict[img_id], dtype=np.int32)
img_id = np.array([img_id], dtype=np.int32)
row = {"img_id": img_id, "image": img, "annotation": annos}
writer.write_raw_data([row])
writer.commit()
def create_ssd_dataset(mindrecord_file, batch_size=32, repeat_num=10, device_num=1, rank=0,
is_training=True, num_parallel_workers=4, use_multiprocessing=True):
"""Create SSD dataset with MindDataset."""
ds = de.MindDataset(mindrecord_file, columns_list=["img_id", "image", "annotation"], num_shards=device_num,
shard_id=rank, num_parallel_workers=num_parallel_workers, shuffle=is_training)
decode = C.Decode()
ds = ds.map(operations=decode, input_columns=["image"])
change_swap_op = C.HWC2CHW()
normalize_op = C.Normalize(mean=[0.485 * 255, 0.456 * 255, 0.406 * 255],
std=[0.229 * 255, 0.224 * 255, 0.225 * 255])
color_adjust_op = C.RandomColorAdjust(brightness=0.4, contrast=0.4, saturation=0.4)
compose_map_func = (lambda img_id, image, annotation: preprocess_fn(img_id, image, annotation, is_training))
if is_training:
output_columns = ["image", "box", "label", "num_match"]
trans = [color_adjust_op, normalize_op, change_swap_op]
else:
output_columns = ["img_id", "image", "image_shape"]
trans = [normalize_op, change_swap_op]
ds = ds.map(operations=compose_map_func, input_columns=["img_id", "image", "annotation"],
output_columns=output_columns, column_order=output_columns,
python_multiprocessing=use_multiprocessing,
num_parallel_workers=num_parallel_workers)
ds = ds.map(operations=trans, input_columns=["image"], python_multiprocessing=use_multiprocessing,
num_parallel_workers=num_parallel_workers)
ds = ds.batch(batch_size, drop_remainder=True)
ds = ds.repeat(repeat_num)
return ds
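# Column flow (from the maps above): the raw ["img_id", "image", "annotation"]
# columns are decoded, passed through preprocess_fn, and come out as
# ["image", "box", "label", "num_match"] for training or
# ["img_id", "image", "image_shape"] for evaluation, then normalized,
# converted HWC -> CHW, batched, and repeated.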
def create_mindrecord(dataset="coco", prefix="ssd.mindrecord", is_training=True):
print("Start create dataset!")
# It will generate mindrecord file in config.mindrecord_dir,
# and the file name is ssd.mindrecord0, 1, ... file_num.
mindrecord_dir = config.mindrecord_dir
mindrecord_file = os.path.join(mindrecord_dir, prefix + "0")
if not os.path.exists(mindrecord_file):
if not os.path.isdir(mindrecord_dir):
os.makedirs(mindrecord_dir)
if dataset == "coco":
if os.path.isdir(config.coco_root):
print("Create Mindrecord.")
data_to_mindrecord_byte_image("coco", is_training, prefix)
print("Create Mindrecord Done, at {}".format(mindrecord_dir))
else:
print("coco_root not exits.")
elif dataset == "voc":
if os.path.isdir(config.voc_root):
print("Create Mindrecord.")
voc_data_to_mindrecord(mindrecord_dir, is_training, prefix)
print("Create Mindrecord Done, at {}".format(mindrecord_dir))
else:
print("voc_root not exits.")
else:
if os.path.isdir(config.image_dir) and os.path.exists(config.anno_path):
print("Create Mindrecord.")
data_to_mindrecord_byte_image("other", is_training, prefix)
print("Create Mindrecord Done, at {}".format(mindrecord_dir))
else:
print("image_dir or anno_path not exits.")
return mindrecord_file
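# End-to-end usage sketch (a minimal sketch, assuming `config` supplies
# coco_root, mindrecord_dir, img_shape, etc. as used above):
#
#   mindrecord_file = create_mindrecord(dataset="coco", prefix="ssd.mindrecord", is_training=True)
#   ds = create_ssd_dataset(mindrecord_file, batch_size=32, repeat_num=1)
#   for batch in ds.create_dict_iterator(output_numpy=True):
#       images, boxes = batch["image"], batch["box"]  # per the training columns above
#       break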
# -*- coding: utf-8 -*-
# PLEASE DO NOT EDIT THIS FILE, IT IS GENERATED AND WILL BE OVERWRITTEN:
# https://github.com/ccxt/ccxt/blob/master/CONTRIBUTING.md#how-to-contribute-code
from ccxt.base.exchange import Exchange
import json
from ccxt.base.errors import ExchangeError
from ccxt.base.errors import AuthenticationError
from ccxt.base.errors import PermissionDenied
from ccxt.base.errors import AccountSuspended
from ccxt.base.errors import ArgumentsRequired
from ccxt.base.errors import BadRequest
from ccxt.base.errors import BadSymbol
from ccxt.base.errors import InsufficientFunds
from ccxt.base.errors import InvalidOrder
from ccxt.base.errors import OrderNotFound
from ccxt.base.errors import OrderImmediatelyFillable
from ccxt.base.errors import NotSupported
from ccxt.base.errors import DDoSProtection
from ccxt.base.errors import RateLimitExceeded
from ccxt.base.errors import ExchangeNotAvailable
from ccxt.base.errors import OnMaintenance
from ccxt.base.errors import InvalidNonce
from ccxt.base.decimal_to_precision import TRUNCATE
from ccxt.base.precise import Precise
class binance(Exchange):
def describe(self):
return self.deep_extend(super(binance, self).describe(), {
'id': 'binance',
'name': 'Binance',
'countries': ['JP', 'MT'], # Japan, Malta
'rateLimit': 50,
'certified': True,
'pro': True,
# new metainfo interface
'has': {
'cancelAllOrders': True,
'cancelOrder': True,
'CORS': None,
'createOrder': True,
'fetchBalance': True,
'fetchBorrowRate': True,
'fetchBorrowRates': False,
'fetchBidsAsks': True,
'fetchClosedOrders': 'emulated',
'fetchCurrencies': True,
'fetchDepositAddress': True,
'fetchDeposits': True,
'fetchFundingFees': True,
'fetchFundingHistory': True,
'fetchFundingRate': True,
'fetchFundingRateHistory': True,
'fetchFundingRates': True,
'fetchIndexOHLCV': True,
'fetchIsolatedPositions': True,
'fetchMarkets': True,
'fetchMarkOHLCV': True,
'fetchMyTrades': True,
'fetchOHLCV': True,
'fetchOpenOrders': True,
'fetchOrder': True,
'fetchOrderBook': True,
'fetchOrders': True,
'fetchPositions': True,
'fetchPremiumIndexOHLCV': False,
'fetchStatus': True,
'fetchTicker': True,
'fetchTickers': True,
'fetchTime': True,
'fetchTrades': True,
'fetchTradingFee': True,
'fetchTradingFees': True,
'fetchTransactions': False,
'fetchTransfers': True,
'fetchWithdrawals': True,
'setLeverage': True,
'setMarginMode': True,
'setPositionMode': True,
'addMargin': True,
'reduceMargin': True,
'transfer': True,
'withdraw': True,
},
'timeframes': {
'1m': '1m',
'3m': '3m',
'5m': '5m',
'15m': '15m',
'30m': '30m',
'1h': '1h',
'2h': '2h',
'4h': '4h',
'6h': '6h',
'8h': '8h',
'12h': '12h',
'1d': '1d',
'3d': '3d',
'1w': '1w',
'1M': '1M',
},
'urls': {
'logo': 'https://user-images.githubusercontent.com/1294454/29604020-d5483cdc-87ee-11e7-94c7-d1a8d9169293.jpg',
'test': {
'dapiPublic': 'https://testnet.binancefuture.com/dapi/v1',
'dapiPrivate': 'https://testnet.binancefuture.com/dapi/v1',
'fapiPublic': 'https://testnet.binancefuture.com/fapi/v1',
'fapiPrivate': 'https://testnet.binancefuture.com/fapi/v1',
'fapiPrivateV2': 'https://testnet.binancefuture.com/fapi/v2',
'public': 'https://testnet.binance.vision/api/v3',
'private': 'https://testnet.binance.vision/api/v3',
'v1': 'https://testnet.binance.vision/api/v1',
},
'api': {
'wapi': 'https://api.binance.com/wapi/v3',
'sapi': 'https://api.binance.com/sapi/v1',
'dapiPublic': 'https://dapi.binance.com/dapi/v1',
'dapiPrivate': 'https://dapi.binance.com/dapi/v1',
'dapiPrivateV2': 'https://dapi.binance.com/dapi/v2',
'dapiData': 'https://dapi.binance.com/futures/data',
'fapiPublic': 'https://fapi.binance.com/fapi/v1',
'fapiPrivate': 'https://fapi.binance.com/fapi/v1',
'fapiData': 'https://fapi.binance.com/futures/data',
'fapiPrivateV2': 'https://fapi.binance.com/fapi/v2',
'public': 'https://api.binance.com/api/v3',
'private': 'https://api.binance.com/api/v3',
'v1': 'https://api.binance.com/api/v1',
},
'www': 'https://www.binance.com',
# 'referral': {
# 'url': 'https://www.binance.com/en/register?ref=BLEJC98C',
# 'discount': 0.2,
# },
'doc': [
'https://binance-docs.github.io/apidocs/spot/en',
],
'api_management': 'https://www.binance.com/en/usercenter/settings/api-management',
'fees': 'https://www.binance.com/en/fee/schedule',
},
'depth': 1,
'api': {
# the API structure below will need 3-layer apidefs
'sapi': {
'get': {
'accountSnapshot': 1,
'system/status': 1,
# these endpoints require self.apiKey
'margin/asset': 1,
'margin/pair': 1,
'margin/allAssets': 1,
'margin/allPairs': 1,
'margin/priceIndex': 1,
# these endpoints require self.apiKey + self.secret
'asset/assetDividend': 1,
'asset/dribblet': 1,
'asset/transfer': 1,
'asset/assetDetail': 1,
'asset/tradeFee': 1,
'asset/get-funding-asset': 1,
'margin/loan': 1,
'margin/repay': 1,
'margin/account': 1,
'margin/transfer': 1,
'margin/interestHistory': 1,
'margin/forceLiquidationRec': 1,
'margin/order': 1,
'margin/openOrders': 1,
'margin/allOrders': 1,
'margin/myTrades': 1,
'margin/maxBorrowable': 5,
'margin/maxTransferable': 5,
'margin/isolated/transfer': 1,
'margin/isolated/account': 1,
'margin/isolated/pair': 1,
'margin/isolated/allPairs': 1,
'margin/isolated/accountLimit': 1,
'margin/interestRateHistory': 1,
'margin/orderList': 2,
'margin/allOrderList': 10,
'margin/openOrderList': 3,
'loan/income': 1,
'fiat/orders': 1,
'fiat/payments': 1,
'futures/transfer': 5,
'futures/loan/borrow/history': 1,
'futures/loan/repay/history': 1,
'futures/loan/wallet': 1,
'futures/loan/configs': 1,
'futures/loan/calcAdjustLevel': 1,
'futures/loan/calcMaxAdjustAmount': 1,
'futures/loan/adjustCollateral/history': 1,
'futures/loan/liquidationHistory': 1,
# https://binance-docs.github.io/apidocs/spot/en/#withdraw-sapi
'capital/config/getall': 1, # get networks for withdrawing USDT ERC20 vs USDT Omni
'capital/deposit/address': 1,
'capital/deposit/hisrec': 1,
'capital/deposit/subAddress': 1,
'capital/deposit/subHisrec': 1,
'capital/withdraw/history': 1,
'account/status': 1,
'account/apiTradingStatus': 1,
'account/apiRestrictions/ipRestriction': 1,
'bnbBurn': 1,
'sub-account/assets': 1,
'sub-account/futures/account': 1,
'sub-account/futures/accountSummary': 1,
'sub-account/futures/positionRisk': 1,
'sub-account/futures/internalTransfer': 1,
'sub-account/list': 1,
'sub-account/margin/account': 1,
'sub-account/margin/accountSummary': 1,
'sub-account/spotSummary': 5,
'sub-account/status': 1,
'sub-account/sub/transfer/history': 1,
'sub-account/transfer/subUserHistory': 1,
'sub-account/universalTransfer': 1,
# lending endpoints
'lending/daily/product/list': 1,
'lending/daily/userLeftQuota': 1,
'lending/daily/userRedemptionQuota': 1,
'lending/daily/token/position': 1,
'lending/union/account': 1,
'lending/union/purchaseRecord': 1,
'lending/union/redemptionRecord': 1,
'lending/union/interestHistory': 1,
'lending/project/list': 1,
'lending/project/position/list': 1,
# mining endpoints
'mining/pub/algoList': 1,
'mining/pub/coinList': 1,
'mining/worker/detail': 5,
'mining/worker/list': 5,
'mining/payment/list': 5,
'mining/statistics/user/status': 5,
'mining/statistics/user/list': 5,
# liquid swap endpoints
'bswap/pools': 1,
'bswap/liquidity': {'cost': 1, 'noPoolId': 10},
'bswap/liquidityOps': 2,
'bswap/quote': 2,
'bswap/swap': 1,
'bswap/poolConfigure': 1,
'bswap/addLiquidityPreview': 1,
'bswap/removeLiquidityPreview': 1,
# leveraged token endpoints
'blvt/tokenInfo': 1,
'blvt/subscribe/record': 1,
'blvt/redeem/record': 1,
'blvt/userLimit': 1,
# broker api
'apiReferral/ifNewUser': 1,
'apiReferral/customization': 1,
'apiReferral/userCustomization': 1,
'apiReferral/rebate/recentRecord': 1,
'apiReferral/rebate/historicalRecord': 1,
'apiReferral/kickback/recentRecord': 1,
'apiReferral/kickback/historicalRecord': 1,
# brokerage API
'broker/subAccountApi': 1,
'broker/subAccount': 1,
'broker/subAccountApi/commission/futures': 1,
'broker/subAccountApi/commission/coinFutures': 1,
'broker/info': 1,
'broker/transfer': 1,
'broker/transfer/futures': 1,
'broker/rebate/recentRecord': 1,
'broker/rebate/historicalRecord': 1,
'broker/subAccount/bnbBurn/status': 1,
'broker/subAccount/depositHist': 1,
'broker/subAccount/spotSummary': 1,
'broker/subAccount/marginSummary': 1,
'broker/subAccount/futuresSummary': 1,
'broker/rebate/futures/recentRecord': 1,
'broker/subAccountApi/ipRestriction': 1,
'broker/universalTransfer': 1,
# v2 not supported yet
# GET /sapi/v2/broker/subAccount/futuresSummary
'account/apiRestrictions': 1,
# subaccounts
'managed-subaccount/asset': 1,
# c2c / p2p
'c2c/orderMatch/listUserOrderHistory': 1,
},
'post': {
'asset/dust': 1,
'asset/transfer': 1,
'asset/get-funding-asset': 1,
'account/disableFastWithdrawSwitch': 1,
'account/enableFastWithdrawSwitch': 1,
'account/apiRestrictions/ipRestriction': 1,
'account/apiRestrictions/ipRestriction/ipList': 1,
'capital/withdraw/apply': 1,
'margin/transfer': 1,
'margin/loan': 1,
'margin/repay': 1,
'margin/order': 4,
'margin/order/oco': 1,
'margin/isolated/create': 1,
'margin/isolated/transfer': 1,
'margin/isolated/account': 1,
'bnbBurn': 1,
'sub-account/margin/transfer': 1,
'sub-account/margin/enable': 1,
# 'sub-account/margin/enable': 1,
'sub-account/futures/enable': 1,
'sub-account/futures/transfer': 1,
'sub-account/futures/internalTransfer': 1,
'sub-account/transfer/subToSub': 1,
'sub-account/transfer/subToMaster': 1,
'sub-account/universalTransfer': 1,
'managed-subaccount/deposit': 1,
'managed-subaccount/withdraw': 1,
'userDataStream': 1,
'userDataStream/isolated': 1,
'futures/transfer': 1,
'futures/loan/borrow': 20,
'futures/loan/repay': 20,
'futures/loan/adjustCollateral': 20,
# lending
'lending/customizedFixed/purchase': 1,
'lending/daily/purchase': 1,
'lending/daily/redeem': 1,
# liquid swap endpoints
'bswap/liquidityAdd': 2,
'bswap/liquidityRemove': 2,
'bswap/swap': 2,
# leveraged token endpoints
'blvt/subscribe': 1,
'blvt/redeem': 1,
# brokerage API
'apiReferral/customization': 1,
'apiReferral/userCustomization': 1,
'apiReferral/rebate/historicalRecord': 1,
'apiReferral/kickback/historicalRecord': 1,
'broker/subAccount': 1,
'broker/subAccount/margin': 1,
'broker/subAccount/futures': 1,
'broker/subAccountApi': 1,
'broker/subAccountApi/permission': 1,
'broker/subAccountApi/commission': 1,
'broker/subAccountApi/commission/futures': 1,
'broker/subAccountApi/commission/coinFutures': 1,
'broker/transfer': 1,
'broker/transfer/futures': 1,
'broker/rebate/historicalRecord': 1,
'broker/subAccount/bnbBurn/spot': 1,
'broker/subAccount/bnbBurn/marginInterest': 1,
'broker/subAccount/blvt': 1,
'broker/subAccountApi/ipRestriction': 1,
'broker/subAccountApi/ipRestriction/ipList': 1,
'broker/universalTransfer': 1,
'broker/subAccountApi/permission/universalTransfer': 1,
'broker/subAccountApi/permission/vanillaOptions': 1,
},
'put': {
'userDataStream': 1,
'userDataStream/isolated': 1,
},
'delete': {
'account/apiRestrictions/ipRestriction/ipList': 1,
'margin/openOrders': 1,
'margin/order': 1,
'margin/orderList': 1,
'margin/isolated/account': 1,
'userDataStream': 1,
'userDataStream/isolated': 1,
# brokerage API
'broker/subAccountApi': 1,
'broker/subAccountApi/ipRestriction/ipList': 1,
},
},
# deprecated
'wapi': {
'post': {
'withdraw': 1,
'sub-account/transfer': 1,
},
'get': {
'depositHistory': 1,
'withdrawHistory': 1,
'depositAddress': 1,
'accountStatus': 1,
'systemStatus': 1,
'apiTradingStatus': 1,
'userAssetDribbletLog': 1,
'tradeFee': 1,
'assetDetail': 1,
'sub-account/list': 1,
'sub-account/transfer/history': 1,
'sub-account/assets': 1,
},
},
'dapiPublic': {
'get': {
'ping': 1,
'time': 1,
'exchangeInfo': 1,
'depth': {'cost': 2, 'byLimit': [[50, 2], [100, 5], [500, 10], [1000, 20]]},
'trades': 1,
'historicalTrades': 20,
'aggTrades': 20,
'premiumIndex': 10,
'fundingRate': 1,
'klines': {'cost': 1, 'byLimit': [[99, 1], [499, 2], [1000, 5], [10000, 10]]},
'continuousKlines': {'cost': 1, 'byLimit': [[99, 1], [499, 2], [1000, 5], [10000, 10]]},
'indexPriceKlines': {'cost': 1, 'byLimit': [[99, 1], [499, 2], [1000, 5], [10000, 10]]},
'markPriceKlines': {'cost': 1, 'byLimit': [[99, 1], [499, 2], [1000, 5], [10000, 10]]},
'ticker/24hr': {'cost': 1, 'noSymbol': 40},
'ticker/price': {'cost': 1, 'noSymbol': 2},
'ticker/bookTicker': {'cost': 1, 'noSymbol': 2},
'openInterest': 1,
},
},
'dapiData': {
'get': {
'openInterestHist': 1,
'topLongShortAccountRatio': 1,
'topLongShortPositionRatio': 1,
'globalLongShortAccountRatio': 1,
'takerBuySellVol': 1,
'basis': 1,
},
},
'dapiPrivate': {
'get': {
'positionSide/dual': 30,
'order': 1,
'openOrder': 1,
'openOrders': {'cost': 1, 'noSymbol': 5},
'allOrders': {'cost': 20, 'noSymbol': 40},
'balance': 1,
'account': 5,
'positionMargin/history': 1,
'positionRisk': 1,
'userTrades': {'cost': 20, 'noSymbol': 40},
'income': 20,
'leverageBracket': 1,
'forceOrders': {'cost': 20, 'noSymbol': 50},
'adlQuantile': 5,
},
'post': {
'positionSide/dual': 1,
'order': 4,
'batchOrders': 5,
'countdownCancelAll': 10,
'leverage': 1,
'marginType': 1,
'positionMargin': 1,
'listenKey': 1,
},
'put': {
'listenKey': 1,
},
'delete': {
'order': 1,
'allOpenOrders': 1,
'batchOrders': 5,
'listenKey': 1,
},
},
'dapiPrivateV2': {
'get': {
'leverageBracket': 1,
},
},
'fapiPublic': {
'get': {
'ping': 1,
'time': 1,
'exchangeInfo': 1,
'depth': {'cost': 2, 'byLimit': [[50, 2], [100, 5], [500, 10], [1000, 20]]},
'trades': 1,
'historicalTrades': 20,
'aggTrades': 20,
'klines': {'cost': 1, 'byLimit': [[99, 1], [499, 2], [1000, 5], [10000, 10]]},
'continuousKlines': {'cost': 1, 'byLimit': [[99, 1], [499, 2], [1000, 5], [10000, 10]]},
'markPriceKlines': {'cost': 1, 'byLimit': [[99, 1], [499, 2], [1000, 5], [10000, 10]]},
'indexPriceKlines': {'cost': 1, 'byLimit': [[99, 1], [499, 2], [1000, 5], [10000, 10]]},
'fundingRate': 1,
'premiumIndex': 1,
'ticker/24hr': {'cost': 1, 'noSymbol': 40},
'ticker/price': {'cost': 1, 'noSymbol': 2},
'ticker/bookTicker': {'cost': 1, 'noSymbol': 2},
'openInterest': 1,
'indexInfo': 1,
'apiTradingStatus': {'cost': 1, 'noSymbol': 10},
'lvtKlines': 1,
},
},
'fapiData': {
'get': {
'openInterestHist': 1,
'topLongShortAccountRatio': 1,
'topLongShortPositionRatio': 1,
'globalLongShortAccountRatio': 1,
'takerlongshortRatio': 1,
},
},
'fapiPrivate': {
'get': {
'forceOrders': {'cost': 20, 'noSymbol': 50},
'allOrders': 5,
'openOrder': 1,
'openOrders': 1,
'order': 1,
'account': 5,
'balance': 5,
'leverageBracket': 1,
'positionMargin/history': 1,
'positionRisk': 5,
'positionSide/dual': 30,
'userTrades': 5,
'income': 30,
'commissionRate': 20,
'apiTradingStatus': 1,
'multiAssetsMargin': 30,
# broker endpoints
'apiReferral/ifNewUser': 1,
'apiReferral/customization': 1,
'apiReferral/userCustomization': 1,
'apiReferral/traderNum': 1,
'apiReferral/overview': 1,
'apiReferral/tradeVol': 1,
'apiReferral/rebateVol': 1,
'apiReferral/traderSummary': 1,
'adlQuantile': 5,
},
'post': {
'batchOrders': 5,
'positionSide/dual': 1,
'positionMargin': 1,
'marginType': 1,
'order': 4,
'leverage': 1,
'listenKey': 1,
'countdownCancelAll': 10,
'multiAssetsMargin': 1,
# broker endpoints
'apiReferral/customization': 1,
'apiReferral/userCustomization': 1,
},
'put': {
'listenKey': 1,
},
'delete': {
'batchOrders': 1,
'order': 1,
'allOpenOrders': 1,
'listenKey': 1,
},
},
'fapiPrivateV2': {
'get': {
'account': 1,
'balance': 1,
'positionRisk': 1,
},
},
'public': {
'get': {
'ping': 1,
'time': 1,
'depth': {'cost': 1, 'byLimit': [[100, 1], [500, 5], [1000, 10], [5000, 50]]},
'trades': 1,
'aggTrades': 1,
'historicalTrades': 5,
'klines': 1,
'ticker/24hr': {'cost': 1, 'noSymbol': 40},
'ticker/price': {'cost': 1, 'noSymbol': 2},
'ticker/bookTicker': {'cost': 1, 'noSymbol': 2},
'exchangeInfo': 10,
},
'put': {
'userDataStream': 1,
},
'post': {
'userDataStream': 1,
},
'delete': {
'userDataStream': 1,
},
},
'private': {
'get': {
'allOrderList': 10, # oco
'openOrderList': 3, # oco
'orderList': 2, # oco
'order': 2,
'openOrders': {'cost': 3, 'noSymbol': 40},
'allOrders': 10,
'account': 10,
'myTrades': 10,
'rateLimit/order': 20,
},
'post': {
'order/oco': 1,
'order': 4,
'order/test': 1,
},
'delete': {
'openOrders': 1, # added on 2020-04-25 for canceling all open orders per symbol
'orderList': 1, # oco
'order': 1,
},
},
},
'fees': {
'trading': {
'feeSide': 'get',
'tierBased': False,
'percentage': True,
'taker': self.parse_number('0.001'),
'maker': self.parse_number('0.001'),
},
'future': {
'trading': {
'feeSide': 'quote',
'tierBased': True,
'percentage': True,
'taker': self.parse_number('0.000400'),
'maker': self.parse_number('0.000200'),
'tiers': {
'taker': [
[self.parse_number('0'), self.parse_number('0.000400')],
[self.parse_number('250'), self.parse_number('0.000400')],
[self.parse_number('2500'), self.parse_number('0.000350')],
[self.parse_number('7500'), self.parse_number('0.000320')],
[self.parse_number('22500'), self.parse_number('0.000300')],
[self.parse_number('50000'), self.parse_number('0.000270')],
[self.parse_number('100000'), self.parse_number('0.000250')],
[self.parse_number('200000'), self.parse_number('0.000220')],
[self.parse_number('400000'), self.parse_number('0.000200')],
[self.parse_number('750000'), self.parse_number('0.000170')],
],
'maker': [
[self.parse_number('0'), self.parse_number('0.000200')],
[self.parse_number('250'), self.parse_number('0.000160')],
[self.parse_number('2500'), self.parse_number('0.000140')],
[self.parse_number('7500'), self.parse_number('0.000120')],
[self.parse_number('22500'), self.parse_number('0.000100')],
[self.parse_number('50000'), self.parse_number('0.000080')],
[self.parse_number('100000'), self.parse_number('0.000060')],
[self.parse_number('200000'), self.parse_number('0.000040')],
[self.parse_number('400000'), self.parse_number('0.000020')],
[self.parse_number('750000'), self.parse_number('0')],
],
},
},
},
'delivery': {
'trading': {
'feeSide': 'base',
'tierBased': True,
'percentage': True,
'taker': self.parse_number('0.000500'),
'maker': self.parse_number('0.000100'),
'tiers': {
'taker': [
[self.parse_number('0'), self.parse_number('0.000500')],
[self.parse_number('250'), self.parse_number('0.000450')],
[self.parse_number('2500'), self.parse_number('0.000400')],
[self.parse_number('7500'), self.parse_number('0.000300')],
[self.parse_number('22500'), self.parse_number('0.000250')],
[self.parse_number('50000'), self.parse_number('0.000240')],
[self.parse_number('100000'), self.parse_number('0.000240')],
[self.parse_number('200000'), self.parse_number('0.000240')],
[self.parse_number('400000'), self.parse_number('0.000240')],
[self.parse_number('750000'), self.parse_number('0.000240')],
],
'maker': [
[self.parse_number('0'), self.parse_number('0.000100')],
[self.parse_number('250'), self.parse_number('0.000080')],
[self.parse_number('2500'), self.parse_number('0.000050')],
[self.parse_number('7500'), self.parse_number('0.0000030')],
[self.parse_number('22500'), self.parse_number('0')],
[self.parse_number('50000'), self.parse_number('-0.000050')],
[self.parse_number('100000'), self.parse_number('-0.000060')],
[self.parse_number('200000'), self.parse_number('-0.000070')],
[self.parse_number('400000'), self.parse_number('-0.000080')],
[self.parse_number('750000'), self.parse_number('-0.000090')],
],
},
},
},
},
'commonCurrencies': {
'BCC': 'BCC', # kept for backward-compatibility https://github.com/ccxt/ccxt/issues/4848
'YOYO': 'YOYOW',
},
# exchange-specific options
'options': {
                'fetchCurrencies': True,  # this is a private call and it requires API keys
# 'fetchTradesMethod': 'publicGetAggTrades', # publicGetTrades, publicGetHistoricalTrades
'defaultTimeInForce': 'GTC', # 'GTC' = Good To Cancel(default), 'IOC' = Immediate Or Cancel
'defaultType': 'spot', # 'spot', 'future', 'margin', 'delivery'
'hasAlreadyAuthenticatedSuccessfully': False,
'warnOnFetchOpenOrdersWithoutSymbol': True,
'fetchPositions': 'positionRisk', # or 'account'
'recvWindow': 5 * 1000, # 5 sec, binance default
'timeDifference': 0, # the difference between system clock and Binance clock
'adjustForTimeDifference': False, # controls the adjustment logic upon instantiation
'newOrderRespType': {
'market': 'FULL', # 'ACK' for order id, 'RESULT' for full order or 'FULL' for order with fills
'limit': 'FULL', # we change it from 'ACK' by default to 'FULL'(returns immediately if limit is not hit)
},
'quoteOrderQty': True, # whether market orders support amounts in quote currency
'broker': {
'spot': 'x-R4BD3S82',
'margin': 'x-R4BD3S82',
'future': 'x-xcKtGhcu',
'delivery': 'x-xcKtGhcu',
},
'accountsByType': {
'main': 'MAIN',
'spot': 'MAIN',
'funding': 'FUNDING',
'margin': 'MARGIN',
'future': 'UMFUTURE',
'delivery': 'CMFUTURE',
'mining': 'MINING',
},
'typesByAccount': {
'MAIN': 'spot',
'FUNDING': 'funding',
'MARGIN': 'margin',
'UMFUTURE': 'future',
'CMFUTURE': 'delivery',
'MINING': 'mining',
},
'networks': {
'ERC20': 'ETH',
'TRC20': 'TRX',
'BEP2': 'BNB',
'BEP20': 'BSC',
'OMNI': 'OMNI',
'EOS': 'EOS',
'SPL': 'SOL',
},
'reverseNetworks': {
'tronscan.org': 'TRC20',
'etherscan.io': 'ERC20',
'bscscan.com': 'BSC',
'explorer.binance.org': 'BEP2',
'bithomp.com': 'XRP',
'bloks.io': 'EOS',
'stellar.expert': 'XLM',
'blockchair.com/bitcoin': 'BTC',
'blockchair.com/bitcoin-cash': 'BCH',
'blockchair.com/ecash': 'XEC',
'explorer.litecoin.net': 'LTC',
'explorer.avax.network': 'AVAX',
'solscan.io': 'SOL',
'polkadot.subscan.io': 'DOT',
'dashboard.internetcomputer.org': 'ICP',
'explorer.chiliz.com': 'CHZ',
'cardanoscan.io': 'ADA',
'mainnet.theoan.com': 'AION',
'algoexplorer.io': 'ALGO',
'explorer.ambrosus.com': 'AMB',
'viewblock.io/zilliqa': 'ZIL',
'viewblock.io/arweave': 'AR',
'explorer.ark.io': 'ARK',
'atomscan.com': 'ATOM',
'www.mintscan.io': 'CTK',
'explorer.bitcoindiamond.org': 'BCD',
'btgexplorer.com': 'BTG',
'bts.ai': 'BTS',
'explorer.celo.org': 'CELO',
'explorer.nervos.org': 'CKB',
'cerebro.cortexlabs.ai': 'CTXC',
'chainz.cryptoid.info': 'VIA',
'explorer.dcrdata.org': 'DCR',
'digiexplorer.info': 'DGB',
'dock.subscan.io': 'DOCK',
'dogechain.info': 'DOGE',
'explorer.elrond.com': 'EGLD',
'blockscout.com': 'ETC',
'explore-fetchhub.fetch.ai': 'FET',
'filfox.info': 'FIL',
'fio.bloks.io': 'FIO',
'explorer.firo.org': 'FIRO',
'neoscan.io': 'NEO',
'ftmscan.com': 'FTM',
'explorer.gochain.io': 'GO',
'block.gxb.io': 'GXS',
'hash-hash.info': 'HBAR',
'www.hiveblockexplorer.com': 'HIVE',
'explorer.helium.com': 'HNT',
'tracker.icon.foundation': 'ICX',
'www.iostabc.com': 'IOST',
'explorer.iota.org': 'IOTA',
'iotexscan.io': 'IOTX',
'irishub.iobscan.io': 'IRIS',
'kava.mintscan.io': 'KAVA',
'scope.klaytn.com': 'KLAY',
'kmdexplorer.io': 'KMD',
'kusama.subscan.io': 'KSM',
'explorer.lto.network': 'LTO',
'polygonscan.com': 'POLYGON',
'explorer.ont.io': 'ONT',
'minaexplorer.com': 'MINA',
'nanolooker.com': 'NANO',
'explorer.nebulas.io': 'NAS',
'explorer.nbs.plus': 'NBS',
'explorer.nebl.io': 'NEBL',
'nulscan.io': 'NULS',
'nxscan.com': 'NXS',
'explorer.harmony.one': 'ONE',
'explorer.poa.network': 'POA',
'qtum.info': 'QTUM',
'explorer.rsk.co': 'RSK',
'www.oasisscan.com': 'ROSE',
'ravencoin.network': 'RVN',
'sc.tokenview.com': 'SC',
'secretnodes.com': 'SCRT',
'explorer.skycoin.com': 'SKY',
'steemscan.com': 'STEEM',
'explorer.stacks.co': 'STX',
'www.thetascan.io': 'THETA',
'scan.tomochain.com': 'TOMO',
'explore.vechain.org': 'VET',
'explorer.vite.net': 'VITE',
'www.wanscan.org': 'WAN',
'wavesexplorer.com': 'WAVES',
'wax.eosx.io': 'WAXP',
'waltonchain.pro': 'WTC',
'chain.nem.ninja': 'XEM',
'verge-blockchain.info': 'XVG',
'explorer.yoyow.org': 'YOYOW',
'explorer.zcha.in': 'ZEC',
'explorer.zensystem.io': 'ZEN',
},
'impliedNetworks': {
'ETH': {'ERC20': 'ETH'},
'TRX': {'TRC20': 'TRX'},
},
'legalMoney': {
'MXN': True,
'UGX': True,
'SEK': True,
'CHF': True,
'VND': True,
'AED': True,
'DKK': True,
'KZT': True,
'HUF': True,
'PEN': True,
'PHP': True,
'USD': True,
'TRY': True,
'EUR': True,
'NGN': True,
'PLN': True,
'BRL': True,
'ZAR': True,
'KES': True,
'ARS': True,
'RUB': True,
'AUD': True,
'NOK': True,
'CZK': True,
'GBP': True,
'UAH': True,
'GHS': True,
'HKD': True,
'CAD': True,
'INR': True,
'JPY': True,
'NZD': True,
},
},
# https://binance-docs.github.io/apidocs/spot/en/#error-codes-2
'exceptions': {
'exact': {
'System is under maintenance.': OnMaintenance, # {"code":1,"msg":"System is under maintenance."}
'System abnormality': ExchangeError, # {"code":-1000,"msg":"System abnormality"}
'You are not authorized to execute self request.': PermissionDenied, # {"msg":"You are not authorized to execute self request."}
'API key does not exist': AuthenticationError,
'Order would trigger immediately.': OrderImmediatelyFillable,
'Stop price would trigger immediately.': OrderImmediatelyFillable, # {"code":-2010,"msg":"Stop price would trigger immediately."}
'Order would immediately match and take.': OrderImmediatelyFillable, # {"code":-2010,"msg":"Order would immediately match and take."}
'Account has insufficient balance for requested action.': InsufficientFunds,
'Rest API trading is not enabled.': ExchangeNotAvailable,
"You don't have permission.": PermissionDenied, # {"msg":"You don't have permission.","success":false}
'Market is closed.': ExchangeNotAvailable, # {"code":-1013,"msg":"Market is closed."}
'Too many requests. Please try again later.': DDoSProtection, # {"msg":"Too many requests. Please try again later.","success":false}
'-1000': ExchangeNotAvailable, # {"code":-1000,"msg":"An unknown error occured while processing the request."}
'-1001': ExchangeNotAvailable, # 'Internal error; unable to process your request. Please try again.'
'-1002': AuthenticationError, # 'You are not authorized to execute self request.'
'-1003': RateLimitExceeded, # {"code":-1003,"msg":"Too much request weight used, current limit is 1200 request weight per 1 MINUTE. Please use the websocket for live updates to avoid polling the API."}
'-1013': InvalidOrder, # createOrder -> 'invalid quantity'/'invalid price'/MIN_NOTIONAL
'-1015': RateLimitExceeded, # 'Too many new orders; current limit is %s orders per %s.'
'-1016': ExchangeNotAvailable, # 'This service is no longer available.',
'-1020': BadRequest, # 'This operation is not supported.'
'-1021': InvalidNonce, # 'your time is ahead of server'
'-1022': AuthenticationError, # {"code":-1022,"msg":"Signature for self request is not valid."}
'-1100': BadRequest, # createOrder(symbol, 1, asdf) -> 'Illegal characters found in parameter 'price'
'-1101': BadRequest, # Too many parameters; expected %s and received %s.
'-1102': BadRequest, # Param %s or %s must be sent, but both were empty
'-1103': BadRequest, # An unknown parameter was sent.
'-1104': BadRequest, # Not all sent parameters were read, read 8 parameters but was sent 9
'-1105': BadRequest, # Parameter %s was empty.
'-1106': BadRequest, # Parameter %s sent when not required.
                    '-1111': BadRequest,  # Precision is over the maximum defined for this asset.
'-1112': InvalidOrder, # No orders on book for symbol.
'-1114': BadRequest, # TimeInForce parameter sent when not required.
'-1115': BadRequest, # Invalid timeInForce.
'-1116': BadRequest, # Invalid orderType.
'-1117': BadRequest, # Invalid side.
'-1118': BadRequest, # New client order ID was empty.
'-1119': BadRequest, # Original client order ID was empty.
'-1120': BadRequest, # Invalid interval.
'-1121': BadSymbol, # Invalid symbol.
'-1125': AuthenticationError, # This listenKey does not exist.
'-1127': BadRequest, # More than %s hours between startTime and endTime.
'-1128': BadRequest, # {"code":-1128,"msg":"Combination of optional parameters invalid."}
                    '-1130': BadRequest,  # Data sent for parameter %s is not valid.
'-1131': BadRequest, # recvWindow must be less than 60000
'-2008': AuthenticationError, # {"code":-2008,"msg":"Invalid Api-Key ID."}
'-2010': ExchangeError, # generic error code for createOrder -> 'Account has insufficient balance for requested action.', {"code":-2010,"msg":"Rest API trading is not enabled."}, etc...
'-2011': OrderNotFound, # cancelOrder(1, 'BTC/USDT') -> 'UNKNOWN_ORDER'
'-2013': OrderNotFound, # fetchOrder(1, 'BTC/USDT') -> 'Order does not exist'
'-2014': AuthenticationError, # {"code":-2014, "msg": "API-key format invalid."}
'-2015': AuthenticationError, # "Invalid API-key, IP, or permissions for action."
'-2019': InsufficientFunds, # {"code":-2019,"msg":"Margin is insufficient."}
'-3005': InsufficientFunds, # {"code":-3005,"msg":"Transferring out not allowed. Transfer out amount exceeds max amount."}
'-3006': InsufficientFunds, # {"code":-3006,"msg":"Your borrow amount has exceed maximum borrow amount."}
'-3008': InsufficientFunds, # {"code":-3008,"msg":"Borrow not allowed. Your borrow amount has exceed maximum borrow amount."}
'-3010': ExchangeError, # {"code":-3010,"msg":"Repay not allowed. Repay amount exceeds borrow amount."}
'-3015': ExchangeError, # {"code":-3015,"msg":"Repay amount exceeds borrow amount."}
'-3022': AccountSuspended, # You account's trading is banned.
'-4028': BadRequest, # {"code":-4028,"msg":"Leverage 100 is not valid"}
'-3020': InsufficientFunds, # {"code":-3020,"msg":"Transfer out amount exceeds max amount."}
'-3041': InsufficientFunds, # {"code":-3041,"msg":"Balance is not enough"}
'-5013': InsufficientFunds, # Asset transfer failed: insufficient balance"
'-11008': InsufficientFunds, # {"code":-11008,"msg":"Exceeding the account's maximum borrowable limit."}
'-4051': InsufficientFunds, # {"code":-4051,"msg":"Isolated balance insufficient."}
},
'broad': {
'has no operation privilege': PermissionDenied,
'MAX_POSITION': InvalidOrder, # {"code":-2010,"msg":"Filter failure: MAX_POSITION"}
},
},
})
def cost_to_precision(self, symbol, cost):
return self.decimal_to_precision(cost, TRUNCATE, self.markets[symbol]['precision']['quote'], self.precisionMode, self.paddingMode)
def currency_to_precision(self, currency, fee):
        # info is available in currencies only if the user has configured their API keys
if self.safe_value(self.currencies[currency], 'precision') is not None:
return self.decimal_to_precision(fee, TRUNCATE, self.currencies[currency]['precision'], self.precisionMode, self.paddingMode)
else:
return self.number_to_string(fee)
def nonce(self):
return self.milliseconds() - self.options['timeDifference']
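    # For example, if the local clock runs 150 ms ahead of Binance's server,
    # load_time_difference() stores 150 and nonce() subtracts it, keeping
    # signed-request timestamps inside the server's recvWindow.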
def fetch_time(self, params={}):
defaultType = self.safe_string_2(self.options, 'fetchTime', 'defaultType', 'spot')
type = self.safe_string(params, 'type', defaultType)
query = self.omit(params, 'type')
method = 'publicGetTime'
if type == 'future':
method = 'fapiPublicGetTime'
elif type == 'delivery':
method = 'dapiPublicGetTime'
response = getattr(self, method)(query)
return self.safe_integer(response, 'serverTime')
def load_time_difference(self, params={}):
serverTime = self.fetch_time(params)
after = self.milliseconds()
self.options['timeDifference'] = after - serverTime
return self.options['timeDifference']
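    # Called automatically from fetch_markets() when
    # options['adjustForTimeDifference'] is True; it can also be invoked
    # manually after instantiation if the local clock drifts.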
def fetch_currencies(self, params={}):
fetchCurrenciesEnabled = self.safe_value(self.options, 'fetchCurrencies')
if not fetchCurrenciesEnabled:
return None
        # this endpoint requires authentication
        # while fetchCurrencies is a public API method by design,
        # so we check the keys here and fall back to generating
        # the currencies from the markets
if not self.check_required_credentials(False):
return None
# sandbox/testnet does not support sapi endpoints
apiBackup = self.safe_string(self.urls, 'apiBackup')
if apiBackup is not None:
return None
response = self.sapiGetCapitalConfigGetall(params)
result = {}
for i in range(0, len(response)):
#
# {
# coin: 'LINK',
# depositAllEnable: True,
# withdrawAllEnable: True,
# name: 'ChainLink',
# free: '0.06168',
# locked: '0',
# freeze: '0',
# withdrawing: '0',
# ipoing: '0',
# ipoable: '0',
# storage: '0',
# isLegalMoney: False,
# trading: True,
# networkList: [
# {
# network: 'BNB',
# coin: 'LINK',
# withdrawIntegerMultiple: '0',
# isDefault: False,
# depositEnable: True,
# withdrawEnable: True,
# depositDesc: '',
# withdrawDesc: '',
# specialTips: 'Both a MEMO and an Address are required to successfully deposit your LINK BEP2 tokens to Binance.',
# name: 'BEP2',
# resetAddressStatus: False,
# addressRegex: '^(bnb1)[0-9a-z]{38}$',
# memoRegex: '^[0-9A-Za-z\\-_]{1,120}$',
# withdrawFee: '0.002',
# withdrawMin: '0.01',
# withdrawMax: '9999999',
# minConfirm: 1,
# unLockConfirm: 0
# },
# {
# network: 'BSC',
# coin: 'LINK',
# withdrawIntegerMultiple: '0.00000001',
# isDefault: False,
# depositEnable: True,
# withdrawEnable: True,
# depositDesc: '',
# withdrawDesc: '',
# specialTips: '',
# name: 'BEP20(BSC)',
# resetAddressStatus: False,
# addressRegex: '^(0x)[0-9A-Fa-f]{40}$',
# memoRegex: '',
# withdrawFee: '0.005',
# withdrawMin: '0.01',
# withdrawMax: '9999999',
# minConfirm: 15,
# unLockConfirm: 0
# },
# {
# network: 'ETH',
# coin: 'LINK',
# withdrawIntegerMultiple: '0.00000001',
# isDefault: True,
# depositEnable: True,
# withdrawEnable: True,
# depositDesc: '',
# withdrawDesc: '',
# name: 'ERC20',
# resetAddressStatus: False,
# addressRegex: '^(0x)[0-9A-Fa-f]{40}$',
# memoRegex: '',
# withdrawFee: '0.34',
# withdrawMin: '0.68',
# withdrawMax: '0',
# minConfirm: 12,
# unLockConfirm: 0
# }
# ]
# }
#
entry = response[i]
id = self.safe_string(entry, 'coin')
name = self.safe_string(entry, 'name')
code = self.safe_currency_code(id)
precision = None
isWithdrawEnabled = True
isDepositEnabled = True
networkList = self.safe_value(entry, 'networkList', [])
fees = {}
fee = None
for j in range(0, len(networkList)):
networkItem = networkList[j]
network = self.safe_string(networkItem, 'network')
# name = self.safe_string(networkItem, 'name')
withdrawFee = self.safe_number(networkItem, 'withdrawFee')
depositEnable = self.safe_value(networkItem, 'depositEnable')
withdrawEnable = self.safe_value(networkItem, 'withdrawEnable')
isDepositEnabled = isDepositEnabled or depositEnable
isWithdrawEnabled = isWithdrawEnabled or withdrawEnable
fees[network] = withdrawFee
isDefault = self.safe_value(networkItem, 'isDefault')
if isDefault or fee is None:
fee = withdrawFee
trading = self.safe_value(entry, 'trading')
active = (isWithdrawEnabled and isDepositEnabled and trading)
result[code] = {
'id': id,
'name': name,
'code': code,
'precision': precision,
'info': entry,
'active': active,
'networks': networkList,
'fee': fee,
'fees': fees,
'limits': self.limits,
}
return result
def fetch_markets(self, params={}):
defaultType = self.safe_string_2(self.options, 'fetchMarkets', 'defaultType', 'spot')
type = self.safe_string(params, 'type', defaultType)
query = self.omit(params, 'type')
if (type != 'spot') and (type != 'future') and (type != 'margin') and (type != 'delivery'):
raise ExchangeError(self.id + " does not support '" + type + "' type, set exchange.options['defaultType'] to 'spot', 'margin', 'delivery' or 'future'") # eslint-disable-line quotes
method = 'publicGetExchangeInfo'
if type == 'future':
method = 'fapiPublicGetExchangeInfo'
elif type == 'delivery':
method = 'dapiPublicGetExchangeInfo'
response = getattr(self, method)(query)
#
# spot / margin
#
# {
# "timezone":"UTC",
# "serverTime":1575416692969,
# "rateLimits":[
# {"rateLimitType":"REQUEST_WEIGHT","interval":"MINUTE","intervalNum":1,"limit":1200},
# {"rateLimitType":"ORDERS","interval":"SECOND","intervalNum":10,"limit":100},
# {"rateLimitType":"ORDERS","interval":"DAY","intervalNum":1,"limit":200000}
# ],
# "exchangeFilters":[],
# "symbols":[
# {
# "symbol":"ETHBTC",
# "status":"TRADING",
# "baseAsset":"ETH",
# "baseAssetPrecision":8,
# "quoteAsset":"BTC",
# "quotePrecision":8,
# "baseCommissionPrecision":8,
# "quoteCommissionPrecision":8,
# "orderTypes":["LIMIT","LIMIT_MAKER","MARKET","STOP_LOSS_LIMIT","TAKE_PROFIT_LIMIT"],
# "icebergAllowed":true,
# "ocoAllowed":true,
# "quoteOrderQtyMarketAllowed":true,
# "isSpotTradingAllowed":true,
# "isMarginTradingAllowed":true,
# "filters":[
# {"filterType":"PRICE_FILTER","minPrice":"0.00000100","maxPrice":"100000.00000000","tickSize":"0.00000100"},
# {"filterType":"PERCENT_PRICE","multiplierUp":"5","multiplierDown":"0.2","avgPriceMins":5},
# {"filterType":"LOT_SIZE","minQty":"0.00100000","maxQty":"100000.00000000","stepSize":"0.00100000"},
# {"filterType":"MIN_NOTIONAL","minNotional":"0.00010000","applyToMarket":true,"avgPriceMins":5},
# {"filterType":"ICEBERG_PARTS","limit":10},
# {"filterType":"MARKET_LOT_SIZE","minQty":"0.00000000","maxQty":"63100.00000000","stepSize":"0.00000000"},
# {"filterType":"MAX_NUM_ALGO_ORDERS","maxNumAlgoOrders":5}
# ]
# },
# ],
# }
#
# futures/usdt-margined(fapi)
#
# {
# "timezone":"UTC",
# "serverTime":1575417244353,
# "rateLimits":[
# {"rateLimitType":"REQUEST_WEIGHT","interval":"MINUTE","intervalNum":1,"limit":1200},
# {"rateLimitType":"ORDERS","interval":"MINUTE","intervalNum":1,"limit":1200}
# ],
# "exchangeFilters":[],
# "symbols":[
# {
# "symbol":"BTCUSDT",
# "status":"TRADING",
# "maintMarginPercent":"2.5000",
# "requiredMarginPercent":"5.0000",
# "baseAsset":"BTC",
# "quoteAsset":"USDT",
# "pricePrecision":2,
# "quantityPrecision":3,
# "baseAssetPrecision":8,
# "quotePrecision":8,
# "filters":[
# {"minPrice":"0.01","maxPrice":"100000","filterType":"PRICE_FILTER","tickSize":"0.01"},
# {"stepSize":"0.001","filterType":"LOT_SIZE","maxQty":"1000","minQty":"0.001"},
# {"stepSize":"0.001","filterType":"MARKET_LOT_SIZE","maxQty":"1000","minQty":"0.001"},
# {"limit":200,"filterType":"MAX_NUM_ORDERS"},
# {"multiplierDown":"0.8500","multiplierUp":"1.1500","multiplierDecimal":"4","filterType":"PERCENT_PRICE"}
# ],
# "orderTypes":["LIMIT","MARKET","STOP"],
# "timeInForce":["GTC","IOC","FOK","GTX"]
# }
# ]
# }
#
# delivery/coin-margined(dapi)
#
# {
# "timezone": "UTC",
# "serverTime": 1597667052958,
# "rateLimits": [
# {"rateLimitType":"REQUEST_WEIGHT","interval":"MINUTE","intervalNum":1,"limit":6000},
# {"rateLimitType":"ORDERS","interval":"MINUTE","intervalNum":1,"limit":6000}
# ],
# "exchangeFilters": [],
# "symbols": [
# {
# "symbol": "BTCUSD_200925",
# "pair": "BTCUSD",
# "contractType": "CURRENT_QUARTER",
# "deliveryDate": 1601020800000,
# "onboardDate": 1590739200000,
# "contractStatus": "TRADING",
# "contractSize": 100,
# "marginAsset": "BTC",
# "maintMarginPercent": "2.5000",
# "requiredMarginPercent": "5.0000",
# "baseAsset": "BTC",
# "quoteAsset": "USD",
# "pricePrecision": 1,
# "quantityPrecision": 0,
# "baseAssetPrecision": 8,
# "quotePrecision": 8,
# "equalQtyPrecision": 4,
# "filters": [
# {"minPrice":"0.1","maxPrice":"100000","filterType":"PRICE_FILTER","tickSize":"0.1"},
# {"stepSize":"1","filterType":"LOT_SIZE","maxQty":"100000","minQty":"1"},
# {"stepSize":"0","filterType":"MARKET_LOT_SIZE","maxQty":"100000","minQty":"1"},
# {"limit":200,"filterType":"MAX_NUM_ORDERS"},
# {"multiplierDown":"0.9500","multiplierUp":"1.0500","multiplierDecimal":"4","filterType":"PERCENT_PRICE"}
# ],
# "orderTypes": ["LIMIT","MARKET","STOP","STOP_MARKET","TAKE_PROFIT","TAKE_PROFIT_MARKET","TRAILING_STOP_MARKET"],
# "timeInForce": ["GTC","IOC","FOK","GTX"]
# },
# {
# "symbol": "BTCUSD_PERP",
# "pair": "BTCUSD",
# "contractType": "PERPETUAL",
# "deliveryDate": 4133404800000,
# "onboardDate": 1596006000000,
# "contractStatus": "TRADING",
# "contractSize": 100,
# "marginAsset": "BTC",
# "maintMarginPercent": "2.5000",
# "requiredMarginPercent": "5.0000",
# "baseAsset": "BTC",
# "quoteAsset": "USD",
# "pricePrecision": 1,
# "quantityPrecision": 0,
# "baseAssetPrecision": 8,
# "quotePrecision": 8,
# "equalQtyPrecision": 4,
# "filters": [
# {"minPrice":"0.1","maxPrice":"100000","filterType":"PRICE_FILTER","tickSize":"0.1"},
# {"stepSize":"1","filterType":"LOT_SIZE","maxQty":"100000","minQty":"1"},
# {"stepSize":"1","filterType":"MARKET_LOT_SIZE","maxQty":"100000","minQty":"1"},
# {"limit":200,"filterType":"MAX_NUM_ORDERS"},
# {"multiplierDown":"0.8500","multiplierUp":"1.1500","multiplierDecimal":"4","filterType":"PERCENT_PRICE"}
# ],
# "orderTypes": ["LIMIT","MARKET","STOP","STOP_MARKET","TAKE_PROFIT","TAKE_PROFIT_MARKET","TRAILING_STOP_MARKET"],
# "timeInForce": ["GTC","IOC","FOK","GTX"]
# }
# ]
# }
#
if self.options['adjustForTimeDifference']:
self.load_time_difference()
markets = self.safe_value(response, 'symbols', [])
result = []
for i in range(0, len(markets)):
market = markets[i]
spot = (type == 'spot')
future = (type == 'future')
delivery = (type == 'delivery')
id = self.safe_string(market, 'symbol')
lowercaseId = self.safe_string_lower(market, 'symbol')
baseId = self.safe_string(market, 'baseAsset')
quoteId = self.safe_string(market, 'quoteAsset')
base = self.safe_currency_code(baseId)
quote = self.safe_currency_code(quoteId)
contractType = self.safe_string(market, 'contractType')
idSymbol = (future or delivery) and (contractType != 'PERPETUAL')
symbol = None
expiry = None
if idSymbol:
symbol = id
expiry = self.safe_integer(market, 'deliveryDate')
else:
symbol = base + '/' + quote
filters = self.safe_value(market, 'filters', [])
filtersByType = self.index_by(filters, 'filterType')
precision = {
'base': self.safe_integer(market, 'baseAssetPrecision'),
'quote': self.safe_integer(market, 'quotePrecision'),
'amount': self.safe_integer(market, 'quantityPrecision'),
'price': self.safe_integer(market, 'pricePrecision'),
}
status = self.safe_string_2(market, 'status', 'contractStatus')
active = (status == 'TRADING')
margin = self.safe_value(market, 'isMarginTradingAllowed', False)
contractSize = None
fees = self.fees
if future or delivery:
contractSize = self.safe_string(market, 'contractSize', '1')
fees = self.fees[type]
maker = fees['trading']['maker']
taker = fees['trading']['taker']
settleId = self.safe_string(market, 'marginAsset')
settle = self.safe_currency_code(settleId)
entry = {
'id': id,
'lowercaseId': lowercaseId,
'symbol': symbol,
'base': base,
'quote': quote,
'baseId': baseId,
'quoteId': quoteId,
'info': market,
'spot': spot,
'type': type,
'margin': margin,
'future': future,
'delivery': delivery,
'linear': future,
'inverse': delivery,
'expiry': expiry,
'expiryDatetime': self.iso8601(expiry),
'settleId': settleId,
'settle': settle,
'active': active,
'precision': precision,
'contractSize': contractSize,
'maker': maker,
'taker': taker,
'limits': {
'amount': {
'min': None,
'max': None,
},
'price': {
'min': None,
'max': None,
},
'cost': {
'min': None,
'max': None,
},
},
}
if 'PRICE_FILTER' in filtersByType:
filter = self.safe_value(filtersByType, 'PRICE_FILTER', {})
tickSize = self.safe_string(filter, 'tickSize')
entry['precision']['price'] = self.precision_from_string(tickSize)
# PRICE_FILTER reports zero values for maxPrice
# since they updated filter types in November 2018
# https://github.com/ccxt/ccxt/issues/4286
# therefore limits['price']['max'] doesn't have any meaningful value except None
entry['limits']['price'] = {
'min': self.safe_number(filter, 'minPrice'),
'max': self.safe_number(filter, 'maxPrice'),
}
if 'LOT_SIZE' in filtersByType:
filter = self.safe_value(filtersByType, 'LOT_SIZE', {})
stepSize = self.safe_string(filter, 'stepSize')
entry['precision']['amount'] = self.precision_from_string(stepSize)
entry['limits']['amount'] = {
'min': self.safe_number(filter, 'minQty'),
'max': self.safe_number(filter, 'maxQty'),
}
if 'MARKET_LOT_SIZE' in filtersByType:
filter = self.safe_value(filtersByType, 'MARKET_LOT_SIZE', {})
entry['limits']['market'] = {
'min': self.safe_number(filter, 'minQty'),
'max': self.safe_number(filter, 'maxQty'),
}
if 'MIN_NOTIONAL' in filtersByType:
filter = self.safe_value(filtersByType, 'MIN_NOTIONAL', {})
entry['limits']['cost']['min'] = self.safe_number_2(filter, 'minNotional', 'notional')
result.append(entry)
return result
def fetch_balance(self, params={}):
self.load_markets()
defaultType = self.safe_string_2(self.options, 'fetchBalance', 'defaultType', 'spot')
type = self.safe_string(params, 'type', defaultType)
method = 'privateGetAccount'
if type == 'future':
options = self.safe_value(self.options, type, {})
fetchBalanceOptions = self.safe_value(options, 'fetchBalance', {})
method = self.safe_string(fetchBalanceOptions, 'method', 'fapiPrivateV2GetAccount')
elif type == 'delivery':
options = self.safe_value(self.options, type, {})
fetchBalanceOptions = self.safe_value(options, 'fetchBalance', {})
method = self.safe_string(fetchBalanceOptions, 'method', 'dapiPrivateGetAccount')
elif type == 'margin':
method = 'sapiGetMarginAccount'
elif type == 'savings':
method = 'sapiGetLendingUnionAccount'
elif type == 'funding':
method = 'sapiPostAssetGetFundingAsset'
query = self.omit(params, 'type')
response = getattr(self, method)(query)
#
# spot
#
# {
# makerCommission: 10,
# takerCommission: 10,
# buyerCommission: 0,
# sellerCommission: 0,
# canTrade: True,
# canWithdraw: True,
# canDeposit: True,
# updateTime: 1575357359602,
# accountType: "MARGIN",
# balances: [
# {asset: "BTC", free: "0.00219821", locked: "0.00000000" },
# ]
# }
#
# margin
#
# {
# "borrowEnabled":true,
# "marginLevel":"999.00000000",
# "totalAssetOfBtc":"0.00000000",
# "totalLiabilityOfBtc":"0.00000000",
# "totalNetAssetOfBtc":"0.00000000",
# "tradeEnabled":true,
# "transferEnabled":true,
# "userAssets":[
# {"asset":"MATIC","borrowed":"0.00000000","free":"0.00000000","interest":"0.00000000","locked":"0.00000000","netAsset":"0.00000000"},
# {"asset":"VET","borrowed":"0.00000000","free":"0.00000000","interest":"0.00000000","locked":"0.00000000","netAsset":"0.00000000"},
# {"asset":"USDT","borrowed":"0.00000000","free":"0.00000000","interest":"0.00000000","locked":"0.00000000","netAsset":"0.00000000"}
# ],
# }
#
# futures(fapi)
#
# fapiPrivateGetAccount
#
# {
# "feeTier":0,
# "canTrade":true,
# "canDeposit":true,
# "canWithdraw":true,
# "updateTime":0,
# "totalInitialMargin":"0.00000000",
# "totalMaintMargin":"0.00000000",
# "totalWalletBalance":"4.54000000",
# "totalUnrealizedProfit":"0.00000000",
# "totalMarginBalance":"4.54000000",
# "totalPositionInitialMargin":"0.00000000",
# "totalOpenOrderInitialMargin":"0.00000000",
# "maxWithdrawAmount":"4.54000000",
# "assets":[
# {
# "asset":"USDT",
# "walletBalance":"4.54000000",
# "unrealizedProfit":"0.00000000",
# "marginBalance":"4.54000000",
# "maintMargin":"0.00000000",
# "initialMargin":"0.00000000",
# "positionInitialMargin":"0.00000000",
# "openOrderInitialMargin":"0.00000000",
# "maxWithdrawAmount":"4.54000000"
# }
# ],
# "positions":[
# {
# "symbol":"BTCUSDT",
# "initialMargin":"0.00000",
# "maintMargin":"0.00000",
# "unrealizedProfit":"0.00000000",
# "positionInitialMargin":"0.00000",
# "openOrderInitialMargin":"0.00000"
# }
# ]
# }
#
# fapiPrivateV2GetAccount
#
# {
# "feeTier":0,
# "canTrade":true,
# "canDeposit":true,
# "canWithdraw":true,
# "updateTime":0,
# "totalInitialMargin":"0.00000000",
# "totalMaintMargin":"0.00000000",
# "totalWalletBalance":"0.00000000",
# "totalUnrealizedProfit":"0.00000000",
# "totalMarginBalance":"0.00000000",
# "totalPositionInitialMargin":"0.00000000",
# "totalOpenOrderInitialMargin":"0.00000000",
# "totalCrossWalletBalance":"0.00000000",
# "totalCrossUnPnl":"0.00000000",
# "availableBalance":"0.00000000",
# "maxWithdrawAmount":"0.00000000",
# "assets":[
# {
# "asset":"BNB",
# "walletBalance":"0.01000000",
# "unrealizedProfit":"0.00000000",
# "marginBalance":"0.01000000",
# "maintMargin":"0.00000000",
# "initialMargin":"0.00000000",
# "positionInitialMargin":"0.00000000",
# "openOrderInitialMargin":"0.00000000",
# "maxWithdrawAmount":"0.01000000",
# "crossWalletBalance":"0.01000000",
# "crossUnPnl":"0.00000000",
# "availableBalance":"0.01000000"
# }
# ],
# "positions":[
# {
# "symbol":"BTCUSDT",
# "initialMargin":"0",
# "maintMargin":"0",
# "unrealizedProfit":"0.00000000",
# "positionInitialMargin":"0",
# "openOrderInitialMargin":"0",
# "leverage":"20",
# "isolated":false,
# "entryPrice":"0.00000",
# "maxNotional":"5000000",
# "positionSide":"BOTH"
# },
# ]
# }
#
# fapiPrivateV2GetBalance
#
# [
# {
# "accountAlias":"FzFzXquXXqoC",
# "asset":"BNB",
# "balance":"0.01000000",
# "crossWalletBalance":"0.01000000",
# "crossUnPnl":"0.00000000",
# "availableBalance":"0.01000000",
# "maxWithdrawAmount":"0.01000000"
# }
# ]
#
# savings
#
# {
# "totalAmountInBTC": "0.3172",
# "totalAmountInUSDT": "10000",
# "totalFixedAmountInBTC": "0.3172",
# "totalFixedAmountInUSDT": "10000",
# "totalFlexibleInBTC": "0",
# "totalFlexibleInUSDT": "0",
# "positionAmountVos": [
# {
# "asset": "USDT",
# "amount": "10000",
# "amountInBTC": "0.3172",
# "amountInUSDT": "10000"
# },
# {
# "asset": "BUSD",
# "amount": "0",
# "amountInBTC": "0",
# "amountInUSDT": "0"
# }
# ]
# }
#
# binance pay
#
# [
# {
# "asset": "BUSD",
# "free": "1129.83",
# "locked": "0",
# "freeze": "0",
# "withdrawing": "0"
# }
# ]
#
result = {
'info': response,
}
timestamp = None
if (type == 'spot') or (type == 'margin'):
timestamp = self.safe_integer(response, 'updateTime')
balances = self.safe_value_2(response, 'balances', 'userAssets', [])
for i in range(0, len(balances)):
balance = balances[i]
currencyId = self.safe_string(balance, 'asset')
code = self.safe_currency_code(currencyId)
account = self.account()
account['free'] = self.safe_string(balance, 'free')
account['used'] = self.safe_string(balance, 'locked')
result[code] = account
elif type == 'savings':
positionAmountVos = self.safe_value(response, 'positionAmountVos')
for i in range(0, len(positionAmountVos)):
entry = positionAmountVos[i]
currencyId = self.safe_string(entry, 'asset')
code = self.safe_currency_code(currencyId)
account = self.account()
usedAndTotal = self.safe_string(entry, 'amount')
account['total'] = usedAndTotal
account['used'] = usedAndTotal
result[code] = account
elif type == 'funding':
for i in range(0, len(response)):
entry = response[i]
account = self.account()
currencyId = self.safe_string(entry, 'asset')
code = self.safe_currency_code(currencyId)
account['free'] = self.safe_string(entry, 'free')
frozen = self.safe_string(entry, 'freeze')
withdrawing = self.safe_string(entry, 'withdrawing')
locked = self.safe_string(entry, 'locked')
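                # funds on hold = freeze + locked + withdrawing, summed with
                # Precise string arithmetic to avoid floating point precision loss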
account['used'] = Precise.string_add(frozen, Precise.string_add(locked, withdrawing))
result[code] = account
else:
balances = response
if not isinstance(response, list):
balances = self.safe_value(response, 'assets', [])
for i in range(0, len(balances)):
balance = balances[i]
currencyId = self.safe_string(balance, 'asset')
code = self.safe_currency_code(currencyId)
account = self.account()
account['free'] = self.safe_string(balance, 'availableBalance')
account['used'] = self.safe_string(balance, 'initialMargin')
account['total'] = self.safe_string_2(balance, 'marginBalance', 'balance')
result[code] = account
result['timestamp'] = timestamp
result['datetime'] = self.iso8601(timestamp)
return self.parse_balance(result)
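    # A usage sketch(illustrative, not part of the class): the account type is
    # selected via the 'type' param, falling back to options['defaultType'], so,
    # assuming a configured instance with hypothetical credentials:
    #
    #     exchange = ccxt.binance({'apiKey': '...', 'secret': '...'})
    #     spot = exchange.fetch_balance()                      # default account type
    #     margin = exchange.fetch_balance({'type': 'margin'})
    #     funding = exchange.fetch_balance({'type': 'funding'})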
def fetch_order_book(self, symbol, limit=None, params={}):
self.load_markets()
market = self.market(symbol)
request = {
'symbol': market['id'],
}
if limit is not None:
request['limit'] = limit # default 100, max 5000, see https://github.com/binance-exchange/binance-official-api-docs/blob/master/rest-api.md#order-book
method = 'publicGetDepth'
if market['linear']:
method = 'fapiPublicGetDepth'
elif market['inverse']:
method = 'dapiPublicGetDepth'
response = getattr(self, method)(self.extend(request, params))
#
# future
#
# {
# "lastUpdateId":333598053905,
# "E":1618631511986,
# "T":1618631511964,
# "bids":[
# ["2493.56","20.189"],
# ["2493.54","1.000"],
        #             ["2493.51","0.005"],
        #             ["2493.37","0.280"],
        #             ["2493.31","0.865"],
        #             ...(several hundred more bid levels omitted for brevity)...
        #             ["2483.80","0.003"],
        #             ["2483.77","0.020"]
# ],
# "asks":[
# ["2493.57","0.877"],
# ["2493.62","0.063"],
# ["2493.71","12.054"],
# ]
# }
timestamp = self.safe_integer(response, 'T')
orderbook = self.parse_order_book(response, symbol, timestamp)
orderbook['nonce'] = self.safe_integer(response, 'lastUpdateId')
return orderbook
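    # Usage sketch(assumes a configured instance): the returned book carries the
    # exchange 'lastUpdateId' as orderbook['nonce'], which can be compared against
    # incremental depth updates to keep a local copy of the book in sync:
    #
    #     book = exchange.fetch_order_book('BTC/USDT', limit=100)
    #     best_bid, best_ask = book['bids'][0], book['asks'][0]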
def parse_ticker(self, ticker, market=None):
#
# {
# symbol: 'ETHBTC',
# priceChange: '0.00068700',
# priceChangePercent: '2.075',
# weightedAvgPrice: '0.03342681',
# prevClosePrice: '0.03310300',
# lastPrice: '0.03378900',
# lastQty: '0.07700000',
# bidPrice: '0.03378900',
# bidQty: '7.16800000',
# askPrice: '0.03379000',
# askQty: '24.00000000',
# openPrice: '0.03310200',
# highPrice: '0.03388900',
# lowPrice: '0.03306900',
# volume: '205478.41000000',
# quoteVolume: '6868.48826294',
# openTime: 1601469986932,
# closeTime: 1601556386932,
# firstId: 196098772,
# lastId: 196186315,
# count: 87544
# }
#
# coinm
# {
# baseVolume: '214549.95171161',
# closeTime: '1621965286847',
# count: '1283779',
# firstId: '152560106',
# highPrice: '39938.3',
# lastId: '153843955',
# lastPrice: '37993.4',
# lastQty: '1',
# lowPrice: '36457.2',
# openPrice: '37783.4',
# openTime: '1621878840000',
# pair: 'BTCUSD',
# priceChange: '210.0',
# priceChangePercent: '0.556',
# symbol: 'BTCUSD_PERP',
# volume: '81990451',
# weightedAvgPrice: '38215.08713747'
# }
#
timestamp = self.safe_integer(ticker, 'closeTime')
marketId = self.safe_string(ticker, 'symbol')
symbol = self.safe_symbol(marketId, market)
last = self.safe_number(ticker, 'lastPrice')
isCoinm = ('baseVolume' in ticker)
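        # for coin-margined(inverse) tickers 'volume' appears to count contracts
        # while 'baseVolume' holds the base-asset turnover(see the sample above),
        # hence the swapped field mapping below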
baseVolume = None
quoteVolume = None
if isCoinm:
baseVolume = self.safe_number(ticker, 'baseVolume')
quoteVolume = self.safe_number(ticker, 'volume')
else:
baseVolume = self.safe_number(ticker, 'volume')
quoteVolume = self.safe_number(ticker, 'quoteVolume')
return self.safe_ticker({
'symbol': symbol,
'timestamp': timestamp,
'datetime': self.iso8601(timestamp),
'high': self.safe_number(ticker, 'highPrice'),
'low': self.safe_number(ticker, 'lowPrice'),
'bid': self.safe_number(ticker, 'bidPrice'),
'bidVolume': self.safe_number(ticker, 'bidQty'),
'ask': self.safe_number(ticker, 'askPrice'),
'askVolume': self.safe_number(ticker, 'askQty'),
'vwap': self.safe_number(ticker, 'weightedAvgPrice'),
'open': self.safe_number(ticker, 'openPrice'),
'close': last,
'last': last,
'previousClose': self.safe_number(ticker, 'prevClosePrice'), # previous day close
'change': self.safe_number(ticker, 'priceChange'),
'percentage': self.safe_number(ticker, 'priceChangePercent'),
'average': None,
'baseVolume': baseVolume,
'quoteVolume': quoteVolume,
'info': ticker,
}, market)
def fetch_status(self, params={}):
response = self.sapiGetSystemStatus(params)
status = self.safe_string(response, 'status')
if status is not None:
status = 'ok' if (status == '0') else 'maintenance'
self.status = self.extend(self.status, {
'status': status,
'updated': self.milliseconds(),
})
return self.status
def fetch_ticker(self, symbol, params={}):
self.load_markets()
market = self.market(symbol)
request = {
'symbol': market['id'],
}
method = 'publicGetTicker24hr'
if market['linear']:
method = 'fapiPublicGetTicker24hr'
elif market['inverse']:
method = 'dapiPublicGetTicker24hr'
response = getattr(self, method)(self.extend(request, params))
if isinstance(response, list):
firstTicker = self.safe_value(response, 0, {})
return self.parse_ticker(firstTicker, market)
return self.parse_ticker(response, market)
def fetch_bids_asks(self, symbols=None, params={}):
self.load_markets()
defaultType = self.safe_string_2(self.options, 'fetchBidsAsks', 'defaultType', 'spot')
type = self.safe_string(params, 'type', defaultType)
query = self.omit(params, 'type')
method = None
if type == 'future':
method = 'fapiPublicGetTickerBookTicker'
elif type == 'delivery':
method = 'dapiPublicGetTickerBookTicker'
else:
method = 'publicGetTickerBookTicker'
response = getattr(self, method)(query)
return self.parse_tickers(response, symbols)
def fetch_tickers(self, symbols=None, params={}):
self.load_markets()
defaultType = self.safe_string_2(self.options, 'fetchTickers', 'defaultType', 'spot')
type = self.safe_string(params, 'type', defaultType)
query = self.omit(params, 'type')
defaultMethod = None
if type == 'future':
defaultMethod = 'fapiPublicGetTicker24hr'
elif type == 'delivery':
defaultMethod = 'dapiPublicGetTicker24hr'
else:
defaultMethod = 'publicGetTicker24hr'
method = self.safe_string(self.options, 'fetchTickersMethod', defaultMethod)
response = getattr(self, method)(query)
return self.parse_tickers(response, symbols)
def parse_ohlcv(self, ohlcv, market=None):
# when api method = publicGetKlines or fapiPublicGetKlines or dapiPublicGetKlines
# [
# 1591478520000, # open time
# "0.02501300", # open
# "0.02501800", # high
# "0.02500000", # low
# "0.02500000", # close
# "22.19000000", # volume
# 1591478579999, # close time
# "0.55490906", # quote asset volume
# 40, # number of trades
# "10.92900000", # taker buy base asset volume
# "0.27336462", # taker buy quote asset volume
# "0" # ignore
# ]
#
# when api method = fapiPublicGetMarkPriceKlines or fapiPublicGetIndexPriceKlines
# [
# [
# 1591256460000, # Open time
# "9653.29201333", # Open
# "9654.56401333", # High
# "9653.07367333", # Low
        #             "9653.07367333",     # Close (or latest price)
# "0", # Ignore
# 1591256519999, # Close time
# "0", # Ignore
        #             60,               # Number of basic data
# "0", # Ignore
# "0", # Ignore
# "0" # Ignore
# ]
# ]
#
return [
self.safe_integer(ohlcv, 0),
self.safe_number(ohlcv, 1),
self.safe_number(ohlcv, 2),
self.safe_number(ohlcv, 3),
self.safe_number(ohlcv, 4),
self.safe_number(ohlcv, 5),
]
def fetch_ohlcv(self, symbol, timeframe='1m', since=None, limit=None, params={}):
self.load_markets()
market = self.market(symbol)
        # binance docs say the default limit is 500, max 1500 for futures, max 1000 for spot markets
        # in reality, a time range wider than 500 candles does not work correctly
defaultLimit = 500
maxLimit = 1500
price = self.safe_string(params, 'price')
params = self.omit(params, 'price')
limit = defaultLimit if (limit is None) else min(limit, maxLimit)
request = {
'interval': self.timeframes[timeframe],
'limit': limit,
}
if price == 'index':
            request['pair'] = market['id']   # the index price endpoints take a 'pair' argument instead of 'symbol'
else:
request['symbol'] = market['id']
# duration = self.parse_timeframe(timeframe)
if since is not None:
request['startTime'] = since
#
# It didn't work before without the endTime
# https://github.com/ccxt/ccxt/issues/8454
#
# if since > 0:
# endTime = self.sum(since, limit * duration * 1000 - 1)
# now = self.milliseconds()
# request['endTime'] = min(now, endTime)
# }
method = 'publicGetKlines'
if price == 'mark':
if market['inverse']:
method = 'dapiPublicGetMarkPriceKlines'
else:
method = 'fapiPublicGetMarkPriceKlines'
elif price == 'index':
if market['inverse']:
method = 'dapiPublicGetIndexPriceKlines'
else:
method = 'fapiPublicGetIndexPriceKlines'
elif market['linear']:
method = 'fapiPublicGetKlines'
elif market['inverse']:
method = 'dapiPublicGetKlines'
response = getattr(self, method)(self.extend(request, params))
#
# [
# [1591478520000,"0.02501300","0.02501800","0.02500000","0.02500000","22.19000000",1591478579999,"0.55490906",40,"10.92900000","0.27336462","0"],
# [1591478580000,"0.02499600","0.02500900","0.02499400","0.02500300","21.34700000",1591478639999,"0.53370468",24,"7.53800000","0.18850725","0"],
# [1591478640000,"0.02500800","0.02501100","0.02500300","0.02500800","154.14200000",1591478699999,"3.85405839",97,"5.32300000","0.13312641","0"],
# ]
#
return self.parse_ohlcvs(response, market, timeframe, since, limit)
def fetch_mark_ohlcv(self, symbol, timeframe='1m', since=None, limit=None, params={}):
request = {
'price': 'mark',
}
return self.fetch_ohlcv(symbol, timeframe, since, limit, self.extend(request, params))
def fetch_index_ohlcv(self, symbol, timeframe='1m', since=None, limit=None, params={}):
request = {
'price': 'index',
}
return self.fetch_ohlcv(symbol, timeframe, since, limit, self.extend(request, params))
def parse_trade(self, trade, market=None):
if 'isDustTrade' in trade:
return self.parse_dust_trade(trade, market)
#
# aggregate trades
# https://github.com/binance-exchange/binance-official-api-docs/blob/master/rest-api.md#compressedaggregate-trades-list
#
# {
# "a": 26129, # Aggregate tradeId
# "p": "0.01633102", # Price
# "q": "4.70443515", # Quantity
# "f": 27781, # First tradeId
# "l": 27781, # Last tradeId
# "T": 1498793709153, # Timestamp
# "m": True, # Was the buyer the maker?
# "M": True # Was the trade the best price match?
# }
#
# recent public trades and old public trades
# https://github.com/binance-exchange/binance-official-api-docs/blob/master/rest-api.md#recent-trades-list
# https://github.com/binance-exchange/binance-official-api-docs/blob/master/rest-api.md#old-trade-lookup-market_data
#
# {
# "id": 28457,
# "price": "4.00000100",
# "qty": "12.00000000",
# "time": 1499865549590,
# "isBuyerMaker": True,
# "isBestMatch": True
# }
#
# private trades
# https://github.com/binance-exchange/binance-official-api-docs/blob/master/rest-api.md#account-trade-list-user_data
#
# {
# "symbol": "BNBBTC",
# "id": 28457,
# "orderId": 100234,
# "price": "4.00000100",
# "qty": "12.00000000",
# "commission": "10.10000000",
# "commissionAsset": "BNB",
# "time": 1499865549590,
# "isBuyer": True,
# "isMaker": False,
# "isBestMatch": True
# }
#
# futures trades
# https://binance-docs.github.io/apidocs/futures/en/#account-trade-list-user_data
#
# {
# "accountId": 20,
# "buyer": False,
# "commission": "-0.07819010",
# "commissionAsset": "USDT",
# "counterPartyId": 653,
# "id": 698759,
# "maker": False,
# "orderId": 25851813,
# "price": "7819.01",
# "qty": "0.002",
# "quoteQty": "0.01563",
# "realizedPnl": "-0.91539999",
# "side": "SELL",
# "symbol": "BTCUSDT",
# "time": 1569514978020
# }
# {
# "symbol": "BTCUSDT",
# "id": 477128891,
# "orderId": 13809777875,
# "side": "SELL",
# "price": "38479.55",
# "qty": "0.001",
# "realizedPnl": "-0.00009534",
# "marginAsset": "USDT",
# "quoteQty": "38.47955",
# "commission": "-0.00076959",
# "commissionAsset": "USDT",
# "time": 1612733566708,
# "positionSide": "BOTH",
# "maker": True,
# "buyer": False
# }
#
# {respType: FULL}
#
# {
# "price": "4000.00000000",
# "qty": "1.00000000",
# "commission": "4.00000000",
# "commissionAsset": "USDT",
# "tradeId": "1234",
# }
#
timestamp = self.safe_integer_2(trade, 'T', 'time')
price = self.safe_string_2(trade, 'p', 'price')
amount = self.safe_string_2(trade, 'q', 'qty')
cost = self.safe_string_2(trade, 'quoteQty', 'baseQty') # inverse futures
marketId = self.safe_string(trade, 'symbol')
symbol = self.safe_symbol(marketId, market)
id = self.safe_string_2(trade, 't', 'a')
id = self.safe_string_2(trade, 'id', 'tradeId', id)
side = None
orderId = self.safe_string(trade, 'orderId')
if 'm' in trade:
            side = 'sell' if trade['m'] else 'buy'  # this is reversed intentionally
elif 'isBuyerMaker' in trade:
side = 'sell' if trade['isBuyerMaker'] else 'buy'
elif 'side' in trade:
side = self.safe_string_lower(trade, 'side')
else:
if 'isBuyer' in trade:
                side = 'buy' if trade['isBuyer'] else 'sell'  # this is the actual side
fee = None
if 'commission' in trade:
fee = {
'cost': self.safe_string(trade, 'commission'),
'currency': self.safe_currency_code(self.safe_string(trade, 'commissionAsset')),
}
takerOrMaker = None
if 'isMaker' in trade:
takerOrMaker = 'maker' if trade['isMaker'] else 'taker'
if 'maker' in trade:
takerOrMaker = 'maker' if trade['maker'] else 'taker'
return self.safe_trade({
'info': trade,
'timestamp': timestamp,
'datetime': self.iso8601(timestamp),
'symbol': symbol,
'id': id,
'order': orderId,
'type': None,
'side': side,
'takerOrMaker': takerOrMaker,
'price': price,
'amount': amount,
'cost': cost,
'fee': fee,
}, market)
def fetch_trades(self, symbol, since=None, limit=None, params={}):
self.load_markets()
market = self.market(symbol)
request = {
'symbol': market['id'],
# 'fromId': 123, # ID to get aggregate trades from INCLUSIVE.
# 'startTime': 456, # Timestamp in ms to get aggregate trades from INCLUSIVE.
# 'endTime': 789, # Timestamp in ms to get aggregate trades until INCLUSIVE.
# 'limit': 500, # default = 500, maximum = 1000
}
defaultType = self.safe_string_2(self.options, 'fetchTrades', 'defaultType', 'spot')
type = self.safe_string(params, 'type', defaultType)
query = self.omit(params, 'type')
defaultMethod = None
if type == 'future':
defaultMethod = 'fapiPublicGetAggTrades'
elif type == 'delivery':
defaultMethod = 'dapiPublicGetAggTrades'
else:
defaultMethod = 'publicGetAggTrades'
method = self.safe_string(self.options, 'fetchTradesMethod', defaultMethod)
if method == 'publicGetAggTrades':
if since is not None:
request['startTime'] = since
# https://github.com/ccxt/ccxt/issues/6400
# https://github.com/binance-exchange/binance-official-api-docs/blob/master/rest-api.md#compressedaggregate-trades-list
request['endTime'] = self.sum(since, 3600000)
if type == 'future':
method = 'fapiPublicGetAggTrades'
elif type == 'delivery':
method = 'dapiPublicGetAggTrades'
elif method == 'publicGetHistoricalTrades':
if type == 'future':
method = 'fapiPublicGetHistoricalTrades'
elif type == 'delivery':
method = 'dapiPublicGetHistoricalTrades'
if limit is not None:
request['limit'] = limit # default = 500, maximum = 1000
#
        # Caveats:
        # - the default limit(500) applies only if no other parameters are set,
        #   trades up to the maximum limit may be returned to satisfy other parameters
        # - if both a limit and a time window are set and the window contains more
        #   trades than the limit, then the last trades from the window are returned
        # - the 'tradeId' accepted and returned by this method is the "aggregate" trade id,
        #   which is different from the actual trade id
        # - setting both fromId and a time window results in an error
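        # Usage sketch(illustrative): with the default publicGetAggTrades method
        # a 'since' value pins a one-hour window(see the endTime handling above):
        #
        #     trades = exchange.fetch_trades('BTC/USDT', since=exchange.milliseconds() - 3600000)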
response = getattr(self, method)(self.extend(request, query))
#
# aggregate trades
#
# [
# {
# "a": 26129, # Aggregate tradeId
# "p": "0.01633102", # Price
# "q": "4.70443515", # Quantity
# "f": 27781, # First tradeId
# "l": 27781, # Last tradeId
# "T": 1498793709153, # Timestamp
# "m": True, # Was the buyer the maker?
# "M": True # Was the trade the best price match?
# }
# ]
#
# recent public trades and historical public trades
#
# [
# {
# "id": 28457,
# "price": "4.00000100",
# "qty": "12.00000000",
# "time": 1499865549590,
# "isBuyerMaker": True,
# "isBestMatch": True
# }
# ]
#
return self.parse_trades(response, market, since, limit)
def parse_order_status(self, status):
statuses = {
'NEW': 'open',
'PARTIALLY_FILLED': 'open',
'FILLED': 'closed',
'CANCELED': 'canceled',
'PENDING_CANCEL': 'canceling', # currently unused
'REJECTED': 'rejected',
'EXPIRED': 'expired',
}
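        # unrecognized statuses fall through unchanged, e.g.
        # parse_order_status('PARTIALLY_FILLED') -> 'open', parse_order_status('FOO') -> 'FOO'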
return self.safe_string(statuses, status, status)
def parse_order(self, order, market=None):
#
# spot
#
# {
# "symbol": "LTCBTC",
# "orderId": 1,
# "clientOrderId": "myOrder1",
# "price": "0.1",
# "origQty": "1.0",
# "executedQty": "0.0",
# "cummulativeQuoteQty": "0.0",
# "status": "NEW",
# "timeInForce": "GTC",
# "type": "LIMIT",
# "side": "BUY",
# "stopPrice": "0.0",
# "icebergQty": "0.0",
# "time": 1499827319559,
# "updateTime": 1499827319559,
# "isWorking": True
# }
#
# futures
#
# {
# "symbol": "BTCUSDT",
# "orderId": 1,
# "clientOrderId": "myOrder1",
# "price": "0.1",
# "origQty": "1.0",
# "executedQty": "1.0",
# "cumQuote": "10.0",
# "status": "NEW",
# "timeInForce": "GTC",
# "type": "LIMIT",
# "side": "BUY",
# "stopPrice": "0.0",
# "updateTime": 1499827319559
# }
#
# createOrder with {"newOrderRespType": "FULL"}
#
# {
# "symbol": "BTCUSDT",
# "orderId": 5403233939,
# "orderListId": -1,
# "clientOrderId": "x-R4BD3S825e669e75b6c14f69a2c43e",
# "transactTime": 1617151923742,
# "price": "0.00000000",
# "origQty": "0.00050000",
# "executedQty": "0.00050000",
# "cummulativeQuoteQty": "29.47081500",
# "status": "FILLED",
# "timeInForce": "GTC",
# "type": "MARKET",
# "side": "BUY",
# "fills": [
# {
# "price": "58941.63000000",
# "qty": "0.00050000",
# "commission": "0.00007050",
# "commissionAsset": "BNB",
# "tradeId": 737466631
# }
# ]
# }
#
# delivery
#
# {
# "orderId": "18742727411",
# "symbol": "ETHUSD_PERP",
# "pair": "ETHUSD",
# "status": "FILLED",
# "clientOrderId": "x-xcKtGhcu3e2d1503fdd543b3b02419",
# "price": "0",
# "avgPrice": "4522.14",
# "origQty": "1",
# "executedQty": "1",
# "cumBase": "0.00221134",
# "timeInForce": "GTC",
# "type": "MARKET",
# "reduceOnly": False,
# "closePosition": False,
# "side": "SELL",
# "positionSide": "BOTH",
# "stopPrice": "0",
# "workingType": "CONTRACT_PRICE",
# "priceProtect": False,
# "origType": "MARKET",
# "time": "1636061952660",
# "updateTime": "1636061952660"
# }
#
status = self.parse_order_status(self.safe_string(order, 'status'))
marketId = self.safe_string(order, 'symbol')
symbol = self.safe_symbol(marketId, market)
filled = self.safe_string(order, 'executedQty', '0')
timestamp = None
lastTradeTimestamp = None
if 'time' in order:
timestamp = self.safe_integer(order, 'time')
elif 'transactTime' in order:
timestamp = self.safe_integer(order, 'transactTime')
elif 'updateTime' in order:
if status == 'open':
if Precise.string_gt(filled, '0'):
lastTradeTimestamp = self.safe_integer(order, 'updateTime')
else:
timestamp = self.safe_integer(order, 'updateTime')
average = self.safe_string(order, 'avgPrice')
price = self.safe_string(order, 'price')
amount = self.safe_string(order, 'origQty')
# - Spot/Margin market: cummulativeQuoteQty
# - Futures market: cumQuote.
        # Note that this is not the actual cost, since Binance futures uses leverage to calculate margins.
cost = self.safe_string_2(order, 'cummulativeQuoteQty', 'cumQuote')
cost = self.safe_string(order, 'cumBase', cost)
id = self.safe_string(order, 'orderId')
type = self.safe_string_lower(order, 'type')
side = self.safe_string_lower(order, 'side')
fills = self.safe_value(order, 'fills', [])
clientOrderId = self.safe_string(order, 'clientOrderId')
timeInForce = self.safe_string(order, 'timeInForce')
postOnly = (type == 'limit_maker') or (timeInForce == 'GTX')
if type == 'limit_maker':
type = 'limit'
stopPriceString = self.safe_string(order, 'stopPrice')
stopPrice = self.parse_number(self.omit_zero(stopPriceString))
return self.safe_order2({
'info': order,
'id': id,
'clientOrderId': clientOrderId,
'timestamp': timestamp,
'datetime': self.iso8601(timestamp),
'lastTradeTimestamp': lastTradeTimestamp,
'symbol': symbol,
'type': type,
'timeInForce': timeInForce,
'postOnly': postOnly,
'side': side,
'price': price,
'stopPrice': stopPrice,
'amount': amount,
'cost': cost,
'average': average,
'filled': filled,
'remaining': None,
'status': status,
'fee': None,
'trades': fills,
}, market)
def create_reduce_only_order(self, symbol, type, side, amount, price=None, params={}):
request = {
'reduceOnly': True,
}
return self.create_order(symbol, type, side, amount, price, self.extend(request, params))
def create_order(self, symbol, type, side, amount, price=None, params={}):
self.load_markets()
market = self.market(symbol)
defaultType = self.safe_string_2(self.options, 'createOrder', 'defaultType', 'spot')
orderType = self.safe_string(params, 'type', defaultType)
clientOrderId = self.safe_string_2(params, 'newClientOrderId', 'clientOrderId')
postOnly = self.safe_value(params, 'postOnly', False)
params = self.omit(params, ['type', 'newClientOrderId', 'clientOrderId', 'postOnly'])
reduceOnly = self.safe_value(params, 'reduceOnly')
if reduceOnly is not None:
if (orderType != 'future') and (orderType != 'delivery'):
raise InvalidOrder(self.id + ' createOrder() does not support reduceOnly for ' + orderType + ' orders, reduceOnly orders are supported for futures and perpetuals only')
method = 'privatePostOrder'
if orderType == 'future':
method = 'fapiPrivatePostOrder'
elif orderType == 'delivery':
method = 'dapiPrivatePostOrder'
elif orderType == 'margin':
method = 'sapiPostMarginOrder'
        # the next 5 lines are added to support test orders
if market['spot']:
test = self.safe_value(params, 'test', False)
if test:
method += 'Test'
params = self.omit(params, 'test')
        # only supported by the spot/margin api(all margin markets are spot markets)
if postOnly:
type = 'LIMIT_MAKER'
uppercaseType = type.upper()
validOrderTypes = self.safe_value(market['info'], 'orderTypes')
if not self.in_array(uppercaseType, validOrderTypes):
raise InvalidOrder(self.id + ' ' + type + ' is not a valid order type in market ' + symbol)
request = {
'symbol': market['id'],
'type': uppercaseType,
'side': side.upper(),
}
if clientOrderId is None:
broker = self.safe_value(self.options, 'broker')
if broker is not None:
brokerId = self.safe_string(broker, orderType)
if brokerId is not None:
request['newClientOrderId'] = brokerId + self.uuid22()
else:
request['newClientOrderId'] = clientOrderId
if (orderType == 'spot') or (orderType == 'margin'):
request['newOrderRespType'] = self.safe_value(self.options['newOrderRespType'], type, 'RESULT') # 'ACK' for order id, 'RESULT' for full order or 'FULL' for order with fills
else:
# delivery and future
request['newOrderRespType'] = 'RESULT' # "ACK", "RESULT", default "ACK"
# additional required fields depending on the order type
timeInForceIsRequired = False
priceIsRequired = False
stopPriceIsRequired = False
quantityIsRequired = False
#
# spot/margin
#
# LIMIT timeInForce, quantity, price
# MARKET quantity or quoteOrderQty
# STOP_LOSS quantity, stopPrice
# STOP_LOSS_LIMIT timeInForce, quantity, price, stopPrice
# TAKE_PROFIT quantity, stopPrice
# TAKE_PROFIT_LIMIT timeInForce, quantity, price, stopPrice
# LIMIT_MAKER quantity, price
#
# futures
#
# LIMIT timeInForce, quantity, price
# MARKET quantity
# STOP/TAKE_PROFIT quantity, price, stopPrice
# STOP_MARKET stopPrice
# TAKE_PROFIT_MARKET stopPrice
# TRAILING_STOP_MARKET callbackRate
#
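        # An illustrative call for the spot STOP_LOSS_LIMIT row above(the values
        # are made up, timeInForce is filled in from options['defaultTimeInForce']):
        #
        #     exchange.create_order('BTC/USDT', 'STOP_LOSS_LIMIT', 'sell', 0.001, 58000, {'stopPrice': 58500})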
if uppercaseType == 'MARKET':
quoteOrderQty = self.safe_value(self.options, 'quoteOrderQty', False)
if quoteOrderQty:
quoteOrderQty = self.safe_number(params, 'quoteOrderQty')
precision = market['precision']['price']
if quoteOrderQty is not None:
request['quoteOrderQty'] = self.decimal_to_precision(quoteOrderQty, TRUNCATE, precision, self.precisionMode)
params = self.omit(params, 'quoteOrderQty')
elif price is not None:
request['quoteOrderQty'] = self.decimal_to_precision(amount * price, TRUNCATE, precision, self.precisionMode)
else:
quantityIsRequired = True
else:
quantityIsRequired = True
elif uppercaseType == 'LIMIT':
priceIsRequired = True
timeInForceIsRequired = True
quantityIsRequired = True
elif (uppercaseType == 'STOP_LOSS') or (uppercaseType == 'TAKE_PROFIT'):
stopPriceIsRequired = True
quantityIsRequired = True
if market['linear'] or market['inverse']:
priceIsRequired = True
elif (uppercaseType == 'STOP_LOSS_LIMIT') or (uppercaseType == 'TAKE_PROFIT_LIMIT'):
quantityIsRequired = True
stopPriceIsRequired = True
priceIsRequired = True
timeInForceIsRequired = True
elif uppercaseType == 'LIMIT_MAKER':
priceIsRequired = True
quantityIsRequired = True
elif uppercaseType == 'STOP':
quantityIsRequired = True
stopPriceIsRequired = True
priceIsRequired = True
elif (uppercaseType == 'STOP_MARKET') or (uppercaseType == 'TAKE_PROFIT_MARKET'):
closePosition = self.safe_value(params, 'closePosition')
if closePosition is None:
quantityIsRequired = True
stopPriceIsRequired = True
elif uppercaseType == 'TRAILING_STOP_MARKET':
quantityIsRequired = True
callbackRate = self.safe_number(params, 'callbackRate')
if callbackRate is None:
raise InvalidOrder(self.id + ' createOrder() requires a callbackRate extra param for a ' + type + ' order')
if quantityIsRequired:
request['quantity'] = self.amount_to_precision(symbol, amount)
if priceIsRequired:
if price is None:
raise InvalidOrder(self.id + ' createOrder() requires a price argument for a ' + type + ' order')
request['price'] = self.price_to_precision(symbol, price)
if timeInForceIsRequired:
            request['timeInForce'] = self.options['defaultTimeInForce']  # 'GTC' = Good Till Canceled(default), 'IOC' = Immediate Or Cancel
if stopPriceIsRequired:
stopPrice = self.safe_number(params, 'stopPrice')
if stopPrice is None:
raise InvalidOrder(self.id + ' createOrder() requires a stopPrice extra param for a ' + type + ' order')
else:
params = self.omit(params, 'stopPrice')
request['stopPrice'] = self.price_to_precision(symbol, stopPrice)
response = getattr(self, method)(self.extend(request, params))
return self.parse_order(response, market)
def fetch_order(self, id, symbol=None, params={}):
if symbol is None:
raise ArgumentsRequired(self.id + ' fetchOrder() requires a symbol argument')
self.load_markets()
market = self.market(symbol)
defaultType = self.safe_string_2(self.options, 'fetchOrder', 'defaultType', 'spot')
type = self.safe_string(params, 'type', defaultType)
method = 'privateGetOrder'
if type == 'future':
method = 'fapiPrivateGetOrder'
elif type == 'delivery':
method = 'dapiPrivateGetOrder'
elif type == 'margin':
method = 'sapiGetMarginOrder'
request = {
'symbol': market['id'],
}
clientOrderId = self.safe_value_2(params, 'origClientOrderId', 'clientOrderId')
if clientOrderId is not None:
request['origClientOrderId'] = clientOrderId
else:
request['orderId'] = id
query = self.omit(params, ['type', 'clientOrderId', 'origClientOrderId'])
response = getattr(self, method)(self.extend(request, query))
return self.parse_order(response, market)
def fetch_orders(self, symbol=None, since=None, limit=None, params={}):
if symbol is None:
raise ArgumentsRequired(self.id + ' fetchOrders() requires a symbol argument')
self.load_markets()
market = self.market(symbol)
defaultType = self.safe_string_2(self.options, 'fetchOrders', 'defaultType', 'spot')
type = self.safe_string(params, 'type', defaultType)
method = 'privateGetAllOrders'
if type == 'future':
method = 'fapiPrivateGetAllOrders'
elif type == 'delivery':
method = 'dapiPrivateGetAllOrders'
elif type == 'margin':
method = 'sapiGetMarginAllOrders'
request = {
'symbol': market['id'],
}
if since is not None:
request['startTime'] = since
if limit is not None:
request['limit'] = limit
query = self.omit(params, 'type')
response = getattr(self, method)(self.extend(request, query))
#
# spot
#
# [
# {
# "symbol": "LTCBTC",
# "orderId": 1,
# "clientOrderId": "myOrder1",
# "price": "0.1",
# "origQty": "1.0",
# "executedQty": "0.0",
# "cummulativeQuoteQty": "0.0",
# "status": "NEW",
# "timeInForce": "GTC",
# "type": "LIMIT",
# "side": "BUY",
# "stopPrice": "0.0",
# "icebergQty": "0.0",
# "time": 1499827319559,
# "updateTime": 1499827319559,
# "isWorking": True
# }
# ]
#
# futures
#
# [
# {
# "symbol": "BTCUSDT",
# "orderId": 1,
# "clientOrderId": "myOrder1",
# "price": "0.1",
# "origQty": "1.0",
# "executedQty": "1.0",
# "cumQuote": "10.0",
# "status": "NEW",
# "timeInForce": "GTC",
# "type": "LIMIT",
# "side": "BUY",
# "stopPrice": "0.0",
# "updateTime": 1499827319559
# }
# ]
#
return self.parse_orders(response, market, since, limit)
def fetch_open_orders(self, symbol=None, since=None, limit=None, params={}):
self.load_markets()
market = None
query = None
type = None
request = {}
if symbol is not None:
market = self.market(symbol)
request['symbol'] = market['id']
defaultType = self.safe_string_2(self.options, 'fetchOpenOrders', 'defaultType', 'spot')
type = self.safe_string(params, 'type', defaultType)
query = self.omit(params, 'type')
elif self.options['warnOnFetchOpenOrdersWithoutSymbol']:
symbols = self.symbols
numSymbols = len(symbols)
fetchOpenOrdersRateLimit = int(numSymbols / 2)
            raise ExchangeError(self.id + ' fetchOpenOrders WARNING: fetching open orders without specifying a symbol is rate-limited to one call per ' + str(fetchOpenOrdersRateLimit) + ' seconds. Do not call this method frequently to avoid a ban. Set ' + self.id + '.options["warnOnFetchOpenOrdersWithoutSymbol"] = False to suppress this warning message.')
else:
defaultType = self.safe_string_2(self.options, 'fetchOpenOrders', 'defaultType', 'spot')
type = self.safe_string(params, 'type', defaultType)
query = self.omit(params, 'type')
method = 'privateGetOpenOrders'
if type == 'future':
method = 'fapiPrivateGetOpenOrders'
elif type == 'delivery':
method = 'dapiPrivateGetOpenOrders'
elif type == 'margin':
method = 'sapiGetMarginOpenOrders'
response = getattr(self, method)(self.extend(request, query))
return self.parse_orders(response, market, since, limit)
def fetch_closed_orders(self, symbol=None, since=None, limit=None, params={}):
orders = self.fetch_orders(symbol, since, limit, params)
return self.filter_by(orders, 'status', 'closed')
def cancel_order(self, id, symbol=None, params={}):
if symbol is None:
raise ArgumentsRequired(self.id + ' cancelOrder() requires a symbol argument')
self.load_markets()
market = self.market(symbol)
defaultType = self.safe_string_2(self.options, 'fetchOpenOrders', 'defaultType', 'spot')
type = self.safe_string(params, 'type', defaultType)
# https://github.com/ccxt/ccxt/issues/6507
origClientOrderId = self.safe_value_2(params, 'origClientOrderId', 'clientOrderId')
request = {
'symbol': market['id'],
# 'orderId': id,
# 'origClientOrderId': id,
}
if origClientOrderId is None:
request['orderId'] = id
else:
request['origClientOrderId'] = origClientOrderId
method = 'privateDeleteOrder'
if type == 'future':
method = 'fapiPrivateDeleteOrder'
elif type == 'delivery':
method = 'dapiPrivateDeleteOrder'
elif type == 'margin':
method = 'sapiDeleteMarginOrder'
query = self.omit(params, ['type', 'origClientOrderId', 'clientOrderId'])
response = getattr(self, method)(self.extend(request, query))
return self.parse_order(response, market)
def cancel_all_orders(self, symbol=None, params={}):
if symbol is None:
raise ArgumentsRequired(self.id + ' cancelAllOrders() requires a symbol argument')
self.load_markets()
market = self.market(symbol)
request = {
'symbol': market['id'],
}
defaultType = self.safe_string_2(self.options, 'cancelAllOrders', 'defaultType', 'spot')
type = self.safe_string(params, 'type', defaultType)
query = self.omit(params, 'type')
method = 'privateDeleteOpenOrders'
if type == 'margin':
method = 'sapiDeleteMarginOpenOrders'
elif type == 'future':
method = 'fapiPrivateDeleteAllOpenOrders'
elif type == 'delivery':
method = 'dapiPrivateDeleteAllOpenOrders'
response = getattr(self, method)(self.extend(request, query))
if isinstance(response, list):
return self.parse_orders(response, market)
else:
return response
def fetch_my_trades(self, symbol=None, since=None, limit=None, params={}):
if symbol is None:
raise ArgumentsRequired(self.id + ' fetchMyTrades() requires a symbol argument')
self.load_markets()
market = self.market(symbol)
defaultType = self.safe_string_2(self.options, 'fetchMyTrades', 'defaultType', 'spot')
type = self.safe_string(params, 'type', defaultType)
params = self.omit(params, 'type')
method = None
if type == 'spot':
method = 'privateGetMyTrades'
elif type == 'margin':
method = 'sapiGetMarginMyTrades'
elif type == 'future':
method = 'fapiPrivateGetUserTrades'
elif type == 'delivery':
method = 'dapiPrivateGetUserTrades'
request = {
'symbol': market['id'],
}
if since is not None:
request['startTime'] = since
if limit is not None:
request['limit'] = limit
response = getattr(self, method)(self.extend(request, params))
#
# spot trade
#
# [
# {
# "symbol": "BNBBTC",
# "id": 28457,
# "orderId": 100234,
# "price": "4.00000100",
# "qty": "12.00000000",
# "commission": "10.10000000",
# "commissionAsset": "BNB",
# "time": 1499865549590,
# "isBuyer": True,
# "isMaker": False,
# "isBestMatch": True,
# }
# ]
#
# futures trade
#
# [
# {
# "accountId": 20,
# "buyer": False,
# "commission": "-0.07819010",
# "commissionAsset": "USDT",
# "counterPartyId": 653,
# "id": 698759,
# "maker": False,
# "orderId": 25851813,
# "price": "7819.01",
# "qty": "0.002",
# "quoteQty": "0.01563",
# "realizedPnl": "-0.91539999",
# "side": "SELL",
# "symbol": "BTCUSDT",
# "time": 1569514978020
# }
# ]
#
return self.parse_trades(response, market, since, limit)
def fetch_my_dust_trades(self, symbol=None, since=None, limit=None, params={}):
#
        # Binance provides an opportunity to trade insignificant(i.e. non-tradable and non-withdrawable)
        # token leftovers(of any asset) into `BNB` coin, which in turn can be used to pay trading fees
# The corresponding trades history is called the `Dust Log` and can be requested via the following end-point:
# https://github.com/binance-exchange/binance-official-api-docs/blob/master/wapi-api.md#dustlog-user_data
#
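        # Usage sketch: each detail row is flagged with 'isDustTrade' below, which
        # makes parse_trade() route it through parse_dust_trade(), e.g.
        #
        #     dust_trades = exchange.fetch_my_dust_trades()  # the earned/fee currency is BNB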
self.load_markets()
request = {}
if since is not None:
request['startTime'] = since
request['endTime'] = self.sum(since, 7776000000)
response = self.sapiGetAssetDribblet(self.extend(request, params))
# {
# "total": "4",
# "userAssetDribblets": [
# {
# "operateTime": "1627575731000",
# "totalServiceChargeAmount": "0.00001453",
# "totalTransferedAmount": "0.00072693",
# "transId": "70899815863",
# "userAssetDribbletDetails": [
# {
# "fromAsset": "LTC",
# "amount": "0.000006",
# "transferedAmount": "0.00000267",
# "serviceChargeAmount": "0.00000005",
# "operateTime": "1627575731000",
# "transId": "70899815863"
# },
# {
# "fromAsset": "GBP",
# "amount": "0.15949157",
# "transferedAmount": "0.00072426",
# "serviceChargeAmount": "0.00001448",
# "operateTime": "1627575731000",
# "transId": "70899815863"
# }
# ]
# },
# ]
# }
results = self.safe_value(response, 'userAssetDribblets', [])
rows = self.safe_integer(response, 'total', 0)
data = []
for i in range(0, rows):
logs = self.safe_value(results[i], 'userAssetDribbletDetails', [])
for j in range(0, len(logs)):
logs[j]['isDustTrade'] = True
data.append(logs[j])
trades = self.parse_trades(data, None, since, limit)
return self.filter_by_since_limit(trades, since, limit)
def parse_dust_trade(self, trade, market=None):
#
# {
# "fromAsset": "USDT",
# "amount": "0.009669",
# "transferedAmount": "0.00002992",
# "serviceChargeAmount": "0.00000059",
# "operateTime": "1628076010000",
# "transId": "71416578712",
# "isDustTrade": True
# }
#
orderId = self.safe_string(trade, 'transId')
timestamp = self.safe_integer(trade, 'operateTime')
currencyId = self.safe_string(trade, 'fromAsset')
tradedCurrency = self.safe_currency_code(currencyId)
bnb = self.currency('BNB')
earnedCurrency = bnb['code']
applicantSymbol = earnedCurrency + '/' + tradedCurrency
tradedCurrencyIsQuote = False
if applicantSymbol in self.markets:
tradedCurrencyIsQuote = True
feeCostString = self.safe_string(trade, 'serviceChargeAmount')
fee = {
'currency': earnedCurrency,
'cost': self.parse_number(feeCostString),
}
symbol = None
amountString = None
costString = None
side = None
if tradedCurrencyIsQuote:
symbol = applicantSymbol
amountString = self.safe_string(trade, 'transferedAmount')
costString = self.safe_string(trade, 'amount')
side = 'buy'
else:
symbol = tradedCurrency + '/' + earnedCurrency
amountString = self.safe_string(trade, 'amount')
costString = self.safe_string(trade, 'transferedAmount')
side = 'sell'
priceString = None
if costString is not None:
if amountString:
priceString = Precise.string_div(costString, amountString)
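        # worked example from the sample above: converting 0.009669 USDT of dust
        # into 0.00002992 BNB implies price = 0.009669 / 0.00002992 ~ 323.16 USDT per BNB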
id = None
amount = self.parse_number(amountString)
price = self.parse_number(priceString)
cost = self.parse_number(costString)
type = None
takerOrMaker = None
return {
'id': id,
'timestamp': timestamp,
'datetime': self.iso8601(timestamp),
'symbol': symbol,
'order': orderId,
'type': type,
'takerOrMaker': takerOrMaker,
'side': side,
'amount': amount,
'price': price,
'cost': cost,
'fee': fee,
'info': trade,
}
def fetch_deposits(self, code=None, since=None, limit=None, params={}):
self.load_markets()
currency = None
response = None
request = {}
legalMoney = self.safe_value(self.options, 'legalMoney', {})
if code in legalMoney:
if code is not None:
currency = self.currency(code)
request['transactionType'] = 0
if since is not None:
request['beginTime'] = since
raw = self.sapiGetFiatOrders(self.extend(request, params))
response = self.safe_value(raw, 'data')
# {
# "code": "000000",
# "message": "success",
# "data": [
# {
# "orderNo": "25ced37075c1470ba8939d0df2316e23",
# "fiatCurrency": "EUR",
# "indicatedAmount": "15.00",
# "amount": "15.00",
# "totalFee": "0.00",
# "method": "card",
# "status": "Failed",
# "createTime": 1627501026000,
# "updateTime": 1627501027000
# }
# ],
# "total": 1,
# "success": True
# }
else:
if code is not None:
currency = self.currency(code)
request['coin'] = currency['id']
if since is not None:
request['startTime'] = since
# max 3 months range https://github.com/ccxt/ccxt/issues/6495
request['endTime'] = self.sum(since, 7776000000)
if limit is not None:
request['limit'] = limit
response = self.sapiGetCapitalDepositHisrec(self.extend(request, params))
# [
# {
# "amount": "0.01844487",
# "coin": "BCH",
# "network": "BCH",
# "status": 1,
# "address": "1NYxAJhW2281HK1KtJeaENBqHeygA88FzR",
# "addressTag": "",
# "txId": "bafc5902504d6504a00b7d0306a41154cbf1d1b767ab70f3bc226327362588af",
# "insertTime": 1610784980000,
# "transferType": 0,
# "confirmTimes": "2/2"
# },
# {
# "amount": "4500",
# "coin": "USDT",
# "network": "BSC",
# "status": 1,
# "address": "0xc9c923c87347ca0f3451d6d308ce84f691b9f501",
# "addressTag": "",
# "txId": "Internal transfer 51376627901",
# "insertTime": 1618394381000,
# "transferType": 1,
# "confirmTimes": "1/15"
# }
# ]
return self.parse_transactions(response, currency, since, limit)
def fetch_withdrawals(self, code=None, since=None, limit=None, params={}):
self.load_markets()
legalMoney = self.safe_value(self.options, 'legalMoney', {})
request = {}
response = None
currency = None
if code in legalMoney:
if code is not None:
currency = self.currency(code)
request['transactionType'] = 1
if since is not None:
request['beginTime'] = since
raw = self.sapiGetFiatOrders(self.extend(request, params))
response = self.safe_value(raw, 'data')
# {
# "code": "000000",
# "message": "success",
# "data": [
# {
# "orderNo": "CJW706452266115170304",
# "fiatCurrency": "GBP",
# "indicatedAmount": "10001.50",
# "amount": "100.00",
# "totalFee": "1.50",
# "method": "bank transfer",
# "status": "Successful",
# "createTime": 1620037745000,
# "updateTime": 1620038480000
# },
# {
# "orderNo": "CJW706287492781891584",
# "fiatCurrency": "GBP",
# "indicatedAmount": "10001.50",
# "amount": "100.00",
# "totalFee": "1.50",
# "method": "bank transfer",
# "status": "Successful",
# "createTime": 1619998460000,
# "updateTime": 1619998823000
# }
# ],
# "total": 39,
# "success": True
# }
else:
if code is not None:
currency = self.currency(code)
request['coin'] = currency['id']
if since is not None:
request['startTime'] = since
# max 3 months range https://github.com/ccxt/ccxt/issues/6495
request['endTime'] = self.sum(since, 7776000000)
if limit is not None:
request['limit'] = limit
response = self.sapiGetCapitalWithdrawHistory(self.extend(request, params))
# [
# {
# "id": "69e53ad305124b96b43668ceab158a18",
# "amount": "28.75",
# "transactionFee": "0.25",
# "coin": "XRP",
# "status": 6,
# "address": "r3T75fuLjX51mmfb5Sk1kMNuhBgBPJsjza",
# "addressTag": "101286922",
# "txId": "19A5B24ED0B697E4F0E9CD09FCB007170A605BC93C9280B9E6379C5E6EF0F65A",
# "applyTime": "2021-04-15 12:09:16",
# "network": "XRP",
# "transferType": 0
# },
# {
# "id": "9a67628b16ba4988ae20d329333f16bc",
# "amount": "20",
# "transactionFee": "20",
# "coin": "USDT",
# "status": 6,
# "address": "0x0AB991497116f7F5532a4c2f4f7B1784488628e1",
# "txId": "0x77fbf2cf2c85b552f0fd31fd2e56dc95c08adae031d96f3717d8b17e1aea3e46",
# "applyTime": "2021-04-15 12:06:53",
# "network": "ETH",
# "transferType": 0
# },
# {
# "id": "a7cdc0afbfa44a48bd225c9ece958fe2",
# "amount": "51",
# "transactionFee": "1",
# "coin": "USDT",
# "status": 6,
# "address": "TYDmtuWL8bsyjvcauUTerpfYyVhFtBjqyo",
# "txId": "168a75112bce6ceb4823c66726ad47620ad332e69fe92d9cb8ceb76023f9a028",
# "applyTime": "2021-04-13 12:46:59",
# "network": "TRX",
# "transferType": 0
# }
# ]
return self.parse_transactions(response, currency, since, limit)
def parse_transaction_status_by_type(self, status, type=None):
statusesByType = {
'deposit': {
'0': 'pending',
'1': 'ok',
# Fiat
# Processing, Failed, Successful, Finished, Refunding, Refunded, Refund Failed, Order Partial credit Stopped
'Processing': 'pending',
'Failed': 'failed',
'Successful': 'ok',
'Refunding': 'canceled',
'Refunded': 'canceled',
'Refund Failed': 'failed',
},
'withdrawal': {
'0': 'pending', # Email Sent
                '1': 'canceled',  # Cancelled (different from 1 = ok in deposits)
'2': 'pending', # Awaiting Approval
'3': 'failed', # Rejected
'4': 'pending', # Processing
'5': 'failed', # Failure
'6': 'ok', # Completed
# Fiat
# Processing, Failed, Successful, Finished, Refunding, Refunded, Refund Failed, Order Partial credit Stopped
'Processing': 'pending',
'Failed': 'failed',
'Successful': 'ok',
'Refunding': 'canceled',
'Refunded': 'canceled',
'Refund Failed': 'failed',
},
}
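        # note that the numeric codes are type-dependent, e.g. status '1' means
        # 'ok' for deposits but 'canceled' for withdrawals, and unknown statuses
        # pass through unchanged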
statuses = self.safe_value(statusesByType, type, {})
return self.safe_string(statuses, status, status)
def parse_transaction(self, transaction, currency=None):
#
# fetchDeposits
#
# {
# "amount": "4500",
# "coin": "USDT",
# "network": "BSC",
# "status": 1,
# "address": "0xc9c923c87347ca0f3451d6d308ce84f691b9f501",
# "addressTag": "",
# "txId": "Internal transfer 51376627901",
# "insertTime": 1618394381000,
# "transferType": 1,
# "confirmTimes": "1/15"
# }
#
# fetchWithdrawals
#
# {
# "id": "69e53ad305124b96b43668ceab158a18",
# "amount": "28.75",
# "transactionFee": "0.25",
# "coin": "XRP",
# "status": 6,
# "address": "r3T75fuLjX51mmfb5Sk1kMNuhBgBPJsjza",
# "addressTag": "101286922",
# "txId": "19A5B24ED0B697E4F0E9CD09FCB007170A605BC93C9280B9E6379C5E6EF0F65A",
# "applyTime": "2021-04-15 12:09:16",
# "network": "XRP",
# "transferType": 0
# }
#
# fiat transaction
# withdraw
# {
# "orderNo": "CJW684897551397171200",
# "fiatCurrency": "GBP",
# "indicatedAmount": "29.99",
# "amount": "28.49",
# "totalFee": "1.50",
# "method": "bank transfer",
# "status": "Successful",
# "createTime": 1614898701000,
# "updateTime": 1614898820000
# }
#
# deposit
# {
# "orderNo": "25ced37075c1470ba8939d0df2316e23",
# "fiatCurrency": "EUR",
# "indicatedAmount": "15.00",
# "amount": "15.00",
# "totalFee": "0.00",
# "method": "card",
# "status": "Failed",
# "createTime": "1627501026000",
# "updateTime": "1627501027000"
# }
#
id = self.safe_string_2(transaction, 'id', 'orderNo')
address = self.safe_string(transaction, 'address')
        tag = self.safe_string(transaction, 'addressTag')  # may be an empty string
if tag is not None:
if len(tag) < 1:
tag = None
txid = self.safe_string(transaction, 'txId')
if (txid is not None) and (txid.find('Internal transfer ') >= 0):
txid = txid[18:]
currencyId = self.safe_string_2(transaction, 'coin', 'fiatCurrency')
code = self.safe_currency_code(currencyId, currency)
timestamp = None
insertTime = self.safe_integer_2(transaction, 'insertTime', 'createTime')
applyTime = self.parse8601(self.safe_string(transaction, 'applyTime'))
type = self.safe_string(transaction, 'type')
if type is None:
if (insertTime is not None) and (applyTime is None):
type = 'deposit'
timestamp = insertTime
elif (insertTime is None) and (applyTime is not None):
type = 'withdrawal'
timestamp = applyTime
status = self.parse_transaction_status_by_type(self.safe_string(transaction, 'status'), type)
amount = self.safe_number(transaction, 'amount')
feeCost = self.safe_number_2(transaction, 'transactionFee', 'totalFee')
fee = None
if feeCost is not None:
fee = {'currency': code, 'cost': feeCost}
updated = self.safe_integer_2(transaction, 'successTime', 'updateTime')
internal = self.safe_integer(transaction, 'transferType', False)
internal = True if internal else False
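        # per the examples above, transferType 1 marks an internal(off-chain) transfer, 0 an on-chain one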
return {
'info': transaction,
'id': id,
'txid': txid,
'timestamp': timestamp,
'datetime': self.iso8601(timestamp),
'address': address,
'addressTo': address,
'addressFrom': None,
'tag': tag,
'tagTo': tag,
'tagFrom': None,
'type': type,
'amount': amount,
'currency': code,
'status': status,
'updated': updated,
'internal': internal,
'fee': fee,
}
def parse_transfer_status(self, status):
statuses = {
'CONFIRMED': 'ok',
}
return self.safe_string(statuses, status, status)
def parse_transfer(self, transfer, currency=None):
#
# transfer
#
# {
# "tranId":13526853623
# }
#
# fetchTransfers
#
# {
# timestamp: 1614640878000,
# asset: 'USDT',
# amount: '25',
# type: 'MAIN_UMFUTURE',
# status: 'CONFIRMED',
# tranId: 43000126248
# }
#
id = self.safe_string(transfer, 'tranId')
currencyId = self.safe_string(transfer, 'asset')
code = self.safe_currency_code(currencyId, currency)
amount = self.safe_number(transfer, 'amount')
type = self.safe_string(transfer, 'type')
fromAccount = None
toAccount = None
typesByAccount = self.safe_value(self.options, 'typesByAccount', {})
if type is not None:
parts = type.split('_')
fromAccount = self.safe_value(parts, 0)
toAccount = self.safe_value(parts, 1)
fromAccount = self.safe_string(typesByAccount, fromAccount, fromAccount)
toAccount = self.safe_string(typesByAccount, toAccount, toAccount)
timestamp = self.safe_integer(transfer, 'timestamp')
status = self.parse_transfer_status(self.safe_string(transfer, 'status'))
return {
'info': transfer,
'id': id,
'timestamp': timestamp,
'datetime': self.iso8601(timestamp),
'currency': code,
'amount': amount,
'fromAccount': fromAccount,
'toAccount': toAccount,
'status': status,
}
def parse_income(self, income, market=None):
#
# {
# "symbol": "ETHUSDT",
# "incomeType": "FUNDING_FEE",
# "income": "0.00134317",
# "asset": "USDT",
# "time": "1621584000000",
# "info": "FUNDING_FEE",
# "tranId": "4480321991774044580",
# "tradeId": ""
# }
#
marketId = self.safe_string(income, 'symbol')
symbol = self.safe_symbol(marketId, market)
amount = self.safe_number(income, 'income')
currencyId = self.safe_string(income, 'asset')
code = self.safe_currency_code(currencyId)
id = self.safe_string(income, 'tranId')
timestamp = self.safe_integer(income, 'time')
return {
'info': income,
'symbol': symbol,
'code': code,
'timestamp': timestamp,
'datetime': self.iso8601(timestamp),
'id': id,
'amount': amount,
}
def parse_incomes(self, incomes, market=None, since=None, limit=None):
result = []
for i in range(0, len(incomes)):
entry = incomes[i]
parsed = self.parse_income(entry, market)
result.append(parsed)
sorted = self.sort_by(result, 'timestamp')
return self.filter_by_since_limit(sorted, since, limit)
def transfer(self, code, amount, fromAccount, toAccount, params={}):
self.load_markets()
currency = self.currency(code)
type = self.safe_string(params, 'type')
if type is None:
accountsByType = self.safe_value(self.options, 'accountsByType', {})
fromAccount = fromAccount.lower()
toAccount = toAccount.lower()
fromId = self.safe_string(accountsByType, fromAccount)
toId = self.safe_string(accountsByType, toAccount)
if fromId is None:
keys = list(accountsByType.keys())
raise ExchangeError(self.id + ' fromAccount must be one of ' + ', '.join(keys))
if toId is None:
keys = list(accountsByType.keys())
raise ExchangeError(self.id + ' toAccount must be one of ' + ', '.join(keys))
type = fromId + '_' + toId
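            # e.g. fromId 'MAIN' and toId 'UMFUTURE' produce 'MAIN_UMFUTURE', as in the fetchTransfers example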
request = {
'asset': currency['id'],
'amount': self.currency_to_precision(code, amount),
'type': type,
}
response = self.sapiPostAssetTransfer(self.extend(request, params))
#
# {
# "tranId":13526853623
# }
#
transfer = self.parse_transfer(response, currency)
return self.extend(transfer, {
'amount': amount,
'currency': code,
'fromAccount': fromAccount,
'toAccount': toAccount,
})
def fetch_transfers(self, code=None, since=None, limit=None, params={}):
self.load_markets()
currency = None
if code is not None:
currency = self.currency(code)
defaultType = self.safe_string_2(self.options, 'fetchTransfers', 'defaultType', 'spot')
fromAccount = self.safe_string(params, 'fromAccount', defaultType)
defaultTo = 'spot' if (fromAccount == 'future') else 'future'
toAccount = self.safe_string(params, 'toAccount', defaultTo)
type = self.safe_string(params, 'type')
accountsByType = self.safe_value(self.options, 'accountsByType', {})
fromId = self.safe_string(accountsByType, fromAccount)
toId = self.safe_string(accountsByType, toAccount)
if type is None:
if fromId is None:
keys = list(accountsByType.keys())
raise ExchangeError(self.id + ' fromAccount parameter must be one of ' + ', '.join(keys))
if toId is None:
keys = list(accountsByType.keys())
raise ExchangeError(self.id + ' toAccount parameter must be one of ' + ', '.join(keys))
type = fromId + '_' + toId
request = {
'type': type,
}
if since is not None:
request['startTime'] = since
if limit is not None:
request['size'] = limit
response = self.sapiGetAssetTransfer(self.extend(request, params))
#
# {
# total: 3,
# rows: [
# {
# timestamp: 1614640878000,
# asset: 'USDT',
# amount: '25',
# type: 'MAIN_UMFUTURE',
# status: 'CONFIRMED',
# tranId: 43000126248
# },
# ]
# }
#
rows = self.safe_value(response, 'rows', [])
return self.parse_transfers(rows, currency, since, limit)
def fetch_deposit_address(self, code, params={}):
self.load_markets()
currency = self.currency(code)
request = {
'coin': currency['id'],
# 'network': 'ETH', # 'BSC', 'XMR', you can get network and isDefault in networkList in the response of sapiGetCapitalConfigDetail
}
networks = self.safe_value(self.options, 'networks', {})
        network = self.safe_string_upper(params, 'network')  # this line allows the user to specify either ERC20 or ETH
        network = self.safe_string(networks, network, network)  # handle the ERC20 > ETH alias
if network is not None:
request['network'] = network
params = self.omit(params, 'network')
# has support for the 'network' parameter
# https://binance-docs.github.io/apidocs/spot/en/#deposit-address-supporting-network-user_data
response = self.sapiGetCapitalDepositAddress(self.extend(request, params))
#
# {
# currency: 'XRP',
# address: 'rEb8TK3gBgk5auZkwc6sHnwrGVJH8DuaLh',
# tag: '108618262',
# info: {
# coin: 'XRP',
# address: 'rEb8TK3gBgk5auZkwc6sHnwrGVJH8DuaLh',
# tag: '108618262',
# url: 'https://bithomp.com/explorer/rEb8TK3gBgk5auZkwc6sHnwrGVJH8DuaLh'
# }
# }
#
address = self.safe_string(response, 'address')
url = self.safe_string(response, 'url')
impliedNetwork = None
if url is not None:
reverseNetworks = self.safe_value(self.options, 'reverseNetworks', {})
parts = url.split('/')
topLevel = self.safe_string(parts, 2)
if (topLevel == 'blockchair.com') or (topLevel == 'viewblock.io'):
subLevel = self.safe_string(parts, 3)
if subLevel is not None:
topLevel = topLevel + '/' + subLevel
impliedNetwork = self.safe_string(reverseNetworks, topLevel)
impliedNetworks = self.safe_value(self.options, 'impliedNetworks', {
'ETH': {'ERC20': 'ETH'},
'TRX': {'TRC20': 'TRX'},
})
if code in impliedNetworks:
conversion = self.safe_value(impliedNetworks, code, {})
impliedNetwork = self.safe_string(conversion, impliedNetwork, impliedNetwork)
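            # e.g. for code 'ETH', an implied 'ERC20' network collapses to 'ETH' via the table above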
tag = self.safe_string(response, 'tag', '')
if len(tag) == 0:
tag = None
self.check_address(address)
return {
'currency': code,
'address': address,
'tag': tag,
'network': impliedNetwork,
'info': response,
}
def fetch_funding_fees(self, codes=None, params={}):
self.load_markets()
response = self.sapiGetCapitalConfigGetall(params)
#
# [
# {
# coin: 'BAT',
# depositAllEnable: True,
# withdrawAllEnable: True,
# name: 'Basic Attention Token',
# free: '0',
# locked: '0',
# freeze: '0',
# withdrawing: '0',
# ipoing: '0',
# ipoable: '0',
# storage: '0',
# isLegalMoney: False,
# trading: True,
# networkList: [
# {
# network: 'BNB',
# coin: 'BAT',
# withdrawIntegerMultiple: '0.00000001',
# isDefault: False,
# depositEnable: True,
# withdrawEnable: True,
# depositDesc: '',
# withdrawDesc: '',
        #                 specialTips: 'The name of this asset is Basic Attention Token(BAT). Both a MEMO and an Address are required to successfully deposit your BEP2 tokens to Binance.',
# name: 'BEP2',
# resetAddressStatus: False,
# addressRegex: '^(bnb1)[0-9a-z]{38}$',
# memoRegex: '^[0-9A-Za-z\\-_]{1,120}$',
# withdrawFee: '0.27',
# withdrawMin: '0.54',
# withdrawMax: '10000000000',
# minConfirm: '1',
# unLockConfirm: '0'
# },
# {
# network: 'BSC',
# coin: 'BAT',
# withdrawIntegerMultiple: '0.00000001',
# isDefault: False,
# depositEnable: True,
# withdrawEnable: True,
# depositDesc: '',
# withdrawDesc: '',
        #                 specialTips: 'The name of this asset is Basic Attention Token. Please ensure you are depositing Basic Attention Token(BAT) tokens under the contract address ending in 9766e.',
# name: 'BEP20(BSC)',
# resetAddressStatus: False,
# addressRegex: '^(0x)[0-9A-Fa-f]{40}$',
# memoRegex: '',
# withdrawFee: '0.27',
# withdrawMin: '0.54',
# withdrawMax: '10000000000',
# minConfirm: '15',
# unLockConfirm: '0'
# },
# {
# network: 'ETH',
# coin: 'BAT',
# withdrawIntegerMultiple: '0.00000001',
# isDefault: True,
# depositEnable: True,
# withdrawEnable: True,
# depositDesc: '',
# withdrawDesc: '',
        #                 specialTips: 'The name of this asset is Basic Attention Token. Please ensure you are depositing Basic Attention Token(BAT) tokens under the contract address ending in 887ef.',
# name: 'ERC20',
# resetAddressStatus: False,
# addressRegex: '^(0x)[0-9A-Fa-f]{40}$',
# memoRegex: '',
# withdrawFee: '27',
# withdrawMin: '54',
# withdrawMax: '10000000000',
# minConfirm: '12',
# unLockConfirm: '0'
# }
# ]
# }
# ]
#
withdrawFees = {}
for i in range(0, len(response)):
entry = response[i]
currencyId = self.safe_string(entry, 'coin')
code = self.safe_currency_code(currencyId)
networkList = self.safe_value(entry, 'networkList')
withdrawFees[code] = {}
for j in range(0, len(networkList)):
networkEntry = networkList[j]
networkId = self.safe_string(networkEntry, 'network')
networkCode = self.safe_currency_code(networkId)
fee = self.safe_number(networkEntry, 'withdrawFee')
withdrawFees[code][networkCode] = fee
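        # with the BAT example above this yields roughly:
        # withdrawFees['BAT'] == {'BNB': 0.27, 'BSC': 0.27, 'ETH': 27.0}
        # (assuming the network ids map to themselves as currency codes)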
return {
'withdraw': withdrawFees,
'deposit': {},
'info': response,
}
def withdraw(self, code, amount, address, tag=None, params={}):
tag, params = self.handle_withdraw_tag_and_params(tag, params)
self.check_address(address)
self.load_markets()
currency = self.currency(code)
request = {
'coin': currency['id'],
'address': address,
'amount': amount,
# https://binance-docs.github.io/apidocs/spot/en/#withdraw-sapi
# issue sapiGetCapitalConfigGetall() to get networks for withdrawing USDT ERC20 vs USDT Omni
# 'network': 'ETH', # 'BTC', 'TRX', etc, optional
}
if tag is not None:
request['addressTag'] = tag
networks = self.safe_value(self.options, 'networks', {})
        network = self.safe_string_upper(params, 'network')  # this line allows the user to specify either ERC20 or ETH
        network = self.safe_string(networks, network, network)  # handle the ERC20 > ETH alias
if network is not None:
request['network'] = network
params = self.omit(params, 'network')
response = self.sapiPostCapitalWithdrawApply(self.extend(request, params))
# {id: '9a67628b16ba4988ae20d329333f16bc'}
return {
'info': response,
'id': self.safe_string(response, 'id'),
}
def parse_trading_fee(self, fee, market=None):
#
# {
# "symbol": "ADABNB",
# "makerCommission": 0.001,
# "takerCommission": 0.001
# }
#
marketId = self.safe_string(fee, 'symbol')
symbol = self.safe_symbol(marketId)
return {
'info': fee,
'symbol': symbol,
'maker': self.safe_number(fee, 'makerCommission'),
'taker': self.safe_number(fee, 'takerCommission'),
}
def fetch_trading_fee(self, symbol, params={}):
self.load_markets()
market = self.market(symbol)
request = {
'symbol': market['id'],
}
response = self.sapiGetAssetTradeFee(self.extend(request, params))
#
# [
# {
# "symbol": "BTCUSDT",
# "makerCommission": "0.001",
# "takerCommission": "0.001"
# }
# ]
#
first = self.safe_value(response, 0, {})
return self.parse_trading_fee(first)
def fetch_trading_fees(self, params={}):
self.load_markets()
method = None
defaultType = self.safe_string_2(self.options, 'fetchFundingRates', 'defaultType', 'future')
type = self.safe_string(params, 'type', defaultType)
query = self.omit(params, 'type')
if (type == 'spot') or (type == 'margin'):
method = 'sapiGetAssetTradeFee'
elif type == 'future':
method = 'fapiPrivateGetAccount'
elif type == 'delivery':
method = 'dapiPrivateGetAccount'
response = getattr(self, method)(query)
#
# sapi / spot
#
# [
# {
# "symbol": "ZRXBNB",
# "makerCommission": "0.001",
# "takerCommission": "0.001"
# },
# {
# "symbol": "ZRXBTC",
# "makerCommission": "0.001",
# "takerCommission": "0.001"
# },
# ]
#
# fapi / future / linear
#
# {
# "feeTier": 0, # account commisssion tier
# "canTrade": True, # if can trade
# "canDeposit": True, # if can transfer in asset
# "canWithdraw": True, # if can transfer out asset
# "updateTime": 0,
# "totalInitialMargin": "0.00000000", # total initial margin required with current mark price(useless with isolated positions), only for USDT asset
# "totalMaintMargin": "0.00000000", # total maintenance margin required, only for USDT asset
# "totalWalletBalance": "23.72469206", # total wallet balance, only for USDT asset
# "totalUnrealizedProfit": "0.00000000", # total unrealized profit, only for USDT asset
# "totalMarginBalance": "23.72469206", # total margin balance, only for USDT asset
# "totalPositionInitialMargin": "0.00000000", # initial margin required for positions with current mark price, only for USDT asset
# "totalOpenOrderInitialMargin": "0.00000000", # initial margin required for open orders with current mark price, only for USDT asset
# "totalCrossWalletBalance": "23.72469206", # crossed wallet balance, only for USDT asset
# "totalCrossUnPnl": "0.00000000", # unrealized profit of crossed positions, only for USDT asset
# "availableBalance": "23.72469206", # available balance, only for USDT asset
# "maxWithdrawAmount": "23.72469206" # maximum amount for transfer out, only for USDT asset
# ...
# }
#
# dapi / delivery / inverse
#
# {
# "canDeposit": True,
# "canTrade": True,
# "canWithdraw": True,
# "feeTier": 2,
# "updateTime": 0
# }
#
if (type == 'spot') or (type == 'margin'):
#
# [
# {
# "symbol": "ZRXBNB",
# "makerCommission": "0.001",
# "takerCommission": "0.001"
# },
# {
# "symbol": "ZRXBTC",
# "makerCommission": "0.001",
# "takerCommission": "0.001"
# },
# ]
#
result = {}
for i in range(0, len(response)):
fee = self.parse_trading_fee(response[i])
symbol = fee['symbol']
result[symbol] = fee
return result
elif type == 'future':
#
# {
# "feeTier": 0, # account commisssion tier
# "canTrade": True, # if can trade
# "canDeposit": True, # if can transfer in asset
# "canWithdraw": True, # if can transfer out asset
# "updateTime": 0,
# "totalInitialMargin": "0.00000000", # total initial margin required with current mark price(useless with isolated positions), only for USDT asset
# "totalMaintMargin": "0.00000000", # total maintenance margin required, only for USDT asset
# "totalWalletBalance": "23.72469206", # total wallet balance, only for USDT asset
# "totalUnrealizedProfit": "0.00000000", # total unrealized profit, only for USDT asset
# "totalMarginBalance": "23.72469206", # total margin balance, only for USDT asset
# "totalPositionInitialMargin": "0.00000000", # initial margin required for positions with current mark price, only for USDT asset
# "totalOpenOrderInitialMargin": "0.00000000", # initial margin required for open orders with current mark price, only for USDT asset
# "totalCrossWalletBalance": "23.72469206", # crossed wallet balance, only for USDT asset
# "totalCrossUnPnl": "0.00000000", # unrealized profit of crossed positions, only for USDT asset
# "availableBalance": "23.72469206", # available balance, only for USDT asset
# "maxWithdrawAmount": "23.72469206" # maximum amount for transfer out, only for USDT asset
# ...
# }
#
symbols = list(self.markets.keys())
result = {}
feeTier = self.safe_integer(response, 'feeTier')
feeTiers = self.fees[type]['trading']['tiers']
maker = feeTiers['maker'][feeTier][1]
taker = feeTiers['taker'][feeTier][1]
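            # e.g. feeTier 0 with hypothetical tiers [[0, 0.0002], ...] yields a maker rate of 0.0002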
for i in range(0, len(symbols)):
symbol = symbols[i]
result[symbol] = {
'info': {
'feeTier': feeTier,
},
'symbol': symbol,
'maker': maker,
'taker': taker,
}
return result
elif type == 'delivery':
#
# {
# "canDeposit": True,
# "canTrade": True,
# "canWithdraw": True,
# "feeTier": 2,
# "updateTime": 0
# }
#
symbols = list(self.markets.keys())
result = {}
feeTier = self.safe_integer(response, 'feeTier')
feeTiers = self.fees[type]['trading']['tiers']
maker = feeTiers['maker'][feeTier][1]
taker = feeTiers['taker'][feeTier][1]
for i in range(0, len(symbols)):
symbol = symbols[i]
result[symbol] = {
'info': {
'feeTier': feeTier,
},
'symbol': symbol,
'maker': maker,
'taker': taker,
}
return result
def futures_transfer(self, code, amount, type, params={}):
if (type < 1) or (type > 4):
raise ArgumentsRequired(self.id + ' type must be between 1 and 4')
self.load_markets()
currency = self.currency(code)
request = {
'asset': currency['id'],
'amount': amount,
'type': type,
}
response = self.sapiPostFuturesTransfer(self.extend(request, params))
#
# {
# "tranId": 100000001
# }
#
return self.parse_transfer(response, currency)
def fetch_funding_rate(self, symbol, params={}):
self.load_markets()
market = self.market(symbol)
request = {
'symbol': market['id'],
}
method = None
if market['linear']:
method = 'fapiPublicGetPremiumIndex'
elif market['inverse']:
method = 'dapiPublicGetPremiumIndex'
else:
raise NotSupported(self.id + ' fetchFundingRate() supports linear and inverse contracts only')
response = getattr(self, method)(self.extend(request, params))
if market['inverse']:
response = response[0]
#
# {
# "symbol": "BTCUSDT",
# "markPrice": "45802.81129892",
# "indexPrice": "45745.47701915",
# "estimatedSettlePrice": "45133.91753671",
# "lastFundingRate": "0.00063521",
# "interestRate": "0.00010000",
# "nextFundingTime": "1621267200000",
# "time": "1621252344001"
# }
#
return self.parse_funding_rate(response, market)
def fetch_funding_rate_history(self, symbol=None, since=None, limit=None, params={}):
#
# Gets a history of funding rates with their timestamps
        # (param) symbol: Future currency pair (e.g. "BTC/USDT")
        # (param) limit: maximum number of data points returned
        # (param) since: Unix timestamp in milliseconds for the time of the earliest requested funding rate
        # (param) params: Object containing more params for the request
        #    - until: Unix timestamp in milliseconds for the time of the latest requested funding rate
# return: [{symbol, fundingRate, timestamp}]
#
self.load_markets()
request = {}
method = None
defaultType = self.safe_string_2(self.options, 'fetchFundingRateHistory', 'defaultType', 'future')
type = self.safe_string(params, 'type', defaultType)
params = self.omit(params, 'type')
if type == 'future':
method = 'fapiPublicGetFundingRate'
elif type == 'delivery':
method = 'dapiPublicGetFundingRate'
if symbol is not None:
market = self.market(symbol)
request['symbol'] = market['id']
if market['linear']:
method = 'fapiPublicGetFundingRate'
elif market['inverse']:
method = 'dapiPublicGetFundingRate'
if method is None:
raise NotSupported(self.id + ' fetchFundingRateHistory() not supported for ' + type + ' markets')
if since is not None:
request['startTime'] = since
till = self.safe_integer(params, 'till') # unified in milliseconds
endTime = self.safe_string(params, 'endTime', till) # exchange-specific in milliseconds
params = self.omit(params, ['endTime', 'till'])
if endTime is not None:
request['endTime'] = endTime
if limit is not None:
request['limit'] = limit
response = getattr(self, method)(self.extend(request, params))
#
# {
# "symbol": "BTCUSDT",
# "fundingRate": "0.00063521",
# "fundingTime": "1621267200000",
# }
#
rates = []
for i in range(0, len(response)):
entry = response[i]
timestamp = self.safe_integer(entry, 'fundingTime')
rates.append({
'info': entry,
'symbol': self.safe_symbol(self.safe_string(entry, 'symbol')),
'fundingRate': self.safe_number(entry, 'fundingRate'),
'timestamp': timestamp,
'datetime': self.iso8601(timestamp),
})
sorted = self.sort_by(rates, 'timestamp')
return self.filter_by_symbol_since_limit(sorted, symbol, since, limit)
def fetch_funding_rates(self, symbols=None, params={}):
self.load_markets()
method = None
defaultType = self.safe_string_2(self.options, 'fetchFundingRates', 'defaultType', 'future')
type = self.safe_string(params, 'type', defaultType)
query = self.omit(params, 'type')
if type == 'future':
method = 'fapiPublicGetPremiumIndex'
elif type == 'delivery':
method = 'dapiPublicGetPremiumIndex'
else:
raise NotSupported(self.id + ' fetchFundingRates() supports linear and inverse contracts only')
response = getattr(self, method)(query)
result = []
for i in range(0, len(response)):
entry = response[i]
parsed = self.parse_funding_rate(entry)
result.append(parsed)
return self.filter_by_array(result, 'symbol', symbols)
def parse_funding_rate(self, premiumIndex, market=None):
# ensure it matches with https://www.binance.com/en/futures/funding-history/0
#
# {
# "symbol": "BTCUSDT",
# "markPrice": "45802.81129892",
# "indexPrice": "45745.47701915",
# "estimatedSettlePrice": "45133.91753671",
# "lastFundingRate": "0.00063521",
# "interestRate": "0.00010000",
# "nextFundingTime": "1621267200000",
# "time": "1621252344001"
# }
#
timestamp = self.safe_integer(premiumIndex, 'time')
marketId = self.safe_string(premiumIndex, 'symbol')
symbol = self.safe_symbol(marketId, market)
markPrice = self.safe_number(premiumIndex, 'markPrice')
indexPrice = self.safe_number(premiumIndex, 'indexPrice')
interestRate = self.safe_number(premiumIndex, 'interestRate')
estimatedSettlePrice = self.safe_number(premiumIndex, 'estimatedSettlePrice')
nextFundingRate = self.safe_number(premiumIndex, 'lastFundingRate')
nextFundingTime = self.safe_integer(premiumIndex, 'nextFundingTime')
previousFundingTime = nextFundingTime - (8 * 3600000)
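        # Binance futures funding settles every 8 hours, hence 8 * 3600000 ms earlier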
return {
'info': premiumIndex,
'symbol': symbol,
'markPrice': markPrice,
'indexPrice': indexPrice,
'interestRate': interestRate,
'estimatedSettlePrice': estimatedSettlePrice,
'timestamp': timestamp,
'datetime': self.iso8601(timestamp),
'previousFundingRate': None,
'nextFundingRate': nextFundingRate,
'previousFundingTimestamp': previousFundingTime, # subtract 8 hours
'nextFundingTimestamp': nextFundingTime,
'previousFundingDatetime': self.iso8601(previousFundingTime),
'nextFundingDatetime': self.iso8601(nextFundingTime),
}
def parse_account_positions(self, account):
positions = self.safe_value(account, 'positions')
assets = self.safe_value(account, 'assets')
balances = {}
for i in range(0, len(assets)):
entry = assets[i]
currencyId = self.safe_string(entry, 'asset')
code = self.safe_currency_code(currencyId)
crossWalletBalance = self.safe_string(entry, 'crossWalletBalance')
crossUnPnl = self.safe_string(entry, 'crossUnPnl')
balances[code] = {
'crossMargin': Precise.string_add(crossWalletBalance, crossUnPnl),
'crossWalletBalance': crossWalletBalance,
}
result = []
for i in range(0, len(positions)):
position = positions[i]
marketId = self.safe_string(position, 'symbol')
market = self.safe_market(marketId)
code = market['quote'] if (self.options['defaultType'] == 'future') else market['base']
# sometimes not all the codes are correctly returned...
if code in balances:
parsed = self.parse_account_position(self.extend(position, {
'crossMargin': balances[code]['crossMargin'],
'crossWalletBalance': balances[code]['crossWalletBalance'],
}), market)
result.append(parsed)
return result
def parse_account_position(self, position, market=None):
#
# usdm
# {
# "symbol": "BTCBUSD",
# "initialMargin": "0",
# "maintMargin": "0",
# "unrealizedProfit": "0.00000000",
# "positionInitialMargin": "0",
# "openOrderInitialMargin": "0",
# "leverage": "20",
# "isolated": False,
# "entryPrice": "0.0000",
# "maxNotional": "100000",
# "positionSide": "BOTH",
# "positionAmt": "0.000",
# "notional": "0",
# "isolatedWallet": "0",
# "updateTime": "0",
# "crossMargin": "100.93634809",
# }
#
# coinm
# {
# "symbol": "BTCUSD_210625",
# "initialMargin": "0.00024393",
# "maintMargin": "0.00002439",
# "unrealizedProfit": "-0.00000163",
# "positionInitialMargin": "0.00024393",
# "openOrderInitialMargin": "0",
# "leverage": "10",
# "isolated": False,
# "positionSide": "BOTH",
# "entryPrice": "41021.20000069",
# "maxQty": "100",
# "notionalValue": "0.00243939",
# "isolatedWallet": "0",
# "crossMargin": "0.314"
# "crossWalletBalance": "34",
# }
#
marketId = self.safe_string(position, 'symbol')
market = self.safe_market(marketId, market)
symbol = market['symbol']
leverageString = self.safe_string(position, 'leverage')
leverage = int(leverageString)
initialMarginString = self.safe_string(position, 'initialMargin')
initialMargin = self.parse_number(initialMarginString)
initialMarginPercentageString = Precise.string_div('1', leverageString, 8)
rational = (1000 % leverage) == 0
if not rational:
initialMarginPercentageString = Precise.string_div(Precise.string_add(initialMarginPercentageString, '1e-8'), '1', 8)
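        # e.g. leverage 3: 1/3 truncates to '0.33333333'; 1000 % 3 != 0, so nudge up to '0.33333334'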
usdm = ('notional' in position)
maintenanceMarginString = self.safe_string(position, 'maintMargin')
maintenanceMargin = self.parse_number(maintenanceMarginString)
entryPriceString = self.safe_string(position, 'entryPrice')
entryPrice = self.parse_number(entryPriceString)
notionalString = self.safe_string_2(position, 'notional', 'notionalValue')
notionalStringAbs = Precise.string_abs(notionalString)
notionalFloat = float(notionalString)
notionalFloatAbs = float(notionalStringAbs)
notional = self.parse_number(Precise.string_abs(notionalString))
contractsString = self.safe_string(position, 'positionAmt')
contractsStringAbs = Precise.string_abs(contractsString)
if contractsString is None:
entryNotional = Precise.string_mul(Precise.string_mul(leverageString, initialMarginString), entryPriceString)
contractsString = Precise.string_div(entryNotional, market['contractSize'])
contractsStringAbs = Precise.string_div(Precise.string_add(contractsString, '0.5'), '1', 0)
contracts = self.parse_number(contractsStringAbs)
leverageBrackets = self.safe_value(self.options, 'leverageBrackets', {})
leverageBracket = self.safe_value(leverageBrackets, symbol, [])
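        # each bracket is a [notionalFloor, maintenanceMarginRate] pair in ascending floor order,
        # as assembled by load_leverage_brackets() below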
maintenanceMarginPercentageString = None
for i in range(0, len(leverageBracket)):
bracket = leverageBracket[i]
if notionalFloatAbs < bracket[0]:
break
maintenanceMarginPercentageString = bracket[1]
maintenanceMarginPercentage = self.parse_number(maintenanceMarginPercentageString)
unrealizedPnlString = self.safe_string(position, 'unrealizedProfit')
unrealizedPnl = self.parse_number(unrealizedPnlString)
timestamp = self.safe_integer(position, 'updateTime')
if timestamp == 0:
timestamp = None
isolated = self.safe_value(position, 'isolated')
marginType = None
collateralString = None
walletBalance = None
if isolated:
marginType = 'isolated'
walletBalance = self.safe_string(position, 'isolatedWallet')
collateralString = Precise.string_add(walletBalance, unrealizedPnlString)
else:
marginType = 'cross'
walletBalance = self.safe_string(position, 'crossWalletBalance')
collateralString = self.safe_string(position, 'crossMargin')
collateral = self.parse_number(collateralString)
marginRatio = None
side = None
percentage = None
liquidationPriceStringRaw = None
liquidationPrice = None
if notionalFloat == 0.0:
entryPrice = None
else:
side = 'short' if (notionalFloat < 0) else 'long'
marginRatio = self.parse_number(Precise.string_div(Precise.string_add(Precise.string_div(maintenanceMarginString, collateralString), '5e-5'), '1', 4))
percentage = self.parse_number(Precise.string_mul(Precise.string_div(unrealizedPnlString, initialMarginString, 4), '100'))
if usdm:
# calculate liquidation price
#
# liquidationPrice = (walletBalance / (contracts * (±1 + mmp))) + (±entryPrice / (±1 + mmp))
#
# mmp = maintenanceMarginPercentage
# where ± is negative for long and positive for short
# TODO: calculate liquidation price for coinm contracts
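                # worked example(hypothetical long): 1 contract, entryPrice 40000, walletBalance 10000, mmp 0.004:
                # 10000 / (1 * -0.996) + (-40000 / -0.996) = -10040.16 + 40160.64 ~ 30120.48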
onePlusMaintenanceMarginPercentageString = None
entryPriceSignString = entryPriceString
if side == 'short':
onePlusMaintenanceMarginPercentageString = Precise.string_add('1', maintenanceMarginPercentageString)
else:
onePlusMaintenanceMarginPercentageString = Precise.string_add('-1', maintenanceMarginPercentageString)
entryPriceSignString = Precise.string_mul('-1', entryPriceSignString)
leftSide = Precise.string_div(walletBalance, Precise.string_mul(contractsStringAbs, onePlusMaintenanceMarginPercentageString))
rightSide = Precise.string_div(entryPriceSignString, onePlusMaintenanceMarginPercentageString)
liquidationPriceStringRaw = Precise.string_add(leftSide, rightSide)
else:
# calculate liquidation price
#
                # liquidationPrice = (contracts * contractSize * (±1 - mmp)) / (±1/entryPrice * contracts * contractSize - walletBalance)
#
onePlusMaintenanceMarginPercentageString = None
entryPriceSignString = entryPriceString
if side == 'short':
onePlusMaintenanceMarginPercentageString = Precise.string_sub('1', maintenanceMarginPercentageString)
else:
onePlusMaintenanceMarginPercentageString = Precise.string_sub('-1', maintenanceMarginPercentageString)
entryPriceSignString = Precise.string_mul('-1', entryPriceSignString)
size = Precise.string_mul(contractsStringAbs, market['contractSize'])
leftSide = Precise.string_mul(size, onePlusMaintenanceMarginPercentageString)
rightSide = Precise.string_sub(Precise.string_mul(Precise.string_div('1', entryPriceSignString), size), walletBalance)
liquidationPriceStringRaw = Precise.string_div(leftSide, rightSide)
pricePrecision = market['precision']['price']
pricePrecisionPlusOne = pricePrecision + 1
pricePrecisionPlusOneString = str(pricePrecisionPlusOne)
# round half up
rounder = Precise('5e-' + pricePrecisionPlusOneString)
rounderString = str(rounder)
liquidationPriceRoundedString = Precise.string_add(rounderString, liquidationPriceStringRaw)
truncatedLiquidationPrice = Precise.string_div(liquidationPriceRoundedString, '1', pricePrecision)
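            # e.g. with pricePrecision 2 the rounder is '0.005': 30120.487 + 0.005 = 30120.492, truncated to '30120.49'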
if truncatedLiquidationPrice[0] == '-':
                # a negative price means the user cannot be liquidated,
                # since the collateral exceeds the size of the position
truncatedLiquidationPrice = None
liquidationPrice = self.parse_number(truncatedLiquidationPrice)
positionSide = self.safe_string(position, 'positionSide')
hedged = positionSide != 'BOTH'
return {
'info': position,
'symbol': symbol,
'timestamp': timestamp,
'datetime': self.iso8601(timestamp),
'initialMargin': initialMargin,
'initialMarginPercentage': self.parse_number(initialMarginPercentageString),
'maintenanceMargin': maintenanceMargin,
'maintenanceMarginPercentage': maintenanceMarginPercentage,
'entryPrice': entryPrice,
'notional': notional,
'leverage': self.parse_number(leverageString),
'unrealizedPnl': unrealizedPnl,
'contracts': contracts,
'contractSize': self.parse_number(market['contractSize']),
'marginRatio': marginRatio,
'liquidationPrice': liquidationPrice,
'markPrice': None,
'collateral': collateral,
'marginType': marginType,
'side': side,
'hedged': hedged,
'percentage': percentage,
}
def parse_position_risk(self, position, market=None):
#
# usdm
# {
# "symbol": "BTCUSDT",
# "positionAmt": "0.001",
# "entryPrice": "43578.07000",
# "markPrice": "43532.30000000",
# "unRealizedProfit": "-0.04577000",
# "liquidationPrice": "21841.24993976",
# "leverage": "2",
# "maxNotionalValue": "300000000",
# "marginType": "isolated",
# "isolatedMargin": "21.77841506",
# "isAutoAddMargin": "false",
# "positionSide": "BOTH",
# "notional": "43.53230000",
# "isolatedWallet": "21.82418506",
# "updateTime": "1621358023886"
# }
#
# coinm
# {
# "symbol": "BTCUSD_PERP",
# "positionAmt": "2",
# "entryPrice": "37643.10000021",
# "markPrice": "38103.05510455",
# "unRealizedProfit": "0.00006413",
# "liquidationPrice": "25119.97445760",
# "leverage": "2",
# "maxQty": "1500",
# "marginType": "isolated",
# "isolatedMargin": "0.00274471",
# "isAutoAddMargin": "false",
# "positionSide": "BOTH",
# "notionalValue": "0.00524892",
# "isolatedWallet": "0.00268058"
# }
#
marketId = self.safe_string(position, 'symbol')
market = self.safe_market(marketId, market)
symbol = market['symbol']
leverageBrackets = self.safe_value(self.options, 'leverageBrackets', {})
leverageBracket = self.safe_value(leverageBrackets, symbol, [])
notionalString = self.safe_string_2(position, 'notional', 'notionalValue')
notionalStringAbs = Precise.string_abs(notionalString)
notionalFloatAbs = float(notionalStringAbs)
notionalFloat = float(notionalString)
maintenanceMarginPercentageString = None
for i in range(0, len(leverageBracket)):
bracket = leverageBracket[i]
if notionalFloatAbs < bracket[0]:
break
maintenanceMarginPercentageString = bracket[1]
notional = self.parse_number(notionalStringAbs)
contractsAbs = Precise.string_abs(self.safe_string(position, 'positionAmt'))
contracts = self.parse_number(contractsAbs)
unrealizedPnlString = self.safe_string(position, 'unRealizedProfit')
unrealizedPnl = self.parse_number(unrealizedPnlString)
leverageString = self.safe_string(position, 'leverage')
leverage = int(leverageString)
liquidationPriceString = self.omit_zero(self.safe_string(position, 'liquidationPrice'))
liquidationPrice = self.parse_number(liquidationPriceString)
collateralString = None
marginType = self.safe_string(position, 'marginType')
side = None
if notionalFloat > 0:
side = 'long'
elif notionalFloat < 0:
side = 'short'
entryPriceString = self.safe_string(position, 'entryPrice')
entryPrice = self.parse_number(entryPriceString)
if marginType == 'cross':
# calculate collateral
if market['linear']:
# walletBalance = (liquidationPrice * (±1 + mmp) ± entryPrice) * contracts
onePlusMaintenanceMarginPercentageString = None
entryPriceSignString = entryPriceString
if side == 'short':
onePlusMaintenanceMarginPercentageString = Precise.string_add('1', maintenanceMarginPercentageString)
entryPriceSignString = Precise.string_mul('-1', entryPriceSignString)
else:
onePlusMaintenanceMarginPercentageString = Precise.string_add('-1', maintenanceMarginPercentageString)
inner = Precise.string_mul(liquidationPriceString, onePlusMaintenanceMarginPercentageString)
leftSide = Precise.string_add(inner, entryPriceSignString)
collateralString = Precise.string_div(Precise.string_mul(leftSide, contractsAbs), '1', market['precision']['quote'])
else:
# walletBalance = (contracts * contractSize) * (±1/entryPrice - (±1 - mmp) / liquidationPrice)
onePlusMaintenanceMarginPercentageString = None
entryPriceSignString = entryPriceString
if side == 'short':
onePlusMaintenanceMarginPercentageString = Precise.string_sub('1', maintenanceMarginPercentageString)
else:
onePlusMaintenanceMarginPercentageString = Precise.string_sub('-1', maintenanceMarginPercentageString)
entryPriceSignString = Precise.string_mul('-1', entryPriceSignString)
leftSide = Precise.string_mul(contractsAbs, market['contractSize'])
rightSide = Precise.string_sub(Precise.string_div('1', entryPriceSignString), Precise.string_div(onePlusMaintenanceMarginPercentageString, liquidationPriceString))
collateralString = Precise.string_div(Precise.string_mul(leftSide, rightSide), '1', market['precision']['base'])
else:
collateralString = self.safe_string(position, 'isolatedMargin')
collateralString = '0' if (collateralString is None) else collateralString
collateralFloat = float(collateralString)
collateral = self.parse_number(collateralString)
markPrice = self.parse_number(self.omit_zero(self.safe_string(position, 'markPrice')))
timestamp = self.safe_integer(position, 'updateTime')
if timestamp == 0:
timestamp = None
maintenanceMarginPercentage = self.parse_number(maintenanceMarginPercentageString)
maintenanceMarginString = Precise.string_mul(maintenanceMarginPercentageString, notionalStringAbs)
maintenanceMargin = self.parse_number(maintenanceMarginString)
initialMarginPercentageString = Precise.string_div('1', leverageString, 8)
rational = (1000 % leverage) == 0
if not rational:
initialMarginPercentageString = Precise.string_add(initialMarginPercentageString, '1e-8')
initialMarginString = Precise.string_div(Precise.string_mul(notionalStringAbs, initialMarginPercentageString), '1', 8)
initialMargin = self.parse_number(initialMarginString)
marginRatio = None
percentage = None
if collateralFloat != 0.0:
marginRatio = self.parse_number(Precise.string_div(Precise.string_add(Precise.string_div(maintenanceMarginString, collateralString), '5e-5'), '1', 4))
percentage = self.parse_number(Precise.string_mul(Precise.string_div(unrealizedPnlString, initialMarginString, 4), '100'))
positionSide = self.safe_string(position, 'positionSide')
hedged = positionSide != 'BOTH'
return {
'info': position,
'symbol': symbol,
'contracts': contracts,
'contractSize': self.parse_number(market['contractSize']),
'unrealizedPnl': unrealizedPnl,
'leverage': self.parse_number(leverageString),
'liquidationPrice': liquidationPrice,
'collateral': collateral,
'notional': notional,
'markPrice': markPrice,
'entryPrice': entryPrice,
'timestamp': timestamp,
'initialMargin': initialMargin,
'initialMarginPercentage': self.parse_number(initialMarginPercentageString),
'maintenanceMargin': maintenanceMargin,
'maintenanceMarginPercentage': maintenanceMarginPercentage,
'marginRatio': marginRatio,
'datetime': self.iso8601(timestamp),
'marginType': marginType,
'side': side,
'hedged': hedged,
'percentage': percentage,
}
def load_leverage_brackets(self, reload=False, params={}):
self.load_markets()
        # cache the leverage brackets by default
        # they contain the maintenance margin and initial margin requirements for each position tier
leverageBrackets = self.safe_value(self.options, 'leverageBrackets')
if (leverageBrackets is None) or (reload):
method = None
defaultType = self.safe_string(self.options, 'defaultType', 'future')
type = self.safe_string(params, 'type', defaultType)
query = self.omit(params, 'type')
if type == 'future':
method = 'fapiPrivateGetLeverageBracket'
elif type == 'delivery':
method = 'dapiPrivateV2GetLeverageBracket'
else:
raise NotSupported(self.id + ' loadLeverageBrackets() supports linear and inverse contracts only')
response = getattr(self, method)(query)
self.options['leverageBrackets'] = {}
for i in range(0, len(response)):
entry = response[i]
marketId = self.safe_string(entry, 'symbol')
symbol = self.safe_symbol(marketId)
brackets = self.safe_value(entry, 'brackets')
result = []
for j in range(0, len(brackets)):
bracket = brackets[j]
# we use floats here internally on purpose
floorValue = self.safe_float_2(bracket, 'notionalFloor', 'qtyFloor')
maintenanceMarginPercentage = self.safe_string(bracket, 'maintMarginRatio')
result.append([floorValue, maintenanceMarginPercentage])
self.options['leverageBrackets'][symbol] = result
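            # hypothetical cached shape: {'BTC/USDT': [[0.0, '0.004'], [50000.0, '0.005'], ...]}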
return self.options['leverageBrackets']
def fetch_positions(self, symbols=None, params={}):
defaultMethod = self.safe_string(self.options, 'fetchPositions', 'positionRisk')
if defaultMethod == 'positionRisk':
return self.fetch_positions_risk(symbols, params)
elif defaultMethod == 'account':
return self.fetch_account_positions(symbols, params)
else:
raise NotSupported(self.id + '.options["fetchPositions"] = "' + defaultMethod + '" is invalid, please choose between "account" and "positionRisk"')
def fetch_account_positions(self, symbols=None, params={}):
if symbols is not None:
if not isinstance(symbols, list):
raise ArgumentsRequired(self.id + ' fetchPositions requires an array argument for symbols')
self.load_markets()
self.load_leverage_brackets()
method = None
defaultType = self.safe_string(self.options, 'defaultType', 'future')
type = self.safe_string(params, 'type', defaultType)
query = self.omit(params, 'type')
if type == 'future':
method = 'fapiPrivateGetAccount'
elif type == 'delivery':
method = 'dapiPrivateGetAccount'
else:
raise NotSupported(self.id + ' fetchPositions() supports linear and inverse contracts only')
account = getattr(self, method)(query)
result = self.parse_account_positions(account)
return self.filter_by_array(result, 'symbol', symbols, False)
def fetch_positions_risk(self, symbols=None, params={}):
if symbols is not None:
if not isinstance(symbols, list):
raise ArgumentsRequired(self.id + ' fetchPositions requires an array argument for symbols')
self.load_markets()
self.load_leverage_brackets()
request = {}
method = None
defaultType = 'future'
defaultType = self.safe_string(self.options, 'defaultType', defaultType)
type = self.safe_string(params, 'type', defaultType)
params = self.omit(params, 'type')
if (type == 'future') or (type == 'linear'):
method = 'fapiPrivateGetPositionRisk'
elif (type == 'delivery') or (type == 'inverse'):
method = 'dapiPrivateGetPositionRisk'
else:
            raise NotSupported(self.id + ' fetchPositionsRisk() supports linear and inverse contracts only')
response = getattr(self, method)(self.extend(request, params))
result = []
for i in range(0, len(response)):
parsed = self.parse_position_risk(response[i])
result.append(parsed)
return self.filter_by_array(result, 'symbol', symbols, False)
def fetch_funding_history(self, symbol=None, since=None, limit=None, params={}):
self.load_markets()
market = None
method = None
defaultType = 'future'
request = {
'incomeType': 'FUNDING_FEE', # "TRANSFER","WELCOME_BONUS", "REALIZED_PNL","FUNDING_FEE", "COMMISSION" and "INSURANCE_CLEAR"
}
if symbol is not None:
market = self.market(symbol)
request['symbol'] = market['id']
if market['linear']:
defaultType = 'future'
elif market['inverse']:
defaultType = 'delivery'
else:
raise NotSupported(self.id + ' fetchFundingHistory() supports linear and inverse contracts only')
if since is not None:
request['startTime'] = since
if limit is not None:
request['limit'] = limit
defaultType = self.safe_string_2(self.options, 'fetchFundingHistory', 'defaultType', defaultType)
type = self.safe_string(params, 'type', defaultType)
params = self.omit(params, 'type')
if (type == 'future') or (type == 'linear'):
method = 'fapiPrivateGetIncome'
elif (type == 'delivery') or (type == 'inverse'):
method = 'dapiPrivateGetIncome'
else:
raise NotSupported(self.id + ' fetchFundingHistory() supports linear and inverse contracts only')
response = getattr(self, method)(self.extend(request, params))
return self.parse_incomes(response, market, since, limit)
def set_leverage(self, leverage, symbol=None, params={}):
if symbol is None:
raise ArgumentsRequired(self.id + ' setLeverage() requires a symbol argument')
# WARNING: THIS WILL INCREASE LIQUIDATION PRICE FOR OPEN ISOLATED LONG POSITIONS
# AND DECREASE LIQUIDATION PRICE FOR OPEN ISOLATED SHORT POSITIONS
if (leverage < 1) or (leverage > 125):
raise BadRequest(self.id + ' leverage should be between 1 and 125')
self.load_markets()
market = self.market(symbol)
method = None
if market['linear']:
method = 'fapiPrivatePostLeverage'
elif market['inverse']:
method = 'dapiPrivatePostLeverage'
else:
raise NotSupported(self.id + ' setLeverage() supports linear and inverse contracts only')
request = {
'symbol': market['id'],
'leverage': leverage,
}
return getattr(self, method)(self.extend(request, params))
def set_margin_mode(self, marginType, symbol=None, params={}):
#
# {"code": -4048 , "msg": "Margin type cannot be changed if there exists position."}
#
# or
#
# {"code": 200, "msg": "success"}
#
marginType = marginType.upper()
if (marginType != 'ISOLATED') and (marginType != 'CROSSED'):
raise BadRequest(self.id + ' marginType must be either isolated or crossed')
self.load_markets()
market = self.market(symbol)
method = None
if market['linear']:
method = 'fapiPrivatePostMarginType'
elif market['inverse']:
method = 'dapiPrivatePostMarginType'
else:
raise NotSupported(self.id + ' setMarginMode() supports linear and inverse contracts only')
request = {
'symbol': market['id'],
'marginType': marginType,
}
return getattr(self, method)(self.extend(request, params))
def set_position_mode(self, hedged, symbol=None, params={}):
defaultType = self.safe_string(self.options, 'defaultType', 'future')
type = self.safe_string(params, 'type', defaultType)
params = self.omit(params, ['type'])
dualSidePosition = None
if hedged:
dualSidePosition = 'true'
else:
dualSidePosition = 'false'
request = {
'dualSidePosition': dualSidePosition,
}
method = None
if type == 'delivery':
method = 'dapiPrivatePostPositionSideDual'
else:
# default to future
method = 'fapiPrivatePostPositionSideDual'
#
# {
# "code": 200,
# "msg": "success"
# }
#
return getattr(self, method)(self.extend(request, params))
def sign(self, path, api='public', method='GET', params={}, headers=None, body=None):
if not (api in self.urls['api']):
raise NotSupported(self.id + ' does not have a testnet/sandbox URL for ' + api + ' endpoints')
url = self.urls['api'][api]
url += '/' + path
if api == 'wapi':
url += '.html'
if path == 'historicalTrades':
if self.apiKey:
headers = {
'X-MBX-APIKEY': self.apiKey,
}
else:
raise AuthenticationError(self.id + ' historicalTrades endpoint requires `apiKey` credential')
userDataStream = (path == 'userDataStream') or (path == 'listenKey')
if userDataStream:
if self.apiKey:
# v1 special case for userDataStream
headers = {
'X-MBX-APIKEY': self.apiKey,
'Content-Type': 'application/x-www-form-urlencoded',
}
if method != 'GET':
body = self.urlencode(params)
else:
raise AuthenticationError(self.id + ' userDataStream endpoint requires `apiKey` credential')
elif (api == 'private') or (api == 'sapi') or (api == 'wapi' and path != 'systemStatus') or (api == 'dapiPrivate') or (api == 'dapiPrivateV2') or (api == 'fapiPrivate') or (api == 'fapiPrivateV2'):
self.check_required_credentials()
query = None
recvWindow = self.safe_integer(self.options, 'recvWindow', 5000)
if (api == 'sapi') and (path == 'asset/dust'):
query = self.urlencode_with_array_repeat(self.extend({
'timestamp': self.nonce(),
'recvWindow': recvWindow,
}, params))
elif (path == 'batchOrders') or (path.find('sub-account') >= 0):
query = self.rawencode(self.extend({
'timestamp': self.nonce(),
'recvWindow': recvWindow,
}, params))
else:
query = self.urlencode(self.extend({
'timestamp': self.nonce(),
'recvWindow': recvWindow,
}, params))
signature = self.hmac(self.encode(query), self.encode(self.secret))
query += '&' + 'signature=' + signature
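            # query now carries a trailing signature: an HMAC hex digest(SHA-256 by default in ccxt)
            # of the urlencoded query string, keyed with the account's API secret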
headers = {
'X-MBX-APIKEY': self.apiKey,
}
if (method == 'GET') or (method == 'DELETE') or (api == 'wapi'):
url += '?' + query
else:
body = query
headers['Content-Type'] = 'application/x-www-form-urlencoded'
else:
if params:
url += '?' + self.urlencode(params)
return {'url': url, 'method': method, 'body': body, 'headers': headers}
def handle_errors(self, code, reason, url, method, headers, body, response, requestHeaders, requestBody):
if (code == 418) or (code == 429):
raise DDoSProtection(self.id + ' ' + str(code) + ' ' + reason + ' ' + body)
# error response in a form: {"code": -1013, "msg": "Invalid quantity."}
        # the following block contains legacy checks against message patterns in the "msg" property
        # we will switch to "code" checks eventually, when we know all of them
if code >= 400:
if body.find('Price * QTY is zero or less') >= 0:
raise InvalidOrder(self.id + ' order cost = amount * price is zero or less ' + body)
if body.find('LOT_SIZE') >= 0:
raise InvalidOrder(self.id + ' order amount should be evenly divisible by lot size ' + body)
if body.find('PRICE_FILTER') >= 0:
raise InvalidOrder(self.id + ' order price is invalid, i.e. exceeds allowed price precision, exceeds min price or max price limits or is invalid float value in general, use self.price_to_precision(symbol, amount) ' + body)
if response is None:
return # fallback to default error handler
# check success value for wapi endpoints
# response in format {'msg': 'The coin does not exist.', 'success': True/false}
success = self.safe_value(response, 'success', True)
if not success:
message = self.safe_string(response, 'msg')
parsedMessage = None
if message is not None:
try:
parsedMessage = json.loads(message)
except Exception as e:
# do nothing
parsedMessage = None
if parsedMessage is not None:
response = parsedMessage
message = self.safe_string(response, 'msg')
if message is not None:
self.throw_exactly_matched_exception(self.exceptions['exact'], message, self.id + ' ' + message)
self.throw_broadly_matched_exception(self.exceptions['broad'], message, self.id + ' ' + message)
# checks against error codes
error = self.safe_string(response, 'code')
if error is not None:
# https://github.com/ccxt/ccxt/issues/6501
# https://github.com/ccxt/ccxt/issues/7742
if (error == '200') or Precise.string_equals(error, '0'):
return
# a workaround for {"code":-2015,"msg":"Invalid API-key, IP, or permissions for action."}
            # although their message is confusing, Binance raises it on a temporary ban:
            # the API key is valid, but disabled for a while
if (error == '-2015') and self.options['hasAlreadyAuthenticatedSuccessfully']:
raise DDoSProtection(self.id + ' temporary banned: ' + body)
feedback = self.id + ' ' + body
self.throw_exactly_matched_exception(self.exceptions['exact'], error, feedback)
raise ExchangeError(feedback)
if not success:
raise ExchangeError(self.id + ' ' + body)
def calculate_rate_limiter_cost(self, api, method, path, params, config={}, context={}):
if ('noSymbol' in config) and not ('symbol' in params):
return config['noSymbol']
elif ('noPoolId' in config) and not ('poolId' in params):
return config['noPoolId']
elif ('byLimit' in config) and ('limit' in params):
limit = params['limit']
byLimit = config['byLimit']
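            # byLimit is assumed to be an ascending list of [maxLimit, cost] pairs,
            # e.g. [[100, 1], [500, 5]]: a request with limit 250 costs 5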
for i in range(0, len(byLimit)):
entry = byLimit[i]
if limit <= entry[0]:
return entry[1]
return self.safe_integer(config, 'cost', 1)
def request(self, path, api='public', method='GET', params={}, headers=None, body=None, config={}, context={}):
response = self.fetch2(path, api, method, params, headers, body, config, context)
# a workaround for {"code":-2015,"msg":"Invalid API-key, IP, or permissions for action."}
if (api == 'private') or (api == 'wapi'):
self.options['hasAlreadyAuthenticatedSuccessfully'] = True
return response
def modify_margin_helper(self, symbol, amount, addOrReduce, params={}):
# used to modify isolated positions
defaultType = self.safe_string(self.options, 'defaultType', 'future')
if defaultType == 'spot':
defaultType = 'future'
type = self.safe_string(params, 'type', defaultType)
if (type == 'margin') or (type == 'spot'):
raise NotSupported(self.id + ' add / reduce margin only supported with type future or delivery')
self.load_markets()
market = self.market(symbol)
request = {
'type': addOrReduce,
'symbol': market['id'],
'amount': amount,
}
method = None
code = None
if type == 'future':
method = 'fapiPrivatePostPositionMargin'
code = market['quote']
else:
method = 'dapiPrivatePostPositionMargin'
code = market['base']
response = getattr(self, method)(self.extend(request, params))
#
# {
# "code": 200,
# "msg": "Successfully modify position margin.",
# "amount": 0.001,
# "type": 1
# }
#
rawType = self.safe_integer(response, 'type')
resultType = 'add' if (rawType == 1) else 'reduce'
resultAmount = self.safe_number(response, 'amount')
errorCode = self.safe_string(response, 'code')
status = 'ok' if (errorCode == '200') else 'failed'
return {
'info': response,
'type': resultType,
'amount': resultAmount,
'code': code,
'symbol': market['symbol'],
'status': status,
}
def reduce_margin(self, symbol, amount, params={}):
return self.modify_margin_helper(symbol, amount, 2, params)
def add_margin(self, symbol, amount, params={}):
return self.modify_margin_helper(symbol, amount, 1, params)
def fetch_borrow_rate(self, code, params={}):
self.load_markets()
currency = self.currency(code)
request = {
'asset': currency['id'],
# 'vipLevel': self.safe_integer(params, 'vipLevel'),
}
response = self.sapiGetMarginInterestRateHistory(self.extend(request, params))
#
# [
# {
# "asset": "USDT",
# "timestamp": 1638230400000,
# "dailyInterestRate": "0.0006",
# "vipLevel": 0
# },
# ...
# ]
#
rate = self.safe_value(response, 0)
timestamp = self.safe_number(rate, 'timestamp')
return {
'currency': code,
'rate': self.safe_number(rate, 'dailyInterestRate'),
'period': 86400000,
'timestamp': timestamp,
'datetime': self.iso8601(timestamp),
'info': response,
}
| [
[
[
212,
220
],
[
1103,
1111
]
],
[
[
228,
232
],
[
218575,
218579
]
],
[
[
262,
275
],
[
42414,
42427
],
[
46753,
46766
],
[
47873,
47886
],
[
47998,
48011
],
[
56974,
56987
],
[
131345,
131358
],
[
159206,
159219
],
[
159382,
159395
],
[
160950,
160963
],
[
161136,
161149
],
[
220011,
220024
],
[
220077,
220090
]
],
[
[
305,
324
],
[
42670,
42689
],
[
43922,
43941
],
[
44716,
44735
],
[
46205,
46224
],
[
46657,
46676
],
[
47152,
47171
],
[
47254,
47273
],
[
214649,
214668
],
[
215217,
215236
]
],
[
[
354,
370
],
[
42545,
42561
],
[
43304,
43320
],
[
48884,
48900
]
],
[
[
400,
416
],
[
48104,
48120
]
],
[
[
446,
463
],
[
127036,
127053
],
[
128205,
128222
],
[
132629,
132646
],
[
133924,
133941
],
[
134957,
134974
],
[
178911,
178928
],
[
207677,
207694
],
[
208637,
208654
],
[
211352,
211369
]
],
[
[
493,
503
],
[
44560,
44570
],
[
44833,
44843
],
[
44956,
44966
],
[
45050,
45060
],
[
45143,
45153
],
[
45218,
45228
],
[
45330,
45340
],
[
45398,
45408
],
[
45479,
45489
],
[
45653,
45663
],
[
45743,
45753
],
[
45808,
45818
],
[
45871,
45881
],
[
45929,
45939
],
[
46004,
46014
],
[
46084,
46094
],
[
46289,
46299
],
[
46383,
46393
],
[
46494,
46504
],
[
46578,
46588
],
[
48187,
48197
],
[
211654,
211664
],
[
212624,
212634
]
],
[
[
533,
542
],
[
46146,
46155
]
],
[
[
572,
589
],
[
43157,
43174
],
[
47357,
47374
],
[
47455,
47472
],
[
47599,
47616
],
[
47726,
47743
],
[
48280,
48297
],
[
48394,
48411
],
[
48490,
48507
],
[
48587,
48604
],
[
48712,
48729
]
],
[
[
619,
631
],
[
44248,
44260
],
[
45577,
45589
],
[
48938,
48950
],
[
120508,
120520
],
[
121536,
121548
],
[
125789,
125801
],
[
126077,
126089
],
[
126559,
126571
],
[
217569,
217581
],
[
217713,
217725
],
[
217869,
217881
]
],
[
[
661,
674
],
[
46960,
46973
],
[
47053,
47066
]
],
[
[
704,
728
],
[
42747,
42771
],
[
42834,
42858
],
[
42987,
43011
]
],
[
[
758,
770
],
[
179777,
179789
],
[
181929,
181941
],
[
183829,
183841
],
[
205969,
205981
],
[
207370,
207382
],
[
208216,
208228
],
[
209291,
209303
],
[
210344,
210356
],
[
211015,
211027
],
[
211990,
212002
],
[
212973,
212985
],
[
214243,
214255
],
[
221603,
221615
]
],
[
[
800,
814
],
[
43551,
43565
],
[
217145,
217159
],
[
219802,
219816
]
],
[
[
844,
861
],
[
44025,
44042
],
[
44357,
44374
]
],
[
[
891,
911
],
[
43232,
43252
],
[
43419,
43439
],
[
43668,
43688
],
[
43800,
43820
],
[
44466,
44486
]
],
[
[
941,
954
],
[
42306,
42319
]
],
[
[
984,
996
],
[
44639,
44651
]
],
[
[
1040,
1048
],
[
49147,
49155
],
[
49509,
49517
],
[
123971,
123979
],
[
124205,
124213
]
],
[
[
1079,
1086
],
[
80969,
80976
],
[
80996,
81003
],
[
117502,
117509
],
[
141572,
141579
],
[
186720,
186727
],
[
189456,
189463
],
[
189610,
189617
],
[
189629,
189636
],
[
190119,
190126
],
[
190289,
190296
],
[
190422,
190429
],
[
190522,
190529
],
[
190541,
190548
],
[
190646,
190653
],
[
190737,
190744
],
[
190756,
190763
],
[
191939,
191946
],
[
192577,
192584
],
[
192596,
192603
],
[
192615,
192622
],
[
192739,
192746
],
[
192758,
192765
],
[
193456,
193463
],
[
193600,
193607
],
[
193703,
193710
],
[
193777,
193784
],
[
193811,
193818
],
[
193921,
193928
],
[
194048,
194055
],
[
194542,
194549
],
[
194686,
194693
],
[
194789,
194796
],
[
194859,
194866
],
[
194949,
194956
],
[
195044,
195051
],
[
195063,
195070
],
[
195082,
195089
],
[
195195,
195202
],
[
195467,
195474
],
[
195597,
195604
],
[
195698,
195705
],
[
199045,
199052
],
[
199523,
199530
],
[
200811,
200818
],
[
200913,
200920
],
[
201045,
201052
],
[
201129,
201136
],
[
201241,
201248
],
[
201324,
201331
],
[
201343,
201350
],
[
201770,
201777
],
[
201914,
201921
],
[
202017,
202024
],
[
202091,
202098
],
[
202176,
202183
],
[
202195,
202202
],
[
202242,
202249
],
[
202363,
202370
],
[
202382,
202389
],
[
203075,
203082
],
[
203259,
203266
],
[
203413,
203420
],
[
203501,
203508
],
[
203520,
203527
],
[
203793,
203800
],
[
203812,
203819
],
[
203831,
203838
],
[
203955,
203962
],
[
203974,
203981
],
[
219364,
219371
]
],
[
[
1095,
1102
],
[
1177,
1184
]
]
] |
from arm.logicnode.arm_nodes import *
class SetTransformNode(ArmLogicTreeNode):
"""Use to set the transform of an object."""
bl_idname = 'LNSetTransformNode'
bl_label = 'Set Object Transform'
arm_version = 1
def init(self, context):
super(SetTransformNode, self).init(context)
self.add_input('ArmNodeSocketAction', 'In')
self.add_input('ArmNodeSocketObject', 'Object')
self.add_input('NodeSocketShader', 'Transform')
self.add_output('ArmNodeSocketAction', 'Out')
add_node(SetTransformNode, category=PKG_AS_CATEGORY)
| [
[
[
36,
37
],
[
62,
78
],
[
526,
534
],
[
562,
577
]
],
[
[
45,
61
],
[
535,
551
],
[
269,
285
]
]
] |
# SPDX-FileCopyrightText: 2017 Scott Shawcroft, written for Adafruit Industries
# SPDX-FileCopyrightText: Copyright (c) 2021 Jose David M. for circuitpython
#
# SPDX-License-Identifier: MIT
"""A setuptools based setup module.
See:
https://packaging.python.org/en/latest/distributing.html
https://github.com/pypa/sampleproject
"""
from setuptools import setup, find_packages
# To use a consistent encoding
from codecs import open
from os import path
here = path.abspath(path.dirname(__file__))
# Get the long description from the README file
with open(path.join(here, "README.rst"), encoding="utf-8") as f:
long_description = f.read()
setup(
# Community Bundle Information
name="circuitpython-displayio-cartesian",
use_scm_version=True,
setup_requires=["setuptools_scm"],
description="A cartesian plane widget for displaying graphical information.",
long_description=long_description,
long_description_content_type="text/x-rst",
# The project's main homepage.
url="https://github.com/circuitpython/CircuitPython_Org_DisplayIO_Cartesian.git",
# Author details
author="Jose David M.",
author_email="",
install_requires=[
"Adafruit-Blinka",
"adafruit-circuitpython-display-text",
"adafruit-circuitpython-displayio-layout",
],
# Choose your license
license="MIT",
# See https://pypi.python.org/pypi?%3Aaction=list_classifiers
classifiers=[
"Development Status :: 3 - Alpha",
"Intended Audience :: Developers",
"Topic :: Software Development :: Libraries",
"Topic :: System :: Hardware",
"License :: OSI Approved :: MIT License",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
],
# What does your project relate to?
keywords="adafruit blinka circuitpython micropython displayio_cartesian displayio widget "
"graphics gui graph chart graphic",
# You can just specify the packages manually here if your project is
# simple. Or you can use find_packages().
# TODO: IF LIBRARY FILES ARE A PACKAGE FOLDER,
# CHANGE `py_modules=['...']` TO `packages=['...']`
py_modules=["displayio_cartesian"],
)
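# Build/usage sketch (standard setuptools workflow, not specific to this repo):
#   pip install .          # install locally
#   python -m build        # produce sdist/wheel (requires the "build" package)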
| [
[
[
356,
361
],
[
645,
650
]
],
[
[
363,
376
]
],
[
[
428,
432
],
[
552,
556
]
],
[
[
448,
452
],
[
461,
465
],
[
474,
478
],
[
557,
561
]
],
[
[
454,
458
],
[
567,
571
]
],
[
[
609,
610
],
[
635,
636
]
],
[
[
616,
632
],
[
901,
917
]
]
] |
from ssmpfwd.helpers import verify_plugin_version, verbose_debug_quiet, time_decorator
from unittest.mock import MagicMock, patch
import unittest
class TestVerifyPluginVersion(unittest.TestCase):
@patch("ssmpfwd.helpers.subprocess")
def test_verify_plugin_version_success(self, mock_subprocess):
result = mock_subprocess.run()
result.stdout = b"9.8.3"
self.assertTrue(verify_plugin_version("9.8.3"))
@patch("ssmpfwd.helpers.subprocess")
def test_verify_plugin_version_fail(self, mock_subprocess):
with self.assertLogs("ssmpfwd.helpers", level="INFO") as cm:
result = mock_subprocess.run()
result.stdout = b"1.8.1"
self.assertFalse(verify_plugin_version("9.2.3"))
self.assertEqual(cm.output[0], "ERROR:ssmpfwd.helpers:session-manager-plugin version 1.8.1 is installed, 9.2.3 is required")
class TestVerboseDebugQuiet(unittest.TestCase):
import logging
def setUp(self):
@verbose_debug_quiet
def test_func():
pass
self.vdq = test_func
self.vdq()
def test_quiet(self):
option_name = "quiet"
        self.assertTrue(any([p.name == option_name for p in self.vdq.__click_params__]), msg=f"Cannot find {option_name} in option parameters")
def test_debug(self):
flag_value = self.logging.DEBUG
        self.assertTrue(any([p.flag_value == flag_value for p in self.vdq.__click_params__]), msg=f"Cannot find {flag_value} in option flag values")
def test_verbose(self):
flag_value = self.logging.INFO
        self.assertTrue(any([p.flag_value == flag_value for p in self.vdq.__click_params__]), msg=f"Cannot find {flag_value} in option flag values")
def test_default_loglevel(self):
flag_value = self.logging.WARN
        self.assertTrue(any([p.flag_value == flag_value for p in self.vdq.__click_params__]), msg=f"Cannot find {flag_value} in option flag values")
class TestTimeDecorator(unittest.TestCase):
from time import sleep
def setUp(self):
@time_decorator
def test_func():
self.sleep(0.5)
self.time_decorated_method = test_func
    def test_time_decorator(self):
with self.assertLogs("ssmpfwd.helpers", level="INFO") as cm:
self.time_decorated_method()
self.assertEqual(cm.output[0], "INFO:ssmpfwd.helpers:[*] starting test_func")
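# Illustrative reference (hypothetical -- not the real ssmpfwd.helpers code) of
# the subprocess interaction mocked above: run the plugin binary, read its
# version from stdout, and do a minimum-version comparison.
import subprocess
def _reference_verify_plugin_version(required: str) -> bool:
    result = subprocess.run(["session-manager-plugin", "--version"], capture_output=True)
    installed = result.stdout.decode().strip()
    def as_tuple(v):
        return tuple(int(p) for p in v.split("."))
    return as_tuple(installed) >= as_tuple(required)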
| [
[
[
28,
49
],
[
412,
433
],
[
735,
756
]
],
[
[
51,
70
],
[
1012,
1031
]
],
[
[
72,
86
],
[
2119,
2133
]
],
[
[
114,
123
]
],
[
[
125,
130
],
[
209,
214
],
[
452,
457
]
],
[
[
139,
147
],
[
183,
191
],
[
938,
946
],
[
2037,
2045
]
],
[
[
159,
182
]
],
[
[
916,
937
]
],
[
[
2019,
2036
]
]
] |
import numpy as np
import matplotlib.pyplot as plt
import pint
# Use the same registry
from main import ureg
ureg.setup_matplotlib(True)
from uncertainties import ufloat, umath, unumpy
import pandas as pd
from scipy.signal import find_peaks
from scipy.integrate import simpson
from scipy.optimize import curve_fit
plt.rcParams['text.usetex'] = True
amp = 700*ureg.mV
R=ufloat(0.82, 0.82*0.1)*ureg.ohm
df = pd.read_csv("./ESRB.csv")
# The I0_modulation signal is horrible, the system was too noisy, so instead of:
#
# I0_modulation = (unumpy.uarray(
# df['V_modulation_raw'].values,
# df['V_modulation_err'].values
# )*ureg.mV/R).to('ampere')
#
# we regenerate it, assuming it should be linear, just as V_DC is.
I0_modulation = (unumpy.uarray(np.linspace(
df['V_modulation_raw'].min(),
df['V_modulation_raw'].max(),
len(df)
), df['V_modulation_err'].mean())*ureg.mV/R).to('ampere')
ptp_Y = unumpy.uarray(
df['ptp_Y_raw'].values*df['phase_sign'].values,
df['ptp_Y_err'].values
)*ureg.mV
ptp_X_modulation = ufloat(3.09, 0.01)*ureg.mV
fig, ax = plt.subplots()
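# For a pint Quantity wrapping an uncertainties ufloat, .m is the bare
# magnitude (the ufloat itself) and .n/.s are its nominal value and standard
# deviation, so the comprehensions below split values from errors.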
I0_modulation_err = np.array([val.m.s for val in I0_modulation])
I0_modulation_raw = np.array([val.m.n for val in I0_modulation])
ptp_ratio = ptp_Y/ptp_X_modulation
absorption_derivative = ptp_ratio/max(ptp_ratio)
absorption_derivative_raw = np.array([val.m.n for val in absorption_derivative])
absorption_derivative_err = np.array([val.m.s for val in absorption_derivative])
ax.errorbar(
I0_modulation_raw*ureg.ampere,
    absorption_derivative_raw, # Dimensionless
fmt='.',
    yerr=absorption_derivative_err,
    # TODO: Mention in report that the error is too big to be plotted
    #xerr=I0_modulation_err,
# TODO: Is this the correct label?
    label='Absorption Derivative'
)
def lorentzian_dif_fit(I, I0, gamma, amplitude):
return amplitude*(-2*(gamma**2)*(I - I0))/ \
(gamma**2 + (I - I0)**2)**2
def lorentzian_fit(I, I0, gamma, amplitude):
return amplitude*gamma**2/\
(gamma**2 + (I - I0)**2)**2
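# Consistency note (an observation, not a claim about the original intent):
# lorentzian_dif_fit is exactly d/dI of amplitude*gamma**2/(gamma**2 + (I - I0)**2),
# i.e. the derivative of a plain Lorentzian, whereas lorentzian_fit squares the
# denominator. That extra power may be why the Lorentzian drawn below needs the
# ad-hoc x800 amplification to match the data visually.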
##### By MATLAB:
# Goodness of fit:
# SSE: 0.197
# R-square: 0.9845
# Adjusted R-square: 0.9838
# RMSE: 0.06769
# I0 gamma amplitude
matlab_p0 = [0.5479, 0.03847, 0.05554]
matlab_bounds=((0.547, 0.03672, 0.05304),
(0.5488, 0.04021, 0.05805))
I_rf = ufloat(matlab_p0[0], abs(matlab_bounds[0][0] - matlab_p0[0]))*ureg.ampere
I_hwhm = ufloat(matlab_p0[1], abs(matlab_bounds[0][1] - matlab_p0[1]))*ureg.ampere
from main import g_times_bohr
# TODO: Take this value from Itamar & Tomer
H_RF = ufloat(34.914, 0.009)*ureg.gauss
k = H_RF/I_rf
# Convert current I to frequency f using all of the constants
def I2f(I):
return (I*k*g_times_bohr/ureg.planck_constant).to('megahertz')
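# Assuming g_times_bohr = g*mu_B (as the name suggests), the conversion above
# encodes the ESR resonance condition h*f = g*mu_B*H with H = k*I; pint
# carries the unit bookkeeping.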
f0_modulation = I2f(I0_modulation)
f_rf = I2f(I_rf)
f_hwhm = I2f(I_hwhm)
T2 = (1/f_hwhm).to('nanosecond')
##### A failing Python fit attempt - I consider it a failure because it hits
##### the bounds :/
# popt, pcov = curve_fit(
#     lorentzian_dif_fit, absorption_derivative_raw, I0_modulation_raw,
# p0=matlab_p0, bounds=matlab_bounds
# )
# lorentzian_dif_fit_points = lorentzian_dif_fit(I0_modulation_raw, *popt)
# ax.plot(
# I0_modulation_raw*ureg.ampere,
# lorentzian_dif_fit_points,
# label="Python fit"
# )
I0_modulation_seq = np.linspace(
I0_modulation.min().m.n,
I0_modulation.max().m.n,
len(I0_modulation)*100
)
ax.plot(
I0_modulation_seq*ureg.ampere,
lorentzian_dif_fit(I0_modulation_seq, I_rf.m.n, I_hwhm.m.n, matlab_p0[2]),
label="Matlab fit"
)
ax.set_yticks([])
axt = ax.twiny()
axt.grid(linestyle='--')
axt.set_yticks([])
f0_modulation_seq = np.linspace(
f0_modulation.min().m.n,
f0_modulation.max().m.n,
len(f0_modulation)*100
)
def lorentzian_wrapper(f0):
    # For some reason this needs to be amplified by a factor of 800 so it will
# look good.
return lorentzian_fit(f0, f_rf.m.n, f_hwhm.m.n, matlab_p0[2]*800)
axt.plot(
f0_modulation_seq*ureg.megahertz,
lorentzian_wrapper(f0_modulation_seq),
label = "Lorenzian fit", color='green'
)
axt.set_xticks(
[(f_rf - f_hwhm).m.n, f_rf.m.n, (f_rf + f_hwhm).m.n],
['', '$f_{rf}$', '']
)
axt.set_xlabel('')
axt.arrow(
length_includes_head = True,
x = (f_rf - f_hwhm).m.n*ureg.megahertz,
y = lorentzian_wrapper((f_rf - f_hwhm).m.n),
dx = 2*f_hwhm.m.n*ureg.megahertz,
dy = 0,
head_length = f_hwhm.m.n/10,
head_width = matlab_p0[2],
label="Full Width Half Max",
)
axt.arrow(
length_includes_head = True,
x = (f_rf + f_hwhm).m.n*ureg.megahertz,
y = lorentzian_wrapper((f_rf + f_hwhm).m.n),
dx = -2*f_hwhm.m.n*ureg.megahertz,
head_length = f_hwhm.m.n/10,
head_width = matlab_p0[2],
dy = 0,
)
axt.text(
0.5, 0.63,
# (f_hwhm.m.n/10),
# lorentzian_wrapper((f0 - f_hwhm).m.n)*2,
"FWHM",
transform=ax.transAxes,
# fontsize=00
)
ax.legend(loc='upper right')
# axt.legend(loc='upper left')
plt.show()
fig.savefig("ESRB.pgf")
fig.savefig("ESRB.png")
# TODO: Integrate numerically, or fit to a Lorentzian's derivative
# TODO: Scale the x axis to frequency and find the width of the Lorentzian in
# the frequency scale
| [
[
[
7,
18
],
[
752,
754
],
[
1108,
1110
],
[
1173,
1175
],
[
1332,
1334
],
[
1415,
1417
],
[
3335,
3337
],
[
3682,
3684
]
],
[
[
26,
50
],
[
314,
317
],
[
1073,
1076
],
[
4991,
4994
]
],
[
[
58,
62
]
],
[
[
104,
108
],
[
109,
113
],
[
360,
364
],
[
393,
397
],
[
879,
883
],
[
1008,
1012
],
[
1054,
1058
],
[
1504,
1508
],
[
2401,
2405
],
[
2484,
2488
],
[
2600,
2604
],
[
3466,
3470
],
[
4008,
4012
],
[
4304,
4308
],
[
4391,
4395
],
[
4590,
4594
],
[
4678,
4682
],
[
2729,
2733
]
],
[
[
163,
169
],
[
370,
376
],
[
1035,
1041
],
[
2339,
2345
],
[
2422,
2428
],
[
2578,
2584
]
],
[
[
171,
176
]
],
[
[
178,
184
],
[
738,
744
],
[
912,
918
]
],
[
[
192,
204
],
[
408,
410
]
],
[
[
230,
240
]
],
[
[
269,
276
]
],
[
[
304,
313
]
],
[
[
350,
353
]
],
[
[
368,
369
],
[
887,
888
]
],
[
[
403,
405
],
[
769,
771
],
[
803,
805
],
[
841,
843
],
[
848,
850
],
[
931,
933
],
[
954,
956
],
[
983,
985
]
],
[
[
721,
734
],
[
1137,
1150
],
[
1202,
1215
],
[
2788,
2801
],
[
3352,
3365
],
[
3381,
3394
],
[
3414,
3427
]
],
[
[
904,
909
],
[
1230,
1235
]
],
[
[
1016,
1032
],
[
1236,
1252
]
],
[
[
1063,
1066
],
[
5002,
5005
],
[
5026,
5029
]
],
[
[
1068,
1070
],
[
1469,
1471
],
[
3435,
3437
],
[
3583,
3585
],
[
3607,
3609
],
[
4895,
4897
],
[
4930,
4932
]
],
[
[
1088,
1105
]
],
[
[
1153,
1170
],
[
1486,
1503
]
],
[
[
1218,
1227
],
[
1278,
1287
],
[
1292,
1301
]
],
[
[
1253,
1275
],
[
1361,
1383
],
[
1444,
1466
]
],
[
[
1303,
1329
],
[
1521,
1547
]
],
[
[
1386,
1412
],
[
1587,
1613
]
],
[
[
1790,
1808
],
[
3483,
3501
]
],
[
[
1928,
1942
],
[
3917,
3931
]
],
[
[
2201,
2210
],
[
2346,
2355
],
[
2386,
2395
],
[
2429,
2438
],
[
2469,
2478
],
[
3543,
3552
],
[
4469,
4478
],
[
4744,
4753
],
[
3958,
3967
]
],
[
[
2243,
2256
],
[
2364,
2377
],
[
2447,
2460
]
],
[
[
2332,
2336
],
[
2620,
2624
],
[
2814,
2818
],
[
3521,
3525
]
],
[
[
2413,
2419
],
[
2833,
2839
],
[
3531,
3537
]
],
[
[
2514,
2526
],
[
2716,
2728
]
],
[
[
2571,
2575
],
[
2615,
2619
]
],
[
[
2611,
2612
],
[
2714,
2715
]
],
[
[
2692,
2695
],
[
2784,
2787
],
[
2810,
2813
],
[
2829,
2832
]
],
[
[
2768,
2781
],
[
3699,
3712
],
[
3728,
3741
],
[
3761,
3774
]
],
[
[
2803,
2807
],
[
4134,
4138
],
[
4154,
4158
],
[
4165,
4169
],
[
4285,
4289
],
[
4348,
4352
],
[
4571,
4575
],
[
4634,
4638
],
[
3936,
3940
]
],
[
[
2820,
2826
],
[
2849,
2855
],
[
4141,
4147
],
[
4172,
4178
],
[
4292,
4298
],
[
4355,
4361
],
[
4380,
4386
],
[
4437,
4443
],
[
4578,
4584
],
[
4641,
4647
],
[
4667,
4673
],
[
4712,
4718
],
[
3946,
3952
]
],
[
[
2841,
2843
]
],
[
[
3315,
3332
],
[
3448,
3465
],
[
3502,
3519
]
],
[
[
3601,
3604
],
[
3618,
3621
],
[
3643,
3646
],
[
3976,
3979
],
[
4112,
4115
],
[
4213,
4216
],
[
4232,
4235
],
[
4518,
4521
],
[
4772,
4775
]
],
[
[
3662,
3679
],
[
3990,
4007
],
[
4047,
4064
]
],
[
[
3786,
3804
],
[
4028,
4046
],
[
4328,
4346
],
[
4614,
4632
]
]
] |
from flask import Flask, request, redirect
from twilio.twiml.messaging_response import MessagingResponse
from get_secrets import *
app = Flask(__name__)
# The route path "/sms" is an assumption; Twilio only needs the number's
# configured webhook URL to match whatever path is used here.
@app.route("/sms", methods=["GET", "POST"])
def main():
    resp = MessagingResponse()
    resp.message("You have reached the DogBot. Thanks for contacting us :)")
    return str(resp)
if __name__ == "__main__":
    app.run()
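# Point the Twilio number's messaging webhook at this endpoint, e.g.
# http://<public-host>:5000/sms (Flask's default port; ngrok works for testing).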
| [
[
[
18,
23
]
],
[
[
25,
32
]
],
[
[
34,
42
]
],
[
[
87,
104
],
[
155,
172
]
],
[
[
129,
130
]
],
[
[
136,
140
],
[
308,
312
]
]
] |
from __future__ import annotations
from enum import IntEnum
class Algorithm(IntEnum):
"""
https://developers.yubico.com/YubiHSM2/Concepts/Algorithms.html
"""
RSA_PKCS1_SHA1 = 1
RSA_PKCS1_SHA256 = 2
RSA_PKCS1_SHA384 = 3
RSA_PKCS1_SHA512 = 4
RSA_PSS_SHA1 = 5
RSA_PSS_SHA256 = 6
RSA_PSS_SHA384 = 7
RSA_PSS_SHA512 = 8
RSA_2048 = 9
RSA_3072 = 10
RSA_4096 = 11
EC_P256 = 12
EC_P384 = 13
EC_P521 = 14
EC_K256 = 15
EC_BP256 = 16
EC_BP384 = 17
EC_BP512 = 18
HMAC_SHA1 = 19
HMAC_SHA256 = 20
HMAC_SHA384 = 21
HMAC_SHA512 = 22
ECDSA_SHA1 = 23
EC_ECDH = 24
RSA_OAEP_SHA1 = 25
RSA_OAEP_SHA256 = 26
RSA_OAEP_SHA384 = 27
RSA_OAEP_SHA512 = 28
AES128_CCM_WRAP = 29
Opaque_Data = 30
Opaque_X509_Certificate = 31
MGF1_SHA1 = 32
MGF1_SHA256 = 33
MGF1_SHA384 = 34
MGF1_SHA512 = 35
SSH_Template = 36
Yubico_OTP_AES128 = 37
Yubico_AES_Authentication = 38
Yubico_OTP_AES192 = 39
Yubico_OTP_AES256 = 40
AES192_CCM_WRAP = 41
AES256_CCM_WRAP = 42
ECDSA_SHA256 = 43
ECDSA_SHA384 = 44
ECDSA_SHA512 = 45
ED25519 = 46
EC_P224 = 47
class Capability(IntEnum):
"""
https://developers.yubico.com/YubiHSM2/Concepts/Capability.html
"""
GetOpaque = 0
PutOpaque = 1
PutAuthenticationKey = 2
PutAsymmetricKey = 3
GenerateAsymmetricKey = 4
SignPkcs = 5
SignPss = 6
SignEcdsa = 7
SignEddsa = 8
DecryptPkcs = 9
DecryptOaep = 10
DeriveEcdh = 11
ExportWrapped = 12
ImportWrapped = 13
PutWrapKey = 14
GenerateWrapKey = 15
ExportableUnderWrap = 16
SetOption = 17
GetOption = 18
GetPseudoRandom = 19
PutMacKey = 20
GenerateHmacKey = 21
SignHmac = 22
VerifyHmac = 23
GetLogEntries = 24
SignSshCertificate = 25
GetTemplate = 26
PutTemplate = 27
ResetDevice = 28
DecryptOtp = 29
CreateOtpAead = 30
RandomizeOtpAead = 31
RewrapFromOtpAeadKey = 32
RewrapToOtpAeadKey = 33
SignAttestationCertificate = 34
PutOtpAeadKey = 35
GenerateOtpAeadKey = 36
WrapData = 37
UnwrapData = 38
DeleteOpaque = 39
DeleteAuthenticationKey = 40
DeleteAsymmetricKey = 41
DeleteWrapKey = 42
DeleteHmacKey = 43
DeleteTemplate = 44
DeleteOtpAeadKey = 45
ChangeAuthenticationKey = 46
class Command(IntEnum):
"""
https://developers.yubico.com/YubiHSM2/Commands/
"""
Echo = 0x01
CreateSession = 0x03
AuthenticateSession = 0x04
SessionMessage = 0x05
GetDeviceInfo = 0x06
ResetDevice = 0x08
CloseSession = 0x40
GetStorageInfo = 0x41
PutOpaque = 0x42
GetOpaque = 0x43
PutAuthenticationKey = 0x44
PutAsymmetricKey = 0x45
GenerateAsymmetricKey = 0x46
SignPkcs1 = 0x47
ListObjects = 0x48
DecryptPkcs1 = 0x49
ExportWrapped = 0x4A
ImportWrapped = 0x4B
PutWrapKey = 0x4C
GetLogEntries = 0x4D
GetObjectInfo = 0x4E
SetOption = 0x4F
GetOption = 0x50
GetPseudoRandom = 0x51
PutHmacKey = 0x52
SignHmac = 0x53
GetPublicKey = 0x54
SignPss = 0x55
SignEcdsa = 0x56
DeriveEcdh = 0x57
DeleteObject = 0x58
DecryptOaep = 0x59
GenerateHmacKey = 0x5A
GenerateWrapKey = 0x5B
VerifyHmac = 0x5C
SignSshCertificate = 0x5D
PutTemplate = 0x5E
GetTemplate = 0x5F
DecryptOtp = 0x60
CreateOtpAead = 0x61
RandomizeOtpAead = 0x62
RewrapOtpAead = 0x63
SignAttestationCertificate = 0x64
PutOtpAeadKey = 0x65
GenerateOtpAeadKey = 0x66
SetLogIndex = 0x67
WrapData = 0x68
UnwrapData = 0x69
SignEddsa = 0x6A
BlinkDevice = 0x6B
ChangeAuthenticationKey = 0x6C
Error = 0x7F
class Error(IntEnum):
"""
https://developers.yubico.com/YubiHSM2/Concepts/Errors.html
"""
OK = 0x00
INVALID_COMMAND = 0x01
INVALID_DATA = 0x02
INVALID_SESSION = 0x03
AUTHENTICATION_FAILED = 0x04
SESSIONS_FULL = 0x05
SESSION_FAILED = 0x06
STORAGE_FAILED = 0x07
WRONG_LENGTH = 0x08
INSUFFICIENT_PERMISSIONS = 0x09
LOG_FULL = 0x0A
OBJECT_NOT_FOUND = 0x0B
INVALID_ID = 0x0C
SSH_CA_CONSTRAINT_VIOLATION = 0x0E
INVALID_OTP = 0x0F
DEMO_MODE = 0x10
OBJECT_EXISTS = 0x11
class ObjectType(IntEnum):
"""
https://developers.yubico.com/YubiHSM2/Concepts/Object.html
"""
Opaque = 0x01
AuthenticationKey = 0x02
AsymmetricKey = 0x03
WrapKey = 0x04
HmacKey = 0x05
Template = 0x06
OtpAeadKey = 0x07
class Option(IntEnum):
"""
https://developers.yubico.com/YubiHSM2/Concepts/Options.html
"""
ForceAudit = 0x01
CommandAudit = 0x03
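# Usage sketch: the 1-byte command / 2-byte big-endian length framing below
# follows the YubiHSM2 command docs, but verify it before relying on it.
def _frame(command: Command, payload: bytes) -> bytes:
    return bytes([command]) + len(payload).to_bytes(2, "big") + payload
# _frame(Command.Echo, b"hello") == b"\x01\x00\x05hello"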
| [
[
[
23,
34
]
],
[
[
53,
60
],
[
79,
86
],
[
1222,
1229
],
[
2427,
2434
],
[
3792,
3799
],
[
4342,
4349
],
[
4600,
4607
]
],
[
[
69,
78
]
],
[
[
1211,
1221
]
],
[
[
2419,
2426
]
],
[
[
3786,
3791
]
],
[
[
4331,
4341
]
],
[
[
4593,
4599
]
]
] |
import unittest.mock
from functools import partial
import bokeh.core.properties as bp
import param
import pytest
from bokeh.document import Document
from bokeh.io.doc import patch_curdoc
from bokeh.models import Div
from panel.layout import Tabs, WidgetBox
from panel.reactive import Reactive, ReactiveHTML
from panel.viewable import Viewable
from panel.widgets import (
Checkbox, IntInput, StaticText, TextInput,
)
def test_reactive_default_title():
doc = ReactiveHTML().server_doc()
assert doc.title == 'Panel Application'
def test_reactive_servable_title():
doc = Document()
session_context = unittest.mock.Mock()
with patch_curdoc(doc):
doc._session_context = lambda: session_context
ReactiveHTML().servable(title='A')
ReactiveHTML().servable(title='B')
assert doc.title == 'B'
def test_link():
"Link two Reactive objects"
class ReactiveLink(Reactive):
a = param.Parameter()
obj = ReactiveLink()
obj2 = ReactiveLink()
obj.link(obj2, a='a')
obj.a = 1
assert obj.a == 1
assert obj2.a == 1
def test_param_rename():
"Test that Reactive renames params and properties"
class ReactiveRename(Reactive):
a = param.Parameter()
_rename = {'a': 'b'}
obj = ReactiveRename()
params = obj._process_property_change({'b': 1})
assert params == {'a': 1}
properties = obj._process_param_change({'a': 1})
assert properties == {'b': 1}
def test_link_properties_nb(document, comm):
class ReactiveLink(Reactive):
text = param.String(default='A')
obj = ReactiveLink()
div = Div()
# Link property and check bokeh js property callback is defined
obj._link_props(div, ['text'], document, div, comm)
assert 'text' in div._callbacks
# Assert callback is set up correctly
cb = div._callbacks['text'][0]
assert isinstance(cb, partial)
assert cb.args == (document, div.ref['id'], comm, None)
assert cb.func == obj._comm_change
def test_link_properties_server(document):
class ReactiveLink(Reactive):
text = param.String(default='A')
obj = ReactiveLink()
div = Div()
# Link property and check bokeh callback is defined
obj._link_props(div, ['text'], document, div)
assert 'text' in div._callbacks
# Assert callback is set up correctly
cb = div._callbacks['text'][0]
assert isinstance(cb, partial)
assert cb.args == (document, div.ref['id'], None)
assert cb.func == obj._server_change
def test_text_input_controls():
text_input = TextInput()
controls = text_input.controls()
assert isinstance(controls, Tabs)
assert len(controls) == 2
wb1, wb2 = controls
assert isinstance(wb1, WidgetBox)
assert len(wb1) == 6
name, disabled, *(ws) = wb1
assert isinstance(name, StaticText)
assert isinstance(disabled, Checkbox)
not_checked = []
for w in ws:
if w.name == 'Value':
assert isinstance(w, TextInput)
text_input.value = "New value"
assert w.value == "New value"
elif w.name == 'Value input':
assert isinstance(w, TextInput)
elif w.name == 'Placeholder':
assert isinstance(w, TextInput)
text_input.placeholder = "Test placeholder..."
assert w.value == "Test placeholder..."
elif w.name == 'Max length':
assert isinstance(w, IntInput)
else:
not_checked.append(w)
assert not not_checked
assert isinstance(wb2, WidgetBox)
assert len(wb2) == len(list(Viewable.param)) + 1
def test_text_input_controls_explicit():
text_input = TextInput()
controls = text_input.controls(['placeholder', 'disabled'])
assert isinstance(controls, WidgetBox)
assert len(controls) == 3
name, disabled, placeholder = controls
assert isinstance(name, StaticText)
assert isinstance(disabled, Checkbox)
assert isinstance(placeholder, TextInput)
text_input.disabled = True
assert disabled.value
text_input.placeholder = "Test placeholder..."
assert placeholder.value == "Test placeholder..."
def test_reactive_html_basic():
class Test(ReactiveHTML):
int = param.Integer(default=3, doc='An integer')
float = param.Number(default=3.14, doc='A float')
_template = '<div id="div" width=${int}></div>'
data_model = Test._data_model
assert data_model.__name__ == 'Test1'
properties = data_model.properties()
assert 'int' in properties
assert 'float' in properties
int_prop = data_model.lookup('int')
assert isinstance(int_prop.property, bp.Int)
assert int_prop.class_default(data_model) == 3
float_prop = data_model.lookup('float')
assert isinstance(float_prop.property, bp.Float)
assert float_prop.class_default(data_model) == 3.14
assert Test._node_callbacks == {}
test = Test()
root = test.get_root()
assert test._attrs == {'div': [('width', ['int'], '{int}')]}
assert root.callbacks == {}
assert root.events == {}
def test_reactive_html_no_id_param_error():
with pytest.raises(ValueError) as excinfo:
class Test(ReactiveHTML):
width = param.Number(default=200)
_template = '<div width=${width}></div>'
assert "Found <div> node with the `width` attribute referencing the `width` parameter." in str(excinfo.value)
def test_reactive_html_no_id_method_error():
with pytest.raises(ValueError) as excinfo:
class Test(ReactiveHTML):
_template = '<div onclick=${_onclick}></div>'
def _onclick(self):
pass
assert "Found <div> node with the `onclick` callback referencing the `_onclick` method." in str(excinfo.value)
def test_reactive_html_dom_events():
class TestDOMEvents(ReactiveHTML):
int = param.Integer(default=3, doc='An integer')
float = param.Number(default=3.14, doc='A float')
_template = '<div id="div" width=${int}></div>'
_dom_events = {'div': ['change']}
data_model = TestDOMEvents._data_model
assert data_model.__name__ == 'TestDOMEvents1'
properties = data_model.properties()
assert 'int' in properties
assert 'float' in properties
int_prop = data_model.lookup('int')
assert isinstance(int_prop.property, bp.Int)
assert int_prop.class_default(data_model) == 3
float_prop = data_model.lookup('float')
assert isinstance(float_prop.property, bp.Float)
assert float_prop.class_default(data_model) == 3.14
assert TestDOMEvents._node_callbacks == {}
test = TestDOMEvents()
root = test.get_root()
assert test._attrs == {'div': [('width', ['int'], '{int}')]}
assert root.callbacks == {}
assert root.events == {'div': {'change': True}}
def test_reactive_html_inline():
class TestInline(ReactiveHTML):
int = param.Integer(default=3, doc='An integer')
_template = '<div id="div" onchange=${_div_change} width=${int}></div>'
def _div_change(self, event):
pass
data_model = TestInline._data_model
assert data_model.__name__ == 'TestInline1'
properties = data_model.properties()
assert 'int' in properties
int_prop = data_model.lookup('int')
assert isinstance(int_prop.property, bp.Int)
assert int_prop.class_default(data_model) == 3
assert TestInline._node_callbacks == {'div': [('onchange', '_div_change')]}
assert TestInline._inline_callbacks == [('div', 'onchange', '_div_change')]
test = TestInline()
root = test.get_root()
assert test._attrs == {
'div': [
('onchange', [], '{_div_change}'),
('width', ['int'], '{int}')
]
}
assert root.callbacks == {'div': [('onchange', '_div_change')]}
assert root.events == {}
test.on_event('div', 'click', print)
assert root.events == {'div': {'click': False}}
def test_reactive_html_children():
class TestChildren(ReactiveHTML):
children = param.List(default=[])
_template = '<div id="div">${children}</div>'
assert TestChildren._node_callbacks == {}
assert TestChildren._inline_callbacks == []
assert TestChildren._parser.children == {'div': 'children'}
widget = TextInput()
test = TestChildren(children=[widget])
root = test.get_root()
assert test._attrs == {}
assert root.children == {'div': [widget._models[root.ref['id']][0]]}
assert len(widget._models) == 1
assert test._panes == {'children': [widget]}
widget_new = TextInput()
test.children = [widget_new]
assert len(widget._models) == 0
assert root.children == {'div': [widget_new._models[root.ref['id']][0]]}
assert test._panes == {'children': [widget_new]}
test._cleanup(root)
assert len(test._models) == 0
assert len(widget_new._models) == 0
def test_reactive_html_templated_children():
class TestTemplatedChildren(ReactiveHTML):
children = param.List(default=[])
_template = """
<select id="select">
{% for option in children %}
<option id="option-{{ loop.index0 }}">${children[{{ loop.index0 }}]}</option>
{% endfor %}
</div>
"""
assert TestTemplatedChildren._node_callbacks == {}
assert TestTemplatedChildren._inline_callbacks == []
assert TestTemplatedChildren._parser.children == {'option': 'children'}
widget = TextInput()
test = TestTemplatedChildren(children=[widget])
root = test.get_root()
assert test._attrs == {}
assert root.looped == ['option']
assert root.children == {'option': [widget._models[root.ref['id']][0]]}
assert test._panes == {'children': [widget]}
widget_new = TextInput()
test.children = [widget_new]
assert len(widget._models) == 0
assert root.children == {'option': [widget_new._models[root.ref['id']][0]]}
assert test._panes == {'children': [widget_new]}
def test_reactive_html_templated_dict_children():
class TestTemplatedChildren(ReactiveHTML):
children = param.Dict(default={})
_template = """
<select id="select">
{% for key, option in children.items() %}
<option id="option-{{ loop.index0 }}">${children[{{ key }}]}</option>
{% endfor %}
</div>
"""
assert TestTemplatedChildren._node_callbacks == {}
assert TestTemplatedChildren._inline_callbacks == []
assert TestTemplatedChildren._parser.children == {'option': 'children'}
widget = TextInput()
test = TestTemplatedChildren(children={'test': widget})
root = test.get_root()
assert test._attrs == {}
assert root.looped == ['option']
assert root.children == {'option': [widget._models[root.ref['id']][0]]}
assert test._panes == {'children': [widget]}
widget_model = widget._models[root.ref['id']][0]
widget_new = TextInput()
test.children = {'test': widget_new, 'test2': widget}
assert len(widget._models) == 1
assert root.children == {
'option': [
widget_new._models[root.ref['id']][0],
widget_model
]
}
assert test._panes == {'children': [widget_new, widget]}
def test_reactive_html_templated_children_add_loop_id():
class TestTemplatedChildren(ReactiveHTML):
children = param.List(default=[])
_template = """
<select id="select">
{%- for option in children %}
<option id="option">${children[{{ loop.index0 }}]}</option>
{%- endfor %}
</select>
"""
assert TestTemplatedChildren._node_callbacks == {}
assert TestTemplatedChildren._inline_callbacks == []
assert TestTemplatedChildren._parser.children == {'option': 'children'}
test = TestTemplatedChildren(children=['A', 'B', 'C'])
assert test._get_template()[0] == """
<select id="select-${id}">
<option id="option-0-${id}"></option>
<option id="option-1-${id}"></option>
<option id="option-2-${id}"></option>
</select>
"""
model = test.get_root()
assert test._attrs == {}
assert model.looped == ['option']
def test_reactive_html_templated_children_add_loop_id_and_for_loop_var():
class TestTemplatedChildren(ReactiveHTML):
children = param.List(default=[])
_template = """
<select id="select">
{%- for option in children %}
<option id="option">${option}</option>
{%- endfor %}
</select>
"""
assert TestTemplatedChildren._node_callbacks == {}
assert TestTemplatedChildren._inline_callbacks == []
assert TestTemplatedChildren._parser.children == {'option': 'children'}
test = TestTemplatedChildren(children=['A', 'B', 'C'])
assert test._get_template()[0] == """
<select id="select-${id}">
<option id="option-0-${id}"></option>
<option id="option-1-${id}"></option>
<option id="option-2-${id}"></option>
</select>
"""
model = test.get_root()
assert test._attrs == {}
assert model.looped == ['option']
@pytest.mark.parametrize('operator', ['', '+', '-', '*', '\\', '%', '**', '>>', '<<', '>>>', '&', '^', '&&', '||', '??'])
@pytest.mark.parametrize('sep', [' ', ''])
def test_reactive_html_scripts_linked_properties_assignment_operator(operator, sep):
class TestScripts(ReactiveHTML):
clicks = param.Integer()
_template = "<div id='test'></div>"
_scripts = {'render': f'test.onclick = () => {{ data.clicks{sep}{operator}= 1 }}'}
assert TestScripts()._linked_properties() == ['clicks']
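# (The parametrized test above checks that `_linked_properties` detects a data
# property as linked for every JS assignment-operator spelling, with or
# without a space before the operator.)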
| [
[
[
7,
20
],
[
626,
634
]
],
[
[
44,
51
],
[
1911,
1918
],
[
2430,
2437
]
],
[
[
60,
87
],
[
4679,
4681
],
[
4826,
4828
],
[
6379,
6381
],
[
6526,
6528
],
[
7354,
7356
]
],
[
[
95,
100
],
[
945,
950
],
[
1233,
1238
],
[
1578,
1583
],
[
2115,
2120
],
[
4255,
4260
],
[
4315,
4320
],
[
5250,
5255
],
[
5894,
5899
],
[
5954,
5959
],
[
6930,
6935
],
[
8061,
8066
],
[
9024,
9029
],
[
10109,
10114
],
[
11362,
11367
],
[
12340,
12345
],
[
13461,
13466
]
],
[
[
108,
114
],
[
13155,
13161
],
[
13278,
13284
],
[
5158,
5164
],
[
5501,
5507
]
],
[
[
143,
151
],
[
592,
600
]
],
[
[
177,
189
],
[
657,
669
]
],
[
[
215,
218
],
[
1640,
1643
],
[
2177,
2180
]
],
[
[
245,
249
],
[
2668,
2672
]
],
[
[
251,
260
],
[
2755,
2764
],
[
3563,
3572
],
[
3798,
3807
]
],
[
[
288,
296
],
[
921,
929
],
[
1209,
1217
],
[
1551,
1559
],
[
2088,
2096
]
],
[
[
298,
310
],
[
471,
483
],
[
739,
751
],
[
782,
794
],
[
4225,
4237
],
[
5215,
5227
],
[
5558,
5570
],
[
5864,
5876
],
[
6900,
6912
],
[
8026,
8038
],
[
8989,
9001
],
[
10074,
10086
],
[
11327,
11339
],
[
12305,
12317
],
[
13428,
13440
]
],
[
[
338,
346
],
[
3606,
3614
]
],
[
[
379,
387
],
[
2896,
2904
],
[
3955,
3963
]
],
[
[
389,
397
],
[
3449,
3457
]
],
[
[
399,
409
],
[
2852,
2862
],
[
3911,
3921
]
],
[
[
411,
420
],
[
2585,
2594
],
[
3008,
3017
],
[
3175,
3184
],
[
3257,
3266
],
[
3688,
3697
],
[
4000,
4009
],
[
8312,
8321
],
[
8599,
8608
],
[
9475,
9484
],
[
9775,
9784
],
[
10565,
10574
],
[
10926,
10935
]
],
[
[
430,
457
]
],
[
[
550,
578
]
],
[
[
852,
861
]
],
[
[
1107,
1124
]
],
[
[
1486,
1509
]
],
[
[
2025,
2052
]
],
[
[
2540,
2564
]
],
[
[
3634,
3667
]
],
[
[
4181,
4205
]
],
[
[
5108,
5144
]
],
[
[
5450,
5487
]
],
[
[
5806,
5835
]
],
[
[
6850,
6875
]
],
[
[
7971,
7998
]
],
[
[
8915,
8952
]
],
[
[
9995,
10037
]
],
[
[
11241,
11290
]
],
[
[
12202,
12268
]
],
[
[
13324,
13388
]
]
] |
# Copyright © 2019 Province of British Columbia
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""This module holds data for ppr queue event tracking."""
from __future__ import annotations
from mhr_api.models import utils as model_utils
from mhr_api.utils.base import BaseEnum
from .db import db
class EventTracking(db.Model): # pylint: disable=too-many-instance-attributes
"""This class manages all of the event tracking information."""
class EventTrackingTypes(BaseEnum):
"""Render an Enum of the event tracking types."""
SEARCH_REPORT = 'SEARCH_REPORT'
API_NOTIFICATION = 'API_NOTIFICATION'
EMAIL = 'EMAIL'
SURFACE_MAIL = 'SURFACE_MAIL'
EMAIL_REPORT = 'EMAIL_REPORT'
REGISTRATION_REPORT = 'REGISTRATION_REPORT'
__tablename__ = 'event_tracking'
id = db.Column('id', db.Integer, db.Sequence('event_tracking_id_seq'), primary_key=True)
key_id = db.Column('key_id', db.Integer, nullable=False, index=True)
event_ts = db.Column('event_ts', db.DateTime, nullable=False, index=True)
event_tracking_type = db.Column('event_tracking_type', db.String(20),
db.ForeignKey('event_tracking_types.event_tracking_type'),
nullable=False, index=True)
status = db.Column('status', db.Integer, nullable=True)
message = db.Column('message', db.String(2000), nullable=True)
email_id = db.Column('email_address', db.String(250), nullable=True)
    # Relationships - EventTrackingType
tracking_type = db.relationship('EventTrackingType', foreign_keys=[event_tracking_type],
back_populates='event_tracking', cascade='all, delete', uselist=False)
def save(self):
"""Save the object to the database immediately."""
db.session.add(self)
db.session.commit()
@property
def json(self) -> dict:
"""Return the event tracking record as a json object."""
event_tracking = {
'eventTrackingId': self.id,
'keyId': self.key_id,
'type': self.event_tracking_type,
'createDateTime': model_utils.format_ts(self.event_ts)
}
if self.status:
event_tracking['status'] = self.status
if self.message:
event_tracking['message'] = self.message
if self.email_id:
event_tracking['emailAddress'] = self.email_id
return event_tracking
@classmethod
def find_by_id(cls, event_id: int):
"""Return a tracking object by ID."""
if event_id:
return cls.query.get(event_id)
return None
@classmethod
def find_by_key_id(cls, key_id: int):
"""Return a list of event tracking objects by key id."""
event_tracking = None
if key_id:
event_tracking = cls.query.filter(EventTracking.key_id == key_id) \
.order_by(EventTracking.id).all()
return event_tracking
@classmethod
def find_by_key_id_type(cls, key_id: int, event_tracking_type: str, extra_key: str = None):
"""Return a list of event tracking objects by key id and event tracking type."""
event_tracking = None
if key_id and event_tracking_type:
event_tracking = cls.query.filter(EventTracking.key_id == key_id,
EventTracking.event_tracking_type == event_tracking_type) \
.order_by(EventTracking.id).all()
if event_tracking is not None and extra_key:
events = []
for event in event_tracking:
if event.message and event.message.find(extra_key) > 0:
events.append(event)
return events
return event_tracking
@staticmethod
def create(key_id: int, event_type: str, status: int = None, message: str = None):
"""Create an EventTracking record."""
event_tracking = EventTracking(key_id=key_id, event_tracking_type=event_type, status=status, message=message)
event_tracking.event_ts = model_utils.now_ts()
event_tracking.save()
return event_tracking
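    # Usage sketch (assumes an app context with an active SQLAlchemy session):
    #   event = EventTracking.create(key_id=200000001,
    #                                event_type=EventTracking.EventTrackingTypes.EMAIL.value,
    #                                status=200, message='queued')
    #   print(event.json)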
| [
[
[
676,
687
]
],
[
[
716,
736
],
[
2656,
2667
],
[
4666,
4677
]
],
[
[
768,
776
],
[
976,
984
]
],
[
[
794,
796
],
[
819,
821
],
[
1332,
1334
],
[
1348,
1350
],
[
1360,
1362
],
[
1429,
1431
],
[
1449,
1451
],
[
1504,
1506
],
[
1526,
1528
],
[
1593,
1595
],
[
1626,
1628
],
[
1677,
1679
],
[
1813,
1815
],
[
1833,
1835
],
[
1874,
1876
],
[
1895,
1897
],
[
1942,
1944
],
[
1969,
1971
],
[
2054,
2056
],
[
2322,
2324
],
[
2351,
2353
]
],
[
[
805,
818
],
[
3381,
3394
],
[
3463,
3476
],
[
3840,
3853
],
[
3918,
3931
],
[
4026,
4039
],
[
4539,
4552
]
]
] |
import unittest
from hydrus.core import HydrusConstants as HC
from hydrus.core import HydrusData
from hydrus.core import HydrusSerialisable
from hydrus.client import ClientApplicationCommand as CAC
from hydrus.client import ClientConstants as CC
from hydrus.client import ClientData
from hydrus.client import ClientDefaults
from hydrus.client import ClientDuplicates
from hydrus.client import ClientSearch
from hydrus.client.gui import ClientGUIShortcuts
from hydrus.client.importing import ClientImportOptions
from hydrus.client.importing import ClientImportSubscriptions
from hydrus.client.importing import ClientImportSubscriptionQuery
from hydrus.client.media import ClientMedia
from hydrus.client.media import ClientMediaManagers
from hydrus.client.media import ClientMediaResult
from hydrus.client.metadata import ClientTags
from hydrus.test import TestController as TC
class TestSerialisables( unittest.TestCase ):
def _dump_and_load_and_test( self, obj, test_func ):
serialisable_tuple = obj.GetSerialisableTuple()
self.assertIsInstance( serialisable_tuple, tuple )
if isinstance( obj, HydrusSerialisable.SerialisableBaseNamed ):
( serialisable_type, name, version, serialisable_info ) = serialisable_tuple
elif isinstance( obj, HydrusSerialisable.SerialisableBase ):
( serialisable_type, version, serialisable_info ) = serialisable_tuple
self.assertEqual( serialisable_type, obj.SERIALISABLE_TYPE )
self.assertEqual( version, obj.SERIALISABLE_VERSION )
dupe_obj = HydrusSerialisable.CreateFromSerialisableTuple( serialisable_tuple )
self.assertIsNot( obj, dupe_obj )
test_func( obj, dupe_obj )
#
json_string = obj.DumpToString()
self.assertIsInstance( json_string, str )
dupe_obj = HydrusSerialisable.CreateFromString( json_string )
self.assertIsNot( obj, dupe_obj )
test_func( obj, dupe_obj )
#
network_bytes = obj.DumpToNetworkBytes()
self.assertIsInstance( network_bytes, bytes )
dupe_obj = HydrusSerialisable.CreateFromNetworkBytes( network_bytes )
self.assertIsNot( obj, dupe_obj )
test_func( obj, dupe_obj )
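    # (The helper above round-trips each object three ways -- typed tuple,
    # JSON string, and network bytes -- and re-runs the same equality test.)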
def test_basics( self ):
def test( obj, dupe_obj ):
self.assertEqual( len( list(obj.items()) ), len( list(dupe_obj.items()) ) )
for ( key, value ) in list(obj.items()):
self.assertEqual( value, dupe_obj[ key ] )
#
d = HydrusSerialisable.SerialisableDictionary()
d[ 1 ] = 2
d[ 3 ] = 'test1'
d[ 'test2' ] = 4
d[ 'test3' ] = 5
d[ 6 ] = HydrusSerialisable.SerialisableDictionary( { i : 'test' + str( i ) for i in range( 20 ) } )
d[ ClientSearch.Predicate( ClientSearch.PREDICATE_TYPE_TAG, 'test pred 1' ) ] = 56
d[ ClientSearch.Predicate( ClientSearch.PREDICATE_TYPE_TAG, 'test pred 2' ) ] = HydrusSerialisable.SerialisableList( [ ClientSearch.Predicate( ClientSearch.PREDICATE_TYPE_TAG, 'test' + str( i ) ) for i in range( 10 ) ] )
self.assertEqual( len( list(d.keys()) ), 7 )
for ( key, value ) in list(d.items()):
self.assertEqual( d[ key ], value )
self._dump_and_load_and_test( d, test )
#
db = HydrusSerialisable.SerialisableBytesDictionary()
db[ HydrusData.GenerateKey() ] = HydrusData.GenerateKey()
db[ HydrusData.GenerateKey() ] = [ HydrusData.GenerateKey() for i in range( 10 ) ]
db[ 1 ] = HydrusData.GenerateKey()
db[ 2 ] = [ HydrusData.GenerateKey() for i in range( 10 ) ]
self.assertEqual( len( list(db.keys()) ), 4 )
for ( key, value ) in list(db.items()):
self.assertEqual( db[ key ], value )
self._dump_and_load_and_test( db, test )
def test_SERIALISABLE_TYPE_APPLICATION_COMMAND( self ):
def test( obj, dupe_obj ):
self.assertEqual( obj.GetCommandType(), dupe_obj.GetCommandType() )
self.assertEqual( obj.GetData(), dupe_obj.GetData() )
acs = []
acs.append( ( CAC.ApplicationCommand( CAC.APPLICATION_COMMAND_TYPE_SIMPLE, CAC.SIMPLE_ARCHIVE_FILE ), 'archive file' ) )
acs.append( ( CAC.ApplicationCommand( CAC.APPLICATION_COMMAND_TYPE_CONTENT, ( HydrusData.GenerateKey(), HC.CONTENT_TYPE_MAPPINGS, HC.CONTENT_UPDATE_FLIP, 'test' ) ), 'flip on/off mappings "test" for unknown service!' ) )
acs.append( ( CAC.ApplicationCommand( CAC.APPLICATION_COMMAND_TYPE_CONTENT, ( CC.DEFAULT_LOCAL_TAG_SERVICE_KEY, HC.CONTENT_TYPE_MAPPINGS, HC.CONTENT_UPDATE_FLIP, 'test' ) ), 'flip on/off mappings "test" for my tags' ) )
acs.append( ( CAC.ApplicationCommand( CAC.APPLICATION_COMMAND_TYPE_CONTENT, ( HydrusData.GenerateKey(), HC.CONTENT_TYPE_RATINGS, HC.CONTENT_UPDATE_SET, 0.4 ) ), 'set ratings uncertain rating, "0.4" for unknown service!' ) )
for ( ac, s ) in acs:
self._dump_and_load_and_test( ac, test )
self.assertEqual( ac.ToString(), s )
def test_SERIALISABLE_TYPE_DUPLICATE_ACTION_OPTIONS( self ):
def test( obj, dupe_obj ):
self.assertEqual( obj.ToTuple(), dupe_obj.ToTuple() )
duplicate_action_options_delete_and_move = ClientDuplicates.DuplicateActionOptions( [ ( CC.DEFAULT_LOCAL_TAG_SERVICE_KEY, HC.CONTENT_MERGE_ACTION_MOVE, ClientTags.TagFilter() ) ], [ ( TC.LOCAL_RATING_LIKE_SERVICE_KEY, HC.CONTENT_MERGE_ACTION_MOVE ), ( TC.LOCAL_RATING_NUMERICAL_SERVICE_KEY, HC.CONTENT_MERGE_ACTION_MOVE ) ] )
duplicate_action_options_copy = ClientDuplicates.DuplicateActionOptions( [ ( CC.DEFAULT_LOCAL_TAG_SERVICE_KEY, HC.CONTENT_MERGE_ACTION_COPY, ClientTags.TagFilter() ) ], [ ( TC.LOCAL_RATING_LIKE_SERVICE_KEY, HC.CONTENT_MERGE_ACTION_COPY ), ( TC.LOCAL_RATING_NUMERICAL_SERVICE_KEY, HC.CONTENT_MERGE_ACTION_COPY ) ] )
duplicate_action_options_merge = ClientDuplicates.DuplicateActionOptions( [ ( CC.DEFAULT_LOCAL_TAG_SERVICE_KEY, HC.CONTENT_MERGE_ACTION_TWO_WAY_MERGE, ClientTags.TagFilter() ) ], [ ( TC.LOCAL_RATING_LIKE_SERVICE_KEY, HC.CONTENT_MERGE_ACTION_TWO_WAY_MERGE ), ( TC.LOCAL_RATING_NUMERICAL_SERVICE_KEY, HC.CONTENT_MERGE_ACTION_TWO_WAY_MERGE ) ] )
inbox = True
size = 40960
mime = HC.IMAGE_JPEG
width = 640
height = 480
duration = None
num_frames = None
has_audio = False
num_words = None
local_locations_manager = ClientMediaManagers.LocationsManager( { CC.LOCAL_FILE_SERVICE_KEY, CC.COMBINED_LOCAL_FILE_SERVICE_KEY }, set(), set(), set(), inbox )
trash_locations_manager = ClientMediaManagers.LocationsManager( { CC.TRASH_SERVICE_KEY, CC.COMBINED_LOCAL_FILE_SERVICE_KEY }, set(), set(), set(), inbox )
deleted_locations_manager = ClientMediaManagers.LocationsManager( set(), { CC.COMBINED_LOCAL_FILE_SERVICE_KEY }, set(), set(), inbox )
# duplicate to generate proper dicts
one_tags_manager = ClientMediaManagers.TagsManager( { CC.DEFAULT_LOCAL_TAG_SERVICE_KEY : { HC.CONTENT_STATUS_CURRENT : { 'one' } } }, { CC.DEFAULT_LOCAL_TAG_SERVICE_KEY : { HC.CONTENT_STATUS_CURRENT : { 'one' } } } ).Duplicate()
two_tags_manager = ClientMediaManagers.TagsManager( { CC.DEFAULT_LOCAL_TAG_SERVICE_KEY : { HC.CONTENT_STATUS_CURRENT : { 'two' } } }, { CC.DEFAULT_LOCAL_TAG_SERVICE_KEY : { HC.CONTENT_STATUS_CURRENT : { 'two' } } } ).Duplicate()
substantial_tags_manager = ClientMediaManagers.TagsManager( { CC.DEFAULT_LOCAL_TAG_SERVICE_KEY : { HC.CONTENT_STATUS_CURRENT : { 'test tag', 'series:namespaced test tag' } } }, { CC.DEFAULT_LOCAL_TAG_SERVICE_KEY : { HC.CONTENT_STATUS_CURRENT : { 'test tag', 'series:namespaced test tag' } } } ).Duplicate()
empty_tags_manager = ClientMediaManagers.TagsManager( {}, {} ).Duplicate()
one_ratings_manager = ClientMediaManagers.RatingsManager( { TC.LOCAL_RATING_LIKE_SERVICE_KEY : 1.0, TC.LOCAL_RATING_NUMERICAL_SERVICE_KEY : 0.8 } )
two_ratings_manager = ClientMediaManagers.RatingsManager( { TC.LOCAL_RATING_LIKE_SERVICE_KEY : 0.0, TC.LOCAL_RATING_NUMERICAL_SERVICE_KEY : 0.6 } )
substantial_ratings_manager = ClientMediaManagers.RatingsManager( { TC.LOCAL_RATING_LIKE_SERVICE_KEY : 1.0, TC.LOCAL_RATING_NUMERICAL_SERVICE_KEY : 0.8 } )
empty_ratings_manager = ClientMediaManagers.RatingsManager( {} )
notes_manager = ClientMediaManagers.NotesManager( {} )
file_viewing_stats_manager = ClientMediaManagers.FileViewingStatsManager.STATICGenerateEmptyManager()
#
local_hash_has_values = HydrusData.GenerateKey()
file_info_manager = ClientMediaManagers.FileInfoManager( 1, local_hash_has_values, size, mime, width, height, duration, num_frames, has_audio, num_words )
media_result = ClientMediaResult.MediaResult( file_info_manager, substantial_tags_manager, local_locations_manager, substantial_ratings_manager, notes_manager, file_viewing_stats_manager )
local_media_has_values = ClientMedia.MediaSingleton( media_result )
#
other_local_hash_has_values = HydrusData.GenerateKey()
file_info_manager = ClientMediaManagers.FileInfoManager( 2, other_local_hash_has_values, size, mime, width, height, duration, num_frames, has_audio, num_words )
media_result = ClientMediaResult.MediaResult( file_info_manager, substantial_tags_manager, local_locations_manager, substantial_ratings_manager, notes_manager, file_viewing_stats_manager )
other_local_media_has_values = ClientMedia.MediaSingleton( media_result )
#
local_hash_empty = HydrusData.GenerateKey()
file_info_manager = ClientMediaManagers.FileInfoManager( 3, local_hash_empty, size, mime, width, height, duration, num_frames, has_audio, num_words )
media_result = ClientMediaResult.MediaResult( file_info_manager, empty_tags_manager, local_locations_manager, empty_ratings_manager, notes_manager, file_viewing_stats_manager )
local_media_empty = ClientMedia.MediaSingleton( media_result )
#
trashed_hash_empty = HydrusData.GenerateKey()
file_info_manager = ClientMediaManagers.FileInfoManager( 4, trashed_hash_empty, size, mime, width, height, duration, num_frames, has_audio, num_words )
media_result = ClientMediaResult.MediaResult( file_info_manager, empty_tags_manager, trash_locations_manager, empty_ratings_manager, notes_manager, file_viewing_stats_manager )
trashed_media_empty = ClientMedia.MediaSingleton( media_result )
#
deleted_hash_empty = HydrusData.GenerateKey()
file_info_manager = ClientMediaManagers.FileInfoManager( 5, deleted_hash_empty, size, mime, width, height, duration, num_frames, has_audio, num_words )
media_result = ClientMediaResult.MediaResult( file_info_manager, empty_tags_manager, deleted_locations_manager, empty_ratings_manager, notes_manager, file_viewing_stats_manager )
deleted_media_empty = ClientMedia.MediaSingleton( media_result )
#
one_hash = HydrusData.GenerateKey()
file_info_manager = ClientMediaManagers.FileInfoManager( 6, one_hash, size, mime, width, height, duration, num_frames, has_audio, num_words )
media_result = ClientMediaResult.MediaResult( file_info_manager, one_tags_manager, local_locations_manager, one_ratings_manager, notes_manager, file_viewing_stats_manager )
one_media = ClientMedia.MediaSingleton( media_result )
#
two_hash = HydrusData.GenerateKey()
file_info_manager = ClientMediaManagers.FileInfoManager( 7, two_hash, size, mime, width, height, duration, num_frames, has_audio, num_words )
media_result = ClientMediaResult.MediaResult( file_info_manager, two_tags_manager, local_locations_manager, two_ratings_manager, notes_manager, file_viewing_stats_manager )
two_media = ClientMedia.MediaSingleton( media_result )
#
self._dump_and_load_and_test( duplicate_action_options_delete_and_move, test )
self._dump_and_load_and_test( duplicate_action_options_copy, test )
self._dump_and_load_and_test( duplicate_action_options_merge, test )
#
def assertSCUEqual( one, two ):
self.assertEqual( TC.ConvertServiceKeysToContentUpdatesToComparable( one ), TC.ConvertServiceKeysToContentUpdatesToComparable( two ) )
file_deletion_reason = 'test delete'
#
result = duplicate_action_options_delete_and_move.ProcessPairIntoContentUpdates( local_media_has_values, local_media_empty, delete_second = True, file_deletion_reason = file_deletion_reason )
scu = {}
scu[ CC.LOCAL_FILE_SERVICE_KEY ] = [ HydrusData.ContentUpdate( HC.CONTENT_TYPE_FILES, HC.CONTENT_UPDATE_DELETE, { local_hash_empty }, reason = file_deletion_reason ) ]
assertSCUEqual( result, scu )
#
result = duplicate_action_options_delete_and_move.ProcessPairIntoContentUpdates( local_media_has_values, trashed_media_empty, delete_second = True, file_deletion_reason = file_deletion_reason )
scu = {}
scu[ CC.TRASH_SERVICE_KEY ] = [ HydrusData.ContentUpdate( HC.CONTENT_TYPE_FILES, HC.CONTENT_UPDATE_DELETE, { trashed_hash_empty }, reason = file_deletion_reason ) ]
assertSCUEqual( result, scu )
#
result = duplicate_action_options_delete_and_move.ProcessPairIntoContentUpdates( local_media_has_values, deleted_media_empty, delete_second = True, file_deletion_reason = file_deletion_reason )
self.assertEqual( result, {} )
#
result = duplicate_action_options_delete_and_move.ProcessPairIntoContentUpdates( local_media_has_values, other_local_media_has_values, delete_second = True, file_deletion_reason = file_deletion_reason )
scu = {}
scu[ CC.DEFAULT_LOCAL_TAG_SERVICE_KEY ] = [ HydrusData.ContentUpdate( HC.CONTENT_TYPE_MAPPINGS, HC.CONTENT_UPDATE_DELETE, ( 'test tag', { other_local_hash_has_values } ) ), HydrusData.ContentUpdate( HC.CONTENT_TYPE_MAPPINGS, HC.CONTENT_UPDATE_DELETE, ( 'series:namespaced test tag', { other_local_hash_has_values } ) ) ]
scu[ TC.LOCAL_RATING_LIKE_SERVICE_KEY ] = [ HydrusData.ContentUpdate( HC.CONTENT_TYPE_RATINGS, HC.CONTENT_UPDATE_ADD, ( None, { other_local_hash_has_values } ) ) ]
scu[ TC.LOCAL_RATING_NUMERICAL_SERVICE_KEY ] = [ HydrusData.ContentUpdate( HC.CONTENT_TYPE_RATINGS, HC.CONTENT_UPDATE_ADD, ( None, { other_local_hash_has_values } ) ) ]
scu[ CC.LOCAL_FILE_SERVICE_KEY ] = [ HydrusData.ContentUpdate( HC.CONTENT_TYPE_FILES, HC.CONTENT_UPDATE_DELETE, { other_local_hash_has_values }, reason = file_deletion_reason ) ]
assertSCUEqual( result, scu )
#
result = duplicate_action_options_delete_and_move.ProcessPairIntoContentUpdates( local_media_empty, other_local_media_has_values, delete_second = True, file_deletion_reason = file_deletion_reason )
scu = {}
scu[ CC.DEFAULT_LOCAL_TAG_SERVICE_KEY ] = [ HydrusData.ContentUpdate( HC.CONTENT_TYPE_MAPPINGS, HC.CONTENT_UPDATE_ADD, ( 'test tag', { local_hash_empty } ) ), HydrusData.ContentUpdate( HC.CONTENT_TYPE_MAPPINGS, HC.CONTENT_UPDATE_ADD, ( 'series:namespaced test tag', { local_hash_empty } ) ), HydrusData.ContentUpdate( HC.CONTENT_TYPE_MAPPINGS, HC.CONTENT_UPDATE_DELETE, ( 'test tag', { other_local_hash_has_values } ) ), HydrusData.ContentUpdate( HC.CONTENT_TYPE_MAPPINGS, HC.CONTENT_UPDATE_DELETE, ( 'series:namespaced test tag', { other_local_hash_has_values } ) ) ]
scu[ TC.LOCAL_RATING_LIKE_SERVICE_KEY ] = [ HydrusData.ContentUpdate( HC.CONTENT_TYPE_RATINGS, HC.CONTENT_UPDATE_ADD, ( 1.0, { local_hash_empty } ) ), HydrusData.ContentUpdate( HC.CONTENT_TYPE_RATINGS, HC.CONTENT_UPDATE_ADD, ( None, { other_local_hash_has_values } ) ) ]
scu[ TC.LOCAL_RATING_NUMERICAL_SERVICE_KEY ] = [ HydrusData.ContentUpdate( HC.CONTENT_TYPE_RATINGS, HC.CONTENT_UPDATE_ADD, ( 0.8, { local_hash_empty } ) ), HydrusData.ContentUpdate( HC.CONTENT_TYPE_RATINGS, HC.CONTENT_UPDATE_ADD, ( None, { other_local_hash_has_values } ) ) ]
scu[ CC.LOCAL_FILE_SERVICE_KEY ] = [ HydrusData.ContentUpdate( HC.CONTENT_TYPE_FILES, HC.CONTENT_UPDATE_DELETE, { other_local_hash_has_values }, reason = file_deletion_reason ) ]
assertSCUEqual( result, scu )
#
#
result = duplicate_action_options_copy.ProcessPairIntoContentUpdates( local_media_has_values, local_media_empty, file_deletion_reason = file_deletion_reason )
self.assertEqual( result, {} )
#
result = duplicate_action_options_copy.ProcessPairIntoContentUpdates( local_media_empty, other_local_media_has_values, file_deletion_reason = file_deletion_reason )
scu = {}
scu[ CC.DEFAULT_LOCAL_TAG_SERVICE_KEY ] = [ HydrusData.ContentUpdate( HC.CONTENT_TYPE_MAPPINGS, HC.CONTENT_UPDATE_ADD, ( 'test tag', { local_hash_empty } ) ), HydrusData.ContentUpdate( HC.CONTENT_TYPE_MAPPINGS, HC.CONTENT_UPDATE_ADD, ( 'series:namespaced test tag', { local_hash_empty } ) ) ]
scu[ TC.LOCAL_RATING_LIKE_SERVICE_KEY ] = [ HydrusData.ContentUpdate( HC.CONTENT_TYPE_RATINGS, HC.CONTENT_UPDATE_ADD, ( 1.0, { local_hash_empty } ) ) ]
scu[ TC.LOCAL_RATING_NUMERICAL_SERVICE_KEY ] = [ HydrusData.ContentUpdate( HC.CONTENT_TYPE_RATINGS, HC.CONTENT_UPDATE_ADD, ( 0.8, { local_hash_empty } ) ) ]
assertSCUEqual( result, scu )
#
#
result = duplicate_action_options_merge.ProcessPairIntoContentUpdates( local_media_has_values, local_media_empty, file_deletion_reason = file_deletion_reason )
scu = {}
scu[ CC.DEFAULT_LOCAL_TAG_SERVICE_KEY ] = [ HydrusData.ContentUpdate( HC.CONTENT_TYPE_MAPPINGS, HC.CONTENT_UPDATE_ADD, ( 'test tag', { local_hash_empty } ) ), HydrusData.ContentUpdate( HC.CONTENT_TYPE_MAPPINGS, HC.CONTENT_UPDATE_ADD, ( 'series:namespaced test tag', { local_hash_empty } ) ) ]
scu[ TC.LOCAL_RATING_LIKE_SERVICE_KEY ] = [ HydrusData.ContentUpdate( HC.CONTENT_TYPE_RATINGS, HC.CONTENT_UPDATE_ADD, ( 1.0, { local_hash_empty } ) ) ]
scu[ TC.LOCAL_RATING_NUMERICAL_SERVICE_KEY ] = [ HydrusData.ContentUpdate( HC.CONTENT_TYPE_RATINGS, HC.CONTENT_UPDATE_ADD, ( 0.8, { local_hash_empty } ) ) ]
assertSCUEqual( result, scu )
#
result = duplicate_action_options_merge.ProcessPairIntoContentUpdates( local_media_empty, other_local_media_has_values, file_deletion_reason = file_deletion_reason )
scu = {}
scu[ CC.DEFAULT_LOCAL_TAG_SERVICE_KEY ] = [ HydrusData.ContentUpdate( HC.CONTENT_TYPE_MAPPINGS, HC.CONTENT_UPDATE_ADD, ( 'test tag', { local_hash_empty } ) ), HydrusData.ContentUpdate( HC.CONTENT_TYPE_MAPPINGS, HC.CONTENT_UPDATE_ADD, ( 'series:namespaced test tag', { local_hash_empty } ) ) ]
scu[ TC.LOCAL_RATING_LIKE_SERVICE_KEY ] = [ HydrusData.ContentUpdate( HC.CONTENT_TYPE_RATINGS, HC.CONTENT_UPDATE_ADD, ( 1.0, { local_hash_empty } ) ) ]
scu[ TC.LOCAL_RATING_NUMERICAL_SERVICE_KEY ] = [ HydrusData.ContentUpdate( HC.CONTENT_TYPE_RATINGS, HC.CONTENT_UPDATE_ADD, ( 0.8, { local_hash_empty } ) ) ]
assertSCUEqual( result, scu )
#
result = duplicate_action_options_merge.ProcessPairIntoContentUpdates( one_media, two_media, file_deletion_reason = file_deletion_reason )
scu = {}
scu[ CC.DEFAULT_LOCAL_TAG_SERVICE_KEY ] = [ HydrusData.ContentUpdate( HC.CONTENT_TYPE_MAPPINGS, HC.CONTENT_UPDATE_ADD, ( 'one', { two_hash } ) ), HydrusData.ContentUpdate( HC.CONTENT_TYPE_MAPPINGS, HC.CONTENT_UPDATE_ADD, ( 'two', { one_hash } ) ) ]
scu[ TC.LOCAL_RATING_LIKE_SERVICE_KEY ] = [ HydrusData.ContentUpdate( HC.CONTENT_TYPE_RATINGS, HC.CONTENT_UPDATE_ADD, ( 1.0, { two_hash } ) ) ]
scu[ TC.LOCAL_RATING_NUMERICAL_SERVICE_KEY ] = [ HydrusData.ContentUpdate( HC.CONTENT_TYPE_RATINGS, HC.CONTENT_UPDATE_ADD, ( 0.8, { two_hash } ) ) ]
assertSCUEqual( result, scu )
def test_SERIALISABLE_TYPE_SHORTCUT( self ):
def test( obj, dupe_obj ):
self.assertEqual( dupe_obj.__hash__(), ( dupe_obj.shortcut_type, dupe_obj.shortcut_key, dupe_obj.shortcut_press_type, tuple( dupe_obj.modifiers ) ).__hash__() )
self.assertEqual( obj, dupe_obj )
shortcuts = []
shortcuts.append( ( ClientGUIShortcuts.Shortcut(), 'f7' ) )
shortcuts.append( ( ClientGUIShortcuts.Shortcut( ClientGUIShortcuts.SHORTCUT_TYPE_KEYBOARD_SPECIAL, ClientGUIShortcuts.SHORTCUT_KEY_SPECIAL_SPACE, ClientGUIShortcuts.SHORTCUT_PRESS_TYPE_PRESS, [] ), 'space' ) )
shortcuts.append( ( ClientGUIShortcuts.Shortcut( ClientGUIShortcuts.SHORTCUT_TYPE_KEYBOARD_CHARACTER, ord( 'a' ), ClientGUIShortcuts.SHORTCUT_PRESS_TYPE_PRESS, [ ClientGUIShortcuts.SHORTCUT_MODIFIER_CTRL ] ), 'ctrl+a' ) )
shortcuts.append( ( ClientGUIShortcuts.Shortcut( ClientGUIShortcuts.SHORTCUT_TYPE_KEYBOARD_CHARACTER, ord( 'A' ), ClientGUIShortcuts.SHORTCUT_PRESS_TYPE_PRESS, [ ClientGUIShortcuts.SHORTCUT_MODIFIER_CTRL ] ), 'ctrl+a' ) )
shortcuts.append( ( ClientGUIShortcuts.Shortcut( ClientGUIShortcuts.SHORTCUT_TYPE_KEYBOARD_SPECIAL, ClientGUIShortcuts.SHORTCUT_KEY_SPECIAL_HOME, ClientGUIShortcuts.SHORTCUT_PRESS_TYPE_PRESS, [ ClientGUIShortcuts.SHORTCUT_MODIFIER_ALT, ClientGUIShortcuts.SHORTCUT_MODIFIER_CTRL ] ), 'ctrl+alt+home' ) )
shortcuts.append( ( ClientGUIShortcuts.Shortcut( ClientGUIShortcuts.SHORTCUT_TYPE_MOUSE, ClientGUIShortcuts.SHORTCUT_MOUSE_LEFT, ClientGUIShortcuts.SHORTCUT_PRESS_TYPE_PRESS, [] ), 'left-click' ) )
shortcuts.append( ( ClientGUIShortcuts.Shortcut( ClientGUIShortcuts.SHORTCUT_TYPE_MOUSE, ClientGUIShortcuts.SHORTCUT_MOUSE_MIDDLE, ClientGUIShortcuts.SHORTCUT_PRESS_TYPE_PRESS, [ ClientGUIShortcuts.SHORTCUT_MODIFIER_CTRL ] ), 'ctrl+middle-click' ) )
shortcuts.append( ( ClientGUIShortcuts.Shortcut( ClientGUIShortcuts.SHORTCUT_TYPE_MOUSE, ClientGUIShortcuts.SHORTCUT_MOUSE_SCROLL_DOWN, ClientGUIShortcuts.SHORTCUT_PRESS_TYPE_PRESS, [ ClientGUIShortcuts.SHORTCUT_MODIFIER_ALT, ClientGUIShortcuts.SHORTCUT_MODIFIER_SHIFT ] ), 'alt+shift+scroll down' ) )
for ( shortcut, s ) in shortcuts:
self._dump_and_load_and_test( shortcut, test )
self.assertEqual( shortcut.ToString(), s )
def test_SERIALISABLE_TYPE_SHORTCUT_SET( self ):
def test( obj, dupe_obj ):
for ( shortcut, command ) in obj:
self.assertEqual( dupe_obj.GetCommand( shortcut ).GetData(), command.GetData() )
default_shortcuts = ClientDefaults.GetDefaultShortcuts()
for shortcuts in default_shortcuts:
self._dump_and_load_and_test( shortcuts, test )
command_1 = CAC.ApplicationCommand( CAC.APPLICATION_COMMAND_TYPE_SIMPLE, CAC.SIMPLE_ARCHIVE_FILE )
command_2 = CAC.ApplicationCommand( CAC.APPLICATION_COMMAND_TYPE_CONTENT, ( HydrusData.GenerateKey(), HC.CONTENT_TYPE_MAPPINGS, HC.CONTENT_UPDATE_FLIP, 'test' ) )
command_3 = CAC.ApplicationCommand( CAC.APPLICATION_COMMAND_TYPE_CONTENT, ( CC.DEFAULT_LOCAL_TAG_SERVICE_KEY, HC.CONTENT_TYPE_MAPPINGS, HC.CONTENT_UPDATE_FLIP, 'test' ) )
k_shortcut_1 = ClientGUIShortcuts.Shortcut( ClientGUIShortcuts.SHORTCUT_TYPE_KEYBOARD_SPECIAL, ClientGUIShortcuts.SHORTCUT_KEY_SPECIAL_SPACE, ClientGUIShortcuts.SHORTCUT_PRESS_TYPE_PRESS, [] )
k_shortcut_2 = ClientGUIShortcuts.Shortcut( ClientGUIShortcuts.SHORTCUT_TYPE_KEYBOARD_CHARACTER, ord( 'a' ), ClientGUIShortcuts.SHORTCUT_PRESS_TYPE_PRESS, [ ClientGUIShortcuts.SHORTCUT_MODIFIER_CTRL ] )
k_shortcut_3 = ClientGUIShortcuts.Shortcut( ClientGUIShortcuts.SHORTCUT_TYPE_KEYBOARD_CHARACTER, ord( 'A' ), ClientGUIShortcuts.SHORTCUT_PRESS_TYPE_PRESS, [ ClientGUIShortcuts.SHORTCUT_MODIFIER_CTRL ] )
k_shortcut_4 = ClientGUIShortcuts.Shortcut( ClientGUIShortcuts.SHORTCUT_TYPE_KEYBOARD_SPECIAL, ClientGUIShortcuts.SHORTCUT_KEY_SPECIAL_HOME, ClientGUIShortcuts.SHORTCUT_PRESS_TYPE_PRESS, [ ClientGUIShortcuts.SHORTCUT_MODIFIER_ALT, ClientGUIShortcuts.SHORTCUT_MODIFIER_CTRL ] )
m_shortcut_1 = ClientGUIShortcuts.Shortcut( ClientGUIShortcuts.SHORTCUT_TYPE_MOUSE, ClientGUIShortcuts.SHORTCUT_MOUSE_LEFT, ClientGUIShortcuts.SHORTCUT_PRESS_TYPE_PRESS, [] )
m_shortcut_2 = ClientGUIShortcuts.Shortcut( ClientGUIShortcuts.SHORTCUT_TYPE_MOUSE, ClientGUIShortcuts.SHORTCUT_MOUSE_MIDDLE, ClientGUIShortcuts.SHORTCUT_PRESS_TYPE_PRESS, [ ClientGUIShortcuts.SHORTCUT_MODIFIER_CTRL ] )
m_shortcut_3 = ClientGUIShortcuts.Shortcut( ClientGUIShortcuts.SHORTCUT_TYPE_MOUSE, ClientGUIShortcuts.SHORTCUT_MOUSE_SCROLL_DOWN, ClientGUIShortcuts.SHORTCUT_PRESS_TYPE_PRESS, [ ClientGUIShortcuts.SHORTCUT_MODIFIER_ALT, ClientGUIShortcuts.SHORTCUT_MODIFIER_SHIFT ] )
shortcut_set = ClientGUIShortcuts.ShortcutSet( 'test' )
shortcut_set.SetCommand( k_shortcut_1, command_1 )
shortcut_set.SetCommand( k_shortcut_2, command_2 )
shortcut_set.SetCommand( k_shortcut_3, command_2 )
shortcut_set.SetCommand( k_shortcut_4, command_3 )
shortcut_set.SetCommand( m_shortcut_1, command_1 )
shortcut_set.SetCommand( m_shortcut_2, command_2 )
shortcut_set.SetCommand( m_shortcut_3, command_3 )
self._dump_and_load_and_test( shortcut_set, test )
self.assertEqual( shortcut_set.GetCommand( k_shortcut_1 ).GetData(), command_1.GetData() )
shortcut_set.SetCommand( k_shortcut_1, command_3 )
self.assertEqual( shortcut_set.GetCommand( k_shortcut_1 ).GetData(), command_3.GetData() )
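# Subscription round-trip: the dupe check below compares nested serialisables
# (file/tag import options) via their serialisable tuples, not object identity.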
def test_SERIALISABLE_TYPE_SUBSCRIPTION( self ):
def test( obj, dupe_obj ):
self.assertEqual( obj.GetName(), dupe_obj.GetName() )
self.assertEqual( obj._gug_key_and_name, dupe_obj._gug_key_and_name )
self.assertEqual( len( obj._query_headers ), len( dupe_obj._query_headers ) )
self.assertEqual( obj._initial_file_limit, dupe_obj._initial_file_limit )
self.assertEqual( obj._periodic_file_limit, dupe_obj._periodic_file_limit )
self.assertEqual( obj._paused, dupe_obj._paused )
self.assertEqual( obj._file_import_options.GetSerialisableTuple(), dupe_obj._file_import_options.GetSerialisableTuple() )
self.assertEqual( obj._tag_import_options.GetSerialisableTuple(), dupe_obj._tag_import_options.GetSerialisableTuple() )
self.assertEqual( obj._no_work_until, dupe_obj._no_work_until )
sub = ClientImportSubscriptions.Subscription( 'test sub' )
self._dump_and_load_and_test( sub, test )
gug_key_and_name = ( HydrusData.GenerateKey(), 'muh test gug' )
query_headers = []
q = ClientImportSubscriptionQuery.SubscriptionQueryHeader()
q.SetQueryText( 'test query' )
query_headers.append( q )
q = ClientImportSubscriptionQuery.SubscriptionQueryHeader()
q.SetQueryText( 'test query 2' )
query_headers.append( q )
checker_options = ClientImportOptions.CheckerOptions()
initial_file_limit = 100
periodic_file_limit = 50
paused = False
file_import_options = ClientImportOptions.FileImportOptions()
service_tag_import_options = ClientImportOptions.ServiceTagImportOptions( get_tags = False, additional_tags = { 'test additional tag', 'and another' } )
tag_import_options = ClientImportOptions.TagImportOptions( service_keys_to_service_tag_import_options = { HydrusData.GenerateKey() : service_tag_import_options } )
no_work_until = HydrusData.GetNow() - 86400 * 20
sub.SetTuple( gug_key_and_name, checker_options, initial_file_limit, periodic_file_limit, paused, file_import_options, tag_import_options, no_work_until )
sub.SetQueryHeaders( query_headers )
self.assertEqual( sub.GetGUGKeyAndName(), gug_key_and_name )
self.assertEqual( sub.GetTagImportOptions(), tag_import_options )
self.assertEqual( sub.GetQueryHeaders(), query_headers )
self.assertEqual( sub._paused, False )
sub.PauseResume()
self.assertEqual( sub._paused, True )
sub.PauseResume()
self.assertEqual( sub._paused, False )
self._dump_and_load_and_test( sub, test )
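# TagFilter semantics exercised below: rule '' covers unnamespaced tags, ':' covers
# all namespaced tags, 'namespace:' or a full tag is narrower, and whitelist rules
# punch holes in broader blacklist rules.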
def test_SERIALISABLE_TYPE_TAG_FILTER( self ):
def test( obj, dupe_obj ):
self.assertEqual( obj._tag_slices_to_rules, dupe_obj._tag_slices_to_rules )
tags = set()
tags.add( 'title:test title' )
tags.add( 'series:neon genesis evangelion' )
tags.add( 'series:kill la kill' )
tags.add( 'smile' )
tags.add( 'blue eyes' )
#
tag_filter = ClientTags.TagFilter()
self._dump_and_load_and_test( tag_filter, test )
self.assertEqual( tag_filter.Filter( tags ), { 'smile', 'blue eyes', 'title:test title', 'series:neon genesis evangelion', 'series:kill la kill' } )
#
tag_filter = ClientTags.TagFilter()
tag_filter.SetRule( '', CC.FILTER_BLACKLIST )
tag_filter.SetRule( ':', CC.FILTER_BLACKLIST )
self._dump_and_load_and_test( tag_filter, test )
self.assertEqual( tag_filter.Filter( tags ), set() )
#
tag_filter = ClientTags.TagFilter()
tag_filter.SetRule( '', CC.FILTER_BLACKLIST )
tag_filter.SetRule( ':', CC.FILTER_BLACKLIST )
tag_filter.SetRule( 'series:', CC.FILTER_WHITELIST )
self._dump_and_load_and_test( tag_filter, test )
self.assertEqual( tag_filter.Filter( tags ), { 'series:neon genesis evangelion', 'series:kill la kill' } )
#
tag_filter = ClientTags.TagFilter()
tag_filter.SetRule( '', CC.FILTER_BLACKLIST )
tag_filter.SetRule( ':', CC.FILTER_BLACKLIST )
tag_filter.SetRule( 'series:kill la kill', CC.FILTER_WHITELIST )
self._dump_and_load_and_test( tag_filter, test )
self.assertEqual( tag_filter.Filter( tags ), { 'series:kill la kill' } )
#
tag_filter = ClientTags.TagFilter()
tag_filter.SetRule( '', CC.FILTER_BLACKLIST )
tag_filter.SetRule( ':', CC.FILTER_BLACKLIST )
tag_filter.SetRule( 'smile', CC.FILTER_WHITELIST )
self._dump_and_load_and_test( tag_filter, test )
self.assertEqual( tag_filter.Filter( tags ), { 'smile' } )
#
tag_filter = ClientTags.TagFilter()
tag_filter.SetRule( ':', CC.FILTER_BLACKLIST )
self._dump_and_load_and_test( tag_filter, test )
self.assertEqual( tag_filter.Filter( tags ), { 'smile', 'blue eyes' } )
#
tag_filter = ClientTags.TagFilter()
tag_filter.SetRule( ':', CC.FILTER_BLACKLIST )
tag_filter.SetRule( 'series:', CC.FILTER_WHITELIST )
self._dump_and_load_and_test( tag_filter, test )
self.assertEqual( tag_filter.Filter( tags ), { 'smile', 'blue eyes', 'series:neon genesis evangelion', 'series:kill la kill' } )
#
tag_filter = ClientTags.TagFilter()
tag_filter.SetRule( ':', CC.FILTER_BLACKLIST )
tag_filter.SetRule( 'series:kill la kill', CC.FILTER_WHITELIST )
self._dump_and_load_and_test( tag_filter, test )
self.assertEqual( tag_filter.Filter( tags ), { 'smile', 'blue eyes', 'series:kill la kill' } )
#
tag_filter = ClientTags.TagFilter()
tag_filter.SetRule( 'series:', CC.FILTER_BLACKLIST )
self._dump_and_load_and_test( tag_filter, test )
self.assertEqual( tag_filter.Filter( tags ), { 'smile', 'blue eyes', 'title:test title' } )
#
tag_filter = ClientTags.TagFilter()
tag_filter.SetRule( 'series:', CC.FILTER_BLACKLIST )
tag_filter.SetRule( 'series:neon genesis evangelion', CC.FILTER_WHITELIST )
self._dump_and_load_and_test( tag_filter, test )
self.assertEqual( tag_filter.Filter( tags ), { 'smile', 'blue eyes', 'title:test title', 'series:neon genesis evangelion' } )
#
tag_filter = ClientTags.TagFilter()
tag_filter.SetRule( '', CC.FILTER_BLACKLIST )
self._dump_and_load_and_test( tag_filter, test )
self.assertEqual( tag_filter.Filter( tags ), { 'title:test title', 'series:neon genesis evangelion', 'series:kill la kill' } )
#
tag_filter = ClientTags.TagFilter()
tag_filter.SetRule( '', CC.FILTER_BLACKLIST )
tag_filter.SetRule( 'blue eyes', CC.FILTER_WHITELIST )
self._dump_and_load_and_test( tag_filter, test )
self.assertEqual( tag_filter.Filter( tags ), { 'title:test title', 'series:neon genesis evangelion', 'series:kill la kill', 'blue eyes' } )
# blacklist namespace test
blacklist_tags = { 'nintendo', 'studio:nintendo' }
#
tag_filter = ClientTags.TagFilter()
tag_filter.SetRule( 'nintendo', CC.FILTER_BLACKLIST )
self._dump_and_load_and_test( tag_filter, test )
self.assertEqual( tag_filter.Filter( blacklist_tags ), { 'studio:nintendo' } )
#
tag_filter = ClientTags.TagFilter()
tag_filter.SetRule( 'nintendo', CC.FILTER_BLACKLIST )
self._dump_and_load_and_test( tag_filter, test )
self.assertEqual( tag_filter.Filter( blacklist_tags, apply_unnamespaced_rules_to_namespaced_tags = True ), set() )
#
tag_filter = ClientTags.TagFilter()
tag_filter.SetRule( 'nintendo', CC.FILTER_BLACKLIST )
tag_filter.SetRule( 'studio:nintendo', CC.FILTER_WHITELIST )
self._dump_and_load_and_test( tag_filter, test )
self.assertEqual( tag_filter.Filter( blacklist_tags, apply_unnamespaced_rules_to_namespaced_tags = True ), { 'studio:nintendo' } )
| [
[
[
7,
15
],
[
905,
913
]
],
[
[
41,
62
],
[
4902,
4904
],
[
4928,
4930
],
[
5139,
5141
],
[
5165,
5167
],
[
5359,
5361
],
[
5384,
5386
],
[
6013,
6015
],
[
6109,
6111
],
[
6182,
6184
],
[
6336,
6338
],
[
6432,
6434
],
[
6505,
6507
],
[
6660,
6662
],
[
6765,
6767
],
[
6847,
6849
],
[
6957,
6959
],
[
7758,
7760
],
[
7840,
7842
],
[
7995,
7997
],
[
8077,
8079
],
[
8240,
8242
],
[
8357,
8359
],
[
13854,
13856
],
[
13877,
13879
],
[
14337,
14339
],
[
14360,
14362
],
[
15121,
15123
],
[
15147,
15149
],
[
15250,
15252
],
[
15276,
15278
],
[
15450,
15452
],
[
15475,
15477
],
[
15627,
15629
],
[
15652,
15654
],
[
15792,
15794
],
[
15815,
15817
],
[
16302,
16304
],
[
16328,
16330
],
[
16417,
16419
],
[
16443,
16445
],
[
16550,
16552
],
[
16576,
16578
],
[
16679,
16681
],
[
16705,
16707
],
[
16879,
16881
],
[
16904,
16906
],
[
16986,
16988
],
[
17011,
17013
],
[
17163,
17165
],
[
17188,
17190
],
[
17270,
17272
],
[
17295,
17297
],
[
17435,
17437
],
[
17458,
17460
],
[
18165,
18167
],
[
18191,
18193
],
[
18280,
18282
],
[
18306,
18308
],
[
18466,
18468
],
[
18491,
18493
],
[
18631,
18633
],
[
18656,
18658
],
[
19079,
19081
],
[
19105,
19107
],
[
19194,
19196
],
[
19220,
19222
],
[
19380,
19382
],
[
19405,
19407
],
[
19545,
19547
],
[
19570,
19572
],
[
19989,
19991
],
[
20015,
20017
],
[
20104,
20106
],
[
20130,
20132
],
[
20290,
20292
],
[
20315,
20317
],
[
20455,
20457
],
[
20480,
20482
],
[
20872,
20874
],
[
20898,
20900
],
[
20974,
20976
],
[
21000,
21002
],
[
21129,
21131
],
[
21154,
21156
],
[
21286,
21288
],
[
21311,
21313
],
[
24620,
24622
],
[
24646,
24648
],
[
24799,
24801
],
[
24825,
24827
]
],
[
[
87,
97
],
[
3843,
3853
],
[
3814,
3824
],
[
3911,
3921
],
[
3880,
3890
],
[
3977,
3987
],
[
4022,
4032
],
[
4876,
4886
],
[
5333,
5343
],
[
9340,
9350
],
[
9894,
9904
],
[
10449,
10459
],
[
10972,
10982
],
[
11499,
11509
],
[
12018,
12028
],
[
12511,
12521
],
[
13828,
13838
],
[
14311,
14321
],
[
15095,
15105
],
[
15224,
15234
],
[
15424,
15434
],
[
15601,
15611
],
[
15766,
15776
],
[
16276,
16286
],
[
16391,
16401
],
[
16524,
16534
],
[
16653,
16663
],
[
16853,
16863
],
[
16960,
16970
],
[
17137,
17147
],
[
17244,
17254
],
[
17409,
17419
],
[
18139,
18149
],
[
18254,
18264
],
[
18440,
18450
],
[
18605,
18615
],
[
19053,
19063
],
[
19168,
19178
],
[
19354,
19364
],
[
19519,
19529
],
[
19963,
19973
],
[
20078,
20088
],
[
20264,
20274
],
[
20429,
20439
],
[
20846,
20856
],
[
20948,
20958
],
[
21103,
21113
],
[
21260,
21270
],
[
24594,
24604
],
[
28494,
28504
],
[
29408,
29418
],
[
29499,
29509
]
],
[
[
122,
140
],
[
1158,
1176
],
[
1347,
1365
],
[
1663,
1681
],
[
1983,
2001
],
[
2297,
2315
],
[
2851,
2869
],
[
3033,
3051
],
[
3313,
3331
],
[
3744,
3762
]
],
[
[
168,
199
],
[
4683,
4686
],
[
4707,
4710
],
[
4744,
4747
],
[
4812,
4815
],
[
4836,
4839
],
[
5041,
5044
],
[
5065,
5068
],
[
5269,
5272
],
[
5293,
5296
],
[
24423,
24426
],
[
24447,
24450
],
[
24484,
24487
],
[
24530,
24533
],
[
24554,
24557
],
[
24701,
24704
],
[
24725,
24728
]
],
[
[
226,
247
],
[
5105,
5107
],
[
5979,
5981
],
[
6302,
6304
],
[
6626,
6628
],
[
7196,
7198
],
[
7223,
7225
],
[
7364,
7366
],
[
7386,
7388
],
[
7536,
7538
],
[
7721,
7723
],
[
7803,
7805
],
[
7958,
7960
],
[
8040,
8042
],
[
8203,
8205
],
[
8320,
8322
],
[
13796,
13798
],
[
14284,
14286
],
[
15056,
15058
],
[
15734,
15736
],
[
16237,
16239
],
[
17377,
17379
],
[
18100,
18102
],
[
19014,
19016
],
[
19924,
19926
],
[
20807,
20809
],
[
24765,
24767
],
[
31108,
31110
],
[
31163,
31165
],
[
31434,
31436
],
[
31489,
31491
],
[
31550,
31552
],
[
31875,
31877
],
[
31930,
31932
],
[
32003,
32005
],
[
32294,
32296
],
[
32349,
32351
],
[
32408,
32410
],
[
32686,
32688
],
[
32977,
32979
],
[
33038,
33040
],
[
33386,
33388
],
[
33459,
33461
],
[
33779,
33781
],
[
34096,
34098
],
[
34180,
34182
],
[
34524,
34526
],
[
34869,
34871
],
[
34932,
34934
],
[
35410,
35412
],
[
35715,
35717
],
[
36056,
36058
],
[
36125,
36127
]
],
[
[
274,
284
]
],
[
[
311,
325
],
[
24218,
24232
]
],
[
[
352,
368
],
[
5934,
5950
],
[
6257,
6273
],
[
6581,
6597
]
],
[
[
395,
407
],
[
3136,
3148
],
[
3160,
3172
],
[
3352,
3364
],
[
3376,
3388
],
[
3236,
3248
],
[
3260,
3272
]
],
[
[
438,
456
],
[
21841,
21859
],
[
21918,
21936
],
[
21947,
21965
],
[
21998,
22016
],
[
22045,
22063
],
[
22137,
22155
],
[
22166,
22184
],
[
22231,
22249
],
[
22279,
22297
],
[
22367,
22385
],
[
22396,
22414
],
[
22461,
22479
],
[
22509,
22527
],
[
22597,
22615
],
[
22626,
22644
],
[
22677,
22695
],
[
22723,
22741
],
[
22771,
22789
],
[
22813,
22831
],
[
22917,
22935
],
[
22946,
22964
],
[
22986,
23004
],
[
23026,
23044
],
[
23123,
23141
],
[
23152,
23170
],
[
23192,
23210
],
[
23234,
23252
],
[
23282,
23300
],
[
23381,
23399
],
[
23410,
23428
],
[
23450,
23468
],
[
23497,
23515
],
[
23545,
23563
],
[
23587,
23605
],
[
24892,
24910
],
[
24921,
24939
],
[
24972,
24990
],
[
25019,
25037
],
[
25093,
25111
],
[
25122,
25140
],
[
25187,
25205
],
[
25235,
25253
],
[
25304,
25322
],
[
25333,
25351
],
[
25398,
25416
],
[
25446,
25464
],
[
25515,
25533
],
[
25544,
25562
],
[
25595,
25613
],
[
25641,
25659
],
[
25689,
25707
],
[
25731,
25749
],
[
25809,
25827
],
[
25838,
25856
],
[
25878,
25896
],
[
25918,
25936
],
[
25992,
26010
],
[
26021,
26039
],
[
26061,
26079
],
[
26103,
26121
],
[
26151,
26169
],
[
26220,
26238
],
[
26249,
26267
],
[
26289,
26307
],
[
26336,
26354
],
[
26384,
26402
],
[
26426,
26444
],
[
26505,
26523
]
],
[
[
493,
512
],
[
28910,
28929
],
[
29075,
29094
],
[
29161,
29180
],
[
29323,
29342
]
],
[
[
549,
574
],
[
28344,
28369
]
],
[
[
611,
640
],
[
28594,
28623
],
[
28744,
28773
]
],
[
[
673,
684
],
[
9785,
9796
],
[
10351,
10362
],
[
10872,
10883
],
[
11399,
11410
],
[
11928,
11939
],
[
12421,
12432
],
[
12914,
12925
]
],
[
[
717,
736
],
[
7156,
7175
],
[
7324,
7343
],
[
7489,
7508
],
[
7686,
7705
],
[
7923,
7942
],
[
8168,
8187
],
[
8477,
8496
],
[
8570,
8589
],
[
8726,
8745
],
[
8890,
8909
],
[
9048,
9067
],
[
9122,
9141
],
[
9207,
9226
],
[
9402,
9421
],
[
9956,
9975
],
[
10511,
10530
],
[
11034,
11053
],
[
11561,
11580
],
[
12080,
12099
],
[
12573,
12592
]
],
[
[
769,
786
],
[
9569,
9586
],
[
10129,
10146
],
[
10673,
10690
],
[
11198,
11215
],
[
11725,
11742
],
[
12234,
12251
],
[
12727,
12744
]
],
[
[
822,
832
],
[
6043,
6053
],
[
6366,
6376
],
[
6699,
6709
],
[
30740,
30750
],
[
31044,
31054
],
[
31370,
31380
],
[
31811,
31821
],
[
32230,
32240
],
[
32621,
32631
],
[
32912,
32922
],
[
33321,
33331
],
[
33708,
33718
],
[
34025,
34035
],
[
34460,
34470
],
[
34805,
34815
],
[
35338,
35348
],
[
35643,
35653
],
[
35984,
35994
]
],
[
[
858,
878
],
[
6075,
6077
],
[
6143,
6145
],
[
6398,
6400
],
[
6466,
6468
],
[
6731,
6733
],
[
6808,
6810
],
[
8608,
8610
],
[
8648,
8650
],
[
8764,
8766
],
[
8804,
8806
],
[
8928,
8930
],
[
8968,
8970
],
[
15385,
15387
],
[
15557,
15559
],
[
16814,
16816
],
[
17093,
17095
],
[
18401,
18403
],
[
18561,
18563
],
[
19315,
19317
],
[
19475,
19477
],
[
20225,
20227
],
[
20385,
20387
],
[
21064,
21066
],
[
21216,
21218
],
[
13336,
13338
],
[
13394,
13396
]
],
[
[
886,
903
]
]
] |
import requests
import json
import time
import random
from . import conf, data, lang
from inukit.timestamp import natural_date, natural_time, timestamp_now
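# Morning/night greeting and daily sign-in handlers for a QQ chat bot; per-user
# timestamps and the last sign-in info are persisted through the `data` module.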
def is_same_day(ts1, ts2) -> bool:
def d(ts):
return natural_date(ts, '%Y-%m-%d')
return d(ts1) == d(ts2)
def handle_morning(qq):
last_morning = data.get(qq, 'last_morning')
last_night = data.get(qq, 'last_night')
now = timestamp_now()
if last_morning > last_night:
msg = lang.no_sleep
else:
msg = lang.morning_success % (
natural_time(now - last_night)
)
data.set(qq, 'last_morning', now)
return msg
def handle_night(qq):
last_morning = data.get(qq, 'last_morning')
last_night = data.get(qq, 'last_night')
now = timestamp_now()
if last_night > last_morning:
msg = lang.no_getup
else:
data.set(qq, 'last_night', now)
msg = lang.night_success % (
natural_time(now - last_morning)
)
return msg
def gen_sign_info():
rp = random.randint(1,100)
return {
"rp": rp
}
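# Sign-in is once per calendar day: a repeat attempt on the same day re-uses the
# stored info instead of rolling a new "rp" value.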
def handle_sign(qq):
last_sign = data.get(qq, 'last_sign')
now = timestamp_now()
msg = ''
if is_same_day(last_sign, now):
info = data.get(qq, 'last_sign_info')
msg = lang.already_sign
else:
msg = lang.sign_success
info = gen_sign_info()
data.set(qq, 'last_sign', now)
data.set(qq, 'last_sign_info', info)
msg += lang.sign % (
natural_date(last_sign),
info['rp']
)
return msg
| [
[
[
7,
15
]
],
[
[
23,
27
]
],
[
[
35,
39
]
],
[
[
47,
53
],
[
1038,
1044
]
],
[
[
68,
72
]
],
[
[
74,
78
],
[
323,
327
],
[
369,
373
],
[
594,
598
],
[
685,
689
],
[
731,
735
],
[
864,
868
],
[
1134,
1138
],
[
1250,
1254
],
[
1394,
1398
],
[
1433,
1437
]
],
[
[
80,
84
],
[
470,
474
],
[
508,
512
],
[
832,
836
],
[
910,
914
],
[
1295,
1299
],
[
1337,
1341
],
[
1481,
1485
]
],
[
[
114,
126
],
[
1503,
1515
],
[
222,
234
]
],
[
[
128,
140
],
[
545,
557
],
[
945,
957
]
],
[
[
142,
155
],
[
406,
419
],
[
768,
781
],
[
1170,
1183
]
],
[
[
161,
172
],
[
1206,
1217
]
],
[
[
284,
298
]
],
[
[
648,
660
]
],
[
[
1012,
1025
],
[
1370,
1383
]
],
[
[
1101,
1112
]
]
] |
"""
Django settings for app project.
Generated by 'django-admin startproject' using Django 2.1.15.
For more information on this file, see
https://docs.djangoproject.com/en/2.1/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/2.1/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/2.1/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'lxb!(o00)qtw0p+6q_vs$01&wtsw(m*s!ol0_6^v*flo^!&ek&'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'rest_framework',
'rest_framework.authtoken',
'core',
'user',
'recipe',
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'app.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'app.wsgi.application'
# Database
# https://docs.djangoproject.com/en/2.1/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql',
'HOST': os.environ.get('DB_HOST'),
'NAME': os.environ.get('DB_NAME'),
'USER': os.environ.get('DB_USER'),
'PASSWORD': os.environ.get('DB_PASS'),
}
}
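# All connection values come from the environment, e.g. (assumed sample values):
#   DB_HOST=db DB_NAME=app DB_USER=postgres DB_PASS=supersecret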
# Password validation
# https://docs.djangoproject.com/en/2.1/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/2.1/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/2.1/howto/static-files/
STATIC_URL = '/static/'
MEDIA_URL = '/media/'
MEDIA_ROOT = '/vol/web/media'
STATIC_ROOT = '/vol/web/static'
AUTH_USER_MODEL = 'core.User'
| [
[
[
312,
314
],
[
399,
401
],
[
415,
417
],
[
431,
433
],
[
2302,
2304
],
[
2345,
2347
],
[
2388,
2390
],
[
2435,
2437
]
],
[
[
388,
396
]
],
[
[
664,
674
]
],
[
[
797,
802
]
],
[
[
811,
824
]
],
[
[
858,
872
]
],
[
[
1158,
1168
]
],
[
[
1572,
1584
]
],
[
[
1599,
1608
]
],
[
[
2084,
2100
]
],
[
[
2204,
2213
]
],
[
[
2574,
2598
]
],
[
[
3077,
3090
]
],
[
[
3102,
3111
]
],
[
[
3121,
3129
]
],
[
[
3138,
3146
]
],
[
[
3155,
3161
]
],
[
[
3273,
3283
]
],
[
[
3297,
3306
]
],
[
[
3320,
3330
]
],
[
[
3350,
3361
]
],
[
[
3383,
3398
]
]
] |
#!/usr/bin/env python3
'''Test config updates '''
# ------------------------------------------------------------------------------
# Imports
# ------------------------------------------------------------------------------
import subprocess
import os
import json
import time
import datetime
import requests
import pytest
# ------------------------------------------------------------------------------
# Constants
# ------------------------------------------------------------------------------
G_TEST_HOST = 'http://127.0.0.1:12345'
# ------------------------------------------------------------------------------
# run_command
# ------------------------------------------------------------------------------
def run_command(command):
p = subprocess.Popen(command, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
stdout, stderr = p.communicate()
return (p.returncode, stdout, stderr)
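# Usage sketch (assumed command): l_code, l_out, l_err = run_command('ls -l')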
# ------------------------------------------------------------------------------
# setup scopez server in action mode
# ------------------------------------------------------------------------------
@pytest.fixture()
def setup_scopez_server_action():
# ------------------------------------------------------
# setup
# ------------------------------------------------------
l_file_path = os.path.dirname(os.path.abspath(__file__))
l_geoip2city_path = os.path.realpath(os.path.join(l_file_path, '../../data/waf/db/GeoLite2-City.mmdb'))
l_geoip2ISP_path = os.path.realpath(os.path.join(l_file_path, '../../data/waf/db/GeoLite2-ASN.mmdb'))
l_conf_dir = os.path.realpath(os.path.join(l_file_path, '../../data/waf/conf'))
l_ruleset_path = os.path.realpath(os.path.join(l_file_path, '../../data/waf/ruleset'))
l_scopez_dir = os.path.realpath(os.path.join(l_file_path, '../../data/waf/conf/scopes'))
l_an_list = os.path.realpath(os.path.join(l_file_path, '../../data/an/an-scopes.json'))
l_scopez_server_path = os.path.abspath(os.path.join(l_file_path, '../../../build/util/scopez_server/scopez_server'))
l_bot_challenge = os.path.realpath(os.path.join(l_file_path, '../../data/bot/bot-challenges.json'))
l_subproc = subprocess.Popen([l_scopez_server_path,
'-d', l_conf_dir,
'-S', l_scopez_dir,
'-l', l_an_list,
'-r', l_ruleset_path,
'-g', l_geoip2city_path,
'-i', l_geoip2ISP_path,
'-c', l_bot_challenge,
'-a'
])
print('cmd: {}'.format(' '.join([l_scopez_server_path,
'-d', l_conf_dir,
'-S', l_scopez_dir,
'-l', l_an_list,
'-r', l_ruleset_path,
'-g', l_geoip2city_path,
'-i', l_geoip2ISP_path,
'-c', l_bot_challenge,
'-a'])))
# '-b'])))
time.sleep(1)
# ------------------------------------------------------
# yield...
# ------------------------------------------------------
yield setup_scopez_server_action
# ------------------------------------------------------
# tear down
# ------------------------------------------------------
_, _, _ = run_command('kill -9 %d'%(l_subproc.pid))
time.sleep(0.5)
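# Each test below follows the same shape: provoke a block (403) under the current
# config, POST a modified config to the matching /update_* endpoint, then repeat
# the request to confirm the new behaviour took effect.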
def test_acl_config_update(setup_scopez_server_action):
'''
update acl config 0050-ZrLf2KkQ - remove gizoogle from the
user-agent blacklist and check that the request then returns 200
'''
# ------------------------------------------------------
# test AN 0050 with user-agent ACL 'gizoogle' in the
# request
# ------------------------------------------------------
l_uri = G_TEST_HOST
l_headers = {'host': 'monkeez.com',
'user-agent': 'gizoogle',
'waf-scopes-id': '0050'}
l_r = requests.get(l_uri, headers=l_headers)
assert l_r.status_code == 403
assert l_r.text == 'This is acl custom response\n'
#-------------------------------------------------------
# load acl config and remove gizoogle from blacklist
# ------------------------------------------------------
l_conf = {}
l_file_path = os.path.dirname(os.path.abspath(__file__))
l_acl_conf_path = os.path.realpath(os.path.join(l_file_path, '../../data/waf/conf/acl/0050-ZrLf2KkQ.acl.json'))
try:
with open(l_acl_conf_path) as l_f:
l_conf = json.load(l_f)
except Exception as l_e:
print('error opening config file: %s. Reason: %s error: %s, doc: %s' % (
l_acl_conf_path, type(l_e), l_e, l_e.__doc__))
assert False
l_conf['user_agent']['blacklist'] = []
l_conf['last_modified_date'] = datetime.datetime.utcnow().strftime('%Y-%m-%dT%H:%M:%S.%fZ')
# ------------------------------------------------------
# post/update acl conf
# ------------------------------------------------------
l_url = '%s/update_acl'%(G_TEST_HOST)
l_headers = {'Content-Type': 'application/json',
'waf-scopes-id': '0050'}
l_r = requests.post(l_url,
headers=l_headers,
data=json.dumps(l_conf))
assert l_r.status_code == 200
# ------------------------------------------------------
# blacklist should have been updated and should get 200
#-------------------------------------------------------
l_uri = G_TEST_HOST
l_headers = {'host': 'monkeez.com',
'user-agent': 'gizoogle',
'waf-scopes-id': '0050'}
l_r = requests.get(l_uri, headers=l_headers)
assert l_r.status_code == 200
def test_rules_config_update(setup_scopez_server_action):
'''
update rules config 0050-ZrLf3KkQ.rules.json - change the
user agent from Monkeez to Donkeez
'''
# ------------------------------------------------------
# test AN 0050 with user-agent 'monkeez' in the
# request
# ------------------------------------------------------
l_uri = G_TEST_HOST
l_headers = {'host': 'monkeez.com',
'user-agent': 'monkeez',
'waf-scopes-id': '0050'}
l_r = requests.get(l_uri, headers=l_headers)
assert l_r.status_code == 403
assert l_r.text == 'This is rules custom response\n'
#-------------------------------------------------------
# load rules config and changes monkeez to donkeez in
# custom rules
# ------------------------------------------------------
l_conf = {}
l_file_path = os.path.dirname(os.path.abspath(__file__))
l_rules_conf_path = os.path.realpath(os.path.join(l_file_path, '../../data/waf/conf/rules/0050-ZrLf3KkQ.rules.json'))
try:
with open(l_rules_conf_path) as l_f:
l_conf = json.load(l_f)
except Exception as l_e:
print('error opening config file: %s. Reason: %s error: %s, doc: %s' % (
l_rules_conf_path, type(l_e), l_e, l_e.__doc__))
assert False
l_conf['directive'][1]['sec_rule']['operator']['value'] = 'donkeez'
l_conf['last_modified_date'] = datetime.datetime.utcnow().strftime('%Y-%m-%dT%H:%M:%S.%fZ')
# ------------------------------------------------------
# post/update rules conf
# ------------------------------------------------------
l_url = '%s/update_rules'%(G_TEST_HOST)
l_headers = {'Content-Type': 'application/json',
'waf-scopes-id': '0050'}
l_r = requests.post(l_url,
headers=l_headers,
data=json.dumps(l_conf))
assert l_r.status_code == 200
# ------------------------------------------------------
# test again with user-agent 'monkeez' in the
# request. It should pass
# ------------------------------------------------------
l_uri = G_TEST_HOST
l_headers = {'host': 'monkeez.com',
'user-agent': 'monkeez',
'waf-scopes-id': '0050'}
l_r = requests.get(l_uri, headers=l_headers)
assert l_r.status_code == 200
# ------------------------------------------------------
# test with user-agent 'donkeez' in the
# request. It should be blocked
# ------------------------------------------------------
l_uri = G_TEST_HOST
l_headers = {'host': 'monkeez.com',
'user-agent': 'donkeez',
'waf-scopes-id': '0050'}
l_r = requests.get(l_uri, headers=l_headers)
assert l_r.status_code == 403
assert l_r.text == 'This is rules custom response\n'
def test_profile_config_update(setup_scopez_server_action):
'''
update profile config 0050-YrLf3KkQ.wafprof.json - change
ignore_query_args to test from ignore
'''
# ------------------------------------------------------
# test AN 0050 with SQL injection
# ------------------------------------------------------
l_uri = G_TEST_HOST+'/profile.html?a=%27select%20*%20from%20testing%27'
l_headers = {'host': 'monkeez.com',
'waf-scopes-id': '0050'}
l_r = requests.get(l_uri, headers=l_headers)
assert l_r.status_code == 403
assert l_r.text == 'This is profile custom response\n'
# ------------------------------------------------------
# test AN 0050 with SQL injection and query_args "ignore"
# ------------------------------------------------------
l_uri = G_TEST_HOST+'/profile.html?ignore=%27select%20*%20from%20testing%27'
l_headers = {'host': 'monkeez.com',
'waf-scopes-id': '0050'}
l_r = requests.get(l_uri, headers=l_headers)
assert l_r.status_code == 200
#-------------------------------------------------------
# load profile config and change "ignore_query_args"
# to "test"
# ------------------------------------------------------
l_conf = {}
l_file_path = os.path.dirname(os.path.abspath(__file__))
l_profile_conf_path = os.path.realpath(os.path.join(l_file_path, '../../data/waf/conf/profile/0050-YrLf3KkQ.wafprof.json'))
try:
with open(l_profile_conf_path) as l_f:
l_conf = json.load(l_f)
except Exception as l_e:
print('error opening config file: %s. Reason: %s error: %s, doc: %s' % (
l_profile_conf_path, type(l_e), l_e, l_e.__doc__))
assert False
l_conf["general_settings"]["ignore_query_args"] = ["test"]
l_conf['last_modified_date'] = datetime.datetime.utcnow().strftime('%Y-%m-%dT%H:%M:%S.%fZ')
# ------------------------------------------------------
# post/update profile conf
# ------------------------------------------------------
l_url = '%s/update_profile'%(G_TEST_HOST)
l_headers = {'Content-Type': 'application/json',
'waf-scopes-id': '0050'}
l_r = requests.post(l_url,
headers=l_headers,
data=json.dumps(l_conf))
assert l_r.status_code == 200
# ------------------------------------------------------
# test AN 0050 with SQL injection and query_args "ignore"
# should now get 403
# ------------------------------------------------------
l_uri = G_TEST_HOST+'/profile.html?ignore=%27select%20*%20from%20testing%27'
l_headers = {'host': 'monkeez.com',
'waf-scopes-id': '0050'}
l_r = requests.get(l_uri, headers=l_headers)
assert l_r.status_code == 403
assert l_r.text == 'This is profile custom response\n'
# ------------------------------------------------------
# test AN 0050 with SQL injection and query_args "test"
# the SQL injection should be ignored and get 200
# ------------------------------------------------------
l_uri = G_TEST_HOST+'/profile.html?test=%27select%20*%20from%20testing%27'
l_headers = {'host': 'monkeez.com',
'waf-scopes-id': '0050'}
l_r = requests.get(l_uri, headers=l_headers)
assert l_r.status_code == 200
def test_limit_config_update(setup_scopez_server_action):
# ------------------------------------------------------
# Make 3 requests in 2 sec against the 3rd and
# 4th scopes. The third request should get rate limited
# ------------------------------------------------------
l_uri = G_TEST_HOST+'/test.html'
l_headers = {'host': 'limit.com',
'waf-scopes-id': '0050'}
for _ in range(2):
l_r = requests.get(l_uri, headers=l_headers)
assert l_r.status_code == 200
l_r = requests.get(l_uri, headers=l_headers)
assert l_r.status_code == 403
assert l_r.text == 'This is ddos custom response\n'
l_uri = G_TEST_HOST+'/test.html'
l_headers = {'host': 'test.limit.com',
'waf-scopes-id': '0050'}
for _ in range(2):
l_r = requests.get(l_uri, headers=l_headers)
assert l_r.status_code == 200
l_r = requests.get(l_uri, headers=l_headers)
assert l_r.status_code == 403
assert l_r.text == 'custom response for limits from limit_id_2\n'
# ------------------------------------------------------
# sleep for 2 seconds. Enforcements should expire
# ------------------------------------------------------
time.sleep(2)
#-------------------------------------------------------
# load limit config and change duration_sec to 3
# ------------------------------------------------------
l_conf = {}
l_file_path = os.path.dirname(os.path.abspath(__file__))
l_limit_conf_path = os.path.realpath(os.path.join(l_file_path, '../../data/waf/conf/limit/0050-MjMhNXMR.limit.json'))
try:
with open(l_limit_conf_path) as l_f:
l_conf = json.load(l_f)
except Exception as l_e:
print('error opening config file: %s. Reason: %s error: %s, doc: %s' % (
l_limit_conf_path, type(l_e), l_e, l_e.__doc__))
assert False
l_conf["num"] = 3
l_conf['last_modified_date'] = datetime.datetime.utcnow().strftime('%Y-%m-%dT%H:%M:%S.%fZ')
#-------------------------------------------------------
# POST conf
# ------------------------------------------------------
l_url = '%s/update_limit'%(G_TEST_HOST)
l_headers = {'Content-Type': 'application/json',
'waf-scopes-id': '0050'}
l_r = requests.post(l_url,
headers=l_headers,
data=json.dumps(l_conf))
assert l_r.status_code == 200
# ------------------------------------------------------
# Make 4 requests in 2 sec. The fourth request should get
# rate limited. The third request shouldn't be blocked
# because of the update
# ------------------------------------------------------
l_uri = G_TEST_HOST+'/test.html'
l_headers = {'host': 'limit.com',
'waf-scopes-id': '0050'}
for _ in range(3):
l_r = requests.get(l_uri, headers=l_headers)
assert l_r.status_code == 200
l_r = requests.get(l_uri, headers=l_headers)
assert l_r.status_code == 403
assert l_r.text == 'This is ddos custom response\n'
# ------------------------------------------------------
# Make 4 requests in 2 sec against the fourth scope.
# Verify that the 4th scope was also updated
# ------------------------------------------------------
l_uri = G_TEST_HOST+'/test.html'
l_headers = {'host': 'test.limit.com',
'waf-scopes-id': '0050'}
for _ in range(3):
l_r = requests.get(l_uri, headers=l_headers)
assert l_r.status_code == 200
l_r = requests.get(l_uri, headers=l_headers)
assert l_r.status_code == 403
assert l_r.text == 'custom response for limits from limit_id_2\n'
def test_scopes_update(setup_scopez_server_action):
#-------------------------------------------------------
# check second scope for AN 0051 working correctly
# ------------------------------------------------------
l_uri = G_TEST_HOST+'/path.html'
l_headers = {'host': 'www.regexhost.com',
'waf-scopes-id':'0051',
'User-Agent': 'bananas'}
l_r = requests.get(l_uri, headers=l_headers)
assert l_r.status_code == 403
assert l_r.text == 'This is from RX scope\n'
#-------------------------------------------------------
# change the 'path' value for the scope and update.
# check that the update was successful
# ------------------------------------------------------
l_conf = {}
l_file_path = os.path.dirname(os.path.abspath(__file__))
l_scopes_conf_path = os.path.realpath(os.path.join(l_file_path, '../../data/waf/conf/scopes/0051.scopes.json'))
try:
with open(l_scopes_conf_path) as l_f:
l_conf = json.load(l_f)
except Exception as l_e:
print('error opening config file: %s. Reason: %s error: %s, doc: %s' % (
l_scopes_conf_path, type(l_e), l_e, l_e.__doc__))
assert False
l_conf['scopes'][1]['path']['value'] = ".*/test.html"
l_conf['last_modified_date'] = datetime.datetime.utcnow().strftime('%Y-%m-%dT%H:%M:%S.%fZ')
#-------------------------------------------------------
# POST conf
# ------------------------------------------------------
l_url = '%s/update_scopes'%(G_TEST_HOST)
l_headers = {'Content-Type': 'application/json'}
l_r = requests.post(l_url,
headers=l_headers,
data=json.dumps(l_conf))
assert l_r.status_code == 200
#-------------------------------------------------------
# make a request with same path '/path.html',
# should match GLOB scope
# ------------------------------------------------------
l_uri = G_TEST_HOST+'/path.html'
l_headers = {'host': 'www.regexhost.com',
'waf-scopes-id':'0051',
'User-Agent': 'bananas'}
l_r = requests.get(l_uri, headers=l_headers)
assert l_r.status_code == 403
assert l_r.text == 'This is from GLOB scope\n'
#-------------------------------------------------------
# make a request with updated path '/test.html',
# should get 403 with custom response
# ------------------------------------------------------
l_uri = G_TEST_HOST+'/test.html'
l_headers = {'host': 'www.regexhost.com',
'waf-scopes-id':'0051',
'User-Agent': 'bananas'}
l_r = requests.get(l_uri, headers=l_headers)
assert l_r.status_code == 403
assert l_r.text == 'This is from RX scope\n'
def test_scopes_linkage_update(setup_scopez_server_action):
"""
Test linkage update. Update rules config in second scope
(0050-scopes.json) to 0050-0gG8osWJ.rules.json from
0050-ZrLf3KkQ.rules.json check if update worked
"""
#-------------------------------------------------------
# check second scope for AN 0050 working correctly
# ------------------------------------------------------
l_uri = G_TEST_HOST+'/path.html'
l_headers = {'host': 'test.com',
'waf-scopes-id':'0050',
'User-Agent': 'monkeez'}
l_r = requests.get(l_uri, headers=l_headers)
assert l_r.status_code == 403
assert l_r.text == 'This is rules custom response\n'
#-------------------------------------------------------
# change the 'rules_prod_id' value for the second scope
# and update.
# check that the update was successful
# ------------------------------------------------------
l_conf = {}
l_file_path = os.path.dirname(os.path.abspath(__file__))
l_scopes_conf_path = os.path.realpath(os.path.join(l_file_path, '../../data/waf/conf/scopes/0050.scopes.json'))
try:
with open(l_scopes_conf_path) as l_f:
l_conf = json.load(l_f)
except Exception as l_e:
print('error opening config file: %s. Reason: %s error: %s, doc: %s' % (
l_scopes_conf_path, type(l_e), l_e, l_e.__doc__))
assert False
l_conf['scopes'][1]['rules_prod_id'] = "0gG8osWJ"
l_conf['last_modified_date'] = datetime.datetime.utcnow().strftime('%Y-%m-%dT%H:%M:%S.%fZ')
#-------------------------------------------------------
# POST conf
# ------------------------------------------------------
l_url = '%s/update_scopes'%(G_TEST_HOST)
l_headers = {'Content-Type': 'application/json'}
l_r = requests.post(l_url,
headers=l_headers,
data=json.dumps(l_conf))
assert l_r.status_code == 200
#-------------------------------------------------------
# make the same request. should get 200
# ------------------------------------------------------
l_uri = G_TEST_HOST+'/path.html'
l_headers = {'host': 'test.com',
'waf-scopes-id':'0050',
'User-Agent': 'monkeez'}
l_r = requests.get(l_uri, headers=l_headers)
assert l_r.status_code == 200
#assert l_r.text == 'This is from GLOB scope\n'
#-------------------------------------------------------
# make a request with user-agent bananas
# ------------------------------------------------------
l_uri = G_TEST_HOST+'/path.html'
l_headers = {'host': 'test.com',
'waf-scopes-id':'0050',
'User-Agent': 'bananas'}
l_r = requests.get(l_uri, headers=l_headers)
assert l_r.status_code == 403
assert l_r.text == 'This is rules custom response\n'
# ------------------------------------------------------------------------------
# test /update_bots endpoint
# ------------------------------------------------------------------------------
def test_update_bots_endpoint(setup_scopez_server_action):
l_url = G_TEST_HOST + '/update_bots'
l_file_path = os.path.dirname(os.path.abspath(__file__))
l_test_file = os.path.realpath(os.path.join(l_file_path,
'../../data/waf/conf/bots/0052-wHyMHxV7.bots.json'))
l_test_payload = ''
# ------------------------------------------------------
# check setup
# ------------------------------------------------------
assert os.path.exists(l_test_file), 'test file not found!'
# ------------------------------------------------------
# slurp test file
# ------------------------------------------------------
with open(l_test_file) as l_tf:
l_test_payload = l_tf.read()
# ------------------------------------------------------
# check setup
# ------------------------------------------------------
assert l_test_payload, 'payload is empty!'
l_json_payload = json.loads(l_test_payload)
# ------------------------------------------------------
# Check that challenge works
# ------------------------------------------------------
l_uri = G_TEST_HOST+'/test.html'
l_headers = {'host': 'mybot.com',
'user-agent': 'bot-testing',
'waf-scopes-id': '0052'}
l_r = requests.get(l_uri, headers=l_headers)
assert l_r.status_code == 401
# ------------------------------------------------------
# Update the bot config
# ------------------------------------------------------
l_json_payload['directive'][0]['sec_rule']['operator']['value'] = 'chowdah'
# ------------------------------------------------------
# update the timestamp, else it will silently do nothing and return 200
# ref: scopes.cc:load_bots (compare time)
# ------------------------------------------------------
l_json_payload['last_modified_date'] = datetime.datetime.now().strftime('%Y-%m-%dT%H:%M:%SZ')
l_result = requests.post(l_url, timeout=3, json=l_json_payload)
assert l_result.status_code == 200
assert l_result.json()['status'] == 'success'
# ------------------------------------------------------
# Expect 200
# ------------------------------------------------------
l_uri = G_TEST_HOST+'/test.html'
l_headers = {'host': 'mybot.com',
'user-agent': 'bot-testing',
'waf-scopes-id': '0052'}
l_r = requests.get(l_uri, headers=l_headers)
assert l_r.status_code == 200,\
"expecting 200, got {resp_code} since user-agent changed to chowdah".format(resp_code=l_r.status_code)
# ------------------------------------------------------
# Expect 401 due to new UA
# ------------------------------------------------------
l_uri = G_TEST_HOST+'/test.html'
l_headers = {'host': 'mybot.com',
'user-agent': 'chowdah',
'waf-scopes-id': '0052'}
l_r = requests.get(l_uri, headers=l_headers)
assert l_r.status_code == 401,\
"expecting 401, got {resp_code} since user-agent changed to chowdah".format(resp_code=l_r.status_code)
# ------------------------------------------------------
# check negative test - missing customer_id field
# ------------------------------------------------------
l_cust_id = l_json_payload.pop('customer_id')
l_n2_result = requests.post(l_url, json=l_json_payload)
assert l_n2_result.status_code == 500,\
'expected 500 since customer_id {} is removed'.format(l_cust_id)
| [
[
[
229,
239
],
[
743,
753
],
[
788,
798
],
[
812,
822
],
[
2169,
2179
]
],
[
[
247,
249
],
[
1311,
1313
],
[
1327,
1329
],
[
1378,
1380
],
[
1395,
1397
],
[
1485,
1487
],
[
1502,
1504
],
[
1585,
1587
],
[
1602,
1604
],
[
1673,
1675
],
[
1690,
1692
],
[
1762,
1764
],
[
1779,
1781
],
[
1852,
1854
],
[
1869,
1871
],
[
1955,
1957
],
[
1971,
1973
],
[
2071,
2073
],
[
2088,
2090
],
[
4496,
4498
],
[
4512,
4514
],
[
4561,
4563
],
[
4578,
4580
],
[
6814,
6816
],
[
6830,
6832
],
[
6881,
6883
],
[
6898,
6900
],
[
10093,
10095
],
[
10109,
10111
],
[
10162,
10164
],
[
10179,
10181
],
[
13588,
13590
],
[
13604,
13606
],
[
13655,
13657
],
[
13672,
13674
],
[
16598,
16600
],
[
16614,
16616
],
[
16666,
16668
],
[
16683,
16685
],
[
19599,
19601
],
[
19615,
19617
],
[
19667,
19669
],
[
19684,
19686
],
[
21818,
21820
],
[
21834,
21836
],
[
21879,
21881
],
[
21896,
21898
],
[
22198,
22200
]
],
[
[
257,
261
],
[
4728,
4732
],
[
5462,
5466
],
[
7054,
7058
],
[
7817,
7821
],
[
10341,
10345
],
[
11107,
11111
],
[
13828,
13832
],
[
14534,
14538
],
[
16833,
16837
],
[
17536,
17540
],
[
19834,
19838
],
[
20533,
20537
],
[
22675,
22679
]
],
[
[
269,
273
],
[
3210,
3214
],
[
3596,
3600
],
[
13365,
13369
]
],
[
[
281,
289
],
[
5012,
5020
],
[
7363,
7371
],
[
10649,
10657
],
[
14093,
14101
],
[
17136,
17144
],
[
20133,
20141
],
[
23620,
23628
]
],
[
[
297,
305
],
[
4155,
4163
],
[
5369,
5377
],
[
5857,
5865
],
[
6450,
6458
],
[
7724,
7732
],
[
8232,
8240
],
[
8663,
8671
],
[
9302,
9310
],
[
9791,
9799
],
[
11014,
11022
],
[
11539,
11547
],
[
12074,
12082
],
[
12579,
12587
],
[
12666,
12674
],
[
12955,
12963
],
[
13042,
13050
],
[
14441,
14449
],
[
15004,
15012
],
[
15091,
15099
],
[
15592,
15600
],
[
15679,
15687
],
[
16229,
16237
],
[
17443,
17451
],
[
17968,
17976
],
[
18485,
18493
],
[
19198,
19206
],
[
20440,
20448
],
[
20920,
20928
],
[
21379,
21387
],
[
23030,
23038
],
[
23690,
23698
],
[
24144,
24152
],
[
24652,
24660
],
[
25082,
25090
]
],
[
[
313,
319
],
[
1108,
1114
]
],
[
[
494,
505
],
[
4008,
4019
],
[
5251,
5262
],
[
5710,
5721
],
[
6304,
6315
],
[
7606,
7617
],
[
8086,
8097
],
[
8517,
8528
],
[
9146,
9157
],
[
9630,
9641
],
[
10896,
10907
],
[
11378,
11389
],
[
11915,
11926
],
[
12437,
12448
],
[
12808,
12819
],
[
14323,
14334
],
[
14862,
14873
],
[
15445,
15456
],
[
16065,
16076
],
[
17367,
17378
],
[
17804,
17815
],
[
18321,
18332
],
[
19043,
19054
],
[
20364,
20375
],
[
20765,
20776
],
[
21224,
21235
],
[
21771,
21782
],
[
22869,
22880
],
[
23983,
23994
],
[
24495,
24506
]
],
[
[
713,
724
],
[
3550,
3561
]
],
[
[
1129,
1155
],
[
3371,
3397
]
],
[
[
3617,
3639
]
],
[
[
5935,
5959
]
],
[
[
8798,
8824
]
],
[
[
12152,
12176
]
],
[
[
15827,
15845
]
],
[
[
18612,
18638
]
],
[
[
21704,
21729
]
]
] |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""The setup script."""
from setuptools import find_packages, setup
test_requirements = [
"black>=19.10b0",
"flake8>=3.8.3",
"flake8-debugger>=3.2.1",
]
dev_requirements = [
*test_requirements,
"wheel>=0.34.2",
]
requirements = [
"cdp-backend[pipeline]==3.0.2",
"cdp-scrapers[king_county]>=0.3.2",
]
extra_requirements = {
"test": test_requirements,
"dev": dev_requirements,
"all": [
*requirements,
*dev_requirements,
],
}
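# Install sketch (assumed editable checkout): `pip install -e .` for the base
# package, or `pip install -e .[dev]` / `pip install -e .[all]` for the extras above.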
setup(
author="JacksonMaxfield",
classifiers=[
"Development Status :: 2 - Pre-Alpha",
"Intended Audience :: Developers",
"License :: OSI Approved :: MIT License",
"Natural Language :: English",
"Programming Language :: Python :: 3.9",
],
description="Package containing the gather functions for Example.",
install_requires=requirements,
license="MIT license",
long_description_content_type="text/markdown",
include_package_data=True,
keywords="civic technology, open government",
name="cdp-king_county-backend",
packages=find_packages(exclude=["tests", "*.tests", "*.tests.*"]),
python_requires=">=3.9",
tests_require=test_requirements,
extras_require=extra_requirements,
url="https://github.com/CouncilDataProject/king-county",
version="1.0.0",
zip_safe=False,
)
| [
[
[
95,
108
],
[
1141,
1154
]
],
[
[
110,
115
],
[
536,
541
]
],
[
[
117,
134
],
[
241,
258
],
[
415,
432
],
[
1246,
1263
]
],
[
[
215,
231
],
[
445,
461
],
[
508,
524
]
],
[
[
284,
296
],
[
485,
497
],
[
919,
931
]
],
[
[
380,
398
],
[
1284,
1302
]
]
] |
from scrapy.spider import BaseSpider
from scrapy.http import Request
from scrapy.selector import XmlXPathSelector
from openrecipes.spiders.elanaspantry_spider import ElanaspantryMixin
class ElanaspantryfeedSpider(BaseSpider, ElanaspantryMixin):
name = "elanaspantry.feed"
allowed_domains = [
"www.elanaspantry.com",
"feeds.feedburner.com",
"feedproxy.google.com",
]
start_urls = [
"http://feeds.feedburner.com/elanaspantry",
]
def parse(self, response):
xxs = XmlXPathSelector(response)
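# FeedBurner feeds wrap each <item> link; the feedburner:origLink element holds
# the canonical recipe URL, so follow that instead of the proxy link.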
links = xxs.select("//item/*[local-name()='origLink']/text()").extract()
return [Request(x, callback=self.parse_item) for x in links]
| [
[
[
26,
36
],
[
215,
225
]
],
[
[
61,
68
],
[
652,
659
]
],
[
[
97,
113
],
[
527,
543
]
],
[
[
166,
183
],
[
227,
244
]
],
[
[
192,
214
]
]
] |
#!/usr/bin/env python
#-*- encoding: UTF-8 -*-
###############################################
# All rights reserved to: #
# CreceLibre Consultores en Tecnologías Ltda. #
# #
# ©Milton Inostroza Aguilera #
# minostro@minostro.com #
# 2009 #
###############################################
from django.db import models
from AlyMoly.mantenedor.models import Producto, Promocion, Trabajador
class Turno(models.Model):
"""
estado (state):
1 --> open
2 --> closed
"""
fecha_apertura_sistema = models.DateTimeField()
fecha_cierre_sistema = models.DateTimeField(null=True, blank=True)
estado = models.IntegerField(default=1, blank=True)
trabajador = models.ForeignKey(Trabajador, blank=True)
monto_apertura_caja = models.IntegerField(default=0)
monto_cierre_calculado = models.IntegerField(default=0, blank=True)
monto_afecto = models.IntegerField(default=0, blank=True)
monto_exento = models.IntegerField(default=0, blank=True)
def monto_cierre_informado(self):
return self.boletadeposito.total
def estado_turno(self):
if self.estado == 1:
return "Abierto"
else:
return "Cerrado"
def save(self, force_insert=False, force_update=False):
"""
When saving an open shift, check that the worker does not already have
another open shift.
"""
if self.estado == 1 and len(Turno.objects.exclude(id=self.id).filter(trabajador__id=self.trabajador.id).filter(estado=1)) > 0:
raise Exception(u"Usted ya cuenta con un turno abierto.")  # "You already have an open shift."
super(Turno, self).save(force_insert, force_update)
class BoletaDeposito(models.Model):
turno = models.OneToOneField(Turno, blank=True)
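# One counter per cash denomination (20000 down to 10, presumably Chilean pesos),
# plus card payments, other payments, and the grand total.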
veintemil = models.PositiveIntegerField(default=0, blank=True)
diezmil = models.PositiveIntegerField(default=0, blank=True)
cincomil = models.PositiveIntegerField(default=0, blank=True)
dosmil = models.PositiveIntegerField(default=0, blank=True)
mil = models.PositiveIntegerField(default=0, blank=True)
quinientos = models.PositiveIntegerField(default=0, blank=True)
cien = models.PositiveIntegerField(default=0, blank=True)
cincuenta = models.PositiveIntegerField(default=0, blank=True)
diez = models.PositiveIntegerField(default=0, blank=True)
tarjetas = models.PositiveIntegerField(default=0, blank=True)
otros = models.PositiveIntegerField(default=0, blank=True)
total = models.PositiveIntegerField(default=0, blank=True)
class Venta(models.Model):
"""
medio_pago (payment method):
1 --> cash
2 --> other
"""
fecha_venta = models.DateTimeField()
folio_boleta = models.PositiveIntegerField(null=True, blank=True)
monto_total = models.PositiveIntegerField()
monto_afecto = models.PositiveIntegerField()
monto_exento = models.PositiveIntegerField()
cantidad_productos = models.PositiveIntegerField()
medio_pago = models.PositiveIntegerField()
monto_pago = models.PositiveIntegerField(null=True)
turno = models.ForeignKey('Turno')
def __unicode__(self):
return u"%s-%s" % (self.id, self.folio_boleta)
class LineaDetalle(models.Model):
cantidad = models.IntegerField()
precio_venta = models.IntegerField()
precio_venta_total = models.IntegerField()
producto = models.ForeignKey(Producto, null=True, blank=True)
promocion = models.ForeignKey(Promocion, null=True, blank=True)
venta = models.ForeignKey('Venta')
| [
[
[
454,
460
],
[
545,
551
],
[
673,
679
],
[
723,
729
],
[
780,
786
],
[
840,
846
],
[
908,
914
],
[
968,
974
],
[
1030,
1036
],
[
1092,
1098
],
[
1849,
1855
],
[
1876,
1882
],
[
1932,
1938
],
[
1997,
2003
],
[
2063,
2069
],
[
2127,
2133
],
[
2188,
2194
],
[
2256,
2262
],
[
2318,
2324
],
[
2385,
2391
],
[
2447,
2453
],
[
2513,
2519
],
[
2576,
2582
],
[
2639,
2645
],
[
2704,
2710
],
[
2823,
2829
],
[
2865,
2871
],
[
2934,
2940
],
[
2983,
2989
],
[
3032,
3038
],
[
3087,
3093
],
[
3134,
3140
],
[
3181,
3187
],
[
3232,
3238
],
[
3363,
3369
],
[
3393,
3399
],
[
3434,
3440
],
[
3481,
3487
],
[
3518,
3524
],
[
3585,
3591
],
[
3649,
3655
]
],
[
[
499,
507
],
[
3536,
3544
]
],
[
[
509,
518
],
[
3603,
3612
]
],
[
[
520,
530
],
[
858,
868
]
],
[
[
539,
544
],
[
1897,
1902
],
[
1597,
1602
],
[
1780,
1785
]
],
[
[
1834,
1848
]
],
[
[
2698,
2703
]
],
[
[
3350,
3362
]
]
] |
"""Train (basic) densely-connected oracle."""
import os
import time
import multiprocessing as mp
import pandas as pd
import torch
from torch import optim
from torch.utils.data import DataLoader, Subset, TensorDataset, WeightedRandomSampler
from profit.dataset.splitters import split_method_dict
from profit.models.torch import SequenceOracle
from profit.utils.data_utils.tokenizers import AminoAcidTokenizer
from profit.utils.training_utils.torch import losses as L
from profit.utils.training_utils.torch.callbacks import ModelCheckpoint
from profit.utils.training_utils.torch.callbacks import EarlyStopping
from examples.gb1.data import load_dataset
timestep = time.strftime("%Y-%b-%d-%H:%M:%S", time.gmtime())
device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
tensor = torch.cuda.FloatTensor if torch.cuda.is_available() else torch.Tensor
splits = ["train", "valid"]
# Preprocess + load the dataset
dataset = load_dataset("lstm", "primary", labels="Fitness", num_data=-1,
filetype="mdb", as_numpy=False, vocab="aa20")
# Stratify train/val/test sets s.t. the target labels are equally represented in
# each subset. Each subset will have the same ratio of low/mid/high variants in
# each batch as the full dataset. See: https://discuss.pytorch.org/t/29907/2
_dataset = dataset[:]["arr_0"]
_labels = dataset[:]["arr_1"].view(-1)
# # Remove samples below a certain threshold
# high_idx = torch.where(_labels > _labels.mean())
# dataset = Subset(dataset, sorted(high_idx))
# _dataset = _dataset[high_idx]
# _labels = _labels[high_idx]
# Compute sample weights (each sample should get its own weight)
def sampler(labels: torch.Tensor,
nbins: int = 10,
stratify: bool = False) -> WeightedRandomSampler:
discretize = pd.qcut if stratify else pd.cut
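# qcut -> equal-frequency bins (stratified); cut -> equal-width bins over the label range.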
bin_labels = torch.LongTensor(discretize(labels.tolist(), nbins,
labels=False, duplicates="drop"))
class_sample_count = torch.LongTensor(
[(bin_labels == t).sum() for t in torch.arange(nbins)])
weight = 1. / class_sample_count.float()
sample_weights = torch.zeros_like(labels)
for t in torch.unique(bin_labels):
sample_weights[bin_labels == t] = weight[t]
return WeightedRandomSampler(sample_weights, len(sample_weights))
# Compute sample weights and add to original dataset
weights = sampler(_labels, nbins=10, stratify=False).weights.type(torch.float)
dataset = TensorDataset(*dataset[:].values(), weights)
# Create subset indicies
subset_idx = split_method_dict["stratified"]().train_valid_test_split(
dataset=_dataset, labels=_labels.tolist(), frac_train=0.9,
frac_valid=0.1, frac_test=0.0, return_idxs=True, n_bins=10)
stratified = {split: Subset(dataset, sorted(idx))
for split, idx in zip(splits, subset_idx)}
# Create stratified sampler (only needed for training)
train_sampler = sampler(stratified["train"][:][1].view(-1), stratify=True)
# Initialize model
tokenizer = AminoAcidTokenizer("aa20")
vocab_size = tokenizer.vocab_size
seqlen = stratified["train"][0][0].size(0)
model = SequenceOracle(seqlen, vocab_size, hidden_size=50, out_size=2).to(device)  # keep the model on the same device as the batches below
# Initialize callbacks
# NOTE: Must set model (within save_clbk) to ensure weights get saved
stop_clbk = EarlyStopping(patience=5, verbose=1)
save_clbk = ModelCheckpoint(os.path.join("bin/3gb1/oracle", timestep),
monitor="val_loss",
verbose=1,
save_weights_only=True)
save_clbk.set_model(model)
# Initialize callbacks
optimizer = optim.AdamW(model.parameters(), lr=1e-3)
epochs = 50
for epoch in range(1, epochs+1):
for split in splits:
summed_loss = 0
data_loader = DataLoader(
dataset=stratified[split],
batch_size=32,
sampler=train_sampler if split == "train" else None,
num_workers=mp.cpu_count(),
pin_memory=torch.cuda.is_available()
)
# Enable/disable dropout
model.train() if split == "train" else model.eval()
for it, batch in enumerate(data_loader):
data = batch[0].long().to(device)
target = batch[1].to(device)
sample_weight = batch[2].to(device)
# One-hot encode (see: https://discuss.pytorch.org/t/507/34)
batch_size, seqlen = data.size()
onehot = torch.zeros(batch_size, seqlen, vocab_size, device=device)  # allocate on the same device as data so scatter_ works on GPU
onehot.scatter_(2, torch.unsqueeze(data, 2), 1)
# Forward pass
pred = model(onehot)
# Loss calculation
nll_loss = L.gaussian_nll_loss(pred, target, reduction="none")
# Reweight nll_loss w/ sample weights
nll_loss = (nll_loss * sample_weight).sum()
summed_loss += nll_loss.item()
loss = nll_loss / batch_size
# Compute gradients and update params/weights
if split == "train":
optimizer.zero_grad()
loss.backward()
optimizer.step()
# Bookkeeping (batch)
if it % 5 == 0 or it+1 == len(data_loader):
print("{} Batch {:04d}/{:d} ({:.2f}%)\tLoss: {:.4f}".format(
split.upper(), it+1, len(data_loader),
100. * ((it+1)/len(data_loader)), loss.item()))
# Bookkeeping (epoch)
avg_loss = summed_loss / len(data_loader.dataset)
print("{} Epoch {}/{}, Average NLL loss: {:.4f}".format(
split.upper(), epoch, epochs, avg_loss))
# Stop training (based off val loss) and save (top k) ckpts
if split == "valid":
save_clbk.on_epoch_end(epoch, logs={"val_loss": avg_loss})
should_stop = stop_clbk.on_epoch_end(epoch, logs={"val_loss": avg_loss})
if should_stop:
break
else:
continue
break
| [
  [[54, 56], [3358, 3360]],
  [[64, 68], [669, 673], [704, 708]],
  [[76, 97], [3928, 3930]],
  [[106, 118], [1791, 1793], [1816, 1818]],
  [[127, 132], [728, 733], [751, 756], [824, 829], [798, 803], [855, 860], [2450, 2455], [3967, 3972], [4421, 4426], [4496, 4501], [1669, 1674], [1840, 1845], [1996, 2001], [2056, 2061], [2144, 2149], [2182, 2187]],
  [[151, 156], [3603, 3608]],
  [[186, 196], [3761, 3771]],
  [[198, 204], [2763, 2769]],
  [[206, 219], [2473, 2486]],
  [[221, 242], [1751, 1772], [2271, 2292]],
  [[281, 298], [2557, 2574]],
  [[331, 345], [3124, 3138]],
  [[393, 411], [3012, 3030]],
  [[458, 469], [4640, 4641]],
  [[526, 541], [3342, 3357]],
  [[598, 611], [3293, 3306]],
  [[643, 655], [939, 951]],
  [[658, 666], [3390, 3398]],
  [[719, 725], [4185, 4191], [4226, 4232], [4274, 4280]],
  [[789, 795]],
  [[868, 874], [2828, 2834], [3707, 3713]],
  [[929, 936], [1320, 1327], [1350, 1357], [2488, 2495]],
  [[1309, 1317], [2627, 2635]],
  [[1340, 1347], [2402, 2409], [2644, 2651]],
  [[1653, 1660], [2394, 2401], [2921, 2928]],
  [[2384, 2391], [2509, 2516]],
  [[2463, 2470], [2770, 2777]],
  [[2544, 2554], [2836, 2846]],
  [[2742, 2752], [2929, 2939], [3082, 3092], [3793, 3803]],
  [[2905, 2918], [3859, 3872]],
  [[3000, 3009], [3052, 3061]],
  [[3039, 3049], [3147, 3157], [4453, 4463]],
  [[3073, 3079], [3139, 3145]],
  [[3116, 3121], [3560, 3565], [3615, 3620], [4045, 4050], [4084, 4089], [4572, 4577]],
  [[3281, 3290], [5773, 5782]],
  [[3330, 3339], [3540, 3549], [5688, 5697]],
  [[3591, 3600], [4989, 4998], [5059, 5068]],
  [[3645, 3651], [3679, 3685], [5559, 5565]],
  [[3661, 3666], [5552, 5557], [5711, 5716], [5796, 5801]],
  [[3698, 3703], [3804, 3809], [3876, 3881], [4062, 4067], [4955, 4960], [5264, 5269], [5537, 5542], [5658, 5663]],
  [[3723, 3734], [4810, 4821], [5421, 5432]],
  [[3747, 3758], [4133, 4144], [5153, 5164], [5289, 5300], [5342, 5353], [5439, 5450]],
  [[4110, 4112], [5126, 5128], [5141, 5143], [5279, 5281], [5332, 5334]],
  [[4114, 4119], [4166, 4171], [4214, 4219], [4262, 4267]],
  [[4159, 4163], [4388, 4392], [4512, 4516]],
  [[4205, 4211], [4666, 4672]],
  [[4246, 4259], [4777, 4790]],
  [[4367, 4377], [4433, 4443], [4871, 4881]],
  [[4379, 4385], [4445, 4451]],
  [[4412, 4418], [4477, 4483], [4578, 4584]],
  [[4565, 4569], [4660, 4664]],
  [[4629, 4637], [4766, 4774]],
  [[4754, 4762], [4825, 4833], [4860, 4868]],
  [[4853, 4857], [5027, 5031], [5357, 5361]],
  [[5410, 5418], [5567, 5575], [5736, 5744], [5821, 5829]],
  [[5759, 5770], [5847, 5858]]
] |
# -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import os
import mock
import grpc
from grpc.experimental import aio
import math
import pytest
from proto.marshal.rules.dates import DurationRule, TimestampRule
from google.api_core import client_options
from google.api_core import exceptions as core_exceptions
from google.api_core import future
from google.api_core import gapic_v1
from google.api_core import grpc_helpers
from google.api_core import grpc_helpers_async
from google.api_core import operation_async # type: ignore
from google.api_core import operations_v1
from google.api_core import path_template
from google.auth import credentials as ga_credentials
from google.auth.exceptions import MutualTLSChannelError
from google.cloud.deploy_v1.services.cloud_deploy import CloudDeployAsyncClient
from google.cloud.deploy_v1.services.cloud_deploy import CloudDeployClient
from google.cloud.deploy_v1.services.cloud_deploy import pagers
from google.cloud.deploy_v1.services.cloud_deploy import transports
from google.cloud.deploy_v1.types import cloud_deploy
from google.longrunning import operations_pb2
from google.oauth2 import service_account
from google.protobuf import field_mask_pb2 # type: ignore
from google.protobuf import timestamp_pb2 # type: ignore
import google.auth
def client_cert_source_callback():
return b"cert bytes", b"key bytes"
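# Dummy (cert, key) bytes standing in for a real client certificate source
# in the mTLS tests below.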
# If default endpoint is localhost, then default mtls endpoint will be the same.
# This method modifies the default endpoint so the client can produce a different
# mtls endpoint for endpoint testing purposes.
def modify_default_endpoint(client):
return (
"foo.googleapis.com"
if ("localhost" in client.DEFAULT_ENDPOINT)
else client.DEFAULT_ENDPOINT
)
def test__get_default_mtls_endpoint():
api_endpoint = "example.googleapis.com"
api_mtls_endpoint = "example.mtls.googleapis.com"
sandbox_endpoint = "example.sandbox.googleapis.com"
sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com"
non_googleapi = "api.example.com"
assert CloudDeployClient._get_default_mtls_endpoint(None) is None
assert (
CloudDeployClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint
)
assert (
CloudDeployClient._get_default_mtls_endpoint(api_mtls_endpoint)
== api_mtls_endpoint
)
assert (
CloudDeployClient._get_default_mtls_endpoint(sandbox_endpoint)
== sandbox_mtls_endpoint
)
assert (
CloudDeployClient._get_default_mtls_endpoint(sandbox_mtls_endpoint)
== sandbox_mtls_endpoint
)
assert CloudDeployClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi
@pytest.mark.parametrize("client_class", [CloudDeployClient, CloudDeployAsyncClient,])
def test_cloud_deploy_client_from_service_account_info(client_class):
creds = ga_credentials.AnonymousCredentials()
with mock.patch.object(
service_account.Credentials, "from_service_account_info"
) as factory:
factory.return_value = creds
info = {"valid": True}
client = client_class.from_service_account_info(info)
assert client.transport._credentials == creds
assert isinstance(client, client_class)
assert client.transport._host == "clouddeploy.googleapis.com:443"
@pytest.mark.parametrize(
"transport_class,transport_name",
[
(transports.CloudDeployGrpcTransport, "grpc"),
(transports.CloudDeployGrpcAsyncIOTransport, "grpc_asyncio"),
],
)
def test_cloud_deploy_client_service_account_always_use_jwt(
transport_class, transport_name
):
with mock.patch.object(
service_account.Credentials, "with_always_use_jwt_access", create=True
) as use_jwt:
creds = service_account.Credentials(None, None, None)
transport = transport_class(credentials=creds, always_use_jwt_access=True)
use_jwt.assert_called_once_with(True)
with mock.patch.object(
service_account.Credentials, "with_always_use_jwt_access", create=True
) as use_jwt:
creds = service_account.Credentials(None, None, None)
transport = transport_class(credentials=creds, always_use_jwt_access=False)
use_jwt.assert_not_called()
@pytest.mark.parametrize("client_class", [CloudDeployClient, CloudDeployAsyncClient,])
def test_cloud_deploy_client_from_service_account_file(client_class):
creds = ga_credentials.AnonymousCredentials()
with mock.patch.object(
service_account.Credentials, "from_service_account_file"
) as factory:
factory.return_value = creds
client = client_class.from_service_account_file("dummy/file/path.json")
assert client.transport._credentials == creds
assert isinstance(client, client_class)
client = client_class.from_service_account_json("dummy/file/path.json")
assert client.transport._credentials == creds
assert isinstance(client, client_class)
assert client.transport._host == "clouddeploy.googleapis.com:443"
def test_cloud_deploy_client_get_transport_class():
transport = CloudDeployClient.get_transport_class()
available_transports = [
transports.CloudDeployGrpcTransport,
]
assert transport in available_transports
transport = CloudDeployClient.get_transport_class("grpc")
assert transport == transports.CloudDeployGrpcTransport
@pytest.mark.parametrize(
"client_class,transport_class,transport_name",
[
(CloudDeployClient, transports.CloudDeployGrpcTransport, "grpc"),
(
CloudDeployAsyncClient,
transports.CloudDeployGrpcAsyncIOTransport,
"grpc_asyncio",
),
],
)
@mock.patch.object(
CloudDeployClient, "DEFAULT_ENDPOINT", modify_default_endpoint(CloudDeployClient)
)
@mock.patch.object(
CloudDeployAsyncClient,
"DEFAULT_ENDPOINT",
modify_default_endpoint(CloudDeployAsyncClient),
)
def test_cloud_deploy_client_client_options(
client_class, transport_class, transport_name
):
# Check that if channel is provided we won't create a new one.
with mock.patch.object(CloudDeployClient, "get_transport_class") as gtc:
transport = transport_class(credentials=ga_credentials.AnonymousCredentials())
client = client_class(transport=transport)
gtc.assert_not_called()
# Check that if channel is provided via str we will create a new one.
with mock.patch.object(CloudDeployClient, "get_transport_class") as gtc:
client = client_class(transport=transport_name)
gtc.assert_called()
# Check the case api_endpoint is provided.
options = client_options.ClientOptions(api_endpoint="squid.clam.whelk")
with mock.patch.object(transport_class, "__init__") as patched:
patched.return_value = None
client = client_class(transport=transport_name, client_options=options)
patched.assert_called_once_with(
credentials=None,
credentials_file=None,
host="squid.clam.whelk",
scopes=None,
client_cert_source_for_mtls=None,
quota_project_id=None,
client_info=transports.base.DEFAULT_CLIENT_INFO,
always_use_jwt_access=True,
)
# Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is
# "never".
with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}):
with mock.patch.object(transport_class, "__init__") as patched:
patched.return_value = None
client = client_class(transport=transport_name)
patched.assert_called_once_with(
credentials=None,
credentials_file=None,
host=client.DEFAULT_ENDPOINT,
scopes=None,
client_cert_source_for_mtls=None,
quota_project_id=None,
client_info=transports.base.DEFAULT_CLIENT_INFO,
always_use_jwt_access=True,
)
# Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is
# "always".
with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}):
with mock.patch.object(transport_class, "__init__") as patched:
patched.return_value = None
client = client_class(transport=transport_name)
patched.assert_called_once_with(
credentials=None,
credentials_file=None,
host=client.DEFAULT_MTLS_ENDPOINT,
scopes=None,
client_cert_source_for_mtls=None,
quota_project_id=None,
client_info=transports.base.DEFAULT_CLIENT_INFO,
always_use_jwt_access=True,
)
# Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has
# unsupported value.
with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}):
with pytest.raises(MutualTLSChannelError):
client = client_class(transport=transport_name)
# Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value.
with mock.patch.dict(
os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}
):
with pytest.raises(ValueError):
client = client_class(transport=transport_name)
# Check the case quota_project_id is provided
options = client_options.ClientOptions(quota_project_id="octopus")
with mock.patch.object(transport_class, "__init__") as patched:
patched.return_value = None
client = client_class(client_options=options, transport=transport_name)
patched.assert_called_once_with(
credentials=None,
credentials_file=None,
host=client.DEFAULT_ENDPOINT,
scopes=None,
client_cert_source_for_mtls=None,
quota_project_id="octopus",
client_info=transports.base.DEFAULT_CLIENT_INFO,
always_use_jwt_access=True,
)
@pytest.mark.parametrize(
"client_class,transport_class,transport_name,use_client_cert_env",
[
(CloudDeployClient, transports.CloudDeployGrpcTransport, "grpc", "true"),
(
CloudDeployAsyncClient,
transports.CloudDeployGrpcAsyncIOTransport,
"grpc_asyncio",
"true",
),
(CloudDeployClient, transports.CloudDeployGrpcTransport, "grpc", "false"),
(
CloudDeployAsyncClient,
transports.CloudDeployGrpcAsyncIOTransport,
"grpc_asyncio",
"false",
),
],
)
@mock.patch.object(
CloudDeployClient, "DEFAULT_ENDPOINT", modify_default_endpoint(CloudDeployClient)
)
@mock.patch.object(
CloudDeployAsyncClient,
"DEFAULT_ENDPOINT",
modify_default_endpoint(CloudDeployAsyncClient),
)
@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"})
def test_cloud_deploy_client_mtls_env_auto(
client_class, transport_class, transport_name, use_client_cert_env
):
# This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default
# mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists.
# Check the case client_cert_source is provided. Whether client cert is used depends on
# GOOGLE_API_USE_CLIENT_CERTIFICATE value.
with mock.patch.dict(
os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}
):
options = client_options.ClientOptions(
client_cert_source=client_cert_source_callback
)
with mock.patch.object(transport_class, "__init__") as patched:
patched.return_value = None
client = client_class(client_options=options, transport=transport_name)
if use_client_cert_env == "false":
expected_client_cert_source = None
expected_host = client.DEFAULT_ENDPOINT
else:
expected_client_cert_source = client_cert_source_callback
expected_host = client.DEFAULT_MTLS_ENDPOINT
patched.assert_called_once_with(
credentials=None,
credentials_file=None,
host=expected_host,
scopes=None,
client_cert_source_for_mtls=expected_client_cert_source,
quota_project_id=None,
client_info=transports.base.DEFAULT_CLIENT_INFO,
always_use_jwt_access=True,
)
# Check the case ADC client cert is provided. Whether client cert is used depends on
# GOOGLE_API_USE_CLIENT_CERTIFICATE value.
with mock.patch.dict(
os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}
):
with mock.patch.object(transport_class, "__init__") as patched:
with mock.patch(
"google.auth.transport.mtls.has_default_client_cert_source",
return_value=True,
):
with mock.patch(
"google.auth.transport.mtls.default_client_cert_source",
return_value=client_cert_source_callback,
):
if use_client_cert_env == "false":
expected_host = client.DEFAULT_ENDPOINT
expected_client_cert_source = None
else:
expected_host = client.DEFAULT_MTLS_ENDPOINT
expected_client_cert_source = client_cert_source_callback
patched.return_value = None
client = client_class(transport=transport_name)
patched.assert_called_once_with(
credentials=None,
credentials_file=None,
host=expected_host,
scopes=None,
client_cert_source_for_mtls=expected_client_cert_source,
quota_project_id=None,
client_info=transports.base.DEFAULT_CLIENT_INFO,
always_use_jwt_access=True,
)
# Check the case client_cert_source and ADC client cert are not provided.
with mock.patch.dict(
os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}
):
with mock.patch.object(transport_class, "__init__") as patched:
with mock.patch(
"google.auth.transport.mtls.has_default_client_cert_source",
return_value=False,
):
patched.return_value = None
client = client_class(transport=transport_name)
patched.assert_called_once_with(
credentials=None,
credentials_file=None,
host=client.DEFAULT_ENDPOINT,
scopes=None,
client_cert_source_for_mtls=None,
quota_project_id=None,
client_info=transports.base.DEFAULT_CLIENT_INFO,
always_use_jwt_access=True,
)
@pytest.mark.parametrize("client_class", [CloudDeployClient, CloudDeployAsyncClient])
@mock.patch.object(
CloudDeployClient, "DEFAULT_ENDPOINT", modify_default_endpoint(CloudDeployClient)
)
@mock.patch.object(
CloudDeployAsyncClient,
"DEFAULT_ENDPOINT",
modify_default_endpoint(CloudDeployAsyncClient),
)
def test_cloud_deploy_client_get_mtls_endpoint_and_cert_source(client_class):
mock_client_cert_source = mock.Mock()
# Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true".
with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}):
mock_api_endpoint = "foo"
options = client_options.ClientOptions(
client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint
)
api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(
options
)
assert api_endpoint == mock_api_endpoint
assert cert_source == mock_client_cert_source
# Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false".
with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}):
mock_client_cert_source = mock.Mock()
mock_api_endpoint = "foo"
options = client_options.ClientOptions(
client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint
)
api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(
options
)
assert api_endpoint == mock_api_endpoint
assert cert_source is None
# Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never".
with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}):
api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source()
assert api_endpoint == client_class.DEFAULT_ENDPOINT
assert cert_source is None
# Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always".
with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}):
api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source()
assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT
assert cert_source is None
# Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist.
with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}):
with mock.patch(
"google.auth.transport.mtls.has_default_client_cert_source",
return_value=False,
):
api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source()
assert api_endpoint == client_class.DEFAULT_ENDPOINT
assert cert_source is None
# Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists.
with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}):
with mock.patch(
"google.auth.transport.mtls.has_default_client_cert_source",
return_value=True,
):
with mock.patch(
"google.auth.transport.mtls.default_client_cert_source",
return_value=mock_client_cert_source,
):
(
api_endpoint,
cert_source,
) = client_class.get_mtls_endpoint_and_cert_source()
assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT
assert cert_source == mock_client_cert_source
@pytest.mark.parametrize(
"client_class,transport_class,transport_name",
[
(CloudDeployClient, transports.CloudDeployGrpcTransport, "grpc"),
(
CloudDeployAsyncClient,
transports.CloudDeployGrpcAsyncIOTransport,
"grpc_asyncio",
),
],
)
def test_cloud_deploy_client_client_options_scopes(
client_class, transport_class, transport_name
):
# Check the case scopes are provided.
options = client_options.ClientOptions(scopes=["1", "2"],)
with mock.patch.object(transport_class, "__init__") as patched:
patched.return_value = None
client = client_class(client_options=options, transport=transport_name)
patched.assert_called_once_with(
credentials=None,
credentials_file=None,
host=client.DEFAULT_ENDPOINT,
scopes=["1", "2"],
client_cert_source_for_mtls=None,
quota_project_id=None,
client_info=transports.base.DEFAULT_CLIENT_INFO,
always_use_jwt_access=True,
)
@pytest.mark.parametrize(
"client_class,transport_class,transport_name",
[
(CloudDeployClient, transports.CloudDeployGrpcTransport, "grpc"),
(
CloudDeployAsyncClient,
transports.CloudDeployGrpcAsyncIOTransport,
"grpc_asyncio",
),
],
)
def test_cloud_deploy_client_client_options_credentials_file(
client_class, transport_class, transport_name
):
# Check the case credentials file is provided.
options = client_options.ClientOptions(credentials_file="credentials.json")
with mock.patch.object(transport_class, "__init__") as patched:
patched.return_value = None
client = client_class(client_options=options, transport=transport_name)
patched.assert_called_once_with(
credentials=None,
credentials_file="credentials.json",
host=client.DEFAULT_ENDPOINT,
scopes=None,
client_cert_source_for_mtls=None,
quota_project_id=None,
client_info=transports.base.DEFAULT_CLIENT_INFO,
always_use_jwt_access=True,
)
def test_cloud_deploy_client_client_options_from_dict():
with mock.patch(
"google.cloud.deploy_v1.services.cloud_deploy.transports.CloudDeployGrpcTransport.__init__"
) as grpc_transport:
grpc_transport.return_value = None
client = CloudDeployClient(client_options={"api_endpoint": "squid.clam.whelk"})
grpc_transport.assert_called_once_with(
credentials=None,
credentials_file=None,
host="squid.clam.whelk",
scopes=None,
client_cert_source_for_mtls=None,
quota_project_id=None,
client_info=transports.base.DEFAULT_CLIENT_INFO,
always_use_jwt_access=True,
)
@pytest.mark.parametrize(
"request_type", [cloud_deploy.ListDeliveryPipelinesRequest, dict,]
)
def test_list_delivery_pipelines(request_type, transport: str = "grpc"):
client = CloudDeployClient(
credentials=ga_credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.list_delivery_pipelines), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = cloud_deploy.ListDeliveryPipelinesResponse(
next_page_token="next_page_token_value", unreachable=["unreachable_value"],
)
response = client.list_delivery_pipelines(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == cloud_deploy.ListDeliveryPipelinesRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, pagers.ListDeliveryPipelinesPager)
assert response.next_page_token == "next_page_token_value"
assert response.unreachable == ["unreachable_value"]
def test_list_delivery_pipelines_empty_call():
# This test is a coverage failsafe to make sure that totally empty calls,
# i.e. request == None and no flattened fields passed, work.
client = CloudDeployClient(
credentials=ga_credentials.AnonymousCredentials(), transport="grpc",
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.list_delivery_pipelines), "__call__"
) as call:
client.list_delivery_pipelines()
call.assert_called()
_, args, _ = call.mock_calls[0]
assert args[0] == cloud_deploy.ListDeliveryPipelinesRequest()
@pytest.mark.asyncio
async def test_list_delivery_pipelines_async(
transport: str = "grpc_asyncio",
request_type=cloud_deploy.ListDeliveryPipelinesRequest,
):
client = CloudDeployAsyncClient(
credentials=ga_credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.list_delivery_pipelines), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
cloud_deploy.ListDeliveryPipelinesResponse(
next_page_token="next_page_token_value",
unreachable=["unreachable_value"],
)
)
response = await client.list_delivery_pipelines(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0] == cloud_deploy.ListDeliveryPipelinesRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, pagers.ListDeliveryPipelinesAsyncPager)
assert response.next_page_token == "next_page_token_value"
assert response.unreachable == ["unreachable_value"]
@pytest.mark.asyncio
async def test_list_delivery_pipelines_async_from_dict():
await test_list_delivery_pipelines_async(request_type=dict)
def test_list_delivery_pipelines_field_headers():
client = CloudDeployClient(credentials=ga_credentials.AnonymousCredentials(),)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = cloud_deploy.ListDeliveryPipelinesRequest()
request.parent = "parent/value"
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.list_delivery_pipelines), "__call__"
) as call:
call.return_value = cloud_deploy.ListDeliveryPipelinesResponse()
client.list_delivery_pipelines(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the field header was sent.
_, _, kw = call.mock_calls[0]
assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"]
@pytest.mark.asyncio
async def test_list_delivery_pipelines_field_headers_async():
client = CloudDeployAsyncClient(credentials=ga_credentials.AnonymousCredentials(),)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = cloud_deploy.ListDeliveryPipelinesRequest()
request.parent = "parent/value"
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.list_delivery_pipelines), "__call__"
) as call:
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
cloud_deploy.ListDeliveryPipelinesResponse()
)
await client.list_delivery_pipelines(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the field header was sent.
_, _, kw = call.mock_calls[0]
assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"]
def test_list_delivery_pipelines_flattened():
client = CloudDeployClient(credentials=ga_credentials.AnonymousCredentials(),)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.list_delivery_pipelines), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = cloud_deploy.ListDeliveryPipelinesResponse()
# Call the method with a truthy value for each flattened field,
# using the keyword arguments to the method.
client.list_delivery_pipelines(parent="parent_value",)
# Establish that the underlying call was made with the expected
# request object values.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
arg = args[0].parent
mock_val = "parent_value"
assert arg == mock_val
def test_list_delivery_pipelines_flattened_error():
client = CloudDeployClient(credentials=ga_credentials.AnonymousCredentials(),)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
client.list_delivery_pipelines(
cloud_deploy.ListDeliveryPipelinesRequest(), parent="parent_value",
)
@pytest.mark.asyncio
async def test_list_delivery_pipelines_flattened_async():
client = CloudDeployAsyncClient(credentials=ga_credentials.AnonymousCredentials(),)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.list_delivery_pipelines), "__call__"
) as call:
# Designate an appropriate return value for the call.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
cloud_deploy.ListDeliveryPipelinesResponse()
)
# Call the method with a truthy value for each flattened field,
# using the keyword arguments to the method.
response = await client.list_delivery_pipelines(parent="parent_value",)
# Establish that the underlying call was made with the expected
# request object values.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
arg = args[0].parent
mock_val = "parent_value"
assert arg == mock_val
@pytest.mark.asyncio
async def test_list_delivery_pipelines_flattened_error_async():
client = CloudDeployAsyncClient(credentials=ga_credentials.AnonymousCredentials(),)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
await client.list_delivery_pipelines(
cloud_deploy.ListDeliveryPipelinesRequest(), parent="parent_value",
)
def test_list_delivery_pipelines_pager(transport_name: str = "grpc"):
client = CloudDeployClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport_name,
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.list_delivery_pipelines), "__call__"
) as call:
# Set the response to a series of pages.
call.side_effect = (
cloud_deploy.ListDeliveryPipelinesResponse(
delivery_pipelines=[
cloud_deploy.DeliveryPipeline(),
cloud_deploy.DeliveryPipeline(),
cloud_deploy.DeliveryPipeline(),
],
next_page_token="abc",
),
cloud_deploy.ListDeliveryPipelinesResponse(
delivery_pipelines=[], next_page_token="def",
),
cloud_deploy.ListDeliveryPipelinesResponse(
delivery_pipelines=[cloud_deploy.DeliveryPipeline(),],
next_page_token="ghi",
),
cloud_deploy.ListDeliveryPipelinesResponse(
delivery_pipelines=[
cloud_deploy.DeliveryPipeline(),
cloud_deploy.DeliveryPipeline(),
],
),
RuntimeError,
)
metadata = ()
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)),
)
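        # The pager should capture this routing metadata so subsequent page
        # requests carry the same headers.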
pager = client.list_delivery_pipelines(request={})
assert pager._metadata == metadata
results = [i for i in pager]
assert len(results) == 6
assert all(isinstance(i, cloud_deploy.DeliveryPipeline) for i in results)
def test_list_delivery_pipelines_pages(transport_name: str = "grpc"):
client = CloudDeployClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport_name,
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.list_delivery_pipelines), "__call__"
) as call:
# Set the response to a series of pages.
call.side_effect = (
cloud_deploy.ListDeliveryPipelinesResponse(
delivery_pipelines=[
cloud_deploy.DeliveryPipeline(),
cloud_deploy.DeliveryPipeline(),
cloud_deploy.DeliveryPipeline(),
],
next_page_token="abc",
),
cloud_deploy.ListDeliveryPipelinesResponse(
delivery_pipelines=[], next_page_token="def",
),
cloud_deploy.ListDeliveryPipelinesResponse(
delivery_pipelines=[cloud_deploy.DeliveryPipeline(),],
next_page_token="ghi",
),
cloud_deploy.ListDeliveryPipelinesResponse(
delivery_pipelines=[
cloud_deploy.DeliveryPipeline(),
cloud_deploy.DeliveryPipeline(),
],
),
RuntimeError,
)
pages = list(client.list_delivery_pipelines(request={}).pages)
for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
assert page_.raw_page.next_page_token == token
@pytest.mark.asyncio
async def test_list_delivery_pipelines_async_pager():
    client = CloudDeployAsyncClient(credentials=ga_credentials.AnonymousCredentials(),)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.list_delivery_pipelines),
"__call__",
new_callable=mock.AsyncMock,
) as call:
# Set the response to a series of pages.
call.side_effect = (
cloud_deploy.ListDeliveryPipelinesResponse(
delivery_pipelines=[
cloud_deploy.DeliveryPipeline(),
cloud_deploy.DeliveryPipeline(),
cloud_deploy.DeliveryPipeline(),
],
next_page_token="abc",
),
cloud_deploy.ListDeliveryPipelinesResponse(
delivery_pipelines=[], next_page_token="def",
),
cloud_deploy.ListDeliveryPipelinesResponse(
delivery_pipelines=[cloud_deploy.DeliveryPipeline(),],
next_page_token="ghi",
),
cloud_deploy.ListDeliveryPipelinesResponse(
delivery_pipelines=[
cloud_deploy.DeliveryPipeline(),
cloud_deploy.DeliveryPipeline(),
],
),
RuntimeError,
)
async_pager = await client.list_delivery_pipelines(request={},)
assert async_pager.next_page_token == "abc"
responses = []
async for response in async_pager:
responses.append(response)
assert len(responses) == 6
assert all(isinstance(i, cloud_deploy.DeliveryPipeline) for i in responses)
@pytest.mark.asyncio
async def test_list_delivery_pipelines_async_pages():
    client = CloudDeployAsyncClient(credentials=ga_credentials.AnonymousCredentials(),)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.list_delivery_pipelines),
"__call__",
new_callable=mock.AsyncMock,
) as call:
# Set the response to a series of pages.
call.side_effect = (
cloud_deploy.ListDeliveryPipelinesResponse(
delivery_pipelines=[
cloud_deploy.DeliveryPipeline(),
cloud_deploy.DeliveryPipeline(),
cloud_deploy.DeliveryPipeline(),
],
next_page_token="abc",
),
cloud_deploy.ListDeliveryPipelinesResponse(
delivery_pipelines=[], next_page_token="def",
),
cloud_deploy.ListDeliveryPipelinesResponse(
delivery_pipelines=[cloud_deploy.DeliveryPipeline(),],
next_page_token="ghi",
),
cloud_deploy.ListDeliveryPipelinesResponse(
delivery_pipelines=[
cloud_deploy.DeliveryPipeline(),
cloud_deploy.DeliveryPipeline(),
],
),
RuntimeError,
)
pages = []
async for page_ in (await client.list_delivery_pipelines(request={})).pages:
pages.append(page_)
for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
assert page_.raw_page.next_page_token == token
@pytest.mark.parametrize(
"request_type", [cloud_deploy.GetDeliveryPipelineRequest, dict,]
)
def test_get_delivery_pipeline(request_type, transport: str = "grpc"):
client = CloudDeployClient(
credentials=ga_credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.get_delivery_pipeline), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = cloud_deploy.DeliveryPipeline(
name="name_value",
uid="uid_value",
description="description_value",
etag="etag_value",
serial_pipeline=cloud_deploy.SerialPipeline(
stages=[cloud_deploy.Stage(target_id="target_id_value")]
),
)
response = client.get_delivery_pipeline(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == cloud_deploy.GetDeliveryPipelineRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, cloud_deploy.DeliveryPipeline)
assert response.name == "name_value"
assert response.uid == "uid_value"
assert response.description == "description_value"
assert response.etag == "etag_value"
def test_get_delivery_pipeline_empty_call():
# This test is a coverage failsafe to make sure that totally empty calls,
# i.e. request == None and no flattened fields passed, work.
client = CloudDeployClient(
credentials=ga_credentials.AnonymousCredentials(), transport="grpc",
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.get_delivery_pipeline), "__call__"
) as call:
client.get_delivery_pipeline()
call.assert_called()
_, args, _ = call.mock_calls[0]
assert args[0] == cloud_deploy.GetDeliveryPipelineRequest()
@pytest.mark.asyncio
async def test_get_delivery_pipeline_async(
transport: str = "grpc_asyncio",
request_type=cloud_deploy.GetDeliveryPipelineRequest,
):
client = CloudDeployAsyncClient(
credentials=ga_credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.get_delivery_pipeline), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
cloud_deploy.DeliveryPipeline(
name="name_value",
uid="uid_value",
description="description_value",
etag="etag_value",
)
)
response = await client.get_delivery_pipeline(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0] == cloud_deploy.GetDeliveryPipelineRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, cloud_deploy.DeliveryPipeline)
assert response.name == "name_value"
assert response.uid == "uid_value"
assert response.description == "description_value"
assert response.etag == "etag_value"
@pytest.mark.asyncio
async def test_get_delivery_pipeline_async_from_dict():
await test_get_delivery_pipeline_async(request_type=dict)
def test_get_delivery_pipeline_field_headers():
client = CloudDeployClient(credentials=ga_credentials.AnonymousCredentials(),)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = cloud_deploy.GetDeliveryPipelineRequest()
request.name = "name/value"
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.get_delivery_pipeline), "__call__"
) as call:
call.return_value = cloud_deploy.DeliveryPipeline()
client.get_delivery_pipeline(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the field header was sent.
_, _, kw = call.mock_calls[0]
assert ("x-goog-request-params", "name=name/value",) in kw["metadata"]
@pytest.mark.asyncio
async def test_get_delivery_pipeline_field_headers_async():
client = CloudDeployAsyncClient(credentials=ga_credentials.AnonymousCredentials(),)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = cloud_deploy.GetDeliveryPipelineRequest()
request.name = "name/value"
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.get_delivery_pipeline), "__call__"
) as call:
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
cloud_deploy.DeliveryPipeline()
)
await client.get_delivery_pipeline(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the field header was sent.
_, _, kw = call.mock_calls[0]
assert ("x-goog-request-params", "name=name/value",) in kw["metadata"]
def test_get_delivery_pipeline_flattened():
client = CloudDeployClient(credentials=ga_credentials.AnonymousCredentials(),)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.get_delivery_pipeline), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = cloud_deploy.DeliveryPipeline()
# Call the method with a truthy value for each flattened field,
# using the keyword arguments to the method.
client.get_delivery_pipeline(name="name_value",)
# Establish that the underlying call was made with the expected
# request object values.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
arg = args[0].name
mock_val = "name_value"
assert arg == mock_val
def test_get_delivery_pipeline_flattened_error():
client = CloudDeployClient(credentials=ga_credentials.AnonymousCredentials(),)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
client.get_delivery_pipeline(
cloud_deploy.GetDeliveryPipelineRequest(), name="name_value",
)
@pytest.mark.asyncio
async def test_get_delivery_pipeline_flattened_async():
client = CloudDeployAsyncClient(credentials=ga_credentials.AnonymousCredentials(),)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.get_delivery_pipeline), "__call__"
) as call:
# Designate an appropriate return value for the call.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
cloud_deploy.DeliveryPipeline()
)
# Call the method with a truthy value for each flattened field,
# using the keyword arguments to the method.
response = await client.get_delivery_pipeline(name="name_value",)
# Establish that the underlying call was made with the expected
# request object values.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
arg = args[0].name
mock_val = "name_value"
assert arg == mock_val
@pytest.mark.asyncio
async def test_get_delivery_pipeline_flattened_error_async():
client = CloudDeployAsyncClient(credentials=ga_credentials.AnonymousCredentials(),)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
await client.get_delivery_pipeline(
cloud_deploy.GetDeliveryPipelineRequest(), name="name_value",
)
@pytest.mark.parametrize(
"request_type", [cloud_deploy.CreateDeliveryPipelineRequest, dict,]
)
def test_create_delivery_pipeline(request_type, transport: str = "grpc"):
client = CloudDeployClient(
credentials=ga_credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.create_delivery_pipeline), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = operations_pb2.Operation(name="operations/spam")
response = client.create_delivery_pipeline(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == cloud_deploy.CreateDeliveryPipelineRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, future.Future)
def test_create_delivery_pipeline_empty_call():
# This test is a coverage failsafe to make sure that totally empty calls,
# i.e. request == None and no flattened fields passed, work.
client = CloudDeployClient(
credentials=ga_credentials.AnonymousCredentials(), transport="grpc",
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.create_delivery_pipeline), "__call__"
) as call:
client.create_delivery_pipeline()
call.assert_called()
_, args, _ = call.mock_calls[0]
assert args[0] == cloud_deploy.CreateDeliveryPipelineRequest()
@pytest.mark.asyncio
async def test_create_delivery_pipeline_async(
transport: str = "grpc_asyncio",
request_type=cloud_deploy.CreateDeliveryPipelineRequest,
):
client = CloudDeployAsyncClient(
credentials=ga_credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.create_delivery_pipeline), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
operations_pb2.Operation(name="operations/spam")
)
response = await client.create_delivery_pipeline(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0] == cloud_deploy.CreateDeliveryPipelineRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, future.Future)
@pytest.mark.asyncio
async def test_create_delivery_pipeline_async_from_dict():
await test_create_delivery_pipeline_async(request_type=dict)
def test_create_delivery_pipeline_field_headers():
client = CloudDeployClient(credentials=ga_credentials.AnonymousCredentials(),)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = cloud_deploy.CreateDeliveryPipelineRequest()
request.parent = "parent/value"
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.create_delivery_pipeline), "__call__"
) as call:
call.return_value = operations_pb2.Operation(name="operations/op")
client.create_delivery_pipeline(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the field header was sent.
_, _, kw = call.mock_calls[0]
assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"]
@pytest.mark.asyncio
async def test_create_delivery_pipeline_field_headers_async():
client = CloudDeployAsyncClient(credentials=ga_credentials.AnonymousCredentials(),)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = cloud_deploy.CreateDeliveryPipelineRequest()
request.parent = "parent/value"
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.create_delivery_pipeline), "__call__"
) as call:
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
operations_pb2.Operation(name="operations/op")
)
await client.create_delivery_pipeline(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the field header was sent.
_, _, kw = call.mock_calls[0]
assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"]
def test_create_delivery_pipeline_flattened():
client = CloudDeployClient(credentials=ga_credentials.AnonymousCredentials(),)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.create_delivery_pipeline), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = operations_pb2.Operation(name="operations/op")
# Call the method with a truthy value for each flattened field,
# using the keyword arguments to the method.
client.create_delivery_pipeline(
parent="parent_value",
delivery_pipeline=cloud_deploy.DeliveryPipeline(name="name_value"),
delivery_pipeline_id="delivery_pipeline_id_value",
)
# Establish that the underlying call was made with the expected
# request object values.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
arg = args[0].parent
mock_val = "parent_value"
assert arg == mock_val
arg = args[0].delivery_pipeline
mock_val = cloud_deploy.DeliveryPipeline(name="name_value")
assert arg == mock_val
arg = args[0].delivery_pipeline_id
mock_val = "delivery_pipeline_id_value"
assert arg == mock_val
def test_create_delivery_pipeline_flattened_error():
client = CloudDeployClient(credentials=ga_credentials.AnonymousCredentials(),)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
client.create_delivery_pipeline(
cloud_deploy.CreateDeliveryPipelineRequest(),
parent="parent_value",
delivery_pipeline=cloud_deploy.DeliveryPipeline(name="name_value"),
delivery_pipeline_id="delivery_pipeline_id_value",
)
@pytest.mark.asyncio
async def test_create_delivery_pipeline_flattened_async():
client = CloudDeployAsyncClient(credentials=ga_credentials.AnonymousCredentials(),)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.create_delivery_pipeline), "__call__"
) as call:
# Designate an appropriate return value for the call.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
operations_pb2.Operation(name="operations/spam")
)
# Call the method with a truthy value for each flattened field,
# using the keyword arguments to the method.
response = await client.create_delivery_pipeline(
parent="parent_value",
delivery_pipeline=cloud_deploy.DeliveryPipeline(name="name_value"),
delivery_pipeline_id="delivery_pipeline_id_value",
)
# Establish that the underlying call was made with the expected
# request object values.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
arg = args[0].parent
mock_val = "parent_value"
assert arg == mock_val
arg = args[0].delivery_pipeline
mock_val = cloud_deploy.DeliveryPipeline(name="name_value")
assert arg == mock_val
arg = args[0].delivery_pipeline_id
mock_val = "delivery_pipeline_id_value"
assert arg == mock_val
@pytest.mark.asyncio
async def test_create_delivery_pipeline_flattened_error_async():
client = CloudDeployAsyncClient(credentials=ga_credentials.AnonymousCredentials(),)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
await client.create_delivery_pipeline(
cloud_deploy.CreateDeliveryPipelineRequest(),
parent="parent_value",
delivery_pipeline=cloud_deploy.DeliveryPipeline(name="name_value"),
delivery_pipeline_id="delivery_pipeline_id_value",
)
@pytest.mark.parametrize(
"request_type", [cloud_deploy.UpdateDeliveryPipelineRequest, dict,]
)
def test_update_delivery_pipeline(request_type, transport: str = "grpc"):
client = CloudDeployClient(
credentials=ga_credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.update_delivery_pipeline), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = operations_pb2.Operation(name="operations/spam")
response = client.update_delivery_pipeline(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == cloud_deploy.UpdateDeliveryPipelineRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, future.Future)
def test_update_delivery_pipeline_empty_call():
# This test is a coverage failsafe to make sure that totally empty calls,
# i.e. request == None and no flattened fields passed, work.
client = CloudDeployClient(
credentials=ga_credentials.AnonymousCredentials(), transport="grpc",
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.update_delivery_pipeline), "__call__"
) as call:
client.update_delivery_pipeline()
call.assert_called()
_, args, _ = call.mock_calls[0]
assert args[0] == cloud_deploy.UpdateDeliveryPipelineRequest()
@pytest.mark.asyncio
async def test_update_delivery_pipeline_async(
transport: str = "grpc_asyncio",
request_type=cloud_deploy.UpdateDeliveryPipelineRequest,
):
client = CloudDeployAsyncClient(
credentials=ga_credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.update_delivery_pipeline), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
operations_pb2.Operation(name="operations/spam")
)
response = await client.update_delivery_pipeline(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0] == cloud_deploy.UpdateDeliveryPipelineRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, future.Future)
@pytest.mark.asyncio
async def test_update_delivery_pipeline_async_from_dict():
await test_update_delivery_pipeline_async(request_type=dict)
def test_update_delivery_pipeline_field_headers():
client = CloudDeployClient(credentials=ga_credentials.AnonymousCredentials(),)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = cloud_deploy.UpdateDeliveryPipelineRequest()
request.delivery_pipeline.name = "delivery_pipeline.name/value"
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.update_delivery_pipeline), "__call__"
) as call:
call.return_value = operations_pb2.Operation(name="operations/op")
client.update_delivery_pipeline(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the field header was sent.
_, _, kw = call.mock_calls[0]
assert (
"x-goog-request-params",
"delivery_pipeline.name=delivery_pipeline.name/value",
) in kw["metadata"]
@pytest.mark.asyncio
async def test_update_delivery_pipeline_field_headers_async():
client = CloudDeployAsyncClient(credentials=ga_credentials.AnonymousCredentials(),)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = cloud_deploy.UpdateDeliveryPipelineRequest()
request.delivery_pipeline.name = "delivery_pipeline.name/value"
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.update_delivery_pipeline), "__call__"
) as call:
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
operations_pb2.Operation(name="operations/op")
)
await client.update_delivery_pipeline(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the field header was sent.
_, _, kw = call.mock_calls[0]
assert (
"x-goog-request-params",
"delivery_pipeline.name=delivery_pipeline.name/value",
) in kw["metadata"]
def test_update_delivery_pipeline_flattened():
client = CloudDeployClient(credentials=ga_credentials.AnonymousCredentials(),)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.update_delivery_pipeline), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = operations_pb2.Operation(name="operations/op")
# Call the method with a truthy value for each flattened field,
# using the keyword arguments to the method.
client.update_delivery_pipeline(
delivery_pipeline=cloud_deploy.DeliveryPipeline(name="name_value"),
update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]),
)
# Establish that the underlying call was made with the expected
# request object values.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
arg = args[0].delivery_pipeline
mock_val = cloud_deploy.DeliveryPipeline(name="name_value")
assert arg == mock_val
arg = args[0].update_mask
mock_val = field_mask_pb2.FieldMask(paths=["paths_value"])
assert arg == mock_val
def test_update_delivery_pipeline_flattened_error():
client = CloudDeployClient(credentials=ga_credentials.AnonymousCredentials(),)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
client.update_delivery_pipeline(
cloud_deploy.UpdateDeliveryPipelineRequest(),
delivery_pipeline=cloud_deploy.DeliveryPipeline(name="name_value"),
update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]),
)
@pytest.mark.asyncio
async def test_update_delivery_pipeline_flattened_async():
client = CloudDeployAsyncClient(credentials=ga_credentials.AnonymousCredentials(),)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.update_delivery_pipeline), "__call__"
) as call:
# Designate an appropriate return value for the call.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
operations_pb2.Operation(name="operations/spam")
)
# Call the method with a truthy value for each flattened field,
# using the keyword arguments to the method.
response = await client.update_delivery_pipeline(
delivery_pipeline=cloud_deploy.DeliveryPipeline(name="name_value"),
update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]),
)
# Establish that the underlying call was made with the expected
# request object values.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
arg = args[0].delivery_pipeline
mock_val = cloud_deploy.DeliveryPipeline(name="name_value")
assert arg == mock_val
arg = args[0].update_mask
mock_val = field_mask_pb2.FieldMask(paths=["paths_value"])
assert arg == mock_val
@pytest.mark.asyncio
async def test_update_delivery_pipeline_flattened_error_async():
client = CloudDeployAsyncClient(credentials=ga_credentials.AnonymousCredentials(),)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
await client.update_delivery_pipeline(
cloud_deploy.UpdateDeliveryPipelineRequest(),
delivery_pipeline=cloud_deploy.DeliveryPipeline(name="name_value"),
update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]),
)
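# Hedged sketch: the flattened keyword arguments exercised above are
# shorthand for building the request message explicitly; both forms
# send the same request. Values are illustrative:
def _example_update_delivery_pipeline_request():
    return cloud_deploy.UpdateDeliveryPipelineRequest(
        delivery_pipeline=cloud_deploy.DeliveryPipeline(name="name_value"),
        update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]),
    )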
@pytest.mark.parametrize(
"request_type", [cloud_deploy.DeleteDeliveryPipelineRequest, dict,]
)
def test_delete_delivery_pipeline(request_type, transport: str = "grpc"):
client = CloudDeployClient(
credentials=ga_credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.delete_delivery_pipeline), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = operations_pb2.Operation(name="operations/spam")
response = client.delete_delivery_pipeline(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == cloud_deploy.DeleteDeliveryPipelineRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, future.Future)
def test_delete_delivery_pipeline_empty_call():
# This test is a coverage failsafe to make sure that totally empty calls,
# i.e. request == None and no flattened fields passed, work.
client = CloudDeployClient(
credentials=ga_credentials.AnonymousCredentials(), transport="grpc",
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.delete_delivery_pipeline), "__call__"
) as call:
client.delete_delivery_pipeline()
call.assert_called()
_, args, _ = call.mock_calls[0]
assert args[0] == cloud_deploy.DeleteDeliveryPipelineRequest()
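# Hedged aside: the *_empty_call tests above verify that invoking a
# method with no request object and no flattened fields still reaches
# the stub with a default-constructed request, i.e. the bare call
#     client.delete_delivery_pipeline()
# is equivalent to passing cloud_deploy.DeleteDeliveryPipelineRequest().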
@pytest.mark.asyncio
async def test_delete_delivery_pipeline_async(
transport: str = "grpc_asyncio",
request_type=cloud_deploy.DeleteDeliveryPipelineRequest,
):
client = CloudDeployAsyncClient(
credentials=ga_credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.delete_delivery_pipeline), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
operations_pb2.Operation(name="operations/spam")
)
response = await client.delete_delivery_pipeline(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0] == cloud_deploy.DeleteDeliveryPipelineRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, future.Future)
@pytest.mark.asyncio
async def test_delete_delivery_pipeline_async_from_dict():
await test_delete_delivery_pipeline_async(request_type=dict)
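# Hedged aside: the async transport's stubs return awaitable call
# objects rather than bare responses, which is why the async tests wrap
# their canned responses in grpc_helpers_async.FakeUnaryUnaryCall.
# Awaiting the fake yields the wrapped message, e.g. (inside a
# coroutine):
#
#     fake = grpc_helpers_async.FakeUnaryUnaryCall(
#         operations_pb2.Operation(name="operations/op")
#     )
#     operation = await fake  # the Operation constructed above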
def test_delete_delivery_pipeline_field_headers():
client = CloudDeployClient(credentials=ga_credentials.AnonymousCredentials(),)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = cloud_deploy.DeleteDeliveryPipelineRequest()
request.name = "name/value"
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.delete_delivery_pipeline), "__call__"
) as call:
call.return_value = operations_pb2.Operation(name="operations/op")
client.delete_delivery_pipeline(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the field header was sent.
_, _, kw = call.mock_calls[0]
assert ("x-goog-request-params", "name=name/value",) in kw["metadata"]
@pytest.mark.asyncio
async def test_delete_delivery_pipeline_field_headers_async():
client = CloudDeployAsyncClient(credentials=ga_credentials.AnonymousCredentials(),)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = cloud_deploy.DeleteDeliveryPipelineRequest()
request.name = "name/value"
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.delete_delivery_pipeline), "__call__"
) as call:
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
operations_pb2.Operation(name="operations/op")
)
await client.delete_delivery_pipeline(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the field header was sent.
_, _, kw = call.mock_calls[0]
assert ("x-goog-request-params", "name=name/value",) in kw["metadata"]
def test_delete_delivery_pipeline_flattened():
client = CloudDeployClient(credentials=ga_credentials.AnonymousCredentials(),)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.delete_delivery_pipeline), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = operations_pb2.Operation(name="operations/op")
# Call the method with a truthy value for each flattened field,
# using the keyword arguments to the method.
client.delete_delivery_pipeline(name="name_value",)
# Establish that the underlying call was made with the expected
# request object values.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
arg = args[0].name
mock_val = "name_value"
assert arg == mock_val
def test_delete_delivery_pipeline_flattened_error():
client = CloudDeployClient(credentials=ga_credentials.AnonymousCredentials(),)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
client.delete_delivery_pipeline(
cloud_deploy.DeleteDeliveryPipelineRequest(), name="name_value",
)
@pytest.mark.asyncio
async def test_delete_delivery_pipeline_flattened_async():
client = CloudDeployAsyncClient(credentials=ga_credentials.AnonymousCredentials(),)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.delete_delivery_pipeline), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
operations_pb2.Operation(name="operations/spam")
)
# Call the method with a truthy value for each flattened field,
# using the keyword arguments to the method.
response = await client.delete_delivery_pipeline(name="name_value",)
# Establish that the underlying call was made with the expected
# request object values.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
arg = args[0].name
mock_val = "name_value"
assert arg == mock_val
@pytest.mark.asyncio
async def test_delete_delivery_pipeline_flattened_error_async():
client = CloudDeployAsyncClient(credentials=ga_credentials.AnonymousCredentials(),)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
await client.delete_delivery_pipeline(
cloud_deploy.DeleteDeliveryPipelineRequest(), name="name_value",
)
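# Hedged aside: the methods asserted above to return future.Future are
# long-running operations. Against a real (unmocked) transport a caller
# would typically block on the returned future; the timeout below is
# illustrative:
#
#     operation = client.delete_delivery_pipeline(name="name_value")
#     operation.result(timeout=300)  # waits for the LRO to complete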
@pytest.mark.parametrize("request_type", [cloud_deploy.ListTargetsRequest, dict,])
def test_list_targets(request_type, transport: str = "grpc"):
client = CloudDeployClient(
credentials=ga_credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.list_targets), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = cloud_deploy.ListTargetsResponse(
next_page_token="next_page_token_value", unreachable=["unreachable_value"],
)
response = client.list_targets(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == cloud_deploy.ListTargetsRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, pagers.ListTargetsPager)
assert response.next_page_token == "next_page_token_value"
assert response.unreachable == ["unreachable_value"]
def test_list_targets_empty_call():
# This test is a coverage failsafe to make sure that totally empty calls,
# i.e. request == None and no flattened fields passed, work.
client = CloudDeployClient(
credentials=ga_credentials.AnonymousCredentials(), transport="grpc",
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.list_targets), "__call__") as call:
client.list_targets()
call.assert_called()
_, args, _ = call.mock_calls[0]
assert args[0] == cloud_deploy.ListTargetsRequest()
@pytest.mark.asyncio
async def test_list_targets_async(
transport: str = "grpc_asyncio", request_type=cloud_deploy.ListTargetsRequest
):
client = CloudDeployAsyncClient(
credentials=ga_credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.list_targets), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
cloud_deploy.ListTargetsResponse(
next_page_token="next_page_token_value",
unreachable=["unreachable_value"],
)
)
response = await client.list_targets(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0] == cloud_deploy.ListTargetsRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, pagers.ListTargetsAsyncPager)
assert response.next_page_token == "next_page_token_value"
assert response.unreachable == ["unreachable_value"]
@pytest.mark.asyncio
async def test_list_targets_async_from_dict():
await test_list_targets_async(request_type=dict)
def test_list_targets_field_headers():
client = CloudDeployClient(credentials=ga_credentials.AnonymousCredentials(),)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = cloud_deploy.ListTargetsRequest()
request.parent = "parent/value"
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.list_targets), "__call__") as call:
call.return_value = cloud_deploy.ListTargetsResponse()
client.list_targets(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the field header was sent.
_, _, kw = call.mock_calls[0]
assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"]
@pytest.mark.asyncio
async def test_list_targets_field_headers_async():
client = CloudDeployAsyncClient(credentials=ga_credentials.AnonymousCredentials(),)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = cloud_deploy.ListTargetsRequest()
request.parent = "parent/value"
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.list_targets), "__call__") as call:
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
cloud_deploy.ListTargetsResponse()
)
await client.list_targets(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the field header was sent.
_, _, kw = call.mock_calls[0]
assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"]
def test_list_targets_flattened():
client = CloudDeployClient(credentials=ga_credentials.AnonymousCredentials(),)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.list_targets), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = cloud_deploy.ListTargetsResponse()
# Call the method with a truthy value for each flattened field,
# using the keyword arguments to the method.
client.list_targets(parent="parent_value",)
# Establish that the underlying call was made with the expected
# request object values.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
arg = args[0].parent
mock_val = "parent_value"
assert arg == mock_val
def test_list_targets_flattened_error():
client = CloudDeployClient(credentials=ga_credentials.AnonymousCredentials(),)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
client.list_targets(
cloud_deploy.ListTargetsRequest(), parent="parent_value",
)
@pytest.mark.asyncio
async def test_list_targets_flattened_async():
client = CloudDeployAsyncClient(credentials=ga_credentials.AnonymousCredentials(),)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.list_targets), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
cloud_deploy.ListTargetsResponse()
)
# Call the method with a truthy value for each flattened field,
# using the keyword arguments to the method.
response = await client.list_targets(parent="parent_value",)
# Establish that the underlying call was made with the expected
# request object values.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
arg = args[0].parent
mock_val = "parent_value"
assert arg == mock_val
@pytest.mark.asyncio
async def test_list_targets_flattened_error_async():
client = CloudDeployAsyncClient(credentials=ga_credentials.AnonymousCredentials(),)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
await client.list_targets(
cloud_deploy.ListTargetsRequest(), parent="parent_value",
)
def test_list_targets_pager(transport_name: str = "grpc"):
client = CloudDeployClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport_name,
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.list_targets), "__call__") as call:
# Set the response to a series of pages.
call.side_effect = (
cloud_deploy.ListTargetsResponse(
targets=[
cloud_deploy.Target(),
cloud_deploy.Target(),
cloud_deploy.Target(),
],
next_page_token="abc",
),
cloud_deploy.ListTargetsResponse(targets=[], next_page_token="def",),
cloud_deploy.ListTargetsResponse(
targets=[cloud_deploy.Target(),], next_page_token="ghi",
),
cloud_deploy.ListTargetsResponse(
targets=[cloud_deploy.Target(), cloud_deploy.Target(),],
),
RuntimeError,
)
metadata = ()
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)),
)
pager = client.list_targets(request={})
assert pager._metadata == metadata
results = [i for i in pager]
assert len(results) == 6
assert all(isinstance(i, cloud_deploy.Target) for i in results)
def test_list_targets_pages(transport_name: str = "grpc"):
client = CloudDeployClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport_name,
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.list_targets), "__call__") as call:
# Set the response to a series of pages.
call.side_effect = (
cloud_deploy.ListTargetsResponse(
targets=[
cloud_deploy.Target(),
cloud_deploy.Target(),
cloud_deploy.Target(),
],
next_page_token="abc",
),
cloud_deploy.ListTargetsResponse(targets=[], next_page_token="def",),
cloud_deploy.ListTargetsResponse(
targets=[cloud_deploy.Target(),], next_page_token="ghi",
),
cloud_deploy.ListTargetsResponse(
targets=[cloud_deploy.Target(), cloud_deploy.Target(),],
),
RuntimeError,
)
pages = list(client.list_targets(request={}).pages)
for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
assert page_.raw_page.next_page_token == token
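# Hedged sketch of the pager machinery exercised above: iterating the
# pager yields individual Target messages, while .pages yields one
# ListTargetsResponse per underlying RPC. Helper for illustration only:
def _example_collect_targets(pager):
    targets = []
    for page in pager.pages:  # one ListTargetsResponse per page
        targets.extend(page.targets)  # flatten the repeated field
    return targets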
@pytest.mark.asyncio
async def test_list_targets_async_pager():
    client = CloudDeployAsyncClient(credentials=ga_credentials.AnonymousCredentials(),)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.list_targets), "__call__", new_callable=mock.AsyncMock
) as call:
# Set the response to a series of pages.
call.side_effect = (
cloud_deploy.ListTargetsResponse(
targets=[
cloud_deploy.Target(),
cloud_deploy.Target(),
cloud_deploy.Target(),
],
next_page_token="abc",
),
cloud_deploy.ListTargetsResponse(targets=[], next_page_token="def",),
cloud_deploy.ListTargetsResponse(
targets=[cloud_deploy.Target(),], next_page_token="ghi",
),
cloud_deploy.ListTargetsResponse(
targets=[cloud_deploy.Target(), cloud_deploy.Target(),],
),
RuntimeError,
)
async_pager = await client.list_targets(request={},)
assert async_pager.next_page_token == "abc"
responses = []
async for response in async_pager:
responses.append(response)
assert len(responses) == 6
assert all(isinstance(i, cloud_deploy.Target) for i in responses)
@pytest.mark.asyncio
async def test_list_targets_async_pages():
    client = CloudDeployAsyncClient(credentials=ga_credentials.AnonymousCredentials(),)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.list_targets), "__call__", new_callable=mock.AsyncMock
) as call:
# Set the response to a series of pages.
call.side_effect = (
cloud_deploy.ListTargetsResponse(
targets=[
cloud_deploy.Target(),
cloud_deploy.Target(),
cloud_deploy.Target(),
],
next_page_token="abc",
),
cloud_deploy.ListTargetsResponse(targets=[], next_page_token="def",),
cloud_deploy.ListTargetsResponse(
targets=[cloud_deploy.Target(),], next_page_token="ghi",
),
cloud_deploy.ListTargetsResponse(
targets=[cloud_deploy.Target(), cloud_deploy.Target(),],
),
RuntimeError,
)
pages = []
async for page_ in (await client.list_targets(request={})).pages:
pages.append(page_)
for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
assert page_.raw_page.next_page_token == token
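# Hedged sketch of the async counterpart: the awaited list_targets call
# returns an async pager whose items (and .pages) are consumed with
# "async for", as the tests above do. Helper for illustration only:
async def _example_collect_targets_async(async_pager):
    targets = []
    async for target in async_pager:  # Target messages across all pages
        targets.append(target)
    return targets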
@pytest.mark.parametrize("request_type", [cloud_deploy.GetTargetRequest, dict,])
def test_get_target(request_type, transport: str = "grpc"):
client = CloudDeployClient(
credentials=ga_credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.get_target), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = cloud_deploy.Target(
name="name_value",
target_id="target_id_value",
uid="uid_value",
description="description_value",
require_approval=True,
etag="etag_value",
gke=cloud_deploy.GkeCluster(cluster="cluster_value"),
)
response = client.get_target(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == cloud_deploy.GetTargetRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, cloud_deploy.Target)
assert response.name == "name_value"
assert response.target_id == "target_id_value"
assert response.uid == "uid_value"
assert response.description == "description_value"
assert response.require_approval is True
assert response.etag == "etag_value"
def test_get_target_empty_call():
# This test is a coverage failsafe to make sure that totally empty calls,
# i.e. request == None and no flattened fields passed, work.
client = CloudDeployClient(
credentials=ga_credentials.AnonymousCredentials(), transport="grpc",
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.get_target), "__call__") as call:
client.get_target()
call.assert_called()
_, args, _ = call.mock_calls[0]
assert args[0] == cloud_deploy.GetTargetRequest()
@pytest.mark.asyncio
async def test_get_target_async(
transport: str = "grpc_asyncio", request_type=cloud_deploy.GetTargetRequest
):
client = CloudDeployAsyncClient(
credentials=ga_credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.get_target), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
cloud_deploy.Target(
name="name_value",
target_id="target_id_value",
uid="uid_value",
description="description_value",
require_approval=True,
etag="etag_value",
)
)
response = await client.get_target(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0] == cloud_deploy.GetTargetRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, cloud_deploy.Target)
assert response.name == "name_value"
assert response.target_id == "target_id_value"
assert response.uid == "uid_value"
assert response.description == "description_value"
assert response.require_approval is True
assert response.etag == "etag_value"
@pytest.mark.asyncio
async def test_get_target_async_from_dict():
await test_get_target_async(request_type=dict)
def test_get_target_field_headers():
client = CloudDeployClient(credentials=ga_credentials.AnonymousCredentials(),)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = cloud_deploy.GetTargetRequest()
request.name = "name/value"
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.get_target), "__call__") as call:
call.return_value = cloud_deploy.Target()
client.get_target(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the field header was sent.
_, _, kw = call.mock_calls[0]
assert ("x-goog-request-params", "name=name/value",) in kw["metadata"]
@pytest.mark.asyncio
async def test_get_target_field_headers_async():
client = CloudDeployAsyncClient(credentials=ga_credentials.AnonymousCredentials(),)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = cloud_deploy.GetTargetRequest()
request.name = "name/value"
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.get_target), "__call__") as call:
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(cloud_deploy.Target())
await client.get_target(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the field header was sent.
_, _, kw = call.mock_calls[0]
assert ("x-goog-request-params", "name=name/value",) in kw["metadata"]
def test_get_target_flattened():
client = CloudDeployClient(credentials=ga_credentials.AnonymousCredentials(),)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.get_target), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = cloud_deploy.Target()
# Call the method with a truthy value for each flattened field,
# using the keyword arguments to the method.
client.get_target(name="name_value",)
# Establish that the underlying call was made with the expected
# request object values.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
arg = args[0].name
mock_val = "name_value"
assert arg == mock_val
def test_get_target_flattened_error():
client = CloudDeployClient(credentials=ga_credentials.AnonymousCredentials(),)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
client.get_target(
cloud_deploy.GetTargetRequest(), name="name_value",
)
@pytest.mark.asyncio
async def test_get_target_flattened_async():
client = CloudDeployAsyncClient(credentials=ga_credentials.AnonymousCredentials(),)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.get_target), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(cloud_deploy.Target())
# Call the method with a truthy value for each flattened field,
# using the keyword arguments to the method.
response = await client.get_target(name="name_value",)
# Establish that the underlying call was made with the expected
# request object values.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
arg = args[0].name
mock_val = "name_value"
assert arg == mock_val
@pytest.mark.asyncio
async def test_get_target_flattened_error_async():
client = CloudDeployAsyncClient(credentials=ga_credentials.AnonymousCredentials(),)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
await client.get_target(
cloud_deploy.GetTargetRequest(), name="name_value",
)
@pytest.mark.parametrize("request_type", [cloud_deploy.CreateTargetRequest, dict,])
def test_create_target(request_type, transport: str = "grpc"):
client = CloudDeployClient(
credentials=ga_credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.create_target), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = operations_pb2.Operation(name="operations/spam")
response = client.create_target(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == cloud_deploy.CreateTargetRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, future.Future)
def test_create_target_empty_call():
# This test is a coverage failsafe to make sure that totally empty calls,
# i.e. request == None and no flattened fields passed, work.
client = CloudDeployClient(
credentials=ga_credentials.AnonymousCredentials(), transport="grpc",
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.create_target), "__call__") as call:
client.create_target()
call.assert_called()
_, args, _ = call.mock_calls[0]
assert args[0] == cloud_deploy.CreateTargetRequest()
@pytest.mark.asyncio
async def test_create_target_async(
transport: str = "grpc_asyncio", request_type=cloud_deploy.CreateTargetRequest
):
client = CloudDeployAsyncClient(
credentials=ga_credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.create_target), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
operations_pb2.Operation(name="operations/spam")
)
response = await client.create_target(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0] == cloud_deploy.CreateTargetRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, future.Future)
@pytest.mark.asyncio
async def test_create_target_async_from_dict():
await test_create_target_async(request_type=dict)
def test_create_target_field_headers():
client = CloudDeployClient(credentials=ga_credentials.AnonymousCredentials(),)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = cloud_deploy.CreateTargetRequest()
request.parent = "parent/value"
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.create_target), "__call__") as call:
call.return_value = operations_pb2.Operation(name="operations/op")
client.create_target(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the field header was sent.
_, _, kw = call.mock_calls[0]
assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"]
@pytest.mark.asyncio
async def test_create_target_field_headers_async():
client = CloudDeployAsyncClient(credentials=ga_credentials.AnonymousCredentials(),)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = cloud_deploy.CreateTargetRequest()
request.parent = "parent/value"
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.create_target), "__call__") as call:
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
operations_pb2.Operation(name="operations/op")
)
await client.create_target(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the field header was sent.
_, _, kw = call.mock_calls[0]
assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"]
def test_create_target_flattened():
client = CloudDeployClient(credentials=ga_credentials.AnonymousCredentials(),)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.create_target), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = operations_pb2.Operation(name="operations/op")
# Call the method with a truthy value for each flattened field,
# using the keyword arguments to the method.
client.create_target(
parent="parent_value",
target=cloud_deploy.Target(name="name_value"),
target_id="target_id_value",
)
# Establish that the underlying call was made with the expected
# request object values.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
arg = args[0].parent
mock_val = "parent_value"
assert arg == mock_val
arg = args[0].target
mock_val = cloud_deploy.Target(name="name_value")
assert arg == mock_val
arg = args[0].target_id
mock_val = "target_id_value"
assert arg == mock_val
def test_create_target_flattened_error():
client = CloudDeployClient(credentials=ga_credentials.AnonymousCredentials(),)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
client.create_target(
cloud_deploy.CreateTargetRequest(),
parent="parent_value",
target=cloud_deploy.Target(name="name_value"),
target_id="target_id_value",
)
@pytest.mark.asyncio
async def test_create_target_flattened_async():
client = CloudDeployAsyncClient(credentials=ga_credentials.AnonymousCredentials(),)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.create_target), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
operations_pb2.Operation(name="operations/spam")
)
# Call the method with a truthy value for each flattened field,
# using the keyword arguments to the method.
response = await client.create_target(
parent="parent_value",
target=cloud_deploy.Target(name="name_value"),
target_id="target_id_value",
)
# Establish that the underlying call was made with the expected
# request object values.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
arg = args[0].parent
mock_val = "parent_value"
assert arg == mock_val
arg = args[0].target
mock_val = cloud_deploy.Target(name="name_value")
assert arg == mock_val
arg = args[0].target_id
mock_val = "target_id_value"
assert arg == mock_val
@pytest.mark.asyncio
async def test_create_target_flattened_error_async():
client = CloudDeployAsyncClient(credentials=ga_credentials.AnonymousCredentials(),)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
await client.create_target(
cloud_deploy.CreateTargetRequest(),
parent="parent_value",
target=cloud_deploy.Target(name="name_value"),
target_id="target_id_value",
)
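# Hedged sketch: as with the pipeline methods, the flattened form of
# create_target mirrors an explicitly constructed request; values are
# illustrative:
def _example_create_target_request():
    return cloud_deploy.CreateTargetRequest(
        parent="parent_value",
        target=cloud_deploy.Target(name="name_value"),
        target_id="target_id_value",
    )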
@pytest.mark.parametrize("request_type", [cloud_deploy.UpdateTargetRequest, dict,])
def test_update_target(request_type, transport: str = "grpc"):
client = CloudDeployClient(
credentials=ga_credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.update_target), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = operations_pb2.Operation(name="operations/spam")
response = client.update_target(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == cloud_deploy.UpdateTargetRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, future.Future)
def test_update_target_empty_call():
# This test is a coverage failsafe to make sure that totally empty calls,
# i.e. request == None and no flattened fields passed, work.
client = CloudDeployClient(
credentials=ga_credentials.AnonymousCredentials(), transport="grpc",
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.update_target), "__call__") as call:
client.update_target()
call.assert_called()
_, args, _ = call.mock_calls[0]
assert args[0] == cloud_deploy.UpdateTargetRequest()
@pytest.mark.asyncio
async def test_update_target_async(
transport: str = "grpc_asyncio", request_type=cloud_deploy.UpdateTargetRequest
):
client = CloudDeployAsyncClient(
credentials=ga_credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.update_target), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
operations_pb2.Operation(name="operations/spam")
)
response = await client.update_target(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0] == cloud_deploy.UpdateTargetRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, future.Future)
@pytest.mark.asyncio
async def test_update_target_async_from_dict():
await test_update_target_async(request_type=dict)
def test_update_target_field_headers():
client = CloudDeployClient(credentials=ga_credentials.AnonymousCredentials(),)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = cloud_deploy.UpdateTargetRequest()
request.target.name = "target.name/value"
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.update_target), "__call__") as call:
call.return_value = operations_pb2.Operation(name="operations/op")
client.update_target(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the field header was sent.
_, _, kw = call.mock_calls[0]
assert ("x-goog-request-params", "target.name=target.name/value",) in kw["metadata"]
@pytest.mark.asyncio
async def test_update_target_field_headers_async():
client = CloudDeployAsyncClient(credentials=ga_credentials.AnonymousCredentials(),)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = cloud_deploy.UpdateTargetRequest()
request.target.name = "target.name/value"
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.update_target), "__call__") as call:
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
operations_pb2.Operation(name="operations/op")
)
await client.update_target(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the field header was sent.
_, _, kw = call.mock_calls[0]
assert ("x-goog-request-params", "target.name=target.name/value",) in kw["metadata"]
def test_update_target_flattened():
client = CloudDeployClient(credentials=ga_credentials.AnonymousCredentials(),)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.update_target), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = operations_pb2.Operation(name="operations/op")
# Call the method with a truthy value for each flattened field,
# using the keyword arguments to the method.
client.update_target(
target=cloud_deploy.Target(name="name_value"),
update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]),
)
# Establish that the underlying call was made with the expected
# request object values.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
arg = args[0].target
mock_val = cloud_deploy.Target(name="name_value")
assert arg == mock_val
arg = args[0].update_mask
mock_val = field_mask_pb2.FieldMask(paths=["paths_value"])
assert arg == mock_val
def test_update_target_flattened_error():
client = CloudDeployClient(credentials=ga_credentials.AnonymousCredentials(),)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
client.update_target(
cloud_deploy.UpdateTargetRequest(),
target=cloud_deploy.Target(name="name_value"),
update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]),
)
@pytest.mark.asyncio
async def test_update_target_flattened_async():
client = CloudDeployAsyncClient(credentials=ga_credentials.AnonymousCredentials(),)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.update_target), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
operations_pb2.Operation(name="operations/spam")
)
# Call the method with a truthy value for each flattened field,
# using the keyword arguments to the method.
response = await client.update_target(
target=cloud_deploy.Target(name="name_value"),
update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]),
)
# Establish that the underlying call was made with the expected
# request object values.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
arg = args[0].target
mock_val = cloud_deploy.Target(name="name_value")
assert arg == mock_val
arg = args[0].update_mask
mock_val = field_mask_pb2.FieldMask(paths=["paths_value"])
assert arg == mock_val
@pytest.mark.asyncio
async def test_update_target_flattened_error_async():
client = CloudDeployAsyncClient(credentials=ga_credentials.AnonymousCredentials(),)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
await client.update_target(
cloud_deploy.UpdateTargetRequest(),
target=cloud_deploy.Target(name="name_value"),
update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]),
)
@pytest.mark.parametrize("request_type", [cloud_deploy.DeleteTargetRequest, dict,])
def test_delete_target(request_type, transport: str = "grpc"):
client = CloudDeployClient(
credentials=ga_credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.delete_target), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = operations_pb2.Operation(name="operations/spam")
response = client.delete_target(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == cloud_deploy.DeleteTargetRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, future.Future)
def test_delete_target_empty_call():
# This test is a coverage failsafe to make sure that totally empty calls,
# i.e. request == None and no flattened fields passed, work.
client = CloudDeployClient(
credentials=ga_credentials.AnonymousCredentials(), transport="grpc",
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.delete_target), "__call__") as call:
client.delete_target()
call.assert_called()
_, args, _ = call.mock_calls[0]
assert args[0] == cloud_deploy.DeleteTargetRequest()
@pytest.mark.asyncio
async def test_delete_target_async(
transport: str = "grpc_asyncio", request_type=cloud_deploy.DeleteTargetRequest
):
client = CloudDeployAsyncClient(
credentials=ga_credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.delete_target), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
operations_pb2.Operation(name="operations/spam")
)
response = await client.delete_target(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0] == cloud_deploy.DeleteTargetRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, future.Future)
@pytest.mark.asyncio
async def test_delete_target_async_from_dict():
await test_delete_target_async(request_type=dict)
def test_delete_target_field_headers():
client = CloudDeployClient(credentials=ga_credentials.AnonymousCredentials(),)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = cloud_deploy.DeleteTargetRequest()
request.name = "name/value"
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.delete_target), "__call__") as call:
call.return_value = operations_pb2.Operation(name="operations/op")
client.delete_target(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the field header was sent.
_, _, kw = call.mock_calls[0]
assert ("x-goog-request-params", "name=name/value",) in kw["metadata"]
@pytest.mark.asyncio
async def test_delete_target_field_headers_async():
client = CloudDeployAsyncClient(credentials=ga_credentials.AnonymousCredentials(),)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = cloud_deploy.DeleteTargetRequest()
request.name = "name/value"
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.delete_target), "__call__") as call:
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
operations_pb2.Operation(name="operations/op")
)
await client.delete_target(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the field header was sent.
_, _, kw = call.mock_calls[0]
assert ("x-goog-request-params", "name=name/value",) in kw["metadata"]
def test_delete_target_flattened():
client = CloudDeployClient(credentials=ga_credentials.AnonymousCredentials(),)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.delete_target), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = operations_pb2.Operation(name="operations/op")
# Call the method with a truthy value for each flattened field,
# using the keyword arguments to the method.
client.delete_target(name="name_value",)
# Establish that the underlying call was made with the expected
# request object values.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
arg = args[0].name
mock_val = "name_value"
assert arg == mock_val
def test_delete_target_flattened_error():
client = CloudDeployClient(credentials=ga_credentials.AnonymousCredentials(),)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
client.delete_target(
cloud_deploy.DeleteTargetRequest(), name="name_value",
)
@pytest.mark.asyncio
async def test_delete_target_flattened_async():
client = CloudDeployAsyncClient(credentials=ga_credentials.AnonymousCredentials(),)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.delete_target), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
operations_pb2.Operation(name="operations/spam")
)
# Call the method with a truthy value for each flattened field,
# using the keyword arguments to the method.
response = await client.delete_target(name="name_value",)
# Establish that the underlying call was made with the expected
# request object values.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
arg = args[0].name
mock_val = "name_value"
assert arg == mock_val
@pytest.mark.asyncio
async def test_delete_target_flattened_error_async():
client = CloudDeployAsyncClient(credentials=ga_credentials.AnonymousCredentials(),)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
await client.delete_target(
cloud_deploy.DeleteTargetRequest(), name="name_value",
)
@pytest.mark.parametrize("request_type", [cloud_deploy.ListReleasesRequest, dict,])
def test_list_releases(request_type, transport: str = "grpc"):
client = CloudDeployClient(
credentials=ga_credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.list_releases), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = cloud_deploy.ListReleasesResponse(
next_page_token="next_page_token_value", unreachable=["unreachable_value"],
)
response = client.list_releases(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == cloud_deploy.ListReleasesRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, pagers.ListReleasesPager)
assert response.next_page_token == "next_page_token_value"
assert response.unreachable == ["unreachable_value"]
def test_list_releases_empty_call():
# This test is a coverage failsafe to make sure that totally empty calls,
# i.e. request == None and no flattened fields passed, work.
client = CloudDeployClient(
credentials=ga_credentials.AnonymousCredentials(), transport="grpc",
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.list_releases), "__call__") as call:
client.list_releases()
call.assert_called()
_, args, _ = call.mock_calls[0]
assert args[0] == cloud_deploy.ListReleasesRequest()
@pytest.mark.asyncio
async def test_list_releases_async(
transport: str = "grpc_asyncio", request_type=cloud_deploy.ListReleasesRequest
):
client = CloudDeployAsyncClient(
credentials=ga_credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.list_releases), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
cloud_deploy.ListReleasesResponse(
next_page_token="next_page_token_value",
unreachable=["unreachable_value"],
)
)
response = await client.list_releases(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0] == cloud_deploy.ListReleasesRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, pagers.ListReleasesAsyncPager)
assert response.next_page_token == "next_page_token_value"
assert response.unreachable == ["unreachable_value"]
@pytest.mark.asyncio
async def test_list_releases_async_from_dict():
await test_list_releases_async(request_type=dict)
def test_list_releases_field_headers():
client = CloudDeployClient(credentials=ga_credentials.AnonymousCredentials(),)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = cloud_deploy.ListReleasesRequest()
request.parent = "parent/value"
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.list_releases), "__call__") as call:
call.return_value = cloud_deploy.ListReleasesResponse()
client.list_releases(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the field header was sent.
_, _, kw = call.mock_calls[0]
assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"]
@pytest.mark.asyncio
async def test_list_releases_field_headers_async():
client = CloudDeployAsyncClient(credentials=ga_credentials.AnonymousCredentials(),)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = cloud_deploy.ListReleasesRequest()
request.parent = "parent/value"
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.list_releases), "__call__") as call:
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
cloud_deploy.ListReleasesResponse()
)
await client.list_releases(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the field header was sent.
_, _, kw = call.mock_calls[0]
assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"]
def test_list_releases_flattened():
client = CloudDeployClient(credentials=ga_credentials.AnonymousCredentials(),)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.list_releases), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = cloud_deploy.ListReleasesResponse()
# Call the method with a truthy value for each flattened field,
# using the keyword arguments to the method.
client.list_releases(parent="parent_value",)
# Establish that the underlying call was made with the expected
# request object values.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
arg = args[0].parent
mock_val = "parent_value"
assert arg == mock_val
def test_list_releases_flattened_error():
client = CloudDeployClient(credentials=ga_credentials.AnonymousCredentials(),)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
client.list_releases(
cloud_deploy.ListReleasesRequest(), parent="parent_value",
)
@pytest.mark.asyncio
async def test_list_releases_flattened_async():
client = CloudDeployAsyncClient(credentials=ga_credentials.AnonymousCredentials(),)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.list_releases), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
cloud_deploy.ListReleasesResponse()
)
# Call the method with a truthy value for each flattened field,
# using the keyword arguments to the method.
response = await client.list_releases(parent="parent_value",)
# Establish that the underlying call was made with the expected
# request object values.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
arg = args[0].parent
mock_val = "parent_value"
assert arg == mock_val
@pytest.mark.asyncio
async def test_list_releases_flattened_error_async():
client = CloudDeployAsyncClient(credentials=ga_credentials.AnonymousCredentials(),)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
await client.list_releases(
cloud_deploy.ListReleasesRequest(), parent="parent_value",
)
def test_list_releases_pager(transport_name: str = "grpc"):
client = CloudDeployClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport_name,
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.list_releases), "__call__") as call:
# Set the response to a series of pages.
call.side_effect = (
cloud_deploy.ListReleasesResponse(
releases=[
cloud_deploy.Release(),
cloud_deploy.Release(),
cloud_deploy.Release(),
],
next_page_token="abc",
),
cloud_deploy.ListReleasesResponse(releases=[], next_page_token="def",),
cloud_deploy.ListReleasesResponse(
releases=[cloud_deploy.Release(),], next_page_token="ghi",
),
cloud_deploy.ListReleasesResponse(
releases=[cloud_deploy.Release(), cloud_deploy.Release(),],
),
RuntimeError,
)
metadata = ()
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)),
)
pager = client.list_releases(request={})
assert pager._metadata == metadata
results = [i for i in pager]
assert len(results) == 6
assert all(isinstance(i, cloud_deploy.Release) for i in results)
def test_list_releases_pages(transport_name: str = "grpc"):
client = CloudDeployClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport_name,
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.list_releases), "__call__") as call:
# Set the response to a series of pages.
call.side_effect = (
cloud_deploy.ListReleasesResponse(
releases=[
cloud_deploy.Release(),
cloud_deploy.Release(),
cloud_deploy.Release(),
],
next_page_token="abc",
),
cloud_deploy.ListReleasesResponse(releases=[], next_page_token="def",),
cloud_deploy.ListReleasesResponse(
releases=[cloud_deploy.Release(),], next_page_token="ghi",
),
cloud_deploy.ListReleasesResponse(
releases=[cloud_deploy.Release(), cloud_deploy.Release(),],
),
RuntimeError,
)
pages = list(client.list_releases(request={}).pages)
for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
assert page_.raw_page.next_page_token == token
@pytest.mark.asyncio
async def test_list_releases_async_pager():
    client = CloudDeployAsyncClient(credentials=ga_credentials.AnonymousCredentials(),)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.list_releases), "__call__", new_callable=mock.AsyncMock
) as call:
# Set the response to a series of pages.
call.side_effect = (
cloud_deploy.ListReleasesResponse(
releases=[
cloud_deploy.Release(),
cloud_deploy.Release(),
cloud_deploy.Release(),
],
next_page_token="abc",
),
cloud_deploy.ListReleasesResponse(releases=[], next_page_token="def",),
cloud_deploy.ListReleasesResponse(
releases=[cloud_deploy.Release(),], next_page_token="ghi",
),
cloud_deploy.ListReleasesResponse(
releases=[cloud_deploy.Release(), cloud_deploy.Release(),],
),
RuntimeError,
)
async_pager = await client.list_releases(request={},)
assert async_pager.next_page_token == "abc"
responses = []
async for response in async_pager:
responses.append(response)
assert len(responses) == 6
assert all(isinstance(i, cloud_deploy.Release) for i in responses)
@pytest.mark.asyncio
async def test_list_releases_async_pages():
    client = CloudDeployAsyncClient(credentials=ga_credentials.AnonymousCredentials(),)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.list_releases), "__call__", new_callable=mock.AsyncMock
) as call:
# Set the response to a series of pages.
call.side_effect = (
cloud_deploy.ListReleasesResponse(
releases=[
cloud_deploy.Release(),
cloud_deploy.Release(),
cloud_deploy.Release(),
],
next_page_token="abc",
),
cloud_deploy.ListReleasesResponse(releases=[], next_page_token="def",),
cloud_deploy.ListReleasesResponse(
releases=[cloud_deploy.Release(),], next_page_token="ghi",
),
cloud_deploy.ListReleasesResponse(
releases=[cloud_deploy.Release(), cloud_deploy.Release(),],
),
RuntimeError,
)
pages = []
async for page_ in (await client.list_releases(request={})).pages:
pages.append(page_)
for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
assert page_.raw_page.next_page_token == token
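# The GetRelease tests below repeat the per-RPC template used throughout this
# module: a parametrized sync test (proto request and plain dict), an async
# variant, an "empty call" coverage test, field-header routing tests, and
# flattened-argument tests for both the sync and async clients.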
@pytest.mark.parametrize("request_type", [cloud_deploy.GetReleaseRequest, dict,])
def test_get_release(request_type, transport: str = "grpc"):
client = CloudDeployClient(
credentials=ga_credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.get_release), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = cloud_deploy.Release(
name="name_value",
uid="uid_value",
description="description_value",
skaffold_config_uri="skaffold_config_uri_value",
skaffold_config_path="skaffold_config_path_value",
render_state=cloud_deploy.Release.RenderState.SUCCEEDED,
etag="etag_value",
skaffold_version="skaffold_version_value",
)
response = client.get_release(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == cloud_deploy.GetReleaseRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, cloud_deploy.Release)
assert response.name == "name_value"
assert response.uid == "uid_value"
assert response.description == "description_value"
assert response.skaffold_config_uri == "skaffold_config_uri_value"
assert response.skaffold_config_path == "skaffold_config_path_value"
assert response.render_state == cloud_deploy.Release.RenderState.SUCCEEDED
assert response.etag == "etag_value"
assert response.skaffold_version == "skaffold_version_value"
def test_get_release_empty_call():
# This test is a coverage failsafe to make sure that totally empty calls,
# i.e. request == None and no flattened fields passed, work.
client = CloudDeployClient(
credentials=ga_credentials.AnonymousCredentials(), transport="grpc",
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.get_release), "__call__") as call:
client.get_release()
call.assert_called()
_, args, _ = call.mock_calls[0]
assert args[0] == cloud_deploy.GetReleaseRequest()
@pytest.mark.asyncio
async def test_get_release_async(
transport: str = "grpc_asyncio", request_type=cloud_deploy.GetReleaseRequest
):
client = CloudDeployAsyncClient(
credentials=ga_credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.get_release), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
cloud_deploy.Release(
name="name_value",
uid="uid_value",
description="description_value",
skaffold_config_uri="skaffold_config_uri_value",
skaffold_config_path="skaffold_config_path_value",
render_state=cloud_deploy.Release.RenderState.SUCCEEDED,
etag="etag_value",
skaffold_version="skaffold_version_value",
)
)
response = await client.get_release(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0] == cloud_deploy.GetReleaseRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, cloud_deploy.Release)
assert response.name == "name_value"
assert response.uid == "uid_value"
assert response.description == "description_value"
assert response.skaffold_config_uri == "skaffold_config_uri_value"
assert response.skaffold_config_path == "skaffold_config_path_value"
assert response.render_state == cloud_deploy.Release.RenderState.SUCCEEDED
assert response.etag == "etag_value"
assert response.skaffold_version == "skaffold_version_value"
@pytest.mark.asyncio
async def test_get_release_async_from_dict():
await test_get_release_async(request_type=dict)
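# The *_async_from_dict variants above re-run the async test with a plain dict
# request, confirming the client coerces a dict into the request proto before
# invoking the gRPC stub.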
def test_get_release_field_headers():
client = CloudDeployClient(credentials=ga_credentials.AnonymousCredentials(),)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = cloud_deploy.GetReleaseRequest()
request.name = "name/value"
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.get_release), "__call__") as call:
call.return_value = cloud_deploy.Release()
client.get_release(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the field header was sent.
_, _, kw = call.mock_calls[0]
assert ("x-goog-request-params", "name=name/value",) in kw["metadata"]
@pytest.mark.asyncio
async def test_get_release_field_headers_async():
client = CloudDeployAsyncClient(credentials=ga_credentials.AnonymousCredentials(),)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = cloud_deploy.GetReleaseRequest()
request.name = "name/value"
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.get_release), "__call__") as call:
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
cloud_deploy.Release()
)
await client.get_release(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the field header was sent.
_, _, kw = call.mock_calls[0]
assert ("x-goog-request-params", "name=name/value",) in kw["metadata"]
def test_get_release_flattened():
client = CloudDeployClient(credentials=ga_credentials.AnonymousCredentials(),)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.get_release), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = cloud_deploy.Release()
# Call the method with a truthy value for each flattened field,
# using the keyword arguments to the method.
client.get_release(name="name_value",)
# Establish that the underlying call was made with the expected
# request object values.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
arg = args[0].name
mock_val = "name_value"
assert arg == mock_val
def test_get_release_flattened_error():
client = CloudDeployClient(credentials=ga_credentials.AnonymousCredentials(),)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
client.get_release(
cloud_deploy.GetReleaseRequest(), name="name_value",
)
@pytest.mark.asyncio
async def test_get_release_flattened_async():
client = CloudDeployAsyncClient(credentials=ga_credentials.AnonymousCredentials(),)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.get_release), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
cloud_deploy.Release()
)
# Call the method with a truthy value for each flattened field,
# using the keyword arguments to the method.
response = await client.get_release(name="name_value",)
# Establish that the underlying call was made with the expected
# request object values.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
arg = args[0].name
mock_val = "name_value"
assert arg == mock_val
@pytest.mark.asyncio
async def test_get_release_flattened_error_async():
client = CloudDeployAsyncClient(credentials=ga_credentials.AnonymousCredentials(),)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
await client.get_release(
cloud_deploy.GetReleaseRequest(), name="name_value",
)
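# CreateRelease is a long-running operation: the tests below mock the stub to
# return an operations_pb2.Operation and assert that the client wraps it in a
# future.Future rather than returning the raw Operation.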
@pytest.mark.parametrize("request_type", [cloud_deploy.CreateReleaseRequest, dict,])
def test_create_release(request_type, transport: str = "grpc"):
client = CloudDeployClient(
credentials=ga_credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.create_release), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = operations_pb2.Operation(name="operations/spam")
response = client.create_release(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == cloud_deploy.CreateReleaseRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, future.Future)
def test_create_release_empty_call():
# This test is a coverage failsafe to make sure that totally empty calls,
# i.e. request == None and no flattened fields passed, work.
client = CloudDeployClient(
credentials=ga_credentials.AnonymousCredentials(), transport="grpc",
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.create_release), "__call__") as call:
client.create_release()
call.assert_called()
_, args, _ = call.mock_calls[0]
assert args[0] == cloud_deploy.CreateReleaseRequest()
@pytest.mark.asyncio
async def test_create_release_async(
transport: str = "grpc_asyncio", request_type=cloud_deploy.CreateReleaseRequest
):
client = CloudDeployAsyncClient(
credentials=ga_credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.create_release), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
operations_pb2.Operation(name="operations/spam")
)
response = await client.create_release(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0] == cloud_deploy.CreateReleaseRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, future.Future)
@pytest.mark.asyncio
async def test_create_release_async_from_dict():
await test_create_release_async(request_type=dict)
def test_create_release_field_headers():
client = CloudDeployClient(credentials=ga_credentials.AnonymousCredentials(),)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = cloud_deploy.CreateReleaseRequest()
request.parent = "parent/value"
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.create_release), "__call__") as call:
call.return_value = operations_pb2.Operation(name="operations/op")
client.create_release(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the field header was sent.
_, _, kw = call.mock_calls[0]
assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"]
@pytest.mark.asyncio
async def test_create_release_field_headers_async():
client = CloudDeployAsyncClient(credentials=ga_credentials.AnonymousCredentials(),)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = cloud_deploy.CreateReleaseRequest()
request.parent = "parent/value"
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.create_release), "__call__") as call:
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
operations_pb2.Operation(name="operations/op")
)
await client.create_release(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the field header was sent.
_, _, kw = call.mock_calls[0]
assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"]
def test_create_release_flattened():
client = CloudDeployClient(credentials=ga_credentials.AnonymousCredentials(),)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.create_release), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = operations_pb2.Operation(name="operations/op")
# Call the method with a truthy value for each flattened field,
# using the keyword arguments to the method.
client.create_release(
parent="parent_value",
release=cloud_deploy.Release(name="name_value"),
release_id="release_id_value",
)
# Establish that the underlying call was made with the expected
# request object values.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
arg = args[0].parent
mock_val = "parent_value"
assert arg == mock_val
arg = args[0].release
mock_val = cloud_deploy.Release(name="name_value")
assert arg == mock_val
arg = args[0].release_id
mock_val = "release_id_value"
assert arg == mock_val
def test_create_release_flattened_error():
client = CloudDeployClient(credentials=ga_credentials.AnonymousCredentials(),)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
client.create_release(
cloud_deploy.CreateReleaseRequest(),
parent="parent_value",
release=cloud_deploy.Release(name="name_value"),
release_id="release_id_value",
)
@pytest.mark.asyncio
async def test_create_release_flattened_async():
client = CloudDeployAsyncClient(credentials=ga_credentials.AnonymousCredentials(),)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.create_release), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
operations_pb2.Operation(name="operations/spam")
)
# Call the method with a truthy value for each flattened field,
# using the keyword arguments to the method.
response = await client.create_release(
parent="parent_value",
release=cloud_deploy.Release(name="name_value"),
release_id="release_id_value",
)
# Establish that the underlying call was made with the expected
# request object values.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
arg = args[0].parent
mock_val = "parent_value"
assert arg == mock_val
arg = args[0].release
mock_val = cloud_deploy.Release(name="name_value")
assert arg == mock_val
arg = args[0].release_id
mock_val = "release_id_value"
assert arg == mock_val
@pytest.mark.asyncio
async def test_create_release_flattened_error_async():
client = CloudDeployAsyncClient(credentials=ga_credentials.AnonymousCredentials(),)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
await client.create_release(
cloud_deploy.CreateReleaseRequest(),
parent="parent_value",
release=cloud_deploy.Release(name="name_value"),
release_id="release_id_value",
)
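# ApproveRollout returns a plain response message rather than an LRO, so its
# tests assert the concrete cloud_deploy.ApproveRolloutResponse type instead of
# a future.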
@pytest.mark.parametrize("request_type", [cloud_deploy.ApproveRolloutRequest, dict,])
def test_approve_rollout(request_type, transport: str = "grpc"):
client = CloudDeployClient(
credentials=ga_credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.approve_rollout), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = cloud_deploy.ApproveRolloutResponse()
response = client.approve_rollout(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == cloud_deploy.ApproveRolloutRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, cloud_deploy.ApproveRolloutResponse)
def test_approve_rollout_empty_call():
# This test is a coverage failsafe to make sure that totally empty calls,
# i.e. request == None and no flattened fields passed, work.
client = CloudDeployClient(
credentials=ga_credentials.AnonymousCredentials(), transport="grpc",
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.approve_rollout), "__call__") as call:
client.approve_rollout()
call.assert_called()
_, args, _ = call.mock_calls[0]
assert args[0] == cloud_deploy.ApproveRolloutRequest()
@pytest.mark.asyncio
async def test_approve_rollout_async(
transport: str = "grpc_asyncio", request_type=cloud_deploy.ApproveRolloutRequest
):
client = CloudDeployAsyncClient(
credentials=ga_credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.approve_rollout), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
cloud_deploy.ApproveRolloutResponse()
)
response = await client.approve_rollout(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0] == cloud_deploy.ApproveRolloutRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, cloud_deploy.ApproveRolloutResponse)
@pytest.mark.asyncio
async def test_approve_rollout_async_from_dict():
await test_approve_rollout_async(request_type=dict)
def test_approve_rollout_field_headers():
client = CloudDeployClient(credentials=ga_credentials.AnonymousCredentials(),)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = cloud_deploy.ApproveRolloutRequest()
request.name = "name/value"
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.approve_rollout), "__call__") as call:
call.return_value = cloud_deploy.ApproveRolloutResponse()
client.approve_rollout(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the field header was sent.
_, _, kw = call.mock_calls[0]
assert ("x-goog-request-params", "name=name/value",) in kw["metadata"]
@pytest.mark.asyncio
async def test_approve_rollout_field_headers_async():
client = CloudDeployAsyncClient(credentials=ga_credentials.AnonymousCredentials(),)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = cloud_deploy.ApproveRolloutRequest()
request.name = "name/value"
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.approve_rollout), "__call__") as call:
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
cloud_deploy.ApproveRolloutResponse()
)
await client.approve_rollout(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the field header was sent.
_, _, kw = call.mock_calls[0]
assert ("x-goog-request-params", "name=name/value",) in kw["metadata"]
def test_approve_rollout_flattened():
client = CloudDeployClient(credentials=ga_credentials.AnonymousCredentials(),)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.approve_rollout), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = cloud_deploy.ApproveRolloutResponse()
# Call the method with a truthy value for each flattened field,
# using the keyword arguments to the method.
client.approve_rollout(name="name_value",)
# Establish that the underlying call was made with the expected
# request object values.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
arg = args[0].name
mock_val = "name_value"
assert arg == mock_val
def test_approve_rollout_flattened_error():
client = CloudDeployClient(credentials=ga_credentials.AnonymousCredentials(),)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
client.approve_rollout(
cloud_deploy.ApproveRolloutRequest(), name="name_value",
)
@pytest.mark.asyncio
async def test_approve_rollout_flattened_async():
client = CloudDeployAsyncClient(credentials=ga_credentials.AnonymousCredentials(),)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.approve_rollout), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
cloud_deploy.ApproveRolloutResponse()
)
# Call the method with a truthy value for each flattened field,
# using the keyword arguments to the method.
response = await client.approve_rollout(name="name_value",)
# Establish that the underlying call was made with the expected
# request object values.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
arg = args[0].name
mock_val = "name_value"
assert arg == mock_val
@pytest.mark.asyncio
async def test_approve_rollout_flattened_error_async():
client = CloudDeployAsyncClient(credentials=ga_credentials.AnonymousCredentials(),)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
await client.approve_rollout(
cloud_deploy.ApproveRolloutRequest(), name="name_value",
)
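# ListRollouts mirrors the ListReleases suite above, including the pager and
# pages variants, over cloud_deploy.Rollout messages.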
@pytest.mark.parametrize("request_type", [cloud_deploy.ListRolloutsRequest, dict,])
def test_list_rollouts(request_type, transport: str = "grpc"):
client = CloudDeployClient(
credentials=ga_credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.list_rollouts), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = cloud_deploy.ListRolloutsResponse(
next_page_token="next_page_token_value", unreachable=["unreachable_value"],
)
response = client.list_rollouts(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == cloud_deploy.ListRolloutsRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, pagers.ListRolloutsPager)
assert response.next_page_token == "next_page_token_value"
assert response.unreachable == ["unreachable_value"]
def test_list_rollouts_empty_call():
# This test is a coverage failsafe to make sure that totally empty calls,
# i.e. request == None and no flattened fields passed, work.
client = CloudDeployClient(
credentials=ga_credentials.AnonymousCredentials(), transport="grpc",
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.list_rollouts), "__call__") as call:
client.list_rollouts()
call.assert_called()
_, args, _ = call.mock_calls[0]
assert args[0] == cloud_deploy.ListRolloutsRequest()
@pytest.mark.asyncio
async def test_list_rollouts_async(
transport: str = "grpc_asyncio", request_type=cloud_deploy.ListRolloutsRequest
):
client = CloudDeployAsyncClient(
credentials=ga_credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.list_rollouts), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
cloud_deploy.ListRolloutsResponse(
next_page_token="next_page_token_value",
unreachable=["unreachable_value"],
)
)
response = await client.list_rollouts(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0] == cloud_deploy.ListRolloutsRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, pagers.ListRolloutsAsyncPager)
assert response.next_page_token == "next_page_token_value"
assert response.unreachable == ["unreachable_value"]
@pytest.mark.asyncio
async def test_list_rollouts_async_from_dict():
await test_list_rollouts_async(request_type=dict)
def test_list_rollouts_field_headers():
client = CloudDeployClient(credentials=ga_credentials.AnonymousCredentials(),)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = cloud_deploy.ListRolloutsRequest()
request.parent = "parent/value"
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.list_rollouts), "__call__") as call:
call.return_value = cloud_deploy.ListRolloutsResponse()
client.list_rollouts(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the field header was sent.
_, _, kw = call.mock_calls[0]
assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"]
@pytest.mark.asyncio
async def test_list_rollouts_field_headers_async():
client = CloudDeployAsyncClient(credentials=ga_credentials.AnonymousCredentials(),)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = cloud_deploy.ListRolloutsRequest()
request.parent = "parent/value"
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.list_rollouts), "__call__") as call:
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
cloud_deploy.ListRolloutsResponse()
)
await client.list_rollouts(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the field header was sent.
_, _, kw = call.mock_calls[0]
assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"]
def test_list_rollouts_flattened():
client = CloudDeployClient(credentials=ga_credentials.AnonymousCredentials(),)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.list_rollouts), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = cloud_deploy.ListRolloutsResponse()
# Call the method with a truthy value for each flattened field,
# using the keyword arguments to the method.
client.list_rollouts(parent="parent_value",)
# Establish that the underlying call was made with the expected
# request object values.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
arg = args[0].parent
mock_val = "parent_value"
assert arg == mock_val
def test_list_rollouts_flattened_error():
client = CloudDeployClient(credentials=ga_credentials.AnonymousCredentials(),)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
client.list_rollouts(
cloud_deploy.ListRolloutsRequest(), parent="parent_value",
)
@pytest.mark.asyncio
async def test_list_rollouts_flattened_async():
client = CloudDeployAsyncClient(credentials=ga_credentials.AnonymousCredentials(),)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.list_rollouts), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
cloud_deploy.ListRolloutsResponse()
)
# Call the method with a truthy value for each flattened field,
# using the keyword arguments to the method.
response = await client.list_rollouts(parent="parent_value",)
# Establish that the underlying call was made with the expected
# request object values.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
arg = args[0].parent
mock_val = "parent_value"
assert arg == mock_val
@pytest.mark.asyncio
async def test_list_rollouts_flattened_error_async():
client = CloudDeployAsyncClient(credentials=ga_credentials.AnonymousCredentials(),)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
await client.list_rollouts(
cloud_deploy.ListRolloutsRequest(), parent="parent_value",
)
def test_list_rollouts_pager(transport_name: str = "grpc"):
client = CloudDeployClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport_name,
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.list_rollouts), "__call__") as call:
# Set the response to a series of pages.
call.side_effect = (
cloud_deploy.ListRolloutsResponse(
rollouts=[
cloud_deploy.Rollout(),
cloud_deploy.Rollout(),
cloud_deploy.Rollout(),
],
next_page_token="abc",
),
cloud_deploy.ListRolloutsResponse(rollouts=[], next_page_token="def",),
cloud_deploy.ListRolloutsResponse(
rollouts=[cloud_deploy.Rollout(),], next_page_token="ghi",
),
cloud_deploy.ListRolloutsResponse(
rollouts=[cloud_deploy.Rollout(), cloud_deploy.Rollout(),],
),
RuntimeError,
)
metadata = ()
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)),
)
pager = client.list_rollouts(request={})
assert pager._metadata == metadata
        results = list(pager)
assert len(results) == 6
assert all(isinstance(i, cloud_deploy.Rollout) for i in results)
def test_list_rollouts_pages(transport_name: str = "grpc"):
client = CloudDeployClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport_name,
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.list_rollouts), "__call__") as call:
# Set the response to a series of pages.
call.side_effect = (
cloud_deploy.ListRolloutsResponse(
rollouts=[
cloud_deploy.Rollout(),
cloud_deploy.Rollout(),
cloud_deploy.Rollout(),
],
next_page_token="abc",
),
cloud_deploy.ListRolloutsResponse(rollouts=[], next_page_token="def",),
cloud_deploy.ListRolloutsResponse(
rollouts=[cloud_deploy.Rollout(),], next_page_token="ghi",
),
cloud_deploy.ListRolloutsResponse(
rollouts=[cloud_deploy.Rollout(), cloud_deploy.Rollout(),],
),
RuntimeError,
)
pages = list(client.list_rollouts(request={}).pages)
for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
assert page_.raw_page.next_page_token == token
@pytest.mark.asyncio
async def test_list_rollouts_async_pager():
    client = CloudDeployAsyncClient(credentials=ga_credentials.AnonymousCredentials(),)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.list_rollouts), "__call__", new_callable=mock.AsyncMock
) as call:
# Set the response to a series of pages.
call.side_effect = (
cloud_deploy.ListRolloutsResponse(
rollouts=[
cloud_deploy.Rollout(),
cloud_deploy.Rollout(),
cloud_deploy.Rollout(),
],
next_page_token="abc",
),
cloud_deploy.ListRolloutsResponse(rollouts=[], next_page_token="def",),
cloud_deploy.ListRolloutsResponse(
rollouts=[cloud_deploy.Rollout(),], next_page_token="ghi",
),
cloud_deploy.ListRolloutsResponse(
rollouts=[cloud_deploy.Rollout(), cloud_deploy.Rollout(),],
),
RuntimeError,
)
async_pager = await client.list_rollouts(request={},)
assert async_pager.next_page_token == "abc"
responses = []
async for response in async_pager:
responses.append(response)
assert len(responses) == 6
assert all(isinstance(i, cloud_deploy.Rollout) for i in responses)
@pytest.mark.asyncio
async def test_list_rollouts_async_pages():
    client = CloudDeployAsyncClient(credentials=ga_credentials.AnonymousCredentials(),)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.list_rollouts), "__call__", new_callable=mock.AsyncMock
) as call:
# Set the response to a series of pages.
call.side_effect = (
cloud_deploy.ListRolloutsResponse(
rollouts=[
cloud_deploy.Rollout(),
cloud_deploy.Rollout(),
cloud_deploy.Rollout(),
],
next_page_token="abc",
),
cloud_deploy.ListRolloutsResponse(rollouts=[], next_page_token="def",),
cloud_deploy.ListRolloutsResponse(
rollouts=[cloud_deploy.Rollout(),], next_page_token="ghi",
),
cloud_deploy.ListRolloutsResponse(
rollouts=[cloud_deploy.Rollout(), cloud_deploy.Rollout(),],
),
RuntimeError,
)
pages = []
async for page_ in (await client.list_rollouts(request={})).pages:
pages.append(page_)
for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
assert page_.raw_page.next_page_token == token
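# GetRollout additionally asserts the enum-valued fields (approval_state and
# state) round-trip through the mocked call alongside the scalar fields.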
@pytest.mark.parametrize("request_type", [cloud_deploy.GetRolloutRequest, dict,])
def test_get_rollout(request_type, transport: str = "grpc"):
client = CloudDeployClient(
credentials=ga_credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.get_rollout), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = cloud_deploy.Rollout(
name="name_value",
uid="uid_value",
description="description_value",
target_id="target_id_value",
approval_state=cloud_deploy.Rollout.ApprovalState.NEEDS_APPROVAL,
state=cloud_deploy.Rollout.State.SUCCEEDED,
failure_reason="failure_reason_value",
deploying_build="deploying_build_value",
etag="etag_value",
)
response = client.get_rollout(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == cloud_deploy.GetRolloutRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, cloud_deploy.Rollout)
assert response.name == "name_value"
assert response.uid == "uid_value"
assert response.description == "description_value"
assert response.target_id == "target_id_value"
assert response.approval_state == cloud_deploy.Rollout.ApprovalState.NEEDS_APPROVAL
assert response.state == cloud_deploy.Rollout.State.SUCCEEDED
assert response.failure_reason == "failure_reason_value"
assert response.deploying_build == "deploying_build_value"
assert response.etag == "etag_value"
def test_get_rollout_empty_call():
# This test is a coverage failsafe to make sure that totally empty calls,
# i.e. request == None and no flattened fields passed, work.
client = CloudDeployClient(
credentials=ga_credentials.AnonymousCredentials(), transport="grpc",
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.get_rollout), "__call__") as call:
client.get_rollout()
call.assert_called()
_, args, _ = call.mock_calls[0]
assert args[0] == cloud_deploy.GetRolloutRequest()
@pytest.mark.asyncio
async def test_get_rollout_async(
transport: str = "grpc_asyncio", request_type=cloud_deploy.GetRolloutRequest
):
client = CloudDeployAsyncClient(
credentials=ga_credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.get_rollout), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
cloud_deploy.Rollout(
name="name_value",
uid="uid_value",
description="description_value",
target_id="target_id_value",
approval_state=cloud_deploy.Rollout.ApprovalState.NEEDS_APPROVAL,
state=cloud_deploy.Rollout.State.SUCCEEDED,
failure_reason="failure_reason_value",
deploying_build="deploying_build_value",
etag="etag_value",
)
)
response = await client.get_rollout(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0] == cloud_deploy.GetRolloutRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, cloud_deploy.Rollout)
assert response.name == "name_value"
assert response.uid == "uid_value"
assert response.description == "description_value"
assert response.target_id == "target_id_value"
assert response.approval_state == cloud_deploy.Rollout.ApprovalState.NEEDS_APPROVAL
assert response.state == cloud_deploy.Rollout.State.SUCCEEDED
assert response.failure_reason == "failure_reason_value"
assert response.deploying_build == "deploying_build_value"
assert response.etag == "etag_value"
@pytest.mark.asyncio
async def test_get_rollout_async_from_dict():
await test_get_rollout_async(request_type=dict)
def test_get_rollout_field_headers():
client = CloudDeployClient(credentials=ga_credentials.AnonymousCredentials(),)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = cloud_deploy.GetRolloutRequest()
request.name = "name/value"
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.get_rollout), "__call__") as call:
call.return_value = cloud_deploy.Rollout()
client.get_rollout(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the field header was sent.
_, _, kw = call.mock_calls[0]
assert ("x-goog-request-params", "name=name/value",) in kw["metadata"]
@pytest.mark.asyncio
async def test_get_rollout_field_headers_async():
client = CloudDeployAsyncClient(credentials=ga_credentials.AnonymousCredentials(),)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = cloud_deploy.GetRolloutRequest()
request.name = "name/value"
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.get_rollout), "__call__") as call:
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
cloud_deploy.Rollout()
)
await client.get_rollout(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the field header was sent.
_, _, kw = call.mock_calls[0]
assert ("x-goog-request-params", "name=name/value",) in kw["metadata"]
def test_get_rollout_flattened():
client = CloudDeployClient(credentials=ga_credentials.AnonymousCredentials(),)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.get_rollout), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = cloud_deploy.Rollout()
# Call the method with a truthy value for each flattened field,
# using the keyword arguments to the method.
client.get_rollout(name="name_value",)
# Establish that the underlying call was made with the expected
# request object values.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
arg = args[0].name
mock_val = "name_value"
assert arg == mock_val
def test_get_rollout_flattened_error():
client = CloudDeployClient(credentials=ga_credentials.AnonymousCredentials(),)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
client.get_rollout(
cloud_deploy.GetRolloutRequest(), name="name_value",
)
@pytest.mark.asyncio
async def test_get_rollout_flattened_async():
client = CloudDeployAsyncClient(credentials=ga_credentials.AnonymousCredentials(),)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.get_rollout), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
cloud_deploy.Rollout()
)
# Call the method with a truthy value for each flattened field,
# using the keyword arguments to the method.
response = await client.get_rollout(name="name_value",)
# Establish that the underlying call was made with the expected
# request object values.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
arg = args[0].name
mock_val = "name_value"
assert arg == mock_val
@pytest.mark.asyncio
async def test_get_rollout_flattened_error_async():
client = CloudDeployAsyncClient(credentials=ga_credentials.AnonymousCredentials(),)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
await client.get_rollout(
cloud_deploy.GetRolloutRequest(), name="name_value",
)
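# CreateRollout follows the same LRO pattern as CreateRelease, with a
# three-field flattened signature (parent, rollout, rollout_id).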
@pytest.mark.parametrize("request_type", [cloud_deploy.CreateRolloutRequest, dict,])
def test_create_rollout(request_type, transport: str = "grpc"):
client = CloudDeployClient(
credentials=ga_credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.create_rollout), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = operations_pb2.Operation(name="operations/spam")
response = client.create_rollout(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == cloud_deploy.CreateRolloutRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, future.Future)
def test_create_rollout_empty_call():
# This test is a coverage failsafe to make sure that totally empty calls,
# i.e. request == None and no flattened fields passed, work.
client = CloudDeployClient(
credentials=ga_credentials.AnonymousCredentials(), transport="grpc",
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.create_rollout), "__call__") as call:
client.create_rollout()
call.assert_called()
_, args, _ = call.mock_calls[0]
assert args[0] == cloud_deploy.CreateRolloutRequest()
@pytest.mark.asyncio
async def test_create_rollout_async(
transport: str = "grpc_asyncio", request_type=cloud_deploy.CreateRolloutRequest
):
client = CloudDeployAsyncClient(
credentials=ga_credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.create_rollout), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
operations_pb2.Operation(name="operations/spam")
)
response = await client.create_rollout(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0] == cloud_deploy.CreateRolloutRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, future.Future)
@pytest.mark.asyncio
async def test_create_rollout_async_from_dict():
await test_create_rollout_async(request_type=dict)
def test_create_rollout_field_headers():
client = CloudDeployClient(credentials=ga_credentials.AnonymousCredentials(),)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = cloud_deploy.CreateRolloutRequest()
request.parent = "parent/value"
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.create_rollout), "__call__") as call:
call.return_value = operations_pb2.Operation(name="operations/op")
client.create_rollout(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the field header was sent.
_, _, kw = call.mock_calls[0]
assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"]
@pytest.mark.asyncio
async def test_create_rollout_field_headers_async():
client = CloudDeployAsyncClient(credentials=ga_credentials.AnonymousCredentials(),)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = cloud_deploy.CreateRolloutRequest()
request.parent = "parent/value"
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.create_rollout), "__call__") as call:
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
operations_pb2.Operation(name="operations/op")
)
await client.create_rollout(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the field header was sent.
_, _, kw = call.mock_calls[0]
assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"]
def test_create_rollout_flattened():
client = CloudDeployClient(credentials=ga_credentials.AnonymousCredentials(),)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.create_rollout), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = operations_pb2.Operation(name="operations/op")
# Call the method with a truthy value for each flattened field,
# using the keyword arguments to the method.
client.create_rollout(
parent="parent_value",
rollout=cloud_deploy.Rollout(name="name_value"),
rollout_id="rollout_id_value",
)
# Establish that the underlying call was made with the expected
# request object values.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
arg = args[0].parent
mock_val = "parent_value"
assert arg == mock_val
arg = args[0].rollout
mock_val = cloud_deploy.Rollout(name="name_value")
assert arg == mock_val
arg = args[0].rollout_id
mock_val = "rollout_id_value"
assert arg == mock_val
def test_create_rollout_flattened_error():
client = CloudDeployClient(credentials=ga_credentials.AnonymousCredentials(),)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
client.create_rollout(
cloud_deploy.CreateRolloutRequest(),
parent="parent_value",
rollout=cloud_deploy.Rollout(name="name_value"),
rollout_id="rollout_id_value",
)
@pytest.mark.asyncio
async def test_create_rollout_flattened_async():
client = CloudDeployAsyncClient(credentials=ga_credentials.AnonymousCredentials(),)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.create_rollout), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
operations_pb2.Operation(name="operations/spam")
)
# Call the method with a truthy value for each flattened field,
# using the keyword arguments to the method.
response = await client.create_rollout(
parent="parent_value",
rollout=cloud_deploy.Rollout(name="name_value"),
rollout_id="rollout_id_value",
)
# Establish that the underlying call was made with the expected
# request object values.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
arg = args[0].parent
mock_val = "parent_value"
assert arg == mock_val
arg = args[0].rollout
mock_val = cloud_deploy.Rollout(name="name_value")
assert arg == mock_val
arg = args[0].rollout_id
mock_val = "rollout_id_value"
assert arg == mock_val
@pytest.mark.asyncio
async def test_create_rollout_flattened_error_async():
client = CloudDeployAsyncClient(credentials=ga_credentials.AnonymousCredentials(),)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
await client.create_rollout(
cloud_deploy.CreateRolloutRequest(),
parent="parent_value",
rollout=cloud_deploy.Rollout(name="name_value"),
rollout_id="rollout_id_value",
)
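# GetConfig returns the service's Config resource; the tests assert only the
# name and default_skaffold_version fields on the response.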
@pytest.mark.parametrize("request_type", [cloud_deploy.GetConfigRequest, dict,])
def test_get_config(request_type, transport: str = "grpc"):
client = CloudDeployClient(
credentials=ga_credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.get_config), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = cloud_deploy.Config(
name="name_value",
default_skaffold_version="default_skaffold_version_value",
)
response = client.get_config(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == cloud_deploy.GetConfigRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, cloud_deploy.Config)
assert response.name == "name_value"
assert response.default_skaffold_version == "default_skaffold_version_value"
def test_get_config_empty_call():
# This test is a coverage failsafe to make sure that totally empty calls,
# i.e. request == None and no flattened fields passed, work.
client = CloudDeployClient(
credentials=ga_credentials.AnonymousCredentials(), transport="grpc",
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.get_config), "__call__") as call:
client.get_config()
call.assert_called()
_, args, _ = call.mock_calls[0]
assert args[0] == cloud_deploy.GetConfigRequest()
@pytest.mark.asyncio
async def test_get_config_async(
transport: str = "grpc_asyncio", request_type=cloud_deploy.GetConfigRequest
):
client = CloudDeployAsyncClient(
credentials=ga_credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.get_config), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
cloud_deploy.Config(
name="name_value",
default_skaffold_version="default_skaffold_version_value",
)
)
response = await client.get_config(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0] == cloud_deploy.GetConfigRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, cloud_deploy.Config)
assert response.name == "name_value"
assert response.default_skaffold_version == "default_skaffold_version_value"
@pytest.mark.asyncio
async def test_get_config_async_from_dict():
await test_get_config_async(request_type=dict)
def test_get_config_field_headers():
client = CloudDeployClient(credentials=ga_credentials.AnonymousCredentials(),)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = cloud_deploy.GetConfigRequest()
request.name = "name/value"
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.get_config), "__call__") as call:
call.return_value = cloud_deploy.Config()
client.get_config(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the field header was sent.
_, _, kw = call.mock_calls[0]
assert ("x-goog-request-params", "name=name/value",) in kw["metadata"]
@pytest.mark.asyncio
async def test_get_config_field_headers_async():
client = CloudDeployAsyncClient(credentials=ga_credentials.AnonymousCredentials(),)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = cloud_deploy.GetConfigRequest()
request.name = "name/value"
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.get_config), "__call__") as call:
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(cloud_deploy.Config())
await client.get_config(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the field header was sent.
_, _, kw = call.mock_calls[0]
assert ("x-goog-request-params", "name=name/value",) in kw["metadata"]
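# [Hedged sketch] How the metadata entry asserted in the two field-header
# tests above is formed: selected request fields are serialized into a single
# "x-goog-request-params" value so Google's frontends can route by resource
# name. The helper below is illustrative, not the library's implementation.
def _routing_metadata(**fields):
    value = "&".join("{}={}".format(k, v) for k, v in fields.items())
    return ("x-goog-request-params", value)

assert _routing_metadata(name="name/value") == (
    "x-goog-request-params",
    "name=name/value",
)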
def test_get_config_flattened():
client = CloudDeployClient(credentials=ga_credentials.AnonymousCredentials(),)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.get_config), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = cloud_deploy.Config()
# Call the method with a truthy value for each flattened field,
# using the keyword arguments to the method.
client.get_config(name="name_value",)
# Establish that the underlying call was made with the expected
# request object values.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
arg = args[0].name
mock_val = "name_value"
assert arg == mock_val
def test_get_config_flattened_error():
client = CloudDeployClient(credentials=ga_credentials.AnonymousCredentials(),)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
client.get_config(
cloud_deploy.GetConfigRequest(), name="name_value",
)
@pytest.mark.asyncio
async def test_get_config_flattened_async():
client = CloudDeployAsyncClient(credentials=ga_credentials.AnonymousCredentials(),)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.get_config), "__call__") as call:
# Designate an appropriate return value for the call.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(cloud_deploy.Config())
# Call the method with a truthy value for each flattened field,
# using the keyword arguments to the method.
response = await client.get_config(name="name_value",)
# Establish that the underlying call was made with the expected
# request object values.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
arg = args[0].name
mock_val = "name_value"
assert arg == mock_val
@pytest.mark.asyncio
async def test_get_config_flattened_error_async():
client = CloudDeployAsyncClient(credentials=ga_credentials.AnonymousCredentials(),)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
await client.get_config(
cloud_deploy.GetConfigRequest(), name="name_value",
)
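# [Hedged sketch] The flattened-argument convenience the four tests above
# exercise, reduced to a hypothetical helper: flattened keyword arguments are
# only legal when no request object is given, and are otherwise copied onto a
# fresh request message.
def _coerce_get_config_request(request=None, **flattened):
    if request is not None and flattened:
        raise ValueError("Cannot pass both a request object and flattened fields.")
    if request is None:
        request = cloud_deploy.GetConfigRequest()
    for field, value in flattened.items():
        setattr(request, field, value)
    return request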
def test_credentials_transport_error():
# It is an error to provide credentials and a transport instance.
transport = transports.CloudDeployGrpcTransport(
credentials=ga_credentials.AnonymousCredentials(),
)
with pytest.raises(ValueError):
client = CloudDeployClient(
credentials=ga_credentials.AnonymousCredentials(), transport=transport,
)
# It is an error to provide a credentials file and a transport instance.
transport = transports.CloudDeployGrpcTransport(
credentials=ga_credentials.AnonymousCredentials(),
)
with pytest.raises(ValueError):
client = CloudDeployClient(
client_options={"credentials_file": "credentials.json"},
transport=transport,
)
# It is an error to provide an api_key and a transport instance.
transport = transports.CloudDeployGrpcTransport(
credentials=ga_credentials.AnonymousCredentials(),
)
options = client_options.ClientOptions()
options.api_key = "api_key"
with pytest.raises(ValueError):
client = CloudDeployClient(client_options=options, transport=transport,)
# It is an error to provide an api_key and a credential.
options = mock.Mock()
options.api_key = "api_key"
with pytest.raises(ValueError):
client = CloudDeployClient(
client_options=options, credentials=ga_credentials.AnonymousCredentials()
)
# It is an error to provide scopes and a transport instance.
transport = transports.CloudDeployGrpcTransport(
credentials=ga_credentials.AnonymousCredentials(),
)
with pytest.raises(ValueError):
client = CloudDeployClient(
client_options={"scopes": ["1", "2"]}, transport=transport,
)
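# [Hedged sketch] The rule shared by the transport-related ValueError cases
# above: a ready-made transport instance already owns its channel and
# credentials, so any other credential-shaping argument must be rejected.
# Hypothetical guard clause, not the client's actual code.
def _validate_transport_args(
    transport=None, credentials=None, credentials_file=None, scopes=None, api_key=None
):
    if transport is not None and any(
        arg is not None for arg in (credentials, credentials_file, scopes, api_key)
    ):
        raise ValueError(
            "When a transport instance is provided, other credential arguments are invalid."
        )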
def test_transport_instance():
# A client may be instantiated with a custom transport instance.
transport = transports.CloudDeployGrpcTransport(
credentials=ga_credentials.AnonymousCredentials(),
)
client = CloudDeployClient(transport=transport)
assert client.transport is transport
def test_transport_get_channel():
# A client may be instantiated with a custom transport instance.
transport = transports.CloudDeployGrpcTransport(
credentials=ga_credentials.AnonymousCredentials(),
)
channel = transport.grpc_channel
assert channel
transport = transports.CloudDeployGrpcAsyncIOTransport(
credentials=ga_credentials.AnonymousCredentials(),
)
channel = transport.grpc_channel
assert channel
@pytest.mark.parametrize(
"transport_class",
[transports.CloudDeployGrpcTransport, transports.CloudDeployGrpcAsyncIOTransport,],
)
def test_transport_adc(transport_class):
# Test default credentials are used if not provided.
with mock.patch.object(google.auth, "default") as adc:
adc.return_value = (ga_credentials.AnonymousCredentials(), None)
transport_class()
adc.assert_called_once()
def test_transport_grpc_default():
# A client should use the gRPC transport by default.
client = CloudDeployClient(credentials=ga_credentials.AnonymousCredentials(),)
assert isinstance(client.transport, transports.CloudDeployGrpcTransport,)
def test_cloud_deploy_base_transport_error():
# Passing both a credentials object and credentials_file should raise an error
with pytest.raises(core_exceptions.DuplicateCredentialArgs):
transport = transports.CloudDeployTransport(
credentials=ga_credentials.AnonymousCredentials(),
credentials_file="credentials.json",
)
def test_cloud_deploy_base_transport():
# Instantiate the base transport.
with mock.patch(
"google.cloud.deploy_v1.services.cloud_deploy.transports.CloudDeployTransport.__init__"
) as Transport:
Transport.return_value = None
transport = transports.CloudDeployTransport(
credentials=ga_credentials.AnonymousCredentials(),
)
# Every method on the transport should just blindly
# raise NotImplementedError.
methods = (
"list_delivery_pipelines",
"get_delivery_pipeline",
"create_delivery_pipeline",
"update_delivery_pipeline",
"delete_delivery_pipeline",
"list_targets",
"get_target",
"create_target",
"update_target",
"delete_target",
"list_releases",
"get_release",
"create_release",
"approve_rollout",
"list_rollouts",
"get_rollout",
"create_rollout",
"get_config",
)
for method in methods:
with pytest.raises(NotImplementedError):
getattr(transport, method)(request=object())
with pytest.raises(NotImplementedError):
transport.close()
# Additionally, the LRO client (a property) should
# also raise NotImplementedError
with pytest.raises(NotImplementedError):
transport.operations_client
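# [Hedged sketch] The contract the loop above pins down, in miniature: the
# abstract base transport exposes every RPC (plus close and the LRO client
# property) as a stub that raises NotImplementedError until a concrete gRPC
# transport overrides it. Illustrative skeleton only.
class _AbstractTransportSketch:
    def get_config(self, request=None):
        raise NotImplementedError()

    def close(self):
        raise NotImplementedError()

    @property
    def operations_client(self):
        raise NotImplementedError()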
def test_cloud_deploy_base_transport_with_credentials_file():
# Instantiate the base transport with a credentials file
with mock.patch.object(
google.auth, "load_credentials_from_file", autospec=True
) as load_creds, mock.patch(
"google.cloud.deploy_v1.services.cloud_deploy.transports.CloudDeployTransport._prep_wrapped_messages"
) as Transport:
Transport.return_value = None
load_creds.return_value = (ga_credentials.AnonymousCredentials(), None)
transport = transports.CloudDeployTransport(
credentials_file="credentials.json", quota_project_id="octopus",
)
load_creds.assert_called_once_with(
"credentials.json",
scopes=None,
default_scopes=("https://www.googleapis.com/auth/cloud-platform",),
quota_project_id="octopus",
)
def test_cloud_deploy_base_transport_with_adc():
# Test the default credentials are used if credentials and credentials_file are None.
with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch(
"google.cloud.deploy_v1.services.cloud_deploy.transports.CloudDeployTransport._prep_wrapped_messages"
) as Transport:
Transport.return_value = None
adc.return_value = (ga_credentials.AnonymousCredentials(), None)
transport = transports.CloudDeployTransport()
adc.assert_called_once()
def test_cloud_deploy_auth_adc():
# If no credentials are provided, we should use ADC credentials.
with mock.patch.object(google.auth, "default", autospec=True) as adc:
adc.return_value = (ga_credentials.AnonymousCredentials(), None)
CloudDeployClient()
adc.assert_called_once_with(
scopes=None,
default_scopes=("https://www.googleapis.com/auth/cloud-platform",),
quota_project_id=None,
)
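# [Hedged sketch] The ADC fallback asserted above: with no explicit
# credentials, the client defers to google.auth.default(), forwarding scopes
# and the quota project. Assumes google-auth's public API; the helper itself
# is hypothetical.
def _resolve_credentials(credentials=None, scopes=None, quota_project_id=None):
    if credentials is not None:
        return credentials
    creds, _ = google.auth.default(
        scopes=scopes,
        default_scopes=("https://www.googleapis.com/auth/cloud-platform",),
        quota_project_id=quota_project_id,
    )
    return creds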
@pytest.mark.parametrize(
"transport_class",
[transports.CloudDeployGrpcTransport, transports.CloudDeployGrpcAsyncIOTransport,],
)
def test_cloud_deploy_transport_auth_adc(transport_class):
# If credentials and host are not provided, the transport class should use
# ADC credentials.
with mock.patch.object(google.auth, "default", autospec=True) as adc:
adc.return_value = (ga_credentials.AnonymousCredentials(), None)
transport_class(quota_project_id="octopus", scopes=["1", "2"])
adc.assert_called_once_with(
scopes=["1", "2"],
default_scopes=("https://www.googleapis.com/auth/cloud-platform",),
quota_project_id="octopus",
)
@pytest.mark.parametrize(
"transport_class,grpc_helpers",
[
(transports.CloudDeployGrpcTransport, grpc_helpers),
(transports.CloudDeployGrpcAsyncIOTransport, grpc_helpers_async),
],
)
def test_cloud_deploy_transport_create_channel(transport_class, grpc_helpers):
# If credentials and host are not provided, the transport class should use
# ADC credentials.
with mock.patch.object(
google.auth, "default", autospec=True
) as adc, mock.patch.object(
grpc_helpers, "create_channel", autospec=True
) as create_channel:
creds = ga_credentials.AnonymousCredentials()
adc.return_value = (creds, None)
transport_class(quota_project_id="octopus", scopes=["1", "2"])
create_channel.assert_called_with(
"clouddeploy.googleapis.com:443",
credentials=creds,
credentials_file=None,
quota_project_id="octopus",
default_scopes=("https://www.googleapis.com/auth/cloud-platform",),
scopes=["1", "2"],
default_host="clouddeploy.googleapis.com",
ssl_credentials=None,
options=[
("grpc.max_send_message_length", -1),
("grpc.max_receive_message_length", -1),
],
)
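# [Hedged note] In the create_channel expectation above, -1 for both
# grpc.max_send_message_length and grpc.max_receive_message_length is gRPC's
# sentinel for "unlimited", so large request and response messages are not
# rejected at the channel level (rationale inferred, not asserted by the test).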
@pytest.mark.parametrize(
"transport_class",
[transports.CloudDeployGrpcTransport, transports.CloudDeployGrpcAsyncIOTransport],
)
def test_cloud_deploy_grpc_transport_client_cert_source_for_mtls(transport_class):
cred = ga_credentials.AnonymousCredentials()
# Check ssl_channel_credentials is used if provided.
with mock.patch.object(transport_class, "create_channel") as mock_create_channel:
mock_ssl_channel_creds = mock.Mock()
transport_class(
host="squid.clam.whelk",
credentials=cred,
ssl_channel_credentials=mock_ssl_channel_creds,
)
mock_create_channel.assert_called_once_with(
"squid.clam.whelk:443",
credentials=cred,
credentials_file=None,
scopes=None,
ssl_credentials=mock_ssl_channel_creds,
quota_project_id=None,
options=[
("grpc.max_send_message_length", -1),
("grpc.max_receive_message_length", -1),
],
)
    # Check that when ssl_channel_credentials is not provided,
    # client_cert_source_for_mtls is used instead.
with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()):
with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred:
transport_class(
credentials=cred,
client_cert_source_for_mtls=client_cert_source_callback,
)
expected_cert, expected_key = client_cert_source_callback()
mock_ssl_cred.assert_called_once_with(
certificate_chain=expected_cert, private_key=expected_key
)
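# [Hedged sketch] The mTLS wiring verified above, in isolation: the client
# cert callback yields (certificate_chain, private_key) bytes which
# grpc.ssl_channel_credentials turns into channel credentials. Assumes grpcio;
# the callback argument mirrors the test fixture.
def _mtls_channel_credentials(client_cert_source):
    expected_cert, expected_key = client_cert_source()
    return grpc.ssl_channel_credentials(
        certificate_chain=expected_cert, private_key=expected_key
    )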
def test_cloud_deploy_host_no_port():
client = CloudDeployClient(
credentials=ga_credentials.AnonymousCredentials(),
client_options=client_options.ClientOptions(
api_endpoint="clouddeploy.googleapis.com"
),
)
assert client.transport._host == "clouddeploy.googleapis.com:443"
def test_cloud_deploy_host_with_port():
client = CloudDeployClient(
credentials=ga_credentials.AnonymousCredentials(),
client_options=client_options.ClientOptions(
api_endpoint="clouddeploy.googleapis.com:8000"
),
)
assert client.transport._host == "clouddeploy.googleapis.com:8000"
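# [Hedged sketch] The endpoint normalization the two host tests above pin
# down: a bare hostname gains the default gRPC port 443, while an explicit
# port is preserved. Hypothetical mirror of the transport's behavior.
def _normalize_host(host, default_port=443):
    return host if ":" in host else "{}:{}".format(host, default_port)

assert _normalize_host("clouddeploy.googleapis.com") == "clouddeploy.googleapis.com:443"
assert _normalize_host("clouddeploy.googleapis.com:8000") == "clouddeploy.googleapis.com:8000"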
def test_cloud_deploy_grpc_transport_channel():
channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials())
# Check that channel is used if provided.
transport = transports.CloudDeployGrpcTransport(
host="squid.clam.whelk", channel=channel,
)
assert transport.grpc_channel == channel
assert transport._host == "squid.clam.whelk:443"
    assert transport._ssl_channel_credentials is None
def test_cloud_deploy_grpc_asyncio_transport_channel():
channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials())
# Check that channel is used if provided.
transport = transports.CloudDeployGrpcAsyncIOTransport(
host="squid.clam.whelk", channel=channel,
)
assert transport.grpc_channel == channel
assert transport._host == "squid.clam.whelk:443"
    assert transport._ssl_channel_credentials is None
# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
# removed from grpc/grpc_asyncio transport constructor.
@pytest.mark.parametrize(
"transport_class",
[transports.CloudDeployGrpcTransport, transports.CloudDeployGrpcAsyncIOTransport],
)
def test_cloud_deploy_transport_channel_mtls_with_client_cert_source(transport_class):
with mock.patch(
"grpc.ssl_channel_credentials", autospec=True
) as grpc_ssl_channel_cred:
with mock.patch.object(
transport_class, "create_channel"
) as grpc_create_channel:
mock_ssl_cred = mock.Mock()
grpc_ssl_channel_cred.return_value = mock_ssl_cred
mock_grpc_channel = mock.Mock()
grpc_create_channel.return_value = mock_grpc_channel
cred = ga_credentials.AnonymousCredentials()
with pytest.warns(DeprecationWarning):
with mock.patch.object(google.auth, "default") as adc:
adc.return_value = (cred, None)
transport = transport_class(
host="squid.clam.whelk",
api_mtls_endpoint="mtls.squid.clam.whelk",
client_cert_source=client_cert_source_callback,
)
adc.assert_called_once()
grpc_ssl_channel_cred.assert_called_once_with(
certificate_chain=b"cert bytes", private_key=b"key bytes"
)
grpc_create_channel.assert_called_once_with(
"mtls.squid.clam.whelk:443",
credentials=cred,
credentials_file=None,
scopes=None,
ssl_credentials=mock_ssl_cred,
quota_project_id=None,
options=[
("grpc.max_send_message_length", -1),
("grpc.max_receive_message_length", -1),
],
)
assert transport.grpc_channel == mock_grpc_channel
assert transport._ssl_channel_credentials == mock_ssl_cred
# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
# removed from grpc/grpc_asyncio transport constructor.
@pytest.mark.parametrize(
"transport_class",
[transports.CloudDeployGrpcTransport, transports.CloudDeployGrpcAsyncIOTransport],
)
def test_cloud_deploy_transport_channel_mtls_with_adc(transport_class):
mock_ssl_cred = mock.Mock()
with mock.patch.multiple(
"google.auth.transport.grpc.SslCredentials",
__init__=mock.Mock(return_value=None),
ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred),
):
with mock.patch.object(
transport_class, "create_channel"
) as grpc_create_channel:
mock_grpc_channel = mock.Mock()
grpc_create_channel.return_value = mock_grpc_channel
mock_cred = mock.Mock()
with pytest.warns(DeprecationWarning):
transport = transport_class(
host="squid.clam.whelk",
credentials=mock_cred,
api_mtls_endpoint="mtls.squid.clam.whelk",
client_cert_source=None,
)
grpc_create_channel.assert_called_once_with(
"mtls.squid.clam.whelk:443",
credentials=mock_cred,
credentials_file=None,
scopes=None,
ssl_credentials=mock_ssl_cred,
quota_project_id=None,
options=[
("grpc.max_send_message_length", -1),
("grpc.max_receive_message_length", -1),
],
)
assert transport.grpc_channel == mock_grpc_channel
def test_cloud_deploy_grpc_lro_client():
client = CloudDeployClient(
credentials=ga_credentials.AnonymousCredentials(), transport="grpc",
)
transport = client.transport
    # Ensure that we have an api-core operations client.
assert isinstance(transport.operations_client, operations_v1.OperationsClient,)
# Ensure that subsequent calls to the property send the exact same object.
assert transport.operations_client is transport.operations_client
def test_cloud_deploy_grpc_lro_async_client():
client = CloudDeployAsyncClient(
credentials=ga_credentials.AnonymousCredentials(), transport="grpc_asyncio",
)
transport = client.transport
    # Ensure that we have an api-core operations client.
assert isinstance(transport.operations_client, operations_v1.OperationsAsyncClient,)
# Ensure that subsequent calls to the property send the exact same object.
assert transport.operations_client is transport.operations_client
def test_build_path():
project = "squid"
location = "clam"
build = "whelk"
expected = "projects/{project}/locations/{location}/builds/{build}".format(
project=project, location=location, build=build,
)
actual = CloudDeployClient.build_path(project, location, build)
assert expected == actual
def test_parse_build_path():
expected = {
"project": "octopus",
"location": "oyster",
"build": "nudibranch",
}
path = CloudDeployClient.build_path(**expected)
# Check that the path construction is reversible.
actual = CloudDeployClient.parse_build_path(path)
assert expected == actual
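# [Hedged sketch] The round trip test_parse_build_path just checked, made
# explicit: building is a str.format over a fixed template, and parsing is a
# named-group regex over the same template. Hypothetical mirrors of the
# generated helpers; the same shape applies to every *_path pair below.
import re

def _build_path_sketch(project, location, build):
    return "projects/{project}/locations/{location}/builds/{build}".format(
        project=project, location=location, build=build,
    )

def _parse_build_path_sketch(path):
    m = re.match(
        r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/builds/(?P<build>.+?)$",
        path,
    )
    return m.groupdict() if m else {}

assert _parse_build_path_sketch(_build_path_sketch("p", "l", "b")) == {
    "project": "p",
    "location": "l",
    "build": "b",
}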
def test_cluster_path():
project = "cuttlefish"
location = "mussel"
cluster = "winkle"
expected = "projects/{project}/locations/{location}/clusters/{cluster}".format(
project=project, location=location, cluster=cluster,
)
actual = CloudDeployClient.cluster_path(project, location, cluster)
assert expected == actual
def test_parse_cluster_path():
expected = {
"project": "nautilus",
"location": "scallop",
"cluster": "abalone",
}
path = CloudDeployClient.cluster_path(**expected)
# Check that the path construction is reversible.
actual = CloudDeployClient.parse_cluster_path(path)
assert expected == actual
def test_config_path():
project = "squid"
location = "clam"
expected = "projects/{project}/locations/{location}/config".format(
project=project, location=location,
)
actual = CloudDeployClient.config_path(project, location)
assert expected == actual
def test_parse_config_path():
expected = {
"project": "whelk",
"location": "octopus",
}
path = CloudDeployClient.config_path(**expected)
# Check that the path construction is reversible.
actual = CloudDeployClient.parse_config_path(path)
assert expected == actual
def test_delivery_pipeline_path():
project = "oyster"
location = "nudibranch"
delivery_pipeline = "cuttlefish"
expected = "projects/{project}/locations/{location}/deliveryPipelines/{delivery_pipeline}".format(
project=project, location=location, delivery_pipeline=delivery_pipeline,
)
actual = CloudDeployClient.delivery_pipeline_path(
project, location, delivery_pipeline
)
assert expected == actual
def test_parse_delivery_pipeline_path():
expected = {
"project": "mussel",
"location": "winkle",
"delivery_pipeline": "nautilus",
}
path = CloudDeployClient.delivery_pipeline_path(**expected)
# Check that the path construction is reversible.
actual = CloudDeployClient.parse_delivery_pipeline_path(path)
assert expected == actual
def test_release_path():
project = "scallop"
location = "abalone"
delivery_pipeline = "squid"
release = "clam"
expected = "projects/{project}/locations/{location}/deliveryPipelines/{delivery_pipeline}/releases/{release}".format(
project=project,
location=location,
delivery_pipeline=delivery_pipeline,
release=release,
)
actual = CloudDeployClient.release_path(
project, location, delivery_pipeline, release
)
assert expected == actual
def test_parse_release_path():
expected = {
"project": "whelk",
"location": "octopus",
"delivery_pipeline": "oyster",
"release": "nudibranch",
}
path = CloudDeployClient.release_path(**expected)
# Check that the path construction is reversible.
actual = CloudDeployClient.parse_release_path(path)
assert expected == actual
def test_rollout_path():
project = "cuttlefish"
location = "mussel"
delivery_pipeline = "winkle"
release = "nautilus"
rollout = "scallop"
expected = "projects/{project}/locations/{location}/deliveryPipelines/{delivery_pipeline}/releases/{release}/rollouts/{rollout}".format(
project=project,
location=location,
delivery_pipeline=delivery_pipeline,
release=release,
rollout=rollout,
)
actual = CloudDeployClient.rollout_path(
project, location, delivery_pipeline, release, rollout
)
assert expected == actual
def test_parse_rollout_path():
expected = {
"project": "abalone",
"location": "squid",
"delivery_pipeline": "clam",
"release": "whelk",
"rollout": "octopus",
}
path = CloudDeployClient.rollout_path(**expected)
# Check that the path construction is reversible.
actual = CloudDeployClient.parse_rollout_path(path)
assert expected == actual
def test_target_path():
project = "oyster"
location = "nudibranch"
target = "cuttlefish"
expected = "projects/{project}/locations/{location}/targets/{target}".format(
project=project, location=location, target=target,
)
actual = CloudDeployClient.target_path(project, location, target)
assert expected == actual
def test_parse_target_path():
expected = {
"project": "mussel",
"location": "winkle",
"target": "nautilus",
}
path = CloudDeployClient.target_path(**expected)
# Check that the path construction is reversible.
actual = CloudDeployClient.parse_target_path(path)
assert expected == actual
def test_worker_pool_path():
project = "scallop"
location = "abalone"
worker_pool = "squid"
expected = "projects/{project}/locations/{location}/workerPools/{worker_pool}".format(
project=project, location=location, worker_pool=worker_pool,
)
actual = CloudDeployClient.worker_pool_path(project, location, worker_pool)
assert expected == actual
def test_parse_worker_pool_path():
expected = {
"project": "clam",
"location": "whelk",
"worker_pool": "octopus",
}
path = CloudDeployClient.worker_pool_path(**expected)
# Check that the path construction is reversible.
actual = CloudDeployClient.parse_worker_pool_path(path)
assert expected == actual
def test_common_billing_account_path():
billing_account = "oyster"
expected = "billingAccounts/{billing_account}".format(
billing_account=billing_account,
)
actual = CloudDeployClient.common_billing_account_path(billing_account)
assert expected == actual
def test_parse_common_billing_account_path():
expected = {
"billing_account": "nudibranch",
}
path = CloudDeployClient.common_billing_account_path(**expected)
# Check that the path construction is reversible.
actual = CloudDeployClient.parse_common_billing_account_path(path)
assert expected == actual
def test_common_folder_path():
folder = "cuttlefish"
expected = "folders/{folder}".format(folder=folder,)
actual = CloudDeployClient.common_folder_path(folder)
assert expected == actual
def test_parse_common_folder_path():
expected = {
"folder": "mussel",
}
path = CloudDeployClient.common_folder_path(**expected)
# Check that the path construction is reversible.
actual = CloudDeployClient.parse_common_folder_path(path)
assert expected == actual
def test_common_organization_path():
organization = "winkle"
expected = "organizations/{organization}".format(organization=organization,)
actual = CloudDeployClient.common_organization_path(organization)
assert expected == actual
def test_parse_common_organization_path():
expected = {
"organization": "nautilus",
}
path = CloudDeployClient.common_organization_path(**expected)
# Check that the path construction is reversible.
actual = CloudDeployClient.parse_common_organization_path(path)
assert expected == actual
def test_common_project_path():
project = "scallop"
expected = "projects/{project}".format(project=project,)
actual = CloudDeployClient.common_project_path(project)
assert expected == actual
def test_parse_common_project_path():
expected = {
"project": "abalone",
}
path = CloudDeployClient.common_project_path(**expected)
# Check that the path construction is reversible.
actual = CloudDeployClient.parse_common_project_path(path)
assert expected == actual
def test_common_location_path():
project = "squid"
location = "clam"
expected = "projects/{project}/locations/{location}".format(
project=project, location=location,
)
actual = CloudDeployClient.common_location_path(project, location)
assert expected == actual
def test_parse_common_location_path():
expected = {
"project": "whelk",
"location": "octopus",
}
path = CloudDeployClient.common_location_path(**expected)
# Check that the path construction is reversible.
actual = CloudDeployClient.parse_common_location_path(path)
assert expected == actual
def test_client_with_default_client_info():
client_info = gapic_v1.client_info.ClientInfo()
with mock.patch.object(
transports.CloudDeployTransport, "_prep_wrapped_messages"
) as prep:
client = CloudDeployClient(
credentials=ga_credentials.AnonymousCredentials(), client_info=client_info,
)
prep.assert_called_once_with(client_info)
with mock.patch.object(
transports.CloudDeployTransport, "_prep_wrapped_messages"
) as prep:
transport_class = CloudDeployClient.get_transport_class()
transport = transport_class(
credentials=ga_credentials.AnonymousCredentials(), client_info=client_info,
)
prep.assert_called_once_with(client_info)
@pytest.mark.asyncio
async def test_transport_close_async():
client = CloudDeployAsyncClient(
credentials=ga_credentials.AnonymousCredentials(), transport="grpc_asyncio",
)
with mock.patch.object(
type(getattr(client.transport, "grpc_channel")), "close"
) as close:
async with client:
close.assert_not_called()
close.assert_called_once()
def test_transport_close():
transports = {
"grpc": "_grpc_channel",
}
for transport, close_name in transports.items():
client = CloudDeployClient(
credentials=ga_credentials.AnonymousCredentials(), transport=transport
)
with mock.patch.object(
type(getattr(client.transport, close_name)), "close"
) as close:
with client:
close.assert_not_called()
close.assert_called_once()
def test_client_ctx():
transports = [
"grpc",
]
for transport in transports:
client = CloudDeployClient(
credentials=ga_credentials.AnonymousCredentials(), transport=transport
)
# Test client calls underlying transport.
with mock.patch.object(type(client.transport), "close") as close:
close.assert_not_called()
with client:
pass
close.assert_called()
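# [Hedged sketch] The context-manager contract the two tests above establish:
# entering the client is a no-op, leaving it closes the transport. Minimal
# hypothetical equivalent of what the generated client does.
class _ClosingClientSketch:
    def __init__(self, transport):
        self.transport = transport

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc, tb):
        self.transport.close()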
@pytest.mark.parametrize(
"client_class,transport_class",
[
(CloudDeployClient, transports.CloudDeployGrpcTransport),
(CloudDeployAsyncClient, transports.CloudDeployGrpcAsyncIOTransport),
],
)
def test_api_key_credentials(client_class, transport_class):
with mock.patch.object(
google.auth._default, "get_api_key_credentials", create=True
) as get_api_key_credentials:
mock_cred = mock.Mock()
get_api_key_credentials.return_value = mock_cred
options = client_options.ClientOptions()
options.api_key = "api_key"
with mock.patch.object(transport_class, "__init__") as patched:
patched.return_value = None
client = client_class(client_options=options)
patched.assert_called_once_with(
credentials=mock_cred,
credentials_file=None,
host=client.DEFAULT_ENDPOINT,
scopes=None,
client_cert_source_for_mtls=None,
quota_project_id=None,
client_info=transports.base.DEFAULT_CLIENT_INFO,
always_use_jwt_access=True,
)
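# [Hedged sketch] A reader for the def_use_chains payload that follows this
# code column, assuming each innermost [start, end] pair is a character-offset
# span into the code string above, with the first span in a chain marking a
# definition and the remaining spans its uses. The field semantics are
# inferred from the data layout, not documented in this dump.
import json

def iter_def_use_chains(code, chains_json):
    for chain in json.loads(chains_json):
        spans = [code[start:end] for start, end in chain]
        if spans:
            yield spans[0], spans[1:]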
| [
[
[
607,
609
],
[
11467,
11469
],
[
7941,
7943
],
[
8702,
8704
],
[
9479,
9481
],
[
9765,
9767
],
[
11996,
11998
],
[
13285,
13287
],
[
14865,
14867
],
[
16259,
16261
],
[
16807,
16809
],
[
17378,
17380
],
[
17703,
17705
],
[
18063,
18065
],
[
18567,
18569
]
],
[
[
617,
621
],
[
6264,
6268
],
[
6372,
6376
],
[
11216,
11220
],
[
11324,
11328
],
[
11451,
11455
],
[
15814,
15818
],
[
15922,
15926
],
[
3460,
3464
],
[
4184,
4188
],
[
4501,
4505
],
[
5017,
5021
],
[
6672,
6676
],
[
6994,
6998
],
[
7279,
7283
],
[
7925,
7929
],
[
8010,
8014
],
[
8686,
8690
],
[
8772,
8776
],
[
9463,
9467
],
[
9740,
9744
],
[
10068,
10072
],
[
11971,
11975
],
[
12204,
12208
],
[
13260,
13264
],
[
13376,
13380
],
[
13452,
13456
],
[
13612,
13616
],
[
14840,
14844
],
[
14956,
14960
],
[
15032,
15036
],
[
16156,
16160
],
[
16243,
16247
],
[
16791,
16795
],
[
16902,
16906
],
[
17362,
17366
],
[
17687,
17691
],
[
18047,
18051
],
[
18136,
18140
],
[
18551,
18555
],
[
18640,
18644
],
[
18784,
18788
],
[
19756,
19760
],
[
20866,
20870
],
[
21488,
21492
],
[
22677,
22681
],
[
23928,
23932
],
[
24763,
24767
],
[
26295,
26299
],
[
27335,
27339
],
[
28178,
28182
],
[
29538,
29542
],
[
31113,
31117
],
[
32875,
32879
],
[
34403,
34407
],
[
34519,
34523
],
[
36129,
36133
],
[
36245,
36249
],
[
38074,
38078
],
[
39549,
39553
],
[
40374,
40378
],
[
41966,
41970
],
[
42977,
42981
],
[
43797,
43801
],
[
45120,
45124
],
[
46929,
46933
],
[
47951,
47955
],
[
48791,
48795
],
[
50068,
50072
],
[
51114,
51118
],
[
51962,
51966
],
[
53911,
53915
],
[
56360,
56364
],
[
57382,
57386
],
[
58222,
58226
],
[
59531,
59535
],
[
60663,
60667
],
[
61565,
61569
],
[
63380,
63384
],
[
65695,
65699
],
[
66717,
66721
],
[
67557,
67561
],
[
68830,
68834
],
[
69868,
69872
],
[
70712,
70716
],
[
72068,
72072
],
[
73895,
73899
],
[
75069,
75073
],
[
75832,
75836
],
[
77255,
77259
],
[
78228,
78232
],
[
79014,
79018
],
[
80285,
80289
],
[
81761,
81765
],
[
83218,
83222
],
[
84451,
84455
],
[
84540,
84544
],
[
85869,
85873
],
[
85958,
85962
],
[
87500,
87504
],
[
88991,
88995
],
[
89744,
89748
],
[
91407,
91411
],
[
92351,
92355
],
[
93092,
93096
],
[
94326,
94330
],
[
95998,
96002
],
[
96963,
96967
],
[
97731,
97735
],
[
98919,
98923
],
[
99908,
99912
],
[
100709,
100713
],
[
102450,
102454
],
[
104675,
104679
],
[
105640,
105644
],
[
106408,
106412
],
[
107606,
107610
],
[
108615,
108619
],
[
109426,
109430
],
[
111099,
111103
],
[
113256,
113260
],
[
114221,
114225
],
[
114989,
114993
],
[
116173,
116177
],
[
117154,
117158
],
[
117951,
117955
],
[
119228,
119232
],
[
120989,
120993
],
[
122169,
122173
],
[
122937,
122941
],
[
124369,
124373
],
[
125347,
125351
],
[
126137,
126141
],
[
127415,
127419
],
[
128899,
128903
],
[
130374,
130378
],
[
131624,
131628
],
[
131714,
131718
],
[
133060,
133064
],
[
133150,
133154
],
[
134709,
134713
],
[
136504,
136508
],
[
137262,
137266
],
[
139306,
139310
],
[
140255,
140259
],
[
141022,
141026
],
[
142263,
142267
],
[
143966,
143970
],
[
144935,
144939
],
[
145708,
145712
],
[
146903,
146907
],
[
147896,
147900
],
[
148700,
148704
],
[
150459,
150463
],
[
152705,
152709
],
[
153689,
153693
],
[
154467,
154471
],
[
155676,
155680
],
[
156656,
156660
],
[
157450,
157454
],
[
158730,
158734
],
[
160481,
160485
],
[
161661,
161665
],
[
162429,
162433
],
[
163861,
163865
],
[
164839,
164843
],
[
165629,
165633
],
[
166907,
166911
],
[
168391,
168395
],
[
169866,
169870
],
[
171116,
171120
],
[
171206,
171210
],
[
172552,
172556
],
[
172642,
172646
],
[
174201,
174205
],
[
176068,
176072
],
[
176826,
176830
],
[
178946,
178950
],
[
179895,
179899
],
[
180662,
180666
],
[
181903,
181907
],
[
183606,
183610
],
[
184575,
184579
],
[
185348,
185352
],
[
186543,
186547
],
[
187536,
187540
],
[
188340,
188344
],
[
190099,
190103
],
[
192335,
192339
],
[
193500,
193504
],
[
194253,
194257
],
[
195640,
195644
],
[
196584,
196588
],
[
197325,
197329
],
[
198559,
198563
],
[
200936,
200940
],
[
202508,
202512
],
[
203405,
203409
],
[
204816,
204820
],
[
204921,
204925
],
[
205702,
205706
],
[
205767,
205771
],
[
206221,
206225
],
[
206885,
206889
],
[
207696,
207700
],
[
207775,
207779
],
[
208932,
208936
],
[
209042,
209046
],
[
209754,
209758
],
[
209820,
209824
],
[
209847,
209851
],
[
212216,
212220
],
[
212327,
212331
],
[
212454,
212458
],
[
212562,
212566
],
[
212769,
212773
],
[
214302,
214306
],
[
214323,
214327
],
[
214414,
214418
],
[
214468,
214472
],
[
214535,
214539
],
[
214666,
214670
],
[
214767,
214771
],
[
225664,
225668
],
[
225958,
225962
],
[
226509,
226513
],
[
226993,
226997
],
[
227494,
227498
],
[
227966,
227970
],
[
228108,
228112
],
[
228275,
228279
]
],
[
[
630,
634
],
[
210991,
210995
],
[
211032,
211036
],
[
211486,
211490
]
],
[
[
665,
668
],
[
211446,
211449
]
],
[
[
676,
680
]
],
[
[
688,
694
],
[
3245,
3251
],
[
3872,
3878
],
[
4802,
4808
],
[
5957,
5963
],
[
10616,
10622
],
[
15728,
15734
],
[
19231,
19237
],
[
20305,
20311
],
[
22126,
22132
],
[
24212,
24218
],
[
25720,
25726
],
[
26867,
26873
],
[
29291,
29297
],
[
30393,
30399
],
[
34162,
34168
],
[
35888,
35894
],
[
37527,
37533
],
[
39827,
39833
],
[
41403,
41409
],
[
42517,
42523
],
[
44875,
44881
],
[
45937,
45943
],
[
46376,
46382
],
[
48238,
48244
],
[
49489,
49495
],
[
50644,
50650
],
[
53663,
53669
],
[
55200,
55206
],
[
55807,
55813
],
[
57669,
57675
],
[
58920,
58926
],
[
60161,
60167
],
[
63132,
63138
],
[
64560,
64566
],
[
65142,
65148
],
[
67004,
67010
],
[
68255,
68261
],
[
69402,
69408
],
[
71820,
71826
],
[
72923,
72929
],
[
73371,
73377
],
[
75307,
75313
],
[
76723,
76729
],
[
77781,
77787
],
[
80049,
80055
],
[
81084,
81090
],
[
84221,
84227
],
[
85639,
85645
],
[
86980,
86986
],
[
89223,
89229
],
[
90887,
90893
],
[
91912,
91918
],
[
94092,
94098
],
[
95065,
95071
],
[
95472,
95478
],
[
97204,
97210
],
[
98383,
98389
],
[
99459,
99465
],
[
102213,
102219
],
[
103617,
103623
],
[
104149,
104155
],
[
105881,
105887
],
[
107060,
107066
],
[
108156,
108162
],
[
110862,
110868
],
[
112201,
112207
],
[
112730,
112736
],
[
114462,
114468
],
[
115641,
115647
],
[
116709,
116715
],
[
118991,
118997
],
[
120047,
120053
],
[
120463,
120469
],
[
122410,
122416
],
[
123833,
123839
],
[
124898,
124904
],
[
127178,
127184
],
[
128218,
128224
],
[
131393,
131399
],
[
132829,
132835
],
[
134187,
134193
],
[
136739,
136745
],
[
138782,
138788
],
[
139814,
139820
],
[
142028,
142034
],
[
143028,
143034
],
[
143438,
143444
],
[
145179,
145185
],
[
146363,
146369
],
[
147445,
147451
],
[
150221,
150227
],
[
151636,
151642
],
[
152175,
152181
],
[
153936,
153942
],
[
155136,
155142
],
[
156207,
156213
],
[
158491,
158497
],
[
159533,
159539
],
[
159955,
159961
],
[
161902,
161908
],
[
163325,
163331
],
[
164390,
164396
],
[
166670,
166676
],
[
167710,
167716
],
[
170885,
170891
],
[
172321,
172327
],
[
173679,
173685
],
[
176303,
176309
],
[
178422,
178428
],
[
179454,
179460
],
[
181668,
181674
],
[
182668,
182674
],
[
183078,
183084
],
[
184819,
184825
],
[
186003,
186009
],
[
187085,
187091
],
[
189861,
189867
],
[
191276,
191282
],
[
191815,
191821
],
[
193732,
193738
],
[
195120,
195126
],
[
196145,
196151
],
[
198325,
198331
],
[
199298,
199304
],
[
202263,
202269
],
[
206577,
206583
],
[
207295,
207301
],
[
208596,
208602
],
[
211983,
211989
],
[
214073,
214079
],
[
226312,
226318
],
[
227676,
227682
],
[
9554,
9560
],
[
9850,
9856
],
[
29131,
29137
],
[
30676,
30682
],
[
44723,
44729
],
[
46218,
46224
],
[
53346,
53352
],
[
55484,
55490
],
[
62840,
62846
],
[
64844,
64850
],
[
71662,
71668
],
[
73207,
73213
],
[
79910,
79916
],
[
81356,
81362
],
[
93961,
93967
],
[
95335,
95341
],
[
101960,
101966
],
[
103890,
103896
],
[
110612,
110618
],
[
112474,
112480
],
[
118854,
118860
],
[
120320,
120326
],
[
127037,
127043
],
[
128491,
128497
],
[
141895,
141901
],
[
143299,
143305
],
[
149962,
149968
],
[
151910,
151916
],
[
158350,
158356
],
[
159808,
159814
],
[
166529,
166535
],
[
167983,
167989
],
[
181535,
181541
],
[
182939,
182945
],
[
189602,
189608
],
[
191550,
191556
],
[
198194,
198200
],
[
199568,
199574
],
[
199941,
199947
],
[
200303,
200309
],
[
200752,
200758
],
[
200989,
200995
],
[
201341,
201347
],
[
203085,
203091
],
[
204343,
204349
],
[
204446,
204452
],
[
204610,
204616
],
[
212714,
212720
],
[
214797,
214803
]
],
[
[
733,
745
]
],
[
[
747,
760
]
],
[
[
791,
805
],
[
7208,
7222
],
[
10002,
10016
],
[
12092,
12106
],
[
16371,
16385
],
[
16966,
16980
],
[
19698,
19712
],
[
20791,
20805
],
[
200680,
200694
],
[
210423,
210437
],
[
210750,
210764
],
[
228195,
228209
]
],
[
[
834,
863
],
[
203099,
203114
]
],
[
[
892,
898
],
[
47547,
47553
],
[
49471,
49477
],
[
56978,
56984
],
[
58902,
58908
],
[
66313,
66319
],
[
68237,
68243
],
[
96570,
96576
],
[
98365,
98371
],
[
105247,
105253
],
[
107042,
107048
],
[
113828,
113834
],
[
115623,
115629
],
[
144541,
144547
],
[
146345,
146351
],
[
184181,
184187
],
[
185985,
185991
]
],
[
[
927,
935
],
[
32274,
32282
],
[
82649,
82657
],
[
129802,
129810
],
[
169294,
169302
],
[
225620,
225628
]
],
[
[
964,
976
],
[
207408,
207420
]
],
[
[
1005,
1023
],
[
207476,
207494
],
[
24954,
24972
],
[
27464,
27482
],
[
29803,
29821
],
[
40563,
40581
],
[
43104,
43122
],
[
45370,
45388
],
[
48983,
49001
],
[
51244,
51262
],
[
54179,
54197
],
[
58414,
58432
],
[
60793,
60811
],
[
63648,
63666
],
[
67749,
67767
],
[
69998,
70016
],
[
72336,
72354
],
[
75998,
76016
],
[
78332,
78350
],
[
80515,
80533
],
[
89908,
89926
],
[
92453,
92471
],
[
94541,
94559
],
[
97898,
97916
],
[
100013,
100031
],
[
102693,
102711
],
[
106575,
106593
],
[
108720,
108738
],
[
111342,
111360
],
[
115156,
115174
],
[
117259,
117277
],
[
119471,
119489
],
[
123104,
123122
],
[
125452,
125470
],
[
127647,
127665
],
[
137427,
137445
],
[
140358,
140376
],
[
142480,
142498
],
[
145876,
145894
],
[
148002,
148020
],
[
150703,
150721
],
[
154636,
154654
],
[
156763,
156781
],
[
158966,
158984
],
[
162596,
162614
],
[
164944,
164962
],
[
167139,
167157
],
[
176991,
177009
],
[
179998,
180016
],
[
182120,
182138
],
[
185516,
185534
],
[
187642,
187660
],
[
190343,
190361
],
[
194417,
194435
],
[
196686,
196704
],
[
198774,
198792
]
],
[
[
1052,
1067
]
],
[
[
1112,
1125
],
[
215926,
215939
],
[
216427,
216440
]
],
[
[
1154,
1167
]
],
[
[
1192,
1221
],
[
3413,
3427
],
[
4970,
4984
],
[
6788,
6802
],
[
22349,
22363
],
[
23784,
23798
],
[
24435,
24449
],
[
25957,
25971
],
[
26997,
27011
],
[
28057,
28071
],
[
28980,
28994
],
[
29417,
29431
],
[
30525,
30539
],
[
30963,
30977
],
[
32725,
32739
],
[
34284,
34298
],
[
36010,
36024
],
[
37746,
37760
],
[
39405,
39419
],
[
40046,
40060
],
[
41634,
41648
],
[
42645,
42659
],
[
43676,
43690
],
[
44572,
44586
],
[
44999,
45013
],
[
46067,
46081
],
[
46601,
46615
],
[
47807,
47821
],
[
48463,
48477
],
[
49729,
49743
],
[
50775,
50789
],
[
51841,
51855
],
[
53195,
53209
],
[
53790,
53804
],
[
55333,
55347
],
[
56032,
56046
],
[
57238,
57252
],
[
57894,
57908
],
[
59160,
59174
],
[
60292,
60306
],
[
61444,
61458
],
[
62689,
62703
],
[
63259,
63273
],
[
64693,
64707
],
[
65367,
65381
],
[
66573,
66587
],
[
67229,
67243
],
[
68495,
68509
],
[
69533,
69547
],
[
70591,
70605
],
[
71511,
71525
],
[
71947,
71961
],
[
73056,
73070
],
[
73567,
73581
],
[
74925,
74939
],
[
75504,
75518
],
[
76927,
76941
],
[
77900,
77914
],
[
78893,
78907
],
[
79759,
79773
],
[
80164,
80178
],
[
81205,
81219
],
[
81611,
81625
],
[
83068,
83082
],
[
84332,
84346
],
[
85750,
85764
],
[
87172,
87186
],
[
88847,
88861
],
[
89416,
89430
],
[
91085,
91099
],
[
92029,
92043
],
[
92971,
92985
],
[
93810,
93824
],
[
94205,
94219
],
[
95184,
95198
],
[
95670,
95684
],
[
96819,
96833
],
[
97403,
97417
],
[
98590,
98604
],
[
99579,
99593
],
[
100588,
100602
],
[
101809,
101823
],
[
102329,
102343
],
[
103739,
103753
],
[
104347,
104361
],
[
105496,
105510
],
[
106080,
106094
],
[
107267,
107281
],
[
108276,
108290
],
[
109305,
109319
],
[
110461,
110475
],
[
110978,
110992
],
[
112323,
112337
],
[
112928,
112942
],
[
114077,
114091
],
[
114661,
114675
],
[
115848,
115862
],
[
116829,
116843
],
[
117830,
117844
],
[
118703,
118717
],
[
119107,
119121
],
[
120169,
120183
],
[
120661,
120675
],
[
122025,
122039
],
[
122609,
122623
],
[
124040,
124054
],
[
125018,
125032
],
[
126016,
126030
],
[
126886,
126900
],
[
127294,
127308
],
[
128340,
128354
],
[
128749,
128763
],
[
130224,
130238
],
[
131505,
131519
],
[
132941,
132955
],
[
134381,
134395
],
[
136360,
136374
],
[
136934,
136948
],
[
138983,
138997
],
[
139932,
139946
],
[
140901,
140915
],
[
141744,
141758
],
[
142142,
142156
],
[
143148,
143162
],
[
143638,
143652
],
[
144791,
144805
],
[
145380,
145394
],
[
146573,
146587
],
[
147566,
147580
],
[
148579,
148593
],
[
149811,
149825
],
[
150338,
150352
],
[
151759,
151773
],
[
152377,
152391
],
[
153545,
153559
],
[
154139,
154153
],
[
155349,
155363
],
[
156329,
156343
],
[
157329,
157343
],
[
158199,
158213
],
[
158609,
158623
],
[
159657,
159671
],
[
160153,
160167
],
[
161517,
161531
],
[
162101,
162115
],
[
163532,
163546
],
[
164510,
164524
],
[
165508,
165522
],
[
166378,
166392
],
[
166786,
166800
],
[
167832,
167846
],
[
168241,
168255
],
[
169716,
169730
],
[
170997,
171011
],
[
172433,
172447
],
[
173873,
173887
],
[
175924,
175938
],
[
176498,
176512
],
[
178623,
178637
],
[
179572,
179586
],
[
180541,
180555
],
[
181384,
181398
],
[
181782,
181796
],
[
182788,
182802
],
[
183278,
183292
],
[
184431,
184445
],
[
185020,
185034
],
[
186213,
186227
],
[
187206,
187220
],
[
188219,
188233
],
[
189451,
189465
],
[
189978,
189992
],
[
191399,
191413
],
[
192007,
192021
],
[
193356,
193370
],
[
193925,
193939
],
[
195318,
195332
],
[
196262,
196276
],
[
197204,
197218
],
[
198043,
198057
],
[
198438,
198452
],
[
199417,
199431
],
[
199887,
199901
],
[
200028,
200042
],
[
200249,
200263
],
[
200621,
200635
],
[
201100,
201114
],
[
201287,
201301
],
[
201661,
201675
],
[
201977,
201991
],
[
202159,
202173
],
[
202586,
202600
],
[
202827,
202841
],
[
203218,
203232
],
[
203648,
203662
],
[
205136,
205150
],
[
205975,
205989
],
[
206314,
206328
],
[
206978,
206992
],
[
207889,
207903
],
[
208827,
208841
],
[
210361,
210375
],
[
210688,
210702
],
[
212659,
212673
],
[
215722,
215736
],
[
216215,
216229
],
[
225824,
225838
],
[
226185,
226199
],
[
226429,
226443
],
[
226911,
226925
],
[
227362,
227376
]
],
[
[
1257,
1278
],
[
9568,
9589
]
],
[
[
1336,
1358
],
[
3305,
3327
],
[
4862,
4884
],
[
6135,
6157
],
[
6395,
6417
],
[
6471,
6493
],
[
10822,
10844
],
[
11066,
11088
],
[
11347,
11369
],
[
11423,
11445
],
[
15788,
15810
],
[
15945,
15967
],
[
16021,
16043
],
[
19409,
19431
],
[
20483,
20505
],
[
227818,
227840
],
[
24391,
24413
],
[
26962,
26984
],
[
29382,
29404
],
[
30490,
30512
],
[
34249,
34271
],
[
35975,
35997
],
[
40002,
40024
],
[
42610,
42632
],
[
44964,
44986
],
[
46032,
46054
],
[
48419,
48441
],
[
50740,
50762
],
[
53755,
53777
],
[
55298,
55320
],
[
57850,
57872
],
[
60257,
60279
],
[
63224,
63246
],
[
64658,
64680
],
[
67185,
67207
],
[
69498,
69520
],
[
71912,
71934
],
[
73021,
73043
],
[
75460,
75482
],
[
77865,
77887
],
[
80129,
80151
],
[
81170,
81192
],
[
84297,
84319
],
[
85715,
85737
],
[
89372,
89394
],
[
91994,
92016
],
[
94170,
94192
],
[
95149,
95171
],
[
97359,
97381
],
[
99544,
99566
],
[
102294,
102316
],
[
103704,
103726
],
[
106036,
106058
],
[
108241,
108263
],
[
110943,
110965
],
[
112288,
112310
],
[
114617,
114639
],
[
116794,
116816
],
[
119072,
119094
],
[
120134,
120156
],
[
122565,
122587
],
[
124983,
125005
],
[
127259,
127281
],
[
128305,
128327
],
[
131470,
131492
],
[
132906,
132928
],
[
136890,
136912
],
[
139897,
139919
],
[
142107,
142129
],
[
143113,
143135
],
[
145336,
145358
],
[
147531,
147553
],
[
150303,
150325
],
[
151724,
151746
],
[
154095,
154117
],
[
156294,
156316
],
[
158574,
158596
],
[
159622,
159644
],
[
162057,
162079
],
[
164475,
164497
],
[
166751,
166773
],
[
167797,
167819
],
[
170962,
170984
],
[
172398,
172420
],
[
176454,
176476
],
[
179537,
179559
],
[
181747,
181769
],
[
182753,
182775
],
[
184976,
184998
],
[
187171,
187193
],
[
189943,
189965
],
[
191364,
191386
],
[
193881,
193903
],
[
196227,
196249
],
[
198403,
198425
],
[
199382,
199404
],
[
216171,
216193
],
[
226385,
226407
]
],
[
[
1416,
1433
],
[
3286,
3303
],
[
4843,
4860
],
[
6048,
6065
],
[
6287,
6304
],
[
6350,
6367
],
[
10727,
10744
],
[
10970,
10987
],
[
11239,
11256
],
[
11302,
11319
],
[
15769,
15786
],
[
15837,
15854
],
[
15900,
15917
],
[
19322,
19339
],
[
20396,
20413
],
[
227752,
227769
],
[
2617,
2634
],
[
2697,
2714
],
[
2804,
2821
],
[
2924,
2941
],
[
3047,
3064
],
[
3165,
3182
],
[
5666,
5683
],
[
5848,
5865
],
[
6690,
6707
],
[
7012,
7029
],
[
21685,
21702
],
[
22310,
22327
],
[
23745,
23762
],
[
25927,
25944
],
[
28027,
28044
],
[
28950,
28967
],
[
30924,
30941
],
[
32686,
32703
],
[
37707,
37724
],
[
39366,
39383
],
[
41604,
41621
],
[
43646,
43663
],
[
44542,
44559
],
[
46562,
46579
],
[
47768,
47785
],
[
49699,
49716
],
[
51811,
51828
],
[
53165,
53182
],
[
55993,
56010
],
[
57199,
57216
],
[
59130,
59147
],
[
61414,
61431
],
[
62659,
62676
],
[
65328,
65345
],
[
66534,
66551
],
[
68465,
68482
],
[
70561,
70578
],
[
71481,
71498
],
[
73528,
73545
],
[
74886,
74903
],
[
76897,
76914
],
[
78863,
78880
],
[
79729,
79746
],
[
81572,
81589
],
[
83029,
83046
],
[
87133,
87150
],
[
88808,
88825
],
[
91055,
91072
],
[
92941,
92958
],
[
93780,
93797
],
[
95631,
95648
],
[
96780,
96797
],
[
98560,
98577
],
[
100558,
100575
],
[
101779,
101796
],
[
104308,
104325
],
[
105457,
105474
],
[
107237,
107254
],
[
109275,
109292
],
[
110431,
110448
],
[
112889,
112906
],
[
114038,
114055
],
[
115818,
115835
],
[
117800,
117817
],
[
118673,
118690
],
[
120622,
120639
],
[
121986,
122003
],
[
124010,
124027
],
[
125986,
126003
],
[
126856,
126873
],
[
128710,
128727
],
[
130185,
130202
],
[
134342,
134359
],
[
136321,
136338
],
[
138953,
138970
],
[
140871,
140888
],
[
141714,
141731
],
[
143599,
143616
],
[
144752,
144769
],
[
146543,
146560
],
[
148549,
148566
],
[
149781,
149798
],
[
152338,
152355
],
[
153506,
153523
],
[
155319,
155336
],
[
157299,
157316
],
[
158169,
158186
],
[
160114,
160131
],
[
161478,
161495
],
[
163502,
163519
],
[
165478,
165495
],
[
166348,
166365
],
[
168202,
168219
],
[
169677,
169694
],
[
173834,
173851
],
[
175885,
175902
],
[
178593,
178610
],
[
180511,
180528
],
[
181354,
181371
],
[
183239,
183256
],
[
184392,
184409
],
[
186183,
186200
],
[
188189,
188206
],
[
189421,
189438
],
[
191968,
191985
],
[
193317,
193334
],
[
195288,
195305
],
[
197174,
197191
],
[
198013,
198030
],
[
199985,
200002
],
[
200347,
200364
],
[
200796,
200813
],
[
201033,
201050
],
[
201385,
201402
],
[
201719,
201736
],
[
202797,
202814
],
[
206367,
206384
],
[
210322,
210339
],
[
210649,
210666
],
[
215683,
215700
],
[
216860,
216877
],
[
217101,
217118
],
[
217210,
217227
],
[
217546,
217563
],
[
217794,
217811
],
[
217905,
217922
],
[
218183,
218200
],
[
218387,
218404
],
[
218497,
218514
],
[
218897,
218914
],
[
219197,
219214
],
[
219318,
219335
],
[
219793,
219810
],
[
220113,
220130
],
[
220224,
220241
],
[
220764,
220781
],
[
221116,
221133
],
[
221227,
221244
],
[
221563,
221580
],
[
221805,
221822
],
[
221915,
221932
],
[
222272,
222289
],
[
222530,
222547
],
[
222645,
222662
],
[
222914,
222931
],
[
223130,
223147
],
[
223256,
223273
],
[
223473,
223490
],
[
223649,
223666
],
[
223766,
223783
],
[
224006,
224023
],
[
224208,
224225
],
[
224331,
224348
],
[
224548,
224565
],
[
224729,
224746
],
[
224847,
224864
],
[
225134,
225151
],
[
225356,
225373
],
[
225475,
225492
],
[
225781,
225798
],
[
226084,
226101
],
[
226868,
226885
],
[
227319,
227336
]
],
[
[
1491,
1497
],
[
23385,
23391
],
[
25557,
25563
],
[
74547,
74553
],
[
76570,
76576
],
[
121645,
121651
],
[
123679,
123685
],
[
161137,
161143
],
[
163171,
163177
]
],
[
[
1555,
1565
],
[
3950,
3960
],
[
4005,
4015
],
[
6067,
6077
],
[
6171,
6181
],
[
10746,
10756
],
[
10858,
10868
],
[
10989,
10999
],
[
11102,
11112
],
[
19341,
19351
],
[
19445,
19455
],
[
20415,
20425
],
[
20519,
20529
],
[
202316,
202326
],
[
202353,
202363
],
[
206630,
206640
],
[
206667,
206677
],
[
207371,
207381
],
[
207432,
207442
],
[
208649,
208659
],
[
208686,
208696
],
[
212036,
212046
],
[
212073,
212083
],
[
214126,
214136
],
[
214163,
214173
],
[
227771,
227781
],
[
227842,
227852
],
[
5743,
5753
],
[
5918,
5928
],
[
7727,
7737
],
[
8479,
8489
],
[
9246,
9256
],
[
10526,
10536
],
[
13019,
13029
],
[
14641,
14651
],
[
15622,
15632
],
[
20215,
20225
],
[
21333,
21343
],
[
22036,
22046
],
[
199830,
199840
],
[
200192,
200202
],
[
200564,
200574
],
[
201230,
201240
],
[
201604,
201614
],
[
201920,
201930
],
[
202095,
202105
],
[
202907,
202917
],
[
203161,
203171
],
[
203591,
203601
],
[
205201,
205211
],
[
206040,
206050
],
[
211129,
211139
],
[
211583,
211593
],
[
225691,
225701
],
[
225985,
225995
],
[
228747,
228757
]
],
[
[
1607,
1619
],
[
22172,
22184
],
[
24332,
24344
],
[
37573,
37585
],
[
39945,
39957
],
[
46422,
46434
],
[
48359,
48371
],
[
55853,
55865
],
[
57790,
57802
],
[
65188,
65200
],
[
67125,
67137
],
[
73412,
73424
],
[
75412,
75424
],
[
87021,
87033
],
[
89326,
89338
],
[
95513,
95525
],
[
97310,
97322
],
[
104190,
104202
],
[
105987,
105999
],
[
112771,
112783
],
[
114568,
114580
],
[
120504,
120516
],
[
122516,
122528
],
[
134228,
134240
],
[
136843,
136855
],
[
143479,
143491
],
[
145286,
145298
],
[
152216,
152228
],
[
154044,
154056
],
[
159996,
160008
],
[
162008,
162020
],
[
173720,
173732
],
[
176407,
176419
],
[
183119,
183131
],
[
184926,
184938
],
[
191856,
191868
],
[
193835,
193847
],
[
22868,
22880
],
[
23246,
23258
],
[
24165,
24177
],
[
25005,
25017
],
[
25418,
25430
],
[
26133,
26145
],
[
26424,
26436
],
[
27173,
27185
],
[
27515,
27527
],
[
28369,
28381
],
[
29210,
29222
],
[
29729,
29741
],
[
29854,
29866
],
[
30761,
30773
],
[
31304,
31316
],
[
31405,
31417
],
[
31458,
31470
],
[
31511,
31523
],
[
31629,
31641
],
[
31762,
31774
],
[
31842,
31854
],
[
31943,
31955
],
[
32044,
32056
],
[
32097,
32109
],
[
32552,
32564
],
[
33066,
33078
],
[
33167,
33179
],
[
33220,
33232
],
[
33273,
33285
],
[
33391,
33403
],
[
33524,
33536
],
[
33604,
33616
],
[
33705,
33717
],
[
33806,
33818
],
[
33859,
33871
],
[
34640,
34652
],
[
34741,
34753
],
[
34794,
34806
],
[
34847,
34859
],
[
34965,
34977
],
[
35098,
35110
],
[
35178,
35190
],
[
35279,
35291
],
[
35380,
35392
],
[
35433,
35445
],
[
35834,
35846
],
[
36366,
36378
],
[
36467,
36479
],
[
36520,
36532
],
[
36573,
36585
],
[
36691,
36703
],
[
36824,
36836
],
[
36904,
36916
],
[
37005,
37017
],
[
37106,
37118
],
[
37159,
37171
],
[
38263,
38275
],
[
38458,
38470
],
[
38511,
38523
],
[
38819,
38831
],
[
38956,
38968
],
[
39782,
39794
],
[
40614,
40626
],
[
41056,
41068
],
[
41193,
41205
],
[
41810,
41822
],
[
42093,
42105
],
[
42821,
42833
],
[
43155,
43167
],
[
43986,
43998
],
[
44800,
44812
],
[
45309,
45321
],
[
45421,
45433
],
[
46301,
46313
],
[
47407,
47419
],
[
48190,
48202
],
[
49331,
49343
],
[
49905,
49917
],
[
50951,
50963
],
[
52432,
52444
],
[
52895,
52907
],
[
53426,
53438
],
[
53537,
53549
],
[
54537,
54549
],
[
54995,
55007
],
[
55570,
55582
],
[
55681,
55693
],
[
56838,
56850
],
[
57621,
57633
],
[
58762,
58774
],
[
59336,
59348
],
[
60468,
60480
],
[
62000,
62012
],
[
62379,
62391
],
[
62920,
62932
],
[
62996,
63008
],
[
63971,
63983
],
[
64345,
64357
],
[
64930,
64942
],
[
65006,
65018
],
[
66173,
66185
],
[
66956,
66968
],
[
68097,
68109
],
[
68671,
68683
],
[
69709,
69721
],
[
71742,
71754
],
[
73293,
73305
],
[
74061,
74073
],
[
74418,
74430
],
[
75270,
75282
],
[
76049,
76061
],
[
76441,
76453
],
[
77103,
77115
],
[
77359,
77371
],
[
78076,
78088
],
[
78383,
78395
],
[
79180,
79192
],
[
79978,
79990
],
[
80451,
80463
],
[
80566,
80578
],
[
81430,
81442
],
[
81927,
81939
],
[
82007,
82019
],
[
82050,
82062
],
[
82093,
82105
],
[
82201,
82213
],
[
82283,
82295
],
[
82342,
82354
],
[
82417,
82429
],
[
82476,
82488
],
[
82499,
82511
],
[
82916,
82928
],
[
83384,
83396
],
[
83464,
83476
],
[
83507,
83519
],
[
83550,
83562
],
[
83658,
83670
],
[
83740,
83752
],
[
83799,
83811
],
[
83874,
83886
],
[
83933,
83945
],
[
83956,
83968
],
[
84660,
84672
],
[
84740,
84752
],
[
84783,
84795
],
[
84826,
84838
],
[
84934,
84946
],
[
85016,
85028
],
[
85075,
85087
],
[
85150,
85162
],
[
85209,
85221
],
[
85232,
85244
],
[
85595,
85607
],
[
86078,
86090
],
[
86158,
86170
],
[
86201,
86213
],
[
86244,
86256
],
[
86352,
86364
],
[
86434,
86446
],
[
86493,
86505
],
[
86568,
86580
],
[
86627,
86639
],
[
86650,
86662
],
[
87664,
87676
],
[
87913,
87925
],
[
88196,
88208
],
[
88323,
88335
],
[
89188,
89200
],
[
89959,
89971
],
[
90464,
90476
],
[
90591,
90603
],
[
91261,
91273
],
[
91509,
91521
],
[
92205,
92217
],
[
92491,
92503
],
[
93256,
93268
],
[
94027,
94039
],
[
94490,
94502
],
[
94579,
94591
],
[
95407,
95419
],
[
96440,
96452
],
[
97166,
97178
],
[
98235,
98247
],
[
98766,
98778
],
[
99755,
99767
],
[
101132,
101144
],
[
101552,
101564
],
[
102029,
102041
],
[
102119,
102131
],
[
103029,
103041
],
[
103444,
103456
],
[
103965,
103977
],
[
104055,
104067
],
[
105117,
105129
],
[
105843,
105855
],
[
106912,
106924
],
[
107443,
107455
],
[
108452,
108464
],
[
109814,
109826
],
[
110172,
110184
],
[
110681,
110693
],
[
110736,
110748
],
[
111643,
111655
],
[
111996,
112008
],
[
112549,
112561
],
[
112604,
112616
],
[
113698,
113710
],
[
114424,
114436
],
[
115493,
115505
],
[
116024,
116036
],
[
117005,
117017
],
[
118923,
118935
],
[
120395,
120407
],
[
121156,
121168
],
[
121515,
121527
],
[
122372,
122384
],
[
123155,
123167
],
[
123549,
123561
],
[
124216,
124228
],
[
124474,
124486
],
[
125194,
125206
],
[
125503,
125515
],
[
126304,
126316
],
[
127106,
127118
],
[
127582,
127594
],
[
127698,
127710
],
[
128566,
128578
],
[
129066,
129078
],
[
129148,
129160
],
[
129192,
129204
],
[
129236,
129248
],
[
129345,
129357
],
[
129429,
129441
],
[
129490,
129502
],
[
129566,
129578
],
[
129627,
129639
],
[
129651,
129663
],
[
130070,
130082
],
[
130541,
130553
],
[
130623,
130635
],
[
130667,
130679
],
[
130711,
130723
],
[
130820,
130832
],
[
130904,
130916
],
[
130965,
130977
],
[
131041,
131053
],
[
131102,
131114
],
[
131126,
131138
],
[
131834,
131846
],
[
131916,
131928
],
[
131960,
131972
],
[
132004,
132016
],
[
132113,
132125
],
[
132197,
132209
],
[
132258,
132270
],
[
132334,
132346
],
[
132395,
132407
],
[
132419,
132431
],
[
132784,
132796
],
[
133270,
133282
],
[
133352,
133364
],
[
133396,
133408
],
[
133440,
133452
],
[
133549,
133561
],
[
133633,
133645
],
[
133694,
133706
],
[
133770,
133782
],
[
133831,
133843
],
[
133855,
133867
],
[
134874,
134886
],
[
135150,
135162
],
[
135514,
135526
],
[
135642,
135654
],
[
135979,
135991
],
[
136703,
136715
],
[
137478,
137490
],
[
137778,
137790
],
[
138165,
138177
],
[
138293,
138305
],
[
138630,
138642
],
[
139159,
139171
],
[
139409,
139421
],
[
140108,
140120
],
[
140409,
140421
],
[
141187,
141199
],
[
141962,
141974
],
[
142428,
142440
],
[
142531,
142543
],
[
143372,
143384
],
[
144410,
144422
],
[
145140,
145152
],
[
146214,
146226
],
[
146749,
146761
],
[
147742,
147754
],
[
149126,
149138
],
[
149550,
149562
],
[
150032,
150044
],
[
150124,
150136
],
[
151041,
151053
],
[
151460,
151472
],
[
151986,
151998
],
[
152078,
152090
],
[
152874,
152886
],
[
153140,
153152
],
[
153272,
153284
],
[
153896,
153908
],
[
154687,
154699
],
[
154964,
154976
],
[
155096,
155108
],
[
155525,
155537
],
[
155783,
155795
],
[
156505,
156517
],
[
156814,
156826
],
[
157619,
157631
],
[
158421,
158433
],
[
158899,
158911
],
[
159017,
159029
],
[
159885,
159897
],
[
160648,
160660
],
[
161007,
161019
],
[
161864,
161876
],
[
162647,
162659
],
[
163041,
163053
],
[
163708,
163720
],
[
163966,
163978
],
[
164686,
164698
],
[
164995,
165007
],
[
165796,
165808
],
[
166598,
166610
],
[
167074,
167086
],
[
167190,
167202
],
[
168058,
168070
],
[
168558,
168570
],
[
168640,
168652
],
[
168684,
168696
],
[
168728,
168740
],
[
168837,
168849
],
[
168921,
168933
],
[
168982,
168994
],
[
169058,
169070
],
[
169119,
169131
],
[
169143,
169155
],
[
169562,
169574
],
[
170033,
170045
],
[
170115,
170127
],
[
170159,
170171
],
[
170203,
170215
],
[
170312,
170324
],
[
170396,
170408
],
[
170457,
170469
],
[
170533,
170545
],
[
170594,
170606
],
[
170618,
170630
],
[
171326,
171338
],
[
171408,
171420
],
[
171452,
171464
],
[
171496,
171508
],
[
171605,
171617
],
[
171689,
171701
],
[
171750,
171762
],
[
171826,
171838
],
[
171887,
171899
],
[
171911,
171923
],
[
172276,
172288
],
[
172762,
172774
],
[
172844,
172856
],
[
172888,
172900
],
[
172932,
172944
],
[
173041,
173053
],
[
173125,
173137
],
[
173186,
173198
],
[
173262,
173274
],
[
173323,
173335
],
[
173347,
173359
],
[
174366,
174378
],
[
174561,
174573
],
[
174630,
174642
],
[
175037,
175049
],
[
175165,
175177
],
[
175411,
175423
],
[
175490,
175502
],
[
176267,
176279
],
[
177042,
177054
],
[
177257,
177269
],
[
177330,
177342
],
[
177764,
177776
],
[
177892,
177904
],
[
178138,
178150
],
[
178217,
178229
],
[
178799,
178811
],
[
179049,
179061
],
[
179748,
179760
],
[
180049,
180061
],
[
180827,
180839
],
[
181602,
181614
],
[
182068,
182080
],
[
182171,
182183
],
[
183012,
183024
],
[
184050,
184062
],
[
184780,
184792
],
[
185854,
185866
],
[
186389,
186401
],
[
187382,
187394
],
[
188766,
188778
],
[
189190,
189202
],
[
189672,
189684
],
[
189764,
189776
],
[
190681,
190693
],
[
191100,
191112
],
[
191626,
191638
],
[
191718,
191730
],
[
192499,
192511
],
[
192855,
192867
],
[
192982,
192994
],
[
193697,
193709
],
[
194468,
194480
],
[
194847,
194859
],
[
194974,
194986
],
[
195494,
195506
],
[
195742,
195754
],
[
196438,
196450
],
[
196724,
196736
],
[
197489,
197501
],
[
198260,
198272
],
[
198723,
198735
],
[
198812,
198824
],
[
199640,
199652
]
],
[
[
1651,
1665
],
[
47121,
47135
],
[
49034,
49048
],
[
50198,
50212
],
[
51295,
51309
],
[
52154,
52168
],
[
54103,
54117
],
[
54230,
54244
],
[
56552,
56566
],
[
58465,
58479
],
[
59661,
59675
],
[
60844,
60858
],
[
61757,
61771
],
[
63572,
63586
],
[
63699,
63713
],
[
65887,
65901
],
[
67800,
67814
],
[
68960,
68974
],
[
70049,
70063
],
[
70904,
70918
],
[
72260,
72274
],
[
72387,
72401
],
[
96165,
96179
],
[
97949,
97963
],
[
99024,
99038
],
[
100064,
100078
],
[
100876,
100890
],
[
102617,
102631
],
[
102744,
102758
],
[
104842,
104856
],
[
106626,
106640
],
[
107711,
107725
],
[
108771,
108785
],
[
109593,
109607
],
[
111266,
111280
],
[
111393,
111407
],
[
113423,
113437
],
[
115207,
115221
],
[
116278,
116292
],
[
117310,
117324
],
[
118118,
118132
],
[
119395,
119409
],
[
119522,
119536
],
[
144134,
144148
],
[
145927,
145941
],
[
147009,
147023
],
[
148053,
148067
],
[
148868,
148882
],
[
150627,
150641
],
[
150754,
150768
],
[
183774,
183788
],
[
185567,
185581
],
[
186649,
186663
],
[
187693,
187707
],
[
188508,
188522
],
[
190267,
190281
],
[
190394,
190408
]
],
[
[
1692,
1707
],
[
3487,
3502
],
[
4211,
4226
],
[
4316,
4331
],
[
4528,
4543
],
[
4633,
4648
],
[
5044,
5059
]
],
[
[
1736,
1750
],
[
62074,
62088
],
[
62512,
62526
],
[
63070,
63084
],
[
64045,
64059
],
[
64478,
64492
],
[
65080,
65094
],
[
109878,
109892
],
[
110295,
110309
],
[
110800,
110814
],
[
111707,
111721
],
[
112119,
112133
],
[
112668,
112682
]
],
[
[
1795,
1808
]
],
[
[
1832,
1843
],
[
202526,
202532
],
[
204843,
204849
],
[
205720,
205726
],
[
206239,
206245
],
[
206903,
206909
],
[
207723,
207729
],
[
212787,
212793
],
[
227993,
227999
]
],
[
[
1850,
1877
],
[
12153,
12180
],
[
12606,
12633
],
[
13734,
13761
],
[
14109,
14136
],
[
210015,
210042
],
[
210100,
210127
],
[
213079,
213106
]
],
[
[
2136,
2159
],
[
6326,
6349
],
[
6447,
6470
],
[
11278,
11301
],
[
11399,
11422
],
[
15876,
15899
],
[
15997,
16020
]
],
[
[
2312,
2343
]
],
[
[
3335,
3385
]
],
[
[
4079,
4134
]
],
[
[
4892,
4942
]
],
[
[
5602,
5646
]
],
[
[
6502,
6541
]
],
[
[
11525,
11563
]
],
[
[
16052,
16110
]
],
[
[
19541,
19587
]
],
[
[
20615,
20671
]
],
[
[
21426,
21475
]
],
[
[
22228,
22256
]
],
[
[
23546,
23585
]
],
[
[
24232,
25716
],
[
25808,
25842
]
],
[
[
25740,
25861
]
],
[
[
25868,
25910
]
],
[
[
26887,
27965
]
],
[
[
27972,
28010
]
],
[
[
28889,
28933
]
],
[
[
29311,
30389
]
],
[
[
30413,
30838
]
],
[
[
30845,
30879
]
],
[
[
32607,
32641
]
],
[
[
34182,
35884
]
],
[
[
35908,
37523
]
],
[
[
37627,
37653
]
],
[
[
39169,
39206
]
],
[
[
39847,
41399
],
[
41489,
41521
]
],
[
[
41423,
41540
]
],
[
[
41547,
41587
]
],
[
[
42537,
43586
]
],
[
[
43593,
43629
]
],
[
[
44483,
44525
]
],
[
[
44895,
45933
]
],
[
[
45957,
46372
]
],
[
[
46479,
46508
]
],
[
[
47568,
47608
]
],
[
[
48258,
49485
],
[
49578,
49613
]
],
[
[
49509,
49632
]
],
[
[
49639,
49682
]
],
[
[
50664,
51748
]
],
[
[
51755,
51794
]
],
[
[
53103,
53148
]
],
[
[
53683,
55196
]
],
[
[
55220,
55803
]
],
[
[
55910,
55939
]
],
[
[
56999,
57039
]
],
[
[
57689,
58916
],
[
59009,
59044
]
],
[
[
58940,
59063
]
],
[
[
59070,
59113
]
],
[
[
60181,
61351
]
],
[
[
61358,
61397
]
],
[
[
62597,
62642
]
],
[
[
63152,
64556
]
],
[
[
64580,
65138
]
],
[
[
65245,
65274
]
],
[
[
66334,
66374
]
],
[
[
67024,
68251
],
[
68344,
68379
]
],
[
[
68275,
68398
]
],
[
[
68405,
68448
]
],
[
[
69422,
70498
]
],
[
[
70505,
70544
]
],
[
[
71419,
71464
]
],
[
[
71840,
72919
]
],
[
[
72943,
73367
]
],
[
[
73457,
73474
]
],
[
[
74698,
74726
]
],
[
[
75327,
76719
],
[
76800,
76823
]
],
[
[
76743,
76842
]
],
[
[
76849,
76880
]
],
[
[
77801,
78812
]
],
[
[
78819,
78846
]
],
[
[
79679,
79712
]
],
[
[
80069,
81080
]
],
[
[
81104,
81497
]
],
[
[
81504,
81527
]
],
[
[
82961,
82984
]
],
[
[
84241,
85635
]
],
[
[
85659,
86976
]
],
[
[
87064,
87079
]
],
[
[
88622,
88648
]
],
[
[
89243,
90883
],
[
90962,
90983
]
],
[
[
90907,
91002
]
],
[
[
91009,
91038
]
],
[
[
91932,
92892
]
],
[
[
92899,
92924
]
],
[
[
93732,
93763
]
],
[
[
94112,
95061
]
],
[
[
95085,
95468
]
],
[
[
95559,
95577
]
],
[
[
96591,
96620
]
],
[
[
97224,
98379
],
[
98461,
98485
]
],
[
[
98403,
98504
]
],
[
[
98511,
98543
]
],
[
[
99479,
100506
]
],
[
[
100513,
100541
]
],
[
[
101728,
101762
]
],
[
[
102233,
103613
]
],
[
[
103637,
104145
]
],
[
[
104236,
104254
]
],
[
[
105268,
105297
]
],
[
[
105901,
107056
],
[
107138,
107162
]
],
[
[
107080,
107181
]
],
[
[
107188,
107220
]
],
[
[
108176,
109223
]
],
[
[
109230,
109258
]
],
[
[
110380,
110414
]
],
[
[
110882,
112197
]
],
[
[
112221,
112726
]
],
[
[
112817,
112835
]
],
[
[
113849,
113878
]
],
[
[
114482,
115637
],
[
115719,
115743
]
],
[
[
115661,
115762
]
],
[
[
115769,
115801
]
],
[
[
116729,
117748
]
],
[
[
117755,
117783
]
],
[
[
118622,
118656
]
],
[
[
119011,
120043
]
],
[
[
120067,
120459
]
],
[
[
120550,
120568
]
],
[
[
121797,
121826
]
],
[
[
122430,
123829
],
[
123911,
123935
]
],
[
[
123853,
123954
]
],
[
[
123961,
123993
]
],
[
[
124918,
125934
]
],
[
[
125941,
125969
]
],
[
[
126805,
126839
]
],
[
[
127198,
128214
]
],
[
[
128238,
128634
]
],
[
[
128641,
128665
]
],
[
[
130116,
130140
]
],
[
[
131413,
132825
]
],
[
[
132849,
134183
]
],
[
[
134272,
134288
]
],
[
[
136134,
136161
]
],
[
[
136759,
138778
],
[
138858,
138880
]
],
[
[
138802,
138899
]
],
[
[
138906,
138936
]
],
[
[
139834,
140821
]
],
[
[
140828,
140854
]
],
[
[
141665,
141697
]
],
[
[
142048,
143024
]
],
[
[
143048,
143434
]
],
[
[
143526,
143545
]
],
[
[
144562,
144592
]
],
[
[
145199,
146359
],
[
146442,
146467
]
],
[
[
146383,
146486
]
],
[
[
146493,
146526
]
],
[
[
147465,
148496
]
],
[
[
148503,
148532
]
],
[
[
149729,
149764
]
],
[
[
150241,
151632
]
],
[
[
151656,
152171
]
],
[
[
152264,
152284
]
],
[
[
153315,
153346
]
],
[
[
153956,
155132
],
[
155216,
155242
]
],
[
[
155156,
155261
]
],
[
[
155268,
155302
]
],
[
[
156227,
157245
]
],
[
[
157252,
157282
]
],
[
[
158116,
158152
]
],
[
[
158511,
159529
]
],
[
[
159553,
159951
]
],
[
[
160042,
160060
]
],
[
[
161289,
161318
]
],
[
[
161922,
163321
],
[
163403,
163427
]
],
[
[
163345,
163446
]
],
[
[
163453,
163485
]
],
[
[
164410,
165426
]
],
[
[
165433,
165461
]
],
[
[
166297,
166331
]
],
[
[
166690,
167706
]
],
[
[
167730,
168126
]
],
[
[
168133,
168157
]
],
[
[
169608,
169632
]
],
[
[
170905,
172317
]
],
[
[
172341,
173675
]
],
[
[
173764,
173780
]
],
[
[
175698,
175725
]
],
[
[
176323,
178418
],
[
178498,
178520
]
],
[
[
178442,
178539
]
],
[
[
178546,
178576
]
],
[
[
179474,
180461
]
],
[
[
180468,
180494
]
],
[
[
181305,
181337
]
],
[
[
181688,
182664
]
],
[
[
182688,
183074
]
],
[
[
183166,
183185
]
],
[
[
184202,
184232
]
],
[
[
184839,
185999
],
[
186082,
186107
]
],
[
[
186023,
186126
]
],
[
[
186133,
186166
]
],
[
[
187105,
188136
]
],
[
[
188143,
188172
]
],
[
[
189369,
189404
]
],
[
[
189881,
191272
]
],
[
[
191296,
191811
]
],
[
[
191899,
191914
]
],
[
[
193131,
193157
]
],
[
[
193752,
195116
],
[
195195,
195216
]
],
[
[
195140,
195235
]
],
[
[
195242,
195271
]
],
[
[
196165,
197125
]
],
[
[
197132,
197157
]
],
[
[
197965,
197996
]
],
[
[
198345,
199294
]
],
[
[
199318,
199701
]
],
[
[
199708,
199740
]
],
[
[
201492,
201515
]
],
[
[
201805,
201831
]
],
[
[
202405,
202423
]
],
[
[
202696,
202723
]
],
[
[
202951,
202989
]
],
[
[
203322,
203354
]
],
[
[
204688,
204742
]
],
[
[
205558,
205599
]
],
[
[
206113,
206139
]
],
[
[
206719,
206755
]
],
[
[
207510,
207552
]
],
[
[
208737,
208797
]
],
[
[
210275,
210305
]
],
[
[
210600,
210632
]
],
[
[
210933,
210973
]
],
[
[
211380,
211428
]
],
[
[
212124,
212188
]
],
[
[
214214,
214263
]
],
[
[
215633,
215666
]
],
[
[
216115,
216154
]
],
[
[
216621,
216636
]
],
[
[
216951,
216972
]
],
[
[
217287,
217304
]
],
[
[
217641,
217664
]
],
[
[
217984,
218000
]
],
[
[
218268,
218290
]
],
[
[
218575,
218602
]
],
[
[
219026,
219059
]
],
[
[
219407,
219424
]
],
[
[
219921,
219944
]
],
[
[
220303,
220320
]
],
[
[
220901,
220924
]
],
[
[
221306,
221322
]
],
[
[
221656,
221678
]
],
[
[
221993,
222014
]
],
[
[
222375,
222402
]
],
[
[
222728,
222760
]
],
[
[
223013,
223051
]
],
[
[
223350,
223373
]
],
[
[
223554,
223583
]
],
[
[
223851,
223880
]
],
[
[
224099,
224134
]
],
[
[
224422,
224446
]
],
[
[
224631,
224661
]
],
[
[
224933,
224958
]
],
[
[
225228,
225259
]
],
[
[
225562,
225598
]
],
[
[
226332,
226708
]
],
[
[
226715,
226735
]
],
[
[
227209,
227224
]
],
[
[
227900,
227924
]
]
] |
import numpy as np
from skimage.morphology import max_tree, area_closing, area_opening
from skimage.morphology import max_tree_local_maxima, diameter_opening
from skimage.morphology import diameter_closing
from skimage.util import invert
from skimage._shared.testing import assert_array_equal, TestCase
eps = 1e-12
def _full_type_test(img, param, expected, func, param_scale=False,
**keywords):
# images as they are
out = func(img, param, **keywords)
assert_array_equal(out, expected)
# unsigned int
for dt in [np.uint32, np.uint64]:
img_cast = img.astype(dt)
out = func(img_cast, param, **keywords)
exp_cast = expected.astype(dt)
assert_array_equal(out, exp_cast)
# float
data_float = img.astype(np.float64)
data_float = data_float / 255.0
expected_float = expected.astype(np.float64)
expected_float = expected_float / 255.0
if param_scale:
param_cast = param / 255.0
else:
param_cast = param
for dt in [np.float32, np.float64]:
data_cast = data_float.astype(dt)
out = func(data_cast, param_cast, **keywords)
exp_cast = expected_float.astype(dt)
error_img = 255.0 * exp_cast - 255.0 * out
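        # error counts pixels that moved by at least one grey level after
        # rescaling back to [0, 255]; with eps = 1e-12 this demands zero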
error = (error_img >= 1.0).sum()
assert error < eps
# signed images
img_signed = img.astype(np.int16)
img_signed = img_signed - 128
exp_signed = expected.astype(np.int16)
exp_signed = exp_signed - 128
for dt in [np.int8, np.int16, np.int32, np.int64]:
img_s = img_signed.astype(dt)
out = func(img_s, param, **keywords)
exp_s = exp_signed.astype(dt)
assert_array_equal(out, exp_s)
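# Hedged usage sketch (hypothetical tiny image, not part of the test suite):
#   tiny = np.array([[0, 0, 255, 255]], dtype=np.uint8)
#   _full_type_test(tiny, 2, tiny, area_opening, connectivity=2)
# would exercise the operator over unsigned, float and signed dtypes at once.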
class TestMaxtree(TestCase):
def test_max_tree(self):
"Test for max tree"
img_type = np.uint8
img = np.array([[10, 8, 8, 9],
[7, 7, 9, 9],
[8, 7, 10, 10],
[9, 9, 10, 10]], dtype=img_type)
P_exp = np.array([[1, 4, 1, 1],
[4, 4, 3, 3],
[1, 4, 3, 10],
[3, 3, 10, 10]], dtype=np.int64)
S_exp = np.array([4, 5, 9, 1, 2, 8, 3, 6, 7,
12, 13, 0, 10, 11, 14, 15],
dtype=np.int64)
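        # P_exp holds, for each pixel (as a raveled index), its parent in the
        # max-tree; S_exp lists the pixels in canonical traversal order.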
for img_type in [np.uint8, np.uint16, np.uint32, np.uint64]:
img = img.astype(img_type)
P, S = max_tree(img, connectivity=2)
assert_array_equal(P, P_exp)
assert_array_equal(S, S_exp)
for img_type in [np.int8, np.int16, np.int32, np.int64]:
img = img.astype(img_type)
img_shifted = img - 9
P, S = max_tree(img_shifted, connectivity=2)
assert_array_equal(P, P_exp)
assert_array_equal(S, S_exp)
img_float = img.astype(float)
img_float = (img_float - 8) / 2.0
for img_type in [np.float32, np.float64]:
img_float = img_float.astype(img_type)
P, S = max_tree(img_float, connectivity=2)
assert_array_equal(P, P_exp)
assert_array_equal(S, S_exp)
return
def test_area_closing(self):
"Test for Area Closing (2 thresholds, all types)"
# original image
img = np.array(
[[240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240],
[240, 200, 200, 240, 200, 240, 200, 200, 240, 240, 200, 240],
[240, 200, 40, 240, 240, 240, 240, 240, 240, 240, 40, 240],
[240, 240, 240, 240, 100, 240, 100, 100, 240, 240, 200, 240],
[240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240],
[200, 200, 200, 200, 200, 200, 200, 240, 200, 200, 255, 255],
[200, 255, 200, 200, 200, 255, 200, 240, 255, 255, 255, 40],
[200, 200, 200, 100, 200, 200, 200, 240, 255, 255, 255, 255],
[200, 200, 200, 100, 200, 200, 200, 240, 200, 200, 255, 255],
[200, 200, 200, 200, 200, 40, 200, 240, 240, 100, 255, 255],
[200, 40, 255, 255, 255, 40, 200, 255, 200, 200, 255, 255],
[200, 200, 200, 200, 200, 200, 200, 255, 255, 255, 255, 255]],
dtype=np.uint8)
# expected area closing with area 2
expected_2 = np.array(
[[240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240],
[240, 200, 200, 240, 240, 240, 200, 200, 240, 240, 200, 240],
[240, 200, 200, 240, 240, 240, 240, 240, 240, 240, 200, 240],
[240, 240, 240, 240, 240, 240, 100, 100, 240, 240, 200, 240],
[240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240],
[200, 200, 200, 200, 200, 200, 200, 240, 200, 200, 255, 255],
[200, 255, 200, 200, 200, 255, 200, 240, 255, 255, 255, 255],
[200, 200, 200, 100, 200, 200, 200, 240, 255, 255, 255, 255],
[200, 200, 200, 100, 200, 200, 200, 240, 200, 200, 255, 255],
[200, 200, 200, 200, 200, 40, 200, 240, 240, 200, 255, 255],
[200, 200, 255, 255, 255, 40, 200, 255, 200, 200, 255, 255],
[200, 200, 200, 200, 200, 200, 200, 255, 255, 255, 255, 255]],
dtype=np.uint8)
        # expected area closing with area 4
expected_4 = np.array(
[[240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240],
[240, 200, 200, 240, 240, 240, 240, 240, 240, 240, 240, 240],
[240, 200, 200, 240, 240, 240, 240, 240, 240, 240, 240, 240],
[240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240],
[240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240],
[200, 200, 200, 200, 200, 200, 200, 240, 240, 240, 255, 255],
[200, 255, 200, 200, 200, 255, 200, 240, 255, 255, 255, 255],
[200, 200, 200, 200, 200, 200, 200, 240, 255, 255, 255, 255],
[200, 200, 200, 200, 200, 200, 200, 240, 200, 200, 255, 255],
[200, 200, 200, 200, 200, 200, 200, 240, 240, 200, 255, 255],
[200, 200, 255, 255, 255, 200, 200, 255, 200, 200, 255, 255],
[200, 200, 200, 200, 200, 200, 200, 255, 255, 255, 255, 255]],
dtype=np.uint8)
        # _full_type_test runs the same check across many image dtypes.
_full_type_test(img, 2, expected_2, area_closing, connectivity=2)
_full_type_test(img, 4, expected_4, area_closing, connectivity=2)
P, S = max_tree(invert(img), connectivity=2)
_full_type_test(img, 4, expected_4, area_closing,
parent=P, tree_traverser=S)
def test_area_opening(self):
"Test for Area Opening (2 thresholds, all types)"
# original image
img = np.array([[15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15],
[15, 55, 55, 15, 55, 15, 55, 55, 15, 15, 55, 15],
[15, 55, 215, 15, 15, 15, 15, 15, 15, 15, 215, 15],
[15, 15, 15, 15, 155, 15, 155, 155, 15, 15, 55, 15],
[15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15],
[55, 55, 55, 55, 55, 55, 55, 15, 55, 55, 0, 0],
[55, 0, 55, 55, 55, 0, 55, 15, 0, 0, 0, 215],
[55, 55, 55, 155, 55, 55, 55, 15, 0, 0, 0, 0],
[55, 55, 55, 155, 55, 55, 55, 15, 55, 55, 0, 0],
[55, 55, 55, 55, 55, 215, 55, 15, 15, 155, 0, 0],
[55, 215, 0, 0, 0, 215, 55, 0, 55, 55, 0, 0],
[55, 55, 55, 55, 55, 55, 55, 0, 0, 0, 0, 0]],
dtype=np.uint8)
        # expected area opening with area 2
expected_2 = np.array([[15, 15, 15, 15, 15, 15, 15, 15, 15,
15, 15, 15],
[15, 55, 55, 15, 15, 15, 55, 55, 15,
15, 55, 15],
[15, 55, 55, 15, 15, 15, 15, 15, 15,
15, 55, 15],
[15, 15, 15, 15, 15, 15, 155, 155, 15,
15, 55, 15],
[15, 15, 15, 15, 15, 15, 15, 15, 15,
15, 15, 15],
[55, 55, 55, 55, 55, 55, 55, 15, 55,
55, 0, 0],
[55, 0, 55, 55, 55, 0, 55, 15, 0,
0, 0, 0],
[55, 55, 55, 155, 55, 55, 55, 15, 0,
0, 0, 0],
[55, 55, 55, 155, 55, 55, 55, 15, 55,
55, 0, 0],
[55, 55, 55, 55, 55, 215, 55, 15, 15,
55, 0, 0],
[55, 55, 0, 0, 0, 215, 55, 0, 55,
55, 0, 0],
[55, 55, 55, 55, 55, 55, 55, 0, 0,
0, 0, 0]],
dtype=np.uint8)
        # expected area opening with area 4
expected_4 = np.array([[15, 15, 15, 15, 15, 15, 15, 15, 15,
15, 15, 15],
[15, 55, 55, 15, 15, 15, 15, 15, 15,
15, 15, 15],
[15, 55, 55, 15, 15, 15, 15, 15, 15,
15, 15, 15],
[15, 15, 15, 15, 15, 15, 15, 15, 15,
15, 15, 15],
[15, 15, 15, 15, 15, 15, 15, 15, 15,
15, 15, 15],
[55, 55, 55, 55, 55, 55, 55, 15, 15,
15, 0, 0],
[55, 0, 55, 55, 55, 0, 55, 15, 0,
0, 0, 0],
[55, 55, 55, 55, 55, 55, 55, 15, 0,
0, 0, 0],
[55, 55, 55, 55, 55, 55, 55, 15, 55,
55, 0, 0],
[55, 55, 55, 55, 55, 55, 55, 15, 15,
55, 0, 0],
[55, 55, 0, 0, 0, 55, 55, 0, 55,
55, 0, 0],
[55, 55, 55, 55, 55, 55, 55, 0, 0,
0, 0, 0]],
dtype=np.uint8)
        # _full_type_test runs the same check across many image dtypes.
_full_type_test(img, 2, expected_2, area_opening, connectivity=2)
_full_type_test(img, 4, expected_4, area_opening, connectivity=2)
P, S = max_tree(img, connectivity=2)
_full_type_test(img, 4, expected_4, area_opening,
parent=P, tree_traverser=S)
def test_diameter_closing(self):
"Test for Diameter Opening (2 thresholds, all types)"
img = np.array([[97, 95, 93, 92, 91, 90, 90, 90, 91, 92, 93, 95],
[95, 93, 91, 89, 88, 88, 88, 88, 88, 89, 91, 93],
[93, 63, 63, 63, 63, 86, 86, 86, 87, 43, 43, 91],
[92, 89, 88, 86, 85, 85, 84, 85, 85, 43, 43, 89],
[91, 88, 87, 85, 84, 84, 83, 84, 84, 85, 87, 88],
[90, 88, 86, 85, 84, 83, 83, 83, 84, 85, 86, 88],
[90, 88, 86, 84, 83, 83, 82, 83, 83, 84, 86, 88],
[90, 88, 86, 85, 84, 83, 83, 83, 84, 85, 86, 88],
[91, 88, 87, 85, 84, 84, 83, 84, 84, 85, 87, 88],
[92, 89, 23, 23, 85, 85, 84, 85, 85, 3, 3, 89],
[93, 91, 23, 23, 87, 86, 86, 86, 87, 88, 3, 91],
[95, 93, 91, 89, 88, 88, 88, 88, 88, 89, 91, 93]],
dtype=np.uint8)
ex2 = np.array([[97, 95, 93, 92, 91, 90, 90, 90, 91, 92, 93, 95],
[95, 93, 91, 89, 88, 88, 88, 88, 88, 89, 91, 93],
[93, 63, 63, 63, 63, 86, 86, 86, 87, 43, 43, 91],
[92, 89, 88, 86, 85, 85, 84, 85, 85, 43, 43, 89],
[91, 88, 87, 85, 84, 84, 83, 84, 84, 85, 87, 88],
[90, 88, 86, 85, 84, 83, 83, 83, 84, 85, 86, 88],
[90, 88, 86, 84, 83, 83, 83, 83, 83, 84, 86, 88],
[90, 88, 86, 85, 84, 83, 83, 83, 84, 85, 86, 88],
[91, 88, 87, 85, 84, 84, 83, 84, 84, 85, 87, 88],
[92, 89, 23, 23, 85, 85, 84, 85, 85, 3, 3, 89],
[93, 91, 23, 23, 87, 86, 86, 86, 87, 88, 3, 91],
[95, 93, 91, 89, 88, 88, 88, 88, 88, 89, 91, 93]],
dtype=np.uint8)
ex4 = np.array([[97, 95, 93, 92, 91, 90, 90, 90, 91, 92, 93, 95],
[95, 93, 91, 89, 88, 88, 88, 88, 88, 89, 91, 93],
[93, 63, 63, 63, 63, 86, 86, 86, 87, 84, 84, 91],
[92, 89, 88, 86, 85, 85, 84, 85, 85, 84, 84, 89],
[91, 88, 87, 85, 84, 84, 83, 84, 84, 85, 87, 88],
[90, 88, 86, 85, 84, 83, 83, 83, 84, 85, 86, 88],
[90, 88, 86, 84, 83, 83, 83, 83, 83, 84, 86, 88],
[90, 88, 86, 85, 84, 83, 83, 83, 84, 85, 86, 88],
[91, 88, 87, 85, 84, 84, 83, 84, 84, 85, 87, 88],
[92, 89, 84, 84, 85, 85, 84, 85, 85, 84, 84, 89],
[93, 91, 84, 84, 87, 86, 86, 86, 87, 88, 84, 91],
[95, 93, 91, 89, 88, 88, 88, 88, 88, 89, 91, 93]],
dtype=np.uint8)
        # _full_type_test runs the same check across many image dtypes.
_full_type_test(img, 2, ex2, diameter_closing, connectivity=2)
_full_type_test(img, 4, ex4, diameter_closing, connectivity=2)
P, S = max_tree(invert(img), connectivity=2)
        _full_type_test(img, 4, ex4, diameter_closing,
parent=P, tree_traverser=S)
def test_diameter_opening(self):
"Test for Diameter Opening (2 thresholds, all types)"
img = np.array([[5, 7, 9, 11, 12, 12, 12, 12, 12, 11, 9, 7],
[7, 10, 11, 13, 14, 14, 15, 14, 14, 13, 11, 10],
[9, 40, 40, 40, 40, 16, 16, 16, 16, 60, 60, 11],
[11, 13, 15, 16, 17, 18, 18, 18, 17, 60, 60, 13],
[12, 14, 16, 17, 18, 19, 19, 19, 18, 17, 16, 14],
[12, 14, 16, 18, 19, 19, 19, 19, 19, 18, 16, 14],
[12, 15, 16, 18, 19, 19, 20, 19, 19, 18, 16, 15],
[12, 14, 16, 18, 19, 19, 19, 19, 19, 18, 16, 14],
[12, 14, 16, 17, 18, 19, 19, 19, 18, 17, 16, 14],
[11, 13, 80, 80, 17, 18, 18, 18, 17, 100, 100, 13],
[9, 11, 80, 80, 16, 16, 16, 16, 16, 15, 100, 11],
[7, 10, 11, 13, 14, 14, 15, 14, 14, 13, 11, 10]])
ex2 = np.array([[5, 7, 9, 11, 12, 12, 12, 12, 12, 11, 9, 7],
[7, 10, 11, 13, 14, 14, 15, 14, 14, 13, 11, 10],
[9, 40, 40, 40, 40, 16, 16, 16, 16, 60, 60, 11],
[11, 13, 15, 16, 17, 18, 18, 18, 17, 60, 60, 13],
[12, 14, 16, 17, 18, 19, 19, 19, 18, 17, 16, 14],
[12, 14, 16, 18, 19, 19, 19, 19, 19, 18, 16, 14],
[12, 15, 16, 18, 19, 19, 19, 19, 19, 18, 16, 15],
[12, 14, 16, 18, 19, 19, 19, 19, 19, 18, 16, 14],
[12, 14, 16, 17, 18, 19, 19, 19, 18, 17, 16, 14],
[11, 13, 80, 80, 17, 18, 18, 18, 17, 100, 100, 13],
[9, 11, 80, 80, 16, 16, 16, 16, 16, 15, 100, 11],
[7, 10, 11, 13, 14, 14, 15, 14, 14, 13, 11, 10]])
ex4 = np.array([[5, 7, 9, 11, 12, 12, 12, 12, 12, 11, 9, 7],
[7, 10, 11, 13, 14, 14, 15, 14, 14, 13, 11, 10],
[9, 40, 40, 40, 40, 16, 16, 16, 16, 18, 18, 11],
[11, 13, 15, 16, 17, 18, 18, 18, 17, 18, 18, 13],
[12, 14, 16, 17, 18, 19, 19, 19, 18, 17, 16, 14],
[12, 14, 16, 18, 19, 19, 19, 19, 19, 18, 16, 14],
[12, 15, 16, 18, 19, 19, 19, 19, 19, 18, 16, 15],
[12, 14, 16, 18, 19, 19, 19, 19, 19, 18, 16, 14],
[12, 14, 16, 17, 18, 19, 19, 19, 18, 17, 16, 14],
[11, 13, 18, 18, 17, 18, 18, 18, 17, 18, 18, 13],
[9, 11, 18, 18, 16, 16, 16, 16, 16, 15, 18, 11],
[7, 10, 11, 13, 14, 14, 15, 14, 14, 13, 11, 10]])
        # _full_type_test runs the same check across many image dtypes.
_full_type_test(img, 2, ex2, diameter_opening, connectivity=2)
_full_type_test(img, 4, ex4, diameter_opening, connectivity=2)
P, S = max_tree(img, connectivity=2)
_full_type_test(img, 4, ex4, diameter_opening,
parent=P, tree_traverser=S)
def test_local_maxima(self):
"local maxima for various data types"
data = np.array([[10, 11, 13, 14, 14, 15, 14, 14, 13, 11],
[11, 13, 15, 16, 16, 16, 16, 16, 15, 13],
[13, 15, 40, 40, 18, 18, 18, 60, 60, 15],
[14, 16, 40, 40, 19, 19, 19, 60, 60, 16],
[14, 16, 18, 19, 19, 19, 19, 19, 18, 16],
[15, 16, 18, 19, 19, 20, 19, 19, 18, 16],
[14, 16, 18, 19, 19, 19, 19, 19, 18, 16],
[14, 16, 80, 80, 19, 19, 19, 100, 100, 16],
[13, 15, 80, 80, 18, 18, 18, 100, 100, 15],
[11, 13, 15, 16, 16, 16, 16, 16, 15, 13]],
dtype=np.uint8)
expected_result = np.array([[0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 1, 1, 0, 0, 0, 1, 1, 0],
[0, 0, 1, 1, 0, 0, 0, 1, 1, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 1, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 1, 1, 0, 0, 0, 1, 1, 0],
[0, 0, 1, 1, 0, 0, 0, 1, 1, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0]],
dtype=np.uint64)
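        # max_tree_local_maxima labels each maximum with its own integer id
        # (five maxima here), hence the binarization before comparison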
for dtype in [np.uint8, np.uint64, np.int8, np.int64]:
test_data = data.astype(dtype)
out = max_tree_local_maxima(test_data, connectivity=1)
out_bin = out > 0
assert_array_equal(expected_result, out_bin)
assert out.dtype == expected_result.dtype
assert np.max(out) == 5
P, S = max_tree(test_data)
out = max_tree_local_maxima(test_data,
parent=P,
tree_traverser=S)
assert_array_equal(expected_result, out_bin)
assert out.dtype == expected_result.dtype
assert np.max(out) == 5
def test_extrema_float(self):
"specific tests for float type"
data = np.array([[0.10, 0.11, 0.13, 0.14, 0.14, 0.15, 0.14,
0.14, 0.13, 0.11],
[0.11, 0.13, 0.15, 0.16, 0.16, 0.16, 0.16,
0.16, 0.15, 0.13],
[0.13, 0.15, 0.40, 0.40, 0.18, 0.18, 0.18,
0.60, 0.60, 0.15],
[0.14, 0.16, 0.40, 0.40, 0.19, 0.19, 0.19,
0.60, 0.60, 0.16],
[0.14, 0.16, 0.18, 0.19, 0.19, 0.19, 0.19,
0.19, 0.18, 0.16],
[0.15, 0.182, 0.18, 0.19, 0.204, 0.20, 0.19,
0.19, 0.18, 0.16],
[0.14, 0.16, 0.18, 0.19, 0.19, 0.19, 0.19,
0.19, 0.18, 0.16],
[0.14, 0.16, 0.80, 0.80, 0.19, 0.19, 0.19,
4.0, 1.0, 0.16],
[0.13, 0.15, 0.80, 0.80, 0.18, 0.18, 0.18,
1.0, 1.0, 0.15],
[0.11, 0.13, 0.15, 0.16, 0.16, 0.16, 0.16,
0.16, 0.15, 0.13]],
dtype=np.float32)
expected_result = np.array([[0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 1, 1, 0, 0, 0, 1, 1, 0],
[0, 0, 1, 1, 0, 0, 0, 1, 1, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 1, 0, 0, 1, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 1, 1, 0, 0, 0, 1, 0, 0],
[0, 0, 1, 1, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0]],
dtype=np.uint8)
# test for local maxima
out = max_tree_local_maxima(data, connectivity=1)
out_bin = out > 0
assert_array_equal(expected_result, out_bin)
assert np.max(out) == 6
def test_3d(self):
"""tests the detection of maxima in 3D."""
img = np.zeros((8, 8, 8), dtype=np.uint8)
local_maxima = np.zeros((8, 8, 8), dtype=np.uint64)
# first maximum: only one pixel
img[1, 1:3, 1:3] = 100
img[2, 2, 2] = 200
img[3, 1:3, 1:3] = 100
local_maxima[2, 2, 2] = 1
# second maximum: three pixels in z-direction
img[5:8, 1, 1] = 200
local_maxima[5:8, 1, 1] = 1
        # third maximum: two separate maxima, in planes 0 and 2
img[0, 5:8, 5:8] = 200
img[1, 6, 6] = 100
img[2, 5:7, 5:7] = 200
img[0:3, 5:8, 5:8] += 50
local_maxima[0, 5:8, 5:8] = 1
local_maxima[2, 5:7, 5:7] = 1
        # fourth maximum: a single pixel in the corner of the cube
img[6:8, 6:8, 6:8] = 200
img[7, 7, 7] = 255
local_maxima[7, 7, 7] = 1
out = max_tree_local_maxima(img)
out_bin = out > 0
assert_array_equal(local_maxima, out_bin)
assert np.max(out) == 5
| [
[
[
7,
18
],
[
557,
559
],
[
568,
570
],
[
784,
786
],
[
869,
871
],
[
1032,
1034
],
[
1044,
1046
],
[
1366,
1368
],
[
1443,
1445
],
[
1502,
1504
],
[
1511,
1513
],
[
1521,
1523
],
[
1531,
1533
],
[
1810,
1812
],
[
1833,
1835
],
[
2010,
2012
],
[
2164,
2166
],
[
2191,
2193
],
[
2313,
2315
],
[
2349,
2351
],
[
2359,
2361
],
[
2370,
2372
],
[
2381,
2383
],
[
2589,
2591
],
[
2598,
2600
],
[
2608,
2610
],
[
2618,
2620
],
[
2947,
2949
],
[
2959,
2961
],
[
3308,
3310
],
[
4231,
4233
],
[
4307,
4309
],
[
5234,
5236
],
[
5318,
5320
],
[
6247,
6249
],
[
6764,
6766
],
[
7654,
7656
],
[
7730,
7732
],
[
9081,
9083
],
[
9165,
9167
],
[
10510,
10512
],
[
11001,
11003
],
[
11902,
11904
],
[
11927,
11929
],
[
12828,
12830
],
[
12853,
12855
],
[
13757,
13759
],
[
14247,
14249
],
[
15131,
15133
],
[
16015,
16017
],
[
17334,
17336
],
[
18024,
18026
],
[
18060,
18062
],
[
18756,
18758
],
[
18789,
18791
],
[
18799,
18801
],
[
18810,
18812
],
[
18819,
18821
],
[
19101,
19103
],
[
19449,
19451
],
[
19556,
19558
],
[
20700,
20702
],
[
20739,
20741
],
[
21435,
21437
],
[
21630,
21632
],
[
21736,
21738
],
[
21762,
21764
],
[
21795,
21797
],
[
21821,
21823
],
[
22640,
22642
]
],
[
[
50,
58
],
[
2451,
2459
],
[
2721,
2729
],
[
3042,
3050
],
[
6484,
6492
],
[
10747,
10755
],
[
13988,
13996
],
[
17102,
17110
],
[
19138,
19146
]
],
[
[
60,
72
],
[
6364,
6376
],
[
6438,
6450
],
[
6566,
6578
]
],
[
[
74,
86
],
[
10627,
10639
],
[
10701,
10713
],
[
10821,
10833
]
],
[
[
118,
139
],
[
18892,
18913
],
[
19176,
19197
],
[
21492,
21513
],
[
22522,
22543
]
],
[
[
141,
157
],
[
14063,
14079
],
[
16981,
16997
],
[
17052,
17068
],
[
17169,
17185
]
],
[
[
189,
205
],
[
13867,
13883
],
[
13938,
13954
]
],
[
[
231,
237
],
[
6493,
6499
],
[
13997,
14003
]
],
[
[
275,
293
],
[
488,
506
],
[
709,
727
],
[
1671,
1689
],
[
2493,
2511
],
[
2534,
2552
],
[
2771,
2789
],
[
2812,
2830
],
[
3090,
3108
],
[
3131,
3149
],
[
18983,
19001
],
[
19330,
19348
],
[
21570,
21588
],
[
22583,
22601
]
],
[
[
295,
303
],
[
1722,
1730
]
],
[
[
305,
308
],
[
1313,
1316
]
],
[
[
323,
338
],
[
6328,
6343
],
[
6402,
6417
],
[
6530,
6545
],
[
10591,
10606
],
[
10665,
10680
],
[
10785,
10800
],
[
13838,
13853
],
[
13909,
13924
],
[
14034,
14049
],
[
16952,
16967
],
[
17023,
17038
],
[
17140,
17155
]
],
[
[
1710,
1721
]
]
] |
import logging
import os
from pathlib import Path
from typing import Any, Callable, Optional
from torch.utils.data import Dataset
from torchvision import transforms
from PIL import Image
import cv2
import numpy as np
class URISC(Dataset):
def __init__(
self,
dir: str,
mode: str = 'train',
transform: Optional[Callable] = None,
data_rank: str = 'simple',
):
super(URISC, self).__init__()
self.dir = dir
self.mode = mode
self.transform = transform
self.data_rank = data_rank
        if data_rank == 'simple':
            self.transform_normalize = transforms.Normalize(mean=0.520, std=0.185)
        elif data_rank == 'complex':
            self.transform_normalize = transforms.Normalize(mean=0.518, std=0.190)
        else:
            raise ValueError(f"data_rank must be 'simple' or 'complex', got {data_rank!r}")
self.transform_totensor = transforms.ToTensor()
self.ids = [os.path.join(dir, data_rank, mode, filename) for filename in os.listdir(os.path.join(dir, data_rank, mode))]
if not self.ids:
raise RuntimeError(f'No input file found in {os.path.join(dir, data_rank, mode)}, make sure you put your images there')
logging.info(f'Creating dataset with {len(self.ids)} examples')
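    # Hedged usage sketch (directory layout is an assumption:
    # <dir>/<data_rank>/<mode> for images, .../label/<mode> for masks):
    #   dataset = URISC(dir='data/urisc', mode='train', data_rank='simple')
    #   image, mask = dataset[0]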
def __len__(self):
return len(self.ids)
def __getitem__(self, idx):
image = cv2.imread(self.ids[idx])
# print(image.shape)
        if self.mode == 'test':
            if self.transform is not None:
                # the transform is assumed to return a dict, as in the
                # train branch below (e.g. an albumentations pipeline)
                image = self.transform(image=image)['image']
            image = self.transform_normalize(self.transform_totensor(image))
            return image.float().contiguous(), self.ids[idx]
mask_path = self.ids[idx].replace(self.mode, "label/"+self.mode)
mask = cv2.imread(mask_path, cv2.IMREAD_GRAYSCALE)
# print(mask)
if self.transform is not None:
transformed = self.transform(image=image, mask=mask)
transformed_image = transformed['image']
transformed_mask = transformed['mask']
else:
transformed_image = image
transformed_mask = mask
transformed_image = self.transform_totensor(transformed_image)
transformed_image = self.transform_normalize(transformed_image)
transformed_mask = self.transform_totensor(transformed_mask)
# transformed_image = np.transpose(transformed_image, (2, 0, 1))
# transformed_mask = np.expand_dims(transformed_mask, axis=0)
return transformed_image, transformed_mask | [
[
[
7,
14
],
[
1158,
1165
]
],
[
[
22,
24
],
[
945,
947
],
[
956,
958
],
[
884,
886
],
[
1075,
1077
]
],
[
[
45,
49
]
],
[
[
69,
72
]
],
[
[
74,
82
],
[
352,
360
]
],
[
[
84,
92
],
[
343,
351
]
],
[
[
123,
130
],
[
233,
240
]
],
[
[
155,
165
],
[
643,
653
],
[
763,
773
],
[
841,
851
]
],
[
[
182,
187
]
],
[
[
195,
198
],
[
1324,
1327
],
[
1673,
1676
],
[
1695,
1698
]
],
[
[
207,
218
]
],
[
[
227,
232
],
[
427,
432
]
]
] |
# Copyright (C) 2021 NVIDIA CORPORATION & AFFILIATES. All rights reserved.
#
# This work is made available under the Nvidia Source Code License-NC.
# To view a copy of this license, check out LICENSE.md
import torch.nn as nn
class FeatureMatchingLoss(nn.Module):
r"""Compute feature matching loss"""
def __init__(self, criterion='l1'):
super(FeatureMatchingLoss, self).__init__()
if criterion == 'l1':
self.criterion = nn.L1Loss()
elif criterion == 'l2' or criterion == 'mse':
self.criterion = nn.MSELoss()
else:
raise ValueError('Criterion %s is not recognized' % criterion)
def forward(self, fake_features, real_features):
r"""Return the target vector for the binary cross entropy loss
computation.
Args:
fake_features (list of lists): Discriminator features of fake images.
real_features (list of lists): Discriminator features of real images.
Returns:
(tensor): Loss value.
"""
num_d = len(fake_features)
dis_weight = 1.0 / num_d
loss = fake_features[0][0].new_tensor(0)
for i in range(num_d):
for j in range(len(fake_features[i])):
tmp_loss = self.criterion(fake_features[i][j],
real_features[i][j].detach())
loss += dis_weight * tmp_loss
return loss
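# Hedged usage sketch: both arguments are lists (one entry per
# discriminator) of lists of per-layer feature tensors; names hypothetical:
#   criterion = FeatureMatchingLoss(criterion='l1')
#   loss = criterion(fake_features, real_features)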
| [
[
[
211,
225
],
[
254,
256
],
[
458,
460
],
[
553,
555
]
],
[
[
234,
253
],
[
361,
380
]
]
] |
# Copyright 2017 Datera
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import time
import uuid
from eventlet.green import threading
from oslo_config import cfg
from oslo_log import log as logging
import six
from cinder import exception
from cinder.i18n import _
from cinder import utils
from cinder.volume import configuration
from cinder.volume.drivers.san import san
import cinder.volume.drivers.datera.datera_api2 as api2
import cinder.volume.drivers.datera.datera_api21 as api21
import cinder.volume.drivers.datera.datera_common as datc
LOG = logging.getLogger(__name__)
d_opts = [
cfg.StrOpt('datera_api_port',
default='7717',
help='Datera API port.'),
cfg.StrOpt('datera_api_version',
default='2',
deprecated_for_removal=True,
help='Datera API version.'),
cfg.IntOpt('datera_503_timeout',
default='120',
help='Timeout for HTTP 503 retry messages'),
cfg.IntOpt('datera_503_interval',
default='5',
help='Interval between 503 retries'),
cfg.BoolOpt('datera_debug',
default=False,
help="True to set function arg and return logging"),
cfg.BoolOpt('datera_debug_replica_count_override',
default=False,
help="ONLY FOR DEBUG/TESTING PURPOSES\n"
"True to set replica_count to 1"),
cfg.StrOpt('datera_tenant_id',
default=None,
help="If set to 'Map' --> OpenStack project ID will be mapped "
"implicitly to Datera tenant ID\n"
"If set to 'None' --> Datera tenant ID will not be used "
"during volume provisioning\n"
"If set to anything else --> Datera tenant ID will be the "
"provided value"),
cfg.BoolOpt('datera_disable_profiler',
default=False,
help="Set to True to disable profiling in the Datera driver"),
]
CONF = cfg.CONF
CONF.import_opt('driver_use_ssl', 'cinder.volume.driver')
CONF.register_opts(d_opts, group=configuration.SHARED_CONF_GROUP)
@six.add_metaclass(utils.TraceWrapperWithABCMetaclass)
class DateraDriver(san.SanISCSIDriver, api2.DateraApi, api21.DateraApi):
"""The OpenStack Datera Driver
Version history:
* 1.0 - Initial driver
* 1.1 - Look for lun-0 instead of lun-1.
* 2.0 - Update For Datera API v2
* 2.1 - Multipath, ACL and reorg
        * 2.2 - Capabilities List, Extended Volume-Type Support
Naming convention change,
Volume Manage/Unmanage support
* 2.3 - Templates, Tenants, Snapshot Polling,
2.1 Api Version Support, Restructure
* 2.3.1 - Scalability bugfixes
* 2.3.2 - Volume Placement, ACL multi-attach bugfix
* 2.4.0 - Fast Retype Support
"""
VERSION = '2.4.0'
CI_WIKI_NAME = "datera-ci"
HEADER_DATA = {'Datera-Driver': 'OpenStack-Cinder-{}'.format(VERSION)}
# TODO(jsbryant) Remove driver in the 'U' release if CI is not fixed.
SUPPORTED = False
def __init__(self, *args, **kwargs):
super(DateraDriver, self).__init__(*args, **kwargs)
self.configuration.append_config_values(d_opts)
self.username = self.configuration.san_login
self.password = self.configuration.san_password
self.cluster_stats = {}
self.datera_api_token = None
self.interval = self.configuration.datera_503_interval
self.retry_attempts = (self.configuration.datera_503_timeout /
self.interval)
self.driver_prefix = str(uuid.uuid4())[:4]
self.datera_debug = self.configuration.datera_debug
self.datera_api_versions = []
if self.datera_debug:
utils.setup_tracing(['method'])
self.tenant_id = self.configuration.datera_tenant_id
if self.tenant_id and self.tenant_id.lower() == 'none':
self.tenant_id = None
self.api_check = time.time()
self.api_cache = []
self.api_timeout = 0
self.do_profile = not self.configuration.datera_disable_profiler
self.thread_local = threading.local()
backend_name = self.configuration.safe_get(
'volume_backend_name')
self.backend_name = backend_name or 'Datera'
datc.register_driver(self)
def do_setup(self, context):
# If we can't authenticate through the old and new method, just fail
# now.
if not all([self.username, self.password]):
msg = _("san_login and/or san_password is not set for Datera "
"driver in the cinder.conf. Set this information and "
"start the cinder-volume service again.")
LOG.error(msg)
raise exception.InvalidInput(msg)
self.login()
self._create_tenant()
# =================
# =================
# = Create Volume =
# =================
@datc._api_lookup
def create_volume(self, volume):
"""Create a logical volume."""
pass
# =================
# = Extend Volume =
# =================
@datc._api_lookup
def extend_volume(self, volume, new_size):
pass
# =================
# =================
# = Cloned Volume =
# =================
@datc._api_lookup
def create_cloned_volume(self, volume, src_vref):
pass
# =================
# = Delete Volume =
# =================
@datc._api_lookup
def delete_volume(self, volume):
pass
# =================
# = Ensure Export =
# =================
@datc._api_lookup
def ensure_export(self, context, volume, connector=None):
"""Gets the associated account, retrieves CHAP info and updates."""
# =========================
# = Initialize Connection =
# =========================
@datc._api_lookup
def initialize_connection(self, volume, connector):
pass
# =================
# = Create Export =
# =================
@datc._api_lookup
def create_export(self, context, volume, connector):
pass
# =================
# = Detach Volume =
# =================
@datc._api_lookup
def detach_volume(self, context, volume, attachment=None):
pass
# ===================
# = Create Snapshot =
# ===================
@datc._api_lookup
def create_snapshot(self, snapshot):
pass
# ===================
# = Delete Snapshot =
# ===================
@datc._api_lookup
def delete_snapshot(self, snapshot):
pass
# ========================
# = Volume From Snapshot =
# ========================
@datc._api_lookup
def create_volume_from_snapshot(self, volume, snapshot):
pass
# ==========
# = Retype =
# ==========
@datc._api_lookup
def retype(self, ctxt, volume, new_type, diff, host):
"""Convert the volume to be of the new type.
Returns a boolean indicating whether the retype occurred.
:param ctxt: Context
:param volume: A dictionary describing the volume to migrate
:param new_type: A dictionary describing the volume type to convert to
:param diff: A dictionary with the difference between the two types
:param host: A dictionary describing the host to migrate to, where
host['host'] is its name, and host['capabilities'] is a
dictionary of its reported capabilities (Not Used).
"""
pass
# ==========
# = Manage =
# ==========
@datc._api_lookup
def manage_existing(self, volume, existing_ref):
"""Manage an existing volume on the Datera backend
The existing_ref must be either the current name or Datera UUID of
an app_instance on the Datera backend in a colon separated list with
the storage instance name and volume name. This means only
single storage instances and single volumes are supported for
managing by cinder.
Eg.
(existing_ref['source-name'] ==
tenant:app_inst_name:storage_inst_name:vol_name)
if using Datera 2.1 API
or
(existing_ref['source-name'] ==
app_inst_name:storage_inst_name:vol_name)
if using 2.0 API
:param volume: Cinder volume to manage
:param existing_ref: Driver-specific information used to identify a
volume
"""
pass
# ===================
# = Manage Get Size =
# ===================
@datc._api_lookup
def manage_existing_get_size(self, volume, existing_ref):
"""Get the size of an unmanaged volume on the Datera backend
The existing_ref must be either the current name or Datera UUID of
an app_instance on the Datera backend in a colon separated list with
the storage instance name and volume name. This means only
single storage instances and single volumes are supported for
managing by cinder.
Eg.
existing_ref == app_inst_name:storage_inst_name:vol_name
:param volume: Cinder volume to manage
:param existing_ref: Driver-specific information used to identify a
volume on the Datera backend
"""
pass
# =========================
# = Get Manageable Volume =
# =========================
@datc._api_lookup
def get_manageable_volumes(self, cinder_volumes, marker, limit, offset,
sort_keys, sort_dirs):
"""List volumes on the backend available for management by Cinder.
Returns a list of dictionaries, each specifying a volume in the host,
with the following keys:
- reference (dictionary): The reference for a volume, which can be
passed to 'manage_existing'.
- size (int): The size of the volume according to the storage
backend, rounded up to the nearest GB.
- safe_to_manage (boolean): Whether or not this volume is safe to
manage according to the storage backend. For example, is the volume
in use or invalid for any reason.
- reason_not_safe (string): If safe_to_manage is False, the reason why.
- cinder_id (string): If already managed, provide the Cinder ID.
- extra_info (string): Any extra information to return to the user
:param cinder_volumes: A list of volumes in this host that Cinder
currently manages, used to determine if
a volume is manageable or not.
:param marker: The last item of the previous page; we return the
next results after this value (after sorting)
:param limit: Maximum number of items to return
:param offset: Number of items to skip after marker
:param sort_keys: List of keys to sort results by (valid keys are
'identifier' and 'size')
:param sort_dirs: List of directions to sort by, corresponding to
sort_keys (valid directions are 'asc' and 'desc')
"""
pass
# ============
# = Unmanage =
# ============
@datc._api_lookup
def unmanage(self, volume):
"""Unmanage a currently managed volume in Cinder
:param volume: Cinder volume to unmanage
"""
pass
# ================
# = Volume Stats =
# ================
@datc._api_lookup
def get_volume_stats(self, refresh=False):
"""Get volume stats.
If 'refresh' is True, run update first.
The name is a bit misleading as
the majority of the data here is cluster
data.
"""
pass
# =========
# = Login =
# =========
@datc._api_lookup
def login(self):
pass
# =======
# = QoS =
# =======
def _update_qos(self, resource, policies):
url = datc.URL_TEMPLATES['vol_inst'](
policies['default_storage_name'],
policies['default_volume_name']) + '/performance_policy'
url = url.format(datc._get_name(resource['id']))
type_id = resource.get('volume_type_id', None)
if type_id is not None:
# Filter for just QOS policies in result. All of their keys
# should end with "max"
fpolicies = {k: int(v) for k, v in
policies.items() if k.endswith("max")}
# Filter all 0 values from being passed
fpolicies = dict(filter(lambda _v: _v[1] > 0, fpolicies.items()))
if fpolicies:
self._issue_api_request(url, 'post', body=fpolicies,
api_version='2')
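    # Hedged example of the `policies` mapping consumed above (values are
    # illustrative only): {'default_storage_name': 'storage-1',
    # 'default_volume_name': 'volume-1', 'read_iops_max': '500'} --
    # only the keys ending in "max" survive the QoS filter.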
def _get_lunid(self):
return 0
# ============================
# = Volume-Types/Extra-Specs =
# ============================
def _init_vendor_properties(self):
"""Create a dictionary of vendor unique properties.
This method creates a dictionary of vendor unique properties
and returns both created dictionary and vendor name.
Returned vendor name is used to check for name of vendor
unique properties.
- Vendor name shouldn't include colon(:) because of the separator
and it is automatically replaced by underscore(_).
ex. abc:d -> abc_d
- Vendor prefix is equal to vendor name.
ex. abcd
- Vendor unique properties must start with vendor prefix + ':'.
ex. abcd:maxIOPS
Each backend driver needs to override this method to expose
its own properties using _set_property() like this:
self._set_property(
properties,
"vendorPrefix:specific_property",
"Title of property",
_("Description of property"),
"type")
: return dictionary of vendor unique properties
: return vendor name
prefix: DF --> Datera Fabric
"""
properties = {}
self._set_property(
properties,
"DF:placement_mode",
"Datera Volume Placement",
_("'single_flash' for single-flash-replica placement, "
"'all_flash' for all-flash-replica placement, "
"'hybrid' for hybrid placement"),
"string",
default="hybrid")
self._set_property(
properties,
"DF:round_robin",
"Datera Round Robin Portals",
_("True to round robin the provided portals for a target"),
"boolean",
default=False)
if self.configuration.get('datera_debug_replica_count_override'):
replica_count = 1
else:
replica_count = 3
self._set_property(
properties,
"DF:replica_count",
"Datera Volume Replica Count",
_("Specifies number of replicas for each volume. Can only be "
"increased once volume is created"),
"integer",
minimum=1,
default=replica_count)
self._set_property(
properties,
"DF:acl_allow_all",
"Datera ACL Allow All",
_("True to set acl 'allow_all' on volumes created. Cannot be "
"changed on volume once set"),
"boolean",
default=False)
self._set_property(
properties,
"DF:ip_pool",
"Datera IP Pool",
_("Specifies IP pool to use for volume"),
"string",
default="default")
self._set_property(
properties,
"DF:template",
"Datera Template",
_("Specifies Template to use for volume provisioning"),
"string",
default="")
# ###### QoS Settings ###### #
self._set_property(
properties,
"DF:read_bandwidth_max",
"Datera QoS Max Bandwidth Read",
_("Max read bandwidth setting for volume qos, "
"use 0 for unlimited"),
"integer",
minimum=0,
default=0)
self._set_property(
properties,
"DF:default_storage_name",
"Datera Default Storage Instance Name",
_("The name to use for storage instances created"),
"string",
default="storage-1")
self._set_property(
properties,
"DF:default_volume_name",
"Datera Default Volume Name",
_("The name to use for volumes created"),
"string",
default="volume-1")
self._set_property(
properties,
"DF:write_bandwidth_max",
"Datera QoS Max Bandwidth Write",
_("Max write bandwidth setting for volume qos, "
"use 0 for unlimited"),
"integer",
minimum=0,
default=0)
self._set_property(
properties,
"DF:total_bandwidth_max",
"Datera QoS Max Bandwidth Total",
_("Max total bandwidth setting for volume qos, "
"use 0 for unlimited"),
"integer",
minimum=0,
default=0)
self._set_property(
properties,
"DF:read_iops_max",
"Datera QoS Max iops Read",
_("Max read iops setting for volume qos, "
"use 0 for unlimited"),
"integer",
minimum=0,
default=0)
self._set_property(
properties,
"DF:write_iops_max",
"Datera QoS Max IOPS Write",
_("Max write iops setting for volume qos, "
"use 0 for unlimited"),
"integer",
minimum=0,
default=0)
self._set_property(
properties,
"DF:total_iops_max",
"Datera QoS Max IOPS Total",
_("Max total iops setting for volume qos, "
"use 0 for unlimited"),
"integer",
minimum=0,
default=0)
# ###### End QoS Settings ###### #
return properties, 'DF'
| [
[
[
629,
633
],
[
4627,
4631
]
],
[
[
641,
645
],
[
4252,
4256
]
],
[
[
674,
683
],
[
4797,
4806
]
],
[
[
708,
711
],
[
1147,
1150
],
[
1253,
1256
],
[
1406,
1409
],
[
1533,
1536
],
[
1652,
1655
],
[
1784,
1787
],
[
1983,
1986
],
[
2429,
2432
],
[
2589,
2592
]
],
[
[
733,
747
],
[
1103,
1110
]
],
[
[
755,
758
],
[
2725,
2728
]
],
[
[
779,
788
],
[
5427,
5436
]
],
[
[
813,
814
],
[
5188,
5189
],
[
14952,
14953
],
[
15307,
15308
],
[
15705,
15706
],
[
16033,
16034
],
[
16313,
16314
],
[
16531,
16532
],
[
16819,
16820
],
[
17130,
17131
],
[
17382,
17383
],
[
17627,
17628
],
[
17932,
17933
],
[
18225,
18226
],
[
18514,
18515
],
[
18804,
18805
]
],
[
[
834,
839
],
[
2743,
2748
],
[
4411,
4416
]
],
[
[
866,
879
],
[
2689,
2702
]
],
[
[
918,
921
],
[
2798,
2801
]
],
[
[
930,
978
],
[
2818,
2822
]
],
[
[
986,
1036
],
[
2834,
2839
]
],
[
[
1044,
1094
],
[
5611,
5615
],
[
5796,
5800
],
[
5977,
5981
],
[
6140,
6144
],
[
6286,
6290
],
[
6544,
6548
],
[
6709,
6713
],
[
6875,
6879
],
[
7053,
7057
],
[
7209,
7213
],
[
7380,
7384
],
[
7529,
7533
],
[
8286,
8290
],
[
9293,
9297
],
[
10155,
10159
],
[
11990,
11994
],
[
12253,
12257
],
[
12578,
12582
],
[
4965,
4969
],
[
12734,
12738
],
[
12906,
12910
]
],
[
[
1097,
1100
],
[
5394,
5397
]
],
[
[
1132,
1138
],
[
2675,
2681
],
[
3853,
3859
]
],
[
[
2582,
2586
],
[
2598,
2602
],
[
2656,
2660
]
],
[
[
2785,
2797
],
[
3759,
3771
]
]
] |
#!/usr/bin/env python
"""
Author: Alexander David Leech
Date: 30/09/2015
Rev: 2
Lang: Python 2.7
Deps: Pyserial, Pymodbus, logging
"""
import time # For sleep functionality
import logging # For detailed error output
from pymodbus.client.sync import ModbusSerialClient \
as ModbusClient # Import MODBUS support class
comSettings = {
"method" : 'rtu',
"port" : 'COM3',
"stopbits" : 1,
"bytesize" : 8,
"parity" : 'N',
"baudrate" : 9600,
"timeout" : 1
}
logging.basicConfig() # Setup error logging
log = logging.getLogger() # Start logging
client = ModbusClient(**comSettings) # Setup connection object
client.connect() # Open the MODBUS connection
try:
    while True:
        client.write_register(3,1000,unit=0x01)     # Write valve to 100%
        time.sleep(4)                               # Sleep 4 seconds
        client.write_register(3,0,unit=0x01)        # Write valve to 0%
        time.sleep(4)                               # Sleep 4 seconds
except KeyboardInterrupt:
    pass                                            # Ctrl-C stops the loop
finally:
    client.close()                                  # Close the connection | [
[
[
150,
154
],
[
1167,
1171
],
[
1317,
1321
]
],
[
[
231,
238
],
[
753,
760
],
[
837,
844
]
],
[
[
340,
376
],
[
913,
925
]
],
[
[
447,
458
],
[
928,
939
]
],
[
[
831,
834
]
],
[
[
904,
910
],
[
986,
992
],
[
1089,
1095
],
[
1241,
1247
],
[
1388,
1394
]
]
] |
import os
import subprocess
from unittest import mock
import pytest
from pre_commit.constants import VERSION as PRE_COMMIT_VERSION
import testing.git
from all_repos import autofix_lib
from all_repos import clone
from all_repos import git
from all_repos.config import load_config
@pytest.mark.parametrize(
('cli_repos', 'expected'),
(
(None, ['found_repo']),
([], []),
(['cli_repo'], ['cli_repo']),
),
)
def test_filter_repos(file_config, cli_repos, expected):
ret = autofix_lib.filter_repos(
file_config, cli_repos, lambda _: ['found_repo'],
)
assert ret == expected
def test_assert_importable_is_importable():
autofix_lib.assert_importable('pre_commit', install='pre-commit')
def test_assert_importable_not_importable():
with pytest.raises(SystemExit) as excinfo:
autofix_lib.assert_importable('watmodule', install='wat')
msg, = excinfo.value.args
assert msg == (
'This tool requires the `watmodule` module to be installed.\n'
'Try installing it via `pip install wat`.'
)
def test_require_version_new_enough():
autofix_lib.require_version_gte('pre-commit', '0.17.0')
def test_require_version_not_new_enough():
with pytest.raises(SystemExit) as excinfo:
autofix_lib.require_version_gte('pre-commit', '999')
msg, = excinfo.value.args
assert msg == (
f'This tool requires the `pre-commit` package is at least version '
f'999. The currently installed version is {PRE_COMMIT_VERSION}.\n\n'
f'Try `pip install --upgrade pre-commit`'
)
def test_run(capfd):
autofix_lib.run('echo', 'h"i')
out, _ = capfd.readouterr()
assert out == (
'$ echo \'h"i\'\n'
'h"i\n'
)
def test_cwd(tmpdir):
orig = os.getcwd()
with autofix_lib.cwd(tmpdir):
assert os.getcwd() == tmpdir
assert os.getcwd() == orig
def test_repo_context_success(file_config_files, capsys):
expected_rev = testing.git.revparse(file_config_files.dir1)
with autofix_lib.repo_context(
str(file_config_files.output_dir.join('repo1')), use_color=False,
):
assert testing.git.revparse('.') == expected_rev
assert git.remote('.') == file_config_files.dir1
out, err = capsys.readouterr()
assert err == ''
assert 'Errored' not in out
def test_repo_context_errors(file_config_files, capsys):
with autofix_lib.repo_context(
str(file_config_files.output_dir.join('repo1')), use_color=False,
):
assert False
out, err = capsys.readouterr()
assert 'Errored' in out
assert 'assert False' in err
def test_interactive_control_c(mock_input, capfd):
mock_input.set_side_effect(KeyboardInterrupt)
with pytest.raises(SystemExit):
autofix_lib._interactive_check(use_color=False)
out, _ = capfd.readouterr()
assert out == (
'***Looks good [y,n,s,q,?]? ^C\n'
'Goodbye!\n'
)
def test_interactive_eof(mock_input, capfd):
mock_input.set_side_effect(EOFError)
with pytest.raises(SystemExit):
autofix_lib._interactive_check(use_color=False)
out, _ = capfd.readouterr()
assert out == (
'***Looks good [y,n,s,q,?]? ^D\n'
'Goodbye!\n'
)
def test_interactive_quit(mock_input, capfd):
mock_input.set_side_effect('q')
with pytest.raises(SystemExit):
autofix_lib._interactive_check(use_color=False)
out, _ = capfd.readouterr()
assert out == (
'***Looks good [y,n,s,q,?]? <<q\n'
'Goodbye!\n'
)
def test_interactive_yes(mock_input, capfd):
mock_input.set_side_effect('y')
assert autofix_lib._interactive_check(use_color=False) is True
out, _ = capfd.readouterr()
assert out == '***Looks good [y,n,s,q,?]? <<y\n'
def test_interactive_no(mock_input, capfd):
mock_input.set_side_effect('n')
assert autofix_lib._interactive_check(use_color=False) is False
out, _ = capfd.readouterr()
assert out == '***Looks good [y,n,s,q,?]? <<n\n'
def test_interactive_shell(mock_input, capfd):
mock_input.set_side_effect('s', 'n')
with mock.patch.dict(os.environ, {'SHELL': 'echo'}):
assert autofix_lib._interactive_check(use_color=False) is False
out, _ = capfd.readouterr()
assert out == (
'***Looks good [y,n,s,q,?]? <<s\n'
'Opening an interactive shell, type `exit` to continue.\n'
'Any modifications will be committed.\n'
# A newline from echo
'\n'
'***Looks good [y,n,s,q,?]? <<n\n'
)
def test_interactive_help(mock_input, capfd):
mock_input.set_side_effect('?', 'n')
assert autofix_lib._interactive_check(use_color=False) is False
out, _ = capfd.readouterr()
assert out == (
'***Looks good [y,n,s,q,?]? <<?\n'
'y (yes): yes it looks good, commit and continue.\n'
'n (no): no, do not commit this repository.\n'
's (shell): open an interactive shell in the repo.\n'
'q (quit, ^C): early exit from the autofixer.\n'
'? (help): show this help message.\n'
'***Looks good [y,n,s,q,?]? <<n\n'
)
def test_interactive_garbage(mock_input, capfd):
mock_input.set_side_effect('garbage', 'n')
assert autofix_lib._interactive_check(use_color=False) is False
out, _ = capfd.readouterr()
assert out == (
'***Looks good [y,n,s,q,?]? <<garbage\n'
'Unexpected input: garbage\n'
'y (yes): yes it looks good, commit and continue.\n'
'n (no): no, do not commit this repository.\n'
's (shell): open an interactive shell in the repo.\n'
'q (quit, ^C): early exit from the autofixer.\n'
'? (help): show this help message.\n'
'***Looks good [y,n,s,q,?]? <<n\n'
)
def lower_case_f():
f_contents = open('f').read()
with open('f', 'w') as f:
f.write(f_contents.lower())
def failing_check_fix():
raise AssertionError('nope!')
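# Note: autofix_lib.fix runs each apply_fix with the cloned repository as
# the current working directory, which is why lower_case_f can open the
# tracked file 'f' by relative path; failing_check_fix stands in for a
# check_fix hook that always errors.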
def test_fix_dry_run_no_change(file_config_files, capfd):
autofix_lib.fix(
(
str(file_config_files.output_dir.join('repo1')),
str(file_config_files.output_dir.join('repo2')),
),
apply_fix=lower_case_f,
config=load_config(file_config_files.cfg),
commit=autofix_lib.Commit('message!', 'test-branch', None),
autofix_settings=autofix_lib.AutofixSettings(
jobs=1, color=False, limit=None, dry_run=True, interactive=False,
),
)
out, err = capfd.readouterr()
assert err == ''
assert 'Errored' not in out
# Showed the diff of what would have happened
assert '-OHAI\n+ohai\n' in out
assert '-OHELLO\n+ohello\n' in out
# Didn't actually perform any changes
assert file_config_files.dir1.join('f').read() == 'OHAI\n'
assert file_config_files.dir2.join('f').read() == 'OHELLO\n'
def test_fix_with_limit(file_config_files, capfd):
autofix_lib.fix(
(
str(file_config_files.output_dir.join('repo1')),
str(file_config_files.output_dir.join('repo2')),
),
apply_fix=lower_case_f,
config=load_config(file_config_files.cfg),
commit=autofix_lib.Commit('message!', 'test-branch', None),
autofix_settings=autofix_lib.AutofixSettings(
jobs=1, color=False, limit=1, dry_run=True, interactive=False,
),
)
out, err = capfd.readouterr()
assert err == ''
assert 'Errored' not in out
# Should still see the diff from the first repository
assert '-OHAI\n+ohai\n' in out
assert '-OHELLO\n+ohello\n' not in out
def test_fix_interactive(file_config_files, capfd, mock_input):
mock_input.set_side_effect('y', 'n')
autofix_lib.fix(
(
str(file_config_files.output_dir.join('repo1')),
str(file_config_files.output_dir.join('repo2')),
),
apply_fix=lower_case_f,
config=load_config(file_config_files.cfg),
commit=autofix_lib.Commit('message!', 'test-branch', None),
autofix_settings=autofix_lib.AutofixSettings(
jobs=1, color=False, limit=None, dry_run=False, interactive=True,
),
)
assert file_config_files.dir1.join('f').read() == 'ohai\n'
assert file_config_files.dir2.join('f').read() == 'OHELLO\n'
def test_autofix_makes_commits(file_config_files, capfd):
autofix_lib.fix(
(
str(file_config_files.output_dir.join('repo1')),
str(file_config_files.output_dir.join('repo2')),
),
apply_fix=lower_case_f,
config=load_config(file_config_files.cfg),
commit=autofix_lib.Commit('message!', 'test-branch', 'A B <a@a.a>'),
autofix_settings=autofix_lib.AutofixSettings(
jobs=1, color=False, limit=None, dry_run=False, interactive=False,
),
)
out, err = capfd.readouterr()
assert err == ''
assert 'Errored' not in out
assert file_config_files.dir1.join('f').read() == 'ohai\n'
assert file_config_files.dir2.join('f').read() == 'ohello\n'
# The branch name should be what we specified
last_commit_msg = subprocess.check_output((
'git', '-C', file_config_files.dir1, 'log',
'--format=%s', '--first-parent', '-1',
)).decode()
assert last_commit_msg == "Merge branch 'all-repos_autofix_test-branch'\n"
# We should see a commit from the autofix change we made
commit = subprocess.check_output((
'git', '-C', file_config_files.dir1, 'log',
'--patch', '--grep', 'message!', '--format=%an %ae\n%B',
)).decode()
assert commit.startswith(
'A B a@a.a\n'
'message!\n'
'\n'
'Committed via https://github.com/asottile/all-repos\n',
)
assert commit.endswith('-OHAI\n+ohai\n')
def test_fix_failing_check_no_changes(file_config_files, capfd):
autofix_lib.fix(
(
str(file_config_files.output_dir.join('repo1')),
str(file_config_files.output_dir.join('repo2')),
),
apply_fix=lower_case_f,
check_fix=failing_check_fix,
config=load_config(file_config_files.cfg),
commit=autofix_lib.Commit('message!', 'test-branch', None),
autofix_settings=autofix_lib.AutofixSettings(
jobs=1, color=False, limit=None, dry_run=False, interactive=False,
),
)
out, err = capfd.readouterr()
assert 'nope!' in err
assert out.count('Errored') == 2
# An error while checking should not allow the changes
assert file_config_files.dir1.join('f').read() == 'OHAI\n'
assert file_config_files.dir2.join('f').read() == 'OHELLO\n'
def test_noop_does_not_commit(file_config_files):
rev_before1 = testing.git.revparse(file_config_files.dir1)
rev_before2 = testing.git.revparse(file_config_files.dir2)
autofix_lib.fix(
(
str(file_config_files.output_dir.join('repo1')),
str(file_config_files.output_dir.join('repo2')),
),
apply_fix=lambda: None,
config=load_config(file_config_files.cfg),
commit=autofix_lib.Commit('message!', 'test-branch', None),
autofix_settings=autofix_lib.AutofixSettings(
jobs=1, color=False, limit=None, dry_run=False, interactive=False,
),
)
rev_after1 = testing.git.revparse(file_config_files.dir1)
rev_after2 = testing.git.revparse(file_config_files.dir2)
assert (rev_before1, rev_before2) == (rev_after1, rev_after2)
def test_fix_non_default_branch(file_config_non_default):
clone.main(('--config-filename', str(file_config_non_default.cfg)))
autofix_lib.fix(
(
str(file_config_non_default.output_dir.join('repo1')),
),
apply_fix=lower_case_f,
config=load_config(file_config_non_default.cfg),
commit=autofix_lib.Commit('message!', 'test-branch', 'A B <a@a.a>'),
autofix_settings=autofix_lib.AutofixSettings(
jobs=1, color=False, limit=None, dry_run=False, interactive=False,
),
)
assert file_config_non_default.dir1.join('f').read() == 'ohai\n'
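# For reference, minimal sketches of the fixer helpers these tests pass in
# (hypothetical reconstructions -- the real `lower_case_f` and
# `failing_check_fix` are defined earlier in this module). `apply_fix` runs
# with the cloned repository as the working directory, so it can rewrite
# files by relative path; `check_fix` signals failure by raising.
def _sketch_lower_case_f():
    with open('f') as f:
        contents = f.read()
    with open('f', 'w') as f:
        f.write(contents.lower())
def _sketch_failing_check_fix():
    raise AssertionError('nope!')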
| [
[
[
7,
9
],
[
1789,
1791
],
[
1850,
1852
],
[
1883,
1885
],
[
4145,
4147
]
],
[
[
17,
27
],
[
9121,
9131
],
[
9416,
9426
]
],
[
[
49,
53
],
[
4129,
4133
]
],
[
[
62,
68
],
[
284,
290
],
[
799,
805
],
[
1236,
1242
],
[
2757,
2763
],
[
3058,
3064
],
[
3355,
3361
]
],
[
[
102,
131
],
[
1513,
1531
]
],
[
[
140,
151
],
[
1982,
1989
],
[
2162,
2169
],
[
10702,
10709
],
[
10765,
10772
],
[
11292,
11299
],
[
11354,
11361
]
],
[
[
174,
185
],
[
510,
521
],
[
677,
688
],
[
845,
856
],
[
1126,
1137
],
[
1282,
1293
],
[
1622,
1633
],
[
1810,
1821
],
[
2036,
2047
],
[
2417,
2428
],
[
2792,
2803
],
[
3093,
3104
],
[
3390,
3401
],
[
3654,
3665
],
[
3888,
3899
],
[
4192,
4203
],
[
4652,
4663
],
[
5243,
5254
],
[
6016,
6027
],
[
6274,
6285
],
[
6352,
6363
],
[
6916,
6927
],
[
7174,
7185
],
[
7252,
7263
],
[
7708,
7719
],
[
7966,
7977
],
[
8044,
8055
],
[
8361,
8372
],
[
8619,
8630
],
[
8706,
8717
],
[
9848,
9859
],
[
10143,
10154
],
[
10221,
10232
],
[
10814,
10825
],
[
11072,
11083
],
[
11150,
11161
],
[
11602,
11613
],
[
11811,
11822
],
[
11898,
11909
]
],
[
[
208,
213
],
[
11529,
11534
]
],
[
[
236,
239
],
[
2219,
2222
]
],
[
[
269,
280
],
[
6223,
6234
],
[
7123,
7134
],
[
7915,
7926
],
[
8568,
8579
],
[
10092,
10103
],
[
11021,
11032
],
[
11754,
11765
]
],
[
[
447,
464
]
],
[
[
633,
669
]
],
[
[
749,
786
]
],
[
[
1087,
1118
]
],
[
[
1188,
1223
]
],
[
[
1601,
1609
]
],
[
[
1760,
1768
]
],
[
[
1909,
1934
]
],
[
[
2355,
2379
]
],
[
[
2651,
2677
]
],
[
[
2967,
2987
]
],
[
[
3268,
3289
]
],
[
[
3566,
3586
]
],
[
[
3801,
3820
]
],
[
[
4036,
4058
]
],
[
[
4558,
4579
]
],
[
[
5140,
5164
]
],
[
[
5775,
5787
],
[
6194,
6206
],
[
7094,
7106
],
[
7886,
7898
],
[
8539,
8551
],
[
10026,
10038
],
[
11725,
11737
]
],
[
[
5897,
5914
],
[
10058,
10075
]
],
[
[
5958,
5984
]
],
[
[
6865,
6884
]
],
[
[
7603,
7623
]
],
[
[
8303,
8329
]
],
[
[
9783,
9816
]
],
[
[
10638,
10663
]
],
[
[
11471,
11498
]
]
] |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Generate random usernames from a supplied or default name list.
"""
import random
from .names import names as default_names
class NameGenerator(object):
def __init__(self, names=None):
self.names = names or default_names
def __call__(self):
return self.names.pop(random.randrange(len(self.names)))
def __iter__(self):
while self.names:
yield self()
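# A minimal usage sketch (hypothetical names; the real default list comes
# from `.names`): the generator pops without replacement, so iterating it
# drains the pool.
if __name__ == '__main__':
    gen = NameGenerator(names=['ada', 'grace', 'linus'])
    print(gen())        # one random name, removed from the pool
    print(list(gen))    # __iter__ yields whatever remains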
| [
[
[
97,
103
],
[
315,
321
]
],
[
[
124,
146
],
[
245,
258
]
],
[
[
155,
168
]
]
] |
from django.utils import timezone
from rest_framework.authtoken.models import Token
class AuthTokenHandler:
"""
Handles creation and expiry checks for auth tokens
"""
@staticmethod
def expired_token(auth_token):
"""
Checks expiry of auth token
"""
utc_now = timezone.now()
expired = auth_token.created < utc_now - \
timezone.timedelta(hours=24)
return expired
@staticmethod
def create_auth_token(user):
"""
Creates an auth token for a user
"""
token, created = Token.objects.get_or_create(user=user)
if not created:
token.created = timezone.now()
token.save()
return token
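# Usage sketch (requires a configured Django project with
# `rest_framework.authtoken` in INSTALLED_APPS; `user` is a hypothetical
# User instance), showing the refresh-on-expiry pattern:
#
#     token = AuthTokenHandler.create_auth_token(user)
#     if AuthTokenHandler.expired_token(token):
#         token = AuthTokenHandler.create_auth_token(user)  # bumps `created`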
| [
[
[
25,
33
],
[
294,
302
],
[
372,
380
],
[
657,
665
]
],
[
[
78,
83
],
[
566,
571
]
],
[
[
92,
108
]
]
] |
from .useful_functions import get_ngrams, words_to_ngrams_list, remove_hook_words, remove_words
from .transformers import phrases_transform, phrases2lower, phrases_without_excess_symbols
from .tokenizers import text2sentences, split_by_words, sentence_split
from .stemlem_operators import create_stemmer_lemmer, create_stemmer, create_lemmatizer
from .pipeline import StemLemPipeline
from .simplifiers import sum_phrases, wordlist2set
from .stopwords import stopwords
from .metrics import Levenstein
| [
[
[
32,
42
]
],
[
[
44,
64
]
],
[
[
66,
83
]
],
[
[
85,
97
]
],
[
[
125,
142
]
],
[
[
144,
157
]
],
[
[
159,
189
]
],
[
[
215,
229
]
],
[
[
231,
245
]
],
[
[
247,
261
]
],
[
[
294,
315
]
],
[
[
317,
331
]
],
[
[
333,
350
]
],
[
[
374,
389
]
],
[
[
416,
427
]
],
[
[
429,
441
]
],
[
[
466,
475
]
],
[
[
498,
508
]
]
] |
# internal imports
import dependency_checker
import dependency_installer
import dependency_updater
import logger
from rendering import VortexWindow
# external imports
import pyglet
import sys
# check if python version is too old. If it is, exit.
if sys.version_info < (3, 6): # if python version is less than 3.6
logger.critical(
"Vortex", "Python version is too old. Please use python 3.6 or higher.")
sys.exit(1)
# check all deps and update them if needed
if not dependency_checker.check_deps(): # if any deps are missing
dependency_installer.install_deps() # install them
if not dependency_checker.check_deps(): # if any deps are still missing
# warn user and exit
logger.warn(
"Vortex", "Dependencies are not installed. Please install them manually.")
sys.exit(1)
else:
dependency_updater.update_deps() # update deps
window = VortexWindow() # create the window
pyglet.app.run() # run the app
| [
[
[
26,
44
],
[
485,
503
],
[
612,
630
]
],
[
[
52,
72
],
[
549,
569
]
],
[
[
80,
98
],
[
845,
863
]
],
[
[
106,
112
],
[
320,
326
],
[
715,
721
]
],
[
[
135,
147
],
[
903,
915
]
],
[
[
175,
181
],
[
939,
945
]
],
[
[
189,
192
],
[
251,
254
],
[
422,
425
],
[
823,
826
]
],
[
[
894,
900
]
]
] |
"""
Integration of the pytorch_transformers openai and gpt2 modules.
Note that these objects are only to be used to load
pretrained models. The pytorch-transformers library
wasn't designed to train these models from scratch.
"""
import pytorch_transformers as pt
from flambe.nlp.transformers.utils import TransformerTextField, TransformerEmbedder
class GPTTextField(TransformerTextField):
"""Integrate the pytorch_transformers OpenAIGPTTokenizer.
Currently available aliases:
. `openai-gpt`
"""
_cls = pt.OpenAIGPTTokenizer
class GPTEmbedder(TransformerEmbedder):
"""Integrate the pytorch_transformers OpenAIGPTmodel.
Currently available aliases:
. `openai-gpt`
"""
_cls = pt.OpenAIGPTModel
class GPT2TextField(TransformerTextField):
"""Integrate the pytorch_transformers GPT2Tokenizer.
Currently available aliases:
. `gpt2`
. `gpt2-medium`
. `gpt2-large`
"""
_cls = pt.GPT2Tokenizer
class GPT2Embedder(TransformerEmbedder):
"""Integrate the pytorch_transformers GPT2Model.
Currently available aliases:
. `gpt2`
. `gpt2-medium`
. `gpt2-large`
"""
_cls = pt.GPT2Model
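# Usage sketch: each `_cls` above is a plain pytorch_transformers class, so
# the aliases listed in the docstrings resolve through its standard
# `from_pretrained` loader (network access is needed on first use):
#
#     tokenizer = GPT2TextField._cls.from_pretrained('gpt2')
#     model = GPT2Embedder._cls.from_pretrained('gpt2')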
| [
[
[
239,
265
],
[
535,
537
],
[
735,
737
],
[
974,
976
],
[
1206,
1208
]
],
[
[
309,
329
],
[
372,
392
],
[
775,
795
]
],
[
[
331,
350
],
[
577,
596
],
[
1012,
1031
]
],
[
[
359,
371
]
],
[
[
565,
576
]
],
[
[
761,
774
]
],
[
[
999,
1011
]
]
] |
#
# Copyright 2018 Analytics Zoo Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from tensorflow.keras.models import Model
from tensorflow.keras.layers import Input, LSTM, Dense
import tensorflow.keras as keras
from zoo.automl.model.abstract import BaseModel
from zoo.automl.common.util import *
from zoo.automl.common.metrics import Evaluator
class LSTMSeq2Seq(BaseModel):
def __init__(self, check_optional_config=True, future_seq_len=2):
"""
Constructor of LSTM Seq2Seq model
"""
self.model = None
self.past_seq_len = None
self.future_seq_len = future_seq_len
self.feature_num = None
self.target_col_num = None
self.metric = None
self.latent_dim = None
self.batch_size = None
self.check_optional_config = check_optional_config
def _build_train(self, mc=False, **config):
"""
build LSTM Seq2Seq model
:param config:
:return:
"""
super()._check_config(**config)
self.metric = config.get('metric', 'mean_squared_error')
self.latent_dim = config.get('latent_dim', 128)
self.dropout = config.get('dropout', 0.2)
self.lr = config.get('lr', 0.001)
# for restore in continuous training
self.batch_size = config.get('batch_size', 64)
training = True if mc else None
# Define an input sequence and process it.
self.encoder_inputs = Input(shape=(None, self.feature_num), name="encoder_inputs")
encoder = LSTM(units=self.latent_dim,
dropout=self.dropout,
return_state=True,
name="encoder_lstm")
encoder_outputs, state_h, state_c = encoder(self.encoder_inputs, training=training)
# We discard `encoder_outputs` and only keep the states.
self.encoder_states = [state_h, state_c]
# Set up the decoder, using `encoder_states` as initial state.
self.decoder_inputs = Input(shape=(None, self.target_col_num), name="decoder_inputs")
# We set up our decoder to return full output sequences,
# and to return internal states as well. We don't use the
# return states in the training model, but we will use them in inference.
self.decoder_lstm = LSTM(self.latent_dim,
dropout=self.dropout,
return_sequences=True,
return_state=True,
name="decoder_lstm")
decoder_outputs, _, _ = self.decoder_lstm(self.decoder_inputs,
training=training,
initial_state=self.encoder_states)
self.decoder_dense = Dense(self.target_col_num, name="decoder_dense")
decoder_outputs = self.decoder_dense(decoder_outputs)
# Define the model that will turn
# `encoder_input_data` & `decoder_input_data` into `decoder_target_data`
self.model = Model([self.encoder_inputs, self.decoder_inputs], decoder_outputs)
self.model.compile(loss='mse',
metrics=[self.metric],
optimizer=keras.optimizers.RMSprop(lr=self.lr))
return self.model
def _restore_model(self):
self.encoder_inputs = self.model.input[0] # input_1
encoder_outputs, state_h_enc, state_c_enc = self.model.layers[2].output # lstm_1
self.encoder_states = [state_h_enc, state_c_enc]
self.decoder_inputs = self.model.input[1] # input_2
self.decoder_lstm = self.model.layers[3]
self.decoder_dense = self.model.layers[4]
def _build_inference(self, mc=False):
training = True if mc else None
# from our previous model - mapping encoder sequence to state vectors
encoder_model = Model(self.encoder_inputs, self.encoder_states)
# A modified version of the decoding stage that takes in predicted target inputs
# and encoded state vectors, returning predicted target outputs and decoder state vectors.
# We need to hang onto these state vectors to run the next step of the inference loop.
decoder_state_input_h = Input(shape=(self.latent_dim,))
decoder_state_input_c = Input(shape=(self.latent_dim,))
decoder_states_inputs = [decoder_state_input_h, decoder_state_input_c]
decoder_outputs, state_h, state_c = self.decoder_lstm(self.decoder_inputs,
training=training,
initial_state=decoder_states_inputs)
decoder_states = [state_h, state_c]
decoder_outputs = self.decoder_dense(decoder_outputs)
decoder_model = Model([self.decoder_inputs] + decoder_states_inputs,
[decoder_outputs] + decoder_states)
return encoder_model, decoder_model
def _decode_sequence(self, input_seq, mc=False):
encoder_model, decoder_model = self._build_inference(mc=mc)
# Encode the input as state vectors.
states_value = encoder_model.predict(input_seq)
# Generate empty target sequence of length 1.
target_seq = np.zeros((len(input_seq), 1, self.target_col_num))
# Populate the first target sequence with end of encoding series value
target_seq[:, 0] = input_seq[:, -1, :self.target_col_num]
# Sampling loop for a batch of sequences - we will fill decoded_seq with predictions
# (to simplify, here we assume a batch of size 1).
decoded_seq = np.zeros((len(input_seq), self.future_seq_len, self.target_col_num))
for i in range(self.future_seq_len):
output, h, c = decoder_model.predict([target_seq] + states_value)
decoded_seq[:, i] = output[:, 0]
# Update the target sequence (of length 1).
target_seq = np.zeros((len(input_seq), 1, self.target_col_num))
target_seq[:, 0] = output[:, 0]
# Update states
states_value = [h, c]
return decoded_seq
def _get_decoder_inputs(self, x, y):
"""
lagged target series for teacher forcing
decoder_input data is one timestamp ahead of y
:param x: 3-d array in format of (sample_num, past_sequence_len, feature_num)
:param y: 3-d array in format of (sample_num, future_sequence_len, target_col_num)
Need to expand dimension if y is a 2-d array with one target col
:return: 3-d array of decoder inputs
"""
decoder_input_data = np.zeros(y.shape)
decoder_input_data[1:, ] = y[:-1, ]
decoder_input_data[0, 0] = x[-1, -1, :self.target_col_num]
decoder_input_data[0, 1:] = y[0, :-1]
return decoder_input_data
def _get_len(self, x, y):
self.past_seq_len = x.shape[1]
self.feature_num = x.shape[2]
# self.future_seq_len = y.shape[1]
self.target_col_num = y.shape[2]
def _expand_y(self, y):
"""
expand dims for y.
:param y:
:return:
"""
while len(y.shape) < 3:
y = np.expand_dims(y, axis=2)
return y
def _pre_processing(self, x, y, validation_data):
"""
pre_process input data.
1. expand dims for y and val_y
2. get decoder inputs for train data
3. get decoder inputs for validation data
:param x: train_x
:param y: train_y
:param validation_data:
:return: network input
"""
y = self._expand_y(y)
self._get_len(x, y)
decoder_input_data = self._get_decoder_inputs(x, y)
if validation_data is not None:
val_x, val_y = validation_data
val_y = self._expand_y(val_y)
val_decoder_input = self._get_decoder_inputs(val_x, val_y)
validation_data = ([val_x, val_decoder_input], val_y)
return x, y, decoder_input_data, validation_data
def fit_eval(self, data, validation_data=None, mc=False, verbose=0, **config):
"""
fit for one iteration
:param data: could be a tuple with numpy ndarray with form (x, y)
x: 3-d array in format (no. of samples, past sequence length, 2+feature length),
in the last dimension, the 1st col is the time index (data type needs to be numpy datetime
type, e.g. "datetime64"),
the 2nd col is the target value (data type should be numeric)
y: 2-d numpy array in format (no. of samples, future sequence length)
if future sequence length > 1,
or 1-d numpy array in format (no. of samples, ) if future sequence length = 1
:param validation_data: tuple in format (x_test,y_test), data used for validation.
If this is specified, validation result will be the optimization target for automl.
Otherwise, train metric will be the optimization target.
:param config: optimization hyper parameters
:return: the resulting metric
"""
x, y = data[0], data[1]
x, y, decoder_input_data, validation_data = self._pre_processing(x, y, validation_data)
        # if model is not initialized, build it via _build_train
if self.model is None:
self._build_train(mc=mc, **config)
# batch_size = config.get('batch_size', 64)
# lr = self.lr
# name = "seq2seq-batch_size-{}-epochs-{}-lr-{}-time-{}"\
# .format(batch_size, epochs, lr, time())
# tensorboard = TensorBoard(log_dir="logs/" + name)
hist = self.model.fit([x, decoder_input_data], y,
validation_data=validation_data,
batch_size=self.batch_size,
epochs=config.get("epochs", 10),
verbose=verbose,
# callbacks=[tensorboard]
)
# print(hist.history)
if validation_data is None:
# get train metrics
# results = self.model.evaluate(x, y)
result = hist.history.get(self.metric)[-1]
else:
result = hist.history.get('val_' + str(self.metric))[-1]
return result
def evaluate(self, x, y, metric=['mse']):
"""
Evaluate on x, y
:param x: input
:param y: target
:param metric: a list of metrics in string format
:return: a list of metric evaluation results
"""
y_pred = self.predict(x)
# y = np.squeeze(y, axis=2)
if self.target_col_num == 1:
return [Evaluator.evaluate(m, y, y_pred) for m in metric]
else:
return [np.array([Evaluator.evaluate(m, y[:, i, :], y_pred[:, i, :])
for i in range(self.future_seq_len)])
for m in metric]
def predict(self, x, mc=False):
"""
Prediction on x.
:param x: input
:return: predicted y (expected dimension = 2)
"""
y_pred = self._decode_sequence(x, mc=mc)
if self.target_col_num == 1:
y_pred = np.squeeze(y_pred, axis=2)
return y_pred
def predict_with_uncertainty(self, x, n_iter=100):
result = np.array([self.predict(x, mc=True) for i in range(n_iter)])
prediction = result.mean(axis=0)
uncertainty = result.var(axis=0)
return prediction, uncertainty
def save(self, model_path, config_path):
"""
save model to file.
:param model_path: the model file path to be saved to.
:param config_path: the config file path to be saved to.
:return:
"""
self.model.save(model_path)
config_to_save = {"past_seq_len": self.past_seq_len,
"feature_num": self.feature_num,
"future_seq_len": self.future_seq_len,
"target_col_num": self.target_col_num,
"metric": self.metric,
"latent_dim": self.latent_dim,
"batch_size": self.batch_size}
save_config(config_path, config_to_save)
def restore(self, model_path, **config):
"""
restore model from file
:param model_path: the model file
:param config: the trial config
:return: the restored model
"""
self.past_seq_len = config["past_seq_len"]
self.feature_num = config["feature_num"]
self.future_seq_len = config["future_seq_len"]
self.target_col_num = config["target_col_num"]
self.metric = config["metric"]
self.latent_dim = config["latent_dim"]
self.batch_size = config["batch_size"]
self.model = keras.models.load_model(model_path)
self._restore_model()
# self.model.load_weights(file_path)
def _get_required_parameters(self):
return {
# 'input_shape_x',
# 'input_shape_y',
# 'out_units'
}
def _get_optional_parameters(self):
return {
            'past_seq_len',
            'latent_dim',
'dropout',
'metric',
'lr',
'epochs',
'batch_size'
}
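# Usage sketch with synthetic data (shapes follow fit_eval's docstring;
# hyperparameters are illustrative, not tuned; `np` comes in via the
# wildcard import from zoo.automl.common.util):
#
#     model = LSTMSeq2Seq(future_seq_len=2)
#     x = np.random.rand(100, 10, 4)   # (samples, past_seq_len, feature_num)
#     y = np.random.rand(100, 2)       # (samples, future_seq_len)
#     metric = model.fit_eval((x, y), epochs=1, batch_size=32)
#     y_pred = model.predict(x)        # -> shape (100, 2)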
| [
[
[
627,
632
],
[
3565,
3570
],
[
4408,
4413
],
[
5342,
5347
]
],
[
[
669,
674
],
[
1964,
1969
],
[
2510,
2515
],
[
4772,
4777
],
[
4836,
4841
]
],
[
[
676,
680
],
[
2043,
2047
],
[
2815,
2819
]
],
[
[
682,
687
],
[
3309,
3314
]
],
[
[
695,
720
],
[
3758,
3763
],
[
13381,
13386
]
],
[
[
760,
769
],
[
875,
884
]
],
[
[
805,
806
],
[
5804,
5806
],
[
6177,
6179
],
[
6498,
6500
],
[
7188,
7190
],
[
7753,
7755
],
[
11312,
11314
],
[
11749,
11751
],
[
11871,
11873
],
[
12754,
12765
]
],
[
[
845,
854
],
[
11228,
11237
],
[
11322,
11331
]
],
[
[
863,
874
]
]
] |
from src.layers.LayerHelper import *
from settings import LayerSettings as layerSettings
import tensorflow as tf
import os
os.environ["CUDA_VISIBLE_DEVICES"] = "0"  # set gpu number
def LSTM(name_, inputTensor_, numberOfOutputs_, isTraining_, dropoutProb_=None):
with tf.name_scope(name_):
cell = tf.nn.rnn_cell.LSTMCell(num_units=numberOfOutputs_,
use_peepholes=True,
initializer=layerSettings.LSTM_INITIALIZER,
forget_bias=1.0,
state_is_tuple=True,
activation=tf.nn.tanh,
name=name_+"_cell")
	if dropoutProb_ is not None:
		# keep probability = 1 - dropout probability while training, 1.0 at inference
		dropoutProbTensor = tf.cond(isTraining_, lambda: 1.0 - dropoutProb_, lambda: 1.0)
cell = tf.nn.rnn_cell.DropoutWrapper(cell,
input_keep_prob=dropoutProbTensor,
output_keep_prob=dropoutProbTensor)
statePlaceHolder = tf.nn.rnn_cell.LSTMStateTuple( tf.placeholder(layerSettings.FLOAT_TYPE, [None, numberOfOutputs_]),
tf.placeholder(layerSettings.FLOAT_TYPE, [None, numberOfOutputs_]) )
outputTensor, stateTensor = tf.nn.dynamic_rnn( cell=cell,
initial_state=statePlaceHolder,
inputs=inputTensor_)
# Add Regularization Loss
for eachVariable in tf.trainable_variables():
if name_ in eachVariable.name:
if ('bias' not in eachVariable.name)and(layerSettings.REGULARIZER_WEIGHTS_DECAY != None):
regularizationLoss = L2_Regularizer(eachVariable)
tf.losses.add_loss(regularizationLoss, loss_collection=tf.GraphKeys.REGULARIZATION_LOSSES)
return outputTensor, stateTensor, statePlaceHolder
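# Usage sketch (TF1 graph mode; sizes are illustrative):
#
#     inputs = tf.placeholder(layerSettings.FLOAT_TYPE, [None, None, 128])
#     isTraining = tf.placeholder(tf.bool, [])
#     output, state, statePlaceHolder = LSTM("lstm1", inputs, 256,
#                                            isTraining, dropoutProb_=0.5)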
| [
[
[
35,
36
],
[
1362,
1376
]
],
[
[
58,
88
],
[
421,
434
],
[
869,
882
],
[
947,
960
],
[
1286,
1299
]
],
[
[
96,
112
],
[
292,
294
],
[
323,
325
],
[
523,
525
],
[
613,
615
],
[
670,
672
],
[
823,
825
],
[
854,
856
],
[
932,
934
],
[
1032,
1034
],
[
1182,
1184
],
[
1396,
1398
],
[
1451,
1453
]
],
[
[
120,
122
],
[
146,
148
]
],
[
[
123,
143
]
],
[
[
209,
213
]
]
] |
"""LCM type definitions
This file automatically generated by lcm.
DO NOT MODIFY BY HAND!!!!
"""
import cStringIO as StringIO
import struct
class request_t(object):
__slots__ = ["utime"]
def __init__(self):
self.utime = 0
def encode(self):
buf = StringIO.StringIO()
buf.write(request_t._get_packed_fingerprint())
self._encode_one(buf)
return buf.getvalue()
def _encode_one(self, buf):
buf.write(struct.pack(">q", self.utime))
def decode(data):
if hasattr(data, 'read'):
buf = data
else:
buf = StringIO.StringIO(data)
if buf.read(8) != request_t._get_packed_fingerprint():
raise ValueError("Decode error")
return request_t._decode_one(buf)
decode = staticmethod(decode)
def _decode_one(buf):
self = request_t()
self.utime = struct.unpack(">q", buf.read(8))[0]
return self
_decode_one = staticmethod(_decode_one)
_hash = None
def _get_hash_recursive(parents):
if request_t in parents: return 0
tmphash = (0xa686a0e0f882d897) & 0xffffffffffffffff
tmphash = (((tmphash<<1)&0xffffffffffffffff) + (tmphash>>63)) & 0xffffffffffffffff
return tmphash
_get_hash_recursive = staticmethod(_get_hash_recursive)
_packed_fingerprint = None
def _get_packed_fingerprint():
if request_t._packed_fingerprint is None:
request_t._packed_fingerprint = struct.pack(">Q", request_t._get_hash_recursive([]))
return request_t._packed_fingerprint
_get_packed_fingerprint = staticmethod(_get_packed_fingerprint)
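# Round-trip sketch (Python 2, matching the cStringIO import above):
#
#     msg = request_t()
#     msg.utime = 1234567890
#     data = msg.encode()              # 8-byte fingerprint + big-endian int64
#     assert request_t.decode(data).utime == msg.utime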
| [
[
[
104,
125
],
[
277,
285
],
[
606,
614
]
],
[
[
133,
139
],
[
463,
469
],
[
889,
895
],
[
1484,
1490
]
],
[
[
147,
156
],
[
315,
324
],
[
656,
665
],
[
753,
762
],
[
856,
865
],
[
1056,
1065
],
[
1401,
1410
],
[
1502,
1511
],
[
1452,
1461
],
[
1552,
1561
]
]
] |
from conans import ConanFile, CMake
import os
channel = os.getenv("CONAN_CHANNEL", "testing")
username = os.getenv("CONAN_USERNAME", "memsharded")
class EasyLoggingTestConan(ConanFile):
settings = "os", "compiler", "build_type", "arch"
requires = "easyloggingpp/9.94.1@%s/%s" % (username, channel)
generators = "cmake"
def build(self):
cmake = CMake(self.settings)
self.run('cmake "%s" %s' % (self.conanfile_directory, cmake.command_line))
self.run("cmake --build . %s" % cmake.build_config)
def imports(self):
self.copy("*.cc")
def test(self):
os.chdir("bin")
self.run(".%sexample" % os.sep)
| [
[
[
19,
28
],
[
176,
185
]
],
[
[
30,
35
],
[
371,
376
]
],
[
[
43,
45
],
[
57,
59
],
[
106,
108
],
[
614,
616
],
[
662,
664
]
],
[
[
47,
54
],
[
299,
306
]
],
[
[
95,
103
],
[
289,
297
]
],
[
[
155,
175
]
]
] |
# Author: Guilherme Aldeia
# Contact: guilherme.aldeia@ufabc.edu.br
# Version: 1.0.1
# Last modified: 06-07-2021 by Guilherme Aldeia
"""Interaction Transformation expression's **Inspector**
Sub-module containing three classes to help inspect and explain the
results obtained with the itea.
- ``ITExpr_explainer``: Implementations of feature importances methods specific
to the Interaction-Transformation representation, and several visualization
tools to help interpret the final expression;
- ``ITExpr_inspector``: Based on a more statistical approach, this class
implements methods to measure the quality of the final expression by
calculating information between individual terms;
- ``ITExpr_texifier``: Creation of latex representations of the final expression
and its derivatives. In cases where the final expression is simple enough,
the analysis of the expression can provide useful insights.
All the modules are designed to work with `ITExpr`s. After the evolutionary
process is performed (by calling `fit()` on the `ITEA_classifier` or
`ITEA_regressor`), the best final expression can be accessed by
`itea.bestsol_`, and those classes are specialized in different ways of
inspecting the final model.
Additionally, there is one class designed to work with the ``itea``, instead
of ``ITExpr`` expressions. The class ``ITEA_summarizer`` implements a method
to automatically create a pdf file containing information generated with
all the inspection classes, in an attempt to automate the task of generating
an interpretability report.
"""
from itea.inspection._ITExpr_explainer import ITExpr_explainer
from itea.inspection._ITExpr_inspector import ITExpr_inspector
from itea.inspection._ITExpr_texifier import ITExpr_texifier
from itea.inspection._ITEA_summarizer import ITEA_summarizer
import jax
# Must run at startup: we only make lightweight use of jax, so pin it to the CPU backend.
jax.config.update('jax_platform_name', 'cpu')
__all__ = [
'ITExpr_explainer',
'ITExpr_inspector',
'ITExpr_texifier',
'ITEA_summarizer'
] | [
[
[
1650,
1666
]
],
[
[
1714,
1730
]
],
[
[
1778,
1793
]
],
[
[
1841,
1856
]
],
[
[
1867,
1870
],
[
1946,
1949
]
],
[
[
1997,
2004
]
]
] |
# write your first unittest!
import unittest
from ovos_plugin_manager.skills import find_skill_plugins
class TestPlugin(unittest.TestCase):
@classmethod
def setUpClass(self):
self.skill_id = "ovos-skill-timer.OpenVoiceOS"
def test_find_plugin(self):
plugins = find_skill_plugins()
self.assertIn(self.skill_id, list(plugins))
| [
[
[
36,
44
],
[
122,
130
]
],
[
[
84,
102
],
[
291,
309
]
],
[
[
111,
121
]
]
] |
from keras.preprocessing.image import ImageDataGenerator
from keras.models import Sequential
from keras.layers import Convolution2D, MaxPooling2D
from keras.layers import Activation, Dropout, Flatten, Dense, Lambda, ELU
from keras.optimizers import Adam
from sklearn.model_selection import train_test_split
from keras.models import model_from_json
from sklearn.preprocessing import normalize
import cv2
import numpy as np
import glob
import json
from keras.layers import merge
from keras.layers.core import Lambda
from keras.models import Model
import tensorflow as tf
def make_parallel(model, gpu_count):
def get_slice(data, idx, parts):
shape = tf.shape(data)
size = tf.concat(0, [shape[:1] // parts, shape[1:]])
stride = tf.concat(0, [shape[:1] // parts, shape[1:] * 0])
start = stride * idx
return tf.slice(data, start, size)
outputs_all = []
for i in range(len(model.outputs)):
outputs_all.append([])
# Place a copy of the model on each GPU, each getting a slice of the batch
for i in range(gpu_count):
with tf.device('/gpu:%d' % i):
with tf.name_scope('tower_%d' % i) as scope:
inputs = []
# Slice each input into a piece for processing on this GPU
for x in model.inputs:
input_shape = tuple(x.get_shape().as_list())[1:]
slice_n = Lambda(get_slice, output_shape=input_shape, arguments={'idx': i, 'parts': gpu_count})(x)
inputs.append(slice_n)
outputs = model(inputs)
if not isinstance(outputs, list):
outputs = [outputs]
# Save all the outputs for merging back together later
for l in range(len(outputs)):
outputs_all[l].append(outputs[l])
# merge outputs on CPU
with tf.device('/cpu:0'):
merged = []
for outputs in outputs_all:
merged.append(merge(outputs, mode='concat', concat_axis=0))
return Model(input=model.inputs, output=merged)
class CNNClassifier:
def __init__(self):
self.classifier = None
def get_model(self, parallel=False):
model = Sequential()
#model.add(Lambda(lambda x: x / 127.5 - 1., input_shape=(64, 64, 3)))
model.add(Convolution2D(8, 8, 8, subsample=(4, 4), border_mode="same", activation='elu', name='Conv1'))
model.add(Convolution2D(16, 5, 5, subsample=(2, 2), border_mode="same", activation='elu', name='Conv2'))
model.add(Convolution2D(32, 5, 5, subsample=(2, 2), border_mode="same", activation='elu', name='Conv3'))
model.add(Flatten())
model.add(ELU())
model.add(Dense(1024, activation='elu'))
model.add(Dropout(.5))
model.add(ELU())
model.add(Dense(512, activation='elu'))
model.add(Dropout(.5))
model.add(Dense(1, name='output'))
model.add(Activation('sigmoid'))
if parallel:
model = make_parallel(model, 2)
#model.compile(optimizer='sgd', loss='binary_crossentropy', metrics=['accuracy'])
self.model = model
return model
def _model(self):
img_width, img_height = 64, 64
model = Sequential()
model.add(Convolution2D(8, 3, 3, input_shape=(img_width, img_height, 3)))
model.add(Activation('elu'))
model.add(MaxPooling2D(pool_size=(2, 2)))
#model.add(Convolution2D(16, 3, 3))
#model.add(Activation('elu'))
#model.add(MaxPooling2D(pool_size=(2, 2)))
#model.add(Convolution2D(32, 3, 3))
#model.add(Activation('elu'))
#model.add(MaxPooling2D(pool_size=(2, 2)))
model.add(Flatten())
model.add(Dense(512))
model.add(Dropout(0.5))
model.add(Dense(1, activation='sigmoid'))
#model = make_parallel(model, 2)
self.model = model
def compile(self):
self.model.compile(loss='binary_crossentropy',
optimizer='rmsprop', class_mode='binary',
metrics=['accuracy'])
def save(self):
model_json = self.model.to_json()
with open("./model.json", "w") as json_file:
json.dump(model_json, json_file)
self.model.save_weights("./model.h5")
print("Saved model to disk")
def load(self):
with open('./model.json', 'r') as jfile:
self.model = model_from_json(json.load(jfile))
self.compile()
self.model.load_weights('./model.h5')
def get_list(self):
vehicles = np.array(glob.glob('training_data/vehicles/*/*'))
y_vehicles = np.zeros(vehicles.shape) + 1
non_vehicles = np.array(glob.glob('training_data/non-vehicles/*/*'))
y_non_vehicles = np.zeros(non_vehicles.shape)
X_data = np.concatenate((vehicles, non_vehicles))
Y_data = np.concatenate((y_vehicles, y_non_vehicles))
return X_data, Y_data
def predict(self, image):
#img = np.copy(image)
#img = cv2.resize(img, (64, 64))
x = image[None, :, :, :]
result = self.model.predict(x, 1)
return result
def train(self, file_list, labels, test_size=0.2, nb_epoch=30, batch_size=128):
X_train, X_test, Y_train, Y_test = train_test_split(file_list, labels, test_size=test_size, random_state=100)
test_images = build_images(X_test)
train_images = build_images(X_train)
train_datagen = ImageDataGenerator(
rescale=1. / 255,
shear_range=0.05,
zoom_range=0.05,
width_shift_range=0.1,
height_shift_range=0.1,
rotation_range=5,
horizontal_flip=True)
test_datagen = ImageDataGenerator(rescale=1. / 255)
train_generator = train_datagen.flow(train_images, Y_train, batch_size)
test_generator = test_datagen.flow(test_images, Y_test, batch_size)
nb_train_samples = (batch_size-1)*100
nb_validation_samples = (batch_size-1)*20
#self.get_model(parallel=False)
self._model()
self.compile()
self.model.fit_generator(
train_generator,
samples_per_epoch=nb_train_samples,
nb_epoch=nb_epoch, show_accuracy=True,
validation_data=test_generator,
nb_val_samples=nb_validation_samples)
def build_images(x):
images = np.zeros((len(x), 64, 64, 3))
for idx, img_fname in enumerate(x):
im = cv2.imread(img_fname)
im = cv2.cvtColor(im, cv2.COLOR_BGR2RGB)
im = cv2.resize(im, (64, 64), interpolation=cv2.INTER_AREA)
images[idx] = im
return images
def do_all(nb_epoch=30, batch_size=256):
clf = CNNClassifier()
x, y = clf.get_list()
clf.train(x, y, nb_epoch=nb_epoch, batch_size=batch_size)
clf.save()
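# Usage sketch (assumes the image folders hard-coded in get_list exist:
# training_data/vehicles/*/* and training_data/non-vehicles/*/*):
#
#     do_all(nb_epoch=5, batch_size=64)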
| [
[
[
38,
56
],
[
5497,
5515
],
[
5764,
5782
]
],
[
[
82,
92
],
[
2234,
2244
],
[
3266,
3276
]
],
[
[
118,
131
],
[
2343,
2356
],
[
2455,
2468
],
[
2568,
2581
],
[
3297,
3310
]
],
[
[
133,
145
],
[
3416,
3428
]
],
[
[
171,
181
],
[
2962,
2972
],
[
3379,
3389
]
],
[
[
183,
190
],
[
2784,
2791
],
[
2888,
2895
],
[
3794,
3801
]
],
[
[
192,
199
],
[
2681,
2688
],
[
3735,
3742
]
],
[
[
201,
206
],
[
2735,
2740
],
[
2840,
2845
],
[
2919,
2924
],
[
3764,
3769
],
[
3826,
3831
]
],
[
[
208,
214
]
],
[
[
216,
219
],
[
2710,
2713
],
[
2815,
2818
]
],
[
[
249,
253
]
],
[
[
290,
306
],
[
5308,
5324
]
],
[
[
332,
347
],
[
4452,
4467
]
],
[
[
382,
391
]
],
[
[
399,
402
],
[
6515,
6518
],
[
6550,
6553
],
[
6567,
6570
],
[
6599,
6602
],
[
6638,
6641
]
],
[
[
410,
421
],
[
4600,
4602
],
[
4671,
4673
],
[
4723,
4725
],
[
4802,
4804
],
[
4848,
4850
],
[
4906,
4908
],
[
6432,
6434
]
],
[
[
429,
433
],
[
4609,
4613
],
[
4732,
4736
]
],
[
[
441,
445
],
[
4241,
4245
],
[
4468,
4472
]
],
[
[
471,
476
],
[
1995,
2000
]
],
[
[
507,
513
],
[
1419,
1425
]
],
[
[
539,
544
],
[
2057,
2062
]
],
[
[
553,
569
],
[
1094,
1096
],
[
1137,
1139
],
[
1892,
1894
],
[
662,
664
],
[
692,
694
],
[
755,
757
],
[
849,
851
]
],
[
[
576,
589
],
[
3026,
3039
]
],
[
[
2106,
2119
],
[
6749,
6762
]
],
[
[
6402,
6414
],
[
5406,
5418
],
[
5450,
5462
]
],
[
[
6702,
6708
]
]
] |
import pytest
from brownie import interface
def test_uniswap_add_two_tokens(
admin, alice, chain, bank, werc20, ufactory, urouter, simple_oracle, oracle, celo, cusd, ceur, UniswapV2SpellV1, UniswapV2Oracle, core_oracle
):
spell = UniswapV2SpellV1.deploy(bank, werc20, urouter, celo, {'from': admin})
cusd.mint(admin, 10000000 * 10**6, {'from': admin})
ceur.mint(admin, 10000000 * 10**6, {'from': admin})
cusd.approve(urouter, 2**256-1, {'from': admin})
ceur.approve(urouter, 2**256-1, {'from': admin})
urouter.addLiquidity(
cusd,
ceur,
1000000 * 10**6,
1000000 * 10**6,
0,
0,
admin,
chain.time() + 60,
{'from': admin},
)
lp = ufactory.getPair(cusd, ceur)
print('admin lp bal', interface.IERC20(lp).balanceOf(admin))
uniswap_lp_oracle = UniswapV2Oracle.deploy(core_oracle, {'from': admin})
print('ceur Px', simple_oracle.getCELOPx(ceur))
print('cusd Px', simple_oracle.getCELOPx(cusd))
core_oracle.setRoute([cusd, ceur, lp], [simple_oracle, simple_oracle, uniswap_lp_oracle])
print('lp Px', uniswap_lp_oracle.getCELOPx(lp))
oracle.setTokenFactors(
[cusd, ceur, lp],
[
[10000, 10000, 10000],
[10000, 10000, 10000],
[10000, 10000, 10000],
],
{'from': admin},
)
cusd.mint(alice, 10000000 * 10**6, {'from': admin})
ceur.mint(alice, 10000000 * 10**6, {'from': admin})
cusd.approve(bank, 2**256-1, {'from': alice})
ceur.approve(bank, 2**256-1, {'from': alice})
spell.getAndApprovePair(cusd, ceur, {'from': admin})
lp = ufactory.getPair(cusd, ceur)
spell.setWhitelistLPTokens([lp], [True], {'from': admin})
bank.setWhitelistSpells([spell], [True], {'from': admin})
bank.setWhitelistTokens([cusd, ceur], [True, True], {'from': admin})
tx = bank.execute(
0,
spell,
spell.addLiquidityWERC20.encode_input(
ceur, # token 0
cusd, # token 1
[
40000 * 10**6, # 40000 ceur
50000 * 10**6, # 50000 cusd
0,
1000 * 10**6, # 1000 ceur
200 * 10**6, # 200 cusd
0, # borrow LP tokens
0, # min ceur
0, # min cusd
],
),
{'from': alice}
)
position_id = tx.return_value
print('tx gas used', tx.gas_used)
print('bank collateral size', bank.getPositionInfo(position_id))
print('bank collateral value', bank.getCollateralCELOValue(position_id))
print('bank borrow value', bank.getBorrowCELOValue(position_id))
print('bank ceur', bank.getBankInfo(ceur))
print('bank cusd', bank.getBankInfo(cusd))
print('ceur Px', simple_oracle.getCELOPx(ceur))
print('cusd Px', simple_oracle.getCELOPx(cusd))
print('lp Px', uniswap_lp_oracle.getCELOPx(lp))
| [
[
[
7,
13
]
],
[
[
34,
43
],
[
792,
801
]
],
[
[
50,
77
]
]
] |
"""
This module patches a few core functions to add compression capabilities,
since gevent-websocket does not appear to be maintained anymore.
"""
from socket import error
from zlib import (
decompressobj,
MAX_WBITS,
Z_FULL_FLUSH,
)
from geventwebsocket.exceptions import (
ProtocolError,
WebSocketError,
)
from geventwebsocket.websocket import (
MSG_SOCKET_DEAD,
Header,
WebSocket,
)
DECOMPRESSOR = decompressobj(-MAX_WBITS)
def _encode_bytes(text):
if isinstance(text, str):
return text
if not isinstance(text, unicode):
text = unicode(text or '')
return text.encode('utf-8')
def make_compressed_frame(message, compressor):
"""
Make a compressed websocket frame from a message and compressor.
Generates header and a compressed message which can then be used on any
websocket connection where `no_context_takeover` has been negotiated.
This prevents the need to re-compress a broadcast-style message for every
websocket connection.
`compressor` is a zlib compressor object.
"""
binary = not isinstance(message, (str, unicode))
opcode = WebSocket.OPCODE_BINARY if binary else WebSocket.OPCODE_TEXT
if binary:
message = str(message)
else:
message = _encode_bytes(message)
message = compressor.compress(message)
# We use Z_FULL_FLUSH (rather than Z_SYNC_FLUSH) here when
# server_no_context_takeover has been passed, to reset the context at
# the end of every frame. Patches to the actual gevent-websocket
# library should probably be able to support both.
message += compressor.flush(Z_FULL_FLUSH)
# See https://tools.ietf.org/html/rfc7692#page-19
if message.endswith('\x00\x00\xff\xff'):
message = message[:-4]
# Generate header. The RSV0 bit indicates the payload is compressed.
flags = Header.RSV0_MASK
header = Header.encode_header(
fin=True, opcode=opcode, mask='', length=len(message), flags=flags)
return header + message
def send_raw_frame(websocket, raw_message):
"""
`raw_message` includes both the header and the encoded message.
"""
try:
websocket.raw_write(raw_message)
except error:
websocket.current_app.on_close(MSG_SOCKET_DEAD)
raise WebSocketError(MSG_SOCKET_DEAD)
def read_frame(websocket):
# Patched `read_frame` method that supports decompression
header = Header.decode_header(websocket.stream)
# Start patched lines
compressed = header.flags & header.RSV0_MASK
if compressed:
header.flags &= ~header.RSV0_MASK
# End patched lines
if header.flags:
raise ProtocolError
if not header.length:
return header, ''
try:
payload = websocket.raw_read(header.length)
except error:
payload = ''
except Exception:
# Start patched lines
raise WebSocketError('Could not read payload')
# End patched lines
if len(payload) != header.length:
raise WebSocketError('Unexpected EOF reading frame payload')
if header.mask:
payload = header.unmask_payload(payload)
# Start patched lines
if compressed:
payload = ''.join((
DECOMPRESSOR.decompress(payload),
DECOMPRESSOR.decompress('\0\0\xff\xff'),
DECOMPRESSOR.flush(),
))
# End patched lines
return header, payload
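# Usage sketch: a broadcast frame can be compressed once and sent on every
# connection that negotiated permessage-deflate with no_context_takeover.
# A matching compressor uses raw-deflate settings (negative window bits):
#
#     import zlib
#     compressor = zlib.compressobj(6, zlib.DEFLATED, -zlib.MAX_WBITS)
#     frame = make_compressed_frame('hello subscribers', compressor)
#     send_raw_frame(websocket, frame)   # repeat per connection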
| [
[
[
166,
171
],
[
2222,
2227
],
[
2812,
2817
]
],
[
[
195,
208
],
[
435,
448
]
],
[
[
214,
223
],
[
450,
459
]
],
[
[
229,
241
],
[
1643,
1655
]
],
[
[
291,
304
],
[
2672,
2685
]
],
[
[
310,
324
],
[
2299,
2313
],
[
2907,
2921
],
[
3029,
3043
]
],
[
[
372,
387
],
[
2268,
2283
],
[
2314,
2329
]
],
[
[
393,
399
],
[
1874,
1880
],
[
1904,
1910
],
[
2436,
2442
]
],
[
[
405,
414
],
[
1148,
1157
],
[
1187,
1196
]
],
[
[
420,
432
],
[
3240,
3252
],
[
3286,
3298
],
[
3339,
3351
]
],
[
[
467,
480
],
[
1283,
1296
]
],
[
[
651,
672
]
],
[
[
2037,
2051
]
],
[
[
2337,
2347
]
]
] |
import os
import json
Environ = os._Environ
def is_on_cloudfoundry(env: Environ=os.environ) -> bool:
return 'VCAP_SERVICES' in env
def load_cups_from_vcap_services(name: str, env: Environ=os.environ) -> None:
'''
Detects if VCAP_SERVICES exists in the environment; if so, parses
it and imports all the credentials from the given custom
user-provided service (CUPS) as strings into the environment.
For more details on CUPS, see:
https://docs.cloudfoundry.org/devguide/services/user-provided.html
'''
if not is_on_cloudfoundry(env):
return
vcap = json.loads(env['VCAP_SERVICES'])
for entry in vcap.get('user-provided', []):
if entry['name'] == name:
for key, value in entry['credentials'].items():
env[key] = value
def load_database_url_from_vcap_services(name: str, service: str,
env: Environ=os.environ) -> str:
"""
Sets os.environ[DATABASE_URL] from a service entry in VCAP_SERVICES.
"""
if not is_on_cloudfoundry(env):
return
# FIXME: this'll break if there are multiple databases. Not an issue right
# now, but could be in the future. Keep an eye on it.
vcap = json.loads(env['VCAP_SERVICES'])
env['DATABASE_URL'] = vcap[service][0]["credentials"]["uri"]
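# A minimal self-contained sketch with a hypothetical VCAP_SERVICES payload
# (service and credential names here are illustrative only):
if __name__ == '__main__':
    fake_env = {'VCAP_SERVICES': json.dumps({
        'user-provided': [{'name': 'my-cups', 'credentials': {'API_KEY': 'abc'}}],
        'aws-rds': [{'credentials': {'uri': 'postgres://u:p@host/db'}}],
    })}
    load_cups_from_vcap_services('my-cups', env=fake_env)
    load_database_url_from_vcap_services('db', 'aws-rds', env=fake_env)
    print(fake_env['API_KEY'], fake_env['DATABASE_URL'])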
| [
[
[
7,
9
],
[
33,
35
],
[
83,
85
],
[
197,
199
],
[
932,
934
]
],
[
[
17,
21
],
[
601,
605
],
[
1241,
1245
]
],
[
[
23,
30
],
[
75,
82
],
[
189,
196
],
[
924,
931
]
],
[
[
51,
69
],
[
549,
567
],
[
1052,
1070
]
],
[
[
144,
172
]
],
[
[
816,
852
]
]
] |
from pymongo import MongoClient
from pymongo import ReadPreference
from datetime import datetime, timedelta
class Mongo(MongoClient):
def __init__(self, username, password, host, db='tags', collection='tweets_pipeline_v2'):
uri = f"mongodb://{username}:{password}@{host}/{db}"
super(Mongo, self).__init__(host=uri,
authSource=db,
authMechanism='SCRAM-SHA-256',
port=27017,
replicaset="rs0",
read_preference=ReadPreference.SECONDARY,
)
self.database = self.get_default_database()
self.collection = collection
def pipelined(self, count=True):
query = {"status": "pipelined"}
if count:
return self.database[self.collection].count_documents(query)
return self.database[self.collection].find(query)
def feed(self, count=True):
query = {"status": "graphicone_feed"}
if count:
return self.database[self.collection].count_documents(query)
return self.database[self.collection].find(query)
def search(self, count=True):
query = {"status": "graphicone_search"}
if count:
return self.database[self.collection].count_documents(query)
return self.database[self.collection].find(query)
def left_for_analysts(self, count=True):
query = {"in_app": {"$exists": False},
"status": "graphicone_feed"}
if count:
return self.database[self.collection].count_documents(query)
return self.database[self.collection].find(query)
def removed_validators(self, count=True):
query = {"validator_username": {"$exists": True},
"status": "deleted"}
if count:
return self.database[self.collection].count_documents(query)
return self.database[self.collection].find(query)
def removed_analysts(self, count=True):
query = {"status": "deleted_from_analytics"}
if count:
return self.database[self.collection].count_documents(query)
return self.database[self.collection].find(query)
# if __name__ == "__main__":
# _username = "login"
# _password = "passwd"
# mongodb_host = "host address"
#
# mongo_client = Mongo(_username, _password, mongodb_host)
# print(mongo_client.pipelined())
# print(mongo_client.search())
# print(mongo_client.feed())
# print(mongo_client.left_for_analysts())
# print(mongo_client.removed_validators())
# print(mongo_client.removed_analysts())
| [
[
[
20,
31
],
[
122,
133
]
],
[
[
52,
66
],
[
610,
624
]
],
[
[
88,
96
]
],
[
[
98,
107
]
],
[
[
116,
121
],
[
306,
311
]
]
] |
# Data Preprocessing Template
# Importing the libraries
import numpy as np
import matplotlib.pyplot as plt
import pandas as pd
# Importing the dataset
dataset = pd.read_csv('50_Startups.csv')
X = dataset.iloc[:, :-1].values
y = dataset.iloc[:, 4].values
#encoding independent variable state
#from sklearn.preprocessing import LabelEncoder, OneHotEncoder
#labelencoder_X = LabelEncoder()
#X[:, 3] = labelencoder_X.fit_transform(X[:, 3])
#onehotencoder = OneHotEncoder(categorical_features = [3])
#X = onehotencoder.fit_transform(X).toarray()
from sklearn.preprocessing import LabelEncoder,OneHotEncoder
from sklearn.compose import ColumnTransformer
ct = ColumnTransformer([("State", OneHotEncoder(), [3])], remainder = 'passthrough')
X= ct.fit_transform(X)
#avoiding the dummy variable trap
X=X[:,1:]
# Splitting the dataset into the Training set and Test set
from sklearn.model_selection import train_test_split
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size = 0.2, random_state = 0)
# Feature Scaling
"""from sklearn.preprocessing import StandardScaler
sc_X = StandardScaler()
X_train = sc_X.fit_transform(X_train)
X_test = sc_X.transform(X_test)
sc_y = StandardScaler()
y_train = sc_y.fit_transform(y_train)"""
#fitting multiple linear regression to the training set
from sklearn.linear_model import LinearRegression
regressor=LinearRegression()
regressor.fit(X_train,y_train)
#Predicting the test set results
y_pred=regressor.predict(X_test)
#Building the optimal model using backward elimination
import statsmodels.api as sm
X=np.append(arr=np.ones((50,1)).astype(int),values=X,axis=1)
#X_opt=X[:,[0,1,2,3,4,5]]
X_opt = np.array(X[:, [0, 1, 2, 3, 4, 5]], dtype=float)
regressor_OLS=sm.OLS(endog=y,exog=X_opt).fit()
regressor_OLS.summary()
X_opt = np.array(X[:, [0, 1, 3, 4, 5]], dtype=float)
regressor_OLS=sm.OLS(endog=y,exog=X_opt).fit()
regressor_OLS.summary()
X_opt = np.array(X[:, [0, 3, 4, 5]], dtype=float)
regressor_OLS=sm.OLS(endog=y,exog=X_opt).fit()
regressor_OLS.summary()
X_opt = np.array(X[:, [0, 3, 5]], dtype=float)
regressor_OLS=sm.OLS(endog=y,exog=X_opt).fit()
regressor_OLS.summary()
X_opt = np.array(X[:, [0, 3]], dtype=float)
regressor_OLS=sm.OLS(endog=y,exog=X_opt).fit()
regressor_OLS.summary()
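#the manual passes above can be generalised: repeatedly drop the predictor
#with the highest p-value until everything remaining is significant
#(a sketch; 0.05 threshold assumed, same elimination order as above)
cols = [0, 1, 2, 3, 4, 5]
while len(cols) > 1:
    ols = sm.OLS(endog=y, exog=np.array(X[:, cols], dtype=float)).fit()
    worst = int(np.argmax(ols.pvalues))
    if ols.pvalues[worst] < 0.05:
        break
    del cols[worst]
print('selected columns:', cols)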
| [
[
[
64,
75
],
[
1565,
1567
],
[
1579,
1581
],
[
1659,
1661
],
[
1788,
1790
],
[
1913,
1915
],
[
2036,
2038
],
[
2156,
2158
]
],
[
[
83,
107
]
],
[
[
115,
127
],
[
163,
165
]
],
[
[
153,
160
],
[
198,
205
],
[
230,
237
]
],
[
[
194,
195
],
[
758,
759
]
],
[
[
226,
227
],
[
974,
975
],
[
1734,
1735
],
[
1860,
1861
],
[
1982,
1983
],
[
2102,
2103
],
[
2219,
2220
]
],
[
[
580,
592
]
],
[
[
593,
606
],
[
687,
700
]
],
[
[
635,
652
],
[
658,
675
]
],
[
[
653,
655
],
[
741,
743
]
],
[
[
738,
739
],
[
798,
799
]
],
[
[
796,
797
],
[
971,
972
],
[
1614,
1615
]
],
[
[
902,
918
],
[
954,
970
]
],
[
[
919,
926
],
[
1393,
1400
]
],
[
[
928,
934
],
[
1470,
1476
]
],
[
[
936,
943
],
[
1401,
1408
]
],
[
[
945,
951
]
],
[
[
1333,
1349
],
[
1360,
1376
]
],
[
[
1350,
1359
],
[
1379,
1388
],
[
1452,
1461
]
],
[
[
1445,
1451
]
],
[
[
1541,
1562
],
[
1721,
1723
],
[
1847,
1849
],
[
1969,
1971
],
[
2089,
2091
],
[
2206,
2208
]
],
[
[
1563,
1564
],
[
1668,
1669
],
[
1797,
1798
],
[
1922,
1923
],
[
2045,
2046
],
[
2165,
2166
]
],
[
[
1651,
1656
],
[
1741,
1746
]
],
[
[
1707,
1720
],
[
1754,
1767
]
],
[
[
1780,
1785
],
[
1867,
1872
]
],
[
[
1833,
1846
],
[
1880,
1893
]
],
[
[
1905,
1910
],
[
1989,
1994
]
],
[
[
1955,
1968
],
[
2002,
2015
]
],
[
[
2028,
2033
],
[
2109,
2114
]
],
[
[
2075,
2088
],
[
2122,
2135
]
],
[
[
2148,
2153
],
[
2226,
2231
]
],
[
[
2192,
2205
],
[
2239,
2252
]
]
] |