Dataset columns:
- code: string, lengths 10 to 805k
- def_use_chains: sequence, lengths 0 to 667

Each record below is a Python source file (the code column) followed by its
def_use_chains array of [start, end] offset spans.
# Copyright © 2014-2016 Jakub Wilk <jwilk@jwilk.net>
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the “Software”), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED “AS IS”, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.

from nose.tools import (
    assert_equal,
    assert_greater_equal,
)

import lib.text as M

def test_ltrim():
    def t(s, n, expected):
        result = M.ltrim(s, n)
        assert_greater_equal(
            max(1, n),
            len(result)
        )
        assert_equal(result, expected)
    truncations = [
        '…',
        '…',
        '…s',
        '…gs',
        'eggs',
        'eggs',
    ]
    for n, s in enumerate(truncations):
        t(truncations[-1], n, s)

def test_rtrim():
    def t(s, n, expected):
        result = M.rtrim(s, n)
        assert_equal(result, expected)
    truncations = [
        '…',
        '…',
        'e…',
        'eg…',
        'eggs',
        'eggs',
    ]
    for n, s in enumerate(truncations):
        t(truncations[-1], n, s)

# vim:ts=4 sts=4 sw=4 et
[ [ [ 1140, 1152 ], [ 1376, 1388 ], [ 1678, 1690 ] ], [ [ 1158, 1178 ], [ 1289, 1309 ] ], [ [ 1190, 1203 ], [ 1267, 1268 ], [ 1656, 1657 ] ], [ [ 1209, 1219 ] ], [ [ 1598, 1608 ] ] ]
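The tests above fully pin down the trimming behavior ('eggs' truncated to each
length from 0 to 5), but the lib.text module itself is not part of this record.
A minimal sketch of implementations consistent with those expectations,
assuming nothing else about the real module:

# Hypothetical reconstruction of lib.text; behavior is inferred solely from
# the expected truncations in the tests above.
def ltrim(s, n):
    # Keep the string if it already fits; otherwise keep the trailing
    # n - 1 characters and mark the cut with a leading ellipsis.
    if len(s) <= n:
        return s
    return '…' + s[len(s) - n + 1:]

def rtrim(s, n):
    # Mirror image of ltrim: truncate on the right.
    if len(s) <= n:
        return s
    return s[:max(n - 1, 0)] + '…'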
# Copyright 2018 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Colab import customizations to the IPython runtime."""

from google.colab._import_hooks import _altair
from google.colab._import_hooks import _cv2


def _register_hooks():
  _altair._register_hook()  # pylint:disable=protected-access
  _cv2._register_hook()  # pylint:disable=protected-access
[ [ [ 678, 685 ], [ 757, 764 ] ], [ [ 725, 729 ], [ 819, 823 ] ], [ [ 736, 751 ] ] ]
# -*- coding: utf-8 -*-
from expects.testing import failure
from expects import *
from datetime import datetime
from dateutil.relativedelta import relativedelta
import json
import os

from esios import Esios
from esios.parsers import P48CierreParser

from pytz import timezone

LOCAL_TZ = timezone('Europe/Madrid')
UTC_TZ = timezone('UTC')


def validate_json(result):
    expect(result).to(be_a(str))
    data = json.loads(result)
    expect(data).to(be_a(list))
    expect(len(data)).to(be_above(22))
    expect(data[0]).to(be_a(dict))
    expect(data[0]).to(
        have_keys('hour', 'up', 'value', 'cierre', 'utc_timestamp', 'local_timestamp')
    )
    for register in data:
        # validate timestamps
        local_datetime, local_offset = register['local_timestamp'].split('+')
        is_dst = local_offset != '01:00'
        local_ts = LOCAL_TZ.localize(datetime.strptime(local_datetime, '%Y-%m-%d %H:%M:%S'), is_dst=is_dst)
        utc_ts = UTC_TZ.localize(datetime.strptime(register['utc_timestamp'], '%Y-%m-%d %H:%M:%S+00:00'))
        expect(register['local_timestamp']).to_not(equal(register['utc_timestamp']))
        expected_local_ts = LOCAL_TZ.normalize(utc_ts.astimezone(LOCAL_TZ))
        expect(local_ts).to(equal(expected_local_ts))


def validate_data(result, start, end, cierre=None):
    data = json.loads(result)
    hours = int(((end - start).total_seconds() / 3600) + 1)
    expect(len(data)).to(be(hours))
    max_date = max([d['local_timestamp'] for d in data])
    min_date = min([d['local_timestamp'] for d in data])
    expect(min_date).to(equal(str(start)))
    expect(max_date).to(equal(str(end)))
    if cierre is not None:
        for c in data:
            expect(c['cierre']).to(equal(cierre))


with description('Esios Parsers'):
    with before.all:
        ESIOS_TOKEN = os.getenv('ESIOS_TOKEN')
        self.token = ESIOS_TOKEN
        self.today = datetime.today()
        self.e = Esios(self.token)

    with context('p48CierreParser: p48cierre files parser'):
        with context('Can download data from esios'):
            with it('Creates an instance'):
                parser = P48CierreParser(self.e)
                expect(parser).to(be_a(P48CierreParser))

            with it('may be parsed as json'):
                parser = P48CierreParser(self.e)
                today = datetime.now()
                start = LOCAL_TZ.localize(
                    today.replace(hour=0, minute=0, second=0, microsecond=0)
                    - relativedelta(days=1)
                )
                end = LOCAL_TZ.localize(
                    today.replace(hour=23, minute=59, second=59, microsecond=0)
                )
                result = parser.get_data_json('SOMEC01', start, end)
                validate_json(result)
                validate_data(result, start + relativedelta(hours=1), end + relativedelta(seconds=1))

        with context('parses local files'):
            with it('gets a zipfile and may be parsed as json'):
                parser = P48CierreParser(self.e)
                result = parser.get_data_json_from_file('SOMEC01', 'spec/data/p48cierre.zip')
                validate_json(result)
                # contains full 2020/09/15 and full 2020/09/17
                data = json.loads(result)
                expect(len(data)).to(equal(48))
                local_timestamps = [r['local_timestamp'] for r in data]
                ts_template = '2020-09-{:02} {:02}:00:00+02:00'
                # 2020/09/15
                for hour in range(1, 24):
                    expect(local_timestamps).to(contain(ts_template.format(15, hour)))
                expect(local_timestamps).to(contain(ts_template.format(16, 0)))
                # 2020/09/17
                for hour in range(1, 24):
                    expect(local_timestamps).to(contain(ts_template.format(17, hour)))
                expect(local_timestamps).to(contain(ts_template.format(18, 0)))
                # cierre
                for c in data:
                    if '2020-09-15' in c['local_timestamp']:
                        expect(c['cierre']).to(be_true)
                    elif '2020-09-16' in c['local_timestamp']:
                        expect(c['cierre']).to(be_true)
                    elif '2020-09-17' in c['local_timestamp']:
                        expect(c['cierre']).to(be_false)
                    elif '2020-09-18' in c['local_timestamp']:
                        expect(c['cierre']).to(be_false)

            with it('gets a p48cierre xml file and may be parsed as json'):
                parser = P48CierreParser(self.e)
                result = parser.get_data_json_from_file('SOMEC01', 'spec/data/p48cierre_20200915.xml')
                validate_json(result)
                validate_data(
                    result,
                    LOCAL_TZ.localize(datetime(2020, 9, 15, 1, 0)),
                    LOCAL_TZ.localize(datetime(2020, 9, 16, 0, 0), True)
                )

            with it('gets a p48 xml file and may be parsed as json'):
                parser = P48CierreParser(self.e)
                result = parser.get_data_json_from_file('SOMEC01', 'spec/data/p48_2020091618.xml')
                validate_json(result)
                validate_data(
                    result,
                    LOCAL_TZ.localize(datetime(2020, 9, 17, 1, 0)),
                    LOCAL_TZ.localize(datetime(2020, 9, 18, 0, 0), False)
                )

            with it('gets 25 registers for a p48cierre xml file from October saving time day'):
                parser = P48CierreParser(self.e)
                result = parser.get_data_json_from_file('SOMEC01', 'spec/data/p48cierre_20191027.xml')
                validate_json(result)
                validate_data(
                    result,
                    LOCAL_TZ.localize(datetime(2019, 10, 27, 1, 0)),
                    LOCAL_TZ.localize(datetime(2019, 10, 28, 0, 0)),
                    True
                )
                data = json.loads(result)
                expect(len(data)).to(equal(25))

            with it('gets 23 registers for a p48cierre xml file from March saving time day'):
                parser = P48CierreParser(self.e)
                result = parser.get_data_json_from_file('SOMEC01', 'spec/data/p48cierre_20200329.xml')
                validate_json(result)
                validate_data(
                    result,
                    LOCAL_TZ.localize(datetime(2020, 3, 29, 1, 0)),
                    LOCAL_TZ.localize(datetime(2020, 3, 30, 0, 0)),
                    True
                )
                data = json.loads(result)
                expect(len(data)).to(equal(23))
[ [ [ 52, 59 ] ], [ [ 80, 81 ], [ 1751, 1762 ], [ 1790, 1796 ], [ 1857, 1861 ], [ 1891, 1895 ], [ 1945, 1949 ], [ 1930, 1934 ], [ 1967, 1974 ], [ 2033, 2040 ], [ 2091, 2093 ], [ 2159, 2163 ], [ 2183, 2189 ], [ 2201, 2205 ], [ 2242, 2244 ], [ 2312, 2316 ], [ 2886, 2893 ], [ 2935, 2937 ], [ 3024, 3028 ], [ 3286, 3292 ], [ 3307, 3312 ], [ 3545, 3551 ], [ 3573, 3580 ], [ 3628, 3634 ], [ 3656, 3663 ], [ 3783, 3789 ], [ 3811, 3818 ], [ 3866, 3872 ], [ 3894, 3901 ], [ 4072, 4078 ], [ 4095, 4102 ], [ 4191, 4197 ], [ 4214, 4221 ], [ 4310, 4316 ], [ 4333, 4341 ], [ 4430, 4436 ], [ 4453, 4461 ], [ 4482, 4484 ], [ 4582, 4586 ], [ 4929, 4931 ], [ 5023, 5027 ], [ 5366, 5368 ], [ 5486, 5490 ], [ 5874, 5880 ], [ 5895, 5900 ], [ 5924, 5926 ], [ 6042, 6046 ], [ 6429, 6435 ], [ 6450, 6455 ], [ 374, 380 ], [ 392, 396 ], [ 439, 445 ], [ 455, 459 ], [ 471, 477 ], [ 492, 500 ], [ 510, 516 ], [ 529, 533 ], [ 545, 551 ], [ 573, 582 ], [ 1055, 1061 ], [ 1098, 1103 ], [ 1216, 1222 ], [ 1236, 1241 ], [ 1411, 1417 ], [ 1432, 1434 ], [ 1563, 1569 ], [ 1583, 1588 ], [ 1606, 1612 ], [ 1626, 1631 ], [ 1706, 1712 ], [ 1729, 1734 ] ], [ [ 104, 112 ], [ 1904, 1912 ], [ 2345, 2353 ], [ 4809, 4817 ], [ 4857, 4865 ], [ 5246, 5254 ], [ 5294, 5302 ], [ 5713, 5721 ], [ 5762, 5770 ], [ 6269, 6277 ], [ 6317, 6325 ], [ 870, 878 ], [ 974, 982 ] ], [ [ 148, 161 ], [ 2482, 2495 ], [ 2816, 2829 ], [ 2846, 2859 ] ], [ [ 170, 174 ], [ 3251, 3255 ], [ 5839, 5843 ], [ 6394, 6398 ], [ 415, 419 ], [ 1326, 1330 ] ], [ [ 182, 184 ], [ 1824, 1826 ] ], [ [ 204, 209 ], [ 1939, 1944 ] ], [ [ 236, 251 ], [ 2143, 2158 ], [ 2206, 2221 ], [ 2296, 2311 ], [ 3008, 3023 ], [ 4566, 4581 ], [ 5007, 5022 ], [ 5470, 5485 ], [ 6026, 6041 ] ], [ [ 269, 277 ], [ 290, 298 ], [ 325, 333 ] ], [ [ 279, 287 ], [ 2384, 2392 ], [ 2544, 2552 ], [ 4791, 4799 ], [ 4839, 4847 ], [ 5228, 5236 ], [ 5276, 5284 ], [ 5695, 5703 ], [ 5744, 5752 ], [ 6251, 6259 ], [ 6299, 6307 ], [ 852, 860 ], [ 1160, 1168 ], [ 1197, 1205 ] ], [ [ 316, 322 ], [ 958, 964 ] ], [ [ 347, 360 ], [ 2748, 2761 ], [ 3143, 3156 ], [ 4710, 4723 ], [ 5147, 5160 ], [ 5614, 5627 ], [ 6170, 6183 ] ], [ [ 1267, 1280 ], [ 2786, 2799 ], [ 4748, 4761 ], [ 5185, 5198 ], [ 5652, 5665 ], [ 6208, 6221 ] ], [ [ 1810, 1821 ], [ 1870, 1881 ] ], [ [ 2134, 2140 ], [ 2190, 2196 ] ], [ [ 2287, 2293 ], [ 2687, 2693 ] ], [ [ 2337, 2342 ], [ 2423, 2428 ], [ 2583, 2588 ] ], [ [ 2376, 2381 ], [ 2719, 2724 ], [ 2808, 2813 ] ], [ [ 2538, 2541 ], [ 2726, 2729 ], [ 2840, 2843 ] ], [ [ 2678, 2684 ], [ 2762, 2768 ], [ 2800, 2806 ] ], [ [ 2999, 3005 ], [ 3057, 3063 ] ], [ [ 3048, 3054 ], [ 3157, 3163 ], [ 3262, 3268 ] ], [ [ 3244, 3248 ], [ 3297, 3301 ], [ 3384, 3388 ], [ 3981, 3985 ] ], [ [ 3334, 3350 ], [ 3552, 3568 ], [ 3635, 3651 ], [ 3790, 3806 ], [ 3873, 3889 ] ], [ [ 3406, 3417 ], [ 3581, 3592 ], [ 3664, 3675 ], [ 3819, 3830 ], [ 3902, 3913 ] ], [ [ 3503, 3507 ], [ 3604, 3608 ] ], [ [ 3741, 3745 ], [ 3842, 3846 ] ], [ [ 3976, 3977 ], [ 4026, 4027 ], [ 4079, 4080 ], [ 4145, 4146 ], [ 4198, 4199 ], [ 4264, 4265 ], [ 4317, 4318 ], [ 4384, 4385 ], [ 4437, 4438 ] ], [ [ 4557, 4563 ], [ 4615, 4621 ] ], [ [ 4606, 4612 ], [ 4724, 4730 ], [ 4783, 4789 ] ], [ [ 4998, 5004 ], [ 5056, 5062 ] ], [ [ 5047, 5053 ], [ 5161, 5167 ], [ 5220, 5226 ] ], [ [ 5461, 5467 ], [ 5519, 5525 ] ], [ [ 5510, 5516 ], [ 5628, 5634 ], [ 5687, 5693 ], [ 5850, 5856 ] ], [ [ 5832, 5836 ], [ 5885, 5889 ] ], [ [ 6017, 6023 ], [ 6075, 6081 ] ], [ [ 6066, 6072 ], [ 6184, 6190 ], [ 6243, 6249 ], [ 6405, 6411 ] ], [ [ 6387, 6391 ], [ 6440, 6444 ] ] ]
# -*- coding: utf-8 -*-
"""
Correa González Alfredo

Cats and mice
- I have k cats (and I mice) at home.
- I serve food to my cats in m plates.
- Cats and mice have reached an agreement to share time and food,
  but they have to convince me that they are doing their job.
- Cats may eat from their m food plates.
- Mice may eat from those plates as long as they are not seen.
- If a cat sees a mouse eating, it must eat the mouse.
- The plates are placed next to each other.
- Only one animal can eat from a plate at a time.
- If a cat is eating and sees a mouse start eating from another plate,
  the cat sees it and eats it.
- By gentlemen's agreement, cats may not approach the plates while
  there are mice eating.
"""
from threading import Semaphore, Thread, Event
import threading
import time
import random

hambreDeGato = 100
hambreDeRaton = 2
numeroDeGatos = 2
numeroDeRatones = 10
platos = []
p = 5
gatosComiendo = 0
ratonesComiendo = 0
mutex_hambreGato = threading.Semaphore(1)
mutex_hambreRaton = threading.Semaphore(1)
entrar_a_comer = Semaphore(1)


def gato(id, m):
    global gatosComiendo, ratonesComiendo, platos, numeroDeRatones
    while numeroDeRatones != 0:
        time.sleep(random.random() / hambreDeGato)
        # Turnstile: pass through the shared semaphore without holding it.
        entrar_a_comer.acquire()
        entrar_a_comer.release()
        mutex_hambreGato.acquire()
        if ratonesComiendo > 0:
            print("Gato {} no se acerca a los platos por su orgullo de caballero".format(id))
            mutex_hambreGato.release()
        else:
            platos[id % m].acquire()
            print("El gato {} comienza a comer del plato {}".format(id, id % m))
            gatosComiendo = gatosComiendo + 1
            print("El gato {} terminó de comer".format(id))
            gatosComiendo = gatosComiendo - 1
            platos[id % m].release()
            mutex_hambreGato.release()


def raton(id, m):
    global gatosComiendo, ratonesComiendo, platos, numeroDeRatones
    while numeroDeRatones != 0:
        time.sleep(random.random() / hambreDeRaton)
        entrar_a_comer.acquire()
        entrar_a_comer.release()
        mutex_hambreRaton.acquire()
        if gatosComiendo > 0:
            print("Se comieron al ratón {}".format(id))
            ratonesComiendo = ratonesComiendo - 1
            numeroDeRatones = numeroDeRatones - 1
            if numeroDeRatones == 0:
                print("¡¡¡¡¡SE MURIERON TODOS LOS RATONES :(!!!!!")
                time.sleep(10000)
            mutex_hambreRaton.release()
        else:
            platos[id % m].acquire()
            print("El ratón {} comienza a comer en el plato {}".format(id, id % m))
            ratonesComiendo = ratonesComiendo + 1
            print("El ratón {} terminó de comer".format(id))
            ratonesComiendo = ratonesComiendo - 1
            platos[id % m].release()
            mutex_hambreRaton.release()


for i in range(p):
    platos.append(Semaphore(1))
for i in range(numeroDeGatos):
    Thread(target=gato, args=[i, p]).start()
for i in range(numeroDeRatones):
    Thread(target=raton, args=[i, p]).start()
[ [ [ 825, 834 ], [ 1131, 1140 ], [ 3138, 3147 ] ], [ [ 836, 842 ], [ 3188, 3194 ], [ 3274, 3280 ] ], [ [ 844, 849 ] ], [ [ 857, 866 ], [ 1048, 1057 ], [ 1091, 1100 ] ], [ [ 874, 878 ], [ 1270, 1274 ], [ 2127, 2131 ], [ 2590, 2594 ] ], [ [ 886, 892 ], [ 1281, 1287 ], [ 2138, 2144 ] ], [ [ 894, 906 ], [ 1299, 1311 ] ], [ [ 913, 926 ], [ 2156, 2169 ] ], [ [ 932, 945 ], [ 3168, 3181 ] ], [ [ 950, 965 ], [ 3252, 3267 ], [ 1240, 1255 ], [ 2097, 2112 ], [ 2448, 2463 ] ], [ [ 971, 977 ], [ 3124, 3130 ], [ 1636, 1642 ], [ 1928, 1934 ], [ 2695, 2701 ], [ 3012, 3018 ] ], [ [ 983, 984 ], [ 3116, 3117 ], [ 3220, 3221 ], [ 3307, 3308 ] ], [ [ 990, 1003 ], [ 1766, 1779 ], [ 2293, 2306 ] ], [ [ 1008, 1023 ], [ 1443, 1458 ], [ 2398, 2413 ], [ 2830, 2845 ] ], [ [ 1029, 1045 ], [ 1396, 1412 ], [ 1570, 1586 ], [ 1963, 1979 ] ], [ [ 1071, 1088 ], [ 2245, 2262 ], [ 2624, 2641 ], [ 3047, 3064 ] ], [ [ 1114, 1128 ], [ 1321, 1335 ], [ 1354, 1368 ], [ 2179, 2193 ], [ 2212, 2226 ] ], [ [ 1150, 1154 ], [ 3204, 3208 ] ], [ [ 2007, 2012 ], [ 3290, 3295 ] ], [ [ 3105, 3106 ] ], [ [ 3157, 3158 ], [ 3218, 3219 ] ], [ [ 3241, 3242 ], [ 3305, 3306 ] ], [ [ 1750, 1763 ], [ 1885, 1898 ], [ 1766, 1779 ] ], [ [ 1869, 1882 ], [ 1766, 1779 ], [ 1885, 1898 ] ], [ [ 2380, 2395 ], [ 2398, 2413 ], [ 2830, 2845 ], [ 2967, 2982 ] ], [ [ 2430, 2445 ], [ 2483, 2498 ], [ 2097, 2112 ], [ 2448, 2463 ] ], [ [ 2812, 2827 ], [ 2967, 2982 ], [ 2398, 2413 ], [ 2830, 2845 ] ], [ [ 2949, 2964 ], [ 2398, 2413 ], [ 2830, 2845 ], [ 2967, 2982 ] ] ]
# This script reads a PNG file containing a single row of 26 x 26 tiles and outputs binary data.
# NumPy and Pillow are required as dependencies.
#
# Specify an input PNG file and an optional output file as arguments.
# If an output file is not given, the binary data will be written to the console.
#
# The original graphic format is 4 bits per pixel, with each byte representing two pixels stacked vertically.
# The left nybble represents the lower pixel and the right nybble represents the upper pixel.
# 13 rows of these bytes create a 26 x 26 tile.
#
# To create replacement tiles, create a non-transparent image with the following 16-color palette:
# 000000 101010 202020 303030 404040 505050 606060 707070 808080 909090 A0A0A0 B0B0B0 C0C0C0 D0D0D0 E0E0E0 F0F0F0
#
# Although the resulting image will be grayscale, this image should be saved as 8-bit RGB.
# Image editors will frequently override indexed palettes when converting to grayscale,
# so creating RGB images is recommended to guarantee the palette will not be changed.
# The first channel (red) of this file will be read and used as pixel data.
#
# Overwrite SKFONT.CG with the output starting at the tile offset to replace.

import struct
import sys

import numpy as np
from PIL import Image


def main():
    if len(sys.argv) < 2:
        print("Specify input PNG file.")
        return

    with Image.open(sys.argv[1]) as input_file:
        output = b''
        # Read image and split into equal number of 26 x 26 arrays.
        image = list(input_file.getdata(0))
        image_size = input_file.size
        image_2d = np.empty((image_size[1], image_size[0]), dtype="uint8")
        # rows = image[2]
        try:
            # Copy all 26 pixel rows. (The original used range(0, 25), which
            # left the last row of image_2d uninitialized.)
            for i in range(0, 26):
                image_2d[i] = image[i * image_size[0]:(i + 1) * image_size[0]]
            # Split into individual tiles. np.hsplit requires an integer
            # section count, hence the floor division.
            tiles = np.hsplit(image_2d, image_size[0] // 26)
            for i in tiles:
                # Bitwise shift 4 to the right to obtain 0-F value for each pixel.
                tile = np.right_shift(i, 4)
                # Divide each tile into 26 x 2 arrays.
                tile_row_pairs = np.vsplit(tile, 13)
                for row_pair in tile_row_pairs:
                    for column in range(0, 26):
                        # Upper pixel is right nybble; lower pixel is left nybble.
                        upper_pixel = row_pair[0][column]
                        lower_pixel = row_pair[1][column] << 4
                        pixels = upper_pixel + lower_pixel
                        output += struct.pack("=B", pixels)
        except ValueError:
            print("Input PNG file must be 8-bit, no transparency, and have a height of 26 pixels and width a multiple of 26 pixels.")
            return

    if len(sys.argv) >= 3:
        with open(sys.argv[2], "wb") as output_file:
            output_file.write(output)
        print(f"Paste the contents of {sys.argv[2]} into SKFONT.CG starting at the tile(s) to replace.")
    else:
        print(output.hex())
        print("\nPaste the above hex into SKFONT.CG starting at the tile(s) to replace.")


if __name__ == "__main__":
    main()
[ [ [ 1202, 1208 ], [ 2574, 2580 ] ], [ [ 1216, 1219 ], [ 1286, 1289 ], [ 1378, 1381 ], [ 2796, 2799 ], [ 2834, 2837 ], [ 2957, 2960 ] ], [ [ 1227, 1238 ], [ 1596, 1598 ], [ 1875, 1877 ], [ 2048, 2050 ], [ 2157, 2159 ] ], [ [ 1255, 1260 ], [ 1367, 1372 ] ], [ [ 1266, 1270 ], [ 3197, 3201 ] ] ]
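The header comment of the script above specifies the byte layout: two
vertically stacked 4-bit pixels per byte, lower pixel in the left (high)
nybble. A tiny round-trip with made-up pixel values to make that packing
concrete:

# Minimal illustration of the nybble packing described above; the pixel
# values are arbitrary examples, not taken from any real tile.
import struct

upper_pixel = 0x3
lower_pixel = 0xA

packed = struct.pack("=B", (lower_pixel << 4) | upper_pixel)
assert packed == b'\xa3'

# Unpacking reverses the shift/mask:
byte = packed[0]
assert (byte & 0x0F, byte >> 4) == (upper_pixel, lower_pixel)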
# Copyright 2020 The Emscripten Authors. All rights reserved.
# Emscripten is available under two separate licenses, the MIT license and the
# University of Illinois/NCSA Open Source License. Both these licenses can be
# found in the LICENSE file.

import os
import sys
import logging

from .utils import path_from_root, exit_with_error, __rootpath__, which

logger = logging.getLogger('shared')

# The following class can be overridden by the config file and/or
# environment variables. Specifically any variable whose name
# is in ALL_UPPER_CASE is considered a valid config file key.
# See parse_config_file below.
EMSCRIPTEN_ROOT = __rootpath__
NODE_JS = None
BINARYEN_ROOT = None
SPIDERMONKEY_ENGINE = None
V8_ENGINE = None
LLVM_ROOT = None
LLVM_ADD_VERSION = None
CLANG_ADD_VERSION = None
CLOSURE_COMPILER = None
JAVA = None
JS_ENGINE = None
JS_ENGINES = None
WASMER = None
WASMTIME = None
WASM_ENGINES = []
FROZEN_CACHE = None
CACHE = None
PORTS = None
COMPILER_WRAPPER = None


def listify(x):
  if type(x) is not list:
    return [x]
  return x


def fix_js_engine(old, new):
  if old is None:
    return
  global JS_ENGINES
  JS_ENGINES = [new if x == old else x for x in JS_ENGINES]
  return new


def root_is_writable():
  return os.access(__rootpath__, os.W_OK)


def normalize_config_settings():
  global CACHE, PORTS, JAVA, LLVM_ADD_VERSION, CLANG_ADD_VERSION
  global NODE_JS, V8_ENGINE, JS_ENGINE, JS_ENGINES, SPIDERMONKEY_ENGINE, WASM_ENGINES

  # EM_CONFIG stuff
  if not JS_ENGINES:
    JS_ENGINES = [NODE_JS]

  if not JS_ENGINE:
    JS_ENGINE = JS_ENGINES[0]

  # Engine tweaks
  if SPIDERMONKEY_ENGINE:
    new_spidermonkey = SPIDERMONKEY_ENGINE
    if '-w' not in str(new_spidermonkey):
      new_spidermonkey += ['-w']
    SPIDERMONKEY_ENGINE = fix_js_engine(SPIDERMONKEY_ENGINE, new_spidermonkey)

  NODE_JS = fix_js_engine(NODE_JS, listify(NODE_JS))
  V8_ENGINE = fix_js_engine(V8_ENGINE, listify(V8_ENGINE))
  JS_ENGINE = fix_js_engine(JS_ENGINE, listify(JS_ENGINE))
  JS_ENGINES = [listify(engine) for engine in JS_ENGINES]
  WASM_ENGINES = [listify(engine) for engine in WASM_ENGINES]
  if not CACHE:
    if root_is_writable():
      CACHE = path_from_root('cache')
    else:
      # Use the legacy method of putting the cache in the user's home directory
      # if the emscripten root is not writable.
      # This is useful mostly for read-only installation and perhaps could
      # be removed in the future since such installations should probably be
      # setting a specific cache location.
      logger.debug('Using home-directory for emscripten cache due to read-only root')
      CACHE = os.path.expanduser(os.path.join('~', '.emscripten_cache'))

  if not PORTS:
    PORTS = os.path.join(CACHE, 'ports')

  if JAVA is None:
    logger.debug('JAVA not defined in ' + config_file_location() + ', using "java"')
    JAVA = 'java'

  # Tools/paths
  if LLVM_ADD_VERSION is None:
    LLVM_ADD_VERSION = os.getenv('LLVM_ADD_VERSION')

  if CLANG_ADD_VERSION is None:
    CLANG_ADD_VERSION = os.getenv('CLANG_ADD_VERSION')


def parse_config_file():
  """Parse the emscripten config file using python's exec.

  Also check EM_<KEY> environment variables to override specific config keys.
  """
  config = {}
  config_text = open(config_file, 'r').read() if config_file else EM_CONFIG
  try:
    exec(config_text, config)
  except Exception as e:
    exit_with_error('Error in evaluating %s (at %s): %s, text: %s', EM_CONFIG, config_file, str(e), config_text)

  CONFIG_KEYS = (
    'NODE_JS',
    'BINARYEN_ROOT',
    'SPIDERMONKEY_ENGINE',
    'V8_ENGINE',
    'LLVM_ROOT',
    'LLVM_ADD_VERSION',
    'CLANG_ADD_VERSION',
    'CLOSURE_COMPILER',
    'JAVA',
    'JS_ENGINE',
    'JS_ENGINES',
    'WASMER',
    'WASMTIME',
    'WASM_ENGINES',
    'FROZEN_CACHE',
    'CACHE',
    'PORTS',
    'COMPILER_WRAPPER',
  )

  # Only propagate certain settings from the config file.
  for key in CONFIG_KEYS:
    env_var = 'EM_' + key
    env_value = os.environ.get(env_var)
    if env_value is not None:
      globals()[key] = env_value
    elif key in config:
      globals()[key] = config[key]

  # Certain keys are mandatory
  for key in ('LLVM_ROOT', 'NODE_JS', 'BINARYEN_ROOT'):
    if key not in config:
      exit_with_error('%s is not defined in %s', key, config_file_location())
    if not globals()[key]:
      exit_with_error('%s is set to empty value in %s', key, config_file_location())

  if not NODE_JS:
    exit_with_error('NODE_JS is not defined in %s', config_file_location())

  normalize_config_settings()


# Returns the location of the emscripten config file.
def config_file_location():
  # Handle the case where there is no config file at all (i.e. If EM_CONFIG is passed as python code
  # directly on the command line).
  if not config_file:
    return '<inline config>'

  return config_file


def generate_config(path, first_time=False):
  # Note: repr is used to ensure the paths are escaped correctly on Windows.
  # The full string is replaced so that the template stays valid Python.
  config_file = open(path_from_root('tools', 'settings_template.py')).read().splitlines()
  config_file = config_file[3:]  # remove the initial comment
  config_file = '\n'.join(config_file)
  # autodetect some default paths
  config_file = config_file.replace('\'{{{ EMSCRIPTEN_ROOT }}}\'', repr(__rootpath__))
  llvm_root = os.path.dirname(which('llvm-dis') or '/usr/bin/llvm-dis')
  config_file = config_file.replace('\'{{{ LLVM_ROOT }}}\'', repr(llvm_root))

  node = which('nodejs') or which('node') or 'node'
  config_file = config_file.replace('\'{{{ NODE }}}\'', repr(node))

  abspath = os.path.abspath(os.path.expanduser(path))
  # write
  with open(abspath, 'w') as f:
    f.write(config_file)

  if first_time:
    print('''
==============================================================================
Welcome to Emscripten!

This is the first time any of the Emscripten tools has been run.

A settings file has been copied to %s, at absolute path: %s

It contains our best guesses for the important paths, which are:

  LLVM_ROOT       = %s
  NODE_JS         = %s
  EMSCRIPTEN_ROOT = %s

Please edit the file if any of those are incorrect.

This command will now exit. When you are done editing those paths, re-run it.
==============================================================================
''' % (path, abspath, llvm_root, node, __rootpath__), file=sys.stderr)


# Emscripten configuration is done through the --em-config command line option
# or the EM_CONFIG environment variable. If the specified string value contains
# newline or semicolon-separated definitions, then these definitions will be
# used to configure Emscripten. Otherwise, the string is understood to be a
# path to a settings file that contains the required definitions.
# The search order from the config file is as follows:
# 1. Specified on the command line (--em-config)
# 2. Specified via EM_CONFIG environment variable
# 3. Local .emscripten file, if found
# 4. Local .emscripten file, as used by `emsdk --embedded` (two levels above,
#    see below)
# 5. User home directory config (~/.emscripten), if found.

embedded_config = path_from_root('.emscripten')
# For compatibility with `emsdk --embedded` mode also look two levels up. The
# layout of the emsdk puts emcc two levels below emsdk. For example:
#  - emsdk/upstream/emscripten/emcc
#  - emsdk/emscripten/1.38.31/emcc
# However `emsdk --embedded` stores the config file in the emsdk root.
# Without this check, when emcc is run from within the emsdk in embedded mode
# and the user forgets to first run `emsdk_env.sh` (which sets EM_CONFIG) emcc
# will not see any config file at all and fall back to creating a new/empty
# one.
# We could remove this special case if emsdk were to write its embedded config
# file into the emscripten directory itself.
# See: https://github.com/emscripten-core/emsdk/pull/367
emsdk_root = os.path.dirname(os.path.dirname(path_from_root()))
emsdk_embedded_config = os.path.join(emsdk_root, '.emscripten')
user_home_config = os.path.expanduser('~/.emscripten')

if '--em-config' in sys.argv:
  EM_CONFIG = sys.argv[sys.argv.index('--em-config') + 1]
  # And now remove it from sys.argv
  skip = False
  newargs = []
  for arg in sys.argv:
    if not skip and arg != '--em-config':
      newargs += [arg]
    elif arg == '--em-config':
      skip = True
    elif skip:
      skip = False
  sys.argv = newargs
  if not os.path.isfile(EM_CONFIG):
    if EM_CONFIG.startswith('-'):
      exit_with_error('Passed --em-config without an argument. Usage: --em-config /path/to/.emscripten or --em-config LLVM_ROOT=/path;...')
    if '=' not in EM_CONFIG:
      exit_with_error('File ' + EM_CONFIG + ' passed to --em-config does not exist!')
    else:
      EM_CONFIG = EM_CONFIG.replace(';', '\n') + '\n'
elif 'EM_CONFIG' in os.environ:
  EM_CONFIG = os.environ['EM_CONFIG']
elif os.path.exists(embedded_config):
  EM_CONFIG = embedded_config
elif os.path.exists(emsdk_embedded_config):
  EM_CONFIG = emsdk_embedded_config
elif os.path.exists(user_home_config):
  EM_CONFIG = user_home_config
else:
  if root_is_writable():
    generate_config(embedded_config, first_time=True)
  else:
    generate_config(user_home_config, first_time=True)
  sys.exit(0)

if '\n' in EM_CONFIG:
  config_file = None
  logger.debug('config is specified inline without a file')
else:
  config_file = os.path.expanduser(EM_CONFIG)
  logger.debug('emscripten config is located in ' + config_file)
  if not os.path.exists(config_file):
    exit_with_error('emscripten config file not found: ' + config_file)

# Emscripten compiler spawns other processes, which can reimport shared.py, so
# make sure that those child processes get the same configuration file by
# setting it to the currently active environment.
os.environ['EM_CONFIG'] = EM_CONFIG

parse_config_file()
[ [ [ 258, 260 ], [ 7928, 7930 ], [ 7944, 7946 ], [ 8003, 8005 ], [ 8062, 8064 ], [ 8454, 8456 ], [ 8854, 8856 ], [ 8880, 8882 ], [ 8909, 8911 ], [ 8977, 8979 ], [ 9057, 9059 ], [ 9410, 9412 ], [ 9514, 9516 ], [ 9819, 9821 ], [ 1244, 1246 ], [ 1268, 1270 ], [ 2629, 2631 ], [ 2648, 2650 ], [ 2716, 2718 ], [ 2939, 2941 ], [ 3026, 3028 ], [ 3980, 3982 ], [ 5371, 5373 ], [ 5641, 5643 ], [ 5657, 5659 ] ], [ [ 268, 271 ], [ 8119, 8122 ], [ 8143, 8146 ], [ 8152, 8155 ], [ 8266, 8269 ], [ 8426, 8429 ], [ 9272, 9275 ], [ 6417, 6420 ] ], [ [ 279, 286 ], [ 369, 376 ] ], [ [ 306, 320 ], [ 7174, 7188 ], [ 7960, 7974 ], [ 2172, 2186 ], [ 5067, 5081 ] ], [ [ 322, 337 ], [ 8521, 8536 ], [ 8690, 8705 ], [ 9547, 9562 ], [ 3384, 3399 ], [ 4246, 4261 ], [ 4351, 4366 ], [ 4453, 4468 ] ], [ [ 339, 351 ], [ 638, 650 ], [ 1254, 1266 ], [ 5342, 5354 ], [ 6397, 6409 ] ], [ [ 353, 358 ], [ 5387, 5392 ], [ 5517, 5522 ], [ 5536, 5541 ] ], [ [ 360, 366 ], [ 9330, 9336 ], [ 9442, 9448 ], [ 2535, 2541 ], [ 2769, 2775 ] ], [ [ 620, 635 ] ], [ [ 651, 658 ], [ 1523, 1530 ], [ 1850, 1857 ], [ 1867, 1874 ], [ 4440, 4447 ] ], [ [ 666, 679 ] ], [ [ 687, 706 ], [ 1606, 1625 ], [ 1650, 1669 ], [ 1785, 1804 ] ], [ [ 714, 723 ], [ 1905, 1914 ], [ 1924, 1933 ] ], [ [ 731, 740 ] ], [ [ 748, 764 ], [ 2890, 2906 ] ], [ [ 772, 789 ], [ 2975, 2992 ] ], [ [ 797, 813 ] ], [ [ 821, 825 ], [ 2751, 2755 ] ], [ [ 833, 842 ], [ 1541, 1550 ] ], [ [ 850, 860 ], [ 1184, 1194 ], [ 1493, 1503 ] ], [ [ 868, 874 ] ], [ [ 882, 890 ] ], [ [ 898, 910 ], [ 2101, 2113 ] ], [ [ 916, 928 ] ], [ [ 936, 941 ], [ 2124, 2129 ] ], [ [ 949, 954 ], [ 2697, 2702 ] ], [ [ 962, 978 ] ], [ [ 992, 999 ], [ 1859, 1866 ], [ 1916, 1923 ], [ 1975, 1982 ], [ 2011, 2018 ], [ 2071, 2078 ] ], [ [ 1062, 1075 ], [ 1771, 1784 ], [ 1836, 1849 ], [ 1891, 1904 ], [ 1950, 1963 ] ], [ [ 1215, 1231 ], [ 9133, 9149 ], [ 2138, 2154 ] ], [ [ 1283, 1308 ], [ 4528, 4553 ] ], [ [ 3063, 3080 ], [ 9856, 9873 ] ], [ [ 4616, 4636 ], [ 2807, 2827 ], [ 4294, 4314 ], [ 4406, 4426 ], [ 4501, 4521 ] ], [ [ 4855, 4870 ], [ 9157, 9172 ], [ 9219, 9234 ] ], [ [ 7156, 7171 ], [ 8924, 8939 ], [ 8956, 8971 ], [ 9173, 9188 ] ], [ [ 7915, 7925 ], [ 8016, 8026 ] ], [ [ 7979, 8000 ], [ 8992, 9013 ], [ 9030, 9051 ] ], [ [ 8043, 8059 ], [ 9072, 9088 ], [ 9105, 9121 ], [ 9235, 9251 ] ], [ [ 8131, 8140 ], [ 8469, 8478 ], [ 8488, 8497 ], [ 8673, 8682 ], [ 8716, 8725 ], [ 8798, 8807 ], [ 9296, 9305 ], [ 9429, 9438 ], [ 9845, 9854 ], [ 3308, 3317 ], [ 3448, 3457 ] ], [ [ 8225, 8229 ], [ 8287, 8291 ], [ 8399, 8403 ] ], [ [ 8240, 8247 ], [ 8324, 8331 ], [ 8437, 8444 ] ], [ [ 8259, 8262 ], [ 8296, 8299 ], [ 8336, 8339 ], [ 8350, 8353 ] ], [ [ 8378, 8382 ], [ 8287, 8291 ], [ 8399, 8403 ] ], [ [ 8411, 8415 ], [ 8287, 8291 ], [ 8399, 8403 ] ], [ [ 8786, 8795 ], [ 9296, 9305 ], [ 9429, 9438 ], [ 9845, 9854 ], [ 3308, 3317 ], [ 3448, 3457 ] ], [ [ 8868, 8877 ], [ 9296, 9305 ], [ 9429, 9438 ], [ 9845, 9854 ], [ 3308, 3317 ], [ 3448, 3457 ] ], [ [ 8944, 8953 ], [ 9296, 9305 ], [ 9429, 9438 ], [ 9845, 9854 ], [ 3308, 3317 ], [ 3448, 3457 ] ], [ [ 9018, 9027 ], [ 9296, 9305 ], [ 9429, 9438 ], [ 9845, 9854 ], [ 3308, 3317 ], [ 3448, 3457 ] ], [ [ 9093, 9102 ], [ 9296, 9305 ], [ 9429, 9438 ], [ 9845, 9854 ], [ 3308, 3317 ], [ 3448, 3457 ] ], [ [ 9309, 9320 ], [ 3291, 3302 ], [ 3263, 3274 ], [ 3459, 3470 ], [ 4785, 4796 ], [ 4837, 4848 ] ], [ [ 9396, 9407 ], [ 9492, 9503 ], [ 9529, 9540 ], [ 9602, 9613 ], [ 3291, 3302 ], [ 3263, 3274 ], [ 3459, 3470 ], [ 4785, 4796 ], [ 4837, 4848 ] ], [ [ 1138, 1148 ] ], [ [ 1509, 1519 ], [ 1568, 
1578 ], [ 2041, 2051 ] ], [ [ 1556, 1565 ], [ 1964, 1973 ], [ 1983, 1992 ] ], [ [ 1749, 1768 ] ], [ [ 1826, 1833 ] ], [ [ 1879, 1888 ] ], [ [ 1938, 1947 ] ], [ [ 1997, 2007 ] ], [ [ 2055, 2067 ] ], [ [ 2164, 2169 ], [ 2729, 2734 ] ], [ [ 2621, 2626 ], [ 2729, 2734 ] ], [ [ 2708, 2713 ] ], [ [ 2854, 2858 ] ], [ [ 2920, 2936 ] ], [ [ 3006, 3023 ] ] ]
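parse_config_file above reads the config as executable Python and lets
EM_<KEY> environment variables take precedence over the file. A self-contained
sketch of that same pattern with toy values (not emscripten's actual code
paths):

# Standalone sketch: the config file is plain Python, exec'd into a dict,
# and any EM_<KEY> environment variable wins over the file. The key names and
# config_text here are illustrative only.
import os

CONFIG_KEYS = ('NODE_JS', 'LLVM_ROOT')
config_text = "NODE_JS = '/usr/bin/node'\nLLVM_ROOT = '/usr/lib/llvm'\n"

config = {}
exec(config_text, config)

settings = {}
for key in CONFIG_KEYS:
    env_value = os.environ.get('EM_' + key)  # e.g. EM_NODE_JS overrides the file
    if env_value is not None:
        settings[key] = env_value
    elif key in config:
        settings[key] = config[key]

print(settings)  # with no EM_* vars set: both values come from config_text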
from django.contrib.auth.models import AbstractUser
from django.db.models import CharField
from django.urls import reverse
from django.utils.translation import gettext_lazy as _


class User(AbstractUser):
    """Default user for Redirink."""

    #: First and last name do not cover name patterns around the globe
    name = CharField(_("Name of User"), blank=True, max_length=255)
    first_name = None  # type: ignore
    last_name = None  # type: ignore

    def get_absolute_url(self):
        """Get url for user's detail view.

        Returns:
            str: URL for user detail.

        """
        return reverse("users:detail", kwargs={"username": self.username})
[ [ [ 39, 51 ], [ 191, 203 ] ], [ [ 81, 90 ], [ 326, 335 ] ], [ [ 115, 122 ], [ 618, 625 ] ], [ [ 160, 177 ], [ 336, 337 ] ], [ [ 186, 190 ] ] ]
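The reverse("users:detail", ...) call above assumes a matching URL pattern in
a "users" app namespace; the actual Redirink urlconf is not part of this
record. A hypothetical route it could resolve against:

# Hypothetical urls.py for the reverse() call above; the view is a stand-in
# built from Django's generic DetailView, not Redirink's real view.
from django.urls import path
from django.views.generic import DetailView

from .models import User

app_name = "users"
urlpatterns = [
    path(
        "<str:username>/",
        DetailView.as_view(model=User, slug_field="username", slug_url_kwarg="username"),
        name="detail",
    ),
]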
import numpy as np
import random
from dataLoader.batch import batcher
from transformers import BertTokenizerFast, ElectraTokenizerFast
from configs.WNUT_configs import *
from utils.ml_utils import *
from utils.data_utils import *
from utils.metric_utils import *
import argparse
from tqdm import tqdm
from pathlib import Path
import os
import torch as T
import torch.nn as nn
from models.BigTransformerTagger import BigTransformerTagger
from models.CSETagger import CSETagger
from models.layers.BigTransformers.BERT import BertModel
from models.layers.BigTransformers.ELECTRA import ElectraModel
from models.cse_generator import CSEGenerator
import json
import sys
import re

""" FUTURE STUFF TO KEEP IN MIND: """
""" TRY SAVE BY LOSS IN THE FUTURE """
""" IN FUTURE CHECK IF KEEPING TRUE CASES HARMS OR HELPS BERT """
""" CHECK WORD 2 VEC OOV STUFF """
""" CHECK CLASS WEIGHING """
""" CHECK FOR QA CHECK WITHOUT NEGATIVE EXAMPLES """
""" CHECK FOR QA IN FULL MODE """
""" IMPORT MODEL HERE """
""" FIX LSTM AND TRY ORDERED MEMORY AND GCDT AND STUFFS """

device = T.device('cuda' if T.cuda.is_available() else 'cpu')

parser = argparse.ArgumentParser(description='Model Name and stuff')
parser.add_argument('--model', type=str, default="ELECTRA_extra_BiLSTM_CRF",
                    choices=["BERT", "BERT_CRF", "BERT_BiLSTM_CRF",
                             "BERT_w2v_BiLSTM_CRF", "BERT_extra_BiLSTM_CRF",
                             "ELECTRA", "ELECTRA_CRF", "ELECTRA_fine_tune_CRF",
                             "ELECTRA_BiLSTM_CRF", "ELECTRA_w2v_BiLSTM_CRF",
                             "ELECTRA_extra_BiLSTM_CRF", "ELECTRA_extra_CRF",
                             "ELECTRA_extra", "ELECTRA_w2v_extra_BiLSTM_CRF",
                             "ELECTRA_extra_BiLSTM_DSC",
                             "CSE", "CSE_CRF", "CSE_BiLSTM_CRF",
                             "CSE_w2v_BiLSTM_CRF", "CSE_w2v_extra_BiLSTM_CRF",
                             "CSE_extra_BiLSTM_CRF"])
parser.add_argument('--dataset', type=str, default="WNUT_2017")
parser.add_argument('--display_step', type=int, default=30)
parser.add_argument('--lr', type=float, default=-1)
parser.add_argument('--fine_tune_lr', type=float, default=-1)
parser.add_argument('--times', type=int, default=1)
parser.add_argument('--mixed_case_training', type=str, default="no", choices=["yes", "no"])
flags = parser.parse_args()

SEED_base_value = 101

""" CREATE MAPPINGS HERE """
if re.match("^BERT|^ELECTRA", flags.model):
    model_dict = {flags.model: BigTransformerTagger}
elif re.match("^CSE", flags.model):
    model_dict = {flags.model: CSETagger}
else:
    raise ValueError("Invalid model")

config_dict = {flags.model: eval("{0}_config".format(flags.model))}

"""
model_dict = {'BERT': BigTransformerTagger,
              'ELECTRA': BigTransformerTagger,
              'ELECTRA_CRF': BigTransformerTagger,
              "ELECTRA_BiLSTM_CRF": BigTransformerTagger,
              'ELECTRA_w2v_BiLSTM_CRF': BigTransformerTagger,
              "ELECTRA_w2v_extra_BiLSTM_CRF": BigTransformerTagger,
              "ELECTRA_extra_BiLSTM_CRF": BigTransformerTagger,
              "ELECTRA_extra": BigTransformerTagger,
              "ELECTRA_extra_CRF": BigTransformerTagger}

config_dict = {'BERT': BERT_config,
               'ELECTRA': ELECTRA_config,
               'ELECTRA_CRF': ELECTRA_CRF_config,
               "ELECTRA_BiLSTM_CRF": ELECTRA_BiLSTM_CRF_config,
               'ELECTRA_w2v_BiLSTM_CRF': ELECTRA_w2v_BiLSTM_CRF_config,
               'ELECTRA_w2v_extra_BiLSTM_CRF': ELECTRA_w2v_extra_BiLSTM_CRF_config,
               "ELECTRA_extra_BiLSTM_CRF": ELECTRA_extra_BiLSTM_CRF_config,
               "ELECTRA_extra": ELECTRA_extra_config,
               "ELECTRA_extra_CRF": ELECTRA_extra_CRF_config}
"""

config = config_dict[flags.model]
config = config()

if flags.lr >= 0:
    config.lr = flags.lr
if flags.fine_tune_lr >= 0:
    config.fine_tune_lr = flags.fine_tune_lr

display_step = flags.display_step

print('Dataset: {}'.format(flags.dataset))
print("Model Name: {}".format(flags.model))
print("Total Runs: {}".format(flags.times))
print("Learning Rate: {}".format(config.lr))
print("Fine-Tune Learning Rate: {}".format(config.fine_tune_lr))
print("Mixed-Case Training: {}".format(flags.mixed_case_training))
print("Display Step: {}".format(flags.display_step))
print("SEED base value: {}".format(SEED_base_value))

common_data_path = "processed_data/{}/vocab_and_embd.pkl".format(flags.dataset)
if flags.mixed_case_training.lower() == "no":
    train_data_path = "processed_data/{}/train_data.json".format(flags.dataset)
else:
    train_data_path = "processed_data/{}/train_mixed_data.json".format(flags.dataset)
dev_data_path = "processed_data/{}/dev_data.json".format(flags.dataset)
test_data_path = "processed_data/{}/test_data.json".format(flags.dataset)
checkpoint_directory = "saved_params/{}/".format(flags.dataset)
Path(checkpoint_directory).mkdir(parents=True, exist_ok=True)
Path("output/").mkdir(parents=True, exist_ok=True)
log_directory = os.path.join("logs", "{}".format(flags.dataset))
Path(log_directory).mkdir(parents=True, exist_ok=True)

keys = ['labels2idx', 'segment_labels2idx', 'w2v_vocab2idx', 'ft_vocab2idx',
        'ipa2idx', 'pos2idx', 'w2v_embeddings', 'ft_embeddings']

labels2idx, segment_labels2idx,\
    w2v_vocab2idx, ft_vocab2idx, ipa2idx, pos2idx, \
    w2v_embeddings, ft_embeddings = load_data(common_data_path, 'rb', 'pickle', keys=keys)

idx2labels = {v: k for k, v in labels2idx.items()}

"""
DETERMINES WHAT TO LOAD AND IN WHICH ORDER.
NEEDS TO MAKE CHANGES IF YOU WANT TO LOAD SOMETHING ELSE
"""
keys = ["sequence", "w2v_feats", "fasttext_feats", "pos_tags",
        "ipa_feats", "phono_feats", "labels", "segment_labels"]

"""
sequence = variable length natural language sequences

w2v_feats = variable length sequences in int format where int id correspond to
a word2vec vector (mapped to a word in w2v_vocab2idx)

fasttext_feats = same as above but for fasttext

pos_tags = same as above but int id corresponds to the pos tag of the
corresponding word. the id is associated to pos2idx (mapping between id and
pos tags). Need to create random embeddings for pos tags.

ipa_feats = character level features will be padded and batched to
batch_size x sequence_len x word_len. int format where id correspond to a
specific ipa alphabet in ipa2idx mapping. Need to create a randomly
initialized embedding.

phono_feats = same as above but each character is represented as a float
vector of 22 dimensions instead (can be directly treated as char-level
embeddings)

labels = variable length sequence labels for the corresponding sequences.
int format. id correspond to a particular label (mapping in labels2idx)

segment_label = we can ignore it for now. Can be later used for multi-tasking
for entity-segmentation task (where we do not predict the type of the entity
just the boundaries)
"""

""" For more about load_data see: utils/data_utils.py """

train_sample_tuples = load_data(train_data_path, 'r', 'json', keys=keys)
val_sample_tuples = load_data(dev_data_path, 'r', 'json', keys=keys)
test_sample_tuples = load_data(test_data_path, 'r', 'json', keys=keys)

MAX_CHAR_LEN = len(train_sample_tuples[4][0][0])
IPA_PAD = [0]*MAX_CHAR_LEN
PHONO_PAD = [0]*config.phono_feats_dim
PHONO_PAD = [PHONO_PAD]*MAX_CHAR_LEN

if "bert" in flags.model.lower() or "electra" in flags.model.lower():
    if "bert" in flags.model.lower():
        BigModel = BertModel.from_pretrained(config.embedding_path, output_hidden_states=True, output_attentions=False)
        tokenizer = BertTokenizerFast.from_pretrained(config.embedding_path, output_hidden_states=True, output_attentions=False)
    elif "electra" in flags.model.lower():
        BigModel = ElectraModel.from_pretrained(config.embedding_path, output_hidden_states=True, output_attentions=False)
        tokenizer = ElectraTokenizerFast.from_pretrained(config.embedding_path, output_hidden_states=True, output_attentions=False)
    pad_types = [None, w2v_vocab2idx['<pad>'], ft_vocab2idx['<pad>'], pos2idx['G'],
                 IPA_PAD, PHONO_PAD, labels2idx["O"], segment_labels2idx["O"]]
else:
    cse_gen = CSEGenerator(config.use_forward, config.use_backward)
    tokenizer = None
    """
    Probably need to do nothing for CSE here
    text sequences will not be padded (can be padded later after embedding)
    will need to change things if using precomputed embeddings
    """
    pad_types = [None, w2v_vocab2idx['<pad>'], ft_vocab2idx['<pad>'], pos2idx['G'],
                 IPA_PAD, PHONO_PAD, labels2idx["O"], segment_labels2idx["O"]]


def run(time, display_params=False):
    global model_dict
    global flags
    global config
    global device
    global checkpoint_directory, log_directory
    global BigModel
    global w2v_embeddings, ft_embeddings
    global ft_vocab2idx, w2v_vocab2idx, pos2idx, ipa2idx, labels2idx

    mixed_string = "" if flags.mixed_case_training.lower() == "no" else "mixed_case_"

    checkpoint_path = os.path.join(
        checkpoint_directory, "{}_{}run{}.pt".format(flags.model, mixed_string, time))
    log_path = os.path.join(log_directory, "{}_{}run{}.json".format(flags.model, mixed_string, time))

    # print(checkpoint_path)
    # print("Model: {}".format(config.model_name))

    NamedEntitiyRecognizer = model_dict[flags.model]

    """
    May need to make changes here and may be some conditional statements
    """
    if 'bert' in flags.model.lower() or 'electra' in flags.model.lower():
        if config.use_w2v:
            classic_embeddings = w2v_embeddings
            word_pad_id = w2v_vocab2idx['<pad>']
        elif config.use_fasttext:
            classic_embeddings = ft_embeddings
            word_pad_id = ft_vocab2idx['<pad>']
        else:
            classic_embeddings = None
            word_pad_id = None
        if config.use_pos_tags:
            pos_vocab_size = len(pos2idx)
        else:
            pos_vocab_size = None
        if config.use_char_feats:
            ipa_vocab_size = len(ipa2idx)
        else:
            ipa_vocab_size = None
        model = NamedEntitiyRecognizer(BigTransformer=BigModel,
                                       classes_num=len(labels2idx),
                                       negative_index=labels2idx['O'],
                                       config=config,
                                       device=device,
                                       classic_embeddings=classic_embeddings,
                                       word_pad_id=word_pad_id,
                                       pos_vocab_size=pos_vocab_size,
                                       ipa_vocab_size=ipa_vocab_size)
    else:
        """
        Put CSE code here
        """
        if config.use_w2v:
            classic_embeddings = w2v_embeddings
            word_pad_id = w2v_vocab2idx['<pad>']
        elif config.use_fasttext:
            classic_embeddings = ft_embeddings
            word_pad_id = ft_vocab2idx['<pad>']
        else:
            classic_embeddings = None
            word_pad_id = None
        if config.use_pos_tags:
            pos_vocab_size = len(pos2idx)
        else:
            pos_vocab_size = None
        if config.use_char_feats:
            ipa_vocab_size = len(ipa2idx)
        else:
            ipa_vocab_size = None
        model = NamedEntitiyRecognizer(cse_gen,
                                       classes_num=len(labels2idx),
                                       config=config,
                                       device=device,
                                       classic_embeddings=classic_embeddings,
                                       word_pad_id=word_pad_id,
                                       ipa_vocab_size=ipa_vocab_size,
                                       pos_vocab_size=pos_vocab_size)

    model = model.to(device)

    parameters = [p for p in model.parameters() if p.requires_grad]
    parameter_count = param_count(parameters)
    print("\n\nParameter Count: {}\n\n".format(parameter_count))

    if display_params:
        param_display_fn(model)

    print("RUN: {}\n\n".format(time))
    run_epochs(model, config, checkpoint_path, log_path)


def run_epochs(model, config, checkpoint_path, log_path):
    """
    raise ValueError(
        "Have you remembered to save the whole epoch log? (both dump output and in a dict)")
    """
    global train_sample_tuples, val_sample_tuples, test_sample_tuples

    train_actual_iters = count_actual_iterations(train_sample_tuples[0], config)
    val_actual_iters = count_actual_iterations(val_sample_tuples[0], config)
    test_actual_iters = count_actual_iterations(test_sample_tuples[0], config)

    train_effective_iters = count_effective_iterations(train_sample_tuples[0], config)
    val_effective_iters = count_effective_iterations(val_sample_tuples[0], config)
    test_effective_iters = count_effective_iterations(test_sample_tuples[0], config)

    # print(train_iters)

    optimizer = load_LRangerMod(model, config=config)  # misleading just running AdamW now

    print('Loading pre-trained weights for the model...')
    checkpoint = T.load(checkpoint_path)
    model.load_state_dict(checkpoint['model_state_dict'])
    print('\nRESTORATION COMPLETE\n')

    optimizer.zero_grad()

    # with tqdm(total=config.epochs-past_epoch, desc='Epoch', position=0) as pbar:
    print("TESTING\n")
    test_loss, test_F1 = run_batches(test_sample_tuples, epoch=0, model=model,
                                     optimizer=optimizer, config=config,
                                     generator_len=test_actual_iters,
                                     train=False, desc='Test Batch')
    # print(test_F1)


def run_batches(sample_tuples, epoch, model, optimizer, config,
                generator_len, train=True, scheduler=None, desc=None):
    global display_step
    global pad_types
    global tokenizer
    global idx2labels
    global flags

    accu_step = config.total_batch_size//config.train_batch_size

    if desc is None:
        desc = 'Batch'

    losses = []
    F1s = []
    total_tp = 0
    total_pred_len = 0
    total_gold_len = 0

    # copy_tuples = copy.deepcopy(sample_tuples)

    f = open("output/out_{}.txt".format(flags.model), "w")
    f.write('')
    f.close()

    with tqdm(total=generator_len, desc=desc, position=0) as pbar:
        i = 0
        for batch, batch_masks in batcher(sample_tuples, pad_types, config.train_batch_size, sort_by_idx=1):
            # pbar = tqdm(total=generator_len, desc='Batch', position=0)
            batch_texts = batch[0]
            batch_w2v_idx = batch[1]
            batch_ft_idx = batch[2]
            batch_pos_idx = batch[3]
            batch_ipa_idx = batch[4]
            batch_phono = batch[5]
            batch_labels = batch[6]
            batch_segment_labels = batch[7]
            batch_mask = batch_masks[1]

            """
            IMPLEMENT INSIDE utils/ml_utils.py
            """
            predictions, loss = predict_NER(model=model, tokenizer=tokenizer,
                                            batch_texts=batch_texts,
                                            batch_w2v_idx=batch_w2v_idx,
                                            batch_ft_idx=batch_ft_idx,
                                            batch_pos_idx=batch_pos_idx,
                                            batch_ipa_idx=batch_ipa_idx,
                                            batch_phono=batch_phono,
                                            batch_labels=batch_labels,
                                            batch_segment_labels=batch_segment_labels,
                                            batch_mask=batch_mask,
                                            device=device, config=config,
                                            train=train)

            losses.append(loss.item())

            if train:
                loss = loss/accu_step
                loss.backward()
                if (i+1) % accu_step == 0:
                    # Update accumulated gradients
                    T.nn.utils.clip_grad_norm_(model.parameters(), config.max_grad_norm)
                    optimizer.step()
                    optimizer.zero_grad()

                tp, pred_len, gold_len = eval_stats(predictions, batch_labels, batch_mask, idx2labels)
                prec, rec, F1 = compute_F1(tp, pred_len, gold_len)
                F1s.append(F1)

                if i % display_step == 0:
                    pbar.write("Model: {}, Epoch: {:3d}, Iter: {:5d}, ".format(config.model_name, epoch, i)
                               + "Loss: {:.3f}, F1: {:.3f}".format(loss, F1))
            else:
                f = open("output/out_{}.txt".format(flags.model), "a")
                for prediction_sample, gold_sample, mask in zip(predictions, batch_labels, batch_mask):
                    true_seq_len = sum(mask)
                    prediction_sample = prediction_sample[0:true_seq_len]
                    gold_sample = gold_sample[0:true_seq_len]
                    for pred, gold in zip(prediction_sample, gold_sample):
                        f.write("test NNP "+str(idx2labels[gold])+" "+str(idx2labels[pred])+"\n")
                f.close()

                tp, pred_len, gold_len = eval_stats(predictions, batch_labels, batch_mask, idx2labels)
                prec, rec, F1 = compute_F1(tp, pred_len, gold_len)
                total_tp += tp
                total_pred_len += pred_len
                total_gold_len += gold_len

                if i % display_step == 0:
                    pbar.write("Model: {}, Epoch: {:3d}, Iter: {:5d}, ".format(config.model_name, epoch, i)
                               + "Loss: {:.3f}".format(loss))

            i += 1
            pbar.update(1)

    # print("generator_len", generator_len)
    # print("i", i)

    print("\n\n")

    if train:
        F1 = np.mean(F1s)
    else:
        prec, rec, F1 = compute_F1(total_tp, total_pred_len, total_gold_len)

    # del copy_tuples

    return np.mean(losses), F1


if __name__ == '__main__':
    time = 0
    while time < flags.times:
        if time == 0:
            """
            time_str = input("\nStarting time (0,1,2.....times): ")
            try:
                time = int(time_str)
            except:
                time = 0
            """
            time = 0

        SEED = SEED_base_value+time
        T.manual_seed(SEED)
        random.seed(SEED)
        T.backends.cudnn.deterministic = True
        T.backends.cudnn.benchmark = False
        np.random.seed(SEED)

        run(time, display_params=True)

        time += 1
[ [ [ 7, 18 ], [ 20163, 20165 ], [ 19505, 19507 ], [ 19640, 19642 ] ], [ [ 26, 32 ], [ 20048, 20054 ] ], [ [ 62, 69 ], [ 15499, 15506 ] ], [ [ 95, 112 ], [ 8063, 8080 ] ], [ [ 114, 134 ], [ 8564, 8584 ] ], [ [ 168, 169 ] ], [ [ 197, 198 ] ], [ [ 228, 229 ] ], [ [ 261, 262 ], [ 5768, 5777 ], [ 7376, 7385 ], [ 7447, 7456 ], [ 7517, 7526 ], [ 12802, 12813 ], [ 12923, 12939 ], [ 13333, 13356 ], [ 13412, 13435 ], [ 13490, 13513 ], [ 13574, 13600 ], [ 13659, 13685 ], [ 13743, 13769 ], [ 13844, 13859 ], [ 16226, 16237 ], [ 17564, 17574 ], [ 17815, 17825 ], [ 18728, 18738 ], [ 18979, 18989 ], [ 19552, 19562 ] ], [ [ 270, 278 ], [ 1130, 1138 ] ], [ [ 296, 300 ], [ 15391, 15395 ] ], [ [ 321, 325 ], [ 5259, 5263 ], [ 5322, 5326 ], [ 5439, 5443 ] ], [ [ 333, 335 ], [ 5390, 5392 ], [ 9815, 9817 ], [ 9932, 9934 ] ], [ [ 343, 353 ], [ 1067, 1068 ], [ 1086, 1087 ], [ 20020, 20021 ], [ 20074, 20075 ], [ 20120, 20121 ], [ 14028, 14029 ], [ 17374, 17375 ] ], [ [ 361, 375 ] ], [ [ 416, 436 ], [ 2859, 2879 ] ], [ [ 466, 475 ], [ 2948, 2957 ] ], [ [ 523, 532 ], [ 7851, 7860 ] ], [ [ 583, 595 ], [ 8343, 8355 ] ], [ [ 629, 641 ], [ 8975, 8987 ] ], [ [ 649, 653 ] ], [ [ 661, 664 ] ], [ [ 672, 674 ], [ 2787, 2789 ], [ 2886, 2888 ] ], [ [ 1058, 1064 ], [ 11237, 11243 ], [ 12391, 12397 ], [ 12703, 12709 ], [ 17020, 17026 ] ], [ [ 1121, 1127 ], [ 1190, 1196 ], [ 2300, 2306 ], [ 2364, 2370 ], [ 2424, 2430 ], [ 2476, 2482 ], [ 2538, 2544 ], [ 2590, 2596 ], [ 2711, 2717 ] ], [ [ 2703, 2708 ], [ 2814, 2819 ], [ 2846, 2851 ], [ 2903, 2908 ], [ 2935, 2940 ], [ 3020, 3025 ], [ 3058, 3063 ], [ 4149, 4154 ], [ 4184, 4189 ], [ 4215, 4220 ], [ 4228, 4233 ], [ 4279, 4284 ], [ 4314, 4319 ], [ 4361, 4366 ], [ 4407, 4412 ], [ 4451, 4456 ], [ 4614, 4619 ], [ 4674, 4679 ], [ 4815, 4820 ], [ 4833, 4838 ], [ 4941, 4946 ], [ 5033, 5038 ], [ 5105, 5110 ], [ 5179, 5184 ], [ 5244, 5249 ], [ 5423, 5428 ], [ 7737, 7742 ], [ 7773, 7778 ], [ 7811, 7816 ], [ 8302, 8307 ], [ 19719, 19724 ], [ 9731, 9736 ], [ 9882, 9887 ], [ 10013, 10018 ], [ 10170, 10175 ], [ 10291, 10296 ], [ 10327, 10332 ], [ 15332, 15337 ], [ 18184, 18189 ] ], [ [ 2731, 2746 ], [ 4730, 4745 ], [ 19991, 20006 ] ], [ [ 2832, 2842 ], [ 10159, 10169 ] ], [ [ 2921, 2931 ], [ 10159, 10169 ] ], [ [ 3005, 3016 ], [ 4137, 4148 ] ], [ [ 4128, 4134 ], [ 4171, 4177 ] ], [ [ 4162, 4168 ], [ 4203, 4209 ], [ 4257, 4263 ], [ 4498, 4504 ], [ 4553, 4559 ], [ 7663, 7669 ], [ 7877, 7883 ], [ 8097, 8103 ], [ 8372, 8378 ], [ 8601, 8607 ], [ 8988, 8994 ], [ 9008, 9014 ], [ 10360, 10366 ], [ 10486, 10492 ], [ 10697, 10703 ], [ 10820, 10826 ], [ 11183, 11189 ], [ 11601, 11607 ], [ 11727, 11733 ], [ 11938, 11944 ], [ 12061, 12067 ], [ 12337, 12343 ], [ 13009, 13015 ] ], [ [ 4299, 4311 ], [ 17906, 17918 ], [ 19156, 19168 ] ], [ [ 4750, 4766 ], [ 5778, 5794 ] ], [ [ 4880, 4895 ], [ 7386, 7401 ] ], [ [ 4966, 4981 ], [ 7386, 7401 ] ], [ [ 5048, 5061 ], [ 7457, 7470 ] ], [ [ 5120, 5134 ], [ 7527, 7541 ] ], [ [ 5195, 5215 ], [ 5264, 5284 ], [ 9837, 9857 ] ], [ [ 5374, 5387 ], [ 5444, 5457 ], [ 9945, 9958 ] ], [ [ 5495, 5499 ], [ 5817, 5821 ] ], [ [ 5646, 5656 ], [ 5856, 5866 ], [ 8912, 8922 ], [ 9371, 9381 ], [ 11053, 11063 ], [ 11120, 11130 ], [ 12278, 12288 ] ], [ [ 5658, 5676 ], [ 8929, 8947 ], [ 9388, 9406 ] ], [ [ 5683, 5696 ], [ 8814, 8827 ], [ 9273, 9286 ], [ 10450, 10463 ], [ 11691, 11704 ] ], [ [ 5698, 5710 ], [ 8838, 8850 ], [ 9297, 9309 ], [ 10580, 10592 ], [ 11821, 11833 ] ], [ [ 5712, 5719 ], [ 10876, 10883 ], [ 12117, 12124 ] ], [ [ 5721, 5728 ], [ 8878, 8885 ], [ 9337, 9344 ], [ 10751, 10758 
], [ 11992, 11999 ] ], [ [ 5736, 5750 ], [ 10409, 10423 ], [ 11650, 11664 ] ], [ [ 5752, 5765 ], [ 10540, 10553 ], [ 11781, 11794 ] ], [ [ 5825, 5835 ], [ 17770, 17780 ], [ 18611, 18621 ], [ 18637, 18647 ], [ 18934, 18944 ] ], [ [ 5986, 5990 ], [ 7421, 7425 ], [ 7490, 7494 ], [ 7561, 7565 ] ], [ [ 7354, 7373 ], [ 7587, 7606 ], [ 13357, 13376 ], [ 13601, 13620 ] ], [ [ 7427, 7444 ], [ 13436, 13453 ], [ 13686, 13703 ] ], [ [ 7496, 7514 ], [ 13514, 13532 ], [ 13770, 13788 ], [ 14321, 14339 ] ], [ [ 7568, 7580 ], [ 7632, 7644 ], [ 7710, 7722 ] ], [ [ 7618, 7625 ], [ 8892, 8899 ], [ 9351, 9358 ] ], [ [ 7647, 7656 ], [ 7699, 7708 ] ], [ [ 7686, 7695 ], [ 8901, 8910 ], [ 9360, 9369 ] ], [ [ 7840, 7848 ], [ 10988, 10996 ] ], [ [ 8051, 8060 ], [ 16305, 16314 ] ], [ [ 8332, 8340 ], [ 10988, 10996 ] ], [ [ 8552, 8561 ], [ 16305, 16314 ] ], [ [ 8795, 8804 ], [ 15564, 15573 ] ], [ [ 8965, 8972 ], [ 12214, 12221 ] ], [ [ 9033, 9042 ], [ 16305, 16314 ] ], [ [ 9254, 9263 ], [ 15564, 15573 ] ], [ [ 9419, 9422 ], [ 20193, 20196 ] ], [ [ 13050, 13060 ], [ 12991, 13001 ] ], [ [ 14751, 14762 ], [ 14309, 14320 ] ], [ [ 19693, 19697 ], [ 19712, 19716 ], [ 19744, 19748 ], [ 20007, 20011 ], [ 20197, 20201 ], [ 20232, 20236 ] ], [ [ 19966, 19970 ], [ 20007, 20011 ], [ 20197, 20201 ], [ 20232, 20236 ] ], [ [ 19984, 19988 ], [ 20034, 20038 ], [ 20060, 20064 ], [ 20178, 20182 ] ] ]
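run_batches above trains with gradient accumulation: each loss is scaled by
accu_step (= total_batch_size // train_batch_size) and the optimizer only
steps every accu_step mini-batches. An isolated sketch of that pattern with a
toy model, independent of the NER pipeline:

# Isolated sketch of the gradient-accumulation loop used in run_batches;
# the model, data, and hyperparameters here are toys, not the script's.
import torch as T

model = T.nn.Linear(8, 2)
optimizer = T.optim.AdamW(model.parameters(), lr=1e-3)
accu_step = 4       # stands in for total_batch_size // train_batch_size
max_grad_norm = 1.0

optimizer.zero_grad()
for i in range(16):
    x, y = T.randn(4, 8), T.randint(0, 2, (4,))
    loss = T.nn.functional.cross_entropy(model(x), y)
    (loss / accu_step).backward()   # accumulate scaled gradients
    if (i + 1) % accu_step == 0:
        # Update accumulated gradients
        T.nn.utils.clip_grad_norm_(model.parameters(), max_grad_norm)
        optimizer.step()
        optimizer.zero_grad()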
# Create a function named more_than_n that has three parameters named lst, item, and n.
# The function should return True if item appears in the list more than n times.
# The function should return False otherwise.
def more_than_n(lst, item, n):
    if lst.count(item) > n:
        return True
    else:
        return False
[ [ [ 218, 229 ] ] ]
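Example calls matching the spec in the comment:

print(more_than_n([2, 4, 6, 2, 3, 2, 1, 2], 2, 3))  # True: 2 appears 4 times
print(more_than_n([5, 5, 5], 5, 3))  # False: exactly 3 is not more than 3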
import FWCore.ParameterSet.Config as cms

from PhysicsTools.PatAlgos.tools.helpers import getPatAlgosToolsTask, addToProcessAndTask


def applySubstructure( process, postfix="" ) :

    task = getPatAlgosToolsTask(process)

    from PhysicsTools.PatAlgos.tools.jetTools import addJetCollection
    from PhysicsTools.PatAlgos.producersLayer1.jetProducer_cfi import _patJets as patJetsDefault

    # Configure the RECO jets
    from RecoJets.JetProducers.ak4PFJets_cfi import ak4PFJetsPuppi
    from RecoJets.JetProducers.ak8PFJets_cfi import ak8PFJetsPuppi, ak8PFJetsPuppiSoftDrop, ak8PFJetsPuppiConstituents, ak8PFJetsCHSConstituents
    from RecoJets.JetProducers.ak8GenJets_cfi import ak8GenJets, ak8GenJetsSoftDrop, ak8GenJetsConstituents
    addToProcessAndTask('ak4PFJetsPuppi'+postfix, ak4PFJetsPuppi.clone(), process, task)
    addToProcessAndTask('ak8PFJetsPuppi'+postfix, ak8PFJetsPuppi.clone(), process, task)
    addToProcessAndTask('ak8PFJetsPuppiConstituents', ak8PFJetsPuppiConstituents.clone(cut = cms.string('pt > 170.0 && abs(rapidity()) < 2.4')), process, task)
    addToProcessAndTask('ak8PFJetsCHSConstituents', ak8PFJetsCHSConstituents.clone(), process, task)
    addToProcessAndTask('ak8PFJetsPuppiSoftDrop'+postfix, ak8PFJetsPuppiSoftDrop.clone(src = cms.InputTag('ak8PFJetsPuppiConstituents', 'constituents')), process, task)
    addToProcessAndTask('ak8GenJetsNoNuConstituents'+postfix, ak8GenJetsConstituents.clone(src='ak8GenJetsNoNu'), process, task)
    addToProcessAndTask('ak8GenJetsNoNuSoftDrop'+postfix, ak8GenJetsSoftDrop.clone(src=cms.InputTag('ak8GenJetsNoNuConstituents'+postfix, 'constituents')), process, task)
    addToProcessAndTask('slimmedGenJetsAK8SoftDropSubJets'+postfix,
                        cms.EDProducer("PATGenJetSlimmer",
                                       src = cms.InputTag("ak8GenJetsNoNuSoftDrop"+postfix, "SubJets"),
                                       packedGenParticles = cms.InputTag("packedGenParticles"),
                                       cut = cms.string(""),
                                       cutLoose = cms.string(""),
                                       nLoose = cms.uint32(0),
                                       clearDaughters = cms.bool(False),  # False means rekeying
                                       dropSpecific = cms.bool(True),  # Save space
                                       ),
                        process, task )

    # add AK8 CHS
    addJetCollection(process, postfix=postfix, labelName = 'AK8',
                     jetSource = cms.InputTag('ak8PFJetsCHS'+postfix),
                     algo= 'AK', rParam = 0.8,
                     btagDiscriminators = ['None'],
                     jetCorrections = ('AK8PFchs', cms.vstring(['L1FastJet', 'L2Relative', 'L3Absolute']), 'None'),
                     genJetCollection = cms.InputTag('slimmedGenJetsAK8')
                     )
    getattr(process,"patJetsAK8"+postfix).userData.userFloats.src = []  # start with empty list of user floats
    getattr(process,"selectedPatJetsAK8").cut = cms.string("pt > 170")

    ## add AK8 groomed masses with CHS
    from RecoJets.Configuration.RecoPFJets_cff import ak8PFJetsCHSPruned, ak8PFJetsCHSSoftDrop
    addToProcessAndTask('ak8PFJetsCHSPruned'+postfix, ak8PFJetsCHSPruned.clone(), process, task)
    addToProcessAndTask('ak8PFJetsCHSSoftDrop'+postfix, ak8PFJetsCHSSoftDrop.clone(), process, task)
    from RecoJets.JetProducers.ak8PFJetsCHS_groomingValueMaps_cfi import ak8PFJetsCHSPrunedMass, ak8PFJetsCHSTrimmedMass, ak8PFJetsCHSFilteredMass, ak8PFJetsCHSSoftDropMass
    addToProcessAndTask('ak8PFJetsCHSPrunedMass'+postfix, ak8PFJetsCHSPrunedMass.clone(), process, task)
    addToProcessAndTask('ak8PFJetsCHSTrimmedMass'+postfix, ak8PFJetsCHSTrimmedMass.clone(), process, task)
    addToProcessAndTask('ak8PFJetsCHSFilteredMass'+postfix, ak8PFJetsCHSFilteredMass.clone(), process, task)
    addToProcessAndTask('ak8PFJetsCHSSoftDropMass'+postfix, ak8PFJetsCHSSoftDropMass.clone(), process, task)

    getattr(process,"patJetsAK8").userData.userFloats.src += ['ak8PFJetsCHSPrunedMass'+postfix,'ak8PFJetsCHSSoftDropMass'+postfix]
    getattr(process,"patJetsAK8").addTagInfos = cms.bool(False)

    # add Njetiness for CHS
    process.load('RecoJets.JetProducers.nJettinessAdder_cfi')
    task.add(process.Njettiness)
    addToProcessAndTask('NjettinessAK8'+postfix, process.Njettiness.clone(), process, task)
    getattr(process,"NjettinessAK8").src = cms.InputTag("ak8PFJetsCHS"+postfix)
    getattr(process,"NjettinessAK8").cone = cms.double(0.8)
    getattr(process,"patJetsAK8").userData.userFloats.src += ['NjettinessAK8'+postfix+':tau1','NjettinessAK8'+postfix+':tau2','NjettinessAK8'+postfix+':tau3','NjettinessAK8'+postfix+':tau4']

    # add Njetiness from CHS
    addToProcessAndTask('NjettinessAK8Subjets'+postfix, process.Njettiness.clone(), process, task)
    getattr(process,"NjettinessAK8Subjets"+postfix).src = cms.InputTag("ak8PFJetsPuppiSoftDrop"+postfix, "SubJets")
    getattr(process,"NjettinessAK8Subjets").cone = cms.double(0.8)

    ## PATify CHS soft drop fat jets
    addJetCollection(
        process,
        postfix=postfix,
        labelName = 'AK8PFCHSSoftDrop',
        jetSource = cms.InputTag('ak8PFJetsCHSSoftDrop'+postfix),
        btagDiscriminators = ['None'],
        jetCorrections = ('AK8PFchs', ['L1FastJet', 'L2Relative', 'L3Absolute'], 'None'),
        getJetMCFlavour = False  # jet flavor disabled
    )

    # add RECO AK8 from PUPPI and RECO AK8 PUPPI with soft drop... will be needed by ungroomed AK8 jets later

    ## PATify puppi soft drop fat jets
    addJetCollection(
        process,
        postfix=postfix,
        labelName = 'AK8PFPuppiSoftDrop' + postfix,
        jetSource = cms.InputTag('ak8PFJetsPuppiSoftDrop'+postfix),
        btagDiscriminators = ['None'],
        genJetCollection = cms.InputTag('slimmedGenJetsAK8'),
        jetCorrections = ('AK8PFPuppi', ['L2Relative', 'L3Absolute'], 'None'),
        getJetMCFlavour = False  # jet flavor disabled
    )

    ## PATify soft drop subjets
    addJetCollection(
        process,
        postfix=postfix,
        labelName = 'AK8PFPuppiSoftDropSubjets',
        jetSource = cms.InputTag('ak8PFJetsPuppiSoftDrop'+postfix,'SubJets'),
        algo = 'ak',   # needed for subjet flavor clustering
        rParam = 0.8,  # needed for subjet flavor clustering
        btagDiscriminators = ['pfDeepCSVJetTags:probb', 'pfDeepCSVJetTags:probbb', 'pfCombinedInclusiveSecondaryVertexV2BJetTags','pfCombinedMVAV2BJetTags'],
        jetCorrections = ('AK4PFPuppi', ['L2Relative', 'L3Absolute'], 'None'),
        explicitJTA = True,   # needed for subjet b tagging
        svClustering = True,  # needed for subjet b tagging
        genJetCollection = cms.InputTag('slimmedGenJetsAK8SoftDropSubJets'),
        fatJets=cms.InputTag('ak8PFJetsPuppi'),  # needed for subjet flavor clustering
        groomedFatJets=cms.InputTag('ak8PFJetsPuppiSoftDrop')  # needed for subjet flavor clustering
    )

    # add groomed ECFs and N-subjettiness to soft dropped pat::Jets for fat jets and subjets
    process.load('RecoJets.JetProducers.ECF_cff')
    addToProcessAndTask('nb1AK8PuppiSoftDrop'+postfix, process.ecfNbeta1.clone(src = cms.InputTag("ak8PFJetsPuppiSoftDrop"+postfix), cuts = cms.vstring('', '', 'pt > 250')), process, task)
    addToProcessAndTask('nb2AK8PuppiSoftDrop'+postfix, process.ecfNbeta2.clone(src = cms.InputTag("ak8PFJetsPuppiSoftDrop"+postfix), cuts = cms.vstring('', '', 'pt > 250')), process, task)

    # too slow now ==> disable
    from Configuration.Eras.Modifier_pp_on_AA_2018_cff import pp_on_AA_2018
    from Configuration.Eras.Modifier_pp_on_XeXe_2017_cff import pp_on_XeXe_2017
    from Configuration.Eras.Modifier_phase2_common_cff import phase2_common
    for e in [pp_on_XeXe_2017, pp_on_AA_2018, phase2_common]:
        e.toModify(getattr(process,'nb1AK8PuppiSoftDrop'+postfix), cuts = ['pt > 999999', 'pt > 999999', 'pt > 999999'] )
        e.toModify(getattr(process,'nb2AK8PuppiSoftDrop'+postfix), cuts = ['pt > 999999', 'pt > 999999', 'pt > 999999'] )

    getattr(process,"patJetsAK8PFPuppiSoftDrop").userData.userFloats.src += ['nb1AK8PuppiSoftDrop'+postfix+':ecfN2','nb1AK8PuppiSoftDrop'+postfix+':ecfN3']
    getattr(process,"patJetsAK8PFPuppiSoftDrop").userData.userFloats.src += ['nb2AK8PuppiSoftDrop'+postfix+':ecfN2','nb2AK8PuppiSoftDrop'+postfix+':ecfN3']

    addToProcessAndTask('nb1AK8PuppiSoftDropSubjets'+postfix, process.ecfNbeta1.clone(src = cms.InputTag("ak8PFJetsPuppiSoftDrop"+postfix, "SubJets")), process, task)
    addToProcessAndTask('nb2AK8PuppiSoftDropSubjets'+postfix, process.ecfNbeta2.clone(src = cms.InputTag("ak8PFJetsPuppiSoftDrop"+postfix, "SubJets")), process, task)
    getattr(process,"patJetsAK8PFPuppiSoftDropSubjets"+postfix).userData.userFloats.src += ['nb1AK8PuppiSoftDropSubjets'+postfix+':ecfN2','nb1AK8PuppiSoftDropSubjets'+postfix+':ecfN3']
    getattr(process,"patJetsAK8PFPuppiSoftDropSubjets"+postfix).userData.userFloats.src += ['nb2AK8PuppiSoftDropSubjets'+postfix+':ecfN2','nb2AK8PuppiSoftDropSubjets'+postfix+':ecfN3']
    getattr(process,"patJetsAK8PFPuppiSoftDropSubjets"+postfix).userData.userFloats.src += ['NjettinessAK8Subjets'+postfix+':tau1','NjettinessAK8Subjets'+postfix+':tau2','NjettinessAK8Subjets'+postfix+':tau3','NjettinessAK8Subjets'+postfix+':tau4']

    for e in [pp_on_XeXe_2017, pp_on_AA_2018, phase2_common]:
        e.toModify(getattr(process,'nb1AK8PuppiSoftDropSubjets'+postfix), cuts = ['pt > 999999', 'pt > 999999', 'pt > 999999'] )
        e.toModify(getattr(process,'nb2AK8PuppiSoftDropSubjets'+postfix), cuts = ['pt > 999999', 'pt > 999999', 'pt > 999999'] )

    # rekey the groomed ECF value maps to the ungroomed reco jets, which will then be picked
    # up by PAT in the user floats.
    addToProcessAndTask("ak8PFJetsPuppiSoftDropValueMap"+postfix,
                        cms.EDProducer("RecoJetToPatJetDeltaRValueMapProducer",
                                       src = cms.InputTag("ak8PFJetsPuppi"+postfix),
                                       matched = cms.InputTag("patJetsAK8PFPuppiSoftDrop"+postfix),
                                       distMax = cms.double(0.8),
                                       values = cms.vstring([
                                           'userFloat("nb1AK8PuppiSoftDrop'+postfix+':ecfN2")',
                                           'userFloat("nb1AK8PuppiSoftDrop'+postfix+':ecfN3")',
                                           'userFloat("nb2AK8PuppiSoftDrop'+postfix+':ecfN2")',
                                           'userFloat("nb2AK8PuppiSoftDrop'+postfix+':ecfN3")',
                                       ]),
                                       valueLabels = cms.vstring( [
                                           'nb1AK8PuppiSoftDropN2',
                                           'nb1AK8PuppiSoftDropN3',
                                           'nb2AK8PuppiSoftDropN2',
                                           'nb2AK8PuppiSoftDropN3',
                                       ])
                                       ),
                        process, task)

    # Patify AK8 PF PUPPI
    addJetCollection(process, postfix=postfix, labelName = 'AK8Puppi',
                     jetSource = cms.InputTag('ak8PFJetsPuppi'+postfix),
                     algo= 'AK', rParam = 0.8,
                     jetCorrections = ('AK8PFPuppi', cms.vstring(['L2Relative', 'L3Absolute']), 'None'),
                     btagDiscriminators = ([
                         'pfCombinedSecondaryVertexV2BJetTags',
                         'pfCombinedInclusiveSecondaryVertexV2BJetTags',
                         'pfCombinedMVAV2BJetTags',
                         'pfDeepCSVJetTags:probb',
                         'pfDeepCSVJetTags:probc',
                         'pfDeepCSVJetTags:probudsg',
                         'pfDeepCSVJetTags:probbb',
                         'pfBoostedDoubleSecondaryVertexAK8BJetTags']),
                     genJetCollection = cms.InputTag('slimmedGenJetsAK8')
                     )
    getattr(process,"patJetsAK8Puppi"+postfix).userData.userFloats.src = []  # start with empty list of user floats
    getattr(process,"selectedPatJetsAK8Puppi"+postfix).cut = cms.string("pt > 100")
    getattr(process,"selectedPatJetsAK8Puppi"+postfix).cutLoose = cms.string("pt > 30")
    getattr(process,"selectedPatJetsAK8Puppi"+postfix).nLoose = cms.uint32(3)

    from RecoJets.JetAssociationProducers.j2tParametersVX_cfi import j2tParametersVX
    addToProcessAndTask('ak8PFJetsPuppiTracksAssociatorAtVertex'+postfix,
                        cms.EDProducer("JetTracksAssociatorAtVertex",
                                       j2tParametersVX.clone(coneSize = cms.double(0.8)),
                                       jets = cms.InputTag("ak8PFJetsPuppi")),
                        process, task)
    addToProcessAndTask('patJetAK8PuppiCharge'+postfix,
                        cms.EDProducer("JetChargeProducer",
                                       src = cms.InputTag("ak8PFJetsPuppiTracksAssociatorAtVertex"),
                                       var = cms.string('Pt'),
                                       exp = cms.double(1.0)),
                        process, task)

    ## now add AK8 groomed masses and ECF
    from RecoJets.JetProducers.ak8PFJetsPuppi_groomingValueMaps_cfi import ak8PFJetsPuppiSoftDropMass
    addToProcessAndTask('ak8PFJetsPuppiSoftDropMass'+postfix, ak8PFJetsPuppiSoftDropMass.clone(), process, task)
    getattr(process,"patJetsAK8Puppi"+postfix).userData.userFloats.src += ['ak8PFJetsPuppiSoftDropMass'+postfix]
    getattr(process,"patJetsAK8Puppi"+postfix).addTagInfos = cms.bool(False)
    getattr(process,"patJetsAK8Puppi"+postfix).userData.userFloats.src += [
        cms.InputTag('ak8PFJetsPuppiSoftDropValueMap'+postfix,'nb1AK8PuppiSoftDropN2'),
        cms.InputTag('ak8PFJetsPuppiSoftDropValueMap'+postfix,'nb1AK8PuppiSoftDropN3'),
        cms.InputTag('ak8PFJetsPuppiSoftDropValueMap'+postfix,'nb2AK8PuppiSoftDropN2'),
        cms.InputTag('ak8PFJetsPuppiSoftDropValueMap'+postfix,'nb2AK8PuppiSoftDropN3'),
    ]

    # add PUPPI Njetiness
    addToProcessAndTask('NjettinessAK8Puppi'+postfix, process.Njettiness.clone(), process, task)
    getattr(process,"NjettinessAK8Puppi"+postfix).src = cms.InputTag("ak8PFJetsPuppi"+postfix)
    getattr(process,"NjettinessAK8Puppi").cone = cms.double(0.8)
    getattr(process,"patJetsAK8Puppi").userData.userFloats.src += ['NjettinessAK8Puppi'+postfix+':tau1','NjettinessAK8Puppi'+postfix+':tau2','NjettinessAK8Puppi'+postfix+':tau3','NjettinessAK8Puppi'+postfix+':tau4']

    # Now combine the CHS and PUPPI information into the PUPPI jets via delta R value maps
    addToProcessAndTask("ak8PFJetsCHSValueMap"+postfix,
                        cms.EDProducer("RecoJetToPatJetDeltaRValueMapProducer",
                                       src = cms.InputTag("ak8PFJetsPuppi"+postfix),
                                       matched = cms.InputTag("patJetsAK8"+postfix),
                                       distMax = cms.double(0.8),
                                       values = cms.vstring([
                                           'userFloat("ak8PFJetsCHSPrunedMass"'+postfix+')',
                                           'userFloat("ak8PFJetsCHSSoftDropMass"'+postfix+')',
                                           'userFloat("NjettinessAK8'+postfix+':tau1")',
                                           'userFloat("NjettinessAK8'+postfix+':tau2")',
                                           'userFloat("NjettinessAK8'+postfix+':tau3")',
                                           'userFloat("NjettinessAK8'+postfix+':tau4")',
                                           'pt','eta','phi','mass', 'jetArea', 'jecFactor(0)'
                                       ]),
                                       valueLabels = cms.vstring( [
                                           'ak8PFJetsCHSPrunedMass',
                                           'ak8PFJetsCHSSoftDropMass',
                                           'NjettinessAK8CHSTau1',
                                           'NjettinessAK8CHSTau2',
                                           'NjettinessAK8CHSTau3',
                                           'NjettinessAK8CHSTau4',
                                           'pt','eta','phi','mass', 'jetArea', 'rawFactor'
                                       ])
                                       ),
                        process, task)

    # Now set up the user floats
    getattr(process,"patJetsAK8Puppi"+postfix).userData.userFloats.src += [
        cms.InputTag('ak8PFJetsCHSValueMap'+postfix,'ak8PFJetsCHSPrunedMass'),
        cms.InputTag('ak8PFJetsCHSValueMap'+postfix,'ak8PFJetsCHSSoftDropMass'),
        cms.InputTag('ak8PFJetsCHSValueMap'+postfix,'NjettinessAK8CHSTau1'),
        cms.InputTag('ak8PFJetsCHSValueMap'+postfix,'NjettinessAK8CHSTau2'),
        cms.InputTag('ak8PFJetsCHSValueMap'+postfix,'NjettinessAK8CHSTau3'),
        cms.InputTag('ak8PFJetsCHSValueMap'+postfix,'NjettinessAK8CHSTau4'),
        cms.InputTag('ak8PFJetsCHSValueMap'+postfix,'pt'),
        cms.InputTag('ak8PFJetsCHSValueMap'+postfix,'eta'),
        cms.InputTag('ak8PFJetsCHSValueMap'+postfix,'phi'),
        cms.InputTag('ak8PFJetsCHSValueMap'+postfix,'mass'),
        cms.InputTag('ak8PFJetsCHSValueMap'+postfix,'jetArea'),
        cms.InputTag('ak8PFJetsCHSValueMap'+postfix,'rawFactor'),
    ]

    addToProcessAndTask("slimmedJetsAK8PFPuppiSoftDropSubjets"+postfix,
                        cms.EDProducer("PATJetSlimmer",
                                       src = cms.InputTag("selectedPatJetsAK8PFPuppiSoftDropSubjets"),
                                       packedPFCandidates = cms.InputTag("packedPFCandidates"),
dropJetVars = cms.string("1"), dropDaughters = cms.string("0"), rekeyDaughters = cms.string("1"), dropTrackRefs = cms.string("1"), dropSpecific = cms.string("1"), dropTagInfos = cms.string("1"), modifyJets = cms.bool(True), mixedDaughters = cms.bool(False), modifierConfig = cms.PSet( modifications = cms.VPSet() ) ), process, task) ## Establish references between PATified fat jets and subjets using the BoostedJetMerger addToProcessAndTask("slimmedJetsAK8PFPuppiSoftDropPacked"+postfix, cms.EDProducer("BoostedJetMerger", jetSrc=cms.InputTag("selectedPatJetsAK8PFPuppiSoftDrop"), subjetSrc=cms.InputTag("slimmedJetsAK8PFPuppiSoftDropSubjets") ), process, task ) addToProcessAndTask("packedPatJetsAK8"+postfix, cms.EDProducer("JetSubstructurePacker", jetSrc = cms.InputTag("selectedPatJetsAK8Puppi"+postfix), distMax = cms.double(0.8), algoTags = cms.VInputTag( cms.InputTag("slimmedJetsAK8PFPuppiSoftDropPacked"+postfix) ), algoLabels = cms.vstring( 'SoftDropPuppi' ), fixDaughters = cms.bool(True), packedPFCandidates = cms.InputTag("packedPFCandidates"+postfix), ), process, task) # switch off daughter re-keying since it's done in the JetSubstructurePacker (and can't be done afterwards) process.slimmedJetsAK8.rekeyDaughters = "0" # Reconfigure the slimmedAK8 jet information to keep process.slimmedJetsAK8.dropDaughters = cms.string("pt < 170") process.slimmedJetsAK8.dropSpecific = cms.string("pt < 170") process.slimmedJetsAK8.dropTagInfos = cms.string("pt < 170")
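# Usage sketch (added, not part of the original helper): applySubstructure is
# called on an already-configured CMSSW process in which the standard PAT
# sequences and the 'ak8PFJetsCHS' collection exist; it only clones and wires
# the substructure modules on top.
#
#   import FWCore.ParameterSet.Config as cms
#   process = cms.Process("PAT")
#   ...  # load the standard MiniAOD/PAT configuration first
#   applySubstructure(process)               # default module labels
#   applySubstructure(process, "MyPostfix")  # postfixed clones of each module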
[ [ [ 7, 40 ], [ 1013, 1016 ], [ 1278, 1281 ], [ 1571, 1574 ], [ 1749, 1752 ], [ 1837, 1840 ], [ 1964, 1967 ], [ 2053, 2056 ], [ 2127, 2130 ], [ 2199, 2202 ], [ 2278, 2281 ], [ 2379, 2382 ], [ 2601, 2604 ], [ 2789, 2792 ], [ 2894, 2897 ], [ 3109, 3112 ], [ 4251, 4254 ], [ 4526, 4529 ], [ 4607, 4610 ], [ 5001, 5004 ], [ 5110, 5113 ], [ 5296, 5299 ], [ 5820, 5823 ], [ 5934, 5937 ], [ 6274, 6277 ], [ 6834, 6837 ], [ 6901, 6904 ], [ 7006, 7009 ], [ 7319, 7322 ], [ 7374, 7377 ], [ 7508, 7511 ], [ 7563, 7566 ], [ 8587, 8590 ], [ 8754, 8757 ], [ 9991, 9994 ], [ 10092, 10095 ], [ 10181, 10184 ], [ 10281, 10284 ], [ 10346, 10349 ], [ 10729, 10732 ], [ 11125, 11128 ], [ 11265, 11268 ], [ 11871, 11874 ], [ 12104, 12107 ], [ 12193, 12196 ], [ 12279, 12282 ], [ 12453, 12456 ], [ 12571, 12574 ], [ 12635, 12638 ], [ 12764, 12767 ], [ 12843, 12846 ], [ 12942, 12945 ], [ 13003, 13006 ], [ 13494, 13497 ], [ 13594, 13597 ], [ 13682, 13685 ], [ 13770, 13773 ], [ 13858, 13861 ], [ 14129, 14132 ], [ 14217, 14220 ], [ 14597, 14600 ], [ 14703, 14706 ], [ 14797, 14800 ], [ 14887, 14890 ], [ 14957, 14960 ], [ 15750, 15753 ], [ 16550, 16553 ], [ 16672, 16675 ], [ 16796, 16799 ], [ 16916, 16919 ], [ 17036, 17039 ], [ 17156, 17159 ], [ 17276, 17279 ], [ 17378, 17381 ], [ 17481, 17484 ], [ 17584, 17587 ], [ 17688, 17691 ], [ 17795, 17798 ], [ 18014, 18017 ], [ 18081, 18084 ], [ 18189, 18192 ], [ 18268, 18271 ], [ 18330, 18333 ], [ 18393, 18396 ], [ 18455, 18458 ], [ 18516, 18519 ], [ 18577, 18580 ], [ 18636, 18639 ], [ 18698, 18701 ], [ 18761, 18764 ], [ 18787, 18790 ], [ 19076, 19079 ], [ 19149, 19152 ], [ 19241, 19244 ], [ 19434, 19437 ], [ 19526, 19529 ], [ 19628, 19631 ], [ 19699, 19702 ], [ 19761, 19764 ], [ 19923, 19926 ], [ 20102, 20105 ], [ 20181, 20184 ], [ 20596, 20599 ], [ 20661, 20664 ], [ 20726, 20729 ] ], [ [ 90, 110 ], [ 192, 212 ] ], [ [ 112, 131 ], [ 748, 767 ], [ 836, 855 ], [ 924, 943 ], [ 1086, 1105 ], [ 1188, 1207 ], [ 1359, 1378 ], [ 1489, 1508 ], [ 1657, 1676 ], [ 3272, 3291 ], [ 3369, 3388 ], [ 3643, 3662 ], [ 3748, 3767 ], [ 3855, 3874 ], [ 3964, 3983 ], [ 4395, 4414 ], [ 4848, 4867 ], [ 7238, 7257 ], [ 7427, 7446 ], [ 8499, 8518 ], [ 8666, 8685 ], [ 9904, 9923 ], [ 12383, 12402 ], [ 12712, 12731 ], [ 13211, 13230 ], [ 13980, 13999 ], [ 14545, 14564 ], [ 17922, 17941 ], [ 18985, 19004 ], [ 19386, 19405 ] ], [ [ 137, 154 ] ] ]
# This script assumes that the dataframe has been created and saved in data.txt
import pickle
import matplotlib.pyplot as plt
import numpy as np
import pandas as pd
from dataFrameUtilities import addInsultIntensityColumns, getInsultAboveThreshold, getPainAboveThreshold, selectColumns, selectTime
from sklearn.preprocessing import MinMaxScaler

# Getting data; a context manager avoids shadowing the built-in input()
with open("../data/preprocessed/preprocessedDataParticipant1.txt", "rb") as dataFile:
    data = pickle.load(dataFile)
timeSelected = selectTime(data, "2016-09-01", "2019-10-20")

# Removing "steps" caused by scooter riding
timeSelected["steps"] = timeSelected["steps"] - 37 * timeSelected["scooterRiding"]
timeSelected.loc[timeSelected["steps"] < 0, "steps"] = 0

# Getting knee pain information
kneePain = selectColumns(timeSelected, ["kneePain"])
thres = kneePain.copy()
thres[:] = 3.3

# Calculating knee stress over time
env = addInsultIntensityColumns(timeSelected, ["steps", "kneePain"], 21, 30)
envRollingMean = selectColumns(env, ["stepsInsultIntensity"])
envMaxInsultDiff = selectColumns(env, ["stepsMaxInsultDiff"])
kneePainRollingMean = selectColumns(env, ["kneePainInsultIntensity"])
kneePainRollingMean = kneePainRollingMean.replace(0, 0.4)
scaler = MinMaxScaler()
kneePainRollingMeanArray = scaler.fit_transform(kneePainRollingMean)
for i in range(0, len(kneePainRollingMean)):
    kneePainRollingMean["kneePainInsultIntensity"][i] = kneePainRollingMeanArray[i]
kneePainRollingMean = kneePainRollingMean.replace(0.0, 0.4)
thres2 = kneePain.copy()
thres2[:] = 1.1
for i in range(0, 300):
    thres2["kneePain"][i] = 1.2
for i in range(810, len(thres2)):
    thres2["kneePain"][i] = 1.8
envBrut = selectColumns(env, ["steps"])
betterMaxInsult = envMaxInsultDiff.copy()
scaler = MinMaxScaler()
betterMaxInsultArray = scaler.fit_transform(betterMaxInsult)
for i in range(0, len(betterMaxInsult)):
    betterMaxInsult["stepsMaxInsultDiff"][i] = betterMaxInsultArray[i] + envBrut["steps"][i] + kneePainRollingMean["kneePainInsultIntensity"][i]

# Finding time points where knee pain and knee stress are above a certain threshold
painAboveThresh = getPainAboveThreshold(kneePain, "kneePain", 3.3)
painAboveThresh = selectColumns(painAboveThresh, ["kneePainThreshed"])
stepsMaxInsultDiffThresh = getInsultAboveThreshold(betterMaxInsult, "stepsMaxInsultDiff", thres2)
stepsMaxInsultDiffThresh = selectColumns(stepsMaxInsultDiffThresh, ["stepsMaxInsultDiffThreshed"])

# Plotting results
fig, axes = plt.subplots(nrows=3, ncols=1)
selectColumns(kneePain, ["kneePain"]).rename(columns={"kneePain": "knee pain"}).plot(ax=axes[0])
thres.rename(columns={"kneePain": "pain threshold"}).plot(ax=axes[0])
selectColumns(betterMaxInsult, ["stepsMaxInsultDiff"]).rename(columns={"stepsMaxInsultDiff": "knee stress"}).plot(ax=axes[1])
thres2.rename(columns={"kneePain": "knee stress threshold"}).plot(ax=axes[1])
painAboveThresh.rename(columns={"kneePainThreshed": "knee pain is above threshold"}).plot(ax=axes[2])
stepsMaxInsultDiffThresh = 0.95 * stepsMaxInsultDiffThresh
stepsMaxInsultDiffThresh.rename(columns={"stepsMaxInsultDiffThreshed": "knee stress is above threshold"}).plot(ax=axes[2])
leg = plt.legend(loc="best")
leg.set_draggable(True)
plt.show()
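# Added note: the element-wise loops above use chained indexing
# (df["col"][i] = ...), which newer pandas versions flag with
# SettingWithCopyWarning. A vectorized sketch of the first scaling loop:
#
#   kneePainRollingMean["kneePainInsultIntensity"] = (
#       MinMaxScaler().fit_transform(kneePainRollingMean).ravel()
#   )
#
# and the positional threshold writes can go through .loc on the index:
#
#   thres2.loc[thres2.index[:300], "kneePain"] = 1.2
#   thres2.loc[thres2.index[810:], "kneePain"] = 1.8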
[ [ [ 89, 95 ], [ 446, 452 ] ], [ [ 104, 128 ], [ 2478, 2481 ], [ 3174, 3177 ], [ 3221, 3224 ] ], [ [ 136, 147 ] ], [ [ 155, 167 ] ], [ [ 199, 224 ], [ 896, 921 ] ], [ [ 226, 249 ], [ 2274, 2297 ] ], [ [ 251, 272 ], [ 2126, 2147 ] ], [ [ 274, 287 ], [ 769, 782 ], [ 984, 997 ], [ 1048, 1061 ], [ 1114, 1127 ], [ 1677, 1690 ], [ 2193, 2206 ], [ 2372, 2385 ], [ 2510, 2523 ], [ 2678, 2691 ] ], [ [ 288, 298 ], [ 495, 505 ] ], [ [ 333, 345 ], [ 1229, 1241 ], [ 1759, 1771 ] ], [ [ 363, 368 ], [ 458, 463 ], [ 465, 470 ] ], [ [ 439, 443 ], [ 506, 510 ] ], [ [ 480, 492 ], [ 611, 623 ], [ 640, 652 ], [ 587, 599 ], [ 670, 682 ], [ 692, 704 ], [ 783, 795 ], [ 922, 934 ] ], [ [ 758, 766 ], [ 820, 828 ], [ 1512, 1520 ], [ 2148, 2156 ], [ 2524, 2532 ] ], [ [ 812, 817 ], [ 836, 841 ], [ 2607, 2612 ] ], [ [ 890, 893 ], [ 998, 1001 ], [ 1062, 1065 ], [ 1128, 1131 ], [ 1691, 1694 ] ], [ [ 967, 981 ] ], [ [ 1029, 1045 ], [ 1726, 1742 ] ], [ [ 1092, 1111 ], [ 1184, 1203 ] ], [ [ 1162, 1181 ], [ 1292, 1311 ], [ 1335, 1354 ], [ 1362, 1381 ], [ 1464, 1483 ] ], [ [ 1220, 1226 ], [ 1271, 1277 ] ], [ [ 1244, 1268 ], [ 1414, 1438 ] ], [ [ 1317, 1318 ], [ 1439, 1440 ], [ 1409, 1410 ] ], [ [ 1442, 1461 ], [ 1971, 1990 ] ], [ [ 1503, 1509 ], [ 1528, 1534 ], [ 1572, 1578 ], [ 1624, 1630 ], [ 1638, 1644 ], [ 2337, 2343 ], [ 2804, 2810 ] ], [ [ 1548, 1549 ], [ 1591, 1592 ] ], [ [ 1604, 1605 ], [ 1657, 1658 ] ], [ [ 1667, 1674 ], [ 1949, 1956 ] ], [ [ 1708, 1723 ], [ 1818, 1833 ], [ 1857, 1872 ], [ 1880, 1895 ], [ 2298, 2313 ], [ 2692, 2707 ] ], [ [ 1750, 1756 ], [ 1797, 1803 ] ], [ [ 1774, 1794 ], [ 1923, 1943 ] ], [ [ 1839, 1840 ], [ 1944, 1945 ], [ 1966, 1967 ], [ 2018, 2019 ], [ 1918, 1919 ] ], [ [ 2108, 2123 ], [ 2207, 2222 ] ], [ [ 2175, 2190 ], [ 2883, 2898 ] ], [ [ 2247, 2271 ], [ 2386, 2410 ] ], [ [ 2345, 2369 ], [ 3019, 3043 ] ], [ [ 2466, 2469 ] ], [ [ 2471, 2475 ], [ 2598, 2602 ], [ 2668, 2672 ], [ 2795, 2799 ], [ 2873, 2877 ], [ 2976, 2980 ], [ 3158, 3162 ] ], [ [ 2985, 3009 ], [ 3044, 3068 ] ], [ [ 3168, 3171 ], [ 3197, 3200 ] ] ]
# Copyright (C) 2021, QuantStack # SPDX-License-Identifier: BSD-3-Clause version_info = (0, 7, 0) __version__ = ".".join(map(str, version_info))
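# Example (added): with version_info = (0, 7, 0), __version__ evaluates to "0.7.0".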
[ [ [ 74, 86 ], [ 131, 143 ] ], [ [ 99, 110 ] ] ]
from django.core.urlresolvers import resolve, reverse
from django.db import transaction
from django.test import TestCase
from django.test import Client
from django.utils import translation
from django.contrib.auth.models import User, Group
from django.contrib.auth import authenticate, login, logout
from rest_framework import status
from rest_framework.test import APIClient
from rest_framework.test import APITestCase
from rest_framework.authtoken.models import Token
from django_tenants.test.cases import TenantTestCase
from django_tenants.test.client import TenantClient
from smegurus import constants


TEST_USER_EMAIL = "ledo@gah.com"
TEST_USER_USERNAME = "ledo"
TEST_USER_PASSWORD = "GalacticAllianceOfHumankind"


class APIEmailActivationTestCase(APITestCase, TenantTestCase):
    fixtures = []

    def setup_tenant(self, tenant):
        """Public Schema"""
        tenant.schema_name = 'test'
        tenant.name = "Galactic Alliance of Humankind"
        tenant.has_perks=True
        tenant.has_mentors=True
        tenant.how_discovered = "Command HQ"
        tenant.how_many_served = 1

    @classmethod
    def setUpTestData(cls):
        Group.objects.bulk_create([
            Group(id=constants.ENTREPRENEUR_GROUP_ID, name="Entrepreneur",),
            Group(id=constants.MENTOR_GROUP_ID, name="Mentor",),
            Group(id=constants.ADVISOR_GROUP_ID, name="Advisor",),
            Group(id=constants.ORGANIZATION_MANAGER_GROUP_ID, name="Org Manager",),
            Group(id=constants.ORGANIZATION_ADMIN_GROUP_ID, name="Org Admin",),
            Group(id=constants.CLIENT_MANAGER_GROUP_ID, name="Client Manager",),
            Group(id=constants.SYSTEM_ADMIN_GROUP_ID, name="System Admin",),
        ])
        user = User.objects.create_user(  # Create our User.
            email=TEST_USER_EMAIL,
            username=TEST_USER_USERNAME,
            password=TEST_USER_PASSWORD
        )
        user.is_active = True
        user.save()

    @transaction.atomic
    def setUp(self):
        translation.activate('en')  # Set English
        super(APIEmailActivationTestCase, self).setUp()
        self.c = TenantClient(self.tenant)

    @transaction.atomic
    def tearDown(self):
        users = User.objects.all()
        for user in users.all():
            user.delete()
        # super(APIEmailActivationTestCase, self).tearDown()

    @transaction.atomic
    def test_api_send_activation(self):
        url = reverse('api_emailactivation')
        data = {
            'email': TEST_USER_EMAIL,
        }
        response = self.c.post(url, data, format='json')
        self.assertEqual(response.status_code, status.HTTP_200_OK)

        from django.core import mail

        # Test that one message has been sent.
        self.assertEqual(len(mail.outbox), 1)

        # Verify that the subject of the first message is correct.
        self.assertEqual(mail.outbox[0].subject, 'Den Activation')

    @transaction.atomic
    def test_api_send_activation_with_no_email(self):
        url = reverse('api_emailactivation')
        data = {
            'email': 'whalesquid@hideauze.com',
        }
        response = self.c.post(url, data, format='json')
        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
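# Added sketch of the request/response cycle exercised above. The concrete URL
# path is whatever the 'api_emailactivation' route name resolves to via
# reverse(), so it is left symbolic here:
#
#   POST <api_emailactivation> {"email": "ledo@gah.com"}
#       -> 200 OK, one "Den Activation" message in django.core.mail.outbox
#   POST <api_emailactivation> {"email": "whalesquid@hideauze.com"}
#       -> 400 Bad Request (address not registered)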
[ [ [ 37, 44 ] ], [ [ 46, 53 ], [ 2441, 2448 ], [ 3021, 3028 ] ], [ [ 76, 87 ], [ 1969, 1980 ], [ 2164, 2175 ], [ 2368, 2379 ], [ 2934, 2945 ] ], [ [ 112, 120 ] ], [ [ 145, 151 ] ], [ [ 177, 188 ], [ 2017, 2028 ] ], [ [ 228, 232 ], [ 1741, 1745 ], [ 2223, 2227 ] ], [ [ 234, 239 ], [ 1155, 1160 ], [ 1195, 1200 ], [ 1272, 1277 ], [ 1337, 1342 ], [ 1404, 1409 ], [ 1488, 1493 ], [ 1568, 1573 ], [ 1649, 1654 ] ], [ [ 272, 284 ] ], [ [ 286, 291 ] ], [ [ 293, 299 ] ], [ [ 327, 333 ], [ 2641, 2647 ], [ 3231, 3237 ] ], [ [ 366, 375 ] ], [ [ 408, 419 ], [ 755, 766 ] ], [ [ 464, 469 ] ], [ [ 508, 522 ], [ 768, 782 ] ], [ [ 562, 574 ], [ 2132, 2144 ] ], [ [ 596, 605 ], [ 1204, 1213 ], [ 1281, 1290 ], [ 1346, 1355 ], [ 1413, 1422 ], [ 1497, 1506 ], [ 1577, 1586 ], [ 1658, 1667 ] ], [ [ 608, 623 ], [ 1805, 1820 ], [ 2510, 2525 ] ], [ [ 641, 659 ], [ 1843, 1861 ] ], [ [ 669, 687 ], [ 1884, 1902 ] ], [ [ 728, 754 ], [ 2073, 2099 ] ] ]
OEMBED_ENDPOINTS = { "https://speakerdeck.com/oembed.{format}": [ "^http(?:s)?://speakerdeck\\.com/.+$" ], "https://alpha-api.app.net/oembed": [ "^http(?:s)?://alpha\\.app\\.net/[^#?/]+/post/.+$", "^http(?:s)?://photos\\.app\\.net/[^#?/]+/.+$" ], "http://www.youtube.com/oembed": [ "^http(?:s)?://(?:[-\\w]+\\.)?youtube\\.com/watch.+$", "^http(?:s)?://(?:[-\\w]+\\.)?youtube\\.com/v/.+$", "^http(?:s)?://youtu\\.be/.+$", "^http(?:s)?://(?:[-\\w]+\\.)?youtube\\.com/user/.+$", "^http(?:s)?://(?:[-\\w]+\\.)?youtube\\.com/[^#?/]+#[^#?/]+/.+$", "^http(?:s)?://m\\.youtube\\.com/index.+$", "^http(?:s)?://(?:[-\\w]+\\.)?youtube\\.com/profile.+$", "^http(?:s)?://(?:[-\\w]+\\.)?youtube\\.com/view_play_list.+$", "^http(?:s)?://(?:[-\\w]+\\.)?youtube\\.com/playlist.+$" ], "http://backend.deviantart.com/oembed": [ "^http://(?:[-\\w]+\\.)?deviantart\\.com/art/.+$", "^http://fav\\.me/.+$", "^http://sta\\.sh/.+$", "^http://(?:[-\\w]+\\.)?deviantart\\.com/[^#?/]+#/d.+$" ], "http://blip.tv/oembed/": [ "^http://[-\\w]+\\.blip\\.tv/.+$" ], "http://www.dailymotion.com/api/oembed/": [ "^http://[-\\w]+\\.dailymotion\\.com/.+$" ], "http://www.flickr.com/services/oembed/": [ "^http://[-\\w]+\\.flickr\\.com/photos/.+$", "^http://flic\\.kr\\.com/.+$" ], "http://www.hulu.com/api/oembed.{format}": [ "^http://www\\.hulu\\.com/watch/.+$" ], "http://www.nfb.ca/remote/services/oembed/": [ "^http://(?:[-\\w]+\\.)?nfb\\.ca/film/.+$" ], "http://qik.com/api/oembed.{format}": [ "^http://qik\\.com/.+$", "^http://qik\\.ly/.+$" ], "http://revision3.com/api/oembed/": [ "^http://[-\\w]+\\.revision3\\.com/.+$" ], "http://www.scribd.com/services/oembed": [ "^http://[-\\w]+\\.scribd\\.com/.+$" ], "http://www.viddler.com/oembed/": [ "^http://[-\\w]+\\.viddler\\.com/v/.+$", "^http://[-\\w]+\\.viddler\\.com/explore/.+$" ], "http://www.vimeo.com/api/oembed.{format}": [ "^http(?:s)?://(?:www\\.)?vimeo\\.com/.+$", "^http(?:s)?://player\\.vimeo\\.com/.+$" ], "http://dotsub.com/services/oembed": [ "^http://dotsub\\.com/view/.+$" ], "http://www.yfrog.com/api/oembed": [ "^http(?:s)?://(?:www\\.)?yfrog\\.com/.+$", "^http(?:s)?://(?:www\\.)?yfrog\\.us/.+$" ], "http://clikthrough.com/services/oembed": [ "^http(?:s)?://(?:[-\\w]+\\.)?clikthrough\\.com/.+$" ], "http://www.kinomap.com/oembed": [ "^http://[-\\w]+\\.kinomap\\.com/.+$" ], "https://photobucket.com/oembed": [ "^http://(?:[-\\w]+\\.)?photobucket\\.com/albums/.+$", "^http://(?:[-\\w]+\\.)?photobucket\\.com/groups/.+$" ], "http://api.instagram.com/oembed": [ "^http://instagr\\.am/p/.+$", "^http[s]?://instagram\\.com/p/.+$" ], "https://www.slideshare.net/api/oembed/2": [ "^http://www\\.slideshare\\.net/.+$" ], "http://tv.majorleaguegaming.com/oembed": [ "^http://mlg\\.tv/.+$", "^http://tv\\.majorleaguegaming\\.com/.+$" ], "http://my.opera.com/service/oembed": [ "^http://my\\.opera\\.com/.+$" ], "http://skitch.com/oembed": [ "^http(?:s)?://(?:www\\.)?skitch\\.com/.+$", "^http://skit\\.ch/.+$" ], "https://api.twitter.com/1/statuses/oembed.{format}": [ "^http(?:s)?://twitter\\.com/(?:#!)?[^#?/]+/status/.+$" ], "https://soundcloud.com/oembed": [ "^https://soundcloud\\.com/[^#?/]+/.+$" ], "http://www.collegehumor.com/oembed.{format}": [ "^http://(?:www\\.)?collegehumor\\.com/video/.+$", "^http://(?:www\\.)?collegehumor\\.com/video:.+$" ], "http://www.polleverywhere.com/services/oembed/": [ "^http://www\\.polleverywhere\\.com/polls/.+$", "^http://www\\.polleverywhere\\.com/multiple_choice_polls/.+$", "^http://www\\.polleverywhere\\.com/free_text_polls/.+$" ], "http://www.ifixit.com/Embed": [ 
"^http://www\\.ifixit\\.com/[^#?/]+/[^#?/]+/.+$" ], "http://api.smugmug.com/services/oembed/": [ "^http(?:s)?://(?:www\\.)?smugmug\\.com/[^#?/]+/.+$" ], "https://github.com/api/oembed": [ "^http(?:s)?://gist\\.github\\.com/.+$" ], "http://animoto.com/services/oembed": [ "^http://animoto\\.com/play/.+$" ], "http://www.rdio.com/api/oembed": [ "^http://(?:wwww\\.)?rdio\\.com/people/[^#?/]+/playlists/.+$", "^http://[-\\w]+\\.rdio\\.com/artist/[^#?/]+/album/.+$" ], "http://api.5min.com/oembed.{format}": [ "^http://www\\.5min\\.com/video/.+$" ], "http://500px.com/photo/{1}/oembed.{format}": [ "^http://500px\\.com/photo/([^#?/]+)(?:.+)?$" ], "http://api.dipdive.com/oembed.{format}": [ "^http://[-\\w]+\\.dipdive\\.com/media/.+$" ], "http://video.yandex.ru/oembed.{format}": [ "^http://video\\.yandex\\.ru/users/[^#?/]+/view/.+$" ], "http://www.mixcloud.com/oembed/": [ "^http://www\\.mixcloud\\.com/oembed/[^#?/]+/.+$" ], "http://www.kickstarter.com/services/oembed": [ "^http(?:s)://[-\\w]+\\.kickstarter\\.com/projects/.+$" ], "http://coub.com/api/oembed.{format}": [ "^http(?:s)?://coub\\.com/view/.+$", "^http(?:s)?://coub\\.com/embed/.+$" ], "http://www.screenr.com/api/oembed.{format}": [ "^http://www\\.screenr\\.com/.+$" ], "http://www.funnyordie.com/oembed.{format}": [ "^http://www\\.funnyordie\\.com/videos/.+$" ], "http://fast.wistia.com/oembed.{format}": [ "^http://[-\\w]+\\.wista\\.com/medias/.+$" ], "http://www.ustream.tv/oembed": [ "^http(?:s)?://(?:www\\.)?ustream\\.tv/.+$", "^http(?:s)?://(?:www\\.)?ustream\\.com/.+$", "^http://ustre\\.am/.+$" ], "http://wordpress.tv/oembed/": [ "^http://wordpress\\.tv/.+$" ], "http://polldaddy.com/oembed/": [ "^http(?:s)?://(?:[-\\w]+\\.)?polldaddy\\.com/.+$" ], "http://api.bambuser.com/oembed.{format}": [ "^http://bambuser\\.com/channel/[^#?/]+/broadcast/.+$", "^http://bambuser\\.com/channel/.+$", "^http://bambuser\\.com/v/.+$" ], "http://www.ted.com/talks/oembed.{format}": [ "^http(?:s)?://(?:www\\.)?ted\\.com/talks/.+$", "^http(?:s)?://(?:www\\.)?ted\\.com/talks/lang/[^#?/]+/.+$", "^http(?:s)?://(?:www\\.)?ted\\.com/index\\.php/talks/.+$", "^http(?:s)?://(?:www\\.)?ted\\.com/index\\.php/talks/lang/[^#?/]+/.+$" ], "http://chirb.it/oembed.{format}": [ "^http://chirb\\.it/.+$" ], "https://www.circuitlab.com/circuit/oembed/": [ "^http(?:s)?://(?:www\\.)?circuitlab\\.com/circuit/.+$" ], "http://api.geograph.org.uk/api/oembed": [ "^http://(?:[-\\w]+\\.)?geograph\\.org\\.uk/.+$", "^http://(?:[-\\w]+\\.)?geograph\\.co\\.uk/.+$", "^http://(?:[-\\w]+\\.)?geograph\\.ie/.+$" ], "http://geo.hlipp.de/restapi.php/api/oembed": [ "^http://geo-en\\.hlipp\\.de/.+$", "^http://geo\\.hlipp\\.de/.+$", "^http://germany\\.geograph\\.org/.+$" ], "http://www.geograph.org.gg/api/oembed": [ "^http://(?:[-\\w]+\\.)?geograph\\.org\\.gg/.+$", "^http://(?:[-\\w]+\\.)?geograph\\.org\\.je/.+$", "^http://channel-islands\\.geograph\\.org/.+$", "^http://channel-islands\\.geographs\\.org/.+$", "^http://(?:[-\\w]+\\.)?channel\\.geographs\\.org/.+$" ], "http://vzaar.com/api/videos/{1}.{format}": [ "^http://(?:www\\.)?vzaar\\.com/videos/([^#?/]+)(?:.+)?$", "^http://www\\.vzaar\\.tv/([^#?/]+)(?:.+)?$", "^http://vzaar\\.tv/([^#?/]+)(?:.+)?$", "^http://vzaar\\.me/([^#?/]+)(?:.+)?$", "^http://[-\\w]+\\.vzaar\\.me/([^#?/]+)(?:.+)?$" ], "http://api.minoto-video.com/services/oembed.{format}": [ "^http://api\\.minoto-video\\.com/publishers/[^#?/]+/videos/.+$", "^http://dashboard\\.minoto-video\\.com/main/video/details/.+$", "^http://embed\\.minoto-video\\.com/.+$" ], 
"http://www.videojug.com/oembed.{format}": [ "^http(?:s)?://(?:[-\\w]+\\.)?videojug\\.com/film/.+$", "^http(?:s)?://(?:[-\\w]+\\.)?videojug\\.com/payer/.+$", "^http(?:s)?://(?:[-\\w]+\\.)?videojug\\.com/interview/.+$" ], "http://videos.sapo.pt/oembed": [ "^http(?:s)?://videos\\.sapo\\.pt/.+$" ], "http://vhx.tv/services/oembed.{format}": [ "^http(?:s)?://(?:www\\.)?vhx\\.tv/.+$" ], "http://api.justin.tv/api/embed/from_url.{format}": [ "^http(?:s)?://(?:www\\.)?justin\\.tv/.+$" ], "http://official.fm/services/oembed.{format}": [ "^http(?:s)?://official\\.fm/.+$" ], "http://huffduffer.com/oembed": [ "^http(?:s)?://(?:www\\.)?huffduffer\\.com/[^#?/]+/.+$" ], "https://embed.spotify.com/oembed/": [ "^http(?:s)?://open\\.spotify\\.com/.+$", "^http(?:s)?://spoti\\.fi/.+$" ], "http://shoudio.com/api/oembed": [ "^http://shoudio\\.com/.+$", "^http://shoud\\.io/.+$" ], "http://api.mobypicture.com/oEmbed": [ "^http(?:s)?://(?:www\\.)?mobypicture\\.com/user/[^#?/]+/view/.+$", "^http(?:s)?://(?:www\\.)?moby\\.to/.+$" ], "http://www.23hq.com/23/oembed": [ "^http(?:s)?://(?:www\\.)?23hq\\.com/[^#?/]+/photo/.+$" ], "http://gmep.org/oembed.{format}": [ "^http(?:s)?://(?:www\\.)?gmep\\.org/.+$", "^http(?:s)?://gmep\\.imeducate\\.com/.+$" ], "http://oembed.urtak.com/1/oembed": [ "^http(?:s)?://(?:[-\\w]+\\.)?urtak\\.com/.+$" ], "http://cacoo.com/oembed.{format}": [ "^http(?:s)?://cacoo\\.com/.+$" ], "http://api.dailymile.com/oembed": [ "^http(?:s)?://(?:www\\.)?dailymile\\.com/people/[^#?/]+/entries/.+$" ], "http://www.dipity.com/oembed/timeline/": [ "^http(?:s)?://(?:www\\.)?dipity\\.com/timeline/.+$", "^http(?:s)?://(?:www\\.)?dipity\\.com/voaweb/.+$" ], "https://sketchfab.com/oembed": [ "^http(?:s)?://sketchfab\\.com/show/.+$" ], "https://api.meetup.com/oembed": [ "^http(?:s)?://(?:www\\.)?meetup\\.com/.+$", "^http(?:s)?://(?:www\\.)?meetup\\.ps/.+$" ], "https://roomshare.jp/oembed.{format}": [ "^http(?:s)?://(?:www\\.)?roomshare\\.jp/(?:en/)?post/.+$" ], "http://crowdranking.com/api/oembed.{format}": [ "^http(?:s)?://crowdranking\\.com/crowdrankings/.+$", "^http(?:s)?://crowdranking\\.com/rankings/.+$", "^http(?:s)?://crowdranking\\.com/topics/.+$", "^http(?:s)?://crowdranking\\.com/widgets/.+$", "^http(?:s)?://crowdranking\\.com/r/.+$" ], "http://openapi.etsy.com/svc/oembed/": [ "^http(?:s)?://(?:www\\.)?etsy\\.com/listing/.+$" ], "https://audioboo.fm/publishing/oembed.{format}": [ "^http(?:s)?://audioboo\\.fm/boos/.+$" ], "http://demo.clikthrough.com/services/oembed/": [ "^http(?:s)?://demo\\.clikthrough\\.com/theater/video/.+$" ], "http://www.ifttt.com/oembed/": [ "^http(?:s)?://ifttt\\.com/recipes/.+$" ], # Added 11th December 2014 - http://developers.issuu.com/api/oembed.html "http://issuu.com/oembed": [ "^http(?:s)?://(?:www\\.)?issuu\\.com/[^#?/]+/docs/.+$" ], } # Compile endpoints into regular expression objects import re def compile_endpoints(): endpoints = {} for endpoint in OEMBED_ENDPOINTS.keys(): endpoint_key = endpoint.replace('{format}', 'json') endpoints[endpoint_key] = [] for pattern in OEMBED_ENDPOINTS[endpoint]: endpoints[endpoint_key].append(re.compile(pattern)) return endpoints OEMBED_ENDPOINTS_COMPILED = compile_endpoints() def get_oembed_provider(url): for endpoint in OEMBED_ENDPOINTS_COMPILED.keys(): for pattern in OEMBED_ENDPOINTS_COMPILED[endpoint]: if re.match(pattern, url): return endpoint return
[ [ [ 0, 16 ], [ 11736, 11752 ], [ 11882, 11898 ] ], [ [ 11667, 11669 ], [ 11953, 11955 ], [ 12206, 12208 ] ], [ [ 11676, 11693 ], [ 12025, 12042 ] ], [ [ 11997, 12022 ], [ 12097, 12122 ], [ 12154, 12179 ] ], [ [ 12051, 12070 ] ] ]
""" The MIT License (MIT) Copyright (c) 2015-2021 Rapptz Copyright (c) 2021-2021 Pycord Development Copyright (c) 2021-present Texus Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. """ from __future__ import annotations import asyncio import types import functools import inspect from collections import OrderedDict from typing import Any, Callable, Dict, List, Optional, Union, TYPE_CHECKING from ..enums import SlashCommandOptionType, ChannelType from ..member import Member from ..user import User from ..message import Message from .context import ApplicationContext, AutocompleteContext from ..utils import find, get_or_fetch, async_all from ..errors import ValidationError, ClientException from .errors import ApplicationCommandError, CheckFailure, ApplicationCommandInvokeError from .permissions import Permission __all__ = ( "_BaseCommand", "ApplicationCommand", "SlashCommand", "Option", "OptionChoice", "option", "slash_command", "application_command", "user_command", "message_command", "command", "SlashCommandGroup", "ContextMenuCommand", "UserCommand", "MessageCommand", ) if TYPE_CHECKING: from ..interactions import Interaction def wrap_callback(coro): @functools.wraps(coro) async def wrapped(*args, **kwargs): try: ret = await coro(*args, **kwargs) except ApplicationCommandError: raise except asyncio.CancelledError: return except Exception as exc: raise ApplicationCommandInvokeError(exc) from exc return ret return wrapped def hooked_wrapped_callback(command, ctx, coro): @functools.wraps(coro) async def wrapped(arg): try: ret = await coro(arg) except ApplicationCommandError: raise except asyncio.CancelledError: return except Exception as exc: raise ApplicationCommandInvokeError(exc) from exc finally: await command.call_after_hooks(ctx) return ret return wrapped class _BaseCommand: __slots__ = () class ApplicationCommand(_BaseCommand): cog = None def __repr__(self): return f"<discord.commands.{self.__class__.__name__} name={self.name}>" def __eq__(self, other): return isinstance(other, self.__class__) async def __call__(self, ctx, *args, **kwargs): """|coro| Calls the command's callback. This method bypasses all checks that a command has and does not convert the arguments beforehand, so take care to pass the correct arguments in. 
""" return await self.callback(ctx, *args, **kwargs) async def prepare(self, ctx: ApplicationContext) -> None: # This should be same across all 3 types ctx.command = self if not await self.can_run(ctx): raise CheckFailure( f"The check functions for the command {self.name} failed" ) # TODO: Add cooldown await self.call_before_hooks(ctx) async def invoke(self, ctx: ApplicationContext) -> None: await self.prepare(ctx) injected = hooked_wrapped_callback(self, ctx, self._invoke) await injected(ctx) async def can_run(self, ctx: ApplicationContext) -> bool: if not await ctx.bot.can_run(ctx): raise CheckFailure( f"The global check functions for command {self.name} failed." ) predicates = self.checks if not predicates: # since we have no checks, then we just return True. return True return await async_all(predicate(ctx) for predicate in predicates) # type: ignore async def dispatch_error(self, ctx: ApplicationContext, error: Exception) -> None: ctx.command_failed = True cog = self.cog try: coro = self.on_error except AttributeError: pass else: injected = wrap_callback(coro) if cog is not None: await injected(cog, ctx, error) else: await injected(ctx, error) try: if cog is not None: local = cog.__class__._get_overridden_method(cog.cog_command_error) if local is not None: wrapped = wrap_callback(local) await wrapped(ctx, error) finally: ctx.bot.dispatch("application_command_error", ctx, error) def _get_signature_parameters(self): return OrderedDict(inspect.signature(self.callback).parameters) def error(self, coro): """A decorator that registers a coroutine as a local error handler. A local error handler is an :func:`.on_command_error` event limited to a single command. However, the :func:`.on_command_error` is still invoked afterwards as the catch-all. Parameters ----------- coro: :ref:`coroutine <coroutine>` The coroutine to register as the local error handler. Raises ------- TypeError The coroutine passed is not actually a coroutine. """ if not asyncio.iscoroutinefunction(coro): raise TypeError("The error handler must be a coroutine.") self.on_error = coro return coro def has_error_handler(self) -> bool: """:class:`bool`: Checks whether the command has an error handler registered.""" return hasattr(self, "on_error") def before_invoke(self, coro): """A decorator that registers a coroutine as a pre-invoke hook. A pre-invoke hook is called directly before the command is called. This makes it a useful function to set up database connections or any type of set up required. This pre-invoke hook takes a sole parameter, a :class:`.Context`. See :meth:`.Bot.before_invoke` for more info. Parameters ----------- coro: :ref:`coroutine <coroutine>` The coroutine to register as the pre-invoke hook. Raises ------- TypeError The coroutine passed is not actually a coroutine. """ if not asyncio.iscoroutinefunction(coro): raise TypeError("The pre-invoke hook must be a coroutine.") self._before_invoke = coro return coro def after_invoke(self, coro): """A decorator that registers a coroutine as a post-invoke hook. A post-invoke hook is called directly after the command is called. This makes it a useful function to clean-up database connections or any type of clean up required. This post-invoke hook takes a sole parameter, a :class:`.Context`. See :meth:`.Bot.after_invoke` for more info. Parameters ----------- coro: :ref:`coroutine <coroutine>` The coroutine to register as the post-invoke hook. Raises ------- TypeError The coroutine passed is not actually a coroutine. 
""" if not asyncio.iscoroutinefunction(coro): raise TypeError("The post-invoke hook must be a coroutine.") self._after_invoke = coro return coro async def call_before_hooks(self, ctx: ApplicationContext) -> None: # now that we're done preparing we can call the pre-command hooks # first, call the command local hook: cog = self.cog if self._before_invoke is not None: # should be cog if @commands.before_invoke is used instance = getattr(self._before_invoke, "__self__", cog) # __self__ only exists for methods, not functions # however, if @command.before_invoke is used, it will be a function if instance: await self._before_invoke(instance, ctx) # type: ignore else: await self._before_invoke(ctx) # type: ignore # call the cog local hook if applicable: if cog is not None: hook = cog.__class__._get_overridden_method(cog.cog_before_invoke) if hook is not None: await hook(ctx) # call the bot global hook if necessary hook = ctx.bot._before_invoke if hook is not None: await hook(ctx) async def call_after_hooks(self, ctx: ApplicationContext) -> None: cog = self.cog if self._after_invoke is not None: instance = getattr(self._after_invoke, "__self__", cog) if instance: await self._after_invoke(instance, ctx) # type: ignore else: await self._after_invoke(ctx) # type: ignore # call the cog local hook if applicable: if cog is not None: hook = cog.__class__._get_overridden_method(cog.cog_after_invoke) if hook is not None: await hook(ctx) hook = ctx.bot._after_invoke if hook is not None: await hook(ctx) @property def full_parent_name(self) -> str: """:class:`str`: Retrieves the fully qualified parent command name. This the base command name required to execute it. For example, in ``/one two three`` the parent name would be ``one two``. """ entries = [] command = self while command.parent is not None and hasattr(command.parent, "name"): command = command.parent entries.append(command.name) return " ".join(reversed(entries)) def qualified_name(self) -> str: """:class:`str`: Retrieves the fully qualified command name. This is the full parent name with the command name as well. For example, in ``/one two three`` the qualified name would be ``one two three``. """ parent = self.full_parent_name if parent: return parent + " " + self.name else: return self.name class SlashCommand(ApplicationCommand): r"""A class that implements the protocol for a slash command. These are not created manually, instead they are created via the decorator or functional interface. Attributes ----------- name: :class:`str` The name of the command. callback: :ref:`coroutine <coroutine>` The coroutine that is executed when the command is called. description: Optional[:class:`str`] The description for the command. guild_ids: Optional[List[:class:`int`]] The ids of the guilds where this command will be registered. options: List[:class:`Option`] The parameters for this command. parent: Optional[:class:`SlashCommandGroup`] The parent group that this command belongs to. ``None`` if there isn't one. default_permission: :class:`bool` Whether the command is enabled by default when it is added to a guild. permissions: List[:class:`Permission`] The permissions for this command. .. note:: If this is not empty then default_permissions will be set to False. cog: Optional[:class:`Cog`] The cog that this command belongs to. ``None`` if there isn't one. checks: List[Callable[[:class:`.ApplicationContext`], :class:`bool`]] A list of predicates that verifies if the command could be executed with the given :class:`.ApplicationContext` as the sole parameter. 
If an exception is necessary to be thrown to signal failure, then one inherited from :exc:`.CommandError` should be used. Note that if the checks fail then :exc:`.CheckFailure` exception is raised to the :func:`.on_application_command_error` event. """ type = 1 def __new__(cls, *args, **kwargs) -> SlashCommand: self = super().__new__(cls) self.__original_kwargs__ = kwargs.copy() return self def __init__(self, func: Callable, *args, **kwargs) -> None: if not asyncio.iscoroutinefunction(func): raise TypeError("Callback must be a coroutine.") self.callback = func self.guild_ids: Optional[List[int]] = kwargs.get("guild_ids", None) name = kwargs.get("name") or func.__name__ validate_chat_input_name(name) self.name: str = name self.id = None description = kwargs.get("description") or ( inspect.cleandoc(func.__doc__).splitlines()[0] if func.__doc__ is not None else "No description provided" ) validate_chat_input_description(description) self.description: str = description self.parent = kwargs.get("parent") self.is_subcommand: bool = self.parent is not None self.cog = None params = self._get_signature_parameters() self.options: List[Option] = kwargs.get("options") or self._parse_options( params ) try: checks = func.__commands_checks__ checks.reverse() except AttributeError: checks = kwargs.get("checks", []) self.checks = checks self._before_invoke = None self._after_invoke = None # Permissions self.default_permission = kwargs.get("default_permission", True) self.permissions: List[Permission] = getattr( func, "__app_cmd_perms__", [] ) + kwargs.get("permissions", []) if self.permissions and self.default_permission: self.default_permission = False def _parse_options(self, params) -> List[Option]: final_options = [] if list(params.items())[0][0] == "self": temp = list(params.items()) temp.pop(0) params = dict(temp) params = iter(params.items()) # next we have the 'ctx' as the next parameter try: next(params) except StopIteration: raise ClientException( f'Callback for {self.name} command is missing "ctx" parameter.' 
) final_options = [] for p_name, p_obj in params: option = p_obj.annotation if option == inspect.Parameter.empty: option = str if self._is_typing_union(option): if self._is_typing_optional(option): option = Option( option.__args__[0], "No description provided", required=False ) else: option = Option(option.__args__, "No description provided") if not isinstance(option, Option): option = Option(option, "No description provided") if p_obj.default != inspect.Parameter.empty: option.required = False option.default = ( option.default if option.default is not None else p_obj.default ) if option.default == inspect.Parameter.empty: option.default = None if option.name is None: option.name = p_name option._parameter_name = p_name final_options.append(option) return final_options def _is_typing_union(self, annotation): return getattr(annotation, "__origin__", None) is Union or type( annotation ) is getattr( types, "UnionType", Union ) # type: ignore def _is_typing_optional(self, annotation): return self._is_typing_union(annotation) and type(None) in annotation.__args__ # type: ignore def to_dict(self) -> Dict: as_dict = { "name": self.name, "description": self.description, "options": [o.to_dict() for o in self.options], "default_permission": self.default_permission, } if self.is_subcommand: as_dict["type"] = SlashCommandOptionType.sub_command.value return as_dict def __eq__(self, other) -> bool: return ( isinstance(other, SlashCommand) and other.name == self.name and other.description == self.description ) async def _invoke(self, ctx: ApplicationContext) -> None: # TODO: Parse the args better kwargs = {} for arg in ctx.interaction.data.get("options", []): op = find(lambda x: x.name == arg["name"], self.options) arg = arg["value"] # Checks if input_type is user, role or channel if ( SlashCommandOptionType.user.value <= op.input_type.value <= SlashCommandOptionType.role.value ): name = "member" if op.input_type.name == "user" else op.input_type.name arg = await get_or_fetch(ctx.guild, name, int(arg), default=int(arg)) elif op.input_type == SlashCommandOptionType.mentionable: arg_id = int(arg) arg = await get_or_fetch(ctx.guild, "member", arg_id) if arg is None: arg = ctx.guild.get_role(arg_id) or arg_id elif ( op.input_type == SlashCommandOptionType.string and op._converter is not None ): arg = await op._converter.convert(ctx, arg) kwargs[op._parameter_name] = arg for o in self.options: if o._parameter_name not in kwargs: kwargs[o._parameter_name] = o.default if self.cog is not None: await self.callback(self.cog, ctx, **kwargs) else: await self.callback(ctx, **kwargs) async def invoke_autocomplete_callback(self, ctx: AutocompleteContext): values = {i.name: i.default for i in self.options} for op in ctx.interaction.data.get("options", []): if op.get("focused", False): option = find(lambda o: o.name == op["name"], self.options) values.update( {i["name"]: i["value"] for i in ctx.interaction.data["options"]} ) ctx.command = self ctx.focused = option ctx.value = op.get("value") ctx.options = values if len(inspect.signature(option.autocomplete).parameters) == 2: instance = getattr(option.autocomplete, "__self__", ctx.cog) result = option.autocomplete(instance, ctx) else: result = option.autocomplete(ctx) if asyncio.iscoroutinefunction(option.autocomplete): result = await result choices = [ o if isinstance(o, OptionChoice) else OptionChoice(o) for o in result ][:25] return await ctx.interaction.response.send_autocomplete_result( choices=choices ) def copy(self): """Creates a copy of this command. 
Returns -------- :class:`SlashCommand` A new instance of this command. """ ret = self.__class__(self.callback, **self.__original_kwargs__) return self._ensure_assignment_on_copy(ret) def _ensure_assignment_on_copy(self, other): other._before_invoke = self._before_invoke other._after_invoke = self._after_invoke if self.checks != other.checks: other.checks = self.checks.copy() # if self._buckets.valid and not other._buckets.valid: # other._buckets = self._buckets.copy() # if self._max_concurrency != other._max_concurrency: # # _max_concurrency won't be None at this point # other._max_concurrency = self._max_concurrency.copy() # type: ignore try: other.on_error = self.on_error except AttributeError: pass return other def _update_copy(self, kwargs: Dict[str, Any]): if kwargs: kw = kwargs.copy() kw.update(self.__original_kwargs__) copy = self.__class__(self.callback, **kw) return self._ensure_assignment_on_copy(copy) else: return self.copy() channel_type_map = { "TextChannel": ChannelType.text, "VoiceChannel": ChannelType.voice, "StageChannel": ChannelType.stage_voice, "CategoryChannel": ChannelType.category, } class Option: def __init__(self, input_type: Any, /, description: str = None, **kwargs) -> None: self.name: Optional[str] = kwargs.pop("name", None) self.description = description or "No description provided" self._converter = None self.channel_types: List[SlashCommandOptionType] = kwargs.pop( "channel_types", [] ) if not isinstance(input_type, SlashCommandOptionType): if hasattr(input_type, "convert"): self._converter = input_type input_type = SlashCommandOptionType.string else: _type = SlashCommandOptionType.from_datatype(input_type) if _type == SlashCommandOptionType.channel: if not isinstance(input_type, tuple): input_type = (input_type,) for i in input_type: if i.__name__ == "GuildChannel": continue channel_type = channel_type_map[i.__name__] self.channel_types.append(channel_type) input_type = _type self.input_type = input_type self.required: bool = kwargs.pop("required", True) self.choices: List[OptionChoice] = [ o if isinstance(o, OptionChoice) else OptionChoice(o) for o in kwargs.pop("choices", list()) ] self.default = kwargs.pop("default", None) if self.input_type == SlashCommandOptionType.integer: minmax_types = (int, type(None)) elif self.input_type == SlashCommandOptionType.number: minmax_types = (int, float, type(None)) else: minmax_types = (type(None),) minmax_typehint = Optional[Union[minmax_types]] # type: ignore self.min_value: minmax_typehint = kwargs.pop("min_value", None) self.max_value: minmax_typehint = kwargs.pop("max_value", None) if not (isinstance(self.min_value, minmax_types) or self.min_value is None): raise TypeError( f'Expected {minmax_typehint} for min_value, got "{type(self.min_value).__name__}"' ) if not (isinstance(self.max_value, minmax_types) or self.min_value is None): raise TypeError( f'Expected {minmax_typehint} for max_value, got "{type(self.max_value).__name__}"' ) self.autocomplete = kwargs.pop("autocomplete", None) def to_dict(self) -> Dict: as_dict = { "name": self.name, "description": self.description, "type": self.input_type.value, "required": self.required, "choices": [c.to_dict() for c in self.choices], "autocomplete": bool(self.autocomplete), } if self.channel_types: as_dict["channel_types"] = [t.value for t in self.channel_types] if self.min_value is not None: as_dict["min_value"] = self.min_value if self.max_value is not None: as_dict["max_value"] = self.max_value return as_dict def __repr__(self): return 
f"<discord.commands.{self.__class__.__name__} name={self.name}>" class OptionChoice: def __init__(self, name: str, value: Optional[Union[str, int, float]] = None): self.name = name self.value = value or name def to_dict(self) -> Dict[str, Union[str, int, float]]: return {"name": self.name, "value": self.value} def option(name, type=None, **kwargs): """A decorator that can be used instead of typehinting Option""" def decor(func): nonlocal type type = type or func.__annotations__.get(name, str) func.__annotations__[name] = Option(type, **kwargs) return func return decor class SlashCommandGroup(ApplicationCommand, Option): r"""A class that implements the protocol for a slash command group. These can be created manually, but they should be created via the decorator or functional interface. Attributes ----------- name: :class:`str` The name of the command. description: Optional[:class:`str`] The description for the command. guild_ids: Optional[List[:class:`int`]] The ids of the guilds where this command will be registered. parent: Optional[:class:`SlashCommandGroup`] The parent group that this group belongs to. ``None`` if there isn't one. subcommands: List[Union[:class:`SlashCommand`, :class:`SlashCommandGroup`]] The list of all subcommands under this group. cog: Optional[:class:`Cog`] The cog that this command belongs to. ``None`` if there isn't one. checks: List[Callable[[:class:`.ApplicationContext`], :class:`bool`]] A list of predicates that verifies if the command could be executed with the given :class:`.ApplicationContext` as the sole parameter. If an exception is necessary to be thrown to signal failure, then one inherited from :exc:`.CommandError` should be used. Note that if the checks fail then :exc:`.CheckFailure` exception is raised to the :func:`.on_application_command_error` event. 
""" type = 1 def __new__(cls, *args, **kwargs) -> SlashCommandGroup: self = super().__new__(cls) self.__original_kwargs__ = kwargs.copy() return self def __init__( self, name: str, description: str, guild_ids: Optional[List[int]] = None, parent: Optional[SlashCommandGroup] = None, **kwargs, ) -> None: validate_chat_input_name(name) validate_chat_input_description(description) super().__init__( SlashCommandOptionType.sub_command_group, name=name, description=description, ) self.subcommands: List[Union[SlashCommand, SlashCommandGroup]] = [] self.guild_ids = guild_ids self.parent = parent self.checks = [] self._before_invoke = None self._after_invoke = None self.cog = None # Permissions self.default_permission = kwargs.get("default_permission", True) self.permissions: List[Permission] = kwargs.get("permissions", []) if self.permissions and self.default_permission: self.default_permission = False def to_dict(self) -> Dict: as_dict = { "name": self.name, "description": self.description, "options": [c.to_dict() for c in self.subcommands], } if self.parent is not None: as_dict["type"] = self.input_type.value return as_dict def command(self, **kwargs) -> SlashCommand: def wrap(func) -> SlashCommand: command = SlashCommand(func, parent=self, **kwargs) self.subcommands.append(command) return command return wrap def command_group(self, name, description) -> SlashCommandGroup: if self.parent is not None: # TODO: Improve this error message raise Exception("Subcommands can only be nested once") sub_command_group = SlashCommandGroup(name, description, parent=self) self.subcommands.append(sub_command_group) return sub_command_group async def _invoke(self, ctx: ApplicationContext) -> None: option = ctx.interaction.data["options"][0] command = find(lambda x: x.name == option["name"], self.subcommands) ctx.interaction.data = option await command.invoke(ctx) async def invoke_autocomplete_callback(self, ctx: AutocompleteContext) -> None: option = ctx.interaction.data["options"][0] command = find(lambda x: x.name == option["name"], self.subcommands) ctx.interaction.data = option await command.invoke_autocomplete_callback(ctx) class ContextMenuCommand(ApplicationCommand): r"""A class that implements the protocol for context menu commands. These are not created manually, instead they are created via the decorator or functional interface. Attributes ----------- name: :class:`str` The name of the command. callback: :ref:`coroutine <coroutine>` The coroutine that is executed when the command is called. guild_ids: Optional[List[:class:`int`]] The ids of the guilds where this command will be registered. cog: Optional[:class:`Cog`] The cog that this command belongs to. ``None`` if there isn't one. checks: List[Callable[[:class:`.ApplicationContext`], :class:`bool`]] A list of predicates that verifies if the command could be executed with the given :class:`.ApplicationContext` as the sole parameter. If an exception is necessary to be thrown to signal failure, then one inherited from :exc:`.CommandError` should be used. Note that if the checks fail then :exc:`.CheckFailure` exception is raised to the :func:`.on_application_command_error` event. 
""" def __new__(cls, *args, **kwargs) -> ContextMenuCommand: self = super().__new__(cls) self.__original_kwargs__ = kwargs.copy() return self def __init__(self, func: Callable, *args, **kwargs) -> None: if not asyncio.iscoroutinefunction(func): raise TypeError("Callback must be a coroutine.") self.callback = func self.guild_ids: Optional[List[int]] = kwargs.get("guild_ids", None) # Discord API doesn't support setting descriptions for context menu commands # so it must be empty self.description = "" self.name: str = kwargs.pop("name", func.__name__) if not isinstance(self.name, str): raise TypeError("Name of a command must be a string.") self.cog = None try: checks = func.__commands_checks__ checks.reverse() except AttributeError: checks = kwargs.get("checks", []) self.checks = checks self._before_invoke = None self._after_invoke = None self.validate_parameters() # Context Menu commands don't have permissions self.permissions = [] # Context Menu commands can't have parents self.parent = None def validate_parameters(self): params = self._get_signature_parameters() if list(params.items())[0][0] == "self": temp = list(params.items()) temp.pop(0) params = dict(temp) params = iter(params) # next we have the 'ctx' as the next parameter try: next(params) except StopIteration: raise ClientException( f'Callback for {self.name} command is missing "ctx" parameter.' ) # next we have the 'user/message' as the next parameter try: next(params) except StopIteration: cmd = "user" if type(self) == UserCommand else "message" raise ClientException( f'Callback for {self.name} command is missing "{cmd}" parameter.' ) # next there should be no more parameters try: next(params) raise ClientException( f"Callback for {self.name} command has too many parameters." ) except StopIteration: pass def qualified_name(self): return self.name def to_dict(self) -> Dict[str, Union[str, int]]: return {"name": self.name, "description": self.description, "type": self.type} class UserCommand(ContextMenuCommand): type = 2 def __new__(cls, *args, **kwargs) -> UserCommand: self = super().__new__(cls) self.__original_kwargs__ = kwargs.copy() return self async def _invoke(self, ctx: ApplicationContext) -> None: if "members" not in ctx.interaction.data["resolved"]: _data = ctx.interaction.data["resolved"]["users"] for i, v in _data.items(): v["id"] = int(i) user = v target = User(state=ctx.interaction._state, data=user) else: _data = ctx.interaction.data["resolved"]["members"] for i, v in _data.items(): v["id"] = int(i) member = v _data = ctx.interaction.data["resolved"]["users"] for i, v in _data.items(): v["id"] = int(i) user = v member["user"] = user target = Member( data=member, guild=ctx.interaction._state._get_guild(ctx.interaction.guild_id), state=ctx.interaction._state, ) if self.cog is not None: await self.callback(self.cog, ctx, target) else: await self.callback(ctx, target) def copy(self): """Creates a copy of this command. Returns -------- :class:`UserCommand` A new instance of this command. 
""" ret = self.__class__(self.callback, **self.__original_kwargs__) return self._ensure_assignment_on_copy(ret) def _ensure_assignment_on_copy(self, other): other._before_invoke = self._before_invoke other._after_invoke = self._after_invoke if self.checks != other.checks: other.checks = self.checks.copy() # if self._buckets.valid and not other._buckets.valid: # other._buckets = self._buckets.copy() # if self._max_concurrency != other._max_concurrency: # # _max_concurrency won't be None at this point # other._max_concurrency = self._max_concurrency.copy() # type: ignore try: other.on_error = self.on_error except AttributeError: pass return other def _update_copy(self, kwargs: Dict[str, Any]): if kwargs: kw = kwargs.copy() kw.update(self.__original_kwargs__) copy = self.__class__(self.callback, **kw) return self._ensure_assignment_on_copy(copy) else: return self.copy() class MessageCommand(ContextMenuCommand): type = 3 def __new__(cls, *args, **kwargs) -> MessageCommand: self = super().__new__(cls) self.__original_kwargs__ = kwargs.copy() return self async def _invoke(self, ctx: ApplicationContext): _data = ctx.interaction.data["resolved"]["messages"] for i, v in _data.items(): v["id"] = int(i) message = v channel = ctx.interaction._state.get_channel(int(message["channel_id"])) if channel is None: data = await ctx.interaction._state.http.start_private_message( int(message["author"]["id"]) ) channel = ctx.interaction._state.add_dm_channel(data) target = Message(state=ctx.interaction._state, channel=channel, data=message) if self.cog is not None: await self.callback(self.cog, ctx, target) else: await self.callback(ctx, target) def copy(self): """Creates a copy of this command. Returns -------- :class:`MessageCommand` A new instance of this command. """ ret = self.__class__(self.callback, **self.__original_kwargs__) return self._ensure_assignment_on_copy(ret) def _ensure_assignment_on_copy(self, other): other._before_invoke = self._before_invoke other._after_invoke = self._after_invoke if self.checks != other.checks: other.checks = self.checks.copy() # if self._buckets.valid and not other._buckets.valid: # other._buckets = self._buckets.copy() # if self._max_concurrency != other._max_concurrency: # # _max_concurrency won't be None at this point # other._max_concurrency = self._max_concurrency.copy() # type: ignore try: other.on_error = self.on_error except AttributeError: pass return other def _update_copy(self, kwargs: Dict[str, Any]): if kwargs: kw = kwargs.copy() kw.update(self.__original_kwargs__) copy = self.__class__(self.callback, **kw) return self._ensure_assignment_on_copy(copy) else: return self.copy() def slash_command(**kwargs): """Decorator for slash commands that invokes :func:`application_command`. .. versionadded:: 2.0 Returns -------- Callable[..., :class:`SlashCommand`] A decorator that converts the provided method into a :class:`.SlashCommand`. """ return application_command(cls=SlashCommand, **kwargs) def user_command(**kwargs): """Decorator for user commands that invokes :func:`application_command`. .. versionadded:: 2.0 Returns -------- Callable[..., :class:`UserCommand`] A decorator that converts the provided method into a :class:`.UserCommand`. """ return application_command(cls=UserCommand, **kwargs) def message_command(**kwargs): """Decorator for message commands that invokes :func:`application_command`. .. versionadded:: 2.0 Returns -------- Callable[..., :class:`MessageCommand`] A decorator that converts the provided method into a :class:`.MessageCommand`. 
""" return application_command(cls=MessageCommand, **kwargs) def application_command(cls=SlashCommand, **attrs): """A decorator that transforms a function into an :class:`.ApplicationCommand`. More specifically, usually one of :class:`.SlashCommand`, :class:`.UserCommand`, or :class:`.MessageCommand`. The exact class depends on the ``cls`` parameter. By default the ``description`` attribute is received automatically from the docstring of the function and is cleaned up with the use of ``inspect.cleandoc``. If the docstring is ``bytes``, then it is decoded into :class:`str` using utf-8 encoding. The ``name`` attribute also defaults to the function name unchanged. .. versionadded:: 2.0 Parameters ----------- cls: :class:`.ApplicationCommand` The class to construct with. By default this is :class:`.SlashCommand`. You usually do not change this. attrs Keyword arguments to pass into the construction of the class denoted by ``cls``. Raises ------- TypeError If the function is not a coroutine or is already a command. """ def decorator(func: Callable) -> cls: if isinstance(func, ApplicationCommand): func = func.callback elif not callable(func): raise TypeError( "func needs to be a callable or a subclass of ApplicationCommand." ) return cls(func, **attrs) return decorator def command(**kwargs): """There is an alias for :meth:`application_command`. .. note:: This decorator is overridden by :func:`commands.command`. .. versionadded:: 2.0 Returns -------- Callable[..., :class:`ApplicationCommand`] A decorator that converts the provided method into an :class:`.ApplicationCommand`. """ return application_command(**kwargs) # Validation def validate_chat_input_name(name: Any): if not isinstance(name, str): raise TypeError("Name of a command must be a string.") if " " in name: raise ValidationError("Name of a chat input command cannot have spaces.") if not name.islower(): raise ValidationError("Name of a chat input command must be lowercase.") if len(name) > 32 or len(name) < 1: raise ValidationError( "Name of a chat input command must be less than 32 characters and non empty." ) def validate_chat_input_description(description: Any): if not isinstance(description, str): raise TypeError("Description of a command must be a string.") if len(description) > 100 or len(description) < 1: raise ValidationError( "Description of a chat input command must be less than 100 characters and non empty." )
[ [ [ 1186, 1197 ] ], [ [ 1206, 1213 ], [ 6223, 6230 ], [ 7254, 7261 ], [ 8126, 8133 ], [ 13033, 13040 ], [ 19638, 19645 ], [ 30841, 30848 ], [ 2416, 2423 ], [ 2819, 2826 ] ], [ [ 1221, 1226 ], [ 16431, 16436 ] ], [ [ 1234, 1243 ], [ 2222, 2231 ], [ 2649, 2658 ] ], [ [ 1251, 1258 ], [ 5586, 5593 ], [ 13445, 13452 ], [ 15222, 15229 ], [ 15774, 15781 ], [ 16003, 16010 ], [ 19340, 19347 ] ], [ [ 1283, 1294 ], [ 5574, 5585 ] ], [ [ 1314, 1317 ], [ 21061, 21064 ], [ 21565, 21568 ], [ 35474, 35477 ], [ 37743, 37746 ], [ 40939, 40942 ], [ 41474, 41477 ] ], [ [ 1319, 1327 ], [ 12982, 12990 ], [ 30790, 30798 ], [ 40172, 40180 ] ], [ [ 1329, 1333 ], [ 16660, 16664 ], [ 21051, 21055 ], [ 24023, 24027 ], [ 24936, 24940 ], [ 27940, 27944 ], [ 33040, 33044 ], [ 35464, 35468 ], [ 37733, 37737 ] ], [ [ 1335, 1339 ], [ 13192, 13196 ], [ 13882, 13886 ], [ 14360, 14364 ], [ 14614, 14618 ], [ 21804, 21808 ], [ 22784, 22788 ], [ 27037, 27041 ], [ 27409, 27413 ], [ 27764, 27768 ], [ 31000, 31004 ] ], [ [ 1341, 1349 ], [ 13183, 13191 ], [ 21636, 21644 ], [ 23289, 23297 ], [ 24808, 24816 ], [ 27028, 27036 ], [ 27072, 27080 ], [ 30991, 30999 ] ], [ [ 1351, 1356 ], [ 16359, 16364 ], [ 16451, 16456 ], [ 23298, 23303 ], [ 24817, 24822 ], [ 24946, 24951 ], [ 27414, 27419 ], [ 33050, 33055 ] ], [ [ 1358, 1371 ], [ 2132, 2145 ] ], [ [ 1393, 1415 ], [ 16952, 16974 ], [ 17595, 17617 ], [ 17687, 17709 ], [ 17945, 17967 ], [ 18233, 18255 ], [ 21809, 21831 ], [ 21927, 21949 ], [ 22073, 22095 ], [ 22145, 22167 ], [ 22222, 22244 ], [ 23016, 23038 ], [ 23125, 23147 ], [ 27271, 27293 ] ], [ [ 1417, 1428 ], [ 21365, 21376 ], [ 21403, 21414 ], [ 21442, 21453 ], [ 21490, 21501 ] ], [ [ 1450, 1456 ], [ 34112, 34118 ] ], [ [ 1476, 1480 ], [ 33675, 33679 ] ], [ [ 1503, 1510 ], [ 36489, 36496 ] ], [ [ 1532, 1550 ], [ 3725, 3743 ], [ 4097, 4115 ], [ 4289, 4307 ], [ 4769, 4787 ], [ 8333, 8351 ], [ 9411, 9429 ], [ 17254, 17272 ], [ 28893, 28911 ], [ 33404, 33422 ], [ 35991, 36009 ] ], [ [ 1552, 1571 ], [ 18771, 18790 ], [ 29178, 29197 ] ], [ [ 1592, 1596 ], [ 17418, 17422 ], [ 18978, 18982 ], [ 28992, 28996 ], [ 29278, 29282 ] ], [ [ 1598, 1610 ], [ 17852, 17864 ], [ 18043, 18055 ] ], [ [ 1612, 1621 ], [ 4658, 4667 ] ], [ [ 1643, 1658 ], [ 41076, 41091 ], [ 41185, 41200 ], [ 41306, 41321 ], [ 41660, 41675 ] ], [ [ 1660, 1675 ], [ 14981, 14996 ], [ 32252, 32267 ], [ 32583, 32598 ], [ 32803, 32818 ] ], [ [ 1696, 1719 ], [ 2358, 2381 ], [ 2761, 2784 ] ], [ [ 1721, 1733 ], [ 3889, 3901 ], [ 4380, 4392 ] ], [ [ 1735, 1764 ], [ 2510, 2539 ], [ 2913, 2942 ] ], [ [ 1790, 1800 ], [ 14365, 14375 ], [ 27769, 27779 ] ], [ [ 1802, 1809 ] ], [ [ 2178, 2189 ] ], [ [ 2196, 2209 ], [ 5004, 5017 ], [ 5363, 5376 ] ], [ [ 2599, 2622 ], [ 4178, 4201 ] ], [ [ 3069, 3081 ], [ 3129, 3141 ] ], [ [ 3110, 3128 ], [ 11048, 11066 ], [ 25364, 25382 ], [ 29458, 29476 ], [ 40218, 40236 ] ], [ [ 11035, 11047 ], [ 39099, 39111 ], [ 12832, 12844 ], [ 17102, 17114 ], [ 27420, 27432 ], [ 28265, 28277 ], [ 38334, 38346 ], [ 28305, 28317 ], [ 28341, 28353 ] ], [ [ 21325, 21341 ], [ 22538, 22554 ] ], [ [ 21522, 21528 ], [ 25384, 25390 ], [ 13887, 13893 ], [ 14619, 14625 ], [ 15405, 15411 ], [ 15572, 15578 ], [ 15662, 15668 ], [ 15696, 15702 ], [ 25277, 25283 ] ], [ [ 24753, 24765 ], [ 19798, 19810 ], [ 19817, 19829 ], [ 22838, 22850 ], [ 22857, 22869 ], [ 22789, 22801 ] ], [ [ 25033, 25039 ] ], [ [ 25346, 25363 ], [ 26806, 26823 ], [ 27081, 27098 ], [ 27434, 27451 ], [ 28527, 28544 ], [ 28725, 28742 ] ], [ [ 29439, 29457 ], [ 33175, 33193 ], [ 35759, 35777 ], [ 30634, 30652 ] 
], [ [ 33163, 33174 ], [ 32538, 32549 ], [ 33251, 33262 ], [ 38683, 38694 ] ], [ [ 35744, 35758 ], [ 35835, 35849 ], [ 39043, 39057 ] ], [ [ 38011, 38024 ] ], [ [ 38364, 38376 ] ], [ [ 38712, 38727 ] ], [ [ 39075, 39094 ], [ 38310, 38329 ], [ 38659, 38678 ], [ 39019, 39038 ], [ 40859, 40878 ] ], [ [ 40493, 40500 ] ], [ [ 40908, 40932 ], [ 13295, 13319 ], [ 27149, 27173 ] ], [ [ 41429, 41460 ], [ 13593, 13624 ], [ 27188, 27219 ] ] ]
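# ---------------------------------------------------------------------------
# Editor's note -- an assumption inferred from the data above, not documented
# by the dataset: each ``def_use_chains`` entry appears to be a chain of
# [start, end] character offsets into the row's ``code`` string, where the
# first span marks a name's definition and the remaining spans its uses.
# A minimal decoder under that interpretation, on a hypothetical toy row:
# ---------------------------------------------------------------------------

toy_code = "import os\nprint(os.sep)"
toy_chains = [[[7, 9], [16, 18]]]  # one chain: definition span, then use spans

for chain in toy_chains:
    def_start, def_end = chain[0]
    name = toy_code[def_start:def_end]            # -> "os" (the definition)
    uses = [toy_code[s:e] for s, e in chain[1:]]  # -> ["os"] (each use site)
    print(name, uses)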
import sqlalchemy as sa
from sqlalchemy import and_
from sqlalchemy import event
from sqlalchemy import exc
from sqlalchemy import func
from sqlalchemy import Integer
from sqlalchemy import MetaData
from sqlalchemy import select
from sqlalchemy import String
from sqlalchemy import testing
from sqlalchemy import text
from sqlalchemy.ext.declarative import comparable_using
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import aliased
from sqlalchemy.orm import AttributeExtension
from sqlalchemy.orm import attributes
from sqlalchemy.orm import collections
from sqlalchemy.orm import column_property
from sqlalchemy.orm import comparable_property
from sqlalchemy.orm import composite
from sqlalchemy.orm import configure_mappers
from sqlalchemy.orm import contains_eager
from sqlalchemy.orm import create_session
from sqlalchemy.orm import defer
from sqlalchemy.orm import deferred
from sqlalchemy.orm import EXT_CONTINUE
from sqlalchemy.orm import identity
from sqlalchemy.orm import instrumentation
from sqlalchemy.orm import joinedload
from sqlalchemy.orm import joinedload_all
from sqlalchemy.orm import mapper
from sqlalchemy.orm import MapperExtension
from sqlalchemy.orm import PropComparator
from sqlalchemy.orm import relationship
from sqlalchemy.orm import Session
from sqlalchemy.orm import SessionExtension
from sqlalchemy.orm import sessionmaker
from sqlalchemy.orm import synonym
from sqlalchemy.orm import undefer
from sqlalchemy.orm import with_polymorphic
from sqlalchemy.orm.collections import collection
from sqlalchemy.orm.util import polymorphic_union
from sqlalchemy.testing import assert_raises
from sqlalchemy.testing import assert_raises_message
from sqlalchemy.testing import assertions
from sqlalchemy.testing import AssertsCompiledSQL
from sqlalchemy.testing import eq_
from sqlalchemy.testing import fixtures
from sqlalchemy.testing import is_
from sqlalchemy.testing import is_true
from sqlalchemy.testing.schema import Column
from sqlalchemy.testing.schema import Table
from sqlalchemy.testing.util import gc_collect
from sqlalchemy.util.compat import pypy
from . import _fixtures
from .inheritance import _poly_fixtures
from .test_options import PathTest as OptionsPathTest
from .test_transaction import _LocalFixture


class DeprecationWarningsTest(fixtures.DeclarativeMappedTest):
    run_setup_classes = "each"
    run_setup_mappers = "each"
    run_define_tables = "each"
    run_create_tables = None

    def test_attribute_extension(self):
        class SomeExtension(AttributeExtension):
            def append(self, obj, value, initiator):
                pass

            def remove(self, obj, value, initiator):
                pass

            def set(self, obj, value, oldvalue, initiator):
                pass

        with assertions.expect_deprecated(
            ".*The column_property.extension parameter will be removed in a "
            "future release."
        ):

            class Foo(self.DeclarativeBasic):
                __tablename__ = "foo"

                id = Column(Integer, primary_key=True)
                foo = column_property(
                    Column("q", Integer), extension=SomeExtension()
                )

        with assertions.expect_deprecated(
            "AttributeExtension.append is deprecated. The "
            "AttributeExtension class will be removed in a future release.",
            "AttributeExtension.remove is deprecated. The "
            "AttributeExtension class will be removed in a future release.",
            "AttributeExtension.set is deprecated. The "
            "AttributeExtension class will be removed in a future release.",
        ):
            configure_mappers()

    def test_attribute_extension_parameter(self):
        class SomeExtension(AttributeExtension):
            def append(self, obj, value, initiator):
                pass

        with assertions.expect_deprecated(
            ".*The relationship.extension parameter will be removed in a "
            "future release."
        ):
            relationship("Bar", extension=SomeExtension)

        with assertions.expect_deprecated(
            ".*The column_property.extension parameter will be removed in a "
            "future release."
        ):
            column_property(Column("q", Integer), extension=SomeExtension)

        with assertions.expect_deprecated(
            ".*The composite.extension parameter will be removed in a "
            "future release."
        ):
            composite("foo", extension=SomeExtension)

    def test_session_extension(self):
        class SomeExtension(SessionExtension):
            def after_commit(self, session):
                pass

            def after_rollback(self, session):
                pass

            def before_flush(self, session, flush_context, instances):
                pass

        with assertions.expect_deprecated(
            ".*The Session.extension parameter will be removed",
            "SessionExtension.after_commit is deprecated. "
            "The SessionExtension class",
            "SessionExtension.before_flush is deprecated. "
            "The SessionExtension class",
            "SessionExtension.after_rollback is deprecated. "
            "The SessionExtension class",
        ):
            Session(extension=SomeExtension())

    def test_mapper_extension(self):
        class SomeExtension(MapperExtension):
            def init_instance(
                self, mapper, class_, oldinit, instance, args, kwargs
            ):
                pass

            def init_failed(
                self, mapper, class_, oldinit, instance, args, kwargs
            ):
                pass

        with assertions.expect_deprecated(
            "MapperExtension.init_instance is deprecated. "
            "The MapperExtension class",
            "MapperExtension.init_failed is deprecated. "
            "The MapperExtension class",
            ".*The mapper.extension parameter will be removed",
        ):

            class Foo(self.DeclarativeBasic):
                __tablename__ = "foo"

                id = Column(Integer, primary_key=True)

                __mapper_args__ = {"extension": SomeExtension()}

    def test_session_weak_identity_map(self):
        with testing.expect_deprecated(
            ".*Session.weak_identity_map parameter as well as the"
        ):
            s = Session(weak_identity_map=True)

        is_(s._identity_cls, identity.WeakInstanceDict)

        with assertions.expect_deprecated(
            "The Session.weak_identity_map parameter as well as"
        ):
            s = Session(weak_identity_map=False)

        is_(s._identity_cls, identity.StrongInstanceDict)

        s = Session()
        is_(s._identity_cls, identity.WeakInstanceDict)

    def test_session_prune(self):
        s = Session()

        with assertions.expect_deprecated(
            r"The Session.prune\(\) method is deprecated along with "
            "Session.weak_identity_map"
        ):
            s.prune()

    def test_session_enable_transaction_accounting(self):
        with assertions.expect_deprecated(
            "the Session._enable_transaction_accounting parameter is "
            "deprecated"
        ):
            Session(_enable_transaction_accounting=False)

    def test_session_is_modified(self):
        class Foo(self.DeclarativeBasic):
            __tablename__ = "foo"

            id = Column(Integer, primary_key=True)

        f1 = Foo()
        s = Session()
        with assertions.expect_deprecated(
            "The Session.is_modified.passive flag is deprecated"
        ):
            # this flag was for a long time documented as requiring
            # that it be set to True, so we've changed the default here
            # so that the warning emits
            s.is_modified(f1, passive=True)


class DeprecatedAccountingFlagsTest(_LocalFixture):
    def test_rollback_no_accounting(self):
        User, users = self.classes.User, self.tables.users

        with testing.expect_deprecated(
            "The Session._enable_transaction_accounting parameter"
        ):
            sess = sessionmaker(_enable_transaction_accounting=False)()
        u1 = User(name="ed")
        sess.add(u1)
        sess.commit()

        u1.name = "edwardo"
        sess.rollback()

        testing.db.execute(
            users.update(users.c.name == "ed").values(name="edward")
        )

        assert u1.name == "edwardo"
        sess.expire_all()
        assert u1.name == "edward"

    def test_commit_no_accounting(self):
        User, users = self.classes.User, self.tables.users

        with testing.expect_deprecated(
            "The Session._enable_transaction_accounting parameter"
        ):
            sess = sessionmaker(_enable_transaction_accounting=False)()
        u1 = User(name="ed")
        sess.add(u1)
        sess.commit()

        u1.name = "edwardo"
        sess.rollback()

        testing.db.execute(
            users.update(users.c.name == "ed").values(name="edward")
        )

        assert u1.name == "edwardo"
        sess.commit()

        assert testing.db.execute(select([users.c.name])).fetchall() == [
            ("edwardo",)
        ]
        assert u1.name == "edwardo"

        sess.delete(u1)
        sess.commit()

    def test_preflush_no_accounting(self):
        User, users = self.classes.User, self.tables.users

        with testing.expect_deprecated(
            "The Session._enable_transaction_accounting parameter"
        ):
            sess = Session(
                _enable_transaction_accounting=False,
                autocommit=True,
                autoflush=False,
            )
        u1 = User(name="ed")
        sess.add(u1)
        sess.flush()

        sess.begin()
        u1.name = "edwardo"
        u2 = User(name="some other user")
        sess.add(u2)

        sess.rollback()

        sess.begin()
        assert testing.db.execute(select([users.c.name])).fetchall() == [
            ("ed",)
        ]


class DeprecatedSessionFeatureTest(_fixtures.FixtureTest):
    run_inserts = None

    def test_fast_discard_race(self):
        # test issue #4068
        users, User = self.tables.users, self.classes.User

        mapper(User, users)

        with testing.expect_deprecated(".*identity map are deprecated"):
            sess = Session(weak_identity_map=False)

        u1 = User(name="u1")
        sess.add(u1)
        sess.commit()

        u1_state = u1._sa_instance_state
        sess.identity_map._dict.pop(u1_state.key)
        ref = u1_state.obj
        u1_state.obj = lambda: None

        u2 = sess.query(User).first()
        u1_state._cleanup(ref)

        u3 = sess.query(User).first()

        is_(u2, u3)

        u2_state = u2._sa_instance_state
        assert sess.identity_map.contains_state(u2._sa_instance_state)
        ref = u2_state.obj
        u2_state.obj = lambda: None
        u2_state._cleanup(ref)
        assert not sess.identity_map.contains_state(u2._sa_instance_state)

    def test_is_modified_passive_on(self):
        User, Address = self.classes.User, self.classes.Address
        users, addresses = self.tables.users, self.tables.addresses

        mapper(User, users, properties={"addresses": relationship(Address)})
        mapper(Address, addresses)

        s = Session()
        u = User(name="fred", addresses=[Address(email_address="foo")])
        s.add(u)
        s.commit()

        u.id

        def go():
            assert not s.is_modified(u, passive=True)

        with testing.expect_deprecated(
            ".*Session.is_modified.passive flag is deprecated "
        ):
            self.assert_sql_count(testing.db, go, 0)

        u.name = "newname"

        def go():
            assert s.is_modified(u, passive=True)

        with testing.expect_deprecated(
            ".*Session.is_modified.passive flag is deprecated "
        ):
            self.assert_sql_count(testing.db, go, 0)


class StrongIdentityMapTest(_fixtures.FixtureTest):
    run_inserts = None

    def _strong_ident_fixture(self):
        with testing.expect_deprecated(
            ".*Session.weak_identity_map parameter as well as the"
        ):
            sess = create_session(weak_identity_map=False)

        def prune():
            with testing.expect_deprecated(".*Session.prune"):
                return sess.prune()

        return sess, prune

    def _event_fixture(self):
        session = create_session()

        @event.listens_for(session, "pending_to_persistent")
        @event.listens_for(session, "deleted_to_persistent")
        @event.listens_for(session, "detached_to_persistent")
        @event.listens_for(session, "loaded_as_persistent")
        def strong_ref_object(sess, instance):
            if "refs" not in sess.info:
                sess.info["refs"] = refs = set()
            else:
                refs = sess.info["refs"]

            refs.add(instance)

        @event.listens_for(session, "persistent_to_detached")
        @event.listens_for(session, "persistent_to_deleted")
        @event.listens_for(session, "persistent_to_transient")
        def deref_object(sess, instance):
            sess.info["refs"].discard(instance)

        def prune():
            if "refs" not in session.info:
                return 0

            sess_size = len(session.identity_map)
            session.info["refs"].clear()
            gc_collect()
            session.info["refs"] = set(
                s.obj() for s in session.identity_map.all_states()
            )
            return sess_size - len(session.identity_map)

        return session, prune

    def test_strong_ref_imap(self):
        self._test_strong_ref(self._strong_ident_fixture)

    def test_strong_ref_events(self):
        self._test_strong_ref(self._event_fixture)

    def _test_strong_ref(self, fixture):
        s, prune = fixture()

        users, User = self.tables.users, self.classes.User

        mapper(User, users)

        # save user
        s.add(User(name="u1"))
        s.flush()
        user = s.query(User).one()
        user = None
        print(s.identity_map)
        gc_collect()
        assert len(s.identity_map) == 1

        user = s.query(User).one()
        assert not s.identity_map._modified
        user.name = "u2"
        assert s.identity_map._modified
        s.flush()
        eq_(users.select().execute().fetchall(), [(user.id, "u2")])

    def test_prune_imap(self):
        self._test_prune(self._strong_ident_fixture)

    def test_prune_events(self):
        self._test_prune(self._event_fixture)

    @testing.fails_if(lambda: pypy, "pypy has a real GC")
    @testing.fails_on("+zxjdbc", "http://www.sqlalchemy.org/trac/ticket/1473")
    def _test_prune(self, fixture):
        s, prune = fixture()

        users, User = self.tables.users, self.classes.User

        mapper(User, users)

        for o in [User(name="u%s" % x) for x in range(10)]:
            s.add(o)
        # o is still live after this loop...

        self.assert_(len(s.identity_map) == 0)
        eq_(prune(), 0)
        s.flush()
        gc_collect()
        eq_(prune(), 9)
        # o is still in local scope here, so still present
        self.assert_(len(s.identity_map) == 1)

        id_ = o.id
        del o
        eq_(prune(), 1)
        self.assert_(len(s.identity_map) == 0)

        u = s.query(User).get(id_)
        eq_(prune(), 0)
        self.assert_(len(s.identity_map) == 1)

        u.name = "squiznart"
        del u
        eq_(prune(), 0)
        self.assert_(len(s.identity_map) == 1)
        s.flush()
        eq_(prune(), 1)
        self.assert_(len(s.identity_map) == 0)

        s.add(User(name="x"))
        eq_(prune(), 0)
        self.assert_(len(s.identity_map) == 0)
        s.flush()
        self.assert_(len(s.identity_map) == 1)
        eq_(prune(), 1)
        self.assert_(len(s.identity_map) == 0)

        u = s.query(User).get(id_)
        s.delete(u)
        del u
        eq_(prune(), 0)
        self.assert_(len(s.identity_map) == 1)
        s.flush()
        eq_(prune(), 0)
        self.assert_(len(s.identity_map) == 0)


class DeprecatedQueryTest(_fixtures.FixtureTest, AssertsCompiledSQL):
    __dialect__ = "default"

    run_setup_mappers = "once"
    run_inserts = "once"
    run_deletes = None

    @classmethod
    def setup_mappers(cls):
        cls._setup_stock_mapping()

    @classmethod
    def _expect_implicit_subquery(cls):
        return assertions.expect_deprecated(
            "Implicit coercion of SELECT and textual SELECT constructs into "
            r"FROM clauses is deprecated; please call \.subquery\(\) on any "
            "Core select or ORM Query object in order to produce a "
            "subquery object."
        )

    def test_via_textasfrom_select_from(self):
        User = self.classes.User
        s = create_session()

        with self._expect_implicit_subquery():
            eq_(
                s.query(User)
                .select_from(
                    text("select * from users").columns(
                        id=Integer, name=String
                    )
                )
                .order_by(User.id)
                .all(),
                [User(id=7), User(id=8), User(id=9), User(id=10)],
            )

    def test_query_as_scalar(self):
        User = self.classes.User

        s = Session()
        with assertions.expect_deprecated(
            r"The Query.as_scalar\(\) method is deprecated and will "
            "be removed in a future release."
        ):
            s.query(User).as_scalar()

    def test_select_entity_from_crit(self):
        User, users = self.classes.User, self.tables.users

        sel = users.select()
        sess = create_session()

        with self._expect_implicit_subquery():
            eq_(
                sess.query(User)
                .select_entity_from(sel)
                .filter(User.id.in_([7, 8]))
                .all(),
                [User(name="jack", id=7), User(name="ed", id=8)],
            )

    def test_select_entity_from_select(self):
        User, users = self.classes.User, self.tables.users

        sess = create_session()
        with self._expect_implicit_subquery():
            self.assert_compile(
                sess.query(User.name).select_entity_from(
                    users.select().where(users.c.id > 5)
                ),
                "SELECT anon_1.name AS anon_1_name FROM "
                "(SELECT users.id AS id, users.name AS name FROM users "
                "WHERE users.id > :id_1) AS anon_1",
            )

    def test_select_entity_from_q_statement(self):
        User = self.classes.User

        sess = create_session()

        q = sess.query(User)
        with self._expect_implicit_subquery():
            q = sess.query(User).select_entity_from(q.statement)

        self.assert_compile(
            q.filter(User.name == "ed"),
            "SELECT anon_1.id AS anon_1_id, anon_1.name AS anon_1_name "
            "FROM (SELECT users.id AS id, users.name AS name FROM "
            "users) AS anon_1 WHERE anon_1.name = :name_1",
        )

    def test_select_from_q_statement_no_aliasing(self):
        User = self.classes.User
        sess = create_session()

        q = sess.query(User)
        with self._expect_implicit_subquery():
            q = sess.query(User).select_from(q.statement)

        self.assert_compile(
            q.filter(User.name == "ed"),
            "SELECT users.id AS users_id, users.name AS users_name "
            "FROM users, (SELECT users.id AS id, users.name AS name FROM "
            "users) AS anon_1 WHERE users.name = :name_1",
        )

    def test_from_alias_three(self):
        User, addresses, users = (
            self.classes.User,
            self.tables.addresses,
            self.tables.users,
        )

        query = (
            users.select(users.c.id == 7)
            .union(users.select(users.c.id > 7))
            .alias("ulist")
            .outerjoin(addresses)
            .select(
                use_labels=True, order_by=[text("ulist.id"), addresses.c.id]
            )
        )
        sess = create_session()

        # better way.  use select_entity_from()
        def go():
            with self._expect_implicit_subquery():
                result = (
                    sess.query(User)
                    .select_entity_from(query)
                    .options(contains_eager("addresses"))
                    .all()
                )
            assert self.static.user_address_result == result

        self.assert_sql_count(testing.db, go, 1)

    def test_from_alias_four(self):
        User, addresses, users = (
            self.classes.User,
            self.tables.addresses,
            self.tables.users,
        )

        sess = create_session()

        # same thing, but alias addresses, so that the adapter
        # generated by select_entity_from() is wrapped within
        # the adapter created by contains_eager()
        adalias = addresses.alias()
        query = (
            users.select(users.c.id == 7)
            .union(users.select(users.c.id > 7))
            .alias("ulist")
            .outerjoin(adalias)
            .select(use_labels=True, order_by=[text("ulist.id"), adalias.c.id])
        )

        def go():
            with self._expect_implicit_subquery():
                result = (
                    sess.query(User)
                    .select_entity_from(query)
                    .options(contains_eager("addresses", alias=adalias))
                    .all()
                )
            assert self.static.user_address_result == result

        self.assert_sql_count(testing.db, go, 1)

    def test_select(self):
        users = self.tables.users

        sess = create_session()

        with self._expect_implicit_subquery():
            self.assert_compile(
                sess.query(users)
                .select_entity_from(users.select())
                .with_labels()
                .statement,
                "SELECT users.id AS users_id, users.name AS users_name "
                "FROM users, "
                "(SELECT users.id AS id, users.name AS name FROM users) "
                "AS anon_1",
            )

    def test_join(self):
        users, Address, User = (
            self.tables.users,
            self.classes.Address,
            self.classes.User,
        )

        # mapper(User, users, properties={"addresses": relationship(Address)})
        # mapper(Address, addresses)

        sel = users.select(users.c.id.in_([7, 8]))
        sess = create_session()

        with self._expect_implicit_subquery():
            result = (
                sess.query(User)
                .select_entity_from(sel)
                .join("addresses")
                .add_entity(Address)
                .order_by(User.id)
                .order_by(Address.id)
                .all()
            )

        eq_(
            result,
            [
                (
                    User(name="jack", id=7),
                    Address(user_id=7, email_address="jack@bean.com", id=1),
                ),
                (
                    User(name="ed", id=8),
                    Address(user_id=8, email_address="ed@wood.com", id=2),
                ),
                (
                    User(name="ed", id=8),
                    Address(user_id=8, email_address="ed@bettyboop.com", id=3),
                ),
                (
                    User(name="ed", id=8),
                    Address(user_id=8, email_address="ed@lala.com", id=4),
                ),
            ],
        )

        adalias = aliased(Address)
        with self._expect_implicit_subquery():
            result = (
                sess.query(User)
                .select_entity_from(sel)
                .join(adalias, "addresses")
                .add_entity(adalias)
                .order_by(User.id)
                .order_by(adalias.id)
                .all()
            )
        eq_(
            result,
            [
                (
                    User(name="jack", id=7),
                    Address(user_id=7, email_address="jack@bean.com", id=1),
                ),
                (
                    User(name="ed", id=8),
                    Address(user_id=8, email_address="ed@wood.com", id=2),
                ),
                (
                    User(name="ed", id=8),
                    Address(user_id=8, email_address="ed@bettyboop.com", id=3),
                ),
                (
                    User(name="ed", id=8),
                    Address(user_id=8, email_address="ed@lala.com", id=4),
                ),
            ],
        )

    def test_more_joins(self):
        (users, Keyword, User) = (
            self.tables.users,
            self.classes.Keyword,
            self.classes.User,
        )

        sess = create_session()
        sel = users.select(users.c.id.in_([7, 8]))

        with self._expect_implicit_subquery():
            eq_(
                sess.query(User)
                .select_entity_from(sel)
                .join("orders", "items", "keywords")
                .filter(Keyword.name.in_(["red", "big", "round"]))
                .all(),
                [User(name="jack", id=7)],
            )

        with self._expect_implicit_subquery():
            eq_(
                sess.query(User)
                .select_entity_from(sel)
                .join("orders", "items", "keywords", aliased=True)
                .filter(Keyword.name.in_(["red", "big", "round"]))
                .all(),
                [User(name="jack", id=7)],
            )

    def test_join_no_order_by(self):
        User, users = self.classes.User, self.tables.users

        sel = users.select(users.c.id.in_([7, 8]))
        sess = create_session()

        with self._expect_implicit_subquery():
            eq_(
                sess.query(User).select_entity_from(sel).all(),
                [User(name="jack", id=7), User(name="ed", id=8)],
            )

    def test_replace_with_eager(self):
        users, Address, User = (
            self.tables.users,
            self.classes.Address,
            self.classes.User,
        )

        sel = users.select(users.c.id.in_([7, 8]))
        sess = create_session()

        def go():
            with self._expect_implicit_subquery():
                eq_(
                    sess.query(User)
                    .options(joinedload("addresses"))
                    .select_entity_from(sel)
                    .order_by(User.id)
                    .all(),
                    [
                        User(id=7, addresses=[Address(id=1)]),
                        User(
                            id=8,
                            addresses=[
                                Address(id=2),
                                Address(id=3),
                                Address(id=4),
                            ],
                        ),
                    ],
                )

        self.assert_sql_count(testing.db, go, 1)
        sess.expunge_all()

        def go():
            with self._expect_implicit_subquery():
                eq_(
                    sess.query(User)
                    .options(joinedload("addresses"))
                    .select_entity_from(sel)
                    .filter(User.id == 8)
                    .order_by(User.id)
                    .all(),
                    [
                        User(
                            id=8,
                            addresses=[
                                Address(id=2),
                                Address(id=3),
                                Address(id=4),
                            ],
                        )
                    ],
                )

        self.assert_sql_count(testing.db, go, 1)
        sess.expunge_all()

        def go():
            with self._expect_implicit_subquery():
                eq_(
                    sess.query(User)
                    .options(joinedload("addresses"))
                    .select_entity_from(sel)
                    .order_by(User.id)[1],
                    User(
                        id=8,
                        addresses=[
                            Address(id=2),
                            Address(id=3),
                            Address(id=4),
                        ],
                    ),
                )

        self.assert_sql_count(testing.db, go, 1)

    def test_onclause_conditional_adaption(self):
        Item, Order, orders, order_items, User = (
            self.classes.Item,
            self.classes.Order,
            self.tables.orders,
            self.tables.order_items,
            self.classes.User,
        )

        sess = Session()

        oalias = orders.select()

        with self._expect_implicit_subquery():
            self.assert_compile(
                sess.query(User)
                .join(oalias, User.orders)
                .join(
                    Item,
                    and_(
                        Order.id == order_items.c.order_id,
                        order_items.c.item_id == Item.id,
                    ),
                    from_joinpoint=True,
                ),
                "SELECT users.id AS users_id, users.name AS users_name "
                "FROM users JOIN "
                "(SELECT orders.id AS id, orders.user_id AS user_id, "
                "orders.address_id AS address_id, orders.description "
                "AS description, orders.isopen AS isopen FROM orders) "
                "AS anon_1 ON users.id = anon_1.user_id JOIN items "
                "ON anon_1.id = order_items.order_id "
                "AND order_items.item_id = items.id",
                use_default_dialect=True,
            )


class DeprecatedInhTest(_poly_fixtures._Polymorphic):
    def test_with_polymorphic(self):
        Person = _poly_fixtures.Person
        Engineer = _poly_fixtures.Engineer

        with DeprecatedQueryTest._expect_implicit_subquery():
            p_poly = with_polymorphic(Person, [Engineer], select([Person]))

        is_true(
            sa.inspect(p_poly).selectable.compare(select([Person]).subquery())
        )

    def test_multiple_adaption(self):
        """test that multiple filter() adapters get chained together
        and work correctly within a multiple-entry join()."""

        Company = _poly_fixtures.Company
        Machine = _poly_fixtures.Machine
        Engineer = _poly_fixtures.Engineer

        people = self.tables.people
        engineers = self.tables.engineers
        machines = self.tables.machines

        sess = create_session()

        mach_alias = machines.select()
        with DeprecatedQueryTest._expect_implicit_subquery():
            self.assert_compile(
                sess.query(Company)
                .join(people.join(engineers), Company.employees)
                .join(mach_alias, Engineer.machines, from_joinpoint=True)
                .filter(Engineer.name == "dilbert")
                .filter(Machine.name == "foo"),
                "SELECT companies.company_id AS companies_company_id, "
                "companies.name AS companies_name "
                "FROM companies JOIN (people "
                "JOIN engineers ON people.person_id = "
                "engineers.person_id) ON companies.company_id = "
                "people.company_id JOIN "
                "(SELECT machines.machine_id AS machine_id, "
                "machines.name AS name, "
                "machines.engineer_id AS engineer_id "
                "FROM machines) AS anon_1 "
                "ON engineers.person_id = anon_1.engineer_id "
                "WHERE people.name = :name_1 AND anon_1.name = :name_2",
                use_default_dialect=True,
            )


class DeprecatedMapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
    __dialect__ = "default"

    def test_polymorphic_union_w_select(self):
        users, addresses = self.tables.users, self.tables.addresses

        with DeprecatedQueryTest._expect_implicit_subquery():
            dep = polymorphic_union(
                {"u": users.select(), "a": addresses.select()},
                "type",
                "bcjoin",
            )

        subq_version = polymorphic_union(
            {
                "u": users.select().subquery(),
                "a": addresses.select().subquery(),
            },
            "type",
            "bcjoin",
        )
        is_true(dep.compare(subq_version))

    def test_cancel_order_by(self):
        users, User = self.tables.users, self.classes.User

        with testing.expect_deprecated(
            "The Mapper.order_by parameter is deprecated, and will be "
            "removed in a future release."
        ):
            mapper(User, users, order_by=users.c.name.desc())

        assert (
            "order by users.name desc"
            in str(create_session().query(User).statement).lower()
        )
        assert (
            "order by"
            not in str(
                create_session().query(User).order_by(None).statement
            ).lower()
        )
        assert (
            "order by users.name asc"
            in str(
                create_session()
                .query(User)
                .order_by(User.name.asc())
                .statement
            ).lower()
        )

        eq_(
            create_session().query(User).all(),
            [
                User(id=7, name="jack"),
                User(id=9, name="fred"),
                User(id=8, name="ed"),
                User(id=10, name="chuck"),
            ],
        )

        eq_(
            create_session().query(User).order_by(User.name).all(),
            [
                User(id=10, name="chuck"),
                User(id=8, name="ed"),
                User(id=9, name="fred"),
                User(id=7, name="jack"),
            ],
        )

    def test_comparable(self):
        users = self.tables.users

        class extendedproperty(property):
            attribute = 123

            def method1(self):
                return "method1"

        from sqlalchemy.orm.properties import ColumnProperty

        class UCComparator(ColumnProperty.Comparator):
            __hash__ = None

            def method1(self):
                return "uccmethod1"

            def method2(self, other):
                return "method2"

            def __eq__(self, other):
                cls = self.prop.parent.class_
                col = getattr(cls, "name")
                if other is None:
                    return col is None
                else:
                    return sa.func.upper(col) == sa.func.upper(other)

        def map_(with_explicit_property):
            class User(object):
                @extendedproperty
                def uc_name(self):
                    if self.name is None:
                        return None
                    return self.name.upper()

            if with_explicit_property:
                args = (UCComparator, User.uc_name)
            else:
                args = (UCComparator,)

            with assertions.expect_deprecated(
                r"comparable_property\(\) is deprecated and will be "
                "removed in a future release."
            ):
                mapper(
                    User,
                    users,
                    properties=dict(uc_name=sa.orm.comparable_property(*args)),
                )
            return User

        for User in (map_(True), map_(False)):
            sess = create_session()
            sess.begin()
            q = sess.query(User)

            assert hasattr(User, "name")
            assert hasattr(User, "uc_name")

            eq_(User.uc_name.method1(), "method1")
            eq_(User.uc_name.method2("x"), "method2")

            assert_raises_message(
                AttributeError,
                "Neither 'extendedproperty' object nor 'UCComparator' "
                "object associated with User.uc_name has an attribute "
                "'nonexistent'",
                getattr,
                User.uc_name,
                "nonexistent",
            )

            # test compile
            assert not isinstance(User.uc_name == "jack", bool)
            u = q.filter(User.uc_name == "JACK").one()

            assert u.uc_name == "JACK"
            assert u not in sess.dirty

            u.name = "some user name"
            eq_(u.name, "some user name")

            assert u in sess.dirty
            eq_(u.uc_name, "SOME USER NAME")

            sess.flush()
            sess.expunge_all()

            q = sess.query(User)
            u2 = q.filter(User.name == "some user name").one()
            u3 = q.filter(User.uc_name == "SOME USER NAME").one()

            assert u2 is u3

            eq_(User.uc_name.attribute, 123)
            sess.rollback()

    def test_comparable_column(self):
        users, User = self.tables.users, self.classes.User

        class MyComparator(sa.orm.properties.ColumnProperty.Comparator):
            __hash__ = None

            def __eq__(self, other):
                # lower case comparison
                return func.lower(self.__clause_element__()) == func.lower(
                    other
                )

            def intersects(self, other):
                # non-standard comparator
                return self.__clause_element__().op("&=")(other)

        mapper(
            User,
            users,
            properties={
                "name": sa.orm.column_property(
                    users.c.name, comparator_factory=MyComparator
                )
            },
        )

        assert_raises_message(
            AttributeError,
            "Neither 'InstrumentedAttribute' object nor "
            "'MyComparator' object associated with User.name has "
            "an attribute 'nonexistent'",
            getattr,
            User.name,
            "nonexistent",
        )

        eq_(
            str(
                (User.name == "ed").compile(
                    dialect=sa.engine.default.DefaultDialect()
                )
            ),
            "lower(users.name) = lower(:lower_1)",
        )
        eq_(
            str(
                (User.name.intersects("ed")).compile(
                    dialect=sa.engine.default.DefaultDialect()
                )
            ),
            "users.name &= :name_1",
        )

    def test_info(self):
        class MyComposite(object):
            pass

        with assertions.expect_deprecated(
            r"comparable_property\(\) is deprecated and will be "
            "removed in a future release."
        ):
            for constructor, args in [(comparable_property, "foo")]:
                obj = constructor(info={"x": "y"}, *args)
                eq_(obj.info, {"x": "y"})
                obj.info["q"] = "p"
                eq_(obj.info, {"x": "y", "q": "p"})

                obj = constructor(*args)
                eq_(obj.info, {})
                obj.info["q"] = "p"
                eq_(obj.info, {"q": "p"})

    def test_add_property(self):
        users = self.tables.users

        assert_col = []

        class User(fixtures.ComparableEntity):
            def _get_name(self):
                assert_col.append(("get", self._name))
                return self._name

            def _set_name(self, name):
                assert_col.append(("set", name))
                self._name = name

            name = property(_get_name, _set_name)

            def _uc_name(self):
                if self._name is None:
                    return None
                return self._name.upper()

            uc_name = property(_uc_name)
            uc_name2 = property(_uc_name)

        m = mapper(User, users)

        class UCComparator(PropComparator):
            __hash__ = None

            def __eq__(self, other):
                cls = self.prop.parent.class_
                col = getattr(cls, "name")
                if other is None:
                    return col is None
                else:
                    return func.upper(col) == func.upper(other)

        m.add_property("_name", deferred(users.c.name))
        m.add_property("name", synonym("_name"))
        with assertions.expect_deprecated(
            r"comparable_property\(\) is deprecated and will be "
            "removed in a future release."
        ):
            m.add_property("uc_name", comparable_property(UCComparator))
            m.add_property(
                "uc_name2", comparable_property(UCComparator, User.uc_name2)
            )
        sess = create_session(autocommit=False)
        assert sess.query(User).get(7)

        u = sess.query(User).filter_by(name="jack").one()

        def go():
            eq_(u.name, "jack")
            eq_(u.uc_name, "JACK")
            eq_(u.uc_name2, "JACK")
            eq_(assert_col, [("get", "jack")], str(assert_col))

        self.sql_count_(1, go)

    def test_kwarg_accepted(self):
        class DummyComposite(object):
            def __init__(self, x, y):
                pass

        class MyFactory(PropComparator):
            pass

        with assertions.expect_deprecated(
            r"comparable_property\(\) is deprecated and will be "
            "removed in a future release."
        ):
            for args in ((comparable_property,),):
                fn = args[0]
                args = args[1:]
                fn(comparator_factory=MyFactory, *args)

    def test_merge_synonym_comparable(self):
        users = self.tables.users

        class User(object):
            class Comparator(PropComparator):
                pass

            def _getValue(self):
                return self._value

            def _setValue(self, value):
                setattr(self, "_value", value)

            value = property(_getValue, _setValue)

        with assertions.expect_deprecated(
            r"comparable_property\(\) is deprecated and will be "
            "removed in a future release."
        ):
            mapper(
                User,
                users,
                properties={
                    "uid": synonym("id"),
                    "foobar": comparable_property(User.Comparator, User.value),
                },
            )

        sess = create_session()
        u = User()
        u.name = "ed"
        sess.add(u)
        sess.flush()
        sess.expunge(u)
        sess.merge(u)


class DeprecatedDeclTest(fixtures.TestBase):
    @testing.provide_metadata
    def test_comparable_using(self):
        class NameComparator(sa.orm.PropComparator):
            @property
            def upperself(self):
                cls = self.prop.parent.class_
                col = getattr(cls, "name")
                return sa.func.upper(col)

            def operate(self, op, other, **kw):
                return op(self.upperself, other, **kw)

        Base = declarative_base(metadata=self.metadata)

        with testing.expect_deprecated(
            r"comparable_property\(\) is deprecated and will be "
            "removed in a future release."
        ):

            class User(Base, fixtures.ComparableEntity):
                __tablename__ = "users"
                id = Column(
                    "id",
                    Integer,
                    primary_key=True,
                    test_needs_autoincrement=True,
                )
                name = Column("name", String(50))

                @comparable_using(NameComparator)
                @property
                def uc_name(self):
                    return self.name is not None and self.name.upper() or None

        Base.metadata.create_all()
        sess = create_session()
        u1 = User(name="someuser")
        eq_(u1.name, "someuser", u1.name)
        eq_(u1.uc_name, "SOMEUSER", u1.uc_name)
        sess.add(u1)
        sess.flush()
        sess.expunge_all()

        rt = sess.query(User).filter(User.uc_name == "SOMEUSER").one()
        eq_(rt, u1)
        sess.expunge_all()

        rt = sess.query(User).filter(User.uc_name.startswith("SOMEUSE")).one()
        eq_(rt, u1)


class DeprecatedMapperExtensionTest(_fixtures.FixtureTest):

    """Superseded by MapperEventsTest - test backwards
    compatibility of MapperExtension."""

    run_inserts = None

    def extension(self):
        methods = []

        class Ext(MapperExtension):
            def instrument_class(self, mapper, cls):
                methods.append("instrument_class")
                return EXT_CONTINUE

            def init_instance(
                self, mapper, class_, oldinit, instance, args, kwargs
            ):
                methods.append("init_instance")
                return EXT_CONTINUE

            def init_failed(
                self, mapper, class_, oldinit, instance, args, kwargs
            ):
                methods.append("init_failed")
                return EXT_CONTINUE

            def reconstruct_instance(self, mapper, instance):
                methods.append("reconstruct_instance")
                return EXT_CONTINUE

            def before_insert(self, mapper, connection, instance):
                methods.append("before_insert")
                return EXT_CONTINUE

            def after_insert(self, mapper, connection, instance):
                methods.append("after_insert")
                return EXT_CONTINUE

            def before_update(self, mapper, connection, instance):
                methods.append("before_update")
                return EXT_CONTINUE

            def after_update(self, mapper, connection, instance):
                methods.append("after_update")
                return EXT_CONTINUE

            def before_delete(self, mapper, connection, instance):
                methods.append("before_delete")
                return EXT_CONTINUE

            def after_delete(self, mapper, connection, instance):
                methods.append("after_delete")
                return EXT_CONTINUE

        return Ext, methods

    def test_basic(self):
        """test that common user-defined methods get called."""

        User, users = self.classes.User, self.tables.users

        Ext, methods = self.extension()

        with testing.expect_deprecated(
            "MapperExtension is deprecated in favor of the MapperEvents",
            "MapperExtension.before_insert is deprecated",
            "MapperExtension.instrument_class is deprecated",
            "MapperExtension.init_instance is deprecated",
            "MapperExtension.after_insert is deprecated",
            "MapperExtension.reconstruct_instance is deprecated",
            "MapperExtension.before_delete is deprecated",
            "MapperExtension.after_delete is deprecated",
            "MapperExtension.before_update is deprecated",
            "MapperExtension.after_update is deprecated",
            "MapperExtension.init_failed is deprecated",
        ):
            mapper(User, users, extension=Ext())

        sess = create_session()
        u = User(name="u1")
        sess.add(u)
        sess.flush()
        u = sess.query(User).populate_existing().get(u.id)
        sess.expunge_all()
        u = sess.query(User).get(u.id)
        u.name = "u1 changed"
        sess.flush()
        sess.delete(u)
        sess.flush()
        eq_(
            methods,
            [
                "instrument_class",
                "init_instance",
                "before_insert",
                "after_insert",
                "reconstruct_instance",
                "before_update",
                "after_update",
                "before_delete",
                "after_delete",
            ],
        )

    def test_inheritance(self):
        users, addresses, User = (
            self.tables.users,
            self.tables.addresses,
            self.classes.User,
        )

        Ext, methods = self.extension()

        class AdminUser(User):
            pass

        with testing.expect_deprecated(
            "MapperExtension is deprecated in favor of the MapperEvents",
            "MapperExtension.before_insert is deprecated",
            "MapperExtension.instrument_class is deprecated",
            "MapperExtension.init_instance is deprecated",
            "MapperExtension.after_insert is deprecated",
            "MapperExtension.reconstruct_instance is deprecated",
            "MapperExtension.before_delete is deprecated",
            "MapperExtension.after_delete is deprecated",
            "MapperExtension.before_update is deprecated",
            "MapperExtension.after_update is deprecated",
            "MapperExtension.init_failed is deprecated",
        ):
            mapper(User, users, extension=Ext())
            mapper(
                AdminUser,
                addresses,
                inherits=User,
                properties={"address_id": addresses.c.id},
            )

        sess = create_session()
        am = AdminUser(name="au1", email_address="au1@e1")
        sess.add(am)
        sess.flush()
        am = sess.query(AdminUser).populate_existing().get(am.id)
        sess.expunge_all()
        am = sess.query(AdminUser).get(am.id)
        am.name = "au1 changed"
        sess.flush()
        sess.delete(am)
        sess.flush()
        eq_(
            methods,
            [
                "instrument_class",
                "instrument_class",
                "init_instance",
                "before_insert",
                "after_insert",
                "reconstruct_instance",
                "before_update",
                "after_update",
                "before_delete",
                "after_delete",
            ],
        )

    def test_before_after_only_collection(self):
        """before_update is called on parent for collection modifications,
        after_update is called even if no columns were updated.

        """

        keywords, items, item_keywords, Keyword, Item = (
            self.tables.keywords,
            self.tables.items,
            self.tables.item_keywords,
            self.classes.Keyword,
            self.classes.Item,
        )

        Ext1, methods1 = self.extension()
        Ext2, methods2 = self.extension()

        with testing.expect_deprecated(
            "MapperExtension is deprecated in favor of the MapperEvents",
            "MapperExtension.before_insert is deprecated",
            "MapperExtension.instrument_class is deprecated",
            "MapperExtension.init_instance is deprecated",
            "MapperExtension.after_insert is deprecated",
            "MapperExtension.reconstruct_instance is deprecated",
            "MapperExtension.before_delete is deprecated",
            "MapperExtension.after_delete is deprecated",
            "MapperExtension.before_update is deprecated",
            "MapperExtension.after_update is deprecated",
            "MapperExtension.init_failed is deprecated",
        ):
            mapper(
                Item,
                items,
                extension=Ext1(),
                properties={
                    "keywords": relationship(Keyword, secondary=item_keywords)
                },
            )
        with testing.expect_deprecated(
            "MapperExtension is deprecated in favor of the MapperEvents",
            "MapperExtension.before_insert is deprecated",
            "MapperExtension.instrument_class is deprecated",
            "MapperExtension.init_instance is deprecated",
            "MapperExtension.after_insert is deprecated",
            "MapperExtension.reconstruct_instance is deprecated",
            "MapperExtension.before_delete is deprecated",
            "MapperExtension.after_delete is deprecated",
            "MapperExtension.before_update is deprecated",
            "MapperExtension.after_update is deprecated",
            "MapperExtension.init_failed is deprecated",
        ):
            mapper(Keyword, keywords, extension=Ext2())

        sess = create_session()
        i1 = Item(description="i1")
        k1 = Keyword(name="k1")
        sess.add(i1)
        sess.add(k1)
        sess.flush()
        eq_(
            methods1,
            [
                "instrument_class",
                "init_instance",
                "before_insert",
                "after_insert",
            ],
        )
        eq_(
            methods2,
            [
                "instrument_class",
                "init_instance",
                "before_insert",
                "after_insert",
            ],
        )

        del methods1[:]
        del methods2[:]
        i1.keywords.append(k1)
        sess.flush()
        eq_(methods1, ["before_update", "after_update"])
        eq_(methods2, [])

    def test_inheritance_with_dupes(self):
        """Inheritance with the same extension instance on both mappers."""

        users, addresses, User = (
            self.tables.users,
            self.tables.addresses,
            self.classes.User,
        )

        Ext, methods = self.extension()

        class AdminUser(User):
            pass

        ext = Ext()
        with testing.expect_deprecated(
            "MapperExtension is deprecated in favor of the MapperEvents",
            "MapperExtension.before_insert is deprecated",
            "MapperExtension.instrument_class is deprecated",
            "MapperExtension.init_instance is deprecated",
            "MapperExtension.after_insert is deprecated",
            "MapperExtension.reconstruct_instance is deprecated",
            "MapperExtension.before_delete is deprecated",
            "MapperExtension.after_delete is deprecated",
            "MapperExtension.before_update is deprecated",
            "MapperExtension.after_update is deprecated",
            "MapperExtension.init_failed is deprecated",
        ):
            mapper(User, users, extension=ext)

        with testing.expect_deprecated(
            "MapperExtension is deprecated in favor of the MapperEvents"
        ):
            mapper(
                AdminUser,
                addresses,
                inherits=User,
                extension=ext,
                properties={"address_id": addresses.c.id},
            )

        sess = create_session()
        am = AdminUser(name="au1", email_address="au1@e1")
        sess.add(am)
        sess.flush()
        am = sess.query(AdminUser).populate_existing().get(am.id)
        sess.expunge_all()
        am = sess.query(AdminUser).get(am.id)
        am.name = "au1 changed"
        sess.flush()
        sess.delete(am)
        sess.flush()
        eq_(
            methods,
            [
                "instrument_class",
                "instrument_class",
                "init_instance",
                "before_insert",
                "after_insert",
                "reconstruct_instance",
                "before_update",
                "after_update",
                "before_delete",
                "after_delete",
            ],
        )

    def test_unnecessary_methods_not_evented(self):
        users = self.tables.users

        class MyExtension(MapperExtension):
            def before_insert(self, mapper, connection, instance):
                pass

        class Foo(object):
            pass

        with testing.expect_deprecated(
            "MapperExtension is deprecated in favor of the MapperEvents",
            "MapperExtension.before_insert is deprecated",
        ):
            m = mapper(Foo, users, extension=MyExtension())
        assert not m.class_manager.dispatch.load
        assert not m.dispatch.before_update
        assert len(m.dispatch.before_insert) == 1


class DeprecatedSessionExtensionTest(_fixtures.FixtureTest):
    run_inserts = None

    def test_extension(self):
        User, users = self.classes.User, self.tables.users
        mapper(User, users)
        log = []

        class MyExt(SessionExtension):
            def before_commit(self, session):
                log.append("before_commit")

            def after_commit(self, session):
                log.append("after_commit")

            def after_rollback(self, session):
                log.append("after_rollback")

            def before_flush(self, session, flush_context, objects):
                log.append("before_flush")

            def after_flush(self, session, flush_context):
                log.append("after_flush")

            def after_flush_postexec(self, session, flush_context):
                log.append("after_flush_postexec")

            def after_begin(self, session, transaction, connection):
                log.append("after_begin")

            def after_attach(self, session, instance):
                log.append("after_attach")

            def after_bulk_update(self, session, query, query_context, result):
                log.append("after_bulk_update")

            def after_bulk_delete(self, session, query, query_context, result):
                log.append("after_bulk_delete")

        with testing.expect_deprecated(
            "SessionExtension is deprecated in favor of "
            "the SessionEvents",
            "SessionExtension.before_commit is deprecated",
            "SessionExtension.after_commit is deprecated",
            "SessionExtension.after_begin is deprecated",
            "SessionExtension.after_attach is deprecated",
            "SessionExtension.before_flush is deprecated",
            "SessionExtension.after_flush is deprecated",
            "SessionExtension.after_flush_postexec is deprecated",
            "SessionExtension.after_rollback is deprecated",
            "SessionExtension.after_bulk_update is deprecated",
            "SessionExtension.after_bulk_delete is deprecated",
        ):
            sess = create_session(extension=MyExt())
        u = User(name="u1")
        sess.add(u)
        sess.flush()
        assert log == [
            "after_attach",
            "before_flush",
            "after_begin",
            "after_flush",
            "after_flush_postexec",
            "before_commit",
            "after_commit",
        ]
        log = []
        with testing.expect_deprecated(
            "SessionExtension is deprecated in favor of "
            "the SessionEvents",
            "SessionExtension.before_commit is deprecated",
            "SessionExtension.after_commit is deprecated",
            "SessionExtension.after_begin is deprecated",
            "SessionExtension.after_attach is deprecated",
            "SessionExtension.before_flush is deprecated",
            "SessionExtension.after_flush is deprecated",
            "SessionExtension.after_flush_postexec is deprecated",
            "SessionExtension.after_rollback is deprecated",
            "SessionExtension.after_bulk_update is deprecated",
            "SessionExtension.after_bulk_delete is deprecated",
        ):
            sess = create_session(autocommit=False, extension=MyExt())
        u = User(name="u1")
        sess.add(u)
        sess.flush()
        assert log == [
            "after_attach",
            "before_flush",
            "after_begin",
            "after_flush",
            "after_flush_postexec",
        ]
        log = []
        u.name = "ed"
        sess.commit()
        assert log == [
            "before_commit",
            "before_flush",
            "after_flush",
            "after_flush_postexec",
            "after_commit",
        ]
        log = []
        sess.commit()
        assert log == ["before_commit", "after_commit"]
        log = []
        sess.query(User).delete()
        assert log == ["after_begin", "after_bulk_delete"]
        log = []
        sess.query(User).update({"name": "foo"})
        assert log == ["after_bulk_update"]
        log = []
        with testing.expect_deprecated(
            "SessionExtension is deprecated in favor of "
            "the SessionEvents",
            "SessionExtension.before_commit is deprecated",
            "SessionExtension.after_commit is deprecated",
            "SessionExtension.after_begin is deprecated",
            "SessionExtension.after_attach is deprecated",
            "SessionExtension.before_flush is deprecated",
            "SessionExtension.after_flush is deprecated",
            "SessionExtension.after_flush_postexec is deprecated",
            "SessionExtension.after_rollback is deprecated",
            "SessionExtension.after_bulk_update is deprecated",
            "SessionExtension.after_bulk_delete is deprecated",
        ):
            sess = create_session(
                autocommit=False, extension=MyExt(), bind=testing.db
            )
        sess.connection()
        assert log == ["after_begin"]
        sess.close()

    def test_multiple_extensions(self):
        User, users = self.classes.User, self.tables.users

        log = []

        class MyExt1(SessionExtension):
            def before_commit(self, session):
                log.append("before_commit_one")

        class MyExt2(SessionExtension):
            def before_commit(self, session):
                log.append("before_commit_two")

        mapper(User, users)
        with testing.expect_deprecated(
            "SessionExtension is deprecated in favor of "
            "the SessionEvents",
            "SessionExtension.before_commit is deprecated",
        ):
            sess = create_session(extension=[MyExt1(), MyExt2()])
        u = User(name="u1")
        sess.add(u)
        sess.flush()
        assert log == ["before_commit_one", "before_commit_two"]

    def test_unnecessary_methods_not_evented(self):
        class MyExtension(SessionExtension):
            def before_commit(self, session):
                pass

        with testing.expect_deprecated(
            "SessionExtension is deprecated in favor of "
            "the SessionEvents",
            "SessionExtension.before_commit is deprecated.",
        ):
            s = Session(extension=MyExtension())
        assert not s.dispatch.after_commit
        assert len(s.dispatch.before_commit) == 1


class DeprecatedAttributeExtensionTest1(fixtures.ORMTest):
    def test_extension_commit_attr(self):
        """test that an extension which commits attribute history
        maintains the end-result history.

        This won't work in conjunction with some unitofwork extensions.

        """

        class Foo(fixtures.BasicEntity):
            pass

        class Bar(fixtures.BasicEntity):
            pass

        class ReceiveEvents(AttributeExtension):
            def __init__(self, key):
                self.key = key

            def append(self, state, child, initiator):
                if commit:
                    state._commit_all(state.dict)
                return child

            def remove(self, state, child, initiator):
                if commit:
                    state._commit_all(state.dict)
                return child

            def set(self, state, child, oldchild, initiator):
                if commit:
                    state._commit_all(state.dict)
                return child

        instrumentation.register_class(Foo)
        instrumentation.register_class(Bar)

        b1, b2, b3, b4 = Bar(id="b1"), Bar(id="b2"), Bar(id="b3"), Bar(id="b4")

        def loadcollection(state, passive):
            if passive is attributes.PASSIVE_NO_FETCH:
                return attributes.PASSIVE_NO_RESULT
            return [b1, b2]

        def loadscalar(state, passive):
            if passive is attributes.PASSIVE_NO_FETCH:
                return attributes.PASSIVE_NO_RESULT
            return b2

        with testing.expect_deprecated(
            "AttributeExtension.append is deprecated.",
            "AttributeExtension.remove is deprecated.",
            "AttributeExtension.set is deprecated.",
        ):
            attributes.register_attribute(
                Foo,
                "bars",
                uselist=True,
                useobject=True,
                callable_=loadcollection,
                extension=[ReceiveEvents("bars")],
            )

        with testing.expect_deprecated(
            "AttributeExtension.append is deprecated.",
            "AttributeExtension.remove is deprecated.",
            "AttributeExtension.set is deprecated.",
        ):
            attributes.register_attribute(
                Foo,
                "bar",
                uselist=False,
                useobject=True,
                callable_=loadscalar,
                extension=[ReceiveEvents("bar")],
            )

        with testing.expect_deprecated(
            "AttributeExtension.append is deprecated.",
            "AttributeExtension.remove is deprecated.",
            "AttributeExtension.set is deprecated.",
        ):
            attributes.register_attribute(
                Foo,
                "scalar",
                uselist=False,
                useobject=False,
                extension=[ReceiveEvents("scalar")],
            )

        def create_hist():
            def hist(key, fn, *arg):
                attributes.instance_state(f1)._commit_all(
                    attributes.instance_dict(f1)
                )
                fn(*arg)
                histories.append(attributes.get_history(f1, key))

            f1 = Foo()
            hist("bars", f1.bars.append, b3)
            hist("bars", f1.bars.append, b4)
            hist("bars", f1.bars.remove, b2)
            hist("bar", setattr, f1, "bar", b3)
            hist("bar", setattr, f1, "bar", None)
            hist("bar", setattr, f1, "bar", b4)
            hist("scalar", setattr, f1, "scalar", 5)
            hist("scalar", setattr, f1, "scalar", None)
            hist("scalar", setattr, f1, "scalar", 4)

        histories = []
        commit = False
        create_hist()
        without_commit = list(histories)
        histories[:] = []
        commit = True
        create_hist()
        with_commit = histories
        for without, with_ in zip(without_commit, with_commit):
            woc = without
            wic = with_
            eq_(woc, wic)

    def test_extension_lazyload_assertion(self):
        class Foo(fixtures.BasicEntity):
            pass

        class Bar(fixtures.BasicEntity):
            pass

        class ReceiveEvents(AttributeExtension):
            def append(self, state, child, initiator):
                state.obj().bars
                return child

            def remove(self, state, child, initiator):
                state.obj().bars
                return child

            def set(self, state, child, oldchild, initiator):
                return child

        instrumentation.register_class(Foo)
        instrumentation.register_class(Bar)

        bar1, bar2, bar3 = [Bar(id=1), Bar(id=2), Bar(id=3)]

        def func1(state, passive):
            if passive is attributes.PASSIVE_NO_FETCH:
                return attributes.PASSIVE_NO_RESULT

            return [bar1, bar2, bar3]

        with testing.expect_deprecated(
            "AttributeExtension.append is deprecated.",
            "AttributeExtension.remove is deprecated.",
            "AttributeExtension.set is deprecated.",
        ):
            attributes.register_attribute(
                Foo,
                "bars",
                uselist=True,
                callable_=func1,
                useobject=True,
                extension=[ReceiveEvents()],
            )
        attributes.register_attribute(
            Bar, "foos", uselist=True, useobject=True, backref="bars"
        )

        x = Foo()
        assert_raises(AssertionError, Bar(id=4).foos.append, x)

        x.bars
        b = Bar(id=4)
        b.foos.append(x)
        attributes.instance_state(x)._expire_attributes(
            attributes.instance_dict(x), ["bars"]
        )
        assert_raises(AssertionError, b.foos.remove, x)

    def test_scalar_listener(self):
        # listeners on ScalarAttributeImpl aren't used normally. test that
        # they work for the benefit of user extensions
        class Foo(object):
            pass

        results = []

        class ReceiveEvents(AttributeExtension):
            def append(self, state, child, initiator):
                assert False

            def remove(self, state, child, initiator):
                results.append(("remove", state.obj(), child))

            def set(self, state, child, oldchild, initiator):
                results.append(("set", state.obj(), child, oldchild))
                return child

        instrumentation.register_class(Foo)
        with testing.expect_deprecated(
            "AttributeExtension.append is deprecated.",
            "AttributeExtension.remove is deprecated.",
            "AttributeExtension.set is deprecated.",
        ):
            attributes.register_attribute(
                Foo,
                "x",
                uselist=False,
                useobject=False,
                extension=ReceiveEvents(),
            )

        f = Foo()
        f.x = 5
        f.x = 17
        del f.x

        eq_(
            results,
            [
                ("set", f, 5, attributes.NEVER_SET),
                ("set", f, 17, 5),
                ("remove", f, 17),
            ],
        )

    def test_cascading_extensions(self):
        t1 = Table(
            "t1",
            MetaData(),
            Column("id", Integer, primary_key=True),
            Column("type", String(40)),
            Column("data", String(50)),
        )

        ext_msg = []

        class Ex1(AttributeExtension):
            def set(self, state, value, oldvalue, initiator):
                ext_msg.append("Ex1 %r" % value)
                return "ex1" + value

        class Ex2(AttributeExtension):
            def set(self, state, value, oldvalue, initiator):
                ext_msg.append("Ex2 %r" % value)
                return "ex2" + value

        class A(fixtures.BasicEntity):
            pass

        class B(A):
            pass

        class C(B):
            pass

        with testing.expect_deprecated(
            "AttributeExtension is deprecated in favor of the "
            "AttributeEvents listener interface. "
            "The column_property.extension parameter"
        ):
            mapper(
                A,
                t1,
                polymorphic_on=t1.c.type,
                polymorphic_identity="a",
                properties={
                    "data": column_property(t1.c.data, extension=Ex1())
                },
            )
        mapper(B, polymorphic_identity="b", inherits=A)
        with testing.expect_deprecated(
            "AttributeExtension is deprecated in favor of the "
            "AttributeEvents listener interface. "
            "The column_property.extension parameter"
        ):
            mapper(
                C,
                polymorphic_identity="c",
                inherits=B,
                properties={
                    "data": column_property(t1.c.data, extension=Ex2())
                },
            )

        with testing.expect_deprecated(
            "AttributeExtension.set is deprecated. "
        ):
            configure_mappers()

        a1 = A(data="a1")
        b1 = B(data="b1")
        c1 = C(data="c1")

        eq_(a1.data, "ex1a1")
        eq_(b1.data, "ex1b1")
        eq_(c1.data, "ex2c1")

        a1.data = "a2"
        b1.data = "b2"
        c1.data = "c2"
        eq_(a1.data, "ex1a2")
        eq_(b1.data, "ex1b2")
        eq_(c1.data, "ex2c2")

        eq_(
            ext_msg,
            [
                "Ex1 'a1'",
                "Ex1 'b1'",
                "Ex2 'c1'",
                "Ex1 'a2'",
                "Ex1 'b2'",
                "Ex2 'c2'",
            ],
        )


class DeprecatedOptionAllTest(OptionsPathTest, _fixtures.FixtureTest):
    run_inserts = "once"
    run_deletes = None

    def _mapper_fixture_one(self):
        users, User, addresses, Address, orders, Order = (
            self.tables.users,
            self.classes.User,
            self.tables.addresses,
            self.classes.Address,
            self.tables.orders,
            self.classes.Order,
        )
        keywords, items, item_keywords, Keyword, Item = (
            self.tables.keywords,
            self.tables.items,
            self.tables.item_keywords,
            self.classes.Keyword,
            self.classes.Item,
        )
        mapper(
            User,
            users,
            properties={
                "addresses": relationship(Address),
                "orders": relationship(Order),
            },
        )
        mapper(Address, addresses)
        mapper(
            Order,
            orders,
            properties={
                "items": relationship(Item, secondary=self.tables.order_items)
            },
        )
        mapper(
            Keyword,
            keywords,
            properties={
                "keywords": column_property(keywords.c.name + "some keyword")
            },
        )
        mapper(
            Item,
            items,
            properties=dict(
                keywords=relationship(Keyword, secondary=item_keywords)
            ),
        )

    def _assert_eager_with_entity_exception(
        self, entity_list, options, message
    ):
        assert_raises_message(
            sa.exc.ArgumentError,
            message,
            create_session().query(*entity_list).options,
            *options
        )

    def test_option_against_nonexistent_twolevel_all(self):
        self._mapper_fixture_one()
        Item = self.classes.Item
        with testing.expect_deprecated(
            r"The joinedload_all\(\) function is deprecated, and "
            "will be removed in a future release. "
            r"Please use method chaining with joinedload\(\)"
        ):
            self._assert_eager_with_entity_exception(
                [Item],
                (joinedload_all("keywords.foo"),),
                'Can\'t find property named \\"foo\\" on mapped class '
                "Keyword->keywords in this Query.",
            )

    def test_all_path_vs_chained(self):
        self._mapper_fixture_one()
        User = self.classes.User
        Order = self.classes.Order
        Item = self.classes.Item
        with testing.expect_deprecated(
            r"The joinedload_all\(\) function is deprecated, and "
            "will be removed in a future release.
" r"Please use method chaining with joinedload\(\)" ): l1 = joinedload_all("orders.items.keywords") sess = Session() q = sess.query(User) self._assert_path_result( l1, q, [ (User, "orders"), (User, "orders", Order, "items"), (User, "orders", Order, "items", Item, "keywords"), ], ) l2 = joinedload("orders").joinedload("items").joinedload("keywords") self._assert_path_result( l2, q, [ (User, "orders"), (User, "orders", Order, "items"), (User, "orders", Order, "items", Item, "keywords"), ], ) def test_subqueryload_mapper_order_by(self): users, User, Address, addresses = ( self.tables.users, self.classes.User, self.classes.Address, self.tables.addresses, ) mapper(Address, addresses) with testing.expect_deprecated( ".*Mapper.order_by parameter is deprecated" ): mapper( User, users, properties={ "addresses": relationship( Address, lazy="subquery", order_by=addresses.c.id ) }, order_by=users.c.id.desc(), ) sess = create_session() q = sess.query(User) result = q.limit(2).all() eq_(result, list(reversed(self.static.user_address_result[2:4]))) def test_selectinload_mapper_order_by(self): users, User, Address, addresses = ( self.tables.users, self.classes.User, self.classes.Address, self.tables.addresses, ) mapper(Address, addresses) with testing.expect_deprecated( ".*Mapper.order_by parameter is deprecated" ): mapper( User, users, properties={ "addresses": relationship( Address, lazy="selectin", order_by=addresses.c.id ) }, order_by=users.c.id.desc(), ) sess = create_session() q = sess.query(User) result = q.limit(2).all() eq_(result, list(reversed(self.static.user_address_result[2:4]))) def test_join_mapper_order_by(self): """test that mapper-level order_by is adapted to a selectable.""" User, users = self.classes.User, self.tables.users with testing.expect_deprecated( ".*Mapper.order_by parameter is deprecated" ): mapper(User, users, order_by=users.c.id) sel = users.select(users.c.id.in_([7, 8])) sess = create_session() with DeprecatedQueryTest._expect_implicit_subquery(): eq_( sess.query(User).select_entity_from(sel).all(), [User(name="jack", id=7), User(name="ed", id=8)], ) def test_defer_addtl_attrs(self): users, User, Address, addresses = ( self.tables.users, self.classes.User, self.classes.Address, self.tables.addresses, ) mapper(Address, addresses) mapper( User, users, properties={ "addresses": relationship( Address, lazy="selectin", order_by=addresses.c.id ) }, ) sess = create_session() with testing.expect_deprecated( r"The \*addl_attrs on orm.defer is deprecated. " "Please use method chaining" ): sess.query(User).options(defer("addresses", "email_address")) with testing.expect_deprecated( r"The \*addl_attrs on orm.undefer is deprecated. 
" "Please use method chaining" ): sess.query(User).options(undefer("addresses", "email_address")) class LegacyLockModeTest(_fixtures.FixtureTest): run_inserts = None @classmethod def setup_mappers(cls): User, users = cls.classes.User, cls.tables.users mapper(User, users) def _assert_legacy(self, arg, read=False, nowait=False): User = self.classes.User s = Session() with testing.expect_deprecated( r"The Query.with_lockmode\(\) method is deprecated" ): q = s.query(User).with_lockmode(arg) sel = q._compile_context().statement if arg is None: assert q._for_update_arg is None assert sel._for_update_arg is None return assert q._for_update_arg.read is read assert q._for_update_arg.nowait is nowait assert sel._for_update_arg.read is read assert sel._for_update_arg.nowait is nowait def test_false_legacy(self): self._assert_legacy(None) def test_plain_legacy(self): self._assert_legacy("update") def test_nowait_legacy(self): self._assert_legacy("update_nowait", nowait=True) def test_read_legacy(self): self._assert_legacy("read", read=True) def test_unknown_legacy_lock_mode(self): User = self.classes.User sess = Session() with testing.expect_deprecated( r"The Query.with_lockmode\(\) method is deprecated" ): assert_raises_message( exc.ArgumentError, "Unknown with_lockmode argument: 'unknown_mode'", sess.query(User.id).with_lockmode, "unknown_mode", ) class InstrumentationTest(fixtures.ORMTest): def test_dict_subclass4(self): # tests #2654 with testing.expect_deprecated( r"The collection.converter\(\) handler is deprecated and will " "be removed in a future release. Please refer to the " "AttributeEvents" ): class MyDict(collections.MappedCollection): def __init__(self): super(MyDict, self).__init__(lambda value: "k%d" % value) @collection.converter def _convert(self, dictlike): for key, value in dictlike.items(): yield value + 5 class Foo(object): pass instrumentation.register_class(Foo) attributes.register_attribute( Foo, "attr", uselist=True, typecallable=MyDict, useobject=True ) f = Foo() f.attr = {"k1": 1, "k2": 2} eq_(f.attr, {"k7": 7, "k6": 6}) def test_name_setup(self): with testing.expect_deprecated( r"The collection.converter\(\) handler is deprecated and will " "be removed in a future release. Please refer to the " "AttributeEvents" ): class Base(object): @collection.iterator def base_iterate(self, x): return "base_iterate" @collection.appender def base_append(self, x): return "base_append" @collection.converter def base_convert(self, x): return "base_convert" @collection.remover def base_remove(self, x): return "base_remove" from sqlalchemy.orm.collections import _instrument_class _instrument_class(Base) eq_(Base._sa_remover(Base(), 5), "base_remove") eq_(Base._sa_appender(Base(), 5), "base_append") eq_(Base._sa_iterator(Base(), 5), "base_iterate") eq_(Base._sa_converter(Base(), 5), "base_convert") with testing.expect_deprecated( r"The collection.converter\(\) handler is deprecated and will " "be removed in a future release. Please refer to the " "AttributeEvents" ): class Sub(Base): @collection.converter def base_convert(self, x): return "sub_convert" @collection.remover def sub_remove(self, x): return "sub_remove" _instrument_class(Sub) eq_(Sub._sa_appender(Sub(), 5), "base_append") eq_(Sub._sa_remover(Sub(), 5), "sub_remove") eq_(Sub._sa_iterator(Sub(), 5), "base_iterate") eq_(Sub._sa_converter(Sub(), 5), "sub_convert") def test_link_event(self): canary = [] with testing.expect_deprecated( r"The collection.linker\(\) handler is deprecated and will " "be removed in a future release. 
Please refer to the " "AttributeEvents" ): class Collection(list): @collection.linker def _on_link(self, obj): canary.append(obj) class Foo(object): pass instrumentation.register_class(Foo) attributes.register_attribute( Foo, "attr", uselist=True, typecallable=Collection, useobject=True ) f1 = Foo() f1.attr.append(3) eq_(canary, [f1.attr._sa_adapter]) adapter_1 = f1.attr._sa_adapter l2 = Collection() f1.attr = l2 eq_(canary, [adapter_1, f1.attr._sa_adapter, None]) class NonPrimaryRelationshipLoaderTest(_fixtures.FixtureTest): run_inserts = "once" run_deletes = None def test_selectload(self): """tests lazy loading with two relationships simultaneously, from the same table, using aliases. """ users, orders, User, Address, Order, addresses = ( self.tables.users, self.tables.orders, self.classes.User, self.classes.Address, self.classes.Order, self.tables.addresses, ) openorders = sa.alias(orders, "openorders") closedorders = sa.alias(orders, "closedorders") mapper(Address, addresses) mapper(Order, orders) with testing.expect_deprecated( "The mapper.non_primary parameter is deprecated" ): open_mapper = mapper(Order, openorders, non_primary=True) closed_mapper = mapper(Order, closedorders, non_primary=True) mapper( User, users, properties=dict( addresses=relationship(Address, lazy=True), open_orders=relationship( open_mapper, primaryjoin=sa.and_( openorders.c.isopen == 1, users.c.id == openorders.c.user_id, ), lazy="select", ), closed_orders=relationship( closed_mapper, primaryjoin=sa.and_( closedorders.c.isopen == 0, users.c.id == closedorders.c.user_id, ), lazy="select", ), ), ) self._run_double_test(10) def test_joinedload(self): """Eager loading with two relationships simultaneously, from the same table, using aliases.""" users, orders, User, Address, Order, addresses = ( self.tables.users, self.tables.orders, self.classes.User, self.classes.Address, self.classes.Order, self.tables.addresses, ) openorders = sa.alias(orders, "openorders") closedorders = sa.alias(orders, "closedorders") mapper(Address, addresses) mapper(Order, orders) with testing.expect_deprecated( "The mapper.non_primary parameter is deprecated" ): open_mapper = mapper(Order, openorders, non_primary=True) closed_mapper = mapper(Order, closedorders, non_primary=True) mapper( User, users, properties=dict( addresses=relationship( Address, lazy="joined", order_by=addresses.c.id ), open_orders=relationship( open_mapper, primaryjoin=sa.and_( openorders.c.isopen == 1, users.c.id == openorders.c.user_id, ), lazy="joined", order_by=openorders.c.id, ), closed_orders=relationship( closed_mapper, primaryjoin=sa.and_( closedorders.c.isopen == 0, users.c.id == closedorders.c.user_id, ), lazy="joined", order_by=closedorders.c.id, ), ), ) self._run_double_test(1) def test_selectin(self): users, orders, User, Address, Order, addresses = ( self.tables.users, self.tables.orders, self.classes.User, self.classes.Address, self.classes.Order, self.tables.addresses, ) openorders = sa.alias(orders, "openorders") closedorders = sa.alias(orders, "closedorders") mapper(Address, addresses) mapper(Order, orders) with testing.expect_deprecated( "The mapper.non_primary parameter is deprecated" ): open_mapper = mapper(Order, openorders, non_primary=True) closed_mapper = mapper(Order, closedorders, non_primary=True) mapper( User, users, properties=dict( addresses=relationship( Address, lazy="selectin", order_by=addresses.c.id ), open_orders=relationship( open_mapper, primaryjoin=sa.and_( openorders.c.isopen == 1, users.c.id == openorders.c.user_id, ), 
lazy="selectin", order_by=openorders.c.id, ), closed_orders=relationship( closed_mapper, primaryjoin=sa.and_( closedorders.c.isopen == 0, users.c.id == closedorders.c.user_id, ), lazy="selectin", order_by=closedorders.c.id, ), ), ) self._run_double_test(4) def test_subqueryload(self): users, orders, User, Address, Order, addresses = ( self.tables.users, self.tables.orders, self.classes.User, self.classes.Address, self.classes.Order, self.tables.addresses, ) openorders = sa.alias(orders, "openorders") closedorders = sa.alias(orders, "closedorders") mapper(Address, addresses) mapper(Order, orders) with testing.expect_deprecated( "The mapper.non_primary parameter is deprecated" ): open_mapper = mapper(Order, openorders, non_primary=True) closed_mapper = mapper(Order, closedorders, non_primary=True) mapper( User, users, properties=dict( addresses=relationship( Address, lazy="subquery", order_by=addresses.c.id ), open_orders=relationship( open_mapper, primaryjoin=sa.and_( openorders.c.isopen == 1, users.c.id == openorders.c.user_id, ), lazy="subquery", order_by=openorders.c.id, ), closed_orders=relationship( closed_mapper, primaryjoin=sa.and_( closedorders.c.isopen == 0, users.c.id == closedorders.c.user_id, ), lazy="subquery", order_by=closedorders.c.id, ), ), ) self._run_double_test(4) def _run_double_test(self, count): User, Address, Order, Item = self.classes( "User", "Address", "Order", "Item" ) q = create_session().query(User).order_by(User.id) def go(): eq_( [ User( id=7, addresses=[Address(id=1)], open_orders=[Order(id=3)], closed_orders=[Order(id=1), Order(id=5)], ), User( id=8, addresses=[ Address(id=2), Address(id=3), Address(id=4), ], open_orders=[], closed_orders=[], ), User( id=9, addresses=[Address(id=5)], open_orders=[Order(id=4)], closed_orders=[Order(id=2)], ), User(id=10), ], q.all(), ) self.assert_sql_count(testing.db, go, count) sess = create_session() user = sess.query(User).get(7) closed_mapper = User.closed_orders.entity open_mapper = User.open_orders.entity eq_( [Order(id=1), Order(id=5)], create_session() .query(closed_mapper) .with_parent(user, property="closed_orders") .all(), ) eq_( [Order(id=3)], create_session() .query(open_mapper) .with_parent(user, property="open_orders") .all(), ) class NonPrimaryMapperTest(_fixtures.FixtureTest, AssertsCompiledSQL): __dialect__ = "default" def test_non_primary_identity_class(self): User = self.classes.User users, addresses = self.tables.users, self.tables.addresses class AddressUser(User): pass mapper(User, users, polymorphic_identity="user") m2 = mapper( AddressUser, addresses, inherits=User, polymorphic_identity="address", properties={"address_id": addresses.c.id}, ) with testing.expect_deprecated( "The mapper.non_primary parameter is deprecated" ): m3 = mapper(AddressUser, addresses, non_primary=True) assert m3._identity_class is m2._identity_class eq_( m2.identity_key_from_instance(AddressUser()), m3.identity_key_from_instance(AddressUser()), ) def test_illegal_non_primary(self): users, Address, addresses, User = ( self.tables.users, self.classes.Address, self.tables.addresses, self.classes.User, ) mapper(User, users) mapper(Address, addresses) with testing.expect_deprecated( "The mapper.non_primary parameter is deprecated" ): mapper( User, users, non_primary=True, properties={"addresses": relationship(Address)}, ) assert_raises_message( sa.exc.ArgumentError, "Attempting to assign a new relationship 'addresses' " "to a non-primary mapper on class 'User'", configure_mappers, ) def test_illegal_non_primary_2(self): User, 
users = self.classes.User, self.tables.users with testing.expect_deprecated( "The mapper.non_primary parameter is deprecated" ): assert_raises_message( sa.exc.InvalidRequestError, "Configure a primary mapper first", mapper, User, users, non_primary=True, ) def test_illegal_non_primary_3(self): users, addresses = self.tables.users, self.tables.addresses class Base(object): pass class Sub(Base): pass mapper(Base, users) with testing.expect_deprecated( "The mapper.non_primary parameter is deprecated" ): assert_raises_message( sa.exc.InvalidRequestError, "Configure a primary mapper first", mapper, Sub, addresses, non_primary=True, )
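All of the deprecation messages exercised above point at the event API as the replacement. A minimal self-contained sketch of the equivalent listeners (an illustration only, assuming SQLAlchemy 1.x; the User model here is a throwaway stand-in, not the fixture classes used by the tests):

from sqlalchemy import Column, Integer, String, create_engine, event
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import Session

Base = declarative_base()

class User(Base):
    __tablename__ = "users"
    id = Column(Integer, primary_key=True)
    name = Column(String(50))

# SessionEvents replaces SessionExtension.before_commit and friends.
@event.listens_for(Session, "before_commit")
def before_commit(session):
    print("before_commit fired")

# AttributeEvents replaces AttributeExtension.set; retval=True lets the
# listener substitute the stored value, as the Ex1/Ex2 extensions above do.
@event.listens_for(User.name, "set", retval=True)
def intercept_set(target, value, oldvalue, initiator):
    return "ex1" + value

engine = create_engine("sqlite://")
Base.metadata.create_all(engine)
session = Session(bind=engine)
session.add(User(name="a1"))
session.commit()                       # fires before_commit
print(session.query(User).one().name)  # "ex1a1"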
[ [ [ 7, 23 ], [ 30217, 30219 ], [ 37104, 37106 ], [ 37628, 37630 ], [ 38173, 38175 ], [ 38414, 38416 ], [ 42693, 42695 ], [ 73312, 73314 ], [ 84458, 84460 ], [ 84512, 84514 ], [ 85118, 85120 ], [ 85425, 85427 ], [ 86119, 86121 ], [ 86173, 86175 ], [ 86846, 86848 ], [ 87199, 87201 ], [ 87822, 87824 ], [ 87876, 87878 ], [ 88551, 88553 ], [ 88906, 88908 ], [ 89536, 89538 ], [ 89590, 89592 ], [ 90265, 90267 ], [ 90620, 90622 ], [ 94287, 94289 ], [ 94738, 94740 ], [ 95327, 95329 ], [ 34747, 34749 ], [ 34769, 34771 ], [ 35507, 35509 ], [ 42884, 42886 ] ], [ [ 47, 51 ], [ 29110, 29114 ] ], [ [ 75, 80 ], [ 12515, 12520 ], [ 12576, 12581 ], [ 12637, 12642 ], [ 12699, 12704 ], [ 12987, 12992 ], [ 13049, 13054 ], [ 13110, 13115 ] ], [ [ 104, 107 ], [ 79996, 79999 ] ], [ [ 131, 135 ], [ 37279, 37283 ], [ 37320, 37324 ], [ 40204, 40208 ], [ 40223, 40227 ] ], [ [ 159, 166 ], [ 3066, 3073 ], [ 3164, 3171 ], [ 4310, 4317 ], [ 6140, 6147 ], [ 7467, 7474 ], [ 17119, 17126 ], [ 43399, 43406 ], [ 69350, 69357 ] ], [ [ 190, 198 ], [ 69313, 69321 ] ], [ [ 222, 228 ], [ 9176, 9182 ], [ 9983, 9989 ], [ 30169, 30175 ], [ 30255, 30261 ] ], [ [ 252, 258 ], [ 17133, 17139 ], [ 43553, 43559 ], [ 69405, 69411 ], [ 69445, 69451 ] ], [ [ 282, 289 ], [ 14633, 14640 ], [ 14691, 14698 ], [ 42602, 42609 ], [ 6293, 6300 ], [ 8049, 8056 ], [ 8360, 8367 ], [ 8672, 8679 ], [ 8983, 8990 ], [ 9157, 9164 ], [ 9451, 9458 ], [ 9964, 9971 ], [ 10305, 10312 ], [ 11577, 11584 ], [ 11713, 11720 ], [ 11843, 11850 ], [ 11979, 11986 ], [ 12126, 12133 ], [ 20637, 20644 ], [ 21730, 21737 ], [ 27116, 27123 ], [ 27894, 27901 ], [ 28530, 28537 ], [ 32706, 32713 ], [ 43078, 43085 ], [ 46335, 46342 ], [ 48068, 48075 ], [ 50301, 50308 ], [ 51261, 51268 ], [ 53175, 53182 ], [ 53943, 53950 ], [ 55326, 55333 ], [ 57052, 57059 ], [ 58167, 58174 ], [ 59801, 59808 ], [ 60620, 60627 ], [ 61160, 61167 ], [ 61716, 61723 ], [ 63596, 63603 ], [ 64070, 64077 ], [ 64539, 64546 ], [ 66955, 66962 ], [ 68547, 68554 ], [ 70013, 70020 ], [ 70567, 70574 ], [ 71032, 71039 ], [ 73586, 73593 ], [ 74264, 74271 ], [ 75471, 75478 ], [ 76334, 76341 ], [ 77103, 77110 ], [ 78099, 78106 ], [ 78328, 78335 ], [ 78883, 78890 ], [ 79843, 79850 ], [ 80295, 80302 ], [ 81214, 81221 ], [ 82290, 82297 ], [ 83091, 83098 ], [ 84626, 84633 ], [ 86286, 86293 ], [ 87989, 87996 ], [ 89703, 89710 ], [ 92146, 92153 ], [ 93304, 93311 ], [ 93967, 93974 ], [ 94588, 94595 ], [ 95177, 95184 ], [ 12329, 12336 ] ], [ [ 313, 317 ], [ 17055, 17059 ], [ 20123, 20127 ], [ 21296, 21300 ] ], [ [ 357, 373 ], [ 43583, 43599 ] ], [ [ 413, 429 ], [ 43023, 43039 ] ], [ [ 457, 464 ], [ 23705, 23712 ] ], [ [ 492, 510 ], [ 2537, 2555 ], [ 3795, 3813 ], [ 62480, 62498 ], [ 66260, 66278 ], [ 68103, 68121 ], [ 69509, 69527 ], [ 69697, 69715 ] ], [ [ 538, 548 ], [ 63811, 63821 ], [ 64285, 64295 ], [ 64754, 64764 ], [ 67170, 67180 ], [ 67408, 67418 ], [ 67673, 67683 ], [ 67734, 67744 ], [ 68762, 68772 ], [ 69103, 69113 ], [ 80957, 80967 ], [ 83550, 83560 ], [ 63303, 63313 ], [ 63355, 63365 ], [ 63479, 63489 ], [ 63531, 63541 ], [ 66821, 66831 ], [ 66873, 66883 ], [ 65044, 65054 ], [ 65107, 65117 ], [ 65212, 65222 ] ], [ [ 576, 587 ], [ 80533, 80544 ] ], [ [ 615, 630 ], [ 3115, 3130 ], [ 4282, 4297 ], [ 70421, 70436 ], [ 70941, 70956 ], [ 72918, 72933 ] ], [ [ 658, 677 ], [ 38810, 38829 ], [ 40548, 40567 ], [ 40639, 40658 ], [ 41449, 41468 ], [ 42306, 42325 ] ], [ [ 705, 714 ], [ 4514, 4523 ] ], [ [ 742, 759 ], [ 3696, 3713 ], [ 71135, 71152 ], [ 94443, 94460 ] ], [ [ 787, 801 ], [ 20471, 20485 ], [ 21549, 21563 ] ], [ 
[ 829, 843 ], [ 12250, 12264 ], [ 12488, 12502 ], [ 16893, 16907 ], [ 17770, 17784 ], [ 18197, 18211 ], [ 18727, 18741 ], [ 19272, 19286 ], [ 20196, 20210 ], [ 20851, 20865 ], [ 21827, 21841 ], [ 22640, 22654 ], [ 24947, 24961 ], [ 25873, 25887 ], [ 26345, 26359 ], [ 30727, 30741 ], [ 32997, 33011 ], [ 33135, 33149 ], [ 33312, 33326 ], [ 33486, 33500 ], [ 33751, 33765 ], [ 35656, 35670 ], [ 40718, 40732 ], [ 42405, 42419 ], [ 43807, 43821 ], [ 47106, 47120 ], [ 48994, 49008 ], [ 52040, 52054 ], [ 54279, 54293 ], [ 57797, 57811 ], [ 58912, 58926 ], [ 60546, 60560 ], [ 61356, 61370 ], [ 73367, 73381 ], [ 75895, 75909 ], [ 76758, 76772 ], [ 77317, 77331 ], [ 78068, 78082 ], [ 91089, 91103 ], [ 92185, 92199 ], [ 92403, 92417 ], [ 92593, 92607 ] ], [ [ 871, 876 ], [ 78277, 78282 ] ], [ [ 904, 912 ], [ 40274, 40282 ] ], [ [ 940, 952 ], [ 44629, 44641 ], [ 44830, 44842 ], [ 45027, 45039 ], [ 45181, 45193 ], [ 45333, 45345 ], [ 45483, 45495 ], [ 45635, 45647 ], [ 45785, 45797 ], [ 45937, 45949 ], [ 46087, 46099 ] ], [ [ 980, 988 ], [ 6476, 6484 ], [ 6706, 6714 ], [ 6787, 6795 ] ], [ [ 1016, 1031 ], [ 63071, 63086 ], [ 63115, 63130 ], [ 66617, 66632 ], [ 66661, 66676 ], [ 68498, 68513 ], [ 80913, 80928 ], [ 83506, 83521 ] ], [ [ 1059, 1069 ], [ 74866, 74876 ], [ 26519, 26529 ], [ 27319, 27329 ], [ 28097, 28107 ] ], [ [ 1097, 1111 ], [ 73901, 73915 ], [ 74501, 74515 ] ], [ [ 1139, 1145 ], [ 10271, 10277 ], [ 11241, 11247 ], [ 11318, 11324 ], [ 13995, 14001 ], [ 14899, 14905 ], [ 32871, 32877 ], [ 37534, 37540 ], [ 39862, 39868 ], [ 42152, 42158 ], [ 47054, 47060 ], [ 48787, 48793 ], [ 48832, 48838 ], [ 51020, 51026 ], [ 51980, 51986 ], [ 53894, 53900 ], [ 54066, 54072 ], [ 55513, 55519 ], [ 55885, 55891 ], [ 61127, 61133 ], [ 70233, 70239 ], [ 70506, 70512 ], [ 70787, 70793 ], [ 72393, 72399 ], [ 72595, 72601 ], [ 72630, 72636 ], [ 72814, 72820 ], [ 73001, 73007 ], [ 75430, 75436 ], [ 75577, 75583 ], [ 76294, 76300 ], [ 76440, 76446 ], [ 77209, 77215 ], [ 77791, 77797 ], [ 77826, 77832 ], [ 78732, 78738 ], [ 84554, 84560 ], [ 84590, 84596 ], [ 84751, 84757 ], [ 84823, 84829 ], [ 84877, 84883 ], [ 86215, 86221 ], [ 86250, 86256 ], [ 86411, 86417 ], [ 86483, 86489 ], [ 86538, 86544 ], [ 87918, 87924 ], [ 87953, 87959 ], [ 88114, 88120 ], [ 88186, 88192 ], [ 88241, 88247 ], [ 89632, 89638 ], [ 89667, 89673 ], [ 89828, 89834 ], [ 89900, 89906 ], [ 89955, 89961 ], [ 93037, 93043 ], [ 93099, 93105 ], [ 93420, 93426 ], [ 93899, 93905 ], [ 93927, 93933 ], [ 94078, 94084 ], [ 94834, 94840 ], [ 95144, 95150 ], [ 95423, 95429 ], [ 35402, 35408 ] ], [ [ 1173, 1188 ], [ 5414, 5429 ], [ 44484, 44499 ], [ 55161, 55176 ] ], [ [ 1216, 1230 ], [ 39910, 39924 ], [ 41225, 41239 ], [ 41729, 41743 ] ], [ [ 1258, 1270 ], [ 4062, 4074 ], [ 11286, 11298 ], [ 51168, 51180 ], [ 72492, 72504 ], [ 72541, 72553 ], [ 72727, 72739 ], [ 73100, 73112 ], [ 75692, 75704 ], [ 76555, 76567 ], [ 77925, 77937 ], [ 84977, 84989 ], [ 85039, 85051 ], [ 85344, 85356 ], [ 86638, 86650 ], [ 86767, 86779 ], [ 87118, 87130 ], [ 88341, 88353 ], [ 88472, 88484 ], [ 88825, 88837 ], [ 90055, 90067 ], [ 90186, 90198 ], [ 90539, 90551 ], [ 94206, 94218 ] ], [ [ 1298, 1305 ], [ 5313, 5320 ], [ 6414, 6421 ], [ 6639, 6646 ], [ 6748, 6755 ], [ 6861, 6868 ], [ 7279, 7286 ], [ 7526, 7533 ], [ 9575, 9582 ], [ 10384, 10391 ], [ 11358, 11365 ], [ 17403, 17410 ], [ 28840, 28847 ], [ 61910, 61917 ], [ 74557, 74564 ], [ 78859, 78866 ], [ 79820, 79827 ] ], [ [ 1333, 1349 ], [ 4623, 4639 ], [ 55943, 55959 ], [ 60870, 60886 ], [ 61005, 61021 ], [ 61616, 61632 ] ], [ 
[ 1377, 1389 ], [ 8173, 8185 ], [ 8796, 8808 ] ], [ [ 1417, 1424 ], [ 40329, 40336 ], [ 42261, 42268 ] ], [ [ 1452, 1459 ], [ 78508, 78515 ] ], [ [ 1487, 1503 ], [ 30132, 30148 ] ], [ [ 1543, 1553 ], [ 80696, 80706 ], [ 81476, 81486 ], [ 81599, 81609 ], [ 81720, 81730 ], [ 81844, 81854 ], [ 82549, 82559 ], [ 82672, 82682 ], [ 83354, 83364 ] ], [ [ 1586, 1603 ], [ 32182, 32199 ], [ 32353, 32370 ] ], [ [ 1635, 1648 ], [ 67546, 67559 ], [ 67790, 67803 ] ], [ [ 1680, 1701 ], [ 35936, 35957 ], [ 37770, 37791 ], [ 73277, 73298 ], [ 79957, 79978 ], [ 94252, 94273 ], [ 94699, 94720 ], [ 95288, 95309 ] ], [ [ 1733, 1743 ], [ 2803, 2813 ], [ 3232, 3242 ], [ 3904, 3914 ], [ 4121, 4131 ], [ 4359, 4369 ], [ 4884, 4894 ], [ 5719, 5729 ], [ 6517, 6527 ], [ 6885, 6895 ], [ 7130, 7140 ], [ 7549, 7559 ], [ 16504, 16514 ], [ 17426, 17436 ], [ 38621, 38631 ], [ 40360, 40370 ], [ 41273, 41283 ], [ 41990, 42000 ], [ 35224, 35234 ] ], [ [ 1775, 1793 ], [ 16221, 16239 ], [ 31936, 31954 ], [ 92779, 92797 ] ], [ [ 1825, 1828 ], [ 14402, 14405 ], [ 15102, 15105 ], [ 15165, 15168 ], [ 15329, 15332 ], [ 15436, 15439 ], [ 15550, 15553 ], [ 15639, 15642 ], [ 15741, 15744 ], [ 15877, 15880 ], [ 16018, 16021 ], [ 16107, 16110 ], [ 16970, 16973 ], [ 17847, 17850 ], [ 22993, 22996 ], [ 24065, 24068 ], [ 25075, 25078 ], [ 25415, 25418 ], [ 25950, 25953 ], [ 33469, 33472 ], [ 33734, 33737 ], [ 35830, 35833 ], [ 35881, 35884 ], [ 36545, 36548 ], [ 36622, 36625 ], [ 36917, 36920 ], [ 38078, 38081 ], [ 38310, 38313 ], [ 38914, 38917 ], [ 38992, 38995 ], [ 39086, 39089 ], [ 39156, 39159 ], [ 43867, 43870 ], [ 43909, 43912 ], [ 44097, 44100 ], [ 44223, 44226 ], [ 47420, 47423 ], [ 49357, 49360 ], [ 52196, 52199 ], [ 52404, 52407 ], [ 52713, 52716 ], [ 52770, 52773 ], [ 54642, 54645 ], [ 66050, 66053 ], [ 69033, 69036 ], [ 71243, 71246 ], [ 71273, 71276 ], [ 71303, 71306 ], [ 71403, 71406 ], [ 71433, 71436 ], [ 71463, 71466 ], [ 71494, 71497 ], [ 75984, 75987 ], [ 76847, 76850 ], [ 77409, 77412 ], [ 81137, 81140 ], [ 82054, 82057 ], [ 82110, 82113 ], [ 82167, 82170 ], [ 82225, 82228 ], [ 82813, 82816 ], [ 82868, 82871 ], [ 82921, 82924 ], [ 82977, 82980 ], [ 83725, 83728 ], [ 83856, 83859 ], [ 92346, 92349 ], [ 92549, 92552 ], [ 93533, 93536 ], [ 26448, 26451 ], [ 27248, 27251 ], [ 28026, 28029 ], [ 40880, 40883 ], [ 40912, 40915 ], [ 40947, 40950 ], [ 40983, 40986 ], [ 91167, 91170 ] ], [ [ 1860, 1868 ], [ 2313, 2321 ], [ 42577, 42585 ], [ 62078, 62086 ], [ 80206, 80214 ], [ 39295, 39303 ], [ 43255, 43263 ], [ 62352, 62360 ], [ 62411, 62419 ], [ 66132, 66140 ], [ 66191, 66199 ], [ 69883, 69891 ] ], [ [ 1900, 1903 ], [ 6455, 6458 ], [ 6685, 6688 ], [ 6766, 6769 ], [ 10763, 10766 ] ], [ [ 1935, 1942 ], [ 30196, 30203 ], [ 32561, 32568 ] ], [ [ 1981, 1987 ], [ 3059, 3065 ], [ 3152, 3158 ], [ 4298, 4304 ], [ 6133, 6139 ], [ 7460, 7466 ], [ 43345, 43351 ], [ 43538, 43544 ], [ 69337, 69343 ], [ 69390, 69396 ], [ 69430, 69436 ] ], [ [ 2026, 2031 ], [ 69276, 69281 ] ], [ [ 2068, 2078 ], [ 14178, 14188 ], [ 15144, 15154 ], [ 13448, 13458 ] ], [ [ 2114, 2118 ], [ 14658, 14662 ] ], [ [ 2133, 2142 ], [ 10090, 10099 ], [ 12028, 12037 ], [ 16198, 16207 ], [ 31913, 31922 ], [ 44273, 44282 ], [ 55739, 55748 ], [ 71776, 71785 ], [ 78574, 78583 ], [ 83949, 83958 ], [ 92756, 92765 ] ], [ [ 2168, 2182 ], [ 29899, 29913 ], [ 29983, 29997 ], [ 30024, 30038 ], [ 30485, 30499 ], [ 30526, 30540 ], [ 30568, 30582 ] ], [ [ 2209, 2236 ], [ 71759, 71774 ] ], [ [ 2267, 2280 ], [ 7917, 7930 ] ], [ [ 2289, 2312 ] ], [ [ 7887, 7916 ] ], [ [ 10061, 10089 ] ], [ [ 
12006, 12027 ] ], [ [ 16178, 16197 ], [ 30062, 30081 ], [ 30797, 30816 ], [ 32115, 32134 ], [ 77348, 77367 ] ], [ [ 29881, 29898 ] ], [ [ 31892, 31912 ] ], [ [ 42558, 42576 ] ], [ [ 44243, 44272 ] ], [ [ 55708, 55738 ] ], [ [ 62044, 62077 ] ], [ [ 71735, 71758 ] ], [ [ 78555, 78573 ] ], [ [ 80186, 80205 ] ], [ [ 83916, 83948 ] ], [ [ 92735, 92755 ] ] ]
import torch
import mxnet as mx
import numpy as np
from gluon2pytorch import gluon2pytorch


class SoftmaxTest(mx.gluon.nn.HybridSequential):
    def __init__(self):
        super(SoftmaxTest, self).__init__()
        from mxnet.gluon import nn
        with self.name_scope():
            self.conv1 = nn.Conv2D(3, 32)

    def hybrid_forward(self, F, x):
        x = F.softmax(self.conv1(x))
        return x


def check_error(gluon_output, pytorch_output, epsilon=1e-5):
    pytorch_output = pytorch_output.data.numpy()
    gluon_output = gluon_output.asnumpy()
    # compare on the absolute deviation; without abs() a large negative
    # discrepancy would go unnoticed
    error = np.max(np.abs(pytorch_output - gluon_output))
    print('Error:', error)
    assert error < epsilon
    return error


if __name__ == '__main__':
    print('Test softmax:')

    net = SoftmaxTest()

    # Make sure it's hybrid and initialized
    net.hybridize()
    net.collect_params().initialize()

    pytorch_model = gluon2pytorch(net, [(1, 3, 224, 224)], dst_dir=None, pytorch_module_name='SoftmaxTest')

    input_np = np.random.uniform(-1, 1, (1, 3, 224, 224))

    gluon_output = net(mx.nd.array(input_np))
    pytorch_output = pytorch_model(torch.FloatTensor(input_np))
    check_error(gluon_output, pytorch_output)
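check_error above bounds the raw maximum deviation between the two outputs. A possible variant (a hypothetical helper, not part of gluon2pytorch) bounds the relative deviation instead, which is more forgiving when activations differ widely in scale:

import numpy as np

def check_relative_error(gluon_output, pytorch_output, epsilon=1e-5):
    # Same comparison as check_error, but normalized by the magnitude of the
    # reference output; the 1e-12 guard avoids division by zero.
    p = pytorch_output.data.numpy()
    g = gluon_output.asnumpy()
    rel = np.max(np.abs(p - g) / (np.abs(g) + 1e-12))
    assert rel < epsilon
    return rel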
[ [ [ 7, 12 ], [ 1121, 1126 ] ], [ [ 20, 31 ], [ 111, 113 ], [ 1063, 1065 ] ], [ [ 39, 50 ], [ 996, 998 ], [ 577, 579 ] ], [ [ 77, 90 ], [ 892, 905 ] ], [ [ 99, 110 ], [ 754, 765 ], [ 180, 191 ] ], [ [ 416, 427 ], [ 1154, 1165 ] ], [ [ 748, 751 ], [ 817, 820 ], [ 837, 840 ], [ 906, 909 ], [ 1059, 1062 ] ], [ [ 876, 889 ], [ 1107, 1120 ] ], [ [ 985, 993 ], [ 1075, 1083 ], [ 1139, 1147 ] ], [ [ 1044, 1056 ], [ 1166, 1178 ] ], [ [ 1090, 1104 ], [ 1180, 1194 ] ] ]
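Each code row above is paired with a def_use_chains row; the entries read as character-offset spans [start, end) into the source text, the defining occurrence first and every use after it. A small sanity-check sketch under that assumption (the spans are taken from the gluon2pytorch row above; `source` is assumed to hold that file's text):

def chain_to_names(source, chain):
    # chain is one entry such as [[7, 12], [1121, 1126]]; every slice in a
    # well-formed chain should spell the same identifier.
    return [source[start:end] for start, end in chain]

# chain_to_names(source, [[7, 12], [1121, 1126]]) -> ['torch', 'torch']
# (the definition in `import torch` and its use in torch.FloatTensor)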
""" * SearchAThing.UnitTest, Copyright(C) 2015-2017 Lorenzo Delana, License under MIT * * The MIT License(MIT) * Copyright(c) 2015-2017 Lorenzo Delana, https://searchathing.com * * Permission is hereby granted, free of charge, to any person obtaining a * copy of this software and associated documentation files (the "Software"), * to deal in the Software without restriction, including without limitation * the rights to use, copy, modify, merge, publish, distribute, sublicense, * and/or sell copies of the Software, and to permit persons to whom the * Software is furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in * all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER * DEALINGS IN THE SOFTWARE. """ import unittest from searchathing_core.number import * class Core(unittest.TestCase): def test_equals_auto_tol(self): self.assertTrue(equals_auto_tol(1, 1)) self.assertTrue(equals_auto_tol(1, 1 + 1e-20)) self.assertFalse(equals_auto_tol(1, 2)) self.assertTrue(equals_auto_tol(1, 2, precision=2)) def test_mround(self): self.assertTrue(equals_tol(1e-10, mround(4, 3), 3)) self.assertTrue(equals_tol(1e-10, mround(5, 3), 6)) self.assertTrue(equals_tol(1e-10, mround(-3.21, .1), -3.2)) self.assertTrue(equals_tol(1e-10, mround(-3.29, .1), -3.3)) def test_angle(self): self.assertTrue(equals_tol(1e-6, to_deg(.21294), 12.200563)) self.assertTrue(equals_tol(1e-6, to_rad(140.3), 2.448697)) if __name__ == '__main__': unittest.main()
[ [ [ 1247, 1255 ], [ 1308, 1316 ], [ 2054, 2062 ] ], [ [ 1293, 1294 ], [ 1388, 1403 ], [ 1435, 1450 ], [ 1491, 1506 ], [ 1538, 1553 ], [ 1626, 1636 ], [ 1644, 1650 ], [ 1686, 1696 ], [ 1704, 1710 ], [ 1746, 1756 ], [ 1764, 1770 ], [ 1814, 1824 ], [ 1832, 1838 ], [ 1909, 1919 ], [ 1926, 1932 ], [ 1978, 1988 ], [ 1995, 2001 ] ], [ [ 1303, 1307 ] ] ]
r""" Quiver mutation types AUTHORS: - Gregg Musiker (2012, initial version) - Christian Stump (2012, initial version) - Hugh Thomas (2012, initial version) """ #***************************************************************************** # Copyright (C) 2011 Gregg Musiker <gmusiker@gmail.com> # Christian Stump <christian.stump@gmail.com> # Hugh Thomas <hugh@math.unb.ca> # # Distributed under the terms of the GNU General Public License (GPL) # http://www.gnu.org/licenses/ #***************************************************************************** # python3 from __future__ import division, print_function from __future__ import absolute_import from six.moves import range from sage.structure.sage_object import SageObject from copy import copy from sage.structure.unique_representation import UniqueRepresentation from sage.misc.all import cached_method from sage.rings.all import ZZ, infinity from sage.graphs.all import Graph, DiGraph from sage.arith.all import binomial, Euler_Phi from sage.all import prod from sage.matrix.all import matrix class QuiverMutationTypeFactory(SageObject): def __call__(self, *args): """ For a detailed description, see :meth:`QuiverMutationType`. EXAMPLES:: sage: from sage.combinat.cluster_algebra_quiver.quiver_mutation_type import QuiverMutationTypeFactory sage: QuiverMutationTypeFactory() QuiverMutationType """ # get data as arguments or as list/tuple if len( args ) == 1: data = args[0] else: data = args # data is a QuiverMutationType if isinstance(data, QuiverMutationType_Irreducible): return data elif isinstance(data, QuiverMutationType_Reducible): return data # check that data is a tuple or list if isinstance(data, tuple) and len( data ) > 0: pass elif isinstance(data, list) and len( data ) > 0: data = tuple( data ) else: _mutation_type_error( data ) # check for reducible types if all( type( data_component ) in [list,tuple,QuiverMutationType_Irreducible] for data_component in data ): if len( data ) == 1: return QuiverMutationType( data[0] ) else: data = tuple( QuiverMutationType(comp) for comp in data ) return QuiverMutationType_Reducible( *data ) # check for irreducible types if len(data) == 2: data = (data[0],data[1],None) elif len(data) == 3: pass else: _mutation_type_error(data) if isinstance(data[2], list): data = (data[0],data[1],tuple(data[2])) if isinstance(data[1], list): data = (data[0],tuple(data[1]),data[2]) # mutation type casting if True: if data == ('D',2,None): return QuiverMutationType( ('A',1,None), ('A',1,None) ) elif data == ('D',3,None): data = ('A',3,None) elif data == ('C',2,None): data = ('B',2,None) elif data == ('E',9,None): data = ('E',8,1) elif data[0] == 'A' and data[2] == 1 and isinstance(data[1], tuple) and len(data[1]) == 2 and min(data[1]) == 0: if max(data[1]) == 0: pass elif max(data[1]) == 1: data = ('A', 1,None) elif max(data[1]) == 2: return QuiverMutationType( ('A',1,None), ('A',1,None) ) elif max(data[1]) == 3: data = ('A',3,None) else: data = ('D',max(data[1]),None) elif data[0] == 'GR' and data[2] is None and isinstance(data[1], tuple) and len(data[1]) == 2 and data[1][1] > data[1][0]: if min(data[1]) > max(data[1])/2 and max(data[1]) != min(data[1])+1: data = (data[0],(max(data[1])-min(data[1]),max(data[1])),data[2]) if min(data[1]) == 2 and max(data[1]) > 3: data = ('A',max(data[1])-3,None) elif data[1] == (3,6): data = ('D',4,None) elif data[1] == (3,7): data = ('E',6,None) elif data[1] == (3,8): data = ('E',8,None) elif data[1] == (3,9): data = ('E',8,[1,1]) elif data[1] == (4,8): data = ('E',7,[1,1]) elif data == 
('TR',1,None): data = ('A',1,None) elif data == ('TR',2,None): data = ('A',3,None) elif data == ('TR',3,None): data = ('D',6,None) elif data == ('TR',4,None): data = ('E',8,(1,1)) # mutation type casting from Kac conventions elif data == ('A',1,1): data = ('A',(1,1),1) elif data[0] == 'B' and data[2] == 1: if data[1] == 2: data = ('CC',2,1) elif data[1] > 2: data = ('BD',data[1],1) elif data[0] == 'B' and data[2] == -1: if data[1] == 2: data = ('BB',2,1) elif data[1] > 2: data= ('CD',data[1],1) elif data[0] == 'C' and data[1] > 1 and data[2] == 1: data = ('CC',data[1],1) elif data[0] == 'C' and data[1] > 1 and data[2] == -1: data = ('BB',data[1],1) elif data == ('A',2,2): data = ('BC',1,1) elif data[0] == 'A' and data[1] in ZZ and data[1] > 1 and data[1]%2 == 0 and data[2] == 2: data = ('BC',data[1]//2,1) elif data[0] == 'A' and data[1] in ZZ and data[1] > 3 and data[1]%2 == 1 and data[2] == 2: data = ('CD',(data[1]+1)//2,1) # We think of ('A',3,2) as ('D',3,2) elif data == ('A',3,2): data = ('BB',2,1) elif data[0] == 'D' and data[1] in ZZ and data[1] > 2 and data[2] == 2: data = ('BB',data[1]-1,1) elif data == ('E',6,2): data = ('F',4,-1) elif data == ('D',4,3): data = ('G',2,-1) elif data == ('F',4,(2,1)): data = ('F',4,(1,2)) elif data == ('G',2,(3,1)): data = ('G',2,(1,3)) elif data[0] == 'T' and data[2] is None: data = (data[0],tuple(sorted(data[1])),data[2]) r,p,q = data[1] if r == 1: data = ('A',p+q-1,None) elif r == p == 2: data = ('D',q+2,None) elif r == 2 and p == 3: if q in (3,4,5): data = ('E',q+3,None) elif q == 6: data = ('E',8,1) else: data = ('E',q+3,None) elif r== 2 and p == q == 4: data = ('E',7,1) elif r == p == q == 3: data = ('E',6,1) elif data[0] == 'R2' and data[2] is None and all(data[1][i] in ZZ and data[1][i] > 0 for i in [0,1]): data = (data[0],tuple(sorted(data[1])),data[2]) b,c = data[1] if data[1] == (1,1): data = ('A',2,None) elif data[1] == (1,2): data = ('B',2,None) elif data[1] == (1,3): data = ('G',2,None) elif data[1] == (1,4): data = ('BC',1,1) elif data[1] == (2,2): data = ('A',(1,1),1) # setting the parameters and returning the mutation type letter,rank,twist = data if not isinstance(letter, str): _mutation_type_error(data) if isinstance(rank, list): rank = tuple(rank) if isinstance(twist, list): twist = tuple(twist) return QuiverMutationType_Irreducible(letter,rank,twist) def _repr_(self): """ Return the string representation of ``self``. EXAMPLES:: sage: QuiverMutationType # indirect doctest QuiverMutationType """ return "QuiverMutationType" def samples(self, finite=None, affine=None, elliptic=None, mutation_finite=None): """ Return a sample of the available quiver mutations types. INPUT: - ``finite`` - ``affine`` - ``elliptic`` - ``mutation_finite`` All four input keywords default values are ``None``. If set to ``True`` or ``False``, only these samples are returned. 
EXAMPLES:: sage: QuiverMutationType.samples() [['A', 1], ['A', 5], ['B', 2], ['B', 5], ['C', 3], ['C', 5], [ ['A', 1], ['A', 1] ], ['D', 5], ['E', 6], ['E', 7], ['E', 8], ['F', 4], ['G', 2], ['A', [1, 1], 1], ['A', [4, 5], 1], ['D', 4, 1], ['BB', 5, 1], ['E', 6, [1, 1]], ['E', 7, [1, 1]], ['R2', [1, 5]], ['R2', [3, 5]], ['E', 10], ['BE', 5], ['GR', [3, 10]], ['T', [3, 3, 4]]] sage: QuiverMutationType.samples(finite=True) [['A', 1], ['A', 5], ['B', 2], ['B', 5], ['C', 3], ['C', 5], [ ['A', 1], ['A', 1] ], ['D', 5], ['E', 6], ['E', 7], ['E', 8], ['F', 4], ['G', 2]] sage: QuiverMutationType.samples(affine=True) [['A', [1, 1], 1], ['A', [4, 5], 1], ['D', 4, 1], ['BB', 5, 1]] sage: QuiverMutationType.samples(elliptic=True) [['E', 6, [1, 1]], ['E', 7, [1, 1]]] sage: QuiverMutationType.samples(mutation_finite=False) [['R2', [1, 5]], ['R2', [3, 5]], ['E', 10], ['BE', 5], ['GR', [3, 10]], ['T', [3, 3, 4]]] """ result = self._samples() if finite is not None: result = [t for t in result if t.is_finite() == finite] if affine is not None: result = [t for t in result if t.is_affine() == affine] if elliptic is not None: result = [t for t in result if t.is_elliptic() == elliptic] if mutation_finite is not None: result = [t for t in result if t.is_mutation_finite() == mutation_finite] return result @cached_method def _samples(self): """ Return a list of sample of available Cartan types. EXAMPLES:: sage: X = QuiverMutationType._samples() """ finite_types = \ [QuiverMutationType(t) for t in [['A', 1], ['A', 5], ['B', 2], ['B', 5], ['C', 3], ['C', 5], ['D', 2], ['D', 5], ["E", 6], ["E", 7], ["E", 8], ["F", 4], ["G", 2]]] affine_types = \ [QuiverMutationType(t) for t in [['A', [1,1], 1], ['A', [4,5], 1], ['D', 4, 1], ['BB', 5, 1]]] elliptic_types = \ [QuiverMutationType(t) for t in [['E', 6, [1,1]], ['E', 7, [1,1]]]] mutation_finite_types = \ [QuiverMutationType(t) for t in [['R2',(1,5)], ['R2',(3,5)]]] mutation_infinite_types = \ [QuiverMutationType(t) for t in [['E',10], ['BE',5], ['GR',(3,10)], ['T',(3,3,4)]]] return finite_types + affine_types + elliptic_types + mutation_finite_types + mutation_infinite_types QuiverMutationType = QuiverMutationTypeFactory() QuiverMutationType.__doc__ = \ r""" *Quiver mutation types* can be seen as a slight generalization of *generalized Cartan types*. Background on generalized Cartan types can be found at :wikipedia:`Generalized_Cartan_matrix` For the compendium on the cluster algebra and quiver package in Sage see [MS2011]_ A `B`-matrix is a skew-symmetrizable `( n \times n )`-matrix `M`. I.e., there exists an invertible diagonal matrix `D` such that `DM` is skew-symmetric. `M` can be encoded as a *quiver* by having a directed edge from vertex `i` to vertex `j` with label `(a,b)` if `a = M_{i,j} > 0` and `b = M_{j,i} < 0`. We consider quivers up to *mutation equivalence*. To a quiver mutation type we can associate a *generalized Cartan type* by sending `M` to the generalized Cartan matrix `C(M)` obtained by replacing all positive entries by their negatives and adding `2`'s on the main diagonal. ``QuiverMutationType`` constructs a quiver mutation type object. For more detail on the possible different types, please see the compendium. INPUT: The input consists either of a quiver mutation type, or of a ``letter`` (a string), a ``rank`` (one integer or a list/tuple of integers), and an optional ``twist`` (an integer or a list of integers). There are several different naming conventions for quiver mutation types. 
- Finite type -- ``letter`` is a Dynkin type (A-G), and ``rank`` is the rank. - Affine type -- there is more than one convention for naming affine types. * Kac's notation: ``letter`` is a Dynkin type, ``rank`` is the rank of the associated finite Dynkin diagram, and ``twist`` is the twist, which could be 1, 2, or 3. In the special case of affine type A, there is more than one quiver mutation type associated to the Cartan type. In this case only, ``rank`` is a pair of integers (i,j), giving the number of edges pointing clockwise and the number of edges pointing counter-clockwise. The total number of vertices is given by i+j in this case. * Naive notation: ``letter`` is one of 'BB', 'BC', 'BD', 'CC', 'CD'. The name specifies the two ends of the diagram, which are joined by a path. The total number of vertices is given by ``rank +1`` (to match the indexing people expect because these are affine types). In general, ``rank`` must be large enough for the picture to make sense, but we accept ``letter`` is ``BC`` and ``rank=1``. * Macdonald notation: for the dual of an untwisted affine type (such as ['C', 6,1]), we accept a twist of -1 (i.e., ['C',6,-1]). - Elliptic type -- ``letter`` is a Dynkin type, ``rank`` is the rank of the finite Dynkin diagram, and ``twist`` is a tuple of two integers. We follow Saito's notation. - Other shapes: * Rank 2: ``letter`` is 'R2', and ``rank`` is a pair of integers specifying the label on the unique edge. * Triangle: ``letter`` is ``TR``, and ``rank`` is the number of vertices along a side. * T: This defines a quiver shaped like a T. ``letter`` is 'T', and the ``rank`` is a triple, whose entries specify the number of vertices along each path from the branch point (counting the branch point). * Grassmannian: This defines the cluster algebra (without coefficients) corresponding to the cluster algebra with coefficients which is the co-ordinate ring of a Grassmannian. ``letter`` is 'GR'. ``rank`` is a pair of integers (`k`, `n`) with 'k' < 'n' specifying the Grassmannian of `k`-planes in `n`-space. This defines a quiver given by a (k-1) x (n-k-1) grid where each square is cyclically oriented. * Exceptional mutation finite quivers: The two exceptional mutation finite quivers, found by Derksen-Owen, have ``letter`` as 'X' and ``rank`` 6 or 7, equal to the number of vertices. * AE, BE, CE, DE: Quivers are built of one end which looks like type (affine A), B, C, or D, and the other end which looks like type E (i.e., it consists of two antennae, one of length one, and one of length two). ``letter`` is 'AE', 'BE', 'CE', or 'DE', and ``rank`` is the total number of vertices. Note that 'AE' is of a slightly different form and requires ``rank`` to be a pair of integers (i,j) just as in the case of affine type A. See Exercise 4.3 in Kac's book Infinite Dimensional Lie Algebras for more details. * Infinite type E: It is also possible to obtain infinite-type E quivers by specifying ``letter`` as 'E' and ``rank`` as the number of vertices. REFERENCES: - A good reference for finite and affine Dynkin diagrams, including Kac's notation, is the :wikipedia:`Dynkin_diagram`. - A good reference for the skew-symmetrizable elliptic diagrams is "Cluster algebras of finite mutation type via unfolding" by A. Felikson, M. Shapiro, and P. Tumarkin, [FST2012]_. 
EXAMPLES: Finite types:: sage: QuiverMutationType('A',1) ['A', 1] sage: QuiverMutationType('A',5) ['A', 5] sage: QuiverMutationType('B',2) ['B', 2] sage: QuiverMutationType('B',5) ['B', 5] sage: QuiverMutationType('C',2) ['B', 2] sage: QuiverMutationType('C',5) ['C', 5] sage: QuiverMutationType('D',2) [ ['A', 1], ['A', 1] ] sage: QuiverMutationType('D',3) ['A', 3] sage: QuiverMutationType('D',4) ['D', 4] sage: QuiverMutationType('E',6) ['E', 6] sage: QuiverMutationType('G',2) ['G', 2] sage: QuiverMutationType('A',(1,0),1) ['A', 1] sage: QuiverMutationType('A',(2,0),1) [ ['A', 1], ['A', 1] ] sage: QuiverMutationType('A',(7,0),1) ['D', 7] Affine types:: sage: QuiverMutationType('A',(1,1),1) ['A', [1, 1], 1] sage: QuiverMutationType('A',(2,4),1) ['A', [2, 4], 1] sage: QuiverMutationType('BB',2,1) ['BB', 2, 1] sage: QuiverMutationType('BB',4,1) ['BB', 4, 1] sage: QuiverMutationType('CC',2,1) ['CC', 2, 1] sage: QuiverMutationType('CC',4,1) ['CC', 4, 1] sage: QuiverMutationType('BC',1,1) ['BC', 1, 1] sage: QuiverMutationType('BC',5,1) ['BC', 5, 1] sage: QuiverMutationType('BD',3,1) ['BD', 3, 1] sage: QuiverMutationType('BD',5,1) ['BD', 5, 1] sage: QuiverMutationType('CD',3,1) ['CD', 3, 1] sage: QuiverMutationType('CD',5,1) ['CD', 5, 1] sage: QuiverMutationType('D',4,1) ['D', 4, 1] sage: QuiverMutationType('D',6,1) ['D', 6, 1] sage: QuiverMutationType('E',6,1) ['E', 6, 1] sage: QuiverMutationType('E',7,1) ['E', 7, 1] sage: QuiverMutationType('E',8,1) ['E', 8, 1] sage: QuiverMutationType('F',4,1) ['F', 4, 1] sage: QuiverMutationType('F',4,-1) ['F', 4, -1] sage: QuiverMutationType('G',2,1) ['G', 2, 1] sage: QuiverMutationType('G',2,-1) ['G', 2, -1] sage: QuiverMutationType('A',3,2) == QuiverMutationType('D',3,2) True Affine types using Kac's Notation:: sage: QuiverMutationType('A',1,1) ['A', [1, 1], 1] sage: QuiverMutationType('B',5,1) ['BD', 5, 1] sage: QuiverMutationType('C',5,1) ['CC', 5, 1] sage: QuiverMutationType('A',2,2) ['BC', 1, 1] sage: QuiverMutationType('A',7,2) ['CD', 4, 1] sage: QuiverMutationType('A',8,2) ['BC', 4, 1] sage: QuiverMutationType('D',6,2) ['BB', 5, 1] sage: QuiverMutationType('E',6,2) ['F', 4, -1] sage: QuiverMutationType('D',4,3) ['G', 2, -1] Elliptic types:: sage: QuiverMutationType('E',6,[1,1]) ['E', 6, [1, 1]] sage: QuiverMutationType('F',4,[2,1]) ['F', 4, [1, 2]] sage: QuiverMutationType('G',2,[3,3]) ['G', 2, [3, 3]] Mutation finite types: rank 2 cases:: sage: QuiverMutationType('R2',(1,1)) ['A', 2] sage: QuiverMutationType('R2',(1,2)) ['B', 2] sage: QuiverMutationType('R2',(1,3)) ['G', 2] sage: QuiverMutationType('R2',(1,4)) ['BC', 1, 1] sage: QuiverMutationType('R2',(1,5)) ['R2', [1, 5]] sage: QuiverMutationType('R2',(2,2)) ['A', [1, 1], 1] sage: QuiverMutationType('R2',(3,5)) ['R2', [3, 5]] Exceptional Derksen-Owen quivers:: sage: QuiverMutationType('X',6) ['X', 6] (Mainly) mutation infinite types: Infinite type E:: sage: QuiverMutationType('E',9) ['E', 8, 1] sage: QuiverMutationType('E',10) ['E', 10] sage: QuiverMutationType('E',12) ['E', 12] sage: QuiverMutationType('AE',(2,3)) ['AE', [2, 3]] sage: QuiverMutationType('BE',5) ['BE', 5] sage: QuiverMutationType('CE',5) ['CE', 5] sage: QuiverMutationType('DE',6) ['DE', 6] Grassmannian types:: sage: QuiverMutationType('GR',(2,4)) ['A', 1] sage: QuiverMutationType('GR',(2,6)) ['A', 3] sage: QuiverMutationType('GR',(3,6)) ['D', 4] sage: QuiverMutationType('GR',(3,7)) ['E', 6] sage: QuiverMutationType('GR',(3,8)) ['E', 8] sage: QuiverMutationType('GR',(3,10)) ['GR', [3, 10]] Triangular types:: sage: 
QuiverMutationType('TR',2) ['A', 3] sage: QuiverMutationType('TR',3) ['D', 6] sage: QuiverMutationType('TR',4) ['E', 8, [1, 1]] sage: QuiverMutationType('TR',5) ['TR', 5] T types:: sage: QuiverMutationType('T',(1,1,1)) ['A', 1] sage: QuiverMutationType('T',(1,1,4)) ['A', 4] sage: QuiverMutationType('T',(1,4,4)) ['A', 7] sage: QuiverMutationType('T',(2,2,2)) ['D', 4] sage: QuiverMutationType('T',(2,2,4)) ['D', 6] sage: QuiverMutationType('T',(2,3,3)) ['E', 6] sage: QuiverMutationType('T',(2,3,4)) ['E', 7] sage: QuiverMutationType('T',(2,3,5)) ['E', 8] sage: QuiverMutationType('T',(2,3,6)) ['E', 8, 1] sage: QuiverMutationType('T',(2,3,7)) ['E', 10] sage: QuiverMutationType('T',(3,3,3)) ['E', 6, 1] sage: QuiverMutationType('T',(3,3,4)) ['T', [3, 3, 4]] Reducible types:: sage: QuiverMutationType(['A',3],['B',4]) [ ['A', 3], ['B', 4] ] """ class QuiverMutationType_abstract(UniqueRepresentation, SageObject): """ EXAMPLES:: sage: mut_type1 = QuiverMutationType('A',5) sage: mut_type2 = QuiverMutationType('A',5) sage: mut_type3 = QuiverMutationType('A',6) sage: mut_type1 == mut_type2 True sage: mut_type1 == mut_type3 False """ def _repr_(self): """ Return the string representation of ``self``. EXAMPLES:: sage: QuiverMutationType(['A',2]) # indirect doctest ['A', 2] """ return self._description def plot(self, circular=False, directed=True): """ Return the plot of the underlying graph or digraph of ``self``. INPUT: - ``circular`` -- (default:``False``) if ``True``, the circular plot is chosen, otherwise >>spring<< is used. - ``directed`` -- (default: ``True``) if ``True``, the directed version is shown, otherwise the undirected. EXAMPLES:: sage: QMT = QuiverMutationType(['A',5]) sage: pl = QMT.plot() sage: pl = QMT.plot(circular=True) """ return self.standard_quiver().plot(circular=circular, directed=directed) def show(self, circular=False, directed=True): """ Show the plot of the underlying digraph of ``self``. INPUT: - ``circular`` -- (default:``False``) if ``True``, the circular plot is chosen, otherwise >>spring<< is used. - ``directed`` -- (default: ``True``) if ``True``, the directed version is shown, otherwise the undirected. TESTS:: sage: QMT = QuiverMutationType(['A',5]) sage: QMT.show() # long time """ self.plot( circular=circular, directed=directed ).show() def letter(self): """ Return the classification letter of ``self``. EXAMPLES:: sage: mut_type = QuiverMutationType( ['A',5] ); mut_type ['A', 5] sage: mut_type.letter() 'A' sage: mut_type = QuiverMutationType( ['BC',5,1] ); mut_type ['BC', 5, 1] sage: mut_type.letter() 'BC' sage: mut_type = QuiverMutationType(['A',3],['B',3]); mut_type [ ['A', 3], ['B', 3] ] sage: mut_type.letter() 'A x B' sage: mut_type = QuiverMutationType(['A',3],['B',3],['X',6]); mut_type [ ['A', 3], ['B', 3], ['X', 6] ] sage: mut_type.letter() 'A x B x X' """ return self._letter def rank(self): """ Return the rank in the standard quiver of ``self``. The rank is the number of vertices. 
EXAMPLES:: sage: mut_type = QuiverMutationType( ['A',5] ); mut_type ['A', 5] sage: mut_type.rank() 5 sage: mut_type = QuiverMutationType( ['A',[4,5],1] ); mut_type ['A', [4, 5], 1] sage: mut_type.rank() 9 sage: mut_type = QuiverMutationType( ['BC',5,1] ); mut_type ['BC', 5, 1] sage: mut_type.rank() 6 sage: mut_type = QuiverMutationType(['A',3],['B',3]); mut_type [ ['A', 3], ['B', 3] ] sage: mut_type.rank() 6 sage: mut_type = QuiverMutationType(['A',3],['B',3],['X',6]); mut_type [ ['A', 3], ['B', 3], ['X', 6] ] sage: mut_type.rank() 12 """ return self._rank @cached_method def b_matrix(self): """ Return the B-matrix of the standard quiver of ``self``. The conventions for B-matrices agree with Fomin-Zelevinsky (up to a reordering of the simple roots). EXAMPLES:: sage: mut_type = QuiverMutationType( ['A',5] ); mut_type ['A', 5] sage: mut_type.b_matrix() [ 0 1 0 0 0] [-1 0 -1 0 0] [ 0 1 0 1 0] [ 0 0 -1 0 -1] [ 0 0 0 1 0] sage: mut_type = QuiverMutationType(['A',3],['B',3]); mut_type [ ['A', 3], ['B', 3] ] sage: mut_type.b_matrix() [ 0 1 0 0 0 0] [-1 0 -1 0 0 0] [ 0 1 0 0 0 0] [ 0 0 0 0 1 0] [ 0 0 0 -1 0 -1] [ 0 0 0 0 2 0] """ return _edge_list_to_matrix(self._digraph.edges(), list(range(self._rank)), []) @cached_method def standard_quiver(self): """ Return the standard quiver of ``self``. EXAMPLES:: sage: mut_type = QuiverMutationType( ['A',5] ); mut_type ['A', 5] sage: mut_type.standard_quiver() Quiver on 5 vertices of type ['A', 5] sage: mut_type = QuiverMutationType( ['A',[5,3],1] ); mut_type ['A', [3, 5], 1] sage: mut_type.standard_quiver() Quiver on 8 vertices of type ['A', [3, 5], 1] sage: mut_type = QuiverMutationType(['A',3],['B',3]); mut_type [ ['A', 3], ['B', 3] ] sage: mut_type.standard_quiver() Quiver on 6 vertices of type [ ['A', 3], ['B', 3] ] sage: mut_type = QuiverMutationType(['A',3],['B',3],['X',6]); mut_type [ ['A', 3], ['B', 3], ['X', 6] ] sage: mut_type.standard_quiver() Quiver on 12 vertices of type [ ['A', 3], ['B', 3], ['X', 6] ] """ from .quiver import ClusterQuiver Q = ClusterQuiver(self._digraph) Q._mutation_type = self return Q @cached_method def cartan_matrix(self): """ Return the Cartan matrix of ``self``. Note that (up to a reordering of the simple roots) the convention for the definition of Cartan matrix, used here and elsewhere in Sage, agrees with the conventions of Kac, Fulton-Harris, and Fomin-Zelevinsky, but disagrees with the convention of Bourbaki. The `(i,j)` entry is `2(\\alpha_i,\\alpha_j)/(\\alpha_i,\\alpha_i)`. EXAMPLES:: sage: mut_type = QuiverMutationType(['A',5]); mut_type ['A', 5] sage: mut_type.cartan_matrix() [ 2 -1 0 0 0] [-1 2 -1 0 0] [ 0 -1 2 -1 0] [ 0 0 -1 2 -1] [ 0 0 0 -1 2] sage: mut_type = QuiverMutationType(['A',3],['B',3]); mut_type [ ['A', 3], ['B', 3] ] sage: mut_type.cartan_matrix() [ 2 -1 0 0 0 0] [-1 2 -1 0 0 0] [ 0 -1 2 0 0 0] [ 0 0 0 2 -1 0] [ 0 0 0 -1 2 -1] [ 0 0 0 0 -2 2] """ # as soon as CartanMatrix is implemented we should use it here: # from sage.combinat.root_system.cartan_matrix import CartanMatrix cmat = copy(self.b_matrix()) for i,j in cmat.nonzero_positions(): a = cmat[i,j] if a > 0: cmat[i,j] = -a for i in range(self._rank): cmat[i,i] = 2 # return CartanMatrix(cmat) return cmat def is_irreducible(self): """ Return ``True`` if ``self`` is irreducible. EXAMPLES:: sage: mt = QuiverMutationType(['A',2]) sage: mt.is_irreducible() True """ return self._info['irreducible'] def is_mutation_finite(self): """ Return ``True`` if ``self`` is of finite mutation type. 
This means that its mutation class has only finitely many different B-matrices. EXAMPLES:: sage: mt = QuiverMutationType(['D',5,1]) sage: mt.is_mutation_finite() True """ return self._info['mutation_finite'] def is_simply_laced(self): """ Return ``True`` if ``self`` is simply laced. This means that the only arrows that appear in the quiver of ``self`` are single unlabelled arrows. EXAMPLES:: sage: mt = QuiverMutationType(['A',2]) sage: mt.is_simply_laced() True sage: mt = QuiverMutationType(['B',2]) sage: mt.is_simply_laced() False sage: mt = QuiverMutationType(['A',(1,1),1]) sage: mt.is_simply_laced() False """ return self._info['simply_laced'] def is_skew_symmetric(self): """ Return ``True`` if the B-matrix of ``self`` is skew-symmetric. EXAMPLES:: sage: mt = QuiverMutationType(['A',2]) sage: mt.is_skew_symmetric() True sage: mt = QuiverMutationType(['B',2]) sage: mt.is_skew_symmetric() False sage: mt = QuiverMutationType(['A',(1,1),1]) sage: mt.is_skew_symmetric() True """ return self._info['skew_symmetric'] def is_finite(self): """ Return ``True`` if ``self`` is of finite type. This means that the cluster algebra associated to ``self`` has only a finite number of cluster variables. EXAMPLES:: sage: mt = QuiverMutationType(['A',2]) sage: mt.is_finite() True sage: mt = QuiverMutationType(['A',[4,2],1]) sage: mt.is_finite() False """ return self._info['finite'] def is_affine(self): """ Return ``True`` if ``self`` is of affine type. EXAMPLES:: sage: mt = QuiverMutationType(['A',2]) sage: mt.is_affine() False sage: mt = QuiverMutationType(['A',[4,2],1]) sage: mt.is_affine() True """ if self.is_irreducible(): return self._info['affine'] else: return False def is_elliptic(self): """ Return ``True`` if ``self`` is of elliptic type. EXAMPLES:: sage: mt = QuiverMutationType(['A',2]) sage: mt.is_elliptic() False sage: mt = QuiverMutationType(['E',6,[1,1]]) sage: mt.is_elliptic() True """ if self.is_irreducible(): return self._info['elliptic'] else: return False def properties(self): """ Print a scheme of all properties of ``self``. Most properties have natural definitions for either irreducible or reducible types. ``affine`` and ``elliptic`` are only defined for irreducible types. 
        EXAMPLES::

            sage: mut_type = QuiverMutationType(['A',3]); mut_type
            ['A', 3]
            sage: mut_type.properties()
            ['A', 3] has rank 3 and the following properties:
                - irreducible: True
                - mutation finite: True
                - simply-laced: True
                - skew-symmetric: True
                - finite: True
                - affine: False
                - elliptic: False

            sage: mut_type = QuiverMutationType(['B',3]); mut_type
            ['B', 3]
            sage: mut_type.properties()
            ['B', 3] has rank 3 and the following properties:
                - irreducible: True
                - mutation finite: True
                - simply-laced: False
                - skew-symmetric: False
                - finite: True
                - affine: False
                - elliptic: False

            sage: mut_type = QuiverMutationType(['B',3,1]); mut_type
            ['BD', 3, 1]
            sage: mut_type.properties()
            ['BD', 3, 1] has rank 4 and the following properties:
                - irreducible: True
                - mutation finite: True
                - simply-laced: False
                - skew-symmetric: False
                - finite: False
                - affine: True
                - elliptic: False

            sage: mut_type = QuiverMutationType(['E',6,[1,1]]); mut_type
            ['E', 6, [1, 1]]
            sage: mut_type.properties()
            ['E', 6, [1, 1]] has rank 8 and the following properties:
                - irreducible: True
                - mutation finite: True
                - simply-laced: False
                - skew-symmetric: True
                - finite: False
                - affine: False
                - elliptic: True

            sage: mut_type = QuiverMutationType(['A',3],['B',3]); mut_type
            [ ['A', 3], ['B', 3] ]
            sage: mut_type.properties()
            [ ['A', 3], ['B', 3] ] has rank 6 and the following properties:
                - irreducible: False
                - mutation finite: True
                - simply-laced: False
                - skew-symmetric: False
                - finite: True

            sage: mut_type = QuiverMutationType('GR',[4,9]); mut_type
            ['GR', [4, 9]]
            sage: mut_type.properties()
            ['GR', [4, 9]] has rank 12 and the following properties:
                - irreducible: True
                - mutation finite: False
                - simply-laced: True
                - skew-symmetric: True
                - finite: False
                - affine: False
                - elliptic: False
        """
        txt = '{} has rank {} and the following properties:'
        print(txt.format(self, self.rank()))
        s = "\t- {} {}"
        print(s.format('irreducible: ', self.is_irreducible()))
        print(s.format('mutation finite: ', self.is_mutation_finite()))
        print(s.format('simply-laced: ', self.is_simply_laced()))
        print(s.format('skew-symmetric: ', self.is_skew_symmetric()))
        print(s.format('finite: ', self.is_finite()))
        if self.is_irreducible():
            print(s.format('affine: ', self.is_affine()))
            print(s.format('elliptic: ', self.is_elliptic()))


class QuiverMutationType_Irreducible(QuiverMutationType_abstract):
    """
    The mutation type for a cluster algebra or a quiver.

    Should not be called directly, but through QuiverMutationType.
    """

    def __init__(self, letter, rank, twist=None):
        """
        Should not be called directly but through QuiverMutationType.
INPUT: - ``letter`` -- the letter of the mutation type - ``rank`` -- the rank of the mutation type - ``twist`` -- the twist of the mutation type EXAMPLES:: sage: QuiverMutationType('A',5) ['A', 5] sage: QuiverMutationType('A',[4,5],1) ['A', [4, 5], 1] sage: QuiverMutationType('BB',5,1) ['BB', 5, 1] sage: QuiverMutationType('X',6) ['X', 6] """ # _rank and _bi_rank are initialized self._rank = None self._bi_rank = None # _graph and _digraph are initialized self._graph = Graph() self._digraph = DiGraph() # _info is initialized self._info = {} self._info['irreducible'] = True self._info['mutation_finite'] = False self._info['simply_laced'] = False self._info['skew_symmetric'] = False self._info['finite'] = False self._info['affine'] = False self._info['elliptic'] = False self._info['irreducible_components'] = False if isinstance(rank, tuple): rank = list(rank) if isinstance(twist, tuple): twist = list(twist) # _letter/twist is the input letter/twist self._letter = letter self._twist = twist data = [letter,rank,twist] # type A (finite and affine) if letter == 'A': if twist is None and rank in ZZ and rank > 0: self._rank = rank self._info['mutation_finite'] = True self._info['simply_laced'] = True self._info['skew_symmetric'] = True self._info['finite'] = True elif twist==1 and isinstance(rank, list) and len(rank) == 2 and all( rank[i] in ZZ and rank[i] >= 0 for i in [0,1] ) and rank != [0,0]: if isinstance(rank, tuple): rank = list( rank ) data[1] = rank rank = sorted(rank) self._bi_rank = rank self._rank = sum( self._bi_rank ) self._info['mutation_finite'] = True if self._rank > 2: self._info['simply_laced'] = True self._info['skew_symmetric'] = True if rank[0] > 0: self._info['affine'] = True elif rank[0] == 0: self._info['finite'] = True else: _mutation_type_error( data ) # types ['A',1] and ['A',[0,1],1] need to be treated on # itself (as there is no edge) if twist is None and self._rank == 1 or twist == 1 and self._rank == 1: self._graph.add_vertex( 0 ) # type ['A',[1,1],1] needs to be treated on itself as well # (as there is a double edge) elif twist == 1 and self._bi_rank[0] == 1 and self._bi_rank[1] == 1: self._graph.add_edge( 0,1,2 ) else: for i in range( self._rank - 1 ): self._graph.add_edge( i, i+1, 1 ) if twist == 1: self._digraph.add_edge( self._rank - 1, 0, 1 ) for i in range( self._rank - 1 ): if i < ( 2 * self._bi_rank[0] ) and i%2 == 0: self._digraph.add_edge( i+1, i, 1 ) else: self._digraph.add_edge( i, i+1, 1 ) # type B (finite) elif letter == 'B': if twist is None and rank in ZZ and rank > 1: self._rank = rank self._info['mutation_finite'] = True self._info['finite'] = True else: _mutation_type_error( data ) for i in range( rank - 2 ): self._graph.add_edge( i, i+1, 1 ) if (rank % 2 == 0): self._graph.add_edge( rank-2, rank-1, (1,-2) ) else: self._graph.add_edge( rank-2, rank-1, (2,-1) ) # type C (finite) elif letter == 'C': if twist is None and rank in ZZ and rank > 1: self._rank = rank self._info['mutation_finite'] = True self._info['finite'] = True else: _mutation_type_error( data ) for i in range( rank - 2 ): self._graph.add_edge( i, i+1, 1 ) if (rank % 2 == 0): self._graph.add_edge( rank-2, rank-1, (2,-1) ) else: self._graph.add_edge( rank-2, rank-1, (1,-2) ) # type BB (affine) elif letter == 'BB': if twist == 1 and rank in ZZ and rank > 1: self._rank = rank + 1 self._info['mutation_finite'] = True self._info['affine'] = True else: _mutation_type_error( data ) for i in range( rank - 2 ): self._graph.add_edge( i, i+1, 1 ) if rank % 2 == 0: self._graph.add_edge( rank-2, 
rank-1, (1,-2) ) else: self._graph.add_edge( rank-2, rank-1, (2,-1) ) self._graph.add_edge( rank, 0 , (1,-2) ) # type CC (affine) elif letter == 'CC': if twist == 1 and rank in ZZ and rank > 1: self._rank = rank + 1 self._info['mutation_finite'] = True self._info['affine'] = True else: _mutation_type_error( data ) for i in range( rank - 2 ): self._graph.add_edge( i, i+1, 1 ) if rank % 2 == 0: self._graph.add_edge( rank-2, rank-1, (2,-1) ) else: self._graph.add_edge( rank-2, rank-1, (1,-2) ) self._graph.add_edge( rank, 0 , (2,-1) ) # type BC (affine) elif letter == 'BC': if twist == 1 and rank in ZZ and rank >= 1: self._rank = rank + 1 self._info['mutation_finite'] = True self._info['affine'] = True else: _mutation_type_error( data ) if rank == 1: self._graph.add_edge( 0,1,(1,-4) ) else: for i in range( rank - 2 ): self._graph.add_edge( i, i+1, 1 ) if (rank % 2 == 0): self._graph.add_edge( rank-2, rank-1, (2,-1) ) else: self._graph.add_edge( rank-2, rank-1, (1,-2) ) if twist == 1: self._graph.add_edge( rank, 0 , (1,-2) ) # type BD (affine) elif letter == 'BD': if twist == 1 and rank in ZZ and rank > 2: self._rank = rank + 1 self._info['mutation_finite'] = True self._info['affine'] = True else: _mutation_type_error( data ) for i in range( rank - 2 ): self._graph.add_edge( i, i+1, 1 ) if (rank % 2 == 0): self._graph.add_edge( rank-2, rank-1, (1,-2) ) else: self._graph.add_edge( rank-2, rank-1, (2,-1) ) if twist == 1: self._graph.add_edge( rank, 1 , 1 ) # type CD (affine) elif letter == 'CD': if twist == 1 and rank in ZZ and rank > 2: self._rank = rank + 1 self._info['mutation_finite'] = True self._info['affine'] = True else: _mutation_type_error( data ) for i in range( rank - 2 ): self._graph.add_edge( i, i+1, 1 ) if (rank % 2 == 0): self._graph.add_edge( rank-2, rank-1, (2,-1) ) else: self._graph.add_edge( rank-2, rank-1, (1,-2) ) if twist == 1: self._graph.add_edge( rank, 1 , 1 ) # type D (finite and affine) elif letter == 'D': if rank in ZZ and rank > 3 and twist is None: self._rank = rank self._info['mutation_finite'] = True self._info['simply_laced'] = True self._info['skew_symmetric'] = True self._info['finite'] = True elif twist == 1 and rank in ZZ and rank > 3: self._rank = rank + 1 self._info['mutation_finite'] = True self._info['simply_laced'] = True self._info['skew_symmetric'] = True self._info['affine'] = True else: _mutation_type_error( data ) for i in range( rank - 2 ): self._graph.add_edge( i, i+1, 1 ) self._graph.add_edge( rank-3, rank-1, 1 ) if twist is not None: self._graph.add_edge( rank, 1 ,1 ) # type E (finite, affine and elliptic) elif letter == 'E': if rank in [6,7,8] and twist is None: self._rank = rank self._info['mutation_finite'] = True self._info['simply_laced'] = True self._info['skew_symmetric'] = True self._info['finite'] = True if rank == 6: self._graph.add_edges( [ (0,1),(1,2),(2,3),(3,4),(2,5) ] ) elif rank == 7: self._graph.add_edges([(0, 1), (1, 2), (2, 3), (3, 4), (4, 5), (2, 6)]) elif rank == 8: self._graph.add_edges([(0, 1), (1, 2), (2, 3), (3, 4), (4, 5), (5, 6),(2, 7)]) elif rank in [6,7,8] and twist == 1: self._rank = rank + 1 self._info['mutation_finite'] = True self._info['simply_laced'] = True self._info['skew_symmetric'] = True self._info['affine'] = True if rank == 6: self._graph.add_edges( [ (0,1),(1,2),(2,3),(3,4),(2,5),(5,6) ] ) elif rank == 7: self._graph.add_edges( [ (0,1),(1,2),(2,3),(3,4),(4,5),(5,6),(3,7) ] ) elif rank == 8: self._graph.add_edges( [ (0,1),(1,2),(2,3),(3,4),(4,5),(5,6),(6,7),(2,8) ] ) elif rank in [6,7,8] and twist == 
[1,1]: self._rank = rank + 2 self._info['mutation_finite'] = True self._info['skew_symmetric'] = True self._info['elliptic'] = True if rank == 6: self._digraph.add_edges( [ (0,1,1),(1,2,1),(3,2,1),(3,4,1),(5,6,1),(6,7,1),(5,1,1),(2,5,2),(5,3,1),(6,2,1) ] ) elif rank == 7: self._digraph.add_edges( [ (1,0,1),(1,2,1),(2,3,1),(4,3,1),(4,5,1),(6,5,1),(7,8,1),(3,7,2),(7,2,1),(7,4,1),(8,3,1) ] ) elif rank == 8: self._digraph.add_edges( [ (0,1,1),(1,9,1),(3,9,1),(3,4,1),(2,8,1),(2,1,1),(9,2,2),(2,3,1),(8,9,1),(5,4,1),(5,6,1),(7,6,1) ] ) # type E (mutation infinite) elif rank > 9 and twist is None: self._info['simply_laced'] = True self._info['skew_symmetric'] = True self._rank = rank for i in range(rank-2): self._graph.add_edge( i, i+1, 1 ) self._graph.add_edge( 2, rank-1 ) else: _mutation_type_error(data) # type AE (mutation infinite) elif letter == 'AE': if isinstance(rank, list) and len(rank) == 2 and all( rank[i] in ZZ and rank[i] > 0 for i in [0,1] ) and twist is None: if isinstance(rank, tuple): rank = list( rank ) data[1] = rank rank = sorted(rank) self._bi_rank = rank self._rank = sum( self._bi_rank ) + 1 if self._rank > 3: self._info['simply_laced'] = True self._info['skew_symmetric'] = True if self._bi_rank == [1,1]: self._graph.add_edges( [(0,1,2),(1,2,None)] ) else: self._digraph.add_edge( self._rank - 2, 0 ) for i in range(self._rank-2): if i < ( 2 * self._bi_rank[0] ) and i%2 == 0: self._digraph.add_edge(i+1,i) else: self._digraph.add_edge(i,i+1) self._digraph.add_edge(self._rank-2,self._rank-1) else: _mutation_type_error( data ) # type BE (mutation infinite) elif letter == 'BE': if rank >4 and twist is None: self._rank = rank for i in range(rank-3): self._graph.add_edge( i, i+1 ) self._graph.add_edge( 2, rank-1 ) if rank%2 == 0: self._graph.add_edge( rank-3,rank-2,(2,-1) ) else: self._graph.add_edge( rank-3,rank-2,(1,-2) ) else: _mutation_type_error( data ) # type CE (mutation infinite) elif letter == 'CE': if rank >4 and twist is None: self._rank = rank for i in range(rank-3): self._graph.add_edge( i, i+1 ) self._graph.add_edge( 2, rank-1 ) if rank%2 == 0: self._graph.add_edge( rank-3,rank-2,(1,-2) ) else: self._graph.add_edge( rank-3,rank-2,(2,-1) ) else: _mutation_type_error( data ) # type DE (mutation infinite) elif letter == 'DE': if rank >5 and twist is None: self._rank = rank self._info['simply_laced'] = True self._info['skew_symmetric'] = True for i in range(rank-3): self._graph.add_edge( i, i+1 ) self._graph.add_edge( 2, rank-2 ) self._graph.add_edge( rank-4, rank-1 ) else: _mutation_type_error( data ) # type F (finite, affine, and elliptic) elif letter == 'F': if rank == 4 and twist is None: self._rank = rank self._info['mutation_finite'] = True self._info['finite'] = True self._graph.add_edges( [ (0,1,None),(1,2,(2,-1)),(2,3,None) ] ) elif rank == 4 and twist == 1: self._rank = rank + 1 self._info['mutation_finite'] = True self._info['affine'] = True self._graph.add_edges( [ (0,1,None), (1,2,None), (2,3,(1,-2)),(3,4,None) ] ) elif rank == 4 and twist == -1: self._rank = rank + 1 self._info['mutation_finite'] = True self._info['affine'] = True self._graph.add_edges( [ (0,1,None), (1,2,None), (2,3,(2,-1)),(3,4,None) ] ) elif rank == 4 and (twist == [1,2]): self._rank = rank + 2 self._info['mutation_finite'] = True self._info['elliptic'] = True self._digraph.add_edges( [ (0,1,None), (1,2,None), (2,3,(2,-1)), (4,2,(1,-2)), (3,4,2), (4,5,None), (5,3,None) ]) elif rank == 4 and (twist == [2,1]): self._rank = rank + 2 self._info['mutation_finite'] = True self._info['elliptic'] = 
True self._digraph.add_edges( [ (0,1,None), (1,2,None), (2,3,(1,-2)), (4,2,(2,-1)), (3,4,2), (4,5,None), (5,3,None) ]) elif rank == 4 and twist == [2,2]: self._rank = rank + 2 self._info['mutation_finite'] = True self._info['elliptic'] = True self._digraph.add_edges( [ (0,1,None), (1,2,None), (3,1,None), (2,3,2), (4,2,(2,-1)), (3,4,(1,-2)), (5,4,None) ] ) elif rank == 4 and twist == [1,1]: self._rank = rank + 2 self._info['mutation_finite'] = True self._info['elliptic'] = True self._digraph.add_edges( [ (0,1,None), (1,2,None), (3,1,None), (2,3,2), (4,2,(1,-2)), (3,4,(2,-1)), (5,4,None) ] ) else: _mutation_type_error( data ) # type G (finite, affine, and elliptic) elif letter == 'G': if rank == 2 and twist is None: self._rank = rank self._info['mutation_finite'] = True self._info['finite'] = True self._graph.add_edges( [ (0,1,(1,-3)) ] ) elif rank == 2 and twist == -1: self._rank = rank + 1 self._info['mutation_finite'] = True self._info['affine'] = True self._graph.add_edges( [ (0,1,None),(1,2,(1,-3)) ] ) elif rank == 2 and twist == 1: self._rank = rank + 1 self._info['mutation_finite'] = True self._info['affine'] = True self._graph.add_edges( [ (0,1,None),(1,2,(3,-1)) ] ) elif rank == 2 and (twist == [1,3]): self._rank = rank + 2 self._info['mutation_finite'] = True self._info['elliptic'] = True self._digraph.add_edges( [ (0,1,None), (1,2,(3,-1)), (3,1,(1,-3)), (2,3,2)] ) elif rank == 2 and (twist == [3,1]): self._rank = rank + 2 self._info['mutation_finite'] = True self._info['elliptic'] = True self._digraph.add_edges( [ (0,1,None), (1,2,(1,-3)), (3,1,(3,-1)), (2,3,2)] ) elif rank == 2 and twist == [3,3]: self._rank = rank + 2 self._info['mutation_finite'] = True self._info['elliptic'] = True self._digraph.add_edges( [ (1,0,None), (0,2,2), (3,0,(3,-1)), (2,1,None), (2,3, (1,-3))]) elif rank == 2 and twist == [1,1]: self._rank = rank + 2 self._info['mutation_finite'] = True self._info['elliptic'] = True self._digraph.add_edges( [ (1,0,None), (0,2,2), (3,0,(1,-3)), (2,1,None), (2,3,(3,-1)) ] ) else: _mutation_type_error( data ) # type GR (mutation infinite) elif letter == 'GR': if twist is None and isinstance(rank, list) and len(rank) == 2 and all( rank[i] in ZZ and rank[i] > 0 for i in [0,1] ) and rank[1] - 1 > rank[0] > 1: gr_rank = (rank[0]-1,rank[1]-rank[0]-1) self._rank = prod(gr_rank) self._info['simply_laced'] = True self._info['skew_symmetric'] = True a,b = gr_rank for i in range(a): for j in range(b): if i < a-1: if (i+j) % 2 == 0: self._digraph.add_edge(i*b+j,(i+1)*b+j) else: self._digraph.add_edge((i+1)*b+j,i*b+j) if j < b-1: if (i+j) % 2 == 0: self._digraph.add_edge(i*b+j+1,i*b+j) else: self._digraph.add_edge(i*b+j,i*b+j+1) else: _mutation_type_error( data ) # type R2 (rank 2 finite mutation types) elif letter == 'R2': if twist is None and isinstance(rank, list) and len(rank) == 2 and all( rank[i] in ZZ and rank[i] > 0 for i in [0,1] ): rank = sorted(rank) b,c = rank self._rank = 2 if b == c: self._info['skew_symmetric'] = True self._graph.add_edge(0,1,(b,-c)) else: _mutation_type_error( data ) # type T elif letter == 'T': if twist is None and isinstance(rank, list) and len(rank) == 3 and all( rank[i] in ZZ and rank[i] > 0 for i in [0,1,2] ): if isinstance(rank, tuple): rank = list( rank ) data[1] = rank rank = sorted( rank ) self._rank = sum( rank ) - 2 self._info['simply_laced'] = True self._info['skew_symmetric'] = True r,p,q = rank for i in range(q-1): if i == 0: self._graph.add_edge(0,1) self._graph.add_edge(0,r) self._graph.add_edge(0,r+p-1) else: if i < r-1: 
self._graph.add_edge(i,i+1) if i < p-1: self._graph.add_edge(i+r-1,i+r) self._graph.add_edge(i+r+p-2,i+r+p-1) else: _mutation_type_error( data ) # type TR (mutation infinite if rank > 2) elif letter == 'TR': # type ['TR',1] needs to be treated on itself (as there is no edge) if twist is None and rank == 1: self._graph.add_vertex( 0 ) elif twist is None and rank > 1: self._rank = rank*(rank+1)//2 self._info['simply_laced'] = True self._info['skew_symmetric'] = True level = 0 while level < rank: nr = rank*level-sum(range(level)) for i in range(nr,nr+rank-level-1): self._digraph.add_edge(i,i+1) self._digraph.add_edge(i+rank-level,i) self._digraph.add_edge(i+1,i+rank-level) level += 1 else: _mutation_type_error( data ) # type X elif letter == 'X': if rank in [6,7] and twist is None: self._rank = rank self._info['mutation_finite'] = True self._info['skew_symmetric'] = True self._digraph.add_edges( [ (0,1,2),(1,2,None),(2,0,None), (2,3,None),(3,4,2),(4,2,None), (2,5,None) ] ) if rank == 7: self._digraph.add_edges( [ (5,6,2),(6,2,None) ] ) else: _mutation_type_error( data ) # otherwise, an error is raised else: _mutation_type_error( data ) # in the bipartite case, the digraph is constructed from the graph if not self._digraph: if self._graph.is_bipartite(): self._digraph = _bipartite_graph_to_digraph( self._graph ) else: raise ValueError('The QuiverMutationType does not have ' 'a Coxeter diagram.') # in the other cases, the graph is constructed from the digraph if not self._graph: self._graph = self._digraph.to_undirected() # _description is as for CartanType if twist: self._description = str( [letter,rank,twist] ) else: self._description = str( [letter,rank] ) def irreducible_components( self ): """ Return a list of all irreducible components of ``self``. EXAMPLES:: sage: mut_type = QuiverMutationType('A',3); mut_type ['A', 3] sage: mut_type.irreducible_components() (['A', 3],) """ return tuple([self]) @cached_method def class_size(self): r""" If it is known, the size of the mutation class of all quivers which are mutation equivalent to the standard quiver of ``self`` (up to isomorphism) is returned. Otherwise, ``NotImplemented`` is returned. Formula for finite type A is taken from Torkildsen - Counting cluster-tilted algebras of type `A_n`. Formulas for affine type A and finite type D are taken from Bastian, Prellberg, Rubey, Stump - Counting the number of elements in the mutation classes of `\widetilde A_n` quivers. Formulas for finite and affine types B and C are proven but not yet published. Conjectural formulas for several other non-simply-laced affine types are implemented. Exceptional Types (finite, affine, and elliptic) E, F, G, and X are hardcoded. EXAMPLES:: sage: mut_type = QuiverMutationType( ['A',5] ); mut_type ['A', 5] sage: mut_type.class_size() 19 sage: mut_type = QuiverMutationType( ['A',[10,3],1] ); mut_type ['A', [3, 10], 1] sage: mut_type.class_size() 142120 sage: mut_type = QuiverMutationType( ['B',6] ); mut_type ['B', 6] sage: mut_type.class_size() 132 sage: mut_type = QuiverMutationType( ['BD',6,1] ); mut_type ['BD', 6, 1] sage: mut_type.class_size() Warning: This method uses a formula which has not been proved correct. 
504 Check that :trac:`14048` is fixed:: sage: mut_type = QuiverMutationType( ['F',4,(2,1)] ) sage: mut_type.class_size() 90 """ if not self.is_mutation_finite(): return infinity # type A (finite and affine) if self._letter == 'A': # the formula is taken from Torkildsen - Counting # cluster-tilted algebras of type A if self.is_finite(): n = self._rank a = binomial( 2*(n+1), n+1 ) // (n+2) if n % 2 == 1: a += binomial( n+1, (n+1)//2 ) if n % 3 == 0: a += 2 * binomial( 2*n//3, n//3 ) return a // (n+3) # the formula is taken from Bastian, Prellberg, Rubey, Stump elif self.is_affine(): i,j = self._bi_rank i = ZZ(i) j = ZZ(j) n = i+j f = Euler_Phi() if i == j: return ( binomial( 2*i,i ) + sum( f(k) * binomial(2*i//k,i//k)**2 for k in [k for k in i.divisors() if k in j.divisors()] ) // n ) // 4 else: return sum( f(k) * binomial(2*i//k,i//k) * binomial(2*j//k,j//k) for k in [k for k in i.divisors() if k in j.divisors()] ) // ( 2 * n ) # types B and C (finite and affine) elif self._letter in ['B', 'C']: # this formula is proven but nowhere published correctness # is clear enough that I don't think a warning is needed if self.is_finite(): n = self._rank return binomial(2 * n, n) // (n + 1) elif self._letter in ['BB','CC']: # these two formulas are not yet proven print(Warning("Warning: This method uses a formula " "which has not been proved correct.")) if self.is_affine(): if self._twist == 1: n = self._rank - 1 if n%2==1: return binomial( 2*n-1, n-1 ) else: return binomial( 2*n-1, n-1 ) + binomial( n-1, n//2 -1 ) # type BC (affine) elif self._letter == 'BC': # this formula is not yet proven print(Warning("Warning: This method uses a formula " "which has not been proved correct.")) if self.is_affine(): if self._twist == 1: n = self._rank - 1 return binomial( 2*n, n ) # types BD and CD (affine) elif self._letter in ['BD','CD']: # this formula is not yet proven print(Warning("Warning: This method uses a formula " "which has not been proved correct.")) if self.is_affine(): if self._twist == 1: n = self._rank - 2 return 2*binomial( 2*n, n ) # type D (finite and affine) elif self._letter == 'D': # the formula is taken from Bastian, Prellberg, Rubey, Stump if self.is_finite(): if self._rank == 4: return 6 else: f = Euler_Phi() n = ZZ(self._rank) return sum( f( n//k ) * binomial( 2*k, k ) for k in n.divisors() ) // (2*n) # this formula is not yet proven elif self.is_affine(): n = self._rank - 3 if n == 2: return 9 else: print(Warning ("Warning: This method uses a formula " "which has not been proved correct.")) if n%2==1: return 2*binomial(2*n,n) else: return 2*binomial(2*n,n) + binomial(n, n//2) # the exceptional types are hard-coded # type E (finite, affine and elliptic) elif self._letter == 'E': if self.is_finite(): if self._rank == 6: return 67 elif self._rank == 7: return 416 elif self._rank == 8: return 1574 elif self.is_affine(): if self._rank == 7: return 132 elif self._rank == 8: return 1080 elif self._rank == 9: return 7560 elif self.is_elliptic(): if self._rank == 8: return 49 elif self._rank == 9: return 506 elif self._rank == 10: return 5739 # type F elif self._letter == 'F': if self.is_finite(): return 15 elif self.is_affine(): return 60 elif self.is_elliptic(): if self._twist == [1,2]: return 90 if self._twist == [1,1] or self._twist == [2,2]: return 35 # type G elif self._letter == 'G': if self.is_finite(): return 2 elif self.is_affine(): return 6 elif self.is_elliptic(): if self._twist == [1,3]: return 7 if self._twist == [1,1] or self._twist == [3,3]: return 2 # type X elif self._letter 
== 'X':
            if self._rank == 6:
                return 5
            elif self._rank == 7:
                return 2

        # otherwise the size is unknown
        else:
            print("Size unknown")
            return NotImplemented

    def dual(self):
        """
        Return the QuiverMutationType which is dual to ``self``.

        EXAMPLES::

            sage: mut_type = QuiverMutationType('A',5); mut_type
            ['A', 5]
            sage: mut_type.dual()
            ['A', 5]

            sage: mut_type = QuiverMutationType('B',5); mut_type
            ['B', 5]
            sage: mut_type.dual()
            ['C', 5]
            sage: mut_type.dual().dual()
            ['B', 5]
            sage: mut_type.dual().dual() == mut_type
            True
        """
        letter = self.letter()
        # the self-dual cases
        if letter != 'BC' and letter[0] in ['B','C']:
            if letter == 'BB':
                letter = 'CC'
            elif letter == 'CC':
                letter = 'BB'
            elif letter[0] == 'B':
                letter = 'C' + letter[1:]
            elif letter[0] == 'C':
                letter = 'B' + letter[1:]
            rank = self._rank
            if self.is_affine():
                rank -= 1
            twist = self._twist
            return QuiverMutationType(letter,rank,twist)
        # the cases F and G have non-trivial duality in some cases
        elif letter in ['F','G']:
            if self.is_finite():
                return self
            elif self.is_affine():
                rank = self._rank - 1
                twist = - self._twist
            elif self.is_elliptic():
                twist = self._twist
                rank = self._rank - 2
                if letter == 'F':
                    if self._twist == [2,2]:
                        twist = [1,1]
                    if self._twist == [1,1]:
                        twist = [2,2]
                if letter == 'G':
                    if self._twist == [3,3]:
                        twist = [1,1]
                    elif self._twist == [1,1]:
                        twist = [3,3]
            else:
                rank = self._rank
            return QuiverMutationType(letter,rank,twist)
        else:
            return self


class QuiverMutationType_Reducible(QuiverMutationType_abstract):
    """
    The mutation type for a cluster algebra or a quiver.

    Should not be called directly, but through QuiverMutationType.
    Inherits from QuiverMutationType_abstract.
    """

    def __init__(self, *args):
        """
        Should not be called directly, but through QuiverMutationType.

        INPUT:

        - ``data`` -- a list each of whose entries is a
          QuiverMutationType_Irreducible

        EXAMPLES::

            sage: QuiverMutationType(['A',4],['B',6])
            [ ['A', 4], ['B', 6] ]
        """
        data = args
        if len(data) < 2 or not all( isinstance(comp, QuiverMutationType_Irreducible) for comp in data ):
            return _mutation_type_error(data)

        # _info is initialized
        self._info = {}
        self._info['irreducible'] = False
        self._info['mutation_finite'] = all(comp.is_mutation_finite() for comp in data)
        self._info['simply_laced'] = all(comp.is_simply_laced() for comp in data)
        self._info['skew_symmetric'] = all(comp.is_skew_symmetric() for comp in data)
        self._info['finite'] = all(comp.is_finite() for comp in data)
        self._info['irreducible_components'] = copy(data)

        # letter and rank are initialized
        self._letter = ''
        self._rank = 0

        # graph and digraph are initialized
        self._graph = Graph()
        self._digraph = DiGraph()
        for comp in data:
            if self._letter:
                self._letter += ' x '
            self._letter += comp._letter
            self._rank += comp._rank
            self._graph = self._graph.disjoint_union(comp._graph,
                                                     labels='integers')
            self._digraph = self._digraph.disjoint_union(comp._digraph,
                                                         labels='integers')
        self._graph.name('')
        self._digraph.name('')

        # _description is as for CartanType
        self._description = "[ "
        comps = self.irreducible_components()
        for i in range(len(comps)):
            if i > 0:
                self._description += ", "
            self._description += comps[i]._description
        self._description += " ]"

    def irreducible_components( self ):
        """
        Return a list of all irreducible components of ``self``.
        EXAMPLES::

            sage: mut_type = QuiverMutationType('A',3); mut_type
            ['A', 3]
            sage: mut_type.irreducible_components()
            (['A', 3],)

            sage: mut_type = QuiverMutationType(['A',3],['B',3]); mut_type
            [ ['A', 3], ['B', 3] ]
            sage: mut_type.irreducible_components()
            (['A', 3], ['B', 3])

            sage: mut_type = QuiverMutationType(['A',3],['B',3],['X',6])
            sage: mut_type
            [ ['A', 3], ['B', 3], ['X', 6] ]
            sage: mut_type.irreducible_components()
            (['A', 3], ['B', 3], ['X', 6])
        """
        return self._info['irreducible_components']

    @cached_method
    def class_size(self):
        """
        If it is known, the size of the mutation class of all quivers
        which are mutation equivalent to the standard quiver of
        ``self`` (up to isomorphism) is returned.

        Otherwise, ``NotImplemented`` is returned.

        EXAMPLES::

            sage: mut_type = QuiverMutationType(['A',3],['B',3]); mut_type
            [ ['A', 3], ['B', 3] ]
            sage: mut_type.class_size()
            20

            sage: mut_type = QuiverMutationType(['A',3],['B',3],['X',6])
            sage: mut_type
            [ ['A', 3], ['B', 3], ['X', 6] ]
            sage: mut_type.class_size()
            100
        """
        if not self.is_mutation_finite():
            return infinity
        else:
            components = []
            multiplicities = []
            for x in self.irreducible_components():
                if components.count(x) == 0:
                    components.append(x)
                    multiplicities.append(1)
                else:
                    y = components.index(x)
                    multiplicities[y] = multiplicities[y]+1

            sizes = [ x.class_size() for x in components ]
            if NotImplemented in sizes:
                print("Size unknown")
                return NotImplemented
            else:
                return prod( [binomial(sizes[i]+multiplicities[i]-1,
                                       multiplicities[i] )
                              for i in range (0,len(sizes))])

    def dual(self):
        """
        Return the QuiverMutationType which is dual to ``self``.

        EXAMPLES::

            sage: mut_type = QuiverMutationType(['A',5],['B',6],['C',5],['D',4]); mut_type
            [ ['A', 5], ['B', 6], ['C', 5], ['D', 4] ]
            sage: mut_type.dual()
            [ ['A', 5], ['C', 6], ['B', 5], ['D', 4] ]
        """
        comps = self.irreducible_components()
        return QuiverMutationType( [comp.dual() for comp in comps ] )


def _construct_classical_mutation_classes(n):
    r"""
    Return a dict with keys being tuples representing regular
    QuiverMutationTypes of the given rank, and with values being lists
    or sets containing all mutation equivalent quivers as dig6 data.
    EXAMPLES::

        sage: from sage.combinat.cluster_algebra_quiver.quiver_mutation_type import _construct_classical_mutation_classes
        sage: rank_2_classes = _construct_classical_mutation_classes(2) # long time
        sage: for mut_class in sorted(rank_2_classes.keys(),key=str): # long time
        ....:     print("{} {}".format(mut_class, rank_2_classes[mut_class]))
        ('A', (1, 1), 1) [('AO', (((0, 1), (2, -2)),))]
        ('A', 2) [('AO', ())]
        ('B', 2) [('AO', (((0, 1), (1, -2)),)), ('AO', (((0, 1), (2, -1)),))]
        ('BC', 1, 1) [('AO', (((0, 1), (1, -4)),)), ('AO', (((0, 1), (4, -1)),))]
    """
    from sage.combinat.cluster_algebra_quiver.quiver import ClusterQuiver
    data = {}

    # finite A
    data[ ('A',n) ] = ClusterQuiver(['A',n]).mutation_class(data_type='dig6')
    # affine A
    for j in range(1, n//2+1):
        data[ ('A',(n-j,j),1) ] = ClusterQuiver(['A',[n-j,j],1]).mutation_class(data_type='dig6')
    # finite B
    if n > 1:
        data[ ('B',n) ] = ClusterQuiver(['B',n]).mutation_class(data_type='dig6')
    # affine B
    if n > 2:
        data[ ('BB',n-1,1) ] = ClusterQuiver(['BB',n-1,1]).mutation_class(data_type='dig6')
    # finite C
    if n > 2:
        data[ ('C',n) ] = ClusterQuiver(['C',n]).mutation_class(data_type='dig6')
    # affine C
    if n > 1:
        data[ ('BC',n-1,1) ] = ClusterQuiver(['BC',n-1,1]).mutation_class(data_type='dig6')
    # affine CC
    if n > 2:
        data[ ('CC',n-1,1) ] = ClusterQuiver(['CC',n-1,1]).mutation_class(data_type='dig6')
    # affine BD
    if n > 3:
        data[ ('BD',n-1,1) ] = ClusterQuiver(['BD',n-1,1]).mutation_class(data_type='dig6')
    # affine CD
    if n > 3:
        data[ ('CD',n-1,1) ] = ClusterQuiver(['CD',n-1,1]).mutation_class(data_type='dig6')
    # finite D
    if n > 3:
        data[ ('D',n) ] = ClusterQuiver(['D',n]).mutation_class(data_type='dig6')
    # affine D
    if n > 4:
        data[ ('D',n-1,1) ] = ClusterQuiver(['D',n-1,1]).mutation_class(data_type='dig6')

    return data


def _construct_exceptional_mutation_classes(n):
    r"""
    Return a dict with keys being tuples representing exceptional
    QuiverMutationTypes of the given rank, and with values being lists
    or sets containing all mutation equivalent quivers as dig6 data.
    EXAMPLES::

        sage: from sage.combinat.cluster_algebra_quiver.quiver_mutation_type import _construct_exceptional_mutation_classes
        sage: rank_3_exceptional = _construct_exceptional_mutation_classes(3) # long time
        sage: for mut_class in sorted(rank_3_exceptional.keys(), key=str): # long time
        ....:     print("{} {}".format(mut_class, rank_3_exceptional[mut_class]))
        ('G', 2, -1) [('BH?', (((1, 2), (1, -3)),)), ('BGO', (((2, 1), (3, -1)),)), ('BW?', (((0, 1), (3, -1)),)), ('BP?', (((0, 1), (1, -3)),)), ('BP_', (((0, 1), (1, -3)), ((2, 0), (3, -1)))), ('BP_', (((0, 1), (3, -1)), ((1, 2), (1, -3)), ((2, 0), (2, -2))))]
        ('G', 2, 1) [('BH?', (((1, 2), (3, -1)),)), ('BGO', (((2, 1), (1, -3)),)), ('BW?', (((0, 1), (1, -3)),)), ('BP?', (((0, 1), (3, -1)),)), ('BKO', (((1, 0), (3, -1)), ((2, 1), (1, -3)))), ('BP_', (((0, 1), (2, -2)), ((1, 2), (1, -3)), ((2, 0), (3, -1))))]
    """
    from sage.combinat.cluster_algebra_quiver.quiver import ClusterQuiver
    data = {}

    # finite E
    if n in [6,7,8]:
        data[ ('E',n) ] = ClusterQuiver(['E',n]).mutation_class(data_type='dig6')
    # affine E
    if n in [7,8,9]:
        data[ ('E',n-1,1) ] = ClusterQuiver(['E',n-1,1]).mutation_class(data_type='dig6')
    # elliptic E
    if n in [8,9,10]:
        data[ ('E',n-2,(1,1)) ] = ClusterQuiver(['E',n-2,[1,1]]).mutation_class(data_type='dig6')
    # finite F
    if n == 4:
        data[ ('F',4) ] = ClusterQuiver(['F',4]).mutation_class(data_type='dig6')
    # affine F
    if n == 5:
        data[ ('F',4,1) ] = ClusterQuiver(['F',4,1]).mutation_class(data_type='dig6')
        data[ ('F',4,-1) ] = ClusterQuiver(['F',4,-1]).mutation_class(data_type='dig6')
    # finite G
    if n == 2:
        data[ ('G',2) ] = ClusterQuiver(['G',2]).mutation_class(data_type='dig6')
    # affine G
    if n == 3:
        data[ ('G',2,1) ] = ClusterQuiver(['G',2,1]).mutation_class(data_type='dig6')
        data[ ('G',2,-1) ] = ClusterQuiver(['G',2,-1]).mutation_class(data_type='dig6')
    # elliptic G
    if n == 4:
        data[ ('G',2,(1,3)) ] = ClusterQuiver(['G',2,(1,3)]).mutation_class(data_type='dig6')
        data[ ('G',2,(1,1)) ] = ClusterQuiver(['G',2,(1,1)]).mutation_class(data_type='dig6')
        data[ ('G',2,(3,3)) ] = ClusterQuiver(['G',2,(3,3)]).mutation_class(data_type='dig6')
    # X
    if n in [6,7]:
        data[ ('X',n) ] = ClusterQuiver(['X',n]).mutation_class(data_type='dig6')
    # elliptic F
    if n == 6:
        data[ ('F',4,(1,2)) ] = ClusterQuiver(['F',4,(1,2)]).mutation_class(data_type='dig6')
        data[ ('F',4,(1,1)) ] = ClusterQuiver(['F',4,(1,1)]).mutation_class(data_type='dig6')
        data[ ('F',4,(2,2)) ] = ClusterQuiver(['F',4,(2,2)]).mutation_class(data_type='dig6')

    return data


def _save_data_dig6(n, types='ClassicalExceptional', verbose=False):
    """
    Save all mutation classes of the requested types as dig6 data into
    the file ``mutation_classes_n.dig6`` in the folder ``DOT_SAGE``.

    TESTS::

        sage: from sage.combinat.cluster_algebra_quiver.quiver_mutation_type import save_quiver_data
        sage: save_quiver_data(2) # indirect doctest
        <BLANKLINE>
        The following types are saved to file ...
        and will now be used to determine quiver mutation types:
        [('A', 1)]
        <BLANKLINE>
        The following types are saved to file ...
        and will now be used to determine quiver mutation types:
        [('A', (1, 1), 1), ('A', 2), ('B', 2), ('BC', 1, 1), ('G', 2)]
        sage: save_quiver_data(2,up_to=False) # indirect doctest
        <BLANKLINE>
        The following types are saved to file ...
        and will now be used to determine quiver mutation types:
        [('A', (1, 1), 1), ('A', 2), ('B', 2), ('BC', 1, 1), ('G', 2)]
        sage: save_quiver_data(2,up_to=False, types='Classical') # indirect doctest
        <BLANKLINE>
        The following types are saved to file ...
        and will now be used to determine quiver mutation types:
        [('A', (1, 1), 1), ('A', 2), ('B', 2), ('BC', 1, 1)]
        sage: save_quiver_data(2,up_to=False, types='Exceptional') # indirect doctest
        <BLANKLINE>
        The following types are saved to file ...
        and will now be used to determine quiver mutation types:
        [('G', 2)]
        sage: save_quiver_data(2,up_to=False, verbose=False) # indirect doctest
    """
    import os.path
    from six.moves import cPickle
    data = {}
    possible_types = ['Classical', 'ClassicalExceptional', 'Exceptional']
    if types not in possible_types:
        raise ValueError('The third input must be either ClassicalExceptional'
                         ' (default), Classical, or Exceptional.')

    if types in possible_types[:2]:
        data.update(_construct_classical_mutation_classes(n))
    if types in possible_types[1:]:
        data.update(_construct_exceptional_mutation_classes(n))

    from sage.env import DOT_SAGE
    from sage.misc.misc import sage_makedirs
    types_path = os.path.join(DOT_SAGE, 'cluster_algebra_quiver')
    types_file = os.path.join(types_path,'mutation_classes_%s.dig6'%n)
    sage_makedirs(types_path)
    from sage.misc.temporary_file import atomic_write
    with atomic_write(types_file, binary=True) as f:
        cPickle.dump(data, f)
    if verbose:
        keys = sorted(data.keys(),key=str)
        print("\nThe following types are saved to file", types_file,
              "and will now be used to determine quiver mutation types:")
        print(keys)


def save_quiver_data(n, up_to=True, types='ClassicalExceptional',
                     verbose=True):
    r"""
    Save mutation classes of certain quivers of ranks up to and equal
    to ``n`` or equal to ``n`` to
    ``DOT_SAGE/cluster_algebra_quiver/mutation_classes_n.dig6``.

    This data will then be used to determine quiver mutation types.

    INPUT:

    - ``n``: the rank (or the upper limit on the rank) of the mutation
      classes that are being saved.

    - ``up_to`` -- (default: ``True``) if ``True``, saves data for
      ranks smaller than or equal to ``n``. If ``False``, saves data
      for rank exactly ``n``.

    - ``types`` -- (default: 'ClassicalExceptional') if
      'ClassicalExceptional', saves data for both exceptional
      mutation-finite quivers and for classical quivers. The input
      'Exceptional' or 'Classical' is also allowed, to save only part
      of this data.

    TESTS::

        sage: from sage.combinat.cluster_algebra_quiver.quiver_mutation_type import save_quiver_data
        sage: save_quiver_data(2)
        <BLANKLINE>
        The following types are saved to file ...
        and will now be used to determine quiver mutation types:
        [('A', 1)]
        <BLANKLINE>
        The following types are saved to file ...
        and will now be used to determine quiver mutation types:
        [('A', (1, 1), 1), ('A', 2), ('B', 2), ('BC', 1, 1), ('G', 2)]
        sage: save_quiver_data(2,up_to=False)
        <BLANKLINE>
        The following types are saved to file ...
        and will now be used to determine quiver mutation types:
        [('A', (1, 1), 1), ('A', 2), ('B', 2), ('BC', 1, 1), ('G', 2)]
        sage: save_quiver_data(2,up_to=False, types='Classical')
        <BLANKLINE>
        The following types are saved to file ...
        and will now be used to determine quiver mutation types:
        [('A', (1, 1), 1), ('A', 2), ('B', 2), ('BC', 1, 1)]
        sage: save_quiver_data(2,up_to=False, types='Exceptional')
        <BLANKLINE>
        The following types are saved to file ...
        and will now be used to determine quiver mutation types:
        [('G', 2)]
        sage: save_quiver_data(2,up_to=False, verbose=False)
    """
    from sage.combinat.cluster_algebra_quiver.mutation_type import load_data
    if up_to is True:
        ranks = range(1,n+1)
    elif up_to is False:
        ranks = [n]
    for i in ranks:
        _save_data_dig6(i,types=types,verbose=verbose)
    # we finally clear the load_data
    load_data.clear_cache()


def _bipartite_graph_to_digraph(g):
    """
    Return a digraph obtained from a bipartite graph ``g`` by choosing
    one set of the bipartition to be the set of sinks and the other to
    be the set of sources.

    EXAMPLES::

        sage: from sage.combinat.cluster_algebra_quiver.quiver_mutation_type import _bipartite_graph_to_digraph
        sage: G = Graph([(1,2)])
        sage: _bipartite_graph_to_digraph(G)
        Digraph on 2 vertices
    """
    if not g.is_bipartite():
        raise ValueError('The input graph is not bipartite.')

    order = g.bipartite_sets()
    dg = DiGraph()
    for edge in g.edges():
        if edge[0] in order[0]:
            dg.add_edge( edge[0], edge[1], edge[2] )
        else:
            dg.add_edge( edge[1], edge[0], edge[2] )
    for vert in g.vertex_iterator():
        if vert not in dg.vertices():
            dg.add_vertex(vert)
    return dg


def _is_mutation_type(data):
    """
    Return ``True`` if ``data`` is a QuiverMutationType.

    EXAMPLES::

        sage: from sage.combinat.cluster_algebra_quiver.quiver_mutation_type import _is_mutation_type
        sage: _is_mutation_type ( [ 'A', 2 ] )
        True
        sage: _is_mutation_type ( [ 'P', 1 ] )
        False
    """
    try:
        QuiverMutationType(data)
        return True
    except Exception:
        return False


def _mutation_type_error(data):
    r"""
    Output an error message because data which is not a valid quiver
    mutation type has been passed to QuiverMutationType.

    EXAMPLES::

        sage: QuiverMutationType( 'Christian', 'Stump' ) # indirect doctest
        Traceback (most recent call last):
        ...
        ValueError: ['Christian', 'Stump'] is not a valid quiver mutation type
                    Finite types have the form [ '?', n ] for type ? and rank n
                    Affine type A has the form [ 'A', [ i, j ], 1 ] for rank i+j
                    Affine type ? has the form [ '?', k, \pm 1 ] for rank k+1
                    Elliptic type ? has the form [ '?', k, [i, j] ] (1 <= i,j <= 3) for rank k+2
                    For correct syntax in other types, please consult the documentation.
    """
    if data[2] is None:
        del data[2]
    return_str  = str(data) + ' is not a valid quiver mutation type'
    return_str += '\n            Finite types have the form [ \'?\', n ] for type ? and rank n'
    return_str += '\n            Affine type A has the form [ \'A\', [ i, j ], 1 ] for rank i+j'
    return_str += '\n            Affine type ? has the form [ \'?\', k, \\pm 1 ] for rank k+1'
    return_str += '\n            Elliptic type ? has the form [ \'?\', k, [i, j] ] (1 <= i,j <= 3) for rank k+2'
    return_str += '\n            For correct syntax in other types, please consult the documentation.'
    raise ValueError(return_str)


def _edge_list_to_matrix(edges, nlist, mlist):
    r"""
    Return the matrix obtained from the edge list of a quiver.

    INPUT:

    - ``edges`` -- the list of edges
    - ``nlist`` -- the list of mutable vertices of the quiver
    - ``mlist`` -- the list of frozen vertices of the quiver

    OUTPUT:

    An `(n+m) \times n` matrix corresponding to the edge-list.
    EXAMPLES::

        sage: from sage.combinat.cluster_algebra_quiver.quiver_mutation_type import _edge_list_to_matrix
        sage: G = QuiverMutationType(['A',2])._digraph
        sage: _edge_list_to_matrix(G.edges(), [0,1], [])
        [ 0  1]
        [-1  0]

        sage: G2 = DiGraph([('a', 'b', 1)])
        sage: _edge_list_to_matrix(G2.edges(), ['a', 'b'], [])
        [ 0  1]
        [-1  0]

        sage: G3 = DiGraph([('a', 'b', 1), ('b', 'c', 1)])
        sage: _edge_list_to_matrix(G3.edges(), ['a', 'b'], ['c'])
        [ 0  1]
        [-1  0]
        [ 0 -1]
    """
    n = len(nlist)
    m = len(mlist)
    nmlist = nlist + mlist
    M = matrix(ZZ, n + m, n, sparse=True)
    for edge in edges:
        if edge[2] is None:
            edge = (edge[0], edge[1], (1, -1))
        elif edge[2] in ZZ:
            edge = (edge[0], edge[1], (edge[2], -edge[2]))
        v1, v2, (a, b) = edge
        if v1 in nlist:
            M[nmlist.index(v2), nmlist.index(v1)] = b
        if v2 in nlist:
            M[nmlist.index(v1), nmlist.index(v2)] = a
    return M
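
# --- Illustrative usage sketch (not from the original module) ---------------
# A minimal, hedged example of how the pieces above fit together, assuming a
# Sage session where QuiverMutationType is available as in the doctests:
#
#     sage: mt = QuiverMutationType(['A',3], ['B',3])    # reducible type
#     sage: mt.irreducible_components()
#     (['A', 3], ['B', 3])
#     sage: mt.dual().irreducible_components()           # B and C swap under duality
#     (['A', 3], ['C', 3])
#
# The expected outputs mirror the documented examples for
# irreducible_components() and dual() above.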
[ [ [ 665, 673 ] ], [ [ 675, 689 ] ], [ [ 713, 728 ] ], [ [ 752, 757 ], [ 26637, 26642 ], [ 29236, 29241 ], [ 39839, 39844 ], [ 40045, 40050 ], [ 40626, 40631 ], [ 41199, 41204 ], [ 41775, 41780 ], [ 42402, 42407 ], [ 43129, 43134 ], [ 43817, 43822 ], [ 44472, 44477 ], [ 45531, 45536 ], [ 48202, 48207 ], [ 49173, 49178 ], [ 49714, 49719 ], [ 50246, 50251 ], [ 50880, 50885 ], [ 56450, 56455 ], [ 56489, 56494 ], [ 58098, 58103 ], [ 59177, 59182 ], [ 59220, 59225 ], [ 73217, 73222 ], [ 75642, 75647 ], [ 77259, 77264 ], [ 86505, 86510 ] ], [ [ 798, 808 ], [ 1166, 1176 ], [ 22135, 22145 ] ], [ [ 826, 830 ], [ 29089, 29093 ], [ 72345, 72349 ] ], [ [ 880, 900 ], [ 22113, 22133 ] ], [ [ 927, 940 ], [ 10511, 10524 ], [ 25694, 25707 ], [ 26667, 26680 ], [ 27805, 27818 ], [ 61261, 61274 ], [ 74181, 74194 ] ], [ [ 968, 970 ], [ 5732, 5734 ], [ 5878, 5880 ], [ 6147, 6149 ], [ 7245, 7247 ], [ 38277, 38279 ], [ 38619, 38621 ], [ 40394, 40396 ], [ 40967, 40969 ], [ 41539, 41541 ], [ 42166, 42168 ], [ 42793, 42795 ], [ 43581, 43583 ], [ 44236, 44238 ], [ 44885, 44887 ], [ 45193, 45195 ], [ 48527, 48529 ], [ 56127, 56129 ], [ 57254, 57256 ], [ 57701, 57703 ], [ 63791, 63793 ], [ 63817, 63819 ], [ 66450, 66452 ], [ 90552, 90554 ], [ 90701, 90703 ] ], [ [ 972, 980 ], [ 63119, 63127 ], [ 74930, 74938 ] ], [ [ 1009, 1014 ], [ 37452, 37457 ], [ 72516, 72521 ] ], [ [ 1016, 1023 ], [ 37484, 37491 ], [ 72548, 72555 ], [ 87295, 87302 ] ], [ [ 1051, 1059 ], [ 63392, 63400 ], [ 63482, 63490 ], [ 63568, 63576 ], [ 63935, 63943 ], [ 63996, 64004 ], [ 64230, 64238 ], [ 64286, 64294 ], [ 64766, 64774 ], [ 65192, 65200 ], [ 65272, 65280 ], [ 65297, 65305 ], [ 65696, 65704 ], [ 66106, 66114 ], [ 66509, 66517 ], [ 66998, 67006 ], [ 67073, 67081 ], [ 67091, 67099 ], [ 75546, 75554 ] ], [ [ 1061, 1070 ], [ 63867, 63876 ], [ 66414, 66423 ] ], [ [ 1092, 1096 ], [ 56279, 56283 ], [ 75539, 75543 ] ], [ [ 1125, 1131 ], [ 90545, 90551 ] ], [ [ 1140, 1165 ], [ 11653, 11678 ] ], [ [ 11632, 11650 ], [ 11681, 11699 ], [ 2324, 2342 ], [ 2402, 2420 ], [ 2945, 2963 ], [ 3552, 3570 ], [ 10743, 10761 ], [ 11079, 11097 ], [ 11213, 11231 ], [ 11327, 11345 ], [ 11437, 11455 ], [ 70028, 70046 ], [ 70893, 70911 ], [ 76092, 76110 ], [ 87966, 87984 ] ], [ [ 22085, 22112 ], [ 36485, 36512 ], [ 71006, 71033 ] ], [ [ 36454, 36484 ], [ 1725, 1755 ], [ 2222, 2252 ], [ 8098, 8128 ], [ 71648, 71678 ] ], [ [ 70977, 71005 ], [ 1812, 1840 ], [ 2469, 2497 ] ], [ [ 76153, 76190 ], [ 83517, 83554 ] ], [ [ 78458, 78497 ], [ 83615, 83654 ] ], [ [ 81573, 81588 ], [ 86591, 86606 ] ], [ [ 84256, 84272 ] ], [ [ 86709, 86736 ], [ 60402, 60429 ] ], [ [ 87611, 87628 ] ], [ [ 88059, 88079 ], [ 2102, 2122 ], [ 2651, 2671 ], [ 7921, 7941 ], [ 39288, 39308 ], [ 40576, 40596 ], [ 41149, 41169 ], [ 41725, 41745 ], [ 42352, 42372 ], [ 42980, 43000 ], [ 43767, 43787 ], [ 44422, 44442 ], [ 45481, 45501 ], [ 48355, 48375 ], [ 49514, 49534 ], [ 50048, 50068 ], [ 50580, 50600 ], [ 51085, 51105 ], [ 53754, 53774 ], [ 55935, 55955 ], [ 57051, 57071 ], [ 57531, 57551 ], [ 58605, 58625 ], [ 59494, 59514 ], [ 60096, 60116 ], [ 60192, 60212 ], [ 71719, 71739 ] ], [ [ 89519, 89539 ], [ 26588, 26608 ] ] ]
#!/usr/bin/env python
# -*- coding: utf-8; py-indent-offset:4 -*-
###############################################################################
#
# Copyright (C) 2015-2020 Daniel Rodriguez
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
###############################################################################
from __future__ import (absolute_import, division, print_function,
                        unicode_literals)


import backtrader as bt
from . import TimeReturn


class Benchmark(TimeReturn):
    '''This observer stores the *returns* of the strategy and the *return* of a
    reference asset which is one of the datas passed to the system.

    Params:

      - ``timeframe`` (default: ``None``)
        If ``None`` then the complete return over the entire backtested period
        will be reported

      - ``compression`` (default: ``None``)
        Only used for sub-day timeframes to, for example, work on an hourly
        timeframe by specifying "TimeFrame.Minutes" and 60 as compression

      - ``data`` (default: ``None``)
        Reference asset to track to allow for comparison.

        .. note:: this data must have been added to a ``cerebro`` instance
                  with ``addata``, ``resampledata`` or ``replaydata``.

      - ``_doprenext`` (default: ``False``)
        Benchmarking will take place from the point at which the strategy
        kicks in (i.e.: when the minimum period of the strategy has been met).

        Setting this to ``True`` will record benchmarking values from the
        starting point of the data feeds

      - ``firstopen`` (default: ``False``)
        Keeping it as ``False`` ensures that the 1st comparison point between
        the value and the benchmark starts at 0%, because the benchmark will
        not use its opening price.

        See the ``TimeReturn`` analyzer reference for a full explanation of
        the meaning of the parameter

      - ``fund`` (default: ``None``)
        If ``None`` the actual mode of the broker (fundmode - True/False) will
        be autodetected to decide if the returns are based on the total net
        asset value or on the fund value. See ``set_fundmode`` in the broker
        documentation.

        Set it to ``True`` or ``False`` for a specific behavior

    Remember that at any moment of a ``run`` the current values can be checked
    by looking at the *lines* by name at index ``0``.
    '''
    _stclock = True

    lines = ('benchmark',)
    plotlines = dict(benchmark=dict(_name='Benchmark'))

    params = (
        ('data', None),
        ('_doprenext', False),
        # Set to False to ensure the asset is measured at 0% in the 1st tick
        ('firstopen', False),
        ('fund', None)
    )

    def _plotlabel(self):
        labels = super(Benchmark, self)._plotlabel()
        labels.append(self.p.data._name)
        return labels

    def __init__(self):
        if self.p.data is None:  # use the 1st data in the system if none given
            self.p.data = self.data0

        super(Benchmark, self).__init__()  # treturn including data parameter

        # Create a time return object without the data
        kwargs = self.p._getkwargs()
        kwargs.update(data=None)  # to create a return for the strategy
        t = self._owner._addanalyzer_slave(bt.analyzers.TimeReturn, **kwargs)

        # swap for consistency
        self.treturn, self.tbench = t, self.treturn

    def next(self):
        super(Benchmark, self).next()
        self.lines.benchmark[0] = self.tbench.rets.get(
            self.treturn.dtkey, float('NaN'))

    def prenext(self):
        if self.p._doprenext:
            super(TimeReturn, self).prenext()
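
# --- Illustrative usage sketch (not from the original module) ---------------
# A minimal, hedged example of wiring this observer into a Cerebro run; the
# CSV file name and the strategy are placeholders, not part of the original:
#
#     import backtrader as bt
#
#     cerebro = bt.Cerebro()
#     benchdata = bt.feeds.BacktraderCSVData(dataname='asset.csv')  # hypothetical file
#     cerebro.adddata(benchdata)            # reference asset must be in cerebro
#     cerebro.addstrategy(bt.Strategy)      # placeholder strategy
#     cerebro.addobserver(bt.observers.Benchmark, data=benchdata)
#     cerebro.run()
#
# With ``timeframe`` left at its default (``None``), the observer reports the
# complete return over the backtested period, as the docstring above notes.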
[ [ [ 933, 948 ] ], [ [ 950, 958 ] ], [ [ 960, 974 ] ], [ [ 1000, 1016 ] ], [ [ 1026, 1042 ], [ 3892, 3894 ] ], [ [ 1057, 1067 ], [ 1086, 1096 ], [ 4286, 4296 ] ], [ [ 1076, 1085 ], [ 3372, 3381 ], [ 3622, 3631 ], [ 4046, 4055 ] ] ]
# Test for checking lak observation input. The following observation types:
# 'lak', 'wetted-area', and 'conductance', require that ID2 be provided when
# ID is an integer corresponding to a lake number and not BOUNDNAME.
# See table in LAK Package section of mf6io.pdf for an explanation of ID,
# ID2, and Observation Type.

import os
import pytest
import sys
import numpy as np

try:
    import flopy
except:
    msg = "Error. FloPy package is not available.\n"
    msg += "Try installing using the following command:\n"
    msg += "    pip install flopy"
    raise Exception(msg)

from framework import testing_framework
from simulation import Simulation

import targets

mf6_exe = os.path.abspath(targets.target_dict["mf6"])

ex = "gwf_lakobs_01a"
exdir = os.path.join("temp", ex)

# store global gwf for subsequent plotting
gwf = None


def get_idomain(nlay, nrow, ncol, lakend):
    idomain = np.ones((nlay, nrow, ncol), dtype=int)
    for k, j in enumerate(lakend):
        idomain[k, 0, 0:j] = 0
    return idomain


def build_model():
    lx = 300.0
    lz = 45.0
    nlay = 45
    nrow = 1
    ncol = 30
    nper = 1
    delc = 1.0
    delr = lx / ncol
    delz = lz / nlay
    top = 5.0
    botm = [top - (k + 1) * delz for k in range(nlay)]

    perlen = [20.0]
    nstp = [1]
    tsmult = [1.0]

    Kh = 1.0
    Kv = 1.0

    tdis_rc = []
    for i in range(nper):
        tdis_rc.append((perlen[i], nstp[i], tsmult[i]))

    nouter, ninner = 700, 300
    hclose, rclose, relax = 1e-8, 1e-6, 0.97

    name = ex

    # build MODFLOW 6 files
    ws = exdir
    sim = flopy.mf6.MFSimulation(
        sim_name=name, version="mf6", exe_name=mf6_exe, sim_ws=ws
    )

    # create tdis package
    tdis = flopy.mf6.ModflowTdis(
        sim, time_units="DAYS", nper=nper, perioddata=tdis_rc
    )

    # create gwf model
    gwfname = name

    global gwf
    gwf = flopy.mf6.ModflowGwf(sim, modelname=gwfname, newtonoptions="NEWTON")

    imsgwf = flopy.mf6.ModflowIms(
        sim,
        print_option="SUMMARY",
        outer_dvclose=hclose,
        outer_maximum=nouter,
        under_relaxation="NONE",
        inner_maximum=ninner,
        inner_dvclose=hclose,
        rcloserecord=rclose,
        linear_acceleration="BICGSTAB",
        scaling_method="NONE",
        reordering_method="NONE",
        relaxation_factor=relax,
        filename="{}.ims".format(gwfname),
    )

    # number of columns to be a lake for layers 1, 2, ..., len(lakend)
    lakend = [10, 9, 8, 7, 6]
    idomain = get_idomain(nlay, nrow, ncol, lakend)
    dis = flopy.mf6.ModflowGwfdis(
        gwf,
        nlay=nlay,
        nrow=nrow,
        ncol=ncol,
        delr=delr,
        delc=delc,
        top=top,
        botm=botm,
        idomain=idomain,
    )

    # initial conditions
    strt = np.zeros((nlay, nrow, ncol), dtype=float)
    strt += top
    ic = flopy.mf6.ModflowGwfic(gwf, strt=strt)

    # node property flow
    npf = flopy.mf6.ModflowGwfnpf(
        gwf,
        xt3doptions=False,
        save_flows=True,
        save_specific_discharge=True,
        icelltype=1,
        k=Kh,
        k33=Kv,
    )

    sy = 0.3
    ss = np.zeros((nlay, nrow, ncol), dtype=float)
    # ss[0, :, :] = sy
    idx = np.where(idomain == 0)
    for k, i, j in zip(*idx):
        ss[k + 1, i, j] = 0.0  # sy
    sto = flopy.mf6.ModflowGwfsto(gwf, sy=sy, ss=ss, iconvert=1)

    irch = np.zeros((nrow, ncol), dtype=int)
    lake_vconnect = []
    idx = np.where(idomain == 0)
    for k, i, j in zip(*idx):
        if idomain[k + 1, i, j] == 1:
            lake_vconnect.append((k + 1, i, j))
            irch[i, j] = k + 1
    nlakeconn = len(lake_vconnect)

    # pak_data = [lakeno, strt, nlakeconn]
    initial_stage = 0.1
    pak_data = [(0, initial_stage, nlakeconn)]

    bedleak = 100.0  # "None"
    belev = 0.0
    con_data = [
        (0, i, idx, "VERTICAL", bedleak, belev, -99, -99, -99)
        for i, idx in enumerate(lake_vconnect)
    ]

    # period data
    p_data = [
        (0, "STATUS", "ACTIVE"),
    ]

    # note: for specifying lake number, use fortran indexing!
    fname = "{}.lak.obs.csv".format(gwfname)
    lak_obs = {
        fname: [
            ("lakestage", "stage", 1),
            ("lakevolume", "volume", 1),
            ("lak1", "lak", 1),
        ],
        "digits": 10,
    }

    lak = flopy.mf6.modflow.ModflowGwflak(
        gwf,
        surfdep=0.0,
        save_flows=True,
        print_input=True,
        print_flows=True,
        print_stage=True,
        stage_filerecord="{}.lak.bin".format(gwfname),
        budget_filerecord="{}.lak.bud".format(gwfname),
        nlakes=len(pak_data),
        ntables=0,
        packagedata=pak_data,
        pname="LAK-1",
        connectiondata=con_data,
        perioddata=p_data,
        observations=lak_obs,
    )

    chdspd = [((0, 0, ncol - 1), 5.0)]
    chd = flopy.mf6.modflow.ModflowGwfchd(gwf, stress_period_data=chdspd)

    rech = 0.0001 * np.ones((nrow, ncol), dtype=float)
    # rech[:, 0:20] = 0.
    rch = flopy.mf6.modflow.ModflowGwfrcha(
        gwf, print_flows=True, save_flows=True, recharge=rech, irch=irch
    )

    # output control
    oc = flopy.mf6.ModflowGwfoc(
        gwf,
        budget_filerecord="{}.cbc".format(gwfname),
        head_filerecord="{}.hds".format(gwfname),
        headprintrecord=[("COLUMNS", 10, "WIDTH", 15, "DIGITS", 6, "GENERAL")],
        saverecord=[("HEAD", "ALL"), ("BUDGET", "ALL")],
        printrecord=[("HEAD", "ALL"), ("BUDGET", "ALL")],
    )

    return sim


# - No need to change any code below
def test_mf6model():
    # initialize testing framework
    test = testing_framework()

    # build the models
    sim = build_model()

    # write model input
    sim.write_simulation()

    # attempt to run model, should fail
    sim.run_simulation()

    # ensure that the error msg is contained in the mfsim.lst file
    f = open(os.path.join(exdir, "mfsim.lst"), "r")
    lines = f.readlines()
    error_count = 0
    expected_msg = False
    for line in lines:
        if "ID2 (iconn) is missing" in line:
            expected_msg = True
            error_count += 1

    assert error_count == 1, (
        "error count = " + str(error_count) + ", but should equal 1"
    )

    # fix the error and attempt to rerun model
    orig_fl = os.path.join(exdir, ex + ".lak.obs")
    new_fl = os.path.join(exdir, ex + ".lak.obs.new")
    sr = open(orig_fl, "r")
    sw = open(new_fl, "w")

    lines = sr.readlines()
    error_free_line = " lak1 lak 1 1\n"
    for line in lines:
        if " lak " in line:
            sw.write(error_free_line)
        else:
            sw.write(line)

    sr.close()
    sw.close()

    # delete original and replace with corrected lak obs input
    os.remove(orig_fl)
    os.rename(new_fl, orig_fl)

    # rerun the model, should be no errors
    sim.run_simulation()

    return


def main():
    # initialize testing framework
    test = testing_framework()

    # build the models
    sim = build_model()

    # write model input
    sim.write_simulation()

    # attempt to run model, should fail
    sim.run_simulation()

    # ensure that the error msg is contained in the mfsim.lst file
    f = open(os.path.join(exdir, "mfsim.lst"), "r")
    lines = f.readlines()
    error_count = 0
    expected_msg = False
    for line in lines:
        if "ID2 (iconn) is missing" in line:
            expected_msg = True
            error_count += 1

    assert error_count == 1, (
        "error count = " + str(error_count) + ", but should equal 1"
    )

    # fix the error and attempt to rerun model
    orig_fl = os.path.join(exdir, ex + ".lak.obs")
    new_fl = os.path.join(exdir, ex + ".lak.obs.new")
    sr = open(orig_fl, "r")
    sw = open(new_fl, "w")

    lines = sr.readlines()
    error_free_line = " lak1 lak 1 1\n"
    for line in lines:
        if " lak " in line:
            sw.write(error_free_line)
        else:
            sw.write(line)

    sr.close()
    sw.close()

    # delete original and replace with corrected lak obs input
    os.remove(orig_fl)
    os.rename(new_fl, orig_fl)

    # rerun the model, should be no errors
    sim.run_simulation()

    return


if __name__ == "__main__":
    # print message
    print("standalone run of {}".format(os.path.basename(__file__)))

    # run main routine
    main()
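
# --- Illustrative note (not from the original test) --------------------------
# The failure exercised above stems from an observation record such as
# ("lak1", "lak", 1), which omits ID2. For the 'lak', 'wetted-area' and
# 'conductance' observation types with an integer (lake number) ID, MODFLOW 6
# expects a connection number as ID2; a hedged sketch of a valid record:
#
#     lak_obs = {
#         fname: [
#             ("lakestage", "stage", 1),
#             ("lakevolume", "volume", 1),
#             ("lak1", "lak", 1, 1),  # ID = lake 1, ID2 = connection 1
#         ],
#         "digits": 10,
#     }
#
# This mirrors the corrected " lak1 lak 1 1" line the test writes into the
# .lak.obs file before rerunning the simulation.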
[ [ [ 335, 337 ], [ 683, 685 ], [ 758, 760 ], [ 8270, 8272 ], [ 5884, 5886 ], [ 6290, 6292 ], [ 6340, 6342 ], [ 6737, 6739 ], [ 6760, 6762 ], [ 7195, 7197 ], [ 7603, 7605 ], [ 7653, 7655 ], [ 8050, 8052 ], [ 8073, 8075 ] ], [ [ 345, 351 ] ], [ [ 359, 362 ] ], [ [ 370, 381 ], [ 898, 900 ], [ 2793, 2795 ], [ 3143, 3145 ], [ 3218, 3220 ], [ 3384, 3386 ], [ 3451, 3453 ], [ 4941, 4943 ] ], [ [ 399, 404 ], [ 1580, 1585 ], [ 1714, 1719 ], [ 1873, 1878 ], [ 1956, 1961 ], [ 2556, 2561 ], [ 2860, 2865 ], [ 2935, 2940 ], [ 3317, 3322 ], [ 4327, 4332 ], [ 4856, 4861 ], [ 5011, 5016 ], [ 5155, 5160 ] ], [ [ 417, 420 ], [ 470, 473 ] ], [ [ 605, 622 ], [ 5617, 5634 ], [ 6928, 6945 ] ], [ [ 646, 656 ] ], [ [ 664, 671 ], [ 699, 706 ] ], [ [ 673, 680 ], [ 1651, 1658 ] ], [ [ 728, 730 ], [ 779, 781 ], [ 1523, 1525 ], [ 6310, 6312 ], [ 6360, 6362 ], [ 7623, 7625 ], [ 7673, 7675 ] ], [ [ 750, 755 ], [ 1564, 1569 ], [ 5897, 5902 ], [ 6303, 6308 ], [ 6353, 6358 ], [ 7208, 7213 ], [ 7616, 7621 ], [ 7666, 7671 ] ], [ [ 828, 831 ] ], [ [ 845, 856 ], [ 2508, 2519 ] ], [ [ 1029, 1040 ], [ 5671, 5682 ], [ 6982, 6993 ] ], [ [ 5554, 5567 ] ], [ [ 6874, 6878 ], [ 8327, 8331 ] ], [ [ 1867, 1870 ], [ 2589, 2592 ], [ 2883, 2886 ], [ 2968, 2971 ], [ 3341, 3344 ], [ 4368, 4371 ], [ 4888, 4891 ], [ 5053, 5056 ], [ 5187, 5190 ] ] ]
import tensorflow as tf

from model import vAe, decode
import util_sp as sp
from util_io import load_txt
import numpy as np


def analyze(z, use_dim=[], seed=25):
    '''z = np.array[2, dim], mu of two sentences
    use_dim = list of int describing which dimensions should be used
    '''
    # select random path from z1 to z2
    np.random.seed(seed)
    if use_dim == []:
        rdm_path = np.arange(len(z[0]))
    else:
        rdm_path = use_dim
    np.random.shuffle(rdm_path)

    # walk the path and print at every step
    path = np.copy(z[0])
    for idx, dim in enumerate(rdm_path):
        path[dim] = z[1][dim]
        output = decode(sess, vae, [z[0], path, z[1]]).tolist()
        _ = [vocab.decode_ids(output[idx]) for idx in range(3)]
        print(idx, dim, _[1])
        # print("{}\n{}\n{}\n{}\n".format(idx, _[0], _[1], _[2]))
        # print: sentence1, path, sentence2


path_vocab = "../trial/data/vocab.model"
path_txt = "../data/test_data.txt"
path_ckpt = "../trial/ckpt/kudo18"
path_use_dim = "../data/useful_dimension.npy"

# load and restore model
vae = vAe('infer')
sess = tf.InteractiveSession()
tf.train.Saver().restore(sess, path_ckpt)

# load vocab and text
vocab = sp.load_spm(path_vocab)
text = list(load_txt(path_txt))

# pick 2 random sentences to explore
np.random.seed(23)
sen_idx = np.random.random_integers(0, len(text), 2)
sentences = [text[idx] for idx in sen_idx]
print("sentence 1: {}\nsentence 2: {}".format(sentences[0], sentences[1]))

# encode sentences with sentence piece model
data = sp.encode(vocab, sentences)

### full high dimensional space
z = vae.z.eval({vae.tgt: data})
analyze(z)

### only the dimensions that turned out useful for our task
use_dim = np.load(path_use_dim)
analyze(z, use_dim)
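
# --- Illustrative note (not from the original script) -------------------------
# analyze() walks from z[0] to z[1] one latent dimension at a time, decoding
# after every step. A hedged sketch of restricting the walk to a hand-picked
# subset of dimensions (the indices below are placeholders):
#
#     analyze(z, use_dim=np.array([3, 17, 42]), seed=7)
#
# When use_dim is given, only the listed dimensions are traversed; note that
# np.random.shuffle permutes the passed array in place, so pass a copy if the
# original order matters to the caller.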
[ [ [ 7, 23 ], [ 1094, 1096 ], [ 1118, 1120 ] ], [ [ 42, 45 ], [ 1074, 1077 ] ], [ [ 47, 53 ], [ 646, 652 ] ], [ [ 61, 74 ], [ 1191, 1193 ], [ 1527, 1529 ] ], [ [ 95, 103 ], [ 1227, 1235 ] ], [ [ 111, 122 ], [ 1284, 1286 ], [ 1313, 1315 ], [ 1703, 1705 ], [ 336, 338 ], [ 398, 400 ], [ 460, 462 ], [ 545, 547 ] ], [ [ 129, 136 ], [ 1620, 1627 ], [ 1725, 1732 ] ], [ [ 885, 895 ], [ 1203, 1213 ] ], [ [ 926, 934 ], [ 1236, 1244 ] ], [ [ 961, 970 ], [ 1149, 1158 ] ], [ [ 996, 1008 ], [ 1711, 1723 ] ], [ [ 1068, 1071 ], [ 1592, 1595 ], [ 1604, 1607 ], [ 659, 662 ] ], [ [ 1087, 1091 ], [ 1143, 1147 ], [ 653, 657 ] ], [ [ 1183, 1188 ], [ 1537, 1542 ], [ 706, 711 ] ], [ [ 1215, 1219 ], [ 1346, 1350 ], [ 1369, 1373 ] ], [ [ 1303, 1310 ], [ 1390, 1397 ] ], [ [ 1356, 1365 ], [ 1445, 1454 ], [ 1459, 1468 ], [ 1544, 1553 ] ], [ [ 1520, 1524 ], [ 1613, 1617 ] ], [ [ 1588, 1589 ], [ 1628, 1629 ], [ 1733, 1734 ] ], [ [ 1693, 1700 ], [ 1736, 1743 ] ] ]
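The analyze() routine above interpolates between two sentence encodings by overwriting one latent dimension at a time. A minimal numpy-only sketch of that dimension walk, with made-up vectors z1 and z2 standing in for the two encoded sentences:

import numpy as np

z1 = np.zeros(4)
z2 = np.array([1.0, 2.0, 3.0, 4.0])
order = np.arange(len(z1))
np.random.shuffle(order)          # visit the dimensions in random order
path = np.copy(z1)
for step, dim in enumerate(order):
    path[dim] = z2[dim]           # overwrite one latent dimension per step
    print(step, dim, path)        # after the last step, path equals z2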
## @package onnx # Module caffe2.python.onnx.backend """Backend for running ONNX on Caffe2 To run this, you will need to have Caffe2 installed as well. """ from __future__ import absolute_import from __future__ import division from __future__ import print_function from __future__ import unicode_literals import os import collections from subprocess import Popen, PIPE import sys import zipfile import itertools # When onnx is built against a version of protobuf that is older than # that which is vendored with caffe2, onnx will crash if caffe2's # vendored protobuf is loaded first. We can work around this by # importing onnx first, which will cause it to go out and pick up the # system protobuf. import onnx.backend import caffe2 from caffe2.python import core, workspace, rnn_cell, gru_cell from caffe2.python.compatibility import container_abcs from caffe2.python.model_helper import ModelHelper from caffe2.proto import caffe2_pb2 import caffe2.python.utils import numpy as np import onnx from onnx import checker, GraphProto, TensorProto, AttributeProto, ModelProto import onnx.numpy_helper import onnx.defs import onnx.optimizer import onnx.shape_inference import onnx.utils from onnx.backend.base import Backend, Device, DeviceType, namedtupledict from caffe2.python.onnx.workspace import Workspace from caffe2.python.onnx.backend_rep import Caffe2Rep from caffe2.python.onnx.backend_cpp_rep import Caffe2CppRep import caffe2.python._import_c_extension as C import warnings def force_unicode(s): try: return s.decode('utf-8') except AttributeError: return s def get_device_option(device): m = {DeviceType.CPU: caffe2_pb2.CPU, DeviceType.CUDA: workspace.GpuDeviceType} return core.DeviceOption(m[device.type], device.device_id) class OnnxAttributes(dict): """ This is a more convenient way to work with ONNX/Caffe2 attributes that is not the protobuf representation. """ @staticmethod def from_onnx(args): d = OnnxAttributes() for arg in args: d[arg.name] = convertAttributeProto(arg) return d def caffe2(self, kmap=lambda k: k): for k, v in self.items(): if kmap(k) != '': yield caffe2.python.utils.MakeArgument(kmap(k), v) # TODO: Move this into ONNX main library def convertAttributeProto(onnx_arg): """ Convert an ONNX AttributeProto into an appropriate Python object for the type. NB: Tensor attribute gets returned as the straight proto. """ if onnx_arg.HasField('f'): return onnx_arg.f elif onnx_arg.HasField('i'): return onnx_arg.i elif onnx_arg.HasField('s'): return onnx_arg.s elif onnx_arg.HasField('t'): return onnx_arg.t # this is a proto! elif onnx_arg.HasField('g'): return Caffe2Backend._graph_to_net(onnx_arg.g, Caffe2Backend._known_opset_version) elif len(onnx_arg.floats): return list(onnx_arg.floats) elif len(onnx_arg.ints): return list(onnx_arg.ints) elif len(onnx_arg.strings): return list(onnx_arg.strings) elif len(onnx_arg.graphs): retval = [] # TODO: this doesn't work with RNN ops for g in onnx_arg.graphs: retval.append(Caffe2Backend._graph_to_net(g, Caffe2Backend._known_opset_version)) return retval else: raise ValueError("Unsupported ONNX attribute: {}".format(onnx_arg)) # TODO: Move this into ONNX main library class OnnxNode(object): """ Reimplementation of NodeProto from ONNX, but in a form more convenient to work with from Python. We may temporarily edit these nodes to get them into Caffe2 form, before actually translating into the Caffe2 protobuf, since this is easier than decomposing everything, and putting it back together when we're ready. 
""" def __init__(self, node): self.name = str(node.name) self.op_type = str(node.op_type) self.attrs = OnnxAttributes.from_onnx(node.attribute) self.inputs = list(node.input) self.outputs = list(node.output) Caffe2Ops = collections.namedtuple('Caffe2Ops', ['ops', 'init_ops', 'interface_blobs']) class Caffe2Backend(Backend): # The greatest version of the ONNX operator set which we are aware of. # Models whose version is larger than this will cause us to emit a warning # that we are attempting to translate on a "best effort" basis. # # If you increase this, make SURE you cross-reference all BC-breaking # changes from one version to the next, and any that you did not # implement, mark as broken in _broken_operators _known_opset_version = 9 # This dictionary will record operators which are KNOWN to be # broken, so we give a good error message rather than do something # bogus and then fail. _broken_operators = { # 'BrokenOp': version_it_was_broken_in } # Operators that are different between Caffe2 and # ONNX but only in their name. # In most cases, this should be empty - as the effort of ONNX is # to unify the operator definitions. _renamed_operators = { 'GlobalMaxPool': 'MaxPool', 'GlobalAveragePool': 'AveragePool', 'Pad': 'PadImage', 'Neg': 'Negative', 'BatchNormalization': 'SpatialBN', 'InstanceNormalization': 'InstanceNorm', 'MatMul': 'BatchMatMul', 'Upsample': 'ResizeNearest', 'Identity': 'Copy', 'InstanceNormalization': 'InstanceNorm', 'Equal': 'EQ', 'Less': 'LT', 'Greater': 'GT', 'Unsqueeze': 'ExpandDims', 'Loop': 'ONNXWhile', 'Tile': 'NumpyTile', 'RandomNormal': 'GaussianFill', 'RandomUniform': 'UniformFill', } _global_renamed_attrs = {'kernel_shape': 'kernels'} _per_op_renamed_attrs = { 'Squeeze': {'axes': 'dims'}, 'Unsqueeze': {'axes': 'dims'}, 'Transpose': {'perm': 'axes'}, 'Upsample': {'mode': '', 'scales': ''}, 'ConvTranspose': {'output_padding': 'adjs'}, 'Selu': {'gamma': 'scale'}, 'If': {'then_branch': 'then_net', 'else_branch': 'else_net'}, 'RandomUniform': {'low': 'min', 'high': 'max'} } # operators whose behavior is different beyond renaming # the value is an attribute of this class that is a # function from ToffeIR node_def to caffe2 op_def _special_operators = { 'LSTM': '_create_rnn_variant', 'GRU': '_create_rnn_variant', 'RNN': '_create_rnn_variant', 'Loop': '_create_loop', 'If': '_create_if', 'Upsample': '_create_upsample', 'RandomNormal': '_create_gaussian_fill' } # Dummy name generator _dummy_name = C.DummyName() @classmethod def dummy_name(cls): return cls._dummy_name.new_dummy_name() # NB: By default, you will use the LATEST definition of the operator, # so this interface MAY make BC-breaking changes. Specify an # opset_version if you don't want this to version. @classmethod def run_node(cls, node, inputs, device='CPU', opset_version=_known_opset_version, outputs_info=None): super(Caffe2Backend, cls).run_node(node, inputs, device=device, outputs_info=outputs_info, opset_version=opset_version) value_infos = [] device_option = get_device_option(Device(device)) ws = Workspace() with core.DeviceScope(device_option): # temporary! 
if isinstance(inputs, dict): for key, value in inputs.items(): ws.FeedBlob(key, value) value_infos.append(onnx.helper.make_tensor_value_info( name=key, elem_type=onnx.mapping.NP_TYPE_TO_TENSOR_TYPE[value.dtype], shape=value.shape).SerializeToString()) else: assert len(node.input) == len(inputs), "{}: expected {} but got {}".format( node.op_type, len(node.input), len(inputs)) for key, value in zip(node.input, inputs): ws.FeedBlob(key, value) value_infos.append(onnx.helper.make_tensor_value_info( name=key, elem_type=onnx.mapping.NP_TYPE_TO_TENSOR_TYPE[value.dtype], shape=value.shape).SerializeToString()) ops = [] cbackend = C.Caffe2Backend(cls._dummy_name) ops_str = cbackend.convert_node(node.SerializeToString(), value_infos, opset_version) for s in ops_str[0] + ops_str[1]: op = caffe2_pb2.OperatorDef() op.ParseFromString(s) op.device_option.CopyFrom(device_option) ops.append(op) ws.RunOperatorsOnce(ops) output_values = [ws.FetchBlob(name) for name in node.output] return namedtupledict('Outputs', node.output)(*output_values) @classmethod def _create_tensor_filling_op(cls, onnx_tensor, name=None): """ Given an Onnx TensorProto, translate it into a Caffe2 operator which produces the given tensor filling op. """ assert name or onnx_tensor.name name = name or onnx_tensor.name c2_op = caffe2_pb2.OperatorDef() c2_values = c2_op.arg.add() c2_values.name = "values" def tensor2list(onnx_tensor): # Use the onnx.numpy_helper because the data may be raw return onnx.numpy_helper.to_array(onnx_tensor).flatten().tolist() if onnx_tensor.data_type in [TensorProto.FLOAT]: c2_op.type = 'GivenTensorFill' c2_values.floats.extend(tensor2list(onnx_tensor)) elif onnx_tensor.data_type in [TensorProto.DOUBLE]: c2_op.type = 'GivenTensorDoubleFill' c2_values.floats.extend(tensor2list(onnx_tensor)) elif onnx_tensor.data_type in [TensorProto.INT64, TensorProto.UINT32]: c2_op.type = 'GivenTensorInt64Fill' c2_values.ints.extend(tensor2list(onnx_tensor)) elif onnx_tensor.data_type in [TensorProto.UINT8, TensorProto.INT8, TensorProto.UINT16, TensorProto.INT16, TensorProto.INT32]: c2_op.type = 'GivenTensorIntFill' c2_values.ints.extend(tensor2list(onnx_tensor)) elif onnx_tensor.data_type == TensorProto.BOOL: c2_op.type = 'GivenTensorBoolFill' c2_values.ints.extend(tensor2list(onnx_tensor)) elif onnx_tensor.data_type == TensorProto.STRING: c2_op.type = 'GivenTensorStringFill' c2_values.strings.extend(onnx_tensor.string_data) else: raise RuntimeError( "unrecognized tensor type {}".format(onnx_tensor.data_type)) c2_shape = c2_op.arg.add() c2_shape.name = "shape" c2_shape.ints.extend(onnx_tensor.dims) c2_op.output.append(name) return c2_op @classmethod def _rnn_reform_weights(cls, reforms, name, hidden_size, init_net, gates, reorder_indices): for name_from, name_to, do_concat, extra_dims in reforms: gate_blobs = ['%s/%s_%s' % (name, prefix, name_to) for prefix in gates] for i, x in enumerate(gate_blobs): dim0 = i * hidden_size, (i+1) * hidden_size starts, ends = zip(dim0, *extra_dims) init_net.Slice(name_from, x, starts=starts, ends=ends) if do_concat: reordered_gate_blobs = [gate_blobs[i] for i in reorder_indices] init_net.Concat(reordered_gate_blobs, ['%s/%s' % (name, name_to), cls.dummy_name()], axis=0) @classmethod def _make_rnn_direction(cls, input_blob, B, W, R, initial_states_and_names, sequence_lens, pred_mh, init_net, input_size, hidden_size, num_gates, direction_offset, Bi, Br, W_, R_, reform, make_cell, keep_outputs): name = cls.dummy_name() # input and recurrence biases are squashed together in onnx # but not in 
caffe2 gates_hidden_size = num_gates * hidden_size bias_offset = 2 * direction_offset * gates_hidden_size weight_offset = direction_offset * gates_hidden_size Bi = init_net.Slice(B, name + Bi, starts=[bias_offset + 0 * gates_hidden_size], ends =[bias_offset + 1 * gates_hidden_size]) Br = init_net.Slice(B, name + Br, starts=[bias_offset + 1 * gates_hidden_size], ends =[bias_offset + 2 * gates_hidden_size]) W_ = init_net.Slice(W, name + W_, starts=[weight_offset + 0 * gates_hidden_size, 0], ends =[weight_offset + 1 * gates_hidden_size,-1]) R_ = init_net.Slice(R, name + R_, starts=[weight_offset + 0 * gates_hidden_size, 0], ends =[weight_offset + 1 * gates_hidden_size,-1]) initial_states_sliced = [] for initial_state, name_suffix in initial_states_and_names: initial_states_sliced.append( pred_mh.net.Slice(initial_state, name + name_suffix, starts=[direction_offset + 0, 0, 0], ends =[direction_offset + 1,-1,-1])) if direction_offset == 1: if sequence_lens is not None: seq_lens_for_reverse = sequence_lens else: input_shape = pred_mh.net.Shape(input_blob, name + '/input_shape') batch_size = pred_mh.net.Slice(input_shape, name + '/batch_size_slice', starts=[1], ends=[2]) seq_len = pred_mh.net.Slice(input_shape, name + '/seq_len_slice', starts=[0], ends=[1]) dummy_sequence_lens = pred_mh.net.Tile([seq_len, batch_size], name + '/dummy_sequence_lens', axis=0) pred_mh.net.Reshape(dummy_sequence_lens, [dummy_sequence_lens, cls.dummy_name()], shape=[-1]) seq_lens_for_reverse = pred_mh.net.Cast(dummy_sequence_lens, name + '/seq_lens_for_reverse', to=core.DataType.INT32) reform(Bi, Br, W_, R_, name, hidden_size, init_net) if direction_offset == 1: input = pred_mh.net.ReversePackedSegs( [input_blob, seq_lens_for_reverse], name + "/input-reversed") else: input = input_blob outputs = keep_outputs(list(make_cell( pred_mh, input, sequence_lens, initial_states_sliced, input_size, hidden_size, name, drop_states=False, forward_only=True, ))) if direction_offset == 1: outputs[0] = pred_mh.net.ReversePackedSegs( [outputs[0], seq_lens_for_reverse], name + "/output-reversed") return outputs @classmethod def _create_rnn_variant(cls, init_model, pred_model, n, opset_version): assert init_model is not None, "cannot convert RNNs without access to the full model" assert pred_model is not None, "cannot convert RNNs without access to the full model" attrs = dict(n.attrs) # make a copy, which is safe to mutate hidden_size = attrs.pop('hidden_size') direction = force_unicode(attrs.pop('direction', 'forward')) if n.op_type == 'RNN': activation = force_unicode(attrs.pop('activations', ('tanh',))[0].lower()) elif n.op_type == 'GRU': linear_before_reset = attrs.pop('linear_before_reset', 0) assert not attrs, "unsupported RNN attributes: " + str(attrs.keys()) assert direction in ['forward', 'bidirectional'], "unsupported backwards RNN/GRU/LSTM" if n.op_type in ['RNN', 'GRU']: input_blob, W, R, B, sequence_lens, initial_h = n.inputs elif n.op_type == 'LSTM': input_blob, W, R, B, sequence_lens, initial_h, initial_c = n.inputs if sequence_lens == "": sequence_lens = None for x in itertools.chain(init_model.graph.input, init_model.graph.value_info, pred_model.graph.input, pred_model.graph.value_info): if x.name == W: input_size = x.type.tensor_type.shape.dim[2].dim_value break else: raise RuntimeError("best-effort shape inference for RNN/GRU/LSTM failed") pred_mh = ModelHelper() init_net = core.Net("init-net") init_net.Reshape(W, [W, cls.dummy_name()], shape=[1,-1,0]) init_net.Squeeze(W, W, dims=[0]) init_net.Reshape(R, [R, 
cls.dummy_name()], shape=[1,-1,0]) init_net.Squeeze(R, R, dims=[0]) init_net.Reshape(B, [B, cls.dummy_name()], shape=[1,-1]) init_net.Squeeze(B, B, dims=[0]) if n.op_type == 'RNN': def reform(*args): pass def make_cell(*args, **kwargs): return rnn_cell.BasicRNN(*args, activation=activation, **kwargs) def make_rnn(direction_offset): return cls._make_rnn_direction( input_blob, B, W, R, [(initial_h, '/initial_h')], sequence_lens, pred_mh, init_net, input_size, hidden_size, 1, direction_offset, "/i2h_b", "/gates_t_b", "/i2h_w", "/gates_t_w", reform, make_cell, lambda x: x) elif n.op_type == 'GRU': def reform(Bi, Br, W_, R_, name, hidden_size, init_net): # caffe2 has a different order from onnx. We need to rearrange # z r h -> r z h reforms = ((W_, 'i2h_w', True, [(0,-1)]), (R_, 'gate_t_w', False, [(0,-1)]), (Bi, 'i2h_b', True, []), (Br, 'gate_t_b', False, [])) cls._rnn_reform_weights(reforms, name, hidden_size, init_net, ['update', 'reset', 'output'], [1, 0, 2]) def make_cell(*args, **kwargs): return gru_cell.GRU(*args, linear_before_reset=linear_before_reset, **kwargs) def make_rnn(direction_offset): return cls._make_rnn_direction( input_blob, B, W, R, [(initial_h, '/initial_h')], sequence_lens, pred_mh, init_net, input_size, hidden_size, 3, direction_offset, "_bias_i2h", "_bias_gates", "/i2h_w_pre", "/gates_t_w_pre", reform, make_cell, lambda x: x) elif n.op_type == 'LSTM': def reform(Bi, Br, W_, R_, name, hidden_size, init_net): # caffe2 has a different order from onnx. We need to rearrange # i o f c -> i f o c reforms = ((W_, 'i2h_w', True, [(0, -1)]), (R_, 'gates_t_w', True, [(0, -1)]), (Bi, 'i2h_b' , True, []), (Br, 'gates_t_b', True, [])) cls._rnn_reform_weights(reforms, name, hidden_size, init_net, ['input', 'output', 'forget', 'cell'], [0, 2, 1, 3]) def make_cell(*args, **kwargs): return rnn_cell.LSTM(*args, **kwargs) def make_rnn(direction_offset): return cls._make_rnn_direction( input_blob, B, W, R, [(initial_h, '/initial_h'), (initial_c, '/initial_c')], sequence_lens, pred_mh, init_net, input_size, hidden_size, 4, direction_offset, "/i2h_b", "/gates_t_b", "/i2h_w", "/gates_t_w", reform, make_cell, lambda x: [x[0], x[1], x[3]]) if direction == 'forward': outputs = make_rnn(0) # in the forward case, storage is shared between the # last outputs. We need to decouple them so that the # VariableLengthSequencePadding only mutates # n.outputs[0] for i in range(1, len(outputs)): pred_mh.net.Copy(outputs[i], n.outputs[i]) if sequence_lens is not None: pred_mh.net.VariableLengthSequencePadding( [outputs[0], sequence_lens], [outputs[0]]) pred_mh.net.ExpandDims([outputs[0]], [n.outputs[0]], dims=[1]) elif direction == 'bidirectional': outputs_f = make_rnn(0) outputs_b = make_rnn(1) concatted_output, _ = pred_mh.net.Concat( [outputs_f[0], outputs_b[0]], [cls.dummy_name(), cls.dummy_name()], axis=2) if sequence_lens is not None: pred_mh.net.VariableLengthSequencePadding( [concatted_output, sequence_lens], [concatted_output]) reshaped_output, _ = pred_mh.net.Reshape(concatted_output, [cls.dummy_name(), cls.dummy_name()], shape=[0,0,-1,2]) pred_mh.net.Transpose(reshaped_output, n.outputs[0], axes=[0,2,1,3]) for i in range(1, len(n.outputs)): pred_mh.net.Concat([outputs_f[i], outputs_b[i]], [n.outputs[i], cls.dummy_name()], axis=0) # We want to decide whether to put all of our weight-reshaping # operators in the init net or the predict net. 
We can put # them in the init net iff the inputs to those operators are # already available, either as graph initializers, or as the # output of other operators in the init net. The latter case # occurs, for example, when exporting from pytorch to onnx. # In most production use, we expect has_initializers to be # true. initializers = {i.name for i in init_model.graph.initializer} outputs = {output for node in init_model.graph.node for output in node.output} has_initializers = all(x in initializers or x in outputs for x in (W, R, B)) pred_ops = [] init_ops = [] (init_ops if has_initializers else pred_ops).extend(init_net.Proto().op) pred_ops.extend(pred_mh.Proto().op) return Caffe2Ops(pred_ops, init_ops, list(pred_mh.Proto().external_input)) @classmethod def _create_control_op(cls, init_model, pred_model, n, opset_version): control_inputs = [] if '__control_inputs' in n.attrs: control_inputs.extend(n.attrs['__control_inputs']) node = cls._common_onnx_node_to_caffe2_op(init_model, pred_model, n, opset_version) node.control_input.extend(control_inputs) return Caffe2Ops([node], [], []) @classmethod def _remove_ssa(cls, net, remap_dict): for op in net.op: for i, name in enumerate(op.output): if name in remap_dict: op.output[i] = remap_dict[name] for i, out in enumerate(net.external_output): if out in remap_dict: net.external_output[i] = remap_dict[out] @classmethod def _create_if(cls, init_model, pred_model, n, opset_version): ops = cls._create_control_op(init_model, pred_model, n, opset_version) assert ops[0][0].type == 'If' if_op = ops[0][0] then_net = else_net = None control_inputs = [] for arg in if_op.arg: if arg.name == 'then_net': then_net = arg.n if arg.name == 'else_net': else_net = arg.n if arg.name == '__control_inputs': control_inputs = arg.strings assert then_net and else_net then_net_outs = then_net.external_output else_net_outs = else_net.external_output op_outputs = if_op.output assert len(then_net_outs) == len(else_net_outs) assert len(else_net_outs) == len(op_outputs) for arg in if_op.arg: if arg.name == 'then_net': arg.n.external_input.extend(control_inputs) if arg.name == 'else_net': arg.n.external_input.extend(control_inputs) return ops @classmethod def _create_loop(cls, init_model, pred_model, n, opset_version): ops = cls._create_control_op(init_model, pred_model, n, opset_version) assert ops[0][0].type == 'ONNXWhile' while_op = ops[0][0] while_op.arg.extend([caffe2.python.utils.MakeArgument('has_trip_count', True)]) while_op.arg.extend([caffe2.python.utils.MakeArgument('has_cond', True)]) while_op.arg.extend([caffe2.python.utils.MakeArgument('disable_scopes', True)]) control_inputs = [] for arg in while_op.arg: if arg.name == '__control_inputs': control_inputs = arg.strings num_loop_carried_deps = 0 for arg in while_op.arg: if arg.name == 'body': num_loop_carried_deps = len(arg.n.external_input) - 2 arg.n.external_input.extend(control_inputs) while_op.arg.extend([ caffe2.python.utils.MakeArgument('num_loop_carried_deps', num_loop_carried_deps) ]) return ops @classmethod def _substitute_raw_value(cls, tp, raw_values_dict): if tp.HasField('raw_data') and tp.raw_data == bytes(b'__EXTERNAL'): if tp.name not in raw_values_dict: raise RuntimeError('TensorProto for value {} referenced raw data but it was not found!'.format(tp.name)) else: tp.raw_data = raw_values_dict[tp.name] @classmethod def _visit_and_substitute_raw_values(cls, nodes, raw_values_dict): for node in nodes: for attr in node.attribute: if attr.HasField('t'): cls._substitute_raw_value(attr.t, raw_values_dict) for 
t in attr.tensors: cls._substitute_raw_value(t, raw_values_dict) if attr.HasField('g'): cls._visit_and_substitute_raw_values(attr.g.node, raw_values_dict) for g in attr.graphs: cls._visit_and_substitute_raw_values(g.node, raw_values_dict) @classmethod def _external_value_resolution_pass(cls, model, raw_values_dict): for init in model.graph.initializer: cls._substitute_raw_value(init, raw_values_dict) cls._visit_and_substitute_raw_values(model.graph.node, raw_values_dict) @classmethod def _direct_initialize_parameters(cls, initializer, ws, device_option): for tp in initializer: ws.FeedBlob(tp.name, onnx.numpy_helper.to_array(tp), device_option) @classmethod def _direct_initialize_inputs(cls, inputs, initialized, ws, device_option): for value_info in inputs: if value_info.name in initialized: continue shape = list(d.dim_value for d in value_info.type.tensor_type.shape.dim) ws.FeedBlob( value_info.name, np.ones(shape, dtype=onnx.mapping.TENSOR_TYPE_TO_NP_TYPE[value_info.type.tensor_type.elem_type]), device_option) @staticmethod def optimize_onnx(input, init=False, predict=False): passes = ['fuse_consecutive_transposes', 'eliminate_nop_transpose', 'fuse_transpose_into_gemm', 'lift_lexical_references'] if init: passes.append('split_init') if predict: passes.append('split_predict') out = onnx.optimizer.optimize(input, passes) return out @classmethod def prepare_zip_archive(cls, file, device='CPU', **kwargs): with zipfile.ZipFile(file, mode='r') as z: with z.open('__MODEL_PROTO', 'r') as f: model = onnx.load(f); blob_names = set(z.namelist()) - set('__MODEL_PROTO') # TODO: make this more efficient raw_values_dict = {} for name in blob_names: with z.open(name, 'r') as blob_file: raw_values_dict[name] = blob_file.read() return cls.prepare(model, device, raw_values_dict=raw_values_dict, **kwargs) @classmethod def prepare(cls, model, device='CPU', raw_values_dict=None, **kwargs): ''' For Onnx Caffe2Backend, we require that init_graph don't initialize the actual input of the predict_graph, for example, if "img" is the input blob for the predict_net, we require that in init_graph and in initializer of the predict_graph, "img" is not initalized. We don't have a check for this, since there is no way we can know which blob is the input of the predict_graph. ''' if not kwargs.pop('no_check_UNSAFE', False): super(Caffe2Backend, cls).prepare(model, device, **kwargs) opset_version = None for imp in model.opset_import: if not imp.HasField("domain") or imp.domain == "": opset_version = imp.version if imp.version > cls._known_opset_version: warnings.warn("This version of onnx-caffe2 targets ONNX operator set version {}, but the model we are trying to import uses version {}. 
We will try to import it anyway, but if the model uses operators which had BC-breaking changes in the intervening versions, import will fail.".format(cls._known_opset_version, imp.version)) else: warnings.warn("Unrecognized operator set {}".format(imp.domain)) if opset_version is None: if model.ir_version >= 0x00000003: raise RuntimeError("Model with IR version >= 3 did not specify ONNX operator set version (onnx-caffe2 requires it)") else: opset_version = 1 model = onnx.shape_inference.infer_shapes(model) ws = Workspace() device_option = get_device_option(Device(device)) init_net, predict_net = cls._onnx_model_to_caffe2_net(model, device, opset_version, False) if raw_values_dict: cls._external_value_resolution_pass(model, raw_values_dict) # Directly load initializer data into blobs in workspace cls._direct_initialize_parameters( model.graph.initializer, ws, device_option, ) initialized = {init.name for init in model.graph.initializer} cls._direct_initialize_inputs( model.graph.input, initialized, ws, device_option, ) uninitialized = [value_info.name for value_info in model.graph.input if value_info.name not in initialized] retval = Caffe2Rep(init_net, predict_net, ws, uninitialized) return retval @classmethod # TODO: This method needs a refactor for clarity def _onnx_node_to_caffe2_op(cls, init_model, pred_model, node_def, opset_version): cbackend = C.Caffe2Backend(cls._dummy_name) if cbackend.support_onnx_import(node_def.op_type): # extract value infos from pred model (value infos of # node's inputs that are in init model should be all # available in pred model) value_infos = [] for name in node_def.input: if pred_model is not None: for vi in itertools.chain(pred_model.graph.input, pred_model.graph.output, pred_model.graph.value_info): if vi.name == name: value_infos.append(vi.SerializeToString()) op_strs = cbackend.convert_node(node_def.SerializeToString(), value_infos, opset_version) init_ops = [] for s in op_strs[0]: op = caffe2_pb2.OperatorDef() op.ParseFromString(s) init_ops.append(op) ops = [] for s in op_strs[1]: op = caffe2_pb2.OperatorDef() op.ParseFromString(s) ops.append(op) return Caffe2Ops(ops, init_ops, []) if node_def.op_type in cls._special_operators: translator = getattr(cls, cls._special_operators[node_def.op_type]) else: translator = cls._common_onnx_node_to_caffe2_op ops = translator(init_model, pred_model, OnnxNode(node_def), opset_version) if isinstance(ops, Caffe2Ops): return ops if not isinstance(ops, container_abcs.Iterable): ops = [ops] return Caffe2Ops(ops, [], []) _broadcast_operators = { 'Add', 'Sub', } @classmethod def _common_onnx_node_to_caffe2_op(cls, init_model, pred_model, onnx_node, opset_version): """ This translator performs the basic translation of ONNX nodes into Caffe2 operators. Besides doing a straightforward marshalling from one format to another, it also does these extra things: - Renames operators based on '_renamed_operators' - Renames attributes based on '_global_renamed_attrs' and '_per_op_renamed_attrs' If you're writing a custom translator, consider calling this first, and then fixing things up further. 
""" c2_op = caffe2_pb2.OperatorDef() c2_op.input.extend(onnx_node.inputs) c2_op.output.extend(onnx_node.outputs) c2_op.name = onnx_node.name onnx_op_type = onnx_node.op_type broken_version = cls._broken_operators.get(onnx_op_type, float('Inf')) if broken_version <= opset_version: raise ValueError( "Don't know how to translate op {} in ONNX operator set v{} (I only support prior to v{})".format(onnx_op_type, opset_version, broken_version)) c2_op.type = cls._renamed_operators.get(onnx_op_type, onnx_op_type) if not core.IsOperator(c2_op.type): raise ValueError( "Don't know how to translate op {}".format(onnx_op_type)) def kmap(k): if (onnx_op_type in cls._per_op_renamed_attrs and k in cls._per_op_renamed_attrs[onnx_op_type]): return cls._per_op_renamed_attrs[onnx_op_type][k] if k in cls._global_renamed_attrs: return cls._global_renamed_attrs[k] return k c2_op.arg.extend(onnx_node.attrs.caffe2(kmap=kmap)) if opset_version < 7: # onnx opset 7 and newest caffe2 have adopted full onnx broadcast semantics # so we don't need this hack anymore if c2_op.type in cls._broadcast_operators: already_broadcast = False for arg in c2_op.arg: if arg.name == 'broadcast': already_broadcast = True if not already_broadcast: c2_op.arg.extend([caffe2.python.utils.MakeArgument('broadcast', 1)]) return c2_op @staticmethod def _all_names_in_graph(graph): if graph is None: return set() names = set() names.update(value_info.name for value_info in graph.input) names.update(value_info.name for value_info in graph.output) for node in graph.node: names.update(node.input) names.update(node.output) return names @classmethod def _graph_to_net(cls, onnx_graph, opset_version): net = caffe2_pb2.NetDef() for node in onnx_graph.node: try: c2ops = cls._onnx_node_to_caffe2_op( None, None, node, opset_version) except Exception as e: print('ONNX FATAL:', e) continue net.op.extend(c2ops.init_ops) net.op.extend(c2ops.ops) net.external_input.extend(c2ops.interface_blobs) net.external_output.extend( value_info.name for value_info in onnx_graph.output) net.external_input.extend( value_info.name for value_info in onnx_graph.input) return net @classmethod def _onnx_model_to_caffe2_net(cls, onnx_model, device, opset_version, include_initializers): device_option = get_device_option(Device(device)) onnx_model = onnx.utils.polish_model(onnx_model) init_model = cls.optimize_onnx(onnx_model, init=True) pred_model = cls.optimize_onnx(onnx_model, predict=True) init_net = caffe2_pb2.NetDef() pred_net = caffe2_pb2.NetDef() init_net.name = onnx_model.graph.name + '_init' pred_net.name = onnx_model.graph.name + '_predict' if include_initializers: init_net.op.extend(cls._create_tensor_filling_op(tp) for tp in onnx_model.graph.initializer) cls._dummy_name.reset(cls._all_names_in_graph(init_model.graph) | cls._all_names_in_graph(pred_model.graph)) errors = [] for net, model in ( (init_net, init_model), (pred_net, pred_model) ): net.device_option.CopyFrom(device_option) for node in model.graph.node: try: c2ops = cls._onnx_node_to_caffe2_op( init_model, pred_model, node, opset_version) except Exception as e: msg = 'Error while processing node: {}. 
Exception: {}'.format(node, e)
                    errors.append(msg)
                    print('ONNX FATAL:', msg, file=sys.stderr)
                    continue
                init_net.op.extend(c2ops.init_ops)
                net.op.extend(c2ops.ops)
                net.external_input.extend(c2ops.interface_blobs)
            net.external_output.extend(
                value_info.name for value_info in model.graph.output)
            net.external_input.extend(
                value_info.name for value_info in model.graph.input)

        if len(errors) > 0:
            raise RuntimeError(
                "ONNX conversion failed, encountered {} errors:\n\n{}".format(
                    len(errors), "\n\n".join(errors)))

        return init_net, pred_net

    # wrapper for backwards compatibility
    @classmethod
    def onnx_graph_to_caffe2_net(cls, model, device="CPU", opset_version=_known_opset_version):
        return cls._onnx_model_to_caffe2_net(model, device=device, opset_version=opset_version, include_initializers=True)

    @classmethod
    def supports_device(cls, device_str):
        device = Device(device_str)
        if device.type == DeviceType.CPU:
            return True
        elif core.IsGPUDeviceType(device.type):
            return workspace.has_gpu_support
        return False

    @classmethod
    def is_compatible(cls, model, device='CPU', **kwargs):
        if hasattr(super(Caffe2Backend, cls), 'is_compatible') \
           and callable(super(Caffe2Backend, cls).is_compatible):
            if not super(Caffe2Backend, cls).is_compatible(model, device, **kwargs):
                return False
        # TODO: should have an unsupported list of operators, be optimistic for now
        return True


prepare = Caffe2Backend.prepare

prepare_zip_archive = Caffe2Backend.prepare_zip_archive

run_node = Caffe2Backend.run_node

run_model = Caffe2Backend.run_model

supports_device = Caffe2Backend.supports_device  # noqa

is_compatible = Caffe2Backend.is_compatible
[ [ [ 181, 196 ] ], [ [ 220, 228 ] ], [ [ 252, 266 ] ], [ [ 290, 306 ] ], [ [ 315, 317 ] ], [ [ 325, 336 ], [ 4132, 4143 ] ], [ [ 360, 365 ] ], [ [ 367, 371 ] ], [ [ 379, 382 ], [ 38360, 38363 ] ], [ [ 390, 397 ], [ 28532, 28539 ] ], [ [ 405, 414 ], [ 16857, 16866 ], [ 32190, 32199 ] ], [ [ 712, 724 ] ], [ [ 733, 739 ] ], [ [ 766, 770 ], [ 1737, 1741 ], [ 7883, 7887 ], [ 14911, 14915 ], [ 17352, 17356 ], [ 34785, 34789 ], [ 39460, 39464 ] ], [ [ 772, 781 ], [ 1701, 1710 ], [ 39514, 39523 ] ], [ [ 783, 791 ], [ 17848, 17856 ], [ 20135, 20143 ] ], [ [ 793, 801 ], [ 18970, 18978 ] ], [ [ 842, 856 ], [ 33378, 33392 ] ], [ [ 896, 907 ], [ 17319, 17330 ] ], [ [ 933, 943 ], [ 1659, 1669 ], [ 9099, 9109 ], [ 9760, 9770 ], [ 32675, 32685 ], [ 32849, 32859 ], [ 34184, 34194 ], [ 36349, 36359 ], [ 37366, 37376 ], [ 37405, 37415 ] ], [ [ 951, 970 ], [ 2244, 2250 ], [ 25185, 25191 ], [ 25273, 25279 ], [ 25355, 25361 ], [ 25841, 25847 ], [ 35795, 35801 ] ], [ [ 978, 989 ], [ 27851, 27853 ] ], [ [ 997, 1001 ] ], [ [ 1019, 1026 ] ], [ [ 1028, 1038 ] ], [ [ 1040, 1051 ], [ 10079, 10090 ], [ 10243, 10254 ], [ 10414, 10425 ], [ 10472, 10483 ], [ 10640, 10651 ], [ 10698, 10709 ], [ 10755, 10766 ], [ 10814, 10825 ], [ 10872, 10883 ], [ 11036, 11047 ], [ 11199, 11210 ] ], [ [ 1053, 1067 ] ], [ [ 1069, 1079 ] ], [ [ 1087, 1104 ] ], [ [ 1112, 1121 ] ], [ [ 1129, 1143 ] ], [ [ 1151, 1171 ] ], [ [ 1179, 1189 ], [ 8104, 8108 ], [ 8208, 8212 ], [ 8638, 8642 ], [ 8742, 8746 ], [ 27441, 27445 ], [ 27872, 27876 ], [ 28379, 28383 ], [ 28646, 28650 ], [ 30653, 30657 ], [ 37183, 37187 ], [ 9982, 9986 ] ], [ [ 1220, 1227 ], [ 4230, 4237 ] ], [ [ 1229, 1235 ], [ 7829, 7835 ], [ 30762, 30768 ], [ 37145, 37151 ], [ 39362, 39368 ] ], [ [ 1237, 1247 ], [ 1643, 1653 ], [ 1684, 1694 ], [ 39407, 39417 ] ], [ [ 1249, 1263 ], [ 9379, 9393 ] ], [ [ 1306, 1315 ], [ 7858, 7867 ], [ 30708, 30717 ] ], [ [ 1359, 1368 ], [ 31533, 31542 ] ], [ [ 1416, 1428 ] ], [ [ 1437, 1475 ], [ 7166, 7167 ], [ 8901, 8902 ], [ 31785, 31786 ] ], [ [ 1484, 1492 ], [ 29944, 29952 ], [ 30305, 30313 ] ], [ [ 1498, 1511 ], [ 16105, 16118 ], [ 16211, 16224 ] ], [ [ 1607, 1624 ], [ 7811, 7828 ], [ 30744, 30761 ], [ 37127, 37144 ] ], [ [ 1797, 1811 ], [ 2005, 2019 ], [ 3997, 4011 ] ], [ [ 2335, 2356 ], [ 2073, 2094 ] ], [ [ 3497, 3505 ], [ 33250, 33258 ] ], [ [ 4120, 4129 ], [ 22982, 22991 ], [ 23433, 23442 ], [ 32962, 32971 ], [ 33312, 33321 ], [ 33443, 33452 ] ], [ [ 4216, 4229 ], [ 39997, 40010 ], [ 40042, 40055 ], [ 40088, 40101 ], [ 40124, 40137 ], [ 40167, 40180 ], [ 40222, 40235 ], [ 2836, 2849 ], [ 2876, 2889 ], [ 3272, 3285 ], [ 3303, 3316 ], [ 7604, 7617 ], [ 29637, 29650 ], [ 39663, 39676 ], [ 39733, 39746 ], [ 39794, 39807 ] ], [ [ 39987, 39994 ] ], [ [ 40020, 40039 ] ], [ [ 40077, 40085 ] ], [ [ 40112, 40121 ] ], [ [ 40149, 40164 ] ], [ [ 40206, 40219 ] ] ]
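The backend above implements the standard ONNX backend interface, and the module-level aliases at the end (prepare, run_node, ...) expose it in the usual way. A short usage sketch; the model file name and input shape are placeholders:

import onnx
import numpy as np
import caffe2.python.onnx.backend as backend

model = onnx.load("model.onnx")                  # hypothetical path
rep = backend.prepare(model, device="CPU")   # returns a Caffe2Rep
outputs = rep.run(np.random.randn(1, 3, 224, 224).astype(np.float32))
print(outputs[0])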
""" Consolidate Services Description of all APIs # noqa: E501 The version of the OpenAPI document: version not set Generated by: https://openapi-generator.tech """ import re # noqa: F401 import sys # noqa: F401 from argocd_python_client.model_utils import ( # noqa: F401 ApiTypeError, ModelComposed, ModelNormal, ModelSimple, cached_property, change_keys_js_to_python, convert_js_args_to_python_args, date, datetime, file_type, none_type, validate_get_composed_info, ) from ..model_utils import OpenApiModel from argocd_python_client.exceptions import ApiAttributeError def lazy_import(): from argocd_python_client.model.v1_event import V1Event from argocd_python_client.model.v1_list_meta import V1ListMeta globals()['V1Event'] = V1Event globals()['V1ListMeta'] = V1ListMeta class V1EventList(ModelNormal): """NOTE: This class is auto generated by OpenAPI Generator. Ref: https://openapi-generator.tech Do not edit the class manually. Attributes: allowed_values (dict): The key is the tuple path to the attribute and the for var_name this is (var_name,). The value is a dict with a capitalized key describing the allowed value and an allowed value. These dicts store the allowed enum values. attribute_map (dict): The key is attribute name and the value is json key in definition. discriminator_value_class_map (dict): A dict to go from the discriminator variable value to the discriminator class name. validations (dict): The key is the tuple path to the attribute and the for var_name this is (var_name,). The value is a dict that stores validations for max_length, min_length, max_items, min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, inclusive_minimum, and regex. additional_properties_type (tuple): A tuple of classes accepted as additional properties values. """ allowed_values = { } validations = { } @cached_property def additional_properties_type(): """ This must be a method because a model may have properties that are of type self, this must run after the class is loaded """ lazy_import() return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 _nullable = False @cached_property def openapi_types(): """ This must be a method because a model may have properties that are of type self, this must run after the class is loaded Returns openapi_types (dict): The key is attribute name and the value is attribute type. """ lazy_import() return { 'items': ([V1Event],), # noqa: E501 'metadata': (V1ListMeta,), # noqa: E501 } @cached_property def discriminator(): return None attribute_map = { 'items': 'items', # noqa: E501 'metadata': 'metadata', # noqa: E501 } read_only_vars = { } _composed_schemas = {} @classmethod @convert_js_args_to_python_args def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 """V1EventList - a model defined in OpenAPI Keyword Args: _check_type (bool): if True, values for parameters in openapi_types will be type checked and a TypeError will be raised if the wrong type is input. Defaults to True _path_to_item (tuple/list): This is a list of keys or values to drill down to the model in received_data when deserializing a response _spec_property_naming (bool): True if the variable names in the input data are serialized names, as specified in the OpenAPI document. False if the variable names in the input data are pythonic names, e.g. snake case (default) _configuration (Configuration): the instance to use when deserializing a file_type parameter. If passed, type conversion is attempted If omitted no type conversion is done. 
_visited_composed_classes (tuple): This stores a tuple of classes that we have traveled through so that if we see that class again we will not use its discriminator again. When traveling through a discriminator, the composed schema that is is traveled through is added to this set. For example if Animal has a discriminator petType and we pass in "Dog", and the class Dog allOf includes Animal, we move through Animal once using the discriminator, and pick Dog. Then in Dog, we will make an instance of the Animal class but this time we won't travel through its discriminator because we passed in _visited_composed_classes = (Animal,) items ([V1Event]): [optional] # noqa: E501 metadata (V1ListMeta): [optional] # noqa: E501 """ _check_type = kwargs.pop('_check_type', True) _spec_property_naming = kwargs.pop('_spec_property_naming', False) _path_to_item = kwargs.pop('_path_to_item', ()) _configuration = kwargs.pop('_configuration', None) _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) self = super(OpenApiModel, cls).__new__(cls) if args: raise ApiTypeError( "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( args, self.__class__.__name__, ), path_to_item=_path_to_item, valid_classes=(self.__class__,), ) self._data_store = {} self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item self._configuration = _configuration self._visited_composed_classes = _visited_composed_classes + (self.__class__,) for var_name, var_value in kwargs.items(): if var_name not in self.attribute_map and \ self._configuration is not None and \ self._configuration.discard_unknown_keys and \ self.additional_properties_type is None: # discard variable. continue setattr(self, var_name, var_value) return self required_properties = set([ '_data_store', '_check_type', '_spec_property_naming', '_path_to_item', '_configuration', '_visited_composed_classes', ]) @convert_js_args_to_python_args def __init__(self, *args, **kwargs): # noqa: E501 """V1EventList - a model defined in OpenAPI Keyword Args: _check_type (bool): if True, values for parameters in openapi_types will be type checked and a TypeError will be raised if the wrong type is input. Defaults to True _path_to_item (tuple/list): This is a list of keys or values to drill down to the model in received_data when deserializing a response _spec_property_naming (bool): True if the variable names in the input data are serialized names, as specified in the OpenAPI document. False if the variable names in the input data are pythonic names, e.g. snake case (default) _configuration (Configuration): the instance to use when deserializing a file_type parameter. If passed, type conversion is attempted If omitted no type conversion is done. _visited_composed_classes (tuple): This stores a tuple of classes that we have traveled through so that if we see that class again we will not use its discriminator again. When traveling through a discriminator, the composed schema that is is traveled through is added to this set. For example if Animal has a discriminator petType and we pass in "Dog", and the class Dog allOf includes Animal, we move through Animal once using the discriminator, and pick Dog. 
Then in Dog, we will make an instance of the Animal class but this time we won't travel through its discriminator because we passed in _visited_composed_classes = (Animal,) items ([V1Event]): [optional] # noqa: E501 metadata (V1ListMeta): [optional] # noqa: E501 """ _check_type = kwargs.pop('_check_type', True) _spec_property_naming = kwargs.pop('_spec_property_naming', False) _path_to_item = kwargs.pop('_path_to_item', ()) _configuration = kwargs.pop('_configuration', None) _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) if args: raise ApiTypeError( "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( args, self.__class__.__name__, ), path_to_item=_path_to_item, valid_classes=(self.__class__,), ) self._data_store = {} self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item self._configuration = _configuration self._visited_composed_classes = _visited_composed_classes + (self.__class__,) for var_name, var_value in kwargs.items(): if var_name not in self.attribute_map and \ self._configuration is not None and \ self._configuration.discard_unknown_keys and \ self.additional_properties_type is None: # discard variable. continue setattr(self, var_name, var_value) if var_name in self.read_only_vars: raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " f"class with read only attributes.")
[ [ [ 192, 194 ] ], [ [ 216, 219 ] ], [ [ 300, 312 ], [ 6110, 6122 ], [ 10229, 10241 ] ], [ [ 318, 331 ] ], [ [ 337, 348 ], [ 887, 898 ] ], [ [ 354, 365 ] ], [ [ 371, 386 ], [ 2099, 2114 ], [ 2458, 2473 ], [ 2943, 2958 ] ], [ [ 392, 416 ] ], [ [ 422, 452 ], [ 3201, 3231 ], [ 7383, 7413 ] ], [ [ 458, 462 ], [ 2358, 2362 ] ], [ [ 468, 476 ], [ 2364, 2372 ] ], [ [ 482, 491 ] ], [ [ 497, 506 ], [ 2403, 2412 ] ], [ [ 512, 538 ] ], [ [ 568, 580 ], [ 6042, 6054 ] ], [ [ 625, 642 ], [ 11339, 11356 ] ], [ [ 649, 660 ], [ 2322, 2333 ], [ 2794, 2805 ] ], [ [ 875, 886 ] ] ]
from flask import Flask, request
import json

app = Flask(__name__)


@app.route('/')
def hello():
    outFile = {'Title': "Simon Game", 'msg': "Hello World!"}
    # round-trip through a JSON string; json.loads returns the dict back
    outFile = json.dumps(outFile)
    return json.loads(outFile)
[ [ [ 18, 23 ], [ 52, 57 ] ], [ [ 25, 32 ] ], [ [ 40, 44 ], [ 176, 180 ], [ 207, 211 ] ], [ [ 46, 49 ], [ 70, 73 ] ], [ [ 89, 94 ] ] ]
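The view above serializes a dict to JSON and immediately parses it back. In Flask the same endpoint is usually written with jsonify (or, from Flask 1.1 on, by returning the dict directly); a sketch of the equivalent:

from flask import Flask, jsonify

app = Flask(__name__)

@app.route('/')
def hello():
    return jsonify({'Title': "Simon Game", 'msg': "Hello World!"})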
#
# Copyright (c) 2018 Via Technology Ltd. All Rights Reserved.
# Consult your license regarding permissions and restrictions.
#
"""
Operations related to airspaces and intersections.
"""
from psycopg2 import Error, InternalError
from psycopg2.extensions import AsIs
from psycopg2.extras import DictCursor
from itertools import filterfalse
from functools import reduce
from shapely.wkt import loads
import pru.db.context as ctx
from pru.logger import logger

log = logger(__name__)


def make_point(lon, lat, connection):
    """
    Makes a geo point
    """
    cursor = connection.cursor()
    query = "SELECT ST_MakePoint(%s, %s)"
    params = (float(lon), float(lat))
    cursor.execute(query, params)
    return cursor.fetchone()


def make_augmented_point_from_position(position, flight_id, connection):
    """
    Takes a position tuple and makes an augmented point.
    """
    point = make_point(position[1], position[0], connection)
    return {'flight_id': flight_id, 'lon': position[1], 'lat': position[0],
            'geoPoint': point}


def make_augmented_points_from_positions(latitudes, longitudes, flight_id, connection):
    """
    Takes a list of latitudes and a list of longitudes and a flight_id.
    Makes a list of augmented points.
    """
    return [make_augmented_point_from_position(position, flight_id, connection)
            for position in zip(latitudes, longitudes)]


def extract_point_list_from_augmented_points(augmented_points):
    """
    Given a list or generator of augmented points extract the geo point
    representation as a list.
    """
    return list(map(lambda augmented_points: augmented_points['geoPoint'],
                    augmented_points))


def make_line_from_augmented_points(augmented_points, flight_id, connection):
    """
    Given a list of augmented points create a geographic line.
    """
    if (len(augmented_points) == 0):
        log.warning(f"Creating a line from a list of points but the list "
                    f"was empty for flight id {flight_id}.")
        return [[]]
    cursor = connection.cursor()
    query = "SELECT ST_AsEWKT(ST_MakeLine(ARRAY[%s]));"
    params = [augmented_points]
    cursor.execute(query, params)
    return cursor.fetchone()


def find_sectors_intersected_by(line_string, flight_id, min_altitude, max_altitude, context, connection):
    """
    Lists the airspace ids and details of those airspaces where the given
    line string intersects, excluding those that are outside of the range of
    altitudes of the trajectory.
    """
    log.debug(f"Finding trajectory intersection with airspaces for flight id: {flight_id}")
    schema_name = context[ctx.SCHEMA_NAME]
    try:
        with connection.cursor() as cursor:
            query = "SELECT id, av_airspace_id, min_altitude, max_altitude " \
                    "from %s.sectors where " \
                    "NOT (max_altitude < %s OR min_altitude > %s) AND " \
                    "ST_Intersects(wkt, ST_GeographyFromText('SRID=4326;%s'));"
            params = [AsIs(schema_name), min_altitude, max_altitude, AsIs(line_string)]
            cursor.execute(query, params)
            return cursor.fetchall()
    except InternalError:
        log.exception(f"Failed whilst trying to find the intersection between "
                      f"a route with flight id {flight_id} and the airspace model.")
        return []


def find_user_sectors_intersected_by(line_string, flight_id, min_altitude, max_altitude, context, connection):
    """
    Lists the user defined airspace uids and details of those airspaces where
    the given line string intersects.
""" log.debug(f"Finding trajectory intersection with user defined airspaces for flight id: {flight_id}") schema_name = context[ctx.SCHEMA_NAME] try: with connection.cursor() as cursor: query = "SELECT id, org_id, min_altitude, max_altitude, user_id, " \ "sector_name from %s.user_defined_sectors where " \ "NOT (max_altitude < %s OR min_altitude > %s) AND " \ "ST_Intersects(wkt, ST_GeographyFromText('SRID=4326;%s'));" params = [AsIs(schema_name), min_altitude, max_altitude, AsIs(line_string)] cursor.execute(query, params) return cursor.fetchall() except InternalError: log.exception(f"Failed whist trying to find the intersection between " "a route with flight id {flight_id} and the airspace model.") return [] def make_geographic_trajectory(augmented_points, flight_id, connection): """ Given a list of augmented points create a geographic line segment. """ log.debug(f"Making geo trajectory for flight id: {flight_id}") return make_line_from_augmented_points( extract_point_list_from_augmented_points(augmented_points), flight_id, connection)[0] def make_augmented_trajectory(augmented_points, geographic_trajectory, flight_id, min_altitude, max_altitude, connection, is_user_defined=False): """ Makes a trajectory augmented with geographic positions and a list of sectors intersected by the trajectory excluding those that do not meet the altitude range of the trajectory. """ log.debug(f"Creating an augmented trajectory for flight id: {flight_id}") if not is_user_defined: sectors = find_sectors_intersected_by(geographic_trajectory, flight_id, min_altitude, max_altitude, ctx.CONTEXT, connection) else: sectors = find_user_sectors_intersected_by(geographic_trajectory, flight_id, min_altitude, max_altitude, ctx.CONTEXT, connection) return {'extendedPoints': augmented_points, 'line': geographic_trajectory, 'sectors': sectors, 'is_user_defined': is_user_defined} def find_sector(db_ID, connection): schemaName = ctx.CONTEXT[ctx.SCHEMA_NAME] with connection.cursor(cursor_factory=DictCursor) as cursor: cursor.execute("SELECT id, av_airspace_id, av_icao_state_id, av_name, min_altitude, max_altitude FROM %s.sectors WHERE " "id = %s", [AsIs(schemaName), db_ID]) return cursor.fetchone() def find_sector_identifiers(db_ID, context, connection): """ Finds the identifiers for a sector given the db id of the sector. """ schemaName = context[ctx.SCHEMA_NAME] with connection.cursor(cursor_factory=DictCursor) as cursor: cursor.execute("SELECT av_airspace_id, av_icao_state_id, av_name FROM %s.sectors WHERE " "id = %s", [AsIs(schemaName), db_ID]) return cursor.fetchmany() def find_airspace_by_database_ID(db_ID, context, connection, is_user_defined=False): """ Finds an aairspace with the given database id Returns a list, list may be empty. """ schemaName = context[ctx.SCHEMA_NAME] with connection.cursor(cursor_factory=DictCursor) as cursor: if is_user_defined: cursor.execute("SELECT * FROM %s.user_defined_sectors WHERE " "id = %s", [AsIs(schemaName), db_ID]) return cursor.fetchmany() else: cursor.execute("SELECT * FROM %s.sectors WHERE " "id = %s", [AsIs(schemaName), db_ID]) return cursor.fetchmany() def originates(first_point, polygon_string, flight_id, sector_id, connection): """ If the first point is inside the given sector we determine that the trajectory originates in the sector. 
    first_point wkb for the first point of the trajectory

    returns True => originates in sectors
    """
    cursor = connection.cursor()
    query = "SELECT ST_Intersects(%s::geography, %s::geography);"
    params = [first_point, polygon_string]
    cursor.execute(query, params)
    originates = cursor.fetchone()[0]
    if originates:
        log.debug(f"Flight with id {flight_id} originates in sector {sector_id}")
    return originates


def find_line_poly_intersection_without_boundary(lineString, polygonString, connection):
    """
    Use the geo db to find the intersections between the linestring and the
    unbounded polygon string.  The polygon is assumed to _NOT_ have a boundary
    around it.
    """
    query = "SELECT ST_AsText(ST_Intersection(%s::geography, ST_Force2D(ST_Boundary(%s))::geography));"
    params = [lineString, polygonString]
    try:
        with connection.cursor() as cursor:
            cursor.execute(query, params)
            res = cursor.fetchall()
            return {'segmentStrings': res, 'polygonString': polygonString}
    except Error:
        log.exception("Failed to find intersection : Error")
        return []


def find_line_poly_intersection_with_boundary(lineString, polygonString, connection):
    """
    Use the geo db to find the intersections between the linestring and the
    bounded polygon string.  The polygon is assumed to already have a boundary
    around it.
    """
    query = "SELECT unit.find_intersections(%s, %s)"
    params = [lineString, polygonString]
    try:
        with connection.cursor() as cursor:
            cursor.execute(query, params)
            res = cursor.fetchall()
            return {'segmentStrings': res, 'polygonString': polygonString}
    except Error:
        log.exception("Failed to find intersection : Error")
        return []


def find_intersections(augmented_trajectory, min_altitude, max_altitude, flight_id, connection):
    """
    Finds the points on the trajectory that intersect with the sectors of the
    augmented trajectory.
""" log.debug(f"Finding intersection for flight id {flight_id}") first_point = augmented_trajectory['extendedPoints'][0]['geoPoint'] first_point_lon = augmented_trajectory['extendedPoints'][0]['lon'] first_point_lat = augmented_trajectory['extendedPoints'][0]['lat'] is_user_defined = augmented_trajectory['is_user_defined'] # Find each sector sector_IDs = [sector[0] for sector in augmented_trajectory['sectors']] log.debug("Found sector ids %s", str(sector_IDs)) sectors = [find_airspace_by_database_ID(str(sector_id), ctx.CONTEXT, connection, is_user_defined)[0] for sector_id in sector_IDs] # Find the points of the trajectory where the trajectory intersects # with each sector if is_user_defined: segments = [{'flight_id': flight_id, 'intersections': find_line_poly_intersection_with_boundary(augmented_trajectory['line'], sector['bounded_sector'], connection), 'origin': {'is_origin': originates(first_point, sector['wkt'], flight_id, sector['id'], connection), 'origin_lat': first_point_lat, 'origin_lon': first_point_lon}, 'id': sector['id'], 'org_id': sector['org_id'], 'user_id': sector['user_id'], 'sector_name': sector['sector_name'], 'min_altitude': sector['min_altitude'], 'max_altitude': sector['max_altitude'], 'is_cylinder': sector['is_cylinder'], 'is_user_defined': is_user_defined} for sector in sectors] else: segments = [{'flight_id': flight_id, 'intersections': find_line_poly_intersection_with_boundary(augmented_trajectory['line'], sector['bounded_sector'], connection), 'origin': {'is_origin': originates(first_point, sector['wkt'], flight_id, sector['id'], connection), 'origin_lat': first_point_lat, 'origin_lon': first_point_lon}, 'id': sector['id'], 'av_icao_state_id': sector['av_icao_state_id'], 'av_name': sector['av_name'], 'av_airspace_id': sector['av_airspace_id'], 'min_altitude': sector['min_altitude'], 'max_altitude': sector['max_altitude'], 'is_user_defined': is_user_defined} for sector in sectors] return segments def extract(sector_id, shape, flight_id): """ Given a shapley shape find if we have a point or a multipoint. For a point extract the y, x pair as a list of one tuple of sector_id, latitude and longitude. For a multipoint return a list of multiple tuples. """ if shape.geom_type == 'MultiPoint': return [(sector_id, p.y, p.x) for p in shape] elif shape.geom_type == 'Point': return [(sector_id, shape.y, shape.x)] else: log.debug("Unknown geom type : %s in flight id %s and sector_id %s, was %s, skipping", shape.geom_type, flight_id, sector_id, str(shape)) return [] def extract_details_from_intersection(sector_id, wkt, origin, flight_id): """ Given an intersection wkt use shapley to create the point or multipoint object. Then extract the latitude and longitudes from the (multi)point. Returns a list of tuples of sector_id, latiitude and longitude """ intersection_tuples = extract(sector_id, loads(wkt), flight_id) if origin['is_origin']: # If this sector is an origin sector, add in the lat lons at the start. 
        intersection_tuples = [(sector_id, origin['origin_lat'],
                                origin['origin_lon'])] + intersection_tuples
    return intersection_tuples


def make_sector_description(intersection, is_user_defined=False):
    """
    Makes a text description of the sector from the intersection description
    """
    if is_user_defined:
        return f'{intersection["org_id"]}/{intersection["user_id"]}/{intersection["sector_name"]}'
    else:
        return f'{intersection["av_icao_state_id"]}/{intersection["av_name"]}/{intersection["id"]}/{intersection["av_airspace_id"]}'


def make_sector_identifier(intersection):
    """
    Makes a text version of the database id in the given intersection
    """
    return f'{intersection["id"]}'


def extract_intersection_wkts(intersections):
    """
    Given a list of intersection dicts return a list of wkts with sector
    descriptive text and the origin details as a tuple.
    ie ("some-text-made-from-sector-ids", wkt,
        {is_origin: False, origin_lat: lat, origin_lon: lon})
    """
    return [(make_sector_identifier(intersection),
             intersection['intersections']['segmentStrings'][0][0],
             intersection['origin'])
            for intersection in intersections]


def merge_l_t(l, lt):
    """
    Merge a list of tuples lt, each of three values, into three lists l.
    For example:
    [('a', 'b', 'c'), ('a', 'd', 'e')] -> [['b', 'd'], ['c', 'e'], ['a', 'a']]
    """
    for t in lt:
        l[0].append(t[1])
        l[1].append(t[2])
        l[2].append(t[0])
    return l


def create_intersection_data_structure(intersections, flight_id):
    """
    Given the intersection data structures create a response tuple.
    """
    # The intersection wkts are tuples of the sector_id, the wkt and the origin
    # status for the intersection.
    intersection_wkts = extract_intersection_wkts(intersections)
    intersection_details = [extract_details_from_intersection(*intersection_wkt, flight_id)
                            for intersection_wkt in intersection_wkts]
    x_y_sector_ids = reduce(merge_l_t, intersection_details, [[], [], []])
    return x_y_sector_ids[0], x_y_sector_ids[1], x_y_sector_ids[2]
[ [ [ 211, 216 ], [ 8658, 8663 ], [ 9344, 9349 ] ], [ [ 218, 231 ], [ 3180, 3193 ], [ 4308, 4321 ] ], [ [ 264, 268 ], [ 3023, 3027 ], [ 3071, 3075 ], [ 4151, 4155 ], [ 4199, 4203 ], [ 6148, 6152 ], [ 6614, 6618 ], [ 7114, 7118 ], [ 7292, 7296 ] ], [ [ 297, 307 ], [ 5938, 5948 ], [ 6436, 6446 ], [ 6950, 6960 ] ], [ [ 330, 341 ] ], [ [ 364, 370 ], [ 15759, 15765 ] ], [ [ 395, 400 ], [ 13609, 13614 ] ], [ [ 408, 429 ], [ 2651, 2654 ], [ 3752, 3755 ], [ 5467, 5470 ], [ 5615, 5618 ], [ 5867, 5870 ], [ 5879, 5882 ], [ 6377, 6380 ], [ 6891, 6894 ], [ 10252, 10255 ] ], [ [ 453, 459 ], [ 467, 473 ] ], [ [ 461, 464 ], [ 1895, 1898 ], [ 2537, 2540 ], [ 3203, 3206 ], [ 3625, 3628 ], [ 4331, 4334 ], [ 4670, 4673 ], [ 5257, 5260 ], [ 7907, 7910 ], [ 8673, 8676 ], [ 9359, 9362 ], [ 9657, 9660 ], [ 10098, 10101 ], [ 13095, 13098 ] ], [ [ 490, 500 ], [ 897, 907 ] ], [ [ 744, 778 ], [ 1281, 1315 ] ], [ [ 1059, 1095 ] ], [ [ 1399, 1439 ], [ 4789, 4829 ] ], [ [ 1697, 1728 ], [ 4744, 4775 ] ], [ [ 2232, 2259 ], [ 5377, 5404 ] ], [ [ 3382, 3414 ], [ 5520, 5552 ] ], [ [ 4510, 4536 ] ], [ [ 4905, 4930 ] ], [ [ 5818, 5829 ] ], [ [ 6213, 6236 ] ], [ [ 6680, 6708 ], [ 10163, 10191 ] ], [ [ 7362, 7372 ], [ 10888, 10898 ], [ 11962, 11972 ] ], [ [ 8010, 8054 ] ], [ [ 8750, 8791 ], [ 10572, 10613 ], [ 11646, 11687 ] ], [ [ 9436, 9454 ] ], [ [ 12620, 12627 ], [ 13590, 13597 ] ], [ [ 13257, 13290 ], [ 15631, 15664 ] ], [ [ 13887, 13910 ] ], [ [ 14314, 14336 ], [ 14778, 14800 ] ], [ [ 14479, 14504 ], [ 15562, 15587 ] ], [ [ 14961, 14970 ], [ 15766, 15775 ] ], [ [ 15277, 15311 ] ] ]
import numpy as np from tinygrad.tensor import Function from extra.cherry import * # ************* unary ops ************* class ReLU(Function): def forward(ctx, input): ctx.save_for_backward(input) return cherry_unop(input, UnaryOps.RELU) def backward(ctx, grad_output): input, = ctx.saved_tensors return cherry_binop(grad_output, cherry_unop(input, UnaryOps.GT0), BinaryOps.MUL) class Log(Function): def forward(ctx, input): ctx.save_for_backward(input) return cherry_unop(input, UnaryOps.LOG) def backward(ctx, grad_output): input, = ctx.saved_tensors return cherry_binop(grad_output, input, BinaryOps.DIV) class Exp(Function): def forward(ctx, input): ret = cherry_unop(input, UnaryOps.EXP) ctx.save_for_backward(ret) return ret def backward(ctx, grad_output): ret, = ctx.saved_tensors return cherry_binop(grad_output, ret, BinaryOps.MUL) # ************* reduce ops ************* class Sum(Function): def forward(ctx, input, axis=None): ctx.save_for_backward(input, axis) return cherry_reduceop(input, ReduceOps.SUM, axis) def backward(ctx, grad_output): input, axis = ctx.saved_tensors if isinstance(axis, int): axis = [axis] shape = [1 if axis is None or i in axis else input.shape[i] for i in range(len(input.shape))] return cherry_binop(grad_output.reshape(shape), np.zeros_like(input), BinaryOps.ADD) class Max(Function): def forward(ctx, inp, axis=None): if isinstance(axis, int): axis = [axis] #ret = np.amax(inp, axis=None if axis is None else tuple(axis), keepdims=True) ret = cherry_reduceop(inp, ReduceOps.MAX, None if axis is None else tuple(axis), keepdims=True) ctx.save_for_backward(inp, axis, ret) if axis is not None: ret = ret.reshape([inp.shape[i] for i in range(len(inp.shape)) if i not in axis]) return ret def backward(ctx, grad_output): input, axis, ret = ctx.saved_tensors shape = [1 if axis is None or i in axis else input.shape[i] for i in range(len(input.shape))] ret2 = (input==ret.reshape(shape)) #div = ret2.sum(axis=None if axis is None else tuple(axis), keepdims=True) #return ret2*grad_output.reshape(shape)/div div = cherry_reduceop(ret2, ReduceOps.SUM, axis=None if axis is None else tuple(axis), keepdims=True) return cherry_binop(cherry_binop(ret2, grad_output.reshape(shape), BinaryOps.MUL), div, BinaryOps.DIV) # ************* binary ops ************* def unbroadcast(out, in_sh): # adjoint operation to broadcast is sum. 
Need to sum all axis with 1 = in_sh[i] < out.shape[i] sum_axis = tuple([i for i in range(len(in_sh)) if in_sh[i]==1 and out.shape[i]>1]) if in_sh != (1,) else None return cherry_reduceop(out, ReduceOps.SUM, sum_axis).reshape(in_sh) class Add(Function): def forward(ctx, x, y): ctx.save_for_backward(x.shape, y.shape) return cherry_binop(x, y, BinaryOps.ADD) def backward(ctx, grad_output): shape_x, shape_y = ctx.saved_tensors return unbroadcast(grad_output, shape_x), unbroadcast(grad_output, shape_y) class Sub(Function): def forward(ctx, x, y): ctx.save_for_backward(x.shape, y.shape) return cherry_binop(x, y, BinaryOps.SUB) def backward(ctx, grad_output): shape_x, shape_y = ctx.saved_tensors return unbroadcast(grad_output, shape_x), unbroadcast(-grad_output, shape_y) class Mul(Function): def forward(ctx, x, y): ctx.save_for_backward(x, y) return cherry_binop(x, y, BinaryOps.MUL) def backward(ctx, grad_output): x,y = ctx.saved_tensors return unbroadcast(y*grad_output, x.shape), unbroadcast(x*grad_output, y.shape) class Pow(Function): def forward(ctx, x, y): ctx.save_for_backward(x, y) return cherry_binop(x, y, BinaryOps.POW) def backward(ctx, grad_output): x,y = ctx.saved_tensors return unbroadcast(y * (x**(y-1.0)) * grad_output, x.shape), \ unbroadcast((x**y) * np.log(x) * grad_output, y.shape) # ************* processing ops ************* class Matmul(Function): def forward(ctx, input, weight): ctx.save_for_backward(input, weight) return cherry_matmul(input, weight) def backward(ctx, grad_output): input, weight = ctx.saved_tensors grad_input = cherry_matmul(grad_output, weight, transpose_w=True) grad_weight = cherry_matmul(input, grad_output, transpose_x=True) return grad_input, grad_weight class Conv2D(Function): def forward(ctx, x, w, stride=1, groups=1): if type(ctx.stride) == int: ctx.stride = (ctx.stride, ctx.stride) cout,cin,H,W = w.shape ys,xs = ctx.stride bs,cin_ = x.shape[0], x.shape[1] iy,ix = x.shape[2],x.shape[3] oy,ox = (x.shape[2]-(H-ys))//ys, (x.shape[3]-(W-xs))//xs assert cin*ctx.groups == cin_ assert cout % ctx.groups == 0 rcout = cout//ctx.groups # if H == 1 and W == 1 and ctx.groups == 1 and ctx.stride == (1,1): gx = x.reshape(bs,ctx.groups,cin,x.shape[2],x.shape[3]) tx = np.lib.stride_tricks.as_strided(gx, shape=(bs, ctx.groups, cin, oy, ox, H, W), strides=(*gx.strides[0:3], gx.strides[3]*ys, gx.strides[4]*xs, *gx.strides[3:5]), writeable=False, ) tw = w.reshape(ctx.groups, rcout, cin, H, W) ctx.save_for_backward(tx, tw, x.shape) print((*gx.strides[0:3], gx.strides[3]*ys, gx.strides[4]*xs, *gx.strides[3:5])) """ ret = np.zeros((bs,ctx.groups,oy,ox,rcout),dtype=x.dtype) for g in range(ctx.groups): #ijYXyx,kjyx -> iYXk ->ikYX ret[:,g] += np.tensordot(tx[:,g], tw[g], ((1,4,5),(1,2,3))) print(bs, ctx.groups, cin) return np.moveaxis(ret,4,2).reshape(bs, cout, oy, ox) """ cherry_dmar(SLOT(0), x) # bs, groups, cin, x.shape[2], x.shape[3] cherry_dmar(SLOT(1), w) # groups, rcout, cin, H, W cherry_reset_counts() print(bs, ctx.groups, rcout, oy, ox, cin, H, W) for B in range(0, bs): if cin == 1 and rcout == 1 and ctx.groups > 1: # hmm, this doesn't work, it's not a matmul # you always have to loop over the groups, since they aren't joint # the idea would be to collapse the HxW into the matmul, but you'd be limited to 9 for 3x3 # and while the load is easy in the weight matrix, it's hard in the image matrix (3 strides) # and only the diagonal of the matrix would be useful! groups aren't channels! 
# [(1, 144, 58, 58), (144, 1, 3, 3)] -> (1, 144, 56, 56) # what does a grouped 1x1 conv look like? # bs x groups x yx -- groups x 1 --> bs x groups x yx # it looks like a broadcasted multiply #print("opt1") # x: bs x groups x iy x ix # w: groups x H x W # out: bs x groups x oy x ox # ix x groups x groups for g in range(0, groups, SZ): for Y in range(0, oy): for X in range(0, ox, SZ): IY,IX = Y*ys,X*xs riski_zero(Reg.MATMUL_ACC) for y in range(IY, IY+H): for x in range(IX, IX+W): riski_load(Reg.MATMUL_INPUT, SLOT(0) + B*groups*iy*ix + g*iy*ix + y*ix + x, xs, iy*ix, min(SZ, ox-X), min(SZ, groups-g)) # 0 here is for broadcasting riski_load(Reg.MATMUL_WEIGHTS, SLOT(1) + g*H*W + (y-IY)*W + (x-IX), 0, H*W, SZ, min(SZ, groups-g)) riski_mulacc() #risk_regdump() riski_store(Reg.MATMUL_ACC, SLOT(2) + B*groups*oy*ox + g*oy*ox + Y*ox + X, 1, oy*ox, min(SZ, ox-X), min(SZ, groups-g)) elif H == 1 and W == 1 and xs == 1 and ys == 1: #print("opt2") # oxy x cin x rcout -- unstrided 1x1 # this is a simple matmul for g in range(0, groups): for c in range(0, rcout, SZ): yx = oy*ox assert yx == iy*ix for YX in range(0, oy*ox, SZ): # these are next to each other # inner conv riski_zero(Reg.MATMUL_ACC) for ci in range(0, cin, SZ): riski_load(Reg.MATMUL_INPUT, SLOT(0) + B*groups*cin*yx + g*cin*yx + ci*yx + YX, 1, yx, min(SZ, yx-YX), min(SZ, cin-ci)) riski_load(Reg.MATMUL_WEIGHTS, SLOT(1) + g*rcout*cin + c*cin + ci, 1, cin, min(SZ, cin-ci), min(SZ, rcout-c)) riski_matmul() riski_store(Reg.MATMUL_ACC, SLOT(2) + B*groups*rcout*yx + g*rcout*yx + c*yx + YX, 1, yx, min(SZ, yx-YX), min(SZ, rcout-c)) else: #print("unoptimized") # ox x cin x rcout -- unoptimized for g in range(0, groups): for c in range(0, rcout, SZ): for Y in range(0, oy): for X in range(0, ox, SZ): IY,IX = Y*ys,X*xs # inner conv riski_zero(Reg.MATMUL_ACC) for ci in range(0, cin, SZ): # not a loop in 1x1 convs, 9 in 3x3, 25 in 5x5 for y in range(IY, IY+H): for x in range(IX, IX+W): riski_load(Reg.MATMUL_INPUT, SLOT(0) + B*groups*cin*iy*ix + g*cin*iy*ix + ci*iy*ix + y*ix + x, xs, iy*ix, min(SZ, ox-X), min(SZ, cin-ci)) riski_load(Reg.MATMUL_WEIGHTS, SLOT(1) + g*rcout*cin*H*W + c*cin*H*W + ci*H*W + (y-IY)*W + (x-IX), H*W, cin*H*W, min(SZ, cin-ci), min(SZ, rcout-c)) riski_matmul() riski_store(Reg.MATMUL_ACC, SLOT(2) + B*groups*rcout*oy*ox + g*rcout*oy*ox + c*oy*ox + Y*ox + X, 1, oy*ox, min(SZ, ox-X), min(SZ, rcout-c)) cherry_print_counts() #print(x.shape, w.shape, "->", ret.shape) return cherry_dmaw(SLOT(2), (bs, cout, oy, ox)) def backward(ctx, grad_output): bs,_,oy,ox = grad_output.shape tx, tw, x_shape = ctx.saved_tensors _,rcout,cin,H,W = tw.shape ys,xs = ctx.stride OY,OX = x_shape[2:4] ggg = grad_output.reshape(bs,ctx.groups,rcout,oy,ox) gdw = np.zeros((ctx.groups,rcout,cin,H,W), dtype=tx.dtype) if cin >= 16: # optimize for large channel count for g in range(ctx.groups): #'ikYX,ijYXyx -> kjyx' for i in range(ggg[:,g].shape[1]): for m in range(tx[:,g].shape[4]): for n in range(tx[:,g].shape[5]): # Use transposes to ensure reshape keeps the correct dimension (channel dimension) when multiple dimensions have the same size big_matrix = np.transpose(tx[:,g][:, :, :, :, m, n], (1, 0, 2, 3)).reshape(tx[:,g].shape[1], -1).T gdw[g][i, :, m, n] = cherry_matmul(ggg[:,g][:,i].reshape(1, -1), big_matrix).flatten() else: # unoptimized for g in range(ctx.groups): #'ikYX,ijYXyx -> kjyx' for i in range(ggg[:,g].shape[1]): for j in range(tx[:,g].shape[1]): for m in range(tx[:,g].shape[4]): big_matrix = tx[:,g][:,j, :, :, m].reshape(-1, tx[:,g].shape[5]) gdw[g][i, j, m] = 
cherry_matmul(ggg[:,g][:,i].reshape(1, -1), big_matrix).flatten() # needs to be optimized separately for large oy and ox, versus large ctx.groups gdx = np.zeros((bs,ctx.groups,cin,OY,OX), dtype=tx.dtype) for k in range(oy*ox): Y, X = k//ox, k%ox iY,iX = Y*ys, X*xs big_matrix = [] for g in range(ctx.groups): big_matrix.append(cherry_matmul(ggg[:,g,:,Y,X].reshape(bs, -1), tw[g].reshape(rcout, -1)).reshape((bs, cin, H, W))) gdx[:, :, :, iY:iY+H, iX:iX+W] = cherry_binop(gdx[:, :, :, iY:iY+H, iX:iX+W], np.array(np.transpose(big_matrix, (1, 0, 2, 3, 4))), BinaryOps.ADD) return gdx.reshape((bs, ctx.groups*cin, OY, OX)), gdw.reshape((ctx.groups*rcout, cin, H, W))
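
# A small numerical sanity check (illustrative sketch, not part of the
# original file): unbroadcast above implements the adjoint of broadcasting,
# i.e. gradients flowing into a broadcast input are summed over the expanded
# axes. The pure-numpy equivalent below mirrors that rule using only the np
# import from the top of this file.
if __name__ == "__main__":
    def np_unbroadcast(out, in_sh):
        # sum over every axis that was expanded from size 1 by broadcasting
        sum_axis = tuple(i for i in range(len(in_sh))
                         if in_sh[i] == 1 and out.shape[i] > 1)
        return out.sum(axis=sum_axis, keepdims=True).reshape(in_sh)

    g = np.ones((4, 3))  # gradient w.r.t. a result broadcast from shape (1, 3)
    assert np_unbroadcast(g, (1, 3)).tolist() == [[4.0, 4.0, 4.0]]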
[ [ [ 7, 18 ], [ 1375, 1377 ], [ 3917, 3919 ], [ 4955, 4957 ], [ 10204, 10206 ], [ 10683, 10685 ], [ 11370, 11372 ], [ 11763, 11765 ], [ 11772, 11774 ] ], [ [ 47, 55 ], [ 136, 144 ], [ 415, 423 ], [ 666, 674 ], [ 967, 975 ], [ 1423, 1431 ], [ 2782, 2790 ], [ 3075, 3083 ], [ 3369, 3377 ], [ 3641, 3649 ], [ 4011, 4019 ], [ 4400, 4408 ] ], [ [ 81, 82 ], [ 218, 229 ], [ 237, 245 ], [ 329, 341 ], [ 355, 366 ], [ 374, 382 ], [ 389, 398 ], [ 497, 508 ], [ 516, 524 ], [ 607, 619 ], [ 640, 649 ], [ 714, 725 ], [ 733, 741 ], [ 868, 880 ], [ 899, 908 ], [ 1066, 1081 ], [ 1089, 1098 ], [ 1334, 1346 ], [ 1397, 1406 ], [ 1607, 1622 ], [ 1628, 1637 ], [ 2217, 2232 ], [ 2239, 2248 ], [ 2324, 2336 ], [ 2337, 2349 ], [ 2384, 2393 ], [ 2405, 2414 ], [ 2710, 2725 ], [ 2731, 2740 ], [ 2874, 2886 ], [ 2893, 2902 ], [ 3167, 3179 ], [ 3186, 3195 ], [ 3449, 3461 ], [ 3468, 3477 ], [ 3721, 3733 ], [ 3740, 3749 ], [ 4109, 4122 ], [ 4228, 4241 ], [ 4299, 4312 ], [ 5640, 5651 ], [ 5652, 5656 ], [ 5712, 5723 ], [ 5724, 5728 ], [ 5770, 5789 ], [ 6770, 6772 ], [ 6842, 6844 ], [ 6893, 6903 ], [ 6904, 6907 ], [ 7020, 7030 ], [ 7031, 7034 ], [ 7069, 7073 ], [ 7151, 7153 ], [ 7166, 7168 ], [ 7246, 7256 ], [ 7257, 7260 ], [ 7297, 7301 ], [ 7362, 7364 ], [ 7370, 7372 ], [ 7403, 7415 ], [ 7466, 7477 ], [ 7478, 7481 ], [ 7510, 7514 ], [ 7587, 7589 ], [ 7602, 7604 ], [ 7844, 7846 ], [ 7941, 7943 ], [ 8020, 8030 ], [ 8031, 8034 ], [ 8085, 8087 ], [ 8106, 8116 ], [ 8117, 8120 ], [ 8153, 8157 ], [ 8233, 8235 ], [ 8249, 8251 ], [ 8278, 8288 ], [ 8289, 8292 ], [ 8327, 8331 ], [ 8393, 8395 ], [ 8410, 8412 ], [ 8440, 8452 ], [ 8469, 8480 ], [ 8481, 8484 ], [ 8513, 8517 ], [ 8594, 8596 ], [ 8610, 8612 ], [ 8778, 8780 ], [ 8854, 8856 ], [ 8939, 8949 ], [ 8950, 8953 ], [ 9006, 9008 ], [ 9188, 9198 ], [ 9199, 9202 ], [ 9241, 9245 ], [ 9346, 9348 ], [ 9361, 9363 ], [ 9396, 9406 ], [ 9407, 9410 ], [ 9451, 9455 ], [ 9561, 9563 ], [ 9578, 9580 ], [ 9614, 9626 ], [ 9645, 9656 ], [ 9657, 9660 ], [ 9691, 9695 ], [ 9792, 9794 ], [ 9807, 9809 ], [ 9825, 9844 ], [ 9905, 9916 ], [ 9917, 9921 ], [ 10804, 10817 ], [ 11209, 11222 ], [ 11581, 11594 ], [ 11718, 11730 ], [ 11816, 11825 ] ], [ [ 131, 135 ] ], [ [ 411, 414 ] ], [ [ 662, 665 ] ], [ [ 963, 966 ] ], [ [ 1419, 1422 ] ], [ [ 2467, 2478 ], [ 2995, 3006 ], [ 3030, 3041 ], [ 3288, 3299 ], [ 3323, 3334 ], [ 3557, 3568 ], [ 3594, 3605 ], [ 3829, 3840 ], [ 3896, 3907 ] ], [ [ 2778, 2781 ] ], [ [ 3071, 3074 ] ], [ [ 3365, 3368 ] ], [ [ 3637, 3640 ] ], [ [ 4004, 4010 ] ], [ [ 4393, 4399 ] ] ]
from __future__ import print_function, division from neuralnilm import Net, RealApplianceSource, BLSTMLayer, SubsampleLayer, DimshuffleLayer from lasagne.nonlinearities import sigmoid, rectify from lasagne.objectives import crossentropy from lasagne.init import Uniform, Normal from lasagne.layers import LSTMLayer, DenseLayer, Conv1DLayer, ReshapeLayer """ Setup: * in_to_cell init weights are now Normal(1.0) * output all appliances * fix bug in RealApplianceSource * use cross-entropy * smaller network * power targets * trying without first two sigmoid layers. * updated to craffel/nntools commit 097aca480d60fdfada513c20070f8132d71a26b0 which fixes LSTM bug. https://github.com/craffel/nntools/commit/097aca480d60fdfada513c20070f8132d71a26b0 * Subsampling *bidirectional* LSTM * Output every sequence in the batch * Change W_in_to_cell from Normal(1.0) to Uniform(5) * put back the two sigmoid layers * use Conv1D to create a hierarchical subsampling LSTM * Using LSTM (not BLSTM) to speed up training while testing * Use dimshuffle not reshape * 2 dense layers back * back to default init * conv between LSTMs. * More data * BLSTM * Try just using a 1D convnet on input * add second Convnet layer (not sure this is correct thing to do?) * third conv layer * large inits * back to 2 conv layers e70 * Based on e65 * Using sigmoid instead of rectify in Conv1D layers e71 * Larger layers * More data e72 * At a third conv layer e73 * Add a dense layer after 3 conv layers e74 * Removed dense layer after 3 conv layers (because it failed to learn anything) * Trying standard inits for weights and biases throughout network. e75 * Putting back large init for first layer e76 * Removed 3rd conv layer e77 * Try init Uniform(1) e78 * Back to large inits for first layers * Trying 3rd conv layer, also with large init Results """ source = RealApplianceSource( '/data/dk3810/ukdale.h5', ['fridge freezer', 'hair straighteners', 'television'], max_input_power=1000, max_appliance_powers=[300, 500, 200], window=("2013-06-01", "2014-07-01"), output_one_appliance=False, boolean_targets=False, min_on_duration=60, input_padding=8 ) net = Net( experiment_name="e78", source=source, learning_rate=1e-1, save_plot_interval=50, loss_function=crossentropy, layers_config=[ { 'type': DimshuffleLayer, 'pattern': (0, 2, 1) }, { 'type': Conv1DLayer, 'num_filters': 50, 'filter_length': 3, 'stride': 1, 'nonlinearity': sigmoid, 'W': Uniform(25), 'b': Uniform(25) }, { 'type': Conv1DLayer, 'num_filters': 50, 'filter_length': 3, 'stride': 1, 'nonlinearity': sigmoid, 'W': Uniform(10), 'b': Uniform(10) }, { 'type': Conv1DLayer, 'num_filters': 50, 'filter_length': 5, 'stride': 1, 'nonlinearity': sigmoid, 'W': Uniform(10), 'b': Uniform(10) }, { 'type': DimshuffleLayer, 'pattern': (0, 2, 1) }, { 'type': LSTMLayer, 'num_units': 80, 'W_in_to_cell': Uniform(5) }, { 'type': DenseLayer, 'num_units': source.n_outputs, 'nonlinearity': sigmoid } ] ) net.print_net() net.compile() net.fit()
[ [ [ 23, 37 ] ], [ [ 39, 47 ] ], [ [ 71, 74 ], [ 2185, 2188 ] ], [ [ 76, 95 ], [ 1856, 1875 ] ], [ [ 97, 107 ] ], [ [ 109, 123 ] ], [ [ 125, 140 ], [ 2369, 2384 ], [ 3174, 3189 ] ], [ [ 176, 183 ], [ 2589, 2596 ], [ 2827, 2834 ], [ 3065, 3072 ], [ 3468, 3475 ] ], [ [ 185, 192 ] ], [ [ 224, 236 ], [ 2305, 2317 ] ], [ [ 262, 269 ], [ 2615, 2622 ], [ 2645, 2652 ], [ 2853, 2860 ], [ 2883, 2890 ], [ 3091, 3098 ], [ 3121, 3128 ], [ 3333, 3340 ] ], [ [ 271, 277 ] ], [ [ 305, 314 ], [ 3265, 3274 ] ], [ [ 316, 326 ], [ 3385, 3395 ] ], [ [ 328, 339 ], [ 2460, 2471 ], [ 2698, 2709 ], [ 2936, 2947 ] ], [ [ 341, 353 ] ], [ [ 1847, 1853 ], [ 2228, 2234 ], [ 3422, 3428 ] ], [ [ 2179, 2182 ], [ 3495, 3498 ], [ 3511, 3514 ], [ 3525, 3528 ] ] ]
from terra_sdk.core.slashing import MsgUnjail def test_deserializes_msg_unjail_examples(load_msg_examples): examples = load_msg_examples(MsgUnjail.type, "./MsgUnjail.data.json") for example in examples: assert MsgUnjail.from_data(example).to_data() == example
[ [ [ 36, 45 ], [ 143, 152 ], [ 228, 237 ] ], [ [ 52, 89 ] ] ]
from flask_restful import abort, Resource from flask import request, g, session from flask.json import jsonify from whistle_server.models.user import User def verify_password(password, hashed): from werkzeug.security import check_password_hash return check_password_hash(hashed, password) class LoginEndpoint(Resource): def post(self): username = request.json.get('username') password = request.json.get('password') # wrong input if username is None or password is None: abort(418) user = User.find_by_username(username) # user doesn't exist if user is None: return abort(418) # wrong password if not verify_password(password, user.obj["password_hash"]): return abort(418) session["_session"] = str(user.obj['_id']) response = jsonify({ "user_id": str(user.obj["_id"]) }) response.status_code = 201 return response class CreateUserEndpoint(Resource): def post(self): username = request.json.get('username') password = request.json.get('password') # wrong input if username is None or password is None: print('username or password is None') abort(418) user = User.create(username, password) if user is None: print('User was None') abort(418) response = jsonify({}) response.status_code = 200 return response
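
# A minimal registration sketch (illustrative only -- the Api instance and
# the '/login' and '/users' routes are assumptions, not taken from the
# original application): these Resource classes are meant to be registered
# on a flask_restful Api, e.g.
#
#     from flask import Flask
#     from flask_restful import Api
#
#     app = Flask(__name__)
#     api = Api(app)
#     api.add_resource(LoginEndpoint, '/login')
#     api.add_resource(CreateUserEndpoint, '/users')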
[ [ [ 26, 31 ], [ 529, 534 ], [ 660, 665 ], [ 784, 789 ], [ 1284, 1289 ], [ 1414, 1419 ] ], [ [ 33, 41 ], [ 319, 327 ], [ 1024, 1032 ] ], [ [ 60, 67 ], [ 369, 376 ], [ 417, 424 ], [ 1074, 1081 ], [ 1122, 1129 ] ], [ [ 69, 70 ] ], [ [ 72, 79 ], [ 803, 810 ] ], [ [ 103, 110 ], [ 865, 872 ], [ 1444, 1451 ] ], [ [ 150, 154 ], [ 555, 559 ], [ 1310, 1314 ] ], [ [ 160, 175 ], [ 711, 726 ] ], [ [ 305, 318 ] ], [ [ 1005, 1023 ] ] ]
# -*- coding: utf-8 -*- """ Defines the unit tests for the :mod:`colour.models.cam16_ucs` module. """ import unittest from colour.models.tests.test_cam02_ucs import ( TestJMh_CIECAM02_to_UCS_Luo2006, TestUCS_Luo2006_to_JMh_CIECAM02, TestXYZ_to_UCS_Luo2006, TestUCS_Luo2006_to_XYZ, ) __author__ = 'Colour Developers' __copyright__ = 'Copyright (C) 2013-2021 - Colour Developers' __license__ = 'New BSD License - https://opensource.org/licenses/BSD-3-Clause' __maintainer__ = 'Colour Developers' __email__ = 'colour-developers@colour-science.org' __status__ = 'Production' __all__ = [ 'TestJMh_CAM16_to_UCS_Li2017', 'TestUCS_Li2017_to_JMh_CAM16', 'TestXYZ_to_UCS_Li2017', 'TestUCS_Li2017_to_XYZ', ] class TestJMh_CAM16_to_UCS_Li2017(TestJMh_CIECAM02_to_UCS_Luo2006): """ Defines :func:`colour.models.cam16_ucs.JMh_CAM16_to_UCS_Li2017` definition unit tests methods. Notes ----- - :func:`colour.models.cam16_ucs.JMh_CAM16_to_UCS_Li2017` is a wrapper of :func:`colour.models.cam02_ucs.JMh_CIECAM02_to_UCS_Luo2006` and thus currently adopts the same unittests. """ class TestUCS_Li2017_to_JMh_CAM16(TestUCS_Luo2006_to_JMh_CIECAM02): """ Defines :func:`colour.models.cam16_ucs.UCS_Li2017_to_JMh_CAM16` definition unit tests methods. Notes ----- - :func:`colour.models.cam16_ucs.UCS_Li2017_to_JMh_CAM16` is a wrapper of :func:`colour.models.cam02_ucs.UCS_Luo2006_to_JMh_CIECAM02` and thus currently adopts the same unittests. """ class TestXYZ_to_UCS_Li2017(TestXYZ_to_UCS_Luo2006): """ Defines :func:`colour.models.cam16_ucs.XYZ_to_UCS_Li2017` definition unit tests methods. """ pass class TestUCS_Li2017_to_XYZ(TestUCS_Luo2006_to_XYZ): """ Defines :func:`colour.models.cam16_ucs.UCS_Li2017_to_XYZ` definition unit tests methods. """ pass if __name__ == '__main__': unittest.main()
[ [ [ 110, 118 ], [ 1946, 1954 ] ], [ [ 173, 204 ], [ 769, 800 ] ], [ [ 210, 241 ], [ 1181, 1212 ] ], [ [ 247, 269 ], [ 1587, 1609 ] ], [ [ 275, 297 ], [ 1765, 1787 ] ], [ [ 302, 312 ] ], [ [ 335, 348 ] ], [ [ 397, 408 ] ], [ [ 476, 490 ] ], [ [ 513, 522 ] ], [ [ 564, 574 ] ], [ [ 591, 598 ] ], [ [ 741, 768 ] ], [ [ 1153, 1180 ] ], [ [ 1565, 1586 ] ], [ [ 1743, 1764 ] ] ]
""" This file offers the methods to automatically retrieve the graph Dictyostelium discoideum. The graph is automatically retrieved from the STRING repository. Report --------------------- At the time of rendering these methods (please see datetime below), the graph had the following characteristics: Datetime: 2021-02-02 18:15:05.559120 The undirected graph Dictyostelium discoideum has 10127 nodes and 1406097 weighted edges, of which none are self-loops. The graph is dense as it has a density of 0.02742 and has 103 connected components, where the component with most nodes has 9898 nodes and the component with the least nodes has 2 nodes. The graph median node degree is 167, the mean node degree is 277.69, and the node degree mode is 1. The top 5 most central nodes are 44689.DDB0232950 (degree 2470), 44689.DDB0219986 (degree 2400), 44689.DDB0235316 (degree 2050), 44689.DDB0191503 (degree 2034) and 44689.DDB0235320 (degree 2018). References --------------------- Please cite the following if you use the data: @article{szklarczyk2019string, title={STRING v11: protein--protein association networks with increased coverage, supporting functional discovery in genome-wide experimental datasets}, author={Szklarczyk, Damian and Gable, Annika L and Lyon, David and Junge, Alexander and Wyder, Stefan and Huerta-Cepas, Jaime and Simonovic, Milan and Doncheva, Nadezhda T and Morris, John H and Bork, Peer and others}, journal={Nucleic acids research}, volume={47}, number={D1}, pages={D607--D613}, year={2019}, publisher={Oxford University Press} } Usage example ---------------------- The usage of this graph is relatively straightforward: .. code:: python # First import the function to retrieve the graph from the datasets from ensmallen_graph.datasets.string import DictyosteliumDiscoideum # Then load the graph graph = DictyosteliumDiscoideum() # Finally, you can do anything with it, for instance, compute its report: print(graph) # If you need to run a link prediction task with validation, # you can split the graph using a connected holdout as follows: train_graph, validation_graph = graph.connected_holdout( # You can use an 80/20 split the holdout, for example. train_size=0.8, # The random state is used to reproduce the holdout. random_state=42, # Wether to show a loading bar. verbose=True ) # Remember that, if you need, you can enable the memory-time trade-offs: train_graph.enable( vector_sources=True, vector_destinations=True, vector_outbounds=True ) # Consider using the methods made available in the Embiggen package # to run graph embedding or link prediction tasks. """ from typing import Dict from ..automatic_graph_retrieval import AutomaticallyRetrievedGraph from ...ensmallen_graph import EnsmallenGraph # pylint: disable=import-error def DictyosteliumDiscoideum( directed: bool = False, verbose: int = 2, cache_path: str = "graphs/string", **additional_graph_kwargs: Dict ) -> EnsmallenGraph: """Return new instance of the Dictyostelium discoideum graph. The graph is automatically retrieved from the STRING repository. Parameters ------------------- directed: bool = False, Wether to load the graph as directed or undirected. By default false. verbose: int = 2, Wether to show loading bars during the retrieval and building of the graph. cache_path: str = "graphs", Where to store the downloaded graphs. additional_graph_kwargs: Dict, Additional graph kwargs. Returns ----------------------- Instace of Dictyostelium discoideum graph. 
    Report
    ---------------------
    At the time of rendering these methods (please see datetime below), the graph
    had the following characteristics:

    Datetime: 2021-02-02 18:15:05.559120

    The undirected graph Dictyostelium discoideum has 10127 nodes and 1406097
    weighted edges, of which none are self-loops. The graph is dense as it
    has a density of 0.02742 and has 103 connected components, where the component
    with most nodes has 9898 nodes and the component with the least nodes has 2
    nodes. The graph median node degree is 167, the mean node degree is 277.69,
    and the node degree mode is 1. The top 5 most central nodes are
    44689.DDB0232950 (degree 2470), 44689.DDB0219986 (degree 2400),
    44689.DDB0235316 (degree 2050), 44689.DDB0191503 (degree 2034) and
    44689.DDB0235320 (degree 2018).

    References
    ---------------------
    Please cite the following if you use the data:

    @article{szklarczyk2019string,
        title={STRING v11: protein--protein association networks with increased coverage, supporting functional discovery in genome-wide experimental datasets},
        author={Szklarczyk, Damian and Gable, Annika L and Lyon, David and Junge, Alexander and Wyder, Stefan and Huerta-Cepas, Jaime and Simonovic, Milan and Doncheva, Nadezhda T and Morris, John H and Bork, Peer and others},
        journal={Nucleic acids research},
        volume={47},
        number={D1},
        pages={D607--D613},
        year={2019},
        publisher={Oxford University Press}
    }

    Usage example
    ----------------------
    The usage of this graph is relatively straightforward:

    .. code:: python

        # First import the function to retrieve the graph from the datasets
        from ensmallen_graph.datasets.string import DictyosteliumDiscoideum

        # Then load the graph
        graph = DictyosteliumDiscoideum()

        # Finally, you can do anything with it, for instance, compute its report:
        print(graph)

        # If you need to run a link prediction task with validation,
        # you can split the graph using a connected holdout as follows:
        train_graph, validation_graph = graph.connected_holdout(
            # You can use an 80/20 split for the holdout, for example.
            train_size=0.8,
            # The random state is used to reproduce the holdout.
            random_state=42,
            # Whether to show a loading bar.
            verbose=True
        )

        # Remember that, if you need, you can enable the memory-time trade-offs:
        train_graph.enable(
            vector_sources=True,
            vector_destinations=True,
            vector_outbounds=True
        )

        # Consider using the methods made available in the Embiggen package
        # to run graph embedding or link prediction tasks.
    """
    return AutomaticallyRetrievedGraph(
        graph_name="DictyosteliumDiscoideum",
        dataset="string",
        directed=directed,
        verbose=verbose,
        cache_path=cache_path,
        additional_graph_kwargs=additional_graph_kwargs
    )()
[ [ [ 2803, 2807 ], [ 3106, 3110 ] ], [ [ 2849, 2876 ], [ 6475, 6502 ] ], [ [ 2908, 2922 ], [ 3116, 3130 ] ], [ [ 2961, 2984 ] ] ]
import importlib.util import os import stat import typing from email.utils import parsedate import anyio from starlette.datastructures import URL, Headers from starlette.exceptions import HTTPException from starlette.responses import FileResponse, RedirectResponse, Response from starlette.types import Receive, Scope, Send PathLike = typing.Union[str, "os.PathLike[str]"] class NotModifiedResponse(Response): NOT_MODIFIED_HEADERS = ( "cache-control", "content-location", "date", "etag", "expires", "vary", ) def __init__(self, headers: Headers): super().__init__( status_code=304, headers={ name: value for name, value in headers.items() if name in self.NOT_MODIFIED_HEADERS }, ) class StaticFiles: def __init__( self, *, directory: PathLike = None, packages: typing.List[str] = None, html: bool = False, check_dir: bool = True, ) -> None: self.directory = directory self.packages = packages self.all_directories = self.get_directories(directory, packages) self.html = html self.config_checked = False if check_dir and directory is not None and not os.path.isdir(directory): raise RuntimeError(f"Directory '{directory}' does not exist") def get_directories( self, directory: PathLike = None, packages: typing.List[str] = None ) -> typing.List[PathLike]: """ Given `directory` and `packages` arguments, return a list of all the directories that should be used for serving static files from. """ directories = [] if directory is not None: directories.append(directory) for package in packages or []: spec = importlib.util.find_spec(package) assert spec is not None, f"Package {package!r} could not be found." assert ( spec.origin is not None ), f"Directory 'statics' in package {package!r} could not be found." package_directory = os.path.normpath( os.path.join(spec.origin, "..", "statics") ) assert os.path.isdir( package_directory ), f"Directory 'statics' in package {package!r} could not be found." directories.append(package_directory) return directories async def __call__(self, scope: Scope, receive: Receive, send: Send) -> None: """ The ASGI entry point. """ assert scope["type"] == "http" if not self.config_checked: await self.check_config() self.config_checked = True path = self.get_path(scope) response = await self.get_response(path, scope) await response(scope, receive, send) def get_path(self, scope: Scope) -> str: """ Given the ASGI scope, return the `path` string to serve up, with OS specific path seperators, and any '..', '.' components removed. """ return os.path.normpath(os.path.join(*scope["path"].split("/"))) async def get_response(self, path: str, scope: Scope) -> Response: """ Returns an HTTP response, given the incoming path, method and request headers. """ if scope["method"] not in ("GET", "HEAD"): raise HTTPException(status_code=405) try: full_path, stat_result = await anyio.to_thread.run_sync( self.lookup_path, path ) except PermissionError: raise HTTPException(status_code=401) except OSError: raise if stat_result and stat.S_ISREG(stat_result.st_mode): # We have a static file to serve. return self.file_response(full_path, stat_result, scope) elif stat_result and stat.S_ISDIR(stat_result.st_mode) and self.html: # We're in HTML mode, and have got a directory URL. # Check if we have 'index.html' file to serve. index_path = os.path.join(path, "index.html") full_path, stat_result = await anyio.to_thread.run_sync( self.lookup_path, index_path ) if stat_result is not None and stat.S_ISREG(stat_result.st_mode): if not scope["path"].endswith("/"): # Directory URLs should redirect to always end in "/". 
url = URL(scope=scope) url = url.replace(path=url.path + "/") return RedirectResponse(url=url) return self.file_response(full_path, stat_result, scope) if self.html: # Check for '404.html' if we're in HTML mode. full_path, stat_result = await anyio.to_thread.run_sync( self.lookup_path, "404.html" ) if stat_result and stat.S_ISREG(stat_result.st_mode): return FileResponse( full_path, stat_result=stat_result, method=scope["method"], status_code=404, ) raise HTTPException(status_code=404) def lookup_path( self, path: str ) -> typing.Tuple[str, typing.Optional[os.stat_result]]: for directory in self.all_directories: full_path = os.path.realpath(os.path.join(directory, path)) directory = os.path.realpath(directory) if os.path.commonprefix([full_path, directory]) != directory: # Don't allow misbehaving clients to break out of the static files # directory. continue try: return full_path, os.stat(full_path) except (FileNotFoundError, NotADirectoryError): continue return "", None def file_response( self, full_path: PathLike, stat_result: os.stat_result, scope: Scope, status_code: int = 200, ) -> Response: method = scope["method"] request_headers = Headers(scope=scope) response = FileResponse( full_path, status_code=status_code, stat_result=stat_result, method=method ) if self.is_not_modified(response.headers, request_headers): return NotModifiedResponse(response.headers) return response async def check_config(self) -> None: """ Perform a one-off configuration check that StaticFiles is actually pointed at a directory, so that we can raise loud errors rather than just returning 404 responses. """ if self.directory is None: return try: stat_result = await anyio.to_thread.run_sync(os.stat, self.directory) except FileNotFoundError: raise RuntimeError( f"StaticFiles directory '{self.directory}' does not exist." ) if not (stat.S_ISDIR(stat_result.st_mode) or stat.S_ISLNK(stat_result.st_mode)): raise RuntimeError( f"StaticFiles path '{self.directory}' is not a directory." ) def is_not_modified( self, response_headers: Headers, request_headers: Headers ) -> bool: """ Given the request and response headers, return `True` if an HTTP "Not Modified" response could be returned instead. """ try: if_none_match = request_headers["if-none-match"] etag = response_headers["etag"] if if_none_match == etag: return True except KeyError: pass try: if_modified_since = parsedate(request_headers["if-modified-since"]) last_modified = parsedate(response_headers["last-modified"]) if ( if_modified_since is not None and last_modified is not None and if_modified_since >= last_modified ): return True except KeyError: pass return False
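
# A minimal mounting sketch (illustrative only -- the directory name and
# route path are assumptions): StaticFiles is normally mounted as an ASGI
# sub-application, e.g.
#
#     from starlette.applications import Starlette
#     from starlette.routing import Mount
#
#     app = Starlette(routes=[
#         Mount('/static', app=StaticFiles(directory='static'), name='static'),
#     ])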
[ [ [ 7, 21 ], [ 1888, 1897 ] ], [ [ 29, 31 ], [ 1322, 1324 ], [ 2176, 2178 ], [ 2210, 2212 ], [ 2286, 2288 ], [ 3155, 3157 ], [ 3172, 3174 ], [ 4160, 4162 ], [ 5375, 5377 ], [ 5464, 5466 ], [ 5481, 5483 ], [ 5536, 5538 ], [ 5579, 5581 ], [ 5826, 5828 ], [ 6042, 6044 ], [ 6873, 6875 ] ], [ [ 39, 43 ], [ 3783, 3787 ], [ 3963, 3967 ], [ 4364, 4368 ], [ 4994, 4998 ], [ 7070, 7074 ], [ 7107, 7111 ] ], [ [ 51, 57 ], [ 338, 344 ], [ 965, 971 ], [ 1533, 1539 ], [ 1500, 1506 ], [ 5341, 5347 ], [ 5359, 5365 ] ], [ [ 82, 91 ], [ 7799, 7808 ], [ 7875, 7884 ] ], [ [ 100, 105 ], [ 3553, 3558 ], [ 4236, 4241 ], [ 4878, 4883 ], [ 6848, 6853 ] ], [ [ 144, 147 ], [ 4552, 4555 ] ], [ [ 149, 156 ], [ 603, 610 ], [ 6190, 6197 ], [ 7322, 7329 ], [ 7348, 7355 ] ], [ [ 190, 203 ], [ 3465, 3478 ], [ 3682, 3695 ], [ 5255, 5268 ] ], [ [ 236, 248 ], [ 5052, 5064 ], [ 6231, 6243 ] ], [ [ 250, 266 ], [ 4655, 4671 ] ], [ [ 268, 276 ], [ 404, 412 ], [ 3275, 3283 ], [ 6121, 6129 ] ], [ [ 305, 312 ], [ 2547, 2554 ] ], [ [ 314, 319 ], [ 2531, 2536 ], [ 2953, 2958 ], [ 3265, 3270 ], [ 6073, 6078 ] ], [ [ 321, 325 ], [ 2562, 2566 ] ], [ [ 327, 335 ], [ 930, 938 ], [ 1545, 1553 ], [ 1473, 1481 ], [ 6011, 6019 ] ], [ [ 384, 403 ], [ 6429, 6448 ] ], [ [ 855, 866 ] ] ]
# Copyright (c) 2014 Johns Hopkins University Applied Physics Laboratory # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. import abc from oslo_config import cfg import six from stevedore import named from barbican.common import config from barbican.common import exception from barbican.common import utils from barbican import i18n as u from barbican.plugin.util import multiple_backends from barbican.plugin.util import utils as plugin_utils _SECRET_STORE = None CONF = config.new_config() DEFAULT_PLUGIN_NAMESPACE = 'barbican.secretstore.plugin' DEFAULT_PLUGINS = ['store_crypto'] store_opt_group = cfg.OptGroup(name='secretstore', title='Secret Store Plugin Options') store_opts = [ cfg.StrOpt('namespace', default=DEFAULT_PLUGIN_NAMESPACE, help=u._('Extension namespace to search for plugins.') ), cfg.MultiStrOpt('enabled_secretstore_plugins', default=DEFAULT_PLUGINS, help=u._('List of secret store plugins to load.') ), cfg.BoolOpt('enable_multiple_secret_stores', default=False, help=u._('Flag to enable multiple secret store plugin' ' backend support. Default is False') ), cfg.ListOpt('stores_lookup_suffix', help=u._('List of suffix to use for looking up plugins which ' 'are supported with multiple backend support.') ) ] CONF.register_group(store_opt_group) CONF.register_opts(store_opts, group=store_opt_group) config.parse_args(CONF) config.set_module_config("secretstore", CONF) def list_opts(): yield store_opt_group, store_opts class SecretStorePluginNotFound(exception.BarbicanHTTPException): """Raised when no plugins are installed.""" client_message = u._("No plugin was found that could support your request") status_code = 400 def __init__(self, plugin_name=None): if plugin_name: message = u._('Secret store plugin "{name}"' ' not found.').format(name=plugin_name) else: message = u._("Secret store plugin not found.") super(SecretStorePluginNotFound, self).__init__(message) class SecretStoreSupportedPluginNotFound(exception.BarbicanHTTPException): """Raised when no secret store supported plugin is found.""" client_message = u._("Secret store supported plugin not found.") status_code = 400 def __init__(self, key_spec): message = u._("Could not find a secret store plugin for storing " "secret with algorithm '{alg}' and bit-length " "'{len}'.").format(alg=key_spec.alg, len=key_spec.bit_length) super(SecretStoreSupportedPluginNotFound, self).__init__( message) class SecretGenerateSupportedPluginNotFound(exception.BarbicanHTTPException): """Raised when no secret generate supported plugin is found.""" client_message = u._("Secret generate supported plugin not found.") status_code = 400 def __init__(self, key_spec): message = u._("Could not find a secret store plugin for generating " "secret with algorithm '{alg}' and bit-length " "'{len}'.").format(alg=key_spec.alg, len=key_spec.bit_length) super(SecretGenerateSupportedPluginNotFound, self).__init__( message) class SecretContentTypeNotSupportedException(exception.BarbicanHTTPException): """Raised when support for payload content type is not available.""" status_code 
= 400 def __init__(self, content_type): super(SecretContentTypeNotSupportedException, self).__init__( u._("A Content-Type of '{content_type}' for secrets is " "not supported").format( content_type=content_type) ) self.content_type = content_type self.client_message = u._( "content-type of '{content_type}' not supported").format( content_type=content_type) class SecretContentEncodingNotSupportedException( exception.BarbicanHTTPException): """Raised when support for payload content encoding is not available.""" status_code = 400 def __init__(self, content_encoding): super(SecretContentEncodingNotSupportedException, self).__init__( u._("Secret Content-Encoding of '{content_encoding}' " "not supported").format( content_encoding=content_encoding) ) self.content_encoding = content_encoding self.client_message = u._( "content-encoding of '{content_encoding}' not supported").format( content_encoding=content_encoding) class SecretNoPayloadProvidedException(exception.BarbicanException): """Raised when secret information is not provided.""" def __init__(self): super(SecretNoPayloadProvidedException, self).__init__( u._('No secret information provided to encrypt.') ) class SecretContentEncodingMustBeBase64(exception.BarbicanHTTPException): """Raised when encoding must be base64.""" client_message = u._("Text-based binary secret payloads must " "specify a content-encoding of 'base64'") status_code = 400 def __init__(self): super(SecretContentEncodingMustBeBase64, self).__init__( u._("Encoding type must be 'base64' for text-based payloads.") ) class SecretGeneralException(exception.BarbicanException): """Raised when a system fault has occurred.""" def __init__(self, reason=u._('Unknown')): super(SecretGeneralException, self).__init__( u._('Problem seen during crypto processing - ' 'Reason: {reason}').format(reason=reason) ) self.reason = reason class SecretPayloadDecodingError(exception.BarbicanHTTPException): """Raised when payload could not be decoded.""" client_message = u._("Problem decoding payload") status_code = 400 def __init__(self): super(SecretPayloadDecodingError, self).__init__( u._("Problem decoding payload") ) class SecretAcceptNotSupportedException(exception.BarbicanHTTPException): """Raised when requested decrypted content-type is not available.""" client_message = u._("Wrong payload content-type") status_code = 406 def __init__(self, accept): super(SecretAcceptNotSupportedException, self).__init__( u._("Secret Accept of '{accept}' not supported").format( accept=accept) ) self.accept = accept class SecretNotFoundException(exception.BarbicanHTTPException): """Raised when secret information could not be located.""" client_message = u._("Not Found. 
Sorry but your secret is in another " "castle") status_code = 404 def __init__(self): super(SecretNotFoundException, self).__init__( u._('No secret information found')) class SecretAlgorithmNotSupportedException(exception.BarbicanHTTPException): """Raised when support for an algorithm is not available.""" client_message = u._("Requested algorithm is not supported") status_code = 400 def __init__(self, algorithm): super(SecretAlgorithmNotSupportedException, self).__init__( u._("Secret algorithm of '{algorithm}' not supported").format( algorithm=algorithm) ) self.algorithm = algorithm class GeneratePassphraseNotSupportedException(exception.BarbicanHTTPException): """Raised when generating keys encrypted by passphrase is not supported.""" client_message = ( u._("Generating keys encrypted with passphrases is not supported") ) status_code = 400 def __init__(self): super(GeneratePassphraseNotSupportedException, self).__init__( self.client_message ) class SecretStorePluginsNotConfigured(exception.BarbicanException): """Raised when there are no secret store plugins configured.""" def __init__(self): super(SecretStorePluginsNotConfigured, self).__init__( u._('No secret store plugins have been configured') ) class StorePluginNotAvailableOrMisconfigured(exception.BarbicanException): """Raised when a plugin that was previously used can not be found.""" def __init__(self, plugin_name): super(StorePluginNotAvailableOrMisconfigured, self).__init__( u._("The requested Store Plugin {plugin_name} is not " "currently available. This is probably a server " "misconfiguration.").format( plugin_name=plugin_name) ) self.plugin_name = plugin_name class SecretType(object): """Constant to define the symmetric key type. Used by getSecret to retrieve a symmetric key. """ SYMMETRIC = "symmetric" """Constant to define the public key type. Used by getSecret to retrieve a public key. """ PUBLIC = "public" """Constant to define the private key type. Used by getSecret to retrieve a private key. """ PRIVATE = "private" """Constant to define the passphrase type. Used by getSecret to retrieve a passphrase.""" PASSPHRASE = "passphrase" # nosec """Constant to define the certificate type. Used by getSecret to retrieve a certificate.""" CERTIFICATE = "certificate" """Constant to define the opaque date type. Used by getSecret to retrieve opaque data. Opaque data can be any kind of data. This data type signals to Barbican to just store the information and do not worry about the format or encoding. 
This is the default type if no type is specified by the user.""" OPAQUE = utils.SECRET_TYPE_OPAQUE class KeyAlgorithm(object): """Constant for the Diffie Hellman algorithm.""" DIFFIE_HELLMAN = "diffie_hellman" """Constant for the DSA algorithm.""" DSA = "dsa" """Constant for the RSA algorithm.""" RSA = "rsa" """Constant for the Elliptic Curve algorithm.""" EC = "ec" """Constant for the HMACSHA1 algorithm.""" HMACSHA1 = "hmacsha1" """Constant for the HMACSHA256 algorithm.""" HMACSHA256 = "hmacsha256" """Constant for the HMACSHA384 algorithm.""" HMACSHA384 = "hmacsha384" """Constant for the HMACSHA512 algorithm.""" HMACSHA512 = "hmacsha512" """List of asymmetric algorithms""" ASYMMETRIC_ALGORITHMS = [DIFFIE_HELLMAN, DSA, RSA, EC] """Constant for the AES algorithm.""" AES = "aes" """Constant for the DES algorithm.""" DES = "des" """Constant for the DESede (triple-DES) algorithm.""" DESEDE = "desede" """List of symmetric algorithms""" SYMMETRIC_ALGORITHMS = [AES, DES, DESEDE, HMACSHA1, HMACSHA256, HMACSHA384, HMACSHA512] class KeySpec(object): """This object specifies the algorithm and bit length for a key.""" def __init__(self, alg=None, bit_length=None, mode=None, passphrase=None): """Creates a new KeySpec. :param alg:algorithm for the key :param bit_length:bit length of the key :param mode:algorithm mode for the key :param passphrase:passphrase for the private_key """ self.alg = alg self.bit_length = bit_length self.mode = mode # TODO(john-wood-w) Paul, is 'mode' required? self.passphrase = passphrase class SecretDTO(object): """This object is a secret data transfer object (DTO). This object encapsulates a key and attributes about the key. The attributes include a KeySpec that contains the algorithm and bit length. The attributes also include information on the encoding of the key. """ # TODO(john-wood-w) Remove 'content_type' once secret normalization work is # completed. def __init__(self, type, secret, key_spec, content_type, transport_key=None): """Creates a new SecretDTO. The secret is stored in the secret parameter. In the future this DTO may include compression and key wrapping information. :param type: SecretType for secret :param secret: secret, as a base64-encoded string :param key_spec: KeySpec key specifications :param content_type: Content type of the secret, one of MIME types such as 'text/plain' or 'application/octet-stream' :param transport_key: presence of this parameter indicates that the secret has been encrypted using a transport key. The transport key is a base64 encoded x509 transport certificate. """ self.type = type or SecretType.OPAQUE self.secret = secret self.key_spec = key_spec self.content_type = content_type self.transport_key = transport_key class AsymmetricKeyMetadataDTO(object): """This DTO encapsulates metadata(s) for asymmetric key components. These components are private_key_meta, public_key_meta and passphrase_meta. """ def __init__(self, private_key_meta=None, public_key_meta=None, passphrase_meta=None): """Constructor for AsymmetricKeyMetadataDTO :param private_key_meta: private key metadata :param public_key_meta: public key metadata :param passphrase_meta: passphrase key metadata """ self.private_key_meta = private_key_meta self.public_key_meta = public_key_meta self.passphrase_meta = passphrase_meta @six.add_metaclass(abc.ABCMeta) class SecretStoreBase(object): @abc.abstractmethod def get_plugin_name(self): """Gets user friendly plugin name. This plugin name is expected to be read from config file. 
There will be a default defined for plugin name which can be customized in specific deployment if needed. This name needs to be unique across a deployment. """ raise NotImplementedError # pragma: no cover @abc.abstractmethod def generate_symmetric_key(self, key_spec): """Generate a new symmetric key and store it. Generates a new symmetric key and stores it in the secret store. A dictionary is returned that contains metadata about the newly created symmetric key. The dictionary of metadata is stored by Barbican and passed into other methods to aid the plugins. This can be useful for plugins that generate a unique ID in the external data store and use it to retrieve the key in the future. The returned dictionary may be empty if the SecretStore does not require it. :param key_spec: KeySpec that contains details on the type of key to generate :returns: an optional dictionary containing metadata about the key """ raise NotImplementedError # pragma: no cover @abc.abstractmethod def generate_asymmetric_key(self, key_spec): """Generate a new asymmetric key pair and store it. Generates a new asymmetric key pair and stores it in the secret store. An object of type AsymmetricKeyMetadataDTO will be returned containing attributes of metadata for newly created key pairs. The metadata is stored by Barbican and passed into other methods to aid the plugins. This can be useful for plugins that generate a unique ID in the external data store and use it to retrieve the key pairs in the future. :param key_spec: KeySpec that contains details on the type of key to generate :returns: An object of type AsymmetricKeyMetadataDTO containing metadata about the key pair. """ raise NotImplementedError # pragma: no cover @abc.abstractmethod def store_secret(self, secret_dto): """Stores a key. The SecretDTO contains the bytes of the secret and properties of the secret. The SecretStore retrieves the secret bytes, stores them, and returns a dictionary of metadata about the secret. This can be useful for plugins that generate a unique ID in the external data store and use it to retrieve the secret in the future. The returned dictionary may be empty if the SecretStore does not require it. :param secret_dto: SecretDTO for secret :returns: an optional dictionary containing metadata about the secret """ raise NotImplementedError # pragma: no cover @abc.abstractmethod def get_secret(self, secret_type, secret_metadata): """Retrieves a secret from the secret store. Retrieves a secret from the secret store and returns a SecretDTO that contains the secret. The secret_metadata parameter is the metadata returned from one of the generate or store methods. This data is used by the plugins to retrieve the key. The secret_type parameter may be useful for secret stores to know the expected format of the secret. For instance if the type is SecretDTO.PRIVATE then a PKCS8 structure is returned. This way secret stores do not need to manage the secret type on their own. :param secret_type: secret type :param secret_metadata: secret metadata :returns: SecretDTO that contains secret """ raise NotImplementedError # pragma: no cover @abc.abstractmethod def generate_supports(self, key_spec): """Returns a boolean indicating if the secret type is supported. This checks if the algorithm and bit length are supported by the generate methods. This is useful to call before calling generate_symmetric_key or generate_asymetric_key to see if the key type is supported before trying to generate it. 
:param key_spec: KeySpec that contains details on the algorithm and bit length :returns: boolean indicating if the algorithm is supported """ raise NotImplementedError # pragma: no cover @abc.abstractmethod def delete_secret(self, secret_metadata): """Deletes a secret from the secret store. Deletes a secret from a secret store. It can no longer be referenced after this call. :param secret_metadata: secret_metadata """ raise NotImplementedError # pragma: no cover @abc.abstractmethod def store_secret_supports(self, key_spec): """Returns a boolean indicating if the secret can be stored. Checks if the secret store can store the secret, give the attributes of the secret in the KeySpec. For example, some plugins may need to know the attributes in order to store the secret, but other plugins may be able to store the secret as a blob if no attributes are given. :param key_spec: KeySpec for the secret :returns: a boolean indicating if the secret can be stored """ raise NotImplementedError # pragma: no cover def get_transport_key(self): """Gets a transport key. Returns the current valid transport key associated with this plugin. The transport key is expected to be a base64 encoded x509 certificate containing a public key. Admins are responsible for deleting old keys from the database using the DELETE method on the TransportKey resource. By default, returns None. Plugins that support transport key wrapping should override this method. """ return None def is_transport_key_current(self, transport_key): """Determines if the provided transport key is the current valid key Returns true if the transport key is the current valid transport key. If the key is not valid, then barbican core will request a new transport key from the plugin. Returns False by default. Plugins that support transport key wrapping should override this method. """ return False def _enforce_extensions_configured(plugin_related_function): def _check_plugins_configured(self, *args, **kwargs): if not self.extensions: raise SecretStorePluginsNotConfigured() return plugin_related_function(self, *args, **kwargs) return _check_plugins_configured class SecretStorePluginManager(named.NamedExtensionManager): def __init__(self, conf=CONF, invoke_args=(), invoke_kwargs={}): ss_conf = config.get_module_config('secretstore') plugin_names = self._get_internal_plugin_names(ss_conf) super(SecretStorePluginManager, self).__init__( ss_conf.secretstore.namespace, plugin_names, invoke_on_load=False, # Defer creating plugins to utility below. invoke_args=invoke_args, invoke_kwds=invoke_kwargs, name_order=True # extensions sorted as per order of plugin names ) plugin_utils.instantiate_plugins(self, invoke_args, invoke_kwargs) multiple_backends.sync_secret_stores(self) @_enforce_extensions_configured def get_plugin_store(self, key_spec, plugin_name=None, transport_key_needed=False, project_id=None): """Gets a secret store plugin. :param: plugin_name: set to plugin_name to get specific plugin :param: key_spec: KeySpec of key that will be stored :param: transport_key_needed: set to True if a transport key is required. 
:returns: SecretStoreBase plugin implementation """ active_plugins = multiple_backends.get_applicable_store_plugins( self, project_id=project_id, existing_plugin_name=plugin_name) if plugin_name is not None: for plugin in active_plugins: if utils.generate_fullname_for(plugin) == plugin_name: return plugin raise SecretStorePluginNotFound(plugin_name) if not transport_key_needed: for plugin in active_plugins: if plugin.store_secret_supports(key_spec): return plugin else: for plugin in active_plugins: if (plugin.get_transport_key() is not None and plugin.store_secret_supports(key_spec)): return plugin raise SecretStoreSupportedPluginNotFound(key_spec) @_enforce_extensions_configured def get_plugin_retrieve_delete(self, plugin_name): """Gets a secret retrieve/delete plugin. If this function is being called, it is because we are trying to retrieve or delete an already stored secret. Thus, the plugin name is actually gotten from the plugin metadata that has already been stored in the database. So, in this case, if this plugin is not available, this might be due to a server misconfiguration. :returns: SecretStoreBase plugin implementation :raises: StorePluginNotAvailableOrMisconfigured: If the plugin wasn't found it's because the plugin parameters were not properly configured on the database side. """ for plugin in plugin_utils.get_active_plugins(self): if utils.generate_fullname_for(plugin) == plugin_name: return plugin raise StorePluginNotAvailableOrMisconfigured(plugin_name) @_enforce_extensions_configured def get_plugin_generate(self, key_spec, project_id=None): """Gets a secret generate plugin. :param key_spec: KeySpec that contains details on the type of key to generate :returns: SecretStoreBase plugin implementation """ active_plugins = multiple_backends.get_applicable_store_plugins( self, project_id=project_id, existing_plugin_name=None) for plugin in active_plugins: if plugin.generate_supports(key_spec): return plugin raise SecretGenerateSupportedPluginNotFound(key_spec) def _get_internal_plugin_names(self, secretstore_conf): """Gets plugin names used for loading via stevedore. When multiple secret store support is enabled, then secret store plugin names are read via updated configuration structure. If not enabled, then it reads MultiStr property in 'secretstore' config section. """ # to cache default global secret store value on first use self.global_default_store_dict = None if utils.is_multiple_backends_enabled(): self.parsed_stores = multiple_backends.\ read_multiple_backends_config() plugin_names = [store.store_plugin for store in self.parsed_stores if store.store_plugin] else: plugin_names = secretstore_conf.secretstore.\ enabled_secretstore_plugins return plugin_names def get_manager(): global _SECRET_STORE if not _SECRET_STORE: _SECRET_STORE = SecretStorePluginManager() return _SECRET_STORE
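
# A minimal construction sketch (illustrative only -- the algorithm, mode and
# payload values below are assumptions): describing a 256-bit AES secret with
# the data structures defined above.
#
#     key_spec = KeySpec(alg=KeyAlgorithm.AES, bit_length=256, mode='cbc')
#     secret_dto = SecretDTO(type=SecretType.SYMMETRIC,
#                            secret='aGVsbG8=',  # base64-encoded payload
#                            key_spec=key_spec,
#                            content_type='application/octet-stream')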
[ [ [ 628, 631 ], [ 14266, 14269 ], [ 14316, 14319 ], [ 14729, 14732 ], [ 15611, 15614 ], [ 16495, 16498 ], [ 17228, 17231 ], [ 18142, 18145 ], [ 18785, 18788 ], [ 19125, 19128 ] ], [ [ 657, 660 ], [ 1107, 1110 ], [ 1227, 1230 ], [ 1392, 1395 ], [ 1581, 1584 ], [ 1814, 1817 ] ], [ [ 668, 671 ], [ 14248, 14251 ] ], [ [ 694, 699 ], [ 21090, 21095 ] ], [ [ 729, 735 ], [ 976, 982 ], [ 2113, 2119 ], [ 2138, 2144 ], [ 21207, 21213 ] ], [ [ 764, 773 ], [ 2275, 2284 ], [ 2832, 2841 ], [ 3460, 3469 ], [ 4101, 4110 ], [ 4756, 4765 ], [ 5433, 5442 ], [ 5723, 5732 ], [ 6167, 6176 ], [ 6540, 6549 ], [ 6881, 6890 ], [ 7335, 7344 ], [ 7738, 7747 ], [ 8234, 8243 ], [ 8653, 8662 ], [ 8959, 8968 ] ], [ [ 802, 807 ], [ 10458, 10463 ], [ 22550, 22555 ], [ 23998, 24003 ], [ 25261, 25266 ] ], [ [ 829, 838 ], [ 1320, 1321 ], [ 1509, 1510 ], [ 1678, 1679 ], [ 1871, 1872 ], [ 2379, 2380 ], [ 2953, 2954 ], [ 3584, 3585 ], [ 5826, 5827 ], [ 6278, 6279 ], [ 6648, 6649 ], [ 7010, 7011 ], [ 7454, 7455 ], [ 7859, 7860 ], [ 8380, 8381 ], [ 2549, 2550 ], [ 2686, 2687 ], [ 3076, 3077 ], [ 3710, 3711 ], [ 4352, 4353 ], [ 4578, 4579 ], [ 5018, 5019 ], [ 5258, 5259 ], [ 5621, 5622 ], [ 6063, 6064 ], [ 6361, 6362 ], [ 6797, 6798 ], [ 7176, 7177 ], [ 7657, 7658 ], [ 8041, 8042 ], [ 8850, 8851 ], [ 9182, 9183 ] ], [ [ 872, 889 ], [ 21764, 21781 ], [ 22329, 22346 ], [ 24476, 24493 ], [ 25332, 25349 ] ], [ [ 923, 944 ], [ 21688, 21700 ], [ 23944, 23956 ] ], [ [ 947, 960 ], [ 25731, 25744 ] ], [ [ 969, 973 ], [ 2022, 2026 ], [ 2059, 2063 ], [ 2131, 2135 ], [ 2178, 2182 ], [ 21148, 21152 ] ], [ [ 996, 1020 ], [ 1274, 1298 ] ], [ [ 1053, 1068 ], [ 1467, 1482 ] ], [ [ 1089, 1104 ], [ 2042, 2057 ], [ 2096, 2111 ], [ 2213, 2228 ] ], [ [ 1208, 1218 ], [ 2078, 2088 ], [ 2230, 2240 ] ], [ [ 2190, 2199 ] ], [ [ 2249, 2274 ], [ 2738, 2763 ], [ 22654, 22679 ] ], [ [ 2797, 2831 ], [ 3341, 3375 ], [ 23100, 23134 ] ], [ [ 3422, 3459 ], [ 3978, 4015 ], [ 24726, 24763 ] ], [ [ 4062, 4100 ], [ 4284, 4322 ] ], [ [ 4704, 4746 ], [ 4946, 4988 ] ], [ [ 5400, 5432 ], [ 5559, 5591 ] ], [ [ 5689, 5722 ], [ 6000, 6033 ] ], [ [ 6144, 6166 ], [ 6309, 6331 ] ], [ [ 6513, 6539 ], [ 6741, 6767 ] ], [ [ 6847, 6880 ], [ 7113, 7146 ] ], [ [ 7311, 7334 ], [ 7604, 7627 ] ], [ [ 7701, 7737 ], [ 7975, 8011 ] ], [ [ 8194, 8233 ], [ 8514, 8553 ] ], [ [ 8621, 8652 ], [ 8789, 8820 ], [ 20924, 20955 ] ], [ [ 8920, 8958 ], [ 9114, 9152 ], [ 24094, 24132 ] ], [ [ 9446, 9456 ], [ 13382, 13392 ] ], [ [ 10491, 10503 ] ], [ [ 11560, 11567 ] ], [ [ 12146, 12155 ] ], [ [ 13554, 13578 ] ], [ [ 14285, 14300 ] ], [ [ 20759, 20789 ], [ 21813, 21843 ], [ 23151, 23181 ], [ 24152, 24182 ] ], [ [ 21065, 21089 ], [ 21326, 21350 ], [ 25770, 25794 ] ], [ [ 25680, 25691 ] ], [ [ 25754, 25767 ], [ 25808, 25821 ] ] ]
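The abstract interface excerpted above is easiest to see against a concrete, if toy, implementation. The sketch below is illustrative only: InMemorySecretStore and its dict-backed storage are hypothetical, it assumes KeySpec exposes an alg attribute, and a real plugin would also have to implement the store/retrieve methods declared earlier in SecretStoreBase.

class InMemorySecretStore(SecretStoreBase):
    """Toy plugin honoring the contract above (sketch, not shipped code)."""

    def __init__(self):
        self._secrets = {}

    def generate_supports(self, key_spec):
        # Only claim support for algorithms this plugin can actually generate.
        return key_spec.alg in ('AES',)

    def store_secret_supports(self, key_spec):
        # Blob-style storage: any KeySpec is acceptable.
        return True

    def delete_secret(self, secret_metadata):
        self._secrets.pop(secret_metadata.get('secret_id'), None)

    # get_transport_key() / is_transport_key_current() keep the base-class
    # defaults, i.e. no transport key wrapping.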
import os import torch import torch.nn as nn import numpy as np from contextlib import contextmanager from functools import partial from torch.optim import Adam, SGD from spirl.utils.general_utils import ParamDict, get_clipped_optimizer, AttrDict, prefix_dict, map_dict, \ nan_hook, np2obj, ConstantSchedule from spirl.utils.pytorch_utils import RAdam, remove_grads, map2np, map2torch from spirl.utils.vis_utils import add_caption_to_img, add_captions_to_seq from spirl.rl.components.normalization import DummyNormalizer from spirl.rl.components.policy import Policy from spirl.components.checkpointer import CheckpointHandler from spirl.rl.utils.mpi import sync_grads class BaseAgent(nn.Module): def __init__(self, config): super().__init__() self._hp = self._default_hparams().overwrite(config) self.device = self._hp.device self._is_train = True # indicates whether agent should sample in training mode self._rand_act_mode = False # indicates whether agent should act randomly (for warmup collection) self._rollout_mode = False # indicates whether agent is run in rollout mode (omit certain policy outputs) self._obs_normalizer = self._hp.obs_normalizer(self._hp.obs_normalizer_params) def _default_hparams(self): default_dict = ParamDict({ 'device': None, # pytorch device 'discount_factor': 0.99, # discount factor for RL update 'optimizer': 'adam', # supported: 'adam', 'radam', 'rmsprop', 'sgd' 'gradient_clip': None, # max grad norm, if None no clipping 'momentum': 0, # momentum in RMSProp / SGD optimizer 'adam_beta': 0.9, # beta1 param in Adam 'update_iterations': 1, # number of iteration steps per one call to 'update(...)' 'target_network_update_factor': 5e-3, # percentage of new weights that are carried over 'batch_size': 64, # size of the experience batch used for updates 'obs_normalizer': DummyNormalizer, # observation normalization class 'obs_normalizer_params': {}, # parameters for optimization norm class 'obs_norm_log_groups': {}, # (optional) dict defining separation of state space for obsNormLog 'log_videos': True, # whether to log videos during logging 'log_video_caption': False, # whether to add captions to video 'num_workers': None, # number of independent workers --> whether grads need sync }) return default_dict def act(self, obs): """Returns policy output dict given observation (random action if self._rand_act_mode is set).""" if self._rand_act_mode: return self._act_rand(obs) else: return self._act(obs) def _act(self, obs): """Implements act method in child class.""" raise NotImplementedError def _act_rand(self, obs): """Returns random action with proper dimension. Implemented in child class.""" raise NotImplementedError def update(self, experience_batch): """Updates the policy given a batch of experience.""" raise NotImplementedError def add_experience(self, experience_batch): """Provides interface for adding additional experience to agent replay, needs to be overwritten by child.""" print("### This agent does not support additional experience! 
###") def log_outputs(self, logging_stats, rollout_storage, logger, log_images, step): """Visualizes/logs all training outputs.""" logger.log_scalar_dict(logging_stats, prefix='train' if self._is_train else 'val', step=step) if log_images: assert rollout_storage is not None # need rollout data for image logging # log rollout videos with info captions if 'image' in rollout_storage and self._hp.log_videos: if self._hp.log_video_caption: vids = [np.stack(add_captions_to_seq(rollout.image, np2obj(rollout.info))).transpose(0, 3, 1, 2) for rollout in rollout_storage.get()[-logger.n_logged_samples:]] else: vids = [np.stack(rollout.image).transpose(0, 3, 1, 2) for rollout in rollout_storage.get()[-logger.n_logged_samples:]] logger.log_videos(vids, name="rollouts", step=step) self.visualize(logger, rollout_storage, step) def visualize(self, logger, rollout_storage, step): """Optionally allows to further visualize the internal state of agent (e.g. replay buffer etc.)""" pass def reset(self): """Can be used for any initializations of agent's state at beginning of episode.""" pass def save_state(self, save_dir): """Provides interface to save any internal state variables (like replay buffers) to disk.""" pass def load_state(self, save_dir): """Provides interface to load any internal state variables (like replay buffers) from disk.""" pass def sync_networks(self): """Syncs network parameters across workers.""" raise NotImplementedError def _soft_update_target_network(self, target, source): """Copies weights from source to target with weight [0,1].""" for target_param, param in zip(target.parameters(), source.parameters()): target_param.data.copy_(self._hp.target_network_update_factor * param.data + (1 - self._hp.target_network_update_factor) * target_param.data) def _copy_to_target_network(self, target, source): """Completely copies weights from source to target.""" for target_param, source_param in zip(target.parameters(), source.parameters()): target_param.data.copy_(source_param.data) def _get_optimizer(self, optimizer, model, lr): """Returns an instance of the specified optimizers on the parameters of the model with specified learning rate.""" if optimizer == 'adam': get_optim = partial(get_clipped_optimizer, optimizer_type=Adam, betas=(self._hp.adam_beta, 0.999)) elif optimizer == 'radam': get_optim = partial(get_clipped_optimizer, optimizer_type=RAdam, betas=(self._hp.adam_beta, 0.999)) elif optimizer == 'sgd': get_optim = partial(get_clipped_optimizer, optimizer_type=SGD, momentum=self._hp.momentum) else: raise ValueError("Optimizer '{}' not supported!".format(optimizer)) optim = partial(get_optim, gradient_clip=self._hp.gradient_clip) return optim(filter(lambda p: p.requires_grad, model.parameters()), lr=lr) def _perform_update(self, loss, opt, network): """Performs one backward gradient step on the loss using the given optimizer. 
Also syncs gradients.""" nan_hook(loss) opt.zero_grad() loss.backward() grads = [p.grad for p in network.parameters()] nan_hook(grads) opt.step() def _get_obs_norm_info(self): if isinstance(self._obs_normalizer, DummyNormalizer): return {} mean, std = self._obs_normalizer.mean, self._obs_normalizer.std if not self._hp.obs_norm_log_groups: self._hp.obs_norm_log_groups = AttrDict(all=np.arange(mean.shape[0])) info = {} for group_key in self._hp.obs_norm_log_groups: info['obs_norm_' + group_key + '_mean'] = mean[self._hp.obs_norm_log_groups[group_key]].mean() info['obs_norm_' + group_key + '_std'] = std[self._hp.obs_norm_log_groups[group_key]].mean() return info @staticmethod def load_model_weights(model, checkpoint, epoch='latest'): """Loads weights for a given model from the given checkpoint directory.""" checkpoint_dir = checkpoint if os.path.basename(checkpoint) == 'weights' \ else os.path.join(checkpoint, 'weights') # checkpts in 'weights' dir checkpoint_path = CheckpointHandler.get_resume_ckpt_file(epoch, checkpoint_dir) CheckpointHandler.load_weights(checkpoint_path, model=model) @staticmethod def _remove_batch(d): """Adds batch dimension to all tensors in d.""" return map_dict(lambda x: x[0] if (isinstance(x, torch.Tensor) or isinstance(x, np.ndarray)) else x, d) @contextmanager def val_mode(self): """Sets validation parameters if desired. To be used like: with agent.val_mode(): ...<do something>...""" self._is_train = False self.call_children("switch_to_val", Policy) yield self._is_train = True self.call_children("switch_to_train", Policy) @contextmanager def rand_act_mode(self): """Performs random actions within context. To be used like: with agent.rand_act_mode(): ...<do something>...""" self._rand_act_mode = True yield self._rand_act_mode = False @contextmanager def rollout_mode(self): """Sets rollout parameters if desired.""" self._rollout_mode = True self.call_children("switch_to_rollout", Policy) yield self._rollout_mode = False self.call_children("switch_to_non_rollout", Policy) def call_children(self, fn, cls): """Call function with name fn in all submodules of class cls.""" def conditional_fn(module): if isinstance(module, cls): getattr(module, fn).__call__() self.apply(conditional_fn) class HierarchicalAgent(BaseAgent): """Implements a basic hierarchical agent with high-level and low-level policy/policies.""" def __init__(self, config): super().__init__(config) self.hl_agent = self._hp.hl_agent(self._hp.overwrite(self._hp.hl_agent_params)) self.ll_agent = self._hp.ll_agent(self._hp.overwrite(self._hp.ll_agent_params)) self._last_hl_output = None # stores last high-level output to feed to low-level during intermediate steps def _default_hparams(self): default_dict = ParamDict({ 'hl_agent': None, # high-level agent class 'hl_agent_params': None, # parameters of the high-level agent 'll_agent': None, # low-level agent class 'll_agent_params': None, # parameters of the low-level agent(s) 'update_hl': True, # whether to update high-level agent 'update_ll': True, # whether to update low-level agent(s) 'll_subgoal_reaching_reward': False, # whether to count ll subgoal reaching reward in training 'll_subgoal_reaching_reward_weight': 1e3, # weight for the subgoal reaching reward }) return super()._default_hparams().overwrite(default_dict) def act(self, obs): """Output dict contains is_hl_step in case high-level action was performed during this action.""" obs_input = obs[None] if len(obs.shape) == 1 else obs # need batch input for agents output = AttrDict() if 
self._perform_hl_step_now:
            # perform step with high-level policy
            self._last_hl_output = self.hl_agent.act(obs_input)
            output.is_hl_step = True
            if len(obs_input.shape) == 2 and len(self._last_hl_output.action.shape) == 1:
                self._last_hl_output.action = self._last_hl_output.action[None]  # add batch dim if necessary
                self._last_hl_output.log_prob = self._last_hl_output.log_prob[None]
        else:
            output.is_hl_step = False
        output.update(prefix_dict(self._last_hl_output, 'hl_'))

        # perform step with low-level policy
        assert self._last_hl_output is not None
        output.update(self.ll_agent.act(self.make_ll_obs(obs_input, self._last_hl_output.action)))

        return self._remove_batch(output) if len(obs.shape) == 1 else output

    def update(self, experience_batches):
        """Updates high-level and low-level agents depending on which parameters are set."""
        assert isinstance(experience_batches, AttrDict)  # update requires batches for both HL and LL
        update_outputs = AttrDict()
        if self._hp.update_hl:
            hl_update_outputs = self.hl_agent.update(experience_batches.hl_batch)
            update_outputs.update(prefix_dict(hl_update_outputs, "hl_"))
        if self._hp.update_ll:
            ll_update_outputs = self.ll_agent.update(experience_batches.ll_batch)
            update_outputs.update(ll_update_outputs)
        return update_outputs

    def log_outputs(self, logging_stats, rollout_storage, logger, log_images, step):
        """Additionally provides the option to visualize hierarchical agents."""
        super().log_outputs(logging_stats, rollout_storage, logger, log_images, step)

        if log_images:
            self.hl_agent.visualize(logger, rollout_storage, step)
            self.ll_agent.visualize(logger, rollout_storage, step)

    def _act_rand(self, obs):
        """Performs random actions with high-level policy. Low-level policy operates normally."""
        with self.hl_agent.rand_act_mode():
            return self.act(obs)

    def make_ll_obs(self, obs, hl_action):
        """Creates low-level agent's observation from env observation and HL action."""
        return np.concatenate((obs, hl_action), axis=-1)

    def add_experience(self, experience_batch):
        self.hl_agent.add_experience(experience_batch.hl_batch)
        self.ll_agent.add_experience(experience_batch.ll_batch)

    def sync_networks(self):
        self.hl_agent.sync_networks()
        self.ll_agent.sync_networks()

    def state_dict(self, *args, **kwargs):
        return {'hl_agent': self.hl_agent.state_dict(*args, **kwargs),
                'll_agent': self.ll_agent.state_dict(*args, **kwargs)}

    def load_state_dict(self, state_dict, *args, **kwargs):
        self.hl_agent.load_state_dict(state_dict.pop('hl_agent'), *args, **kwargs)
        self.ll_agent.load_state_dict(state_dict.pop('ll_agent'), *args, **kwargs)

    def save_state(self, save_dir):
        self.hl_agent.save_state(os.path.join(save_dir, 'hl_agent'))
        self.ll_agent.save_state(os.path.join(save_dir, 'll_agent'))

    def load_state(self, save_dir):
        self.hl_agent.load_state(os.path.join(save_dir, 'hl_agent'))
        self.ll_agent.load_state(os.path.join(save_dir, 'll_agent'))

    def reset(self):
        super().reset()
        self.hl_agent.reset()
        self.ll_agent.reset()

    @contextmanager
    def rand_act_mode(self):
        """Performs random actions within context. To be used like: with agent.rand_act_mode(): ...<do something>..."""
        self._rand_act_mode = True
        self.hl_agent._rand_act_mode = True
        self.ll_agent._rand_act_mode = True
        yield
        self._rand_act_mode = False
        self.hl_agent._rand_act_mode = False
        self.ll_agent._rand_act_mode = False

    @property
    def _perform_hl_step_now(self):
        """Indicates whether the high-level policy should be executed in the current time step."""
        raise NotImplementedError    # should be implemented by child class!
class FixedIntervalHierarchicalAgent(HierarchicalAgent): """Hierarchical agent that executes high-level actions in fixed temporal intervals.""" def __init__(self, config): super().__init__(config) self._steps_since_hl = 0 # number of steps since last high-level step def _default_hparams(self): default_dict = ParamDict({ 'hl_interval': 3, # temporal interval at which high-level actions are executed }) return super()._default_hparams().overwrite(default_dict) def act(self, *args, **kwargs): output = super().act(*args, **kwargs) self._steps_since_hl += 1 return output @property def _perform_hl_step_now(self): return self._steps_since_hl % self._hp.hl_interval == 0 def reset(self): super().reset() self._steps_since_hl = 0 # start new episode with high-level step
[ [ [ 7, 9 ], [ 8155, 8157 ], [ 8232, 8234 ], [ 14585, 14587 ], [ 14654, 14656 ], [ 14760, 14762 ], [ 14829, 14831 ] ], [ [ 17, 22 ], [ 8615, 8620 ] ], [ [ 30, 44 ], [ 728, 730 ] ], [ [ 52, 63 ], [ 4265, 4267 ], [ 4497, 4499 ], [ 7620, 7622 ], [ 13777, 13779 ], [ 8690, 8692 ] ], [ [ 87, 101 ], [ 8720, 8734 ], [ 9060, 9074 ], [ 9315, 9329 ], [ 14977, 14991 ] ], [ [ 124, 131 ], [ 6386, 6393 ], [ 6532, 6539 ], [ 6677, 6684 ], [ 6866, 6873 ] ], [ [ 156, 160 ], [ 6432, 6436 ] ], [ [ 162, 165 ], [ 6723, 6726 ] ], [ [ 205, 214 ], [ 1367, 1376 ], [ 10427, 10436 ], [ 15981, 15990 ] ], [ [ 216, 237 ], [ 6394, 6415 ], [ 6540, 6561 ], [ 6685, 6706 ] ], [ [ 239, 247 ], [ 7607, 7615 ], [ 11491, 11499 ], [ 12544, 12552 ], [ 12625, 12633 ] ], [ [ 249, 260 ], [ 12049, 12060 ], [ 12783, 12794 ] ], [ [ 262, 270 ], [ 8573, 8581 ] ], [ [ 314, 322 ], [ 7177, 7185 ], [ 7304, 7312 ] ], [ [ 324, 330 ], [ 4309, 4315 ] ], [ [ 332, 348 ] ], [ [ 387, 392 ], [ 6578, 6583 ] ], [ [ 394, 406 ] ], [ [ 408, 414 ] ], [ [ 416, 425 ] ], [ [ 460, 478 ] ], [ [ 480, 499 ], [ 4274, 4293 ] ], [ [ 546, 561 ], [ 2226, 2241 ], [ 7419, 7434 ] ], [ [ 601, 607 ], [ 8948, 8954 ], [ 9046, 9052 ], [ 9490, 9496 ], [ 9599, 9605 ] ], [ [ 650, 667 ], [ 8326, 8343 ], [ 8396, 8413 ] ], [ [ 699, 709 ] ], [ [ 718, 727 ], [ 9904, 9913 ] ], [ [ 9886, 9903 ], [ 15670, 15687 ] ], [ [ 15639, 15669 ] ] ]
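The target-network machinery in BaseAgent above reduces to Polyak averaging. Restated standalone (soft_update and tau are illustrative names; tau plays the role of target_network_update_factor):

import torch.nn as nn


def soft_update(target: nn.Module, source: nn.Module, tau: float = 5e-3):
    # theta_target <- tau * theta_source + (1 - tau) * theta_target
    for t_param, s_param in zip(target.parameters(), source.parameters()):
        t_param.data.copy_(tau * s_param.data + (1 - tau) * t_param.data)

With the default tau of 5e-3 the target network trails the online network, which stabilizes bootstrapped value targets; tau = 1 degenerates to the hard copy done by _copy_to_target_network. Note also that FixedIntervalHierarchicalAgent fires a high-level step whenever _steps_since_hl % hl_interval == 0, i.e. on the first step of every hl_interval-step window.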
import time import emoji from telegram import InlineKeyboardMarkup, ParseMode, InlineKeyboardButton from telegram.ext import run_async, ConversationHandler from telegram.error import TelegramError from django.db.models import Q from . import constants, authentication, renderers, models def send_broadcast(admin, broadcast, context): bot = context.bot success = 0 errors = 0 for user in models.BotUser.objects.all(): try: if user.language == 'es': bot.send_message( chat_id=user.chat_id, text=broadcast.text_es ) elif user.language == 'en': bot.send_message( chat_id=user.chat_id, text=broadcast.text_en ) success += 1 except Exception as e: user.has_blocked_bot = True user.save() errors += 1 time.sleep(1) broadcast.success = success broadcast.errors = errors broadcast.sent = True broadcast.save() bot.send_message( chat_id=admin.chat_id, text='Enviados: {}\nErrores: {}'.format( success, errors ), ) @run_async def feedback(update, context): query = update.callback_query query.answer() user = authentication.authenticate(update.effective_user) text = '' if user.language == 'es': text = ( '¿Deseas enviar tu opinión para ayudarme a mejorar el bot?' '\n\nPuedes reportar errores, solicitar nuevas funcionalidades o mejoras.\n\n' 'Envíame tu opinión o ejecuta /cancel.' ) elif user.language == 'en': text = ( 'Do you want to send me your feedback to help me improve the bot?' '\n\nYou can report bugs, request new features or improvements.\n\n' 'Send your feedback or execute /cancel.' ) query.edit_message_text( text=text ) return constants.INPUT_FEEDBACK @run_async def input_feedback(update, context): bot = context.bot message = update.message.text user = authentication.authenticate(update.effective_user) _, keyboard = renderers.main_markup(user) if str(message).lower() == '/cancel': if user.language == 'es': update.message.chat.send_message( text='✅ Se canceló la acción que estabas llevando a cabo.', reply_markup=InlineKeyboardMarkup(keyboard) ) elif user.language == 'en': update.message.chat.send_message( text='✅ The action has been canceled.', reply_markup=InlineKeyboardMarkup(keyboard) ) else: name = user.first_name if user.last_name is not None: name += ' ' + user.last_name if user.username is not None: name += '(@{})'.format(user.username) text = ( '💬 Feedback from {name}:' '\n\n{message}'.format( name=name, message=message ) ) # persist feedback models.Feedback.objects.create( bot_user=user, message=message ) # send feedback to admins admins = models.BotUser.objects.filter(is_admin=True) for admin in admins: bot.send_message( chat_id=admin.chat_id, text=text ) # thanks text = '' if user.language == 'es': text = 'Muchas gracias por tu opinión.' elif user.language == 'en': text = 'Thank you for your feedback.' 
bot.send_message( chat_id=user.chat_id, text=text, reply_markup=InlineKeyboardMarkup(keyboard) ) return ConversationHandler.END @run_async def input_broadcast_message(update, context): message = update.message.text bot = context.bot user = authentication.authenticate(update.effective_user) try: broadcast = models.Broadcast.objects.get(sent=False) if broadcast.setting_lang == 'es': broadcast.text_es = message elif broadcast.setting_lang == 'en': broadcast.text_en = message broadcast.setting_lang = None broadcast.save() text, keyboard = renderers.broadcast_markup(user, context) bot.send_message( chat_id=user.chat_id, text=text, reply_markup=InlineKeyboardMarkup(keyboard) ) return ConversationHandler.END except models.Notification.DoesNotExist: return ConversationHandler.END @run_async def input_direct_message(update, context): bot = context.bot message = update.message.text user = authentication.authenticate(update.effective_user) if user.is_admin: context.user_data['md_text'] = message bot.send_message( chat_id=user.chat_id, text=message, reply_markup=InlineKeyboardMarkup([ [InlineKeyboardButton(text='Enviar', callback_data='confirm_md')], [InlineKeyboardButton(text='Cancelar', callback_data='cancel_md')], ]) ) return ConversationHandler.END @run_async def broadcast(update, context): query = update.callback_query query.answer() user = authentication.authenticate(update.effective_user) params = query.data.split(' ') operation = params[1] value = None if params.__len__() > 2: value = params[2] try: broad = models.Broadcast.objects.get(sent=False) if operation == 'lang': broad.setting_lang = value broad.save() query.edit_message_text( text='Envíame el mensaje en idioma "{}"'.format(value) ) return constants.INPUT_BROADCAST_MESSAGE if operation == 'send': send_broadcast( admin=user, broadcast=broad, context=context ) return ConversationHandler.END except models.Broadcast.DoesNotExist: query.edit_message_text( text='No hay ninguna notificación en curso, comienza una nueva.' ) return ConversationHandler.END @run_async def direct_message(update, context): query = update.callback_query query.answer() user = authentication.authenticate(update.effective_user) params = query.data.split(' ') id = params[1] if user.is_admin: context.user_data['md_id'] = id context.bot.send_message( chat_id=user.chat_id, text='🤖 Escribe el mensaje para enviar al usuario.' ) return constants.INPUT_DIRECT_MESSAGE @run_async def send_direct_message(update, context): query = update.callback_query query.answer() user = authentication.authenticate(update.effective_user) if user.is_admin: bot = context.bot id = context.user_data.get('md_id') text = context.user_data.get('md_text') try: destiny_user = models.BotUser.objects.get(pk=id) try: bot.send_message( chat_id=destiny_user.chat_id, text=text ) query.edit_message_text( text='✅ Mensaje enviado.' ) destiny_user.has_blocked_bot = False destiny_user.save() except: destiny_user.has_blocked_bot = True destiny_user.save() bot.send_message( chat_id=user.chat_id, text='⚠️ No fue posible enviar el mensaje.' ) except models.BotUser.DoesNotExist: pass del context.user_data['md_id'] del context.user_data['md_text'] @run_async def cancel_direct_message(update, context): query = update.callback_query query.answer() user = authentication.authenticate(update.effective_user) if user.is_admin: bot = context.bot query.edit_message_text( text='✅ Envío cancelado.' 
) del context.user_data['md_id'] del context.user_data['md_text'] @run_async def input_user_criteria(update, context): bot = context.bot message = update.message.text user = authentication.authenticate(update.effective_user) if user.is_admin: criteria = message users = models.BotUser.objects.filter( Q(username__icontains=criteria) | Q(first_name__icontains=criteria) | Q(last_name__icontains=criteria) ) bot.send_message( chat_id=user.chat_id, text='{} resultados'.format(users.count()), ) for u in users: text, keyboard = renderers.user_markup(u) bot.send_message( chat_id=user.chat_id, text=text, reply_markup=InlineKeyboardMarkup(keyboard) ) return ConversationHandler.END
[ [ [ 7, 11 ], [ 960, 964 ] ], [ [ 19, 24 ] ], [ [ 46, 66 ], [ 2501, 2521 ], [ 2714, 2734 ], [ 3830, 3850 ], [ 4568, 4588 ], [ 5093, 5113 ], [ 9155, 9175 ] ], [ [ 68, 77 ] ], [ [ 79, 99 ], [ 5133, 5153 ], [ 5216, 5236 ] ], [ [ 125, 134 ], [ 1247, 1256 ], [ 2059, 2068 ], [ 3910, 3919 ], [ 4738, 4747 ], [ 5351, 5360 ], [ 6407, 6416 ], [ 6879, 6888 ], [ 8019, 8028 ], [ 8404, 8413 ] ], [ [ 136, 155 ], [ 3883, 3902 ], [ 4625, 4644 ], [ 4711, 4730 ], [ 5324, 5343 ], [ 6176, 6195 ], [ 6380, 6399 ], [ 9212, 9231 ] ], [ [ 183, 196 ] ], [ [ 226, 227 ], [ 8686, 8687 ], [ 8732, 8733 ], [ 8780, 8781 ] ], [ [ 242, 251 ], [ 2031, 2040 ], [ 5953, 5962 ], [ 6845, 6854 ] ], [ [ 253, 267 ], [ 1353, 1367 ], [ 2174, 2188 ], [ 4035, 4049 ], [ 4859, 4873 ], [ 5458, 5472 ], [ 6519, 6533 ], [ 6996, 7010 ], [ 8138, 8152 ], [ 8524, 8538 ] ], [ [ 269, 278 ], [ 2244, 2253 ], [ 4417, 4426 ], [ 9005, 9014 ] ], [ [ 280, 286 ], [ 408, 414 ], [ 3181, 3187 ], [ 3330, 3336 ], [ 4117, 4123 ], [ 4661, 4667 ], [ 5671, 5677 ], [ 6212, 6218 ], [ 7231, 7237 ], [ 7889, 7895 ], [ 8643, 8649 ] ], [ [ 293, 307 ], [ 6033, 6047 ] ], [ [ 1261, 1269 ] ], [ [ 2073, 2087 ] ], [ [ 3924, 3947 ] ], [ [ 4752, 4772 ] ], [ [ 5365, 5374 ] ], [ [ 6421, 6435 ] ], [ [ 6893, 6912 ] ], [ [ 8033, 8054 ] ], [ [ 8418, 8437 ] ] ]
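The handlers above return states such as constants.INPUT_FEEDBACK, i.e. they are meant to be wired into a ConversationHandler. A sketch of that wiring, assuming a v12-style python-telegram-bot setup to match the run_async usage; the callback-data pattern and the .handlers module path are assumptions:

from telegram.ext import (CallbackQueryHandler, ConversationHandler,
                          Filters, MessageHandler)

from . import constants
from .handlers import feedback, input_feedback  # hypothetical module name

feedback_conversation = ConversationHandler(
    entry_points=[CallbackQueryHandler(feedback, pattern='^feedback$')],
    states={
        # Filters.all so the '/cancel' command also reaches input_feedback,
        # which checks for it explicitly.
        constants.INPUT_FEEDBACK: [MessageHandler(Filters.all, input_feedback)],
    },
    fallbacks=[],
)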
""" elasticapm.contrib.django.client ~~~~~~~~~~~~~~~~~~~~~~~~~~~~ :copyright: (c) 2011-2017 Elasticsearch Large portions are :copyright: (c) 2010 by the Sentry Team, see AUTHORS for more details. :license: BSD, see LICENSE for more details. """ from __future__ import absolute_import import logging import django from django.conf import settings as django_settings from django.core.exceptions import DisallowedHost from django.db import DatabaseError from django.http import HttpRequest from elasticapm.base import Client from elasticapm.conf import constants from elasticapm.contrib.django.utils import iterate_with_template_sources from elasticapm.utils import compat, encoding, get_url_dict from elasticapm.utils.module_import import import_string from elasticapm.utils.wsgi import get_environ, get_headers __all__ = ("DjangoClient",) default_client_class = "elasticapm.contrib.django.DjangoClient" _client = (None, None) def get_client(client=None): """ Get an ElasticAPM client. :param client: :return: :rtype: elasticapm.base.Client """ global _client tmp_client = client is not None if not tmp_client: config = getattr(django_settings, "ELASTIC_APM", {}) client = config.get("CLIENT", default_client_class) if _client[0] != client: client_class = import_string(client) instance = client_class() if not tmp_client: _client = (client, instance) return instance return _client[1] class DjangoClient(Client): logger = logging.getLogger("elasticapm.errors.client.django") def __init__(self, config=None, **inline): if config is None: config = getattr(django_settings, "ELASTIC_APM", {}) if "framework_name" not in inline: inline["framework_name"] = "django" inline["framework_version"] = django.get_version() super(DjangoClient, self).__init__(config, **inline) def get_user_info(self, request): user_info = {} if not hasattr(request, "user"): return user_info try: user = request.user if hasattr(user, "is_authenticated"): if callable(user.is_authenticated): user_info["is_authenticated"] = user.is_authenticated() else: user_info["is_authenticated"] = bool(user.is_authenticated) if hasattr(user, "id"): user_info["id"] = encoding.keyword_field(user.id) if hasattr(user, "get_username"): user_info["username"] = encoding.keyword_field(user.get_username()) elif hasattr(user, "username"): user_info["username"] = encoding.keyword_field(user.username) if hasattr(user, "email"): user_info["email"] = user.email except DatabaseError: # If the connection is closed or similar, we'll just skip this return {} return user_info def get_data_from_request(self, request, capture_body=False): result = { "env": dict(get_environ(request.META)), "headers": dict(get_headers(request.META)), "method": request.method, "socket": {"remote_address": request.META.get("REMOTE_ADDR"), "encrypted": request.is_secure()}, "cookies": dict(request.COOKIES), } if request.method in constants.HTTP_WITH_BODY: content_type = request.META.get("CONTENT_TYPE") if content_type == "application/x-www-form-urlencoded": data = compat.multidict_to_dict(request.POST) elif content_type and content_type.startswith("multipart/form-data"): data = compat.multidict_to_dict(request.POST) if request.FILES: data["_files"] = {field: file.name for field, file in compat.iteritems(request.FILES)} else: try: data = request.body except Exception: data = "<unavailable>" result["body"] = data if (capture_body or not data) else "[REDACTED]" if hasattr(request, "get_raw_uri"): # added in Django 1.9 url = request.get_raw_uri() else: try: # Requires host to be in ALLOWED_HOSTS, might throw a # 
DisallowedHost exception url = request.build_absolute_uri() except DisallowedHost: # We can't figure out the real URL, so we have to set it to # DisallowedHost result["url"] = {"full": "DisallowedHost"} url = None if url: result["url"] = get_url_dict(url) return result def get_data_from_response(self, response): result = {"status_code": response.status_code} if hasattr(response, "items"): result["headers"] = dict(response.items()) return result def capture(self, event_type, request=None, **kwargs): if "context" not in kwargs: kwargs["context"] = context = {} else: context = kwargs["context"] is_http_request = isinstance(request, HttpRequest) if is_http_request: context["request"] = self.get_data_from_request( request, capture_body=self.config.capture_body in ("all", "errors") ) context["user"] = self.get_user_info(request) result = super(DjangoClient, self).capture(event_type, **kwargs) if is_http_request: # attach the elasticapm object to the request request._elasticapm = {"service_name": self.config.service_name, "id": result} return result def _get_stack_info_for_trace( self, frames, library_frame_context_lines=None, in_app_frame_context_lines=None, with_locals=True, locals_processor_func=None, ): """If the stacktrace originates within the elasticapm module, it will skip frames until some other module comes up.""" return list( iterate_with_template_sources( frames, with_locals=with_locals, library_frame_context_lines=library_frame_context_lines, in_app_frame_context_lines=in_app_frame_context_lines, include_paths_re=self.include_paths_re, exclude_paths_re=self.exclude_paths_re, locals_processor_func=locals_processor_func, ) ) def send(self, url, **kwargs): """ Serializes and signs ``data`` and passes the payload off to ``send_remote`` If ``server`` was passed into the constructor, this will serialize the data and pipe it to the server using ``send_remote()``. """ if self.config.server_url: return super(DjangoClient, self).send(url, **kwargs) else: self.error_logger.error("No server configured, and elasticapm not installed. Cannot send message") return None class ProxyClient(object): """ A proxy which represents the current client at all times. 
""" # introspection support: __members__ = property(lambda x: x.__dir__()) # Need to pretend to be the wrapped class, for the sake of objects that care # about this (especially in equality tests) __class__ = property(lambda x: get_client().__class__) __dict__ = property(lambda o: get_client().__dict__) __repr__ = lambda: repr(get_client()) __getattr__ = lambda x, o: getattr(get_client(), o) __setattr__ = lambda x, o, v: setattr(get_client(), o, v) __delattr__ = lambda x, o: delattr(get_client(), o) __lt__ = lambda x, o: get_client() < o __le__ = lambda x, o: get_client() <= o __eq__ = lambda x, o: get_client() == o __ne__ = lambda x, o: get_client() != o __gt__ = lambda x, o: get_client() > o __ge__ = lambda x, o: get_client() >= o if compat.PY2: __cmp__ = lambda x, o: cmp(get_client(), o) # noqa F821 __hash__ = lambda x: hash(get_client()) # attributes are currently not callable # __call__ = lambda x, *a, **kw: get_client()(*a, **kw) __nonzero__ = lambda x: bool(get_client()) __len__ = lambda x: len(get_client()) __getitem__ = lambda x, i: get_client()[i] __iter__ = lambda x: iter(get_client()) __contains__ = lambda x, i: i in get_client() __getslice__ = lambda x, i, j: get_client()[i:j] __add__ = lambda x, o: get_client() + o __sub__ = lambda x, o: get_client() - o __mul__ = lambda x, o: get_client() * o __floordiv__ = lambda x, o: get_client() // o __mod__ = lambda x, o: get_client() % o __divmod__ = lambda x, o: get_client().__divmod__(o) __pow__ = lambda x, o: get_client() ** o __lshift__ = lambda x, o: get_client() << o __rshift__ = lambda x, o: get_client() >> o __and__ = lambda x, o: get_client() & o __xor__ = lambda x, o: get_client() ^ o __or__ = lambda x, o: get_client() | o __div__ = lambda x, o: get_client().__div__(o) __truediv__ = lambda x, o: get_client().__truediv__(o) __neg__ = lambda x: -(get_client()) __pos__ = lambda x: +(get_client()) __abs__ = lambda x: abs(get_client()) __invert__ = lambda x: ~(get_client()) __complex__ = lambda x: complex(get_client()) __int__ = lambda x: int(get_client()) if compat.PY2: __long__ = lambda x: long(get_client()) # noqa F821 __float__ = lambda x: float(get_client()) __str__ = lambda x: str(get_client()) __unicode__ = lambda x: compat.text_type(get_client()) __oct__ = lambda x: oct(get_client()) __hex__ = lambda x: hex(get_client()) __index__ = lambda x: get_client().__index__() __coerce__ = lambda x, o: x.__coerce__(x, o) __enter__ = lambda x: x.__enter__() __exit__ = lambda x, *a, **kw: x.__exit__(*a, **kw) client = ProxyClient() def _get_installed_apps_paths(): """ Generate a list of modules in settings.INSTALLED_APPS. """ out = set() for app in django_settings.INSTALLED_APPS: out.add(app) return out
[ [ [ 271, 286 ] ], [ [ 295, 302 ], [ 1545, 1552 ] ], [ [ 311, 317 ], [ 1871, 1877 ] ], [ [ 342, 369 ], [ 1183, 1198 ], [ 1702, 1717 ], [ 10181, 10196 ] ], [ [ 405, 419 ], [ 4506, 4520 ] ], [ [ 442, 455 ], [ 2868, 2881 ] ], [ [ 480, 491 ], [ 5264, 5275 ] ], [ [ 521, 527 ], [ 1523, 1529 ] ], [ [ 556, 565 ], [ 3433, 3442 ] ], [ [ 610, 639 ], [ 6183, 6212 ] ], [ [ 669, 675 ], [ 8078, 8084 ], [ 9515, 9521 ], [ 3610, 3616 ], [ 3754, 3760 ], [ 3901, 3907 ], [ 9704, 9710 ] ], [ [ 677, 685 ], [ 2481, 2489 ], [ 2599, 2607 ], [ 2727, 2735 ] ], [ [ 687, 699 ], [ 4761, 4773 ] ], [ [ 743, 756 ], [ 1332, 1345 ] ], [ [ 791, 802 ], [ 3116, 3127 ] ], [ [ 804, 815 ], [ 3172, 3183 ] ], [ [ 817, 824 ] ], [ [ 847, 867 ], [ 1257, 1277 ] ], [ [ 911, 918 ], [ 1287, 1294 ] ], [ [ 940, 950 ], [ 7509, 7519 ], [ 7568, 7578 ], [ 7620, 7630 ], [ 7673, 7683 ], [ 7732, 7742 ], [ 7791, 7801 ], [ 7835, 7845 ], [ 7878, 7888 ], [ 7922, 7932 ], [ 7966, 7976 ], [ 8010, 8020 ], [ 8053, 8063 ], [ 8125, 8135 ], [ 8185, 8195 ], [ 8336, 8346 ], [ 8378, 8388 ], [ 8423, 8433 ], [ 8469, 8479 ], [ 8520, 8530 ], [ 8568, 8578 ], [ 8613, 8623 ], [ 8657, 8667 ], [ 8701, 8711 ], [ 8750, 8760 ], [ 8795, 8805 ], [ 8842, 8852 ], [ 8896, 8906 ], [ 8944, 8954 ], [ 8992, 9002 ], [ 9037, 9047 ], [ 9081, 9091 ], [ 9124, 9134 ], [ 9168, 9178 ], [ 9223, 9233 ], [ 9277, 9287 ], [ 9317, 9327 ], [ 9359, 9369 ], [ 9402, 9412 ], [ 9452, 9462 ], [ 9494, 9504 ], [ 9561, 9571 ], [ 9620, 9630 ], [ 9662, 9672 ], [ 9721, 9731 ], [ 9763, 9773 ], [ 9805, 9815 ], [ 9845, 9855 ] ], [ [ 1510, 1522 ], [ 1906, 1918 ], [ 5546, 5558 ], [ 6968, 6980 ] ], [ [ 7165, 7176 ], [ 10026, 10037 ] ], [ [ 10017, 10023 ] ], [ [ 10046, 10071 ] ], [ [ 1427, 1434 ], [ 1491, 1498 ] ] ]
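ProxyClient spells out every special method by hand because Python looks dunder methods up on the type, not the instance, so plain __getattr__ delegation never sees them. For ordinary attributes the pattern reduces to a few lines; LazyProxy below is an illustrative sketch, not part of elasticapm:

class LazyProxy(object):
    """Forwards ordinary attribute access to the object built by factory."""

    def __init__(self, factory):
        # Bypass our own __setattr__ while wiring up the instance.
        object.__setattr__(self, '_factory', factory)

    def __getattr__(self, name):
        return getattr(self._factory(), name)

    def __setattr__(self, name, value):
        setattr(self._factory(), name, value)


# Usage mirroring the module above: client = LazyProxy(get_client)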
#!/usr/bin/env python

from distutils.core import setup, Extension
import glob
import os

# Get matfiles and images for testing
matfiles = glob.glob(os.path.join('tests/data/*.mat'))
data = glob.glob(os.path.join('data/*'))

setup(
    name='RLS',
    version='1.0',
    description='Python implementation of RLS program',
    author='Abhijit Bendale',
    author_email='bendale@mit.edu',
    py_modules=['rls_pipeline', 'tests.test_rlspackage', 'utils.linearRLS',
                'utils.non_linear_rls', 'OptParserExtended'],
    data_files=[('documentation', ['documentation/notes.rst']),
                ('data', ['data/smp.mat']),
                ('tests/data', ['tests/data/smp.mat', 'tests/data/linear_rls.mat',
                                'tests/data/non_linear_rls.mat'])],
)
[ [ [ 237, 242 ], [ 409, 414 ] ], [ [ 244, 253 ] ], [ [ 261, 265 ], [ 324, 328 ], [ 373, 377 ] ], [ [ 273, 275 ], [ 334, 336 ], [ 383, 385 ] ], [ [ 315, 323 ] ], [ [ 368, 372 ] ] ]
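One caveat on the imports above: distutils was deprecated by PEP 632 and removed in Python 3.12. On modern interpreters the same script keeps working with setuptools as a drop-in replacement (the unused Extension import can simply be dropped):

from setuptools import setup  # instead of: from distutils.core import setup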
#!/usr/bin/env python # -*- coding: utf-8 -*- import simplejson as json from alipay.aop.api.constant.ParamConstants import * class ExSourceRateVO(object): def __init__(self): self._bid = None self._currency_pair = None self._currency_unit = None self._expiry_time = None self._extended_params = None self._generate_date = None self._generate_time = None self._gmt_create = None self._gmt_modified = None self._guaranteed = None self._id = None self._inst = None self._inst_rate_reference_id = None self._is_exception = None self._is_flat = None self._is_formatted = None self._is_valid = None self._maturity_date = None self._maximum_bid_amount = None self._maximum_offer_amount = None self._memo = None self._mid = None self._minimum_bid_amount = None self._minimum_offer_amount = None self._offer = None self._on_off_shore = None self._period = None self._profile = None self._quote_type = None self._rate_method = None self._rate_source_code = None self._rate_type = None self._segment_id = None self._sp_bid = None self._sp_mid = None self._sp_offer = None self._start_time = None self._sub_inst = None self._threshold_time = None self._valid_time = None self._zone_expiry_time = None self._zone_generate_time = None self._zone_gmt_create = None self._zone_gmt_modified = None self._zone_start_time = None self._zone_threshold_time = None self._zone_valid_time = None @property def bid(self): return self._bid @bid.setter def bid(self, value): self._bid = value @property def currency_pair(self): return self._currency_pair @currency_pair.setter def currency_pair(self, value): self._currency_pair = value @property def currency_unit(self): return self._currency_unit @currency_unit.setter def currency_unit(self, value): self._currency_unit = value @property def expiry_time(self): return self._expiry_time @expiry_time.setter def expiry_time(self, value): self._expiry_time = value @property def extended_params(self): return self._extended_params @extended_params.setter def extended_params(self, value): self._extended_params = value @property def generate_date(self): return self._generate_date @generate_date.setter def generate_date(self, value): self._generate_date = value @property def generate_time(self): return self._generate_time @generate_time.setter def generate_time(self, value): self._generate_time = value @property def gmt_create(self): return self._gmt_create @gmt_create.setter def gmt_create(self, value): self._gmt_create = value @property def gmt_modified(self): return self._gmt_modified @gmt_modified.setter def gmt_modified(self, value): self._gmt_modified = value @property def guaranteed(self): return self._guaranteed @guaranteed.setter def guaranteed(self, value): self._guaranteed = value @property def id(self): return self._id @id.setter def id(self, value): self._id = value @property def inst(self): return self._inst @inst.setter def inst(self, value): self._inst = value @property def inst_rate_reference_id(self): return self._inst_rate_reference_id @inst_rate_reference_id.setter def inst_rate_reference_id(self, value): self._inst_rate_reference_id = value @property def is_exception(self): return self._is_exception @is_exception.setter def is_exception(self, value): self._is_exception = value @property def is_flat(self): return self._is_flat @is_flat.setter def is_flat(self, value): self._is_flat = value @property def is_formatted(self): return self._is_formatted @is_formatted.setter def is_formatted(self, value): self._is_formatted = value @property def is_valid(self): return self._is_valid 
@is_valid.setter def is_valid(self, value): self._is_valid = value @property def maturity_date(self): return self._maturity_date @maturity_date.setter def maturity_date(self, value): self._maturity_date = value @property def maximum_bid_amount(self): return self._maximum_bid_amount @maximum_bid_amount.setter def maximum_bid_amount(self, value): self._maximum_bid_amount = value @property def maximum_offer_amount(self): return self._maximum_offer_amount @maximum_offer_amount.setter def maximum_offer_amount(self, value): self._maximum_offer_amount = value @property def memo(self): return self._memo @memo.setter def memo(self, value): self._memo = value @property def mid(self): return self._mid @mid.setter def mid(self, value): self._mid = value @property def minimum_bid_amount(self): return self._minimum_bid_amount @minimum_bid_amount.setter def minimum_bid_amount(self, value): self._minimum_bid_amount = value @property def minimum_offer_amount(self): return self._minimum_offer_amount @minimum_offer_amount.setter def minimum_offer_amount(self, value): self._minimum_offer_amount = value @property def offer(self): return self._offer @offer.setter def offer(self, value): self._offer = value @property def on_off_shore(self): return self._on_off_shore @on_off_shore.setter def on_off_shore(self, value): self._on_off_shore = value @property def period(self): return self._period @period.setter def period(self, value): self._period = value @property def profile(self): return self._profile @profile.setter def profile(self, value): self._profile = value @property def quote_type(self): return self._quote_type @quote_type.setter def quote_type(self, value): self._quote_type = value @property def rate_method(self): return self._rate_method @rate_method.setter def rate_method(self, value): self._rate_method = value @property def rate_source_code(self): return self._rate_source_code @rate_source_code.setter def rate_source_code(self, value): self._rate_source_code = value @property def rate_type(self): return self._rate_type @rate_type.setter def rate_type(self, value): self._rate_type = value @property def segment_id(self): return self._segment_id @segment_id.setter def segment_id(self, value): self._segment_id = value @property def sp_bid(self): return self._sp_bid @sp_bid.setter def sp_bid(self, value): self._sp_bid = value @property def sp_mid(self): return self._sp_mid @sp_mid.setter def sp_mid(self, value): self._sp_mid = value @property def sp_offer(self): return self._sp_offer @sp_offer.setter def sp_offer(self, value): self._sp_offer = value @property def start_time(self): return self._start_time @start_time.setter def start_time(self, value): self._start_time = value @property def sub_inst(self): return self._sub_inst @sub_inst.setter def sub_inst(self, value): self._sub_inst = value @property def threshold_time(self): return self._threshold_time @threshold_time.setter def threshold_time(self, value): self._threshold_time = value @property def valid_time(self): return self._valid_time @valid_time.setter def valid_time(self, value): self._valid_time = value @property def zone_expiry_time(self): return self._zone_expiry_time @zone_expiry_time.setter def zone_expiry_time(self, value): self._zone_expiry_time = value @property def zone_generate_time(self): return self._zone_generate_time @zone_generate_time.setter def zone_generate_time(self, value): self._zone_generate_time = value @property def zone_gmt_create(self): return self._zone_gmt_create @zone_gmt_create.setter def zone_gmt_create(self, 
value): self._zone_gmt_create = value @property def zone_gmt_modified(self): return self._zone_gmt_modified @zone_gmt_modified.setter def zone_gmt_modified(self, value): self._zone_gmt_modified = value @property def zone_start_time(self): return self._zone_start_time @zone_start_time.setter def zone_start_time(self, value): self._zone_start_time = value @property def zone_threshold_time(self): return self._zone_threshold_time @zone_threshold_time.setter def zone_threshold_time(self, value): self._zone_threshold_time = value @property def zone_valid_time(self): return self._zone_valid_time @zone_valid_time.setter def zone_valid_time(self, value): self._zone_valid_time = value def to_alipay_dict(self): params = dict() if self.bid: if hasattr(self.bid, 'to_alipay_dict'): params['bid'] = self.bid.to_alipay_dict() else: params['bid'] = self.bid if self.currency_pair: if hasattr(self.currency_pair, 'to_alipay_dict'): params['currency_pair'] = self.currency_pair.to_alipay_dict() else: params['currency_pair'] = self.currency_pair if self.currency_unit: if hasattr(self.currency_unit, 'to_alipay_dict'): params['currency_unit'] = self.currency_unit.to_alipay_dict() else: params['currency_unit'] = self.currency_unit if self.expiry_time: if hasattr(self.expiry_time, 'to_alipay_dict'): params['expiry_time'] = self.expiry_time.to_alipay_dict() else: params['expiry_time'] = self.expiry_time if self.extended_params: if hasattr(self.extended_params, 'to_alipay_dict'): params['extended_params'] = self.extended_params.to_alipay_dict() else: params['extended_params'] = self.extended_params if self.generate_date: if hasattr(self.generate_date, 'to_alipay_dict'): params['generate_date'] = self.generate_date.to_alipay_dict() else: params['generate_date'] = self.generate_date if self.generate_time: if hasattr(self.generate_time, 'to_alipay_dict'): params['generate_time'] = self.generate_time.to_alipay_dict() else: params['generate_time'] = self.generate_time if self.gmt_create: if hasattr(self.gmt_create, 'to_alipay_dict'): params['gmt_create'] = self.gmt_create.to_alipay_dict() else: params['gmt_create'] = self.gmt_create if self.gmt_modified: if hasattr(self.gmt_modified, 'to_alipay_dict'): params['gmt_modified'] = self.gmt_modified.to_alipay_dict() else: params['gmt_modified'] = self.gmt_modified if self.guaranteed: if hasattr(self.guaranteed, 'to_alipay_dict'): params['guaranteed'] = self.guaranteed.to_alipay_dict() else: params['guaranteed'] = self.guaranteed if self.id: if hasattr(self.id, 'to_alipay_dict'): params['id'] = self.id.to_alipay_dict() else: params['id'] = self.id if self.inst: if hasattr(self.inst, 'to_alipay_dict'): params['inst'] = self.inst.to_alipay_dict() else: params['inst'] = self.inst if self.inst_rate_reference_id: if hasattr(self.inst_rate_reference_id, 'to_alipay_dict'): params['inst_rate_reference_id'] = self.inst_rate_reference_id.to_alipay_dict() else: params['inst_rate_reference_id'] = self.inst_rate_reference_id if self.is_exception: if hasattr(self.is_exception, 'to_alipay_dict'): params['is_exception'] = self.is_exception.to_alipay_dict() else: params['is_exception'] = self.is_exception if self.is_flat: if hasattr(self.is_flat, 'to_alipay_dict'): params['is_flat'] = self.is_flat.to_alipay_dict() else: params['is_flat'] = self.is_flat if self.is_formatted: if hasattr(self.is_formatted, 'to_alipay_dict'): params['is_formatted'] = self.is_formatted.to_alipay_dict() else: params['is_formatted'] = self.is_formatted if self.is_valid: if hasattr(self.is_valid, 'to_alipay_dict'): 
params['is_valid'] = self.is_valid.to_alipay_dict() else: params['is_valid'] = self.is_valid if self.maturity_date: if hasattr(self.maturity_date, 'to_alipay_dict'): params['maturity_date'] = self.maturity_date.to_alipay_dict() else: params['maturity_date'] = self.maturity_date if self.maximum_bid_amount: if hasattr(self.maximum_bid_amount, 'to_alipay_dict'): params['maximum_bid_amount'] = self.maximum_bid_amount.to_alipay_dict() else: params['maximum_bid_amount'] = self.maximum_bid_amount if self.maximum_offer_amount: if hasattr(self.maximum_offer_amount, 'to_alipay_dict'): params['maximum_offer_amount'] = self.maximum_offer_amount.to_alipay_dict() else: params['maximum_offer_amount'] = self.maximum_offer_amount if self.memo: if hasattr(self.memo, 'to_alipay_dict'): params['memo'] = self.memo.to_alipay_dict() else: params['memo'] = self.memo if self.mid: if hasattr(self.mid, 'to_alipay_dict'): params['mid'] = self.mid.to_alipay_dict() else: params['mid'] = self.mid if self.minimum_bid_amount: if hasattr(self.minimum_bid_amount, 'to_alipay_dict'): params['minimum_bid_amount'] = self.minimum_bid_amount.to_alipay_dict() else: params['minimum_bid_amount'] = self.minimum_bid_amount if self.minimum_offer_amount: if hasattr(self.minimum_offer_amount, 'to_alipay_dict'): params['minimum_offer_amount'] = self.minimum_offer_amount.to_alipay_dict() else: params['minimum_offer_amount'] = self.minimum_offer_amount if self.offer: if hasattr(self.offer, 'to_alipay_dict'): params['offer'] = self.offer.to_alipay_dict() else: params['offer'] = self.offer if self.on_off_shore: if hasattr(self.on_off_shore, 'to_alipay_dict'): params['on_off_shore'] = self.on_off_shore.to_alipay_dict() else: params['on_off_shore'] = self.on_off_shore if self.period: if hasattr(self.period, 'to_alipay_dict'): params['period'] = self.period.to_alipay_dict() else: params['period'] = self.period if self.profile: if hasattr(self.profile, 'to_alipay_dict'): params['profile'] = self.profile.to_alipay_dict() else: params['profile'] = self.profile if self.quote_type: if hasattr(self.quote_type, 'to_alipay_dict'): params['quote_type'] = self.quote_type.to_alipay_dict() else: params['quote_type'] = self.quote_type if self.rate_method: if hasattr(self.rate_method, 'to_alipay_dict'): params['rate_method'] = self.rate_method.to_alipay_dict() else: params['rate_method'] = self.rate_method if self.rate_source_code: if hasattr(self.rate_source_code, 'to_alipay_dict'): params['rate_source_code'] = self.rate_source_code.to_alipay_dict() else: params['rate_source_code'] = self.rate_source_code if self.rate_type: if hasattr(self.rate_type, 'to_alipay_dict'): params['rate_type'] = self.rate_type.to_alipay_dict() else: params['rate_type'] = self.rate_type if self.segment_id: if hasattr(self.segment_id, 'to_alipay_dict'): params['segment_id'] = self.segment_id.to_alipay_dict() else: params['segment_id'] = self.segment_id if self.sp_bid: if hasattr(self.sp_bid, 'to_alipay_dict'): params['sp_bid'] = self.sp_bid.to_alipay_dict() else: params['sp_bid'] = self.sp_bid if self.sp_mid: if hasattr(self.sp_mid, 'to_alipay_dict'): params['sp_mid'] = self.sp_mid.to_alipay_dict() else: params['sp_mid'] = self.sp_mid if self.sp_offer: if hasattr(self.sp_offer, 'to_alipay_dict'): params['sp_offer'] = self.sp_offer.to_alipay_dict() else: params['sp_offer'] = self.sp_offer if self.start_time: if hasattr(self.start_time, 'to_alipay_dict'): params['start_time'] = self.start_time.to_alipay_dict() else: params['start_time'] = self.start_time if self.sub_inst: if 
hasattr(self.sub_inst, 'to_alipay_dict'): params['sub_inst'] = self.sub_inst.to_alipay_dict() else: params['sub_inst'] = self.sub_inst if self.threshold_time: if hasattr(self.threshold_time, 'to_alipay_dict'): params['threshold_time'] = self.threshold_time.to_alipay_dict() else: params['threshold_time'] = self.threshold_time if self.valid_time: if hasattr(self.valid_time, 'to_alipay_dict'): params['valid_time'] = self.valid_time.to_alipay_dict() else: params['valid_time'] = self.valid_time if self.zone_expiry_time: if hasattr(self.zone_expiry_time, 'to_alipay_dict'): params['zone_expiry_time'] = self.zone_expiry_time.to_alipay_dict() else: params['zone_expiry_time'] = self.zone_expiry_time if self.zone_generate_time: if hasattr(self.zone_generate_time, 'to_alipay_dict'): params['zone_generate_time'] = self.zone_generate_time.to_alipay_dict() else: params['zone_generate_time'] = self.zone_generate_time if self.zone_gmt_create: if hasattr(self.zone_gmt_create, 'to_alipay_dict'): params['zone_gmt_create'] = self.zone_gmt_create.to_alipay_dict() else: params['zone_gmt_create'] = self.zone_gmt_create if self.zone_gmt_modified: if hasattr(self.zone_gmt_modified, 'to_alipay_dict'): params['zone_gmt_modified'] = self.zone_gmt_modified.to_alipay_dict() else: params['zone_gmt_modified'] = self.zone_gmt_modified if self.zone_start_time: if hasattr(self.zone_start_time, 'to_alipay_dict'): params['zone_start_time'] = self.zone_start_time.to_alipay_dict() else: params['zone_start_time'] = self.zone_start_time if self.zone_threshold_time: if hasattr(self.zone_threshold_time, 'to_alipay_dict'): params['zone_threshold_time'] = self.zone_threshold_time.to_alipay_dict() else: params['zone_threshold_time'] = self.zone_threshold_time if self.zone_valid_time: if hasattr(self.zone_valid_time, 'to_alipay_dict'): params['zone_valid_time'] = self.zone_valid_time.to_alipay_dict() else: params['zone_valid_time'] = self.zone_valid_time return params @staticmethod def from_alipay_dict(d): if not d: return None o = ExSourceRateVO() if 'bid' in d: o.bid = d['bid'] if 'currency_pair' in d: o.currency_pair = d['currency_pair'] if 'currency_unit' in d: o.currency_unit = d['currency_unit'] if 'expiry_time' in d: o.expiry_time = d['expiry_time'] if 'extended_params' in d: o.extended_params = d['extended_params'] if 'generate_date' in d: o.generate_date = d['generate_date'] if 'generate_time' in d: o.generate_time = d['generate_time'] if 'gmt_create' in d: o.gmt_create = d['gmt_create'] if 'gmt_modified' in d: o.gmt_modified = d['gmt_modified'] if 'guaranteed' in d: o.guaranteed = d['guaranteed'] if 'id' in d: o.id = d['id'] if 'inst' in d: o.inst = d['inst'] if 'inst_rate_reference_id' in d: o.inst_rate_reference_id = d['inst_rate_reference_id'] if 'is_exception' in d: o.is_exception = d['is_exception'] if 'is_flat' in d: o.is_flat = d['is_flat'] if 'is_formatted' in d: o.is_formatted = d['is_formatted'] if 'is_valid' in d: o.is_valid = d['is_valid'] if 'maturity_date' in d: o.maturity_date = d['maturity_date'] if 'maximum_bid_amount' in d: o.maximum_bid_amount = d['maximum_bid_amount'] if 'maximum_offer_amount' in d: o.maximum_offer_amount = d['maximum_offer_amount'] if 'memo' in d: o.memo = d['memo'] if 'mid' in d: o.mid = d['mid'] if 'minimum_bid_amount' in d: o.minimum_bid_amount = d['minimum_bid_amount'] if 'minimum_offer_amount' in d: o.minimum_offer_amount = d['minimum_offer_amount'] if 'offer' in d: o.offer = d['offer'] if 'on_off_shore' in d: o.on_off_shore = d['on_off_shore'] if 'period' in d: o.period = d['period'] 
if 'profile' in d: o.profile = d['profile'] if 'quote_type' in d: o.quote_type = d['quote_type'] if 'rate_method' in d: o.rate_method = d['rate_method'] if 'rate_source_code' in d: o.rate_source_code = d['rate_source_code'] if 'rate_type' in d: o.rate_type = d['rate_type'] if 'segment_id' in d: o.segment_id = d['segment_id'] if 'sp_bid' in d: o.sp_bid = d['sp_bid'] if 'sp_mid' in d: o.sp_mid = d['sp_mid'] if 'sp_offer' in d: o.sp_offer = d['sp_offer'] if 'start_time' in d: o.start_time = d['start_time'] if 'sub_inst' in d: o.sub_inst = d['sub_inst'] if 'threshold_time' in d: o.threshold_time = d['threshold_time'] if 'valid_time' in d: o.valid_time = d['valid_time'] if 'zone_expiry_time' in d: o.zone_expiry_time = d['zone_expiry_time'] if 'zone_generate_time' in d: o.zone_generate_time = d['zone_generate_time'] if 'zone_gmt_create' in d: o.zone_gmt_create = d['zone_gmt_create'] if 'zone_gmt_modified' in d: o.zone_gmt_modified = d['zone_gmt_modified'] if 'zone_start_time' in d: o.zone_start_time = d['zone_start_time'] if 'zone_threshold_time' in d: o.zone_threshold_time = d['zone_threshold_time'] if 'zone_valid_time' in d: o.zone_valid_time = d['zone_valid_time'] return o
[ [ [ 53, 71 ] ], [ [ 124, 125 ] ], [ [ 134, 148 ], [ 21214, 21228 ] ] ]
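The generated accessors above support a straightforward dict round-trip; a small illustrative check (the field values are made up):

payload = {'bid': '6.8831', 'offer': '6.8852', 'currency_pair': 'USD/CNY'}

vo = ExSourceRateVO.from_alipay_dict(payload)
assert vo.bid == '6.8831'

# to_alipay_dict() only emits fields that are set, so the round-trip
# reproduces exactly the keys provided above.
assert vo.to_alipay_dict() == payload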
#!/usr/bin/env python3 """ Created on 15 Oct 2020 @author: Bruno Beloff (bruno.beloff@southcoastscience.com) DESCRIPTION The disk_volume utility is used to determine whether a volume is mounted and, if so, the free and used space on the volume. Space is given in blocks. The volume is identified by its mount point. If the "is-available" field in the report is false, this indicates that an OS error occurred when an attempt was made to access the volume. This error can occur if a removable medium failed, or was disconnected without being unmounted. The disk_volume utility is normally included in the commands accepted by the control_receiver utility. SYNOPSIS disk_volume.py [-v] MOUNTED_ON EXAMPLES ./disk_volume.py -v /srv/SCS_logging DOCUMENT EXAMPLE {"filesystem": "/dev/mmcblk0p1", "size": 15384184, "used": 319296, "free": 14892092, "mounted-on": "/srv/SCS_logging", "is-available": false} SEE ALSO scs_dev/disk_usage """ import sys from scs_core.data.json import JSONify from scs_dev.cmd.cmd_disk_volume import CmdDiskVolume from scs_host.sys.host import Host # -------------------------------------------------------------------------------------------------------------------- if __name__ == '__main__': # ---------------------------------------------------------------------------------------------------------------- # cmd... cmd = CmdDiskVolume() if not cmd.is_valid(): cmd.print_help(sys.stderr) exit(2) if cmd.verbose: print("disk_volume: %s" % cmd, file=sys.stderr) # ---------------------------------------------------------------------------------------------------------------- # run... volume = Host.disk_volume(cmd.mounted_on) print(JSONify.dumps(volume)) # ---------------------------------------------------------------------------------------------------------------- # end... if cmd.verbose and volume: print("disk_volume: percent used: %s" % volume.percent_used(), file=sys.stderr)
[ [ [ 949, 952 ], [ 1444, 1447 ], [ 1537, 1540 ], [ 2005, 2008 ] ], [ [ 985, 992 ], [ 1740, 1747 ] ], [ [ 1034, 1047 ], [ 1377, 1390 ] ], [ [ 1079, 1083 ], [ 1697, 1701 ] ], [ [ 1371, 1374 ], [ 1405, 1408 ], [ 1429, 1432 ], [ 1480, 1483 ], [ 1527, 1530 ], [ 1714, 1717 ], [ 1905, 1908 ] ], [ [ 1688, 1694 ], [ 1754, 1760 ], [ 1921, 1927 ], [ 1977, 1983 ] ] ]
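The Host.disk_volume call in the row above comes from scs_host and its body is not shown here. As a hedged sketch, comparable block counts can be read on POSIX with os.statvfs; the helper name disk_volume_sketch is hypothetical, not part of scs_host.

import os

def disk_volume_sketch(mounted_on):
    # Hypothetical stand-in for Host.disk_volume; the real scs_host
    # implementation is not shown in this file.
    try:
        st = os.statvfs(mounted_on)
    except OSError:
        # Mirrors the "is-available": false case described in the docstring.
        return {"mounted-on": mounted_on, "is-available": False}
    return {
        "size": st.f_blocks,
        "free": st.f_bavail,
        "used": st.f_blocks - st.f_bfree,
        "mounted-on": mounted_on,
        "is-available": True,
    }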
# Copyright 2013 IBM Corp. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import os import mock from nova import exception from nova.openstack.common import processutils from nova import test from nova import utils from nova.virt import images class QemuTestCase(test.NoDBTestCase): def test_qemu_info_with_bad_path(self): self.assertRaises(exception.InvalidDiskInfo, images.qemu_img_info, '/path/that/does/not/exist') @mock.patch.object(os.path, 'exists', return_value=True) def test_qemu_info_with_errors(self, path_exists): self.assertRaises(processutils.ProcessExecutionError, images.qemu_img_info, '/fake/path') @mock.patch.object(os.path, 'exists', return_value=True) @mock.patch.object(utils, 'execute', return_value=('stdout', None)) def test_qemu_info_with_no_errors(self, path_exists, utils_execute): image_info = images.qemu_img_info('/fake/path') self.assertTrue(image_info) self.assertTrue(str(image_info))
[ [ [ 612, 614 ], [ 1042, 1044 ], [ 1309, 1311 ] ], [ [ 623, 627 ], [ 1024, 1028 ], [ 1291, 1295 ], [ 1352, 1356 ] ], [ [ 646, 655 ], [ 888, 897 ] ], [ [ 690, 702 ], [ 1161, 1173 ] ], [ [ 720, 724 ], [ 798, 802 ] ], [ [ 742, 747 ], [ 1370, 1375 ] ], [ [ 770, 776 ], [ 941, 947 ], [ 1223, 1229 ], [ 1574, 1580 ] ], [ [ 785, 797 ] ] ]
import cv2 import argparse import numpy as np def process_edge_image(input, output): print('edge', input, output) img = cv2.imread(input) img = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY) img = cv2.GaussianBlur(img, (3, 3), 0) ret, thr = cv2.threshold(img, 0, 255, cv2.THRESH_OTSU) edges = cv2.Canny(img, ret * 0.5, ret) cv2.imwrite(output, 255 - edges) if __name__ == '__main__': parser = argparse.ArgumentParser() parser.add_argument('input', help='input image') parser.add_argument('output', help='output image') args = parser.parse_args() process_edge_image(args.input, args.output)
[ [ [ 7, 10 ], [ 130, 133 ], [ 158, 161 ], [ 176, 179 ], [ 206, 209 ], [ 255, 258 ], [ 282, 285 ], [ 312, 315 ], [ 348, 351 ] ], [ [ 19, 27 ], [ 422, 430 ] ], [ [ 35, 46 ] ], [ [ 52, 70 ], [ 592, 610 ] ], [ [ 413, 419 ], [ 452, 458 ], [ 505, 511 ], [ 567, 573 ] ], [ [ 560, 564 ], [ 611, 615 ], [ 623, 627 ] ] ]
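Note the threshold chaining in the row above: cv2.threshold with THRESH_OTSU returns the computed threshold as ret, which process_edge_image reuses as the upper Canny hysteresis bound, with ret * 0.5 as the lower bound, before writing the inverted edge map. A usage sketch; the file names are placeholders.

process_edge_image('photo.png', 'edges.png')  # file names are placeholders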
"""Module for specifying the environmental variables.""" import os DIRNAME = os.path.dirname(__file__) DB_NAME = "items.csv" DB_PATH = os.path.join(DIRNAME, "data", DB_NAME) TEST_DB = "test_items.csv" TEST_DB_PATH = os.path.join(DIRNAME, "data", TEST_DB) INSTRUCTIONS = ( "\nValitse toiminto" "\n (1) lisää" "\n (2) listaa" "\n (3) poista" "\n (4) hae tarkemmat tiedot id:llä" "\n (5) hae vinkkejä hakusanalla" "\n (9) poista kaikki vinkit" "\n (0) lopeta\n") ADD_MENU = ( "\nMitä lisätään?" "\n (1) kirja" "\n (2) video" "\n (3) blogi" "\n (4) takaisin valikkoon" "\n (0) lopeta") CMD_PROMPTS = { "book": [("Kirjailijan/kirjailijoiden nimet: ", "Kirjailijan nimi on lisättävä!"), ("Kirjan nimi: ", "Kirjan nimi on lisättävä!"), ("Julkaisuvuosi: ", "Julkaisuvuosi ei ole kelvollinen!") ], "video": [("Videon tekijä: ", "Videon tekijä on lisättävä!"), ("Videon nimi: ", "Videon nimi on lisättävä!"), ("Videon osoite: ", "Videon osoite on lisättävä!"), ("Videon julkaisupäivä: ", "Videon julkaisupäivä on lisättävä!") ], "blog": [("Blogin kirjoittaja: ", "Blogin kirjoittaja on lisättävä!"), ("Blogin nimi: ", "Blogin nimi on lisättävä!"), ("Postaus: ", "Postauksen nimi on lisättävä!"), ("Blogin osoite: ", "Blogin osoite on lisättävä!"), ("Postauksen julkaisupäivä: ", "Postauksen julkaisupäivä on lisättävä!") ], "delete": [("\nAnna poistettavan teoksen id: ", "Teoksen id on annettava!") ], "search":[("Syötä hakusana: ", "Kirjoita hakusana!")], "details": [("\nAnna id: ", "ID on annettava!")], "clear": [("\nPoistetaan kaikki vinkit.", "Ai etkö haluakaan poistaa?")] } OUTPUTS = { "already in list": "\nLukuvinkki on jo tallennettu aiemmin!", "added": "\nUusi lukuvinkki lisätty.", "empty list": "Sovellukseen ei ole tallennettu vinkkejä :(", "choice": "\nValinta: ", "list": "\nTallennetut vinkit:\n", "item not found": "Teosta ei löytynyt.", "confirm": "\nOletko varma (K/E)? ", "deleting": "Poistetaan vinkki...", "not deleted": "Vinkkiä ei poistettu.", "unknown command": "Komentoa ei löytynyt, yritä uudelleen.", "quit": "Kiitti & moi!", "creator": "tekijä", "author": "kirjailija", "id": "id", "name": "nimi", "details results": "\nVinkin tarkemmat tiedot:\n", "search results": "\nHakusanalla löytyvät vinkit:\n", "search help": "\nVoit etsiä vinkkiä tekijän ja nimen perusteella syöttämällä hakusanan", "broken input": "Syötteessäsi on ongelma.", "confirm_clearing": "\nPoistetaanko ihan kaikki? (K/E) ", "clearing": "Poistetaan kaikkia vinkkejä. Hyvästi!", "not cleared": "Vinkkejä ei poistettu." } TITLE = "\nLUKUVINKKIKIRJASTO" HEADERS = ['type', 'id', 'creator', 'title'] YES = 'K' NO = 'E'
[ [ [ 64, 66 ], [ 78, 80 ], [ 137, 139 ], [ 219, 221 ] ], [ [ 68, 75 ], [ 150, 157 ], [ 232, 239 ] ], [ [ 105, 112 ], [ 167, 174 ] ], [ [ 127, 134 ] ], [ [ 177, 184 ], [ 249, 256 ] ], [ [ 204, 216 ] ], [ [ 259, 271 ] ], [ [ 497, 505 ] ], [ [ 644, 655 ] ], [ [ 1839, 1846 ] ], [ [ 2867, 2872 ] ], [ [ 2898, 2905 ] ], [ [ 2943, 2946 ] ], [ [ 2953, 2955 ] ] ]
#!/usr/bin/env python import asyncio from collections import deque import logging import time from typing import List, Dict, Optional, Tuple, Set, Deque from hummingbot.client.command import __all__ as commands from hummingbot.core.clock import Clock from hummingbot.core.data_type.order_book_tracker import OrderBookTrackerDataSourceType from hummingbot.core.data_type.user_stream_tracker import UserStreamTrackerDataSourceType from hummingbot.logger import HummingbotLogger from hummingbot.logger.application_warning import ApplicationWarning from hummingbot.market.binance.binance_market import BinanceMarket from hummingbot.market.bittrex.bittrex_market import BittrexMarket from hummingbot.market.kucoin.kucoin_market import KucoinMarket from hummingbot.market.coinbase_pro.coinbase_pro_market import CoinbaseProMarket from hummingbot.market.huobi.huobi_market import HuobiMarket from hummingbot.market.liquid.liquid_market import LiquidMarket from hummingbot.market.market_base import MarketBase from hummingbot.market.paper_trade import create_paper_trade_market from hummingbot.market.radar_relay.radar_relay_market import RadarRelayMarket from hummingbot.market.bamboo_relay.bamboo_relay_market import BambooRelayMarket from hummingbot.market.dolomite.dolomite_market import DolomiteMarket from hummingbot.market.loopring.loopring_market import LoopringMarket from hummingbot.market.bitcoin_com.bitcoin_com_market import BitcoinComMarket from hummingbot.market.kraken.kraken_market import KrakenMarket from hummingbot.model.sql_connection_manager import SQLConnectionManager from hummingbot.wallet.ethereum.ethereum_chain import EthereumChain from hummingbot.wallet.ethereum.web3_wallet import Web3Wallet from hummingbot.client.ui.keybindings import load_key_bindings from hummingbot.client.ui.parser import load_parser, ThrowingArgumentParser from hummingbot.client.ui.hummingbot_cli import HummingbotCLI from hummingbot.client.ui.completer import load_completer from hummingbot.client.errors import InvalidCommandError, ArgumentParserError from hummingbot.client.config.global_config_map import global_config_map, using_wallet from hummingbot.client.config.config_helpers import get_erc20_token_addresses, get_strategy_config_map from hummingbot.strategy.strategy_base import StrategyBase from hummingbot.strategy.cross_exchange_market_making import CrossExchangeMarketPair from hummingbot.core.utils.kill_switch import KillSwitch from hummingbot.data_feed.data_feed_base import DataFeedBase from hummingbot.notifier.notifier_base import NotifierBase from hummingbot.notifier.telegram_notifier import TelegramNotifier from hummingbot.strategy.market_trading_pair_tuple import MarketTradingPairTuple from hummingbot.market.markets_recorder import MarketsRecorder from hummingbot.client.config.security import Security s_logger = None MARKET_CLASSES = { "bamboo_relay": BambooRelayMarket, "binance": BinanceMarket, "coinbase_pro": CoinbaseProMarket, "huobi": HuobiMarket, "liquid": LiquidMarket, "radar_relay": RadarRelayMarket, "dolomite": DolomiteMarket, "loopring": LoopringMarket, "bittrex": BittrexMarket, "kucoin": KucoinMarket, "bitcoin_com": BitcoinComMarket, "kraken": KrakenMarket, } class HummingbotApplication(*commands): KILL_TIMEOUT = 10.0 APP_WARNING_EXPIRY_DURATION = 3600.0 APP_WARNING_STATUS_LIMIT = 6 _main_app: Optional["HummingbotApplication"] = None @classmethod def logger(cls) -> HummingbotLogger: global s_logger if s_logger is None: s_logger = logging.getLogger(__name__) return s_logger @classmethod def 
main_application(cls) -> "HummingbotApplication": if cls._main_app is None: cls._main_app = HummingbotApplication() return cls._main_app def __init__(self): self.ev_loop: asyncio.BaseEventLoop = asyncio.get_event_loop() self.parser: ThrowingArgumentParser = load_parser(self) self.app = HummingbotCLI( input_handler=self._handle_command, bindings=load_key_bindings(self), completer=load_completer(self) ) self.markets: Dict[str, MarketBase] = {} self.wallet: Optional[Web3Wallet] = None # strategy file name and name get assigned value after import or create command self.strategy_file_name: str = None self.strategy_name: str = None self.strategy_task: Optional[asyncio.Task] = None self.strategy: Optional[StrategyBase] = None self.market_pair: Optional[CrossExchangeMarketPair] = None self.market_trading_pair_tuples: List[MarketTradingPairTuple] = [] self.clock: Optional[Clock] = None self.init_time: int = int(time.time() * 1e3) self.start_time: Optional[int] = None self.assets: Optional[Set[str]] = set() self.starting_balances = {} self.placeholder_mode = False self.log_queue_listener: Optional[logging.handlers.QueueListener] = None self.data_feed: Optional[DataFeedBase] = None self.notifiers: List[NotifierBase] = [] self.kill_switch: Optional[KillSwitch] = None self._app_warnings: Deque[ApplicationWarning] = deque() self._trading_required: bool = True self.trade_fill_db: SQLConnectionManager = SQLConnectionManager.get_trade_fills_instance() self.markets_recorder: Optional[MarketsRecorder] = None @property def strategy_config_map(self): if self.strategy_name is not None: return get_strategy_config_map(self.strategy_name) return None def _notify(self, msg: str): self.app.log(msg) for notifier in self.notifiers: notifier.add_msg_to_queue(msg) def _handle_command(self, raw_command: str): raw_command = raw_command.lower().strip() try: if self.placeholder_mode: pass else: args = self.parser.parse_args(args=raw_command.split()) kwargs = vars(args) if not hasattr(args, "func"): return f = args.func del kwargs["func"] f(**kwargs) except InvalidCommandError as e: self._notify("Invalid command: %s" % (str(e),)) except ArgumentParserError as e: self._notify(str(e)) except NotImplementedError: self._notify("Command not yet implemented. 
This feature is currently under development.") except Exception as e: self.logger().error(e, exc_info=True) async def _cancel_outstanding_orders(self) -> bool: success = True try: on_chain_cancel_on_exit = global_config_map.get("on_chain_cancel_on_exit").value bamboo_relay_use_coordinator = global_config_map.get("bamboo_relay_use_coordinator").value kill_timeout: float = self.KILL_TIMEOUT self._notify("Cancelling outstanding orders...") for market_name, market in self.markets.items(): # By default, the bot does not cancel orders on exit on Radar Relay or Bamboo Relay, # since all open orders will expire in a short window if not on_chain_cancel_on_exit and (market_name == "radar_relay" or (market_name == "bamboo_relay" and not bamboo_relay_use_coordinator)): continue cancellation_results = await market.cancel_all(kill_timeout) uncancelled = list(filter(lambda cr: cr.success is False, cancellation_results)) if len(uncancelled) > 0: success = False uncancelled_order_ids = list(map(lambda cr: cr.order_id, uncancelled)) self._notify("\nFailed to cancel the following orders on %s:\n%s" % ( market_name, '\n'.join(uncancelled_order_ids) )) except Exception: self.logger().error(f"Error canceling outstanding orders.", exc_info=True) success = False if success: self._notify("All outstanding orders cancelled.") return success async def run(self): await self.app.run() def add_application_warning(self, app_warning: ApplicationWarning): self._expire_old_application_warnings() self._app_warnings.append(app_warning) def clear_application_warning(self): self._app_warnings.clear() @staticmethod def _initialize_market_assets(market_name: str, trading_pairs: List[str]) -> List[Tuple[str, str]]: market_class: MarketBase = MARKET_CLASSES.get(market_name, MarketBase) market_trading_pairs: List[Tuple[str, str]] = [market_class.split_trading_pair(trading_pair) for trading_pair in trading_pairs] return market_trading_pairs @staticmethod def _convert_to_exchange_trading_pair(market_name: str, hb_trading_pair: List[str]) -> List[str]: market_class: MarketBase = MARKET_CLASSES.get(market_name, MarketBase) return [market_class.convert_to_exchange_trading_pair(trading_pair) for trading_pair in hb_trading_pair] def _initialize_wallet(self, token_trading_pairs: List[str]): if not using_wallet(): return ethereum_wallet = global_config_map.get("ethereum_wallet").value private_key = Security._private_keys[ethereum_wallet] ethereum_rpc_url = global_config_map.get("ethereum_rpc_url").value erc20_token_addresses = get_erc20_token_addresses(token_trading_pairs) chain_name: str = global_config_map.get("ethereum_chain_name").value self.wallet: Web3Wallet = Web3Wallet( private_key=private_key, backend_urls=[ethereum_rpc_url], erc20_token_addresses=erc20_token_addresses, chain=getattr(EthereumChain, chain_name), ) def _initialize_markets(self, market_names: List[Tuple[str, List[str]]]): ethereum_rpc_url = global_config_map.get("ethereum_rpc_url").value # aggregate trading_pairs if there are duplicate markets market_trading_pairs_map = {} for market_name, trading_pairs in market_names: if market_name not in market_trading_pairs_map: market_trading_pairs_map[market_name] = [] market_class: MarketBase = MARKET_CLASSES.get(market_name, MarketBase) for trading_pair in trading_pairs: exchange_trading_pair: str = market_class.convert_to_exchange_trading_pair(trading_pair) market_trading_pairs_map[market_name].append(exchange_trading_pair) for market_name, trading_pairs in market_trading_pairs_map.items(): if 
global_config_map.get("paper_trade_enabled").value: try: market = create_paper_trade_market(market_name, trading_pairs) except Exception: raise paper_trade_account_balance = global_config_map.get("paper_trade_account_balance").value for asset, balance in paper_trade_account_balance: market.set_balance(asset, balance) elif market_name == "binance": binance_api_key = global_config_map.get("binance_api_key").value binance_api_secret = global_config_map.get("binance_api_secret").value market = BinanceMarket( binance_api_key, binance_api_secret, order_book_tracker_data_source_type=OrderBookTrackerDataSourceType.EXCHANGE_API, trading_pairs=trading_pairs, trading_required=self._trading_required, ) elif market_name == "radar_relay": assert self.wallet is not None market = RadarRelayMarket( wallet=self.wallet, ethereum_rpc_url=ethereum_rpc_url, trading_pairs=trading_pairs, trading_required=self._trading_required, ) elif market_name == "bamboo_relay": assert self.wallet is not None use_coordinator = global_config_map.get("bamboo_relay_use_coordinator").value pre_emptive_soft_cancels = global_config_map.get("bamboo_relay_pre_emptive_soft_cancels").value market = BambooRelayMarket( wallet=self.wallet, ethereum_rpc_url=ethereum_rpc_url, trading_pairs=trading_pairs, use_coordinator=use_coordinator, pre_emptive_soft_cancels=pre_emptive_soft_cancels, trading_required=self._trading_required, ) elif market_name == "coinbase_pro": coinbase_pro_api_key = global_config_map.get("coinbase_pro_api_key").value coinbase_pro_secret_key = global_config_map.get("coinbase_pro_secret_key").value coinbase_pro_passphrase = global_config_map.get("coinbase_pro_passphrase").value market = CoinbaseProMarket(coinbase_pro_api_key, coinbase_pro_secret_key, coinbase_pro_passphrase, trading_pairs=trading_pairs, trading_required=self._trading_required) elif market_name == "huobi": huobi_api_key = global_config_map.get("huobi_api_key").value huobi_secret_key = global_config_map.get("huobi_secret_key").value market = HuobiMarket(huobi_api_key, huobi_secret_key, order_book_tracker_data_source_type=OrderBookTrackerDataSourceType.EXCHANGE_API, trading_pairs=trading_pairs, trading_required=self._trading_required) elif market_name == "liquid": liquid_api_key = global_config_map.get("liquid_api_key").value liquid_secret_key = global_config_map.get("liquid_secret_key").value market = LiquidMarket(liquid_api_key, liquid_secret_key, order_book_tracker_data_source_type=OrderBookTrackerDataSourceType.EXCHANGE_API, user_stream_tracker_data_source_type=UserStreamTrackerDataSourceType.EXCHANGE_API, trading_pairs=trading_pairs, trading_required=self._trading_required) elif market_name == "dolomite": assert self.wallet is not None is_test_net: bool = global_config_map.get("ethereum_chain_name").value == "DOLOMITE_TEST" market = DolomiteMarket( wallet=self.wallet, ethereum_rpc_url=ethereum_rpc_url, order_book_tracker_data_source_type=OrderBookTrackerDataSourceType.EXCHANGE_API, trading_pairs=trading_pairs, isTestNet=is_test_net, trading_required=self._trading_required, ) elif market_name == "loopring": loopring_accountid : int = global_config_map.get("loopring_accountid").value loopring_exchangeid : int = global_config_map.get("loopring_exchangeid").value loopring_private_key : str = global_config_map.get("loopring_private_key").value loopring_api_key : str = global_config_map.get("loopring_api_key").value market = LoopringMarket( loopring_accountid=loopring_accountid, loopring_exchangeid=loopring_exchangeid, 
loopring_private_key=loopring_private_key, loopring_api_key=loopring_api_key, trading_pairs=trading_pairs, trading_required=self._trading_required, order_book_tracker_data_source_type=OrderBookTrackerDataSourceType.EXCHANGE_API ) elif market_name == "bittrex": bittrex_api_key = global_config_map.get("bittrex_api_key").value bittrex_secret_key = global_config_map.get("bittrex_secret_key").value market = BittrexMarket(bittrex_api_key, bittrex_secret_key, order_book_tracker_data_source_type=OrderBookTrackerDataSourceType.EXCHANGE_API, trading_pairs=trading_pairs, trading_required=self._trading_required) elif market_name == "kucoin": kucoin_api_key = global_config_map.get("kucoin_api_key").value kucoin_secret_key = global_config_map.get("kucoin_secret_key").value kucoin_passphrase = global_config_map.get("kucoin_passphrase").value market = KucoinMarket(kucoin_api_key, kucoin_passphrase, kucoin_secret_key, order_book_tracker_data_source_type=OrderBookTrackerDataSourceType.EXCHANGE_API, trading_pairs=trading_pairs, trading_required=self._trading_required) elif market_name == "bitcoin_com": bitcoin_com_api_key = global_config_map.get("bitcoin_com_api_key").value bitcoin_com_secret_key = global_config_map.get("bitcoin_com_secret_key").value market = BitcoinComMarket(bitcoin_com_api_key, bitcoin_com_secret_key, order_book_tracker_data_source_type=OrderBookTrackerDataSourceType.EXCHANGE_API, trading_pairs=trading_pairs, trading_required=self._trading_required) elif market_name == "kraken": kraken_api_key = global_config_map.get("kraken_api_key").value kraken_secret_key = global_config_map.get("kraken_secret_key").value market = KrakenMarket(kraken_api_key, kraken_secret_key, order_book_tracker_data_source_type=OrderBookTrackerDataSourceType.EXCHANGE_API, trading_pairs=trading_pairs, trading_required=self._trading_required) else: raise ValueError(f"Market name {market_name} is invalid.") self.markets[market_name]: MarketBase = market self.markets_recorder = MarketsRecorder( self.trade_fill_db, list(self.markets.values()), self.strategy_file_name, self.strategy_name, ) self.markets_recorder.start() def _initialize_notifiers(self): if global_config_map.get("telegram_enabled").value: # TODO: refactor to use single instance if not any([isinstance(n, TelegramNotifier) for n in self.notifiers]): self.notifiers.append( TelegramNotifier( token=global_config_map["telegram_token"].value, chat_id=global_config_map["telegram_chat_id"].value, hb=self, ) ) for notifier in self.notifiers: notifier.start()
[ [ [ 30, 37 ], [ 3904, 3911 ], [ 3880, 3887 ], [ 4457, 4464 ] ], [ [ 62, 67 ], [ 5231, 5236 ] ], [ [ 75, 82 ], [ 3590, 3597 ], [ 4980, 4987 ] ], [ [ 90, 94 ], [ 4751, 4755 ] ], [ [ 114, 118 ], [ 4639, 4643 ], [ 5097, 5101 ], [ 8639, 8643 ], [ 8625, 8629 ], [ 8771, 8775 ], [ 9023, 9027 ], [ 9009, 9013 ], [ 9281, 9285 ], [ 10009, 10013 ], [ 10025, 10029 ] ], [ [ 120, 124 ], [ 4173, 4177 ] ], [ [ 126, 134 ], [ 3414, 3422 ], [ 4221, 4229 ], [ 4448, 4456 ], [ 4501, 4509 ], [ 4557, 4565 ], [ 4693, 4701 ], [ 4795, 4803 ], [ 4837, 4845 ], [ 4971, 4979 ], [ 5043, 5051 ], [ 5147, 5155 ], [ 5414, 5422 ] ], [ [ 136, 141 ], [ 8644, 8649 ], [ 8776, 8781 ], [ 10014, 10019 ] ], [ [ 143, 146 ], [ 4846, 4849 ] ], [ [ 148, 153 ], [ 5203, 5208 ] ], [ [ 193, 212 ], [ 3289, 3297 ] ], [ [ 247, 252 ], [ 4702, 4707 ] ], [ [ 310, 340 ], [ 11632, 11662 ], [ 13932, 13962 ], [ 14513, 14543 ], [ 15214, 15244 ], [ 16293, 16323 ], [ 16756, 16786 ], [ 17482, 17512 ], [ 18111, 18141 ], [ 18701, 18731 ] ], [ [ 399, 430 ], [ 14633, 14664 ] ], [ [ 461, 477 ], [ 3496, 3512 ] ], [ [ 528, 546 ], [ 5209, 5227 ], [ 8346, 8364 ] ], [ [ 600, 613 ], [ 2924, 2937 ], [ 11484, 11497 ] ], [ [ 667, 680 ], [ 3148, 3161 ], [ 16591, 16604 ] ], [ [ 732, 744 ], [ 3177, 3189 ], [ 17265, 17277 ] ], [ [ 808, 825 ], [ 2959, 2976 ], [ 13219, 13236 ] ], [ [ 875, 886 ], [ 2991, 3002 ], [ 13777, 13788 ] ], [ [ 938, 950 ], [ 3018, 3030 ], [ 14353, 14365 ] ], [ [ 993, 1003 ], [ 4183, 4193 ], [ 8729, 8739 ], [ 8684, 8694 ], [ 9101, 9111 ], [ 9056, 9066 ], [ 10464, 10474 ], [ 10419, 10429 ], [ 19025, 19035 ] ], [ [ 1046, 1071 ], [ 10906, 10931 ] ], [ [ 1133, 1149 ], [ 3051, 3067 ], [ 11925, 11941 ] ], [ [ 1213, 1230 ], [ 2890, 2907 ], [ 12493, 12510 ] ], [ [ 1286, 1300 ], [ 3085, 3099 ], [ 15047, 15061 ] ], [ [ 1356, 1370 ], [ 3117, 3131 ], [ 15873, 15887 ] ], [ [ 1432, 1448 ], [ 3210, 3226 ], [ 17929, 17945 ] ], [ [ 1500, 1512 ], [ 3242, 3254 ], [ 18541, 18553 ] ], [ [ 1565, 1585 ], [ 5335, 5355 ], [ 5312, 5332 ] ], [ [ 1641, 1654 ], [ 9922, 9935 ] ], [ [ 1706, 1716 ], [ 4230, 4240 ], [ 9745, 9755 ], [ 9732, 9742 ] ], [ [ 1762, 1779 ], [ 4084, 4101 ] ], [ [ 1820, 1831 ], [ 3975, 3986 ] ], [ [ 1833, 1855 ], [ 3950, 3972 ] ], [ [ 1904, 1917 ], [ 4012, 4025 ] ], [ [ 1961, 1975 ], [ 4119, 4133 ] ], [ [ 2013, 2032 ], [ 6245, 6264 ] ], [ [ 2034, 2053 ], [ 6346, 6365 ] ], [ [ 2109, 2126 ], [ 6755, 6772 ], [ 6853, 6870 ], [ 9370, 9387 ], [ 9506, 9523 ], [ 9660, 9677 ], [ 10066, 10083 ], [ 10804, 10821 ], [ 11066, 11083 ], [ 11325, 11342 ], [ 11409, 11426 ], [ 12296, 12313 ], [ 12399, 12416 ], [ 12947, 12964 ], [ 13041, 13058 ], [ 13138, 13155 ], [ 13624, 13641 ], [ 13704, 13721 ], [ 14196, 14213 ], [ 14278, 14295 ], [ 14952, 14969 ], [ 15517, 15534 ], [ 15611, 15628 ], [ 15707, 15724 ], [ 15800, 15817 ], [ 16432, 16449 ], [ 16516, 16533 ], [ 17024, 17041 ], [ 17106, 17123 ], [ 17191, 17208 ], [ 17758, 17775 ], [ 17850, 17867 ], [ 18385, 18402 ], [ 18467, 18484 ], [ 19334, 19351 ], [ 19625, 19642 ], [ 19700, 19717 ] ], [ [ 2128, 2140 ], [ 9308, 9320 ] ], [ [ 2193, 2218 ], [ 9586, 9611 ] ], [ [ 2220, 2243 ], [ 5559, 5582 ] ], [ [ 2290, 2302 ], [ 4510, 4522 ] ], [ [ 2364, 2387 ], [ 4566, 4589 ] ], [ [ 2435, 2445 ], [ 5156, 5166 ] ], [ [ 2494, 2506 ], [ 5052, 5064 ] ], [ [ 2553, 2565 ], [ 5102, 5114 ] ], [ [ 2616, 2632 ], [ 19473, 19489 ], [ 19577, 19593 ] ], [ [ 2691, 2713 ], [ 4644, 4666 ] ], [ [ 2761, 2776 ], [ 5423, 5438 ], [ 19078, 19093 ] ], [ [ 2823, 2831 ], [ 9439, 9447 ] ], [ [ 2834, 2842 ], [ 3549, 3557 ] ], [ [ 2851, 2865 ], [ 8697, 8711 ], [ 9069, 9083 
], [ 10432, 10446 ] ], [ [ 3266, 3287 ], [ 3780, 3801 ] ], [ [ 3579, 3587 ], [ 3633, 3641 ] ] ]
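Both _initialize_market_assets and _initialize_markets in the rows above resolve a connector class via MARKET_CLASSES.get(market_name, MarketBase), a registry lookup that falls back to the base class instead of raising KeyError. A minimal self-contained sketch of that dispatch pattern; the classes here are stand-ins, not hummingbot connectors.

class BaseMarket:
    # Stand-in for MarketBase; real connectors override this.
    @staticmethod
    def split_trading_pair(trading_pair):
        return tuple(trading_pair.split('-'))

class DemoMarket(BaseMarket):
    pass

REGISTRY = {'demo': DemoMarket}  # stand-in for MARKET_CLASSES

# Unknown names fall back to the base class instead of raising KeyError.
market_class = REGISTRY.get('unlisted_exchange', BaseMarket)
assert market_class.split_trading_pair('ETH-USDT') == ('ETH', 'USDT')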
# Copyright (C) GRyCAP - I3M - UPV # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """Module with all the classes and methods related with the binary supervisor.""" import subprocess import sys import uuid from faassupervisor.faas import DefaultSupervisor from faassupervisor.logger import get_logger from faassupervisor.utils import SysUtils, FileUtils, StrUtils class BinarySupervisor(DefaultSupervisor): """Supervisor class used in the Binary environment.""" _SCRIPT_FILE_NAME = 'script.sh' _OSCAR_SCRIPT_PATH = '/oscar/config/script.sh' def __init__(self, event_type): self.output = '' self.event_type = event_type get_logger().info('SUPERVISOR: Initializing Binary supervisor') def _get_script_path(self): script_path = None if SysUtils.is_var_in_env('SCRIPT'): script_path = SysUtils.join_paths(SysUtils.get_env_var("TMP_INPUT_DIR"), self._SCRIPT_FILE_NAME) script_content = StrUtils.base64_to_str(SysUtils.get_env_var('SCRIPT')) FileUtils.create_file_with_content(script_path, script_content) get_logger().info("Script file created in '%s'", script_path) elif FileUtils.is_file(self._OSCAR_SCRIPT_PATH): script_path = self._OSCAR_SCRIPT_PATH get_logger().info("Script file found in '%s'", script_path) return script_path def execute_function(self): script_path = self._get_script_path() if script_path: try: pyinstaller_library_path = SysUtils.get_env_var('LD_LIBRARY_PATH') orig_library_path = SysUtils.get_env_var('LD_LIBRARY_PATH_ORIG') if orig_library_path: SysUtils.set_env_var('LD_LIBRARY_PATH', orig_library_path) else: SysUtils.delete_env_var('LD_LIBRARY_PATH') proc = subprocess.Popen(['/bin/sh', script_path], stdout=subprocess.PIPE, stderr=subprocess.STDOUT, encoding='utf-8', errors='ignore') SysUtils.set_env_var('LD_LIBRARY_PATH', pyinstaller_library_path) get_logger().debug("CONTAINER OUTPUT:\n %s", self.output) for line in proc.stdout: get_logger().debug(line.strip()) self.output = self.output + line except subprocess.CalledProcessError as cpe: # Exit with user script return code if an # error occurs (Kubernetes handles the error) get_logger().error(cpe.output.decode(encoding='utf-8', errors='ignore')) sys.exit(cpe.returncode) else: get_logger().error('No user script found!') def create_response(self): if self.event_type and self.event_type == 'UNKNOWN': # Check if there are files in $TMP_OUTPUT_DIR output_dir = SysUtils.get_env_var('TMP_OUTPUT_DIR') files = FileUtils.get_all_files_in_dir(output_dir) if len(files) == 1: # Return the file encoded in base64 file_content = FileUtils.read_file(files[0], 'rb') return StrUtils.bytes_to_base64str(file_content) if len(files) > 1: # Generate a zip with all files and return it encoded in base64 zip_path = SysUtils.join_paths(output_dir, str(uuid.uuid4())) FileUtils.zip_file_list(files, zip_path) file_content = FileUtils.read_file(zip_path, 'rb') return StrUtils.bytes_to_base64str(file_content) return self.output def create_error_response(self): pass
[ [ [ 667, 677 ], [ 2431, 2441 ], [ 2529, 2539 ], [ 2601, 2611 ], [ 3073, 3083 ] ], [ [ 685, 688 ], [ 3336, 3339 ] ], [ [ 696, 700 ], [ 4099, 4103 ] ], [ [ 733, 750 ], [ 884, 901 ] ], [ [ 785, 795 ], [ 1158, 1168 ], [ 1654, 1664 ], [ 1835, 1845 ], [ 2849, 2859 ], [ 2968, 2978 ], [ 3247, 3257 ], [ 3387, 3397 ] ], [ [ 829, 837 ], [ 1293, 1301 ], [ 1353, 1361 ], [ 1373, 1381 ], [ 1534, 1542 ], [ 2085, 2093 ], [ 2161, 2169 ], [ 2264, 2272 ], [ 2365, 2373 ], [ 2767, 2775 ], [ 3607, 3615 ], [ 4063, 4071 ] ], [ [ 839, 848 ], [ 1578, 1587 ], [ 1729, 1738 ], [ 3666, 3675 ], [ 3824, 3833 ], [ 4130, 4139 ], [ 4202, 4211 ] ], [ [ 850, 858 ], [ 1511, 1519 ], [ 3883, 3891 ], [ 4261, 4269 ] ], [ [ 867, 883 ] ] ]
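create_response in the row above returns a single output file base64-encoded, and zips multiple files before encoding. The self-contained sketch below reproduces that branching with stdlib calls in place of the FileUtils/StrUtils helpers; the archive name is illustrative.

import base64
import os
import zipfile

def encode_output_dir(output_dir):
    # Sketch of the create_response branching above, using stdlib calls in
    # place of the FileUtils/StrUtils helpers; 'output.zip' is illustrative.
    files = [os.path.join(output_dir, f) for f in os.listdir(output_dir)]
    if len(files) == 1:
        # One file: return its bytes base64-encoded.
        with open(files[0], 'rb') as f:
            return base64.b64encode(f.read()).decode('utf-8')
    if len(files) > 1:
        # Several files: zip them, then encode the archive.
        zip_path = os.path.join(output_dir, 'output.zip')
        with zipfile.ZipFile(zip_path, 'w') as zf:
            for path in files:
                zf.write(path, arcname=os.path.basename(path))
        with open(zip_path, 'rb') as f:
            return base64.b64encode(f.read()).decode('utf-8')
    return ''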
""" Climate Change Project """ import plotly.graph_objects as go from PIL import Image, ImageDraw, ImageFont from computing_data import calc_high_actual_pd, \ calc_low_actual_pd, \ calc_median_actual_pd, \ make_high_rcp_list, make_low_rcp_list, \ make_median_rcp_list, rcp_to_slice, temp_to_rgb from reading_data import read_actual_data, read_predicted_data, CITY_SET, MAP, CITY_TEMPS def plot_temp_data(actual_temps_dict: dict, final_low_rcp_list: list, final_median_rcp_list: list, final_high_rcp_list: list) -> None: """Plot a line and scatter graph of real and predicted temperatures using plotly's line and scatter plots """ x = list(actual_temps_dict.keys()) actual_y = list(actual_temps_dict.values()) low_predicted_y = final_low_rcp_list median_predicted_y = final_median_rcp_list high_predicted_y = final_high_rcp_list fig = go.Figure() fig.add_trace(go.Scatter(x=x, y=low_predicted_y, mode='lines+markers', name='RCP 2.6 Predicted Temperature')) fig.add_trace(go.Scatter(x=x, y=median_predicted_y, mode='lines+markers', name='RCP 4.5 Predicted Temperature')) fig.add_trace(go.Scatter(x=x, y=high_predicted_y, mode='lines+markers', name='RCP 8.5 Predicted Temperature')) fig.add_trace(go.Scatter(x=x, y=actual_y, mode='lines+markers', name='Actual Temperature')) fig.update_layout( title="Actual vs Predicted Temperature of " + city[3], xaxis_title="Years", yaxis_title="Temperature (Celsius)", font=dict( family="Courier New, monospace", size=18) ) fig.show() def draw_table(actual_temps_dict: dict, final_low_rcp_list: list, final_median_rcp_list: list, final_high_rcp_list: list, low_rcp_percentage_difference: list, median_rcp_percentage_difference: list, high_rcp_percentage_difference: list) -> None: """ Draw a table using a plotly's basic table """ fig = go.Figure(data=[go.Table(header=dict(values=['Actual Temperature', 'RCP 2.6', '% Difference of RCP 2.6 and Actual Temp', 'RCP 4.5', '% Difference of RCP 4.5 and Actual Temp', 'RCP 8.5', '% Difference of RCP 8.5 and Actual Temp'], line_color='darkslategray', fill_color='lightskyblue'), cells=dict(values=[list(actual_temps_dict.values()), final_low_rcp_list, low_rcp_percentage_difference, final_median_rcp_list, median_rcp_percentage_difference, final_high_rcp_list, high_rcp_percentage_difference]))]) fig.update_layout( title="Actual vs Predicted Temperature of " + city[3] ) fig.show() def draw_map(rcp_type: str) -> None: """ Draws both maps for predicted and actual temperature of the cities in Canada """ map = Image.open(MAP) width, height = map.size new_map = Image.new('RGB', (width * 2, height + 80)) # fills the cities for the actual map for city in CITY_SET: temp = CITY_TEMPS[city][0] ImageDraw.floodfill(map, city[2], temp_to_rgb(temp), thresh=50) map2 = Image.open(MAP) # fills the cities for the predicted map for city in CITY_SET: temp = CITY_TEMPS[city][rcp_to_slice(rcp_type)] ImageDraw.floodfill(map2, city[2], temp_to_rgb(temp), thresh=50) new_map.paste(map, (0, 80)) new_map.paste(map2, (width, 80)) # Writes the titles title_font = ImageFont.truetype("arial.ttf", 50) new_map_editable = ImageDraw.Draw(new_map) new_map_editable.text((width // 3, 10), 'Actual Temperatures(' + year + ')', font=title_font) new_map_editable.text((int(1.3 * width), 10), 'Predicted Temperatures(' + year + ')', font=title_font) new_map.show() def run(city: tuple, year: int, city_name: str) -> None: """ Runs the code for one city """ actual_temps_dict = read_actual_data(city[0]) predicted_temps_dict = read_predicted_data(city[1], actual_temps_dict) if city[3].lower() == 
city_name.lower(): final_low_rcp_list = make_low_rcp_list(predicted_temps_dict) low_rcp_percentage_difference = \ calc_low_actual_pd(actual_temps_dict, final_low_rcp_list) final_median_rcp_list = make_median_rcp_list(predicted_temps_dict) median_rcp_percentage_difference = \ calc_median_actual_pd(actual_temps_dict, final_median_rcp_list) final_high_rcp_list = make_high_rcp_list(predicted_temps_dict) high_rcp_percentage_difference = \ calc_high_actual_pd(actual_temps_dict, final_high_rcp_list) plot_temp_data(actual_temps_dict, final_low_rcp_list, final_median_rcp_list, final_high_rcp_list) draw_table(actual_temps_dict, final_low_rcp_list, final_median_rcp_list, final_high_rcp_list, low_rcp_percentage_difference, median_rcp_percentage_difference, high_rcp_percentage_difference) temperatures = [actual_temps_dict[year], predicted_temps_dict[year]['RCP 2.6'], predicted_temps_dict[year]['RCP 4.5'], predicted_temps_dict[year]['RCP 8.5']] CITY_TEMPS[city] = temperatures # this is the main part of the program that calls every function if __name__ == '__main__': year = input('Write the year for the map to display data from ' '(in range of 2003-2019 inclusive)') if not 2003 <= int(year) <= 2019: year = input('Try again. Write the number between 2003 and 2019 inclusive') city_name = input( 'Type the name of the city you want to display its stats on graph' '(TORONTO, QUEBEC, HALIFAX, WINNIPEG)') if city_name.lower() not in ('toronto', 'halifax', 'quebec', 'winnipeg'): city_name = input( 'Try again. Type Toronto or Quebec or Halifax or Winnipeg') rcp_type = input( 'Write an RCP value for the map to display on the "predicted" side.' '(write RCP 2.6 or RCP 4.5 or RCP 8.5)') if rcp_type not in ('RCP 2.6', 'RCP 4.5', 'RCP 8.5'): rcp_type = input('Try again. Write RCP 2.6 or RCP 4.5 or RCP 8.5)') while True: for city in CITY_SET: run(city, int(year), city_name) draw_map(rcp_type) year = input('Write the year for the map to display data from ' '(in range of 2003-2019 inclusive). ' 'Type 2 wrong answers to exit') if not 2003 <= int(year) <= 2019: year = input('Try again. Write the number between 2003 and 2019 inclusive. ' 'Type a wrong answer to exit') if not 2003 <= int(year) <= 2019: break city_name = input( 'Type the name of the city you want to display its stats on graph' '(TORONTO, QUEBEC, HALIFAX, WINNIPEG) Type 2 wrong answers to exit.') if city_name.lower() not in ('toronto', 'halifax', 'quebec', 'winnipeg'): city_name = input( 'Try again. Type Toronto or Quebec or Halifax or Winnipeg. ' 'Type a wrong answer to exit.') if city_name.lower() not in ('toronto', 'halifax', 'quebec', 'winnipeg'): break rcp_type = input( 'Write an RCP value for the map to display on the "predicted" side.' '(write RCP 2.6 or RCP 4.5 or RCP 8.5) Type 2 wrong answers to exit') if rcp_type not in ('RCP 2.6', 'RCP 4.5', 'RCP 8.5'): rcp_type = input('Try again. Write RCP 2.6 or RCP 4.5 or RCP 8.5' 'Type a wrong answer to exit.') if rcp_type not in ('RCP 2.6', 'RCP 4.5', 'RCP 8.5'): break
[ [ [ 38, 64 ], [ 912, 914 ], [ 943, 945 ], [ 1116, 1118 ], [ 1292, 1294 ], [ 1466, 1468 ], [ 2279, 2281 ], [ 2295, 2297 ] ], [ [ 81, 86 ], [ 3766, 3771 ], [ 3826, 3831 ], [ 4057, 4062 ] ], [ [ 88, 97 ], [ 3981, 3990 ], [ 4209, 4218 ], [ 4445, 4454 ] ], [ [ 99, 108 ], [ 4386, 4395 ] ], [ [ 136, 155 ], [ 5560, 5579 ] ], [ [ 163, 181 ], [ 5146, 5164 ] ], [ [ 189, 210 ], [ 5336, 5357 ] ], [ [ 218, 236 ], [ 5464, 5482 ] ], [ [ 238, 255 ], [ 5052, 5069 ] ], [ [ 263, 283 ], [ 5236, 5256 ] ], [ [ 285, 297 ], [ 4177, 4189 ] ], [ [ 299, 310 ], [ 4015, 4026 ], [ 4244, 4255 ] ], [ [ 336, 352 ], [ 4876, 4892 ] ], [ [ 354, 373 ], [ 4929, 4948 ] ], [ [ 375, 383 ], [ 7237, 7245 ], [ 3928, 3936 ], [ 4135, 4143 ] ], [ [ 385, 388 ], [ 3777, 3780 ], [ 4068, 4071 ] ], [ [ 390, 400 ], [ 3953, 3963 ], [ 4160, 4170 ], [ 6224, 6234 ] ], [ [ 407, 421 ], [ 5660, 5674 ] ], [ [ 1875, 1885 ], [ 5789, 5799 ] ], [ [ 3625, 3633 ], [ 7300, 7308 ] ], [ [ 4752, 4755 ], [ 7259, 7262 ] ], [ [ 6355, 6359 ], [ 6496, 6500 ], [ 7273, 7277 ], [ 4564, 4568 ], [ 4697, 4701 ] ], [ [ 6519, 6523 ], [ 7273, 7277 ], [ 4564, 4568 ], [ 4697, 4701 ] ], [ [ 6599, 6608 ], [ 6748, 6757 ], [ 7280, 7289 ] ], [ [ 6827, 6836 ], [ 7280, 7289 ] ], [ [ 6922, 6930 ], [ 7073, 7081 ], [ 7309, 7317 ] ], [ [ 7132, 7140 ], [ 7309, 7317 ] ], [ [ 7229, 7233 ], [ 7263, 7267 ], [ 1679, 1683 ], [ 3589, 3593 ] ], [ [ 7328, 7332 ], [ 7531, 7535 ], [ 7718, 7722 ], [ 7273, 7277 ], [ 4564, 4568 ], [ 4697, 4701 ] ], [ [ 7558, 7562 ], [ 7718, 7722 ], [ 7273, 7277 ], [ 4564, 4568 ], [ 4697, 4701 ] ], [ [ 7760, 7769 ], [ 7951, 7960 ], [ 8189, 8198 ], [ 7280, 7289 ] ], [ [ 8034, 8043 ], [ 8189, 8198 ], [ 7280, 7289 ] ], [ [ 8287, 8295 ], [ 8479, 8487 ], [ 8680, 8688 ], [ 7309, 7317 ] ], [ [ 8542, 8550 ], [ 8680, 8688 ], [ 7309, 7317 ] ] ]
from __future__ import print_function, division from numpy import array, argmax from pyscf.nao import tddft_iter class tddft_iter_x_zip(tddft_iter): """ Iterative TDDFT with a high-energy part of the KS eigenvectors compressed """ def __init__(self, **kw): from pyscf.nao.m_fermi_dirac import fermi_dirac_occupations tddft_iter.__init__(self, **kw) self.x_zip = kw['x_zip'] if 'x_zip' in kw else False self.x_zip_eps = kw['x_zip_eps'] if 'x_zip_eps' in kw else 0.05 self.x_zip_emax = kw['x_zip_emax'] if 'x_zip_emax' in kw else 0.25 if self.x_zip: # redefine the eigenvectors sm2e,sma2x = self.build_x_zip() if self.verbosity>0: print(__name__, 'self.mo_energy.shape =', self.mo_energy.shape) print(__name__, 'sm2e.shape =', sm2e.shape) self.ksn2e = array([sm2e]) ksn2fd = fermi_dirac_occupations(self.telec, self.ksn2e, self.fermi_energy) for s,n2fd in enumerate(ksn2fd[0]): if not all(n2fd>self.nfermi_tol): continue print(self.telec, s, self.nfermi_tol, n2fd) raise RuntimeError(__name__, 'telec is too high?') self.ksn2f = (3-self.nspin)*ksn2fd self.nfermi = array([argmax(ksn2fd[0,s,:]<self.nfermi_tol) for s in range(self.nspin)], dtype=int) self.vstart = array([argmax(1.0-ksn2fd[0,s,:]>=self.nfermi_tol) for s in range(self.nspin)], dtype=int) self.xocc = [ma2x[:nfermi,:] for ma2x,nfermi in zip(sma2x,self.nfermi)] self.xvrt = [ma2x[vstart:,:] for ma2x,vstart in zip(sma2x,self.vstart)] def build_x_zip(self): """ define compressed eigenvectors """ from pyscf.nao.m_x_zip import x_zip sm2e = [] sma2x = [] for n2e,na2x in zip(self.mo_energy[0], self.mo_coeff[0,:,:,:,0]): vst, i2w,i2dos, m2e, ma2x = x_zip(n2e, na2x, eps=self.x_zip_eps, emax=self.x_zip_emax) sm2e.append(m2e) sma2x.append(ma2x) sm2e = array(sm2e) return sm2e, sma2x
[ [ [ 23, 37 ] ], [ [ 39, 47 ] ], [ [ 66, 71 ], [ 822, 827 ], [ 1187, 1192 ], [ 1292, 1297 ], [ 1898, 1903 ] ], [ [ 73, 79 ], [ 1194, 1200 ], [ 1299, 1305 ] ], [ [ 102, 112 ], [ 138, 148 ], [ 337, 347 ] ], [ [ 121, 137 ] ] ]
class ProxyListException(Exception): def __init__(self, extraArguments): Exception.__init__(self, " was raised - {0}".format(extraArguments)) self.dErrorArguments = extraArguments
[ [ [ 6, 24 ] ] ]
""" Functions and classes for managing a map saved in the .tmx format. Typically these .tmx maps are created using the `Tiled Map Editor`_. For more information, see the `Platformer Tutorial`_. .. _Tiled Map Editor: https://www.mapeditor.org/ .. _Platformer Tutorial: http://arcade.academy/examples/platform_tutorial/index.html """ import copy import math import os from pathlib import Path from typing import List, Optional, Tuple, Union, cast import pytiled_parser from arcade import ( AnimatedTimeBasedSprite, AnimationKeyframe, Sprite, SpriteList, load_texture, ) from arcade.arcade_types import Point from arcade.resources import resolve_resource_path _FLIPPED_HORIZONTALLY_FLAG = 0x80000000 _FLIPPED_VERTICALLY_FLAG = 0x40000000 _FLIPPED_DIAGONALLY_FLAG = 0x20000000 def read_tmx(map_file: Union[str, Path]) -> pytiled_parser.TiledMap: raise DeprecationWarning("The read_tmx function has been replaced with read_map. Use this function and convert your .tmx files to .json using the Tiled editor.") def read_map(map_file: Union[str, Path]) -> pytiled_parser.TiledMap: """ Given a .json file, this will read in a tiled map, and return a TiledMap object. Important: Tiles must be a "collection" of images. Hitboxes can be drawn around tiles in the tileset editor, but only polygons are supported. (This is a great area for PR's to improve things.) :param str json_file: String with name of our JSON Tiled file :returns: Map :rtype: TiledMap """ # If we should pull from local resources, replace with proper path map_file = resolve_resource_path(map_file) tile_map = pytiled_parser.parse_map(map_file) return tile_map def get_cartesian( map_object: pytiled_parser.TiledMap, coordinates: pytiled_parser.OrderedPair ) -> pytiled_parser.OrderedPair: """ Given a TiledMap and a set of coordinates, this returns the cartesian coordinates This assumed the supplied coordinates are pixel coordinates, and bases the cartesian grid off of the Map's tile size. So if you have a map with 128x128 pixel Tiles, and you supply coordinates 500, 250 to this function you'll receive back 3, 2. This works by taking the floor of the quotient of the pixel coordinate divided by the tile size. :param pytiled_parser.TiledMap map_object: The map to pull tile size from :param pytiled_parser.OrderedPair coordinates: The pixel coordinates to convert """ x = math.floor(coordinates.x / map_object.tile_size.width) y = math.floor(coordinates.y / map_object.tile_size.height) return pytiled_parser.OrderedPair(x, y) def get_tilemap_layer( map_object: pytiled_parser.TiledMap, layer_path: str ) -> Optional[pytiled_parser.Layer]: """ Given a TiledMap and a layer path, this returns the TileLayer. :param pytiled_parser.objects.TileMap map_object: The map read in by the read_tmx function. :param str layer_path: A string to match the layer name. Case sensitive. :returns: A TileLayer, or None if no layer was found. 
""" assert isinstance(map_object, pytiled_parser.TiledMap) assert isinstance(layer_path, str) def _get_tilemap_layer(path, layers): layer_name = path.pop(0) for layer in layers: if layer.name == layer_name: if isinstance(layer, pytiled_parser.LayerGroup): if len(path) != 0: return _get_tilemap_layer(path, layer.layers) else: return layer return None path = layer_path.strip("/").split("/") layer = _get_tilemap_layer(path, map_object.layers) return layer def _get_tile_by_gid( map_object: pytiled_parser.TiledMap, tile_gid: int ) -> Optional[pytiled_parser.Tile]: flipped_diagonally = False flipped_horizontally = False flipped_vertically = False if tile_gid & _FLIPPED_HORIZONTALLY_FLAG: flipped_horizontally = True tile_gid -= _FLIPPED_HORIZONTALLY_FLAG if tile_gid & _FLIPPED_DIAGONALLY_FLAG: flipped_diagonally = True tile_gid -= _FLIPPED_DIAGONALLY_FLAG if tile_gid & _FLIPPED_VERTICALLY_FLAG: flipped_vertically = True tile_gid -= _FLIPPED_VERTICALLY_FLAG for tileset_key, tileset in map_object.tilesets.items(): if tile_gid < tileset_key: continue # No specific tile info, but there is a tile sheet if ( tileset.tiles is None and tileset.image is not None and tileset_key <= tile_gid < tileset_key + tileset.tile_count ): tile_ref = pytiled_parser.Tile( id=(tile_gid - tileset_key), image=tileset.image ) else: tile_ref = tileset.tiles.get(tile_gid - tileset_key) if tile_ref: my_tile = copy.copy(tile_ref) my_tile.tileset = tileset my_tile.flipped_vertically = flipped_vertically my_tile.flipped_diagonally = flipped_diagonally my_tile.flipped_horizontally = flipped_horizontally return my_tile return None def _get_tile_by_id( map_object: pytiled_parser.TiledMap, tileset: pytiled_parser.Tileset, tile_id: int ) -> Optional[pytiled_parser.Tile]: for tileset_key, cur_tileset in map_object.tilesets.items(): if cur_tileset is tileset: for tile_key, tile in cur_tileset.tiles.items(): if tile_id == tile.id: return tile return None def _get_image_info_from_tileset(tile): image_x = 0 image_y = 0 if tile.tileset.image is not None: margin = tile.tileset.margin or 0 spacing = tile.tileset.spacing or 0 row = tile.id // tile.tileset.columns image_y = margin + row * (tile.tileset.tile_height + spacing) col = tile.id % tile.tileset.columns image_x = margin + col * (tile.tileset.tile_width + spacing) if tile.tileset.image: # Sprite sheet, use max width/height from sheet width = tile.tileset.tile_width height = tile.tileset.tile_height else: # Individual image, use image width and height width = tile.image_width height = tile.image_height return image_x, image_y, width, height def _get_image_source( tile: pytiled_parser.Tile, base_directory: Optional[str], map_directory: Optional[str], ): image_file = None if tile.image: image_file = tile.image elif tile.tileset.image: image_file = tile.tileset.image if not image_file: print( f"Warning for tile {tile.id_}, no image source listed either for individual tile, or as a tileset." 
) return None if os.path.exists(image_file): return image_file if base_directory: try2 = Path(base_directory, image_file) if os.path.exists(try2): return try2 if map_directory: try3 = Path(map_directory, image_file) if os.path.exists(try3): return try3 print( f"Warning, can't find image {image_file} for tile {tile.id} - {base_directory}" ) return None def _create_sprite_from_tile( map_object: pytiled_parser.TiledMap, tile: pytiled_parser.Tile, scaling: float = 1.0, base_directory: str = None, hit_box_algorithm="Simple", hit_box_detail: float = 4.5, ): """ Given a tile from the parser, see if we can create a sprite from it """ # --- Step 1, find a reference to an image this is going to be based off of map_source = map_object.map_file map_directory = os.path.dirname(map_source) image_file = _get_image_source(tile, base_directory, map_directory) # print(f"Creating tile: {tmx_file}") if tile.animation: # my_sprite = AnimatedTimeSprite(tmx_file, scaling) my_sprite: Sprite = AnimatedTimeBasedSprite(image_file, scaling) else: image_x, image_y, width, height = _get_image_info_from_tileset(tile) my_sprite = Sprite( image_file, scaling, image_x, image_y, width, height, flipped_horizontally=tile.flipped_horizontally, flipped_vertically=tile.flipped_vertically, flipped_diagonally=tile.flipped_diagonally, hit_box_algorithm=hit_box_algorithm, hit_box_detail=hit_box_detail, ) if tile.properties is not None and len(tile.properties) > 0: for key, value in tile.properties.items(): my_sprite.properties[key] = value if tile.type: my_sprite.properties["type"] = tile.type # print(tile.image.source, my_sprite.center_x, my_sprite.center_y) if tile.objects is not None: if len(tile.objects.tiled_objects) > 1: print( f"Warning, only one hit box supported for tile with image {tile.image.source}." ) for hitbox in tile.objects.tiled_objects: points: List[Point] = [] if isinstance(hitbox, pytiled_parser.tiled_object.Rectangle): if hitbox.size is None: print( f"Warning: Rectangle hitbox created for without a " f"height or width for {tile.image.source}. Ignoring." ) continue # print(my_sprite.width, my_sprite.height) sx = hitbox.coordinates.x - (my_sprite.width / (scaling * 2)) sy = -(hitbox.coordinates.y - (my_sprite.height / (scaling * 2))) ex = (hitbox.coordinates.x + hitbox.size.width) - ( my_sprite.width / (scaling * 2) ) ey = -( (hitbox.coordinates.y + hitbox.size.height) - (my_sprite.height / (scaling * 2)) ) # print(f"Size: {hitbox.size} Location: {hitbox.location}") p1 = [sx, sy] p2 = [ex, sy] p3 = [ex, ey] p4 = [sx, ey] # print(f"w:{my_sprite.width:.1f}, h:{my_sprite.height:.1f}", end=", ") points = [p1, p2, p3, p4] # for point in points: # print(f"({point[0]:.1f}, {point[1]:.1f}) ") # print() elif isinstance(hitbox, pytiled_parser.tiled_object.Polygon) or isinstance( hitbox, pytiled_parser.tiled_object.Polyline ): for point in hitbox.points: adj_x = ( point.x + hitbox.coordinates.x - my_sprite.width / (scaling * 2) ) adj_y = -( point.y + hitbox.coordinates.y - my_sprite.height / (scaling * 2) ) adj_point = [adj_x, adj_y] points.append(adj_point) # If we have a polyline, and it is closed, we need to # remove the duplicate end-point if points[0][0] == points[-1][0] and points[0][1] == points[-1][1]: points.pop() elif isinstance(hitbox, pytiled_parser.tiled_object.Ellipse): if hitbox.size is None: print( f"Warning: Ellipse hitbox created for without a height " f"or width for {tile.image.source}. Ignoring." 
) continue # print(f"Size: {hitbox.size} Location: {hitbox.location}") hw = hitbox.size.width / 2 hh = hitbox.size.height / 2 cx = hitbox.coordinates.x + hw cy = hitbox.coordinates.y + hh acx = cx - (my_sprite.width / (scaling * 2)) acy = cy - (my_sprite.height / (scaling * 2)) # print(f"acx: {acx} acy: {acy} cx: {cx} cy: {cy} hh: {hh} hw: {hw}") total_steps = 8 angles = [ step / total_steps * 2 * math.pi for step in range(total_steps) ] for angle in angles: x = hw * math.cos(angle) + acx y = -(hh * math.sin(angle) + acy) point = [x, y] points.append(point) # for point in points: # print(f"({point[0]:.1f}, {point[1]:.1f}) ") # print() else: print(f"Warning: Hitbox type {type(hitbox)} not supported.") my_sprite.set_hit_box(points) if tile.animation is not None: # Animated image key_frame_list = [] # Loop through each frame for frame in tile.animation: # Get the tile for the frame frame_tile = _get_tile_by_id(map_object, tile.tileset, frame.tile_id) if frame_tile: image_file = _get_image_source( frame_tile, base_directory, map_directory ) # Does the tile have an image? if frame_tile.image: # Yes, use it texture = load_texture(image_file) else: # No image for tile? Pull from tilesheet image_x, image_y, width, height = _get_image_info_from_tileset( frame_tile ) texture = load_texture(image_file, image_x, image_y, width, height) key_frame = AnimationKeyframe(frame.tile_id, frame.duration, texture) key_frame_list.append(key_frame) # If this is the first texture in the animation, go ahead and # set it as the current texture. if len(key_frame_list) == 1: my_sprite.texture = key_frame.texture # print(f"Add tile {frame.tile_id} for keyframe. Source: {frame_tile.image.source}") cast(AnimatedTimeBasedSprite, my_sprite).frames = key_frame_list return my_sprite def _process_object_layer( map_object: pytiled_parser.TiledMap, layer: pytiled_parser.ObjectLayer, scaling: float = 1, base_directory: str = "", use_spatial_hash: Optional[bool] = None, hit_box_algorithm="Simple", hit_box_detail=4.5, ) -> SpriteList: sprite_list: SpriteList = SpriteList(use_spatial_hash=use_spatial_hash) for cur_object in layer.tiled_objects: if not hasattr(cur_object, "gid"): print( "Warning: Currently only tiles (not objects) are supported in object layers." 
) continue tile = _get_tile_by_gid(map_object, cur_object.gid) my_sprite = _create_sprite_from_tile( map_object, tile, scaling=scaling, base_directory=base_directory, hit_box_algorithm=hit_box_algorithm, hit_box_detail=hit_box_detail, ) x = cur_object.coordinates.x * scaling y = ( map_object.map_size.height * map_object.tile_size[1] - cur_object.coordinates.y ) * scaling my_sprite.width = width = cur_object.size[0] * scaling my_sprite.height = height = cur_object.size[1] * scaling center_x = width / 2 center_y = height / 2 if cur_object.rotation is not None: rotation = -math.radians(cur_object.rotation) else: rotation = 0 cos_rotation = math.cos(rotation) sin_rotation = math.sin(rotation) rotated_center_x = center_x * cos_rotation - center_y * sin_rotation rotated_center_y = center_x * sin_rotation + center_y * cos_rotation my_sprite.position = (x + rotated_center_x, y + rotated_center_y) my_sprite.angle = math.degrees(rotation) # Opacity opacity = layer.opacity if opacity: my_sprite.alpha = int(opacity * 255) # Properties if cur_object.properties is not None and "change_x" in cur_object.properties: my_sprite.change_x = float(cur_object.properties["change_x"]) if cur_object.properties is not None and "change_y" in cur_object.properties: my_sprite.change_y = float(cur_object.properties["change_y"]) if ( cur_object.properties is not None and "boundary_bottom" in cur_object.properties ): my_sprite.boundary_bottom = float(cur_object.properties["boundary_bottom"]) if ( cur_object.properties is not None and "boundary_top" in cur_object.properties ): my_sprite.boundary_top = float(cur_object.properties["boundary_top"]) if ( cur_object.properties is not None and "boundary_left" in cur_object.properties ): my_sprite.boundary_left = float(cur_object.properties["boundary_left"]) if ( cur_object.properties is not None and "boundary_right" in cur_object.properties ): my_sprite.boundary_right = float(cur_object.properties["boundary_right"]) if cur_object.properties is not None: my_sprite.properties.update(cur_object.properties) if cur_object.type: my_sprite.properties["type"] = cur_object.type if cur_object.name: my_sprite.properties["name"] = cur_object.name sprite_list.append(my_sprite) return sprite_list def _process_tile_layer( map_object: pytiled_parser.TiledMap, layer: pytiled_parser.TileLayer, scaling: float = 1, base_directory: str = "", use_spatial_hash: Optional[bool] = None, hit_box_algorithm="Simple", hit_box_detail: float = 4.5, ) -> SpriteList: sprite_list: SpriteList = SpriteList(use_spatial_hash=use_spatial_hash) map_array = layer.data # Loop through the layer and add in the wall list for row_index, row in enumerate(map_array): for column_index, item in enumerate(row): # Check for empty square if item == 0: continue tile = _get_tile_by_gid(map_object, item) if tile is None: error_msg = ( f"Warning, couldn't find tile for item {item} in layer " f"'{layer.name}' in file '{map_object.map_file}'." 
) raise ValueError(error_msg) my_sprite = _create_sprite_from_tile( map_object, tile, scaling=scaling, base_directory=base_directory, hit_box_algorithm=hit_box_algorithm, hit_box_detail=hit_box_detail, ) if my_sprite is None: print( f"Warning: Could not create sprite number {item} in layer '{layer.name}' {tile.image.source}" ) else: my_sprite.center_x = ( column_index * (map_object.tile_size[0] * scaling) + my_sprite.width / 2 ) my_sprite.center_y = (map_object.map_size.height - row_index - 1) * ( map_object.tile_size[1] * scaling ) + my_sprite.height / 2 # Opacity opacity = layer.opacity if opacity: my_sprite.alpha = int(opacity * 255) sprite_list.append(my_sprite) return sprite_list def process_layer( map_object: pytiled_parser.TiledMap, layer_name: str, scaling: float = 1, base_directory: str = "", use_spatial_hash: Optional[bool] = None, hit_box_algorithm="Simple", hit_box_detail: float = 4.5, ) -> SpriteList: """ This takes a map layer returned by the read_tmx function, and creates Sprites for it. :param map_object: The TileMap read in by read_tmx. :param layer_name: The name of the layer that we are creating sprites for. :param scaling: Scaling the layer up or down. (Note, any number besides 1 can create a tearing effect, if numbers don't evenly divide.) :param base_directory: Base directory of the file, that we start from to load images. :param use_spatial_hash: If all, or at least 75%, of the loaded tiles will not move between frames and you are using either the simple physics engine or platformer physics engine, set this to True to speed collision calculation. Leave False if using PyMunk, if all sprites are moving, or if no collision will be checked. :param str hit_box_algorithm: One of 'None', 'Simple' or 'Detailed'. \ Defaults to 'Simple'. Use 'Simple' for the :data:`PhysicsEngineSimple`, \ :data:`PhysicsEnginePlatformer` \ and 'Detailed' for the :data:`PymunkPhysicsEngine`. .. figure:: images/hit_box_algorithm_none.png :width: 40% hit_box_algorithm = "None" .. figure:: images/hit_box_algorithm_simple.png :width: 55% hit_box_algorithm = "Simple" .. figure:: images/hit_box_algorithm_detailed.png :width: 75% hit_box_algorithm = "Detailed" :param float hit_box_detail: Float, defaults to 4.5. Used with 'Detailed' to hit box :returns: A SpriteList. """ if len(base_directory) > 0 and not base_directory.endswith("/"): base_directory += "/" layer = get_tilemap_layer(map_object, layer_name) if layer is None: print(f"Warning, no layer named '{layer_name}'.") return SpriteList() if isinstance(layer, pytiled_parser.TileLayer): return _process_tile_layer( map_object, layer, scaling, base_directory, use_spatial_hash, hit_box_algorithm, hit_box_detail, ) elif isinstance(layer, pytiled_parser.ObjectLayer): return _process_object_layer( map_object, layer, scaling, base_directory, use_spatial_hash, hit_box_algorithm, hit_box_detail, ) print(f"Warning, layer '{layer_name}' has unexpected type. '{type(layer)}'") return SpriteList()
[ [ [ 345, 349 ], [ 4900, 4904 ] ], [ [ 357, 361 ], [ 2499, 2503 ], [ 2562, 2566 ], [ 12233, 12237 ], [ 12356, 12360 ], [ 12409, 12413 ], [ 15625, 15629 ], [ 15722, 15726 ], [ 15764, 15768 ], [ 16038, 16042 ] ], [ [ 369, 371 ], [ 6810, 6812 ], [ 6947, 6949 ], [ 7074, 7076 ], [ 7698, 7700 ] ], [ [ 392, 396 ], [ 838, 842 ], [ 1074, 1078 ], [ 6903, 6907 ], [ 7031, 7035 ] ], [ [ 416, 420 ], [ 9101, 9105 ] ], [ [ 422, 430 ], [ 2750, 2758 ], [ 3795, 3803 ], [ 5300, 5308 ], [ 6427, 6435 ], [ 6461, 6469 ], [ 14453, 14461 ], [ 17907, 17915 ], [ 19916, 19924 ] ], [ [ 432, 437 ] ], [ [ 439, 444 ], [ 827, 832 ], [ 1063, 1068 ] ], [ [ 446, 450 ], [ 14181, 14185 ] ], [ [ 459, 473 ], [ 848, 862 ], [ 1084, 1098 ], [ 1667, 1681 ], [ 1830, 1844 ], [ 1760, 1774 ], [ 1798, 1812 ], [ 2630, 2644 ], [ 2759, 2773 ], [ 2704, 2718 ], [ 3134, 3148 ], [ 3804, 3818 ], [ 3751, 3765 ], [ 4677, 4691 ], [ 5309, 5323 ], [ 5224, 5238 ], [ 5258, 5272 ], [ 6386, 6400 ], [ 7290, 7304 ], [ 7325, 7339 ], [ 9152, 9166 ], [ 10480, 10494 ], [ 10556, 10570 ], [ 11350, 11364 ], [ 14313, 14327 ], [ 14349, 14363 ], [ 17769, 17783 ], [ 17805, 17819 ], [ 19794, 19808 ], [ 22048, 22062 ], [ 22330, 22344 ], [ 3381, 3395 ] ], [ [ 500, 523 ], [ 7952, 7975 ], [ 14186, 14209 ] ], [ [ 529, 546 ], [ 13734, 13751 ] ], [ [ 552, 558 ], [ 7943, 7949 ], [ 8104, 8110 ] ], [ [ 564, 574 ], [ 14537, 14547 ], [ 14579, 14589 ], [ 14566, 14576 ], [ 18000, 18010 ], [ 18042, 18052 ], [ 18029, 18039 ], [ 20009, 20019 ], [ 22009, 22019 ], [ 22681, 22691 ] ], [ [ 580, 592 ], [ 13367, 13379 ], [ 13647, 13659 ] ], [ [ 628, 633 ], [ 9106, 9111 ] ], [ [ 663, 684 ], [ 1619, 1640 ] ], [ [ 686, 712 ], [ 3941, 3967 ], [ 4025, 4051 ] ], [ [ 726, 750 ], [ 4195, 4219 ], [ 4275, 4299 ] ], [ [ 764, 788 ], [ 4071, 4095 ], [ 4151, 4175 ] ], [ [ 808, 816 ] ], [ [ 1044, 1052 ] ], [ [ 1729, 1742 ] ], [ [ 2669, 2686 ], [ 21872, 21889 ] ], [ [ 3717, 3733 ], [ 14876, 14892 ], [ 18376, 18392 ] ], [ [ 5191, 5206 ], [ 13005, 13020 ] ], [ [ 5585, 5613 ], [ 8049, 8077 ], [ 13529, 13557 ] ], [ [ 6357, 6374 ], [ 7743, 7760 ], [ 13119, 13136 ] ], [ [ 7248, 7272 ], [ 14941, 14965 ], [ 18705, 18729 ] ], [ [ 14274, 14295 ], [ 22374, 22395 ] ], [ [ 17732, 17751 ], [ 22090, 22109 ] ], [ [ 19763, 19776 ] ] ]
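For context, a minimal usage sketch of the process_layer API defined in the record above. This is an illustration, not part of the record: it assumes arcade 2.x, whose read_tmx loader the docstring references, and a hypothetical map.tmx containing a layer named "Platforms".

import arcade

# Hypothetical map file and layer name; read_tmx/process_layer are the
# public entry points around the helpers shown above.
my_map = arcade.tilemap.read_tmx("map.tmx")
wall_list = arcade.tilemap.process_layer(
    map_object=my_map,
    layer_name="Platforms",
    scaling=1.0,
    use_spatial_hash=True,       # static geometry benefits from spatial hashing
    hit_box_algorithm="Simple",
)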
from typing import List, Set, Tuple VertexSets = List[Set[int]] EdgeList = List[Tuple[int, int]]
[ [ [ 19, 23 ], [ 50, 54 ], [ 76, 80 ] ], [ [ 25, 28 ], [ 55, 58 ] ], [ [ 30, 35 ], [ 81, 86 ] ], [ [ 37, 47 ] ], [ [ 65, 73 ] ] ]
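A quick worked example of the two type aliases (the alias definitions are repeated from the record for runnability; the values are illustrative):

from typing import List, Set, Tuple

VertexSets = List[Set[int]]
EdgeList = List[Tuple[int, int]]

components: VertexSets = [{0, 1}, {2}]  # e.g. vertex groups of a graph
edges: EdgeList = [(0, 1), (1, 2)]      # edges as (u, v) index pairs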
import pytest from main_app.models import * from main_app.tests.utils import * # TESTS FOR CREATE MODELS: @pytest.mark.django_db def test_create_user(): # Given: users_before = User.objects.count() # When: new_user = fake_user() # Then: assert User.objects.count() == users_before + 1 assert new_user.pk == 1 assert new_user.is_anonymous is False @pytest.mark.django_db def test_create_category(): # Given: categories_before = Category.objects.count() # When: new_category = fake_category() # Then: assert Category.objects.count() == categories_before + 1 assert Category.objects.count() == 1 assert new_category.pk == 1 @pytest.mark.django_db def test_create_institution(): # Given: institutions_before = Institution.objects.count() # When: new_institution = fake_institution() # Then: assert Institution.objects.count() == institutions_before + 1 assert Institution.objects.count() == 1 assert new_institution.pk == 1 @pytest.mark.django_db def test_create_donation(): # Given: donations_before = Donation.objects.count() # When: new_donation = fake_donation() # Then: assert Donation.objects.count() == donations_before + 1 assert Donation.objects.count() == 1 assert new_donation.pk == 1 # TESTS FOR EDIT MODELS: @pytest.mark.django_db def test_edit_user(): # Given: user = fake_user() # When: previous_user_name = user.username user.username = "Charity" # Then: assert previous_user_name != user.username assert user.username == "Charity" @pytest.mark.django_db def test_edit_category(): # Given: category = fake_category() # When: previous_category_name = category.name category.name = "Clothes" # Then: assert previous_category_name != category.name assert category.name == "Clothes" @pytest.mark.django_db def test_edit_institution(): # Given: institution = fake_institution() # When: previous_institution_name = institution.name institution.name = "Fundacja" previous_institution_description = institution.description institution.description = "Some description" previous_institution_pk = institution.pk institution.pk = 2 # Then: assert previous_institution_name != institution.name assert institution.name == "Fundacja" assert previous_institution_description != institution.description assert institution.description == "Some description" assert previous_institution_pk != institution.pk assert institution.pk == 2 @pytest.mark.django_db def test_edit_donation(): # Given: donation = fake_donation() # When: previous_donation_city = donation.city donation.city = "Katowice" previous_donation_date = donation.pick_up_date donation.pick_up_date = "2020-06-17" previous_donation_comment = donation.pick_up_comment donation.pick_up_comment = "Comment" previous_donation_status = donation.collected donation.collected = False # Then: assert previous_donation_city != donation.city assert donation.city == "Katowice" assert previous_donation_date != donation.pick_up_date assert donation.pick_up_date == "2020-06-17" assert previous_donation_comment != donation.pick_up_comment assert donation.pick_up_comment == "Comment" assert previous_donation_status != donation.collected assert donation.collected is False # TESTS FOR DELETE MODELS: @pytest.mark.django_db def test_delete_user(): # Given: user = fake_user() users_before_deletion = User.objects.count() # When: user.delete() # Then: assert User.objects.count() == users_before_deletion - 1 @pytest.mark.django_db def test_delete_category(): # Given: category = fake_category() categories_before_deletion = Category.objects.count() # When: category.delete() # Then: assert 
Category.objects.count() == categories_before_deletion - 1 @pytest.mark.django_db def test_delete_institution(): # Given: institution = fake_institution() institution_before_deletion = Institution.objects.count() # When: institution.delete() # Then: assert Institution.objects.count() == institution_before_deletion - 1 @pytest.mark.django_db def test_delete_donation(): # Given: donation = fake_donation() donation_before_deletion = Donation.objects.count() # When: donation.delete() # Then: assert Donation.objects.count() == donation_before_deletion - 1
[ [ [ 7, 13 ], [ 110, 116 ], [ 385, 391 ], [ 693, 699 ], [ 1026, 1032 ], [ 1358, 1364 ], [ 1619, 1625 ], [ 1900, 1906 ], [ 2602, 2608 ], [ 3503, 3509 ], [ 3740, 3746 ], [ 4011, 4017 ], [ 4302, 4308 ] ], [ [ 42, 43 ] ], [ [ 77, 78 ], [ 188, 192 ], [ 236, 245 ], [ 271, 275 ], [ 472, 480 ], [ 528, 541 ], [ 567, 575 ], [ 628, 636 ], [ 785, 796 ], [ 847, 863 ], [ 889, 900 ], [ 955, 966 ], [ 1112, 1120 ], [ 1168, 1181 ], [ 1207, 1215 ], [ 1267, 1275 ], [ 1426, 1435 ], [ 1695, 1708 ], [ 1982, 1998 ], [ 2678, 2691 ], [ 3573, 3582 ], [ 3613, 3617 ], [ 3687, 3691 ], [ 3818, 3831 ], [ 3867, 3875 ], [ 3949, 3957 ], [ 4095, 4111 ], [ 4148, 4159 ], [ 4236, 4247 ], [ 4380, 4393 ], [ 4427, 4435 ], [ 4509, 4517 ] ], [ [ 136, 152 ] ], [ [ 411, 431 ] ], [ [ 719, 742 ] ], [ [ 1052, 1072 ] ], [ [ 1384, 1398 ] ], [ [ 1645, 1663 ] ], [ [ 1926, 1947 ] ], [ [ 2628, 2646 ] ], [ [ 3529, 3545 ] ], [ [ 3766, 3786 ] ], [ [ 4037, 4060 ] ], [ [ 4328, 4348 ] ] ]
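The fake_* factories are imported star-style from main_app.tests.utils and are not shown in this record. A minimal sketch of what fake_user might look like; the module path, argument names, and the create_user manager method are assumptions, not the project's actual code:

from main_app.models import User

def fake_user(username="tester", password="secret"):
    # Create and persist a throwaway user for a single test case; assumes
    # the model exposes Django's default create_user manager method.
    return User.objects.create_user(username=username, password=password)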
""" A sensor platform which detects underruns and capped status from the official Raspberry Pi Kernel. Minimal Kernel needed is 4.14+ """ import logging from rpi_bad_power import UnderVoltage, new_under_voltage from homeassistant.components.binary_sensor import ( BinarySensorDeviceClass, BinarySensorEntity, ) from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback _LOGGER = logging.getLogger(__name__) DESCRIPTION_NORMALIZED = "Voltage normalized. Everything is working as intended." DESCRIPTION_UNDER_VOLTAGE = "Under-voltage was detected. Consider getting a uninterruptible power supply for your Raspberry Pi." async def async_setup_entry( hass: HomeAssistant, config_entry: ConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up rpi_power binary sensor.""" under_voltage = await hass.async_add_executor_job(new_under_voltage) async_add_entities([RaspberryChargerBinarySensor(under_voltage)], True) class RaspberryChargerBinarySensor(BinarySensorEntity): """Binary sensor representing the rpi power status.""" _attr_device_class = BinarySensorDeviceClass.PROBLEM _attr_icon = "mdi:raspberry-pi" _attr_name = "RPi Power status" _attr_unique_id = "rpi_power" # only one sensor possible def __init__(self, under_voltage: UnderVoltage) -> None: """Initialize the binary sensor.""" self._under_voltage = under_voltage def update(self) -> None: """Update the state.""" value = self._under_voltage.get() if self._attr_is_on != value: if value: _LOGGER.warning(DESCRIPTION_UNDER_VOLTAGE) else: _LOGGER.info(DESCRIPTION_NORMALIZED) self._attr_is_on = value
[ [ [ 146, 153 ], [ 501, 508 ] ], [ [ 181, 193 ], [ 1423, 1435 ] ], [ [ 195, 212 ], [ 980, 997 ] ], [ [ 271, 294 ], [ 1218, 1241 ] ], [ [ 300, 318 ], [ 1112, 1130 ] ], [ [ 363, 374 ], [ 815, 826 ] ], [ [ 406, 419 ], [ 782, 795 ] ], [ [ 470, 489 ], [ 852, 871 ] ], [ [ 491, 498 ], [ 1715, 1722 ], [ 1792, 1799 ] ], [ [ 530, 552 ], [ 1805, 1827 ] ], [ [ 612, 637 ], [ 1731, 1756 ] ], [ [ 743, 1074 ] ], [ [ 1083, 1111 ], [ 1023, 1051 ] ] ]
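Outside Home Assistant, the same library can be polled directly. A short sketch based on rpi_bad_power's documented factory behaviour (returning None on unsupported systems, which is an assumption not shown in the record; the .get() call mirrors the update method above):

from rpi_bad_power import new_under_voltage

under_voltage = new_under_voltage()  # None when the kernel/hardware lacks support
if under_voltage is None:
    print("Under-voltage detection is not supported on this system.")
elif under_voltage.get():
    print("Under-voltage detected!")
else:
    print("Voltage is fine.")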
import numpy
import wave


class Audiostream(object):
    def __init__(self, volume_prio=1):
        self.volume_prio = volume_prio

    def get_data(self, frame_count, channels, width, rate, format=None):
        # Default implementation: return silence. The original referenced
        # undefined names (frames/self.channels/self.width); the method's
        # own parameters are used instead. `format` was added so the base
        # signature matches the subclasses below.
        return "".join(["\x00"] * frame_count * channels * width)

    def get_volume_priority(self):
        return self.volume_prio


class WaveAudioStream(Audiostream):
    def __init__(self, file, volume_prio=1):
        Audiostream.__init__(self, volume_prio)
        self.wf = wave.open(file)

    def get_data(self, frame_count, channels, width, rate, format):
        data = self.wf.readframes(frame_count)
        if len(data) > 0:
            # frombuffer replaces the long-deprecated numpy.fromstring.
            return numpy.frombuffer(data, dtype=format)
        else:
            return None


class FeedAudioStream(Audiostream):
    def __init__(self, keep_open=False, volume_prio=1):
        Audiostream.__init__(self, volume_prio)
        self.keep_open = keep_open
        self.closed = False
        self.data = []
        self.offset = 0

    def feed(self, data):
        if self.closed:
            print("WARNING: Trying to add data to a closed stream.")
            return  # ignore data fed after the stream has closed
        self.data.append(data)

    def clean(self):
        self.data = self.data[self.offset:]
        self.offset = 0

    def get_data(self, frame_count, channels, width, rate, format):
        size = min(len(self.data) - self.offset, frame_count * channels)
        if size == 0 and not self.keep_open:
            self.closed = True
            return None
        data = numpy.array(self.data[self.offset:self.offset + size])
        self.offset += size
        if self.offset > rate:
            self.clean()
        return data
[ [ [ 7, 12 ], [ 709, 714 ], [ 1577, 1582 ] ], [ [ 20, 24 ], [ 524, 528 ] ], [ [ 40, 51 ], [ 394, 405 ], [ 826, 837 ], [ 466, 477 ], [ 909, 920 ] ], [ [ 378, 393 ] ], [ [ 810, 825 ] ] ]
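A usage sketch for FeedAudioStream as defined above (sample values and the numpy dtype passed as format are illustrative):

import numpy

stream = FeedAudioStream(keep_open=True)
for sample in (0, 16384, 32767):
    stream.feed(sample)          # one raw sample value per call

# Pull back up to one frame of mono 16-bit audio at 44.1 kHz.
chunk = stream.get_data(frame_count=1, channels=1, width=2,
                        rate=44100, format=numpy.int16)
print(chunk)                     # -> [0] on the first call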
# This class was generated on Mon, 23 Dec 2019 12:39:22 IST by version 0.1.0-dev+904328-dirty of Braintree SDK Generator # payouts_item_get_request.py # @version 0.1.0-dev+904328-dirty # @type request # @data H4sIAAAAAAAC/+xb63PbuBH/3r9ih9eZnGcoyXe5p795bF/j9pq4sZNOx/VYELkSUYMADwta4WTyv3fwIM2XHMdxdH3ok61dPPa3u1gsFuD76CXLMTqIClap0tCEG8ynKzRRHB0jJZoXhisZHUTnmVoTpGgYFwRLpYGB7wS2TwyLCk6Pp3AI/yz3958niUrR/Ye+1bVtdc1Tz53dsSFDURBUqgSeojR8WUGKkmNqx89RGprC6dJPZ38Cp9Agdr0SJqEkBJPhJ89t1N2ktn89Bd6iBL4EbkCw5IaADYY2mkliiVXP2NDTKI7+VqKuzphmORrUFB1cXsXRC2Qp6j71F6XzPu2MmaxDex9dVIW1FhnN5SqKo7dMc7YQ2LFiDTeKo79gFVgDg15kCKfHoJY18NqUzrjrjCeZVQ9lal2b3WI61JpVXoz9OHqNLH0lRRUdLJkgtITfSq4xjQ6MLjGOzrQqUBuOFB3IUogPV74NkvGDWKIlUaEkoac1OM+8VKdWqrrJEPXHcbbRkWGmJGAyBWUy1A24neu2XPfBZg6Etp0/7qd27ltuqr6TdulDO5aS/1YinLHqjInJCiVqZjCFROW5ktadTcYMCC5vyCmFUKaoJ87aGhPkt6gnxFOElg5oCm8IU+f3RrPkhsvV5+rA+Xrcd+SjUmuUSQWJkreoySIbKCcJja47je6UNM4fKisZTgYaSZU62YKJ8V2SMbnCa2uhjvx9zlDyugXYFt6knIAVheC1mTJOYVU/saVqEw0RLbXKr1muSmk6eLr0e+xgndC3C/vnkksmE85E2xtjoDLJgNmFu2CCyQRB6WZ5p+UWjJfcKWHod6MgL02mESdJxjRLDGo4PX81+e7bb35su2GKV1/PUpXQjEuDK83sALOUa0zMTCOZWd14YhvTbO8pdpz4o3BvmSi7PlpThkAdJw77Y85XmYEFHvgYWgofQP0vwf2vQwkOLmpn9gCRI4HgNwjzP5/9Y+6dnGkEqQyYquAJE6KCpfZOwcQ0hOl61N4ckGLCcyaaHuNzXbw8bs1F5SLltzzF1EqowGSqJCZTk9H4dLMa4S9uDSLooHyQZb5AbbOJWpBCsATrTLHjBDEQIlwetWkEE+syYF3m6uvMmIIOZrP1ej3lpKZKr2aclHOoSddHppnJxZP4ydWDooBRYzGgTd1FgF0E2EWA/84IcDXM2VBrpUfyG0umbmJTk0YyGst61DHqUas3xUW56ifXLeJQQJ9SOzfVkgl3lG+S4kRpjcKtVShKXShCeuK0axMSLpdK527qa5vWdxCNMIfI3rw+dSC88p2jN73ucklnoO3EmhyJ2Kobbe5oQwCB5xdt6tgL9MebLUot7Z+2yIGw8ZCWlTmTE40staOAbR7U/UXkDqv1xC20Y7/QNhZI3PzXadPqDtWGBl2UhxKYFdwGO5am3Mfnexf5D0+/yJccRXeF15ShTZz2Q6nHtfLelLDSrvHHmeQzFjVRb7etKUPJNTJqFuqXEfNXLm+gPfNAYFdQ6AhcUzY7hvZVromLnJjC5YvDi5NXh+e+OlGnQazgM41LtPsZ2l8THepcNPsqYwYVo4nrsfflPSrTuOygDISRjFblhUB7OGd6hQbevP51ChcKcnaDIS3wqG0SE9vmCy49J0eTqRTW3GQ+9F7aCH2BeWF7THxsNpje5QBGKUFTjmbp0gC73c/0Mvnh+x/395w2p2CzkULjpNAqQSIuVzFwmYgy9ZPO/ziPYf71PHYp+HxvDk2eSlOwiOYW6xy4D6w3WEFtL4tVSZt5t4pLrFGBx+jxMJtQkTW8NI68pQXlddrbUQJpaLwXFxdntRma9M1sMN6WEGgUHfH975FThlU/NNmIqQr8qKN8//NPP31F6JLiyXd7depOqG+R3DHLlQ59nurGd4YuJcsXfFWqkkTV23gJcyYNT6gOqt4Nz21W68LJ6yAh9VJZJpmTjRHxlXQl4pntO6kh9X9O3316ervJDFe9DHcX+HaBbxf4doHv/yDwxQ+9O10wk2T9o/OQt+kE3bqUCppt3a66/l/k2ONuSP2p595b4TFUgT5E1L3sD1dpiUaHLkAyyrK4XAm7ehJecJTmMyFePqAMO1KD/Y8twN6HcFd/3dVfd/XXTwjTUvWukwNh6CchYFGBCV9yqyFlfAlKKsOXPAn7DJyXRaG0ISgLG8++29/fh8Pzo9PTVq7kIsc3liOVnPS5W8sV/AuGXsLQEMeqJ57ZeuZjQ9gUjpT2+XZK4PwOYd7E72ubXcz9QgMuGz9DMk8bDl76GuIQZpBjUHQcsO4vdRVMm2oLz1mEq5obvF6WQgylHud3RT8+OXt9cnR4cXLs03In+jOCpq8DNoUjJmGBdmMqiUskcvQYJE9u/H92ucsqPG5y2gi1Pmkj2wKBCsGNjzdLrsnEIBiZMH6tClhnKO+U6B5gNHNuyd/HtblZh38fEblATXZHNy2d2hE83O3gWPFblEMgHfLjkLghnMmDJbeIKudpKnAIq0t/HC4/RnDJN4TABCn3HNEojZCXwvBCYLsdhXMvl6swlNFKVjlPvH6YXQ/PKO6MvR1NFRqX/F03965JI48VHcsJbbgRGNcBulbOFiWnctmXvCGN7LiO1Tr1/A7y6oE/3tEe54uhv3einIsquOWhdckbqdbSnhTcWfiBYdTPcq9jh0n7Tp13nTpXtVOH9ls7LXdzhQ17dGCNvju2vPC0z79i5aHCUJ+y6tRle7lVkHrNhECzAVLDHILyLGtwXVeTQq8aC7WKAVPwR3eXfr1Fmau79lRn9cDgmc1an4EX259f4c055GrBBUKRKYkh9d/WEnNp9egb8wGrVyQepuSnx7XwcKHda+X2Y3QJTAJLEntet+ipIoP5k/v45ipD+w32EnFT9STwdg+/doWHXeHhf7XwcM+3NQPW6I7XKwa7L63gLSe+EOjThEqVne9tOp9ZbWc9hBA+WgUf8h4Q3z8lsMN5pkqR2jNqeES05ibjEp7vQ8qqbR07Dc/xOlyoYVcFA9ZI1dye1W28t23r9I+T//hozchnic0YsdXG5al7d4em19vfCX70nuf58+c/N/c8309/2NuWojrfEXUV1Wd92oVJq/vvgMV/IrYRT8MeYnJGbrUNX5s9VcoSHSlpUIZP5yL3RYwvY87+Re7+/oUxxV/9DehB9KeTi8h/RhgdRLPbb2b1V2mz9uees/fd+PUhiqPzG140kpy8KzAxmJ47LEcqxejg2/39D3/4NwAAAP// # DO NOT EDIT import paypalhttp try: from urllib import quote # Python 2.X except ImportError: 
from urllib.parse import quote # Python 3+ class PayoutsItemGetRequest: """ Shows details for a payout item, by ID. A <code>payout_item_id</code> helps you identify denied payments. If a payment is denied, you can use the <code>payout_item_id</code> to identify the payment even if it lacks a <code>transaction_id</code>. """ def __init__(self, payout_item_id): self.verb = "GET" self.path = "/v1/payments/payouts-item/{payout_item_id}?".replace("{payout_item_id}", quote(str(payout_item_id))) self.headers = {} self.headers["Content-Type"] = "application/json" self.body = None
[ [ [ 3479, 3489 ] ], [ [ 3519, 3524 ], [ 4063, 4068 ] ], [ [ 3588, 3593 ], [ 4063, 4068 ] ], [ [ 3614, 3635 ] ] ]
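A schematic execution sketch for the request class above. Client construction is elided because it requires real PayPal credentials and an environment object from a concrete SDK; the payout item id is a placeholder, and execute/status_code/result follow the usual paypalhttp response convention:

import paypalhttp

# A real environment with credentials is required in practice:
# client = paypalhttp.HttpClient(environment)

request = PayoutsItemGetRequest("PAYOUTITEMID123")  # hypothetical payout item id
response = client.execute(request)
print(response.status_code)
print(response.result)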
from typing import FrozenSet, Tuple import pysmt.typing as types from pysmt.environment import Environment as PysmtEnv from pysmt.fnode import FNode from utils import symb_to_next from hint import Hint, Location def transition_system(env: PysmtEnv) -> Tuple[FrozenSet[FNode], FNode, FNode, FNode]: assert isinstance(env, PysmtEnv) mgr = env.formula_manager pc = mgr.Symbol("pc", types.INT) x = mgr.Symbol("x", types.INT) y = mgr.Symbol("y", types.INT) z = mgr.Symbol("z", types.INT) x_pc = symb_to_next(mgr, pc) x_x = symb_to_next(mgr, x) x_y = symb_to_next(mgr, y) x_z = symb_to_next(mgr, z) symbols = frozenset([pc, x, y, z]) n_locs = 5 int_bound = n_locs pcs = [] x_pcs = [] ints = [mgr.Int(i) for i in range(int_bound)] for l in range(n_locs): n = ints[l] pcs.append(mgr.Equals(pc, n)) x_pcs.append(mgr.Equals(x_pc, n)) m_1 = mgr.Int(-1) pcend = mgr.Equals(pc, m_1) x_pcend = mgr.Equals(x_pc, m_1) # initial location. init = pcs[0] # control flow graph. cfg = mgr.And( # pc = -1 : -1, mgr.Implies(pcend, x_pcend), # pc = 0 & !(y >= 1) : -1, mgr.Implies(mgr.And(pcs[0], mgr.Not(mgr.GE(y, ints[1]))), x_pcend), # pc = 0 & y >= 1 : 1, mgr.Implies(mgr.And(pcs[0], mgr.GE(y, ints[1])), x_pcs[1]), # pc = 1 & !(z >= 1) : -1, mgr.Implies(mgr.And(pcs[1], mgr.Not(mgr.GE(z, ints[1]))), x_pcend), # pc = 1 & z >= 1 : 2, mgr.Implies(mgr.And(pcs[1], mgr.GE(z, ints[1])), x_pcs[2]), # pc = 2 & !(x >= 0) : -1, mgr.Implies(mgr.And(pcs[2], mgr.Not(mgr.GE(x, ints[0]))), x_pcend), # pc = 2 & x >= 0 : 3, mgr.Implies(mgr.And(pcs[2], mgr.GE(x, ints[0])), x_pcs[3]), # pc = 3 : 4, mgr.Implies(pcs[3], x_pcs[4]), # pc = 4 : 2, mgr.Implies(pcs[4], x_pcs[2])) # transition labels. labels = mgr.And( # (pc = -1 & pc' = -1) -> (x' = x & y' = y & z' = z), mgr.Implies( mgr.And(pcend, x_pcend), mgr.And(mgr.Equals(x_x, x), mgr.Equals(x_y, y), mgr.Equals(x_z, z))), # (pc = 0 & pc' = -1) -> (x' = x & y' = y & z' = z), mgr.Implies( mgr.And(pcs[0], x_pcend), mgr.And(mgr.Equals(x_x, x), mgr.Equals(x_y, y), mgr.Equals(x_z, z))), # (pc = 0 & pc' = 1) -> (x' = x & y' = y & z' = z), mgr.Implies( mgr.And(pcs[0], x_pcs[1]), mgr.And(mgr.Equals(x_x, x), mgr.Equals(x_y, y), mgr.Equals(x_z, z))), # (pc = 1 & pc' = -1) -> (x' = x & y' = y & z' = z), mgr.Implies( mgr.And(pcs[1], x_pcend), mgr.And(mgr.Equals(x_x, x), mgr.Equals(x_y, y), mgr.Equals(x_z, z))), # (pc = 1 & pc' = 2) -> (x' = x & y' = y & z' = z), mgr.Implies( mgr.And(pcs[1], x_pcs[2]), mgr.And(mgr.Equals(x_x, x), mgr.Equals(x_y, y), mgr.Equals(x_z, z))), # (pc = 2 & pc' = -1) -> (x' = x & y' = y & z' = z), mgr.Implies( mgr.And(pcs[2], x_pcend), mgr.And(mgr.Equals(x_x, x), mgr.Equals(x_y, y), mgr.Equals(x_z, z))), # (pc = 2 & pc' = 3) -> (x' = x & y' = y & z' = z), mgr.Implies( mgr.And(pcs[2], x_pcs[3]), mgr.And(mgr.Equals(x_x, x), mgr.Equals(x_y, y), mgr.Equals(x_z, z))), # (pc = 3 & pc' = 4) -> (x' = y*z - 1 & y' = y & z' = z), mgr.Implies( mgr.And(pcs[3], x_pcs[4]), mgr.And(mgr.Equals(x_x, mgr.Minus(mgr.Times(y, z), ints[1])), mgr.Equals(x_y, y), mgr.Equals(x_z, z))), # (pc = 4 & pc' = 2) -> (x' = x & y' = y+1 & z' = z), mgr.Implies( mgr.And(pcs[4], x_pcs[2]), mgr.And(mgr.Equals(x_x, x), mgr.Equals(x_y, mgr.Plus(y, ints[1])), mgr.Equals(x_z, z)))) # transition relation. trans = mgr.And(cfg, labels) # fairness. 
fairness = mgr.Not(pcend) return symbols, init, trans, fairness def hints(env: PysmtEnv) -> FrozenSet[Hint]: assert isinstance(env, PysmtEnv) mgr = env.formula_manager pc = mgr.Symbol("pc", types.INT) x = mgr.Symbol("x", types.INT) y = mgr.Symbol("y", types.INT) z = mgr.Symbol("z", types.INT) symbs = frozenset([pc, x, y, z]) x_pc = symb_to_next(mgr, pc) x_x = symb_to_next(mgr, x) x_y = symb_to_next(mgr, y) x_z = symb_to_next(mgr, z) res = [] i_0 = mgr.Int(0) i_1 = mgr.Int(1) i_2 = mgr.Int(2) i_3 = mgr.Int(3) loc0 = Location(env, mgr.GE(y, i_3)) loc0.set_progress(1, mgr.Equals(x_y, mgr.Plus(y, i_1))) loc1 = Location(env, mgr.GE(y, i_3)) loc1.set_progress(2, mgr.Equals(x_y, y)) loc2 = Location(env, mgr.GE(y, i_3)) loc2.set_progress(2, mgr.Equals(x_y, mgr.Plus(y, i_1))) h_y = Hint("h_y4", env, frozenset([y]), symbs) h_y.set_locs([loc0, loc1, loc2]) res.append(h_y) stutter = mgr.Equals(x_x, x) loc0 = Location(env, mgr.GT(x, i_0), mgr.And(mgr.GT(y, i_1), mgr.GT(z, i_1))) loc0.set_progress(1, mgr.GE(x_x, mgr.Minus(mgr.Times(y, z), i_1))) loc1 = Location(env, mgr.GT(x, i_0)) loc1.set_progress(0, mgr.Equals(x_x, mgr.Plus(x, i_1))) h_x = Hint("h_x2", env, frozenset([x]), symbs) h_x.set_locs([loc0, loc1]) res.append(h_x) loc0 = Location(env, mgr.GT(x, i_3), mgr.And(mgr.GT(y, i_1), mgr.GT(z, i_1))) loc0.set_progress(1, mgr.GE(x_x, mgr.Minus(mgr.Times(y, z), i_1))) loc1 = Location(env, mgr.GT(x, i_0), mgr.GE(y, i_1)) loc1.set_progress(0, mgr.Equals(x_x, mgr.Plus(x, y))) h_x = Hint("h_x3", env, frozenset([x]), symbs) h_x.set_locs([loc0, loc1]) res.append(h_x) loc0 = Location(env, mgr.GE(z, i_0)) loc0.set_progress(1, mgr.Equals(x_z, z)) loc1 = Location(env, mgr.GE(z, i_0)) loc1.set_progress(0, mgr.Equals(x_z, mgr.Plus(z, i_3))) h_z = Hint("h_z4", env, frozenset([z]), symbs) h_z.set_locs([loc0, loc1]) res.append(h_z) loc0 = Location(env, mgr.GE(z, i_3)) loc0.set_progress(0, mgr.GT(x_z, z)) h_z = Hint("h_z1", env, frozenset([z]), symbs) h_z.set_locs([loc0]) res.append(h_z) loc = Location(env, mgr.LE(z, i_0)) loc.set_progress(0, mgr.Equals(x_z, z)) h_z = Hint("h_z0", env, frozenset([z]), symbs) h_z.set_locs([loc]) res.append(h_z) loc0 = Location(env, mgr.GE(y, i_3)) loc0.set_progress(1, mgr.Equals(x_y, mgr.Plus(y, i_1))) loc1 = Location(env, mgr.GE(y, i_3), mgr.GE(x, i_2)) loc1.set_progress(0, mgr.Equals(x_y, mgr.Plus(y, x))) h_y = Hint("h_y3", env, frozenset([y]), symbs) h_y.set_locs([loc0, loc1]) res.append(h_y) stutter = mgr.Equals(x_y, y) loc0 = Location(env, mgr.GE(y, i_3)) loc0.set_progress(1, mgr.Equals(x_y, mgr.Plus(y, i_1))) loc1 = Location(env, mgr.GE(y, i_3), mgr.GE(z, i_2)) loc1.set_progress(0, mgr.Equals(x_y, mgr.Plus(y, z))) h_y = Hint("h_y2", env, frozenset([y]), symbs) h_y.set_locs([loc0, loc1]) res.append(h_y) loc0 = Location(env, mgr.Equals(pc, i_2)) loc0.set_progress(1, mgr.GT(x_pc, i_2)) loc1 = Location(env, mgr.GE(pc, i_3)) loc1.set_progress(0, mgr.Equals(x_pc, i_2)) h_pc = Hint("h_pc3", env, frozenset([pc]), symbs) h_pc.set_locs([loc0, loc1]) res.append(h_pc) loc0 = Location(env, mgr.GE(z, i_3), mgr.GE(y, i_0)) loc0.set_progress(1, mgr.Equals(x_z, y)) loc1 = Location(env, mgr.GE(z, i_0), mgr.GE(x, i_3)) loc1.set_progress(0, mgr.GE(x_z, mgr.Plus(z, x))) h_z = Hint("h_z3", env, frozenset([z]), symbs) h_z.set_locs([loc0, loc1]) res.append(h_z) return frozenset(res)
[ [ [ 19, 28 ], [ 260, 269 ], [ 4252, 4261 ] ], [ [ 30, 35 ], [ 254, 259 ] ], [ [ 43, 64 ], [ 439, 444 ], [ 474, 479 ], [ 509, 514 ], [ 544, 549 ], [ 4363, 4368 ], [ 4398, 4403 ], [ 4433, 4438 ], [ 4468, 4473 ] ], [ [ 95, 118 ], [ 241, 249 ], [ 373, 381 ], [ 4239, 4247 ], [ 4296, 4304 ] ], [ [ 143, 148 ], [ 270, 275 ], [ 278, 283 ], [ 285, 290 ], [ 338, 343 ] ], [ [ 167, 179 ], [ 566, 578 ], [ 598, 610 ], [ 629, 641 ], [ 660, 672 ], [ 4527, 4539 ], [ 4559, 4571 ], [ 4590, 4602 ], [ 4621, 4633 ] ], [ [ 197, 201 ], [ 4262, 4266 ], [ 5040, 5044 ], [ 5437, 5441 ], [ 5809, 5813 ], [ 6100, 6104 ], [ 6286, 6290 ], [ 6468, 6472 ], [ 6781, 6785 ], [ 7134, 7138 ], [ 7419, 7423 ], [ 7740, 7744 ] ], [ [ 203, 211 ], [ 4753, 4761 ], [ 4854, 4862 ], [ 4940, 4948 ], [ 5184, 5192 ], [ 5337, 5345 ], [ 5542, 5550 ], [ 5695, 5703 ], [ 5914, 5922 ], [ 6000, 6008 ], [ 6205, 6213 ], [ 6384, 6392 ], [ 6566, 6574 ], [ 6667, 6675 ], [ 6919, 6927 ], [ 7020, 7028 ], [ 7239, 7247 ], [ 7329, 7337 ], [ 7528, 7536 ], [ 7630, 7638 ] ], [ [ 218, 235 ] ], [ [ 4228, 4233 ] ] ]
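A minimal driver sketch for the module above (assumes pysmt is installed and the record is importable, e.g. saved as a hypothetical ts_hints.py next to the local utils and hint helpers it imports):

from pysmt.environment import Environment as PysmtEnv
from ts_hints import transition_system, hints  # hypothetical module name

env = PysmtEnv()
symbols, init, trans, fairness = transition_system(env)
print(len(symbols))      # 4 state variables: pc, x, y, z
print(len(hints(env)))   # the frozenset of Hint objects for this benchmark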
import requests
from django.db import models
from django.utils import timezone
from users.models import CustomUser
from datetime import datetime


def get_coordinate(gps, ref):
    # Convert a (degrees, minutes, seconds) tuple to a decimal coordinate.
    coordinate = gps[0] + gps[1] / 60 + gps[2] / 3600
    # South latitudes and west longitudes are negative. The original only
    # negated 'W', which silently produced wrong values for 'S'.
    if ref in ('S', 'W'):
        coordinate = -coordinate
    return coordinate


def get_timestamp(timestamp_string):
    datetime_object = datetime.strptime(timestamp_string, '%Y:%m:%d %H:%M:%S')
    return datetime_object


class Photo(models.Model):
    name = models.CharField(max_length=120)
    lat = models.DecimalField(max_digits=9, decimal_places=6)
    lon = models.DecimalField(max_digits=9, decimal_places=6)
    # NOTE: auto_now_add=True means the value passed in save_many below is
    # ignored; the row creation time is stored instead.
    timestamp = models.DateTimeField(auto_now_add=True, auto_now=False)
    user = models.ForeignKey(CustomUser, on_delete=models.CASCADE)
    airspace_name = models.CharField(max_length=120, default='')
    airspace_class = models.CharField(max_length=120, default='G')

    @staticmethod
    def save_many(photos, user):
        for photo in photos:
            name = photo['ImageDescription']
            lat = get_coordinate(photo['GPSLatitude'], photo['GPSLatitudeRef'])
            lon = get_coordinate(photo['GPSLongitude'], photo['GPSLongitudeRef'])
            timestamp = get_timestamp(photo['DateTimeOriginal'])
            t = requests.post(
                'http://airspace-service.herokuapp.com/geo/getAirspace',
                data={"longitude": lon, "latitude": lat}
            )
            airspace_data = t.json()
            airspace_name = airspace_data['name']
            airspace_class = airspace_data['class']
            photo_model = Photo(
                name=name,
                lat=lat,
                lon=lon,
                timestamp=timestamp,
                user=user,
                airspace_name=airspace_name,
                airspace_class=airspace_class
            )
            photo_model.save()

    @staticmethod
    def get_all(user):
        return Photo.objects.filter(user=user).values(
            'id',
            'name',
            'lat',
            'lon',
            'timestamp',
            'airspace_name',
            'airspace_class'
        )

    @staticmethod
    def delete_all(user):
        return Photo.objects.filter(user=user).delete()

    @staticmethod
    def delete_one(user, id):
        return Photo.objects.filter(user=user, id=id).delete()
[ [ [ 7, 15 ], [ 1262, 1270 ] ], [ [ 38, 44 ], [ 457, 463 ], [ 483, 489 ], [ 526, 532 ], [ 588, 594 ], [ 656, 662 ], [ 723, 729 ], [ 763, 769 ], [ 799, 805 ], [ 865, 871 ] ], [ [ 70, 78 ] ], [ [ 104, 114 ], [ 741, 751 ] ], [ [ 136, 144 ], [ 360, 368 ] ], [ [ 150, 164 ], [ 1037, 1051 ], [ 1117, 1131 ] ], [ [ 305, 318 ], [ 1205, 1218 ] ], [ [ 451, 456 ], [ 1584, 1589 ], [ 1907, 1912 ], [ 2158, 2163 ], [ 2244, 2249 ] ] ]
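A worked example of the EXIF helper functions above (values are illustrative; note the sign handling for the reference hemisphere):

lat = get_coordinate((52, 30, 0), 'N')        # 52.5
lon = get_coordinate((13, 30, 0), 'W')        # -13.5 (west is negated)
when = get_timestamp('2020:06:17 12:00:00')   # datetime(2020, 6, 17, 12, 0)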
a = [1, "a"] print(list) print(dir(list)) list = [1, "a"] print(dir(list)) tuple = ("a", "b") print(list) print(tuple) dictn = {"key": "dictionary", "d" :a} print(dictn) def factorial(n): "Factorial calculation string document string" # print("Calculating factorial of ", n) if n <= 1: return 1 else: return n * factorial(n - 1) print(factorial(100)) # printing document string print(factorial.__doc__)
[ [ [ 0, 1 ], [ 167, 168 ] ], [ [ 44, 48 ], [ 70, 74 ], [ 103, 107 ] ], [ [ 78, 83 ], [ 115, 120 ] ], [ [ 123, 128 ], [ 177, 182 ] ], [ [ 190, 199 ], [ 373, 382 ], [ 422, 431 ], [ 344, 353 ] ] ]
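A short follow-up to the shadowing demo above: deleting the module-level bindings lets name lookup fall through to the builtins again.

del list, tuple
print(list)    # <class 'list'> again
print(tuple)   # <class 'tuple'> again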
import os from maya import cmds import avalon.maya import pype.api from pype.hosts.maya.lib import extract_alembic class ExtractAnimation(pype.api.Extractor): """Produce an alembic of just point positions and normals. Positions and normals, uvs, creases are preserved, but nothing more, for plain and predictable point caches. """ label = "Extract Animation" hosts = ["maya"] families = ["animation"] def process(self, instance): # Collect the out set nodes out_sets = [node for node in instance if node.endswith("out_SET")] if len(out_sets) != 1: raise RuntimeError("Couldn't find exactly one out_SET: " "{0}".format(out_sets)) out_set = out_sets[0] roots = cmds.sets(out_set, query=True) # Include all descendants nodes = roots + cmds.listRelatives(roots, allDescendents=True, fullPath=True) or [] # Collect the start and end including handles start = instance.data["frameStart"] end = instance.data["frameEnd"] handles = instance.data.get("handles", 0) or 0 if handles: start -= handles end += handles self.log.info("Extracting animation..") dirname = self.staging_dir(instance) parent_dir = self.staging_dir(instance) filename = "{name}.abc".format(**instance.data) path = os.path.join(parent_dir, filename) options = { "step": instance.data.get("step", 1.0) or 1.0, "attr": ["cbId"], "writeVisibility": True, "writeCreases": True, "uvWrite": True, "selection": True, "worldSpace": instance.data.get("worldSpace", True), "writeColorSets": instance.data.get("writeColorSets", False) } if not instance.data.get("includeParentHierarchy", True): # Set the root nodes if we don't want to include parents # The roots are to be considered the ones that are the actual # direct members of the set options["root"] = roots if int(cmds.about(version=True)) >= 2017: # Since Maya 2017 alembic supports multiple uv sets - write them. options["writeUVSets"] = True with avalon.maya.suspended_refresh(): with avalon.maya.maintained_selection(): cmds.select(nodes, noExpand=True) extract_alembic(file=path, startFrame=float(start), endFrame=float(end), **options) if "representations" not in instance.data: instance.data["representations"] = [] representation = { 'name': 'abc', 'ext': 'abc', 'files': filename, "stagingDir": dirname, } instance.data["representations"].append(representation) self.log.info("Extracted {} to {}".format(instance, dirname))
[ [ [ 7, 9 ], [ 1511, 1513 ] ], [ [ 28, 32 ], [ 783, 787 ], [ 873, 877 ], [ 2237, 2241 ], [ 2508, 2512 ] ], [ [ 41, 52 ], [ 2406, 2412 ], [ 2456, 2462 ] ], [ [ 60, 68 ], [ 142, 146 ] ], [ [ 101, 116 ], [ 2558, 2573 ] ], [ [ 125, 141 ] ] ]
import re from passerine.db.common import ProxyObject, ProxyFactory, ProxyCollection from passerine.db.repository import Repository from passerine.db.entity import get_relational_map from passerine.db.exception import IntegrityConstraintError, UnsupportedRepositoryReferenceError from passerine.db.mapper import AssociationType from passerine.db.metadata.entity import EntityMetadata from passerine.db.metadata.helper import EntityMetadataHelper from passerine.db.uow import UnitOfWork from passerine.graph import DependencyNode, DependencyManager class QueryIteration(DependencyNode): def __init__(self, join_config, alias, parent_alias, property_path): super(QueryIteration, self).__init__() self._join_config = join_config self._alias = alias self._parent_alias = parent_alias self._property_path = property_path @property def join_config(self): return self._join_config @property def alias(self): return self._alias @property def parent_alias(self): return self._parent_alias @property def property_path(self): return self._property_path def to_dict(self): return { 'property_path': self.property_path, 'parent_alias': self.parent_alias, 'alias': self.alias, 'join_config': self.join_config, 'adjacent_nodes':self.adjacent_nodes } def __repr__(self): return str('{}({})'.format(self.__class__.__name__, self.to_dict())) class Session(object): """ Database Session :param database_name: the database name :param driver: the driver API """ def __init__(self, driver): self._driver = driver self._uow = UnitOfWork(self) self._repository_map = {} self._registered_types = {} self._re_property_path_delimiter = re.compile('\.') @property def driver(self): return self._driver def collection(self, entity_class): """ Alias to ``repository()`` .. deprecated:: 2.2 """ return self.repository(entity_class) def repositories(self): """ Retrieve the list of collections :rtype: list """ return [self._repository_map[key] for key in self._repository_map] def repository(self, reference): """ Retrieve the collection :param reference: the entity class or entity metadata of the target repository / collection :rtype: passerine.db.repository.Repository """ key = None if isinstance(reference, EntityMetadata): key = reference.collection_name elif EntityMetadataHelper.hasMetadata(reference): is_registerable_reference = True metadata = EntityMetadataHelper.extract(reference) key = metadata.collection_name self.register_class(reference) if not key: raise UnsupportedRepositoryReferenceError('Either a class with metadata or an entity metadata is supported.') if key not in self._repository_map: repository = Repository( session = self, representing_class = reference ) repository.setup_index() self._repository_map[key] = repository return self._repository_map[key] def register_class(self, entity_class): """ Register the entity class :param type entity_class: the class of document/entity :rtype: passerine.db.repository.Repository .. note:: This is for internal operation only. As it seems to be just a residual from the prototype stage, the follow-up investigation in order to remove the method will be for Tori 3.1. 
""" key = entity_class if isinstance(entity_class, type): metadata = EntityMetadataHelper.extract(entity_class) key = metadata.collection_name if key not in self._registered_types: self._registered_types[key] = entity_class def query(self, query): """ Query the data :param passerine.db.query.Query query: the query object :return: the list of matched entities :rtype: list """ metadata = EntityMetadataHelper.extract(query.origin) # Deprecated in Tori 3.1; Only for backward compatibility if not query.is_new_style: return self.driver.query( metadata, query._condition, self.driver.dialect.get_iterating_constrains(query) ) root_class = query.origin expression_set = query.criteria.get_analyzed_version() # Register the root entity query.join_map[query.alias] = { 'alias': query.alias, 'path': None, 'class': root_class, 'parent_alias': None, 'property_path': None, 'result_list': [] } self._update_join_map(metadata, query.join_map, query.alias) iterating_sequence = self._compute_iterating_sequence(query.join_map) alias_to_query_map = self.driver.dialect.get_alias_to_native_query_map(query) for iteration in iterating_sequence: if not self._sub_query(query, alias_to_query_map, iteration): break return query.join_map[query.alias]['result_list'] def _sub_query(self, query, alias_to_query_map, iteration): is_join_query = True alias = iteration.alias if alias not in alias_to_query_map: return False join_config = query.join_map[alias] joined_type = join_config['class'] joined_meta = EntityMetadataHelper.extract(joined_type) native_query = alias_to_query_map[alias] local_constrains = {} if not iteration.parent_alias: is_root = False constrains = self.driver.dialect.get_iterating_constrains(query) result_list = self.driver.query(joined_meta, native_query, local_constrains) # No result in a sub-query means no result in the main query. if not result_list: return False join_config['result_list'] = result_list alias_to_query_map.update(self.driver.dialect.get_alias_to_native_query_map(query)) return True def _compute_iterating_sequence(self, join_map): iterating_sequence = [] joining_sequence = [] reference_map = {} # reference_map is used locally for fast reverse lookup # iterating_seq is a final sequence # Calculate the iterating sequence for alias in join_map: join_config = join_map[alias] parent_alias = None property_path = None if join_config['path']: parent_alias, property_path = join_config['path'].split('.', 2) qi = QueryIteration(join_config, alias, parent_alias, property_path) joining_sequence.append(qi) reference_map[alias] = qi # Update the dependency map for key in reference_map: reference_a = reference_map[key] if reference_a.parent_alias not in reference_map: continue reference_a.connect(reference_map[reference_a.parent_alias]) iterating_sequence = DependencyManager.get_order(reference_map) iterating_sequence.reverse() return iterating_sequence def _update_join_map(self, origin_metadata, join_map, origin_alias): link_map = origin_metadata.relational_map iterating_sequence = [] # Compute the (local) iterating sequence for updating the join map. # Note: this is not the query iterating sequence. for alias in join_map: join_config = join_map[alias] if join_config['class']: continue parent_alias, property_path = join_config['path'].split('.', 2) join_config['alias'] = alias join_config['property_path'] = property_path join_config['parent_alias'] = parent_alias join_config['result_list'] = [] iterating_sequence.append((join_config, alias, parent_alias, property_path)) # Update the immediate properties. 
for join_config, current_alias, parent_alias, property_path in iterating_sequence: if parent_alias != origin_alias: continue if property_path not in link_map: continue mapper = link_map[property_path] join_config['class'] = mapper.target_class join_config['mapper'] = mapper # Update the joined properties. for join_config, current_alias, parent_alias, property_path in iterating_sequence: if current_alias not in join_map: continue if not join_map[current_alias]['class']: continue next_origin_class = join_map[current_alias]['class'] next_metadata = EntityMetadataHelper.extract(next_origin_class) self._update_join_map(next_metadata, join_map, current_alias) def delete(self, *entities): """ Delete entities :param entities: one or more entities :type entities: type of list of type """ for entity in entities: targeted_entity = self._force_load(entity) self._uow.register_deleted(targeted_entity) def refresh(self, *entities): """ Refresh entities :param entities: one or more entities :type entities: type of list of type """ for entity in entities: self.refresh_one(entity) def refresh_one(self, entity): self._uow.refresh(self._force_load(entity)) def persist(self, *entities): """ Persist entities :param entities: one or more entities :type entities: type of list of type """ for entity in entities: self.persist_one(entity) def persist_one(self, entity): targeted_entity = self._force_load(entity) registering_action = self._uow.register_new \ if self._uow.is_new(targeted_entity) \ else self._uow.register_dirty registering_action(targeted_entity) def recognize(self, entity): self._uow.register_clean(self._force_load(entity)) def flush(self, *args, **kwargs): """ Flush all changes of the session. See the flag from :method:`passerine.db.uow.UnitOfWork.commit`. """ self._uow.commit(*args, **kwargs) def find_record(self, id, cls): return self._uow.find_recorded_entity(id, cls) def apply_relational_map(self, entity): """ Wire connections according to the relational map """ meta = EntityMetadataHelper.extract(entity) rmap = meta.relational_map for property_name in rmap: guide = rmap[property_name] """ :type: passerine.db.mapper.RelatingGuide """ # In the reverse mapping, the lazy loading is not possible but so # the proxy object is still used. if guide.inverted_by: target_meta = EntityMetadataHelper.extract(guide.target_class) api = self._driver.collection(target_meta.collection_name) if guide.association in [AssociationType.ONE_TO_ONE, AssociationType.MANY_TO_ONE]: # Replace with Criteria target = api.find_one({guide.inverted_by: entity.id}) entity.__setattr__(property_name, ProxyFactory.make(self, target['_id'], guide)) elif guide.association == AssociationType.ONE_TO_MANY: # Replace with Criteria proxy_list = [ ProxyFactory.make(self, target['_id'], guide) for target in api.find({guide.inverted_by: entity.id}) ] entity.__setattr__(property_name, proxy_list) elif guide.association == AssociationType.MANY_TO_MANY: entity.__setattr__(property_name, ProxyCollection(self, entity, guide)) else: raise IntegrityConstraintError('Unknown type of entity association (reverse mapping)') return # Done the application # In the direct mapping, the lazy loading is applied wherever applicable. 
if guide.association in [AssociationType.ONE_TO_ONE, AssociationType.MANY_TO_ONE]: if not entity.__getattribute__(property_name): continue entity.__setattr__( property_name, ProxyFactory.make( self, entity.__getattribute__(property_name), guide ) ) elif guide.association == AssociationType.ONE_TO_MANY: proxy_list = [] for object_id in entity.__getattribute__(property_name): if not object_id: continue proxy_list.append(ProxyFactory.make(self, object_id, guide)) entity.__setattr__(property_name, proxy_list) elif guide.association == AssociationType.MANY_TO_MANY: entity.__setattr__(property_name, ProxyCollection(self, entity, guide)) else: raise IntegrityConstraintError('Unknown type of entity association') def _force_load(self, entity): return entity._actual \ if isinstance(entity, ProxyObject) \ else entity
[ [ [ 8, 10 ], [ 1978, 1980 ] ], [ [ 52, 63 ], [ 14047, 14058 ] ], [ [ 65, 77 ], [ 11973, 11985 ], [ 12194, 12206 ], [ 13114, 13126 ], [ 13580, 13592 ] ], [ [ 79, 94 ], [ 12534, 12549 ], [ 13804, 13819 ] ], [ [ 136, 146 ], [ 3248, 3258 ] ], [ [ 188, 206 ] ], [ [ 248, 272 ], [ 12620, 12644 ], [ 13882, 13906 ] ], [ [ 274, 309 ], [ 3074, 3109 ] ], [ [ 351, 366 ], [ 11742, 11757 ], [ 11770, 11785 ], [ 12062, 12077 ], [ 12450, 12465 ], [ 12872, 12887 ], [ 12900, 12915 ], [ 13335, 13350 ], [ 13724, 13739 ] ], [ [ 408, 422 ], [ 2715, 2729 ] ], [ [ 464, 484 ], [ 2789, 2809 ], [ 2903, 2923 ], [ 4059, 4079 ], [ 4484, 4504 ], [ 5943, 5963 ], [ 9348, 9368 ], [ 11177, 11197 ], [ 11575, 11595 ] ], [ [ 526, 536 ], [ 1846, 1856 ] ], [ [ 578, 592 ], [ 634, 648 ] ], [ [ 594, 611 ], [ 7616, 7633 ] ], [ [ 619, 633 ], [ 738, 752 ], [ 7164, 7178 ] ], [ [ 1624, 1631 ] ] ]
import numpy as np from numpy.core.numerictypes import typecodes import inspect import functools import re import builtins import os from concurrent.futures import ThreadPoolExecutor as thread_pool from concurrent.futures import ProcessPoolExecutor as process_pool from concurrent.futures import as_completed def _iterable(y): try: iter(y) except TypeError: return False return True # We use an extended version of: # http://docs.scipy.org/doc/numpy/reference/c-api.generalized-ufuncs.html _DIMENSION_NAME = r'\w+' _CORE_DIMENSION_LIST = '(?:{0:}(?:,{0:})*)?'.format(_DIMENSION_NAME) _VECTOR_ARGUMENT = r'(\({}\))'.format(_CORE_DIMENSION_LIST) _EXCLUDED_ARGUMENT = r'(_)' _ARGUMENT = r'(?:{0:}|{1:})'.format(_VECTOR_ARGUMENT, _EXCLUDED_ARGUMENT) _ARGUMENT_LIST = '{0:}(?:,{0:})*'.format(_ARGUMENT) _OUT_ARGUMENT_LIST = '{0:}(?:,{0:})*'.format(_VECTOR_ARGUMENT) _SIGNATURE = '^{0:}->{1:}$'.format(_ARGUMENT_LIST, _OUT_ARGUMENT_LIST) def _parse_signature(signature): if not re.match(_SIGNATURE, signature): raise ValueError( 'not a valid gufunc signature: {}'.format(signature)) inargs, outargs = [], [] _in, _out = signature.split('->') for arg in re.findall(_ARGUMENT, _in): if arg[1] == "_": inargs.append(None) else: inarg = [] for match in re.findall(_DIMENSION_NAME, arg[0]): try: inarg.append(int(match)) except: inarg.append(match) inargs.append(tuple(inarg)) for arg in re.findall(_ARGUMENT, _out): if arg[1] == "_": outargs.append(None) else: outarg = [] for match in re.findall(_DIMENSION_NAME, arg[0]): try: outarg.append(int(match)) except: outarg.append(match) outargs.append(tuple(outarg)) return inargs, outargs def _update_dim_sizes(dim_sizes, arg, core_dims): if not core_dims: return num_core_dims = len(core_dims) if arg.ndim < num_core_dims: raise ValueError('%d-dimensional argument does not have enough ' 'dimensions for all core dimensions %r' % (arg.ndim, core_dims)) core_shape = arg.shape[-num_core_dims:] for dim, size in zip(core_dims, core_shape): if dim in dim_sizes: if size != dim_sizes[dim]: raise ValueError('inconsistent size for core dimension' ' %r: %r vs %r' % (dim, size, dim_sizes[dim])) elif isinstance(dim, str): dim_sizes[dim] = size elif dim != size: raise ValueError('inconsistent size for core dimension: %r vs %r' % (dim, size)) def _parse_input_dimensions(args, arg_dims): dim_sizes = {} broadcast_args = [] for a, dims in zip(args, arg_dims): if dims is None: broadcast_args.append(None) continue _update_dim_sizes(dim_sizes, a, dims) ndim = a.ndim - len(dims) dummy_array = np.lib.stride_tricks.as_strided(0, a.shape[:ndim]) broadcast_args.append(dummy_array) broadcast_shape = np.lib.stride_tricks._broadcast_shape(*broadcast_args) return broadcast_shape, dim_sizes def _calculate_shapes(broadcast_shape, dim_sizes, list_of_core_dims): return [(broadcast_shape + tuple((dim_sizes[dim] if isinstance(dim, str) else dim) for dim in core_dims) if core_dims is not None else None) for core_dims in list_of_core_dims] def _create_arrays(broadcast_shape, dim_sizes, list_of_core_dims, dtypes): shapes = _calculate_shapes(broadcast_shape, dim_sizes, list_of_core_dims) arrays = tuple(np.empty(shape, dtype=dtype) for shape, dtype in zip(shapes, dtypes)) return arrays def parallize(signature, otypes=None, doc=None, default='parallelenv', evn='MEGA_PARALLIZE', isvec=False, parallel='threads', sendindex=False): def wrap_parallized(pyfunc): return parallized(pyfunc, signature, otypes, doc, default, evn, isvec, parallel, sendindex) return wrap_parallized class parallized(object): # inspired by np.vectorize 
    def __init__(self, pyfunc, signature, otypes=None, doc=None,
                 default='parallel', evn='MEGA_PARALLIZE', isvec=False,
                 parallel_type='threads', sendindex=False):
        self.signature = signature
        self.default = default
        self.evn = evn
        self.isvec = isvec
        self.parallel_type = parallel_type
        self.sendindex = sendindex
        self._ufunc = None  # Caching to improve default performance

        if doc is not None:
            self.__doc__ = doc
        else:
            self.__doc__ = pyfunc.__doc__

        if isinstance(otypes, str):
            for char in otypes:
                if char not in typecodes['All']:
                    raise ValueError("Invalid otype specified: %s" % (char,))
        elif _iterable(otypes):
            otypes = ''.join([np.dtype(x).char for x in otypes])
        elif otypes is not None:
            raise ValueError("Invalid otype specification")
        self.otypes = otypes

        self._in, self._out = _parse_signature(signature)
        self.excluded = [(a is None) for a in self._in]

        self.pyfunc = pyfunc
        self.__wrapped__ = pyfunc
        self.parameters = [k for k in inspect.signature(pyfunc).parameters]
        if self.sendindex:
            self.parameters = self.parameters[1:]

    def _process_args(self, args, kwargs):
        givenargs = list(args)
        allargs = []
        for p in self.parameters:
            if p in kwargs:
                allargs.append(kwargs.pop(p))
            else:
                if len(args) == 0:
                    msg = 'expected {}, got {}'.format(len(self.parameters),
                                                       len(givenargs))
                    raise TypeError("Missing positional arguments: " + msg)
                allargs.append(args[0])
                args = args[1:]
        if len(kwargs) != 0:
            raise TypeError("Unknown keyword arguments {}!".format(kwargs))
        if len(args) != 0:
            msg = 'expected {}, got {}'.format(len(self.parameters),
                                               len(givenargs))
            raise TypeError("Too many positional arguments: " + msg)

        args = tuple((np.asanyarray(a) if not ex else a)
                     for a, ex in zip(allargs, self.excluded))

        broadcast_shape, dim_sizes = _parse_input_dimensions(args, self._in)
        input_shapes = _calculate_shapes(broadcast_shape, dim_sizes, self._in)
        args = [(np.broadcast_to(arg, shape, subok=True)
                 if shape is not None else arg)
                for arg, shape in zip(args, input_shapes)]
        return broadcast_shape, dim_sizes, args

    def __call__(self, *args, **kwargs):
        # NOTE: the original compared strings with `is`, which only works by
        # accident of CPython interning; `==` is the correct comparison.
        if self.default == 'parallel':
            return self.parallel(*args, **kwargs)
        if self.default == 'sequential':
            return self.sequential(*args, **kwargs)
        if self.default == 'vectorized':
            return self.vectorized(*args, **kwargs)
        if self.default == 'parallelenv':
            if self.evn in os.environ and not os.environ[self.evn]:
                return self.vectorized(*args, **kwargs)
            else:
                return self.parallel(*args, **kwargs)

    def vectorized(self, *args, **kwargs):
        if self.isvec:
            if self.sendindex:
                return self.pyfunc(None, *args, **kwargs)
            else:
                return self.pyfunc(*args, **kwargs)
        else:
            return self.sequential(*args, **kwargs)

    def sequential(self, *args, **kwargs):
        broadcast_shape, dim_sizes, args = self._process_args(args, kwargs)
        outputs = None
        otypes = self.otypes
        nout = len(self._out)

        for index in np.ndindex(*broadcast_shape):
            i_args = ((arg[index] if _in is not None else arg)
                      for _in, arg in zip(self._in, args))
            if self.sendindex:
                results = self.pyfunc(index, *i_args)
            else:
                results = self.pyfunc(*i_args)

            n_results = len(results) if isinstance(results, tuple) else 1

            if nout != n_results:
                raise ValueError(
                    'wrong number of outputs from pyfunc: expected %r, got %r'
                    % (nout, n_results))

            if nout == 1:
                results = (results,)

            if outputs is None:
                for result, core_dims in zip(results, self._out):
                    _update_dim_sizes(dim_sizes, result, core_dims)

                if otypes is None:
                    otypes = [np.asarray(result).dtype for result in results]

                outputs = _create_arrays(broadcast_shape, dim_sizes,
                                         self._out, otypes)

            for output, result in zip(outputs, results):
                output[index] = result

        if outputs is None:
            # did not call the function even once
            if otypes is None:
                raise ValueError('cannot call `vectorize` on size 0 inputs '
                                 'unless `otypes` is set')
            if builtins.any(dim not in dim_sizes
                            for dims in self._out
                            for dim in dims):
                raise ValueError('cannot call `vectorize` with a signature '
                                 'including new output dimensions on size 0 '
                                 'inputs')
            outputs = _create_arrays(broadcast_shape, dim_sizes,
                                     self._out, otypes)

        return outputs[0] if nout == 1 else outputs

    def parallel(self, *args, **kwargs):
        broadcast_shape, dim_sizes, args = self._process_args(args, kwargs)
        outputs = None
        otypes = self.otypes
        nout = len(self._out)

        if self.parallel_type == 'threads':
            pool = thread_pool(os.cpu_count())
        elif self.parallel_type == 'processes':
            pool = process_pool(os.cpu_count())
        else:
            # Guard added: previously an unknown parallel_type left `pool`
            # unbound and raised a confusing NameError below.
            raise ValueError("Unknown parallel_type: %r" % (self.parallel_type,))

        futures = {}
        for index in np.ndindex(*broadcast_shape):
            i_args = ((arg[index] if _in is not None else arg)
                      for _in, arg in zip(self._in, args))
            if self.sendindex:
                futures[pool.submit(self.pyfunc, index, *i_args)] = index
            else:
                futures[pool.submit(self.pyfunc, *i_args)] = index

        for f in as_completed(futures):
            index = futures[f]
            results = f.result()

            n_results = len(results) if isinstance(results, tuple) else 1

            if nout != n_results:
                raise ValueError(
                    'wrong number of outputs from pyfunc: expected %r, got %r'
                    % (nout, n_results))

            if nout == 1:
                results = (results,)

            if outputs is None:
                for result, core_dims in zip(results, self._out):
                    _update_dim_sizes(dim_sizes, result, core_dims)

                if otypes is None:
                    otypes = [np.asarray(result).dtype for result in results]

                outputs = _create_arrays(broadcast_shape, dim_sizes,
                                         self._out, otypes)

            for output, result in zip(outputs, results):
                output[index] = result

        if outputs is None:
            # did not call the function even once
            if otypes is None:
                raise ValueError('cannot call `vectorize` on size 0 inputs '
                                 'unless `otypes` is set')
            if builtins.any(dim not in dim_sizes
                            for dims in self._out
                            for dim in dims):
                raise ValueError('cannot call `vectorize` with a signature '
                                 'including new output dimensions on size 0 '
                                 'inputs')
            outputs = _create_arrays(broadcast_shape, dim_sizes,
                                     self._out, otypes)

        return outputs[0] if nout == 1 else outputs


class asparallel(object):
    def __init__(self, pyfunc, default='parallelenv', evn='MEGA_PARALLIZE'):
        self.pyfunc = pyfunc
        self.default = default
        self.evn = evn
        self.__wrapped__ = pyfunc

    def __call__(self, *args, **kwargs):
        # Same fix as above: compare strings with `==`, not `is`.
        if self.default == 'parallel':
            return self.parallel(*args, **kwargs)
        if self.default == 'sequential':
            return self.sequential(*args, **kwargs)
        if self.default == 'vectorized':
            return self.vectorized(*args, **kwargs)
        if self.default == 'parallelenv':
            if self.evn in os.environ and not os.environ[self.evn]:
                return self.vectorized(*args, **kwargs)
            else:
                return self.parallel(*args, **kwargs)

    def parallel(self, *args, **kwargs):
        def wrap_parallels(parallelfunc):
            return parallelfunc.parallel
        return self.pyfunc(wrap_parallels, *args, **kwargs)

    def sequential(self, *args, **kwargs):
        def wrap_parallels(parallelfunc):
            return parallelfunc.sequential
        return self.pyfunc(wrap_parallels, *args, **kwargs)

    def vectorized(self, *args, **kwargs):
        def wrap_parallels(parallelfunc):
            return parallelfunc.vectorized
        return self.pyfunc(wrap_parallels, *args, **kwargs)
[ [ [ 7, 18 ], [ 3202, 3204 ], [ 3318, 3320 ], [ 3938, 3940 ], [ 5297, 5299 ], [ 6696, 6698 ], [ 6968, 6970 ], [ 8235, 8237 ], [ 9098, 9100 ], [ 10568, 10570 ], [ 11576, 11578 ] ], [ [ 55, 64 ], [ 5139, 5148 ] ], [ [ 72, 79 ], [ 5671, 5678 ] ], [ [ 87, 96 ] ], [ [ 104, 106 ], [ 1009, 1011 ], [ 1216, 1218 ], [ 1364, 1366 ], [ 1587, 1589 ], [ 1738, 1740 ] ], [ [ 114, 122 ], [ 9634, 9642 ], [ 12112, 12120 ] ], [ [ 130, 132 ], [ 7549, 7551 ], [ 7568, 7570 ], [ 10413, 10415 ], [ 10509, 10511 ], [ 13222, 13224 ], [ 13241, 13243 ] ], [ [ 164, 197 ], [ 10401, 10412 ] ], [ [ 229, 264 ], [ 10496, 10508 ] ], [ [ 296, 308 ], [ 10928, 10940 ] ], [ [ 315, 324 ], [ 5248, 5257 ] ], [ [ 521, 536 ], [ 598, 613 ], [ 1375, 1390 ], [ 1749, 1764 ] ], [ [ 546, 566 ], [ 653, 673 ] ], [ [ 615, 631 ], [ 739, 755 ], [ 874, 890 ] ], [ [ 675, 693 ], [ 757, 775 ] ], [ [ 703, 712 ], [ 818, 827 ], [ 1227, 1236 ], [ 1598, 1607 ] ], [ [ 777, 791 ], [ 927, 941 ] ], [ [ 829, 847 ], [ 943, 961 ] ], [ [ 892, 902 ], [ 1018, 1028 ] ], [ [ 969, 985 ], [ 5485, 5501 ] ], [ [ 1982, 1999 ], [ 3108, 3125 ], [ 8984, 9001 ], [ 11462, 11479 ] ], [ [ 2890, 2913 ], [ 6832, 6855 ] ], [ [ 3417, 3434 ], [ 3854, 3871 ], [ 6895, 6912 ] ], [ [ 3770, 3784 ], [ 9173, 9187 ], [ 9984, 9998 ], [ 11651, 11665 ], [ 12462, 12476 ] ], [ [ 4051, 4060 ] ], [ [ 4413, 4423 ], [ 4267, 4277 ] ], [ [ 12622, 12632 ] ] ]
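A usage sketch of the parallize decorator defined above (assumes the module is importable so parallize is in scope; the signature string follows numpy's gufunc convention, and 'sequential' is chosen so the demo does not spawn a pool):

import numpy as np

@parallize('(m),(m)->()', otypes='d', default='sequential')
def dot(a, b):
    # Core operation; the wrapper broadcasts and loops over the leading axes.
    return np.sum(a * b)

x = np.random.rand(10, 3)
y = np.random.rand(3)
print(dot(x, y).shape)   # (10,)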
import datetime import os import re from peewee import * from playhouse.reflection import * from .base import IS_SQLITE_OLD from .base import ModelTestCase from .base import TestModel from .base import db from .base import requires_models from .base import requires_sqlite from .base import skip_if from .base_models import Tweet from .base_models import User class ColTypes(TestModel): f1 = BigIntegerField(index=True) f2 = BlobField() f3 = BooleanField() f4 = CharField(max_length=50) f5 = DateField() f6 = DateTimeField() f7 = DecimalField() f8 = DoubleField() f9 = FloatField() f10 = IntegerField(unique=True) f11 = AutoField() f12 = TextField() f13 = TimeField() class Meta: indexes = ( (('f10', 'f11'), True), (('f11', 'f8', 'f13'), False), ) class Nullable(TestModel): nullable_cf = CharField(null=True) nullable_if = IntegerField(null=True) class RelModel(TestModel): col_types = ForeignKeyField(ColTypes, backref='foo') col_types_nullable = ForeignKeyField(ColTypes, null=True) class FKPK(TestModel): col_types = ForeignKeyField(ColTypes, primary_key=True) class Underscores(TestModel): _id = AutoField() _name = CharField() class Category(TestModel): name = CharField(max_length=10) parent = ForeignKeyField('self', null=True) class Nugget(TestModel): category_id = ForeignKeyField(Category, column_name='category_id') category = CharField() class BaseReflectionTestCase(ModelTestCase): def setUp(self): super(BaseReflectionTestCase, self).setUp() self.introspector = Introspector.from_database(self.database) class TestReflection(BaseReflectionTestCase): requires = [ColTypes, Nullable, RelModel, FKPK, Underscores, Category, Nugget] def test_generate_models(self): models = self.introspector.generate_models() self.assertTrue(set(( 'category', 'col_types', 'fkpk', 'nugget', 'nullable', 'rel_model', 'underscores')).issubset(set(models))) def assertIsInstance(obj, klass): self.assertTrue(isinstance(obj, klass)) category = models['category'] self.assertEqual( sorted(category._meta.fields), ['id', 'name', 'parent']) assertIsInstance(category.id, AutoField) assertIsInstance(category.name, CharField) assertIsInstance(category.parent, ForeignKeyField) self.assertEqual(category.parent.rel_model, category) fkpk = models['fkpk'] self.assertEqual(sorted(fkpk._meta.fields), ['col_types']) assertIsInstance(fkpk.col_types, ForeignKeyField) self.assertEqual(fkpk.col_types.rel_model, models['col_types']) self.assertTrue(fkpk.col_types.primary_key) relmodel = models['rel_model'] self.assertEqual( sorted(relmodel._meta.fields), ['col_types', 'col_types_nullable', 'id']) assertIsInstance(relmodel.col_types, ForeignKeyField) assertIsInstance(relmodel.col_types_nullable, ForeignKeyField) self.assertFalse(relmodel.col_types.null) self.assertTrue(relmodel.col_types_nullable.null) self.assertEqual(relmodel.col_types.rel_model, models['col_types']) self.assertEqual(relmodel.col_types_nullable.rel_model, models['col_types']) @requires_sqlite def test_generate_models_indexes(self): models = self.introspector.generate_models() self.assertEqual(models['fkpk']._meta.indexes, []) self.assertEqual(models['rel_model']._meta.indexes, []) self.assertEqual(models['category']._meta.indexes, []) col_types = models['col_types'] indexed = set(['f1']) unique = set(['f10']) for field in col_types._meta.sorted_fields: self.assertEqual(field.index, field.name in indexed) self.assertEqual(field.unique, field.name in unique) indexes = col_types._meta.indexes self.assertEqual(sorted(indexes), [ (['f10', 'f11'], True), (['f11', 'f8', 'f13'], False), ]) 
def test_table_subset(self): models = self.introspector.generate_models(table_names=[ 'category', 'col_types', 'foobarbaz']) self.assertEqual(sorted(models.keys()), ['category', 'col_types']) @requires_sqlite def test_sqlite_fk_re(self): user_id_tests = [ 'FOREIGN KEY("user_id") REFERENCES "users"("id")', 'FOREIGN KEY(user_id) REFERENCES users(id)', 'FOREIGN KEY ([user_id]) REFERENCES [users] ([id])', '"user_id" NOT NULL REFERENCES "users" ("id")', 'user_id not null references users (id)', ] fk_pk_tests = [ ('"col_types_id" INTEGER NOT NULL PRIMARY KEY REFERENCES ' '"coltypes" ("f11")'), 'FOREIGN KEY ("col_types_id") REFERENCES "coltypes" ("f11")', ] regex = SqliteMetadata.re_foreign_key for test in user_id_tests: match = re.search(regex, test, re.I) self.assertEqual(match.groups(), ( 'user_id', 'users', 'id', )) for test in fk_pk_tests: match = re.search(regex, test, re.I) self.assertEqual(match.groups(), ( 'col_types_id', 'coltypes', 'f11', )) def test_make_column_name(self): # Tests for is_foreign_key=False. tests = ( ('Column', 'column'), ('Foo_id', 'foo_id'), ('foo_id', 'foo_id'), ('foo_id_id', 'foo_id_id'), ('foo', 'foo'), ('_id', '_id'), ('a123', 'a123'), ('and', 'and_'), ('Class', 'class_'), ('Class_ID', 'class_id'), ('camelCase', 'camel_case'), ('ABCdefGhi', 'ab_cdef_ghi'), ) for col_name, expected in tests: self.assertEqual( self.introspector.make_column_name(col_name), expected) # Tests for is_foreign_key=True. tests = ( ('Foo_id', 'foo'), ('foo_id', 'foo'), ('foo_id_id', 'foo_id'), ('foo', 'foo'), ('_id', '_id'), ('a123', 'a123'), ('and', 'and_'), ('Class', 'class_'), ('Class_ID', 'class_'), ('camelCase', 'camel_case'), ('ABCdefGhi', 'ab_cdef_ghi'), ) for col_name, expected in tests: self.assertEqual( self.introspector.make_column_name(col_name, True), expected) def test_make_model_name(self): tests = ( ('Table', 'Table'), ('table', 'Table'), ('table_baz', 'TableBaz'), ('foo__bar__baz2', 'FooBarBaz2'), ('foo12_3', 'Foo123'), ) for table_name, expected in tests: self.assertEqual( self.introspector.make_model_name(table_name), expected) def test_col_types(self): (columns, primary_keys, foreign_keys, model_names, indexes) = self.introspector.introspect() expected = ( ('col_types', ( ('f1', (BigIntegerField, IntegerField), False), # There do not appear to be separate constants for the blob and # text field types in MySQL's drivers. See GH#1034. 
('f2', (BlobField, TextField), False), ('f3', (BooleanField, IntegerField), False), ('f4', CharField, False), ('f5', DateField, False), ('f6', DateTimeField, False), ('f7', DecimalField, False), ('f8', (DoubleField, FloatField), False), ('f9', FloatField, False), ('f10', IntegerField, False), ('f11', AutoField, False), ('f12', TextField, False), ('f13', TimeField, False))), ('rel_model', ( ('col_types_id', ForeignKeyField, False), ('col_types_nullable_id', ForeignKeyField, True))), ('nugget', ( ('category_id', ForeignKeyField, False), ('category', CharField, False))), ('nullable', ( ('nullable_cf', CharField, True), ('nullable_if', IntegerField, True))), ('fkpk', ( ('col_types_id', ForeignKeyField, False),)), ('underscores', ( ('_id', AutoField, False), ('_name', CharField, False))), ('category', ( ('name', CharField, False), ('parent_id', ForeignKeyField, True))), ) for table_name, expected_columns in expected: introspected_columns = columns[table_name] for field_name, field_class, is_null in expected_columns: if not isinstance(field_class, (list, tuple)): field_class = (field_class,) column = introspected_columns[field_name] self.assertTrue(column.field_class in field_class, "%s in %s" % (column.field_class, field_class)) self.assertEqual(column.nullable, is_null) def test_foreign_keys(self): (columns, primary_keys, foreign_keys, model_names, indexes) = self.introspector.introspect() self.assertEqual(foreign_keys['col_types'], []) rel_model = foreign_keys['rel_model'] self.assertEqual(len(rel_model), 2) fkpk = foreign_keys['fkpk'] self.assertEqual(len(fkpk), 1) fkpk_fk = fkpk[0] self.assertEqual(fkpk_fk.table, 'fkpk') self.assertEqual(fkpk_fk.column, 'col_types_id') self.assertEqual(fkpk_fk.dest_table, 'col_types') self.assertEqual(fkpk_fk.dest_column, 'f11') category = foreign_keys['category'] self.assertEqual(len(category), 1) category_fk = category[0] self.assertEqual(category_fk.table, 'category') self.assertEqual(category_fk.column, 'parent_id') self.assertEqual(category_fk.dest_table, 'category') self.assertEqual(category_fk.dest_column, 'id') def test_table_names(self): (columns, primary_keys, foreign_keys, model_names, indexes) = self.introspector.introspect() names = ( ('col_types', 'ColTypes'), ('nullable', 'Nullable'), ('rel_model', 'RelModel'), ('fkpk', 'Fkpk')) for k, v in names: self.assertEqual(model_names[k], v) def test_column_meta(self): (columns, primary_keys, foreign_keys, model_names, indexes) = self.introspector.introspect() rel_model = columns['rel_model'] col_types_id = rel_model['col_types_id'] self.assertEqual(col_types_id.get_field_parameters(), { 'column_name': "'col_types_id'", 'model': 'ColTypes', 'field': "'f11'", }) col_types_nullable_id = rel_model['col_types_nullable_id'] self.assertEqual(col_types_nullable_id.get_field_parameters(), { 'column_name': "'col_types_nullable_id'", 'null': True, 'backref': "'col_types_col_types_nullable_set'", 'model': 'ColTypes', 'field': "'f11'", }) fkpk = columns['fkpk'] self.assertEqual(fkpk['col_types_id'].get_field_parameters(), { 'column_name': "'col_types_id'", 'model': 'ColTypes', 'primary_key': True, 'field': "'f11'"}) category = columns['category'] parent_id = category['parent_id'] self.assertEqual(parent_id.get_field_parameters(), { 'column_name': "'parent_id'", 'null': True, 'model': "'self'", 'field': "'id'", }) nugget = columns['nugget'] category_fk = nugget['category_id'] self.assertEqual(category_fk.name, 'category_id') self.assertEqual(category_fk.get_field_parameters(), { 'field': "'id'", 'model': 
'Category', 'column_name': "'category_id'", }) category = nugget['category'] self.assertEqual(category.name, 'category') def test_get_field(self): (columns, primary_keys, foreign_keys, model_names, indexes) = self.introspector.introspect() expected = ( ('col_types', ( ('f1', ('f1 = BigIntegerField(index=True)', 'f1 = IntegerField(index=True)')), ('f2', ('f2 = BlobField()', 'f2 = TextField()')), ('f4', 'f4 = CharField()'), ('f5', 'f5 = DateField()'), ('f6', 'f6 = DateTimeField()'), ('f7', 'f7 = DecimalField()'), ('f10', 'f10 = IntegerField(unique=True)'), ('f11', 'f11 = AutoField()'), ('f12', ('f12 = TextField()', 'f12 = BlobField()')), ('f13', 'f13 = TimeField()'), )), ('nullable', ( ('nullable_cf', 'nullable_cf = ' 'CharField(null=True)'), ('nullable_if', 'nullable_if = IntegerField(null=True)'), )), ('fkpk', ( ('col_types_id', 'col_types = ForeignKeyField(' "column_name='col_types_id', field='f11', model=ColTypes, " 'primary_key=True)'), )), ('nugget', ( ('category_id', 'category_id = ForeignKeyField(' "column_name='category_id', field='id', model=Category)"), ('category', 'category = CharField()'), )), ('rel_model', ( ('col_types_id', 'col_types = ForeignKeyField(' "column_name='col_types_id', field='f11', model=ColTypes)"), ('col_types_nullable_id', 'col_types_nullable = ' "ForeignKeyField(backref='col_types_col_types_nullable_set', " "column_name='col_types_nullable_id', field='f11', " 'model=ColTypes, null=True)'), )), ('underscores', ( ('_id', '_id = AutoField()'), ('_name', '_name = CharField()'), )), ('category', ( ('name', 'name = CharField()'), ('parent_id', 'parent = ForeignKeyField(' "column_name='parent_id', field='id', model='self', " 'null=True)'), )), ) for table, field_data in expected: for field_name, fields in field_data: if not isinstance(fields, tuple): fields = (fields,) actual = columns[table][field_name].get_field() self.assertTrue(actual in fields, '%s not in %s' % (actual, fields)) class EventLog(TestModel): data = CharField(constraints=[SQL('DEFAULT \'\'')]) timestamp = DateTimeField(constraints=[SQL('DEFAULT current_timestamp')]) flags = IntegerField(constraints=[SQL('DEFAULT 0')]) misc = TextField(constraints=[SQL('DEFAULT \'foo\'')]) class DefaultVals(TestModel): key = CharField(constraints=[SQL('DEFAULT \'foo\'')]) value = IntegerField(constraints=[SQL('DEFAULT 0')]) class Meta: primary_key = CompositeKey('key', 'value') class TestReflectDefaultValues(BaseReflectionTestCase): requires = [DefaultVals, EventLog] @requires_sqlite def test_default_values(self): models = self.introspector.generate_models() default_vals = models['default_vals'] create_table = ( 'CREATE TABLE IF NOT EXISTS "default_vals" (' '"key" VARCHAR(255) NOT NULL DEFAULT \'foo\', ' '"value" INTEGER NOT NULL DEFAULT 0, ' 'PRIMARY KEY ("key", "value"))') # Re-create table using the introspected schema. self.assertSQL(default_vals._schema._create_table(), create_table, []) default_vals.drop_table() default_vals.create_table() # Verify that the introspected schema has not changed. 
models = self.introspector.generate_models() default_vals = models['default_vals'] self.assertSQL(default_vals._schema._create_table(), create_table, []) @requires_sqlite def test_default_values_extended(self): models = self.introspector.generate_models() eventlog = models['event_log'] create_table = ( 'CREATE TABLE IF NOT EXISTS "event_log" (' '"id" INTEGER NOT NULL PRIMARY KEY, ' '"data" VARCHAR(255) NOT NULL DEFAULT \'\', ' '"timestamp" DATETIME NOT NULL DEFAULT current_timestamp, ' '"flags" INTEGER NOT NULL DEFAULT 0, ' '"misc" TEXT NOT NULL DEFAULT \'foo\')') # Re-create table using the introspected schema. self.assertSQL(eventlog._schema._create_table(), create_table, []) eventlog.drop_table() eventlog.create_table() # Verify that the introspected schema has not changed. models = self.introspector.generate_models() eventlog = models['event_log'] self.assertSQL(eventlog._schema._create_table(), create_table, []) class TestReflectionDependencies(BaseReflectionTestCase): requires = [User, Tweet] def test_generate_dependencies(self): models = self.introspector.generate_models(table_names=['tweet']) self.assertEqual(set(models), set(('users', 'tweet'))) IUser = models['users'] ITweet = models['tweet'] self.assertEqual(set(ITweet._meta.fields), set(( 'id', 'user', 'content', 'timestamp'))) self.assertEqual(set(IUser._meta.fields), set(('id', 'username'))) self.assertTrue(ITweet.user.rel_model is IUser) self.assertTrue(ITweet.user.rel_field is IUser.id) def test_ignore_backrefs(self): models = self.introspector.generate_models(table_names=['users']) self.assertEqual(set(models), set(('users',))) class Note(TestModel): content = TextField() timestamp = DateTimeField(default=datetime.datetime.now) status = IntegerField() class TestReflectViews(BaseReflectionTestCase): requires = [Note] def setUp(self): super(TestReflectViews, self).setUp() self.database.execute_sql('CREATE VIEW notes_public AS ' 'SELECT content, timestamp FROM note ' 'WHERE status = 1 ORDER BY timestamp DESC') def tearDown(self): self.database.execute_sql('DROP VIEW notes_public') super(TestReflectViews, self).tearDown() def test_views_ignored_default(self): models = self.introspector.generate_models() self.assertFalse('notes_public' in models) def test_introspect_view(self): models = self.introspector.generate_models(include_views=True) self.assertTrue('notes_public' in models) NotesPublic = models['notes_public'] self.assertEqual(sorted(NotesPublic._meta.fields), ['content', 'timestamp']) self.assertTrue(isinstance(NotesPublic.content, TextField)) self.assertTrue(isinstance(NotesPublic.timestamp, DateTimeField)) @skip_if(IS_SQLITE_OLD) def test_introspect_view_integration(self): for i, (ct, st) in enumerate([('n1', 1), ('n2', 2), ('n3', 1)]): Note.create(content=ct, status=st, timestamp=datetime.datetime(2018, 1, 1 + i)) NP = self.introspector.generate_models( table_names=['notes_public'], include_views=True)['notes_public'] self.assertEqual([(np.content, np.timestamp) for np in NP.select()], [ ('n3', datetime.datetime(2018, 1, 3)), ('n1', datetime.datetime(2018, 1, 1))]) class Event(TestModel): key = TextField() timestamp = DateTimeField(index=True) metadata = TextField(default='') class TestInteractiveHelpers(ModelTestCase): requires = [Category, Event] def test_generate_models(self): M = generate_models(self.database) self.assertTrue('category' in M) self.assertTrue('event' in M) def assertFields(m, expected): actual = [(f.name, f.field_type) for f in m._meta.sorted_fields] self.assertEqual(actual, expected) 
assertFields(M['category'], [('id', 'AUTO'), ('name', 'VARCHAR'), ('parent', 'INT')]) assertFields(M['event'], [ ('id', 'AUTO'), ('key', 'TEXT'), ('timestamp', 'DATETIME'), ('metadata', 'TEXT')])
[ [ [ 7, 15 ], [ 18648, 18656 ], [ 20023, 20031 ], [ 20283, 20291 ], [ 20334, 20342 ] ], [ [ 23, 25 ] ], [ [ 33, 35 ], [ 5240, 5242 ], [ 5263, 5265 ], [ 5427, 5429 ], [ 5450, 5452 ] ], [ [ 56, 57 ] ], [ [ 91, 92 ], [ 400, 415 ], [ 437, 446 ], [ 458, 470 ], [ 482, 491 ], [ 516, 525 ], [ 537, 550 ], [ 562, 574 ], [ 586, 597 ], [ 609, 619 ], [ 632, 644 ], [ 668, 677 ], [ 690, 699 ], [ 712, 721 ], [ 897, 906 ], [ 936, 948 ], [ 1005, 1020 ], [ 1071, 1086 ], [ 1149, 1164 ], [ 1235, 1244 ], [ 1259, 1268 ], [ 1311, 1320 ], [ 1349, 1364 ], [ 1429, 1444 ], [ 1497, 1506 ], [ 15413, 15422 ], [ 15436, 15439 ], [ 15474, 15487 ], [ 15501, 15504 ], [ 15548, 15560 ], [ 15574, 15577 ], [ 15604, 15613 ], [ 15627, 15630 ], [ 15694, 15703 ], [ 15717, 15720 ], [ 15754, 15766 ], [ 15780, 15783 ], [ 15838, 15850 ], [ 18598, 18607 ], [ 18626, 18639 ], [ 18684, 18696 ], [ 20403, 20412 ], [ 20431, 20444 ], [ 20472, 20481 ], [ 1657, 1669 ], [ 2436, 2445 ], [ 2487, 2496 ], [ 2540, 2555 ], [ 2758, 2773 ], [ 3108, 3123 ], [ 3179, 3194 ], [ 5154, 5168 ], [ 7453, 7468 ], [ 7470, 7482 ], [ 7665, 7674 ], [ 7676, 7685 ], [ 7720, 7732 ], [ 7734, 7746 ], [ 7780, 7789 ], [ 7822, 7831 ], [ 7864, 7877 ], [ 7910, 7922 ], [ 7956, 7967 ], [ 7969, 7979 ], [ 8013, 8023 ], [ 8057, 8069 ], [ 8103, 8112 ], [ 8146, 8155 ], [ 8189, 8198 ], [ 8271, 8286 ], [ 8338, 8353 ], [ 8421, 8436 ], [ 8475, 8484 ], [ 8555, 8564 ], [ 8605, 8617 ], [ 8684, 8699 ], [ 8766, 8775 ], [ 8811, 8820 ], [ 8884, 8893 ], [ 8933, 8948 ], [ 19706, 19715 ], [ 19776, 19789 ], [ 20623, 20638 ] ], [ [ 112, 125 ], [ 19806, 19819 ] ], [ [ 144, 157 ], [ 1540, 1553 ], [ 20525, 20538 ] ], [ [ 176, 185 ], [ 379, 388 ], [ 867, 876 ], [ 977, 986 ], [ 1121, 1130 ], [ 1213, 1222 ], [ 1288, 1297 ], [ 1399, 1408 ], [ 15390, 15399 ], [ 15672, 15681 ], [ 18572, 18581 ], [ 20381, 20390 ] ], [ [ 204, 206 ] ], [ [ 225, 240 ] ], [ [ 259, 274 ], [ 3521, 3536 ], [ 4535, 4550 ], [ 15970, 15985 ], [ 16815, 16830 ] ], [ [ 293, 300 ], [ 19798, 19805 ] ], [ [ 326, 331 ], [ 17840, 17845 ] ], [ [ 357, 361 ], [ 17834, 17838 ] ], [ [ 370, 378 ], [ 1021, 1029 ], [ 1087, 1095 ], [ 1165, 1173 ], [ 1763, 1771 ] ], [ [ 858, 866 ], [ 1773, 1781 ] ], [ [ 968, 976 ], [ 1783, 1791 ] ], [ [ 1116, 1120 ], [ 1793, 1797 ] ], [ [ 1201, 1212 ], [ 1799, 1810 ] ], [ [ 1279, 1287 ], [ 1445, 1453 ], [ 1812, 1820 ], [ 20557, 20565 ] ], [ [ 1392, 1398 ], [ 1838, 1844 ] ], [ [ 1517, 1539 ], [ 1722, 1744 ], [ 15900, 15922 ], [ 17793, 17815 ], [ 18724, 18746 ], [ 1591, 1613 ] ], [ [ 1707, 1721 ] ], [ [ 15381, 15389 ], [ 15954, 15962 ] ], [ [ 15660, 15671 ], [ 15941, 15952 ] ], [ [ 15875, 15899 ] ], [ [ 17766, 17792 ] ], [ [ 18567, 18571 ], [ 18765, 18769 ], [ 19954, 19958 ] ], [ [ 18707, 18723 ], [ 18807, 18823 ], [ 19154, 19170 ] ], [ [ 20375, 20380 ], [ 20567, 20572 ] ], [ [ 20502, 20524 ] ] ]
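A minimal usage sketch of the reflection entry points exercised by the tests above, assuming an existing SQLite database (the filename 'app.db' is hypothetical):

# Hedged sketch of the playhouse.reflection API used in these tests.
from peewee import SqliteDatabase
from playhouse.reflection import Introspector, generate_models

db = SqliteDatabase('app.db')  # hypothetical existing database
introspector = Introspector.from_database(db)
models = introspector.generate_models()  # dict: table name -> Model class
# or, as TestInteractiveHelpers does, via the convenience helper:
models = generate_models(db)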
import numpy as np from .robot_model import RobotModel from ...utils.mjcf_utils import xml_path_completion class Panda(RobotModel): """Panda is a sensitive single-arm robot designed by Franka.""" def __init__(self, idn=0, bottom_offset=(0, 0, -0.913)): """ Args: idn (int or str): Number or some other unique identification string for this robot instance bottom_offset (3-list/tuple): x,y,z offset desired from initial coordinates """ super().__init__(xml_path_completion("robots/panda/robot.xml"), idn=idn, bottom_offset=bottom_offset) # Set joint damping self.set_joint_attribute(attrib="damping", values=np.array((0.1, 0.1, 0.1, 0.1, 0.1, 0.01, 0.01))) @property def dof(self): return 7 @property def gripper(self): return "PandaGripper" @property def default_controller_config(self): return "default_panda" @property def init_qpos(self): return np.array([0, np.pi / 16.0, 0.00, -np.pi / 2.0 - np.pi / 3.0, 0.00, np.pi - 0.2, np.pi/4]) @property def base_xpos_offset(self): return { "bins": (-0.5, 0.3, 0), "empty": (-0.6, 0, 0), "pegs": (-0.5, 0.15, 0), "table": lambda table_length: (-0.16 - table_length / 2, 0, 0) } @property def arm_type(self): return "single" @property def _joints(self): return ["joint1", "joint2", "joint3", "joint4", "joint5", "joint6", "joint7"] @property def _eef_name(self): return "right_hand" @property def _robot_base(self): return "base" @property def _actuators(self): return { "pos": [], # No position actuators for panda "vel": [], # No velocity actuators for panda "torq": ["torq_j1", "torq_j2", "torq_j3", "torq_j4", "torq_j5", "torq_j6", "torq_j7"] } @property def _contact_geoms(self): return ["link1_collision", "link2_collision", "link3_collision", "link4_collision", "link5_collision", "link6_collision", "link7_collision"] @property def _root(self): return "link0" @property def _links(self): return ["link1", "link2", "link3", "link4", "link5", "link6", "link7"]
[ [ [ 7, 18 ], [ 691, 693 ], [ 1001, 1003 ], [ 1014, 1016 ], [ 1035, 1037 ], [ 1049, 1051 ], [ 1068, 1070 ], [ 1081, 1083 ] ], [ [ 44, 54 ], [ 121, 131 ] ], [ [ 87, 106 ], [ 519, 538 ] ], [ [ 115, 120 ] ] ]
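A brief sketch of consulting this model's properties; the import path is an assumption and may differ between robosuite versions, and instantiation requires the package's bundled MJCF assets:

# Hedged sketch; import path hypothetical, assets assumed installed.
from robosuite.models.robots import Panda

panda = Panda(idn=0)
print(panda.dof)        # 7
print(panda.gripper)    # "PandaGripper"
print(panda.init_qpos)  # default 7-DOF joint configuration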
# coding: utf-8 """ Kubernetes No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) # noqa: E501 The version of the OpenAPI document: v1.19.15 Generated by: https://openapi-generator.tech """ import pprint import re # noqa: F401 import six from kubernetes_asyncio.client.configuration import Configuration class V1beta1CronJobSpec(object): """NOTE: This class is auto generated by OpenAPI Generator. Ref: https://openapi-generator.tech Do not edit the class manually. """ """ Attributes: openapi_types (dict): The key is attribute name and the value is attribute type. attribute_map (dict): The key is attribute name and the value is json key in definition. """ openapi_types = { 'concurrency_policy': 'str', 'failed_jobs_history_limit': 'int', 'job_template': 'V1beta1JobTemplateSpec', 'schedule': 'str', 'starting_deadline_seconds': 'int', 'successful_jobs_history_limit': 'int', 'suspend': 'bool' } attribute_map = { 'concurrency_policy': 'concurrencyPolicy', 'failed_jobs_history_limit': 'failedJobsHistoryLimit', 'job_template': 'jobTemplate', 'schedule': 'schedule', 'starting_deadline_seconds': 'startingDeadlineSeconds', 'successful_jobs_history_limit': 'successfulJobsHistoryLimit', 'suspend': 'suspend' } def __init__(self, concurrency_policy=None, failed_jobs_history_limit=None, job_template=None, schedule=None, starting_deadline_seconds=None, successful_jobs_history_limit=None, suspend=None, local_vars_configuration=None): # noqa: E501 """V1beta1CronJobSpec - a model defined in OpenAPI""" # noqa: E501 if local_vars_configuration is None: local_vars_configuration = Configuration() self.local_vars_configuration = local_vars_configuration self._concurrency_policy = None self._failed_jobs_history_limit = None self._job_template = None self._schedule = None self._starting_deadline_seconds = None self._successful_jobs_history_limit = None self._suspend = None self.discriminator = None if concurrency_policy is not None: self.concurrency_policy = concurrency_policy if failed_jobs_history_limit is not None: self.failed_jobs_history_limit = failed_jobs_history_limit self.job_template = job_template self.schedule = schedule if starting_deadline_seconds is not None: self.starting_deadline_seconds = starting_deadline_seconds if successful_jobs_history_limit is not None: self.successful_jobs_history_limit = successful_jobs_history_limit if suspend is not None: self.suspend = suspend @property def concurrency_policy(self): """Gets the concurrency_policy of this V1beta1CronJobSpec. # noqa: E501 Specifies how to treat concurrent executions of a Job. Valid values are: - \"Allow\" (default): allows CronJobs to run concurrently; - \"Forbid\": forbids concurrent runs, skipping next run if previous run hasn't finished yet; - \"Replace\": cancels currently running job and replaces it with a new one # noqa: E501 :return: The concurrency_policy of this V1beta1CronJobSpec. # noqa: E501 :rtype: str """ return self._concurrency_policy @concurrency_policy.setter def concurrency_policy(self, concurrency_policy): """Sets the concurrency_policy of this V1beta1CronJobSpec. Specifies how to treat concurrent executions of a Job. Valid values are: - \"Allow\" (default): allows CronJobs to run concurrently; - \"Forbid\": forbids concurrent runs, skipping next run if previous run hasn't finished yet; - \"Replace\": cancels currently running job and replaces it with a new one # noqa: E501 :param concurrency_policy: The concurrency_policy of this V1beta1CronJobSpec. 
# noqa: E501 :type: str """ self._concurrency_policy = concurrency_policy @property def failed_jobs_history_limit(self): """Gets the failed_jobs_history_limit of this V1beta1CronJobSpec. # noqa: E501 The number of failed finished jobs to retain. This is a pointer to distinguish between explicit zero and not specified. Defaults to 1. # noqa: E501 :return: The failed_jobs_history_limit of this V1beta1CronJobSpec. # noqa: E501 :rtype: int """ return self._failed_jobs_history_limit @failed_jobs_history_limit.setter def failed_jobs_history_limit(self, failed_jobs_history_limit): """Sets the failed_jobs_history_limit of this V1beta1CronJobSpec. The number of failed finished jobs to retain. This is a pointer to distinguish between explicit zero and not specified. Defaults to 1. # noqa: E501 :param failed_jobs_history_limit: The failed_jobs_history_limit of this V1beta1CronJobSpec. # noqa: E501 :type: int """ self._failed_jobs_history_limit = failed_jobs_history_limit @property def job_template(self): """Gets the job_template of this V1beta1CronJobSpec. # noqa: E501 :return: The job_template of this V1beta1CronJobSpec. # noqa: E501 :rtype: V1beta1JobTemplateSpec """ return self._job_template @job_template.setter def job_template(self, job_template): """Sets the job_template of this V1beta1CronJobSpec. :param job_template: The job_template of this V1beta1CronJobSpec. # noqa: E501 :type: V1beta1JobTemplateSpec """ if self.local_vars_configuration.client_side_validation and job_template is None: # noqa: E501 raise ValueError("Invalid value for `job_template`, must not be `None`") # noqa: E501 self._job_template = job_template @property def schedule(self): """Gets the schedule of this V1beta1CronJobSpec. # noqa: E501 The schedule in Cron format, see https://en.wikipedia.org/wiki/Cron. # noqa: E501 :return: The schedule of this V1beta1CronJobSpec. # noqa: E501 :rtype: str """ return self._schedule @schedule.setter def schedule(self, schedule): """Sets the schedule of this V1beta1CronJobSpec. The schedule in Cron format, see https://en.wikipedia.org/wiki/Cron. # noqa: E501 :param schedule: The schedule of this V1beta1CronJobSpec. # noqa: E501 :type: str """ if self.local_vars_configuration.client_side_validation and schedule is None: # noqa: E501 raise ValueError("Invalid value for `schedule`, must not be `None`") # noqa: E501 self._schedule = schedule @property def starting_deadline_seconds(self): """Gets the starting_deadline_seconds of this V1beta1CronJobSpec. # noqa: E501 Optional deadline in seconds for starting the job if it misses scheduled time for any reason. Missed jobs executions will be counted as failed ones. # noqa: E501 :return: The starting_deadline_seconds of this V1beta1CronJobSpec. # noqa: E501 :rtype: int """ return self._starting_deadline_seconds @starting_deadline_seconds.setter def starting_deadline_seconds(self, starting_deadline_seconds): """Sets the starting_deadline_seconds of this V1beta1CronJobSpec. Optional deadline in seconds for starting the job if it misses scheduled time for any reason. Missed jobs executions will be counted as failed ones. # noqa: E501 :param starting_deadline_seconds: The starting_deadline_seconds of this V1beta1CronJobSpec. # noqa: E501 :type: int """ self._starting_deadline_seconds = starting_deadline_seconds @property def successful_jobs_history_limit(self): """Gets the successful_jobs_history_limit of this V1beta1CronJobSpec. # noqa: E501 The number of successful finished jobs to retain. 
This is a pointer to distinguish between explicit zero and not specified. Defaults to 3. # noqa: E501 :return: The successful_jobs_history_limit of this V1beta1CronJobSpec. # noqa: E501 :rtype: int """ return self._successful_jobs_history_limit @successful_jobs_history_limit.setter def successful_jobs_history_limit(self, successful_jobs_history_limit): """Sets the successful_jobs_history_limit of this V1beta1CronJobSpec. The number of successful finished jobs to retain. This is a pointer to distinguish between explicit zero and not specified. Defaults to 3. # noqa: E501 :param successful_jobs_history_limit: The successful_jobs_history_limit of this V1beta1CronJobSpec. # noqa: E501 :type: int """ self._successful_jobs_history_limit = successful_jobs_history_limit @property def suspend(self): """Gets the suspend of this V1beta1CronJobSpec. # noqa: E501 This flag tells the controller to suspend subsequent executions, it does not apply to already started executions. Defaults to false. # noqa: E501 :return: The suspend of this V1beta1CronJobSpec. # noqa: E501 :rtype: bool """ return self._suspend @suspend.setter def suspend(self, suspend): """Sets the suspend of this V1beta1CronJobSpec. This flag tells the controller to suspend subsequent executions, it does not apply to already started executions. Defaults to false. # noqa: E501 :param suspend: The suspend of this V1beta1CronJobSpec. # noqa: E501 :type: bool """ self._suspend = suspend def to_dict(self): """Returns the model properties as a dict""" result = {} for attr, _ in six.iteritems(self.openapi_types): value = getattr(self, attr) if isinstance(value, list): result[attr] = list(map( lambda x: x.to_dict() if hasattr(x, "to_dict") else x, value )) elif hasattr(value, "to_dict"): result[attr] = value.to_dict() elif isinstance(value, dict): result[attr] = dict(map( lambda item: (item[0], item[1].to_dict()) if hasattr(item[1], "to_dict") else item, value.items() )) else: result[attr] = value return result def to_str(self): """Returns the string representation of the model""" return pprint.pformat(self.to_dict()) def __repr__(self): """For `print` and `pprint`""" return self.to_str() def __eq__(self, other): """Returns true if both objects are equal""" if not isinstance(other, V1beta1CronJobSpec): return False return self.to_dict() == other.to_dict() def __ne__(self, other): """Returns true if both objects are not equal""" if not isinstance(other, V1beta1CronJobSpec): return True return self.to_dict() != other.to_dict()
[ [ [ 275, 281 ], [ 10789, 10795 ] ], [ [ 289, 291 ] ], [ [ 314, 317 ], [ 9985, 9988 ] ], [ [ 371, 384 ], [ 1926, 1939 ] ], [ [ 393, 411 ], [ 11029, 11047 ], [ 11245, 11263 ] ] ]
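A small construction sketch, assuming the kubernetes_asyncio client package is installed; note that schedule and job_template are enforced as required when client-side validation is enabled:

# Hedged usage sketch; assumes kubernetes_asyncio is installed.
from kubernetes_asyncio.client import V1beta1CronJobSpec, V1beta1JobTemplateSpec

spec = V1beta1CronJobSpec(
    schedule="*/5 * * * *",                # required: cron-format schedule
    job_template=V1beta1JobTemplateSpec(),  # required: None raises ValueError
    concurrency_policy="Forbid",           # optional: skip overlapping runs
    successful_jobs_history_limit=3,
)
print(spec.to_dict())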
import click from doing.utils import get_config from doing.utils import get_repo_name from typing import Union def cmd_open_pr(pullrequest_id: Union[str, int]) -> None: """ Open a specific PULLREQUEST_ID. '!' prefix is allowed. """ pullrequest_id = str(pullrequest_id).lstrip("!").strip() project = get_config("project") organization = get_config("organization") click.launch(f"{organization}/{project}/_git/{get_repo_name()}/pullrequest/{pullrequest_id}")
[ [ [ 7, 12 ], [ 395, 400 ] ], [ [ 37, 47 ], [ 322, 332 ], [ 363, 373 ] ], [ [ 72, 85 ], [ 441, 454 ] ], [ [ 105, 110 ], [ 145, 150 ] ], [ [ 117, 128 ] ] ]
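Because of the lstrip("!") normalization above, work-item-style references and plain ids resolve to the same URL; a hedged usage sketch (import path hypothetical, and a configured 'project' and 'organization' are assumed):

# Hedged sketch; module path is an assumption.
from doing.pr.commands import cmd_open_pr  # hypothetical import path

cmd_open_pr("!1234")  # '!' prefix is stripped: .../pullrequest/1234
cmd_open_pr(1234)     # ints are coerced to str first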
from abc import ABC


class AbcFacade(ABC):
    """Any client of this interface expects to be able to invoke the following methods."""

    def count_rows(self):
        pass

    def get_rows(self):
        pass

    def get_last_workday(self):
        pass

    def delete_history(self):
        pass

    def disconnect(self):
        pass
[ [ [ 16, 19 ], [ 38, 41 ] ], [ [ 28, 37 ] ] ]
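Since the facade only fixes the method surface, a concrete backend supplies the behavior; a minimal in-memory sketch (everything beyond the base class is hypothetical, and AbcFacade is assumed importable from the module above):

# Hypothetical concrete implementation of the AbcFacade surface.
class InMemoryFacade(AbcFacade):
    def __init__(self, rows):
        self._rows = list(rows)
        self._connected = True

    def count_rows(self):
        return len(self._rows)

    def get_rows(self):
        return list(self._rows)

    def get_last_workday(self):
        # placeholder: a real backend would derive this from its data
        return None

    def delete_history(self):
        self._rows.clear()

    def disconnect(self):
        self._connected = False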
import builtins import os from rich.repr import RichReprResult import sys from array import array from collections import Counter, defaultdict, deque, UserDict, UserList import dataclasses from dataclasses import dataclass, fields, is_dataclass from inspect import isclass from itertools import islice import re from typing import ( DefaultDict, TYPE_CHECKING, Any, Callable, Dict, Iterable, List, Optional, Set, Union, Tuple, ) from types import MappingProxyType try: import attr as _attr_module except ImportError: # pragma: no cover _attr_module = None # type: ignore from .highlighter import ReprHighlighter from . import get_console from ._loop import loop_last from ._pick import pick_bool from .abc import RichRenderable from .cells import cell_len from .highlighter import ReprHighlighter from .jupyter import JupyterMixin, JupyterRenderable from .measure import Measurement from .text import Text if TYPE_CHECKING: from .console import ( Console, ConsoleOptions, HighlighterType, JustifyMethod, OverflowMethod, RenderResult, ) # Matches Jupyter's special methods _re_jupyter_repr = re.compile(f"^_repr_.+_$") def _is_attr_object(obj: Any) -> bool: """Check if an object was created with attrs module.""" return _attr_module is not None and _attr_module.has(type(obj)) def _get_attr_fields(obj: Any) -> Iterable["_attr_module.Attribute[Any]"]: """Get fields for an attrs object.""" return _attr_module.fields(type(obj)) if _attr_module is not None else [] def _is_dataclass_repr(obj: object) -> bool: """Check if an instance of a dataclass contains the default repr. Args: obj (object): A dataclass instance. Returns: bool: True if the default repr is used, False if there is a custom repr. """ # Digging in to a lot of internals here # Catching all exceptions in case something is missing on a non CPython implementation try: return obj.__repr__.__code__.co_filename == dataclasses.__file__ except Exception: # pragma: no coverage return False def install( console: Optional["Console"] = None, overflow: "OverflowMethod" = "ignore", crop: bool = False, indent_guides: bool = False, max_length: Optional[int] = None, max_string: Optional[int] = None, expand_all: bool = False, ) -> None: """Install automatic pretty printing in the Python REPL. Args: console (Console, optional): Console instance or ``None`` to use global console. Defaults to None. overflow (Optional[OverflowMethod], optional): Overflow method. Defaults to "ignore". crop (Optional[bool], optional): Enable cropping of long lines. Defaults to False. indent_guides (bool, optional): Enable indentation guides. Defaults to False. max_length (int, optional): Maximum length of containers before abbreviating, or None for no abbreviation. Defaults to None. max_string (int, optional): Maximum length of string before truncating, or None to disable. Defaults to None. expand_all (bool, optional): Expand all containers. Defaults to False. max_frames (int): Maximum number of frames to show in a traceback, 0 for no maximum. Defaults to 100. 
""" from rich import get_console from .console import ConsoleRenderable # needed here to prevent circular import console = console or get_console() assert console is not None def display_hook(value: Any) -> None: """Replacement sys.displayhook which prettifies objects with Rich.""" if value is not None: assert console is not None builtins._ = None # type: ignore console.print( value if isinstance(value, RichRenderable) else Pretty( value, overflow=overflow, indent_guides=indent_guides, max_length=max_length, max_string=max_string, expand_all=expand_all, ), crop=crop, ) builtins._ = value # type: ignore def ipy_display_hook(value: Any) -> None: # pragma: no cover assert console is not None # always skip rich generated jupyter renderables or None values if isinstance(value, JupyterRenderable) or value is None: return # on jupyter rich display, if using one of the special representations don't use rich if console.is_jupyter and any( _re_jupyter_repr.match(attr) for attr in dir(value) ): return # certain renderables should start on a new line if isinstance(value, ConsoleRenderable): console.line() console.print( value if isinstance(value, RichRenderable) else Pretty( value, overflow=overflow, indent_guides=indent_guides, max_length=max_length, max_string=max_string, expand_all=expand_all, margin=12, ), crop=crop, new_line_start=True, ) try: # pragma: no cover ip = get_ipython() # type: ignore from IPython.core.formatters import BaseFormatter class RichFormatter(BaseFormatter): # type: ignore pprint: bool = True def __call__(self, value: Any) -> Any: if self.pprint: return ipy_display_hook(value) else: return repr(value) # replace plain text formatter with rich formatter rich_formatter = RichFormatter() ip.display_formatter.formatters["text/plain"] = rich_formatter except Exception: sys.displayhook = display_hook class Pretty(JupyterMixin): """A rich renderable that pretty prints an object. Args: _object (Any): An object to pretty print. highlighter (HighlighterType, optional): Highlighter object to apply to result, or None for ReprHighlighter. Defaults to None. indent_size (int, optional): Number of spaces in indent. Defaults to 4. justify (JustifyMethod, optional): Justify method, or None for default. Defaults to None. overflow (OverflowMethod, optional): Overflow method, or None for default. Defaults to None. no_wrap (Optional[bool], optional): Disable word wrapping. Defaults to False. indent_guides (bool, optional): Enable indentation guides. Defaults to False. max_length (int, optional): Maximum length of containers before abbreviating, or None for no abbreviation. Defaults to None. max_string (int, optional): Maximum length of string before truncating, or None to disable. Defaults to None. expand_all (bool, optional): Expand all containers. Defaults to False. margin (int, optional): Subtrace a margin from width to force containers to expand earlier. Defaults to 0. insert_line (bool, optional): Insert a new line if the output has multiple new lines. Defaults to False. 
""" def __init__( self, _object: Any, highlighter: Optional["HighlighterType"] = None, *, indent_size: int = 4, justify: Optional["JustifyMethod"] = None, overflow: Optional["OverflowMethod"] = None, no_wrap: Optional[bool] = False, indent_guides: bool = False, max_length: Optional[int] = None, max_string: Optional[int] = None, expand_all: bool = False, margin: int = 0, insert_line: bool = False, ) -> None: self._object = _object self.highlighter = highlighter or ReprHighlighter() self.indent_size = indent_size self.justify: Optional["JustifyMethod"] = justify self.overflow: Optional["OverflowMethod"] = overflow self.no_wrap = no_wrap self.indent_guides = indent_guides self.max_length = max_length self.max_string = max_string self.expand_all = expand_all self.margin = margin self.insert_line = insert_line def __rich_console__( self, console: "Console", options: "ConsoleOptions" ) -> "RenderResult": pretty_str = pretty_repr( self._object, max_width=options.max_width - self.margin, indent_size=self.indent_size, max_length=self.max_length, max_string=self.max_string, expand_all=self.expand_all, ) pretty_text = Text( pretty_str, justify=self.justify or options.justify, overflow=self.overflow or options.overflow, no_wrap=pick_bool(self.no_wrap, options.no_wrap), style="pretty", ) pretty_text = ( self.highlighter(pretty_text) if pretty_text else Text( f"{type(self._object)}.__repr__ returned empty string", style="dim italic", ) ) if self.indent_guides and not options.ascii_only: pretty_text = pretty_text.with_indent_guides( self.indent_size, style="repr.indent" ) if self.insert_line and "\n" in pretty_text: yield "" yield pretty_text def __rich_measure__( self, console: "Console", options: "ConsoleOptions" ) -> "Measurement": pretty_str = pretty_repr( self._object, max_width=options.max_width, indent_size=self.indent_size, max_length=self.max_length, max_string=self.max_string, ) text_width = ( max(cell_len(line) for line in pretty_str.splitlines()) if pretty_str else 0 ) return Measurement(text_width, text_width) def _get_braces_for_defaultdict(_object: DefaultDict[Any, Any]) -> Tuple[str, str, str]: return ( f"defaultdict({_object.default_factory!r}, {{", "})", f"defaultdict({_object.default_factory!r}, {{}})", ) def _get_braces_for_array(_object: "array[Any]") -> Tuple[str, str, str]: return (f"array({_object.typecode!r}, [", "])", "array({_object.typecode!r})") _BRACES: Dict[type, Callable[[Any], Tuple[str, str, str]]] = { os._Environ: lambda _object: ("environ({", "})", "environ({})"), array: _get_braces_for_array, defaultdict: _get_braces_for_defaultdict, Counter: lambda _object: ("Counter({", "})", "Counter()"), deque: lambda _object: ("deque([", "])", "deque()"), dict: lambda _object: ("{", "}", "{}"), UserDict: lambda _object: ("{", "}", "{}"), frozenset: lambda _object: ("frozenset({", "})", "frozenset()"), list: lambda _object: ("[", "]", "[]"), UserList: lambda _object: ("[", "]", "[]"), set: lambda _object: ("{", "}", "set()"), tuple: lambda _object: ("(", ")", "()"), MappingProxyType: lambda _object: ("mappingproxy({", "})", "mappingproxy({})"), } _CONTAINERS = tuple(_BRACES.keys()) _MAPPING_CONTAINERS = (dict, os._Environ, MappingProxyType, UserDict) def is_expandable(obj: Any) -> bool: """Check if an object may be expanded by pretty print.""" return ( isinstance(obj, _CONTAINERS) or (is_dataclass(obj)) or (hasattr(obj, "__rich_repr__")) or _is_attr_object(obj) ) and not isclass(obj) @dataclass class Node: """A node in a repr tree. 
May be atomic or a container.""" key_repr: str = "" value_repr: str = "" open_brace: str = "" close_brace: str = "" empty: str = "" last: bool = False is_tuple: bool = False children: Optional[List["Node"]] = None key_separator = ": " separator: str = ", " def iter_tokens(self) -> Iterable[str]: """Generate tokens for this node.""" if self.key_repr: yield self.key_repr yield self.key_separator if self.value_repr: yield self.value_repr elif self.children is not None: if self.children: yield self.open_brace if self.is_tuple and len(self.children) == 1: yield from self.children[0].iter_tokens() yield "," else: for child in self.children: yield from child.iter_tokens() if not child.last: yield self.separator yield self.close_brace else: yield self.empty def check_length(self, start_length: int, max_length: int) -> bool: """Check the length fits within a limit. Args: start_length (int): Starting length of the line (indent, prefix, suffix). max_length (int): Maximum length. Returns: bool: True if the node can be rendered within max length, otherwise False. """ total_length = start_length for token in self.iter_tokens(): total_length += cell_len(token) if total_length > max_length: return False return True def __str__(self) -> str: repr_text = "".join(self.iter_tokens()) return repr_text def render( self, max_width: int = 80, indent_size: int = 4, expand_all: bool = False ) -> str: """Render the node to a pretty repr. Args: max_width (int, optional): Maximum width of the repr. Defaults to 80. indent_size (int, optional): Size of indents. Defaults to 4. expand_all (bool, optional): Expand all levels. Defaults to False. Returns: str: A repr string of the original object. """ lines = [_Line(node=self, is_root=True)] line_no = 0 while line_no < len(lines): line = lines[line_no] if line.expandable and not line.expanded: if expand_all or not line.check_length(max_width): lines[line_no : line_no + 1] = line.expand(indent_size) line_no += 1 repr_str = "\n".join(str(line) for line in lines) return repr_str @dataclass class _Line: """A line in repr output.""" parent: Optional["_Line"] = None is_root: bool = False node: Optional[Node] = None text: str = "" suffix: str = "" whitespace: str = "" expanded: bool = False last: bool = False @property def expandable(self) -> bool: """Check if the line may be expanded.""" return bool(self.node is not None and self.node.children) def check_length(self, max_length: int) -> bool: """Check this line fits within a given number of cells.""" start_length = ( len(self.whitespace) + cell_len(self.text) + cell_len(self.suffix) ) assert self.node is not None return self.node.check_length(start_length, max_length) def expand(self, indent_size: int) -> Iterable["_Line"]: """Expand this line by adding children on their own line.""" node = self.node assert node is not None whitespace = self.whitespace assert node.children if node.key_repr: new_line = yield _Line( text=f"{node.key_repr}{node.key_separator}{node.open_brace}", whitespace=whitespace, ) else: new_line = yield _Line(text=node.open_brace, whitespace=whitespace) child_whitespace = self.whitespace + " " * indent_size tuple_of_one = node.is_tuple and len(node.children) == 1 for last, child in loop_last(node.children): separator = "," if tuple_of_one else node.separator line = _Line( parent=new_line, node=child, whitespace=child_whitespace, suffix=separator, last=last and not tuple_of_one, ) yield line yield _Line( text=node.close_brace, whitespace=whitespace, suffix=self.suffix, last=self.last, ) def __str__(self) -> str: 
if self.last: return f"{self.whitespace}{self.text}{self.node or ''}" else: return ( f"{self.whitespace}{self.text}{self.node or ''}{self.suffix.rstrip()}" ) def traverse( _object: Any, max_length: Optional[int] = None, max_string: Optional[int] = None ) -> Node: """Traverse object and generate a tree. Args: _object (Any): Object to be traversed. max_length (int, optional): Maximum length of containers before abbreviating, or None for no abbreviation. Defaults to None. max_string (int, optional): Maximum length of string before truncating, or None to disable truncating. Defaults to None. Returns: Node: The root of a tree structure which can be used to render a pretty repr. """ def to_repr(obj: Any) -> str: """Get repr string for an object, but catch errors.""" if ( max_string is not None and isinstance(obj, (bytes, str)) and len(obj) > max_string ): truncated = len(obj) - max_string obj_repr = f"{obj[:max_string]!r}+{truncated}" else: try: obj_repr = repr(obj) except Exception as error: obj_repr = f"<repr-error {str(error)!r}>" return obj_repr visited_ids: Set[int] = set() push_visited = visited_ids.add pop_visited = visited_ids.remove def _traverse(obj: Any, root: bool = False) -> Node: """Walk the object depth first.""" obj_type = type(obj) py_version = (sys.version_info.major, sys.version_info.minor) children: List[Node] def iter_rich_args(rich_args: Any) -> Iterable[Union[Any, Tuple[str, Any]]]: for arg in rich_args: if isinstance(arg, tuple): if len(arg) == 3: key, child, default = arg if default == child: continue yield key, child elif len(arg) == 2: key, child = arg yield key, child elif len(arg) == 1: yield arg[0] else: yield arg try: fake_attributes = hasattr( obj, "awehoi234_wdfjwljet234_234wdfoijsdfmmnxpi492" ) except Exception: fake_attributes = False rich_repr_result: Optional[RichReprResult] = None if not fake_attributes: try: if hasattr(obj, "__rich_repr__") and not isclass(obj): rich_repr_result = obj.__rich_repr__() except Exception: pass if rich_repr_result is not None: angular = getattr(obj.__rich_repr__, "angular", False) args = list(iter_rich_args(rich_repr_result)) class_name = obj.__class__.__name__ if args: children = [] append = children.append if angular: node = Node( open_brace=f"<{class_name} ", close_brace=">", children=children, last=root, separator=" ", ) else: node = Node( open_brace=f"{class_name}(", close_brace=")", children=children, last=root, ) for last, arg in loop_last(args): if isinstance(arg, tuple): key, child = arg child_node = _traverse(child) child_node.last = last child_node.key_repr = key child_node.key_separator = "=" append(child_node) else: child_node = _traverse(arg) child_node.last = last append(child_node) else: node = Node( value_repr=f"<{class_name}>" if angular else f"{class_name}()", children=[], last=root, ) elif _is_attr_object(obj) and not fake_attributes: children = [] append = children.append attr_fields = _get_attr_fields(obj) if attr_fields: node = Node( open_brace=f"{obj.__class__.__name__}(", close_brace=")", children=children, last=root, ) def iter_attrs() -> Iterable[ Tuple[str, Any, Optional[Callable[[Any], str]]] ]: """Iterate over attr fields and values.""" for attr in attr_fields: if attr.repr: try: value = getattr(obj, attr.name) except Exception as error: # Can happen, albeit rarely yield (attr.name, error, None) else: yield ( attr.name, value, attr.repr if callable(attr.repr) else None, ) for last, (name, value, repr_callable) in loop_last(iter_attrs()): if repr_callable: child_node = 
Node(value_repr=str(repr_callable(value))) else: child_node = _traverse(value) child_node.last = last child_node.key_repr = name child_node.key_separator = "=" append(child_node) else: node = Node( value_repr=f"{obj.__class__.__name__}()", children=[], last=root ) elif ( is_dataclass(obj) and not isinstance(obj, type) and not fake_attributes and (_is_dataclass_repr(obj) or py_version == (3, 6)) ): obj_id = id(obj) if obj_id in visited_ids: # Recursion detected return Node(value_repr="...") push_visited(obj_id) children = [] append = children.append node = Node( open_brace=f"{obj.__class__.__name__}(", close_brace=")", children=children, last=root, ) for last, field in loop_last(field for field in fields(obj) if field.repr): child_node = _traverse(getattr(obj, field.name)) child_node.key_repr = field.name child_node.last = last child_node.key_separator = "=" append(child_node) pop_visited(obj_id) elif isinstance(obj, _CONTAINERS): for container_type in _CONTAINERS: if isinstance(obj, container_type): obj_type = container_type break obj_id = id(obj) if obj_id in visited_ids: # Recursion detected return Node(value_repr="...") push_visited(obj_id) open_brace, close_brace, empty = _BRACES[obj_type](obj) if obj_type.__repr__ != type(obj).__repr__: node = Node(value_repr=to_repr(obj), last=root) elif obj: children = [] node = Node( open_brace=open_brace, close_brace=close_brace, children=children, last=root, ) append = children.append num_items = len(obj) last_item_index = num_items - 1 if isinstance(obj, _MAPPING_CONTAINERS): iter_items = iter(obj.items()) if max_length is not None: iter_items = islice(iter_items, max_length) for index, (key, child) in enumerate(iter_items): child_node = _traverse(child) child_node.key_repr = to_repr(key) child_node.last = index == last_item_index append(child_node) else: iter_values = iter(obj) if max_length is not None: iter_values = islice(iter_values, max_length) for index, child in enumerate(iter_values): child_node = _traverse(child) child_node.last = index == last_item_index append(child_node) if max_length is not None and num_items > max_length: append(Node(value_repr=f"... +{num_items-max_length}", last=True)) else: node = Node(empty=empty, children=[], last=root) pop_visited(obj_id) else: node = Node(value_repr=to_repr(obj), last=root) node.is_tuple = isinstance(obj, tuple) return node node = _traverse(_object, root=True) return node def pretty_repr( _object: Any, *, max_width: int = 80, indent_size: int = 4, max_length: Optional[int] = None, max_string: Optional[int] = None, expand_all: bool = False, ) -> str: """Prettify repr string by expanding on to new lines to fit within a given width. Args: _object (Any): Object to repr. max_width (int, optional): Desired maximum width of repr string. Defaults to 80. indent_size (int, optional): Number of spaces to indent. Defaults to 4. max_length (int, optional): Maximum length of containers before abbreviating, or None for no abbreviation. Defaults to None. max_string (int, optional): Maximum length of string before truncating, or None to disable truncating. Defaults to None. expand_all (bool, optional): Expand all containers regardless of available width. Defaults to False. Returns: str: A possibly multi-line representation of the object. 
""" if isinstance(_object, Node): node = _object else: node = traverse(_object, max_length=max_length, max_string=max_string) repr_str = node.render( max_width=max_width, indent_size=indent_size, expand_all=expand_all ) return repr_str def pprint( _object: Any, *, console: Optional["Console"] = None, indent_guides: bool = True, max_length: Optional[int] = None, max_string: Optional[int] = None, expand_all: bool = False, ) -> None: """A convenience function for pretty printing. Args: _object (Any): Object to pretty print. console (Console, optional): Console instance, or None to use default. Defaults to None. max_length (int, optional): Maximum length of containers before abbreviating, or None for no abbreviation. Defaults to None. max_string (int, optional): Maximum length of strings before truncating, or None to disable. Defaults to None. indent_guides (bool, optional): Enable indentation guides. Defaults to True. expand_all (bool, optional): Expand all containers. Defaults to False. """ _console = get_console() if console is None else console _console.print( Pretty( _object, max_length=max_length, max_string=max_string, indent_guides=indent_guides, expand_all=expand_all, overflow="ignore", ), soft_wrap=True, ) if __name__ == "__main__": # pragma: no cover class BrokenRepr: def __repr__(self) -> str: 1 / 0 return "this will fail" d = defaultdict(int) d["foo"] = 5 data = { "foo": [ 1, "Hello World!", 100.123, 323.232, 432324.0, {5, 6, 7, (1, 2, 3, 4), 8}, ], "bar": frozenset({1, 2, 3}), "defaultdict": defaultdict( list, {"crumble": ["apple", "rhubarb", "butter", "sugar", "flour"]} ), "counter": Counter( [ "apple", "orange", "pear", "kumquat", "kumquat", "durian" * 100, ] ), "atomic": (False, True, None), "Broken": BrokenRepr(), } data["foo"].append(data) # type: ignore from rich import print print(Pretty(data, indent_guides=True, max_string=20))
[ [ [ 7, 15 ], [ 3732, 3740 ], [ 4213, 4221 ] ], [ [ 23, 25 ], [ 10481, 10483 ], [ 11241, 11243 ] ], [ [ 48, 62 ], [ 18936, 18950 ] ], [ [ 70, 73 ], [ 5934, 5937 ], [ 18001, 18004 ], [ 18025, 18028 ] ], [ [ 92, 97 ], [ 10550, 10555 ] ], [ [ 122, 129 ], [ 10630, 10637 ], [ 29093, 29100 ] ], [ [ 131, 142 ], [ 10584, 10595 ], [ 28688, 28699 ], [ 28970, 28981 ] ], [ [ 144, 149 ], [ 10693, 10698 ] ], [ [ 151, 159 ], [ 10794, 10802 ], [ 11272, 11280 ] ], [ [ 161, 169 ], [ 10955, 10963 ] ], [ [ 177, 188 ], [ 2070, 2081 ] ], [ [ 213, 222 ], [ 11569, 11578 ], [ 14376, 14385 ] ], [ [ 224, 230 ], [ 23368, 23374 ] ], [ [ 232, 244 ], [ 11445, 11457 ], [ 22696, 22708 ] ], [ [ 265, 272 ], [ 11553, 11560 ], [ 19065, 19072 ] ], [ [ 295, 301 ], [ 24829, 24835 ], [ 25304, 25310 ] ], [ [ 309, 311 ], [ 1206, 1208 ] ], [ [ 337, 348 ], [ 10057, 10068 ] ], [ [ 354, 367 ], [ 967, 980 ] ], [ [ 373, 376 ], [ 10444, 10447 ], [ 1260, 1263 ], [ 1430, 1433 ], [ 7325, 7328 ], [ 10069, 10072 ], [ 10074, 10077 ], [ 11307, 11310 ], [ 16628, 16631 ], [ 26068, 26071 ], [ 27338, 27341 ], [ 3559, 3562 ], [ 4281, 4284 ], [ 5583, 5586 ], [ 5575, 5578 ], [ 17229, 17232 ], [ 17873, 17876 ], [ 18140, 18143 ], [ 18156, 18159 ], [ 18117, 18120 ], [ 21301, 21304 ], [ 21325, 21328 ] ], [ [ 382, 390 ], [ 10434, 10442 ], [ 21315, 21323 ] ], [ [ 396, 400 ], [ 10423, 10427 ] ], [ [ 406, 414 ], [ 1438, 1446 ], [ 11949, 11957 ], [ 15186, 15194 ], [ 18125, 18133 ], [ 21260, 21268 ] ], [ [ 420, 424 ], [ 11847, 11851 ], [ 18067, 18071 ] ], [ [ 430, 438 ], [ 11838, 11846 ], [ 14445, 14453 ], [ 14506, 14514 ], [ 2185, 2193 ], [ 2329, 2337 ], [ 2367, 2375 ], [ 7351, 7359 ], [ 7445, 7453 ], [ 7497, 7505 ], [ 7549, 7557 ], [ 7630, 7638 ], [ 7672, 7680 ], [ 7955, 7963 ], [ 8014, 8022 ], [ 16645, 16653 ], [ 16679, 16687 ], [ 26147, 26155 ], [ 26185, 26193 ], [ 27363, 27371 ], [ 27439, 27447 ], [ 27477, 27485 ], [ 18927, 18935 ], [ 21306, 21314 ] ], [ [ 444, 447 ], [ 17760, 17763 ] ], [ [ 453, 458 ], [ 18134, 18139 ] ], [ [ 464, 469 ], [ 10450, 10455 ], [ 10083, 10088 ], [ 10307, 10312 ], [ 18145, 18150 ], [ 21290, 21295 ] ], [ [ 491, 507 ], [ 11094, 11110 ], [ 11254, 11270 ] ], [ [ 525, 545 ], [ 1345, 1357 ], [ 1374, 1386 ], [ 1566, 1578 ], [ 1532, 1544 ] ], [ [ 590, 602 ], [ 1345, 1357 ], [ 1374, 1386 ], [ 1566, 1578 ], [ 1532, 1544 ] ], [ [ 653, 668 ] ], [ [ 683, 694 ], [ 28197, 28208 ] ], [ [ 714, 723 ], [ 15839, 15848 ], [ 20074, 20083 ], [ 22115, 22124 ], [ 23339, 23348 ] ], [ [ 743, 752 ], [ 8885, 8894 ] ], [ [ 770, 784 ], [ 3852, 3866 ], [ 4943, 4957 ] ], [ [ 804, 812 ], [ 9880, 9888 ], [ 13226, 13234 ], [ 14988, 14996 ], [ 15010, 15018 ] ], [ [ 838, 853 ], [ 7876, 7891 ] ], [ [ 875, 887 ], [ 5980, 5992 ] ], [ [ 889, 906 ], [ 4451, 4468 ] ], [ [ 928, 939 ], [ 9978, 9989 ] ], [ [ 958, 962 ], [ 8726, 8730 ], [ 9075, 9079 ] ], [ [ 1017, 1024 ] ], [ [ 1034, 1048 ] ], [ [ 1058, 1073 ] ], [ [ 1083, 1096 ] ], [ [ 1106, 1120 ] ], [ [ 1130, 1142 ] ], [ [ 1187, 1203 ], [ 4652, 4668 ] ], [ [ 1239, 1254 ], [ 11518, 11533 ], [ 20822, 20837 ] ], [ [ 1408, 1424 ], [ 20958, 20974 ] ], [ [ 1605, 1623 ], [ 22809, 22827 ] ], [ [ 2163, 2170 ] ], [ [ 5973, 5979 ], [ 29463, 29469 ], [ 28271, 28277 ], [ 3889, 3895 ], [ 4976, 4982 ] ], [ [ 10020, 10047 ], [ 10597, 10624 ] ], [ [ 10259, 10280 ], [ 10557, 10578 ] ], [ [ 10414, 10421 ], [ 10423, 10472 ], [ 11196, 11203 ], [ 24109, 24116 ] ], [ [ 11176, 11187 ], [ 11420, 11431 ], [ 23694, 23705 ], [ 23742, 23753 ] ], [ [ 11212, 11231 ], [ 24672, 24691 ] ], [ [ 11288, 11301 ] ], [ [ 11585, 11589 ], [ 14515, 14519 ], [ 
16705, 16709 ], [ 27062, 27066 ], [ 17901, 17905 ], [ 18072, 18076 ], [ 19552, 19556 ], [ 19841, 19845 ], [ 20637, 20641 ], [ 21031, 21035 ], [ 22215, 22219 ], [ 22559, 22563 ], [ 22996, 23000 ], [ 23135, 23139 ], [ 24007, 24011 ], [ 24212, 24216 ], [ 24328, 24332 ], [ 25661, 25665 ], [ 25762, 25766 ], [ 25870, 25874 ] ], [ [ 14392, 14397 ], [ 13946, 13951 ], [ 15452, 15457 ], [ 15633, 15638 ], [ 15948, 15953 ], [ 16195, 16200 ] ], [ [ 16605, 16613 ], [ 27117, 27125 ] ], [ [ 26042, 26053 ], [ 8438, 8449 ], [ 9629, 9640 ] ], [ [ 27317, 27323 ] ], [ [ 28578, 28588 ], [ 29359, 29369 ] ], [ [ 28684, 28685 ], [ 28709, 28710 ] ], [ [ 28726, 28730 ], [ 29383, 29387 ], [ 29402, 29406 ], [ 29470, 29474 ] ], [ [ 29446, 29451 ], [ 29457, 29462 ] ] ]
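The max_length / max_string abbreviation documented in the docstrings above is easiest to see on a nested container; a short sketch using the module's public pretty_repr helper:

# Sketch of the truncation behavior implemented in traverse()/to_repr().
from rich.pretty import pretty_repr

data = {"words": ["alpha", "beta", "gamma", "delta"], "blob": "x" * 100}
print(pretty_repr(data, max_width=60, max_length=2, max_string=10))
# containers beyond max_length gain a "... +N" placeholder entry;
# long strings are cut at max_string characters with a "+N" suffix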
# coding=utf-8 # Copyright 2019 The TensorFlow GAN Authors. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """Tests for stargan_estimator.train.""" from __future__ import absolute_import from __future__ import division from __future__ import print_function import numpy as np import tensorflow as tf from tensorflow_gan.examples.stargan_estimator import train_lib mock = tf.compat.v1.test.mock def _test_generator(input_images, _): """Simple generator function.""" return input_images * tf.compat.v1.get_variable('dummy_g', initializer=2.0) def _test_discriminator(inputs, num_domains): """Differentiable dummy discriminator for StarGAN.""" hidden = tf.compat.v1.layers.flatten(inputs) output_src = tf.reduce_mean(input_tensor=hidden, axis=1) output_cls = tf.compat.v1.layers.dense(inputs=hidden, units=num_domains) return output_src, output_cls class TrainTest(tf.test.TestCase): @mock.patch.object(train_lib.data_provider, 'provide_data', autospec=True) @mock.patch.object( train_lib.data_provider, 'provide_celeba_test_set', autospec=True) def test_main(self, mock_provide_celeba_test_set, mock_provide_data): hparams = train_lib.HParams( batch_size=1, patch_size=8, output_dir='/tmp/tfgan_logdir/stargan/', generator_lr=1e-4, discriminator_lr=1e-4, max_number_of_steps=0, steps_per_eval=1, adam_beta1=0.5, adam_beta2=0.999, gen_disc_step_ratio=0.2, master='', ps_tasks=0, task=0) num_domains = 3 # Construct mock inputs. images_shape = [ hparams.batch_size, hparams.patch_size, hparams.patch_size, 3 ] img_list = [np.zeros(images_shape, dtype=np.float32)] * num_domains # Create a list of num_domains arrays of shape [batch_size, num_domains]. # Note: assumes hparams.batch_size <= num_domains. lbl_list = [np.eye(num_domains)[:hparams.batch_size, :]] * num_domains mock_provide_data.return_value = (img_list, lbl_list) mock_provide_celeba_test_set.return_value = np.zeros( [3, hparams.patch_size, hparams.patch_size, 3]) train_lib.train(hparams, _test_generator, _test_discriminator) if __name__ == '__main__': tf.test.main()
[ [ [ 672, 687 ] ], [ [ 711, 719 ] ], [ [ 743, 757 ] ], [ [ 766, 777 ], [ 2192, 2194 ], [ 2221, 2223 ], [ 2397, 2399 ], [ 2562, 2564 ] ], [ [ 785, 801 ], [ 875, 877 ], [ 1386, 1388 ], [ 2727, 2729 ], [ 997, 999 ], [ 1166, 1168 ], [ 1217, 1219 ], [ 1276, 1278 ] ], [ [ 857, 866 ], [ 1427, 1436 ], [ 1511, 1520 ], [ 1664, 1673 ], [ 2633, 2642 ] ], [ [ 868, 872 ], [ 1409, 1413 ], [ 1486, 1490 ] ], [ [ 904, 919 ], [ 2658, 2673 ] ], [ [ 1057, 1076 ], [ 2675, 2694 ] ], [ [ 1376, 1385 ] ] ]
from flask import Flask, request, jsonify, render_template, make_response from qual_id.pattern import Pattern import random app = Flask(__name__) @app.route('/get/', methods=['GET']) def get_response(): pattern = Pattern(request.args.get("pattern", "")) number = int(request.args.get("number", 1)) response_obj = {} if not pattern.is_valid(): response_obj["error"] = "pattern is invalid" else: response_obj["data"] = get_qual_ids(pattern, number) response = make_response(response_obj) return response @app.route('/categories/', methods=['GET']) def categories_response(): response = {'data': Pattern.get_category_options()} return jsonify(response) @app.route('/badge-endpoint/', methods=['GET']) def badge_endpoint_response(): example = get_qual_ids(Pattern('food-animal'), 1)[0] response_obj = { "schemaVersion": 1, "label": "Qual ID", "message": example, "color": f"hsl({random.randint(0,359)}, 100%, 50%)" } response = make_response(response_obj) response.headers['Cache-Control'] = 'no-cache, no-store' return response def get_qual_ids(pattern, number): return [get_qual_id(pattern) for _ in range(number)] def get_qual_id(pattern): return '-'.join([path.get_random_value() for path in pattern.get_categories()]) @app.route('/') def index(): return render_template('welcome.html') if __name__ == '__main__': # Threaded option to enable multiple instances for multiple user access support app.run(threaded=True, port=5000)
[ [ [ 18, 23 ], [ 131, 136 ] ], [ [ 25, 32 ], [ 226, 233 ], [ 274, 281 ] ], [ [ 34, 41 ], [ 666, 673 ] ], [ [ 43, 58 ], [ 1339, 1354 ] ], [ [ 60, 73 ], [ 484, 497 ], [ 992, 1005 ] ], [ [ 102, 109 ], [ 218, 225 ], [ 625, 632 ], [ 790, 797 ] ], [ [ 117, 123 ], [ 939, 945 ] ], [ [ 125, 128 ], [ 150, 153 ], [ 533, 536 ], [ 687, 690 ], [ 1302, 1305 ], [ 1484, 1487 ] ], [ [ 190, 202 ] ], [ [ 580, 599 ] ], [ [ 738, 761 ] ], [ [ 1103, 1115 ], [ 440, 452 ], [ 777, 789 ] ], [ [ 1195, 1206 ], [ 1144, 1155 ] ], [ [ 1321, 1326 ] ] ]
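A quick way to exercise these routes without running a server is Flask's built-in test client; a hedged sketch, assuming the module above is importable and the qual_id package is installed:

# Hedged sketch; the import of `app` uses a hypothetical module name.
from qual_id_app import app  # hypothetical import of the module above

client = app.test_client()
resp = client.get('/get/?pattern=food-animal&number=2')
print(resp.get_json())  # {'data': [...]} on success, {'error': ...} otherwise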
from flask_wtf import FlaskForm from wtforms.validators import Required from wtforms import TextAreaField,SubmitField,StringField from ..models import User class UpdateProfile(FlaskForm): bio = TextAreaField('Update bio.',validators = [Required()]) submit = SubmitField('Update') class PostAblog (FlaskForm): title = StringField('Title',validators = [Required()]) content = TextAreaField('Start blogging',validators = [Required()]) submit = SubmitField('Blog') class PostAComment (FlaskForm): comment = TextAreaField(validators = [Required()]) submit = SubmitField('Comment',validators = [Required()])
[ [ [ 22, 31 ], [ 177, 186 ], [ 307, 316 ], [ 504, 513 ] ], [ [ 63, 71 ], [ 241, 249 ], [ 365, 373 ], [ 437, 445 ], [ 558, 566 ], [ 620, 628 ] ], [ [ 92, 105 ], [ 199, 212 ], [ 392, 405 ], [ 530, 543 ] ], [ [ 106, 117 ], [ 267, 278 ], [ 463, 474 ], [ 584, 595 ] ], [ [ 118, 129 ], [ 331, 342 ] ], [ [ 151, 155 ] ], [ [ 163, 176 ] ], [ [ 296, 305 ] ], [ [ 490, 502 ] ] ]
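# Hedged sketch of validating the PostAblog form above outside a live request:
# a test request context with CSRF disabled is sufficient. The Flask app below
# is illustrative only.
from flask import Flask

_app = Flask(__name__)
_app.config.update(SECRET_KEY='dev', WTF_CSRF_ENABLED=False)
with _app.test_request_context(method='POST', data={'title': 't', 'content': 'c'}):
    form = PostAblog()
    print(form.validate())  # True: both Required fields are present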
from . import controllers
[ [ [ 15, 26 ] ] ]
#!/usr/bin/env python3 # -*- coding: utf-8 -*- from __future__ import print_function, division, absolute_import #::: modules import numpy as np import os, sys import ellc from transitleastsquares import catalog_info import astropy.constants as ac import astropy.units as u import lightkurve as lk import pandas as pd np.random.seed(42) #::: load data and set the units correctly TIC_ID = 85400193 # TIC_ID of our candidate lcf= lk.search_lightcurvefile('TIC '+str(TIC_ID), mission="tess").download_all() ab, mass, massmin, massmax, radius, radiusmin, radiusmax = catalog_info(TIC_ID=TIC_ID) #units for ellc rstar=radius*u.R_sun mstar=mass*u.M_sun #mass and radius for the TLS #rstar=radius #mstar=mass mstar_min = mass-massmin mstar_max = mass+massmax rstar_min = radius-radiusmin rstar_max = radius+radiusmax #uncomment the following lines to check that the parameters used are correct. #print('\n STELLAR PROPERTIES FOR THE SIGNAL SEARCH') #print('================================================\n') #print('limb-darkening estimates using quadratic LD (a,b)=', ab) #print('mass =', format(mstar,'0.5f')) #print('mass_min =', format(mstar_min,'0.5f')) #print('mass_max =', format(mstar_max,'0.5f')) #print('radius =', format(rstar,'0.5f')) #print('radius_min =', format(rstar_min,'0.5f')) #print('radius_max =', format(rstar_max,'0.5f')) lc=lcf.PDCSAP_FLUX.stitch().remove_nans() # remove of the nans lc_new=lk.LightCurve(time=lc.time, flux=lc.flux,flux_err=lc.flux_err) clean=lc_new.remove_outliers(sigma_lower=float('inf'), sigma_upper=3) #remove outliers over 3sigma flux0=clean.flux time=clean.time flux_err = clean.flux_err #period_maximum=(max(time)-min(time))/2. #time, flux0 = np.genfromtxt('TESS_phot.csv', delimiter=',', unpack=True) #rstar = 0.211257 * 41.46650444642 #in Rearth #::: make model def make_model(epoch, period, rplanet): #a = (7.495e-6 * period**2)**(1./3.)*u.au #in AU P1=period*u.day a = np.cbrt((ac.G*mstar*P1**2)/(4*np.pi**2)).to(u.au) #print("radius_1 =", rstar.to(u.au) / a) #star radius convert from AU to in units of a #print("radius_2 =", rplanet.to(u.au) / a) texpo=2./60./24. #print("T_expo = ", texpo,"dy") #tdur=t14(R_s=radius, M_s=mass,P=period,small_planet=False) #we define the typical duration of a small planet in this star #print("transit_duration= ", tdur*24*60,"min" ) model = ellc.lc( t_obs = time, radius_1 = rstar.to(u.au) / a, #star radius convert from AU to in units of a radius_2 = rplanet.to(u.au) / a, #convert from Rearth (equatorial) into AU and then into units of a sbratio = 0, incl = 90, light_3 = 0, t_zero = epoch, period = period, a = None, q = 1e-6, f_c = None, f_s = None, ldc_1=[0.2755,0.5493], ldc_2 = None, gdc_1 = None, gdc_2 = None, didt = None, domdt = None, rotfac_1 = 1, rotfac_2 = 1, hf_1 = 1.5, hf_2 = 1.5, bfac_1 = None, bfac_2 = None, heat_1 = None, heat_2 = None, lambda_1 = None, lambda_2 = None, vsini_1 = None, vsini_2 = None, t_exp=texpo, n_int=None, grid_1='default', grid_2='default', ld_1='quad', ld_2=None, shape_1='sphere', shape_2='sphere', spots_1=None, spots_2=None, exact_grav=False, verbose=1) flux_t = flux0 + model - 1. 
if model[0] > 0: flux = flux_t flux_err_model = flux_err time_custom = time else: flux = [] time_custom = [] flux_err_model = [] return time_custom, flux, flux_err_model #minutes=10 #print(len(time)) #print(min(time),max(time)) #bins=len(time)*2./minutes #print(bins) #bin_means, bin_edges, binnumber = stats.binned_statistic(time, flux, statistic='mean', bins=bins) #bin_stds, _, _ = stats.binned_statistic(time, flux, statistic='std', bins=bins) #bin_width = (bin_edges[1] - bin_edges[0]) #bin_centers = bin_edges[1:] - bin_width/2 #print('RMS PDCSAP flux (ppm): ',np.std(flux0[~np.isnan(flux0)])*1e6) #print('RMS model (ppm): ',np.std(flux[~np.isnan(flux)])*1e6) #print('RMS 10min bin detrended (ppm): ',np.std(bin_means[~np.isnan(bin_means)])*1e6) #fig, (ax1,ax2,ax3) = plt.subplots(3, 1, figsize=(10,5), constrained_layout=True) ##ax1 #ax1.plot(time, flux0, linewidth=0.05 ,color='black', alpha=0.4) ##ax1.legend(bbox_to_anchor=(0.85, 0.95), loc=2, borderaxespad=0.,fontsize=8) #ax1.set_ylabel("Normalized flux") #ax1.set_xlim(1766,1769) ##ax2 #ax2.plot(time, flux0, linewidth=0.05 ,color='black', alpha=0.4) ##ax2.plot(time, model, linewidth=0.9 ,color='firebrick', alpha=1) #ax2.errorbar(time, model, marker='.', markersize=2, color='firebrick', alpha=1, linestyle='none') #ax2.set_ylabel("Normalized flux") #ax2.set_xlim(1766,1769) ##ax3 #ax3.plot(time, flux, linewidth=0.1 ,color='teal', alpha=0.5) #ax3.errorbar(bin_centers, bin_means, marker='.', markersize=4, color='darkorange', alpha=1, linestyle='none') #ax3.set_ylabel("Normalized flux") #ax3.set_xlabel("Time (days)") #ax3.set_xlim(1766,1769) #plt.savefig('model.png', dpi=200) def logprint(*text): # print(*text) original = sys.stdout with open( os.path.join('tls/'+'P = '+str(period)+' days, Rp = '+str(rplanet)+'.log'), 'a' ) as f: sys.stdout = f print(*text) sys.stdout = original #::: iterate through grid of periods and rplanet dir = "/home/pozuelos/martin/curves" if not os.path.isdir(dir): os.mkdir(dir) max_period = 10 min_period = 0.5 for period in np.arange(min_period, max_period, 0.5): for t0 in np.arange(time[60], time[60] + period - 0.1, period / 5): for rplanet in np.arange(4, 0.65, -0.1): rplanet = np.around(rplanet, decimals=2)*u.R_earth print('\n') print('P = '+str(period)+' days, Rp = '+str(rplanet) + ", T0 = " + str(t0)) time_model, flux_model, flux_err_model = make_model(t0, period, rplanet) file_name = os.path.join(dir + '/P' + str(period) + '_R' + str(rplanet.value) + '_' + str(t0) + '.csv') lc_df = pd.DataFrame(columns=['#time', 'flux', 'flux_err']) lc_df['#time'] = time_model lc_df['flux'] = flux_model lc_df['flux_err'] = flux_err_model lc_df.to_csv(file_name, index=False)
[ [ [ 71, 85 ] ], [ [ 87, 95 ] ], [ [ 97, 112 ] ], [ [ 134, 145 ], [ 321, 323 ], [ 5739, 5741 ], [ 5793, 5795 ], [ 5874, 5876 ], [ 5922, 5924 ], [ 1951, 1953 ], [ 1981, 1983 ] ], [ [ 153, 155 ], [ 5654, 5656 ], [ 5678, 5680 ], [ 6184, 6186 ], [ 5397, 5399 ] ], [ [ 157, 160 ], [ 5371, 5374 ], [ 5493, 5496 ], [ 5533, 5536 ] ], [ [ 168, 172 ], [ 2388, 2392 ] ], [ [ 205, 217 ], [ 571, 583 ] ], [ [ 225, 248 ], [ 1960, 1962 ] ], [ [ 256, 274 ], [ 629, 630 ], [ 648, 649 ], [ 5953, 5954 ], [ 1937, 1938 ], [ 1995, 1996 ], [ 2453, 2454 ], [ 2543, 2544 ] ], [ [ 282, 298 ], [ 436, 438 ], [ 1423, 1425 ] ], [ [ 306, 318 ], [ 6296, 6298 ] ], [ [ 384, 390 ], [ 472, 478 ], [ 591, 597 ] ], [ [ 431, 434 ], [ 1356, 1359 ] ], [ [ 512, 514 ] ], [ [ 516, 520 ], [ 643, 647 ], [ 723, 727 ], [ 748, 752 ] ], [ [ 522, 529 ], [ 728, 735 ] ], [ [ 531, 538 ], [ 753, 760 ] ], [ [ 540, 546 ], [ 622, 628 ], [ 773, 779 ], [ 802, 808 ] ], [ [ 548, 557 ], [ 780, 789 ] ], [ [ 559, 568 ], [ 809, 818 ] ], [ [ 616, 621 ], [ 2444, 2449 ] ], [ [ 637, 642 ], [ 1965, 1970 ] ], [ [ 711, 720 ] ], [ [ 736, 745 ] ], [ [ 761, 770 ] ], [ [ 790, 799 ] ], [ [ 1353, 1355 ], [ 1442, 1444 ], [ 1456, 1458 ], [ 1473, 1475 ] ], [ [ 1416, 1422 ], [ 1492, 1498 ] ], [ [ 1486, 1491 ], [ 1591, 1596 ], [ 1607, 1612 ], [ 1629, 1634 ] ], [ [ 1585, 1590 ], [ 3461, 3466 ] ], [ [ 1602, 1606 ], [ 5803, 5807 ], [ 5813, 5817 ], [ 2416, 2420 ], [ 3579, 3583 ] ], [ [ 1618, 1626 ], [ 3548, 3556 ] ], [ [ 1834, 1844 ], [ 6128, 6138 ] ], [ [ 5321, 5329 ] ], [ [ 5610, 5613 ], [ 5668, 5671 ], [ 5687, 5690 ], [ 6197, 6200 ] ], [ [ 5692, 5702 ], [ 5761, 5771 ] ], [ [ 5708, 5718 ], [ 5749, 5759 ] ], [ [ 5729, 5735 ], [ 5824, 5830 ], [ 5838, 5844 ], [ 6016, 6022 ], [ 6143, 6149 ], [ 6214, 6220 ], [ 5428, 5434 ] ], [ [ 5787, 5789 ], [ 6070, 6072 ], [ 6139, 6141 ], [ 6262, 6264 ] ], [ [ 5863, 5870 ], [ 5932, 5939 ] ], [ [ 5912, 5919 ], [ 6043, 6050 ], [ 6151, 6158 ], [ 6235, 6242 ], [ 5455, 5462 ] ], [ [ 6087, 6097 ], [ 6377, 6387 ] ], [ [ 6099, 6109 ], [ 6416, 6426 ] ], [ [ 6111, 6125 ], [ 6459, 6473 ] ], [ [ 6172, 6181 ], [ 6499, 6508 ] ], [ [ 6288, 6293 ], [ 6360, 6365 ], [ 6400, 6405 ], [ 6439, 6444 ], [ 6486, 6491 ] ] ]
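# Editor's worked example of the Kepler third-law step inside make_model above,
# a = cbrt(G * M * P^2 / (4 * pi^2)); the stellar mass and period below are
# illustrative values, not taken from the TIC target.
import numpy as np
import astropy.constants as ac
import astropy.units as u

M = 1.0 * u.M_sun
P = 10.0 * u.day
a = np.cbrt((ac.G * M * P**2) / (4 * np.pi**2)).to(u.au)
print(a)  # ~0.091 AU for a solar-mass star on a 10-day orbit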
import time
import sys
import os
import threading
try:
    import ConfigParser as ConfigParsers
except ImportError:
    import configparser as ConfigParsers
from common import CommonVariables
from pwd import getpwuid
from stat import *
import traceback

# [pre_post]
# "timeout" : (in seconds),
#
# .... other params ...
#
# "pluginName0" : "oracle_plugin", the python plugin file will have same name
# "pluginPath0" : "/abc/xyz/"
# "pluginConfigPath0" : "sdf/sdf/abcd.json"
#
#
# errorcode policy
# errorcode = 0 (CommonVariables.PrePost_PluginStatus_Success), means success, script runs without error, warnings maybe possible
# errorcode = 5 (CommonVariables.PrePost_PluginStatus_Timeout), means timeout
# errorcode = 10 (CommonVariables.PrePost_PluginStatus_ConfigNotFound), config file not found
# errorcode = process return code, means bash script encountered some other error, like 127 for script not found

class PluginHostError(object):
    def __init__(self, errorCode, pluginName):
        self.errorCode = errorCode
        self.pluginName = pluginName

    def __str__(self):
        return 'Plugin :- ' + self.pluginName + ' ErrorCode :- ' + str(self.errorCode)

class PluginHostResult(object):
    def __init__(self):
        self.errors = []
        self.anyScriptFailed = False
        self.continueBackup = True
        self.errorCode = 0
        self.fileCode = []
        self.filePath = []

    def __str__(self):
        errorStr = ''
        for error in self.errors:
            errorStr += (str(error)) + '\n'
        errorStr += 'Final Error Code :- ' + str(self.errorCode) + '\n'
        errorStr += 'Any script Failed :- ' + str(self.anyScriptFailed) + '\n'
        errorStr += 'Continue Backup :- ' + str(self.continueBackup) + '\n'
        return errorStr

class PluginHost(object):
    """ Loads and runs pre/post snapshot script plugins. """
    def __init__(self, logger):
        self.logger = logger
        self.modulesLoaded = False
        self.configLocation = '/etc/azure/VMSnapshotPluginHost.conf'
        self.timeoutInSeconds = 1800
        self.plugins = []
        self.pluginName = []
        self.noOfPlugins = 0
        self.preScriptCompleted = []
        self.preScriptResult = []
        self.postScriptCompleted = []
        self.postScriptResult = []

    def pre_check(self):
        self.logger.log('Loading script modules now...',True,'Info')
        errorCode = CommonVariables.PrePost_PluginStatus_Success
        dobackup = True
        fsFreeze_on = True
        if not os.path.isfile(self.configLocation):
            self.logger.log('Plugin host Config file does not exist in the location ' + self.configLocation, True)
            self.configLocation = './main/VMSnapshotPluginHost.conf'
        permissions = self.get_permissions(self.configLocation)
        if not os.path.isfile(self.configLocation):
            self.logger.log('Plugin host Config file does not exist in the location ' + self.configLocation, True)
            errorCode = CommonVariables.FailedPrepostPluginhostConfigNotFound
        elif not (int(permissions[1]) == 0 or int(permissions[1]) == 4) or not (int(permissions[2]) == 0 or int(permissions[2]) == 4):
            self.logger.log('Plugin host Config file does not have desired permissions', True, 'Error')
            errorCode = CommonVariables.FailedPrepostPluginhostConfigPermissionError
        elif not self.find_owner(self.configLocation) == 'root':
            self.logger.log('The owner of the Plugin host Config file ' + self.configLocation + ' is ' + self.find_owner(self.configLocation) + ' but not root', True, 'Error')
            errorCode = CommonVariables.FailedPrepostPluginhostConfigPermissionError
        else:
            errorCode,dobackup,fsFreeze_on = self.load_modules()
        return errorCode,dobackup,fsFreeze_on

    def load_modules(self):
        # Imports all plugin modules using the information in config.json
        # and initializes basic class variables associated with each plugin
        len = 0
        errorCode = 
CommonVariables.PrePost_PluginStatus_Success
        dobackup = True
        fsFreeze_on = True
        try:
            self.logger.log('config file: '+str(self.configLocation),True,'Info')
            config = ConfigParsers.ConfigParser()
            config.read(self.configLocation)
            if (config.has_option('pre_post', 'timeoutInSeconds')):
                self.timeoutInSeconds = min(int(config.get('pre_post','timeoutInSeconds')),self.timeoutInSeconds)
            if (config.has_option('pre_post', 'numberOfPlugins')):
                len = int(config.get('pre_post','numberOfPlugins'))
            self.logger.log('timeoutInSeconds: '+str(self.timeoutInSeconds),True,'Info')
            self.logger.log('numberOfPlugins: '+str(len),True,'Info')
            while len > 0:
                pname = config.get('pre_post','pluginName'+str(self.noOfPlugins))
                ppath = config.get('pre_post','pluginPath'+str(self.noOfPlugins))
                pcpath = config.get('pre_post','pluginConfigPath'+str(self.noOfPlugins))
                self.logger.log('Name of the Plugin is ' + pname, True)
                self.logger.log('Plugin config path is ' + pcpath, True)
                errorCode = CommonVariables.PrePost_PluginStatus_Success
                dobackup = True
                if os.path.isfile(pcpath):
                    permissions = self.get_permissions(pcpath)
                    if (int(permissions[0]) %2 == 1) or int(permissions[1]) > 0 or int(permissions[2]) > 0:
                        self.logger.log('Plugin Config file does not have desired permissions', True, 'Error')
                        errorCode = CommonVariables.FailedPrepostPluginConfigPermissionError
                    if not self.find_owner(pcpath) == 'root':
                        self.logger.log('The owner of the Plugin Config file ' + pcpath + ' is ' + self.find_owner(pcpath) + ' but not root', True, 'Error')
                        errorCode = CommonVariables.FailedPrepostPluginConfigPermissionError
                else:
                    self.logger.log('Plugin config file does not exist in the location ' + pcpath, True, 'Error')
                    errorCode = CommonVariables.FailedPrepostPluginConfigNotFound
                if(errorCode == CommonVariables.PrePost_PluginStatus_Success):
                    sys.path.append(ppath)
                    plugin = __import__(pname)
                    self.plugins.append(plugin.ScriptRunner(logger=self.logger,name=pname,configPath=pcpath,maxTimeOut=self.timeoutInSeconds))
                    errorCode,dobackup,fsFreeze_on = self.plugins[self.noOfPlugins].validate_scripts()
                    self.noOfPlugins = self.noOfPlugins + 1
                    self.pluginName.append(pname)
                    self.preScriptCompleted.append(False)
                    self.preScriptResult.append(None)
                    self.postScriptCompleted.append(False)
                    self.postScriptResult.append(None)
                len = len - 1
            if self.noOfPlugins != 0:
                self.modulesLoaded = True
        except Exception as err:
            errMsg = 'Error in reading PluginHost config file : %s, stack trace: %s' % (str(err), traceback.format_exc())
            self.logger.log(errMsg, True, 'Error')
            errorCode = CommonVariables.FailedPrepostPluginhostConfigParsing
        return errorCode,dobackup,fsFreeze_on

    def find_owner(self, filename):
        file_owner = ''
        try:
            file_owner = getpwuid(os.stat(filename).st_uid).pw_name
        except Exception as err:
            errMsg = 'Error in fetching owner of the file : ' + filename + ': %s, stack trace: %s' % (str(err), traceback.format_exc())
            self.logger.log(errMsg, True, 'Error')
        return file_owner

    def get_permissions(self, filename):
        permissions = '777'
        try:
            permissions = oct(os.stat(filename)[ST_MODE])[-3:]
            self.logger.log('Permissions of the file ' + filename + ' are ' + permissions,True)
        except Exception as err:
            errMsg = 'Error in fetching permissions of the file : ' + filename + ': %s, stack trace: %s' % (str(err), traceback.format_exc())
            self.logger.log(errMsg, True, 'Error')
        return permissions

    def pre_script(self):
        # Runs pre_script() for all plugins and maintains a timer
        result = PluginHostResult()
        curr = 0
        for plugin in self.plugins:
            t1 = threading.Thread(target=plugin.pre_script, args=(curr, 
self.preScriptCompleted, self.preScriptResult))
            t1.start()
            curr = curr + 1
        flag = True
        for i in range(0,((self.timeoutInSeconds)//5)+2): #waiting 10 more seconds to escape race condition between Host and script timing out
            time.sleep(5)
            flag = True
            for j in range(0,self.noOfPlugins):
                flag = flag & self.preScriptCompleted[j]
            if flag:
                break
        continueBackup = True
        #Plugin timed out
        if not flag:
            ecode = CommonVariables.FailedPrepostPluginhostPreTimeout
            result.anyScriptFailed = True
            presult = PluginHostError(errorCode = ecode, pluginName = self.pluginName[j])
            result.errors.append(presult)
        else:
            for j in range(0,self.noOfPlugins):
                ecode = CommonVariables.FailedPrepostPluginhostPreTimeout
                continueBackup = continueBackup & self.preScriptResult[j].continueBackup
                if self.preScriptCompleted[j]:
                    ecode = self.preScriptResult[j].errorCode
                if ecode != CommonVariables.PrePost_PluginStatus_Success:
                    result.anyScriptFailed = True
                    presult = PluginHostError(errorCode = ecode, pluginName = self.pluginName[j])
                    result.errors.append(presult)
        result.continueBackup = continueBackup
        self.logger.log('Finished prescript execution from PluginHost side. Continue Backup: '+str(continueBackup),True,'Info')
        return result

    def post_script(self):
        # Runs post_script() for all plugins and maintains a timer
        result = PluginHostResult()
        if not self.modulesLoaded:
            return result
        self.logger.log('Starting postscript for all modules.',True,'Info')
        curr = 0
        for plugin in self.plugins:
            t1 = threading.Thread(target=plugin.post_script, args=(curr, self.postScriptCompleted, self.postScriptResult))
            t1.start()
            curr = curr + 1
        flag = True
        for i in range(0,((self.timeoutInSeconds)//5)+2): #waiting 10 more seconds to escape race condition between Host and script timing out
            time.sleep(5)
            flag = True
            for j in range(0,self.noOfPlugins):
                flag = flag & self.postScriptCompleted[j]
            if flag:
                break
        continueBackup = True
        #Plugin timed out
        if not flag:
            ecode = CommonVariables.FailedPrepostPluginhostPostTimeout
            result.anyScriptFailed = True
            presult = PluginHostError(errorCode = ecode, pluginName = self.pluginName[j])
            result.errors.append(presult)
        else:
            for j in range(0,self.noOfPlugins):
                ecode = CommonVariables.FailedPrepostPluginhostPostTimeout
                continueBackup = continueBackup & self.postScriptResult[j].continueBackup
                if self.postScriptCompleted[j]:
                    ecode = self.postScriptResult[j].errorCode
                if ecode != CommonVariables.PrePost_PluginStatus_Success:
                    result.anyScriptFailed = True
                    presult = PluginHostError(errorCode = ecode, pluginName = self.pluginName[j])
                    result.errors.append(presult)
        result.continueBackup = continueBackup
        self.logger.log('Finished postscript execution from PluginHost side. Continue Backup: '+str(continueBackup),True,'Info')
        return result
[ [ [ 7, 11 ], [ 8973, 8977 ], [ 10955, 10959 ] ], [ [ 19, 22 ], [ 6453, 6456 ] ], [ [ 30, 32 ], [ 2571, 2573 ], [ 2880, 2882 ], [ 5426, 5428 ], [ 7656, 7658 ], [ 8052, 8054 ] ], [ [ 40, 49 ], [ 8644, 8653 ], [ 10623, 10632 ] ], [ [ 66, 95 ], [ 4324, 4337 ] ], [ [ 127, 156 ], [ 4324, 4337 ] ], [ [ 176, 191 ], [ 2459, 2474 ], [ 3055, 3070 ], [ 3373, 3388 ], [ 3699, 3714 ], [ 4111, 4126 ], [ 5329, 5344 ], [ 5768, 5783 ], [ 6080, 6095 ], [ 6303, 6318 ], [ 6386, 6401 ], [ 7448, 7463 ], [ 9258, 9273 ], [ 9568, 9583 ], [ 9844, 9859 ], [ 11241, 11256 ], [ 11552, 11567 ], [ 11832, 11847 ] ], [ [ 208, 216 ], [ 7647, 7655 ] ], [ [ 234, 235 ], [ 8070, 8077 ] ], [ [ 243, 252 ], [ 7349, 7358 ], [ 7836, 7845 ], [ 8334, 8343 ] ], [ [ 988, 1003 ], [ 9372, 9387 ], [ 9966, 9981 ], [ 11356, 11371 ], [ 11954, 11969 ] ], [ [ 1250, 1266 ], [ 8555, 8571 ], [ 10396, 10412 ] ], [ [ 1861, 1871 ] ] ]
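# Hedged sketch of the [pre_post] section consumed by PluginHost.load_modules
# above; the numbered key names follow the config.get calls in that method,
# and the values are illustrative only.
SAMPLE_PLUGIN_HOST_CONF = """
[pre_post]
timeoutInSeconds = 1800
numberOfPlugins = 1
pluginName0 = oracle_plugin
pluginPath0 = /abc/xyz/
pluginConfigPath0 = /abc/xyz/oracle.json
"""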
#!/usr/bin/env python3
# Copyright 2019 The Kubernetes Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from grafanalib import core as g

import defaults as d


def api_call_latency_panel(expression):
    def api_call_latency(title, verb, scope, threshold):
        return d.Graph(
            title=title,
            targets=[
                g.Target(expr=str(threshold), legendFormat="threshold"),
                g.Target(
                    expr=d.one_line(expression % {"verb": verb, "scope": scope} ),
                    # TODO(github.com/grafana/grafana/issues/19410): uncomment once fixed
                    # legendFormat="{{verb}} {{scope}}/{{resource}}",
                ),
            ],
            yAxes=g.single_y_axis(format=g.SECONDS_FORMAT),
        )

    return [
        api_call_latency(
            title="GET resource latency (percentile=99, scope=resource, threshold=1s)",
            verb="GET",
            scope="resource",
            threshold=1,
        ),
        api_call_latency(
            title="LIST namespace latency (percentile=99, scope=namespace, threshold=5s)",
            verb="LIST",
            scope="namespace",
            threshold=5,
        ),
        api_call_latency(
            title="LIST cluster latency (percentile=99, scope=cluster, threshold=30s)",
            verb="LIST",
            scope="cluster",
            threshold=30,
        ),
        api_call_latency(
            title="Mutating API call latency (threshold=1s)",
            verb=d.any_of("CREATE", "DELETE", "PATCH", "POST", "PUT"),
            scope=d.any_of("namespace", "cluster", "resource"),
            threshold=1,
        ),
    ]


API_CALL_LATENCY_PANELS = api_call_latency_panel("""
apiserver:apiserver_request_latency_1m:histogram_quantile{
  quantile="0.99",
  verb=~"%(verb)s",
  scope=~"%(scope)s",
  resource=~"${resource:regex}s*",
  subresource!~"exec|proxy",
}""")

QUANTILE_API_CALL_LATENCY_PANELS = api_call_latency_panel("""
quantile_over_time(0.99,
apiserver:apiserver_request_latency_1m:histogram_quantile{
  quantile="0.99",
  verb=~"%(verb)s",
  scope=~"%(scope)s",
  resource=~"${resource:regex}s*",
  subresource!~"exec|proxy",
}[5d])""")

PAF_PANELS = [
    d.simple_graph(
        "Requests waiting time",
        "histogram_quantile(0.99, sum(rate(apiserver_flowcontrol_request_wait_duration_seconds_bucket[1m])) by (le, priority_level))",
        legend="{{priority_level}}",
        yAxes=g.single_y_axis(format=g.SECONDS_FORMAT),
    ),
    d.simple_graph(
        "Execution time",
        "histogram_quantile(0.99, sum(rate(apiserver_flowcontrol_request_execution_seconds_bucket[1m])) by (le, priority_level))",
        legend="{{priority_level}}",
        yAxes=g.single_y_axis(format=g.SECONDS_FORMAT),
    ),
    d.simple_graph(
        "Total execution time per second",
        "sum(irate(apiserver_flowcontrol_request_execution_seconds_sum[1m])) by (priority_level)",
        legend="{{priority_level}}",
        yAxes=g.single_y_axis(format=g.SECONDS_FORMAT),
    ),
    d.simple_graph(
        "Requests rate by priority level",
        "sum(irate(apiserver_flowcontrol_dispatched_requests_total[1m])) by (priority_level)",
        legend="{{priority_level}}",
        yAxes=g.single_y_axis(format=g.OPS_FORMAT),
    ),
    d.simple_graph(
        "Concurrency limits",
        "avg(apiserver_flowcontrol_request_concurrency_limit) by (priority_level)",
        legend="{{priority_level}}",
    ),
]

HEALTH_PANELS = [
    d.simple_graph(
        "Unhealthy nodes",
        "sum(node_collector_unhealthy_nodes_in_zone) by (zone)",
        legend="{{zone}}",
    ),
    d.simple_graph(
        "Pod creations",
        'sum(irate(apiserver_request_total{verb="POST", resource="pods", 
subresource=""}[1m]))', yAxes=g.single_y_axis(format=g.OPS_FORMAT), ), d.simple_graph( "Pod bindings", 'sum(irate(apiserver_request_total{verb="POST", resource="pods", subresource="binding"}[1m]))', yAxes=g.single_y_axis(format=g.OPS_FORMAT), ), # It's not clear which "Component restarts" shows more accurate results. d.simple_graph( "Component restarts", "sum(rate(process_start_time_seconds[1m]) > bool 0) by (job, endpoint)", ), d.simple_graph( "Component restarts 2", 'sum(min_over_time(container_start_time_seconds{container!="",container!="POD"}[2m])) by (container)', ), d.simple_graph( "Active component", "sum(leader_election_master_status) by (name, instance)" ), ] ETCD_PANELS = [ d.simple_graph("etcd leader", "etcd_server_is_leader", legend="{{instance}}"), d.simple_graph( "etcd bytes sent", "rate(etcd_network_client_grpc_sent_bytes_total[1m])", yAxes=g.single_y_axis(format=g.BYTES_PER_SEC_FORMAT), legend="{{instance}}", ), d.simple_graph( "etcd operations rate", d.one_line( """ sum( rate( etcd_request_duration_seconds_count{ operation=~"${etcd_operation:regex}", type=~".*(${etcd_type:pipe})" }[1m] ) ) by (operation, type) """ ), yAxes=g.single_y_axis(format=g.OPS_FORMAT), legend="{{operation}} {{type}}", ), d.simple_graph( "etcd get latency by type (99th percentile)", d.one_line( """ histogram_quantile( 0.99, sum( rate( etcd_request_duration_seconds_bucket{ operation=~"${etcd_operation:regex}", type=~".*(${etcd_type:pipe})" }[1m] ) ) by (le, operation, type, instance) ) """ ), yAxes=g.single_y_axis(format=g.SECONDS_FORMAT), legend="{{operation}} {{type}} on {{instance}}", ), d.simple_graph( "etcd get latency by type (50th percentile)", d.one_line( """ histogram_quantile( 0.50, sum( rate( etcd_request_duration_seconds_bucket{ operation=~"${etcd_operation:regex}", type=~".*(${etcd_type:pipe})" }[1m] ) ) by (le, operation, type, instance) ) """ ), yAxes=g.single_y_axis(format=g.SECONDS_FORMAT), ), d.simple_graph("etcd instance id", "sum(etcd_server_id) by (instance, server_id)"), d.simple_graph( "etcd network latency (99th percentile)", "histogram_quantile(0.99, sum(rate(etcd_network_peer_round_trip_time_seconds_bucket[1m])) by (le, instance, To))", yAxes=g.single_y_axis(format=g.SECONDS_FORMAT), ), d.simple_graph( "etcd compaction keys", "delta(etcd_debugging_mvcc_db_compaction_keys_total[1m])", ), d.simple_graph( "etcd compaction pause sum duration", "delta(etcd_debugging_mvcc_db_compaction_pause_duration_milliseconds_sum[1m])", yAxes=g.single_y_axis(format=g.MILLISECONDS_FORMAT), ), d.simple_graph( "etcd compaction pause num chunks", "delta(etcd_debugging_mvcc_db_compaction_pause_duration_milliseconds_count[1m])", ), d.simple_graph( "etcd_disk_backend_commit_duration_seconds", "histogram_quantile(0.99, sum(rate(etcd_disk_backend_commit_duration_seconds_bucket[1m])) by (le, instance))", yAxes=g.single_y_axis(format=g.SECONDS_FORMAT), ), d.simple_graph( "etcd wal fsync duration", "histogram_quantile(1.0, sum(rate(etcd_disk_wal_fsync_duration_seconds_bucket[1m])) by (le, endpoint))", yAxes=g.single_y_axis(format=g.SECONDS_FORMAT), ), d.Graph( title="etcd compaction max pause", points=True, lines=False, targets=[ g.Target( expr="histogram_quantile(1.0, sum(rate(etcd_debugging_mvcc_db_compaction_pause_duration_milliseconds_bucket[1m])) by (le, instance))" ) ], yAxes=g.single_y_axis(format=g.MILLISECONDS_FORMAT), ), d.simple_graph( "etcd objects", "sum(etcd_object_counts) by (resource, instance)", legend="{{instance}}: {{resource}}", ), d.simple_graph( "etcd db size", [ 
"etcd_mvcc_db_total_size_in_bytes", "etcd_mvcc_db_total_size_in_use_in_bytes", "etcd_server_quota_backend_bytes", ], yAxes=g.single_y_axis(format=g.BYTES_FORMAT), ), ] APISERVER_PANELS = [ d.simple_graph( "goroutines", 'go_goroutines{job="master", endpoint="apiserver"}', legend="{{instance}}", ), d.simple_graph( "gc rate", 'rate(go_gc_duration_seconds_count{job="master", endpoint="apiserver"}[1m])', legend="{{instance}}", ), d.simple_graph( "alloc rate", 'rate(go_memstats_alloc_bytes_total{job="master", endpoint="apiserver"}[1m])', yAxes=g.single_y_axis(format=g.BYTES_PER_SEC_FORMAT), legend="{{instance}}", ), d.simple_graph( "Number of active watches", 'sum(apiserver_registered_watchers{kind=~"(?i:(${resource:regex}))s*"}) by (instance, group, version, kind)', legend="{{instance}}: {{version}}.{{group}}.{{kind}}", ), d.simple_graph( "Watch events rate", d.one_line( """ sum( irate( apiserver_watch_events_total{ kind=~"(?i:(${resource:regex}))s*" }[1m] ) ) by (instance, group, version, kind)""" ), legend="{{instance}}: {{version}}.{{group}}.{{kind}}", ), d.simple_graph( "Watch events traffic", d.one_line( """ sum( irate( apiserver_watch_events_sizes_sum{ kind=~"(?i:(${resource:regex}))s*" }[1m] ) ) by (instance, group, version, kind)""" ), yAxes=g.single_y_axis(format=g.BYTES_PER_SEC_FORMAT), legend="{{instance}}: {{version}}.{{group}}.{{kind}}", ), d.simple_graph( "Watch event avg size", d.one_line( """ sum( rate( apiserver_watch_events_sizes_sum{ kind=~"(?i:(${resource:regex}))s*" }[1m] ) / rate( apiserver_watch_events_sizes_count{ kind=~"(?i:(${resource:regex}))s*" }[1m] ) ) by (instance, group, version, kind)""" ), legend="{{instance}}: {{version}}.{{group}}.{{kind}}", ), d.simple_graph( "Inflight requests", "sum(apiserver_current_inflight_requests) by (requestKind, instance)", legend="{{instance}}: {{requestKind}}", ), d.simple_graph( "Request rate", d.one_line( """ sum( rate( apiserver_request_total{ verb=~"${verb:regex}", resource=~"${resource:regex}s*" }[1m] ) ) by (verb, resource, subresource, instance)""" ), # TODO(github.com/grafana/grafana/issues/19410): uncomment once fixed # legend="{{instance}}: {{verb}} {{resource}}", ), d.simple_graph( "Request rate by code", "sum(rate(apiserver_request_total[1m])) by (code, instance)", legend="{{instance}}: {{code}}", ), d.simple_graph( "Request latency (50th percentile) (excl. WATCH)", d.one_line( """ apiserver:apiserver_request_latency:histogram_quantile{ quantile="0.50", verb!="WATCH", verb=~"${verb:regex}", resource=~"${resource:regex}s*" }""" ), # TODO(github.com/grafana/grafana/issues/19410): uncomment once fixed # legend="{{verb}} {{scope}}/{{resource}}", yAxes=g.single_y_axis(format=g.SECONDS_FORMAT), ), d.simple_graph( "Request latency (99th percentile) (excl. WATCH)", d.one_line( """ apiserver:apiserver_request_latency:histogram_quantile{ quantile="0.99", verb!="WATCH", verb=~"${verb:regex}", resource=~"${resource:regex}s*" }""" ), # TODO(github.com/grafana/grafana/issues/19410): uncomment once fixed # legend="{{verb}} {{scope}}/{{resource}}", yAxes=g.single_y_axis(format=g.SECONDS_FORMAT), ), d.simple_graph( "Traffic (excl. 
WATCH)", d.one_line( """ sum( rate( apiserver_response_sizes_sum{ verb!="WATCH", verb=~"${verb:regex}", resource=~"${resource:regex}s*" }[1m] ) ) by (verb, version, resource, subresource, scope, instance)""" ), yAxes=g.single_y_axis(format=g.BYTES_PER_SEC_FORMAT), ), d.simple_graph( "Webhook admission duration (99th percentile)", "histogram_quantile(0.99, sum(rate(apiserver_admission_webhook_admission_duration_seconds_bucket[1m])) by (le, type, name))", legend="{{type}}: {{name}}", yAxes=g.single_y_axis(format=g.SECONDS_FORMAT), ), d.simple_graph( "Request filter latency for each filter type (99th percentile)", "histogram_quantile(0.99, sum(rate(apiserver_request_filter_duration_seconds_bucket[1m])) by (le, filter))", legend="{{filter}}", yAxes=g.single_y_axis(format=g.SECONDS_FORMAT), ), ] VM_PANELS = [ d.simple_graph( "fs bytes reads by container", "sum(rate(container_fs_reads_bytes_total[1m])) by (container, instance)", legend="{{instance}}: {{container}}", yAxes=g.single_y_axis(format=g.BYTES_FORMAT), ), d.simple_graph( "fs reads by container", "sum(rate(container_fs_reads_total[1m])) by (container, instance)", legend="{{instance}}: {{container}}", ), d.simple_graph( "fs bytes writes by container", "sum(rate(container_fs_writes_bytes_total[1m])) by (container, instance)", legend="{{instance}}: {{container}}", yAxes=g.single_y_axis(format=g.BYTES_FORMAT), ), d.simple_graph( "fs writes by container", "sum(rate(container_fs_writes_total[1m])) by (container, instance)", legend="{{instance}}: {{container}}", ), d.Graph( title="CPU usage by container", targets=[ d.Target( expr='sum(rate(container_cpu_usage_seconds_total{container!=""}[1m])) by (container, instance)', legendFormat="{{instance}}: {{container}}", ), d.Target(expr="machine_cpu_cores", legendFormat="limit"), ], ), d.Graph( title="memory usage by container", targets=[ d.Target( expr='sum(container_memory_usage_bytes{container!=""}) by (container, instance)', legendFormat="{{instance}}: {{container}}", ), d.Target(expr="machine_memory_bytes", legendFormat="limit"), ], yAxes=g.single_y_axis(format=g.BYTES_FORMAT), ), d.Graph( title="memory working set by container", targets=[ d.Target( expr='sum(container_memory_working_set_bytes{container!=""}) by (container, instance)', legendFormat="{{instance}}: {{container}}", ), d.Target(expr="machine_memory_bytes", legendFormat="limit"), ], yAxes=g.single_y_axis(format=g.BYTES_FORMAT), ), d.Graph( title="Network usage (bytes)", targets=[ g.Target( expr='rate(container_network_transmit_bytes_total{id="/"}[1m])', legendFormat="{{instance}} transmit", ), g.Target( expr='rate(container_network_receive_bytes_total{id="/"}[1m])', legendFormat="{{instance}} receive", ), ], yAxes=g.single_y_axis(format=g.BYTES_PER_SEC_FORMAT), ), d.Graph( title="Network usage (packets)", targets=[ g.Target( expr='rate(container_network_transmit_packets_total{id="/"}[1m])', legendFormat="{{instance}} transmit", ), g.Target( expr='rate(container_network_receive_packets_total{id="/"}[1m])', legendFormat="{{instance}} receive", ), ], ), d.Graph( title="Network usage (avg packet size)", targets=[ g.Target( expr='rate(container_network_transmit_bytes_total{id="/"}[1m]) / rate(container_network_transmit_packets_total{id="/"}[1m])', legendFormat="{{instance}} transmit", ), g.Target( expr='rate(container_network_receive_bytes_total{id="/"}[1m]) / rate(container_network_receive_packets_total{id="/"}[1m])', legendFormat="{{instance}} receive", ), ], yAxes=g.single_y_axis(format=g.BYTES_FORMAT), ), d.Graph( title="Network tcp 
segments", targets=[ g.Target( expr="sum(rate(node_netstat_Tcp_InSegs[1m])) by (instance)", legendFormat="InSegs {{instance}}", ), g.Target( expr="sum(rate(node_netstat_Tcp_OutSegs[1m])) by (instance)", legendFormat="OutSegs {{instance}}", ), g.Target( expr="sum(rate(node_netstat_Tcp_RetransSegs[1m])) by (instance)", legendFormat="RetransSegs {{instance}}", ), ], yAxes=g.single_y_axis(format=g.SHORT_FORMAT, logBase=10), ), ] # The final dashboard must be named 'dashboard' so that grafanalib will find it. dashboard = d.Dashboard( title="Master dashboard", refresh="", rows=[ d.Row(title="API call latency", panels=API_CALL_LATENCY_PANELS), d.Row(title="API call latency aggregated with quantile", panels=QUANTILE_API_CALL_LATENCY_PANELS, collapse=True), d.Row(title="P&F metrics", panels=PAF_PANELS, collapse=True), d.Row(title="Overall cluster health", panels=HEALTH_PANELS, collapse=True), d.Row(title="etcd", panels=ETCD_PANELS, collapse=True), d.Row(title="kube-apiserver", panels=APISERVER_PANELS, collapse=True), d.Row( title="kube-controller-manager", panels=[ d.simple_graph( "Workqueue depths", 'workqueue_depth{endpoint="kube-controller-manager"}', legend="{{name}}", ) ], collapse=True, ), d.Row(title="Master VM", panels=VM_PANELS, collapse=True), ], templating=g.Templating( list=[ d.SOURCE_TEMPLATE, g.Template( name="etcd_type", type="query", dataSource="$source", regex=r"\*\[+\]+(.*)", query="label_values(etcd_request_duration_seconds_count, type)", multi=True, includeAll=True, refresh=g.REFRESH_ON_TIME_RANGE_CHANGE, ), g.Template( name="etcd_operation", type="query", dataSource="$source", query="label_values(etcd_request_duration_seconds_count, operation)", multi=True, includeAll=True, refresh=g.REFRESH_ON_TIME_RANGE_CHANGE, ), g.Template( name="verb", type="query", dataSource="$source", query="label_values(apiserver_request_duration_seconds_count, verb)", multi=True, includeAll=True, refresh=g.REFRESH_ON_TIME_RANGE_CHANGE, ), g.Template( name="resource", type="query", dataSource="$source", regex="(.*)s", query="label_values(apiserver_request_duration_seconds_count, resource)", multi=True, includeAll=True, refresh=g.REFRESH_ON_TIME_RANGE_CHANGE, ), ] ), ).auto_panel_ids()
[ [ [ 635, 644 ], [ 3001, 3002 ], [ 3024, 3025 ], [ 3279, 3280 ], [ 3302, 3303 ], [ 3542, 3543 ], [ 3565, 3566 ], [ 3800, 3801 ], [ 3823, 3824 ], [ 4346, 4347 ], [ 4369, 4370 ], [ 4553, 4554 ], [ 4576, 4577 ], [ 5321, 5322 ], [ 5344, 5345 ], [ 5695, 5696 ], [ 5718, 5719 ], [ 6152, 6153 ], [ 6175, 6176 ], [ 6629, 6630 ], [ 6652, 6653 ], [ 6973, 6974 ], [ 6996, 6997 ], [ 7316, 7317 ], [ 7339, 7340 ], [ 7737, 7738 ], [ 7760, 7761 ], [ 7968, 7969 ], [ 7991, 7992 ], [ 8145, 8146 ], [ 8344, 8345 ], [ 8367, 8368 ], [ 8782, 8783 ], [ 8805, 8806 ], [ 9300, 9301 ], [ 9323, 9324 ], [ 10200, 10201 ], [ 10223, 10224 ], [ 11923, 11924 ], [ 11946, 11947 ], [ 12399, 12400 ], [ 12422, 12423 ], [ 12775, 12776 ], [ 12798, 12799 ], [ 13091, 13092 ], [ 13114, 13115 ], [ 13393, 13394 ], [ 13416, 13417 ], [ 13660, 13661 ], [ 13683, 13684 ], [ 14092, 14093 ], [ 14115, 14116 ], [ 15059, 15060 ], [ 15082, 15083 ], [ 15485, 15486 ], [ 15508, 15509 ], [ 15614, 15615 ], [ 15786, 15787 ], [ 15969, 15970 ], [ 15992, 15993 ], [ 16108, 16109 ], [ 16282, 16283 ], [ 16552, 16553 ], [ 16785, 16786 ], [ 17028, 17029 ], [ 17051, 17052 ], [ 17156, 17157 ], [ 17322, 17323 ], [ 17490, 17491 ], [ 17679, 17680 ], [ 17702, 17703 ], [ 18823, 18824 ], [ 18895, 18896 ], [ 19214, 19215 ], [ 19273, 19274 ], [ 19563, 19564 ], [ 19622, 19623 ], [ 19902, 19903 ], [ 19961, 19962 ], [ 20280, 20281 ], [ 852, 853 ], [ 925, 926 ], [ 1266, 1267 ], [ 1289, 1290 ] ], [ [ 652, 665 ], [ 2765, 2766 ], [ 3054, 3055 ], [ 3332, 3333 ], [ 3595, 3596 ], [ 3849, 3850 ], [ 4048, 4049 ], [ 4194, 4195 ], [ 4395, 4396 ], [ 4679, 4680 ], [ 4817, 4818 ], [ 4987, 4988 ], [ 5118, 5119 ], [ 5201, 5202 ], [ 5411, 5412 ], [ 5467, 5468 ], [ 5785, 5786 ], [ 5863, 5864 ], [ 6262, 6263 ], [ 6340, 6341 ], [ 6682, 6683 ], [ 6770, 6771 ], [ 7026, 7027 ], [ 7152, 7153 ], [ 7374, 7375 ], [ 7535, 7536 ], [ 7790, 7791 ], [ 8021, 8022 ], [ 8402, 8403 ], [ 8557, 8558 ], [ 8857, 8858 ], [ 8998, 8999 ], [ 9161, 9162 ], [ 9390, 9391 ], [ 9634, 9635 ], [ 9687, 9688 ], [ 9944, 9945 ], [ 10000, 10001 ], [ 10322, 10323 ], [ 10378, 10379 ], [ 10745, 10746 ], [ 10928, 10929 ], [ 10976, 10977 ], [ 11331, 11332 ], [ 11501, 11502 ], [ 11584, 11585 ], [ 11976, 11977 ], [ 12059, 12060 ], [ 12452, 12453 ], [ 12509, 12510 ], [ 12834, 12835 ], [ 13144, 13145 ], [ 13463, 13464 ], [ 13711, 13712 ], [ 13893, 13894 ], [ 14143, 14144 ], [ 14327, 14328 ], [ 14406, 14407 ], [ 14616, 14617 ], [ 14696, 14697 ], [ 14778, 14779 ], [ 14973, 14974 ], [ 15110, 15111 ], [ 15198, 15199 ], [ 15399, 15400 ], [ 15536, 15537 ], [ 16028, 16029 ], [ 16464, 16465 ], [ 17079, 17080 ], [ 17834, 17835 ], [ 17912, 17913 ], [ 17985, 17986 ], [ 18107, 18108 ], [ 18177, 18178 ], [ 18261, 18262 ], [ 18325, 18326 ], [ 18404, 18405 ], [ 18493, 18494 ], [ 18742, 18743 ], [ 18864, 18865 ], [ 2058, 2059 ], [ 2130, 2131 ], [ 780, 781 ], [ 960, 961 ] ], [ [ 672, 694 ], [ 2245, 2267 ], [ 2498, 2520 ] ], [ [ 2219, 2242 ], [ 17951, 17974 ] ], [ [ 2463, 2495 ], [ 18049, 18081 ] ], [ [ 2746, 2756 ], [ 18141, 18151 ] ], [ [ 4026, 4039 ], [ 18222, 18235 ] ], [ [ 5098, 5109 ], [ 18288, 18299 ] ], [ [ 8832, 8848 ], [ 18362, 18378 ] ], [ [ 13445, 13454 ], [ 18774, 18783 ] ], [ [ 17822, 17831 ] ] ]
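# Hedged sketch: a grafanalib module like the one above is normally rendered
# with the generate-dashboard CLI, which looks up the module-level attribute
# named 'dashboard'. Programmatic rendering looks roughly like this.
import json
from grafanalib._gen import DashboardEncoder

print(json.dumps(dashboard.to_json_data(), cls=DashboardEncoder, indent=2))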
import requests import os from consul import base __all__ = ["Consul"] class HTTPClient(base.HTTPClient): def __init__(self, *args, **kwargs): self.timeout = kwargs.pop("timeout", None) super(HTTPClient, self).__init__(*args, **kwargs) self.session = requests.session() self._pid = os.getpid() def response(self, response): response.encoding = "utf-8" return base.Response(response.status_code, response.headers, response.text) def _request(self, callback, uri, method, data, timeout=None): self._renew_session_on_pid_change() if timeout is None: timeout = self.timeout elif timeout <= 0: timeout = None return callback( self.response( self.session.request( method, url=uri, verify=self.verify, data=data, cert=self.cert, timeout=timeout, ) ) ) def get(self, callback, path, params=None, timeout=None): uri = self.uri(path, params) return self._request( callback, uri=uri, method="GET", data=None, timeout=timeout ) def put(self, callback, path, params=None, data="", timeout=None): uri = self.uri(path, params) return self._request( callback, uri=uri, method="PUT", data=data, timeout=timeout ) def delete(self, callback, path, params=None, timeout=None): uri = self.uri(path, params) return self._request( callback, uri=uri, method="DELETE", data=None, timeout=timeout ) def post(self, callback, path, params=None, data="", timeout=None): uri = self.uri(path, params) return self._request( callback, uri=uri, method="POST", data=data, timeout=timeout ) def _renew_session_on_pid_change(self): """ Check if the pid has changed and create new session if it has""" if self.check_pid: pid = os.getpid() if pid == self._pid: return self._pid = pid self.session = requests.session() class Consul(base.Consul): def connect( self, host, port, scheme, verify=True, cert=None, timeout=None, check_pid=False ): return HTTPClient( host, port, scheme, verify, cert, timeout=timeout, check_pid=check_pid )
[ [ [ 7, 15 ], [ 284, 292 ], [ 2216, 2224 ] ], [ [ 23, 25 ], [ 323, 325 ], [ 2093, 2095 ] ], [ [ 46, 50 ], [ 93, 97 ], [ 2250, 2254 ], [ 421, 425 ] ], [ [ 53, 60 ] ], [ [ 82, 92 ], [ 217, 227 ], [ 2391, 2401 ] ], [ [ 2243, 2249 ] ] ]
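# Hedged usage sketch for the requests-backed client above; the kv endpoints
# come from the python-consul base API exposed through base.Consul.
c = Consul(host='127.0.0.1', port=8500)
c.kv.put('example/key', 'value')
index, data = c.kv.get('example/key')
print(data['Value'])  # b'value'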
# Copyright 2021 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""ConfusionMatrixMetric & ConfusionMatrix."""
import numpy as np

from mindspore._checkparam import Validator as validator
from .metric import Metric


class ConfusionMatrix(Metric):
    r"""
    Computes the confusion matrix, a performance matrix for classification models whose output is binary or
    multi-class. The computed result is an array of shape [num_classes, num_classes], accumulated over all
    updates.

    If you only want the confusion matrix itself, use this class. If you want derived measures such as 'PPV',
    'TPR' or 'TNR', use the class 'mindspore.metrics.ConfusionMatrixMetric'.

    Args:
        num_classes (int): Number of classes in the dataset.
        normalize (str): Normalization mode applied to the computed confusion matrix. Four modes are supported.
            Choose from:

            - **'no_norm'** (None) - No normalization is used. Default: None.
            - **'target'** (str) - Normalization based on target value.
            - **'prediction'** (str) - Normalization based on predicted value.
            - **'all'** (str) - Normalization over the whole matrix.

        threshold (float): A threshold, which is used to compare with the input tensor. Default: 0.5.

    Examples:
        >>> x = Tensor(np.array([1, 0, 1, 0]))
        >>> y = Tensor(np.array([1, 0, 0, 1]))
        >>> metric = nn.ConfusionMatrix(num_classes=2, normalize="no_norm", threshold=0.5)
        >>> metric.clear()
        >>> metric.update(x, y)
        >>> output = metric.eval()
        >>> print(output)
        [[1. 1.]
         [1. 1.]]
    """

    TARGET = "target"
    PREDICTION = "prediction"
    ALL = "all"
    NO_NORM = "no_norm"

    def __init__(self, num_classes, normalize=NO_NORM, threshold=0.5):
        super(ConfusionMatrix, self).__init__()
        self.num_classes = validator.check_value_type("num_classes", num_classes, [int])
        if normalize != ConfusionMatrix.TARGET and normalize != ConfusionMatrix.PREDICTION and \
                normalize != ConfusionMatrix.ALL and normalize is not ConfusionMatrix.NO_NORM:
            raise ValueError(
                'The normalize way should be in [all, prediction, target, None], but got {}.'.format(normalize)
            )

        self.normalize = normalize
        self.threshold = validator.check_value_type("threshold", threshold, [float])
        self.clear()

    def clear(self):
        """Clears the internal evaluation result."""
        self.confusion_matrix = np.zeros((self.num_classes, self.num_classes))
        self._is_update = False

    def update(self, *inputs):
        """
        Update state with y_pred and y.

        Args:
            inputs: Input `y_pred` and `y`. `y_pred` and `y` are a `Tensor`, a list or an array.
                    `y_pred` is the predicted value, `y` is the true value.
                    The shape of `y_pred` is :math:`(N, C, ...)` or :math:`(N, ...)`.
                    The shape of `y` is :math:`(N, ...)`.

        Raises:
            ValueError: If the number of the inputs is not 2.
""" if len(inputs) != 2: raise ValueError('ConfusionMatrix need 2 inputs (y_pred, y), but got {}.'.format(len(inputs))) y_pred = self._convert_data(inputs[0]) y = self._convert_data(inputs[1]) if not (y_pred.ndim == y.ndim or y_pred.ndim == y.ndim + 1): raise ValueError("y_pred and y should have the same number of dimensions, or the dimension of y_pred " "equals the dimension of y add 1.") if y_pred.ndim == y.ndim + 1: y_pred = np.argmax(y_pred, axis=1) if y_pred.ndim == y.ndim and y_pred.dtype in (np.float16, np.float32, np.float64): y_pred = (y_pred >= self.threshold).astype(int) trans = (y.reshape(-1) * self.num_classes + y_pred.reshape(-1)).astype(int) bincount = np.bincount(trans, minlength=self.num_classes ** 2) confusion_matrix = bincount.reshape(self.num_classes, self.num_classes) self.confusion_matrix += confusion_matrix self._is_update = True def eval(self): """ Computes confusion matrix. Returns: numpy.ndarray, the computed result. """ if not self._is_update: raise RuntimeError('Call the update method before calling eval.') confusion_matrix = self.confusion_matrix.astype(float) matrix_target = confusion_matrix / confusion_matrix.sum(axis=1, keepdims=True) matrix_pred = confusion_matrix / confusion_matrix.sum(axis=0, keepdims=True) matrix_all = confusion_matrix / confusion_matrix.sum() normalize_dict = {ConfusionMatrix.TARGET: matrix_target, ConfusionMatrix.PREDICTION: matrix_pred, ConfusionMatrix.ALL: matrix_all} if self.normalize == ConfusionMatrix.NO_NORM: return confusion_matrix matrix = normalize_dict.get(self.normalize) if matrix[np.isnan(matrix)].size != 0: matrix[np.isnan(matrix)] = 0 return matrix class ConfusionMatrixMetric(Metric): r""" The performance matrix of measurement classification model is the model whose output is binary or multi class. The correlation measure of confusion matrix was calculated from the full-scale tensor, and the average values of batch, class channel and iteration were collected. This function supports the calculation of all measures described below: the metric name in parameter metric_name. If you want to use confusion matrix to calculate, such as 'PPV', 'TPR', 'TNR', use this class. If you only want to calculate confusion matrix, please use 'mindspore.metrics.ConfusionMatrix'. Args: skip_channel (bool): Whether to skip the measurement calculation on the first channel of the predicted output. Default: True. metric_name (str): The names of indicators are in the following range. Of course, you can also set the industry common aliases for these indicators. Choose from: ["sensitivity", "specificity", "precision", "negative predictive value", "miss rate", "fall out", "false discovery rate", "false omission rate", "prevalence threshold", "threat score", "accuracy", "balanced accuracy", "f1 score", "matthews correlation coefficient", "fowlkes mallows index", "informedness", "markedness"]. calculation_method (bool): If true, the measurement for each sample is calculated first. If it is false, the confusion matrix of all samples is accumulated first. As for classification task, 'calculation_method' should be False. Default: False. decrease (str): Define the mode to reduce the calculation result of one batch of data. Decrease is used only if calculation_method is True. Default: "mean". Choose from: ["none", "mean", "sum", "mean_batch", "sum_batch", "mean_channel", "sum_channel"]. Examples: >>> metric = ConfusionMatrixMetric(skip_channel=True, metric_name="tpr", ... 
calculation_method=False, decrease="mean") >>> metric.clear() >>> x = Tensor(np.array([[[0], [1]], [[1], [0]]])) >>> y = Tensor(np.array([[[0], [1]], [[0], [1]]])) >>> metric.update(x, y) >>> x = Tensor(np.array([[[0], [1]], [[1], [0]]])) >>> y = Tensor(np.array([[[0], [1]], [[1], [0]]])) >>> avg_output = metric.eval() >>> print(avg_output) [0.5] """ def __init__(self, skip_channel=True, metric_name="sensitivity", calculation_method=False, decrease="mean"): super(ConfusionMatrixMetric, self).__init__() self.confusion_matrix = _ConfusionMatrix(skip_channel=skip_channel, metric_name=metric_name, calculation_method=calculation_method, decrease=decrease) self.skip_channel = validator.check_value_type("skip_channel", skip_channel, [bool]) self.calculation_method = validator.check_value_type("calculation_method", calculation_method, [bool]) self.metric_name = validator.check_value_type("metric_name", metric_name, [str]) decrease_list = ["none", "mean", "sum", "mean_batch", "sum_batch", "mean_channel", "sum_channel"] decrease = validator.check_value_type("decrease", decrease, [str]) self.decrease = validator.check_string(decrease, decrease_list, "decrease") self.clear() def clear(self): """Clears the internal evaluation result.""" self._total_num = 0 self._class_num = 0 self._total_tp = 0.0 self._total_fp = 0.0 self._total_tn = 0.0 self._total_fn = 0.0 def update(self, *inputs): """ Update state with predictions and targets. inputs: Input `y_pred` and `y`. `y_pred` and `y` are a `Tensor`, a list or an array. - **y_pred** (ndarray) - Input data to compute. It must be one-hot format and first dim is batch. The shape of `y_pred` is :math:`(N, C, ...)` or :math:`(N, ...)`. As for classification tasks, `y_pred` should has the shape [BN] where N is larger than 1. As for segmentation tasks, the shape should be [BNHW] or [BNHWD]. - **y** (ndarray) - Compute the true value of the measure. It must be one-hot format and first dim is batch. The shape of `y` is :math:`(N, C, ...)`. Raises: ValueError: If the number of the inputs is not 2. """ if len(inputs) != 2: raise ValueError('ConfusionMatrixMetric need 2 inputs (y_pred, y), but got {}.'.format(len(inputs))) y_pred = self._convert_data(inputs[0]) y = self._convert_data(inputs[1]) if self.calculation_method is True: score, not_nans = self.confusion_matrix(y_pred, y) not_nans = int(not_nans.item()) self._total_num += score.item() * not_nans self._class_num += not_nans else: confusion_matrix = self.confusion_matrix(y_pred, y) confusion_matrix, _ = _decrease_metric(confusion_matrix, "sum") self._total_tp += confusion_matrix[0].item() self._total_fp += confusion_matrix[1].item() self._total_tn += confusion_matrix[2].item() self._total_fn += confusion_matrix[3].item() def eval(self): """ Computes confusion matrix metric. Returns: ndarray, the computed result. """ if self.calculation_method is True: if self._class_num == 0: raise RuntimeError("ConfusionMatrixMetric must have at least one example before it can be computed.") return self._total_num / self._class_num confusion_matrix = np.array([self._total_tp, self._total_fp, self._total_tn, self._total_fn]) return _compute_confusion_matrix_metric(self.metric_name, confusion_matrix) class _ConfusionMatrix: """ Compute confusion matrix related metrics. Args: skip_channel (bool): Whether to skip the measurement calculation on the first channel of the predicted output. Default: True. metric_name (str): The names of indicators are in the following range. 
Of course, you can also set the industry common aliases for these indicators.
        calculation_method (bool): If true, the measurement for each sample is calculated first.
                                   If it is false, the confusion matrix for each image (the output of function
                                   '_get_confusion_matrix') will be returned. In this way, users should accumulate
                                   the confusion matrices for all images during an epoch and then use
                                   '_compute_confusion_matrix_metric' to calculate the metric. Default: False.
        decrease (Union[DecreaseMetric, str]): ["none", "mean", "sum", "mean_batch", "sum_batch", "mean_channel",
                                               "sum_channel"]
                                               Define the mode to reduce the calculation result of one batch of data.
                                               Decrease is used only if calculation_method is True. Default: "mean".
    """

    def __init__(self, skip_channel=True, metric_name="hit_rate", calculation_method=False, decrease="mean"):
        super().__init__()
        self.skip_channel = skip_channel
        self.metric_name = metric_name
        self.calculation_method = calculation_method
        self.decrease = decrease

    def __call__(self, y_pred, y):
        """
        'y_pred' is expected to hold binarized predictions and 'y' should be in one-hot format.

        Args:
            - **y_pred** (ndarray) - Input data to compute. It must be one-hot format and first dim is batch.
            - **y** (ndarray) - Ground truth to compute the metric. It must be one-hot format and first dim is batch.

        Raises:
            ValueError: If `metric_name` is empty.
            ValueError: If `y_pred` has less than two dimensions.
        """
        if not np.all(y.astype(np.uint8) == y):
            raise ValueError("y should be a binarized ndarray.")

        dims = y_pred.ndim
        if dims < 2:
            raise ValueError("y_pred should have at least two dimensions.")

        if dims == 2 or (dims == 3 and y_pred.shape[-1] == 1):
            if self.calculation_method:
                self.calculation_method = False

        confusion_matrix = _get_confusion_matrix(y_pred=y_pred, y=y, skip_channel=self.skip_channel)

        if self.calculation_method:
            if isinstance(self.metric_name, str):
                confusion_matrix = _compute_confusion_matrix_metric(self.metric_name, confusion_matrix)
                chart, not_nans = _decrease_metric(confusion_matrix, self.decrease)
                return chart, not_nans

            if not self.metric_name:
                raise ValueError("There should be at least one metric name.")

            results = []
            for metric_name in self.metric_name:
                sub_confusion_matrix = _compute_confusion_matrix_metric(metric_name, confusion_matrix)
                chart, not_nans = _decrease_metric(sub_confusion_matrix, self.decrease)
                results.append(chart)
                results.append(not_nans)
            return results

        return confusion_matrix


def _get_confusion_matrix(y_pred, y, skip_channel=True):
    """
    The confusion matrix is calculated. An array of shape [B, C, 4] is returned, where B is the batch size and
    C is the number of classes to be calculated; the last dimension stores [tp, fp, tn, fn] for each channel of
    each sample in the input batch.

    Args:
        y_pred (ndarray): input data to compute. It must be one-hot format and first dim is batch.
            The values should be binarized.
        y (ndarray): ground truth to compute the metric. It must be one-hot format and first dim is batch.
            The values should be binarized.
        skip_channel (bool): whether to skip metric computation on the first channel of the predicted output.
            Default: True.

    Raises:
        ValueError: when `y_pred` and `y` have different shapes.
""" if not skip_channel: y = y[:, 1:] if y.shape[1] > 1 else y y_pred = y_pred[:, 1:] if y_pred.shape[1] > 1 else y_pred y = y.astype(float) y_pred = y_pred.astype(float) validator.check('y_shape', y.shape, 'y_pred_shape', y_pred.shape) batch_size, n_class = y_pred.shape[:2] y_pred = y_pred.reshape(batch_size, n_class, -1) y = y.reshape(batch_size, n_class, -1) tp = ((y_pred + y) == 2).astype(float) tn = ((y_pred + y) == 0).astype(float) tp = tp.sum(axis=2) tn = tn.sum(axis=2) p = y.sum(axis=2) n = y.shape[-1] - p fn = p - tp fp = n - tn return np.stack([tp, fp, tn, fn], axis=-1) def _decrease_mean(not_nans, chart): not_nans = not_nans.sum(axis=1) chart = np.where(not_nans > 0, chart.sum(axis=1) / not_nans, np.zeros(1, dtype=float)) not_nans = (not_nans > 0).astype(float).sum(axis=0) chart = np.where(not_nans > 0, chart.sum(axis=0) / not_nans, np.zeros(1, dtype=float)) return not_nans, chart def _decrease_sum(not_nans, chart): not_nans = not_nans.sum(axis=(0, 1)) chart = np.sum(chart, axis=(0, 1)) return not_nans, chart def _decrease_mean_batch(not_nans, chart): not_nans = not_nans.sum(axis=0) chart = np.where(not_nans > 0, chart.sum(axis=0) / not_nans, np.zeros(1, dtype=float)) return not_nans, chart def _decrease_sum_batch(not_nans, chart): not_nans = not_nans.sum(axis=0) chart = chart.sum(axis=0) return not_nans, chart def _decrease_mean_channel(not_nans, chart): not_nans = not_nans.sum(axis=1) chart = np.where(not_nans > 0, chart.sum(axis=1) / not_nans, np.zeros(1, dtype=float)) return not_nans, chart def _decrease_sum_channel(not_nans, chart): not_nans = not_nans.sum(axis=1) chart = chart.sum(axis=1) return not_nans, chart def _decrease_none(not_nans, chart): return not_nans, chart def _decrease_metric(chart, decrease="mean"): """ This function is used to reduce the calculated metrics for each class of each example. Args: chart (ndarray): A data table containing the calculated measurement scores for each batch and class. The first two dims should be batch and class. decrease (str): Define the mode to reduce computation result of 1 batch data. Decrease will only be employed when 'calculation_method' is True. Default: "mean". 
""" nans = np.isnan(chart) not_nans = (~nans).astype(float) chart[nans] = 0 decrease_dict = {"mean": _decrease_mean(not_nans, chart), "sum": _decrease_sum(not_nans, chart), "mean_batch": _decrease_mean_batch, "sum_batch": _decrease_sum_batch(not_nans, chart), "mean_channel": _decrease_mean_channel(not_nans, chart), "sum_channel": _decrease_sum_channel(not_nans, chart), "none": _decrease_none(not_nans, chart)} not_nans, chart = decrease_dict.get(decrease) return chart, not_nans def _calculate_tpr(tp, p): """Calculate tpr.""" return tp, p def _calculate_tnr(tn, n): """Calculate tnr.""" return tn, n def _calculate_ppv(tp, fp): """Calculate ppv.""" return tp, (tp + fp) def _calculate_npv(tn, fn): """Calculate npv.""" return tn, (tn + fn) def _calculate_fnr(fn, p): """Calculate fnr.""" return fn, p def _calculate_fpr(fp, n): """Calculate fpr.""" return fp, n def _calculate_fdr(tp, fp): """Calculate fdr.""" return fp, (fp + tp) def _calculate_for(tn, fn): """Calculate for.""" return fn, (fn + tn) def _calculate_pt(tp, tn, p, n): """Calculate pt.""" tpr = np.where(p > 0, tp / p, np.array(float("nan"))) tnr = np.where(n > 0, tn / n, np.array(float("nan"))) numerator = np.sqrt(tpr * (1.0 - tnr)) + tnr - 1.0 denominator = tpr + tnr - 1.0 return numerator, denominator def _calculate_ts(tp, fp, fn): """Calculate ts.""" return tp, (tp + fn + fp) def _calculate_acc(tp, tn, p, n): """Calculate acc.""" return (tp + tn), (p + n) def _calculate_ba(tp, tn, p, n): """Calculate ba.""" tpr = np.where(p > 0, tp / p, np.array(float("nan"))) tnr = np.where(n > 0, tn / n, np.array(float("nan"))) numerator, denominator = (tpr + tnr), 2.0 return numerator, denominator def _calculate_f1(tp, fp, fn): """Calculate f1.""" return tp * 2.0, (tp * 2.0 + fn + fp) def _calculate_mcc(tp, fp, tn, fn): """Calculate mcc.""" numerator = tp * tn - fp * fn denominator = np.sqrt((tp + fp) * (tp + fn) * (tn + fp) * (tn + fn)) return numerator, denominator def _calculate_fm(tp, fp, p): """Calculate fm.""" tpr = np.where(p > 0, tp / p, np.array(float("nan"))) ppv = np.where((tp + fp) > 0, tp / (tp + fp), np.array(float("nan"))) numerator = np.sqrt(ppv * tpr) denominator = 1.0 return numerator, denominator def _calculate_bm(tp, tn, p, n): """Calculate bm.""" tpr = np.where(p > 0, tp / p, np.array(float("nan"))) tnr = np.where(n > 0, tn / n, np.array(float("nan"))) numerator = tpr + tnr - 1.0 denominator = 1.0 return numerator, denominator def _calculate_mk(tp, fp, tn, fn): """Calculate mk.""" ppv = np.where((tp + fp) > 0, tp / (tp + fp), np.array(float("nan"))) npv = np.where((tn + fn) > 0, tn / (tn + fn), np.array(float("nan"))) npv = tn / (tn + fn) numerator = ppv + npv - 1.0 denominator = 1.0 return numerator, denominator def _compute_confusion_matrix_metric(metric_name, confusion_matrix): """ This function is used to compute confusion matrix related metric. Args: metric_name (str): Refer to conflusionmatrixmetric 'metric_name'. Some of the metrics have multiple aliases (as shown in the wikipedia page aforementioned), and you can also input those names instead. confusion_matrix (ndarray): Refer to '_get_confusion_matrix'. Raises: ValueError: when the size of the last dimension of confusion_matrix is not 4. NotImplementedError: when specify a not implemented metric_name. 
""" metric = _check_metric_name(metric_name) input_dim = confusion_matrix.ndim if input_dim == 1: confusion_matrix = np.expand_dims(confusion_matrix, 0) if confusion_matrix.shape[-1] != 4: raise ValueError("The size of the last dimension of confusion_matrix should be 4.") tp = confusion_matrix[..., 0] fp = confusion_matrix[..., 1] tn = confusion_matrix[..., 2] fn = confusion_matrix[..., 3] p = tp + fn n = fp + tn metric_name_dict = {"tpr": _calculate_tpr(tp, p), "tnr": _calculate_tnr(tn, n), "ppv": _calculate_ppv(tp, fp), "npv": _calculate_npv(tn, fn), "fnr": _calculate_fnr(fn, p), "fpr": _calculate_fpr(fp, n), "fdr": _calculate_fdr(tp, fp), "for": _calculate_for(tn, fn), "pt": _calculate_pt(tp, tn, p, n), "ts": _calculate_ts(tp, fp, fn), "acc": _calculate_acc(tp, tn, p, n), "ba": _calculate_ba(tp, tn, p, n), "f1": _calculate_f1(tp, fp, fn), "mcc": _calculate_mcc(tp, fp, tn, fn), "fm": _calculate_fm(tp, fp, p), "bm": _calculate_bm(tp, tn, p, n), "mk": _calculate_mk(tp, fp, tn, fn) } numerator, denominator = metric_name_dict.get(metric) if isinstance(denominator, np.ndarray): result = np.where(denominator != 0, numerator / denominator, np.array(float("nan"))) else: result = numerator / denominator return result def _check_metric_name(metric_name): """ There are many metrics related to confusion matrix, and some of the metrics have more than one names. In addition, some of the names are very long. Therefore, this function is used to check and simplify the name. Returns: Simplified metric name. Raises: NotImplementedError: when the metric is not implemented. """ metric_name = metric_name.replace(" ", "_") metric_name = metric_name.lower() metric_name_dict = {"sensitivity": "tpr", "recall": "tpr", "hit_rate": "tpr", "true_positive_rate": "tpr", "tpr": "tpr", "specificity": "tnr", "selectivity": "tnr", "true_negative_rate": "tnr", "tnr": "tnr", "precision": "ppv", "positive_predictive_value": "ppv", "ppv": "ppv", "negative_predictive_value": "npv", "npv": "npv", "miss_rate": "fnr", "false_negative_rate": "fnr", "fnr": "fnr", "fall_out": "fpr", "false_positive_rate": "fpr", "fpr": "fpr", "false_discovery_rate": "fdr", "fdr": "fdr", "false_omission_rate": "for", "for": "for", "prevalence_threshold": "pt", "pt": "pt", "threat_score": "ts", "critical_success_index": "ts", "ts": "ts", "csi": "ts", "accuracy": "acc", "acc": "acc", "balanced_accuracy": "ba", "ba": "ba", "f1_score": "f1", "f1": "f1", "matthews_correlation_coefficient": "mcc", "mcc": "mcc", "fowlkes_mallows_index": "fm", "fm": "fm", "informedness": "bm", "bookmaker_informedness": "bm", "bm": "bm", "markedness": "mk", "deltap": "mk", "mk": "mk" } metric_name_info = metric_name_dict.get(metric_name) if metric_name_info is None: raise NotImplementedError("The metric is not implemented.") return metric_name_info
[ [ [ 721, 732 ], [ 3264, 3266 ], [ 4386, 4388 ], [ 4467, 4469 ], [ 4479, 4481 ], [ 4491, 4493 ], [ 4668, 4670 ], [ 5791, 5793 ], [ 5839, 5841 ], [ 11941, 11943 ], [ 14363, 14365 ], [ 14379, 14381 ], [ 17175, 17177 ], [ 17298, 17300 ], [ 17351, 17353 ], [ 17446, 17448 ], [ 17499, 17501 ], [ 17644, 17646 ], [ 17792, 17794 ], [ 17845, 17847 ], [ 18132, 18134 ], [ 18185, 18187 ], [ 18991, 18993 ], [ 20283, 20285 ], [ 20307, 20309 ], [ 20341, 20343 ], [ 20365, 20367 ], [ 20405, 20407 ], [ 20760, 20762 ], [ 20784, 20786 ], [ 20818, 20820 ], [ 20842, 20844 ], [ 21161, 21163 ], [ 21317, 21319 ], [ 21341, 21343 ], [ 21375, 21377 ], [ 21415, 21417 ], [ 21455, 21457 ], [ 21600, 21602 ], [ 21624, 21626 ], [ 21658, 21660 ], [ 21682, 21684 ], [ 21866, 21868 ], [ 21906, 21908 ], [ 21940, 21942 ], [ 21980, 21982 ], [ 22900, 22902 ], [ 24321, 24323 ], [ 24351, 24353 ], [ 24403, 24405 ] ], [ [ 767, 789 ], [ 2606, 2615 ], [ 3076, 3085 ], [ 9010, 9019 ], [ 9109, 9118 ], [ 9213, 9222 ], [ 9400, 9409 ], [ 9480, 9489 ], [ 16746, 16755 ] ], [ [ 810, 816 ], [ 841, 847 ], [ 5914, 5920 ] ], [ [ 825, 840 ], [ 2544, 2559 ], [ 2692, 2707 ], [ 2732, 2747 ], [ 2794, 2809 ], [ 2835, 2850 ], [ 5464, 5479 ], [ 5529, 5544 ], [ 5596, 5611 ], [ 5659, 5674 ] ], [ [ 5892, 5913 ], [ 8733, 8754 ] ], [ [ 12108, 12124 ], [ 8806, 8822 ] ], [ [ 15681, 15702 ], [ 14766, 14787 ] ], [ [ 17217, 17231 ], [ 19094, 19108 ] ], [ [ 17559, 17572 ], [ 19155, 19168 ] ], [ [ 17705, 17725 ], [ 19222, 19242 ] ], [ [ 17905, 17924 ], [ 19278, 19297 ] ], [ [ 18043, 18065 ], [ 19353, 19375 ] ], [ [ 18245, 18266 ], [ 19430, 19451 ] ], [ [ 18385, 18399 ], [ 19499, 19513 ] ], [ [ 18451, 18467 ], [ 11242, 11258 ], [ 15065, 15081 ], [ 15482, 15498 ] ], [ [ 19616, 19630 ], [ 23269, 23283 ] ], [ [ 19687, 19701 ], [ 23323, 23337 ] ], [ [ 19758, 19772 ], [ 23377, 23391 ] ], [ [ 19838, 19852 ], [ 23432, 23446 ] ], [ [ 19918, 19932 ], [ 23487, 23501 ] ], [ [ 19989, 20003 ], [ 23541, 23555 ] ], [ [ 20060, 20074 ], [ 23595, 23609 ] ], [ [ 20140, 20154 ], [ 23650, 23664 ] ], [ [ 20220, 20233 ], [ 23704, 23717 ] ], [ [ 20519, 20532 ], [ 23763, 23776 ] ], [ [ 20606, 20620 ], [ 23821, 23835 ] ], [ [ 20697, 20710 ], [ 23881, 23894 ] ], [ [ 20953, 20966 ], [ 23940, 23953 ] ], [ [ 21052, 21066 ], [ 23998, 24012 ] ], [ [ 21257, 21270 ], [ 24060, 24073 ] ], [ [ 21537, 21550 ], [ 24116, 24129 ] ], [ [ 21801, 21814 ], [ 24175, 24188 ] ], [ [ 22124, 22156 ], [ 12031, 12063 ], [ 14962, 14994 ], [ 15384, 15416 ] ], [ [ 24502, 24520 ], [ 22779, 22797 ] ] ]
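# --- Illustrative usage sketch (not part of the original module). It assumes
# the functions above are importable from the same module and hand-builds a
# tiny, made-up confusion-matrix vector in the [tp, fp, tn, fn] layout that
# _get_confusion_matrix produces.
import numpy as np

# One example, one class: tp=3, fp=1, tn=5, fn=1, so p = tp + fn = 4.
cm = np.array([3.0, 1.0, 5.0, 1.0])

# "sensitivity" is an alias that _check_metric_name resolves to "tpr".
tpr = _compute_confusion_matrix_metric("sensitivity", cm)
print(tpr)  # [0.75], because tp / p = 3 / 4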
import numpy as np
from typing import Union, List, Callable
import logging

from multiBatelo.score_functions import create_exponential_score_function

DEFAULT_K_VALUE = 32
DEFAULT_D_VALUE = 400
DEFAULT_SCORING_FUNCTION_BASE = 1

_default_logger = logging.getLogger("multielo.multielo")


class MultiElo:
    """
    Generalized Elo for multiplayer matchups (also simplifies to standard Elo
    for 1-vs-1 matchups). Ties are supported by passing tied places in the
    result_order argument of get_new_ratings.
    """

    def __init__(
            self,
            k_value: float = DEFAULT_K_VALUE,
            d_value: float = DEFAULT_D_VALUE,
            score_function_base: float = DEFAULT_SCORING_FUNCTION_BASE,
            custom_score_function: Callable = None,
            log_base: int = 10,
            logger: logging.Logger = None,
    ):
        """
        :param k_value: K parameter in Elo algorithm that determines how much
        ratings increase or decrease after each match
        :param d_value: D parameter in Elo algorithm that determines how much
        Elo difference affects win probability
        :param score_function_base: base value to use for scoring function;
        scores are approximately multiplied by this value as you improve from
        one place to the next (minimum allowed value is 1, which results in a
        linear scoring function)
        :param custom_score_function: a function that takes an integer input
        and returns a numpy array of monotonically decreasing values summing
        to 1
        :param log_base: base to use for logarithms throughout the Elo
        algorithm. Traditionally Elo uses base-10 logs
        :param logger: logger to use (optional)
        """
        self.k = k_value
        self.d = d_value
        self._score_func = custom_score_function or create_exponential_score_function(base=score_function_base)
        self._log_base = log_base
        self.logger = logger or _default_logger

    def get_new_ratings(
            self,
            initial_ratings: Union[List[float], np.ndarray],
            result_order: List[int] = None,
    ) -> np.ndarray:
        """
        Update ratings based on results. Takes an array of ratings before the
        matchup and returns an array with the updated ratings. Provided array
        should be ordered by the actual results (first place finisher's
        initial rating first, second place next, and so on).

        Example usage:
        >>> elo = MultiElo()
        >>> elo.get_new_ratings([1200, 1000])
        array([1207.68809835, 992.31190165])
        >>> elo.get_new_ratings([1200, 1000, 1100, 900])
        array([1212.01868209, 1012.15595083, 1087.84404917, 887.98131791])

        :param initial_ratings: array of ratings (float values) in order of
        actual results
        :param result_order: list where each value indicates the place the
        player in the same index of initial_ratings finished in. Lower is
        better. Identify ties by entering the same value for players that
        tied. For example, [1, 2, 3] indicates that the first listed player
        won, the second listed player finished 2nd, and the third listed
        player finished 3rd. [1, 2, 2] would indicate that the second and
        third players tied for 2nd place.
        (default = range(len(initial_ratings)))

        :return: array of updated ratings (float values) in same order as input
        """
        if not isinstance(initial_ratings, np.ndarray):
            initial_ratings = np.array(initial_ratings)

        n = len(initial_ratings)  # number of players
        actual_scores = self.get_actual_scores(n, result_order)
        expected_scores = self.get_expected_scores(initial_ratings)
        scale_factor = self.k * (n - 1)
        #print(f"scale factor: {scale_factor}")
        return initial_ratings + scale_factor * (actual_scores - expected_scores)

    def get_actual_scores(self, n: int, result_order: List[int] = None) -> np.ndarray:
        """
        Return the scores to be awarded to the players based on the results.
        :param n: number of players in the matchup
        :param result_order: list indicating order of finish (see docstring
        for MultiElo.get_new_ratings for more details)

        :return: array of length n of scores to be assigned to first place,
        second place, and so on
        """
        # calculate actual scores according to score function, then sort in order of finish
        result_order = result_order or list(range(n))
        scores = self._score_func(n)
        scores = scores[np.argsort(np.argsort(result_order))]

        # if there are ties, average the scores of all tied players
        distinct_results = set(result_order)
        if len(distinct_results) != n:
            for place in distinct_results:
                idx = [i for i, x in enumerate(result_order) if x == place]
                scores[idx] = scores[idx].mean()

        self._validate_actual_scores(scores, result_order)
        # print(f"calculated actual scores: {scores}")
        return scores

    @staticmethod
    def _validate_actual_scores(scores: np.ndarray, result_order: List[int]):
        if not np.allclose(1, sum(scores)):
            raise ValueError("scoring function does not return scores summing to 1")
        if min(scores) != 0:
            # tie for last place means minimum score doesn't have to be zero,
            # so only raise error if there isn't a tie for last place
            last_place = max(result_order)
            if result_order.count(last_place) == 1:
                raise ValueError("scoring function does not return minimum value of 0")
        if not np.all(np.diff(scores[np.argsort(result_order)]) <= 0):
            raise ValueError("scoring function does not return monotonically decreasing values")

    def get_expected_scores(self, ratings: Union[List[float], np.ndarray]) -> np.ndarray:
        """
        Get the expected scores for all players given their ratings before the
        matchup.

        :param ratings: array of ratings for each player in a matchup

        :return: array of expected scores for all players
        """
        #print(f"computing expected scores for {ratings}")
        if not isinstance(ratings, np.ndarray):
            ratings = np.array(ratings)
        if ratings.ndim > 1:
            raise ValueError(f"ratings should be 1-dimensional array (received {ratings.ndim})")

        # get all pairwise differences
        diff_mx = ratings - ratings[:, np.newaxis]
        #print(f"diff_mx = \n{diff_mx}")

        # get individual contributions to expected score using logistic function
        logistic_mx = 1 / (1 + self._log_base ** (diff_mx / self.d))
        np.fill_diagonal(logistic_mx, 0)
        #print(f"logistic_mx = \n{logistic_mx}")

        # get each expected score (sum individual contributions, then scale)
        expected_scores = logistic_mx.sum(axis=1)
        n = len(ratings)
        denom = n * (n - 1) / 2  # number of individual head-to-head matchups between n players
        expected_scores = expected_scores / denom

        # this should be guaranteed, but check to make sure
        if not np.allclose(1, sum(expected_scores)):
            raise ValueError("expected scores do not sum to 1")

        #print(f"calculated expected scores: {expected_scores}")
        return expected_scores

    def simulate_win_probabilities(
            self,
            ratings: Union[List[float], np.ndarray],
            n_sim: int = int(1e5),
            seed: int = None,
    ) -> np.ndarray:
        """
        Estimate the probability of each player finishing in each possible
        place using a simulation. Returns a matrix where (i, j) values are the
        probability that player i finishes in place j.

        To simulate a game including players in the ratings array, we generate
        a score for each player using a Gumbel distribution. If a player has
        rating R, then that player's score is sampled from a Gumbel(R, D)
        distribution, where D is the Elo D parameter. Then we rank the players
        in descending order of their scores to determine first place, second
        place, ..., last place.
        We count the number of times each player finishes in each place and
        then divide by the number of simulations to calculate the proportions.

        We generate scores using Gumbel distributions because of the property:

        ~~ Gumbel(a_1, b) - Gumbel(a_2, b) ~ Logistic(a_1 - a_2, b) ~~

        The Logistic(a_1 - a_2, b) distribution is the same distribution that
        describes the pairwise win probability if two players have Elo ratings
        a_1 and a_2. In other words, a score sampled from Gumbel(a_1, b) will
        be greater than a score sampled from Gumbel(a_2, b) with the same
        probability that a player with Elo rating a_1 will beat a player with
        Elo rating a_2 in a 1-on-1 matchup.

        :param ratings: array of ratings of the players involved
        :param n_sim: number of simulations to run
        :param seed: (optional) seed for random number generation

        :return: matrix (a numpy array) where (i, j) values are the
        probability that player i finishes in place j
        """
        if seed is not None:
            np.random.seed(seed)

        # sort so we always get the same result for same distinct ratings, but
        # keep track of original order
        idx = np.argsort(ratings)
        ratings = sorted(ratings)

        # simulate n_sim scores for each player from Gumbel distributions
        n_players = len(ratings)
        n_sim = int(n_sim)
        scores = np.zeros((n_players, n_sim))
        #print(f"simulating {n_sim:,} scores for each player")
        for i, rating in enumerate(ratings):
            scores[idx[i], :] = _gumbel_sample(
                loc=rating,
                scale=self.d,
                size=int(n_sim),
                base=self._log_base
            )
            #print(f"finished sampling {n_sim:,} scores for player {i+1} of {n_players}")

        # use the scores to decide the order of finish (highest score wins) and
        # create matrix with proportion of times each player finishes in each place
        result_mx = self._convert_scores_to_result_proportions(scores)
        #print(f"finished simulation")
        return result_mx

    @staticmethod
    def _convert_scores_to_result_proportions(scores: np.ndarray) -> np.ndarray:
        """
        Take an array of scores with one row per player and one column per
        simulation, and return a matrix with one row per player and one column
        per place. Each (row, col) value in the returned matrix is the
        proportion of times player "row" finished in place "col".
        """
        # sort scores from high to low for each simulation
        results = np.argsort(-scores, axis=0)

        # put it into a matrix where row = player, column = place, value = count
        # of times player finished in place
        n = scores.shape[0]
        count_mx = np.zeros((n, n))
        for i, x in enumerate(results):
            counts = np.bincount(x, minlength=n)
            count_mx[:, i] = counts
        proportion_mx = count_mx / scores.shape[1]
        return proportion_mx


def _gumbel_sample(
        loc: float,
        scale: float,
        size: int = 1,
        base: float = np.exp(1),
) -> np.ndarray:
    """
    Sample from a Gumbel distribution (optionally with a different log base).

    :param loc: location parameter for distribution
    :param scale: scale parameter for distribution (> 0)
    :param size: number of samples to draw
    :param base: base for logarithm (defaults to natural log)

    :return: sample(s) from Gumbel distribution
    """
    if scale <= 0:
        raise ValueError("scale parameter for Gumbel distribution must be > 0")
    p = np.random.rand(int(size))
    return loc - scale * _log(-_log(p, base=base), base=base)


def _log(x, base=np.exp(1)):
    return np.log(x) / np.log(base)
[ [ [ 7, 18 ], [ 11281, 11283 ], [ 11881, 11883 ], [ 2026, 2028 ], [ 1960, 1962 ], [ 3347, 3349 ], [ 3390, 3392 ], [ 3848, 3850 ], [ 4450, 4452 ], [ 4461, 4463 ], [ 5004, 5006 ], [ 5057, 5059 ], [ 5546, 5548 ], [ 5553, 5555 ], [ 5568, 5570 ], [ 5778, 5780 ], [ 5762, 5764 ], [ 6125, 6127 ], [ 6160, 6162 ], [ 6383, 6385 ], [ 6594, 6596 ], [ 7051, 7053 ], [ 7431, 7433 ], [ 7344, 7346 ], [ 9185, 9187 ], [ 9339, 9341 ], [ 9545, 9547 ], [ 10349, 10351 ], [ 10334, 10336 ], [ 10748, 10750 ], [ 10949, 10951 ], [ 11027, 11029 ], [ 11297, 11299 ], [ 11774, 11776 ], [ 11904, 11906 ], [ 11916, 11918 ] ], [ [ 38, 43 ], [ 1941, 1946 ], [ 5743, 5748 ], [ 7325, 7330 ] ], [ [ 45, 49 ], [ 1947, 1951 ], [ 1999, 2003 ], [ 3827, 3831 ], [ 5030, 5034 ], [ 5749, 5753 ], [ 7331, 7335 ] ], [ [ 51, 59 ], [ 662, 670 ] ], [ [ 67, 74 ], [ 248, 255 ], [ 723, 730 ] ], [ [ 116, 149 ], [ 1726, 1759 ] ], [ [ 152, 167 ], [ 504, 519 ] ], [ [ 173, 188 ], [ 546, 561 ] ], [ [ 195, 224 ], [ 600, 629 ] ], [ [ 230, 245 ], [ 1852, 1867 ] ], [ [ 295, 303 ] ], [ [ 11178, 11192 ], [ 9714, 9728 ] ], [ [ 11868, 11872 ], [ 11825, 11829 ], [ 11831, 11835 ] ] ]
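# --- Illustrative usage sketch (not part of the original module), assuming
# the MultiElo class above is importable. Three players play one match: one
# player wins and the remaining two tie for 2nd place, which get_actual_scores
# handles by averaging the tied players' scores.
elo = MultiElo()  # defaults: K=32, D=400, linear scoring function
new_ratings = elo.get_new_ratings([1200, 1100, 1000], result_order=[1, 2, 2])
print(new_ratings)  # the winner gains rating; exact values depend on K and D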
# ===============================================================================
# NAME: InstanceTopologyHTMLVisitor.py
#
# DESCRIPTION: A visitor responsible for the generation of HTML tables
#              of event ID's, etc.
#
# AUTHOR: reder
# EMAIL: reder@jpl.nasa.gov
# DATE CREATED: Sep. 13, 2016
#
# Copyright 2016, California Institute of Technology.
# ALL RIGHTS RESERVED. U.S. Government Sponsorship acknowledged.
# ===============================================================================
#
# Python standard modules
#
import logging
import os
import sys

from fprime_ac.generators import formatters

# from fprime_ac.utils import DiffAndRename
from fprime_ac.generators.visitors import AbstractVisitor
from fprime_ac.models import ModelParser

#
# Python extension modules and custom interfaces
#
# from Cheetah import Template
# from fprime_ac.utils import version
from fprime_ac.utils import ConfigManager

#
# Import precompiled templates here
#
try:
    from fprime_ac.generators.templates.html import HtmlEventsTablePage
except ImportError:
    print("ERROR: must generate python templates first.")
    sys.exit(-1)
#
# Universal globals used within module go here.
# (DO NOT USE MANY!)
#
# Global logger init. below.
PRINT = logging.getLogger("output")
DEBUG = logging.getLogger("debug")
#
# Module class or classes go here.


class InstanceTopologyEventsHTMLVisitor(AbstractVisitor.AbstractVisitor):
    """
    A visitor class responsible for the generation of HTML event tables
    for component instances.
    """

    __instance = None
    __config = None
    __fp_dict = None
    __form = None
    __form_comment = None
    __model_parser = None

    def __init__(self):
        """
        Constructor.
        """
        super().__init__()
        # self.initBase(self, "HTMLCmdTable")
        self.__config = ConfigManager.ConfigManager.getInstance()
        self.__form = formatters.Formatters()
        self.__form_comment = formatters.CommentFormatters()
        self.__model_parser = ModelParser.ModelParser.getInstance()
        self.__cmd_dir = "events"
        DEBUG.info("InstanceTopologyHTMLVisitor: Instanced.")
        self.bodytext = ""
        self.prototypetext = ""
        self.__fp_dict = (
            dict()
        )  # dictionary of instance name keyword to file handle pointer

    def _writeTmpl(self, instance, c, visit_str):
        """
        Wrapper to write tmpl to the file descriptor.
        """
        DEBUG.debug("InstanceTopologyHTMLVisitor:%s" % visit_str)
        DEBUG.debug("===================================")
        DEBUG.debug(c)
        self.__fp_dict[instance].writelines(c.__str__())
        DEBUG.debug("===================================")

    def initFilesVisit(self, obj):
        """
        Defined to generate files for generated code products.
        @parms obj: the instance of the model to visit.
        """
        # Check for the command dir here; if it does not exist, create it, then always switch into it
        if not os.path.exists(self.__cmd_dir):
            os.mkdir(self.__cmd_dir)
        os.chdir(self.__cmd_dir)

        # Iterate over types
        for k in list(obj.get_base_id_dict().keys()):
            tlist = obj.get_base_id_dict()[k]
            # print "Type: %s\n" % k,
            # Iterate over instances and get name
            # Open file if events exist; if not, do nothing
            for t in tlist:
                # print "\tInstance: %s, Base ID: %s\n" % (t[0],t[1])
                name = t[0]
                events_list = t[3].get_comp_xml().get_events()
                if len(events_list) > 0:
                    filename = "%s_events.html" % t[0]
                    # Open file for writing here...
                    DEBUG.info("Open file: %s" % filename)
                    try:
                        self.__fp_dict[name] = open(filename, "w")
                        DEBUG.info("Completed")
                    except OSError:
                        PRINT.info("Could not open %s file." % filename)
                        sys.exit(-1)
                    DEBUG.info(
                        "Generating HTML Event Table for %s:%s component instance..."
                        % (t[0], k)
                    )

        os.chdir("..")

    def startSourceFilesVisit(self, obj):
        """
        Defined to generate starting static code within files.
""" def includes1Visit(self, obj): """ Defined to generate includes within a file. Usually used for the base classes but also for Port types @parms args: the instance of the concrete element to operation on. """ def includes2Visit(self, obj): """ Defined to generate internal includes within a file. Usually used for data type includes and system includes. @parms args: the instance of the concrete element to operation on. """ def namespaceVisit(self, obj): """ Defined to generate namespace code within a file. Also any pre-condition code is generated. @parms args: the instance of the concrete element to operation on. """ def eventArgsStr(self): """ Make a list of event args into a string """ def f(args): def g(lst): name = lst[0] return name return self.argsString(list(map(g, args))) return f def publicVisit(self, obj): """ Defined to generate public stuff within a class. @parms args: the instance of the concrete element to operation on. """ # os.chdir(self.__cmd_dir) c = HtmlEventsTablePage.HtmlEventsTablePage() for k in list(obj.get_base_id_dict().keys()): tlist = obj.get_base_id_dict()[k] # print "Type: %s\n" % k, for t in tlist: if t[0] in list(self.__fp_dict.keys()): # print "\tInstance: %s, Base ID: %s\n" % (t[0],t[1]) eobj = t[3].get_comp_xml() c.name = "{}:{}".format(t[0], k) c.base_id = t[1] c.has_events = len(eobj.get_events()) > 0 c.events = self.__model_parser.getEventsList(eobj) c.event_enums = self.__model_parser.getEventEnumList(eobj) c.event_args = self.__model_parser.getEventArgsDict(eobj) c.event_params = c.event_args c.event_args_str = self.eventArgsStr() c.event_param_strs = self.__model_parser.getEventArgsPrototypeStringDict( eobj ) self._writeTmpl(t[0], c, "InstanceTopologyEventsHTML_Visitor") def protectedVisit(self, obj): """ Defined to generate protected stuff within a class. @parms args: the instance of the concrete element to operation on. """ def privateVisit(self, obj): """ Defined to generate private stuff within a class. @parms args: the instance of the concrete element to operation on. """ def finishSourceFilesVisit(self, obj): """ Defined to generate ending static code within files. """ for fp in list(self.__fp_dict.keys()): self.__fp_dict[fp].close() PRINT.info("Completed generating HTML event tables...")
[ [ [ 548, 555 ], [ 1254, 1261 ], [ 1290, 1297 ] ], [ [ 563, 565 ], [ 2984, 2986 ], [ 3028, 3030 ], [ 3061, 3063 ], [ 4227, 4229 ] ], [ [ 573, 576 ], [ 1131, 1134 ], [ 4030, 4033 ] ], [ [ 611, 621 ], [ 1896, 1906 ], [ 1950, 1960 ] ], [ [ 709, 724 ], [ 1394, 1409 ] ], [ [ 754, 765 ], [ 2011, 2022 ] ], [ [ 917, 930 ], [ 1832, 1845 ] ], [ [ 1029, 1048 ], [ 5644, 5663 ] ], [ [ 1246, 1251 ], [ 3957, 3962 ], [ 7355, 7360 ] ], [ [ 1282, 1287 ], [ 2091, 2096 ], [ 2450, 2455 ], [ 2516, 2521 ], [ 2575, 2580 ], [ 2655, 2660 ], [ 3718, 3723 ], [ 3873, 3878 ], [ 4063, 4068 ] ], [ [ 1360, 1393 ] ] ]
# Copyright 2014 by Saket Choudhary. Based on test_Clustalw_tool.py by Peter # Cock . # # This code is part of the Biopython distribution and governed by its # license. Please see the LICENSE file that should have been included # as part of this package. # Last Checked with samtools [0.1.18 (r982:295)] from Bio import MissingExternalDependencyError import sys import os import unittest from Bio.Sequencing.Applications import SamtoolsViewCommandline from Bio.Sequencing.Applications import SamtoolsCalmdCommandline from Bio.Sequencing.Applications import SamtoolsCatCommandline from Bio.Sequencing.Applications import SamtoolsFaidxCommandline from Bio.Sequencing.Applications import SamtoolsIdxstatsCommandline from Bio.Sequencing.Applications import SamtoolsIndexCommandline from Bio.Sequencing.Applications import SamtoolsMergeCommandline from Bio.Sequencing.Applications import SamtoolsMpileupCommandline from Bio.Sequencing.Applications import SamtoolsSortCommandline # TODO from Bio.Sequencing.Applications import SamtoolsPhaseCommandline # TODO from Bio.Sequencing.Applications import SamtoolsReheaderCommandline # TODO from Bio.Sequencing.Applications import SamtoolsRmdupCommandline # TODO from Bio.Sequencing.Applications import SamtoolsTargetcutCommandline # TODO from Bio.Sequencing.Applications import SamtoolsFixmateCommandline ################################################################# # Try to avoid problems when the OS is in another language os.environ['LANG'] = 'C' samtools_exe = None if sys.platform == "win32": # TODO - Check the path? try: # This can vary depending on the Windows language. prog_files = os.environ["PROGRAMFILES"] except KeyError: prog_files = r"C:\Program Files" # By default tries C:\Program Files\samtools\samtools.exe # or C:\Program Files\samtools.exe was chosen likely_dirs = ["samtools", ""] likely_exes = ["samtools.exe"] for folder in likely_dirs: if os.path.isdir(os.path.join(prog_files, folder)): for filename in likely_exes: if os.path.isfile(os.path.join(prog_files, folder, filename)): samtools_exe = os.path.join(prog_files, folder, filename) break if samtools_exe: break else: from Bio._py3k import getoutput output = getoutput("samtools") # Since "not found" may be in another language, try and be sure this is # really the samtools tool's output if ("not found" not in output and "samtools (Tools for alignments in the SAM format)" in output): samtools_exe = "samtools" if not samtools_exe: raise MissingExternalDependencyError( """Install samtools and correctly set the file path to the program if you want to use it from Biopython""") class SamtoolsTestCase(unittest.TestCase): """Class for implementing Samtools test cases.""" def setUp(self): self.files_to_clean = set() self.samfile1 = os.path.join(os.path.dirname(os.path.abspath(__file__)), "SamBam", "sam1.sam") self.reference = os.path.join(os.path.dirname(os.path.abspath(__file__)), "BWA", "human_g1k_v37_truncated.fasta") self.referenceindexfile = os.path.join(os.path.dirname(os.path.abspath(__file__)), "BWA", "human_g1k_v37_truncated.fasta.fai") self.samfile2 = os.path.join(os.path.dirname(os.path.abspath(__file__)), "SamBam", "sam2.sam") self.bamfile1 = os.path.join(os.path.dirname(os.path.abspath(__file__)), "SamBam", "bam1.bam") self.bamfile2 = os.path.join(os.path.dirname(os.path.abspath(__file__)), "SamBam", "bam2.bam") self.outsamfile = os.path.join(os.path.dirname(os.path.abspath(__file__)), "SamBam", "out.sam") self.outbamfile = os.path.join(os.path.dirname(os.path.abspath(__file__)), "SamBam", "out.bam") 
self.bamindexfile1 = os.path.join(os.path.dirname(os.path.abspath(__file__)), "SamBam", "bam1.bam.bai") self.sortedbamfile1 = os.path.join(os.path.dirname(os.path.abspath(__file__)), "SamBam", "bam1_sorted.bam") self.sortedbamfile2 = os.path.join(os.path.dirname(os.path.abspath(__file__)), "SamBam", "bam2_sorted.bam") self.files_to_clean = [self.referenceindexfile, self.bamindexfile1, self.outbamfile] def tearDown(self): for filename in self.files_to_clean: if os.path.isfile(filename): os.remove(filename) def test_view(self): """Test for samtools view""" cmdline = SamtoolsViewCommandline(samtools_exe) cmdline.set_parameter("input_file", self.bamfile1) stdout_bam, stderr_bam = cmdline() self.assertTrue(stderr_bam.startswith(""), "SAM file viewing failed: \n%s\nStdout:%s" % (cmdline, stdout_bam)) cmdline.set_parameter("input_file", self.samfile1) cmdline.set_parameter("S", True) stdout_sam, stderr_sam = cmdline() self.assertTrue( stdout_sam.startswith("HWI-1KL120:88:D0LRBACXX:1:1101:1780:2146"), "SAM file viewing failed:\n%s\nStderr:%s" % (cmdline, stderr_sam)) def create_fasta_index(self): """Creates index for reference fasta sequence.""" cmdline = SamtoolsFaidxCommandline(samtools_exe) cmdline.set_parameter("reference", self.reference) stdout, stderr = cmdline() def create_bam_index(self, input_bam): """Creates index of an input bam file""" cmdline = SamtoolsIndexCommandline(samtools_exe) cmdline.set_parameter("input_bam", input_bam) stdout, stderr = cmdline() def test_faidx(self): cmdline = SamtoolsFaidxCommandline(samtools_exe) cmdline.set_parameter("reference", self.reference) stdout, stderr = cmdline() self.assertFalse(stderr, "Samtools faidx failed:\n%s\nStderr:%s" % (cmdline, stderr)) self.assertTrue(os.path.isfile(self.referenceindexfile)) def test_calmd(self): """Test for samtools calmd""" self.create_fasta_index() cmdline = SamtoolsCalmdCommandline(samtools_exe) cmdline.set_parameter("reference", self.reference) cmdline.set_parameter("input_bam", self.bamfile1) # If there is no index file for the reference # samtools calmd creates one at the time of calling if os.path.exists(self.referenceindexfile): # print("exists") stderr_calmd_expected = "" else: # print("doesnt exist") stderr_calmd_expected = "[fai_load] build FASTA index.\n" stdout, stderr = cmdline() self.assertEqual(stderr, stderr_calmd_expected) def test_cat(self): cmdline = SamtoolsCatCommandline(samtools_exe) cmdline.set_parameter("o", self.outbamfile) cmdline.set_parameter("input_bam", [self.bamfile1, self.bamfile2]) stdout, stderr = cmdline() self.assertEqual(stderr, "") # TODO: def test_fixmate(self): def test_sort(self): cmdline = SamtoolsSortCommandline(samtools_exe) cmdline.set_parameter("input_bam", self.bamfile1) cmdline.set_parameter("out_prefix", "SamBam/out") stdout, stderr = cmdline() self.assertFalse(stderr, "Samtools sort failed:\n%s\nStderr:%s" % (cmdline, stderr)) def test_index(self): cmdline = SamtoolsIndexCommandline(samtools_exe) cmdline.set_parameter("input_bam", self.bamfile1) stdout, stderr = cmdline() self.assertFalse(stderr, "Samtools index failed:\n%s\nStderr:%s" % (cmdline, stderr)) self.assertTrue(os.path.exists(self.bamindexfile1)) def test_idxstats(self): self.create_bam_index(self.bamfile1) cmdline = SamtoolsIdxstatsCommandline(samtools_exe) cmdline.set_parameter("input_bam", self.bamfile1) stdout, stderr = cmdline() self.assertFalse(stderr, "Samtools idxstats failed:\n%s\nStderr:%s" % (cmdline, stderr)) def test_merge(self): cmdline = SamtoolsMergeCommandline(samtools_exe) 
cmdline.set_parameter("input_bam", [self.bamfile1, self.bamfile2]) cmdline.set_parameter("out_bam", self.outbamfile) cmdline.set_parameter("f", True) # Overwrite out.bam if it exists stdout, stderr = cmdline() self.assertFalse(stderr, "Samtools merge failed:\n%s\nStderr:%s" % (cmdline, stderr)) self.assertTrue(os.path.exists(self.outbamfile)) def test_mpileup(self): cmdline = SamtoolsMpileupCommandline(samtools_exe) cmdline.set_parameter("input_file", [self.bamfile1]) stdout, stderr = cmdline() self.assertFalse("[bam_pileup_core]" in stdout) def test_mpileup_list(self): cmdline = SamtoolsMpileupCommandline(samtools_exe) cmdline.set_parameter("input_file", [self.sortedbamfile1, self.sortedbamfile2]) stdout, stderr = cmdline() self.assertFalse("[bam_pileup_core]" in stdout) # TODO: def test_phase(self): # TODO: def test_reheader(self): # TODO: def test_rmdup(self): # TODO: def test_targetcut(self): if __name__ == "__main__": runner = unittest.TextTestRunner(verbosity=2) unittest.main(testRunner=runner)
[ [ [ 324, 354 ], [ 2672, 2702 ] ], [ [ 362, 365 ], [ 1522, 1525 ] ], [ [ 373, 375 ], [ 1473, 1475 ], [ 1665, 1667 ], [ 1979, 1981 ], [ 1993, 1995 ], [ 2088, 2090 ], [ 2103, 2105 ], [ 2183, 2185 ], [ 3009, 3011 ], [ 3022, 3024 ], [ 3038, 3040 ], [ 3187, 3189 ], [ 3200, 3202 ], [ 3216, 3218 ], [ 3394, 3396 ], [ 3407, 3409 ], [ 3423, 3425 ], [ 3613, 3615 ], [ 3626, 3628 ], [ 3642, 3644 ], [ 3790, 3792 ], [ 3803, 3805 ], [ 3819, 3821 ], [ 3967, 3969 ], [ 3980, 3982 ], [ 3996, 3998 ], [ 4146, 4148 ], [ 4159, 4161 ], [ 4175, 4177 ], [ 4328, 4330 ], [ 4341, 4343 ], [ 4357, 4359 ], [ 4513, 4515 ], [ 4526, 4528 ], [ 4542, 4544 ], [ 4710, 4712 ], [ 4723, 4725 ], [ 4739, 4741 ], [ 4910, 4912 ], [ 4923, 4925 ], [ 4939, 4941 ], [ 5258, 5260 ], [ 5300, 5302 ], [ 6877, 6879 ], [ 7317, 7319 ], [ 8675, 8677 ], [ 9581, 9583 ] ], [ [ 383, 391 ], [ 2853, 2861 ], [ 10312, 10320 ], [ 10353, 10361 ] ], [ [ 432, 455 ], [ 5402, 5425 ] ], [ [ 496, 520 ], [ 7035, 7059 ] ], [ [ 561, 583 ], [ 7681, 7703 ] ], [ [ 624, 648 ], [ 6159, 6183 ], [ 6576, 6600 ] ], [ [ 689, 716 ], [ 8804, 8831 ] ], [ [ 757, 781 ], [ 6403, 6427 ], [ 8375, 8399 ] ], [ [ 822, 846 ], [ 9131, 9155 ] ], [ [ 887, 913 ], [ 9661, 9687 ], [ 9906, 9932 ] ], [ [ 954, 977 ], [ 7998, 8021 ] ], [ [ 1499, 1511 ], [ 2267, 2279 ], [ 2648, 2660 ], [ 5426, 5438 ], [ 6184, 6196 ], [ 6428, 6440 ], [ 6601, 6613 ], [ 7060, 7072 ], [ 7704, 7716 ], [ 8022, 8034 ], [ 8400, 8412 ], [ 8832, 8844 ], [ 9156, 9168 ], [ 9688, 9700 ], [ 9933, 9945 ] ], [ [ 1652, 1662 ], [ 2006, 2016 ], [ 2116, 2126 ], [ 2196, 2206 ] ], [ [ 1721, 1731 ], [ 2006, 2016 ], [ 2116, 2126 ], [ 2196, 2206 ] ], [ [ 1871, 1882 ], [ 1955, 1966 ] ], [ [ 1906, 1917 ], [ 2056, 2067 ] ], [ [ 1945, 1951 ], [ 2018, 2024 ], [ 2128, 2134 ], [ 2208, 2214 ] ], [ [ 2044, 2052 ], [ 2136, 2144 ], [ 2216, 2224 ] ], [ [ 2168, 2180 ], [ 2267, 2279 ], [ 2648, 2660 ], [ 5426, 5438 ], [ 6184, 6196 ], [ 6428, 6440 ], [ 6601, 6613 ], [ 7060, 7072 ], [ 7704, 7716 ], [ 8022, 8034 ], [ 8400, 8412 ], [ 8832, 8844 ], [ 9156, 9168 ], [ 9688, 9700 ], [ 9933, 9945 ] ], [ [ 2335, 2344 ], [ 2358, 2367 ] ], [ [ 2349, 2355 ], [ 2524, 2530 ], [ 2597, 2603 ] ], [ [ 2614, 2626 ], [ 2648, 2660 ], [ 5426, 5438 ], [ 6184, 6196 ], [ 6428, 6440 ], [ 6601, 6613 ], [ 7060, 7072 ], [ 7704, 7716 ], [ 8022, 8034 ], [ 8400, 8412 ], [ 8832, 8844 ], [ 9156, 9168 ], [ 9688, 9700 ], [ 9933, 9945 ] ], [ [ 2836, 2852 ] ], [ [ 10303, 10309 ], [ 10378, 10384 ] ] ]
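# --- Illustrative sketch (not part of the original test module). Biopython's
# command-line wrappers can be rendered as strings without executing the
# samtools binary, which shows what each test above actually runs; "example.bam"
# is a hypothetical path.
from Bio.Sequencing.Applications import SamtoolsViewCommandline

cmdline = SamtoolsViewCommandline("samtools")
cmdline.set_parameter("input_file", "example.bam")
print(str(cmdline))  # roughly: samtools view example.bam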
from typing import Any, Dict, List, Optional, Tuple, Type, Union import gym import numpy as np import torch as th from torch.nn import functional as F from stable_baselines3.common.buffers import ReplayBuffer from stable_baselines3.common.noise import ActionNoise from stable_baselines3.common.off_policy_algorithm import OffPolicyAlgorithm from stable_baselines3.common.type_aliases import GymEnv, MaybeCallback, Schedule from stable_baselines3.common.utils import polyak_update from stable_baselines3.sac.policies import SACPolicy class SAC(OffPolicyAlgorithm): """ Soft Actor-Critic (SAC) Off-Policy Maximum Entropy Deep Reinforcement Learning with a Stochastic Actor, This implementation borrows code from original implementation (https://github.com/haarnoja/sac) from OpenAI Spinning Up (https://github.com/openai/spinningup), from the softlearning repo (https://github.com/rail-berkeley/softlearning/) and from Stable Baselines (https://github.com/hill-a/stable-baselines) Paper: https://arxiv.org/abs/1801.01290 Introduction to SAC: https://spinningup.openai.com/en/latest/algorithms/sac.html Note: we use double q target and not value target as discussed in https://github.com/hill-a/stable-baselines/issues/270 :param policy: The policy model to use (MlpPolicy, CnnPolicy, ...) :param env: The environment to learn from (if registered in Gym, can be str) :param learning_rate: learning rate for adam optimizer, the same learning rate will be used for all networks (Q-Values, Actor and Value function) it can be a function of the current progress remaining (from 1 to 0) :param buffer_size: size of the replay buffer :param learning_starts: how many steps of the model to collect transitions for before learning starts :param batch_size: Minibatch size for each gradient update :param tau: the soft update coefficient ("Polyak update", between 0 and 1) :param gamma: the discount factor :param train_freq: Update the model every ``train_freq`` steps. Alternatively pass a tuple of frequency and unit like ``(5, "step")`` or ``(2, "episode")``. :param gradient_steps: How many gradient steps to do after each rollout (see ``train_freq``) Set to ``-1`` means to do as many gradient steps as steps done in the environment during the rollout. :param action_noise: the action noise type (None by default), this can help for hard exploration problem. Cf common.noise for the different action noise type. :param replay_buffer_class: Replay buffer class to use (for instance ``HerReplayBuffer``). If ``None``, it will be automatically selected. :param replay_buffer_kwargs: Keyword arguments to pass to the replay buffer on creation. :param optimize_memory_usage: Enable a memory efficient variant of the replay buffer at a cost of more complexity. See https://github.com/DLR-RM/stable-baselines3/issues/37#issuecomment-637501195 :param ent_coef: Entropy regularization coefficient. (Equivalent to inverse of reward scale in the original SAC paper.) Controlling exploration/exploitation trade-off. Set it to 'auto' to learn it automatically (and 'auto_0.1' for using 0.1 as initial value) :param target_update_interval: update the target network every ``target_network_update_freq`` gradient steps. 
:param target_entropy: target entropy when learning ``ent_coef`` (``ent_coef = 'auto'``) :param use_sde: Whether to use generalized State Dependent Exploration (gSDE) instead of action noise exploration (default: False) :param sde_sample_freq: Sample a new noise matrix every n steps when using gSDE Default: -1 (only sample at the beginning of the rollout) :param use_sde_at_warmup: Whether to use gSDE instead of uniform sampling during the warm up phase (before learning starts) :param create_eval_env: Whether to create a second environment that will be used for evaluating the agent periodically. (Only available when passing string for the environment) :param policy_kwargs: additional arguments to be passed to the policy on creation :param verbose: the verbosity level: 0 no output, 1 info, 2 debug :param seed: Seed for the pseudo random generators :param device: Device (cpu, cuda, ...) on which the code should be run. Setting it to auto, the code will be run on the GPU if possible. :param _init_setup_model: Whether or not to build the network at the creation of the instance """ def __init__( self, policy: Union[str, Type[SACPolicy]], env: Union[GymEnv, str], learning_rate: Union[float, Schedule] = 3e-4, buffer_size: int = 1_000_000, # 1e6 learning_starts: int = 100, batch_size: int = 256, tau: float = 0.005, gamma: float = 0.99, train_freq: Union[int, Tuple[int, str]] = 1, gradient_steps: int = 1, action_noise: Optional[ActionNoise] = None, replay_buffer_class: Optional[ReplayBuffer] = None, replay_buffer_kwargs: Optional[Dict[str, Any]] = None, optimize_memory_usage: bool = False, ent_coef: Union[str, float] = "auto", target_update_interval: int = 1, target_entropy: Union[str, float] = "auto", use_sde: bool = False, sde_sample_freq: int = -1, use_sde_at_warmup: bool = False, tensorboard_log: Optional[str] = None, create_eval_env: bool = False, policy_kwargs: Optional[Dict[str, Any]] = None, verbose: int = 0, seed: Optional[int] = None, device: Union[th.device, str] = "auto", _init_setup_model: bool = True, ): super(SAC, self).__init__( policy, env, SACPolicy, learning_rate, buffer_size, learning_starts, batch_size, tau, gamma, train_freq, gradient_steps, action_noise, replay_buffer_class=replay_buffer_class, replay_buffer_kwargs=replay_buffer_kwargs, policy_kwargs=policy_kwargs, tensorboard_log=tensorboard_log, verbose=verbose, device=device, create_eval_env=create_eval_env, seed=seed, use_sde=use_sde, sde_sample_freq=sde_sample_freq, use_sde_at_warmup=use_sde_at_warmup, optimize_memory_usage=optimize_memory_usage, supported_action_spaces=(gym.spaces.Box), support_multi_env=True, ) self.target_entropy = target_entropy self.log_ent_coef = None # type: Optional[th.Tensor] # Entropy coefficient / Entropy temperature # Inverse of the reward scale self.ent_coef = ent_coef self.target_update_interval = target_update_interval self.ent_coef_optimizer = None if _init_setup_model: self._setup_model() def _setup_model(self) -> None: super(SAC, self)._setup_model() self._create_aliases() # Target entropy is used when learning the entropy coefficient if self.target_entropy == "auto": # automatically set target entropy if needed self.target_entropy = -np.prod(self.env.action_space.shape).astype(np.float32) else: # Force conversion # this will also throw an error for unexpected string self.target_entropy = float(self.target_entropy) # The entropy coefficient or entropy can be learned automatically # see Automating Entropy Adjustment for Maximum Entropy RL section # of https://arxiv.org/abs/1812.05905 if 
isinstance(self.ent_coef, str) and self.ent_coef.startswith("auto"): # Default initial value of ent_coef when learned init_value = 1.0 if "_" in self.ent_coef: init_value = float(self.ent_coef.split("_")[1]) assert init_value > 0.0, "The initial value of ent_coef must be greater than 0" # Note: we optimize the log of the entropy coeff which is slightly different from the paper # as discussed in https://github.com/rail-berkeley/softlearning/issues/37 self.log_ent_coef = th.log(th.ones(1, device=self.device) * init_value).requires_grad_(True) self.ent_coef_optimizer = th.optim.Adam([self.log_ent_coef], lr=self.lr_schedule(1)) else: # Force conversion to float # this will throw an error if a malformed string (different from 'auto') # is passed self.ent_coef_tensor = th.tensor(float(self.ent_coef)).to(self.device) def _create_aliases(self) -> None: self.actor = self.policy.actor self.critic = self.policy.critic self.critic_target = self.policy.critic_target def train(self, gradient_steps: int, batch_size: int = 64) -> None: # Switch to train mode (this affects batch norm / dropout) self.policy.set_training_mode(True) # Update optimizers learning rate optimizers = [self.actor.optimizer, self.critic.optimizer] if self.ent_coef_optimizer is not None: optimizers += [self.ent_coef_optimizer] # Update learning rate according to lr schedule self._update_learning_rate(optimizers) ent_coef_losses, ent_coefs = [], [] actor_losses, critic_losses = [], [] for gradient_step in range(gradient_steps): # Sample replay buffer replay_data = self.replay_buffer.sample(batch_size, env=self._vec_normalize_env) # We need to sample because `log_std` may have changed between two gradient steps if self.use_sde: self.actor.reset_noise() # Action by the current actor for the sampled state actions_pi, log_prob = self.actor.action_log_prob(replay_data.observations) log_prob = log_prob.reshape(-1, 1) ent_coef_loss = None if self.ent_coef_optimizer is not None: # Important: detach the variable from the graph # so we don't change it with other losses # see https://github.com/rail-berkeley/softlearning/issues/60 ent_coef = th.exp(self.log_ent_coef.detach()) ent_coef_loss = -(self.log_ent_coef * (log_prob + self.target_entropy).detach()).mean() ent_coef_losses.append(ent_coef_loss.item()) else: ent_coef = self.ent_coef_tensor ent_coefs.append(ent_coef.item()) # Optimize entropy coefficient, also called # entropy temperature or alpha in the paper if ent_coef_loss is not None: self.ent_coef_optimizer.zero_grad() ent_coef_loss.backward() self.ent_coef_optimizer.step() with th.no_grad(): # Select action according to policy next_actions, next_log_prob = self.actor.action_log_prob(replay_data.next_observations) # Compute the next Q values: min over all critics targets next_q_values = th.cat(self.critic_target(replay_data.next_observations, next_actions), dim=1) next_q_values, _ = th.min(next_q_values, dim=1, keepdim=True) # add entropy term next_q_values = next_q_values - ent_coef * next_log_prob.reshape(-1, 1) # td error + entropy term target_q_values = replay_data.rewards + (1 - replay_data.dones) * self.gamma * next_q_values # Get current Q-values estimates for each critic network # using action from the replay buffer current_q_values = self.critic(replay_data.observations, replay_data.actions) # Compute critic loss critic_loss = 0.5 * sum([F.mse_loss(current_q, target_q_values) for current_q in current_q_values]) critic_losses.append(critic_loss.item()) # Optimize the critic self.critic.optimizer.zero_grad() 
critic_loss.backward() self.critic.optimizer.step() # Compute actor loss # Alternative: actor_loss = th.mean(log_prob - qf1_pi) # Mean over all critic networks q_values_pi = th.cat(self.critic.forward(replay_data.observations, actions_pi), dim=1) min_qf_pi, _ = th.min(q_values_pi, dim=1, keepdim=True) actor_loss = (ent_coef * log_prob - min_qf_pi).mean() actor_losses.append(actor_loss.item()) # Optimize the actor self.actor.optimizer.zero_grad() actor_loss.backward() self.actor.optimizer.step() # Update target networks if gradient_step % self.target_update_interval == 0: polyak_update(self.critic.parameters(), self.critic_target.parameters(), self.tau) self._n_updates += gradient_steps self.logger.record("train/n_updates", self._n_updates, exclude="tensorboard") self.logger.record("train/ent_coef", np.mean(ent_coefs)) self.logger.record("train/actor_loss", np.mean(actor_losses)) self.logger.record("train/critic_loss", np.mean(critic_losses)) if len(ent_coef_losses) > 0: self.logger.record("train/ent_coef_loss", np.mean(ent_coef_losses)) def learn( self, total_timesteps: int, callback: MaybeCallback = None, log_interval: int = 4, eval_env: Optional[GymEnv] = None, eval_freq: int = -1, n_eval_episodes: int = 5, tb_log_name: str = "SAC", eval_log_path: Optional[str] = None, reset_num_timesteps: bool = True, ) -> OffPolicyAlgorithm: return super(SAC, self).learn( total_timesteps=total_timesteps, callback=callback, log_interval=log_interval, eval_env=eval_env, eval_freq=eval_freq, n_eval_episodes=n_eval_episodes, tb_log_name=tb_log_name, eval_log_path=eval_log_path, reset_num_timesteps=reset_num_timesteps, ) def _excluded_save_params(self) -> List[str]: return super(SAC, self)._excluded_save_params() + ["actor", "critic", "critic_target"] def _get_torch_save_params(self) -> Tuple[List[str], List[str]]: state_dicts = ["policy", "actor.optimizer", "critic.optimizer"] if self.ent_coef_optimizer is not None: saved_pytorch_variables = ["log_ent_coef"] state_dicts.append("ent_coef_optimizer") else: saved_pytorch_variables = ["ent_coef_tensor"] return state_dicts, saved_pytorch_variables
[ [ [ 19, 22 ], [ 5168, 5171 ], [ 5601, 5604 ] ], [ [ 24, 28 ], [ 5158, 5162 ], [ 5591, 5595 ] ], [ [ 30, 34 ], [ 14413, 14417 ], [ 14566, 14570 ], [ 14577, 14581 ] ], [ [ 36, 44 ], [ 5029, 5037 ], [ 5088, 5096 ], [ 5149, 5157 ], [ 5498, 5506 ], [ 5582, 5590 ], [ 5655, 5663 ], [ 13730, 13738 ], [ 13875, 13883 ] ], [ [ 46, 51 ], [ 4952, 4957 ], [ 14560, 14565 ] ], [ [ 53, 57 ], [ 4647, 4651 ] ], [ [ 59, 64 ], [ 4636, 4641 ], [ 4678, 4683 ], [ 4721, 4726 ], [ 4941, 4946 ], [ 5245, 5250 ], [ 5338, 5343 ], [ 5693, 5698 ] ], [ [ 73, 76 ], [ 6622, 6625 ] ], [ [ 84, 95 ], [ 7392, 7394 ], [ 7436, 7438 ], [ 13302, 13304 ], [ 13369, 13371 ], [ 13440, 13442 ], [ 13555, 13557 ] ], [ [ 103, 114 ], [ 5699, 5701 ], [ 8406, 8408 ], [ 8413, 8415 ], [ 8517, 8519 ], [ 8774, 8776 ], [ 10443, 10445 ], [ 11069, 11071 ], [ 11345, 11347 ], [ 11459, 11461 ], [ 12514, 12516 ], [ 12614, 12616 ] ], [ [ 136, 151 ], [ 12058, 12059 ] ], [ [ 198, 210 ], [ 5097, 5109 ] ], [ [ 254, 265 ], [ 5038, 5049 ] ], [ [ 324, 342 ], [ 547, 565 ], [ 13948, 13966 ] ], [ [ 393, 399 ], [ 4684, 4690 ], [ 13739, 13745 ] ], [ [ 401, 414 ], [ 13659, 13672 ] ], [ [ 416, 424 ], [ 4734, 4742 ] ], [ [ 468, 481 ], [ 13044, 13057 ] ], [ [ 525, 534 ], [ 4652, 4661 ], [ 5857, 5866 ] ], [ [ 543, 546 ], [ 5787, 5790 ], [ 7130, 7133 ], [ 13990, 13993 ], [ 14445, 14448 ] ] ]
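# --- Illustrative usage sketch (not part of the original module), following
# the usual stable-baselines3 workflow. "Pendulum-v1" is an arbitrary
# continuous-control environment chosen for demonstration.
model = SAC("MlpPolicy", "Pendulum-v1", verbose=1)
model.learn(total_timesteps=10_000)

obs = model.env.reset()  # model.env is the wrapped vectorized environment
action, _states = model.predict(obs, deterministic=True)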
# ./constants.py
import os
import enum

from dotenv import load_dotenv

load_dotenv()


@enum.unique
class InputConfig(enum.Enum):
    '''
    Config for the gameplay. Takes input from .env.
    Values should be something that can be used by pyAutoGUI.
    If not available, uses the default input config (mine),
    and yes, I use arrow keys, deal with it!
    '''
    DEFAULT = ''
    UP = os.getenv('UP', 'up').lower()
    DOWN = os.getenv('DOWN', 'down').lower()
    LEFT = os.getenv('LEFT', 'left').lower()
    RIGHT = os.getenv('RIGHT', 'right').lower()
    FRONT_PUNCH = os.getenv('FRONT_PUNCH', 'a').lower()
    BACK_PUNCH = os.getenv('BACK_PUNCH', 's').lower()
    FRONT_KICK = os.getenv('FRONT_KICK', 'z').lower()
    BACK_KICK = os.getenv('BACK_KICK', 'x').lower()
    THROW = os.getenv('THROW', 'd').lower()
    TAG = os.getenv('TAG', 'c').lower()
    BLOCK = os.getenv('BLOCK', 'space').lower()
    FLIP_STANCE = os.getenv('FLIP_STANCE', 'ctrlright').lower()
    PAUSE = os.getenv('PAUSE', 'tab').lower()
    BACK = os.getenv('BACK', 'backspace').lower()
[ [ [ 25, 27 ], [ 389, 391 ], [ 430, 432 ], [ 475, 477 ], [ 521, 523 ], [ 575, 577 ], [ 630, 632 ], [ 684, 686 ], [ 737, 739 ], [ 785, 787 ], [ 827, 829 ], [ 869, 871 ], [ 923, 925 ], [ 981, 983 ], [ 1026, 1028 ] ], [ [ 35, 39 ], [ 121, 125 ], [ 91, 95 ] ], [ [ 60, 71 ], [ 74, 85 ] ], [ [ 109, 120 ] ] ]
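# --- Illustrative usage sketch (not part of the original module). Each member
# value is a key name pyautogui accepts, so gameplay code can press keys by
# semantic action rather than by literal key; pyautogui is an assumed
# dependency, as implied by the docstring above.
import pyautogui

pyautogui.press(InputConfig.FRONT_PUNCH.value)  # 'a' unless overridden in .env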
# -*- coding: utf-8 -*- # ---------------------------------------------------------------------- # Copyright (c) 2021 # # See the LICENSE file for details # see the AUTHORS file for authors # ---------------------------------------------------------------------- #-------------------- # System wide imports # ------------------- # --------------- # Airflow imports # --------------- #-------------- # local imports # ------------- # ----------------------- # Module global variables # ----------------------- # ---------------- # Module constants # ----------------
[]
# Install Notification Module - pip install notify2
import notify2
import time
import os

notify2.init('Notification')
icon_path = os.getcwd() + "/icon.ico"


def notiFunc():
    noti = notify2.Notification("Welcome to Techix", "Techix is a Tech Dependent Youtube Channel, Please Subscribe to get more Videos Frequently.", icon=icon_path)
    noti.set_urgency(notify2.URGENCY_NORMAL)
    noti.set_timeout(15000)  # the timeout must be set before show() to take effect
    noti.show()
    time.sleep(120)


if __name__ == "__main__":
    notiFunc()
[ [ [ 60, 67 ], [ 91, 98 ], [ 187, 194 ], [ 362, 369 ] ], [ [ 75, 79 ], [ 434, 438 ] ], [ [ 87, 89 ], [ 133, 135 ] ], [ [ 121, 130 ], [ 330, 339 ] ], [ [ 164, 172 ], [ 482, 490 ] ] ]
# coding: utf-8 from __future__ import unicode_literals from .common import InfoExtractor import re class ToypicsIE(InfoExtractor): IE_DESC = 'Toypics video' _VALID_URL = r'https?://videos\.toypics\.net/view/(?P<id>[0-9]+)' _TEST = { 'url': 'http://videos.toypics.net/view/514/chancebulged,-2-1/', 'md5': '16e806ad6d6f58079d210fe30985e08b', 'info_dict': { 'id': '514', 'ext': 'mp4', 'title': "Chance-Bulge'd, 2", 'age_limit': 18, 'uploader': 'kidsune', } } def _real_extract(self, url): video_id = self._match_id(url) webpage = self._download_webpage(url, video_id) formats = self._parse_html5_media_entries( url, webpage, video_id)[0]['formats'] title = self._html_search_regex([ r'<h1[^>]+class=["\']view-video-title[^>]+>([^<]+)</h', r'<title>([^<]+) - Toypics</title>', ], webpage, 'title') uploader = self._html_search_regex( r'More videos from <strong>([^<]+)</strong>', webpage, 'uploader', fatal=False) return { 'id': video_id, 'formats': formats, 'title': title, 'uploader': uploader, 'age_limit': 18, } class ToypicsUserIE(InfoExtractor): IE_DESC = 'Toypics user profile' _VALID_URL = r'https?://videos\.toypics\.net/(?!view)(?P<id>[^/?#&]+)' _TEST = { 'url': 'http://videos.toypics.net/Mikey', 'info_dict': { 'id': 'Mikey', }, 'playlist_mincount': 19, } def _real_extract(self, url): username = self._match_id(url) profile_page = self._download_webpage( url, username, note='Retrieving profile page') video_count = int(self._search_regex( r'public/">Public Videos \(([0-9]+)\)</a></li>', profile_page, 'video count')) PAGE_SIZE = 8 urls = [] page_count = (video_count + PAGE_SIZE + 1) // PAGE_SIZE for n in range(1, page_count + 1): lpage_url = url + '/public/%d' % n lpage = self._download_webpage( lpage_url, username, note='Downloading page %d/%d' % (n, page_count)) urls.extend( re.findall( r'<div[^>]+class=["\']preview[^>]+>\s*<a[^>]+href="(https?://videos\.toypics\.net/view/[^"]+)"', lpage)) return { '_type': 'playlist', 'id': username, 'entries': [{ '_type': 'url', 'url': eurl, 'ie_key': 'Toypics', } for eurl in urls] }
[ [ [ 39, 55 ] ], [ [ 77, 90 ], [ 119, 132 ], [ 1339, 1352 ] ], [ [ 98, 100 ], [ 2344, 2346 ] ], [ [ 109, 118 ] ], [ [ 1325, 1338 ] ] ]
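# --- Illustrative sketch (not part of the original module) of how the
# extractor's _VALID_URL pattern yields the video id that _match_id relies on.
import re

url = 'http://videos.toypics.net/view/514/chancebulged,-2-1/'
mobj = re.match(ToypicsIE._VALID_URL, url)
print(mobj.group('id'))  # 514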
# -*- coding: utf-8 -*-
# Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Encode using wordpiece models.

Implements the segmentation algorithm described in the last paragraph of
p. 5150, in the following publication:

M. Schuster and K. Nakajima, "Japanese and Korean voice
search," 2012 IEEE International Conference on Acoustics, Speech and Signal
Processing, 2012
https://static.googleusercontent.com/media/research.google.com/en//pubs/archive/37842.pdf
"""

from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

import sys
import tensorflow as tf

from lingvo.core.ops import py_x_ops

# Must be a large ID.
NO_TOKEN = 1 << 31 - 1
NO_TOKEN_STRING = '<unk>'

SENTENCE_START_STRING = '<s>'
SENTENCE_END_STRING = '</s>'

BOW_STR = '▁'


class WpmEncoder(object):

  def __init__(self, wpm_filepath, merge_prob=1.):
    """Create a WPM encoder.

    Args:
      wpm_filepath: a path to the file containing the vocabulary.
      merge_prob: the probability of merging tokens while encoding.
    """
    # Load vocabulary file.
    self._pieces = []
    with tf.gfile.Open(wpm_filepath, 'r') as f:
      for line in f.readlines():
        # readlines() may yield bytes or str depending on the runtime;
        # normalize to str before parsing.
        if isinstance(line, bytes):
          line = line.decode('utf-8')
        piece = line.strip().split('\t')[0]
        self._pieces.append(piece)
    self._merge_prob = merge_prob

  def _TokenToString(self, token):
    return py_x_ops.vocab_id_to_token(token, vocab=self._pieces)

  def _StringToToken(self, tokstr):
    return tf.where(
        py_x_ops.token_in_vocab(tokstr, vocab=self._pieces),
        py_x_ops.vocab_token_to_id(tokstr, vocab=self._pieces),
        tf.broadcast_to(NO_TOKEN, tf.shape(tokstr)))

  def _MergeTokens(self, tokens):
    return self._StringToToken(
        self._TokenToString(tokens[0]) + self._TokenToString(tokens[1]))

  def _EncodeToIds(self, word):
    # Below:
    #   * a token is a wordpiece ID.
    #   * the tokens array will be merged in-place.
    #   * the candidates array is an array of size len(tokens) - 1.
    #     It contains the token for the merged wordpiece, if it exists,
    #     -1 otherwise. For instance, candidate[3] = id(token[3] + token[4]).
    # First, split into basic UTF-8 characters (letters).
    chars = tf.strings.unicode_split(word, 'UTF-8')
    tokens = self._StringToToken(chars)
    tokens = tf.where(
        tf.equal(tokens, NO_TOKEN),
        # Unseen character.
        tf.broadcast_to(self.unk_id, tf.shape(tokens)),
        tokens)
    # Create initial candidate list.
    candidates = tf.map_fn(
        self._MergeTokens, (tokens[:-1], tokens[1:]), dtype=tokens.dtype)

    def _ShouldMerge(unused_tokens, candidates):
      """Merge until not possible, or we abort early according to merge_prob."""
      return tf.logical_and(
          tf.reduce_any(tf.not_equal(candidates, NO_TOKEN)),
          tf.random.uniform([]) < self._merge_prob)

    def _MergeOneToken(tokens, i):
      return tf.expand_dims(
          self._MergeTokens((tokens[i], tokens[i + 1])), axis=-1)

    def _MergeCandidates(tokens, candidates):
      """Merge in the reverse binary tree."""
      best_id = tf.argmin(candidates, output_type=tf.int32)
      # Perform the merge at position best_id.
tokens = tf.concat( [tokens[:best_id], [candidates[best_id]], tokens[best_id + 2:]], axis=0) # Recompute the merge candidates. # Only the neighbors of best_id need to be recomputed. empty = tf.zeros([0], dtype=candidates.dtype) def _MergeLeft(): return tf.concat( [candidates[:best_id - 1], _MergeOneToken(tokens, best_id - 1)], axis=0) left_candidates = tf.cond(tf.equal(best_id, 0), lambda: empty, _MergeLeft) def _MergeRight(): return tf.concat( [_MergeOneToken(tokens, best_id), candidates[best_id + 2:]], axis=0) right_candidates = tf.cond( tf.greater_equal(best_id, tf.size(tokens) - 1), lambda: empty, _MergeRight) candidates = tf.concat([left_candidates, right_candidates], axis=0) return tokens, candidates return tf.while_loop( _ShouldMerge, _MergeCandidates, (tokens, candidates), parallel_iterations=1, back_prop=False)[0] def Encode(self, text): """Converts string `text` to integer ids and the encoded string. Encoding includes prefixing the beginning-of-word token to each word. Returns: ids: the encoded integer ids. tokens: the encoded string. """ words = tf.sparse.to_dense(tf.strings.split([text]), default_value='')[0] num_words = tf.size(words) ids_ta = tf.TensorArray(tf.int32, 0, dynamic_size=True) def _WordsToIds(i, words, ids_ta): encoded_ids = self._EncodeToIds(BOW_STR + words[i]) ids_ta = ids_ta.scatter( tf.range(ids_ta.size(), ids_ta.size() + tf.size(encoded_ids)), encoded_ids) return i + 1, words, ids_ta _, _, ids_ta = tf.while_loop( lambda i, *_: i < num_words, _WordsToIds, loop_vars=(tf.constant(0, tf.int32), words, ids_ta), parallel_iterations=30, back_prop=False) ids = ids_ta.stack() return ids, self._TokenToString(ids) def Decode(self, ids): txt = tf.strings.reduce_join(self._TokenToString(ids)) txt = tf.strings.regex_replace(txt, BOW_STR, ' ') # Note that this strips spaces from the end of the input as well. # We assume no inputs rely on the existence of trailing whitespace. txt = tf.strings.strip(txt) return txt @property def sentence_start_id(self): return self._pieces.index(SENTENCE_START_STRING) @property def sentence_start_string(self): return SENTENCE_START_STRING @property def sentence_end_id(self): return self._pieces.index(SENTENCE_END_STRING) @property def sentence_end_string(self): return SENTENCE_END_STRING @property def unk_id(self): return self._pieces.index(NO_TOKEN_STRING)
[ [ [ 1129, 1144 ] ], [ [ 1168, 1176 ] ], [ [ 1200, 1214 ] ], [ [ 1223, 1226 ] ], [ [ 1235, 1251 ], [ 1758, 1760 ], [ 2128, 2130 ], [ 2271, 2273 ], [ 2297, 2299 ], [ 2879, 2881 ], [ 2972, 2974 ], [ 2990, 2992 ], [ 3054, 3056 ], [ 3083, 3085 ], [ 3172, 3174 ], [ 4776, 4778 ], [ 5195, 5197 ], [ 5214, 5216 ], [ 5277, 5279 ], [ 5305, 5307 ], [ 5320, 5322 ], [ 5640, 5642 ], [ 5732, 5734 ], [ 5747, 5749 ], [ 5934, 5936 ], [ 5993, 5995 ], [ 6189, 6191 ], [ 3401, 3403 ], [ 3427, 3429 ], [ 3441, 3443 ], [ 3488, 3490 ], [ 3579, 3581 ], [ 3770, 3772 ], [ 3804, 3806 ], [ 3876, 3878 ], [ 4095, 4097 ], [ 4319, 4321 ], [ 4327, 4329 ], [ 4535, 4537 ], [ 4554, 4556 ], [ 4607, 4609 ], [ 4677, 4679 ], [ 5491, 5493 ], [ 5550, 5552 ], [ 4173, 4175 ], [ 4417, 4419 ] ], [ [ 1281, 1289 ], [ 2026, 2034 ], [ 2146, 2154 ], [ 2207, 2215 ] ], [ [ 1313, 1321 ], [ 2287, 2295 ], [ 3007, 3015 ], [ 3466, 3474 ] ], [ [ 1336, 1351 ], [ 6637, 6652 ] ], [ [ 1363, 1384 ], [ 6300, 6321 ], [ 6382, 6403 ] ], [ [ 1393, 1412 ], [ 6476, 6495 ], [ 6554, 6573 ] ], [ [ 1423, 1430 ], [ 6023, 6030 ], [ 5430, 5437 ] ], [ [ 1445, 1455 ] ] ]
import logging import os import traceback from datetime import datetime, time, timezone from random import Random, choice import disnake from disnake.ext import tasks from disnake.ext.commands import BucketType, cooldown, guild_only from bot.bot import command, group, has_permissions from bot.globals import PLAYLISTS from cogs.cog import Cog from utils.utilities import read_lines logger = logging.getLogger('terminal') class WrestlingGif: def __init__(self, url, text): self.url = url self.text = text def build_embed(self, author, recipient): description = self.text.format(author=author, recipient=recipient) embed = disnake.Embed(description=description) embed.set_image(url=self.url) return embed wrestling_gifs = [ WrestlingGif('https://i.imgur.com/xUi2Vq1.gif', "**{recipient.name}** tries to grab but it fails. **{author.name}** grabs **{recipient.name}**"), WrestlingGif('https://i.imgur.com/osDWTHG.gif', "**{recipient.name}** tries to escape but **{author.name}** pins them down"), WrestlingGif('https://i.imgur.com/HS6R463.gif', "**{author.name}** lifts **{recipient.name}** up. **{recipient.name}** is powerless to do anything"), WrestlingGif('https://i.imgur.com/jbE2XVt.gif', "**{author.name}** challenges **{recipient.name}** to a friendly wrestling match"), WrestlingGif('https://i.imgur.com/XVUjH9x.gif', "**{recipient.name}** tries to attack but **{author.name}** counters"), WrestlingGif('https://i.imgur.com/vTeoYAE.gif', "**{author.name}** and **{recipient.name}** engage in a battle of strength"), WrestlingGif('https://i.imgur.com/iu2kiVy.gif', "**{author.name}** gets a hold of **{recipient.name}**"), WrestlingGif('https://i.imgur.com/BulkVW1.gif', "**{author.name}** gets **{recipient.name}** with a knee strike"), WrestlingGif('https://i.imgur.com/zXaIYLp.gif', "**{author.name}** beats **{recipient.name}** down"), WrestlingGif('https://i.imgur.com/XNOMUcg.gif', "**{author.name}** delivers a low blow to **{recipient.name}**. Nasty strategy"), WrestlingGif('https://i.imgur.com/oSG0V6a.gif', "**{recipient.name}** gets beaten by **{author.name}**"), WrestlingGif('https://i.imgur.com/u0H0ZSA.gif', "**{author.name}** grabs **{recipient.name}**s fucking pants <:GWjojoGachiGASM:363025405562585088>"), WrestlingGif('https://i.imgur.com/VFruiTR.gif', "**{author.name}** flexes on **{recipient.name}** after kicking their ass. 
WOO"), WrestlingGif('https://i.imgur.com/YCd1aSo.gif', "**{author.name}** beats **{recipient.name}** up"), WrestlingGif('https://i.imgur.com/M3sAu23.gif', "**{author.name}** chokes **{recipient.name}**"), WrestlingGif('https://i.imgur.com/inEROy3.gif', "**{author.name}** throws **{recipient.name}** on the ground"), WrestlingGif('https://i.imgur.com/8qI8f1M.gif', "**{author.name}** battles **{recipient.name}** in a feat of pure strength"), WrestlingGif('https://i.imgur.com/xhVIjIt.gif', "**{author.name}** lifts **{recipient.name}** up"), WrestlingGif('https://i.imgur.com/RW07zr0.gif', "**{author.name}** escapes the choke of **{recipient.name}**"), WrestlingGif('https://i.imgur.com/g6wVGpG.gif', "**{author.name}** escapes **{recipient.name}**s grab and begins a counter-attack"), WrestlingGif('https://i.imgur.com/LKHtUeo.gif', "**{author.name}** gets a hold of **{recipient.name}**"), WrestlingGif('https://i.imgur.com/eCCAKoA.gif', "It's time to wrestle"), WrestlingGif('https://i.imgur.com/ZFiT5Ew.gif', "**{author.name}** lifts **{recipient.name}** up"), WrestlingGif('https://i.imgur.com/A4Oo0Tp.gif', "**{author.name}** puts **{recipient.name}** down"), WrestlingGif('https://i.imgur.com/COQlI5t.gif', "**{author.name}** swaps positions with **{recipient.name}**"), WrestlingGif('https://i.imgur.com/pIaErDy.gif', "**{author.name}** pulls **{recipient.name}**s arms"), WrestlingGif('https://i.imgur.com/hThhSrl.gif', "**{author.name}** locks **{recipient.name}**s leg"), WrestlingGif('https://i.imgur.com/goMZvRE.gif', "**{author.name}** turns the tables on **{recipient.name}**"), WrestlingGif('https://i.imgur.com/3A9eMu0.gif', "**{author.name}** slams **{recipient.name}** on the floor"), WrestlingGif('https://i.imgur.com/G9Iklxu.gif', "**{author.name}** and **{recipient.name}** are in the middle of an intense battle"), WrestlingGif('https://i.imgur.com/c1CQBnJ.gif', "**{recipient.name}** gets elbow struck by **{author.name}**"), WrestlingGif('https://i.imgur.com/cKcOJo0.gif', "**{author.name}** pulls **{recipient.name}**s leg"), WrestlingGif('https://i.imgur.com/Q41oEne.gif', "**{recipient.name}** gets elbow struck by **{author.name}**"), WrestlingGif('https://i.imgur.com/AP7MRnF.gif', "**{author.name}** escapes the hold of **{recipient.name}** and is ready for more"), WrestlingGif('https://i.imgur.com/6khggL1.gif', "**{author.name}** pulls the hair of **{recipient.name}**"), WrestlingGif('https://i.imgur.com/bq0Bjbl.gif', "**{author.name}** got the moves"), WrestlingGif('https://i.imgur.com/aIVoytr.gif', "**{author.name}** throws **{recipient.name}** on the ground"), WrestlingGif('https://i.imgur.com/l137Zzh.gif', "**{recipient.name}** gets elbow struck by **{author.name}**"), WrestlingGif('https://i.imgur.com/tFZv2j9.gif', "**{recipient.name}** and **{author.name}** engage in a fight. 
**{author.name}** makes the first move"), WrestlingGif('https://i.imgur.com/kVXjE3Q.gif', "**{author.name}** pulls **{recipient.name}**'s hands"), WrestlingGif('https://i.imgur.com/4IsfXSD.gif', "**{author.name}** has **{recipient.name}** locked down"), WrestlingGif('https://i.imgur.com/HnLRl26.gif', "**{author.name}** spins **{recipient.name}** right round baby right round"), WrestlingGif('https://i.imgur.com/uJtuZ4V.gif', "**{author.name}** beats **{recipient.name}** up and locks him down"), WrestlingGif('https://i.imgur.com/ZgXNVIb.gif', "**{recipient.name}** flails his arms around helplessly"), WrestlingGif('https://i.imgur.com/Jcu4NyL.gif', "**{author.name}** manages to get a quick jab in at **{recipient.name}**"), WrestlingGif('https://i.imgur.com/XUpxidH.gif', "**{author.name}** pulls on **{recipient.name}**'s leg"), WrestlingGif('https://i.imgur.com/pTBy6ap.gif', "**{recipient.name}** and **{author.name}** engage in a hugging competition"), WrestlingGif('https://i.imgur.com/ggTj4xI.gif', "**{author.name}** escapes **{recipient.name}**'s hold and counters"), WrestlingGif('https://i.imgur.com/lS2zZre.gif', "**{author.name}** locks **{recipient.name}**'s legs"), WrestlingGif('https://i.imgur.com/fdgI1Br.gif', "**{recipient.name}** gets choked by **{author.name}** and tries to escape but fails"), ] class gachiGASM(Cog): def __init__(self, bot): super().__init__(bot) self.gachilist = self.bot.gachilist if not self.gachilist: self.reload_gachilist() self._start_task = self._reload_and_post.start() logger.info(f'Starting gachi loop.\n{"".join(traceback.format_stack()[-8:])}') def cog_unload(self): self._reload_and_post.cancel() @tasks.loop(time=time(tzinfo=timezone.utc), reconnect=False) async def _reload_and_post(self): logger.info(f'Start task is {self._start_task}, ' f'current task is {self._reload_and_post.get_task()}, ' f'fail status: {self._reload_and_post._last_iteration_failed}, ' f'next iter {self._reload_and_post.next_iteration}.\n{"".join(traceback.format_stack()[-8:])}') self.reload_gachilist() for guild in self.bot.guilds: channel = self.bot.guild_cache.dailygachi(guild.id) if not channel: continue channel = guild.get_channel(channel) if not channel: continue vid = Random(self.get_day()+guild.id).choice(self.gachilist) try: await channel.send(f'Daily gachi {vid}') except disnake.HTTPException: pass def reload_gachilist(self): self.bot.gachilist = read_lines(os.path.join(PLAYLISTS, 'gachi.txt')) self.gachilist = self.bot.gachilist @staticmethod def get_day(): return (datetime.utcnow() - datetime.min).days @command() @cooldown(1, 2, BucketType.channel) async def gachify(self, ctx, *, words): """Gachify a string""" if ' ' not in words: # We need to undo the string view or it will skip the first word ctx.view.undo() await self.gachify2.invoke(ctx) else: return await ctx.send(words.replace(' ', r' \♂ ').upper()[:2000]) @command() @cooldown(1, 2, BucketType.channel) async def gachify2(self, ctx, *, words): """An alternative way of gachifying""" s = r'\♂ ' + words.replace(' ', r' \♂ ').upper() + r' \♂' return await ctx.send(s[:2000]) @command(aliases=['rg']) @cooldown(1, 5, BucketType.channel) async def randomgachi(self, ctx): await ctx.send(choice(self.gachilist)) @group(invoke_without_command=True, aliases=['dg']) @guild_only() @cooldown(1, 5, BucketType.channel) async def dailygachi(self, ctx): await ctx.send(Random(self.get_day()+ctx.guild.id).choice(self.gachilist)) @dailygachi.command(np_pm=True) @cooldown(1, 5) @has_permissions(manage_guild=True) async def subscribe(self, 
ctx, *, channel: disnake.TextChannel=None): if channel: await self.bot.guild_cache.set_dailygachi(ctx.guild.id, channel.id) return await ctx.send(f'New dailygachi channel set to {channel}') channel = self.bot.guild_cache.dailygachi(ctx.guild.id) channel = ctx.guild.get_channel(channel) if channel: await ctx.send(f'Current dailygachi channel is {channel}') else: await ctx.send('No dailygachi channel set') @dailygachi.command() @cooldown(1, 5) @has_permissions(manage_guild=True) @guild_only() async def unsubscribe(self, ctx): await self.bot.guild_cache.set_dailygachi(ctx.guild.id, None) await ctx.send('Dailygachi channel no longer set') @command() @cooldown(1, 5, BucketType.member) @guild_only() async def wrestle(self, ctx, *, user: disnake.User): if user == ctx.author: await ctx.send('Wrestling against yourself...') return wrestling_gif = choice(wrestling_gifs) await ctx.send(embed=wrestling_gif.build_embed(ctx.author, user)) def setup(bot): bot.add_cog(gachiGASM(bot))
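# Quick check of the string transforms behind `gachify`/`gachify2` above; the
# backslash is kept in the output, presumably so Discord renders the ♂
# literally instead of converting it (an assumption about the escape's intent).
words = 'hello world'
assert words.replace(' ', r' \♂ ').upper() == r'HELLO \♂ WORLD'
assert r'\♂ ' + words.replace(' ', r' \♂ ').upper() + r' \♂' == r'\♂ HELLO \♂ WORLD \♂'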
[ [ [ 7, 14 ], [ 395, 402 ] ], [ [ 22, 24 ], [ 8190, 8192 ] ], [ [ 32, 41 ], [ 7076, 7085 ], [ 7581, 7590 ] ], [ [ 63, 71 ], [ 8326, 8334 ], [ 8346, 8354 ] ], [ [ 73, 77 ], [ 7198, 7202 ] ], [ [ 79, 87 ], [ 7210, 7218 ] ], [ [ 107, 113 ], [ 7925, 7931 ], [ 9350, 9356 ] ], [ [ 115, 121 ], [ 9151, 9157 ], [ 10572, 10578 ] ], [ [ 130, 137 ], [ 668, 675 ], [ 8073, 8080 ], [ 9554, 9561 ], [ 10422, 10429 ] ], [ [ 162, 167 ], [ 7182, 7187 ] ], [ [ 201, 211 ], [ 8401, 8411 ], [ 8802, 8812 ], [ 9070, 9080 ], [ 9270, 9280 ], [ 10343, 10353 ] ], [ [ 213, 221 ], [ 8386, 8394 ], [ 8787, 8795 ], [ 9055, 9063 ], [ 9255, 9263 ], [ 9452, 9460 ], [ 10067, 10075 ], [ 10328, 10336 ] ], [ [ 223, 233 ], [ 9237, 9247 ], [ 10127, 10137 ], [ 10367, 10377 ] ], [ [ 255, 262 ], [ 8371, 8378 ], [ 8772, 8779 ], [ 9026, 9033 ], [ 10313, 10320 ] ], [ [ 264, 269 ], [ 9181, 9186 ] ], [ [ 271, 286 ], [ 9472, 9487 ], [ 10087, 10102 ] ], [ [ 311, 320 ], [ 8203, 8212 ] ], [ [ 342, 345 ], [ 6789, 6792 ] ], [ [ 374, 384 ], [ 8179, 8189 ] ], [ [ 386, 392 ], [ 7031, 7037 ], [ 7288, 7294 ] ], [ [ 433, 445 ], [ 791, 803 ], [ 941, 953 ], [ 1071, 1083 ], [ 1225, 1237 ], [ 1361, 1373 ], [ 1485, 1497 ], [ 1615, 1627 ], [ 1725, 1737 ], [ 1844, 1856 ], [ 1950, 1962 ], [ 2084, 2096 ], [ 2194, 2206 ], [ 2348, 2360 ], [ 2482, 2494 ], [ 2586, 2598 ], [ 2688, 2700 ], [ 2804, 2816 ], [ 2934, 2946 ], [ 3038, 3050 ], [ 3154, 3166 ], [ 3291, 3303 ], [ 3401, 3413 ], [ 3478, 3490 ], [ 3582, 3594 ], [ 3687, 3699 ], [ 3803, 3815 ], [ 3910, 3922 ], [ 4016, 4028 ], [ 4131, 4143 ], [ 4245, 4257 ], [ 4383, 4395 ], [ 4499, 4511 ], [ 4605, 4617 ], [ 4721, 4733 ], [ 4858, 4870 ], [ 4971, 4983 ], [ 5059, 5071 ], [ 5175, 5187 ], [ 5291, 5303 ], [ 5448, 5460 ], [ 5558, 5570 ], [ 5669, 5681 ], [ 5799, 5811 ], [ 5922, 5934 ], [ 6033, 6045 ], [ 6161, 6173 ], [ 6271, 6283 ], [ 6402, 6414 ], [ 6525, 6537 ], [ 6633, 6645 ] ], [ [ 768, 782 ], [ 10579, 10593 ] ], [ [ 6779, 6788 ], [ 10704, 10713 ] ], [ [ 10676, 10681 ] ] ]
#!/usr/bin/env python # Copyright (c) 2018, Michael Boyle # See LICENSE file for details: <https://github.com/moble/quaternion/blob/master/LICENSE> # Construct the version number from the date and time this python version was created. from os import environ from sys import platform on_windows = ('win' in platform.lower() and not 'darwin' in platform.lower()) if "package_version" in environ: version = environ["package_version"] print("Setup.py using environment version='{0}'".format(version)) else: print("The variable 'package_version' was not present in the environment") try: # For cases where this is being installed from git. This gives the true version number. from subprocess import check_output if on_windows: version = check_output("""git log -1 --format=%cd --date=format:'%Y.%m.%d.%H.%M.%S'""", shell=False) version = version.decode('ascii').strip().replace('.0', '.').replace("'", "") else: version = check_output("""git log -1 --format=%cd --date=format:'%Y.%-m.%-d.%-H.%-M.%-S'""", shell=True).decode('ascii').rstrip() print("Setup.py using git log version='{0}'".format(version)) except: # For cases where this isn't being installed from git. This gives the wrong version number, # but at least it provides some information. try: from time import strftime, gmtime try: version = strftime("%Y.%-m.%-d.%-H.%-M.%-S", gmtime()) except ValueError: # because Windows version = strftime("%Y.%m.%d.%H.%M.%S", gmtime()).replace('.0', '.') print("Setup.py using strftime version='{0}'".format(version)) except: version = '0.0.0' print("Setup.py failed to determine the version; using '{0}'".format(version)) with open('_version.py', 'w') as f: f.write('__version__ = "{0}"'.format(version)) long_description = """\ This package creates a quaternion type in python, and further enables numpy to create and manipulate arrays of quaternions. The usual algebraic operations (addition and multiplication) are available, along with numerous properties like norm and various types of distance measures between two quaternions. There are also additional functions like "squad" and "slerp" interpolation, and conversions to and from axis-angle, matrix, and Euler-angle representations of rotations. The core of the code is written in C for speed. 
""" if __name__ == "__main__": import numpy from setuptools import setup, Extension # from distutils.core import setup, Extension from distutils.errors import DistutilsError if numpy.__dict__.get('quaternion') is not None: raise DistutilsError('The target NumPy already has a quaternion type') extension = Extension( name='quaternion.numpy_quaternion', # This is the name of the object file that will be compiled sources=['quaternion.c', 'numpy_quaternion.c'], extra_compile_args=['/O2' if on_windows else '-O3'], depends=['quaternion.c', 'quaternion.h', 'numpy_quaternion.c'], include_dirs=[numpy.get_include()] ) extension2 = Extension( name='quaternion.numpy_dual_quaternion', # This is the name of the object file that will be compiled sources=['dual_quaternion.c', 'numpy_dual_quaternion.c'], extra_compile_args=['/O2' if on_windows else '-O3'], depends=['dual_quaternion.c', 'dual_quaternion.h', 'numpy_dual_quaternion.c'], include_dirs=[numpy.get_include()] ) setup(name='numpy-quaternion', # Uploaded to pypi under this name packages=['quaternion'], # This is the actual package name package_dir={'quaternion': ''}, ext_modules=[extension, extension2], version=version, install_requires=[ 'numpy>=1.13', ], url='https://github.com/moble/quaternion', author='Michael Boyle', author_email='mob22@cornell.edu', description='Add built-in support for quaternions to numpy', long_description=long_description, )
[ [ [ 252, 259 ], [ 387, 394 ], [ 410, 417 ] ], [ [ 276, 284 ], [ 308, 316 ], [ 345, 353 ] ], [ [ 285, 295 ], [ 753, 763 ], [ 3042, 3052 ], [ 3428, 3438 ] ], [ [ 400, 407 ], [ 497, 504 ], [ 1931, 1938 ], [ 3836, 3843 ] ], [ [ 729, 741 ], [ 787, 799 ], [ 1004, 1016 ] ], [ [ 777, 784 ], [ 900, 907 ] ], [ [ 890, 897 ], [ 1184, 1191 ], [ 1931, 1938 ], [ 3836, 3843 ] ], [ [ 994, 1001 ], [ 1184, 1191 ], [ 1931, 1938 ], [ 3836, 3843 ] ], [ [ 1402, 1410 ], [ 1462, 1470 ], [ 1583, 1591 ] ], [ [ 1412, 1418 ], [ 1497, 1503 ], [ 1613, 1619 ] ], [ [ 1452, 1459 ], [ 1707, 1714 ], [ 1931, 1938 ], [ 3836, 3843 ] ], [ [ 1573, 1580 ], [ 1707, 1714 ], [ 1931, 1938 ], [ 3836, 3843 ] ], [ [ 1745, 1752 ], [ 1844, 1851 ], [ 1931, 1938 ], [ 3836, 3843 ] ], [ [ 1887, 1888 ], [ 1894, 1895 ] ], [ [ 1943, 1959 ], [ 4145, 4161 ] ], [ [ 2537, 2542 ], [ 2692, 2697 ], [ 3160, 3165 ], [ 3561, 3566 ] ], [ [ 2570, 2575 ], [ 3592, 3597 ] ], [ [ 2577, 2586 ], [ 2833, 2842 ], [ 3204, 3213 ] ], [ [ 2670, 2684 ], [ 2752, 2766 ] ], [ [ 2821, 2830 ], [ 3794, 3803 ] ], [ [ 3191, 3201 ], [ 3805, 3815 ] ] ]
# Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT license. import textworld from textworld.challenges import coin_collector def test_making_coin_collector(): expected = { 1: {"quest_length": 1, "nb_rooms": 1}, 100: {"quest_length": 100, "nb_rooms": 100}, 101: {"quest_length": 1, "nb_rooms": 2}, 200: {"quest_length": 100, "nb_rooms": 200}, 201: {"quest_length": 1, "nb_rooms": 3}, 300: {"quest_length": 100, "nb_rooms": 300}, } for level in [1, 100, 101, 200, 201, 300]: options = textworld.GameOptions() options.seeds = 1234 settings = {"level": level} game = coin_collector.make(settings, options) assert len(game.quests[0].commands) == expected[level]["quest_length"] assert len(game.world.rooms) == expected[level]["nb_rooms"]
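# The expected table above follows a closed form: with tier = ceil(level/100)
# and quest_length = level - 100 * (tier - 1), nb_rooms = tier * quest_length.
# A hypothetical helper (not part of textworld's API) that reproduces it:
def expected_for_level(level):
    tier = (level - 1) // 100 + 1
    quest_length = level - 100 * (tier - 1)
    return {"quest_length": quest_length, "nb_rooms": tier * quest_length}

assert expected_for_level(101) == {"quest_length": 1, "nb_rooms": 2}
assert expected_for_level(300) == {"quest_length": 100, "nb_rooms": 300}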
[ [ [ 102, 111 ], [ 588, 597 ] ], [ [ 145, 159 ], [ 693, 707 ] ], [ [ 166, 192 ] ] ]
# django from django import forms from django.contrib.auth.models import User # choices from core.cooggerapp.choices import * # models from core.cooggerapp.models import ( Content, OtherAddressesOfUsers, UserProfile, ReportModel, UTopic, Issue) from .models.utils import send_mail class UTopicForm(forms.ModelForm): class Meta: model = UTopic fields = ["name", "image_address", "definition", "tags", "address"] class ContentForm(forms.ModelForm): msg = forms.CharField( max_length=150, label="Commit Message", help_text="What has changed with this update?" ) class Meta: model = Content fields = ["category", "language", "title", "body", "tags"] @classmethod def send_mail(cls, form): send_mail( subject = f"{form.user} publish a new content | coogger".title(), template_name="email/post.html", context=dict( get_absolute_url=form.get_absolute_url ), to=[u.user.email for u in form.user.follow.follower if u.user.email], ) class ReplyForm(forms.ModelForm): class Meta: model = Content fields = ["title", "body"] class AddressesForm(forms.ModelForm): class Meta: model = OtherAddressesOfUsers fields = ["choices", "address"] class CSettingsUserForm(forms.ModelForm): class Meta: model = User fields = ["first_name", "last_name", "username", "email"] class OtherAddressesOfUsersForm(forms.ModelForm): class Meta: model = OtherAddressesOfUsers fields = ["choices", "address"] class AboutForm(forms.ModelForm): class Meta: model = UserProfile fields = ["about"] class ReportsForm(forms.ModelForm): class Meta: model = ReportModel fields = ["complaints", "add"] class NewIssueForm(forms.ModelForm): class Meta: model = Issue fields = ["title", "body"] @classmethod def send_mail(cls, form): send_mail( subject=f"{form.user} opened a new issue on your {form.utopic.name} topic | coogger".title(), template_name="email/new-issue.html", context=dict( form=form, ), to=[form.utopic.user.email] ) class NewIssueReplyForm(forms.ModelForm): body = forms.CharField( widget=forms.Textarea, help_text="problem | question | or anything else") class Meta: model = Issue fields = ["body"] class NewContentReplyForm(forms.ModelForm): body = forms.CharField( widget=forms.Textarea, help_text="Your content | problem | question | or anything else") class Meta: model = Content fields = ["body"]
[ [ [ 28, 33 ], [ 311, 316 ], [ 465, 470 ], [ 493, 498 ], [ 1139, 1144 ], [ 1255, 1260 ], [ 1393, 1398 ], [ 1548, 1553 ], [ 1678, 1683 ], [ 1787, 1792 ], [ 1908, 1913 ], [ 2368, 2373 ], [ 2397, 2402 ], [ 2429, 2434 ], [ 2597, 2602 ], [ 2626, 2631 ], [ 2658, 2663 ] ], [ [ 73, 77 ], [ 1443, 1447 ] ], [ [ 125, 126 ] ], [ [ 178, 185 ], [ 662, 669 ], [ 1190, 1197 ], [ 2789, 2796 ] ], [ [ 187, 208 ], [ 1305, 1326 ], [ 1598, 1619 ] ], [ [ 210, 221 ], [ 1728, 1739 ] ], [ [ 227, 238 ], [ 1837, 1848 ] ], [ [ 240, 246 ], [ 362, 368 ] ], [ [ 248, 253 ], [ 1958, 1963 ], [ 2537, 2542 ] ], [ [ 282, 291 ], [ 797, 806 ], [ 2055, 2064 ] ], [ [ 300, 310 ] ], [ [ 453, 464 ] ], [ [ 1129, 1138 ] ], [ [ 1241, 1254 ] ], [ [ 1375, 1392 ] ], [ [ 1522, 1547 ] ], [ [ 1668, 1677 ] ], [ [ 1775, 1786 ] ], [ [ 1895, 1907 ] ], [ [ 2350, 2367 ] ], [ [ 2577, 2596 ] ] ]
from math import ceil, sqrt

def my_sqrt(input_num):
    return ceil(sqrt(input_num))

def is_divisible(dividend, divisor):
    return dividend % divisor == 0

def is_prime(input_num):
    """Trial division up to ceil(sqrt(n)), reusing the helpers above."""
    if input_num < 2:
        return False
    for divisor in range(2, min(my_sqrt(input_num) + 1, input_num)):
        if is_divisible(input_num, divisor):
            return False
    return True
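# Spot checks for the helpers above:
assert my_sqrt(10) == 4
assert is_divisible(10, 5) and not is_divisible(10, 3)
assert [n for n in range(2, 20) if is_prime(n)] == [2, 3, 5, 7, 11, 13, 17, 19]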
[ [ [ 17, 21 ], [ 71, 75 ] ], [ [ 23, 27 ], [ 76, 80 ] ], [ [ 39, 46 ] ], [ [ 102, 114 ] ], [ [ 180, 188 ] ] ]
""" Phonon DOS and bandstructure analysis package. """
[]
# -*- coding: utf-8 -*- # this file is released under public domain and you can use without limitations ######################################################################### ## Customize your APP title, subtitle and menus here ######################################################################### response.logo = A(B('web',SPAN(2),'py'),XML('&trade;&nbsp;'), _class="brand",_href="http://www.web2py.com/") response.title = request.application.replace('_',' ').title() response.subtitle = '' ## read more at http://dev.w3.org/html5/markup/meta.name.html response.meta.author = 'Your Name <you@example.com>' response.meta.description = 'a cool new app' response.meta.keywords = 'web2py, python, framework' response.meta.generator = 'Web2py Web Framework' ## your http://google.com/analytics id response.google_analytics_id = None ######################################################################### ## this is the main application menu add/remove items as required ######################################################################### response.menu = [ (T('Home'), False, URL('default', 'index'), []) ] DEVELOPMENT_MENU = True ######################################################################### ## provide shortcuts for development. remove in production ######################################################################### def _(): # shortcuts app = request.application ctr = request.controller # useful links to internal and external resources response.menu += [ (SPAN('web2py', _class='highlighted'), False, 'http://web2py.com', [ (T('My Sites'), False, URL('admin', 'default', 'site')), (T('This App'), False, URL('admin', 'default', 'design/%s' % app), [ (T('Controller'), False, URL( 'admin', 'default', 'edit/%s/controllers/%s.py' % (app, ctr))), (T('View'), False, URL( 'admin', 'default', 'edit/%s/views/%s' % (app, response.view))), (T('Layout'), False, URL( 'admin', 'default', 'edit/%s/views/layout.html' % app)), (T('Stylesheet'), False, URL( 'admin', 'default', 'edit/%s/static/css/web2py.css' % app)), (T('DB Model'), False, URL( 'admin', 'default', 'edit/%s/models/db.py' % app)), (T('Menu Model'), False, URL( 'admin', 'default', 'edit/%s/models/menu.py' % app)), (T('Database'), False, URL(app, 'appadmin', 'index')), (T('Errors'), False, URL( 'admin', 'default', 'errors/' + app)), (T('About'), False, URL( 'admin', 'default', 'about/' + app)), ]), ('web2py.com', False, 'http://www.web2py.com', [ (T('Download'), False, 'http://www.web2py.com/examples/default/download'), (T('Support'), False, 'http://www.web2py.com/examples/default/support'), (T('Demo'), False, 'http://web2py.com/demo_admin'), (T('Quick Examples'), False, 'http://web2py.com/examples/default/examples'), (T('FAQ'), False, 'http://web2py.com/AlterEgo'), (T('Videos'), False, 'http://www.web2py.com/examples/default/videos/'), (T('Free Applications'), False, 'http://web2py.com/appliances'), (T('Plugins'), False, 'http://web2py.com/plugins'), (T('Layouts'), False, 'http://web2py.com/layouts'), (T('Recipes'), False, 'http://web2pyslices.com/'), (T('Semantic'), False, 'http://web2py.com/semantic'), ]), (T('Documentation'), False, 'http://www.web2py.com/book', [ (T('Preface'), False, 'http://www.web2py.com/book/default/chapter/00'), (T('Introduction'), False, 'http://www.web2py.com/book/default/chapter/01'), (T('Python'), False, 'http://www.web2py.com/book/default/chapter/02'), (T('Overview'), False, 'http://www.web2py.com/book/default/chapter/03'), (T('The Core'), False, 'http://www.web2py.com/book/default/chapter/04'), (T('The 
Views'), False, 'http://www.web2py.com/book/default/chapter/05'), (T('Database'), False, 'http://www.web2py.com/book/default/chapter/06'), (T('Forms and Validators'), False, 'http://www.web2py.com/book/default/chapter/07'), (T('Email and SMS'), False, 'http://www.web2py.com/book/default/chapter/08'), (T('Access Control'), False, 'http://www.web2py.com/book/default/chapter/09'), (T('Services'), False, 'http://www.web2py.com/book/default/chapter/10'), (T('Ajax Recipes'), False, 'http://www.web2py.com/book/default/chapter/11'), (T('Components and Plugins'), False, 'http://www.web2py.com/book/default/chapter/12'), (T('Deployment Recipes'), False, 'http://www.web2py.com/book/default/chapter/13'), (T('Other Recipes'), False, 'http://www.web2py.com/book/default/chapter/14'), (T('Buy this book'), False, 'http://stores.lulu.com/web2py'), ]), (T('Community'), False, None, [ (T('Groups'), False, 'http://www.web2py.com/examples/default/usergroups'), (T('Twitter'), False, 'http://twitter.com/web2py'), (T('Live Chat'), False, 'http://webchat.freenode.net/?channels=web2py'), ]), (T('Plugins'), False, None, [ ('plugin_wiki', False, 'http://web2py.com/examples/default/download'), (T('Other Plugins'), False, 'http://web2py.com/plugins'), (T('Layout Plugins'), False, 'http://web2py.com/layouts'), ]) ] )] if DEVELOPMENT_MENU: _() if "auth" in locals(): auth.wikimenu()
[ [ [ 1147, 1163 ], [ 6186, 6202 ] ], [ [ 1384, 1385 ], [ 6204, 6205 ] ] ]
# -*- coding: utf-8 -*-
import email
import json
import logging
import smtplib
from email.header import Header
from email.mime.multipart import MIMEMultipart
from email.mime.text import MIMEText

from zvt import zvt_config
from zvt.networking.request import get_http_session, sync_get, sync_post


class Informer(object):
    logger = logging.getLogger(__name__)

    def send_message(self, to_user, title, body, **kwargs):
        pass


class EmailInformer(Informer):
    def __init__(self, ssl=True) -> None:
        super().__init__()
        self.ssl = ssl

    def send_message_(self, to_user, title, body, **kwargs):
        host = zvt_config['smtp_host']
        port = zvt_config['smtp_port']
        if self.ssl:
            try:
                smtp_client = smtplib.SMTP_SSL(host=host, port=port)
            except:
                smtp_client = smtplib.SMTP_SSL()
        else:
            try:
                smtp_client = smtplib.SMTP(host=host, port=port)
            except:
                smtp_client = smtplib.SMTP()

        smtp_client.connect(host=host, port=port)
        smtp_client.login(zvt_config['email_username'], zvt_config['email_password'])

        msg = MIMEMultipart('alternative')
        msg['Subject'] = Header(title).encode()
        msg['From'] = "{} <{}>".format(Header('zvt').encode(), zvt_config['email_username'])
        if type(to_user) is list:
            msg['To'] = ", ".join(to_user)
        else:
            msg['To'] = to_user
        msg['Message-id'] = email.utils.make_msgid()
        msg['Date'] = email.utils.formatdate()

        plain_text = MIMEText(body, _subtype='plain', _charset='UTF-8')
        msg.attach(plain_text)
        try:
            smtp_client.sendmail(zvt_config['email_username'], to_user, msg.as_string())
        except Exception as e:
            self.logger.exception('send email failed: %s', e)

    def send_message(self, to_user, title, body, sub_size=20, with_sender=True, **kwargs):
        if type(to_user) is list and sub_size:
            size = len(to_user)
            if size >= sub_size:
                step_size = int(size / sub_size)
                if size % sub_size:
                    step_size = step_size + 1
            else:
                step_size = 1

            for step in range(step_size):
                sub_to_user = to_user[sub_size * step:sub_size * (step + 1)]
                if with_sender:
                    sub_to_user.append(zvt_config['email_username'])
                self.send_message_(sub_to_user, title, body, **kwargs)
        else:
            self.send_message_(to_user, title, body, **kwargs)


class WechatInformer(Informer):
    GET_TOKEN_URL = "https://api.weixin.qq.com/cgi-bin/token?grant_type=client_credential&appid={}&secret={}".format(
        zvt_config['wechat_app_id'], zvt_config['wechat_app_secrect'])

    GET_TEMPLATE_URL = "https://api.weixin.qq.com/cgi-bin/template/get_all_private_template?access_token={}"
    SEND_MSG_URL = "https://api.weixin.qq.com/cgi-bin/message/template/send?access_token={}"

    token = None

    def __init__(self, http_session) -> None:
        self.refresh_token(http_session)

    def refresh_token(self, http_session):
        resp = sync_get(http_session, self.GET_TOKEN_URL)

        self.logger.info("refresh_token resp.status_code:{}, resp.text:{}".format(resp.status_code, resp.text))

        if resp.status_code == 200 and resp.json() and 'access_token' in resp.json():
            self.token = resp.json()['access_token']
        else:
            self.logger.error("could not refresh_token")

    def send_price_notification(self, http_session, to_user, security_name, current_price, change_pct):
        the_json = self._format_price_notification(to_user, security_name, current_price, change_pct)
        the_data = json.dumps(the_json, ensure_ascii=False).encode('utf-8')

        json_result = sync_post(http_session, self.SEND_MSG_URL.format(self.token), json=the_data)

        if json_result is not None:
            self.logger.info("send_price_notification to user:{} data:{} success".format(to_user, the_json))

    def _format_price_notification(self, to_user, security_name, current_price, change_pct):
        if change_pct > 0:
            title = 'Time to feast'  # original slang: '吃肉喝汤' ("meat and soup", i.e. we made money)
        else:
            title = 'Lights off, instant noodles'  # original slang: '关灯吃面' (i.e. we lost money)

        # Use one fixed template for now
        # {
        #     "template_id": "mkqi-L1h56mH637vLXiuS_ulLTs1byDYYgLBbSXQ65U",
        #     "title": "price move alert",
        #     "primary_industry": "finance",
        #     "deputy_industry": "securities|funds|wealth management|trusts",
        #     "content": "{{first.DATA}}\nstock:{{keyword1.DATA}}\nlatest price:{{keyword2.DATA}}\nchange:{{keyword3.DATA}}\n{{remark.DATA}}",
        #     "example": "Hello, Tencent Holdings' latest price is 130.50 CNY, up by your configured 3.2%\r\nstock: Tencent Holdings (00700)\r\nlatest price: 130.50 CNY\r\nchange: +3.2%\r\nTap for live quotes."
        # }
        template_id = 'mkqi-L1h56mH637vLXiuS_ulLTs1byDYYgLBbSXQ65U'

        the_json = {
            "touser": to_user,
            "template_id": template_id,
            "url": "http://www.foolcage.com",
            "data": {
                "first": {
                    "value": title,
                    "color": "#173177"
                },
                "keyword1": {
                    "value": security_name,
                    "color": "#173177"
                },
                "keyword2": {
                    "value": current_price,
                    "color": "#173177"
                },
                "keyword3": {
                    "value": '{:.2%}'.format(change_pct),
                    "color": "#173177"
                },
                "remark": {
                    # original slang: '会所嫩模 Or 下海干活?' ("models at the club, or back to the grind?")
                    "value": "Pop the champagne, or back to the grind?",
                    "color": "#173177"
                }
            }
        }

        return the_json


if __name__ == '__main__':
    email_action = EmailInformer()
    email_action.send_message(["5533061@qq.com", '2315983623@qq.com'], 'hello', 'just a test', sub_size=20)

    http_session = get_http_session()
    weixin_action = WechatInformer(http_session)
    # change_pct must be numeric: it is compared with 0 and formatted via '{:.2%}'
    weixin_action.send_price_notification(http_session, to_user='oRvNP0XIb9G3g6a-2fAX9RHX5--Q',
                                           security_name='BTC/USDT', current_price=1000, change_pct=0.005)
# the __all__ is generated
__all__ = ['Informer', 'EmailInformer', 'WechatInformer']
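# A standalone check of the recipient batching in EmailInformer.send_message:
# recipients go out in chunks of `sub_size` (the real method also appends the
# sender address to each chunk when `with_sender` is set). Hypothetical helper
# for illustration only.
def chunk_recipients(to_user, sub_size=20):
    return [to_user[i:i + sub_size] for i in range(0, len(to_user), sub_size)]

assert chunk_recipients(list(range(45))) == [
    list(range(0, 20)), list(range(20, 40)), list(range(40, 45))]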
[ [ [ 31, 36 ], [ 1511, 1516 ], [ 1558, 1563 ] ], [ [ 44, 48 ], [ 3818, 3822 ] ], [ [ 56, 63 ], [ 335, 342 ] ], [ [ 71, 78 ], [ 770, 777 ], [ 859, 866 ], [ 939, 946 ], [ 1024, 1031 ] ], [ [ 104, 110 ], [ 1244, 1250 ], [ 1306, 1312 ] ], [ [ 144, 157 ], [ 1190, 1203 ] ], [ [ 186, 194 ], [ 1605, 1613 ] ], [ [ 212, 222 ], [ 2791, 2801 ], [ 2820, 2830 ], [ 639, 649 ], [ 678, 688 ], [ 1116, 1126 ], [ 1146, 1156 ], [ 1330, 1340 ], [ 1734, 1744 ], [ 2453, 2463 ] ], [ [ 258, 274 ], [ 5935, 5951 ] ], [ [ 276, 284 ], [ 3222, 3230 ] ], [ [ 286, 295 ], [ 3898, 3907 ] ], [ [ 304, 312 ], [ 459, 467 ], [ 2654, 2662 ] ], [ [ 445, 458 ], [ 5792, 5805 ] ], [ [ 2639, 2653 ], [ 5974, 5988 ] ], [ [ 5777, 5789 ], [ 5812, 5824 ] ], [ [ 5920, 5932 ], [ 5989, 6001 ], [ 6045, 6057 ] ], [ [ 5958, 5971 ], [ 6007, 6020 ] ], [ [ 6361, 6368 ] ] ]
import numpy as np
from scipy import optimize

# Vectorized Newton iteration: solve x**3 = a for 100 right-hand sides at
# once. scipy broadcasts over the array of initial guesses, so each x[i]
# converges to the real cube root of a[i].
def f(x, a):
    return x**3 - a

def fder(x, a):
    return 3 * x**2

rng = np.random.default_rng()
x = rng.standard_normal(100)  # one random initial guess per equation
a = np.arange(-50, 50)         # the 100 right-hand sides
vec_res = optimize.newton(f, x, fprime=fder, args=(a, ), maxiter=200)
print(vec_res)  # approximately np.cbrt(a)
[ [ [ 7, 18 ], [ 117, 119 ], [ 174, 176 ] ], [ [ 37, 45 ], [ 203, 211 ] ], [ [ 50, 51 ], [ 219, 220 ] ], [ [ 81, 85 ], [ 232, 236 ] ], [ [ 111, 114 ], [ 145, 148 ] ], [ [ 141, 142 ], [ 222, 223 ] ], [ [ 170, 171 ], [ 244, 245 ] ], [ [ 193, 200 ], [ 269, 276 ] ] ]
#!/usr/bin/env python2.7 # Copyright 2015 gRPC authors. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import hashlib import itertools import collections import os import sys import subprocess import re import perfection # Configuration: a list of either strings or 2-tuples of strings. # A single string represents a static grpc_mdstr. # A 2-tuple represents a static grpc_mdelem (and appropriate grpc_mdstrs will # also be created). # The list of 2-tuples must begin with the static hpack table elements as # defined by RFC 7541 and be in the same order because of an hpack encoding # performance optimization that relies on this. If you want to change this, then # you must change the implementation of the encoding optimization as well. CONFIG = [ # metadata strings 'host', 'grpc-timeout', 'grpc-internal-encoding-request', 'grpc-internal-stream-encoding-request', 'grpc-payload-bin', ':path', 'grpc-encoding', 'grpc-accept-encoding', 'user-agent', ':authority', 'grpc-message', 'grpc-status', 'grpc-server-stats-bin', 'grpc-tags-bin', 'grpc-trace-bin', 'grpc-previous-rpc-attempts', 'grpc-retry-pushback-ms', '1', '2', '3', '4', '', # channel arg keys 'grpc.wait_for_ready', 'grpc.timeout', 'grpc.max_request_message_bytes', 'grpc.max_response_message_bytes', # well known method names '/grpc.lb.v1.LoadBalancer/BalanceLoad', # compression algorithm names 'deflate', 'gzip', 'stream/gzip', # metadata elements # begin hpack static elements (':authority', ''), (':method', 'GET'), (':method', 'POST'), (':path', '/'), (':path', '/index.html'), (':scheme', 'http'), (':scheme', 'https'), (':status', '200'), (':status', '204'), (':status', '206'), (':status', '304'), (':status', '400'), (':status', '404'), (':status', '500'), ('accept-charset', ''), ('accept-encoding', 'gzip, deflate'), ('accept-language', ''), ('accept-ranges', ''), ('accept', ''), ('access-control-allow-origin', ''), ('age', ''), ('allow', ''), ('authorization', ''), ('cache-control', ''), ('content-disposition', ''), ('content-encoding', ''), ('content-language', ''), ('content-length', ''), ('content-location', ''), ('content-range', ''), ('content-type', ''), ('cookie', ''), ('date', ''), ('etag', ''), ('expect', ''), ('expires', ''), ('from', ''), ('host', ''), ('if-match', ''), ('if-modified-since', ''), ('if-none-match', ''), ('if-range', ''), ('if-unmodified-since', ''), ('last-modified', ''), ('link', ''), ('location', ''), ('max-forwards', ''), ('proxy-authenticate', ''), ('proxy-authorization', ''), ('range', ''), ('referer', ''), ('refresh', ''), ('retry-after', ''), ('server', ''), ('set-cookie', ''), ('strict-transport-security', ''), ('transfer-encoding', ''), ('user-agent', ''), ('vary', ''), ('via', ''), ('www-authenticate', ''), # end hpack static elements ('grpc-status', '0'), ('grpc-status', '1'), ('grpc-status', '2'), ('grpc-encoding', 'identity'), ('grpc-encoding', 'gzip'), ('grpc-encoding', 'deflate'), ('te', 'trailers'), ('content-type', 'application/grpc'), (':scheme', 'grpc'), (':method', 'PUT'), ('accept-encoding', ''), 
('content-encoding', 'identity'), ('content-encoding', 'gzip'), ('lb-token', ''), ('lb-cost-bin', ''), ] # All entries here are ignored when counting non-default initial metadata that # prevents the chttp2 server from sending a Trailers-Only response. METADATA_BATCH_CALLOUTS = [ # (name) (':path'), (':method'), (':status'), (':authority'), (':scheme'), ('te'), ('grpc-message'), ('grpc-status'), ('grpc-payload-bin'), ('grpc-encoding'), ('grpc-accept-encoding'), ('grpc-server-stats-bin'), ('grpc-tags-bin'), ('grpc-trace-bin'), ('content-type'), ('content-encoding'), ('accept-encoding'), ('grpc-internal-encoding-request'), ('grpc-internal-stream-encoding-request'), ('user-agent'), ('host'), ('lb-token'), ('grpc-previous-rpc-attempts'), ('grpc-retry-pushback-ms'), ] COMPRESSION_ALGORITHMS = [ 'identity', 'deflate', 'gzip', ] STREAM_COMPRESSION_ALGORITHMS = [ 'identity', 'gzip', ] # utility: mangle the name of a config def mangle(elem, name=None): xl = { '-': '_', ':': '', '/': 'slash', '.': 'dot', ',': 'comma', ' ': '_', } def m0(x): if not x: return 'empty' r = '' for c in x: put = xl.get(c, c.lower()) if not put: continue last_is_underscore = r[-1] == '_' if r else True if last_is_underscore and put == '_': continue elif len(put) > 1: if not last_is_underscore: r += '_' r += put r += '_' else: r += put if r[-1] == '_': r = r[:-1] return r def n(default, name=name): if name is None: return 'grpc_%s_' % default if name == '': return '' return 'grpc_%s_' % name if isinstance(elem, tuple): return '%s%s_%s' % (n('mdelem'), m0(elem[0]), m0(elem[1])) else: return '%s%s' % (n('mdstr'), m0(elem)) # utility: generate some hash value for a string def fake_hash(elem): return hashlib.md5(elem).hexdigest()[0:8] # utility: print a big comment block into a set of files def put_banner(files, banner): for f in files: print >> f, '/*' for line in banner: print >> f, ' * %s' % line print >> f, ' */' print >> f # build a list of all the strings we need all_strs = list() all_elems = list() static_userdata = {} # put metadata batch callouts first, to make the check of if a static metadata # string is a callout trivial for elem in METADATA_BATCH_CALLOUTS: if elem not in all_strs: all_strs.append(elem) for elem in CONFIG: if isinstance(elem, tuple): if elem[0] not in all_strs: all_strs.append(elem[0]) if elem[1] not in all_strs: all_strs.append(elem[1]) if elem not in all_elems: all_elems.append(elem) else: if elem not in all_strs: all_strs.append(elem) compression_elems = [] for mask in range(1, 1 << len(COMPRESSION_ALGORITHMS)): val = ','.join(COMPRESSION_ALGORITHMS[alg] for alg in range(0, len(COMPRESSION_ALGORITHMS)) if (1 << alg) & mask) elem = ('grpc-accept-encoding', val) if val not in all_strs: all_strs.append(val) if elem not in all_elems: all_elems.append(elem) compression_elems.append(elem) static_userdata[elem] = 1 + (mask | 1) stream_compression_elems = [] for mask in range(1, 1 << len(STREAM_COMPRESSION_ALGORITHMS)): val = ','.join(STREAM_COMPRESSION_ALGORITHMS[alg] for alg in range(0, len(STREAM_COMPRESSION_ALGORITHMS)) if (1 << alg) & mask) elem = ('accept-encoding', val) if val not in all_strs: all_strs.append(val) if elem not in all_elems: all_elems.append(elem) stream_compression_elems.append(elem) static_userdata[elem] = 1 + (mask | 1) # output configuration args = sys.argv[1:] H = None C = None D = None if args: if 'header' in args: H = sys.stdout else: H = open('/dev/null', 'w') if 'source' in args: C = sys.stdout else: C = open('/dev/null', 'w') if 'dictionary' in args: D = sys.stdout else: D 
= open('/dev/null', 'w') else: H = open( os.path.join( os.path.dirname(sys.argv[0]), '../../../src/core/lib/transport/static_metadata.h'), 'w') C = open( os.path.join( os.path.dirname(sys.argv[0]), '../../../src/core/lib/transport/static_metadata.cc'), 'w') D = open( os.path.join( os.path.dirname(sys.argv[0]), '../../../test/core/end2end/fuzzers/hpack.dictionary'), 'w') # copy-paste copyright notice from this file with open(sys.argv[0]) as my_source: copyright = [] for line in my_source: if line[0] != '#': break for line in my_source: if line[0] == '#': copyright.append(line) break for line in my_source: if line[0] != '#': break copyright.append(line) put_banner([H, C], [line[2:].rstrip() for line in copyright]) hex_bytes = [ord(c) for c in 'abcdefABCDEF0123456789'] def esc_dict(line): out = "\"" for c in line: if 32 <= c < 127: if c != ord('"'): out += chr(c) else: out += "\\\"" else: out += '\\x%02X' % c return out + "\"" put_banner([H, C], """WARNING: Auto-generated code. To make changes to this file, change tools/codegen/core/gen_static_metadata.py, and then re-run it. See metadata.h for an explanation of the interface here, and metadata.cc for an explanation of what's going on. """.splitlines()) print >> H, '#ifndef GRPC_CORE_LIB_TRANSPORT_STATIC_METADATA_H' print >> H, '#define GRPC_CORE_LIB_TRANSPORT_STATIC_METADATA_H' print >> H print >> H, '#include <grpc/support/port_platform.h>' print >> H print >> H, '#include "src/core/lib/transport/metadata.h"' print >> H print >> C, '#include <grpc/support/port_platform.h>' print >> C print >> C, '#include "src/core/lib/transport/static_metadata.h"' print >> C print >> C, '#include "src/core/lib/slice/slice_internal.h"' print >> C str_ofs = 0 id2strofs = {} for i, elem in enumerate(all_strs): id2strofs[i] = str_ofs str_ofs += len(elem) def slice_def(i): return ('{&grpc_static_metadata_refcounts[%d],' ' {{g_bytes+%d, %d}}}') % (i, id2strofs[i], len(all_strs[i])) # validate configuration for elem in METADATA_BATCH_CALLOUTS: assert elem in all_strs print >> H, '#define GRPC_STATIC_MDSTR_COUNT %d' % len(all_strs) print >> H, ('extern const grpc_slice ' 'grpc_static_slice_table[GRPC_STATIC_MDSTR_COUNT];') for i, elem in enumerate(all_strs): print >> H, '/* "%s" */' % elem print >> H, '#define %s (grpc_static_slice_table[%d])' % ( mangle(elem).upper(), i) print >> H print >> C, 'static uint8_t g_bytes[] = {%s};' % (','.join( '%d' % ord(c) for c in ''.join(all_strs))) print >> C print >> C, 'static void static_ref(void *unused) {}' print >> C, 'static void static_unref(void *unused) {}' print >> C, ('static const grpc_slice_refcount_vtable static_sub_vtable = ' '{static_ref, static_unref, grpc_slice_default_eq_impl, ' 'grpc_slice_default_hash_impl};') print >> H, ('extern const grpc_slice_refcount_vtable ' 'grpc_static_metadata_vtable;') print >> C, ('const grpc_slice_refcount_vtable grpc_static_metadata_vtable = ' '{static_ref, static_unref, grpc_static_slice_eq, ' 'grpc_static_slice_hash};') print >> C, ('static grpc_slice_refcount static_sub_refcnt = ' '{&static_sub_vtable, &static_sub_refcnt};') print >> H, ('extern grpc_slice_refcount ' 'grpc_static_metadata_refcounts[GRPC_STATIC_MDSTR_COUNT];') print >> C, ('grpc_slice_refcount ' 'grpc_static_metadata_refcounts[GRPC_STATIC_MDSTR_COUNT] = {') for i, elem in enumerate(all_strs): print >> C, ' {&grpc_static_metadata_vtable, &static_sub_refcnt},' print >> C, '};' print >> C print >> H, '#define GRPC_IS_STATIC_METADATA_STRING(slice) \\' print >> H, (' ((slice).refcount != NULL && 
(slice).refcount->vtable == ' '&grpc_static_metadata_vtable)') print >> H print >> C, ('const grpc_slice grpc_static_slice_table[GRPC_STATIC_MDSTR_COUNT]' ' = {') for i, elem in enumerate(all_strs): print >> C, slice_def(i) + ',' print >> C, '};' print >> C print >> H, '#define GRPC_STATIC_METADATA_INDEX(static_slice) \\' print >> H, (' ((int)((static_slice).refcount - ' 'grpc_static_metadata_refcounts))') print >> H print >> D, '# hpack fuzzing dictionary' for i, elem in enumerate(all_strs): print >> D, '%s' % (esc_dict([len(elem)] + [ord(c) for c in elem])) for i, elem in enumerate(all_elems): print >> D, '%s' % (esc_dict([0, len(elem[0])] + [ord(c) for c in elem[0]] + [len(elem[1])] + [ord(c) for c in elem[1]])) print >> H, '#define GRPC_STATIC_MDELEM_COUNT %d' % len(all_elems) print >> H, ('extern grpc_mdelem_data ' 'grpc_static_mdelem_table[GRPC_STATIC_MDELEM_COUNT];') print >> H, ('extern uintptr_t ' 'grpc_static_mdelem_user_data[GRPC_STATIC_MDELEM_COUNT];') for i, elem in enumerate(all_elems): print >> H, '/* "%s": "%s" */' % elem print >> H, ('#define %s (GRPC_MAKE_MDELEM(&grpc_static_mdelem_table[%d], ' 'GRPC_MDELEM_STORAGE_STATIC))') % (mangle(elem).upper(), i) print >> H print >> C, ('uintptr_t grpc_static_mdelem_user_data[GRPC_STATIC_MDELEM_COUNT] ' '= {') print >> C, ' %s' % ','.join( '%d' % static_userdata.get(elem, 0) for elem in all_elems) print >> C, '};' print >> C def str_idx(s): for i, s2 in enumerate(all_strs): if s == s2: return i def md_idx(m): for i, m2 in enumerate(all_elems): if m == m2: return i def offset_trials(mink): yield 0 for i in range(1, 100): for mul in [-1, 1]: yield mul * i def perfect_hash(keys, name): p = perfection.hash_parameters(keys) def f(i, p=p): i += p.offset x = i % p.t y = i / p.t return x + p.r[y] return { 'PHASHRANGE': p.t - 1 + max(p.r), 'PHASHNKEYS': len(p.slots), 'pyfunc': f, 'code': """ static const int8_t %(name)s_r[] = {%(r)s}; static uint32_t %(name)s_phash(uint32_t i) { i %(offset_sign)s= %(offset)d; uint32_t x = i %% %(t)d; uint32_t y = i / %(t)d; uint32_t h = x; if (y < GPR_ARRAY_SIZE(%(name)s_r)) { uint32_t delta = (uint32_t)%(name)s_r[y]; h += delta; } return h; } """ % { 'name': name, 'r': ','.join('%d' % (r if r is not None else 0) for r in p.r), 't': p.t, 'offset': abs(p.offset), 'offset_sign': '+' if p.offset > 0 else '-' } } elem_keys = [ str_idx(elem[0]) * len(all_strs) + str_idx(elem[1]) for elem in all_elems ] elem_hash = perfect_hash(elem_keys, 'elems') print >> C, elem_hash['code'] keys = [0] * int(elem_hash['PHASHRANGE']) idxs = [255] * int(elem_hash['PHASHNKEYS']) for i, k in enumerate(elem_keys): h = elem_hash['pyfunc'](k) assert keys[h] == 0 keys[h] = k idxs[h] = i print >> C, 'static const uint16_t elem_keys[] = {%s};' % ','.join( '%d' % k for k in keys) print >> C, 'static const uint8_t elem_idxs[] = {%s};' % ','.join( '%d' % i for i in idxs) print >> C print >> H, 'grpc_mdelem grpc_static_mdelem_for_static_strings(int a, int b);' print >> C, 'grpc_mdelem grpc_static_mdelem_for_static_strings(int a, int b) {' print >> C, ' if (a == -1 || b == -1) return GRPC_MDNULL;' print >> C, ' uint32_t k = (uint32_t)(a * %d + b);' % len(all_strs) print >> C, ' uint32_t h = elems_phash(k);' print >> C, ' return h < GPR_ARRAY_SIZE(elem_keys) && elem_keys[h] == k && elem_idxs[h] != 255 ? 
GRPC_MAKE_MDELEM(&grpc_static_mdelem_table[elem_idxs[h]], GRPC_MDELEM_STORAGE_STATIC) : GRPC_MDNULL;' print >> C, '}' print >> C print >> C, 'grpc_mdelem_data grpc_static_mdelem_table[GRPC_STATIC_MDELEM_COUNT] = {' for a, b in all_elems: print >> C, '{%s,%s},' % (slice_def(str_idx(a)), slice_def(str_idx(b))) print >> C, '};' print >> H, 'typedef enum {' for elem in METADATA_BATCH_CALLOUTS: print >> H, ' %s,' % mangle(elem, 'batch').upper() print >> H, ' GRPC_BATCH_CALLOUTS_COUNT' print >> H, '} grpc_metadata_batch_callouts_index;' print >> H print >> H, 'typedef union {' print >> H, ' struct grpc_linked_mdelem *array[GRPC_BATCH_CALLOUTS_COUNT];' print >> H, ' struct {' for elem in METADATA_BATCH_CALLOUTS: print >> H, ' struct grpc_linked_mdelem *%s;' % mangle(elem, '').lower() print >> H, ' } named;' print >> H, '} grpc_metadata_batch_callouts;' print >> H print >> H, '#define GRPC_BATCH_INDEX_OF(slice) \\' print >> H, ' (GRPC_IS_STATIC_METADATA_STRING((slice)) ? (grpc_metadata_batch_callouts_index)GPR_CLAMP(GRPC_STATIC_METADATA_INDEX((slice)), 0, GRPC_BATCH_CALLOUTS_COUNT) : GRPC_BATCH_CALLOUTS_COUNT)' print >> H print >> H, 'extern const uint8_t grpc_static_accept_encoding_metadata[%d];' % ( 1 << len(COMPRESSION_ALGORITHMS)) print >> C, 'const uint8_t grpc_static_accept_encoding_metadata[%d] = {' % ( 1 << len(COMPRESSION_ALGORITHMS)) print >> C, '0,%s' % ','.join('%d' % md_idx(elem) for elem in compression_elems) print >> C, '};' print >> C print >> H, '#define GRPC_MDELEM_ACCEPT_ENCODING_FOR_ALGORITHMS(algs) (GRPC_MAKE_MDELEM(&grpc_static_mdelem_table[grpc_static_accept_encoding_metadata[(algs)]], GRPC_MDELEM_STORAGE_STATIC))' print >> H print >> H, 'extern const uint8_t grpc_static_accept_stream_encoding_metadata[%d];' % ( 1 << len(STREAM_COMPRESSION_ALGORITHMS)) print >> C, 'const uint8_t grpc_static_accept_stream_encoding_metadata[%d] = {' % ( 1 << len(STREAM_COMPRESSION_ALGORITHMS)) print >> C, '0,%s' % ','.join( '%d' % md_idx(elem) for elem in stream_compression_elems) print >> C, '};' print >> H, '#define GRPC_MDELEM_ACCEPT_STREAM_ENCODING_FOR_ALGORITHMS(algs) (GRPC_MAKE_MDELEM(&grpc_static_mdelem_table[grpc_static_accept_stream_encoding_metadata[(algs)]], GRPC_MDELEM_STORAGE_STATIC))' print >> H, '#endif /* GRPC_CORE_LIB_TRANSPORT_STATIC_METADATA_H */' H.close() C.close()
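# Standalone sketch of the perfect-hash lookup that perfect_hash() above
# compiles into C, mirroring its `pyfunc` in Python 3 (the generator itself is
# Python 2, hence its `/` where `//` is needed). Assumes the `perfection`
# package behaves as the generator relies on; the keys below are arbitrary.
import perfection

keys = [3, 7, 12, 18, 25]
p = perfection.hash_parameters(keys)

def phash(i):
    i += p.offset
    x = i % p.t
    y = i // p.t
    # Unused rows of p.r may be None; the generated C also guards the bound.
    return x + ((p.r[y] or 0) if y < len(p.r) else 0)

# A perfect hash: every key lands in a distinct slot.
assert len({phash(k) for k in keys}) == len(keys)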
[ [ [ 611, 618 ], [ 6213, 6220 ] ], [ [ 626, 635 ] ], [ [ 643, 654 ] ], [ [ 662, 664 ], [ 8511, 8513 ], [ 8537, 8539 ], [ 8660, 8662 ], [ 8686, 8688 ], [ 8810, 8812 ], [ 8836, 8838 ] ], [ [ 672, 675 ], [ 8151, 8154 ], [ 8237, 8240 ], [ 8330, 8333 ], [ 8427, 8430 ], [ 8553, 8556 ], [ 8702, 8705 ], [ 8852, 8855 ], [ 8995, 8998 ] ], [ [ 683, 693 ] ], [ [ 701, 703 ] ], [ [ 711, 721 ], [ 14403, 14413 ] ], [ [ 1244, 1250 ], [ 6814, 6820 ] ], [ [ 4254, 4277 ], [ 6718, 6741 ], [ 10780, 10803 ], [ 16622, 16645 ], [ 16952, 16975 ] ], [ [ 4876, 4898 ], [ 7199, 7221 ], [ 7315, 7337 ], [ 7244, 7266 ], [ 17496, 17518 ], [ 17611, 17633 ] ], [ [ 4949, 4978 ], [ 7678, 7707 ], [ 7808, 7837 ], [ 7730, 7759 ], [ 18050, 18079 ], [ 18179, 18208 ] ], [ [ 5058, 5064 ], [ 11148, 11154 ], [ 13788, 13794 ], [ 16673, 16679 ], [ 17030, 17036 ] ], [ [ 6185, 6194 ] ], [ [ 6311, 6321 ], [ 9327, 9337 ], [ 9706, 9716 ] ], [ [ 6539, 6547 ], [ 6762, 6770 ], [ 6780, 6788 ], [ 6880, 6888 ], [ 6902, 6910 ], [ 6953, 6961 ], [ 6975, 6983 ], [ 7102, 7110 ], [ 7124, 7132 ], [ 7440, 7448 ], [ 7458, 7466 ], [ 7935, 7943 ], [ 7953, 7961 ], [ 10532, 10540 ], [ 10824, 10832 ], [ 10889, 10897 ], [ 11030, 11038 ], [ 11279, 11287 ], [ 12266, 12274 ], [ 12699, 12707 ], [ 13017, 13025 ], [ 15278, 15286 ], [ 16095, 16103 ], [ 10727, 10735 ], [ 14093, 14101 ] ], [ [ 6557, 6566 ], [ 7023, 7032 ], [ 7046, 7055 ], [ 7498, 7507 ], [ 7517, 7526 ], [ 7993, 8002 ], [ 8012, 8021 ], [ 13125, 13134 ], [ 13353, 13362 ], [ 13602, 13611 ], [ 14009, 14018 ], [ 15319, 15328 ], [ 16476, 16485 ], [ 14189, 14198 ] ], [ [ 6576, 6591 ], [ 7579, 7594 ], [ 8081, 8096 ], [ 13968, 13983 ] ], [ [ 6710, 6714 ], [ 6750, 6754 ], [ 6796, 6800 ] ], [ [ 6806, 6810 ], [ 6840, 6844 ], [ 6865, 6869 ], [ 6918, 6922 ], [ 6938, 6942 ], [ 6991, 6995 ], [ 7011, 7015 ], [ 7063, 7067 ], [ 7090, 7094 ], [ 7140, 7144 ] ], [ [ 7146, 7163 ], [ 7544, 7561 ], [ 17698, 17715 ] ], [ [ 7173, 7177 ], [ 7375, 7379 ], [ 7608, 7612 ] ], [ [ 7229, 7232 ], [ 7417, 7420 ], [ 7429, 7432 ], [ 7474, 7477 ] ], [ [ 7385, 7389 ], [ 7486, 7490 ], [ 7534, 7538 ], [ 7569, 7573 ], [ 7595, 7599 ] ], [ [ 7618, 7642 ], [ 8039, 8063 ], [ 18278, 18302 ] ], [ [ 7652, 7656 ], [ 7875, 7879 ], [ 8110, 8114 ] ], [ [ 7715, 7718 ], [ 7912, 7915 ], [ 7924, 7927 ], [ 7969, 7972 ] ], [ [ 7885, 7889 ], [ 7981, 7985 ], [ 8029, 8033 ], [ 8071, 8075 ], [ 8097, 8101 ] ], [ [ 8144, 8148 ], [ 8194, 8198 ], [ 8219, 8223 ], [ 8312, 8316 ], [ 8409, 8413 ] ], [ [ 8164, 8165 ] ], [ [ 8173, 8174 ] ], [ [ 8182, 8183 ] ], [ [ 8233, 8234 ], [ 9339, 9340 ], [ 9718, 9719 ], [ 10000, 10001 ], [ 10064, 10065 ], [ 10128, 10129 ], [ 10139, 10140 ], [ 10193, 10194 ], [ 10204, 10205 ], [ 10263, 10264 ], [ 10843, 10844 ], [ 10908, 10909 ], [ 11054, 11055 ], [ 11090, 11091 ], [ 11182, 11183 ], [ 11615, 11616 ], [ 12022, 12023 ], [ 12386, 12387 ], [ 12449, 12450 ], [ 12570, 12571 ], [ 12782, 12783 ], [ 12848, 12849 ], [ 12948, 12949 ], [ 13306, 13307 ], [ 13373, 13374 ], [ 13481, 13482 ], [ 13627, 13628 ], [ 13669, 13670 ], [ 13822, 13823 ], [ 15826, 15827 ], [ 16590, 16591 ], [ 16660, 16661 ], [ 16712, 16713 ], [ 16754, 16755 ], [ 16806, 16807 ], [ 16817, 16818 ], [ 16847, 16848 ], [ 16924, 16925 ], [ 16990, 16991 ], [ 17064, 17065 ], [ 17089, 17090 ], [ 17135, 17136 ], [ 17146, 17147 ], [ 17198, 17199 ], [ 17399, 17400 ], [ 17411, 17412 ], [ 17755, 17756 ], [ 17946, 17947 ], [ 17958, 17959 ], [ 18331, 18332 ], [ 18537, 18538 ], [ 18598, 18599 ] ], [ [ 8266, 8267 ], [ 9339, 9340 ], [ 9718, 9719 ], [ 10000, 10001 ], [ 10064, 10065 ], [ 10128, 10129 
], [ 10139, 10140 ], [ 10193, 10194 ], [ 10204, 10205 ], [ 10263, 10264 ], [ 10843, 10844 ], [ 10908, 10909 ], [ 11054, 11055 ], [ 11090, 11091 ], [ 11182, 11183 ], [ 11615, 11616 ], [ 12022, 12023 ], [ 12386, 12387 ], [ 12449, 12450 ], [ 12570, 12571 ], [ 12782, 12783 ], [ 12848, 12849 ], [ 12948, 12949 ], [ 13306, 13307 ], [ 13373, 13374 ], [ 13481, 13482 ], [ 13627, 13628 ], [ 13669, 13670 ], [ 13822, 13823 ], [ 15826, 15827 ], [ 16590, 16591 ], [ 16660, 16661 ], [ 16712, 16713 ], [ 16754, 16755 ], [ 16806, 16807 ], [ 16817, 16818 ], [ 16847, 16848 ], [ 16924, 16925 ], [ 16990, 16991 ], [ 17064, 17065 ], [ 17089, 17090 ], [ 17135, 17136 ], [ 17146, 17147 ], [ 17198, 17199 ], [ 17399, 17400 ], [ 17411, 17412 ], [ 17755, 17756 ], [ 17946, 17947 ], [ 17958, 17959 ], [ 18331, 18332 ], [ 18537, 18538 ], [ 18598, 18599 ] ], [ [ 8326, 8327 ], [ 9342, 9343 ], [ 9721, 9722 ], [ 10274, 10275 ], [ 10328, 10329 ], [ 10339, 10340 ], [ 10405, 10406 ], [ 10416, 10417 ], [ 10477, 10478 ], [ 11193, 11194 ], [ 11300, 11301 ], [ 11311, 11312 ], [ 11365, 11366 ], [ 11421, 11422 ], [ 11716, 11717 ], [ 11901, 11902 ], [ 12138, 12139 ], [ 12290, 12291 ], [ 12358, 12359 ], [ 12375, 12376 ], [ 12581, 12582 ], [ 12723, 12724 ], [ 12754, 12755 ], [ 12771, 12772 ], [ 13834, 13835 ], [ 13935, 13936 ], [ 14029, 14030 ], [ 14046, 14047 ], [ 15385, 15386 ], [ 15623, 15624 ], [ 15719, 15720 ], [ 15814, 15815 ], [ 15905, 15906 ], [ 15985, 15986 ], [ 16045, 16046 ], [ 16114, 16115 ], [ 16159, 16160 ], [ 16359, 16360 ], [ 16375, 16376 ], [ 16387, 16388 ], [ 16500, 16501 ], [ 16572, 16573 ], [ 17530, 17531 ], [ 17645, 17646 ], [ 17726, 17727 ], [ 17743, 17744 ], [ 18091, 18092 ], [ 18220, 18221 ], [ 18313, 18314 ], [ 18608, 18609 ] ], [ [ 8359, 8360 ], [ 9342, 9343 ], [ 9721, 9722 ], [ 10274, 10275 ], [ 10328, 10329 ], [ 10339, 10340 ], [ 10405, 10406 ], [ 10416, 10417 ], [ 10477, 10478 ], [ 11193, 11194 ], [ 11300, 11301 ], [ 11311, 11312 ], [ 11365, 11366 ], [ 11421, 11422 ], [ 11716, 11717 ], [ 11901, 11902 ], [ 12138, 12139 ], [ 12290, 12291 ], [ 12358, 12359 ], [ 12375, 12376 ], [ 12581, 12582 ], [ 12723, 12724 ], [ 12754, 12755 ], [ 12771, 12772 ], [ 13834, 13835 ], [ 13935, 13936 ], [ 14029, 14030 ], [ 14046, 14047 ], [ 15385, 15386 ], [ 15623, 15624 ], [ 15719, 15720 ], [ 15814, 15815 ], [ 15905, 15906 ], [ 15985, 15986 ], [ 16045, 16046 ], [ 16114, 16115 ], [ 16159, 16160 ], [ 16359, 16360 ], [ 16375, 16376 ], [ 16387, 16388 ], [ 16500, 16501 ], [ 16572, 16573 ], [ 17530, 17531 ], [ 17645, 17646 ], [ 17726, 17727 ], [ 17743, 17744 ], [ 18091, 18092 ], [ 18220, 18221 ], [ 18313, 18314 ], [ 18608, 18609 ] ], [ [ 8423, 8424 ], [ 12960, 12961 ], [ 13041, 13042 ], [ 13150, 13151 ] ], [ [ 8456, 8457 ], [ 12960, 12961 ], [ 13041, 13042 ], [ 13150, 13151 ] ], [ [ 8493, 8494 ], [ 9339, 9340 ], [ 9718, 9719 ], [ 10000, 10001 ], [ 10064, 10065 ], [ 10128, 10129 ], [ 10139, 10140 ], [ 10193, 10194 ], [ 10204, 10205 ], [ 10263, 10264 ], [ 10843, 10844 ], [ 10908, 10909 ], [ 11054, 11055 ], [ 11090, 11091 ], [ 11182, 11183 ], [ 11615, 11616 ], [ 12022, 12023 ], [ 12386, 12387 ], [ 12449, 12450 ], [ 12570, 12571 ], [ 12782, 12783 ], [ 12848, 12849 ], [ 12948, 12949 ], [ 13306, 13307 ], [ 13373, 13374 ], [ 13481, 13482 ], [ 13627, 13628 ], [ 13669, 13670 ], [ 13822, 13823 ], [ 15826, 15827 ], [ 16590, 16591 ], [ 16660, 16661 ], [ 16712, 16713 ], [ 16754, 16755 ], [ 16806, 16807 ], [ 16817, 16818 ], [ 16847, 16848 ], [ 16924, 16925 ], [ 16990, 16991 ], [ 17064, 17065 ], [ 17089, 17090 ], [ 17135, 17136 ], [ 17146, 17147 ], [ 
17198, 17199 ], [ 17399, 17400 ], [ 17411, 17412 ], [ 17755, 17756 ], [ 17946, 17947 ], [ 17958, 17959 ], [ 18331, 18332 ], [ 18537, 18538 ], [ 18598, 18599 ] ], [ [ 8642, 8643 ], [ 9342, 9343 ], [ 9721, 9722 ], [ 10274, 10275 ], [ 10328, 10329 ], [ 10339, 10340 ], [ 10405, 10406 ], [ 10416, 10417 ], [ 10477, 10478 ], [ 11193, 11194 ], [ 11300, 11301 ], [ 11311, 11312 ], [ 11365, 11366 ], [ 11421, 11422 ], [ 11716, 11717 ], [ 11901, 11902 ], [ 12138, 12139 ], [ 12290, 12291 ], [ 12358, 12359 ], [ 12375, 12376 ], [ 12581, 12582 ], [ 12723, 12724 ], [ 12754, 12755 ], [ 12771, 12772 ], [ 13834, 13835 ], [ 13935, 13936 ], [ 14029, 14030 ], [ 14046, 14047 ], [ 15385, 15386 ], [ 15623, 15624 ], [ 15719, 15720 ], [ 15814, 15815 ], [ 15905, 15906 ], [ 15985, 15986 ], [ 16045, 16046 ], [ 16114, 16115 ], [ 16159, 16160 ], [ 16359, 16360 ], [ 16375, 16376 ], [ 16387, 16388 ], [ 16500, 16501 ], [ 16572, 16573 ], [ 17530, 17531 ], [ 17645, 17646 ], [ 17726, 17727 ], [ 17743, 17744 ], [ 18091, 18092 ], [ 18220, 18221 ], [ 18313, 18314 ], [ 18608, 18609 ] ], [ [ 8792, 8793 ], [ 12960, 12961 ], [ 13041, 13042 ], [ 13150, 13151 ] ], [ [ 9011, 9020 ], [ 9057, 9066 ], [ 9129, 9138 ], [ 9236, 9245 ] ], [ [ 9026, 9035 ], [ 9179, 9188 ], [ 9300, 9309 ], [ 9377, 9386 ] ], [ [ 9049, 9053 ], [ 9079, 9083 ] ], [ [ 9121, 9125 ], [ 9151, 9155 ], [ 9196, 9200 ] ], [ [ 9228, 9232 ], [ 9258, 9262 ], [ 9317, 9321 ] ], [ [ 9390, 9399 ] ], [ [ 9451, 9459 ], [ 13052, 13060 ], [ 13161, 13169 ] ], [ [ 10480, 10487 ], [ 10562, 10569 ], [ 10574, 10581 ] ], [ [ 10492, 10501 ], [ 10547, 10556 ], [ 10709, 10718 ] ], [ [ 10511, 10512 ], [ 10557, 10558 ] ], [ [ 10514, 10518 ], [ 10589, 10593 ] ], [ [ 10601, 10610 ], [ 12726, 12735 ], [ 16517, 16526 ], [ 16540, 16549 ] ], [ [ 10772, 10776 ], [ 10816, 10820 ] ], [ [ 11009, 11010 ], [ 11170, 11171 ] ], [ [ 11012, 11016 ], [ 11072, 11076 ], [ 11155, 11159 ] ], [ [ 12245, 12246 ] ], [ [ 12248, 12252 ] ], [ [ 12678, 12679 ], [ 12736, 12737 ] ], [ [ 12681, 12685 ] ], [ [ 12996, 12997 ] ], [ [ 12999, 13003 ], [ 13066, 13070 ], [ 13092, 13096 ] ], [ [ 13104, 13105 ] ], [ [ 13107, 13111 ], [ 13178, 13182 ], [ 13207, 13211 ], [ 13256, 13260 ], [ 13285, 13289 ] ], [ [ 13581, 13582 ], [ 13810, 13811 ] ], [ [ 13584, 13588 ], [ 13651, 13655 ], [ 13795, 13799 ] ], [ [ 14054, 14061 ], [ 15255, 15262 ], [ 15290, 15297 ], [ 16527, 16534 ], [ 16550, 16557 ] ], [ [ 14151, 14157 ], [ 17673, 17679 ], [ 18253, 18259 ] ], [ [ 14248, 14261 ] ], [ [ 14369, 14381 ], [ 15343, 15355 ] ], [ [ 15237, 15246 ], [ 15356, 15365 ], [ 15515, 15524 ] ], [ [ 15331, 15340 ], [ 15388, 15397 ], [ 15424, 15433 ], [ 15468, 15477 ], [ 15535, 15544 ] ], [ [ 15407, 15411 ], [ 15569, 15573 ], [ 15586, 15590 ], [ 15704, 15708 ] ], [ [ 15449, 15453 ], [ 15602, 15606 ], [ 15799, 15803 ] ], [ [ 15497, 15498 ], [ 15612, 15613 ] ], [ [ 15500, 15501 ], [ 15555, 15556 ], [ 15596, 15597 ] ], [ [ 15531, 15532 ], [ 15574, 15575 ], [ 15591, 15592 ], [ 15607, 15608 ] ], [ [ 16468, 16469 ], [ 16535, 16536 ] ], [ [ 16471, 16472 ], [ 16558, 16559 ] ], [ [ 16614, 16618 ], [ 16680, 16684 ] ], [ [ 16944, 16948 ], [ 17037, 17041 ] ] ]
# imports - compatibility packages from __future__ import absolute_import # module imports from bulbea.entity import Share, Stock from bulbea.config import AppConfig from bulbea.app import app from bulbea.learn import sentiment __version__ = AppConfig.VERSION
[ [ [ 58, 73 ] ], [ [ 118, 123 ] ], [ [ 125, 130 ] ], [ [ 157, 166 ], [ 248, 257 ] ], [ [ 193, 196 ] ], [ [ 223, 232 ] ], [ [ 234, 245 ] ] ]
"""Immutable sets that support efficient merging, traversal, and membership check. """ def _empty(): """Create an empty set. Returns: set, new empty set. """ return struct(_set_items = dict()) def _is_member(s, e): """Return true if `e` is in the set `s`. Args: s: The set to inspect. e: The element to search for. Result: Bool, true if `e` is in `s`, false otherwise. """ return e in s._set_items def _insert(s, e): """Insert an element into the set. Args: s: Set to insert new element into. e: The element to insert. Result: A copy of set `s` with `s` element added. """ r = dict(s._set_items) r[e] = None return struct(_set_items = r) def _mutable_insert(s, e): """The same as `set.insert`, but modifies the first argument in place. Args: s: Set to insert new element into. e: The element to insert. Result: set `s` with `s` element added. """ s._set_items[e] = None return s def _union(s0, s1): """Return union of two sets. Args: s0: One set. s1: Another set. Result: set, union of the two sets. """ r = dict(s0._set_items) r.update(s1._set_items) return struct(_set_items = r) def _mutable_union(s0, s1): """Modify set `s0` adding elements from `s1` to it. Args: s0: One set. s1: Another set. Result: set, union of the two sets. """ s0._set_items.update(s1._set_items) return s0 def _map(s, f): """Map elements of given set using a function. Args: s: Original set. f: Function to apply to elements of the set. Result: set with elements obtained by application of function `f` to the elements of `s`. """ return struct(_set_items = { f(x): None for x in s._set_items.keys()}) def _from_list(l): """Create a set containing elements from given list. Args: l: List, source of the elements for the new set. Result: set containing elements from given list. """ return (struct(_set_items = { x: None for x in l })) def _to_list(s): """Convert set into a list of its elements. Args: s: Set to convert. Returns: List of elements of the set. """ return s._set_items.keys() def _to_depset(s): """Similar to `set.to_list`, but produces a depset. Args: s: Set to convert. Returns: Depset of elements from the set. """ return depset(_to_list(s)) set = struct( empty = _empty, is_member = _is_member, insert = _insert, mutable_insert = _mutable_insert, union = _union, mutable_union = _mutable_union, map = _map, from_list = _from_list, to_list = _to_list, to_depset = _to_depset, )
[ [ [ 92, 98 ], [ 2409, 2415 ] ], [ [ 212, 222 ], [ 2431, 2441 ] ], [ [ 442, 449 ], [ 2457, 2464 ] ], [ [ 711, 726 ], [ 2485, 2500 ] ], [ [ 979, 985 ], [ 2516, 2522 ] ], [ [ 1211, 1225 ], [ 2542, 2556 ] ], [ [ 1440, 1444 ], [ 2572, 2576 ] ], [ [ 1765, 1775 ], [ 2592, 2602 ] ], [ [ 2019, 2027 ], [ 2618, 2626 ], [ 2367, 2375 ] ], [ [ 2195, 2205 ], [ 2642, 2652 ] ], [ [ 2381, 2384 ] ] ]
import re

from . import csv

SDC_SOURCE_FILE_COLUMN = "_sdc_source_file"
SDC_SOURCE_LINENO_COLUMN = "_sdc_source_lineno"


# TODO: Add additional logging
# TODO: conn needs get_files and get_file_handle functions
def get_schema_for_table(conn, table_spec):
    files = conn.get_files(table_spec['search_prefix'], table_spec['search_pattern'])

    if not files:
        return {}

    samples = sample_files(conn, table_spec, files)

    data_schema = {
        **generate_schema(samples, table_spec),
        SDC_SOURCE_FILE_COLUMN: {'type': 'string'},
        SDC_SOURCE_LINENO_COLUMN: {'type': 'integer'},
        csv.SDC_EXTRA_COLUMN: {'type': 'array', 'items': {'type': 'string'}},
    }

    return {
        'type': 'object',
        'properties': data_schema,
    }


def sample_file(conn, table_spec, f, sample_rate, max_records):
    table_name = table_spec['table_name']
    plurality = "s" if sample_rate != 1 else ""

    samples = []

    file_handle = conn.get_file_handle(f)

    # Add file_name to opts and flag infer_compression to support gzipped files
    opts = {'key_properties': table_spec['key_properties'],
            'delimiter': table_spec['delimiter'],
            'encoding': table_spec.get('encoding', 'utf-8'),
            'file_name': f['filepath']}

    readers = csv.get_row_iterators(file_handle, options=opts, infer_compression=True)

    for reader in readers:
        current_row = 0
        for row in reader:
            if (current_row % sample_rate) == 0:
                if row.get(csv.SDC_EXTRA_COLUMN):
                    row.pop(csv.SDC_EXTRA_COLUMN)
                samples.append(row)

            current_row += 1

            if len(samples) >= max_records:
                break

    # Empty sample to show field selection, if needed
    empty_file = False
    if len(samples) == 0:
        empty_file = True
        # Assumes all reader objects in readers have the same fieldnames
        samples.append({name: None for name in reader.fieldnames})

    return (empty_file, samples)


# pylint: disable=too-many-arguments
def sample_files(conn, table_spec, files, sample_rate=1, max_records=1000, max_files=5):
    to_return = []
    empty_samples = []

    files_so_far = 0

    sorted_files = sorted(files, key=lambda f: f['last_modified'], reverse=True)

    for f in sorted_files:
        empty_file, samples = sample_file(conn, table_spec, f, sample_rate, max_records)

        if empty_file:
            empty_samples += samples
        else:
            to_return += samples

        files_so_far += 1

        if files_so_far >= max_files:
            break

    if not any(to_return):
        return empty_samples

    return to_return


def infer(datum):
    """
    Returns the inferred data type
    """
    if datum is None or datum == '':
        return None

    try:
        int(datum)
        return 'integer'
    except (ValueError, TypeError):
        pass

    try:
        # numbers are not floats: parseable decimal values map to the
        # schema type 'number'
        float(datum)
        return 'number'
    except (ValueError, TypeError):
        pass

    return 'string'


def count_sample(sample, counts, table_spec):
    for key, value in sample.items():
        if key not in counts:
            counts[key] = {}

        date_overrides = table_spec.get('date_overrides', [])
        if key in date_overrides:
            datatype = "date-time"
        else:
            datatype = infer(value)

        if datatype is not None:
            counts[key][datatype] = counts[key].get(datatype, 0) + 1

    return counts


def pick_datatype(counts):
    """
    If the underlying records are ONLY of type `integer`, `number`,
    or `date-time`, then return that datatype.

    If the underlying records are of type `integer` and `number` only,
    return `number`.

    Otherwise return `string`.
    """
    to_return = 'string'

    if counts.get('date-time', 0) > 0:
        return 'date-time'

    if len(counts) == 1:
        if counts.get('integer', 0) > 0:
            to_return = 'integer'
        elif counts.get('number', 0) > 0:
            to_return = 'number'

    elif (len(counts) == 2 and
          counts.get('integer', 0) > 0 and
          counts.get('number', 0) > 0):
        to_return = 'number'

    return to_return


def generate_schema(samples, table_spec):
    counts = {}
    for sample in samples:
        # {'name': {'string': 45}}
        counts = count_sample(sample, counts, table_spec)

    for key, value in counts.items():
        datatype = pick_datatype(value)

        if datatype == 'date-time':
            counts[key] = {
                'anyOf': [
                    {'type': ['null', 'string'], 'format': 'date-time'},
                    {'type': ['null', 'string']}
                ]
            }
        else:
            types = ['null', datatype]
            if datatype != 'string':
                types.append('string')
            counts[key] = {
                'type': types,
            }

    return counts
[ [ [ 7, 9 ] ], [ [ 25, 28 ], [ 618, 621 ], [ 1297, 1300 ], [ 1525, 1528 ], [ 1576, 1579 ] ], [ [ 30, 52 ], [ 511, 533 ] ], [ [ 74, 98 ], [ 563, 587 ] ], [ [ 218, 238 ] ], [ [ 780, 791 ], [ 2383, 2394 ] ], [ [ 2077, 2089 ], [ 396, 408 ] ], [ [ 2760, 2765 ], [ 3474, 3479 ] ], [ [ 3166, 3178 ], [ 4465, 4477 ] ], [ [ 3614, 3627 ], [ 4564, 4577 ] ], [ [ 4329, 4344 ], [ 465, 480 ] ] ]
import warnings from inspect import isfunction, signature import pkg_resources import pytest from appdaemon.plugins.hass.hassapi import Hass from appdaemontestframework.common import AppdaemonTestFrameworkError class AutomationFixtureError(AppdaemonTestFrameworkError): pass def _instantiate_and_initialize_automation(function, automation_class, given_that, hass_functions, hass_mocks): _inject_helpers_and_call_function(function, given_that, hass_functions, hass_mocks) automation = automation_class( None, automation_class.__name__, None, None, None, None, None ) automation.initialize() given_that.mock_functions_are_cleared() return automation def _inject_helpers_and_call_function(function, given_that, hass_functions, hass_mocks): injectable_fixtures = { 'given_that': given_that, 'hass_functions': hass_functions, 'hass_mocks': hass_mocks, } def _check_valid(param): if param not in injectable_fixtures: raise AutomationFixtureError( f"'{param}' is not a valid fixture! | The only fixtures injectable in '@automation_fixture' are: {list(injectable_fixtures.keys())}") if param == 'hass_functions': warnings.warn( """ Injecting `hass_functions` into automation fixtures is deprecated. Replace `hass_functions` with `hass_mocks` injections and access hass_functions with `hass_mocks.hass_functions` """, DeprecationWarning) args = [] for param in signature(function).parameters: _check_valid(param) args.append(injectable_fixtures.get(param)) function(*tuple(args)) def ensure_automation_is_valid(automation_class): def function_exist_in_automation_class(func_name): return func_name in dir(automation_class) def function_has_arguments_other_than_self(func_name): func_parameters = signature(getattr(automation_class, func_name)).parameters return list(func_parameters.keys()) != ["self"] def __init___was_overridden(): return '__init__' in automation_class.__dict__ # noinspection PyPep8Naming,SpellCheckingInspection def not_subclass_of_Hass(): return not issubclass(automation_class, Hass) if not function_exist_in_automation_class('initialize'): raise AutomationFixtureError( f"'{automation_class.__name__}' has no 'initialize' function! 
Make sure you implemented it!") if function_has_arguments_other_than_self('initialize'): raise AutomationFixtureError( f"'{automation_class.__name__}' 'initialize' should have no arguments other than 'self'!") if __init___was_overridden(): raise AutomationFixtureError(f"'{automation_class.__name__}' should not override '__init__'") if not_subclass_of_Hass(): raise AutomationFixtureError(f"'{automation_class.__name__}' should be a subclass of 'Hass'") class _AutomationFixtureDecoratorWithoutArgs: def __init__(self, automation_classes): self.automation_classes = automation_classes for automation in self.automation_classes: ensure_automation_is_valid(automation) def __call__(self, function): @pytest.fixture(params=self.automation_classes, ids=self._generate_id) def automation_fixture_with_initialisation(request, given_that, hass_functions, hass_mocks): automation_class = request.param return _instantiate_and_initialize_automation(function, automation_class, given_that, hass_functions, hass_mocks) return automation_fixture_with_initialisation def _generate_id(self, automation_classes): return automation_classes.__name__ class _AutomationFixtureDecoratorWithArgs: def __init__(self, automation_classes_with_args): self.automation_classes_with_args = automation_classes_with_args for automation, _args in self.automation_classes_with_args: ensure_automation_is_valid(automation) def __call__(self, function): @pytest.fixture(params=self.automation_classes_with_args, ids=self._generate_id) def automation_fixture_with_initialisation(request, given_that, hass_functions, hass_mocks): automation_class = request.param[0] automation_args = request.param[1] automation = _instantiate_and_initialize_automation( function, automation_class, given_that, hass_functions, hass_mocks) return (automation, automation_args) return automation_fixture_with_initialisation def _generate_id(self, automation_classes_with_args): return automation_classes_with_args[0].__name__ def automation_fixture(*args): """ Decorator to seamlessly initialize and inject an automation fixture 4 Versions: - Single Class: @automation_fixture(MyAutomation) - Multiple Classes: @automation_fixture(MyAutomation, MyOtherAutomation) - Single Class w/ params: @automation_fixture((upstairs.Bedroom, {'motion': 'binary_sensor.bedroom_motion'})) - Multiple Classes w/ params: @automation_fixture( (upstairs.Bedroom, {'motion': 'binary_sensor.bedroom_motion'}), (upstairs.Bathroom, {'motion': 'binary_sensor.bathroom_motion'}), ) When multiple classes are passed, tests will be generated for each automation. When using parameters, the injected object will be a tuple: `(Initialized_Automation, params)` # Pre-initialization setup All code in the `@automation_fixture` function will be executed before initializing the `automation_class` 3 fixtures are injectable in `@automation_fixture`: 'given_that', 'hass_mocks' and 'hass_functions' 'hass_functions' is deprecated in favor of 'hass_mocks' Examples: ```python @automation_fixture(Bathroom) def bathroom(): pass # -> `Bathroom` automation will be initialized and available in tests as `bathroom` --- @automation_fixture(Bathroom) def bathroom(given_that): given_that.time_is(time(hour=13)) # -> 1. `given_that.time_is(time(hour=13))` will be called # -> 2. 
`Bathroom` automation will be initialized and available in tests as `bathroom`
    ```

    Do not return anything; any returned object will be ignored.
    """
    if not args or isfunction(args[0]):
        raise AutomationFixtureError(
            'Do not forget to pass the automation class(es) as an argument')

    if type(args[0]) is not tuple:
        automation_classes = args
        return _AutomationFixtureDecoratorWithoutArgs(automation_classes)
    else:
        automation_classes_with_args = args
        return _AutomationFixtureDecoratorWithArgs(automation_classes_with_args)
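# --- Usage sketch (editorial addition) ---
# A minimal end-to-end illustration of the decorator above. `Bathroom`, its
# entity names, and `_on_motion` are hypothetical; `given_that` and
# `assert_that` are fixtures provided by appdaemontestframework itself.
from appdaemon.plugins.hass.hassapi import Hass

class Bathroom(Hass):
    def initialize(self):
        self.listen_state(self._on_motion, 'binary_sensor.bathroom_motion', new='on')

    def _on_motion(self, entity, attribute, old, new, kwargs):
        self.turn_on('light.bathroom')

@automation_fixture(Bathroom)
def bathroom(given_that):
    # Pre-initialization setup: runs before Bathroom.initialize()
    given_that.state_of('light.bathroom').is_set_to('off')

def test_motion_turns_light_on(bathroom, assert_that):
    # Drive the callback directly with the standard appdaemon signature
    bathroom._on_motion('binary_sensor.bathroom_motion', 'state', 'off', 'on', {})
    assert_that('light.bathroom').was.turned_on()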
[ [ [ 7, 15 ], [ 1320, 1328 ] ], [ [ 36, 46 ], [ 6580, 6590 ] ], [ [ 48, 57 ], [ 1657, 1666 ], [ 2040, 2049 ] ], [ [ 65, 78 ] ], [ [ 87, 93 ], [ 3357, 3363 ], [ 4181, 4187 ] ], [ [ 137, 141 ], [ 2383, 2387 ] ], [ [ 185, 212 ], [ 244, 271 ] ], [ [ 221, 243 ], [ 2465, 2487 ], [ 2670, 2692 ], [ 2845, 2867 ], [ 2978, 3000 ], [ 6615, 6637 ], [ 1095, 1117 ] ], [ [ 289, 327 ], [ 3592, 3630 ], [ 4482, 4520 ] ], [ [ 773, 806 ], [ 401, 434 ] ], [ [ 1803, 1829 ], [ 3274, 3300 ], [ 4098, 4124 ] ], [ [ 3074, 3112 ], [ 6798, 6836 ] ], [ [ 3854, 3889 ], [ 6926, 6961 ] ], [ [ 4831, 4849 ] ] ]
""" Cisco_IOS_XE_ospf_oper This module contains a collection of YANG definitions for monitoring the operation of ospf protocol in a Network Element. Copyright (c) 2016\-2018 by Cisco Systems, Inc. All rights reserved. """ from collections import OrderedDict from ydk.types import Entity, EntityPath, Identity, Enum, YType, YLeaf, YLeafList, YList, LeafDataList, Bits, Empty, Decimal64 from ydk.filters import YFilter from ydk.errors import YError, YModelError from ydk.errors.error_handler import handle_type_error as _handle_type_error class AddressFamily(Enum): """ AddressFamily (Enum Class) Address family type .. data:: address_family_ipv4 = 0 .. data:: address_family_ipv6 = 1 """ address_family_ipv4 = Enum.YLeaf(0, "address-family-ipv4") address_family_ipv6 = Enum.YLeaf(1, "address-family-ipv6") class NbrStateType(Enum): """ NbrStateType (Enum Class) OSPF neighbor state type .. data:: ospf_nbr_down = 1 Neighbor state down .. data:: ospf_nbr_attempt = 2 Neighbor attempt state .. data:: ospf_nbr_init = 3 Neighbor init state .. data:: ospf_nbr_two_way = 4 Neighbor 2-way state .. data:: ospf_nbr_exchange_start = 5 Neighbor exchange start state .. data:: ospf_nbr_exchange = 6 Neighbor exchange state .. data:: ospf_nbr_loading = 7 Neighbor loading state .. data:: ospf_nbr_full = 8 Neighbor full state """ ospf_nbr_down = Enum.YLeaf(1, "ospf-nbr-down") ospf_nbr_attempt = Enum.YLeaf(2, "ospf-nbr-attempt") ospf_nbr_init = Enum.YLeaf(3, "ospf-nbr-init") ospf_nbr_two_way = Enum.YLeaf(4, "ospf-nbr-two-way") ospf_nbr_exchange_start = Enum.YLeaf(5, "ospf-nbr-exchange-start") ospf_nbr_exchange = Enum.YLeaf(6, "ospf-nbr-exchange") ospf_nbr_loading = Enum.YLeaf(7, "ospf-nbr-loading") ospf_nbr_full = Enum.YLeaf(8, "ospf-nbr-full") class OspfAuthType(Enum): """ OspfAuthType (Enum Class) OSPF Authentication type .. data:: ospf_auth_ipsec = 0 .. data:: ospf_auth_trailer_keychain = 1 .. data:: ospf_auth_trailer_key = 2 .. data:: ospf_auth_type_none = 3 """ ospf_auth_ipsec = Enum.YLeaf(0, "ospf-auth-ipsec") ospf_auth_trailer_keychain = Enum.YLeaf(1, "ospf-auth-trailer-keychain") ospf_auth_trailer_key = Enum.YLeaf(2, "ospf-auth-trailer-key") ospf_auth_type_none = Enum.YLeaf(3, "ospf-auth-type-none") class OspfExternalMetricType(Enum): """ OspfExternalMetricType (Enum Class) External metric type .. data:: ospf_ext_metric_type_1 = 0 .. data:: ospf_ext_metric_type_2 = 1 """ ospf_ext_metric_type_1 = Enum.YLeaf(0, "ospf-ext-metric-type-1") ospf_ext_metric_type_2 = Enum.YLeaf(1, "ospf-ext-metric-type-2") class OspfNetworkType(Enum): """ OspfNetworkType (Enum Class) OSPF network type .. data:: ospf_broadcast = 0 OSPF broadcast multi-access network .. data:: ospf_non_broadcast = 1 OSPF Non-Broadcast Multi-Access (NBMA) network .. data:: ospf_point_to_multipoint = 2 OSPF point-to-multipoint network .. data:: ospf_point_to_point = 3 OSPF point-to-point network """ ospf_broadcast = Enum.YLeaf(0, "ospf-broadcast") ospf_non_broadcast = Enum.YLeaf(1, "ospf-non-broadcast") ospf_point_to_multipoint = Enum.YLeaf(2, "ospf-point-to-multipoint") ospf_point_to_point = Enum.YLeaf(3, "ospf-point-to-point") class OspfOperationMode(Enum): """ OspfOperationMode (Enum Class) OSPF operational mode .. data:: ospf_ships_in_the_night = 0 Ships-in-the-night operation mode in which each OSPF instance carries only one address family """ ospf_ships_in_the_night = Enum.YLeaf(0, "ospf-ships-in-the-night") class Ospfv2AuthTypeSelection(Enum): """ Ospfv2AuthTypeSelection (Enum Class) The authentication type .. 
data:: ospfv2_auth_none = 0

        No authentication configured

    .. data:: ospfv2_auth_trailer_key = 1

        Authentication uses the trailer key

    .. data:: ospfv2_auth_trailer_key_chain = 2

        Authentication uses a trailer key chain

    """

    ospfv2_auth_none = Enum.YLeaf(0, "ospfv2-auth-none")

    ospfv2_auth_trailer_key = Enum.YLeaf(1, "ospfv2-auth-trailer-key")

    ospfv2_auth_trailer_key_chain = Enum.YLeaf(2, "ospfv2-auth-trailer-key-chain")


class Ospfv2CryptoAlgorithm(Enum):
    """
    Ospfv2CryptoAlgorithm (Enum Class)

    The algorithm in use

    .. data:: ospfv2_crypto_cleartest = 0

        The OSPFv2 authentication is sent as cleartext

    .. data:: ospfv2_crypto_md5 = 1

        The OSPFv2 authentication is encrypted using Message Digest 5

    """

    ospfv2_crypto_cleartest = Enum.YLeaf(0, "ospfv2-crypto-cleartest")

    ospfv2_crypto_md5 = Enum.YLeaf(1, "ospfv2-crypto-md5")


class Ospfv2IntfState(Enum):
    """
    Ospfv2IntfState (Enum Class)

    The possible states that an interface can be in

    .. data:: ospfv2_interface_state_down = 0

        The interface is in the down state

    .. data:: ospfv2_interface_state_loopback = 1

        The interface is in loopback state

    .. data:: ospfv2_interface_state_waiting = 2

        The interface is in waiting state

    .. data:: ospfv2_interface_state_point_to_mpoint = 3

        The interface is in point-to-multipoint state

    .. data:: ospfv2_interface_state_point_to_point = 4

        The interface is in point-to-point state

    .. data:: ospfv2_interface_state_dr = 5

        The interface is in the designated router state

    .. data:: ospfv2_interface_state_backup = 6

        The interface is providing backup for another interface

    .. data:: ospfv2_interface_state_other = 7

        The interface is in a state other than the ones
        enumerated in this list

    """

    ospfv2_interface_state_down = Enum.YLeaf(0, "ospfv2-interface-state-down")

    ospfv2_interface_state_loopback = Enum.YLeaf(1, "ospfv2-interface-state-loopback")

    ospfv2_interface_state_waiting = Enum.YLeaf(2, "ospfv2-interface-state-waiting")

    ospfv2_interface_state_point_to_mpoint = Enum.YLeaf(3, "ospfv2-interface-state-point-to-mpoint")

    ospfv2_interface_state_point_to_point = Enum.YLeaf(4, "ospfv2-interface-state-point-to-point")

    ospfv2_interface_state_dr = Enum.YLeaf(5, "ospfv2-interface-state-dr")

    ospfv2_interface_state_backup = Enum.YLeaf(6, "ospfv2-interface-state-backup")

    ospfv2_interface_state_other = Enum.YLeaf(7, "ospfv2-interface-state-other")


class Ospfv2LsaType(Enum):
    """
    Ospfv2LsaType (Enum Class)

    Link State Advertisement type

    .. data:: ospfv2_lsa_type_unsupported_lsa_type = 0

    .. data:: ospfv2_lsa_type_router = 1

    .. data:: ospfv2_lsa_type_network = 2

    .. data:: ospfv2_lsa_type_summary_net = 3

    .. data:: ospfv2_lsa_type_summary_router = 4

    .. data:: ospfv2_lsa_type_as_external = 5

    .. data:: ospfv2_lsa_type_nssa = 6

    .. data:: ospfv2_lsa_type_link_scope_opaque = 7

    .. data:: ospfv2_lsa_type_area_scope_opaque = 8

    .. 
data:: ospfv2_lsa_type_as_scope_opaque = 9 """ ospfv2_lsa_type_unsupported_lsa_type = Enum.YLeaf(0, "ospfv2-lsa-type-unsupported-lsa-type") ospfv2_lsa_type_router = Enum.YLeaf(1, "ospfv2-lsa-type-router") ospfv2_lsa_type_network = Enum.YLeaf(2, "ospfv2-lsa-type-network") ospfv2_lsa_type_summary_net = Enum.YLeaf(3, "ospfv2-lsa-type-summary-net") ospfv2_lsa_type_summary_router = Enum.YLeaf(4, "ospfv2-lsa-type-summary-router") ospfv2_lsa_type_as_external = Enum.YLeaf(5, "ospfv2-lsa-type-as-external") ospfv2_lsa_type_nssa = Enum.YLeaf(6, "ospfv2-lsa-type-nssa") ospfv2_lsa_type_link_scope_opaque = Enum.YLeaf(7, "ospfv2-lsa-type-link-scope-opaque") ospfv2_lsa_type_area_scope_opaque = Enum.YLeaf(8, "ospfv2-lsa-type-area-scope-opaque") ospfv2_lsa_type_as_scope_opaque = Enum.YLeaf(9, "ospfv2-lsa-type-as-scope-opaque") class OspfOperData(Entity): """ Operational state of ospf .. attribute:: ospf_state OSPF operational state **type**\: :py:class:`OspfState <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState>` **presence node**\: True **config**\: False .. attribute:: ospfv2_instance The OSPF instance **type**\: list of :py:class:`Ospfv2Instance <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.Ospfv2Instance>` **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData, self).__init__() self._top_entity = None self.yang_name = "ospf-oper-data" self.yang_parent_name = "Cisco-IOS-XE-ospf-oper" self.is_top_level_class = True self.has_list_ancestor = False self.ylist_key_names = [] self._child_classes = OrderedDict([("ospf-state", ("ospf_state", OspfOperData.OspfState)), ("ospfv2-instance", ("ospfv2_instance", OspfOperData.Ospfv2Instance))]) self._leafs = OrderedDict() self.ospf_state = None self._children_name_map["ospf_state"] = "ospf-state" self.ospfv2_instance = YList(self) self._segment_path = lambda: "Cisco-IOS-XE-ospf-oper:ospf-oper-data" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData, [], name, value) class OspfState(Entity): """ OSPF operational state .. attribute:: op_mode OSPF operation mode **type**\: :py:class:`OspfOperationMode <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperationMode>` **config**\: False .. attribute:: ospf_instance OSPF routing protocol instance **type**\: list of :py:class:`OspfInstance <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance>` **config**\: False This class is a :ref:`presence class<presence-class>` """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState, self).__init__() self.yang_name = "ospf-state" self.yang_parent_name = "ospf-oper-data" self.is_top_level_class = False self.has_list_ancestor = False self.ylist_key_names = [] self._child_classes = OrderedDict([("ospf-instance", ("ospf_instance", OspfOperData.OspfState.OspfInstance))]) self.is_presence_container = True self._leafs = OrderedDict([ ('op_mode', (YLeaf(YType.enumeration, 'op-mode'), [('ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper', 'OspfOperationMode', '')])), ]) self.op_mode = None self.ospf_instance = YList(self) self._segment_path = lambda: "ospf-state" self._absolute_path = lambda: "Cisco-IOS-XE-ospf-oper:ospf-oper-data/%s" % self._segment_path() self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState, ['op_mode'], name, value) class OspfInstance(Entity): """ OSPF routing protocol instance .. 
attribute:: af (key) Address\-family of the instance **type**\: :py:class:`AddressFamily <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.AddressFamily>` **config**\: False .. attribute:: router_id (key) Defined in RFC 2328. A 32\-bit number that uniquely identifies the router **type**\: int **range:** 0..4294967295 **config**\: False .. attribute:: ospf_area List of ospf areas **type**\: list of :py:class:`OspfArea <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.OspfArea>` **config**\: False .. attribute:: link_scope_lsas List OSPF link scope LSA **type**\: list of :py:class:`LinkScopeLsas <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.LinkScopeLsas>` **config**\: False .. attribute:: multi_topology OSPF multi\-topology interface augmentation **type**\: list of :py:class:`MultiTopology <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.MultiTopology>` **config**\: False .. attribute:: process_id The process identifier used to refer to this instance **type**\: int **range:** 0..65535 **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance, self).__init__() self.yang_name = "ospf-instance" self.yang_parent_name = "ospf-state" self.is_top_level_class = False self.has_list_ancestor = False self.ylist_key_names = ['af','router_id'] self._child_classes = OrderedDict([("ospf-area", ("ospf_area", OspfOperData.OspfState.OspfInstance.OspfArea)), ("link-scope-lsas", ("link_scope_lsas", OspfOperData.OspfState.OspfInstance.LinkScopeLsas)), ("multi-topology", ("multi_topology", OspfOperData.OspfState.OspfInstance.MultiTopology))]) self._leafs = OrderedDict([ ('af', (YLeaf(YType.enumeration, 'af'), [('ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper', 'AddressFamily', '')])), ('router_id', (YLeaf(YType.uint32, 'router-id'), ['int'])), ('process_id', (YLeaf(YType.uint16, 'process-id'), ['int'])), ]) self.af = None self.router_id = None self.process_id = None self.ospf_area = YList(self) self.link_scope_lsas = YList(self) self.multi_topology = YList(self) self._segment_path = lambda: "ospf-instance" + "[af='" + str(self.af) + "']" + "[router-id='" + str(self.router_id) + "']" self._absolute_path = lambda: "Cisco-IOS-XE-ospf-oper:ospf-oper-data/ospf-state/%s" % self._segment_path() self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance, ['af', 'router_id', 'process_id'], name, value) class OspfArea(Entity): """ List of ospf areas .. attribute:: area_id (key) OSPF area ID **type**\: int **range:** 0..4294967295 **config**\: False .. attribute:: ospf_interface List of OSPF interfaces **type**\: list of :py:class:`OspfInterface <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface>` **config**\: False .. 
attribute:: area_scope_lsa List of OSPF area scope LSA **type**\: list of :py:class:`AreaScopeLsa <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa>` **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.OspfArea, self).__init__() self.yang_name = "ospf-area" self.yang_parent_name = "ospf-instance" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = ['area_id'] self._child_classes = OrderedDict([("ospf-interface", ("ospf_interface", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface)), ("area-scope-lsa", ("area_scope_lsa", OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa))]) self._leafs = OrderedDict([ ('area_id', (YLeaf(YType.uint32, 'area-id'), ['int'])), ]) self.area_id = None self.ospf_interface = YList(self) self.area_scope_lsa = YList(self) self._segment_path = lambda: "ospf-area" + "[area-id='" + str(self.area_id) + "']" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea, ['area_id'], name, value) class OspfInterface(Entity): """ List of OSPF interfaces .. attribute:: name (key) Interface name **type**\: str **config**\: False .. attribute:: network_type Network type **type**\: :py:class:`OspfNetworkType <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfNetworkType>` **config**\: False .. attribute:: passive Enable/Disable passive **type**\: bool **config**\: False .. attribute:: demand_circuit Enable/Disable demand circuit **type**\: bool **config**\: False .. attribute:: multi_area Multi Area **type**\: :py:class:`MultiArea <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.MultiArea>` **config**\: False .. attribute:: static_neighbor Staticly configured neighbors **type**\: list of :py:class:`StaticNeighbor <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.StaticNeighbor>` **config**\: False .. attribute:: node_flag Set prefix as a node representative prefix **type**\: bool **config**\: False .. attribute:: fast_reroute Fast reroute config **type**\: :py:class:`FastReroute <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.FastReroute>` **config**\: False .. attribute:: cost Interface cost **type**\: int **range:** 0..65535 **config**\: False .. attribute:: hello_interval Time between hello packets **type**\: int **range:** 0..65535 **config**\: False .. attribute:: dead_interval Interval after which a neighbor is declared dead **type**\: int **range:** 0..65535 **config**\: False .. attribute:: retransmit_interval Time between retransmitting unacknowledged Link State Advertisements (LSAs) **type**\: int **range:** 0..65535 **config**\: False .. attribute:: transmit_delay Estimated time needed to send link\-state update **type**\: int **range:** 0..65535 **config**\: False .. attribute:: mtu_ignore Enable/Disable ignoring of MTU in DBD packets **type**\: bool **config**\: False .. attribute:: lls Enable/Disable link\-local signaling (LLS) support **type**\: bool **config**\: False .. attribute:: prefix_suppression Suppress advertisement of the prefixes **type**\: bool **config**\: False .. attribute:: bfd Enable/disable bfd **type**\: bool **config**\: False .. 
attribute:: ttl_security TTL security **type**\: :py:class:`TtlSecurity <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.TtlSecurity>` **config**\: False .. attribute:: enable Enable/disable protocol on the interface **type**\: bool **config**\: False .. attribute:: authentication Authentication configuration **type**\: :py:class:`Authentication <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.Authentication>` **config**\: False .. attribute:: state Interface state **type**\: str **config**\: False .. attribute:: hello_timer Hello timer **type**\: int **range:** 0..4294967295 **config**\: False .. attribute:: wait_timer Wait timer **type**\: int **range:** 0..4294967295 **config**\: False .. attribute:: dr Designated Router **type**\: union of the below types: **type**\: str **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)? **type**\: str **pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)? **config**\: False .. attribute:: bdr Backup Designated Router **type**\: union of the below types: **type**\: str **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)? **type**\: str **pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)? **config**\: False .. attribute:: ospf_neighbor List of OSPF neighbors **type**\: list of :py:class:`OspfNeighbor <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.OspfNeighbor>` **config**\: False .. attribute:: intf_link_scope_lsas List OSPF link scope LSAs **type**\: list of :py:class:`IntfLinkScopeLsas <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas>` **config**\: False .. attribute:: intf_multi_topology OSPF interface topology **type**\: list of :py:class:`IntfMultiTopology <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfMultiTopology>` **config**\: False .. 
attribute:: priority Configure OSPF router priority **type**\: int **range:** 0..255 **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface, self).__init__() self.yang_name = "ospf-interface" self.yang_parent_name = "ospf-area" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = ['name'] self._child_classes = OrderedDict([("multi-area", ("multi_area", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.MultiArea)), ("static-neighbor", ("static_neighbor", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.StaticNeighbor)), ("fast-reroute", ("fast_reroute", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.FastReroute)), ("ttl-security", ("ttl_security", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.TtlSecurity)), ("authentication", ("authentication", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.Authentication)), ("ospf-neighbor", ("ospf_neighbor", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.OspfNeighbor)), ("intf-link-scope-lsas", ("intf_link_scope_lsas", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas)), ("intf-multi-topology", ("intf_multi_topology", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfMultiTopology))]) self._leafs = OrderedDict([ ('name', (YLeaf(YType.str, 'name'), ['str'])), ('network_type', (YLeaf(YType.enumeration, 'network-type'), [('ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper', 'OspfNetworkType', '')])), ('passive', (YLeaf(YType.boolean, 'passive'), ['bool'])), ('demand_circuit', (YLeaf(YType.boolean, 'demand-circuit'), ['bool'])), ('node_flag', (YLeaf(YType.boolean, 'node-flag'), ['bool'])), ('cost', (YLeaf(YType.uint16, 'cost'), ['int'])), ('hello_interval', (YLeaf(YType.uint16, 'hello-interval'), ['int'])), ('dead_interval', (YLeaf(YType.uint16, 'dead-interval'), ['int'])), ('retransmit_interval', (YLeaf(YType.uint16, 'retransmit-interval'), ['int'])), ('transmit_delay', (YLeaf(YType.uint16, 'transmit-delay'), ['int'])), ('mtu_ignore', (YLeaf(YType.boolean, 'mtu-ignore'), ['bool'])), ('lls', (YLeaf(YType.boolean, 'lls'), ['bool'])), ('prefix_suppression', (YLeaf(YType.boolean, 'prefix-suppression'), ['bool'])), ('bfd', (YLeaf(YType.boolean, 'bfd'), ['bool'])), ('enable', (YLeaf(YType.boolean, 'enable'), ['bool'])), ('state', (YLeaf(YType.str, 'state'), ['str'])), ('hello_timer', (YLeaf(YType.uint32, 'hello-timer'), ['int'])), ('wait_timer', (YLeaf(YType.uint32, 'wait-timer'), ['int'])), ('dr', (YLeaf(YType.str, 'dr'), ['str','str'])), ('bdr', (YLeaf(YType.str, 'bdr'), ['str','str'])), ('priority', (YLeaf(YType.uint8, 'priority'), ['int'])), ]) self.name = None self.network_type = None self.passive = None self.demand_circuit = None self.node_flag = None self.cost = None self.hello_interval = None self.dead_interval = None self.retransmit_interval = None self.transmit_delay = None self.mtu_ignore = None self.lls = None self.prefix_suppression = None self.bfd = None self.enable = None self.state = None self.hello_timer = None self.wait_timer = None self.dr = None self.bdr = None self.priority = None self.multi_area = OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.MultiArea() self.multi_area.parent = self self._children_name_map["multi_area"] = "multi-area" self.fast_reroute = OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.FastReroute() self.fast_reroute.parent = self 
self._children_name_map["fast_reroute"] = "fast-reroute" self.ttl_security = OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.TtlSecurity() self.ttl_security.parent = self self._children_name_map["ttl_security"] = "ttl-security" self.authentication = OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.Authentication() self.authentication.parent = self self._children_name_map["authentication"] = "authentication" self.static_neighbor = YList(self) self.ospf_neighbor = YList(self) self.intf_link_scope_lsas = YList(self) self.intf_multi_topology = YList(self) self._segment_path = lambda: "ospf-interface" + "[name='" + str(self.name) + "']" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface, ['name', 'network_type', 'passive', 'demand_circuit', 'node_flag', 'cost', 'hello_interval', 'dead_interval', 'retransmit_interval', 'transmit_delay', 'mtu_ignore', 'lls', 'prefix_suppression', 'bfd', 'enable', 'state', 'hello_timer', 'wait_timer', 'dr', 'bdr', 'priority'], name, value) class MultiArea(Entity): """ Multi Area .. attribute:: multi_area_id Multi\-area ID **type**\: int **range:** 0..4294967295 **config**\: False .. attribute:: cost Interface cost for multi\-area **type**\: int **range:** 0..65535 **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.MultiArea, self).__init__() self.yang_name = "multi-area" self.yang_parent_name = "ospf-interface" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([]) self._leafs = OrderedDict([ ('multi_area_id', (YLeaf(YType.uint32, 'multi-area-id'), ['int'])), ('cost', (YLeaf(YType.uint16, 'cost'), ['int'])), ]) self.multi_area_id = None self.cost = None self._segment_path = lambda: "multi-area" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.MultiArea, ['multi_area_id', 'cost'], name, value) class StaticNeighbor(Entity): """ Staticly configured neighbors .. attribute:: address (key) Neighbor IP address **type**\: union of the below types: **type**\: str **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)? **type**\: str **pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)? **config**\: False .. attribute:: cost Neighbor cost **type**\: int **range:** 0..65535 **config**\: False .. 
attribute:: poll_interval Neighbor polling intervali in seconds **type**\: int **range:** 0..65535 **config**\: False **units**\: seconds """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.StaticNeighbor, self).__init__() self.yang_name = "static-neighbor" self.yang_parent_name = "ospf-interface" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = ['address'] self._child_classes = OrderedDict([]) self._leafs = OrderedDict([ ('address', (YLeaf(YType.str, 'address'), ['str','str'])), ('cost', (YLeaf(YType.uint16, 'cost'), ['int'])), ('poll_interval', (YLeaf(YType.uint16, 'poll-interval'), ['int'])), ]) self.address = None self.cost = None self.poll_interval = None self._segment_path = lambda: "static-neighbor" + "[address='" + str(self.address) + "']" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.StaticNeighbor, ['address', 'cost', 'poll_interval'], name, value) class FastReroute(Entity): """ Fast reroute config .. attribute:: candidate_disabled Prevent the interface to be used as backup **type**\: bool **config**\: False .. attribute:: enabled Activates LFA. This model assumes activation of per\-prefix LFA **type**\: bool **config**\: False .. attribute:: remote_lfa_enabled Activates remote LFA **type**\: bool **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.FastReroute, self).__init__() self.yang_name = "fast-reroute" self.yang_parent_name = "ospf-interface" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([]) self._leafs = OrderedDict([ ('candidate_disabled', (YLeaf(YType.boolean, 'candidate-disabled'), ['bool'])), ('enabled', (YLeaf(YType.boolean, 'enabled'), ['bool'])), ('remote_lfa_enabled', (YLeaf(YType.boolean, 'remote-lfa-enabled'), ['bool'])), ]) self.candidate_disabled = None self.enabled = None self.remote_lfa_enabled = None self._segment_path = lambda: "fast-reroute" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.FastReroute, ['candidate_disabled', 'enabled', 'remote_lfa_enabled'], name, value) class TtlSecurity(Entity): """ TTL security .. attribute:: enabled Enable/Disable TTL security check **type**\: bool **config**\: False .. attribute:: hops Maximum number of hops that a OSPF packet may have traveled **type**\: int **range:** 0..255 **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.TtlSecurity, self).__init__() self.yang_name = "ttl-security" self.yang_parent_name = "ospf-interface" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([]) self._leafs = OrderedDict([ ('enabled', (YLeaf(YType.boolean, 'enabled'), ['bool'])), ('hops', (YLeaf(YType.uint8, 'hops'), ['int'])), ]) self.enabled = None self.hops = None self._segment_path = lambda: "ttl-security" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.TtlSecurity, ['enabled', 'hops'], name, value) class Authentication(Entity): """ Authentication configuration .. 
attribute:: sa SA name **type**\: str **config**\: False .. attribute:: key_chain key\-chain name **type**\: str **config**\: False .. attribute:: key_string Key string in ASCII format **type**\: str **config**\: False .. attribute:: crypto_algorithm_val Crypto algorithm **type**\: :py:class:`CryptoAlgorithmVal <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.Authentication.CryptoAlgorithmVal>` **config**\: False .. attribute:: no_auth No authentication enabled **type**\: int **range:** 0..4294967295 **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.Authentication, self).__init__() self.yang_name = "authentication" self.yang_parent_name = "ospf-interface" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([("crypto-algorithm-val", ("crypto_algorithm_val", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.Authentication.CryptoAlgorithmVal))]) self._leafs = OrderedDict([ ('sa', (YLeaf(YType.str, 'sa'), ['str'])), ('key_chain', (YLeaf(YType.str, 'key-chain'), ['str'])), ('key_string', (YLeaf(YType.str, 'key-string'), ['str'])), ('no_auth', (YLeaf(YType.uint32, 'no-auth'), ['int'])), ]) self.sa = None self.key_chain = None self.key_string = None self.no_auth = None self.crypto_algorithm_val = OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.Authentication.CryptoAlgorithmVal() self.crypto_algorithm_val.parent = self self._children_name_map["crypto_algorithm_val"] = "crypto-algorithm-val" self._segment_path = lambda: "authentication" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.Authentication, ['sa', 'key_chain', 'key_string', 'no_auth'], name, value) class CryptoAlgorithmVal(Entity): """ Crypto algorithm .. attribute:: hmac_sha1_12 HMAC\-SHA1\-12 algorithm **type**\: :py:class:`Empty<ydk.types.Empty>` **config**\: False .. attribute:: hmac_sha1_20 HMAC\-SHA1\-20 algorithm **type**\: :py:class:`Empty<ydk.types.Empty>` **config**\: False .. attribute:: md5 MD5 algorithm **type**\: :py:class:`Empty<ydk.types.Empty>` **config**\: False .. attribute:: sha_1 SHA\-1 algorithm **type**\: :py:class:`Empty<ydk.types.Empty>` **config**\: False .. attribute:: hmac_sha_1 HMAC\-SHA\-1 authentication algorithm **type**\: :py:class:`Empty<ydk.types.Empty>` **config**\: False .. attribute:: hmac_sha_256 HMAC\-SHA\-256 authentication algorithm **type**\: :py:class:`Empty<ydk.types.Empty>` **config**\: False .. attribute:: hmac_sha_384 HMAC\-SHA\-384 authentication algorithm **type**\: :py:class:`Empty<ydk.types.Empty>` **config**\: False .. 
attribute:: hmac_sha_512 HMAC\-SHA\-512 authentication algorithm **type**\: :py:class:`Empty<ydk.types.Empty>` **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.Authentication.CryptoAlgorithmVal, self).__init__() self.yang_name = "crypto-algorithm-val" self.yang_parent_name = "authentication" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([]) self._leafs = OrderedDict([ ('hmac_sha1_12', (YLeaf(YType.empty, 'hmac-sha1-12'), ['Empty'])), ('hmac_sha1_20', (YLeaf(YType.empty, 'hmac-sha1-20'), ['Empty'])), ('md5', (YLeaf(YType.empty, 'md5'), ['Empty'])), ('sha_1', (YLeaf(YType.empty, 'sha-1'), ['Empty'])), ('hmac_sha_1', (YLeaf(YType.empty, 'hmac-sha-1'), ['Empty'])), ('hmac_sha_256', (YLeaf(YType.empty, 'hmac-sha-256'), ['Empty'])), ('hmac_sha_384', (YLeaf(YType.empty, 'hmac-sha-384'), ['Empty'])), ('hmac_sha_512', (YLeaf(YType.empty, 'hmac-sha-512'), ['Empty'])), ]) self.hmac_sha1_12 = None self.hmac_sha1_20 = None self.md5 = None self.sha_1 = None self.hmac_sha_1 = None self.hmac_sha_256 = None self.hmac_sha_384 = None self.hmac_sha_512 = None self._segment_path = lambda: "crypto-algorithm-val" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.Authentication.CryptoAlgorithmVal, ['hmac_sha1_12', 'hmac_sha1_20', 'md5', 'sha_1', 'hmac_sha_1', 'hmac_sha_256', 'hmac_sha_384', 'hmac_sha_512'], name, value) class OspfNeighbor(Entity): """ List of OSPF neighbors .. attribute:: neighbor_id (key) OSPF neighbor ID **type**\: union of the below types: **type**\: str **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)? **type**\: str **pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)? **config**\: False .. attribute:: address Neighbor address **type**\: union of the below types: **type**\: str **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)? **type**\: str **pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)? **config**\: False .. attribute:: dr Designated Router **type**\: union of the below types: **type**\: str **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)? **type**\: str **pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)? **config**\: False .. attribute:: bdr Backup Designated Router **type**\: union of the below types: **type**\: str **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)? 
**type**\: str **pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)? **config**\: False .. attribute:: state OSPF neighbor state **type**\: :py:class:`NbrStateType <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.NbrStateType>` **config**\: False .. attribute:: stats Per\-neighbor statistics **type**\: :py:class:`Stats <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.OspfNeighbor.Stats>` **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.OspfNeighbor, self).__init__() self.yang_name = "ospf-neighbor" self.yang_parent_name = "ospf-interface" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = ['neighbor_id'] self._child_classes = OrderedDict([("stats", ("stats", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.OspfNeighbor.Stats))]) self._leafs = OrderedDict([ ('neighbor_id', (YLeaf(YType.str, 'neighbor-id'), ['str','str'])), ('address', (YLeaf(YType.str, 'address'), ['str','str'])), ('dr', (YLeaf(YType.str, 'dr'), ['str','str'])), ('bdr', (YLeaf(YType.str, 'bdr'), ['str','str'])), ('state', (YLeaf(YType.enumeration, 'state'), [('ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper', 'NbrStateType', '')])), ]) self.neighbor_id = None self.address = None self.dr = None self.bdr = None self.state = None self.stats = OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.OspfNeighbor.Stats() self.stats.parent = self self._children_name_map["stats"] = "stats" self._segment_path = lambda: "ospf-neighbor" + "[neighbor-id='" + str(self.neighbor_id) + "']" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.OspfNeighbor, ['neighbor_id', 'address', 'dr', 'bdr', 'state'], name, value) class Stats(Entity): """ Per\-neighbor statistics .. attribute:: nbr_event_count The number of time this neighbor has changed state or an error has occurred **type**\: int **range:** 0..4294967295 **config**\: False .. attribute:: nbr_retrans_qlen The current length of the retransmission queue **type**\: int **range:** 0..4294967295 **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.OspfNeighbor.Stats, self).__init__() self.yang_name = "stats" self.yang_parent_name = "ospf-neighbor" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([]) self._leafs = OrderedDict([ ('nbr_event_count', (YLeaf(YType.uint32, 'nbr-event-count'), ['int'])), ('nbr_retrans_qlen', (YLeaf(YType.uint32, 'nbr-retrans-qlen'), ['int'])), ]) self.nbr_event_count = None self.nbr_retrans_qlen = None self._segment_path = lambda: "stats" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.OspfNeighbor.Stats, ['nbr_event_count', 'nbr_retrans_qlen'], name, value) class IntfLinkScopeLsas(Entity): """ List OSPF link scope LSAs .. attribute:: lsa_type (key) OSPF link scope LSA type **type**\: int **range:** 0..4294967295 **config**\: False .. 
attribute:: link_scope_lsa List of OSPF link scope LSAs **type**\: list of :py:class:`LinkScopeLsa <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa>` **config**\: False .. attribute:: area_scope_lsa List OSPF area scope LSA databases **type**\: list of :py:class:`AreaScopeLsa <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa>` **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas, self).__init__() self.yang_name = "intf-link-scope-lsas" self.yang_parent_name = "ospf-interface" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = ['lsa_type'] self._child_classes = OrderedDict([("link-scope-lsa", ("link_scope_lsa", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa)), ("area-scope-lsa", ("area_scope_lsa", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa))]) self._leafs = OrderedDict([ ('lsa_type', (YLeaf(YType.uint32, 'lsa-type'), ['int'])), ]) self.lsa_type = None self.link_scope_lsa = YList(self) self.area_scope_lsa = YList(self) self._segment_path = lambda: "intf-link-scope-lsas" + "[lsa-type='" + str(self.lsa_type) + "']" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas, ['lsa_type'], name, value) class LinkScopeLsa(Entity): """ List of OSPF link scope LSAs .. attribute:: lsa_id (key) LSA ID **type**\: int **range:** 0..4294967295 **config**\: False .. attribute:: adv_router (key) Advertising router **type**\: union of the below types: **type**\: str **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)? **type**\: str **pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)? **config**\: False .. attribute:: decoded_completed The OSPF LSA body is fully decoded **type**\: bool **config**\: False .. attribute:: raw_data The complete LSA in network byte order as received/sent over the wire **type**\: list of int **range:** 0..255 **config**\: False .. attribute:: version Version **type**\: int **range:** 0..4294967295 **config**\: False .. attribute:: ospfv2_lsa OSPFv2 LSA **type**\: :py:class:`Ospfv2Lsa <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv2Lsa>` **config**\: False .. attribute:: ospfv2_link OSPFv2 LSA link **type**\: list of :py:class:`Ospfv2Link <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv2Link>` **config**\: False .. attribute:: ospfv2_topology Summary LSA **type**\: list of :py:class:`Ospfv2Topology <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv2Topology>` **config**\: False .. 
attribute:: ospfv2_external External LSA **type**\: list of :py:class:`Ospfv2External <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv2External>` **config**\: False .. attribute:: ospfv2_unknown_tlv OSPFv2 Unknown TLV **type**\: list of :py:class:`Ospfv2UnknownTlv <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv2UnknownTlv>` **config**\: False .. attribute:: ospfv3_lsa_val OSPFv3 LSA **type**\: :py:class:`Ospfv3LsaVal <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal>` **config**\: False .. attribute:: ospfv3_link OSPFv3 links **type**\: list of :py:class:`Ospfv3Link <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3Link>` **config**\: False .. attribute:: ospfv3_prefix_list OSPFv3 prefix\-list **type**\: list of :py:class:`Ospfv3PrefixList <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3PrefixList>` **config**\: False .. attribute:: ospfv3_ia_prefix OSPFv3 intra\-area prefix\-list **type**\: list of :py:class:`Ospfv3IaPrefix <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3IaPrefix>` **config**\: False .. attribute:: multi_topology OSPF multi\-topology interface augmentation **type**\: list of :py:class:`MultiTopology <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.MultiTopology>` **config**\: False .. attribute:: router_address Router address **type**\: union of the below types: **type**\: str **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)? **type**\: str **pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)? **config**\: False .. attribute:: tlv Link TLV **type**\: :py:class:`Tlv <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Tlv>` **config**\: False .. 
attribute:: unknown_sub_tlv OSPFv2 Unknown sub TLV **type**\: list of :py:class:`UnknownSubTlv <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.UnknownSubTlv>` **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa, self).__init__() self.yang_name = "link-scope-lsa" self.yang_parent_name = "intf-link-scope-lsas" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = ['lsa_id','adv_router'] self._child_classes = OrderedDict([("ospfv2-lsa", ("ospfv2_lsa", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv2Lsa)), ("ospfv2-link", ("ospfv2_link", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv2Link)), ("ospfv2-topology", ("ospfv2_topology", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv2Topology)), ("ospfv2-external", ("ospfv2_external", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv2External)), ("ospfv2-unknown-tlv", ("ospfv2_unknown_tlv", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv2UnknownTlv)), ("ospfv3-lsa-val", ("ospfv3_lsa_val", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal)), ("ospfv3-link", ("ospfv3_link", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3Link)), ("ospfv3-prefix-list", ("ospfv3_prefix_list", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3PrefixList)), ("ospfv3-ia-prefix", ("ospfv3_ia_prefix", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3IaPrefix)), ("multi-topology", ("multi_topology", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.MultiTopology)), ("tlv", ("tlv", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Tlv)), ("unknown-sub-tlv", ("unknown_sub_tlv", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.UnknownSubTlv))]) self._leafs = OrderedDict([ ('lsa_id', (YLeaf(YType.uint32, 'lsa-id'), ['int'])), ('adv_router', (YLeaf(YType.str, 'adv-router'), ['str','str'])), ('decoded_completed', (YLeaf(YType.boolean, 'decoded-completed'), ['bool'])), ('raw_data', (YLeafList(YType.uint8, 'raw-data'), ['int'])), ('version', (YLeaf(YType.uint32, 'version'), ['int'])), ('router_address', (YLeaf(YType.str, 'router-address'), ['str','str'])), ]) self.lsa_id = None self.adv_router = None self.decoded_completed = None self.raw_data = [] self.version = None self.router_address = None self.ospfv2_lsa = OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv2Lsa() self.ospfv2_lsa.parent = self self._children_name_map["ospfv2_lsa"] = "ospfv2-lsa" self.ospfv3_lsa_val = OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal() self.ospfv3_lsa_val.parent = self self._children_name_map["ospfv3_lsa_val"] = "ospfv3-lsa-val" self.tlv = OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Tlv() self.tlv.parent = self self._children_name_map["tlv"] = "tlv" 
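# The containers assigned below are YList instances, ydk-py's ordered
# container for keyed YANG list entries. A minimal population sketch
# (hypothetical values; assumes the standard ydk-py YList.append() API):
#
#   lsa = OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa()
#   lsa.lsa_id = 1
#   lsa.adv_router = "192.0.2.1"
#   link = lsa.Ospfv2Link()
#   link.link_id = 10
#   link.link_data = 20
#   lsa.ospfv2_link.append(link)  # append() registers the entry under its keys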
self.ospfv2_link = YList(self) self.ospfv2_topology = YList(self) self.ospfv2_external = YList(self) self.ospfv2_unknown_tlv = YList(self) self.ospfv3_link = YList(self) self.ospfv3_prefix_list = YList(self) self.ospfv3_ia_prefix = YList(self) self.multi_topology = YList(self) self.unknown_sub_tlv = YList(self) self._segment_path = lambda: "link-scope-lsa" + "[lsa-id='" + str(self.lsa_id) + "']" + "[adv-router='" + str(self.adv_router) + "']" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa, ['lsa_id', 'adv_router', 'decoded_completed', 'raw_data', 'version', 'router_address'], name, value) class Ospfv2Lsa(Entity): """ OSPFv2 LSA .. attribute:: header Decoded OSPFv2 LSA header data **type**\: :py:class:`Header <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv2Lsa.Header>` **config**\: False .. attribute:: lsa_body Decoded OSPFv2 LSA body data **type**\: :py:class:`LsaBody <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv2Lsa.LsaBody>` **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv2Lsa, self).__init__() self.yang_name = "ospfv2-lsa" self.yang_parent_name = "link-scope-lsa" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([("header", ("header", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv2Lsa.Header)), ("lsa-body", ("lsa_body", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv2Lsa.LsaBody))]) self._leafs = OrderedDict() self.header = OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv2Lsa.Header() self.header.parent = self self._children_name_map["header"] = "header" self.lsa_body = OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv2Lsa.LsaBody() self.lsa_body.parent = self self._children_name_map["lsa_body"] = "lsa-body" self._segment_path = lambda: "ospfv2-lsa" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv2Lsa, [], name, value) class Header(Entity): """ Decoded OSPFv2 LSA header data .. attribute:: lsa_id LSA ID **type**\: union of the below types: **type**\: str **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)? **type**\: str **pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)? **config**\: False .. attribute:: opaque_type Opaque type **type**\: int **range:** 0..255 **config**\: False .. attribute:: opaque_id Opaque ID **type**\: int **range:** 0..4294967295 **config**\: False .. attribute:: age LSA age **type**\: int **range:** 0..65535 **config**\: False .. attribute:: type LSA type **type**\: int **range:** 0..65535 **config**\: False .. 
attribute:: adv_router LSA advertising router **type**\: int **range:** 0..4294967295 **config**\: False .. attribute:: seq_num LSA sequence number **type**\: str **config**\: False .. attribute:: checksum LSA checksum **type**\: str **config**\: False .. attribute:: length LSA length **type**\: int **range:** 0..65535 **config**\: False .. attribute:: flag_options LSA options **type**\: :py:class:`LsaFlagOptions <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.LsaFlagOptions>` **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv2Lsa.Header, self).__init__() self.yang_name = "header" self.yang_parent_name = "ospfv2-lsa" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([]) self._leafs = OrderedDict([ ('lsa_id', (YLeaf(YType.str, 'lsa-id'), ['str','str'])), ('opaque_type', (YLeaf(YType.uint8, 'opaque-type'), ['int'])), ('opaque_id', (YLeaf(YType.uint32, 'opaque-id'), ['int'])), ('age', (YLeaf(YType.uint16, 'age'), ['int'])), ('type', (YLeaf(YType.uint16, 'type'), ['int'])), ('adv_router', (YLeaf(YType.uint32, 'adv-router'), ['int'])), ('seq_num', (YLeaf(YType.str, 'seq-num'), ['str'])), ('checksum', (YLeaf(YType.str, 'checksum'), ['str'])), ('length', (YLeaf(YType.uint16, 'length'), ['int'])), ('flag_options', (YLeaf(YType.bits, 'flag-options'), ['Bits'])), ]) self.lsa_id = None self.opaque_type = None self.opaque_id = None self.age = None self.type = None self.adv_router = None self.seq_num = None self.checksum = None self.length = None self.flag_options = Bits() self._segment_path = lambda: "header" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv2Lsa.Header, ['lsa_id', 'opaque_type', 'opaque_id', 'age', 'type', 'adv_router', 'seq_num', 'checksum', 'length', 'flag_options'], name, value) class LsaBody(Entity): """ Decoded OSPFv2 LSA body data .. attribute:: num_of_links Number of links **type**\: int **range:** 0..65535 **config**\: False .. attribute:: network Network details **type**\: :py:class:`Network <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv2Lsa.LsaBody.Network>` **config**\: False .. attribute:: summary_mask Summary mask **type**\: union of the below types: **type**\: str **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)? **type**\: str **pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)? **config**\: False .. attribute:: external_mask External mask **type**\: union of the below types: **type**\: str **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)? **type**\: str **pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)? **config**\: False .. 
attribute:: body_flag_options LSA body flags **type**\: :py:class:`Ospfv2LsaBodyFlagsOptions <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.Ospfv2LsaBodyFlagsOptions>` **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv2Lsa.LsaBody, self).__init__() self.yang_name = "lsa-body" self.yang_parent_name = "ospfv2-lsa" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([("network", ("network", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv2Lsa.LsaBody.Network))]) self._leafs = OrderedDict([ ('num_of_links', (YLeaf(YType.uint16, 'num-of-links'), ['int'])), ('summary_mask', (YLeaf(YType.str, 'summary-mask'), ['str','str'])), ('external_mask', (YLeaf(YType.str, 'external-mask'), ['str','str'])), ('body_flag_options', (YLeaf(YType.bits, 'body-flag-options'), ['Bits'])), ]) self.num_of_links = None self.summary_mask = None self.external_mask = None self.body_flag_options = Bits() self.network = OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv2Lsa.LsaBody.Network() self.network.parent = self self._children_name_map["network"] = "network" self._segment_path = lambda: "lsa-body" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv2Lsa.LsaBody, ['num_of_links', 'summary_mask', 'external_mask', 'body_flag_options'], name, value) class Network(Entity): """ Network details .. attribute:: network_mask IP network mask **type**\: union of the below types: **type**\: str **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)? **type**\: str **pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)? **config**\: False .. attribute:: attached_router List of the routers attached to the network **type**\: list of int **range:** 0..4294967295 **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv2Lsa.LsaBody.Network, self).__init__() self.yang_name = "network" self.yang_parent_name = "lsa-body" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([]) self._leafs = OrderedDict([ ('network_mask', (YLeaf(YType.str, 'network-mask'), ['str','str'])), ('attached_router', (YLeafList(YType.uint32, 'attached-router'), ['int'])), ]) self.network_mask = None self.attached_router = [] self._segment_path = lambda: "network" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv2Lsa.LsaBody.Network, ['network_mask', 'attached_router'], name, value) class Ospfv2Link(Entity): """ OSPFv2 LSA link .. attribute:: link_id (key) Link ID **type**\: int **range:** 0..4294967295 **config**\: False .. 
attribute:: link_data (key) Link data **type**\: int **range:** 0..4294967295 **config**\: False .. attribute:: type Link type **type**\: int **range:** 0..255 **config**\: False .. attribute:: ospfv2_topology Topology specific information **type**\: list of :py:class:`Ospfv2Topology <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv2Link.Ospfv2Topology>` **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv2Link, self).__init__() self.yang_name = "ospfv2-link" self.yang_parent_name = "link-scope-lsa" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = ['link_id','link_data'] self._child_classes = OrderedDict([("ospfv2-topology", ("ospfv2_topology", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv2Link.Ospfv2Topology))]) self._leafs = OrderedDict([ ('link_id', (YLeaf(YType.uint32, 'link-id'), ['int'])), ('link_data', (YLeaf(YType.uint32, 'link-data'), ['int'])), ('type', (YLeaf(YType.uint8, 'type'), ['int'])), ]) self.link_id = None self.link_data = None self.type = None self.ospfv2_topology = YList(self) self._segment_path = lambda: "ospfv2-link" + "[link-id='" + str(self.link_id) + "']" + "[link-data='" + str(self.link_data) + "']" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv2Link, ['link_id', 'link_data', 'type'], name, value) class Ospfv2Topology(Entity): """ Topology specific information .. attribute:: mt_id (key) MT\-ID for topology enabled link **type**\: int **range:** 0..4294967295 **config**\: False .. attribute:: metric Metric for the topology **type**\: int **range:** 0..65535 **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv2Link.Ospfv2Topology, self).__init__() self.yang_name = "ospfv2-topology" self.yang_parent_name = "ospfv2-link" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = ['mt_id'] self._child_classes = OrderedDict([]) self._leafs = OrderedDict([ ('mt_id', (YLeaf(YType.uint32, 'mt-id'), ['int'])), ('metric', (YLeaf(YType.uint16, 'metric'), ['int'])), ]) self.mt_id = None self.metric = None self._segment_path = lambda: "ospfv2-topology" + "[mt-id='" + str(self.mt_id) + "']" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv2Link.Ospfv2Topology, ['mt_id', 'metric'], name, value) class Ospfv2Topology(Entity): """ Summary LSA .. attribute:: mt_id (key) MT\-ID for topology enabled link **type**\: int **range:** 0..4294967295 **config**\: False .. 
attribute:: metric Metric for the topology **type**\: int **range:** 0..65535 **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv2Topology, self).__init__() self.yang_name = "ospfv2-topology" self.yang_parent_name = "link-scope-lsa" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = ['mt_id'] self._child_classes = OrderedDict([]) self._leafs = OrderedDict([ ('mt_id', (YLeaf(YType.uint32, 'mt-id'), ['int'])), ('metric', (YLeaf(YType.uint16, 'metric'), ['int'])), ]) self.mt_id = None self.metric = None self._segment_path = lambda: "ospfv2-topology" + "[mt-id='" + str(self.mt_id) + "']" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv2Topology, ['mt_id', 'metric'], name, value) class Ospfv2External(Entity): """ External LSA .. attribute:: mt_id (key) MT\-ID for topology enabled on the link **type**\: int **range:** 0..4294967295 **config**\: False .. attribute:: metric Metric for the topology **type**\: int **range:** 0..4294967295 **config**\: False .. attribute:: forwarding_address Forwarding address **type**\: union of the below types: **type**\: str **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)? **type**\: str **pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)? **config**\: False .. attribute:: external_route_tag Route tag **type**\: int **range:** 0..4294967295 **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv2External, self).__init__() self.yang_name = "ospfv2-external" self.yang_parent_name = "link-scope-lsa" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = ['mt_id'] self._child_classes = OrderedDict([]) self._leafs = OrderedDict([ ('mt_id', (YLeaf(YType.uint32, 'mt-id'), ['int'])), ('metric', (YLeaf(YType.uint32, 'metric'), ['int'])), ('forwarding_address', (YLeaf(YType.str, 'forwarding-address'), ['str','str'])), ('external_route_tag', (YLeaf(YType.uint32, 'external-route-tag'), ['int'])), ]) self.mt_id = None self.metric = None self.forwarding_address = None self.external_route_tag = None self._segment_path = lambda: "ospfv2-external" + "[mt-id='" + str(self.mt_id) + "']" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv2External, ['mt_id', 'metric', 'forwarding_address', 'external_route_tag'], name, value) class Ospfv2UnknownTlv(Entity): """ OSPFv2 Unknown TLV .. attribute:: type (key) TLV type **type**\: int **range:** 0..65535 **config**\: False .. attribute:: length TLV length **type**\: int **range:** 0..65535 **config**\: False .. 
attribute:: value TLV value **type**\: list of int **range:** 0..255 **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv2UnknownTlv, self).__init__() self.yang_name = "ospfv2-unknown-tlv" self.yang_parent_name = "link-scope-lsa" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = ['type'] self._child_classes = OrderedDict([]) self._leafs = OrderedDict([ ('type', (YLeaf(YType.uint16, 'type'), ['int'])), ('length', (YLeaf(YType.uint16, 'length'), ['int'])), ('value', (YLeafList(YType.uint8, 'value'), ['int'])), ]) self.type = None self.length = None self.value = [] self._segment_path = lambda: "ospfv2-unknown-tlv" + "[type='" + str(self.type) + "']" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv2UnknownTlv, ['type', 'length', 'value'], name, value) class Ospfv3LsaVal(Entity): """ OSPFv3 LSA .. attribute:: header Decoded OSPFv3 LSA header **type**\: :py:class:`Header <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.Header>` **config**\: False .. attribute:: lsa_body Decoded OSPFv3 LSA body **type**\: :py:class:`LsaBody <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody>` **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal, self).__init__() self.yang_name = "ospfv3-lsa-val" self.yang_parent_name = "link-scope-lsa" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([("header", ("header", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.Header)), ("lsa-body", ("lsa_body", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody))]) self._leafs = OrderedDict() self.header = OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.Header() self.header.parent = self self._children_name_map["header"] = "header" self.lsa_body = OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody() self.lsa_body.parent = self self._children_name_map["lsa_body"] = "lsa-body" self._segment_path = lambda: "ospfv3-lsa-val" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal, [], name, value) class Header(Entity): """ Decoded OSPFv3 LSA header .. attribute:: lsa_id LSA ID **type**\: union of the below types: **type**\: str **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)? **type**\: str **pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)? 
**config**\: False .. attribute:: lsa_header LSA header **type**\: :py:class:`LsaHeader <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.Header.LsaHeader>` **config**\: False .. attribute:: lsa_hdr_options OSPFv3 LSA options **type**\: :py:class:`Ospfv3LsaOptions <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.Ospfv3LsaOptions>` **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.Header, self).__init__() self.yang_name = "header" self.yang_parent_name = "ospfv3-lsa-val" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([("lsa-header", ("lsa_header", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.Header.LsaHeader))]) self._leafs = OrderedDict([ ('lsa_id', (YLeaf(YType.str, 'lsa-id'), ['str','str'])), ('lsa_hdr_options', (YLeaf(YType.bits, 'lsa-hdr-options'), ['Bits'])), ]) self.lsa_id = None self.lsa_hdr_options = Bits() self.lsa_header = OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.Header.LsaHeader() self.lsa_header.parent = self self._children_name_map["lsa_header"] = "lsa-header" self._segment_path = lambda: "header" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.Header, ['lsa_id', 'lsa_hdr_options'], name, value) class LsaHeader(Entity): """ LSA header .. attribute:: age LSA age **type**\: int **range:** 0..65535 **config**\: False .. attribute:: type LSA type **type**\: int **range:** 0..65535 **config**\: False .. attribute:: adv_router LSA advertising router **type**\: int **range:** 0..4294967295 **config**\: False .. attribute:: seq_num LSA sequence number **type**\: str **config**\: False .. attribute:: checksum LSA checksum **type**\: str **config**\: False .. attribute:: length LSA length **type**\: int **range:** 0..65535 **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.Header.LsaHeader, self).__init__() self.yang_name = "lsa-header" self.yang_parent_name = "header" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([]) self._leafs = OrderedDict([ ('age', (YLeaf(YType.uint16, 'age'), ['int'])), ('type', (YLeaf(YType.uint16, 'type'), ['int'])), ('adv_router', (YLeaf(YType.uint32, 'adv-router'), ['int'])), ('seq_num', (YLeaf(YType.str, 'seq-num'), ['str'])), ('checksum', (YLeaf(YType.str, 'checksum'), ['str'])), ('length', (YLeaf(YType.uint16, 'length'), ['int'])), ]) self.age = None self.type = None self.adv_router = None self.seq_num = None self.checksum = None self.length = None self._segment_path = lambda: "lsa-header" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.Header.LsaHeader, ['age', 'type', 'adv_router', 'seq_num', 'checksum', 'length'], name, value) class LsaBody(Entity): """ Decoded OSPFv3 LSA body .. 
attribute:: network OSPFv3 network **type**\: :py:class:`Network <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.Network>` **config**\: False .. attribute:: prefix OSPFv3 inter area prefix **type**\: :py:class:`Prefix <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.Prefix>` **config**\: False .. attribute:: ia_router OSPFv3 inter area router **type**\: :py:class:`IaRouter <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.IaRouter>` **config**\: False .. attribute:: lsa_external OSPFv3 LSA external **type**\: :py:class:`LsaExternal <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.LsaExternal>` **config**\: False .. attribute:: nssa OSPFv3 NSSA **type**\: :py:class:`Nssa <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.Nssa>` **config**\: False .. attribute:: link_data OSPFv3 Link data **type**\: :py:class:`LinkData <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.LinkData>` **config**\: False .. attribute:: ia_prefix OSPFv3 Intra area prefixes **type**\: :py:class:`IaPrefix <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.IaPrefix>` **config**\: False .. attribute:: lsa_flag_options LSA options **type**\: :py:class:`Ospfv3LsaOptions <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.Ospfv3LsaOptions>` **config**\: False .. 
attribute:: lsa_body_flags LSA Body Flags **type**\: :py:class:`Ospfv3LsaBodyFlagOptions <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.Ospfv3LsaBodyFlagOptions>` **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody, self).__init__() self.yang_name = "lsa-body" self.yang_parent_name = "ospfv3-lsa-val" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([("network", ("network", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.Network)), ("prefix", ("prefix", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.Prefix)), ("ia-router", ("ia_router", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.IaRouter)), ("lsa-external", ("lsa_external", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.LsaExternal)), ("nssa", ("nssa", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.Nssa)), ("link-data", ("link_data", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.LinkData)), ("ia-prefix", ("ia_prefix", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.IaPrefix))]) self._leafs = OrderedDict([ ('lsa_flag_options', (YLeaf(YType.bits, 'lsa-flag-options'), ['Bits'])), ('lsa_body_flags', (YLeaf(YType.bits, 'lsa-body-flags'), ['Bits'])), ]) self.lsa_flag_options = Bits() self.lsa_body_flags = Bits() self.network = OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.Network() self.network.parent = self self._children_name_map["network"] = "network" self.prefix = OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.Prefix() self.prefix.parent = self self._children_name_map["prefix"] = "prefix" self.ia_router = OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.IaRouter() self.ia_router.parent = self self._children_name_map["ia_router"] = "ia-router" self.lsa_external = OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.LsaExternal() self.lsa_external.parent = self self._children_name_map["lsa_external"] = "lsa-external" self.nssa = OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.Nssa() self.nssa.parent = self self._children_name_map["nssa"] = "nssa" self.link_data = OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.LinkData() self.link_data.parent = self self._children_name_map["link_data"] = "link-data" self.ia_prefix = OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.IaPrefix() self.ia_prefix.parent = self self._children_name_map["ia_prefix"] = "ia-prefix" self._segment_path = lambda: "lsa-body" self._is_frozen = True def __setattr__(self, name, value): 
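# Context note: Entity._perform_setattr (called below) routes the assignment.
# Names in the leaf list are stored through their YLeaf wrappers, and once
# self._is_frozen is True, writes to names the model does not define are
# rejected (standard ydk-py Entity behavior).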
self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody, ['lsa_flag_options', 'lsa_body_flags'], name, value) class Network(Entity): """ OSPFv3 network .. attribute:: attached_router List of the routers attached to the network **type**\: list of int **range:** 0..4294967295 **config**\: False .. attribute:: lsa_net_options Network LSA options **type**\: :py:class:`Ospfv3LsaOptions <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.Ospfv3LsaOptions>` **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.Network, self).__init__() self.yang_name = "network" self.yang_parent_name = "lsa-body" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([]) self._leafs = OrderedDict([ ('attached_router', (YLeafList(YType.uint32, 'attached-router'), ['int'])), ('lsa_net_options', (YLeaf(YType.bits, 'lsa-net-options'), ['Bits'])), ]) self.attached_router = [] self.lsa_net_options = Bits() self._segment_path = lambda: "network" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.Network, ['attached_router', 'lsa_net_options'], name, value) class Prefix(Entity): """ OSPFv3 inter area prefix .. attribute:: metric Metric **type**\: int **range:** 0..4294967295 **config**\: False .. attribute:: ia_prefix Inter area Prefix **type**\: str **config**\: False .. attribute:: ia_prefix_options Inter area prefix options **type**\: str **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.Prefix, self).__init__() self.yang_name = "prefix" self.yang_parent_name = "lsa-body" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([]) self._leafs = OrderedDict([ ('metric', (YLeaf(YType.uint32, 'metric'), ['int'])), ('ia_prefix', (YLeaf(YType.str, 'ia-prefix'), ['str'])), ('ia_prefix_options', (YLeaf(YType.str, 'ia-prefix-options'), ['str'])), ]) self.metric = None self.ia_prefix = None self.ia_prefix_options = None self._segment_path = lambda: "prefix" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.Prefix, ['metric', 'ia_prefix', 'ia_prefix_options'], name, value) class IaRouter(Entity): """ OSPFv3 inter area router .. attribute:: metric Metric **type**\: int **range:** 0..4294967295 **config**\: False .. attribute:: destination_router_id Router ID of the router being described by the LSA **type**\: int **range:** 0..4294967295 **config**\: False .. 
attribute:: lsa_ia_options Inter area LSA options **type**\: :py:class:`Ospfv3LsaOptions <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.Ospfv3LsaOptions>` **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.IaRouter, self).__init__() self.yang_name = "ia-router" self.yang_parent_name = "lsa-body" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([]) self._leafs = OrderedDict([ ('metric', (YLeaf(YType.uint32, 'metric'), ['int'])), ('destination_router_id', (YLeaf(YType.uint32, 'destination-router-id'), ['int'])), ('lsa_ia_options', (YLeaf(YType.bits, 'lsa-ia-options'), ['Bits'])), ]) self.metric = None self.destination_router_id = None self.lsa_ia_options = Bits() self._segment_path = lambda: "ia-router" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.IaRouter, ['metric', 'destination_router_id', 'lsa_ia_options'], name, value) class LsaExternal(Entity): """ OSPFv3 LSA external .. attribute:: metric Metric **type**\: int **range:** 0..4294967295 **config**\: False .. attribute:: flags LSA Flags **type**\: :py:class:`Flags <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.LsaExternal.Flags>` **config**\: False .. attribute:: referenced_ls_type Referenced Link State type **type**\: int **range:** 0..65535 **config**\: False .. attribute:: external_prefix Prefix **type**\: str **config**\: False .. attribute:: external_prefix_options Prefix options **type**\: str **config**\: False .. attribute:: forwarding_address Forwarding address **type**\: union of the below types: **type**\: str **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)? **type**\: str **pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)? **config**\: False .. attribute:: external_route_tag Route tag **type**\: int **range:** 0..4294967295 **config**\: False .. 
attribute:: referenced_link_state_id Referenced Link State ID **type**\: int **range:** 0..4294967295 **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.LsaExternal, self).__init__() self.yang_name = "lsa-external" self.yang_parent_name = "lsa-body" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([("flags", ("flags", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.LsaExternal.Flags))]) self._leafs = OrderedDict([ ('metric', (YLeaf(YType.uint32, 'metric'), ['int'])), ('referenced_ls_type', (YLeaf(YType.uint16, 'referenced-ls-type'), ['int'])), ('external_prefix', (YLeaf(YType.str, 'external-prefix'), ['str'])), ('external_prefix_options', (YLeaf(YType.str, 'external-prefix-options'), ['str'])), ('forwarding_address', (YLeaf(YType.str, 'forwarding-address'), ['str','str'])), ('external_route_tag', (YLeaf(YType.uint32, 'external-route-tag'), ['int'])), ('referenced_link_state_id', (YLeaf(YType.uint32, 'referenced-link-state-id'), ['int'])), ]) self.metric = None self.referenced_ls_type = None self.external_prefix = None self.external_prefix_options = None self.forwarding_address = None self.external_route_tag = None self.referenced_link_state_id = None self.flags = OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.LsaExternal.Flags() self.flags.parent = self self._children_name_map["flags"] = "flags" self._segment_path = lambda: "lsa-external" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.LsaExternal, ['metric', 'referenced_ls_type', 'external_prefix', 'external_prefix_options', 'forwarding_address', 'external_route_tag', 'referenced_link_state_id'], name, value) class Flags(Entity): """ LSA Flags .. attribute:: e_flag When set, the metric specified is a Type 2 external metric **type**\: bool **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.LsaExternal.Flags, self).__init__() self.yang_name = "flags" self.yang_parent_name = "lsa-external" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([]) self._leafs = OrderedDict([ ('e_flag', (YLeaf(YType.boolean, 'e-flag'), ['bool'])), ]) self.e_flag = None self._segment_path = lambda: "flags" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.LsaExternal.Flags, ['e_flag'], name, value) class Nssa(Entity): """ OSPFv3 NSSA .. 
attribute:: lsa_nssa_external NSSA LSA **type**\: :py:class:`LsaNssaExternal <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.Nssa.LsaNssaExternal>` **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.Nssa, self).__init__() self.yang_name = "nssa" self.yang_parent_name = "lsa-body" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([("lsa-nssa-external", ("lsa_nssa_external", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.Nssa.LsaNssaExternal))]) self._leafs = OrderedDict() self.lsa_nssa_external = OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.Nssa.LsaNssaExternal() self.lsa_nssa_external.parent = self self._children_name_map["lsa_nssa_external"] = "lsa-nssa-external" self._segment_path = lambda: "nssa" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.Nssa, [], name, value) class LsaNssaExternal(Entity): """ NSSA LSA .. attribute:: metric Metric **type**\: int **range:** 0..4294967295 **config**\: False .. attribute:: flags LSA Flags **type**\: :py:class:`Flags <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.Nssa.LsaNssaExternal.Flags>` **config**\: False .. attribute:: referenced_ls_type Referenced Link State type **type**\: int **range:** 0..65535 **config**\: False .. attribute:: external_prefix Prefix **type**\: str **config**\: False .. attribute:: external_prefix_options Prefix options **type**\: str **config**\: False .. attribute:: forwarding_address Forwarding address **type**\: union of the below types: **type**\: str **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)? **type**\: str **pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)? **config**\: False .. attribute:: external_route_tag Route tag **type**\: int **range:** 0..4294967295 **config**\: False .. 
attribute:: referenced_link_state_id Referenced Link State ID **type**\: int **range:** 0..4294967295 **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.Nssa.LsaNssaExternal, self).__init__() self.yang_name = "lsa-nssa-external" self.yang_parent_name = "nssa" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([("flags", ("flags", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.Nssa.LsaNssaExternal.Flags))]) self._leafs = OrderedDict([ ('metric', (YLeaf(YType.uint32, 'metric'), ['int'])), ('referenced_ls_type', (YLeaf(YType.uint16, 'referenced-ls-type'), ['int'])), ('external_prefix', (YLeaf(YType.str, 'external-prefix'), ['str'])), ('external_prefix_options', (YLeaf(YType.str, 'external-prefix-options'), ['str'])), ('forwarding_address', (YLeaf(YType.str, 'forwarding-address'), ['str','str'])), ('external_route_tag', (YLeaf(YType.uint32, 'external-route-tag'), ['int'])), ('referenced_link_state_id', (YLeaf(YType.uint32, 'referenced-link-state-id'), ['int'])), ]) self.metric = None self.referenced_ls_type = None self.external_prefix = None self.external_prefix_options = None self.forwarding_address = None self.external_route_tag = None self.referenced_link_state_id = None self.flags = OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.Nssa.LsaNssaExternal.Flags() self.flags.parent = self self._children_name_map["flags"] = "flags" self._segment_path = lambda: "lsa-nssa-external" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.Nssa.LsaNssaExternal, ['metric', 'referenced_ls_type', 'external_prefix', 'external_prefix_options', 'forwarding_address', 'external_route_tag', 'referenced_link_state_id'], name, value) class Flags(Entity): """ LSA Flags .. attribute:: e_flag When set, the metric specified is a Type 2 external metric **type**\: bool **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.Nssa.LsaNssaExternal.Flags, self).__init__() self.yang_name = "flags" self.yang_parent_name = "lsa-nssa-external" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([]) self._leafs = OrderedDict([ ('e_flag', (YLeaf(YType.boolean, 'e-flag'), ['bool'])), ]) self.e_flag = None self._segment_path = lambda: "flags" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.Nssa.LsaNssaExternal.Flags, ['e_flag'], name, value) class LinkData(Entity): """ OSPFv3 Link data .. attribute:: rtr_priority Router priority of the interface **type**\: int **range:** 0..255 **config**\: False .. 
attribute:: link_local_interface_address The originating router's link\-local interface address on the link **type**\: union of the below types: **type**\: str **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)? **type**\: str **pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)? **config**\: False .. attribute:: num_of_prefixes Number of prefixes **type**\: int **range:** 0..4294967295 **config**\: False .. attribute:: lsa_id_options Link data LSA options **type**\: :py:class:`Ospfv3LsaOptions <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.Ospfv3LsaOptions>` **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.LinkData, self).__init__() self.yang_name = "link-data" self.yang_parent_name = "lsa-body" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([]) self._leafs = OrderedDict([ ('rtr_priority', (YLeaf(YType.uint8, 'rtr-priority'), ['int'])), ('link_local_interface_address', (YLeaf(YType.str, 'link-local-interface-address'), ['str','str'])), ('num_of_prefixes', (YLeaf(YType.uint32, 'num-of-prefixes'), ['int'])), ('lsa_id_options', (YLeaf(YType.bits, 'lsa-id-options'), ['Bits'])), ]) self.rtr_priority = None self.link_local_interface_address = None self.num_of_prefixes = None self.lsa_id_options = Bits() self._segment_path = lambda: "link-data" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.LinkData, ['rtr_priority', 'link_local_interface_address', 'num_of_prefixes', 'lsa_id_options'], name, value) class IaPrefix(Entity): """ OSPFv3 Intra area prefixes .. attribute:: referenced_ls_type Referenced Link State type **type**\: int **range:** 0..65535 **config**\: False .. attribute:: referenced_link_state_id Referenced Link State ID **type**\: int **range:** 0..4294967295 **config**\: False .. attribute:: referenced_adv_router Referenced Advertising Router **type**\: union of the below types: **type**\: str **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)? **type**\: str **pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)? **config**\: False .. 
attribute:: num_of_prefixes Number of prefixes **type**\: int **range:** 0..65535 **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.IaPrefix, self).__init__() self.yang_name = "ia-prefix" self.yang_parent_name = "lsa-body" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([]) self._leafs = OrderedDict([ ('referenced_ls_type', (YLeaf(YType.uint16, 'referenced-ls-type'), ['int'])), ('referenced_link_state_id', (YLeaf(YType.uint32, 'referenced-link-state-id'), ['int'])), ('referenced_adv_router', (YLeaf(YType.str, 'referenced-adv-router'), ['str','str'])), ('num_of_prefixes', (YLeaf(YType.uint16, 'num-of-prefixes'), ['int'])), ]) self.referenced_ls_type = None self.referenced_link_state_id = None self.referenced_adv_router = None self.num_of_prefixes = None self._segment_path = lambda: "ia-prefix" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.IaPrefix, ['referenced_ls_type', 'referenced_link_state_id', 'referenced_adv_router', 'num_of_prefixes'], name, value) class Ospfv3Link(Entity): """ OSPFv3 links .. attribute:: interface_id (key) Interface ID **type**\: int **range:** 0..4294967295 **config**\: False .. attribute:: neighbor_interface_id (key) Neighbor interface ID **type**\: int **range:** 0..4294967295 **config**\: False .. attribute:: neighbor_router_id (key) Neighbor router ID **type**\: int **range:** 0..4294967295 **config**\: False .. attribute:: type Link type **type**\: int **range:** 0..255 **config**\: False .. attribute:: metric Metric **type**\: int **range:** 0..65535 **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3Link, self).__init__() self.yang_name = "ospfv3-link" self.yang_parent_name = "link-scope-lsa" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = ['interface_id','neighbor_interface_id','neighbor_router_id'] self._child_classes = OrderedDict([]) self._leafs = OrderedDict([ ('interface_id', (YLeaf(YType.uint32, 'interface-id'), ['int'])), ('neighbor_interface_id', (YLeaf(YType.uint32, 'neighbor-interface-id'), ['int'])), ('neighbor_router_id', (YLeaf(YType.uint32, 'neighbor-router-id'), ['int'])), ('type', (YLeaf(YType.uint8, 'type'), ['int'])), ('metric', (YLeaf(YType.uint16, 'metric'), ['int'])), ]) self.interface_id = None self.neighbor_interface_id = None self.neighbor_router_id = None self.type = None self.metric = None self._segment_path = lambda: "ospfv3-link" + "[interface-id='" + str(self.interface_id) + "']" + "[neighbor-interface-id='" + str(self.neighbor_interface_id) + "']" + "[neighbor-router-id='" + str(self.neighbor_router_id) + "']" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3Link, ['interface_id', 'neighbor_interface_id', 'neighbor_router_id', 'type', 'metric'], name, value) class Ospfv3PrefixList(Entity): """ OSPFv3 prefix\-list .. attribute:: prefix (key) Prefix **type**\: str **config**\: False .. 
attribute:: prefix_options Prefix options **type**\: str **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3PrefixList, self).__init__() self.yang_name = "ospfv3-prefix-list" self.yang_parent_name = "link-scope-lsa" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = ['prefix'] self._child_classes = OrderedDict([]) self._leafs = OrderedDict([ ('prefix', (YLeaf(YType.str, 'prefix'), ['str'])), ('prefix_options', (YLeaf(YType.str, 'prefix-options'), ['str'])), ]) self.prefix = None self.prefix_options = None self._segment_path = lambda: "ospfv3-prefix-list" + "[prefix='" + str(self.prefix) + "']" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3PrefixList, ['prefix', 'prefix_options'], name, value) class Ospfv3IaPrefix(Entity): """ OSPFv3 intra\-area prefix\-list .. attribute:: prefix (key) Prefix **type**\: str **config**\: False .. attribute:: prefix_options Prefix options **type**\: str **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3IaPrefix, self).__init__() self.yang_name = "ospfv3-ia-prefix" self.yang_parent_name = "link-scope-lsa" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = ['prefix'] self._child_classes = OrderedDict([]) self._leafs = OrderedDict([ ('prefix', (YLeaf(YType.str, 'prefix'), ['str'])), ('prefix_options', (YLeaf(YType.str, 'prefix-options'), ['str'])), ]) self.prefix = None self.prefix_options = None self._segment_path = lambda: "ospfv3-ia-prefix" + "[prefix='" + str(self.prefix) + "']" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Ospfv3IaPrefix, ['prefix', 'prefix_options'], name, value) class MultiTopology(Entity): """ OSPF multi\-topology interface augmentation .. attribute:: name (key) One of the topologies enabled on this interface **type**\: str **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.MultiTopology, self).__init__() self.yang_name = "multi-topology" self.yang_parent_name = "link-scope-lsa" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = ['name'] self._child_classes = OrderedDict([]) self._leafs = OrderedDict([ ('name', (YLeaf(YType.str, 'name'), ['str'])), ]) self.name = None self._segment_path = lambda: "multi-topology" + "[name='" + str(self.name) + "']" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.MultiTopology, ['name'], name, value) class Tlv(Entity): """ Link TLV .. attribute:: link_type Link type **type**\: int **range:** 0..255 **config**\: False .. attribute:: link_id Link ID **type**\: int **range:** 0..4294967295 **config**\: False .. 
attribute:: local_if_ipv4_addr List of local interface IPv4 addresses **type**\: union of the below types: **type**\: list of str **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)? **type**\: list of str **pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)? **config**\: False .. attribute:: local_remote_ipv4_addr List of remote interface IPv4 addresses **type**\: union of the below types: **type**\: list of str **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)? **type**\: list of str **pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)? **config**\: False .. attribute:: te_metric TE metric **type**\: int **range:** 0..4294967295 **config**\: False .. attribute:: max_bandwidth Maximum bandwidth **type**\: :py:class:`Decimal64<ydk.types.Decimal64>` **range:** \-92233720368547758.08..92233720368547758.07 **config**\: False .. attribute:: max_reservable_bandwidth Maximum reservable bandwidth **type**\: :py:class:`Decimal64<ydk.types.Decimal64>` **range:** \-92233720368547758.08..92233720368547758.07 **config**\: False .. attribute:: unreserved_bandwidth Unreserved bandwidth **type**\: :py:class:`Decimal64<ydk.types.Decimal64>` **range:** \-92233720368547758.08..92233720368547758.07 **config**\: False ..
attribute:: admin_group Administrative group/Resource class/Color **type**\: int **range:** 0..4294967295 **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Tlv, self).__init__() self.yang_name = "tlv" self.yang_parent_name = "link-scope-lsa" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([]) self._leafs = OrderedDict([ ('link_type', (YLeaf(YType.uint8, 'link-type'), ['int'])), ('link_id', (YLeaf(YType.uint32, 'link-id'), ['int'])), ('local_if_ipv4_addr', (YLeafList(YType.str, 'local-if-ipv4-addr'), ['str','str'])), ('local_remote_ipv4_addr', (YLeafList(YType.str, 'local-remote-ipv4-addr'), ['str','str'])), ('te_metric', (YLeaf(YType.uint32, 'te-metric'), ['int'])), ('max_bandwidth', (YLeaf(YType.str, 'max-bandwidth'), ['Decimal64'])), ('max_reservable_bandwidth', (YLeaf(YType.str, 'max-reservable-bandwidth'), ['Decimal64'])), ('unreserved_bandwidth', (YLeaf(YType.str, 'unreserved-bandwidth'), ['Decimal64'])), ('admin_group', (YLeaf(YType.uint32, 'admin-group'), ['int'])), ]) self.link_type = None self.link_id = None self.local_if_ipv4_addr = [] self.local_remote_ipv4_addr = [] self.te_metric = None self.max_bandwidth = None self.max_reservable_bandwidth = None self.unreserved_bandwidth = None self.admin_group = None self._segment_path = lambda: "tlv" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.Tlv, ['link_type', 'link_id', 'local_if_ipv4_addr', 'local_remote_ipv4_addr', 'te_metric', 'max_bandwidth', 'max_reservable_bandwidth', 'unreserved_bandwidth', 'admin_group'], name, value) class UnknownSubTlv(Entity): """ OSPFv2 Unknown sub TLV .. attribute:: type (key) TLV type **type**\: int **range:** 0..65535 **config**\: False .. attribute:: length TLV length **type**\: int **range:** 0..65535 **config**\: False .. attribute:: value TLV value **type**\: list of int **range:** 0..255 **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.UnknownSubTlv, self).__init__() self.yang_name = "unknown-sub-tlv" self.yang_parent_name = "link-scope-lsa" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = ['type'] self._child_classes = OrderedDict([]) self._leafs = OrderedDict([ ('type', (YLeaf(YType.uint16, 'type'), ['int'])), ('length', (YLeaf(YType.uint16, 'length'), ['int'])), ('value', (YLeafList(YType.uint8, 'value'), ['int'])), ]) self.type = None self.length = None self.value = [] self._segment_path = lambda: "unknown-sub-tlv" + "[type='" + str(self.type) + "']" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.LinkScopeLsa.UnknownSubTlv, ['type', 'length', 'value'], name, value) class AreaScopeLsa(Entity): """ List OSPF area scope LSA databases .. attribute:: lsa_type (key) LSA Type **type**\: int **range:** 0..4294967295 **config**\: False .. 
attribute:: adv_router (key) Advertising router **type**\: union of the below types: **type**\: str **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)? **type**\: str **pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)? **config**\: False .. attribute:: decoded_completed The OSPF LSA body is fully decoded **type**\: bool **config**\: False .. attribute:: raw_data The complete LSA in network byte order as received/sent over the wire **type**\: list of int **range:** 0..255 **config**\: False .. attribute:: ospfv2_lsa OSPFv2 LSA **type**\: :py:class:`Ospfv2Lsa <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv2Lsa>` **config**\: False .. attribute:: ospfv2_link Router LSA link **type**\: list of :py:class:`Ospfv2Link <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv2Link>` **config**\: False .. attribute:: ospfv2_topology Summary LSA **type**\: list of :py:class:`Ospfv2Topology <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv2Topology>` **config**\: False .. attribute:: ospfv2_external External LSA **type**\: list of :py:class:`Ospfv2External <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv2External>` **config**\: False .. attribute:: ospfv3_lsa OSPFv3 LSA **type**\: :py:class:`Ospfv3Lsa <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa>` **config**\: False .. attribute:: ospfv3_link OSPFv3 links **type**\: list of :py:class:`Ospfv3Link <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Link>` **config**\: False .. attribute:: ospfv3_prefix OSPFv3 prefix\-list **type**\: list of :py:class:`Ospfv3Prefix <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Prefix>` **config**\: False .. 
attribute:: ospfv3_ia_prefix OSPFv3 intra\-area prefix\-list **type**\: list of :py:class:`Ospfv3IaPrefix <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3IaPrefix>` **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa, self).__init__() self.yang_name = "area-scope-lsa" self.yang_parent_name = "intf-link-scope-lsas" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = ['lsa_type','adv_router'] self._child_classes = OrderedDict([("ospfv2-lsa", ("ospfv2_lsa", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv2Lsa)), ("ospfv2-link", ("ospfv2_link", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv2Link)), ("ospfv2-topology", ("ospfv2_topology", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv2Topology)), ("ospfv2-external", ("ospfv2_external", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv2External)), ("ospfv3-lsa", ("ospfv3_lsa", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa)), ("ospfv3-link", ("ospfv3_link", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Link)), ("ospfv3-prefix", ("ospfv3_prefix", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Prefix)), ("ospfv3-ia-prefix", ("ospfv3_ia_prefix", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3IaPrefix))]) self._leafs = OrderedDict([ ('lsa_type', (YLeaf(YType.uint32, 'lsa-type'), ['int'])), ('adv_router', (YLeaf(YType.str, 'adv-router'), ['str','str'])), ('decoded_completed', (YLeaf(YType.boolean, 'decoded-completed'), ['bool'])), ('raw_data', (YLeafList(YType.uint8, 'raw-data'), ['int'])), ]) self.lsa_type = None self.adv_router = None self.decoded_completed = None self.raw_data = [] self.ospfv2_lsa = OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv2Lsa() self.ospfv2_lsa.parent = self self._children_name_map["ospfv2_lsa"] = "ospfv2-lsa" self.ospfv3_lsa = OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa() self.ospfv3_lsa.parent = self self._children_name_map["ospfv3_lsa"] = "ospfv3-lsa" self.ospfv2_link = YList(self) self.ospfv2_topology = YList(self) self.ospfv2_external = YList(self) self.ospfv3_link = YList(self) self.ospfv3_prefix = YList(self) self.ospfv3_ia_prefix = YList(self) self._segment_path = lambda: "area-scope-lsa" + "[lsa-type='" + str(self.lsa_type) + "']" + "[adv-router='" + str(self.adv_router) + "']" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa, ['lsa_type', 'adv_router', 'decoded_completed', 'raw_data'], name, value) class Ospfv2Lsa(Entity): """ OSPFv2 LSA .. attribute:: header Decoded OSPFv2 LSA header data **type**\: :py:class:`Header <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv2Lsa.Header>` **config**\: False .. 
attribute:: lsa_body Decoded OSPFv2 LSA body data **type**\: :py:class:`LsaBody <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv2Lsa.LsaBody>` **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv2Lsa, self).__init__() self.yang_name = "ospfv2-lsa" self.yang_parent_name = "area-scope-lsa" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([("header", ("header", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv2Lsa.Header)), ("lsa-body", ("lsa_body", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv2Lsa.LsaBody))]) self._leafs = OrderedDict() self.header = OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv2Lsa.Header() self.header.parent = self self._children_name_map["header"] = "header" self.lsa_body = OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv2Lsa.LsaBody() self.lsa_body.parent = self self._children_name_map["lsa_body"] = "lsa-body" self._segment_path = lambda: "ospfv2-lsa" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv2Lsa, [], name, value) class Header(Entity): """ Decoded OSPFv2 LSA header data .. attribute:: lsa_id LSA ID **type**\: union of the below types: **type**\: str **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)? **type**\: str **pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)? **config**\: False .. attribute:: opaque_type Opaque type **type**\: int **range:** 0..255 **config**\: False .. attribute:: opaque_id Opaque ID **type**\: int **range:** 0..4294967295 **config**\: False .. attribute:: age LSA age **type**\: int **range:** 0..65535 **config**\: False .. attribute:: type LSA type **type**\: int **range:** 0..65535 **config**\: False .. attribute:: adv_router LSA advertising router **type**\: int **range:** 0..4294967295 **config**\: False .. attribute:: seq_num LSA sequence number **type**\: str **config**\: False .. attribute:: checksum LSA checksum **type**\: str **config**\: False .. attribute:: length LSA length **type**\: int **range:** 0..65535 **config**\: False .. 
attribute:: flag_options LSA options **type**\: :py:class:`LsaFlagOptions <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.LsaFlagOptions>` **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv2Lsa.Header, self).__init__() self.yang_name = "header" self.yang_parent_name = "ospfv2-lsa" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([]) self._leafs = OrderedDict([ ('lsa_id', (YLeaf(YType.str, 'lsa-id'), ['str','str'])), ('opaque_type', (YLeaf(YType.uint8, 'opaque-type'), ['int'])), ('opaque_id', (YLeaf(YType.uint32, 'opaque-id'), ['int'])), ('age', (YLeaf(YType.uint16, 'age'), ['int'])), ('type', (YLeaf(YType.uint16, 'type'), ['int'])), ('adv_router', (YLeaf(YType.uint32, 'adv-router'), ['int'])), ('seq_num', (YLeaf(YType.str, 'seq-num'), ['str'])), ('checksum', (YLeaf(YType.str, 'checksum'), ['str'])), ('length', (YLeaf(YType.uint16, 'length'), ['int'])), ('flag_options', (YLeaf(YType.bits, 'flag-options'), ['Bits'])), ]) self.lsa_id = None self.opaque_type = None self.opaque_id = None self.age = None self.type = None self.adv_router = None self.seq_num = None self.checksum = None self.length = None self.flag_options = Bits() self._segment_path = lambda: "header" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv2Lsa.Header, ['lsa_id', 'opaque_type', 'opaque_id', 'age', 'type', 'adv_router', 'seq_num', 'checksum', 'length', 'flag_options'], name, value) class LsaBody(Entity): """ Decoded OSPFv2 LSA body data .. attribute:: num_of_links Number of links **type**\: int **range:** 0..65535 **config**\: False .. attribute:: network Network details **type**\: :py:class:`Network <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv2Lsa.LsaBody.Network>` **config**\: False .. attribute:: summary_mask Summary mask **type**\: union of the below types: **type**\: str **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)? **type**\: str **pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)? **config**\: False .. attribute:: external_mask External mask **type**\: union of the below types: **type**\: str **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)? **type**\: str **pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)? **config**\: False .. 
attribute:: body_flag_options LSA body flags **type**\: :py:class:`Ospfv2LsaBodyFlagsOptions <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.Ospfv2LsaBodyFlagsOptions>` **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv2Lsa.LsaBody, self).__init__() self.yang_name = "lsa-body" self.yang_parent_name = "ospfv2-lsa" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([("network", ("network", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv2Lsa.LsaBody.Network))]) self._leafs = OrderedDict([ ('num_of_links', (YLeaf(YType.uint16, 'num-of-links'), ['int'])), ('summary_mask', (YLeaf(YType.str, 'summary-mask'), ['str','str'])), ('external_mask', (YLeaf(YType.str, 'external-mask'), ['str','str'])), ('body_flag_options', (YLeaf(YType.bits, 'body-flag-options'), ['Bits'])), ]) self.num_of_links = None self.summary_mask = None self.external_mask = None self.body_flag_options = Bits() self.network = OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv2Lsa.LsaBody.Network() self.network.parent = self self._children_name_map["network"] = "network" self._segment_path = lambda: "lsa-body" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv2Lsa.LsaBody, ['num_of_links', 'summary_mask', 'external_mask', 'body_flag_options'], name, value) class Network(Entity): """ Network details .. attribute:: network_mask IP network mask **type**\: union of the below types: **type**\: str **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)? **type**\: str **pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)? **config**\: False .. attribute:: attached_router List of the routers attached to the network **type**\: list of int **range:** 0..4294967295 **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv2Lsa.LsaBody.Network, self).__init__() self.yang_name = "network" self.yang_parent_name = "lsa-body" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([]) self._leafs = OrderedDict([ ('network_mask', (YLeaf(YType.str, 'network-mask'), ['str','str'])), ('attached_router', (YLeafList(YType.uint32, 'attached-router'), ['int'])), ]) self.network_mask = None self.attached_router = [] self._segment_path = lambda: "network" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv2Lsa.LsaBody.Network, ['network_mask', 'attached_router'], name, value) class Ospfv2Link(Entity): """ Router LSA link .. attribute:: link_id (key) Link ID **type**\: int **range:** 0..4294967295 **config**\: False .. 
attribute:: link_data (key) Link data **type**\: int **range:** 0..4294967295 **config**\: False .. attribute:: type Link type **type**\: int **range:** 0..255 **config**\: False .. attribute:: ospfv2_topology Topology specific information **type**\: list of :py:class:`Ospfv2Topology <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv2Link.Ospfv2Topology>` **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv2Link, self).__init__() self.yang_name = "ospfv2-link" self.yang_parent_name = "area-scope-lsa" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = ['link_id','link_data'] self._child_classes = OrderedDict([("ospfv2-topology", ("ospfv2_topology", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv2Link.Ospfv2Topology))]) self._leafs = OrderedDict([ ('link_id', (YLeaf(YType.uint32, 'link-id'), ['int'])), ('link_data', (YLeaf(YType.uint32, 'link-data'), ['int'])), ('type', (YLeaf(YType.uint8, 'type'), ['int'])), ]) self.link_id = None self.link_data = None self.type = None self.ospfv2_topology = YList(self) self._segment_path = lambda: "ospfv2-link" + "[link-id='" + str(self.link_id) + "']" + "[link-data='" + str(self.link_data) + "']" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv2Link, ['link_id', 'link_data', 'type'], name, value) class Ospfv2Topology(Entity): """ Topology specific information .. attribute:: mt_id (key) MT\-ID for topology enabled link **type**\: int **range:** 0..4294967295 **config**\: False .. attribute:: metric Metric for the topology **type**\: int **range:** 0..65535 **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv2Link.Ospfv2Topology, self).__init__() self.yang_name = "ospfv2-topology" self.yang_parent_name = "ospfv2-link" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = ['mt_id'] self._child_classes = OrderedDict([]) self._leafs = OrderedDict([ ('mt_id', (YLeaf(YType.uint32, 'mt-id'), ['int'])), ('metric', (YLeaf(YType.uint16, 'metric'), ['int'])), ]) self.mt_id = None self.metric = None self._segment_path = lambda: "ospfv2-topology" + "[mt-id='" + str(self.mt_id) + "']" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv2Link.Ospfv2Topology, ['mt_id', 'metric'], name, value) class Ospfv2Topology(Entity): """ Summary LSA .. attribute:: mt_id (key) MT\-ID for topology enabled link **type**\: int **range:** 0..4294967295 **config**\: False .. 
attribute:: metric Metric for the topology **type**\: int **range:** 0..65535 **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv2Topology, self).__init__() self.yang_name = "ospfv2-topology" self.yang_parent_name = "area-scope-lsa" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = ['mt_id'] self._child_classes = OrderedDict([]) self._leafs = OrderedDict([ ('mt_id', (YLeaf(YType.uint32, 'mt-id'), ['int'])), ('metric', (YLeaf(YType.uint16, 'metric'), ['int'])), ]) self.mt_id = None self.metric = None self._segment_path = lambda: "ospfv2-topology" + "[mt-id='" + str(self.mt_id) + "']" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv2Topology, ['mt_id', 'metric'], name, value) class Ospfv2External(Entity): """ External LSA .. attribute:: mt_id (key) MT\-ID for topology enabled on the link **type**\: int **range:** 0..4294967295 **config**\: False .. attribute:: metric Metric for the topology **type**\: int **range:** 0..4294967295 **config**\: False .. attribute:: forwarding_address Forwarding address **type**\: union of the below types: **type**\: str **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)? **type**\: str **pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)? **config**\: False .. attribute:: external_route_tag Route tag **type**\: int **range:** 0..4294967295 **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv2External, self).__init__() self.yang_name = "ospfv2-external" self.yang_parent_name = "area-scope-lsa" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = ['mt_id'] self._child_classes = OrderedDict([]) self._leafs = OrderedDict([ ('mt_id', (YLeaf(YType.uint32, 'mt-id'), ['int'])), ('metric', (YLeaf(YType.uint32, 'metric'), ['int'])), ('forwarding_address', (YLeaf(YType.str, 'forwarding-address'), ['str','str'])), ('external_route_tag', (YLeaf(YType.uint32, 'external-route-tag'), ['int'])), ]) self.mt_id = None self.metric = None self.forwarding_address = None self.external_route_tag = None self._segment_path = lambda: "ospfv2-external" + "[mt-id='" + str(self.mt_id) + "']" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv2External, ['mt_id', 'metric', 'forwarding_address', 'external_route_tag'], name, value) class Ospfv3Lsa(Entity): """ OSPFv3 LSA .. attribute:: header Decoded OSPFv3 LSA header **type**\: :py:class:`Header <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.Header>` **config**\: False .. 
attribute:: lsa_body Decoded OSPFv3 LSA body **type**\: :py:class:`LsaBody <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody>` **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa, self).__init__() self.yang_name = "ospfv3-lsa" self.yang_parent_name = "area-scope-lsa" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([("header", ("header", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.Header)), ("lsa-body", ("lsa_body", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody))]) self._leafs = OrderedDict() self.header = OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.Header() self.header.parent = self self._children_name_map["header"] = "header" self.lsa_body = OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody() self.lsa_body.parent = self self._children_name_map["lsa_body"] = "lsa-body" self._segment_path = lambda: "ospfv3-lsa" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa, [], name, value) class Header(Entity): """ Decoded OSPFv3 LSA header .. attribute:: lsa_id LSA ID **type**\: union of the below types: **type**\: str **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)? **type**\: str **pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)? **config**\: False .. attribute:: lsa_header LSA header **type**\: :py:class:`LsaHeader <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.Header.LsaHeader>` **config**\: False .. 
attribute:: lsa_hdr_options OSPFv3 LSA options **type**\: :py:class:`Ospfv3LsaOptions <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.Ospfv3LsaOptions>` **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.Header, self).__init__() self.yang_name = "header" self.yang_parent_name = "ospfv3-lsa" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([("lsa-header", ("lsa_header", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.Header.LsaHeader))]) self._leafs = OrderedDict([ ('lsa_id', (YLeaf(YType.str, 'lsa-id'), ['str','str'])), ('lsa_hdr_options', (YLeaf(YType.bits, 'lsa-hdr-options'), ['Bits'])), ]) self.lsa_id = None self.lsa_hdr_options = Bits() self.lsa_header = OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.Header.LsaHeader() self.lsa_header.parent = self self._children_name_map["lsa_header"] = "lsa-header" self._segment_path = lambda: "header" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.Header, ['lsa_id', 'lsa_hdr_options'], name, value) class LsaHeader(Entity): """ LSA header .. attribute:: age LSA age **type**\: int **range:** 0..65535 **config**\: False .. attribute:: type LSA type **type**\: int **range:** 0..65535 **config**\: False .. attribute:: adv_router LSA advertising router **type**\: int **range:** 0..4294967295 **config**\: False .. attribute:: seq_num LSA sequence number **type**\: str **config**\: False .. attribute:: checksum LSA checksum **type**\: str **config**\: False .. attribute:: length LSA length **type**\: int **range:** 0..65535 **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.Header.LsaHeader, self).__init__() self.yang_name = "lsa-header" self.yang_parent_name = "header" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([]) self._leafs = OrderedDict([ ('age', (YLeaf(YType.uint16, 'age'), ['int'])), ('type', (YLeaf(YType.uint16, 'type'), ['int'])), ('adv_router', (YLeaf(YType.uint32, 'adv-router'), ['int'])), ('seq_num', (YLeaf(YType.str, 'seq-num'), ['str'])), ('checksum', (YLeaf(YType.str, 'checksum'), ['str'])), ('length', (YLeaf(YType.uint16, 'length'), ['int'])), ]) self.age = None self.type = None self.adv_router = None self.seq_num = None self.checksum = None self.length = None self._segment_path = lambda: "lsa-header" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.Header.LsaHeader, ['age', 'type', 'adv_router', 'seq_num', 'checksum', 'length'], name, value) class LsaBody(Entity): """ Decoded OSPFv3 LSA body .. attribute:: network OSPFv3 network **type**\: :py:class:`Network <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.Network>` **config**\: False .. 
attribute:: prefix OSPFv3 inter area prefix **type**\: :py:class:`Prefix <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.Prefix>` **config**\: False .. attribute:: ia_router OSPFv3 inter area router **type**\: :py:class:`IaRouter <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.IaRouter>` **config**\: False .. attribute:: lsa_external OSPFv3 LSA external **type**\: :py:class:`LsaExternal <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.LsaExternal>` **config**\: False .. attribute:: nssa OSPFv3 NSSA **type**\: :py:class:`Nssa <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.Nssa>` **config**\: False .. attribute:: link_data OSPFv3 Link data **type**\: :py:class:`LinkData <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.LinkData>` **config**\: False .. attribute:: ia_prefix OSPFv3 Intra area prefixes **type**\: :py:class:`IaPrefix <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.IaPrefix>` **config**\: False .. attribute:: lsa_flag_options LSA options **type**\: :py:class:`Ospfv3LsaOptions <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.Ospfv3LsaOptions>` **config**\: False .. attribute:: lsa_body_flags LSA Body Flags **type**\: :py:class:`Ospfv3LsaBodyFlagOptions <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.Ospfv3LsaBodyFlagOptions>` **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody, self).__init__() self.yang_name = "lsa-body" self.yang_parent_name = "ospfv3-lsa" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([("network", ("network", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.Network)), ("prefix", ("prefix", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.Prefix)), ("ia-router", ("ia_router", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.IaRouter)), ("lsa-external", ("lsa_external", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.LsaExternal)), ("nssa", ("nssa", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.Nssa)), ("link-data", ("link_data", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.LinkData)), ("ia-prefix", ("ia_prefix", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.IaPrefix))]) self._leafs = OrderedDict([ ('lsa_flag_options', (YLeaf(YType.bits, 'lsa-flag-options'), ['Bits'])), ('lsa_body_flags', (YLeaf(YType.bits, 'lsa-body-flags'), ['Bits'])), ]) self.lsa_flag_options = Bits() 
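# Editor's note (a sketch, not generated code): `lsa_flag_options` and
# `lsa_body_flags` above are ydk.types.Bits values. A Bits object acts as a
# mapping from flag name to boolean, where the valid flag names are those
# defined by the YANG bits types referenced in the docstring
# (Ospfv3LsaOptions / Ospfv3LsaBodyFlagOptions). Assuming `body` is an
# LsaBody entity read back from a device and '<flag-name>' stands in for one
# of the model-defined names (hypothetical here), an individual flag can be
# tested by bracket access:
#   if body.lsa_body_flags['<flag-name>']:
#       print('flag is set')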
self.lsa_body_flags = Bits() self.network = OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.Network() self.network.parent = self self._children_name_map["network"] = "network" self.prefix = OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.Prefix() self.prefix.parent = self self._children_name_map["prefix"] = "prefix" self.ia_router = OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.IaRouter() self.ia_router.parent = self self._children_name_map["ia_router"] = "ia-router" self.lsa_external = OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.LsaExternal() self.lsa_external.parent = self self._children_name_map["lsa_external"] = "lsa-external" self.nssa = OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.Nssa() self.nssa.parent = self self._children_name_map["nssa"] = "nssa" self.link_data = OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.LinkData() self.link_data.parent = self self._children_name_map["link_data"] = "link-data" self.ia_prefix = OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.IaPrefix() self.ia_prefix.parent = self self._children_name_map["ia_prefix"] = "ia-prefix" self._segment_path = lambda: "lsa-body" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody, ['lsa_flag_options', 'lsa_body_flags'], name, value) class Network(Entity): """ OSPFv3 network .. attribute:: attached_router List of the routers attached to the network **type**\: list of int **range:** 0..4294967295 **config**\: False .. attribute:: lsa_net_options Network LSA options **type**\: :py:class:`Ospfv3LsaOptions <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.Ospfv3LsaOptions>` **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.Network, self).__init__() self.yang_name = "network" self.yang_parent_name = "lsa-body" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([]) self._leafs = OrderedDict([ ('attached_router', (YLeafList(YType.uint32, 'attached-router'), ['int'])), ('lsa_net_options', (YLeaf(YType.bits, 'lsa-net-options'), ['Bits'])), ]) self.attached_router = [] self.lsa_net_options = Bits() self._segment_path = lambda: "network" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.Network, ['attached_router', 'lsa_net_options'], name, value) class Prefix(Entity): """ OSPFv3 inter area prefix .. attribute:: metric Metric **type**\: int **range:** 0..4294967295 **config**\: False .. attribute:: ia_prefix Inter area Prefix **type**\: str **config**\: False .. 
attribute:: ia_prefix_options Inter area prefix options **type**\: str **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.Prefix, self).__init__() self.yang_name = "prefix" self.yang_parent_name = "lsa-body" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([]) self._leafs = OrderedDict([ ('metric', (YLeaf(YType.uint32, 'metric'), ['int'])), ('ia_prefix', (YLeaf(YType.str, 'ia-prefix'), ['str'])), ('ia_prefix_options', (YLeaf(YType.str, 'ia-prefix-options'), ['str'])), ]) self.metric = None self.ia_prefix = None self.ia_prefix_options = None self._segment_path = lambda: "prefix" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.Prefix, ['metric', 'ia_prefix', 'ia_prefix_options'], name, value) class IaRouter(Entity): """ OSPFv3 inter area router .. attribute:: metric Metric **type**\: int **range:** 0..4294967295 **config**\: False .. attribute:: destination_router_id Router ID of the router being described by the LSA **type**\: int **range:** 0..4294967295 **config**\: False .. attribute:: lsa_ia_options Inter area LSA options **type**\: :py:class:`Ospfv3LsaOptions <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.Ospfv3LsaOptions>` **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.IaRouter, self).__init__() self.yang_name = "ia-router" self.yang_parent_name = "lsa-body" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([]) self._leafs = OrderedDict([ ('metric', (YLeaf(YType.uint32, 'metric'), ['int'])), ('destination_router_id', (YLeaf(YType.uint32, 'destination-router-id'), ['int'])), ('lsa_ia_options', (YLeaf(YType.bits, 'lsa-ia-options'), ['Bits'])), ]) self.metric = None self.destination_router_id = None self.lsa_ia_options = Bits() self._segment_path = lambda: "ia-router" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.IaRouter, ['metric', 'destination_router_id', 'lsa_ia_options'], name, value) class LsaExternal(Entity): """ OSPFv3 LSA external .. attribute:: metric Metric **type**\: int **range:** 0..4294967295 **config**\: False .. attribute:: flags LSA Flags **type**\: :py:class:`Flags <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.LsaExternal.Flags>` **config**\: False .. attribute:: referenced_ls_type Referenced Link State type **type**\: int **range:** 0..65535 **config**\: False .. attribute:: external_prefix Prefix **type**\: str **config**\: False .. attribute:: external_prefix_options Prefix options **type**\: str **config**\: False .. attribute:: forwarding_address Forwarding address **type**\: union of the below types: **type**\: str **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)? 
**type**\: str **pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)? **config**\: False .. attribute:: external_route_tag Route tag **type**\: int **range:** 0..4294967295 **config**\: False .. attribute:: referenced_link_state_id Referenced Link State ID **type**\: int **range:** 0..4294967295 **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.LsaExternal, self).__init__() self.yang_name = "lsa-external" self.yang_parent_name = "lsa-body" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([("flags", ("flags", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.LsaExternal.Flags))]) self._leafs = OrderedDict([ ('metric', (YLeaf(YType.uint32, 'metric'), ['int'])), ('referenced_ls_type', (YLeaf(YType.uint16, 'referenced-ls-type'), ['int'])), ('external_prefix', (YLeaf(YType.str, 'external-prefix'), ['str'])), ('external_prefix_options', (YLeaf(YType.str, 'external-prefix-options'), ['str'])), ('forwarding_address', (YLeaf(YType.str, 'forwarding-address'), ['str','str'])), ('external_route_tag', (YLeaf(YType.uint32, 'external-route-tag'), ['int'])), ('referenced_link_state_id', (YLeaf(YType.uint32, 'referenced-link-state-id'), ['int'])), ]) self.metric = None self.referenced_ls_type = None self.external_prefix = None self.external_prefix_options = None self.forwarding_address = None self.external_route_tag = None self.referenced_link_state_id = None self.flags = OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.LsaExternal.Flags() self.flags.parent = self self._children_name_map["flags"] = "flags" self._segment_path = lambda: "lsa-external" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.LsaExternal, ['metric', 'referenced_ls_type', 'external_prefix', 'external_prefix_options', 'forwarding_address', 'external_route_tag', 'referenced_link_state_id'], name, value) class Flags(Entity): """ LSA Flags .. attribute:: e_flag When set, the metric specified is a Type 2 external metric **type**\: bool **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.LsaExternal.Flags, self).__init__() self.yang_name = "flags" self.yang_parent_name = "lsa-external" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([]) self._leafs = OrderedDict([ ('e_flag', (YLeaf(YType.boolean, 'e-flag'), ['bool'])), ]) self.e_flag = None self._segment_path = lambda: "flags" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.LsaExternal.Flags, ['e_flag'], name, value) class Nssa(Entity): """ OSPFv3 NSSA .. 
attribute:: lsa_nssa_external NSSA LSA **type**\: :py:class:`LsaNssaExternal <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.Nssa.LsaNssaExternal>` **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.Nssa, self).__init__() self.yang_name = "nssa" self.yang_parent_name = "lsa-body" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([("lsa-nssa-external", ("lsa_nssa_external", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.Nssa.LsaNssaExternal))]) self._leafs = OrderedDict() self.lsa_nssa_external = OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.Nssa.LsaNssaExternal() self.lsa_nssa_external.parent = self self._children_name_map["lsa_nssa_external"] = "lsa-nssa-external" self._segment_path = lambda: "nssa" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.Nssa, [], name, value) class LsaNssaExternal(Entity): """ NSSA LSA .. attribute:: metric Metric **type**\: int **range:** 0..4294967295 **config**\: False .. attribute:: flags LSA Flags **type**\: :py:class:`Flags <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.Nssa.LsaNssaExternal.Flags>` **config**\: False .. attribute:: referenced_ls_type Referenced Link State type **type**\: int **range:** 0..65535 **config**\: False .. attribute:: external_prefix Prefix **type**\: str **config**\: False .. attribute:: external_prefix_options Prefix options **type**\: str **config**\: False .. attribute:: forwarding_address Forwarding address **type**\: union of the below types: **type**\: str **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)? **type**\: str **pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)? **config**\: False .. attribute:: external_route_tag Route tag **type**\: int **range:** 0..4294967295 **config**\: False .. 
attribute:: referenced_link_state_id Referenced Link State ID **type**\: int **range:** 0..4294967295 **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.Nssa.LsaNssaExternal, self).__init__() self.yang_name = "lsa-nssa-external" self.yang_parent_name = "nssa" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([("flags", ("flags", OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.Nssa.LsaNssaExternal.Flags))]) self._leafs = OrderedDict([ ('metric', (YLeaf(YType.uint32, 'metric'), ['int'])), ('referenced_ls_type', (YLeaf(YType.uint16, 'referenced-ls-type'), ['int'])), ('external_prefix', (YLeaf(YType.str, 'external-prefix'), ['str'])), ('external_prefix_options', (YLeaf(YType.str, 'external-prefix-options'), ['str'])), ('forwarding_address', (YLeaf(YType.str, 'forwarding-address'), ['str','str'])), ('external_route_tag', (YLeaf(YType.uint32, 'external-route-tag'), ['int'])), ('referenced_link_state_id', (YLeaf(YType.uint32, 'referenced-link-state-id'), ['int'])), ]) self.metric = None self.referenced_ls_type = None self.external_prefix = None self.external_prefix_options = None self.forwarding_address = None self.external_route_tag = None self.referenced_link_state_id = None self.flags = OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.Nssa.LsaNssaExternal.Flags() self.flags.parent = self self._children_name_map["flags"] = "flags" self._segment_path = lambda: "lsa-nssa-external" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.Nssa.LsaNssaExternal, ['metric', 'referenced_ls_type', 'external_prefix', 'external_prefix_options', 'forwarding_address', 'external_route_tag', 'referenced_link_state_id'], name, value) class Flags(Entity): """ LSA Flags .. attribute:: e_flag When set, the metric specified is a Type 2 external metric **type**\: bool **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.Nssa.LsaNssaExternal.Flags, self).__init__() self.yang_name = "flags" self.yang_parent_name = "lsa-nssa-external" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([]) self._leafs = OrderedDict([ ('e_flag', (YLeaf(YType.boolean, 'e-flag'), ['bool'])), ]) self.e_flag = None self._segment_path = lambda: "flags" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.Nssa.LsaNssaExternal.Flags, ['e_flag'], name, value) class LinkData(Entity): """ OSPFv3 Link data .. attribute:: rtr_priority Router priority of the interface **type**\: int **range:** 0..255 **config**\: False ..
attribute:: link_local_interface_address The originating router's link\-local interface address on the link **type**\: union of the below types: **type**\: str **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)? **type**\: str **pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)? **config**\: False .. attribute:: num_of_prefixes Number of prefixes **type**\: int **range:** 0..4294967295 **config**\: False .. attribute:: lsa_id_options Link data LSA options **type**\: :py:class:`Ospfv3LsaOptions <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.Ospfv3LsaOptions>` **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.LinkData, self).__init__() self.yang_name = "link-data" self.yang_parent_name = "lsa-body" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([]) self._leafs = OrderedDict([ ('rtr_priority', (YLeaf(YType.uint8, 'rtr-priority'), ['int'])), ('link_local_interface_address', (YLeaf(YType.str, 'link-local-interface-address'), ['str','str'])), ('num_of_prefixes', (YLeaf(YType.uint32, 'num-of-prefixes'), ['int'])), ('lsa_id_options', (YLeaf(YType.bits, 'lsa-id-options'), ['Bits'])), ]) self.rtr_priority = None self.link_local_interface_address = None self.num_of_prefixes = None self.lsa_id_options = Bits() self._segment_path = lambda: "link-data" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.LinkData, ['rtr_priority', 'link_local_interface_address', 'num_of_prefixes', 'lsa_id_options'], name, value) class IaPrefix(Entity): """ OSPFv3 Intra area prefixes .. attribute:: referenced_ls_type Referenced Link State type **type**\: int **range:** 0..65535 **config**\: False .. attribute:: referenced_link_state_id Referenced Link State ID **type**\: int **range:** 0..4294967295 **config**\: False .. attribute:: referenced_adv_router Referenced Advertising Router **type**\: union of the below types: **type**\: str **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)? **type**\: str **pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)? **config**\: False .. 
attribute:: num_of_prefixes Number of prefixes **type**\: int **range:** 0..65535 **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.IaPrefix, self).__init__() self.yang_name = "ia-prefix" self.yang_parent_name = "lsa-body" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([]) self._leafs = OrderedDict([ ('referenced_ls_type', (YLeaf(YType.uint16, 'referenced-ls-type'), ['int'])), ('referenced_link_state_id', (YLeaf(YType.uint32, 'referenced-link-state-id'), ['int'])), ('referenced_adv_router', (YLeaf(YType.str, 'referenced-adv-router'), ['str','str'])), ('num_of_prefixes', (YLeaf(YType.uint16, 'num-of-prefixes'), ['int'])), ]) self.referenced_ls_type = None self.referenced_link_state_id = None self.referenced_adv_router = None self.num_of_prefixes = None self._segment_path = lambda: "ia-prefix" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.IaPrefix, ['referenced_ls_type', 'referenced_link_state_id', 'referenced_adv_router', 'num_of_prefixes'], name, value) class Ospfv3Link(Entity): """ OSPFv3 links .. attribute:: interface_id (key) Interface ID **type**\: int **range:** 0..4294967295 **config**\: False .. attribute:: neighbor_interface_id (key) Neighbor interface ID **type**\: int **range:** 0..4294967295 **config**\: False .. attribute:: neighbor_router_id (key) Neighbor router ID **type**\: int **range:** 0..4294967295 **config**\: False .. attribute:: type Link type **type**\: int **range:** 0..255 **config**\: False .. attribute:: metric Metric **type**\: int **range:** 0..65535 **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Link, self).__init__() self.yang_name = "ospfv3-link" self.yang_parent_name = "area-scope-lsa" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = ['interface_id','neighbor_interface_id','neighbor_router_id'] self._child_classes = OrderedDict([]) self._leafs = OrderedDict([ ('interface_id', (YLeaf(YType.uint32, 'interface-id'), ['int'])), ('neighbor_interface_id', (YLeaf(YType.uint32, 'neighbor-interface-id'), ['int'])), ('neighbor_router_id', (YLeaf(YType.uint32, 'neighbor-router-id'), ['int'])), ('type', (YLeaf(YType.uint8, 'type'), ['int'])), ('metric', (YLeaf(YType.uint16, 'metric'), ['int'])), ]) self.interface_id = None self.neighbor_interface_id = None self.neighbor_router_id = None self.type = None self.metric = None self._segment_path = lambda: "ospfv3-link" + "[interface-id='" + str(self.interface_id) + "']" + "[neighbor-interface-id='" + str(self.neighbor_interface_id) + "']" + "[neighbor-router-id='" + str(self.neighbor_router_id) + "']" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Link, ['interface_id', 'neighbor_interface_id', 'neighbor_router_id', 'type', 'metric'], name, value) class Ospfv3Prefix(Entity): """ OSPFv3 prefix\-list .. attribute:: prefix (key) Prefix **type**\: str **config**\: False .. 
attribute:: prefix_options Prefix options **type**\: str **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Prefix, self).__init__() self.yang_name = "ospfv3-prefix" self.yang_parent_name = "area-scope-lsa" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = ['prefix'] self._child_classes = OrderedDict([]) self._leafs = OrderedDict([ ('prefix', (YLeaf(YType.str, 'prefix'), ['str'])), ('prefix_options', (YLeaf(YType.str, 'prefix-options'), ['str'])), ]) self.prefix = None self.prefix_options = None self._segment_path = lambda: "ospfv3-prefix" + "[prefix='" + str(self.prefix) + "']" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3Prefix, ['prefix', 'prefix_options'], name, value) class Ospfv3IaPrefix(Entity): """ OSPFv3 intra\-area prefix\-list .. attribute:: prefix (key) Prefix **type**\: str **config**\: False .. attribute:: prefix_options Prefix options **type**\: str **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3IaPrefix, self).__init__() self.yang_name = "ospfv3-ia-prefix" self.yang_parent_name = "area-scope-lsa" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = ['prefix'] self._child_classes = OrderedDict([]) self._leafs = OrderedDict([ ('prefix', (YLeaf(YType.str, 'prefix'), ['str'])), ('prefix_options', (YLeaf(YType.str, 'prefix-options'), ['str'])), ]) self.prefix = None self.prefix_options = None self._segment_path = lambda: "ospfv3-ia-prefix" + "[prefix='" + str(self.prefix) + "']" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfLinkScopeLsas.AreaScopeLsa.Ospfv3IaPrefix, ['prefix', 'prefix_options'], name, value) class IntfMultiTopology(Entity): """ OSPF interface topology .. attribute:: name (key) One of the topology enabled on this interface **type**\: str **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfMultiTopology, self).__init__() self.yang_name = "intf-multi-topology" self.yang_parent_name = "ospf-interface" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = ['name'] self._child_classes = OrderedDict([]) self._leafs = OrderedDict([ ('name', (YLeaf(YType.str, 'name'), ['str'])), ]) self.name = None self._segment_path = lambda: "intf-multi-topology" + "[name='" + str(self.name) + "']" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.OspfInterface.IntfMultiTopology, ['name'], name, value) class AreaScopeLsa(Entity): """ List of OSPF area scope LSA .. attribute:: lsa_type (key) OSPF link scope LSA type **type**\: int **range:** 0..4294967295 **config**\: False .. 
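A brief usage sketch (illustrative only, not part of the generated bindings; the device address and credentials below are hypothetical placeholders) of how this operational subtree is typically retrieved with ydk-py's ``CRUDService``::

    from ydk.services import CRUDService
    from ydk.providers import NetconfServiceProvider
    from ydk.models.cisco_ios_xe import Cisco_IOS_XE_ospf_oper

    # Hypothetical NETCONF-enabled IOS XE device.
    provider = NetconfServiceProvider(address='192.0.2.1', port=830,
                                      username='admin', password='admin')
    crud = CRUDService()
    # Read the whole OSPF operational tree rooted at OspfOperData.
    ospf = crud.read(provider, Cisco_IOS_XE_ospf_oper.OspfOperData())

..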
attribute:: area_scope_lsa List of OSPF area scope LSAs **type**\: list of :py:class:`AreaScopeLsa_ <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_>` **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa, self).__init__() self.yang_name = "area-scope-lsa" self.yang_parent_name = "ospf-area" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = ['lsa_type'] self._child_classes = OrderedDict([("area-scope-lsa", ("area_scope_lsa", OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_))]) self._leafs = OrderedDict([ ('lsa_type', (YLeaf(YType.uint32, 'lsa-type'), ['int'])), ]) self.lsa_type = None self.area_scope_lsa = YList(self) self._segment_path = lambda: "area-scope-lsa" + "[lsa-type='" + str(self.lsa_type) + "']" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa, ['lsa_type'], name, value) class AreaScopeLsa_(Entity): """ List of OSPF area scope LSAs .. attribute:: lsa_type (key) LSA Type **type**\: int **range:** 0..4294967295 **config**\: False .. attribute:: adv_router (key) Advertising router **type**\: union of the below types: **type**\: str **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)? **type**\: str **pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)? **config**\: False .. attribute:: decoded_completed The OSPF LSA body is fully decoded **type**\: bool **config**\: False .. attribute:: raw_data The complete LSA in network byte order as received/sent over the wire **type**\: list of int **range:** 0..255 **config**\: False .. attribute:: ospfv2_lsa OSPFv2 LSA **type**\: :py:class:`Ospfv2Lsa <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv2Lsa>` **config**\: False .. attribute:: ospfv2_link Router LSA link **type**\: list of :py:class:`Ospfv2Link <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv2Link>` **config**\: False .. attribute:: ospfv2_topology Summary LSA **type**\: list of :py:class:`Ospfv2Topology <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv2Topology>` **config**\: False .. attribute:: ospfv2_external External LSA **type**\: list of :py:class:`Ospfv2External <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv2External>` **config**\: False .. attribute:: ospfv3_lsa OSPFv3 LSA **type**\: :py:class:`Ospfv3Lsa <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa>` **config**\: False .. attribute:: ospfv3_link OSPFv3 links **type**\: list of :py:class:`Ospfv3Link <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Link>` **config**\: False .. 
attribute:: ospfv3_prefix OSPFv3 prefix\-list **type**\: list of :py:class:`Ospfv3Prefix <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Prefix>` **config**\: False .. attribute:: ospfv3_ia_prefix OSPFv3 intra\-area prefix\-list **type**\: list of :py:class:`Ospfv3IaPrefix <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3IaPrefix>` **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_, self).__init__() self.yang_name = "area-scope-lsa" self.yang_parent_name = "area-scope-lsa" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = ['lsa_type','adv_router'] self._child_classes = OrderedDict([("ospfv2-lsa", ("ospfv2_lsa", OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv2Lsa)), ("ospfv2-link", ("ospfv2_link", OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv2Link)), ("ospfv2-topology", ("ospfv2_topology", OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv2Topology)), ("ospfv2-external", ("ospfv2_external", OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv2External)), ("ospfv3-lsa", ("ospfv3_lsa", OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa)), ("ospfv3-link", ("ospfv3_link", OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Link)), ("ospfv3-prefix", ("ospfv3_prefix", OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Prefix)), ("ospfv3-ia-prefix", ("ospfv3_ia_prefix", OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3IaPrefix))]) self._leafs = OrderedDict([ ('lsa_type', (YLeaf(YType.uint32, 'lsa-type'), ['int'])), ('adv_router', (YLeaf(YType.str, 'adv-router'), ['str','str'])), ('decoded_completed', (YLeaf(YType.boolean, 'decoded-completed'), ['bool'])), ('raw_data', (YLeafList(YType.uint8, 'raw-data'), ['int'])), ]) self.lsa_type = None self.adv_router = None self.decoded_completed = None self.raw_data = [] self.ospfv2_lsa = OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv2Lsa() self.ospfv2_lsa.parent = self self._children_name_map["ospfv2_lsa"] = "ospfv2-lsa" self.ospfv3_lsa = OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa() self.ospfv3_lsa.parent = self self._children_name_map["ospfv3_lsa"] = "ospfv3-lsa" self.ospfv2_link = YList(self) self.ospfv2_topology = YList(self) self.ospfv2_external = YList(self) self.ospfv3_link = YList(self) self.ospfv3_prefix = YList(self) self.ospfv3_ia_prefix = YList(self) self._segment_path = lambda: "area-scope-lsa" + "[lsa-type='" + str(self.lsa_type) + "']" + "[adv-router='" + str(self.adv_router) + "']" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_, ['lsa_type', 'adv_router', 'decoded_completed', 'raw_data'], name, value) class Ospfv2Lsa(Entity): """ OSPFv2 LSA .. attribute:: header Decoded OSPFv2 LSA header data **type**\: :py:class:`Header <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv2Lsa.Header>` **config**\: False .. 
attribute:: lsa_body Decoded OSPFv2 LSA body data **type**\: :py:class:`LsaBody <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv2Lsa.LsaBody>` **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv2Lsa, self).__init__() self.yang_name = "ospfv2-lsa" self.yang_parent_name = "area-scope-lsa" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([("header", ("header", OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv2Lsa.Header)), ("lsa-body", ("lsa_body", OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv2Lsa.LsaBody))]) self._leafs = OrderedDict() self.header = OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv2Lsa.Header() self.header.parent = self self._children_name_map["header"] = "header" self.lsa_body = OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv2Lsa.LsaBody() self.lsa_body.parent = self self._children_name_map["lsa_body"] = "lsa-body" self._segment_path = lambda: "ospfv2-lsa" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv2Lsa, [], name, value) class Header(Entity): """ Decoded OSPFv2 LSA header data .. attribute:: lsa_id LSA ID **type**\: union of the below types: **type**\: str **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)? **type**\: str **pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)? **config**\: False .. attribute:: opaque_type Opaque type **type**\: int **range:** 0..255 **config**\: False .. attribute:: opaque_id Opaque ID **type**\: int **range:** 0..4294967295 **config**\: False .. attribute:: age LSA age **type**\: int **range:** 0..65535 **config**\: False .. attribute:: type LSA type **type**\: int **range:** 0..65535 **config**\: False .. attribute:: adv_router LSA advertising router **type**\: int **range:** 0..4294967295 **config**\: False .. attribute:: seq_num LSA sequence number **type**\: str **config**\: False .. attribute:: checksum LSA checksum **type**\: str **config**\: False .. attribute:: length LSA length **type**\: int **range:** 0..65535 **config**\: False .. 
attribute:: flag_options LSA options **type**\: :py:class:`LsaFlagOptions <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.LsaFlagOptions>` **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv2Lsa.Header, self).__init__() self.yang_name = "header" self.yang_parent_name = "ospfv2-lsa" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([]) self._leafs = OrderedDict([ ('lsa_id', (YLeaf(YType.str, 'lsa-id'), ['str','str'])), ('opaque_type', (YLeaf(YType.uint8, 'opaque-type'), ['int'])), ('opaque_id', (YLeaf(YType.uint32, 'opaque-id'), ['int'])), ('age', (YLeaf(YType.uint16, 'age'), ['int'])), ('type', (YLeaf(YType.uint16, 'type'), ['int'])), ('adv_router', (YLeaf(YType.uint32, 'adv-router'), ['int'])), ('seq_num', (YLeaf(YType.str, 'seq-num'), ['str'])), ('checksum', (YLeaf(YType.str, 'checksum'), ['str'])), ('length', (YLeaf(YType.uint16, 'length'), ['int'])), ('flag_options', (YLeaf(YType.bits, 'flag-options'), ['Bits'])), ]) self.lsa_id = None self.opaque_type = None self.opaque_id = None self.age = None self.type = None self.adv_router = None self.seq_num = None self.checksum = None self.length = None self.flag_options = Bits() self._segment_path = lambda: "header" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv2Lsa.Header, ['lsa_id', 'opaque_type', 'opaque_id', 'age', 'type', 'adv_router', 'seq_num', 'checksum', 'length', 'flag_options'], name, value) class LsaBody(Entity): """ Decoded OSPFv2 LSA body data .. attribute:: num_of_links Number of links **type**\: int **range:** 0..65535 **config**\: False .. attribute:: network Network details **type**\: :py:class:`Network <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv2Lsa.LsaBody.Network>` **config**\: False .. attribute:: summary_mask Summary mask **type**\: union of the below types: **type**\: str **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)? **type**\: str **pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)? **config**\: False .. attribute:: external_mask External mask **type**\: union of the below types: **type**\: str **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)? **type**\: str **pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)? **config**\: False .. 
attribute:: body_flag_options LSA body flags **type**\: :py:class:`Ospfv2LsaBodyFlagsOptions <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.Ospfv2LsaBodyFlagsOptions>` **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv2Lsa.LsaBody, self).__init__() self.yang_name = "lsa-body" self.yang_parent_name = "ospfv2-lsa" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([("network", ("network", OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv2Lsa.LsaBody.Network))]) self._leafs = OrderedDict([ ('num_of_links', (YLeaf(YType.uint16, 'num-of-links'), ['int'])), ('summary_mask', (YLeaf(YType.str, 'summary-mask'), ['str','str'])), ('external_mask', (YLeaf(YType.str, 'external-mask'), ['str','str'])), ('body_flag_options', (YLeaf(YType.bits, 'body-flag-options'), ['Bits'])), ]) self.num_of_links = None self.summary_mask = None self.external_mask = None self.body_flag_options = Bits() self.network = OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv2Lsa.LsaBody.Network() self.network.parent = self self._children_name_map["network"] = "network" self._segment_path = lambda: "lsa-body" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv2Lsa.LsaBody, ['num_of_links', 'summary_mask', 'external_mask', 'body_flag_options'], name, value) class Network(Entity): """ Network details .. attribute:: network_mask IP network mask **type**\: union of the below types: **type**\: str **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)? **type**\: str **pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)? **config**\: False .. attribute:: attached_router List of the routers attached to the network **type**\: list of int **range:** 0..4294967295 **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv2Lsa.LsaBody.Network, self).__init__() self.yang_name = "network" self.yang_parent_name = "lsa-body" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([]) self._leafs = OrderedDict([ ('network_mask', (YLeaf(YType.str, 'network-mask'), ['str','str'])), ('attached_router', (YLeafList(YType.uint32, 'attached-router'), ['int'])), ]) self.network_mask = None self.attached_router = [] self._segment_path = lambda: "network" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv2Lsa.LsaBody.Network, ['network_mask', 'attached_router'], name, value) class Ospfv2Link(Entity): """ Router LSA link .. attribute:: link_id (key) Link ID **type**\: int **range:** 0..4294967295 **config**\: False .. attribute:: link_data (key) Link data **type**\: int **range:** 0..4294967295 **config**\: False .. attribute:: type Link type **type**\: int **range:** 0..255 **config**\: False .. 
attribute:: ospfv2_topology Topology specific information **type**\: list of :py:class:`Ospfv2Topology <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv2Link.Ospfv2Topology>` **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv2Link, self).__init__() self.yang_name = "ospfv2-link" self.yang_parent_name = "area-scope-lsa" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = ['link_id','link_data'] self._child_classes = OrderedDict([("ospfv2-topology", ("ospfv2_topology", OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv2Link.Ospfv2Topology))]) self._leafs = OrderedDict([ ('link_id', (YLeaf(YType.uint32, 'link-id'), ['int'])), ('link_data', (YLeaf(YType.uint32, 'link-data'), ['int'])), ('type', (YLeaf(YType.uint8, 'type'), ['int'])), ]) self.link_id = None self.link_data = None self.type = None self.ospfv2_topology = YList(self) self._segment_path = lambda: "ospfv2-link" + "[link-id='" + str(self.link_id) + "']" + "[link-data='" + str(self.link_data) + "']" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv2Link, ['link_id', 'link_data', 'type'], name, value) class Ospfv2Topology(Entity): """ Topology specific information .. attribute:: mt_id (key) MT\-ID for topology enabled link **type**\: int **range:** 0..4294967295 **config**\: False .. attribute:: metric Metric for the topology **type**\: int **range:** 0..65535 **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv2Link.Ospfv2Topology, self).__init__() self.yang_name = "ospfv2-topology" self.yang_parent_name = "ospfv2-link" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = ['mt_id'] self._child_classes = OrderedDict([]) self._leafs = OrderedDict([ ('mt_id', (YLeaf(YType.uint32, 'mt-id'), ['int'])), ('metric', (YLeaf(YType.uint16, 'metric'), ['int'])), ]) self.mt_id = None self.metric = None self._segment_path = lambda: "ospfv2-topology" + "[mt-id='" + str(self.mt_id) + "']" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv2Link.Ospfv2Topology, ['mt_id', 'metric'], name, value) class Ospfv2Topology(Entity): """ Summary LSA .. attribute:: mt_id (key) MT\-ID for topology enabled link **type**\: int **range:** 0..4294967295 **config**\: False .. 
attribute:: metric Metric for the topology **type**\: int **range:** 0..65535 **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv2Topology, self).__init__() self.yang_name = "ospfv2-topology" self.yang_parent_name = "area-scope-lsa" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = ['mt_id'] self._child_classes = OrderedDict([]) self._leafs = OrderedDict([ ('mt_id', (YLeaf(YType.uint32, 'mt-id'), ['int'])), ('metric', (YLeaf(YType.uint16, 'metric'), ['int'])), ]) self.mt_id = None self.metric = None self._segment_path = lambda: "ospfv2-topology" + "[mt-id='" + str(self.mt_id) + "']" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv2Topology, ['mt_id', 'metric'], name, value) class Ospfv2External(Entity): """ External LSA .. attribute:: mt_id (key) MT\-ID for topology enabled on the link **type**\: int **range:** 0..4294967295 **config**\: False .. attribute:: metric Metric for the topology **type**\: int **range:** 0..4294967295 **config**\: False .. attribute:: forwarding_address Forwarding address **type**\: union of the below types: **type**\: str **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)? **type**\: str **pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)? **config**\: False .. attribute:: external_route_tag Route tag **type**\: int **range:** 0..4294967295 **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv2External, self).__init__() self.yang_name = "ospfv2-external" self.yang_parent_name = "area-scope-lsa" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = ['mt_id'] self._child_classes = OrderedDict([]) self._leafs = OrderedDict([ ('mt_id', (YLeaf(YType.uint32, 'mt-id'), ['int'])), ('metric', (YLeaf(YType.uint32, 'metric'), ['int'])), ('forwarding_address', (YLeaf(YType.str, 'forwarding-address'), ['str','str'])), ('external_route_tag', (YLeaf(YType.uint32, 'external-route-tag'), ['int'])), ]) self.mt_id = None self.metric = None self.forwarding_address = None self.external_route_tag = None self._segment_path = lambda: "ospfv2-external" + "[mt-id='" + str(self.mt_id) + "']" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv2External, ['mt_id', 'metric', 'forwarding_address', 'external_route_tag'], name, value) class Ospfv3Lsa(Entity): """ OSPFv3 LSA .. attribute:: header Decoded OSPFv3 LSA header **type**\: :py:class:`Header <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.Header>` **config**\: False .. 
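Continuing the sketch above (attribute names follow the usual ydk-py lower-snake-case convention for the classes in this file, and ``ospf`` is assumed to have been read as shown earlier)::

    for inst in ospf.ospf_state.ospf_instance:
        for area in inst.ospf_area:
            for lsa_db in area.area_scope_lsa:
                for lsa in lsa_db.area_scope_lsa:
                    # Decoded OSPFv3 header fields live under
                    # ospfv3_lsa.header.lsa_header.
                    hdr = lsa.ospfv3_lsa.header.lsa_header
                    print(lsa_db.lsa_type, lsa.adv_router,
                          hdr.age, hdr.seq_num, hdr.checksum)

..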
attribute:: lsa_body Decoded OSPFv3 LSA body **type**\: :py:class:`LsaBody <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.LsaBody>` **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa, self).__init__() self.yang_name = "ospfv3-lsa" self.yang_parent_name = "area-scope-lsa" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([("header", ("header", OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.Header)), ("lsa-body", ("lsa_body", OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.LsaBody))]) self._leafs = OrderedDict() self.header = OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.Header() self.header.parent = self self._children_name_map["header"] = "header" self.lsa_body = OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.LsaBody() self.lsa_body.parent = self self._children_name_map["lsa_body"] = "lsa-body" self._segment_path = lambda: "ospfv3-lsa" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa, [], name, value) class Header(Entity): """ Decoded OSPFv3 LSA header .. attribute:: lsa_id LSA ID **type**\: union of the below types: **type**\: str **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)? **type**\: str **pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)? **config**\: False .. attribute:: lsa_header LSA header **type**\: :py:class:`LsaHeader <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.Header.LsaHeader>` **config**\: False .. 
attribute:: lsa_hdr_options OSPFv3 LSA options **type**\: :py:class:`Ospfv3LsaOptions <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.Ospfv3LsaOptions>` **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.Header, self).__init__() self.yang_name = "header" self.yang_parent_name = "ospfv3-lsa" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([("lsa-header", ("lsa_header", OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.Header.LsaHeader))]) self._leafs = OrderedDict([ ('lsa_id', (YLeaf(YType.str, 'lsa-id'), ['str','str'])), ('lsa_hdr_options', (YLeaf(YType.bits, 'lsa-hdr-options'), ['Bits'])), ]) self.lsa_id = None self.lsa_hdr_options = Bits() self.lsa_header = OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.Header.LsaHeader() self.lsa_header.parent = self self._children_name_map["lsa_header"] = "lsa-header" self._segment_path = lambda: "header" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.Header, ['lsa_id', 'lsa_hdr_options'], name, value) class LsaHeader(Entity): """ LSA header .. attribute:: age LSA age **type**\: int **range:** 0..65535 **config**\: False .. attribute:: type LSA type **type**\: int **range:** 0..65535 **config**\: False .. attribute:: adv_router LSA advertising router **type**\: int **range:** 0..4294967295 **config**\: False .. attribute:: seq_num LSA sequence number **type**\: str **config**\: False .. attribute:: checksum LSA checksum **type**\: str **config**\: False .. attribute:: length LSA length **type**\: int **range:** 0..65535 **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.Header.LsaHeader, self).__init__() self.yang_name = "lsa-header" self.yang_parent_name = "header" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([]) self._leafs = OrderedDict([ ('age', (YLeaf(YType.uint16, 'age'), ['int'])), ('type', (YLeaf(YType.uint16, 'type'), ['int'])), ('adv_router', (YLeaf(YType.uint32, 'adv-router'), ['int'])), ('seq_num', (YLeaf(YType.str, 'seq-num'), ['str'])), ('checksum', (YLeaf(YType.str, 'checksum'), ['str'])), ('length', (YLeaf(YType.uint16, 'length'), ['int'])), ]) self.age = None self.type = None self.adv_router = None self.seq_num = None self.checksum = None self.length = None self._segment_path = lambda: "lsa-header" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.Header.LsaHeader, ['age', 'type', 'adv_router', 'seq_num', 'checksum', 'length'], name, value) class LsaBody(Entity): """ Decoded OSPFv3 LSA body .. attribute:: network OSPFv3 network **type**\: :py:class:`Network <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.LsaBody.Network>` **config**\: False .. 
attribute:: prefix OSPFv3 inter area prefix **type**\: :py:class:`Prefix <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.LsaBody.Prefix>` **config**\: False .. attribute:: ia_router OSPFv3 inter area router **type**\: :py:class:`IaRouter <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.LsaBody.IaRouter>` **config**\: False .. attribute:: lsa_external OSPFv3 LSA external **type**\: :py:class:`LsaExternal <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.LsaBody.LsaExternal>` **config**\: False .. attribute:: nssa OSPFv3 NSSA **type**\: :py:class:`Nssa <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.LsaBody.Nssa>` **config**\: False .. attribute:: link_data OSPFv3 Link data **type**\: :py:class:`LinkData <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.LsaBody.LinkData>` **config**\: False .. attribute:: ia_prefix OSPFv3 Intra area prefixes **type**\: :py:class:`IaPrefix <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.LsaBody.IaPrefix>` **config**\: False .. attribute:: lsa_flag_options LSA options **type**\: :py:class:`Ospfv3LsaOptions <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.Ospfv3LsaOptions>` **config**\: False .. attribute:: lsa_body_flags LSA Body Flags **type**\: :py:class:`Ospfv3LsaBodyFlagOptions <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.Ospfv3LsaBodyFlagOptions>` **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.LsaBody, self).__init__() self.yang_name = "lsa-body" self.yang_parent_name = "ospfv3-lsa" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([("network", ("network", OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.LsaBody.Network)), ("prefix", ("prefix", OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.LsaBody.Prefix)), ("ia-router", ("ia_router", OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.LsaBody.IaRouter)), ("lsa-external", ("lsa_external", OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.LsaBody.LsaExternal)), ("nssa", ("nssa", OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.LsaBody.Nssa)), ("link-data", ("link_data", OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.LsaBody.LinkData)), ("ia-prefix", ("ia_prefix", OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.LsaBody.IaPrefix))]) self._leafs = OrderedDict([ ('lsa_flag_options', (YLeaf(YType.bits, 'lsa-flag-options'), ['Bits'])), ('lsa_body_flags', (YLeaf(YType.bits, 'lsa-body-flags'), ['Bits'])), ]) self.lsa_flag_options = Bits() self.lsa_body_flags = Bits() self.network = OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.LsaBody.Network() self.network.parent = self self._children_name_map["network"] = "network" self.prefix = 
OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.LsaBody.Prefix() self.prefix.parent = self self._children_name_map["prefix"] = "prefix" self.ia_router = OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.LsaBody.IaRouter() self.ia_router.parent = self self._children_name_map["ia_router"] = "ia-router" self.lsa_external = OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.LsaBody.LsaExternal() self.lsa_external.parent = self self._children_name_map["lsa_external"] = "lsa-external" self.nssa = OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.LsaBody.Nssa() self.nssa.parent = self self._children_name_map["nssa"] = "nssa" self.link_data = OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.LsaBody.LinkData() self.link_data.parent = self self._children_name_map["link_data"] = "link-data" self.ia_prefix = OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.LsaBody.IaPrefix() self.ia_prefix.parent = self self._children_name_map["ia_prefix"] = "ia-prefix" self._segment_path = lambda: "lsa-body" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.LsaBody, ['lsa_flag_options', 'lsa_body_flags'], name, value) class Network(Entity): """ OSPFv3 network .. attribute:: attached_router List of the routers attached to the network **type**\: list of int **range:** 0..4294967295 **config**\: False .. attribute:: lsa_net_options Network LSA options **type**\: :py:class:`Ospfv3LsaOptions <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.Ospfv3LsaOptions>` **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.LsaBody.Network, self).__init__() self.yang_name = "network" self.yang_parent_name = "lsa-body" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([]) self._leafs = OrderedDict([ ('attached_router', (YLeafList(YType.uint32, 'attached-router'), ['int'])), ('lsa_net_options', (YLeaf(YType.bits, 'lsa-net-options'), ['Bits'])), ]) self.attached_router = [] self.lsa_net_options = Bits() self._segment_path = lambda: "network" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.LsaBody.Network, ['attached_router', 'lsa_net_options'], name, value) class Prefix(Entity): """ OSPFv3 inter area prefix .. attribute:: metric Metric **type**\: int **range:** 0..4294967295 **config**\: False .. attribute:: ia_prefix Inter area Prefix **type**\: str **config**\: False .. 
attribute:: ia_prefix_options Inter area prefix options **type**\: str **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.LsaBody.Prefix, self).__init__() self.yang_name = "prefix" self.yang_parent_name = "lsa-body" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([]) self._leafs = OrderedDict([ ('metric', (YLeaf(YType.uint32, 'metric'), ['int'])), ('ia_prefix', (YLeaf(YType.str, 'ia-prefix'), ['str'])), ('ia_prefix_options', (YLeaf(YType.str, 'ia-prefix-options'), ['str'])), ]) self.metric = None self.ia_prefix = None self.ia_prefix_options = None self._segment_path = lambda: "prefix" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.LsaBody.Prefix, ['metric', 'ia_prefix', 'ia_prefix_options'], name, value) class IaRouter(Entity): """ OSPFv3 inter area router .. attribute:: metric Metric **type**\: int **range:** 0..4294967295 **config**\: False .. attribute:: destination_router_id Router ID of the router being described by the LSA **type**\: int **range:** 0..4294967295 **config**\: False .. attribute:: lsa_ia_options Inter area LSA options **type**\: :py:class:`Ospfv3LsaOptions <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.Ospfv3LsaOptions>` **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.LsaBody.IaRouter, self).__init__() self.yang_name = "ia-router" self.yang_parent_name = "lsa-body" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([]) self._leafs = OrderedDict([ ('metric', (YLeaf(YType.uint32, 'metric'), ['int'])), ('destination_router_id', (YLeaf(YType.uint32, 'destination-router-id'), ['int'])), ('lsa_ia_options', (YLeaf(YType.bits, 'lsa-ia-options'), ['Bits'])), ]) self.metric = None self.destination_router_id = None self.lsa_ia_options = Bits() self._segment_path = lambda: "ia-router" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.LsaBody.IaRouter, ['metric', 'destination_router_id', 'lsa_ia_options'], name, value) class LsaExternal(Entity): """ OSPFv3 LSA external .. attribute:: metric Metric **type**\: int **range:** 0..4294967295 **config**\: False .. attribute:: flags LSA Flags **type**\: :py:class:`Flags <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.LsaBody.LsaExternal.Flags>` **config**\: False .. attribute:: referenced_ls_type Referenced Link State type **type**\: int **range:** 0..65535 **config**\: False .. attribute:: external_prefix Prefix **type**\: str **config**\: False .. attribute:: external_prefix_options Prefix options **type**\: str **config**\: False .. attribute:: forwarding_address Forwarding address **type**\: union of the below types: **type**\: str **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)? 
**type**\: str **pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)? **config**\: False .. attribute:: external_route_tag Route tag **type**\: int **range:** 0..4294967295 **config**\: False .. attribute:: referenced_link_state_id Referenced Link State ID **type**\: int **range:** 0..4294967295 **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.LsaBody.LsaExternal, self).__init__() self.yang_name = "lsa-external" self.yang_parent_name = "lsa-body" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([("flags", ("flags", OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.LsaBody.LsaExternal.Flags))]) self._leafs = OrderedDict([ ('metric', (YLeaf(YType.uint32, 'metric'), ['int'])), ('referenced_ls_type', (YLeaf(YType.uint16, 'referenced-ls-type'), ['int'])), ('external_prefix', (YLeaf(YType.str, 'external-prefix'), ['str'])), ('external_prefix_options', (YLeaf(YType.str, 'external-prefix-options'), ['str'])), ('forwarding_address', (YLeaf(YType.str, 'forwarding-address'), ['str','str'])), ('external_route_tag', (YLeaf(YType.uint32, 'external-route-tag'), ['int'])), ('referenced_link_state_id', (YLeaf(YType.uint32, 'referenced-link-state-id'), ['int'])), ]) self.metric = None self.referenced_ls_type = None self.external_prefix = None self.external_prefix_options = None self.forwarding_address = None self.external_route_tag = None self.referenced_link_state_id = None self.flags = OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.LsaBody.LsaExternal.Flags() self.flags.parent = self self._children_name_map["flags"] = "flags" self._segment_path = lambda: "lsa-external" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.LsaBody.LsaExternal, ['metric', 'referenced_ls_type', 'external_prefix', 'external_prefix_options', 'forwarding_address', 'external_route_tag', 'referenced_link_state_id'], name, value) class Flags(Entity): """ LSA Flags .. attribute:: e_flag When set, the metric specified is a Type 2 external metric **type**\: bool **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.LsaBody.LsaExternal.Flags, self).__init__() self.yang_name = "flags" self.yang_parent_name = "lsa-external" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([]) self._leafs = OrderedDict([ ('e_flag', (YLeaf(YType.boolean, 'e-flag'), ['bool'])), ]) self.e_flag = None self._segment_path = lambda: "flags" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.LsaBody.LsaExternal.Flags, ['e_flag'], name, value) class Nssa(Entity): """ OSPFv3 NSSA .. 
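The ``e_flag`` leaf on the external and NSSA flag containers marks the metric as an OSPF type 2 external metric; a minimal check (``lsa`` as in the loop sketched earlier, attribute names assumed per ydk-py conventions)::

    ext = lsa.ospfv3_lsa.lsa_body.lsa_external
    if ext.metric is not None:
        # e_flag set means the metric is a type 2 external metric.
        kind = 'type 2' if ext.flags.e_flag else 'type 1'
        print(ext.external_prefix, ext.metric, kind)

..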
attribute:: lsa_nssa_external NSSA LSA **type**\: :py:class:`LsaNssaExternal <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.LsaBody.Nssa.LsaNssaExternal>` **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.LsaBody.Nssa, self).__init__() self.yang_name = "nssa" self.yang_parent_name = "lsa-body" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([("lsa-nssa-external", ("lsa_nssa_external", OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.LsaBody.Nssa.LsaNssaExternal))]) self._leafs = OrderedDict() self.lsa_nssa_external = OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.LsaBody.Nssa.LsaNssaExternal() self.lsa_nssa_external.parent = self self._children_name_map["lsa_nssa_external"] = "lsa-nssa-external" self._segment_path = lambda: "nssa" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.LsaBody.Nssa, [], name, value) class LsaNssaExternal(Entity): """ NSSA LSA .. attribute:: metric Metric **type**\: int **range:** 0..4294967295 **config**\: False .. attribute:: flags LSA Flags **type**\: :py:class:`Flags <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.LsaBody.Nssa.LsaNssaExternal.Flags>` **config**\: False .. attribute:: referenced_ls_type Referenced Link State type **type**\: int **range:** 0..65535 **config**\: False .. attribute:: external_prefix Prefix **type**\: str **config**\: False .. attribute:: external_prefix_options Prefix options **type**\: str **config**\: False .. attribute:: forwarding_address Forwarding address **type**\: union of the below types: **type**\: str **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)? **type**\: str **pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)? **config**\: False .. attribute:: external_route_tag Route tag **type**\: int **range:** 0..4294967295 **config**\: False .. 
attribute:: referenced_link_state_id Referenced Link State ID **type**\: int **range:** 0..4294967295 **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.LsaBody.Nssa.LsaNssaExternal, self).__init__() self.yang_name = "lsa-nssa-external" self.yang_parent_name = "nssa" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([("flags", ("flags", OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.LsaBody.Nssa.LsaNssaExternal.Flags))]) self._leafs = OrderedDict([ ('metric', (YLeaf(YType.uint32, 'metric'), ['int'])), ('referenced_ls_type', (YLeaf(YType.uint16, 'referenced-ls-type'), ['int'])), ('external_prefix', (YLeaf(YType.str, 'external-prefix'), ['str'])), ('external_prefix_options', (YLeaf(YType.str, 'external-prefix-options'), ['str'])), ('forwarding_address', (YLeaf(YType.str, 'forwarding-address'), ['str','str'])), ('external_route_tag', (YLeaf(YType.uint32, 'external-route-tag'), ['int'])), ('referenced_link_state_id', (YLeaf(YType.uint32, 'referenced-link-state-id'), ['int'])), ]) self.metric = None self.referenced_ls_type = None self.external_prefix = None self.external_prefix_options = None self.forwarding_address = None self.external_route_tag = None self.referenced_link_state_id = None self.flags = OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.LsaBody.Nssa.LsaNssaExternal.Flags() self.flags.parent = self self._children_name_map["flags"] = "flags" self._segment_path = lambda: "lsa-nssa-external" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.LsaBody.Nssa.LsaNssaExternal, ['metric', 'referenced_ls_type', 'external_prefix', 'external_prefix_options', 'forwarding_address', 'external_route_tag', 'referenced_link_state_id'], name, value) class Flags(Entity): """ LSA Flags .. attribute:: e_flag When set, the metric specified is a Type 2 external metric **type**\: bool **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.LsaBody.Nssa.LsaNssaExternal.Flags, self).__init__() self.yang_name = "flags" self.yang_parent_name = "lsa-nssa-external" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([]) self._leafs = OrderedDict([ ('e_flag', (YLeaf(YType.boolean, 'e-flag'), ['bool'])), ]) self.e_flag = None self._segment_path = lambda: "flags" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.LsaBody.Nssa.LsaNssaExternal.Flags, ['e_flag'], name, value) class LinkData(Entity): """ OSPFv3 Link data .. attribute:: rtr_priority Router priority of the interface **type**\: int **range:** 0..255 **config**\: False .. attribute:: link_local_interface_address The originating router's link\-local interface address on the link **type**\: union of the below types: **type**\: str **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)? 
**type**\: str **pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)? **config**\: False .. attribute:: num_of_prefixes Number of prefixes **type**\: int **range:** 0..4294967295 **config**\: False .. attribute:: lsa_id_options Link data LSA options **type**\: :py:class:`Ospfv3LsaOptions <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.Ospfv3LsaOptions>` **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.LsaBody.LinkData, self).__init__() self.yang_name = "link-data" self.yang_parent_name = "lsa-body" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([]) self._leafs = OrderedDict([ ('rtr_priority', (YLeaf(YType.uint8, 'rtr-priority'), ['int'])), ('link_local_interface_address', (YLeaf(YType.str, 'link-local-interface-address'), ['str','str'])), ('num_of_prefixes', (YLeaf(YType.uint32, 'num-of-prefixes'), ['int'])), ('lsa_id_options', (YLeaf(YType.bits, 'lsa-id-options'), ['Bits'])), ]) self.rtr_priority = None self.link_local_interface_address = None self.num_of_prefixes = None self.lsa_id_options = Bits() self._segment_path = lambda: "link-data" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.LsaBody.LinkData, ['rtr_priority', 'link_local_interface_address', 'num_of_prefixes', 'lsa_id_options'], name, value) class IaPrefix(Entity): """ OSPFv3 Intra area prefixes .. attribute:: referenced_ls_type Referenced Link State type **type**\: int **range:** 0..65535 **config**\: False .. attribute:: referenced_link_state_id Referenced Link State ID **type**\: int **range:** 0..4294967295 **config**\: False .. attribute:: referenced_adv_router Referenced Advertising Router **type**\: union of the below types: **type**\: str **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)? **type**\: str **pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)? **config**\: False .. 
attribute:: num_of_prefixes Number of prefixes **type**\: int **range:** 0..65535 **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.LsaBody.IaPrefix, self).__init__() self.yang_name = "ia-prefix" self.yang_parent_name = "lsa-body" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([]) self._leafs = OrderedDict([ ('referenced_ls_type', (YLeaf(YType.uint16, 'referenced-ls-type'), ['int'])), ('referenced_link_state_id', (YLeaf(YType.uint32, 'referenced-link-state-id'), ['int'])), ('referenced_adv_router', (YLeaf(YType.str, 'referenced-adv-router'), ['str','str'])), ('num_of_prefixes', (YLeaf(YType.uint16, 'num-of-prefixes'), ['int'])), ]) self.referenced_ls_type = None self.referenced_link_state_id = None self.referenced_adv_router = None self.num_of_prefixes = None self._segment_path = lambda: "ia-prefix" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Lsa.LsaBody.IaPrefix, ['referenced_ls_type', 'referenced_link_state_id', 'referenced_adv_router', 'num_of_prefixes'], name, value) class Ospfv3Link(Entity): """ OSPFv3 links .. attribute:: interface_id (key) Interface ID **type**\: int **range:** 0..4294967295 **config**\: False .. attribute:: neighbor_interface_id (key) Neighbor interface ID **type**\: int **range:** 0..4294967295 **config**\: False .. attribute:: neighbor_router_id (key) Neighbor router ID **type**\: int **range:** 0..4294967295 **config**\: False .. attribute:: type Link type **type**\: int **range:** 0..255 **config**\: False .. attribute:: metric Metric **type**\: int **range:** 0..65535 **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Link, self).__init__() self.yang_name = "ospfv3-link" self.yang_parent_name = "area-scope-lsa" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = ['interface_id','neighbor_interface_id','neighbor_router_id'] self._child_classes = OrderedDict([]) self._leafs = OrderedDict([ ('interface_id', (YLeaf(YType.uint32, 'interface-id'), ['int'])), ('neighbor_interface_id', (YLeaf(YType.uint32, 'neighbor-interface-id'), ['int'])), ('neighbor_router_id', (YLeaf(YType.uint32, 'neighbor-router-id'), ['int'])), ('type', (YLeaf(YType.uint8, 'type'), ['int'])), ('metric', (YLeaf(YType.uint16, 'metric'), ['int'])), ]) self.interface_id = None self.neighbor_interface_id = None self.neighbor_router_id = None self.type = None self.metric = None self._segment_path = lambda: "ospfv3-link" + "[interface-id='" + str(self.interface_id) + "']" + "[neighbor-interface-id='" + str(self.neighbor_interface_id) + "']" + "[neighbor-router-id='" + str(self.neighbor_router_id) + "']" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Link, ['interface_id', 'neighbor_interface_id', 'neighbor_router_id', 'type', 'metric'], name, value) class Ospfv3Prefix(Entity): """ OSPFv3 prefix\-list .. attribute:: prefix (key) Prefix **type**\: str **config**\: False .. 
attribute:: prefix_options Prefix options **type**\: str **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Prefix, self).__init__() self.yang_name = "ospfv3-prefix" self.yang_parent_name = "area-scope-lsa" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = ['prefix'] self._child_classes = OrderedDict([]) self._leafs = OrderedDict([ ('prefix', (YLeaf(YType.str, 'prefix'), ['str'])), ('prefix_options', (YLeaf(YType.str, 'prefix-options'), ['str'])), ]) self.prefix = None self.prefix_options = None self._segment_path = lambda: "ospfv3-prefix" + "[prefix='" + str(self.prefix) + "']" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3Prefix, ['prefix', 'prefix_options'], name, value) class Ospfv3IaPrefix(Entity): """ OSPFv3 intra\-area prefix\-list .. attribute:: prefix (key) Prefix **type**\: str **config**\: False .. attribute:: prefix_options Prefix options **type**\: str **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3IaPrefix, self).__init__() self.yang_name = "ospfv3-ia-prefix" self.yang_parent_name = "area-scope-lsa" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = ['prefix'] self._child_classes = OrderedDict([]) self._leafs = OrderedDict([ ('prefix', (YLeaf(YType.str, 'prefix'), ['str'])), ('prefix_options', (YLeaf(YType.str, 'prefix-options'), ['str'])), ]) self.prefix = None self.prefix_options = None self._segment_path = lambda: "ospfv3-ia-prefix" + "[prefix='" + str(self.prefix) + "']" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.OspfArea.AreaScopeLsa.AreaScopeLsa_.Ospfv3IaPrefix, ['prefix', 'prefix_options'], name, value) class LinkScopeLsas(Entity): """ List OSPF link scope LSA .. attribute:: lsa_type (key) OSPF link scope LSA type **type**\: int **range:** 0..4294967295 **config**\: False .. attribute:: link_scope_lsa List of OSPF link scope LSAs **type**\: list of :py:class:`LinkScopeLsa <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa>` **config**\: False .. 
attribute:: area_scope_lsa List OSPF area scope LSA databases **type**\: list of :py:class:`AreaScopeLsa <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa>` **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.LinkScopeLsas, self).__init__() self.yang_name = "link-scope-lsas" self.yang_parent_name = "ospf-instance" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = ['lsa_type'] self._child_classes = OrderedDict([("link-scope-lsa", ("link_scope_lsa", OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa)), ("area-scope-lsa", ("area_scope_lsa", OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa))]) self._leafs = OrderedDict([ ('lsa_type', (YLeaf(YType.uint32, 'lsa-type'), ['int'])), ]) self.lsa_type = None self.link_scope_lsa = YList(self) self.area_scope_lsa = YList(self) self._segment_path = lambda: "link-scope-lsas" + "[lsa-type='" + str(self.lsa_type) + "']" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.LinkScopeLsas, ['lsa_type'], name, value) class LinkScopeLsa(Entity): """ List of OSPF link scope LSAs .. attribute:: lsa_id (key) LSA ID **type**\: int **range:** 0..4294967295 **config**\: False .. attribute:: adv_router (key) Advertising router **type**\: union of the below types: **type**\: str **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)? **type**\: str **pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)? **config**\: False .. attribute:: decoded_completed The OSPF LSA body is fully decoded **type**\: bool **config**\: False .. attribute:: raw_data The complete LSA in network byte order as received/sent over the wire **type**\: list of int **range:** 0..255 **config**\: False .. attribute:: version Version **type**\: int **range:** 0..4294967295 **config**\: False .. attribute:: ospfv2_lsa OSPFv2 LSA **type**\: :py:class:`Ospfv2Lsa <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv2Lsa>` **config**\: False .. attribute:: ospfv2_link OSPFv2 LSA link **type**\: list of :py:class:`Ospfv2Link <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv2Link>` **config**\: False .. attribute:: ospfv2_topology Summary LSA **type**\: list of :py:class:`Ospfv2Topology <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv2Topology>` **config**\: False .. attribute:: ospfv2_external External LSA **type**\: list of :py:class:`Ospfv2External <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv2External>` **config**\: False .. attribute:: ospfv2_unknown_tlv OSPFv2 Unknown TLV **type**\: list of :py:class:`Ospfv2UnknownTlv <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv2UnknownTlv>` **config**\: False .. 
attribute:: ospfv3_lsa_val OSPFv3 LSA **type**\: :py:class:`Ospfv3LsaVal <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal>` **config**\: False .. attribute:: ospfv3_link OSPFv3 links **type**\: list of :py:class:`Ospfv3Link <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3Link>` **config**\: False .. attribute:: ospfv3_prefix_list OSPFv3 prefix\-list **type**\: list of :py:class:`Ospfv3PrefixList <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3PrefixList>` **config**\: False .. attribute:: ospfv3_ia_prefix OSPFv3 intra\-area prefix\-list **type**\: list of :py:class:`Ospfv3IaPrefix <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3IaPrefix>` **config**\: False .. attribute:: multi_topology OSPF multi\-topology interface augmentation **type**\: list of :py:class:`MultiTopology <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.MultiTopology>` **config**\: False .. attribute:: router_address Router address **type**\: union of the below types: **type**\: str **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)? **type**\: str **pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)? **config**\: False .. attribute:: tlv Link TLV **type**\: :py:class:`Tlv <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Tlv>` **config**\: False .. 
attribute:: unknown_sub_tlv OSPFv2 Unknown sub TLV **type**\: list of :py:class:`UnknownSubTlv <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.UnknownSubTlv>` **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa, self).__init__() self.yang_name = "link-scope-lsa" self.yang_parent_name = "link-scope-lsas" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = ['lsa_id','adv_router'] self._child_classes = OrderedDict([("ospfv2-lsa", ("ospfv2_lsa", OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv2Lsa)), ("ospfv2-link", ("ospfv2_link", OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv2Link)), ("ospfv2-topology", ("ospfv2_topology", OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv2Topology)), ("ospfv2-external", ("ospfv2_external", OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv2External)), ("ospfv2-unknown-tlv", ("ospfv2_unknown_tlv", OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv2UnknownTlv)), ("ospfv3-lsa-val", ("ospfv3_lsa_val", OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal)), ("ospfv3-link", ("ospfv3_link", OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3Link)), ("ospfv3-prefix-list", ("ospfv3_prefix_list", OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3PrefixList)), ("ospfv3-ia-prefix", ("ospfv3_ia_prefix", OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3IaPrefix)), ("multi-topology", ("multi_topology", OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.MultiTopology)), ("tlv", ("tlv", OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Tlv)), ("unknown-sub-tlv", ("unknown_sub_tlv", OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.UnknownSubTlv))]) self._leafs = OrderedDict([ ('lsa_id', (YLeaf(YType.uint32, 'lsa-id'), ['int'])), ('adv_router', (YLeaf(YType.str, 'adv-router'), ['str','str'])), ('decoded_completed', (YLeaf(YType.boolean, 'decoded-completed'), ['bool'])), ('raw_data', (YLeafList(YType.uint8, 'raw-data'), ['int'])), ('version', (YLeaf(YType.uint32, 'version'), ['int'])), ('router_address', (YLeaf(YType.str, 'router-address'), ['str','str'])), ]) self.lsa_id = None self.adv_router = None self.decoded_completed = None self.raw_data = [] self.version = None self.router_address = None self.ospfv2_lsa = OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv2Lsa() self.ospfv2_lsa.parent = self self._children_name_map["ospfv2_lsa"] = "ospfv2-lsa" self.ospfv3_lsa_val = OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal() self.ospfv3_lsa_val.parent = self self._children_name_map["ospfv3_lsa_val"] = "ospfv3-lsa-val" self.tlv = OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Tlv() self.tlv.parent = self self._children_name_map["tlv"] = "tlv" self.ospfv2_link = YList(self) self.ospfv2_topology = YList(self) self.ospfv2_external = YList(self) self.ospfv2_unknown_tlv = YList(self) self.ospfv3_link = YList(self) self.ospfv3_prefix_list = YList(self) self.ospfv3_ia_prefix = YList(self) self.multi_topology = YList(self) self.unknown_sub_tlv = YList(self) self._segment_path = lambda: "link-scope-lsa" + "[lsa-id='" + str(self.lsa_id) + "']" + "[adv-router='" + str(self.adv_router) + "']" self._is_frozen = True 
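# Illustrative usage sketch (not part of the generated bindings): the
# link-scope LSA lists declared above can be retrieved from a device with
# ydk's CRUD read service. The address and credentials are placeholders.
#
#     from ydk.services import CRUDService
#     from ydk.providers import NetconfServiceProvider
#     from ydk.models.cisco_ios_xe import Cisco_IOS_XE_ospf_oper
#
#     provider = NetconfServiceProvider(address='10.0.0.1', port=830,
#                                       username='admin', password='admin')
#     crud = CRUDService()
#     ospf_oper = crud.read(provider, Cisco_IOS_XE_ospf_oper.OspfOperData())
#     for instance in ospf_oper.ospf_state.ospf_instance:
#         for lsas in instance.link_scope_lsas:
#             for lsa in lsas.link_scope_lsa:
#                 # lsa_id and adv_router are the list keys declared above
#                 print(lsa.lsa_id, lsa.adv_router, lsa.decoded_completed)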
def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa, ['lsa_id', 'adv_router', 'decoded_completed', 'raw_data', 'version', 'router_address'], name, value) class Ospfv2Lsa(Entity): """ OSPFv2 LSA .. attribute:: header Decoded OSPFv2 LSA header data **type**\: :py:class:`Header <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv2Lsa.Header>` **config**\: False .. attribute:: lsa_body Decoded OSPFv2 LSA body data **type**\: :py:class:`LsaBody <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv2Lsa.LsaBody>` **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv2Lsa, self).__init__() self.yang_name = "ospfv2-lsa" self.yang_parent_name = "link-scope-lsa" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([("header", ("header", OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv2Lsa.Header)), ("lsa-body", ("lsa_body", OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv2Lsa.LsaBody))]) self._leafs = OrderedDict() self.header = OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv2Lsa.Header() self.header.parent = self self._children_name_map["header"] = "header" self.lsa_body = OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv2Lsa.LsaBody() self.lsa_body.parent = self self._children_name_map["lsa_body"] = "lsa-body" self._segment_path = lambda: "ospfv2-lsa" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv2Lsa, [], name, value) class Header(Entity): """ Decoded OSPFv2 LSA header data .. attribute:: lsa_id LSA ID **type**\: union of the below types: **type**\: str **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)? **type**\: str **pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)? **config**\: False .. attribute:: opaque_type Opaque type **type**\: int **range:** 0..255 **config**\: False .. attribute:: opaque_id Opaque ID **type**\: int **range:** 0..4294967295 **config**\: False .. attribute:: age LSA age **type**\: int **range:** 0..65535 **config**\: False .. attribute:: type LSA type **type**\: int **range:** 0..65535 **config**\: False .. attribute:: adv_router LSA advertising router **type**\: int **range:** 0..4294967295 **config**\: False .. attribute:: seq_num LSA sequence number **type**\: str **config**\: False .. attribute:: checksum LSA checksum **type**\: str **config**\: False .. attribute:: length LSA length **type**\: int **range:** 0..65535 **config**\: False .. 
attribute:: flag_options LSA options **type**\: :py:class:`LsaFlagOptions <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.LsaFlagOptions>` **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv2Lsa.Header, self).__init__() self.yang_name = "header" self.yang_parent_name = "ospfv2-lsa" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([]) self._leafs = OrderedDict([ ('lsa_id', (YLeaf(YType.str, 'lsa-id'), ['str','str'])), ('opaque_type', (YLeaf(YType.uint8, 'opaque-type'), ['int'])), ('opaque_id', (YLeaf(YType.uint32, 'opaque-id'), ['int'])), ('age', (YLeaf(YType.uint16, 'age'), ['int'])), ('type', (YLeaf(YType.uint16, 'type'), ['int'])), ('adv_router', (YLeaf(YType.uint32, 'adv-router'), ['int'])), ('seq_num', (YLeaf(YType.str, 'seq-num'), ['str'])), ('checksum', (YLeaf(YType.str, 'checksum'), ['str'])), ('length', (YLeaf(YType.uint16, 'length'), ['int'])), ('flag_options', (YLeaf(YType.bits, 'flag-options'), ['Bits'])), ]) self.lsa_id = None self.opaque_type = None self.opaque_id = None self.age = None self.type = None self.adv_router = None self.seq_num = None self.checksum = None self.length = None self.flag_options = Bits() self._segment_path = lambda: "header" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv2Lsa.Header, ['lsa_id', 'opaque_type', 'opaque_id', 'age', 'type', 'adv_router', 'seq_num', 'checksum', 'length', 'flag_options'], name, value) class LsaBody(Entity): """ Decoded OSPFv2 LSA body data .. attribute:: num_of_links Number of links **type**\: int **range:** 0..65535 **config**\: False .. attribute:: network Network details **type**\: :py:class:`Network <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv2Lsa.LsaBody.Network>` **config**\: False .. attribute:: summary_mask Summary mask **type**\: union of the below types: **type**\: str **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)? **type**\: str **pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)? **config**\: False .. attribute:: external_mask External mask **type**\: union of the below types: **type**\: str **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)? **type**\: str **pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)? **config**\: False .. 
attribute:: body_flag_options LSA body flags **type**\: :py:class:`Ospfv2LsaBodyFlagsOptions <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.Ospfv2LsaBodyFlagsOptions>` **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv2Lsa.LsaBody, self).__init__() self.yang_name = "lsa-body" self.yang_parent_name = "ospfv2-lsa" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([("network", ("network", OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv2Lsa.LsaBody.Network))]) self._leafs = OrderedDict([ ('num_of_links', (YLeaf(YType.uint16, 'num-of-links'), ['int'])), ('summary_mask', (YLeaf(YType.str, 'summary-mask'), ['str','str'])), ('external_mask', (YLeaf(YType.str, 'external-mask'), ['str','str'])), ('body_flag_options', (YLeaf(YType.bits, 'body-flag-options'), ['Bits'])), ]) self.num_of_links = None self.summary_mask = None self.external_mask = None self.body_flag_options = Bits() self.network = OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv2Lsa.LsaBody.Network() self.network.parent = self self._children_name_map["network"] = "network" self._segment_path = lambda: "lsa-body" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv2Lsa.LsaBody, ['num_of_links', 'summary_mask', 'external_mask', 'body_flag_options'], name, value) class Network(Entity): """ Network details .. attribute:: network_mask IP network mask **type**\: union of the below types: **type**\: str **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)? **type**\: str **pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)? **config**\: False .. attribute:: attached_router List of the routers attached to the network **type**\: list of int **range:** 0..4294967295 **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv2Lsa.LsaBody.Network, self).__init__() self.yang_name = "network" self.yang_parent_name = "lsa-body" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([]) self._leafs = OrderedDict([ ('network_mask', (YLeaf(YType.str, 'network-mask'), ['str','str'])), ('attached_router', (YLeafList(YType.uint32, 'attached-router'), ['int'])), ]) self.network_mask = None self.attached_router = [] self._segment_path = lambda: "network" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv2Lsa.LsaBody.Network, ['network_mask', 'attached_router'], name, value) class Ospfv2Link(Entity): """ OSPFv2 LSA link .. attribute:: link_id (key) Link ID **type**\: int **range:** 0..4294967295 **config**\: False .. attribute:: link_data (key) Link data **type**\: int **range:** 0..4294967295 **config**\: False .. attribute:: type Link type **type**\: int **range:** 0..255 **config**\: False .. 
attribute:: ospfv2_topology Topology specific information **type**\: list of :py:class:`Ospfv2Topology <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv2Link.Ospfv2Topology>` **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv2Link, self).__init__() self.yang_name = "ospfv2-link" self.yang_parent_name = "link-scope-lsa" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = ['link_id','link_data'] self._child_classes = OrderedDict([("ospfv2-topology", ("ospfv2_topology", OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv2Link.Ospfv2Topology))]) self._leafs = OrderedDict([ ('link_id', (YLeaf(YType.uint32, 'link-id'), ['int'])), ('link_data', (YLeaf(YType.uint32, 'link-data'), ['int'])), ('type', (YLeaf(YType.uint8, 'type'), ['int'])), ]) self.link_id = None self.link_data = None self.type = None self.ospfv2_topology = YList(self) self._segment_path = lambda: "ospfv2-link" + "[link-id='" + str(self.link_id) + "']" + "[link-data='" + str(self.link_data) + "']" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv2Link, ['link_id', 'link_data', 'type'], name, value) class Ospfv2Topology(Entity): """ Topology specific information .. attribute:: mt_id (key) MT\-ID for topology enabled link **type**\: int **range:** 0..4294967295 **config**\: False .. attribute:: metric Metric for the topology **type**\: int **range:** 0..65535 **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv2Link.Ospfv2Topology, self).__init__() self.yang_name = "ospfv2-topology" self.yang_parent_name = "ospfv2-link" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = ['mt_id'] self._child_classes = OrderedDict([]) self._leafs = OrderedDict([ ('mt_id', (YLeaf(YType.uint32, 'mt-id'), ['int'])), ('metric', (YLeaf(YType.uint16, 'metric'), ['int'])), ]) self.mt_id = None self.metric = None self._segment_path = lambda: "ospfv2-topology" + "[mt-id='" + str(self.mt_id) + "']" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv2Link.Ospfv2Topology, ['mt_id', 'metric'], name, value) class Ospfv2Topology(Entity): """ Summary LSA .. attribute:: mt_id (key) MT\-ID for topology enabled link **type**\: int **range:** 0..4294967295 **config**\: False .. 
attribute:: metric Metric for the topology **type**\: int **range:** 0..65535 **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv2Topology, self).__init__() self.yang_name = "ospfv2-topology" self.yang_parent_name = "link-scope-lsa" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = ['mt_id'] self._child_classes = OrderedDict([]) self._leafs = OrderedDict([ ('mt_id', (YLeaf(YType.uint32, 'mt-id'), ['int'])), ('metric', (YLeaf(YType.uint16, 'metric'), ['int'])), ]) self.mt_id = None self.metric = None self._segment_path = lambda: "ospfv2-topology" + "[mt-id='" + str(self.mt_id) + "']" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv2Topology, ['mt_id', 'metric'], name, value) class Ospfv2External(Entity): """ External LSA .. attribute:: mt_id (key) MT\-ID for topology enabled on the link **type**\: int **range:** 0..4294967295 **config**\: False .. attribute:: metric Metric for the topology **type**\: int **range:** 0..4294967295 **config**\: False .. attribute:: forwarding_address Forwarding address **type**\: union of the below types: **type**\: str **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)? **type**\: str **pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)? **config**\: False .. attribute:: external_route_tag Route tag **type**\: int **range:** 0..4294967295 **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv2External, self).__init__() self.yang_name = "ospfv2-external" self.yang_parent_name = "link-scope-lsa" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = ['mt_id'] self._child_classes = OrderedDict([]) self._leafs = OrderedDict([ ('mt_id', (YLeaf(YType.uint32, 'mt-id'), ['int'])), ('metric', (YLeaf(YType.uint32, 'metric'), ['int'])), ('forwarding_address', (YLeaf(YType.str, 'forwarding-address'), ['str','str'])), ('external_route_tag', (YLeaf(YType.uint32, 'external-route-tag'), ['int'])), ]) self.mt_id = None self.metric = None self.forwarding_address = None self.external_route_tag = None self._segment_path = lambda: "ospfv2-external" + "[mt-id='" + str(self.mt_id) + "']" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv2External, ['mt_id', 'metric', 'forwarding_address', 'external_route_tag'], name, value) class Ospfv2UnknownTlv(Entity): """ OSPFv2 Unknown TLV .. attribute:: type (key) TLV type **type**\: int **range:** 0..65535 **config**\: False .. attribute:: length TLV length **type**\: int **range:** 0..65535 **config**\: False .. 
attribute:: value TLV value **type**\: list of int **range:** 0..255 **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv2UnknownTlv, self).__init__() self.yang_name = "ospfv2-unknown-tlv" self.yang_parent_name = "link-scope-lsa" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = ['type'] self._child_classes = OrderedDict([]) self._leafs = OrderedDict([ ('type', (YLeaf(YType.uint16, 'type'), ['int'])), ('length', (YLeaf(YType.uint16, 'length'), ['int'])), ('value', (YLeafList(YType.uint8, 'value'), ['int'])), ]) self.type = None self.length = None self.value = [] self._segment_path = lambda: "ospfv2-unknown-tlv" + "[type='" + str(self.type) + "']" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv2UnknownTlv, ['type', 'length', 'value'], name, value) class Ospfv3LsaVal(Entity): """ OSPFv3 LSA .. attribute:: header Decoded OSPFv3 LSA header **type**\: :py:class:`Header <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.Header>` **config**\: False .. attribute:: lsa_body Decoded OSPFv3 LSA body **type**\: :py:class:`LsaBody <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody>` **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal, self).__init__() self.yang_name = "ospfv3-lsa-val" self.yang_parent_name = "link-scope-lsa" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([("header", ("header", OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.Header)), ("lsa-body", ("lsa_body", OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody))]) self._leafs = OrderedDict() self.header = OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.Header() self.header.parent = self self._children_name_map["header"] = "header" self.lsa_body = OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody() self.lsa_body.parent = self self._children_name_map["lsa_body"] = "lsa-body" self._segment_path = lambda: "ospfv3-lsa-val" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal, [], name, value) class Header(Entity): """ Decoded OSPFv3 LSA header .. attribute:: lsa_id LSA ID **type**\: union of the below types: **type**\: str **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)? **type**\: str **pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)? **config**\: False .. attribute:: lsa_header LSA header **type**\: :py:class:`LsaHeader <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.Header.LsaHeader>` **config**\: False .. 
attribute:: lsa_hdr_options OSPFv3 LSA options **type**\: :py:class:`Ospfv3LsaOptions <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.Ospfv3LsaOptions>` **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.Header, self).__init__() self.yang_name = "header" self.yang_parent_name = "ospfv3-lsa-val" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([("lsa-header", ("lsa_header", OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.Header.LsaHeader))]) self._leafs = OrderedDict([ ('lsa_id', (YLeaf(YType.str, 'lsa-id'), ['str','str'])), ('lsa_hdr_options', (YLeaf(YType.bits, 'lsa-hdr-options'), ['Bits'])), ]) self.lsa_id = None self.lsa_hdr_options = Bits() self.lsa_header = OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.Header.LsaHeader() self.lsa_header.parent = self self._children_name_map["lsa_header"] = "lsa-header" self._segment_path = lambda: "header" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.Header, ['lsa_id', 'lsa_hdr_options'], name, value) class LsaHeader(Entity): """ LSA header .. attribute:: age LSA age **type**\: int **range:** 0..65535 **config**\: False .. attribute:: type LSA type **type**\: int **range:** 0..65535 **config**\: False .. attribute:: adv_router LSA advertising router **type**\: int **range:** 0..4294967295 **config**\: False .. attribute:: seq_num LSA sequence number **type**\: str **config**\: False .. attribute:: checksum LSA checksum **type**\: str **config**\: False .. attribute:: length LSA length **type**\: int **range:** 0..65535 **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.Header.LsaHeader, self).__init__() self.yang_name = "lsa-header" self.yang_parent_name = "header" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([]) self._leafs = OrderedDict([ ('age', (YLeaf(YType.uint16, 'age'), ['int'])), ('type', (YLeaf(YType.uint16, 'type'), ['int'])), ('adv_router', (YLeaf(YType.uint32, 'adv-router'), ['int'])), ('seq_num', (YLeaf(YType.str, 'seq-num'), ['str'])), ('checksum', (YLeaf(YType.str, 'checksum'), ['str'])), ('length', (YLeaf(YType.uint16, 'length'), ['int'])), ]) self.age = None self.type = None self.adv_router = None self.seq_num = None self.checksum = None self.length = None self._segment_path = lambda: "lsa-header" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.Header.LsaHeader, ['age', 'type', 'adv_router', 'seq_num', 'checksum', 'length'], name, value) class LsaBody(Entity): """ Decoded OSPFv3 LSA body .. attribute:: network OSPFv3 network **type**\: :py:class:`Network <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.Network>` **config**\: False .. attribute:: prefix OSPFv3 inter area prefix **type**\: :py:class:`Prefix <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.Prefix>` **config**\: False .. 
attribute:: ia_router OSPFv3 inter area router **type**\: :py:class:`IaRouter <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.IaRouter>` **config**\: False .. attribute:: lsa_external OSPFv3 LSA external **type**\: :py:class:`LsaExternal <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.LsaExternal>` **config**\: False .. attribute:: nssa OSPFv3 NSSA **type**\: :py:class:`Nssa <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.Nssa>` **config**\: False .. attribute:: link_data OSPFv3 Link data **type**\: :py:class:`LinkData <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.LinkData>` **config**\: False .. attribute:: ia_prefix OSPFv3 Intra area prefixes **type**\: :py:class:`IaPrefix <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.IaPrefix>` **config**\: False .. attribute:: lsa_flag_options LSA options **type**\: :py:class:`Ospfv3LsaOptions <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.Ospfv3LsaOptions>` **config**\: False .. attribute:: lsa_body_flags LSA Body Flags **type**\: :py:class:`Ospfv3LsaBodyFlagOptions <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.Ospfv3LsaBodyFlagOptions>` **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody, self).__init__() self.yang_name = "lsa-body" self.yang_parent_name = "ospfv3-lsa-val" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([("network", ("network", OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.Network)), ("prefix", ("prefix", OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.Prefix)), ("ia-router", ("ia_router", OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.IaRouter)), ("lsa-external", ("lsa_external", OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.LsaExternal)), ("nssa", ("nssa", OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.Nssa)), ("link-data", ("link_data", OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.LinkData)), ("ia-prefix", ("ia_prefix", OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.IaPrefix))]) self._leafs = OrderedDict([ ('lsa_flag_options', (YLeaf(YType.bits, 'lsa-flag-options'), ['Bits'])), ('lsa_body_flags', (YLeaf(YType.bits, 'lsa-body-flags'), ['Bits'])), ]) self.lsa_flag_options = Bits() self.lsa_body_flags = Bits() self.network = OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.Network() self.network.parent = self self._children_name_map["network"] = "network" self.prefix = OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.Prefix() self.prefix.parent = self self._children_name_map["prefix"] = "prefix" self.ia_router = OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.IaRouter() self.ia_router.parent = self self._children_name_map["ia_router"] = "ia-router" 
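# Each decoded-body alternative is wired up the same way: the child container
# is instantiated, parented to this LsaBody, and registered in
# _children_name_map under its YANG node name (continued below for
# lsa-external, nssa, link-data and ia-prefix).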
self.lsa_external = OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.LsaExternal() self.lsa_external.parent = self self._children_name_map["lsa_external"] = "lsa-external" self.nssa = OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.Nssa() self.nssa.parent = self self._children_name_map["nssa"] = "nssa" self.link_data = OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.LinkData() self.link_data.parent = self self._children_name_map["link_data"] = "link-data" self.ia_prefix = OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.IaPrefix() self.ia_prefix.parent = self self._children_name_map["ia_prefix"] = "ia-prefix" self._segment_path = lambda: "lsa-body" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody, ['lsa_flag_options', 'lsa_body_flags'], name, value) class Network(Entity): """ OSPFv3 network .. attribute:: attached_router List of the routers attached to the network **type**\: list of int **range:** 0..4294967295 **config**\: False .. attribute:: lsa_net_options Network LSA options **type**\: :py:class:`Ospfv3LsaOptions <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.Ospfv3LsaOptions>` **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.Network, self).__init__() self.yang_name = "network" self.yang_parent_name = "lsa-body" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([]) self._leafs = OrderedDict([ ('attached_router', (YLeafList(YType.uint32, 'attached-router'), ['int'])), ('lsa_net_options', (YLeaf(YType.bits, 'lsa-net-options'), ['Bits'])), ]) self.attached_router = [] self.lsa_net_options = Bits() self._segment_path = lambda: "network" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.Network, ['attached_router', 'lsa_net_options'], name, value) class Prefix(Entity): """ OSPFv3 inter area prefix .. attribute:: metric Metric **type**\: int **range:** 0..4294967295 **config**\: False .. attribute:: ia_prefix Inter area Prefix **type**\: str **config**\: False .. 
attribute:: ia_prefix_options Inter area prefix options **type**\: str **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.Prefix, self).__init__() self.yang_name = "prefix" self.yang_parent_name = "lsa-body" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([]) self._leafs = OrderedDict([ ('metric', (YLeaf(YType.uint32, 'metric'), ['int'])), ('ia_prefix', (YLeaf(YType.str, 'ia-prefix'), ['str'])), ('ia_prefix_options', (YLeaf(YType.str, 'ia-prefix-options'), ['str'])), ]) self.metric = None self.ia_prefix = None self.ia_prefix_options = None self._segment_path = lambda: "prefix" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.Prefix, ['metric', 'ia_prefix', 'ia_prefix_options'], name, value) class IaRouter(Entity): """ OSPFv3 inter area router .. attribute:: metric Metric **type**\: int **range:** 0..4294967295 **config**\: False .. attribute:: destination_router_id Router ID of the router being described by the LSA **type**\: int **range:** 0..4294967295 **config**\: False .. attribute:: lsa_ia_options Inter area LSA options **type**\: :py:class:`Ospfv3LsaOptions <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.Ospfv3LsaOptions>` **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.IaRouter, self).__init__() self.yang_name = "ia-router" self.yang_parent_name = "lsa-body" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([]) self._leafs = OrderedDict([ ('metric', (YLeaf(YType.uint32, 'metric'), ['int'])), ('destination_router_id', (YLeaf(YType.uint32, 'destination-router-id'), ['int'])), ('lsa_ia_options', (YLeaf(YType.bits, 'lsa-ia-options'), ['Bits'])), ]) self.metric = None self.destination_router_id = None self.lsa_ia_options = Bits() self._segment_path = lambda: "ia-router" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.IaRouter, ['metric', 'destination_router_id', 'lsa_ia_options'], name, value) class LsaExternal(Entity): """ OSPFv3 LSA external .. attribute:: metric Metric **type**\: int **range:** 0..4294967295 **config**\: False .. attribute:: flags LSA Flags **type**\: :py:class:`Flags <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.LsaExternal.Flags>` **config**\: False .. attribute:: referenced_ls_type Referenced Link State type **type**\: int **range:** 0..65535 **config**\: False .. attribute:: external_prefix Prefix **type**\: str **config**\: False .. attribute:: external_prefix_options Prefix options **type**\: str **config**\: False .. attribute:: forwarding_address Forwarding address **type**\: union of the below types: **type**\: str **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)? 
**type**\: str **pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)? **config**\: False .. attribute:: external_route_tag Route tag **type**\: int **range:** 0..4294967295 **config**\: False .. attribute:: referenced_link_state_id Referenced Link State ID **type**\: int **range:** 0..4294967295 **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.LsaExternal, self).__init__() self.yang_name = "lsa-external" self.yang_parent_name = "lsa-body" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([("flags", ("flags", OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.LsaExternal.Flags))]) self._leafs = OrderedDict([ ('metric', (YLeaf(YType.uint32, 'metric'), ['int'])), ('referenced_ls_type', (YLeaf(YType.uint16, 'referenced-ls-type'), ['int'])), ('external_prefix', (YLeaf(YType.str, 'external-prefix'), ['str'])), ('external_prefix_options', (YLeaf(YType.str, 'external-prefix-options'), ['str'])), ('forwarding_address', (YLeaf(YType.str, 'forwarding-address'), ['str','str'])), ('external_route_tag', (YLeaf(YType.uint32, 'external-route-tag'), ['int'])), ('referenced_link_state_id', (YLeaf(YType.uint32, 'referenced-link-state-id'), ['int'])), ]) self.metric = None self.referenced_ls_type = None self.external_prefix = None self.external_prefix_options = None self.forwarding_address = None self.external_route_tag = None self.referenced_link_state_id = None self.flags = OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.LsaExternal.Flags() self.flags.parent = self self._children_name_map["flags"] = "flags" self._segment_path = lambda: "lsa-external" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.LsaExternal, ['metric', 'referenced_ls_type', 'external_prefix', 'external_prefix_options', 'forwarding_address', 'external_route_tag', 'referenced_link_state_id'], name, value) class Flags(Entity): """ LSA Flags .. attribute:: e_flag When set, the metric specified is a Type 2 external metric **type**\: bool **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.LsaExternal.Flags, self).__init__() self.yang_name = "flags" self.yang_parent_name = "lsa-external" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([]) self._leafs = OrderedDict([ ('e_flag', (YLeaf(YType.boolean, 'e-flag'), ['bool'])), ]) self.e_flag = None self._segment_path = lambda: "flags" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.LsaExternal.Flags, ['e_flag'], name, value) class Nssa(Entity): """ OSPFv3 NSSA .. 
attribute:: lsa_nssa_external NSSA LSA **type**\: :py:class:`LsaNssaExternal <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.Nssa.LsaNssaExternal>` **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.Nssa, self).__init__() self.yang_name = "nssa" self.yang_parent_name = "lsa-body" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([("lsa-nssa-external", ("lsa_nssa_external", OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.Nssa.LsaNssaExternal))]) self._leafs = OrderedDict() self.lsa_nssa_external = OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.Nssa.LsaNssaExternal() self.lsa_nssa_external.parent = self self._children_name_map["lsa_nssa_external"] = "lsa-nssa-external" self._segment_path = lambda: "nssa" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.Nssa, [], name, value) class LsaNssaExternal(Entity): """ NSSA LSA .. attribute:: metric Metric **type**\: int **range:** 0..4294967295 **config**\: False .. attribute:: flags LSA Flags **type**\: :py:class:`Flags <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.Nssa.LsaNssaExternal.Flags>` **config**\: False .. attribute:: referenced_ls_type Referenced Link State type **type**\: int **range:** 0..65535 **config**\: False .. attribute:: external_prefix Prefix **type**\: str **config**\: False .. attribute:: external_prefix_options Prefix options **type**\: str **config**\: False .. attribute:: forwarding_address Forwarding address **type**\: union of the below types: **type**\: str **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)? **type**\: str **pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)? **config**\: False .. attribute:: external_route_tag Route tag **type**\: int **range:** 0..4294967295 **config**\: False .. 
attribute:: referenced_link_state_id Referenced Link State ID **type**\: int **range:** 0..4294967295 **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.Nssa.LsaNssaExternal, self).__init__() self.yang_name = "lsa-nssa-external" self.yang_parent_name = "nssa" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([("flags", ("flags", OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.Nssa.LsaNssaExternal.Flags))]) self._leafs = OrderedDict([ ('metric', (YLeaf(YType.uint32, 'metric'), ['int'])), ('referenced_ls_type', (YLeaf(YType.uint16, 'referenced-ls-type'), ['int'])), ('external_prefix', (YLeaf(YType.str, 'external-prefix'), ['str'])), ('external_prefix_options', (YLeaf(YType.str, 'external-prefix-options'), ['str'])), ('forwarding_address', (YLeaf(YType.str, 'forwarding-address'), ['str','str'])), ('external_route_tag', (YLeaf(YType.uint32, 'external-route-tag'), ['int'])), ('referenced_link_state_id', (YLeaf(YType.uint32, 'referenced-link-state-id'), ['int'])), ]) self.metric = None self.referenced_ls_type = None self.external_prefix = None self.external_prefix_options = None self.forwarding_address = None self.external_route_tag = None self.referenced_link_state_id = None self.flags = OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.Nssa.LsaNssaExternal.Flags() self.flags.parent = self self._children_name_map["flags"] = "flags" self._segment_path = lambda: "lsa-nssa-external" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.Nssa.LsaNssaExternal, ['metric', 'referenced_ls_type', 'external_prefix', 'external_prefix_options', 'forwarding_address', 'external_route_tag', 'referenced_link_state_id'], name, value) class Flags(Entity): """ LSA Flags .. attribute:: e_flag When set, the metric specified is a Type 2 external metric **type**\: bool **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.Nssa.LsaNssaExternal.Flags, self).__init__() self.yang_name = "flags" self.yang_parent_name = "lsa-nssa-external" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([]) self._leafs = OrderedDict([ ('e_flag', (YLeaf(YType.boolean, 'e-flag'), ['bool'])), ]) self.e_flag = None self._segment_path = lambda: "flags" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.Nssa.LsaNssaExternal.Flags, ['e_flag'], name, value) class LinkData(Entity): """ OSPFv3 Link data .. attribute:: rtr_priority Router priority of the interface **type**\: int **range:** 0..255 **config**\: False .. attribute:: link_local_interface_address The originating router's link\-local interface address on the link **type**\: union of the below types: **type**\: str **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?
**type**\: str **pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)? **config**\: False .. attribute:: num_of_prefixes Number of prefixes **type**\: int **range:** 0..4294967295 **config**\: False .. attribute:: lsa_id_options Link data LSA options **type**\: :py:class:`Ospfv3LsaOptions <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.Ospfv3LsaOptions>` **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.LinkData, self).__init__() self.yang_name = "link-data" self.yang_parent_name = "lsa-body" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([]) self._leafs = OrderedDict([ ('rtr_priority', (YLeaf(YType.uint8, 'rtr-priority'), ['int'])), ('link_local_interface_address', (YLeaf(YType.str, 'link-local-interface-address'), ['str','str'])), ('num_of_prefixes', (YLeaf(YType.uint32, 'num-of-prefixes'), ['int'])), ('lsa_id_options', (YLeaf(YType.bits, 'lsa-id-options'), ['Bits'])), ]) self.rtr_priority = None self.link_local_interface_address = None self.num_of_prefixes = None self.lsa_id_options = Bits() self._segment_path = lambda: "link-data" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.LinkData, ['rtr_priority', 'link_local_interface_address', 'num_of_prefixes', 'lsa_id_options'], name, value) class IaPrefix(Entity): """ OSPFv3 Intra area prefixes .. attribute:: referenced_ls_type Referenced Link State type **type**\: int **range:** 0..65535 **config**\: False .. attribute:: referenced_link_state_id Referenced Link State ID **type**\: int **range:** 0..4294967295 **config**\: False .. attribute:: referenced_adv_router Referenced Advertising Router **type**\: union of the below types: **type**\: str **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)? **type**\: str **pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)? **config**\: False .. 
attribute:: num_of_prefixes Number of prefixes **type**\: int **range:** 0..65535 **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.IaPrefix, self).__init__() self.yang_name = "ia-prefix" self.yang_parent_name = "lsa-body" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([]) self._leafs = OrderedDict([ ('referenced_ls_type', (YLeaf(YType.uint16, 'referenced-ls-type'), ['int'])), ('referenced_link_state_id', (YLeaf(YType.uint32, 'referenced-link-state-id'), ['int'])), ('referenced_adv_router', (YLeaf(YType.str, 'referenced-adv-router'), ['str','str'])), ('num_of_prefixes', (YLeaf(YType.uint16, 'num-of-prefixes'), ['int'])), ]) self.referenced_ls_type = None self.referenced_link_state_id = None self.referenced_adv_router = None self.num_of_prefixes = None self._segment_path = lambda: "ia-prefix" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3LsaVal.LsaBody.IaPrefix, ['referenced_ls_type', 'referenced_link_state_id', 'referenced_adv_router', 'num_of_prefixes'], name, value) class Ospfv3Link(Entity): """ OSPFv3 links .. attribute:: interface_id (key) Interface ID **type**\: int **range:** 0..4294967295 **config**\: False .. attribute:: neighbor_interface_id (key) Neighbor interface ID **type**\: int **range:** 0..4294967295 **config**\: False .. attribute:: neighbor_router_id (key) Neighbor router ID **type**\: int **range:** 0..4294967295 **config**\: False .. attribute:: type Link type **type**\: int **range:** 0..255 **config**\: False .. attribute:: metric Metric **type**\: int **range:** 0..65535 **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3Link, self).__init__() self.yang_name = "ospfv3-link" self.yang_parent_name = "link-scope-lsa" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = ['interface_id','neighbor_interface_id','neighbor_router_id'] self._child_classes = OrderedDict([]) self._leafs = OrderedDict([ ('interface_id', (YLeaf(YType.uint32, 'interface-id'), ['int'])), ('neighbor_interface_id', (YLeaf(YType.uint32, 'neighbor-interface-id'), ['int'])), ('neighbor_router_id', (YLeaf(YType.uint32, 'neighbor-router-id'), ['int'])), ('type', (YLeaf(YType.uint8, 'type'), ['int'])), ('metric', (YLeaf(YType.uint16, 'metric'), ['int'])), ]) self.interface_id = None self.neighbor_interface_id = None self.neighbor_router_id = None self.type = None self.metric = None self._segment_path = lambda: "ospfv3-link" + "[interface-id='" + str(self.interface_id) + "']" + "[neighbor-interface-id='" + str(self.neighbor_interface_id) + "']" + "[neighbor-router-id='" + str(self.neighbor_router_id) + "']" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3Link, ['interface_id', 'neighbor_interface_id', 'neighbor_router_id', 'type', 'metric'], name, value) class Ospfv3PrefixList(Entity): """ OSPFv3 prefix\-list .. attribute:: prefix (key) Prefix **type**\: str **config**\: False .. 
attribute:: prefix_options Prefix options **type**\: str **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3PrefixList, self).__init__() self.yang_name = "ospfv3-prefix-list" self.yang_parent_name = "link-scope-lsa" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = ['prefix'] self._child_classes = OrderedDict([]) self._leafs = OrderedDict([ ('prefix', (YLeaf(YType.str, 'prefix'), ['str'])), ('prefix_options', (YLeaf(YType.str, 'prefix-options'), ['str'])), ]) self.prefix = None self.prefix_options = None self._segment_path = lambda: "ospfv3-prefix-list" + "[prefix='" + str(self.prefix) + "']" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3PrefixList, ['prefix', 'prefix_options'], name, value) class Ospfv3IaPrefix(Entity): """ OSPFv3 intra\-area prefix\-list .. attribute:: prefix (key) Prefix **type**\: str **config**\: False .. attribute:: prefix_options Prefix options **type**\: str **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3IaPrefix, self).__init__() self.yang_name = "ospfv3-ia-prefix" self.yang_parent_name = "link-scope-lsa" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = ['prefix'] self._child_classes = OrderedDict([]) self._leafs = OrderedDict([ ('prefix', (YLeaf(YType.str, 'prefix'), ['str'])), ('prefix_options', (YLeaf(YType.str, 'prefix-options'), ['str'])), ]) self.prefix = None self.prefix_options = None self._segment_path = lambda: "ospfv3-ia-prefix" + "[prefix='" + str(self.prefix) + "']" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Ospfv3IaPrefix, ['prefix', 'prefix_options'], name, value) class MultiTopology(Entity): """ OSPF multi\-topology interface augmentation .. attribute:: name (key) One of the topology enabled on this interface **type**\: str **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.MultiTopology, self).__init__() self.yang_name = "multi-topology" self.yang_parent_name = "link-scope-lsa" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = ['name'] self._child_classes = OrderedDict([]) self._leafs = OrderedDict([ ('name', (YLeaf(YType.str, 'name'), ['str'])), ]) self.name = None self._segment_path = lambda: "multi-topology" + "[name='" + str(self.name) + "']" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.MultiTopology, ['name'], name, value) class Tlv(Entity): """ Link TLV .. attribute:: link_type Link type **type**\: int **range:** 0..255 **config**\: False .. attribute:: link_id Link ID **type**\: int **range:** 0..4294967295 **config**\: False .. attribute:: local_if_ipv4_addr List of local interface IPv4 addresses **type**\: union of the below types: **type**\: list of str **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)? 
**type**\: list of str **pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)? **config**\: False .. attribute:: local_remote_ipv4_addr List of remote interface IPv4 addresses **type**\: union of the below types: **type**\: list of str **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)? **type**\: list of str **pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)? **config**\: False .. attribute:: te_metric TE metric **type**\: int **range:** 0..4294967295 **config**\: False .. attribute:: max_bandwidth Maximum bandwidth **type**\: :py:class:`Decimal64<ydk.types.Decimal64>` **range:** \-92233720368547758.08..92233720368547758.07 **config**\: False .. attribute:: max_reservable_bandwidth Maximum reservable bandwidth **type**\: :py:class:`Decimal64<ydk.types.Decimal64>` **range:** \-92233720368547758.08..92233720368547758.07 **config**\: False .. attribute:: unreserved_bandwidth Unreserved bandwidth **type**\: :py:class:`Decimal64<ydk.types.Decimal64>` **range:** \-92233720368547758.08..92233720368547758.07 **config**\: False .. attribute:: admin_group Administrative group/Resource class/Color **type**\: int **range:** 0..4294967295 **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Tlv, self).__init__() self.yang_name = "tlv" self.yang_parent_name = "link-scope-lsa" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([]) self._leafs = OrderedDict([ ('link_type', (YLeaf(YType.uint8, 'link-type'), ['int'])), ('link_id', (YLeaf(YType.uint32, 'link-id'), ['int'])), ('local_if_ipv4_addr', (YLeafList(YType.str, 'local-if-ipv4-addr'), ['str','str'])), ('local_remote_ipv4_addr', (YLeafList(YType.str, 'local-remote-ipv4-addr'), ['str','str'])), ('te_metric', (YLeaf(YType.uint32, 'te-metric'), ['int'])), ('max_bandwidth', (YLeaf(YType.str, 'max-bandwidth'), ['Decimal64'])), ('max_reservable_bandwidth', (YLeaf(YType.str, 'max-reservable-bandwidth'), ['Decimal64'])), ('unreserved_bandwidth', (YLeaf(YType.str, 'unreserved-bandwidth'), ['Decimal64'])), ('admin_group', (YLeaf(YType.uint32, 'admin-group'), ['int'])), ]) self.link_type = None self.link_id = None self.local_if_ipv4_addr = [] self.local_remote_ipv4_addr = [] self.te_metric = None self.max_bandwidth = None self.max_reservable_bandwidth = None self.unreserved_bandwidth = None self.admin_group = None self._segment_path = lambda: "tlv" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.Tlv, ['link_type', 'link_id', 'local_if_ipv4_addr', 'local_remote_ipv4_addr', 'te_metric', 'max_bandwidth', 'max_reservable_bandwidth', 'unreserved_bandwidth', 'admin_group'], name, value) class UnknownSubTlv(Entity): """ OSPFv2 Unknown sub TLV .. attribute:: type (key) TLV type **type**\: int **range:** 0..65535 **config**\: False .. 
attribute:: length TLV length **type**\: int **range:** 0..65535 **config**\: False .. attribute:: value TLV value **type**\: list of int **range:** 0..255 **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.UnknownSubTlv, self).__init__() self.yang_name = "unknown-sub-tlv" self.yang_parent_name = "link-scope-lsa" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = ['type'] self._child_classes = OrderedDict([]) self._leafs = OrderedDict([ ('type', (YLeaf(YType.uint16, 'type'), ['int'])), ('length', (YLeaf(YType.uint16, 'length'), ['int'])), ('value', (YLeafList(YType.uint8, 'value'), ['int'])), ]) self.type = None self.length = None self.value = [] self._segment_path = lambda: "unknown-sub-tlv" + "[type='" + str(self.type) + "']" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.LinkScopeLsa.UnknownSubTlv, ['type', 'length', 'value'], name, value) class AreaScopeLsa(Entity): """ List OSPF area scope LSA databases .. attribute:: lsa_type (key) LSA Type **type**\: int **range:** 0..4294967295 **config**\: False .. attribute:: adv_router (key) Advertising router **type**\: union of the below types: **type**\: str **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)? **type**\: str **pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)? **config**\: False .. attribute:: decoded_completed The OSPF LSA body is fully decoded **type**\: bool **config**\: False .. attribute:: raw_data The complete LSA in network byte order as received/sent over the wire **type**\: list of int **range:** 0..255 **config**\: False .. attribute:: ospfv2_lsa OSPFv2 LSA **type**\: :py:class:`Ospfv2Lsa <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv2Lsa>` **config**\: False .. attribute:: ospfv2_link Router LSA link **type**\: list of :py:class:`Ospfv2Link <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv2Link>` **config**\: False .. attribute:: ospfv2_topology Summary LSA **type**\: list of :py:class:`Ospfv2Topology <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv2Topology>` **config**\: False .. attribute:: ospfv2_external External LSA **type**\: list of :py:class:`Ospfv2External <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv2External>` **config**\: False .. attribute:: ospfv3_lsa OSPFv3 LSA **type**\: :py:class:`Ospfv3Lsa <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa>` **config**\: False .. attribute:: ospfv3_link OSPFv3 links **type**\: list of :py:class:`Ospfv3Link <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Link>` **config**\: False .. 
attribute:: ospfv3_prefix OSPFv3 prefix\-list **type**\: list of :py:class:`Ospfv3Prefix <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Prefix>` **config**\: False .. attribute:: ospfv3_ia_prefix OSPFv3 intra\-area prefix\-list **type**\: list of :py:class:`Ospfv3IaPrefix <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3IaPrefix>` **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa, self).__init__() self.yang_name = "area-scope-lsa" self.yang_parent_name = "link-scope-lsas" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = ['lsa_type','adv_router'] self._child_classes = OrderedDict([("ospfv2-lsa", ("ospfv2_lsa", OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv2Lsa)), ("ospfv2-link", ("ospfv2_link", OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv2Link)), ("ospfv2-topology", ("ospfv2_topology", OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv2Topology)), ("ospfv2-external", ("ospfv2_external", OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv2External)), ("ospfv3-lsa", ("ospfv3_lsa", OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa)), ("ospfv3-link", ("ospfv3_link", OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Link)), ("ospfv3-prefix", ("ospfv3_prefix", OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Prefix)), ("ospfv3-ia-prefix", ("ospfv3_ia_prefix", OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3IaPrefix))]) self._leafs = OrderedDict([ ('lsa_type', (YLeaf(YType.uint32, 'lsa-type'), ['int'])), ('adv_router', (YLeaf(YType.str, 'adv-router'), ['str','str'])), ('decoded_completed', (YLeaf(YType.boolean, 'decoded-completed'), ['bool'])), ('raw_data', (YLeafList(YType.uint8, 'raw-data'), ['int'])), ]) self.lsa_type = None self.adv_router = None self.decoded_completed = None self.raw_data = [] self.ospfv2_lsa = OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv2Lsa() self.ospfv2_lsa.parent = self self._children_name_map["ospfv2_lsa"] = "ospfv2-lsa" self.ospfv3_lsa = OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa() self.ospfv3_lsa.parent = self self._children_name_map["ospfv3_lsa"] = "ospfv3-lsa" self.ospfv2_link = YList(self) self.ospfv2_topology = YList(self) self.ospfv2_external = YList(self) self.ospfv3_link = YList(self) self.ospfv3_prefix = YList(self) self.ospfv3_ia_prefix = YList(self) self._segment_path = lambda: "area-scope-lsa" + "[lsa-type='" + str(self.lsa_type) + "']" + "[adv-router='" + str(self.adv_router) + "']" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa, ['lsa_type', 'adv_router', 'decoded_completed', 'raw_data'], name, value) class Ospfv2Lsa(Entity): """ OSPFv2 LSA .. attribute:: header Decoded OSPFv2 LSA header data **type**\: :py:class:`Header <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv2Lsa.Header>` **config**\: False .. 
attribute:: lsa_body Decoded OSPFv2 LSA body data **type**\: :py:class:`LsaBody <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv2Lsa.LsaBody>` **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv2Lsa, self).__init__() self.yang_name = "ospfv2-lsa" self.yang_parent_name = "area-scope-lsa" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([("header", ("header", OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv2Lsa.Header)), ("lsa-body", ("lsa_body", OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv2Lsa.LsaBody))]) self._leafs = OrderedDict() self.header = OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv2Lsa.Header() self.header.parent = self self._children_name_map["header"] = "header" self.lsa_body = OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv2Lsa.LsaBody() self.lsa_body.parent = self self._children_name_map["lsa_body"] = "lsa-body" self._segment_path = lambda: "ospfv2-lsa" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv2Lsa, [], name, value) class Header(Entity): """ Decoded OSPFv2 LSA header data .. attribute:: lsa_id LSA ID **type**\: union of the below types: **type**\: str **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)? **type**\: str **pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)? **config**\: False .. attribute:: opaque_type Opaque type **type**\: int **range:** 0..255 **config**\: False .. attribute:: opaque_id Opaque ID **type**\: int **range:** 0..4294967295 **config**\: False .. attribute:: age LSA age **type**\: int **range:** 0..65535 **config**\: False .. attribute:: type LSA type **type**\: int **range:** 0..65535 **config**\: False .. attribute:: adv_router LSA advertising router **type**\: int **range:** 0..4294967295 **config**\: False .. attribute:: seq_num LSA sequence number **type**\: str **config**\: False .. attribute:: checksum LSA checksum **type**\: str **config**\: False .. attribute:: length LSA length **type**\: int **range:** 0..65535 **config**\: False .. 
attribute:: flag_options LSA options **type**\: :py:class:`LsaFlagOptions <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.LsaFlagOptions>` **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv2Lsa.Header, self).__init__() self.yang_name = "header" self.yang_parent_name = "ospfv2-lsa" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([]) self._leafs = OrderedDict([ ('lsa_id', (YLeaf(YType.str, 'lsa-id'), ['str','str'])), ('opaque_type', (YLeaf(YType.uint8, 'opaque-type'), ['int'])), ('opaque_id', (YLeaf(YType.uint32, 'opaque-id'), ['int'])), ('age', (YLeaf(YType.uint16, 'age'), ['int'])), ('type', (YLeaf(YType.uint16, 'type'), ['int'])), ('adv_router', (YLeaf(YType.uint32, 'adv-router'), ['int'])), ('seq_num', (YLeaf(YType.str, 'seq-num'), ['str'])), ('checksum', (YLeaf(YType.str, 'checksum'), ['str'])), ('length', (YLeaf(YType.uint16, 'length'), ['int'])), ('flag_options', (YLeaf(YType.bits, 'flag-options'), ['Bits'])), ]) self.lsa_id = None self.opaque_type = None self.opaque_id = None self.age = None self.type = None self.adv_router = None self.seq_num = None self.checksum = None self.length = None self.flag_options = Bits() self._segment_path = lambda: "header" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv2Lsa.Header, ['lsa_id', 'opaque_type', 'opaque_id', 'age', 'type', 'adv_router', 'seq_num', 'checksum', 'length', 'flag_options'], name, value) class LsaBody(Entity): """ Decoded OSPFv2 LSA body data .. attribute:: num_of_links Number of links **type**\: int **range:** 0..65535 **config**\: False .. attribute:: network Network details **type**\: :py:class:`Network <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv2Lsa.LsaBody.Network>` **config**\: False .. attribute:: summary_mask Summary mask **type**\: union of the below types: **type**\: str **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)? **type**\: str **pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)? **config**\: False .. attribute:: external_mask External mask **type**\: union of the below types: **type**\: str **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)? **type**\: str **pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)? **config**\: False .. 
attribute:: body_flag_options LSA body flags **type**\: :py:class:`Ospfv2LsaBodyFlagsOptions <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.Ospfv2LsaBodyFlagsOptions>` **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv2Lsa.LsaBody, self).__init__() self.yang_name = "lsa-body" self.yang_parent_name = "ospfv2-lsa" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([("network", ("network", OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv2Lsa.LsaBody.Network))]) self._leafs = OrderedDict([ ('num_of_links', (YLeaf(YType.uint16, 'num-of-links'), ['int'])), ('summary_mask', (YLeaf(YType.str, 'summary-mask'), ['str','str'])), ('external_mask', (YLeaf(YType.str, 'external-mask'), ['str','str'])), ('body_flag_options', (YLeaf(YType.bits, 'body-flag-options'), ['Bits'])), ]) self.num_of_links = None self.summary_mask = None self.external_mask = None self.body_flag_options = Bits() self.network = OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv2Lsa.LsaBody.Network() self.network.parent = self self._children_name_map["network"] = "network" self._segment_path = lambda: "lsa-body" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv2Lsa.LsaBody, ['num_of_links', 'summary_mask', 'external_mask', 'body_flag_options'], name, value) class Network(Entity): """ Network details .. attribute:: network_mask IP network mask **type**\: union of the below types: **type**\: str **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)? **type**\: str **pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)? **config**\: False .. attribute:: attached_router List of the routers attached to the network **type**\: list of int **range:** 0..4294967295 **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv2Lsa.LsaBody.Network, self).__init__() self.yang_name = "network" self.yang_parent_name = "lsa-body" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([]) self._leafs = OrderedDict([ ('network_mask', (YLeaf(YType.str, 'network-mask'), ['str','str'])), ('attached_router', (YLeafList(YType.uint32, 'attached-router'), ['int'])), ]) self.network_mask = None self.attached_router = [] self._segment_path = lambda: "network" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv2Lsa.LsaBody.Network, ['network_mask', 'attached_router'], name, value) class Ospfv2Link(Entity): """ Router LSA link .. attribute:: link_id (key) Link ID **type**\: int **range:** 0..4294967295 **config**\: False .. attribute:: link_data (key) Link data **type**\: int **range:** 0..4294967295 **config**\: False .. attribute:: type Link type **type**\: int **range:** 0..255 **config**\: False .. 
attribute:: ospfv2_topology Topology specific information **type**\: list of :py:class:`Ospfv2Topology <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv2Link.Ospfv2Topology>` **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv2Link, self).__init__() self.yang_name = "ospfv2-link" self.yang_parent_name = "area-scope-lsa" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = ['link_id','link_data'] self._child_classes = OrderedDict([("ospfv2-topology", ("ospfv2_topology", OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv2Link.Ospfv2Topology))]) self._leafs = OrderedDict([ ('link_id', (YLeaf(YType.uint32, 'link-id'), ['int'])), ('link_data', (YLeaf(YType.uint32, 'link-data'), ['int'])), ('type', (YLeaf(YType.uint8, 'type'), ['int'])), ]) self.link_id = None self.link_data = None self.type = None self.ospfv2_topology = YList(self) self._segment_path = lambda: "ospfv2-link" + "[link-id='" + str(self.link_id) + "']" + "[link-data='" + str(self.link_data) + "']" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv2Link, ['link_id', 'link_data', 'type'], name, value) class Ospfv2Topology(Entity): """ Topology specific information .. attribute:: mt_id (key) MT\-ID for topology enabled link **type**\: int **range:** 0..4294967295 **config**\: False .. attribute:: metric Metric for the topology **type**\: int **range:** 0..65535 **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv2Link.Ospfv2Topology, self).__init__() self.yang_name = "ospfv2-topology" self.yang_parent_name = "ospfv2-link" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = ['mt_id'] self._child_classes = OrderedDict([]) self._leafs = OrderedDict([ ('mt_id', (YLeaf(YType.uint32, 'mt-id'), ['int'])), ('metric', (YLeaf(YType.uint16, 'metric'), ['int'])), ]) self.mt_id = None self.metric = None self._segment_path = lambda: "ospfv2-topology" + "[mt-id='" + str(self.mt_id) + "']" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv2Link.Ospfv2Topology, ['mt_id', 'metric'], name, value) class Ospfv2Topology(Entity): """ Summary LSA .. attribute:: mt_id (key) MT\-ID for topology enabled link **type**\: int **range:** 0..4294967295 **config**\: False .. 
attribute:: metric Metric for the topology **type**\: int **range:** 0..65535 **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv2Topology, self).__init__() self.yang_name = "ospfv2-topology" self.yang_parent_name = "area-scope-lsa" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = ['mt_id'] self._child_classes = OrderedDict([]) self._leafs = OrderedDict([ ('mt_id', (YLeaf(YType.uint32, 'mt-id'), ['int'])), ('metric', (YLeaf(YType.uint16, 'metric'), ['int'])), ]) self.mt_id = None self.metric = None self._segment_path = lambda: "ospfv2-topology" + "[mt-id='" + str(self.mt_id) + "']" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv2Topology, ['mt_id', 'metric'], name, value) class Ospfv2External(Entity): """ External LSA .. attribute:: mt_id (key) MT\-ID for topology enabled on the link **type**\: int **range:** 0..4294967295 **config**\: False .. attribute:: metric Metric for the topology **type**\: int **range:** 0..4294967295 **config**\: False .. attribute:: forwarding_address Forwarding address **type**\: union of the below types: **type**\: str **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)? **type**\: str **pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)? **config**\: False .. attribute:: external_route_tag Route tag **type**\: int **range:** 0..4294967295 **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv2External, self).__init__() self.yang_name = "ospfv2-external" self.yang_parent_name = "area-scope-lsa" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = ['mt_id'] self._child_classes = OrderedDict([]) self._leafs = OrderedDict([ ('mt_id', (YLeaf(YType.uint32, 'mt-id'), ['int'])), ('metric', (YLeaf(YType.uint32, 'metric'), ['int'])), ('forwarding_address', (YLeaf(YType.str, 'forwarding-address'), ['str','str'])), ('external_route_tag', (YLeaf(YType.uint32, 'external-route-tag'), ['int'])), ]) self.mt_id = None self.metric = None self.forwarding_address = None self.external_route_tag = None self._segment_path = lambda: "ospfv2-external" + "[mt-id='" + str(self.mt_id) + "']" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv2External, ['mt_id', 'metric', 'forwarding_address', 'external_route_tag'], name, value) class Ospfv3Lsa(Entity): """ OSPFv3 LSA .. attribute:: header Decoded OSPFv3 LSA header **type**\: :py:class:`Header <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.Header>` **config**\: False .. 
attribute:: lsa_body Decoded OSPFv3 LSA body **type**\: :py:class:`LsaBody <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody>` **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa, self).__init__() self.yang_name = "ospfv3-lsa" self.yang_parent_name = "area-scope-lsa" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([("header", ("header", OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.Header)), ("lsa-body", ("lsa_body", OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody))]) self._leafs = OrderedDict() self.header = OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.Header() self.header.parent = self self._children_name_map["header"] = "header" self.lsa_body = OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody() self.lsa_body.parent = self self._children_name_map["lsa_body"] = "lsa-body" self._segment_path = lambda: "ospfv3-lsa" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa, [], name, value) class Header(Entity): """ Decoded OSPFv3 LSA header .. attribute:: lsa_id LSA ID **type**\: union of the below types: **type**\: str **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)? **type**\: str **pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)? **config**\: False .. attribute:: lsa_header LSA header **type**\: :py:class:`LsaHeader <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.Header.LsaHeader>` **config**\: False .. attribute:: lsa_hdr_options OSPFv3 LSA options **type**\: :py:class:`Ospfv3LsaOptions <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.Ospfv3LsaOptions>` **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.Header, self).__init__() self.yang_name = "header" self.yang_parent_name = "ospfv3-lsa" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([("lsa-header", ("lsa_header", OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.Header.LsaHeader))]) self._leafs = OrderedDict([ ('lsa_id', (YLeaf(YType.str, 'lsa-id'), ['str','str'])), ('lsa_hdr_options', (YLeaf(YType.bits, 'lsa-hdr-options'), ['Bits'])), ]) self.lsa_id = None self.lsa_hdr_options = Bits() self.lsa_header = OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.Header.LsaHeader() self.lsa_header.parent = self self._children_name_map["lsa_header"] = "lsa-header" self._segment_path = lambda: "header" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.Header, ['lsa_id', 'lsa_hdr_options'], name, value) class LsaHeader(Entity): """ LSA header .. 
attribute:: age LSA age **type**\: int **range:** 0..65535 **config**\: False .. attribute:: type LSA type **type**\: int **range:** 0..65535 **config**\: False .. attribute:: adv_router LSA advertising router **type**\: int **range:** 0..4294967295 **config**\: False .. attribute:: seq_num LSA sequence number **type**\: str **config**\: False .. attribute:: checksum LSA checksum **type**\: str **config**\: False .. attribute:: length LSA length **type**\: int **range:** 0..65535 **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.Header.LsaHeader, self).__init__() self.yang_name = "lsa-header" self.yang_parent_name = "header" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([]) self._leafs = OrderedDict([ ('age', (YLeaf(YType.uint16, 'age'), ['int'])), ('type', (YLeaf(YType.uint16, 'type'), ['int'])), ('adv_router', (YLeaf(YType.uint32, 'adv-router'), ['int'])), ('seq_num', (YLeaf(YType.str, 'seq-num'), ['str'])), ('checksum', (YLeaf(YType.str, 'checksum'), ['str'])), ('length', (YLeaf(YType.uint16, 'length'), ['int'])), ]) self.age = None self.type = None self.adv_router = None self.seq_num = None self.checksum = None self.length = None self._segment_path = lambda: "lsa-header" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.Header.LsaHeader, ['age', 'type', 'adv_router', 'seq_num', 'checksum', 'length'], name, value) class LsaBody(Entity): """ Decoded OSPFv3 LSA body .. attribute:: network OSPFv3 network **type**\: :py:class:`Network <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.Network>` **config**\: False .. attribute:: prefix OSPFv3 inter area prefix **type**\: :py:class:`Prefix <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.Prefix>` **config**\: False .. attribute:: ia_router OSPFv3 inter area router **type**\: :py:class:`IaRouter <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.IaRouter>` **config**\: False .. attribute:: lsa_external OSPFv3 LSA external **type**\: :py:class:`LsaExternal <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.LsaExternal>` **config**\: False .. attribute:: nssa OSPFv3 NSSA **type**\: :py:class:`Nssa <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.Nssa>` **config**\: False .. attribute:: link_data OSPFv3 Link data **type**\: :py:class:`LinkData <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.LinkData>` **config**\: False .. attribute:: ia_prefix OSPFv3 Intra area prefixes **type**\: :py:class:`IaPrefix <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.IaPrefix>` **config**\: False .. attribute:: lsa_flag_options LSA options **type**\: :py:class:`Ospfv3LsaOptions <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.Ospfv3LsaOptions>` **config**\: False .. 
attribute:: lsa_body_flags LSA Body Flags **type**\: :py:class:`Ospfv3LsaBodyFlagOptions <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.Ospfv3LsaBodyFlagOptions>` **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody, self).__init__() self.yang_name = "lsa-body" self.yang_parent_name = "ospfv3-lsa" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([("network", ("network", OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.Network)), ("prefix", ("prefix", OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.Prefix)), ("ia-router", ("ia_router", OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.IaRouter)), ("lsa-external", ("lsa_external", OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.LsaExternal)), ("nssa", ("nssa", OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.Nssa)), ("link-data", ("link_data", OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.LinkData)), ("ia-prefix", ("ia_prefix", OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.IaPrefix))]) self._leafs = OrderedDict([ ('lsa_flag_options', (YLeaf(YType.bits, 'lsa-flag-options'), ['Bits'])), ('lsa_body_flags', (YLeaf(YType.bits, 'lsa-body-flags'), ['Bits'])), ]) self.lsa_flag_options = Bits() self.lsa_body_flags = Bits() self.network = OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.Network() self.network.parent = self self._children_name_map["network"] = "network" self.prefix = OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.Prefix() self.prefix.parent = self self._children_name_map["prefix"] = "prefix" self.ia_router = OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.IaRouter() self.ia_router.parent = self self._children_name_map["ia_router"] = "ia-router" self.lsa_external = OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.LsaExternal() self.lsa_external.parent = self self._children_name_map["lsa_external"] = "lsa-external" self.nssa = OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.Nssa() self.nssa.parent = self self._children_name_map["nssa"] = "nssa" self.link_data = OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.LinkData() self.link_data.parent = self self._children_name_map["link_data"] = "link-data" self.ia_prefix = OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.IaPrefix() self.ia_prefix.parent = self self._children_name_map["ia_prefix"] = "ia-prefix" self._segment_path = lambda: "lsa-body" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody, ['lsa_flag_options', 'lsa_body_flags'], name, value) class Network(Entity): """ OSPFv3 network .. attribute:: attached_router List of the routers attached to the network **type**\: list of int **range:** 0..4294967295 **config**\: False .. 
attribute:: lsa_net_options Network LSA options **type**\: :py:class:`Ospfv3LsaOptions <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.Ospfv3LsaOptions>` **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.Network, self).__init__() self.yang_name = "network" self.yang_parent_name = "lsa-body" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([]) self._leafs = OrderedDict([ ('attached_router', (YLeafList(YType.uint32, 'attached-router'), ['int'])), ('lsa_net_options', (YLeaf(YType.bits, 'lsa-net-options'), ['Bits'])), ]) self.attached_router = [] self.lsa_net_options = Bits() self._segment_path = lambda: "network" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.Network, ['attached_router', 'lsa_net_options'], name, value) class Prefix(Entity): """ OSPFv3 inter area prefix .. attribute:: metric Metric **type**\: int **range:** 0..4294967295 **config**\: False .. attribute:: ia_prefix Inter area Prefix **type**\: str **config**\: False .. attribute:: ia_prefix_options Inter area prefix options **type**\: str **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.Prefix, self).__init__() self.yang_name = "prefix" self.yang_parent_name = "lsa-body" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([]) self._leafs = OrderedDict([ ('metric', (YLeaf(YType.uint32, 'metric'), ['int'])), ('ia_prefix', (YLeaf(YType.str, 'ia-prefix'), ['str'])), ('ia_prefix_options', (YLeaf(YType.str, 'ia-prefix-options'), ['str'])), ]) self.metric = None self.ia_prefix = None self.ia_prefix_options = None self._segment_path = lambda: "prefix" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.Prefix, ['metric', 'ia_prefix', 'ia_prefix_options'], name, value) class IaRouter(Entity): """ OSPFv3 inter area router .. attribute:: metric Metric **type**\: int **range:** 0..4294967295 **config**\: False .. attribute:: destination_router_id Router ID of the router being described by the LSA **type**\: int **range:** 0..4294967295 **config**\: False .. 
attribute:: lsa_ia_options Inter area LSA options **type**\: :py:class:`Ospfv3LsaOptions <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.Ospfv3LsaOptions>` **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.IaRouter, self).__init__() self.yang_name = "ia-router" self.yang_parent_name = "lsa-body" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([]) self._leafs = OrderedDict([ ('metric', (YLeaf(YType.uint32, 'metric'), ['int'])), ('destination_router_id', (YLeaf(YType.uint32, 'destination-router-id'), ['int'])), ('lsa_ia_options', (YLeaf(YType.bits, 'lsa-ia-options'), ['Bits'])), ]) self.metric = None self.destination_router_id = None self.lsa_ia_options = Bits() self._segment_path = lambda: "ia-router" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.IaRouter, ['metric', 'destination_router_id', 'lsa_ia_options'], name, value) class LsaExternal(Entity): """ OSPFv3 LSA external .. attribute:: metric Metric **type**\: int **range:** 0..4294967295 **config**\: False .. attribute:: flags LSA Flags **type**\: :py:class:`Flags <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.LsaExternal.Flags>` **config**\: False .. attribute:: referenced_ls_type Referenced Link State type **type**\: int **range:** 0..65535 **config**\: False .. attribute:: external_prefix Prefix **type**\: str **config**\: False .. attribute:: external_prefix_options Prefix options **type**\: str **config**\: False .. attribute:: forwarding_address Forwarding address **type**\: union of the below types: **type**\: str **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)? **type**\: str **pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)? **config**\: False .. attribute:: external_route_tag Route tag **type**\: int **range:** 0..4294967295 **config**\: False .. 
attribute:: referenced_link_state_id Referenced Link State ID **type**\: int **range:** 0..4294967295 **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.LsaExternal, self).__init__() self.yang_name = "lsa-external" self.yang_parent_name = "lsa-body" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([("flags", ("flags", OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.LsaExternal.Flags))]) self._leafs = OrderedDict([ ('metric', (YLeaf(YType.uint32, 'metric'), ['int'])), ('referenced_ls_type', (YLeaf(YType.uint16, 'referenced-ls-type'), ['int'])), ('external_prefix', (YLeaf(YType.str, 'external-prefix'), ['str'])), ('external_prefix_options', (YLeaf(YType.str, 'external-prefix-options'), ['str'])), ('forwarding_address', (YLeaf(YType.str, 'forwarding-address'), ['str','str'])), ('external_route_tag', (YLeaf(YType.uint32, 'external-route-tag'), ['int'])), ('referenced_link_state_id', (YLeaf(YType.uint32, 'referenced-link-state-id'), ['int'])), ]) self.metric = None self.referenced_ls_type = None self.external_prefix = None self.external_prefix_options = None self.forwarding_address = None self.external_route_tag = None self.referenced_link_state_id = None self.flags = OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.LsaExternal.Flags() self.flags.parent = self self._children_name_map["flags"] = "flags" self._segment_path = lambda: "lsa-external" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.LsaExternal, ['metric', 'referenced_ls_type', 'external_prefix', 'external_prefix_options', 'forwarding_address', 'external_route_tag', 'referenced_link_state_id'], name, value) class Flags(Entity): """ LSA Flags .. attribute:: e_flag When set, the metric specified is a Type 2 external metric **type**\: bool **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.LsaExternal.Flags, self).__init__() self.yang_name = "flags" self.yang_parent_name = "lsa-external" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([]) self._leafs = OrderedDict([ ('e_flag', (YLeaf(YType.boolean, 'e-flag'), ['bool'])), ]) self.e_flag = None self._segment_path = lambda: "flags" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.LsaExternal.Flags, ['e_flag'], name, value) class Nssa(Entity): """ OSPFv3 NSSA .. 
attribute:: lsa_nssa_external NSSA LSA **type**\: :py:class:`LsaNssaExternal <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.Nssa.LsaNssaExternal>` **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.Nssa, self).__init__() self.yang_name = "nssa" self.yang_parent_name = "lsa-body" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([("lsa-nssa-external", ("lsa_nssa_external", OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.Nssa.LsaNssaExternal))]) self._leafs = OrderedDict() self.lsa_nssa_external = OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.Nssa.LsaNssaExternal() self.lsa_nssa_external.parent = self self._children_name_map["lsa_nssa_external"] = "lsa-nssa-external" self._segment_path = lambda: "nssa" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.Nssa, [], name, value) class LsaNssaExternal(Entity): """ NSSA LSA .. attribute:: metric Metric **type**\: int **range:** 0..4294967295 **config**\: False .. attribute:: flags LSA Flags **type**\: :py:class:`Flags <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.Nssa.LsaNssaExternal.Flags>` **config**\: False .. attribute:: referenced_ls_type Referenced Link State type **type**\: int **range:** 0..65535 **config**\: False .. attribute:: external_prefix Prefix **type**\: str **config**\: False .. attribute:: external_prefix_options Prefix options **type**\: str **config**\: False .. attribute:: forwarding_address Forwarding address **type**\: union of the below types: **type**\: str **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)? **type**\: str **pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)? **config**\: False .. attribute:: external_route_tag Route tag **type**\: int **range:** 0..4294967295 **config**\: False .. 
attribute:: referenced_link_state_id Referenced Link State ID **type**\: int **range:** 0..4294967295 **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.Nssa.LsaNssaExternal, self).__init__() self.yang_name = "lsa-nssa-external" self.yang_parent_name = "nssa" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([("flags", ("flags", OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.Nssa.LsaNssaExternal.Flags))]) self._leafs = OrderedDict([ ('metric', (YLeaf(YType.uint32, 'metric'), ['int'])), ('referenced_ls_type', (YLeaf(YType.uint16, 'referenced-ls-type'), ['int'])), ('external_prefix', (YLeaf(YType.str, 'external-prefix'), ['str'])), ('external_prefix_options', (YLeaf(YType.str, 'external-prefix-options'), ['str'])), ('forwarding_address', (YLeaf(YType.str, 'forwarding-address'), ['str','str'])), ('external_route_tag', (YLeaf(YType.uint32, 'external-route-tag'), ['int'])), ('referenced_link_state_id', (YLeaf(YType.uint32, 'referenced-link-state-id'), ['int'])), ]) self.metric = None self.referenced_ls_type = None self.external_prefix = None self.external_prefix_options = None self.forwarding_address = None self.external_route_tag = None self.referenced_link_state_id = None self.flags = OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.Nssa.LsaNssaExternal.Flags() self.flags.parent = self self._children_name_map["flags"] = "flags" self._segment_path = lambda: "lsa-nssa-external" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.Nssa.LsaNssaExternal, ['metric', 'referenced_ls_type', 'external_prefix', 'external_prefix_options', 'forwarding_address', 'external_route_tag', 'referenced_link_state_id'], name, value) class Flags(Entity): """ LSA Flags .. attribute:: e_flag When set, the metric specified is a Type 2 external metric **type**\: bool **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.Nssa.LsaNssaExternal.Flags, self).__init__() self.yang_name = "flags" self.yang_parent_name = "lsa-nssa-external" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([]) self._leafs = OrderedDict([ ('e_flag', (YLeaf(YType.boolean, 'e-flag'), ['bool'])), ]) self.e_flag = None self._segment_path = lambda: "flags" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.Nssa.LsaNssaExternal.Flags, ['e_flag'], name, value) class LinkData(Entity): """ OSPFv3 Link data .. attribute:: rtr_priority Router priority of the interface **type**\: int **range:** 0..255 **config**\: False .. attribute:: link_local_interface_address The originating router's link\-local interface address on the link **type**\: union of the below types: **type**\: str **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)? 
**type**\: str **pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)? **config**\: False .. attribute:: num_of_prefixes Number of prefixes **type**\: int **range:** 0..4294967295 **config**\: False .. attribute:: lsa_id_options Link data LSA options **type**\: :py:class:`Ospfv3LsaOptions <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.Ospfv3LsaOptions>` **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.LinkData, self).__init__() self.yang_name = "link-data" self.yang_parent_name = "lsa-body" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([]) self._leafs = OrderedDict([ ('rtr_priority', (YLeaf(YType.uint8, 'rtr-priority'), ['int'])), ('link_local_interface_address', (YLeaf(YType.str, 'link-local-interface-address'), ['str','str'])), ('num_of_prefixes', (YLeaf(YType.uint32, 'num-of-prefixes'), ['int'])), ('lsa_id_options', (YLeaf(YType.bits, 'lsa-id-options'), ['Bits'])), ]) self.rtr_priority = None self.link_local_interface_address = None self.num_of_prefixes = None self.lsa_id_options = Bits() self._segment_path = lambda: "link-data" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.LinkData, ['rtr_priority', 'link_local_interface_address', 'num_of_prefixes', 'lsa_id_options'], name, value) class IaPrefix(Entity): """ OSPFv3 Intra area prefixes .. attribute:: referenced_ls_type Referenced Link State type **type**\: int **range:** 0..65535 **config**\: False .. attribute:: referenced_link_state_id Referenced Link State ID **type**\: int **range:** 0..4294967295 **config**\: False .. attribute:: referenced_adv_router Referenced Advertising Router **type**\: union of the below types: **type**\: str **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)? **type**\: str **pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)? **config**\: False .. 
attribute:: num_of_prefixes Number of prefixes **type**\: int **range:** 0..65535 **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.IaPrefix, self).__init__() self.yang_name = "ia-prefix" self.yang_parent_name = "lsa-body" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([]) self._leafs = OrderedDict([ ('referenced_ls_type', (YLeaf(YType.uint16, 'referenced-ls-type'), ['int'])), ('referenced_link_state_id', (YLeaf(YType.uint32, 'referenced-link-state-id'), ['int'])), ('referenced_adv_router', (YLeaf(YType.str, 'referenced-adv-router'), ['str','str'])), ('num_of_prefixes', (YLeaf(YType.uint16, 'num-of-prefixes'), ['int'])), ]) self.referenced_ls_type = None self.referenced_link_state_id = None self.referenced_adv_router = None self.num_of_prefixes = None self._segment_path = lambda: "ia-prefix" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Lsa.LsaBody.IaPrefix, ['referenced_ls_type', 'referenced_link_state_id', 'referenced_adv_router', 'num_of_prefixes'], name, value) class Ospfv3Link(Entity): """ OSPFv3 links .. attribute:: interface_id (key) Interface ID **type**\: int **range:** 0..4294967295 **config**\: False .. attribute:: neighbor_interface_id (key) Neighbor interface ID **type**\: int **range:** 0..4294967295 **config**\: False .. attribute:: neighbor_router_id (key) Neighbor router ID **type**\: int **range:** 0..4294967295 **config**\: False .. attribute:: type Link type **type**\: int **range:** 0..255 **config**\: False .. attribute:: metric Metric **type**\: int **range:** 0..65535 **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Link, self).__init__() self.yang_name = "ospfv3-link" self.yang_parent_name = "area-scope-lsa" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = ['interface_id','neighbor_interface_id','neighbor_router_id'] self._child_classes = OrderedDict([]) self._leafs = OrderedDict([ ('interface_id', (YLeaf(YType.uint32, 'interface-id'), ['int'])), ('neighbor_interface_id', (YLeaf(YType.uint32, 'neighbor-interface-id'), ['int'])), ('neighbor_router_id', (YLeaf(YType.uint32, 'neighbor-router-id'), ['int'])), ('type', (YLeaf(YType.uint8, 'type'), ['int'])), ('metric', (YLeaf(YType.uint16, 'metric'), ['int'])), ]) self.interface_id = None self.neighbor_interface_id = None self.neighbor_router_id = None self.type = None self.metric = None self._segment_path = lambda: "ospfv3-link" + "[interface-id='" + str(self.interface_id) + "']" + "[neighbor-interface-id='" + str(self.neighbor_interface_id) + "']" + "[neighbor-router-id='" + str(self.neighbor_router_id) + "']" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Link, ['interface_id', 'neighbor_interface_id', 'neighbor_router_id', 'type', 'metric'], name, value) class Ospfv3Prefix(Entity): """ OSPFv3 prefix\-list .. attribute:: prefix (key) Prefix **type**\: str **config**\: False .. 
attribute:: prefix_options Prefix options **type**\: str **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Prefix, self).__init__() self.yang_name = "ospfv3-prefix" self.yang_parent_name = "area-scope-lsa" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = ['prefix'] self._child_classes = OrderedDict([]) self._leafs = OrderedDict([ ('prefix', (YLeaf(YType.str, 'prefix'), ['str'])), ('prefix_options', (YLeaf(YType.str, 'prefix-options'), ['str'])), ]) self.prefix = None self.prefix_options = None self._segment_path = lambda: "ospfv3-prefix" + "[prefix='" + str(self.prefix) + "']" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3Prefix, ['prefix', 'prefix_options'], name, value) class Ospfv3IaPrefix(Entity): """ OSPFv3 intra\-area prefix\-list .. attribute:: prefix (key) Prefix **type**\: str **config**\: False .. attribute:: prefix_options Prefix options **type**\: str **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3IaPrefix, self).__init__() self.yang_name = "ospfv3-ia-prefix" self.yang_parent_name = "area-scope-lsa" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = ['prefix'] self._child_classes = OrderedDict([]) self._leafs = OrderedDict([ ('prefix', (YLeaf(YType.str, 'prefix'), ['str'])), ('prefix_options', (YLeaf(YType.str, 'prefix-options'), ['str'])), ]) self.prefix = None self.prefix_options = None self._segment_path = lambda: "ospfv3-ia-prefix" + "[prefix='" + str(self.prefix) + "']" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.LinkScopeLsas.AreaScopeLsa.Ospfv3IaPrefix, ['prefix', 'prefix_options'], name, value) class MultiTopology(Entity): """ OSPF multi\-topology interface augmentation .. attribute:: name (key) One of the topologies enabled on this interface **type**\: str **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.OspfState.OspfInstance.MultiTopology, self).__init__() self.yang_name = "multi-topology" self.yang_parent_name = "ospf-instance" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = ['name'] self._child_classes = OrderedDict([]) self._leafs = OrderedDict([ ('name', (YLeaf(YType.str, 'name'), ['str'])), ]) self.name = None self._segment_path = lambda: "multi-topology" + "[name='" + str(self.name) + "']" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.OspfState.OspfInstance.MultiTopology, ['name'], name, value) class Ospfv2Instance(Entity): """ The OSPF instance .. attribute:: instance_id (key) The routing instance identifier assigned to the OSPF instance **type**\: int **range:** 0..4294967295 **config**\: False .. attribute:: vrf_name The name of the Virtual Routing and Forwarding instance that the OSPF instance is operating within **type**\: str **config**\: False .. attribute:: router_id The router identifier assigned to the OSPF instance **type**\: int **range:** 0..4294967295 **config**\: False .. 
attribute:: ospfv2_area The OSPF area information **type**\: list of :py:class:`Ospfv2Area <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.Ospfv2Instance.Ospfv2Area>` **config**\: False .. attribute:: ospfv2_lsdb_external The external LSDB information **type**\: list of :py:class:`Ospfv2LsdbExternal <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.Ospfv2Instance.Ospfv2LsdbExternal>` **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.Ospfv2Instance, self).__init__() self.yang_name = "ospfv2-instance" self.yang_parent_name = "ospf-oper-data" self.is_top_level_class = False self.has_list_ancestor = False self.ylist_key_names = ['instance_id'] self._child_classes = OrderedDict([("ospfv2-area", ("ospfv2_area", OspfOperData.Ospfv2Instance.Ospfv2Area)), ("ospfv2-lsdb-external", ("ospfv2_lsdb_external", OspfOperData.Ospfv2Instance.Ospfv2LsdbExternal))]) self._leafs = OrderedDict([ ('instance_id', (YLeaf(YType.uint32, 'instance-id'), ['int'])), ('vrf_name', (YLeaf(YType.str, 'vrf-name'), ['str'])), ('router_id', (YLeaf(YType.uint32, 'router-id'), ['int'])), ]) self.instance_id = None self.vrf_name = None self.router_id = None self.ospfv2_area = YList(self) self.ospfv2_lsdb_external = YList(self) self._segment_path = lambda: "ospfv2-instance" + "[instance-id='" + str(self.instance_id) + "']" self._absolute_path = lambda: "Cisco-IOS-XE-ospf-oper:ospf-oper-data/%s" % self._segment_path() self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.Ospfv2Instance, ['instance_id', 'vrf_name', 'router_id'], name, value) class Ospfv2Area(Entity): """ The OSPF area information .. attribute:: area_id (key) The area identifier **type**\: int **range:** 0..4294967295 **config**\: False .. attribute:: ospfv2_lsdb_area The OSPF Link State Database information for this area **type**\: list of :py:class:`Ospfv2LsdbArea <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2LsdbArea>` **config**\: False .. attribute:: ospfv2_interface A list of interfaces that belong to the area **type**\: list of :py:class:`Ospfv2Interface <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2Interface>` **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.Ospfv2Instance.Ospfv2Area, self).__init__() self.yang_name = "ospfv2-area" self.yang_parent_name = "ospfv2-instance" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = ['area_id'] self._child_classes = OrderedDict([("ospfv2-lsdb-area", ("ospfv2_lsdb_area", OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2LsdbArea)), ("ospfv2-interface", ("ospfv2_interface", OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2Interface))]) self._leafs = OrderedDict([ ('area_id', (YLeaf(YType.uint32, 'area-id'), ['int'])), ]) self.area_id = None self.ospfv2_lsdb_area = YList(self) self.ospfv2_interface = YList(self) self._segment_path = lambda: "ospfv2-area" + "[area-id='" + str(self.area_id) + "']" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.Ospfv2Instance.Ospfv2Area, ['area_id'], name, value) class Ospfv2LsdbArea(Entity): """ The OSPF Link State Database information for this area .. attribute:: lsa_type (key) Link State Advertisement type **type**\: int **range:** 0..255 **config**\: False .. 
attribute:: lsa_id (key) Link State Advertisement Identifier **type**\: int **range:** 0..4294967295 **config**\: False .. attribute:: advertising_router (key) Advertising router **type**\: int **range:** 0..4294967295 **config**\: False .. attribute:: lsa_age The age of the Link State Advertisement **type**\: int **range:** 0..65535 **config**\: False .. attribute:: lsa_options The options of the Link State Advertisement **type**\: :py:class:`Ospfv2LsaFlagOptions <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.Ospfv2LsaFlagOptions>` **config**\: False .. attribute:: lsa_seq_number The sequence number for the Link State Advertisement **type**\: int **range:** 0..4294967295 **config**\: False .. attribute:: lsa_checksum The checksum of the Link State Advertisement **type**\: int **range:** 0..65535 **config**\: False .. attribute:: lsa_length The length, in bytes, of the Link State Advertisement **type**\: int **range:** 0..65535 **config**\: False .. attribute:: ospfv2_router_lsa_links The router Link State Advertisement links **type**\: list of :py:class:`Ospfv2RouterLsaLinks <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2LsdbArea.Ospfv2RouterLsaLinks>` **config**\: False .. attribute:: unsupported_lsa The unsupported Link State Advertisements **type**\: :py:class:`UnsupportedLsa <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2LsdbArea.UnsupportedLsa>` **config**\: False .. attribute:: router_lsa The router Link State Advertisements **type**\: :py:class:`RouterLsa <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2LsdbArea.RouterLsa>` **config**\: False .. attribute:: network_lsa The network Link State Advertisements **type**\: :py:class:`NetworkLsa <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2LsdbArea.NetworkLsa>` **config**\: False .. attribute:: network_summary_lsa The network summary Link State Advertisements **type**\: :py:class:`NetworkSummaryLsa <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2LsdbArea.NetworkSummaryLsa>` **config**\: False .. attribute:: router_summary_lsa The router summary Link State Advertisements **type**\: :py:class:`RouterSummaryLsa <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2LsdbArea.RouterSummaryLsa>` **config**\: False .. attribute:: external_lsa The external Link State Advertisements **type**\: :py:class:`ExternalLsa <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2LsdbArea.ExternalLsa>` **config**\: False ..
attribute:: nssa_lsa The Not So Stubby Area Link state advertisements **type**\: :py:class:`NssaLsa <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2LsdbArea.NssaLsa>` **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2LsdbArea, self).__init__() self.yang_name = "ospfv2-lsdb-area" self.yang_parent_name = "ospfv2-area" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = ['lsa_type','lsa_id','advertising_router'] self._child_classes = OrderedDict([("ospfv2-router-lsa-links", ("ospfv2_router_lsa_links", OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2LsdbArea.Ospfv2RouterLsaLinks)), ("unsupported-lsa", ("unsupported_lsa", OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2LsdbArea.UnsupportedLsa)), ("router-lsa", ("router_lsa", OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2LsdbArea.RouterLsa)), ("network-lsa", ("network_lsa", OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2LsdbArea.NetworkLsa)), ("network-summary-lsa", ("network_summary_lsa", OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2LsdbArea.NetworkSummaryLsa)), ("router-summary-lsa", ("router_summary_lsa", OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2LsdbArea.RouterSummaryLsa)), ("external-lsa", ("external_lsa", OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2LsdbArea.ExternalLsa)), ("nssa-lsa", ("nssa_lsa", OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2LsdbArea.NssaLsa))]) self._leafs = OrderedDict([ ('lsa_type', (YLeaf(YType.uint8, 'lsa-type'), ['int'])), ('lsa_id', (YLeaf(YType.uint32, 'lsa-id'), ['int'])), ('advertising_router', (YLeaf(YType.uint32, 'advertising-router'), ['int'])), ('lsa_age', (YLeaf(YType.uint16, 'lsa-age'), ['int'])), ('lsa_options', (YLeaf(YType.bits, 'lsa-options'), ['Bits'])), ('lsa_seq_number', (YLeaf(YType.uint32, 'lsa-seq-number'), ['int'])), ('lsa_checksum', (YLeaf(YType.uint16, 'lsa-checksum'), ['int'])), ('lsa_length', (YLeaf(YType.uint16, 'lsa-length'), ['int'])), ]) self.lsa_type = None self.lsa_id = None self.advertising_router = None self.lsa_age = None self.lsa_options = Bits() self.lsa_seq_number = None self.lsa_checksum = None self.lsa_length = None self.unsupported_lsa = OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2LsdbArea.UnsupportedLsa() self.unsupported_lsa.parent = self self._children_name_map["unsupported_lsa"] = "unsupported-lsa" self.router_lsa = OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2LsdbArea.RouterLsa() self.router_lsa.parent = self self._children_name_map["router_lsa"] = "router-lsa" self.network_lsa = OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2LsdbArea.NetworkLsa() self.network_lsa.parent = self self._children_name_map["network_lsa"] = "network-lsa" self.network_summary_lsa = OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2LsdbArea.NetworkSummaryLsa() self.network_summary_lsa.parent = self self._children_name_map["network_summary_lsa"] = "network-summary-lsa" self.router_summary_lsa = OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2LsdbArea.RouterSummaryLsa() self.router_summary_lsa.parent = self self._children_name_map["router_summary_lsa"] = "router-summary-lsa" self.external_lsa = OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2LsdbArea.ExternalLsa() self.external_lsa.parent = self self._children_name_map["external_lsa"] = "external-lsa" self.nssa_lsa = OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2LsdbArea.NssaLsa() self.nssa_lsa.parent = self self._children_name_map["nssa_lsa"] = "nssa-lsa"
self.ospfv2_router_lsa_links = YList(self) self._segment_path = lambda: "ospfv2-lsdb-area" + "[lsa-type='" + str(self.lsa_type) + "']" + "[lsa-id='" + str(self.lsa_id) + "']" + "[advertising-router='" + str(self.advertising_router) + "']" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2LsdbArea, ['lsa_type', 'lsa_id', 'advertising_router', 'lsa_age', 'lsa_options', 'lsa_seq_number', 'lsa_checksum', 'lsa_length'], name, value) class Ospfv2RouterLsaLinks(Entity): """ The router Link State Advertisement links .. attribute:: link_type (key) Link Type **type**\: int **range:** 0..255 **config**\: False .. attribute:: link_id (key) link Identifier **type**\: int **range:** 0..4294967295 **config**\: False .. attribute:: link_data (key) link data **type**\: int **range:** 0..4294967295 **config**\: False .. attribute:: link_topo Link topology **type**\: list of :py:class:`LinkTopo <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2LsdbArea.Ospfv2RouterLsaLinks.LinkTopo>` **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2LsdbArea.Ospfv2RouterLsaLinks, self).__init__() self.yang_name = "ospfv2-router-lsa-links" self.yang_parent_name = "ospfv2-lsdb-area" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = ['link_type','link_id','link_data'] self._child_classes = OrderedDict([("link-topo", ("link_topo", OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2LsdbArea.Ospfv2RouterLsaLinks.LinkTopo))]) self._leafs = OrderedDict([ ('link_type', (YLeaf(YType.uint8, 'link-type'), ['int'])), ('link_id', (YLeaf(YType.uint32, 'link-id'), ['int'])), ('link_data', (YLeaf(YType.uint32, 'link-data'), ['int'])), ]) self.link_type = None self.link_id = None self.link_data = None self.link_topo = YList(self) self._segment_path = lambda: "ospfv2-router-lsa-links" + "[link-type='" + str(self.link_type) + "']" + "[link-id='" + str(self.link_id) + "']" + "[link-data='" + str(self.link_data) + "']" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2LsdbArea.Ospfv2RouterLsaLinks, ['link_type', 'link_id', 'link_data'], name, value) class LinkTopo(Entity): """ Link topology .. attribute:: mt_id Multi topology identifier **type**\: int **range:** 0..255 **config**\: False ..
attribute:: lsa_data Link State Advertisement data **type**\: list of int **range:** 0..255 **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2LsdbArea.UnsupportedLsa, self).__init__() self.yang_name = "unsupported-lsa" self.yang_parent_name = "ospfv2-lsdb-area" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([]) self._leafs = OrderedDict([ ('lsa_data', (YLeafList(YType.uint8, 'lsa-data'), ['int'])), ]) self.lsa_data = [] self._segment_path = lambda: "unsupported-lsa" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2LsdbArea.UnsupportedLsa, ['lsa_data'], name, value) class RouterLsa(Entity): """ The router Link State Advertisements .. attribute:: router_lsa_bits Router Link State Advertisement bits **type**\: :py:class:`Ospfv2RouterLsaBits <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.Ospfv2RouterLsaBits>` **config**\: False .. attribute:: router_lsa_number_links Router Link State Advertisement number of links **type**\: int **range:** 0..65535 **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2LsdbArea.RouterLsa, self).__init__() self.yang_name = "router-lsa" self.yang_parent_name = "ospfv2-lsdb-area" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([]) self._leafs = OrderedDict([ ('router_lsa_bits', (YLeaf(YType.bits, 'router-lsa-bits'), ['Bits'])), ('router_lsa_number_links', (YLeaf(YType.uint16, 'router-lsa-number-links'), ['int'])), ]) self.router_lsa_bits = Bits() self.router_lsa_number_links = None self._segment_path = lambda: "router-lsa" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2LsdbArea.RouterLsa, ['router_lsa_bits', 'router_lsa_number_links'], name, value) class NetworkLsa(Entity): """ The network Link State Advertisements .. attribute:: network_lsa_mask Network Link State Advertisement mask **type**\: int **range:** 0..4294967295 **config**\: False .. attribute:: network_attached_routers Network attached routers **type**\: list of int **range:** 0..4294967295 **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2LsdbArea.NetworkLsa, self).__init__() self.yang_name = "network-lsa" self.yang_parent_name = "ospfv2-lsdb-area" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([]) self._leafs = OrderedDict([ ('network_lsa_mask', (YLeaf(YType.uint32, 'network-lsa-mask'), ['int'])), ('network_attached_routers', (YLeafList(YType.uint32, 'network-attached-routers'), ['int'])), ]) self.network_lsa_mask = None self.network_attached_routers = [] self._segment_path = lambda: "network-lsa" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2LsdbArea.NetworkLsa, ['network_lsa_mask', 'network_attached_routers'], name, value) class NetworkSummaryLsa(Entity): """ The network summary Link State Advertisements .. attribute:: summary_lsa_mask The summary Link State Advertisement mask **type**\: int **range:** 0..4294967295 **config**\: False .. 
attribute:: summary_topo The summary topology **type**\: list of :py:class:`SummaryTopo <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2LsdbArea.NetworkSummaryLsa.SummaryTopo>` **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2LsdbArea.NetworkSummaryLsa, self).__init__() self.yang_name = "network-summary-lsa" self.yang_parent_name = "ospfv2-lsdb-area" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([("summary-topo", ("summary_topo", OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2LsdbArea.NetworkSummaryLsa.SummaryTopo))]) self._leafs = OrderedDict([ ('summary_lsa_mask', (YLeaf(YType.uint32, 'summary-lsa-mask'), ['int'])), ]) self.summary_lsa_mask = None self.summary_topo = YList(self) self._segment_path = lambda: "network-summary-lsa" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2LsdbArea.NetworkSummaryLsa, ['summary_lsa_mask'], name, value) class SummaryTopo(Entity): """ The summary topology .. attribute:: mt_id Multi topology identifier **type**\: int **range:** 0..255 **config**\: False .. attribute:: topo_metric Topology Metric **type**\: int **range:** 0..4294967295 **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2LsdbArea.NetworkSummaryLsa.SummaryTopo, self).__init__() self.yang_name = "summary-topo" self.yang_parent_name = "network-summary-lsa" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([]) self._leafs = OrderedDict([ ('mt_id', (YLeaf(YType.uint8, 'mt-id'), ['int'])), ('topo_metric', (YLeaf(YType.uint32, 'topo-metric'), ['int'])), ]) self.mt_id = None self.topo_metric = None self._segment_path = lambda: "summary-topo" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2LsdbArea.NetworkSummaryLsa.SummaryTopo, ['mt_id', 'topo_metric'], name, value) class RouterSummaryLsa(Entity): """ The router summary Link State Advertisements .. attribute:: summary_lsa_mask The summary Link State Advertisement mask **type**\: int **range:** 0..4294967295 **config**\: False .. 
attribute:: summary_topo The summary topology **type**\: list of :py:class:`SummaryTopo <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2LsdbArea.RouterSummaryLsa.SummaryTopo>` **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2LsdbArea.RouterSummaryLsa, self).__init__() self.yang_name = "router-summary-lsa" self.yang_parent_name = "ospfv2-lsdb-area" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([("summary-topo", ("summary_topo", OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2LsdbArea.RouterSummaryLsa.SummaryTopo))]) self._leafs = OrderedDict([ ('summary_lsa_mask', (YLeaf(YType.uint32, 'summary-lsa-mask'), ['int'])), ]) self.summary_lsa_mask = None self.summary_topo = YList(self) self._segment_path = lambda: "router-summary-lsa" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2LsdbArea.RouterSummaryLsa, ['summary_lsa_mask'], name, value) class SummaryTopo(Entity): """ The summary topology .. attribute:: mt_id Multi topology identifier **type**\: int **range:** 0..255 **config**\: False .. attribute:: topo_metric Topology Metric **type**\: int **range:** 0..4294967295 **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2LsdbArea.RouterSummaryLsa.SummaryTopo, self).__init__() self.yang_name = "summary-topo" self.yang_parent_name = "router-summary-lsa" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([]) self._leafs = OrderedDict([ ('mt_id', (YLeaf(YType.uint8, 'mt-id'), ['int'])), ('topo_metric', (YLeaf(YType.uint32, 'topo-metric'), ['int'])), ]) self.mt_id = None self.topo_metric = None self._segment_path = lambda: "summary-topo" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2LsdbArea.RouterSummaryLsa.SummaryTopo, ['mt_id', 'topo_metric'], name, value) class ExternalLsa(Entity): """ The external Link State Advertisements .. attribute:: external_lsa_mask The mask for the external Link State Advertisement **type**\: int **range:** 0..4294967295 **config**\: False .. 
attribute:: external_topo The external topology Link State Advertisement **type**\: list of :py:class:`ExternalTopo <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2LsdbArea.ExternalLsa.ExternalTopo>` **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2LsdbArea.ExternalLsa, self).__init__() self.yang_name = "external-lsa" self.yang_parent_name = "ospfv2-lsdb-area" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([("external-topo", ("external_topo", OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2LsdbArea.ExternalLsa.ExternalTopo))]) self._leafs = OrderedDict([ ('external_lsa_mask', (YLeaf(YType.uint32, 'external-lsa-mask'), ['int'])), ]) self.external_lsa_mask = None self.external_topo = YList(self) self._segment_path = lambda: "external-lsa" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2LsdbArea.ExternalLsa, ['external_lsa_mask'], name, value) class ExternalTopo(Entity): """ The external topology Link State Advertisement .. attribute:: mt_id The multi topology identifier **type**\: int **range:** 0..255 **config**\: False .. attribute:: topo_metric_type The topology metric type associated with the Link State Advertisement **type**\: :py:class:`OspfExternalMetricType <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfExternalMetricType>` **config**\: False .. attribute:: topo_metric The topology metric **type**\: int **range:** 0..4294967295 **config**\: False .. attribute:: topo_forwarding_address The topology forwarding address **type**\: union of the below types: **type**\: str **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)? **type**\: str **pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)? **config**\: False ..
attribute:: topo_route_tag The topology route tag **type**\: int **range:** 0..4294967295 **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2LsdbArea.ExternalLsa.ExternalTopo, self).__init__() self.yang_name = "external-topo" self.yang_parent_name = "external-lsa" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([]) self._leafs = OrderedDict([ ('mt_id', (YLeaf(YType.uint8, 'mt-id'), ['int'])), ('topo_metric_type', (YLeaf(YType.enumeration, 'topo-metric-type'), [('ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper', 'OspfExternalMetricType', '')])), ('topo_metric', (YLeaf(YType.uint32, 'topo-metric'), ['int'])), ('topo_forwarding_address', (YLeaf(YType.str, 'topo-forwarding-address'), ['str','str'])), ('topo_route_tag', (YLeaf(YType.uint32, 'topo-route-tag'), ['int'])), ]) self.mt_id = None self.topo_metric_type = None self.topo_metric = None self.topo_forwarding_address = None self.topo_route_tag = None self._segment_path = lambda: "external-topo" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2LsdbArea.ExternalLsa.ExternalTopo, ['mt_id', 'topo_metric_type', 'topo_metric', 'topo_forwarding_address', 'topo_route_tag'], name, value) class NssaLsa(Entity): """ The Not So Stubby Area Link state advertisements .. attribute:: external_lsa_mask The mask for the external Link State Advertisement **type**\: int **range:** 0..4294967295 **config**\: False .. attribute:: external_topo The external topology Link State Advertisement **type**\: list of :py:class:`ExternalTopo <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2LsdbArea.NssaLsa.ExternalTopo>` **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2LsdbArea.NssaLsa, self).__init__() self.yang_name = "nssa-lsa" self.yang_parent_name = "ospfv2-lsdb-area" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([("external-topo", ("external_topo", OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2LsdbArea.NssaLsa.ExternalTopo))]) self._leafs = OrderedDict([ ('external_lsa_mask', (YLeaf(YType.uint32, 'external-lsa-mask'), ['int'])), ]) self.external_lsa_mask = None self.external_topo = YList(self) self._segment_path = lambda: "nssa-lsa" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2LsdbArea.NssaLsa, ['external_lsa_mask'], name, value) class ExternalTopo(Entity): """ The external topology Link State Advertisement .. attribute:: mt_id The multi topology identifier **type**\: int **range:** 0..255 **config**\: False .. attribute:: topo_metric_type The topology metric type associated with the Link State Advertisement **type**\: :py:class:`OspfExternalMetricType <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfExternalMetricType>` **config**\: False .. attribute:: topo_metric The topology metric **type**\: int **range:** 0..4294967295 **config**\: False .. attribute:: topo_forwarding_address The topology forwarding address **type**\: union of the below types: **type**\: str **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?
**type**\: str **pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)? **config**\: False .. attribute:: topo_route_tag The topology route tag **type**\: int **range:** 0..4294967295 **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2LsdbArea.NssaLsa.ExternalTopo, self).__init__() self.yang_name = "external-topo" self.yang_parent_name = "nssa-lsa" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([]) self._leafs = OrderedDict([ ('mt_id', (YLeaf(YType.uint8, 'mt-id'), ['int'])), ('topo_metric_type', (YLeaf(YType.enumeration, 'topo-metric-type'), [('ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper', 'OspfExternalMetricType', '')])), ('topo_metric', (YLeaf(YType.uint32, 'topo-metric'), ['int'])), ('topo_forwarding_address', (YLeaf(YType.str, 'topo-forwarding-address'), ['str','str'])), ('topo_route_tag', (YLeaf(YType.uint32, 'topo-route-tag'), ['int'])), ]) self.mt_id = None self.topo_metric_type = None self.topo_metric = None self.topo_forwarding_address = None self.topo_route_tag = None self._segment_path = lambda: "external-topo" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2LsdbArea.NssaLsa.ExternalTopo, ['mt_id', 'topo_metric_type', 'topo_metric', 'topo_forwarding_address', 'topo_route_tag'], name, value) class Ospfv2Interface(Entity): """ A list of interfaces that belong to the area .. attribute:: name (key) Name of the interface **type**\: str **config**\: False .. attribute:: network_type Network type **type**\: :py:class:`OspfNetworkType <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfNetworkType>` **config**\: False .. attribute:: enable If the interface is enabled **type**\: bool **config**\: False .. attribute:: passive If the interface is in passive mode **type**\: bool **config**\: False .. attribute:: demand_circuit If this is a demand circuit **type**\: bool **config**\: False .. attribute:: mtu_ignore If the MTU is being ignored **type**\: bool **config**\: False .. attribute:: prefix_suppresion If prefix suppression is enabled **type**\: bool **config**\: False .. attribute:: cost The OSPFv2 cost **type**\: int **range:** 0..65535 **config**\: False .. attribute:: hello_interval The hello interval in seconds **type**\: int **range:** 0..65535 **config**\: False .. attribute:: dead_interval The dead interval in seconds **type**\: int **range:** 0..65535 **config**\: False .. attribute:: retransmit_interval The retransmit interval in seconds **type**\: int **range:** 0..65535 **config**\: False .. attribute:: transmit_delay The delay before transmitting a keepalive in seconds **type**\: int **range:** 0..65535 **config**\: False .. attribute:: hello_timer The current hello timer in seconds **type**\: int **range:** 0..4294967295 **config**\: False .. attribute:: wait_timer The wait timer in seconds **type**\: int **range:** 0..4294967295 **config**\: False .. attribute:: dr The designated router identifier **type**\: int **range:** 0..4294967295 **config**\: False .. attribute:: bdr The backup designated router identifier **type**\: int **range:** 0..4294967295 **config**\: False .. 
attribute:: dr_ip The address of the designated router **type**\: union of the below types: **type**\: str **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)? **type**\: str **pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)? **config**\: False .. attribute:: bdr_ip The address of the backup designated router **type**\: union of the below types: **type**\: str **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)? **type**\: str **pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)? **config**\: False .. attribute:: state The current state of the interface **type**\: :py:class:`Ospfv2IntfState <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.Ospfv2IntfState>` **config**\: False .. attribute:: ttl_security_val The TTL security information **type**\: :py:class:`TtlSecurityVal <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2Interface.TtlSecurityVal>` **config**\: False .. attribute:: auth_val The authentication information **type**\: :py:class:`AuthVal <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2Interface.AuthVal>` **config**\: False .. attribute:: ospfv2_neighbor All the neighbors on the interface **type**\: list of :py:class:`Ospfv2Neighbor <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2Interface.Ospfv2Neighbor>` **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2Interface, self).__init__() self.yang_name = "ospfv2-interface" self.yang_parent_name = "ospfv2-area" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = ['name'] self._child_classes = OrderedDict([("ttl-security-val", ("ttl_security_val", OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2Interface.TtlSecurityVal)), ("auth-val", ("auth_val", OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2Interface.AuthVal)), ("ospfv2-neighbor", ("ospfv2_neighbor", OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2Interface.Ospfv2Neighbor))]) self._leafs = OrderedDict([ ('name', (YLeaf(YType.str, 'name'), ['str'])), ('network_type', (YLeaf(YType.enumeration, 'network-type'), [('ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper', 'OspfNetworkType', '')])), ('enable', (YLeaf(YType.boolean, 'enable'), ['bool'])), ('passive', (YLeaf(YType.boolean, 'passive'), ['bool'])), ('demand_circuit', (YLeaf(YType.boolean, 'demand-circuit'), ['bool'])), ('mtu_ignore', (YLeaf(YType.boolean, 'mtu-ignore'), ['bool'])), ('prefix_suppresion', (YLeaf(YType.boolean, 'prefix-suppresion'), ['bool'])), ('cost', (YLeaf(YType.uint16, 'cost'), ['int'])), ('hello_interval', (YLeaf(YType.uint16, 'hello-interval'), ['int'])), ('dead_interval', (YLeaf(YType.uint16, 'dead-interval'), ['int'])), ('retransmit_interval', (YLeaf(YType.uint16, 'retransmit-interval'), ['int'])), ('transmit_delay', (YLeaf(YType.uint16, 'transmit-delay'), 
['int'])), ('hello_timer', (YLeaf(YType.uint32, 'hello-timer'), ['int'])), ('wait_timer', (YLeaf(YType.uint32, 'wait-timer'), ['int'])), ('dr', (YLeaf(YType.uint32, 'dr'), ['int'])), ('bdr', (YLeaf(YType.uint32, 'bdr'), ['int'])), ('dr_ip', (YLeaf(YType.str, 'dr-ip'), ['str','str'])), ('bdr_ip', (YLeaf(YType.str, 'bdr-ip'), ['str','str'])), ('state', (YLeaf(YType.enumeration, 'state'), [('ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper', 'Ospfv2IntfState', '')])), ]) self.name = None self.network_type = None self.enable = None self.passive = None self.demand_circuit = None self.mtu_ignore = None self.prefix_suppresion = None self.cost = None self.hello_interval = None self.dead_interval = None self.retransmit_interval = None self.transmit_delay = None self.hello_timer = None self.wait_timer = None self.dr = None self.bdr = None self.dr_ip = None self.bdr_ip = None self.state = None self.ttl_security_val = OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2Interface.TtlSecurityVal() self.ttl_security_val.parent = self self._children_name_map["ttl_security_val"] = "ttl-security-val" self.auth_val = OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2Interface.AuthVal() self.auth_val.parent = self self._children_name_map["auth_val"] = "auth-val" self.ospfv2_neighbor = YList(self) self._segment_path = lambda: "ospfv2-interface" + "[name='" + str(self.name) + "']" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2Interface, ['name', 'network_type', 'enable', 'passive', 'demand_circuit', 'mtu_ignore', 'prefix_suppresion', 'cost', 'hello_interval', 'dead_interval', 'retransmit_interval', 'transmit_delay', 'hello_timer', 'wait_timer', 'dr', 'bdr', 'dr_ip', 'bdr_ip', 'state'], name, value) class TtlSecurityVal(Entity): """ The TTL security information .. attribute:: enable Indicates whether time to live security is enabled **type**\: bool **config**\: False .. attribute:: hops Number of hops for time to live security **type**\: int **range:** \-2147483648..2147483647 **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2Interface.TtlSecurityVal, self).__init__() self.yang_name = "ttl-security-val" self.yang_parent_name = "ospfv2-interface" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([]) self._leafs = OrderedDict([ ('enable', (YLeaf(YType.boolean, 'enable'), ['bool'])), ('hops', (YLeaf(YType.int32, 'hops'), ['int'])), ]) self.enable = None self.hops = None self._segment_path = lambda: "ttl-security-val" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2Interface.TtlSecurityVal, ['enable', 'hops'], name, value) class AuthVal(Entity): """ The authentication information .. attribute:: no_auth No authentication in use **type**\: int **range:** 0..4294967295 **config**\: False .. attribute:: auth_key Trailer key chain information **type**\: :py:class:`AuthKey <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2Interface.AuthVal.AuthKey>` **config**\: False .. 
attribute:: key_chain Trailer key information **type**\: :py:class:`KeyChain <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2Interface.AuthVal.KeyChain>` **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2Interface.AuthVal, self).__init__() self.yang_name = "auth-val" self.yang_parent_name = "ospfv2-interface" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([("auth-key", ("auth_key", OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2Interface.AuthVal.AuthKey)), ("key-chain", ("key_chain", OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2Interface.AuthVal.KeyChain))]) self._leafs = OrderedDict([ ('no_auth', (YLeaf(YType.uint32, 'no-auth'), ['int'])), ]) self.no_auth = None self.auth_key = OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2Interface.AuthVal.AuthKey() self.auth_key.parent = self self._children_name_map["auth_key"] = "auth-key" self.key_chain = OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2Interface.AuthVal.KeyChain() self.key_chain.parent = self self._children_name_map["key_chain"] = "key-chain" self._segment_path = lambda: "auth-val" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2Interface.AuthVal, ['no_auth'], name, value) class AuthKey(Entity): """ Trailer key chain information .. attribute:: key_id The key identifier **type**\: int **range:** 0..255 **config**\: False .. attribute:: key_string The key string **type**\: list of int **range:** 0..255 **config**\: False .. attribute:: crypto_algo The algorithm in use **type**\: :py:class:`Ospfv2CryptoAlgorithm <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.Ospfv2CryptoAlgorithm>` **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2Interface.AuthVal.AuthKey, self).__init__() self.yang_name = "auth-key" self.yang_parent_name = "auth-val" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([]) self._leafs = OrderedDict([ ('key_id', (YLeaf(YType.uint8, 'key-id'), ['int'])), ('key_string', (YLeafList(YType.uint8, 'key-string'), ['int'])), ('crypto_algo', (YLeaf(YType.enumeration, 'crypto-algo'), [('ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper', 'Ospfv2CryptoAlgorithm', '')])), ]) self.key_id = None self.key_string = [] self.crypto_algo = None self._segment_path = lambda: "auth-key" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2Interface.AuthVal.AuthKey, ['key_id', 'key_string', 'crypto_algo'], name, value) class KeyChain(Entity): """ Trailer key information .. 
attribute:: key_chain The key chain **type**\: list of int **range:** 0..255 **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2Interface.AuthVal.KeyChain, self).__init__() self.yang_name = "key-chain" self.yang_parent_name = "auth-val" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([]) self._leafs = OrderedDict([ ('key_chain', (YLeafList(YType.uint8, 'key-chain'), ['int'])), ]) self.key_chain = [] self._segment_path = lambda: "key-chain" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2Interface.AuthVal.KeyChain, ['key_chain'], name, value) class Ospfv2Neighbor(Entity): """ All the neighbors on the interface .. attribute:: nbr_id (key) The neighbor identifier **type**\: int **range:** 0..4294967295 **config**\: False .. attribute:: address Neighbor address **type**\: union of the below types: **type**\: str **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)? **type**\: str **pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)? **config**\: False .. attribute:: dr The neighbor's Designated Router identifier **type**\: int **range:** 0..4294967295 **config**\: False .. attribute:: bdr The neighbor's Backup Designated Router identifier **type**\: int **range:** 0..4294967295 **config**\: False .. attribute:: dr_ip The designated router's IP address **type**\: union of the below types: **type**\: str **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)? **type**\: str **pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)? **config**\: False .. attribute:: bdr_ip The backup designated router's IP address **type**\: union of the below types: **type**\: str **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)? **type**\: str **pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)? **config**\: False .. attribute:: event_count A count of neighbor events **type**\: int **range:** 0..4294967295 **config**\: False .. attribute:: retrans_count A count of the retransmission events **type**\: int **range:** 0..4294967295 **config**\: False .. attribute:: state The current neighbor state **type**\: :py:class:`NbrStateType <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.NbrStateType>` **config**\: False ..
attribute:: dead_timer The dead timer in seconds **type**\: int **range:** 0..4294967295 **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2Interface.Ospfv2Neighbor, self).__init__() self.yang_name = "ospfv2-neighbor" self.yang_parent_name = "ospfv2-interface" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = ['nbr_id'] self._child_classes = OrderedDict([]) self._leafs = OrderedDict([ ('nbr_id', (YLeaf(YType.uint32, 'nbr-id'), ['int'])), ('address', (YLeaf(YType.str, 'address'), ['str','str'])), ('dr', (YLeaf(YType.uint32, 'dr'), ['int'])), ('bdr', (YLeaf(YType.uint32, 'bdr'), ['int'])), ('dr_ip', (YLeaf(YType.str, 'dr-ip'), ['str','str'])), ('bdr_ip', (YLeaf(YType.str, 'bdr-ip'), ['str','str'])), ('event_count', (YLeaf(YType.uint32, 'event-count'), ['int'])), ('retrans_count', (YLeaf(YType.uint32, 'retrans-count'), ['int'])), ('state', (YLeaf(YType.enumeration, 'state'), [('ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper', 'NbrStateType', '')])), ('dead_timer', (YLeaf(YType.uint32, 'dead-timer'), ['int'])), ]) self.nbr_id = None self.address = None self.dr = None self.bdr = None self.dr_ip = None self.bdr_ip = None self.event_count = None self.retrans_count = None self.state = None self.dead_timer = None self._segment_path = lambda: "ospfv2-neighbor" + "[nbr-id='" + str(self.nbr_id) + "']" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.Ospfv2Instance.Ospfv2Area.Ospfv2Interface.Ospfv2Neighbor, ['nbr_id', 'address', 'dr', 'bdr', 'dr_ip', 'bdr_ip', 'event_count', 'retrans_count', 'state', 'dead_timer'], name, value) class Ospfv2LsdbExternal(Entity): """ The external LSDB information .. attribute:: lsa_type (key) Link State Advertisement type **type**\: int **range:** 0..255 **config**\: False .. attribute:: lsa_id (key) Link State Advertisement Identifier **type**\: int **range:** 0..4294967295 **config**\: False .. attribute:: advertising_router (key) Advertising router **type**\: int **range:** 0..4294967295 **config**\: False .. attribute:: lsa_age The age of the Link State Advertisement **type**\: int **range:** 0..65535 **config**\: False .. attribute:: lsa_options The options of the Link State Advertisement **type**\: :py:class:`Ospfv2LsaFlagOptions <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.Ospfv2LsaFlagOptions>` **config**\: False .. attribute:: lsa_seq_number The sequence number for the Link State Advertisement **type**\: int **range:** 0..4294967295 **config**\: False .. attribute:: lsa_checksum The checksum of the Link State Advertisement **type**\: int **range:** 0..65535 **config**\: False .. attribute:: lsa_length The length, in bytes, of the Link State Advertisement **type**\: int **range:** 0..65535 **config**\: False .. attribute:: ospfv2_router_lsa_links The router Link State Advertisement links **type**\: list of :py:class:`Ospfv2RouterLsaLinks <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.Ospfv2Instance.Ospfv2LsdbExternal.Ospfv2RouterLsaLinks>` **config**\: False .. attribute:: unsupported_lsa The unsupported Link State Advertisements **type**\: :py:class:`UnsupportedLsa <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.Ospfv2Instance.Ospfv2LsdbExternal.UnsupportedLsa>` **config**\: False ..
attribute:: router_lsa The router Link State Advertisements **type**\: :py:class:`RouterLsa <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.Ospfv2Instance.Ospfv2LsdbExternal.RouterLsa>` **config**\: False .. attribute:: network_lsa The network Link State Advertisements **type**\: :py:class:`NetworkLsa <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.Ospfv2Instance.Ospfv2LsdbExternal.NetworkLsa>` **config**\: False .. attribute:: network_summary_lsa The network summary Link State Advertisements **type**\: :py:class:`NetworkSummaryLsa <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.Ospfv2Instance.Ospfv2LsdbExternal.NetworkSummaryLsa>` **config**\: False .. attribute:: router_summary_lsa The router summary Link State Advertisements **type**\: :py:class:`RouterSummaryLsa <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.Ospfv2Instance.Ospfv2LsdbExternal.RouterSummaryLsa>` **config**\: False .. attribute:: external_lsa The external Link State Advertisements **type**\: :py:class:`ExternalLsa <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.Ospfv2Instance.Ospfv2LsdbExternal.ExternalLsa>` **config**\: False .. attribute:: nssa_lsa The Not So Stubby Area Link state advertisements **type**\: :py:class:`NssaLsa <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.Ospfv2Instance.Ospfv2LsdbExternal.NssaLsa>` **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.Ospfv2Instance.Ospfv2LsdbExternal, self).__init__() self.yang_name = "ospfv2-lsdb-external" self.yang_parent_name = "ospfv2-instance" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = ['lsa_type','lsa_id','advertising_router'] self._child_classes = OrderedDict([("ospfv2-router-lsa-links", ("ospfv2_router_lsa_links", OspfOperData.Ospfv2Instance.Ospfv2LsdbExternal.Ospfv2RouterLsaLinks)), ("unsupported-lsa", ("unsupported_lsa", OspfOperData.Ospfv2Instance.Ospfv2LsdbExternal.UnsupportedLsa)), ("router-lsa", ("router_lsa", OspfOperData.Ospfv2Instance.Ospfv2LsdbExternal.RouterLsa)), ("network-lsa", ("network_lsa", OspfOperData.Ospfv2Instance.Ospfv2LsdbExternal.NetworkLsa)), ("network-summary-lsa", ("network_summary_lsa", OspfOperData.Ospfv2Instance.Ospfv2LsdbExternal.NetworkSummaryLsa)), ("router-summary-lsa", ("router_summary_lsa", OspfOperData.Ospfv2Instance.Ospfv2LsdbExternal.RouterSummaryLsa)), ("external-lsa", ("external_lsa", OspfOperData.Ospfv2Instance.Ospfv2LsdbExternal.ExternalLsa)), ("nssa-lsa", ("nssa_lsa", OspfOperData.Ospfv2Instance.Ospfv2LsdbExternal.NssaLsa))]) self._leafs = OrderedDict([ ('lsa_type', (YLeaf(YType.uint8, 'lsa-type'), ['int'])), ('lsa_id', (YLeaf(YType.uint32, 'lsa-id'), ['int'])), ('advertising_router', (YLeaf(YType.uint32, 'advertising-router'), ['int'])), ('lsa_age', (YLeaf(YType.uint16, 'lsa-age'), ['int'])), ('lsa_options', (YLeaf(YType.bits, 'lsa-options'), ['Bits'])), ('lsa_seq_number', (YLeaf(YType.uint32, 'lsa-seq-number'), ['int'])), ('lsa_checksum', (YLeaf(YType.uint16, 'lsa-checksum'), ['int'])), ('lsa_length', (YLeaf(YType.uint16, 'lsa-length'), ['int'])), ]) self.lsa_type = None self.lsa_id = None self.advertising_router = None self.lsa_age = None self.lsa_options = Bits() self.lsa_seq_number = None self.lsa_checksum = None self.lsa_length = None self.unsupported_lsa = OspfOperData.Ospfv2Instance.Ospfv2LsdbExternal.UnsupportedLsa() self.unsupported_lsa.parent = self self._children_name_map["unsupported_lsa"] = "unsupported-lsa"
self.router_lsa = OspfOperData.Ospfv2Instance.Ospfv2LsdbExternal.RouterLsa() self.router_lsa.parent = self self._children_name_map["router_lsa"] = "router-lsa" self.network_lsa = OspfOperData.Ospfv2Instance.Ospfv2LsdbExternal.NetworkLsa() self.network_lsa.parent = self self._children_name_map["network_lsa"] = "network-lsa" self.network_summary_lsa = OspfOperData.Ospfv2Instance.Ospfv2LsdbExternal.NetworkSummaryLsa() self.network_summary_lsa.parent = self self._children_name_map["network_summary_lsa"] = "network-summary-lsa" self.router_summary_lsa = OspfOperData.Ospfv2Instance.Ospfv2LsdbExternal.RouterSummaryLsa() self.router_summary_lsa.parent = self self._children_name_map["router_summary_lsa"] = "router-summary-lsa" self.external_lsa = OspfOperData.Ospfv2Instance.Ospfv2LsdbExternal.ExternalLsa() self.external_lsa.parent = self self._children_name_map["external_lsa"] = "external-lsa" self.nssa_lsa = OspfOperData.Ospfv2Instance.Ospfv2LsdbExternal.NssaLsa() self.nssa_lsa.parent = self self._children_name_map["nssa_lsa"] = "nssa-lsa" self.ospfv2_router_lsa_links = YList(self) self._segment_path = lambda: "ospfv2-lsdb-external" + "[lsa-type='" + str(self.lsa_type) + "']" + "[lsa-id='" + str(self.lsa_id) + "']" + "[advertising-router='" + str(self.advertising_router) + "']" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.Ospfv2Instance.Ospfv2LsdbExternal, ['lsa_type', 'lsa_id', 'advertising_router', 'lsa_age', 'lsa_options', 'lsa_seq_number', 'lsa_checksum', 'lsa_length'], name, value) class Ospfv2RouterLsaLinks(Entity): """ The router Link State Advertisement links .. attribute:: link_type (key) Link Type **type**\: int **range:** 0..255 **config**\: False .. attribute:: link_id (key) link Identifier **type**\: int **range:** 0..4294967295 **config**\: False .. attribute:: link_data (key) link data **type**\: int **range:** 0..4294967295 **config**\: False .. attribute:: link_topo Link topology **type**\: list of :py:class:`LinkTopo <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.Ospfv2Instance.Ospfv2LsdbExternal.Ospfv2RouterLsaLinks.LinkTopo>` **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.Ospfv2Instance.Ospfv2LsdbExternal.Ospfv2RouterLsaLinks, self).__init__() self.yang_name = "ospfv2-router-lsa-links" self.yang_parent_name = "ospfv2-lsdb-external" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = ['link_type','link_id','link_data'] self._child_classes = OrderedDict([("link-topo", ("link_topo", OspfOperData.Ospfv2Instance.Ospfv2LsdbExternal.Ospfv2RouterLsaLinks.LinkTopo))]) self._leafs = OrderedDict([ ('link_type', (YLeaf(YType.uint8, 'link-type'), ['int'])), ('link_id', (YLeaf(YType.uint32, 'link-id'), ['int'])), ('link_data', (YLeaf(YType.uint32, 'link-data'), ['int'])), ]) self.link_type = None self.link_id = None self.link_data = None self.link_topo = YList(self) self._segment_path = lambda: "ospfv2-router-lsa-links" + "[link-type='" + str(self.link_type) + "']" + "[link-id='" + str(self.link_id) + "']" + "[link-data='" + str(self.link_data) + "']" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.Ospfv2Instance.Ospfv2LsdbExternal.Ospfv2RouterLsaLinks, ['link_type', 'link_id', 'link_data'], name, value) class LinkTopo(Entity): """ Link topology .. attribute:: mt_id Multi topology identifier **type**\: int **range:** 0..255 **config**\: False ..
attribute:: topo_metric Topology metric **type**\: int **range:** 0..65535 **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.Ospfv2Instance.Ospfv2LsdbExternal.Ospfv2RouterLsaLinks.LinkTopo, self).__init__() self.yang_name = "link-topo" self.yang_parent_name = "ospfv2-router-lsa-links" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([]) self._leafs = OrderedDict([ ('mt_id', (YLeaf(YType.uint8, 'mt-id'), ['int'])), ('topo_metric', (YLeaf(YType.uint16, 'topo-metric'), ['int'])), ]) self.mt_id = None self.topo_metric = None self._segment_path = lambda: "link-topo" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.Ospfv2Instance.Ospfv2LsdbExternal.Ospfv2RouterLsaLinks.LinkTopo, ['mt_id', 'topo_metric'], name, value) class UnsupportedLsa(Entity): """ The unsupported Link State Advertisements .. attribute:: lsa_data Link State Advertisement data **type**\: list of int **range:** 0..255 **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.Ospfv2Instance.Ospfv2LsdbExternal.UnsupportedLsa, self).__init__() self.yang_name = "unsupported-lsa" self.yang_parent_name = "ospfv2-lsdb-external" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([]) self._leafs = OrderedDict([ ('lsa_data', (YLeafList(YType.uint8, 'lsa-data'), ['int'])), ]) self.lsa_data = [] self._segment_path = lambda: "unsupported-lsa" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.Ospfv2Instance.Ospfv2LsdbExternal.UnsupportedLsa, ['lsa_data'], name, value) class RouterLsa(Entity): """ The router Link State Advertisements .. attribute:: router_lsa_bits Router Link State Advertisement bits **type**\: :py:class:`Ospfv2RouterLsaBits <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.Ospfv2RouterLsaBits>` **config**\: False .. attribute:: router_lsa_number_links Router Link State Advertisement number of links **type**\: int **range:** 0..65535 **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.Ospfv2Instance.Ospfv2LsdbExternal.RouterLsa, self).__init__() self.yang_name = "router-lsa" self.yang_parent_name = "ospfv2-lsdb-external" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([]) self._leafs = OrderedDict([ ('router_lsa_bits', (YLeaf(YType.bits, 'router-lsa-bits'), ['Bits'])), ('router_lsa_number_links', (YLeaf(YType.uint16, 'router-lsa-number-links'), ['int'])), ]) self.router_lsa_bits = Bits() self.router_lsa_number_links = None self._segment_path = lambda: "router-lsa" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.Ospfv2Instance.Ospfv2LsdbExternal.RouterLsa, ['router_lsa_bits', 'router_lsa_number_links'], name, value) class NetworkLsa(Entity): """ The network Link State Advertisements .. attribute:: network_lsa_mask Network Link State Advertisement mask **type**\: int **range:** 0..4294967295 **config**\: False .. 
attribute:: network_attached_routers Network attached routers **type**\: list of int **range:** 0..4294967295 **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.Ospfv2Instance.Ospfv2LsdbExternal.NetworkLsa, self).__init__() self.yang_name = "network-lsa" self.yang_parent_name = "ospfv2-lsdb-external" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([]) self._leafs = OrderedDict([ ('network_lsa_mask', (YLeaf(YType.uint32, 'network-lsa-mask'), ['int'])), ('network_attached_routers', (YLeafList(YType.uint32, 'network-attached-routers'), ['int'])), ]) self.network_lsa_mask = None self.network_attached_routers = [] self._segment_path = lambda: "network-lsa" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.Ospfv2Instance.Ospfv2LsdbExternal.NetworkLsa, ['network_lsa_mask', 'network_attached_routers'], name, value) class NetworkSummaryLsa(Entity): """ The network summary Link State Advertisements .. attribute:: summary_lsa_mask The summary Link State Advertisement mask **type**\: int **range:** 0..4294967295 **config**\: False .. attribute:: summary_topo The summary topology **type**\: list of :py:class:`SummaryTopo <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.Ospfv2Instance.Ospfv2LsdbExternal.NetworkSummaryLsa.SummaryTopo>` **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.Ospfv2Instance.Ospfv2LsdbExternal.NetworkSummaryLsa, self).__init__() self.yang_name = "network-summary-lsa" self.yang_parent_name = "ospfv2-lsdb-external" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([("summary-topo", ("summary_topo", OspfOperData.Ospfv2Instance.Ospfv2LsdbExternal.NetworkSummaryLsa.SummaryTopo))]) self._leafs = OrderedDict([ ('summary_lsa_mask', (YLeaf(YType.uint32, 'summary-lsa-mask'), ['int'])), ]) self.summary_lsa_mask = None self.summary_topo = YList(self) self._segment_path = lambda: "network-summary-lsa" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.Ospfv2Instance.Ospfv2LsdbExternal.NetworkSummaryLsa, ['summary_lsa_mask'], name, value) class SummaryTopo(Entity): """ The summary topology .. attribute:: mt_id Multi topology identifier **type**\: int **range:** 0..255 **config**\: False .. attribute:: topo_metric Topology Metric **type**\: int **range:** 0..4294967295 **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.Ospfv2Instance.Ospfv2LsdbExternal.NetworkSummaryLsa.SummaryTopo, self).__init__() self.yang_name = "summary-topo" self.yang_parent_name = "network-summary-lsa" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([]) self._leafs = OrderedDict([ ('mt_id', (YLeaf(YType.uint8, 'mt-id'), ['int'])), ('topo_metric', (YLeaf(YType.uint32, 'topo-metric'), ['int'])), ]) self.mt_id = None self.topo_metric = None self._segment_path = lambda: "summary-topo" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.Ospfv2Instance.Ospfv2LsdbExternal.NetworkSummaryLsa.SummaryTopo, ['mt_id', 'topo_metric'], name, value) class RouterSummaryLsa(Entity): """ The router summary Link State Advertisements .. 
attribute:: summary_lsa_mask The summary Link State Advertisement mask **type**\: int **range:** 0..4294967295 **config**\: False .. attribute:: summary_topo The summary topology **type**\: list of :py:class:`SummaryTopo <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.Ospfv2Instance.Ospfv2LsdbExternal.RouterSummaryLsa.SummaryTopo>` **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.Ospfv2Instance.Ospfv2LsdbExternal.RouterSummaryLsa, self).__init__() self.yang_name = "router-summary-lsa" self.yang_parent_name = "ospfv2-lsdb-external" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([("summary-topo", ("summary_topo", OspfOperData.Ospfv2Instance.Ospfv2LsdbExternal.RouterSummaryLsa.SummaryTopo))]) self._leafs = OrderedDict([ ('summary_lsa_mask', (YLeaf(YType.uint32, 'summary-lsa-mask'), ['int'])), ]) self.summary_lsa_mask = None self.summary_topo = YList(self) self._segment_path = lambda: "router-summary-lsa" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.Ospfv2Instance.Ospfv2LsdbExternal.RouterSummaryLsa, ['summary_lsa_mask'], name, value) class SummaryTopo(Entity): """ The summary topology .. attribute:: mt_id Multi topology identifier **type**\: int **range:** 0..255 **config**\: False .. attribute:: topo_metric Topology Metric **type**\: int **range:** 0..4294967295 **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.Ospfv2Instance.Ospfv2LsdbExternal.RouterSummaryLsa.SummaryTopo, self).__init__() self.yang_name = "summary-topo" self.yang_parent_name = "router-summary-lsa" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([]) self._leafs = OrderedDict([ ('mt_id', (YLeaf(YType.uint8, 'mt-id'), ['int'])), ('topo_metric', (YLeaf(YType.uint32, 'topo-metric'), ['int'])), ]) self.mt_id = None self.topo_metric = None self._segment_path = lambda: "summary-topo" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.Ospfv2Instance.Ospfv2LsdbExternal.RouterSummaryLsa.SummaryTopo, ['mt_id', 'topo_metric'], name, value) class ExternalLsa(Entity): """ The external Link State Advertisements .. attribute:: external_lsa_mask The mask for the external Link State Advertisement **type**\: int **range:** 0..4294967295 **config**\: False .. 
attribute:: external_topo The external topology Link State Advertisement **type**\: list of :py:class:`ExternalTopo <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.Ospfv2Instance.Ospfv2LsdbExternal.ExternalLsa.ExternalTopo>` **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.Ospfv2Instance.Ospfv2LsdbExternal.ExternalLsa, self).__init__() self.yang_name = "external-lsa" self.yang_parent_name = "ospfv2-lsdb-external" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([("external-topo", ("external_topo", OspfOperData.Ospfv2Instance.Ospfv2LsdbExternal.ExternalLsa.ExternalTopo))]) self._leafs = OrderedDict([ ('external_lsa_mask', (YLeaf(YType.uint32, 'external-lsa-mask'), ['int'])), ]) self.external_lsa_mask = None self.external_topo = YList(self) self._segment_path = lambda: "external-lsa" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.Ospfv2Instance.Ospfv2LsdbExternal.ExternalLsa, ['external_lsa_mask'], name, value) class ExternalTopo(Entity): """ The external topology Link State Advertisement .. attribute:: mt_id The multi topology identifier **type**\: int **range:** 0..255 **config**\: False .. attribute:: topo_metric_type The topoligy metric type associated with the Link State Advertisement **type**\: :py:class:`OspfExternalMetricType <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfExternalMetricType>` **config**\: False .. attribute:: topo_metric The topology metric **type**\: int **range:** 0..4294967295 **config**\: False .. attribute:: topo_forwarding_address The topology forwarding address **type**\: union of the below types: **type**\: str **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)? **type**\: str **pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)? **config**\: False .. 
attribute:: topo_route_tag The topology route tag **type**\: int **range:** 0..4294967295 **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.Ospfv2Instance.Ospfv2LsdbExternal.ExternalLsa.ExternalTopo, self).__init__() self.yang_name = "external-topo" self.yang_parent_name = "external-lsa" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([]) self._leafs = OrderedDict([ ('mt_id', (YLeaf(YType.uint8, 'mt-id'), ['int'])), ('topo_metric_type', (YLeaf(YType.enumeration, 'topo-metric-type'), [('ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper', 'OspfExternalMetricType', '')])), ('topo_metric', (YLeaf(YType.uint32, 'topo-metric'), ['int'])), ('topo_forwarding_address', (YLeaf(YType.str, 'topo-forwarding-address'), ['str','str'])), ('topo_route_tag', (YLeaf(YType.uint32, 'topo-route-tag'), ['int'])), ]) self.mt_id = None self.topo_metric_type = None self.topo_metric = None self.topo_forwarding_address = None self.topo_route_tag = None self._segment_path = lambda: "external-topo" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.Ospfv2Instance.Ospfv2LsdbExternal.ExternalLsa.ExternalTopo, ['mt_id', 'topo_metric_type', 'topo_metric', 'topo_forwarding_address', 'topo_route_tag'], name, value) class NssaLsa(Entity): """ The Not So Stubby Area Link state advertisements .. attribute:: external_lsa_mask The mask for the external Link State Advertisement **type**\: int **range:** 0..4294967295 **config**\: False .. attribute:: external_topo The external topology Link State Advertisement **type**\: list of :py:class:`ExternalTopo <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfOperData.Ospfv2Instance.Ospfv2LsdbExternal.NssaLsa.ExternalTopo>` **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.Ospfv2Instance.Ospfv2LsdbExternal.NssaLsa, self).__init__() self.yang_name = "nssa-lsa" self.yang_parent_name = "ospfv2-lsdb-external" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([("external-topo", ("external_topo", OspfOperData.Ospfv2Instance.Ospfv2LsdbExternal.NssaLsa.ExternalTopo))]) self._leafs = OrderedDict([ ('external_lsa_mask', (YLeaf(YType.uint32, 'external-lsa-mask'), ['int'])), ]) self.external_lsa_mask = None self.external_topo = YList(self) self._segment_path = lambda: "nssa-lsa" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.Ospfv2Instance.Ospfv2LsdbExternal.NssaLsa, ['external_lsa_mask'], name, value) class ExternalTopo(Entity): """ The external topology Link State Advertisement .. attribute:: mt_id The multi topology identifier **type**\: int **range:** 0..255 **config**\: False .. attribute:: topo_metric_type The topoligy metric type associated with the Link State Advertisement **type**\: :py:class:`OspfExternalMetricType <ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper.OspfExternalMetricType>` **config**\: False .. attribute:: topo_metric The topology metric **type**\: int **range:** 0..4294967295 **config**\: False .. attribute:: topo_forwarding_address The topology forwarding address **type**\: union of the below types: **type**\: str **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)? 
**type**\: str **pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)? **config**\: False .. attribute:: topo_route_tag The topology route tag **type**\: int **range:** 0..4294967295 **config**\: False """ _prefix = 'ospf-ios-xe-oper' _revision = '2018-02-01' def __init__(self): super(OspfOperData.Ospfv2Instance.Ospfv2LsdbExternal.NssaLsa.ExternalTopo, self).__init__() self.yang_name = "external-topo" self.yang_parent_name = "nssa-lsa" self.is_top_level_class = False self.has_list_ancestor = True self.ylist_key_names = [] self._child_classes = OrderedDict([]) self._leafs = OrderedDict([ ('mt_id', (YLeaf(YType.uint8, 'mt-id'), ['int'])), ('topo_metric_type', (YLeaf(YType.enumeration, 'topo-metric-type'), [('ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper', 'OspfExternalMetricType', '')])), ('topo_metric', (YLeaf(YType.uint32, 'topo-metric'), ['int'])), ('topo_forwarding_address', (YLeaf(YType.str, 'topo-forwarding-address'), ['str','str'])), ('topo_route_tag', (YLeaf(YType.uint32, 'topo-route-tag'), ['int'])), ]) self.mt_id = None self.topo_metric_type = None self.topo_metric = None self.topo_forwarding_address = None self.topo_route_tag = None self._segment_path = lambda: "external-topo" self._is_frozen = True def __setattr__(self, name, value): self._perform_setattr(OspfOperData.Ospfv2Instance.Ospfv2LsdbExternal.NssaLsa.ExternalTopo, ['mt_id', 'topo_metric_type', 'topo_metric', 'topo_forwarding_address', 'topo_route_tag'], name, value) def clone_ptr(self): self._top_entity = OspfOperData() return self._top_entity
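For orientation, a minimal usage sketch of the generated bindings above: it builds one ospfv2-lsdb-external list entry and shows how the list keys feed the _segment_path() XPath predicates. This is an illustration, not part of the generated module; it assumes the ydk runtime and the ydk-models-cisco-ios-xe bundle are installed, and all key values below are made up (these classes model operational data, so in practice they are populated by a read, not by hand).

from ydk.models.cisco_ios_xe.Cisco_IOS_XE_ospf_oper import OspfOperData

# One entry of the ospfv2-lsdb-external list; the three leafs below are its
# YANG list keys (arbitrary example values).
lsdb = OspfOperData.Ospfv2Instance.Ospfv2LsdbExternal()
lsdb.lsa_type = 5
lsdb.lsa_id = 16843009                 # 1.1.1.1 as a uint32
lsdb.advertising_router = 3232235521   # 192.168.0.1 as a uint32

# Keyed child list: ospfv2-router-lsa-links.
link = OspfOperData.Ospfv2Instance.Ospfv2LsdbExternal.Ospfv2RouterLsaLinks()
link.link_type = 1
link.link_id = 167837697
link.link_data = 4294967295
lsdb.ospfv2_router_lsa_links.append(link)

# Per-link topology entry.
topo = OspfOperData.Ospfv2Instance.Ospfv2LsdbExternal.Ospfv2RouterLsaLinks.LinkTopo()
topo.mt_id = 0
topo.topo_metric = 10
link.link_topo.append(topo)

# The keys become XPath predicates in the segment path (illustrative output):
print(lsdb._segment_path())
# ospfv2-lsdb-external[lsa-type='5'][lsa-id='16843009'][advertising-router='3232235521']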
[ [ [ 249, 260 ], [ 8918, 8929 ], [ 9081, 9092 ], [ 10531, 10542 ], [ 10692, 10703 ], [ 13627, 13638 ], [ 13931, 13942 ], [ 16613, 16624 ], [ 16860, 16871 ], [ 28095, 28106 ], [ 29066, 29077 ], [ 35342, 35353 ], [ 35400, 35411 ], [ 38573, 38584 ], [ 38631, 38642 ], [ 41171, 41182 ], [ 41229, 41240 ], [ 43569, 43580 ], [ 43627, 43638 ], [ 46624, 46635 ], [ 46826, 46837 ], [ 51599, 51610 ], [ 51661, 51672 ], [ 58661, 58672 ], [ 58818, 58829 ], [ 61980, 61991 ], [ 62042, 62053 ], [ 64890, 64901 ], [ 65208, 65219 ], [ 75840, 75851 ], [ 77612, 77623 ], [ 82560, 82571 ], [ 82893, 82904 ], [ 89656, 89667 ], [ 89726, 89737 ], [ 96552, 96563 ], [ 96763, 96774 ], [ 101120, 101131 ], [ 101194, 101205 ], [ 104787, 104798 ], [ 105010, 105021 ], [ 108053, 108064 ], [ 108123, 108134 ], [ 110769, 110780 ], [ 110835, 110846 ], [ 114894, 114905 ], [ 114960, 114971 ], [ 118332, 118343 ], [ 118398, 118409 ], [ 121302, 121313 ], [ 121641, 121652 ], [ 125750, 125761 ], [ 125971, 125982 ], [ 130806, 130817 ], [ 130880, 130891 ], [ 138005, 138016 ], [ 139112, 139123 ], [ 144410, 144421 ], [ 144484, 144495 ], [ 147711, 147722 ], [ 147785, 147796 ], [ 151405, 151416 ], [ 151479, 151490 ], [ 158062, 158073 ], [ 158286, 158297 ], [ 162277, 162288 ], [ 162355, 162366 ], [ 164722, 164733 ], [ 164973, 164984 ], [ 171643, 171654 ], [ 171880, 171891 ], [ 176088, 176099 ], [ 176170, 176181 ], [ 180672, 180683 ], [ 180746, 180757 ], [ 185875, 185886 ], [ 185949, 185960 ], [ 190390, 190401 ], [ 190456, 190467 ], [ 193611, 193622 ], [ 193677, 193688 ], [ 196131, 196142 ], [ 196197, 196208 ], [ 198381, 198392 ], [ 198447, 198458 ], [ 205225, 205236 ], [ 205291, 205302 ], [ 209631, 209642 ], [ 209697, 209708 ], [ 217357, 217368 ], [ 218543, 218554 ], [ 222669, 222680 ], [ 223002, 223013 ], [ 229765, 229776 ], [ 229835, 229846 ], [ 236661, 236672 ], [ 236872, 236883 ], [ 241229, 241240 ], [ 241303, 241314 ], [ 244896, 244907 ], [ 245119, 245130 ], [ 248162, 248173 ], [ 248232, 248243 ], [ 250878, 250889 ], [ 250944, 250955 ], [ 255003, 255014 ], [ 255069, 255080 ], [ 258219, 258230 ], [ 258552, 258563 ], [ 262638, 262649 ], [ 262856, 262867 ], [ 267682, 267693 ], [ 267756, 267767 ], [ 274850, 274861 ], [ 275936, 275947 ], [ 281207, 281218 ], [ 281281, 281292 ], [ 284502, 284513 ], [ 284576, 284587 ], [ 288190, 288201 ], [ 288264, 288275 ], [ 294838, 294849 ], [ 295059, 295070 ], [ 299041, 299052 ], [ 299119, 299130 ], [ 301477, 301488 ], [ 301725, 301736 ], [ 308383, 308394 ], [ 308617, 308628 ], [ 312816, 312827 ], [ 312898, 312909 ], [ 317394, 317405 ], [ 317468, 317479 ], [ 322591, 322602 ], [ 322665, 322676 ], [ 327103, 327114 ], [ 327169, 327180 ], [ 330311, 330322 ], [ 330377, 330388 ], [ 332822, 332833 ], [ 332888, 332899 ], [ 334852, 334863 ], [ 334910, 334921 ], [ 336918, 336929 ], [ 337083, 337094 ], [ 343757, 343768 ], [ 344795, 344806 ], [ 348593, 348604 ], [ 348886, 348897 ], [ 355113, 355124 ], [ 355179, 355190 ], [ 361575, 361586 ], [ 361764, 361775 ], [ 365843, 365854 ], [ 365913, 365924 ], [ 369216, 369227 ], [ 369417, 369428 ], [ 372240, 372251 ], [ 372306, 372317 ], [ 374744, 374755 ], [ 374806, 374817 ], [ 378561, 378572 ], [ 378623, 378634 ], [ 381529, 381540 ], [ 381822, 381833 ], [ 385598, 385609 ], [ 385794, 385805 ], [ 390254, 390265 ], [ 390324, 390335 ], [ 396872, 396883 ], [ 397828, 397839 ], [ 402689, 402700 ], [ 402759, 402770 ], [ 405752, 405763 ], [ 405822, 405833 ], [ 409192, 409203 ], [ 409262, 409273 ], [ 415386, 415397 ], [ 415585, 415596 ], [ 419333, 419344 ], [ 419407, 419418 ], [ 421591, 
421602 ], [ 421817, 421828 ], [ 428023, 428034 ], [ 428235, 428246 ], [ 432200, 432211 ], [ 432278, 432289 ], [ 436486, 436497 ], [ 436556, 436567 ], [ 441359, 441370 ], [ 441429, 441440 ], [ 445535, 445546 ], [ 445597, 445608 ], [ 448523, 448534 ], [ 448585, 448596 ], [ 450838, 450849 ], [ 450900, 450911 ], [ 453356, 453367 ], [ 453612, 453623 ], [ 462472, 462483 ], [ 463912, 463923 ], [ 468151, 468162 ], [ 468422, 468433 ], [ 474149, 474160 ], [ 474211, 474222 ], [ 480204, 480215 ], [ 480380, 480391 ], [ 484208, 484219 ], [ 484274, 484285 ], [ 487314, 487325 ], [ 487502, 487513 ], [ 490123, 490134 ], [ 490185, 490196 ], [ 492433, 492444 ], [ 492491, 492502 ], [ 495960, 495971 ], [ 496018, 496029 ], [ 498888, 498899 ], [ 498946, 498957 ], [ 501414, 501425 ], [ 501691, 501702 ], [ 505225, 505236 ], [ 505411, 505422 ], [ 509541, 509552 ], [ 509607, 509618 ], [ 515721, 515732 ], [ 516631, 516642 ], [ 521190, 521201 ], [ 521256, 521267 ], [ 524045, 524056 ], [ 524111, 524122 ], [ 527261, 527272 ], [ 527327, 527338 ], [ 533037, 533048 ], [ 533226, 533237 ], [ 536776, 536787 ], [ 536846, 536857 ], [ 538892, 538903 ], [ 539108, 539119 ], [ 544910, 544921 ], [ 545112, 545123 ], [ 548879, 548890 ], [ 548953, 548964 ], [ 552897, 552908 ], [ 552963, 552974 ], [ 557470, 557481 ], [ 557536, 557547 ], [ 561331, 561342 ], [ 561389, 561400 ], [ 564130, 564141 ], [ 564188, 564199 ], [ 566276, 566287 ], [ 566334, 566345 ], [ 568208, 568219 ], [ 568266, 568277 ], [ 574062, 574073 ], [ 574120, 574131 ], [ 577878, 577889 ], [ 577936, 577947 ], [ 584337, 584348 ], [ 585299, 585310 ], [ 588823, 588834 ], [ 589094, 589105 ], [ 594821, 594832 ], [ 594883, 594894 ], [ 600876, 600887 ], [ 601052, 601063 ], [ 604880, 604891 ], [ 604946, 604957 ], [ 607986, 607997 ], [ 608174, 608185 ], [ 610795, 610806 ], [ 610857, 610868 ], [ 613105, 613116 ], [ 613163, 613174 ], [ 616632, 616643 ], [ 616690, 616701 ], [ 619388, 619399 ], [ 619659, 619670 ], [ 623170, 623181 ], [ 623353, 623364 ], [ 627474, 627485 ], [ 627540, 627551 ], [ 633623, 633634 ], [ 634512, 634523 ], [ 639044, 639055 ], [ 639110, 639121 ], [ 641893, 641904 ], [ 641959, 641970 ], [ 645103, 645114 ], [ 645169, 645180 ], [ 650870, 650881 ], [ 651056, 651067 ], [ 654597, 654608 ], [ 654667, 654678 ], [ 656704, 656715 ], [ 656917, 656928 ], [ 662707, 662718 ], [ 662906, 662917 ], [ 666664, 666675 ], [ 666738, 666749 ], [ 670676, 670687 ], [ 670742, 670753 ], [ 675243, 675254 ], [ 675309, 675320 ], [ 679101, 679112 ], [ 679159, 679170 ], [ 681887, 681898 ], [ 681945, 681956 ], [ 684024, 684035 ], [ 684082, 684093 ], [ 685746, 685757 ], [ 685796, 685807 ], [ 688034, 688045 ], [ 688248, 688259 ], [ 690646, 690657 ], [ 690889, 690900 ], [ 697380, 697391 ], [ 698308, 698319 ], [ 703795, 703806 ], [ 703962, 703973 ], [ 706420, 706431 ], [ 706478, 706489 ], [ 708246, 708257 ], [ 708300, 708311 ], [ 710234, 710245 ], [ 710288, 710299 ], [ 712409, 712420 ], [ 712463, 712474 ], [ 714703, 714714 ], [ 714876, 714887 ], [ 716953, 716964 ], [ 717011, 717022 ], [ 719204, 719215 ], [ 719376, 719387 ], [ 721449, 721460 ], [ 721507, 721518 ], [ 723711, 723722 ], [ 723881, 723892 ], [ 727672, 727683 ], [ 727730, 727741 ], [ 730577, 730588 ], [ 730743, 730754 ], [ 734518, 734529 ], [ 734576, 734587 ], [ 743557, 743568 ], [ 743925, 743936 ], [ 748977, 748988 ], [ 749031, 749042 ], [ 751434, 751445 ], [ 751689, 751700 ], [ 754464, 754475 ], [ 754522, 754533 ], [ 756568, 756579 ], [ 756626, 756637 ], [ 762698, 762709 ], [ 762752, 762763 ], [ 769908, 769919 ], [ 770776, 770787 ], [ 
775829, 775840 ], [ 775985, 775996 ], [ 778245, 778256 ], [ 778299, 778310 ], [ 779921, 779932 ], [ 779971, 779982 ], [ 781739, 781750 ], [ 781789, 781800 ], [ 783728, 783739 ], [ 783778, 783789 ], [ 785837, 785848 ], [ 785999, 786010 ], [ 787894, 787905 ], [ 787948, 787959 ], [ 789960, 789971 ], [ 790121, 790132 ], [ 792012, 792023 ], [ 792066, 792077 ], [ 794089, 794100 ], [ 794248, 794259 ], [ 797733, 797744 ], [ 797787, 797798 ], [ 800429, 800440 ], [ 800584, 800595 ], [ 804053, 804064 ], [ 804107, 804118 ] ], [ [ 284, 290 ], [ 7973, 7979 ], [ 9464, 9470 ], [ 11301, 11307 ], [ 15020, 15026 ], [ 17477, 17483 ], [ 33916, 33922 ], [ 36112, 36118 ], [ 39542, 39548 ], [ 42171, 42177 ], [ 44320, 44326 ], [ 48148, 48154 ], [ 53456, 53462 ], [ 60309, 60315 ], [ 62854, 62860 ], [ 65953, 65959 ], [ 80659, 80665 ], [ 83951, 83957 ], [ 91962, 91968 ], [ 98397, 98403 ], [ 102145, 102151 ], [ 106144, 106150 ], [ 109018, 109024 ], [ 111682, 111688 ], [ 116226, 116232 ], [ 119398, 119404 ], [ 122712, 122718 ], [ 127202, 127208 ], [ 132475, 132481 ], [ 142382, 142388 ], [ 145450, 145456 ], [ 148901, 148907 ], [ 152646, 152652 ], [ 160725, 160731 ], [ 163108, 163114 ], [ 165871, 165877 ], [ 174434, 174440 ], [ 176965, 176971 ], [ 182175, 182181 ], [ 187410, 187416 ], [ 192049, 192055 ], [ 194561, 194567 ], [ 197076, 197082 ], [ 199113, 199119 ], [ 207527, 207533 ], [ 210688, 210694 ], [ 220768, 220774 ], [ 224060, 224066 ], [ 232071, 232077 ], [ 238506, 238512 ], [ 242254, 242260 ], [ 246253, 246259 ], [ 249127, 249133 ], [ 251791, 251797 ], [ 256328, 256334 ], [ 259610, 259616 ], [ 264081, 264087 ], [ 269348, 269354 ], [ 279182, 279188 ], [ 282244, 282250 ], [ 285689, 285695 ], [ 289428, 289434 ], [ 297492, 297498 ], [ 299869, 299875 ], [ 302617, 302623 ], [ 311165, 311171 ], [ 313690, 313696 ], [ 318894, 318900 ], [ 324123, 324129 ], [ 328758, 328764 ], [ 331252, 331258 ], [ 333765, 333771 ], [ 335496, 335502 ], [ 337706, 337712 ], [ 346862, 346868 ], [ 349846, 349852 ], [ 357293, 357299 ], [ 363294, 363300 ], [ 366806, 366812 ], [ 370481, 370487 ], [ 373143, 373149 ], [ 375595, 375601 ], [ 379808, 379814 ], [ 382782, 382788 ], [ 386931, 386937 ], [ 391826, 391832 ], [ 400806, 400812 ], [ 403664, 403670 ], [ 406869, 406875 ], [ 410360, 410366 ], [ 417890, 417896 ], [ 420107, 420113 ], [ 422641, 422647 ], [ 430655, 430661 ], [ 433020, 433026 ], [ 437908, 437914 ], [ 442813, 442819 ], [ 447104, 447110 ], [ 449402, 449408 ], [ 451712, 451718 ], [ 454245, 454251 ], [ 466563, 466569 ], [ 469311, 469317 ], [ 476212, 476218 ], [ 481824, 481830 ], [ 485118, 485124 ], [ 488505, 488511 ], [ 490973, 490979 ], [ 493231, 493237 ], [ 497145, 497151 ], [ 499823, 499829 ], [ 502593, 502599 ], [ 506484, 506490 ], [ 511031, 511037 ], [ 519437, 519443 ], [ 522115, 522121 ], [ 525104, 525110 ], [ 528371, 528377 ], [ 535427, 535433 ], [ 537508, 537514 ], [ 539888, 539894 ], [ 547428, 547434 ], [ 549657, 549663 ], [ 554253, 554259 ], [ 558858, 558864 ], [ 562827, 562833 ], [ 564965, 564971 ], [ 567106, 567112 ], [ 568841, 568847 ], [ 576137, 576143 ], [ 578804, 578810 ], [ 587235, 587241 ], [ 589983, 589989 ], [ 596884, 596890 ], [ 602496, 602502 ], [ 605790, 605796 ], [ 609177, 609183 ], [ 611645, 611651 ], [ 613903, 613909 ], [ 617810, 617816 ], [ 620548, 620554 ], [ 624420, 624426 ], [ 628961, 628967 ], [ 637294, 637300 ], [ 639966, 639972 ], [ 642949, 642955 ], [ 646210, 646216 ], [ 653251, 653257 ], [ 655326, 655332 ], [ 657691, 657697 ], [ 665216, 665222 ], [ 667439, 667445 ], [ 672029, 672035 ], [ 676628, 676634 ], 
[ 680593, 680599 ], [ 682713, 682719 ], [ 684848, 684854 ], [ 686285, 686291 ], [ 689145, 689151 ], [ 691467, 691473 ], [ 701755, 701761 ], [ 704980, 704986 ], [ 707196, 707202 ], [ 708826, 708832 ], [ 711036, 711042 ], [ 713229, 713235 ], [ 715502, 715508 ], [ 717734, 717740 ], [ 720000, 720006 ], [ 722224, 722230 ], [ 724500, 724506 ], [ 729092, 729098 ], [ 731354, 731360 ], [ 735939, 735945 ], [ 747667, 747673 ], [ 749676, 749682 ], [ 752681, 752687 ], [ 755466, 755472 ], [ 757187, 757193 ], [ 764639, 764645 ], [ 773995, 774001 ], [ 776944, 776950 ], [ 778970, 778976 ], [ 780458, 780464 ], [ 782490, 782496 ], [ 784497, 784503 ], [ 786582, 786588 ], [ 788624, 788630 ], [ 790702, 790708 ], [ 792736, 792742 ], [ 794824, 794830 ], [ 799078, 799084 ], [ 801152, 801158 ] ], [ [ 292, 302 ] ], [ [ 304, 312 ] ], [ [ 314, 318 ], [ 564, 568 ], [ 749, 753 ], [ 813, 817 ], [ 871, 875 ], [ 1492, 1496 ], [ 1547, 1551 ], [ 1602, 1606 ], [ 1657, 1661 ], [ 1722, 1726 ], [ 1788, 1792 ], [ 1847, 1851 ], [ 1902, 1906 ], [ 1954, 1958 ], [ 2222, 2226 ], [ 2289, 2293 ], [ 2362, 2366 ], [ 2428, 2432 ], [ 2496, 2500 ], [ 2700, 2704 ], [ 2770, 2774 ], [ 2834, 2838 ], [ 3259, 3263 ], [ 3317, 3321 ], [ 3385, 3389 ], [ 3454, 3458 ], [ 3517, 3521 ], [ 3777, 3781 ], [ 3850, 3854 ], [ 4219, 4223 ], [ 4284, 4288 ], [ 4362, 4366 ], [ 4439, 4443 ], [ 4767, 4771 ], [ 4833, 4837 ], [ 4892, 4896 ], [ 5870, 5874 ], [ 5954, 5958 ], [ 6041, 6045 ], [ 6135, 6139 ], [ 6236, 6240 ], [ 6324, 6328 ], [ 6404, 6408 ], [ 6487, 6491 ], [ 6555, 6559 ], [ 7171, 7175 ], [ 7255, 7259 ], [ 7326, 7330 ], [ 7402, 7406 ], [ 7485, 7489 ], [ 7568, 7572 ], [ 7641, 7645 ], [ 7720, 7724 ], [ 7812, 7816 ], [ 7902, 7906 ] ], [ [ 320, 325 ], [ 10741, 10746 ], [ 13979, 13984 ], [ 14123, 14128 ], [ 14204, 14209 ], [ 16917, 16922 ], [ 29124, 29129 ], [ 29207, 29212 ], [ 29369, 29374 ], [ 29462, 29467 ], [ 29557, 29562 ], [ 29642, 29647 ], [ 29730, 29735 ], [ 29827, 29832 ], [ 29929, 29934 ], [ 30032, 30037 ], [ 30126, 30131 ], [ 30211, 30216 ], [ 30304, 30309 ], [ 30397, 30402 ], [ 30478, 30483 ], [ 30561, 30566 ], [ 30644, 30649 ], [ 30735, 30740 ], [ 30817, 30822 ], [ 30895, 30900 ], [ 30979, 30984 ], [ 35471, 35476 ], [ 35562, 35567 ], [ 38696, 38701 ], [ 38784, 38789 ], [ 38875, 38880 ], [ 41305, 41310 ], [ 41406, 41411 ], [ 41507, 41512 ], [ 43692, 43697 ], [ 43779, 43784 ], [ 46886, 46891 ], [ 46968, 46973 ], [ 47058, 47063 ], [ 47146, 47151 ], [ 51735, 51740 ], [ 51838, 51843 ], [ 51932, 51937 ], [ 52019, 52024 ], [ 52113, 52118 ], [ 52214, 52219 ], [ 52317, 52322 ], [ 52420, 52425 ], [ 58887, 58892 ], [ 58982, 58987 ], [ 59068, 59073 ], [ 59150, 59155 ], [ 59235, 59240 ], [ 62119, 62124 ], [ 62228, 62233 ], [ 65274, 65279 ], [ 77680, 77685 ], [ 77774, 77779 ], [ 77882, 77887 ], [ 77991, 77996 ], [ 78083, 78088 ], [ 78182, 78187 ], [ 89802, 89807 ], [ 89908, 89913 ], [ 90013, 90018 ], [ 90111, 90116 ], [ 90204, 90209 ], [ 90304, 90309 ], [ 90407, 90412 ], [ 90505, 90510 ], [ 90602, 90607 ], [ 90706, 90711 ], [ 96845, 96850 ], [ 96955, 96960 ], [ 97069, 97074 ], [ 97188, 97193 ], [ 101280, 101285 ], [ 101404, 101409 ], [ 105083, 105088 ], [ 105181, 105186 ], [ 105276, 105281 ], [ 108198, 108203 ], [ 108295, 108300 ], [ 110906, 110911 ], [ 110999, 111004 ], [ 115031, 115036 ], [ 115124, 115129 ], [ 115230, 115235 ], [ 115351, 115356 ], [ 118468, 118473 ], [ 118560, 118565 ], [ 118657, 118662 ], [ 126047, 126052 ], [ 126157, 126162 ], [ 130957, 130962 ], [ 131054, 131059 ], [ 131158, 131163 ], [ 131265, 131270 ], [ 131367, 131372 ], [ 131468, 131473 
], [ 139198, 139203 ], [ 139313, 139318 ], [ 144577, 144582 ], [ 144697, 144702 ], [ 147865, 147870 ], [ 147970, 147975 ], [ 148083, 148088 ], [ 151559, 151564 ], [ 151676, 151681 ], [ 151801, 151806 ], [ 158366, 158371 ], [ 158480, 158485 ], [ 158603, 158608 ], [ 158728, 158733 ], [ 158856, 158861 ], [ 158985, 158990 ], [ 159117, 159122 ], [ 162439, 162444 ], [ 171964, 171969 ], [ 172082, 172087 ], [ 172209, 172214 ], [ 172338, 172343 ], [ 172470, 172475 ], [ 172603, 172608 ], [ 172739, 172744 ], [ 176258, 176263 ], [ 180832, 180837 ], [ 180961, 180966 ], [ 181097, 181102 ], [ 181216, 181221 ], [ 186041, 186046 ], [ 186173, 186178 ], [ 186308, 186313 ], [ 186437, 186442 ], [ 190534, 190539 ], [ 190649, 190654 ], [ 190770, 190775 ], [ 190874, 190879 ], [ 190965, 190970 ], [ 193749, 193754 ], [ 193848, 193853 ], [ 196269, 196274 ], [ 196368, 196373 ], [ 198517, 198522 ], [ 205366, 205371 ], [ 205463, 205468 ], [ 205574, 205579 ], [ 205703, 205708 ], [ 205819, 205824 ], [ 205923, 205928 ], [ 206045, 206050 ], [ 206174, 206179 ], [ 206290, 206295 ], [ 209767, 209772 ], [ 209859, 209864 ], [ 209956, 209961 ], [ 218613, 218618 ], [ 218709, 218714 ], [ 218817, 218822 ], [ 218926, 218931 ], [ 229911, 229916 ], [ 230017, 230022 ], [ 230122, 230127 ], [ 230220, 230225 ], [ 230313, 230318 ], [ 230413, 230418 ], [ 230516, 230521 ], [ 230614, 230619 ], [ 230711, 230716 ], [ 230815, 230820 ], [ 236954, 236959 ], [ 237064, 237069 ], [ 237178, 237183 ], [ 237297, 237302 ], [ 241389, 241394 ], [ 241513, 241518 ], [ 245192, 245197 ], [ 245290, 245295 ], [ 245385, 245390 ], [ 248307, 248312 ], [ 248404, 248409 ], [ 251015, 251020 ], [ 251108, 251113 ], [ 255140, 255145 ], [ 255233, 255238 ], [ 255339, 255344 ], [ 255460, 255465 ], [ 262932, 262937 ], [ 263042, 263047 ], [ 267833, 267838 ], [ 267930, 267935 ], [ 268034, 268039 ], [ 268141, 268146 ], [ 268243, 268248 ], [ 268344, 268349 ], [ 276022, 276027 ], [ 276137, 276142 ], [ 281374, 281379 ], [ 281494, 281499 ], [ 284656, 284661 ], [ 284761, 284766 ], [ 284874, 284879 ], [ 288344, 288349 ], [ 288461, 288466 ], [ 288586, 288591 ], [ 295139, 295144 ], [ 295253, 295258 ], [ 295376, 295381 ], [ 295501, 295506 ], [ 295629, 295634 ], [ 295758, 295763 ], [ 295890, 295895 ], [ 299203, 299208 ], [ 308701, 308706 ], [ 308819, 308824 ], [ 308946, 308951 ], [ 309075, 309080 ], [ 309207, 309212 ], [ 309340, 309345 ], [ 309476, 309481 ], [ 312986, 312991 ], [ 317554, 317559 ], [ 317683, 317688 ], [ 317819, 317824 ], [ 317938, 317943 ], [ 322757, 322762 ], [ 322889, 322894 ], [ 323024, 323029 ], [ 323153, 323158 ], [ 327247, 327252 ], [ 327362, 327367 ], [ 327483, 327488 ], [ 327587, 327592 ], [ 327678, 327683 ], [ 330449, 330454 ], [ 330548, 330553 ], [ 332960, 332965 ], [ 333059, 333064 ], [ 334972, 334977 ], [ 337145, 337150 ], [ 344861, 344866 ], [ 344953, 344958 ], [ 345057, 345062 ], [ 345162, 345167 ], [ 355251, 355256 ], [ 355353, 355358 ], [ 355454, 355459 ], [ 355548, 355553 ], [ 355637, 355642 ], [ 355733, 355738 ], [ 355832, 355837 ], [ 355926, 355931 ], [ 356019, 356024 ], [ 356119, 356124 ], [ 361842, 361847 ], [ 361948, 361953 ], [ 362058, 362063 ], [ 362173, 362178 ], [ 365995, 366000 ], [ 366115, 366120 ], [ 369486, 369491 ], [ 369580, 369585 ], [ 369671, 369676 ], [ 372377, 372382 ], [ 372470, 372475 ], [ 374873, 374878 ], [ 374962, 374967 ], [ 378690, 378695 ], [ 378779, 378784 ], [ 378881, 378886 ], [ 378998, 379003 ], [ 385866, 385871 ], [ 385972, 385977 ], [ 390397, 390402 ], [ 390490, 390495 ], [ 390590, 390595 ], [ 390693, 390698 ], [ 390791, 
390796 ], [ 390888, 390893 ], [ 397910, 397915 ], [ 398021, 398026 ], [ 402848, 402853 ], [ 402964, 402969 ], [ 405898, 405903 ], [ 405999, 406004 ], [ 406108, 406113 ], [ 409338, 409343 ], [ 409451, 409456 ], [ 409572, 409577 ], [ 415661, 415666 ], [ 415771, 415776 ], [ 415890, 415895 ], [ 416011, 416016 ], [ 416135, 416140 ], [ 416260, 416265 ], [ 416388, 416393 ], [ 419487, 419492 ], [ 428315, 428320 ], [ 428429, 428434 ], [ 428552, 428557 ], [ 428677, 428682 ], [ 428805, 428810 ], [ 428934, 428939 ], [ 429066, 429071 ], [ 432362, 432367 ], [ 436638, 436643 ], [ 436763, 436768 ], [ 436895, 436900 ], [ 437010, 437015 ], [ 441517, 441522 ], [ 441645, 441650 ], [ 441776, 441781 ], [ 441901, 441906 ], [ 445671, 445676 ], [ 445782, 445787 ], [ 445899, 445904 ], [ 445999, 446004 ], [ 446086, 446091 ], [ 448653, 448658 ], [ 448748, 448753 ], [ 450968, 450973 ], [ 451063, 451068 ], [ 453670, 453675 ], [ 463972, 463977 ], [ 464058, 464063 ], [ 464158, 464163 ], [ 464259, 464264 ], [ 464343, 464348 ], [ 464434, 464439 ], [ 474279, 474284 ], [ 474377, 474382 ], [ 474474, 474479 ], [ 474564, 474569 ], [ 474649, 474654 ], [ 474741, 474746 ], [ 474836, 474841 ], [ 474926, 474931 ], [ 475015, 475020 ], [ 475111, 475116 ], [ 480454, 480459 ], [ 480556, 480561 ], [ 480662, 480667 ], [ 480773, 480778 ], [ 484352, 484357 ], [ 484468, 484473 ], [ 487567, 487572 ], [ 487657, 487662 ], [ 487744, 487749 ], [ 490252, 490257 ], [ 490341, 490346 ], [ 492554, 492559 ], [ 492639, 492644 ], [ 496081, 496086 ], [ 496166, 496171 ], [ 496264, 496269 ], [ 496377, 496382 ], [ 499008, 499013 ], [ 499092, 499097 ], [ 499181, 499186 ], [ 505479, 505484 ], [ 505581, 505586 ], [ 509676, 509681 ], [ 509765, 509770 ], [ 509861, 509866 ], [ 509960, 509965 ], [ 510054, 510059 ], [ 510147, 510152 ], [ 516709, 516714 ], [ 516816, 516821 ], [ 521341, 521346 ], [ 521453, 521458 ], [ 524183, 524188 ], [ 524280, 524285 ], [ 524385, 524390 ], [ 527399, 527404 ], [ 527508, 527513 ], [ 527625, 527630 ], [ 533298, 533303 ], [ 533404, 533409 ], [ 533519, 533524 ], [ 533636, 533641 ], [ 533756, 533761 ], [ 533877, 533882 ], [ 534001, 534006 ], [ 536922, 536927 ], [ 545188, 545193 ], [ 545298, 545303 ], [ 545417, 545422 ], [ 545538, 545543 ], [ 545662, 545667 ], [ 545787, 545792 ], [ 545915, 545920 ], [ 549033, 549038 ], [ 553041, 553046 ], [ 553162, 553167 ], [ 553290, 553295 ], [ 553401, 553406 ], [ 557620, 557625 ], [ 557744, 557749 ], [ 557871, 557876 ], [ 557992, 557997 ], [ 561459, 561464 ], [ 561566, 561571 ], [ 561679, 561684 ], [ 561775, 561780 ], [ 561858, 561863 ], [ 564252, 564257 ], [ 564343, 564348 ], [ 566398, 566403 ], [ 566489, 566494 ], [ 568328, 568333 ], [ 574187, 574192 ], [ 574276, 574281 ], [ 574379, 574384 ], [ 574500, 574505 ], [ 574608, 574613 ], [ 574704, 574709 ], [ 574818, 574823 ], [ 574939, 574944 ], [ 575047, 575052 ], [ 577998, 578003 ], [ 578082, 578087 ], [ 578171, 578176 ], [ 585361, 585366 ], [ 585449, 585454 ], [ 585549, 585554 ], [ 585650, 585655 ], [ 594951, 594956 ], [ 595049, 595054 ], [ 595146, 595151 ], [ 595236, 595241 ], [ 595321, 595326 ], [ 595413, 595418 ], [ 595508, 595513 ], [ 595598, 595603 ], [ 595687, 595692 ], [ 595783, 595788 ], [ 601126, 601131 ], [ 601228, 601233 ], [ 601334, 601339 ], [ 601445, 601450 ], [ 605024, 605029 ], [ 605140, 605145 ], [ 608239, 608244 ], [ 608329, 608334 ], [ 608416, 608421 ], [ 610924, 610929 ], [ 611013, 611018 ], [ 613226, 613231 ], [ 613311, 613316 ], [ 616753, 616758 ], [ 616838, 616843 ], [ 616936, 616941 ], [ 617049, 617054 ], [ 623421, 623426 ], [ 
623523, 623528 ], [ 627609, 627614 ], [ 627698, 627703 ], [ 627794, 627799 ], [ 627893, 627898 ], [ 627987, 627992 ], [ 628080, 628085 ], [ 634590, 634595 ], [ 634697, 634702 ], [ 639195, 639200 ], [ 639307, 639312 ], [ 642031, 642036 ], [ 642128, 642133 ], [ 642233, 642238 ], [ 645241, 645246 ], [ 645350, 645355 ], [ 645467, 645472 ], [ 651128, 651133 ], [ 651234, 651239 ], [ 651349, 651354 ], [ 651466, 651471 ], [ 651586, 651591 ], [ 651707, 651712 ], [ 651831, 651836 ], [ 654743, 654748 ], [ 662982, 662987 ], [ 663092, 663097 ], [ 663211, 663216 ], [ 663332, 663337 ], [ 663456, 663461 ], [ 663581, 663586 ], [ 663709, 663714 ], [ 666818, 666823 ], [ 670820, 670825 ], [ 670941, 670946 ], [ 671069, 671074 ], [ 671180, 671185 ], [ 675393, 675398 ], [ 675517, 675522 ], [ 675644, 675649 ], [ 675765, 675770 ], [ 679229, 679234 ], [ 679336, 679341 ], [ 679449, 679454 ], [ 679545, 679550 ], [ 679628, 679633 ], [ 682009, 682014 ], [ 682100, 682105 ], [ 684146, 684151 ], [ 684237, 684242 ], [ 685850, 685855 ], [ 688301, 688306 ], [ 688378, 688383 ], [ 688450, 688455 ], [ 690942, 690947 ], [ 698366, 698371 ], [ 698445, 698450 ], [ 698535, 698540 ], [ 698626, 698631 ], [ 698710, 698715 ], [ 698800, 698805 ], [ 698892, 698897 ], [ 698980, 698985 ], [ 704025, 704030 ], [ 704110, 704115 ], [ 704196, 704201 ], [ 706541, 706546 ], [ 706630, 706635 ], [ 708366, 708371 ], [ 710357, 710362 ], [ 710464, 710469 ], [ 712533, 712538 ], [ 712647, 712652 ], [ 714946, 714951 ], [ 717074, 717079 ], [ 717163, 717168 ], [ 719446, 719451 ], [ 721570, 721575 ], [ 721659, 721664 ], [ 723952, 723957 ], [ 727793, 727798 ], [ 727887, 727892 ], [ 728068, 728073 ], [ 728176, 728181 ], [ 728290, 728295 ], [ 730814, 730819 ], [ 734639, 734644 ], [ 734733, 734738 ], [ 734914, 734919 ], [ 735022, 735027 ], [ 735136, 735141 ], [ 743979, 743984 ], [ 744058, 744063 ], [ 744215, 744220 ], [ 744296, 744301 ], [ 744385, 744390 ], [ 744477, 744482 ], [ 744572, 744577 ], [ 744661, 744666 ], [ 744745, 744750 ], [ 744838, 744843 ], [ 744936, 744941 ], [ 745035, 745040 ], [ 745126, 745131 ], [ 745213, 745218 ], [ 745291, 745296 ], [ 745362, 745367 ], [ 745436, 745441 ], [ 745516, 745521 ], [ 745596, 745601 ], [ 749091, 749096 ], [ 749173, 749178 ], [ 751750, 751755 ], [ 754586, 754591 ], [ 754679, 754684 ], [ 754773, 754778 ], [ 756697, 756702 ], [ 762812, 762817 ], [ 762895, 762900 ], [ 762977, 762982 ], [ 763052, 763057 ], [ 763130, 763135 ], [ 763214, 763219 ], [ 763304, 763309 ], [ 763398, 763403 ], [ 763486, 763491 ], [ 763641, 763646 ], [ 770830, 770835 ], [ 770905, 770910 ], [ 770991, 770996 ], [ 771078, 771083 ], [ 771158, 771163 ], [ 771244, 771249 ], [ 771332, 771337 ], [ 771416, 771421 ], [ 776044, 776049 ], [ 776125, 776130 ], [ 776207, 776212 ], [ 778358, 778363 ], [ 778443, 778448 ], [ 780033, 780038 ], [ 781854, 781859 ], [ 781957, 781962 ], [ 783844, 783849 ], [ 783954, 783959 ], [ 786065, 786070 ], [ 788007, 788012 ], [ 788092, 788097 ], [ 790187, 790192 ], [ 792125, 792130 ], [ 792210, 792215 ], [ 794315, 794320 ], [ 797846, 797851 ], [ 797936, 797941 ], [ 798113, 798118 ], [ 798217, 798222 ], [ 798327, 798332 ], [ 800651, 800656 ], [ 804166, 804171 ], [ 804256, 804261 ], [ 804433, 804438 ], [ 804537, 804542 ], [ 804647, 804652 ] ], [ [ 327, 332 ], [ 10735, 10740 ], [ 13973, 13978 ], [ 14117, 14122 ], [ 14198, 14203 ], [ 16911, 16916 ], [ 29118, 29123 ], [ 29201, 29206 ], [ 29363, 29368 ], [ 29456, 29461 ], [ 29551, 29556 ], [ 29636, 29641 ], [ 29724, 29729 ], [ 29821, 29826 ], [ 29923, 29928 ], [ 30026, 30031 ], [ 30120, 
30125 ], [ 30205, 30210 ], [ 30298, 30303 ], [ 30391, 30396 ], [ 30472, 30477 ], [ 30555, 30560 ], [ 30638, 30643 ], [ 30729, 30734 ], [ 30811, 30816 ], [ 30889, 30894 ], [ 30973, 30978 ], [ 35465, 35470 ], [ 35556, 35561 ], [ 38690, 38695 ], [ 38778, 38783 ], [ 38869, 38874 ], [ 41299, 41304 ], [ 41400, 41405 ], [ 41501, 41506 ], [ 43686, 43691 ], [ 43773, 43778 ], [ 46880, 46885 ], [ 46962, 46967 ], [ 47052, 47057 ], [ 47140, 47145 ], [ 51729, 51734 ], [ 51832, 51837 ], [ 51926, 51931 ], [ 52013, 52018 ], [ 52107, 52112 ], [ 52208, 52213 ], [ 52311, 52316 ], [ 52414, 52419 ], [ 58881, 58886 ], [ 58976, 58981 ], [ 59062, 59067 ], [ 59144, 59149 ], [ 59229, 59234 ], [ 62113, 62118 ], [ 62222, 62227 ], [ 65268, 65273 ], [ 77674, 77679 ], [ 77768, 77773 ], [ 77876, 77881 ], [ 78077, 78082 ], [ 78176, 78181 ], [ 89796, 89801 ], [ 89902, 89907 ], [ 90007, 90012 ], [ 90105, 90110 ], [ 90198, 90203 ], [ 90298, 90303 ], [ 90401, 90406 ], [ 90499, 90504 ], [ 90596, 90601 ], [ 90700, 90705 ], [ 96839, 96844 ], [ 96949, 96954 ], [ 97063, 97068 ], [ 97182, 97187 ], [ 101274, 101279 ], [ 105077, 105082 ], [ 105175, 105180 ], [ 105270, 105275 ], [ 108192, 108197 ], [ 108289, 108294 ], [ 110900, 110905 ], [ 110993, 110998 ], [ 115025, 115030 ], [ 115118, 115123 ], [ 115224, 115229 ], [ 115345, 115350 ], [ 118462, 118467 ], [ 118554, 118559 ], [ 126041, 126046 ], [ 126151, 126156 ], [ 130951, 130956 ], [ 131048, 131053 ], [ 131152, 131157 ], [ 131259, 131264 ], [ 131361, 131366 ], [ 131462, 131467 ], [ 139192, 139197 ], [ 139307, 139312 ], [ 144691, 144696 ], [ 147859, 147864 ], [ 147964, 147969 ], [ 148077, 148082 ], [ 151553, 151558 ], [ 151670, 151675 ], [ 151795, 151800 ], [ 158360, 158365 ], [ 158474, 158479 ], [ 158597, 158602 ], [ 158722, 158727 ], [ 158850, 158855 ], [ 158979, 158984 ], [ 159111, 159116 ], [ 162433, 162438 ], [ 171958, 171963 ], [ 172076, 172081 ], [ 172203, 172208 ], [ 172332, 172337 ], [ 172464, 172469 ], [ 172597, 172602 ], [ 172733, 172738 ], [ 176252, 176257 ], [ 180826, 180831 ], [ 180955, 180960 ], [ 181091, 181096 ], [ 181210, 181215 ], [ 186035, 186040 ], [ 186167, 186172 ], [ 186302, 186307 ], [ 186431, 186436 ], [ 190528, 190533 ], [ 190643, 190648 ], [ 190764, 190769 ], [ 190868, 190873 ], [ 190959, 190964 ], [ 193743, 193748 ], [ 193842, 193847 ], [ 196263, 196268 ], [ 196362, 196367 ], [ 198511, 198516 ], [ 205360, 205365 ], [ 205457, 205462 ], [ 205813, 205818 ], [ 205917, 205922 ], [ 206039, 206044 ], [ 206168, 206173 ], [ 206284, 206289 ], [ 209761, 209766 ], [ 209853, 209858 ], [ 218607, 218612 ], [ 218703, 218708 ], [ 218811, 218816 ], [ 229905, 229910 ], [ 230011, 230016 ], [ 230116, 230121 ], [ 230214, 230219 ], [ 230307, 230312 ], [ 230407, 230412 ], [ 230510, 230515 ], [ 230608, 230613 ], [ 230705, 230710 ], [ 230809, 230814 ], [ 236948, 236953 ], [ 237058, 237063 ], [ 237172, 237177 ], [ 237291, 237296 ], [ 241383, 241388 ], [ 245186, 245191 ], [ 245284, 245289 ], [ 245379, 245384 ], [ 248301, 248306 ], [ 248398, 248403 ], [ 251009, 251014 ], [ 251102, 251107 ], [ 255134, 255139 ], [ 255227, 255232 ], [ 255333, 255338 ], [ 255454, 255459 ], [ 262926, 262931 ], [ 263036, 263041 ], [ 267827, 267832 ], [ 267924, 267929 ], [ 268028, 268033 ], [ 268135, 268140 ], [ 268237, 268242 ], [ 268338, 268343 ], [ 276016, 276021 ], [ 276131, 276136 ], [ 281488, 281493 ], [ 284650, 284655 ], [ 284755, 284760 ], [ 284868, 284873 ], [ 288338, 288343 ], [ 288455, 288460 ], [ 288580, 288585 ], [ 295133, 295138 ], [ 295247, 295252 ], [ 295370, 295375 ], [ 295495, 295500 ], [ 
295623, 295628 ], [ 295752, 295757 ], [ 295884, 295889 ], [ 299197, 299202 ], [ 308695, 308700 ], [ 308813, 308818 ], [ 308940, 308945 ], [ 309069, 309074 ], [ 309201, 309206 ], [ 309334, 309339 ], [ 309470, 309475 ], [ 312980, 312985 ], [ 317548, 317553 ], [ 317677, 317682 ], [ 317813, 317818 ], [ 317932, 317937 ], [ 322751, 322756 ], [ 322883, 322888 ], [ 323018, 323023 ], [ 323147, 323152 ], [ 327241, 327246 ], [ 327356, 327361 ], [ 327477, 327482 ], [ 327581, 327586 ], [ 327672, 327677 ], [ 330443, 330448 ], [ 330542, 330547 ], [ 332954, 332959 ], [ 333053, 333058 ], [ 334966, 334971 ], [ 337139, 337144 ], [ 344855, 344860 ], [ 344947, 344952 ], [ 345051, 345056 ], [ 355245, 355250 ], [ 355347, 355352 ], [ 355448, 355453 ], [ 355542, 355547 ], [ 355631, 355636 ], [ 355727, 355732 ], [ 355826, 355831 ], [ 355920, 355925 ], [ 356013, 356018 ], [ 356113, 356118 ], [ 361836, 361841 ], [ 361942, 361947 ], [ 362052, 362057 ], [ 362167, 362172 ], [ 365989, 365994 ], [ 369480, 369485 ], [ 369574, 369579 ], [ 369665, 369670 ], [ 372371, 372376 ], [ 372464, 372469 ], [ 374867, 374872 ], [ 374956, 374961 ], [ 378684, 378689 ], [ 378773, 378778 ], [ 378875, 378880 ], [ 378992, 378997 ], [ 385860, 385865 ], [ 385966, 385971 ], [ 390391, 390396 ], [ 390484, 390489 ], [ 390584, 390589 ], [ 390687, 390692 ], [ 390785, 390790 ], [ 390882, 390887 ], [ 397904, 397909 ], [ 398015, 398020 ], [ 402958, 402963 ], [ 405892, 405897 ], [ 405993, 405998 ], [ 406102, 406107 ], [ 409332, 409337 ], [ 409445, 409450 ], [ 409566, 409571 ], [ 415655, 415660 ], [ 415765, 415770 ], [ 415884, 415889 ], [ 416005, 416010 ], [ 416129, 416134 ], [ 416254, 416259 ], [ 416382, 416387 ], [ 419481, 419486 ], [ 428309, 428314 ], [ 428423, 428428 ], [ 428546, 428551 ], [ 428671, 428676 ], [ 428799, 428804 ], [ 428928, 428933 ], [ 429060, 429065 ], [ 432356, 432361 ], [ 436632, 436637 ], [ 436757, 436762 ], [ 436889, 436894 ], [ 437004, 437009 ], [ 441511, 441516 ], [ 441639, 441644 ], [ 441770, 441775 ], [ 441895, 441900 ], [ 445665, 445670 ], [ 445776, 445781 ], [ 445893, 445898 ], [ 445993, 445998 ], [ 446080, 446085 ], [ 448647, 448652 ], [ 448742, 448747 ], [ 450962, 450967 ], [ 451057, 451062 ], [ 453664, 453669 ], [ 463966, 463971 ], [ 464052, 464057 ], [ 464152, 464157 ], [ 464337, 464342 ], [ 464428, 464433 ], [ 474273, 474278 ], [ 474371, 474376 ], [ 474468, 474473 ], [ 474558, 474563 ], [ 474643, 474648 ], [ 474735, 474740 ], [ 474830, 474835 ], [ 474920, 474925 ], [ 475009, 475014 ], [ 475105, 475110 ], [ 480448, 480453 ], [ 480550, 480555 ], [ 480656, 480661 ], [ 480767, 480772 ], [ 484346, 484351 ], [ 487561, 487566 ], [ 487651, 487656 ], [ 487738, 487743 ], [ 490246, 490251 ], [ 490335, 490340 ], [ 492548, 492553 ], [ 492633, 492638 ], [ 496075, 496080 ], [ 496160, 496165 ], [ 496258, 496263 ], [ 496371, 496376 ], [ 499002, 499007 ], [ 499086, 499091 ], [ 505473, 505478 ], [ 505575, 505580 ], [ 509670, 509675 ], [ 509759, 509764 ], [ 509855, 509860 ], [ 509954, 509959 ], [ 510048, 510053 ], [ 510141, 510146 ], [ 516703, 516708 ], [ 516810, 516815 ], [ 521447, 521452 ], [ 524177, 524182 ], [ 524274, 524279 ], [ 524379, 524384 ], [ 527393, 527398 ], [ 527502, 527507 ], [ 527619, 527624 ], [ 533292, 533297 ], [ 533398, 533403 ], [ 533513, 533518 ], [ 533630, 533635 ], [ 533750, 533755 ], [ 533871, 533876 ], [ 533995, 534000 ], [ 536916, 536921 ], [ 545182, 545187 ], [ 545292, 545297 ], [ 545411, 545416 ], [ 545532, 545537 ], [ 545656, 545661 ], [ 545781, 545786 ], [ 545909, 545914 ], [ 549027, 549032 ], [ 553035, 553040 
], [ 553156, 553161 ], [ 553284, 553289 ], [ 553395, 553400 ], [ 557614, 557619 ], [ 557738, 557743 ], [ 557865, 557870 ], [ 557986, 557991 ], [ 561453, 561458 ], [ 561560, 561565 ], [ 561673, 561678 ], [ 561769, 561774 ], [ 561852, 561857 ], [ 564246, 564251 ], [ 564337, 564342 ], [ 566392, 566397 ], [ 566483, 566488 ], [ 568322, 568327 ], [ 574181, 574186 ], [ 574270, 574275 ], [ 574602, 574607 ], [ 574698, 574703 ], [ 574812, 574817 ], [ 574933, 574938 ], [ 575041, 575046 ], [ 577992, 577997 ], [ 578076, 578081 ], [ 585355, 585360 ], [ 585443, 585448 ], [ 585543, 585548 ], [ 594945, 594950 ], [ 595043, 595048 ], [ 595140, 595145 ], [ 595230, 595235 ], [ 595315, 595320 ], [ 595407, 595412 ], [ 595502, 595507 ], [ 595592, 595597 ], [ 595681, 595686 ], [ 595777, 595782 ], [ 601120, 601125 ], [ 601222, 601227 ], [ 601328, 601333 ], [ 601439, 601444 ], [ 605018, 605023 ], [ 608233, 608238 ], [ 608323, 608328 ], [ 608410, 608415 ], [ 610918, 610923 ], [ 611007, 611012 ], [ 613220, 613225 ], [ 613305, 613310 ], [ 616747, 616752 ], [ 616832, 616837 ], [ 616930, 616935 ], [ 617043, 617048 ], [ 623415, 623420 ], [ 623517, 623522 ], [ 627603, 627608 ], [ 627692, 627697 ], [ 627788, 627793 ], [ 627887, 627892 ], [ 627981, 627986 ], [ 628074, 628079 ], [ 634584, 634589 ], [ 634691, 634696 ], [ 639301, 639306 ], [ 642025, 642030 ], [ 642122, 642127 ], [ 642227, 642232 ], [ 645235, 645240 ], [ 645344, 645349 ], [ 645461, 645466 ], [ 651122, 651127 ], [ 651228, 651233 ], [ 651343, 651348 ], [ 651460, 651465 ], [ 651580, 651585 ], [ 651701, 651706 ], [ 651825, 651830 ], [ 654737, 654742 ], [ 662976, 662981 ], [ 663086, 663091 ], [ 663205, 663210 ], [ 663326, 663331 ], [ 663450, 663455 ], [ 663575, 663580 ], [ 663703, 663708 ], [ 666812, 666817 ], [ 670814, 670819 ], [ 670935, 670940 ], [ 671063, 671068 ], [ 671174, 671179 ], [ 675387, 675392 ], [ 675511, 675516 ], [ 675638, 675643 ], [ 675759, 675764 ], [ 679223, 679228 ], [ 679330, 679335 ], [ 679443, 679448 ], [ 679539, 679544 ], [ 679622, 679627 ], [ 682003, 682008 ], [ 682094, 682099 ], [ 684140, 684145 ], [ 684231, 684236 ], [ 685844, 685849 ], [ 688295, 688300 ], [ 688372, 688377 ], [ 688444, 688449 ], [ 690936, 690941 ], [ 698360, 698365 ], [ 698439, 698444 ], [ 698529, 698534 ], [ 698620, 698625 ], [ 698704, 698709 ], [ 698794, 698799 ], [ 698886, 698891 ], [ 698974, 698979 ], [ 704019, 704024 ], [ 704104, 704109 ], [ 704190, 704195 ], [ 706535, 706540 ], [ 706624, 706629 ], [ 710351, 710356 ], [ 710458, 710463 ], [ 712527, 712532 ], [ 714940, 714945 ], [ 717068, 717073 ], [ 717157, 717162 ], [ 719440, 719445 ], [ 721564, 721569 ], [ 721653, 721658 ], [ 723946, 723951 ], [ 727787, 727792 ], [ 727881, 727886 ], [ 728062, 728067 ], [ 728170, 728175 ], [ 728284, 728289 ], [ 730808, 730813 ], [ 734633, 734638 ], [ 734727, 734732 ], [ 734908, 734913 ], [ 735016, 735021 ], [ 735130, 735135 ], [ 743973, 743978 ], [ 744052, 744057 ], [ 744209, 744214 ], [ 744290, 744295 ], [ 744379, 744384 ], [ 744471, 744476 ], [ 744566, 744571 ], [ 744655, 744660 ], [ 744739, 744744 ], [ 744832, 744837 ], [ 744930, 744935 ], [ 745029, 745034 ], [ 745120, 745125 ], [ 745207, 745212 ], [ 745285, 745290 ], [ 745356, 745361 ], [ 745430, 745435 ], [ 745510, 745515 ], [ 745590, 745595 ], [ 749085, 749090 ], [ 749167, 749172 ], [ 751744, 751749 ], [ 754580, 754585 ], [ 754767, 754772 ], [ 762806, 762811 ], [ 762889, 762894 ], [ 762971, 762976 ], [ 763046, 763051 ], [ 763124, 763129 ], [ 763208, 763213 ], [ 763298, 763303 ], [ 763392, 763397 ], [ 763480, 763485 ], [ 763635, 
763640 ], [ 770824, 770829 ], [ 770899, 770904 ], [ 770985, 770990 ], [ 771072, 771077 ], [ 771152, 771157 ], [ 771238, 771243 ], [ 771326, 771331 ], [ 771410, 771415 ], [ 776038, 776043 ], [ 776119, 776124 ], [ 776201, 776206 ], [ 778352, 778357 ], [ 778437, 778442 ], [ 781848, 781853 ], [ 781951, 781956 ], [ 783838, 783843 ], [ 786059, 786064 ], [ 788001, 788006 ], [ 788086, 788091 ], [ 790181, 790186 ], [ 792119, 792124 ], [ 792204, 792209 ], [ 794309, 794314 ], [ 797840, 797845 ], [ 797930, 797935 ], [ 798107, 798112 ], [ 798211, 798216 ], [ 798321, 798326 ], [ 800645, 800650 ], [ 804160, 804165 ], [ 804250, 804255 ], [ 804427, 804432 ], [ 804531, 804536 ], [ 804641, 804646 ] ], [ [ 334, 343 ], [ 77981, 77990 ], [ 101394, 101403 ], [ 118647, 118656 ], [ 144567, 144576 ], [ 205564, 205573 ], [ 205693, 205702 ], [ 209946, 209955 ], [ 218916, 218925 ], [ 241503, 241512 ], [ 281364, 281373 ], [ 345152, 345161 ], [ 366105, 366114 ], [ 402838, 402847 ], [ 464249, 464258 ], [ 484458, 484467 ], [ 499171, 499180 ], [ 521331, 521340 ], [ 574369, 574378 ], [ 574490, 574499 ], [ 578161, 578170 ], [ 585640, 585649 ], [ 605130, 605139 ], [ 639185, 639194 ], [ 708356, 708365 ], [ 712637, 712646 ], [ 754669, 754678 ], [ 756687, 756696 ], [ 780023, 780032 ], [ 783944, 783953 ] ], [ [ 345, 350 ], [ 9220, 9225 ], [ 10934, 10939 ], [ 14405, 14410 ], [ 14456, 14461 ], [ 14506, 14511 ], [ 17060, 17065 ], [ 17114, 17119 ], [ 33078, 33083 ], [ 33135, 33140 ], [ 33199, 33204 ], [ 33262, 33267 ], [ 65443, 65448 ], [ 65505, 65510 ], [ 79531, 79536 ], [ 79598, 79603 ], [ 79665, 79670 ], [ 79735, 79740 ], [ 79798, 79803 ], [ 79868, 79873 ], [ 79936, 79941 ], [ 80002, 80007 ], [ 80069, 80074 ], [ 105575, 105580 ], [ 219871, 219876 ], [ 219938, 219943 ], [ 220005, 220010 ], [ 220068, 220073 ], [ 220133, 220138 ], [ 220201, 220206 ], [ 245684, 245689 ], [ 337302, 337307 ], [ 346023, 346028 ], [ 346086, 346091 ], [ 346149, 346154 ], [ 346208, 346213 ], [ 346269, 346274 ], [ 346333, 346338 ], [ 369950, 369955 ], [ 453815, 453820 ], [ 453869, 453874 ], [ 465566, 465571 ], [ 465625, 465630 ], [ 465684, 465689 ], [ 465746, 465751 ], [ 465801, 465806 ], [ 465863, 465868 ], [ 465923, 465928 ], [ 465981, 465986 ], [ 466040, 466045 ], [ 488003, 488008 ], [ 586445, 586450 ], [ 586504, 586509 ], [ 586563, 586568 ], [ 586618, 586623 ], [ 586675, 586680 ], [ 586735, 586740 ], [ 608675, 608680 ], [ 688639, 688644 ], [ 688691, 688696 ], [ 691075, 691080 ], [ 691127, 691132 ], [ 701156, 701161 ], [ 704440, 704445 ], [ 715117, 715122 ], [ 719617, 719622 ], [ 724126, 724131 ], [ 730988, 730993 ], [ 747051, 747056 ], [ 773419, 773424 ], [ 776431, 776436 ], [ 786224, 786229 ], [ 790346, 790351 ], [ 794477, 794482 ], [ 800813, 800818 ] ], [ [ 352, 364 ] ], [ [ 366, 370 ], [ 91391, 91395 ], [ 97538, 97542 ], [ 126366, 126370 ], [ 139463, 139467 ], [ 139532, 139536 ], [ 144925, 144929 ], [ 152098, 152102 ], [ 181598, 181602 ], [ 231500, 231504 ], [ 237647, 237651 ], [ 263251, 263255 ], [ 276287, 276291 ], [ 276356, 276360 ], [ 281722, 281726 ], [ 288883, 288887 ], [ 318320, 318324 ], [ 356760, 356764 ], [ 362503, 362507 ], [ 386169, 386173 ], [ 398163, 398167 ], [ 398228, 398232 ], [ 403180, 403184 ], [ 409853, 409857 ], [ 437372, 437376 ], [ 475708, 475712 ], [ 481083, 481087 ], [ 505766, 505770 ], [ 516950, 516954 ], [ 517011, 517015 ], [ 521657, 521661 ], [ 527890, 527894 ], [ 553743, 553747 ], [ 596380, 596384 ], [ 601755, 601759 ], [ 623708, 623712 ], [ 634831, 634835 ], [ 634892, 634896 ], [ 639511, 639515 ], [ 645732, 645736 ], [ 
671522, 671526 ], [ 699253, 699257 ], [ 710591, 710595 ], [ 771665, 771669 ], [ 782076, 782080 ] ], [ [ 372, 377 ] ], [ [ 379, 388 ] ], [ [ 413, 420 ] ], [ [ 444, 450 ] ], [ [ 452, 463 ] ], [ [ 501, 540 ] ], [ [ 550, 563 ] ], [ [ 858, 870 ] ], [ [ 1941, 1953 ] ], [ [ 2473, 2495 ] ], [ [ 2818, 2833 ] ], [ [ 3499, 3516 ] ], [ [ 3826, 3849 ] ], [ [ 4417, 4438 ] ], [ [ 4876, 4891 ] ], [ [ 6541, 6554 ] ], [ [ 7960, 7972 ], [ 8613, 8625 ], [ 8961, 8973 ], [ 9027, 9039 ], [ 9411, 9423 ], [ 10235, 10247 ], [ 10580, 10592 ], [ 11222, 11234 ], [ 13279, 13291 ], [ 13668, 13680 ], [ 13756, 13768 ], [ 13847, 13859 ], [ 14906, 14918 ], [ 16241, 16253 ], [ 16664, 16676 ], [ 16764, 16776 ], [ 17367, 17379 ], [ 27687, 27699 ], [ 28138, 28150 ], [ 28250, 28262 ], [ 28361, 28373 ], [ 28469, 28481 ], [ 28581, 28593 ], [ 28694, 28706 ], [ 28819, 28831 ], [ 28947, 28959 ], [ 32052, 32064 ], [ 32299, 32311 ], [ 32554, 32566 ], [ 32811, 32823 ], [ 33530, 33542 ], [ 34905, 34917 ], [ 35958, 35970 ], [ 38117, 38129 ], [ 39375, 39387 ], [ 40730, 40742 ], [ 41988, 42000 ], [ 43128, 43140 ], [ 44170, 44182 ], [ 46178, 46190 ], [ 46687, 46699 ], [ 47463, 47475 ], [ 47963, 47975 ], [ 51104, 51116 ], [ 53194, 53206 ], [ 58205, 58217 ], [ 58694, 58706 ], [ 59646, 59658 ], [ 60135, 60147 ], [ 61516, 61528 ], [ 62673, 62685 ], [ 64425, 64437 ], [ 64941, 64953 ], [ 65072, 65084 ], [ 65803, 65815 ], [ 75327, 75339 ], [ 75883, 75895 ], [ 76018, 76030 ], [ 76162, 76174 ], [ 76310, 76322 ], [ 76464, 76476 ], [ 76612, 76624 ], [ 76750, 76762 ], [ 76900, 76912 ], [ 77052, 77064 ], [ 77198, 77210 ], [ 77321, 77333 ], [ 77458, 77470 ], [ 78645, 78657 ], [ 78949, 78961 ], [ 79257, 79269 ], [ 80421, 80433 ], [ 82044, 82056 ], [ 82595, 82607 ], [ 82731, 82743 ], [ 82958, 82970 ], [ 83263, 83275 ], [ 83786, 83798 ], [ 89117, 89129 ], [ 91674, 91686 ], [ 96010, 96022 ], [ 96589, 96601 ], [ 97601, 97613 ], [ 98151, 98163 ], [ 100549, 100561 ], [ 101928, 101940 ], [ 104248, 104260 ], [ 104840, 104852 ], [ 105940, 105952 ], [ 107488, 107500 ], [ 108814, 108826 ], [ 110236, 110248 ], [ 111490, 111502 ], [ 114361, 114373 ], [ 115988, 116000 ], [ 117795, 117807 ], [ 119198, 119210 ], [ 120779, 120791 ], [ 121337, 121349 ], [ 121476, 121488 ], [ 121706, 121718 ], [ 122014, 122026 ], [ 122544, 122556 ], [ 125204, 125216 ], [ 125793, 125805 ], [ 126432, 126444 ], [ 126993, 127005 ], [ 130230, 130242 ], [ 132227, 132239 ], [ 137456, 137468 ], [ 138042, 138054 ], [ 138186, 138198 ], [ 138335, 138347 ], [ 138492, 138504 ], [ 138636, 138648 ], [ 138783, 138795 ], [ 138934, 138946 ], [ 139595, 139607 ], [ 139925, 139937 ], [ 140254, 140266 ], [ 140597, 140609 ], [ 140944, 140956 ], [ 141265, 141277 ], [ 141605, 141617 ], [ 142165, 142177 ], [ 143836, 143848 ], [ 145225, 145237 ], [ 147139, 147151 ], [ 148669, 148681 ], [ 150828, 150840 ], [ 152400, 152412 ], [ 157479, 157491 ], [ 158095, 158107 ], [ 159797, 159809 ], [ 160382, 160394 ], [ 161667, 161679 ], [ 162902, 162914 ], [ 164154, 164166 ], [ 164779, 164791 ], [ 165057, 165069 ], [ 165673, 165685 ], [ 171026, 171038 ], [ 171676, 171688 ], [ 173455, 173467 ], [ 174078, 174090 ], [ 175440, 175452 ], [ 176745, 176757 ], [ 180095, 180107 ], [ 181900, 181912 ], [ 185298, 185310 ], [ 187130, 187142 ], [ 189813, 189825 ], [ 191797, 191809 ], [ 193072, 193084 ], [ 194358, 194370 ], [ 195596, 195608 ], [ 196876, 196888 ], [ 197851, 197863 ], [ 198944, 198956 ], [ 204722, 204734 ], [ 207197, 207209 ], [ 209100, 209112 ], [ 210494, 210506 ], [ 216842, 216854 ], [ 217400, 217412 ], [ 217535, 217547 ], [ 
217679, 217691 ], [ 217827, 217839 ], [ 217965, 217977 ], [ 218100, 218112 ], [ 218240, 218252 ], [ 218388, 218400 ], [ 219270, 219282 ], [ 219570, 219582 ], [ 220557, 220569 ], [ 222153, 222165 ], [ 222704, 222716 ], [ 222840, 222852 ], [ 223067, 223079 ], [ 223372, 223384 ], [ 223895, 223907 ], [ 229226, 229238 ], [ 231783, 231795 ], [ 236119, 236131 ], [ 236698, 236710 ], [ 237710, 237722 ], [ 238260, 238272 ], [ 240658, 240670 ], [ 242037, 242049 ], [ 244357, 244369 ], [ 244949, 244961 ], [ 246049, 246061 ], [ 247597, 247609 ], [ 248923, 248935 ], [ 250345, 250357 ], [ 251599, 251611 ], [ 254470, 254482 ], [ 256097, 256109 ], [ 257703, 257715 ], [ 258254, 258266 ], [ 258390, 258402 ], [ 258617, 258629 ], [ 258922, 258934 ], [ 259445, 259457 ], [ 262099, 262111 ], [ 262681, 262693 ], [ 263317, 263329 ], [ 263875, 263887 ], [ 267109, 267121 ], [ 269103, 269115 ], [ 274308, 274320 ], [ 274887, 274899 ], [ 275028, 275040 ], [ 275174, 275186 ], [ 275328, 275340 ], [ 275469, 275481 ], [ 275613, 275625 ], [ 275761, 275773 ], [ 276419, 276431 ], [ 276746, 276758 ], [ 277072, 277084 ], [ 277412, 277424 ], [ 277756, 277768 ], [ 278074, 278086 ], [ 278411, 278423 ], [ 278968, 278980 ], [ 280636, 280648 ], [ 282022, 282034 ], [ 283933, 283945 ], [ 285460, 285472 ], [ 287616, 287628 ], [ 289185, 289197 ], [ 294258, 294270 ], [ 294871, 294883 ], [ 296570, 296582 ], [ 297152, 297164 ], [ 298434, 298446 ], [ 299666, 299678 ], [ 300912, 300924 ], [ 301534, 301546 ], [ 301809, 301821 ], [ 302422, 302434 ], [ 307769, 307781 ], [ 308416, 308428 ], [ 310192, 310204 ], [ 310812, 310824 ], [ 312171, 312183 ], [ 313473, 313485 ], [ 316820, 316832 ], [ 318622, 318634 ], [ 322017, 322029 ], [ 323846, 323858 ], [ 326526, 326538 ], [ 328510, 328522 ], [ 329781, 329793 ], [ 331053, 331065 ], [ 332287, 332299 ], [ 333567, 333579 ], [ 334392, 334404 ], [ 335356, 335368 ], [ 336507, 336519 ], [ 336969, 336981 ], [ 337578, 337590 ], [ 343290, 343302 ], [ 343800, 343812 ], [ 343917, 343929 ], [ 344043, 344055 ], [ 344173, 344185 ], [ 344293, 344305 ], [ 344410, 344422 ], [ 344532, 344544 ], [ 344662, 344674 ], [ 345482, 345494 ], [ 345752, 345764 ], [ 346673, 346685 ], [ 348119, 348131 ], [ 348628, 348640 ], [ 348746, 348758 ], [ 348947, 348959 ], [ 349222, 349234 ], [ 349703, 349715 ], [ 354616, 354628 ], [ 357027, 357039 ], [ 361075, 361087 ], [ 361612, 361624 ], [ 362562, 362574 ], [ 363070, 363082 ], [ 365314, 365326 ], [ 366611, 366623 ], [ 368719, 368731 ], [ 369269, 369281 ], [ 370299, 370311 ], [ 371717, 371729 ], [ 372961, 372973 ], [ 374253, 374265 ], [ 375425, 375437 ], [ 378070, 378082 ], [ 379599, 379611 ], [ 381055, 381067 ], [ 381564, 381576 ], [ 381682, 381694 ], [ 381883, 381895 ], [ 382158, 382170 ], [ 382639, 382651 ], [ 385101, 385113 ], [ 385641, 385653 ], [ 386231, 386243 ], [ 386747, 386759 ], [ 389723, 389735 ], [ 391603, 391615 ], [ 396372, 396384 ], [ 396909, 396921 ], [ 397032, 397044 ], [ 397160, 397172 ], [ 397296, 397308 ], [ 397419, 397431 ], [ 397545, 397557 ], [ 397675, 397687 ], [ 398287, 398299 ], [ 398584, 398596 ], [ 398880, 398892 ], [ 399190, 399202 ], [ 399504, 399516 ], [ 399792, 399804 ], [ 400099, 400111 ], [ 400614, 400626 ], [ 402160, 402172 ], [ 403464, 403476 ], [ 405225, 405237 ], [ 406662, 406674 ], [ 408660, 408672 ], [ 410139, 410151 ], [ 414848, 414860 ], [ 415419, 415431 ], [ 417032, 417044 ], [ 417572, 417584 ], [ 418768, 418780 ], [ 419926, 419938 ], [ 421068, 421080 ], [ 421648, 421660 ], [ 421897, 421909 ], [ 422468, 422480 ], [ 427451, 427463 ], [ 428056, 428068 
], [ 429746, 429758 ], [ 430324, 430336 ], [ 431597, 431609 ], [ 432825, 432837 ], [ 435954, 435966 ], [ 437658, 437670 ], [ 440827, 440839 ], [ 442558, 442570 ], [ 445000, 445012 ], [ 446878, 446890 ], [ 448035, 448047 ], [ 449225, 449237 ], [ 450345, 450357 ], [ 451543, 451555 ], [ 452972, 452984 ], [ 453407, 453419 ], [ 453511, 453523 ], [ 454130, 454142 ], [ 462039, 462051 ], [ 462515, 462527 ], [ 462623, 462635 ], [ 462740, 462752 ], [ 462861, 462873 ], [ 462988, 463000 ], [ 463109, 463121 ], [ 463220, 463232 ], [ 463343, 463355 ], [ 463468, 463480 ], [ 463587, 463599 ], [ 463683, 463695 ], [ 463793, 463805 ], [ 464833, 464845 ], [ 465086, 465098 ], [ 465343, 465355 ], [ 466360, 466372 ], [ 467710, 467722 ], [ 468186, 468198 ], [ 468295, 468307 ], [ 468479, 468491 ], [ 468733, 468745 ], [ 469181, 469193 ], [ 473685, 473697 ], [ 475959, 475971 ], [ 479737, 479749 ], [ 480241, 480253 ], [ 481138, 481150 ], [ 481613, 481625 ], [ 483712, 483724 ], [ 484936, 484948 ], [ 486850, 486862 ], [ 487367, 487379 ], [ 488336, 488348 ], [ 489633, 489645 ], [ 490804, 490816 ], [ 491975, 491987 ], [ 493074, 493086 ], [ 495502, 495514 ], [ 496942, 496954 ], [ 498426, 498438 ], [ 499658, 499670 ], [ 500966, 500978 ], [ 501449, 501461 ], [ 501561, 501573 ], [ 501748, 501760 ], [ 502005, 502017 ], [ 502460, 502472 ], [ 504754, 504766 ], [ 505268, 505280 ], [ 505824, 505836 ], [ 506310, 506322 ], [ 509040, 509052 ], [ 510818, 510830 ], [ 515247, 515259 ], [ 515758, 515770 ], [ 515875, 515887 ], [ 515997, 516009 ], [ 516127, 516139 ], [ 516244, 516256 ], [ 516364, 516376 ], [ 516488, 516500 ], [ 517066, 517078 ], [ 517345, 517357 ], [ 517623, 517635 ], [ 517915, 517927 ], [ 518211, 518223 ], [ 518481, 518493 ], [ 518770, 518782 ], [ 519255, 519267 ], [ 520691, 520703 ], [ 521925, 521937 ], [ 523548, 523560 ], [ 524907, 524919 ], [ 526759, 526771 ], [ 528160, 528172 ], [ 532529, 532541 ], [ 533070, 533082 ], [ 534609, 534621 ], [ 535119, 535131 ], [ 536241, 536253 ], [ 537337, 537349 ], [ 538399, 538411 ], [ 538949, 538961 ], [ 539184, 539196 ], [ 539725, 539737 ], [ 544368, 544380 ], [ 544943, 544955 ], [ 546559, 546571 ], [ 547107, 547119 ], [ 548306, 548318 ], [ 549472, 549484 ], [ 552395, 552407 ], [ 554013, 554025 ], [ 556968, 556980 ], [ 558613, 558625 ], [ 560829, 560841 ], [ 562610, 562622 ], [ 563666, 563678 ], [ 564797, 564809 ], [ 565816, 565828 ], [ 566941, 566953 ], [ 567753, 567765 ], [ 568707, 568719 ], [ 573634, 573646 ], [ 575842, 575854 ], [ 577422, 577434 ], [ 578645, 578657 ], [ 583902, 583914 ], [ 584380, 584392 ], [ 584488, 584500 ], [ 584605, 584617 ], [ 584726, 584738 ], [ 584837, 584849 ], [ 584945, 584957 ], [ 585058, 585070 ], [ 585179, 585191 ], [ 585946, 585958 ], [ 586195, 586207 ], [ 587059, 587071 ], [ 588382, 588394 ], [ 588858, 588870 ], [ 588967, 588979 ], [ 589151, 589163 ], [ 589405, 589417 ], [ 589853, 589865 ], [ 594357, 594369 ], [ 596631, 596643 ], [ 600409, 600421 ], [ 600913, 600925 ], [ 601810, 601822 ], [ 602285, 602297 ], [ 604384, 604396 ], [ 605608, 605620 ], [ 607522, 607534 ], [ 608039, 608051 ], [ 609008, 609020 ], [ 610305, 610317 ], [ 611476, 611488 ], [ 612647, 612659 ], [ 613746, 613758 ], [ 616174, 616186 ], [ 617614, 617626 ], [ 618947, 618959 ], [ 619423, 619435 ], [ 619532, 619544 ], [ 619716, 619728 ], [ 619970, 619982 ], [ 620418, 620430 ], [ 622706, 622718 ], [ 623213, 623225 ], [ 623766, 623778 ], [ 624249, 624261 ], [ 626976, 626988 ], [ 628751, 628763 ], [ 633156, 633168 ], [ 633660, 633672 ], [ 633774, 633786 ], [ 633893, 633905 ], [ 634020, 
634032 ], [ 634134, 634146 ], [ 634251, 634263 ], [ 634372, 634384 ], [ 634947, 634959 ], [ 635223, 635235 ], [ 635498, 635510 ], [ 635787, 635799 ], [ 636080, 636092 ], [ 636347, 636359 ], [ 636633, 636645 ], [ 637115, 637127 ], [ 638548, 638560 ], [ 639779, 639791 ], [ 641399, 641411 ], [ 642755, 642767 ], [ 644604, 644616 ], [ 646002, 646014 ], [ 650365, 650377 ], [ 650903, 650915 ], [ 652439, 652451 ], [ 652946, 652958 ], [ 654065, 654077 ], [ 655158, 655170 ], [ 656214, 656226 ], [ 656761, 656773 ], [ 656993, 657005 ], [ 657531, 657543 ], [ 662168, 662180 ], [ 662740, 662752 ], [ 664353, 664365 ], [ 664898, 664910 ], [ 666094, 666106 ], [ 667257, 667269 ], [ 670177, 670189 ], [ 671792, 671804 ], [ 674744, 674756 ], [ 676386, 676398 ], [ 678599, 678611 ], [ 680380, 680392 ], [ 681432, 681444 ], [ 682549, 682561 ], [ 683564, 683576 ], [ 684689, 684701 ], [ 685367, 685379 ], [ 686181, 686193 ], [ 687715, 687727 ], [ 688079, 688091 ], [ 688171, 688183 ], [ 689034, 689046 ], [ 690300, 690312 ], [ 690701, 690713 ], [ 690800, 690812 ], [ 691366, 691378 ], [ 696963, 696975 ], [ 697449, 697461 ], [ 697567, 697579 ], [ 697669, 697681 ], [ 697768, 697780 ], [ 697884, 697896 ], [ 698005, 698017 ], [ 698113, 698125 ], [ 698208, 698220 ], [ 699439, 699451 ], [ 699687, 699699 ], [ 699916, 699928 ], [ 700157, 700169 ], [ 700428, 700440 ], [ 700689, 700701 ], [ 700923, 700935 ], [ 701522, 701534 ], [ 703328, 703340 ], [ 703836, 703848 ], [ 704815, 704827 ], [ 705960, 705972 ], [ 707031, 707043 ], [ 707826, 707838 ], [ 708694, 708706 ], [ 709824, 709836 ], [ 710874, 710886 ], [ 711997, 712009 ], [ 713057, 713069 ], [ 714276, 714288 ], [ 714750, 714762 ], [ 715354, 715366 ], [ 716494, 716506 ], [ 717567, 717579 ], [ 718779, 718791 ], [ 719251, 719263 ], [ 719853, 719865 ], [ 720992, 721004 ], [ 722063, 722075 ], [ 723297, 723309 ], [ 723760, 723772 ], [ 724356, 724368 ], [ 727224, 727236 ], [ 728874, 728886 ], [ 730171, 730183 ], [ 730626, 730638 ], [ 731214, 731226 ], [ 734078, 734090 ], [ 735720, 735732 ], [ 743173, 743185 ], [ 743612, 743624 ], [ 743711, 743723 ], [ 743817, 743829 ], [ 746575, 746587 ], [ 746825, 746837 ], [ 747305, 747317 ], [ 748555, 748567 ], [ 749539, 749551 ], [ 751027, 751039 ], [ 751473, 751485 ], [ 751575, 751587 ], [ 751899, 751911 ], [ 752139, 752151 ], [ 752555, 752567 ], [ 754033, 754045 ], [ 755302, 755314 ], [ 756135, 756147 ], [ 757045, 757057 ], [ 762269, 762281 ], [ 764407, 764419 ], [ 769514, 769526 ], [ 769977, 769989 ], [ 770088, 770100 ], [ 770183, 770195 ], [ 770275, 770287 ], [ 770384, 770396 ], [ 770498, 770510 ], [ 770599, 770611 ], [ 770687, 770699 ], [ 771835, 771847 ], [ 772064, 772076 ], [ 772274, 772286 ], [ 772496, 772508 ], [ 772748, 772760 ], [ 772990, 773002 ], [ 773205, 773217 ], [ 773773, 773785 ], [ 775389, 775401 ], [ 775870, 775882 ], [ 776790, 776802 ], [ 777816, 777828 ], [ 778816, 778828 ], [ 779528, 779540 ], [ 780337, 780349 ], [ 781356, 781368 ], [ 782339, 782351 ], [ 783343, 783355 ], [ 784336, 784348 ], [ 785437, 785449 ], [ 785884, 785896 ], [ 786445, 786457 ], [ 787466, 787478 ], [ 788468, 788480 ], [ 789562, 789574 ], [ 790007, 790019 ], [ 790566, 790578 ], [ 791586, 791598 ], [ 792586, 792598 ], [ 793702, 793714 ], [ 794138, 794150 ], [ 794691, 794703 ], [ 797316, 797328 ], [ 798871, 798883 ], [ 800050, 800062 ], [ 800478, 800490 ], [ 801023, 801035 ], [ 803644, 803656 ], [ 805191, 805203 ], [ 805421, 805433 ] ] ]
# Copyright (c) 2012-2013, 2015-2016 ARM Limited
# All rights reserved
#
# The license below extends only to copyright in the software and shall
# not be construed as granting a license to any other intellectual
# property including but not limited to intellectual property relating
# to a hardware implementation of the functionality of the software
# licensed hereunder. You may use the software subject to the license
# terms below provided that you ensure that this notice is replicated
# unmodified and in its entirety in all distributions of the software,
# modified or unmodified, in source code or in binary form.
#
# Copyright (c) 2010 Advanced Micro Devices, Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met: redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer;
# redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution;
# neither the name of the copyright holders nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# Authors: Lisa Hsu

# Configure the M5 cache hierarchy config in one place
#

from __future__ import print_function
from __future__ import absolute_import

import m5
from m5.objects import *
from .Caches import *
from . import HWPConfig

def config_cache(options, system):
    if options.external_memory_system and (options.caches or options.l2cache):
        print("External caches and internal caches are exclusive options.\n")
        sys.exit(1)

    if options.external_memory_system:
        ExternalCache = ExternalCacheFactory(options.external_memory_system)

    if options.cpu_type == "O3_ARM_v7a_3":
        try:
            import cores.arm.O3_ARM_v7a as core
        except:
            print("O3_ARM_v7a_3 is unavailable. Did you compile the O3 model?")
            sys.exit(1)

        dcache_class, icache_class, l2_cache_class, walk_cache_class = \
            core.O3_ARM_v7a_DCache, core.O3_ARM_v7a_ICache, \
            core.O3_ARM_v7aL2, \
            core.O3_ARM_v7aWalkCache
    elif options.cpu_type == "HPI":
        try:
            import cores.arm.HPI as core
        except:
            print("HPI is unavailable.")
            sys.exit(1)

        dcache_class, icache_class, l2_cache_class, walk_cache_class = \
            core.HPI_DCache, core.HPI_ICache, core.HPI_L2, core.HPI_WalkCache
    else:
        dcache_class, icache_class, l2_cache_class, walk_cache_class = \
            L1_DCache, L1_ICache, L2Cache, None

        if buildEnv['TARGET_ISA'] == 'x86':
            walk_cache_class = PageTableWalkerCache

    # Set the cache line size of the system
    system.cache_line_size = options.cacheline_size

    # If elastic trace generation is enabled, make sure the memory system is
    # minimal so that compute delays do not include memory access latencies.
    # Configure the compulsory L1 caches for the O3CPU, do not configure
    # any more caches.
    if options.l2cache and options.elastic_trace_en:
        fatal("When elastic trace is enabled, do not configure L2 caches.")

    if options.l2cache:
        # Provide a clock for the L2 and the L1-to-L2 bus here as they
        # are not connected using addTwoLevelCacheHierarchy. Use the
        # same clock as the CPUs.
        system.l2 = l2_cache_class(clk_domain=system.cpu_clk_domain,
                                   size=options.l2_size,
                                   assoc=options.l2_assoc)

        system.tol2bus = L2XBar(clk_domain = system.cpu_clk_domain)
        system.l2.cpu_side = system.tol2bus.master
        system.l2.mem_side = system.membus.slave
        if options.l2_hwp_type:
            hwpClass = HWPConfig.get(options.l2_hwp_type)
            if system.l2.prefetcher != "Null":
                print("Warning: l2-hwp-type is set (", hwpClass, "), but",
                      "the current l2 has a default Hardware Prefetcher",
                      "of type", type(system.l2.prefetcher), ", using the",
                      "specified by the flag option.")
            system.l2.prefetcher = hwpClass()

    if options.memchecker:
        system.memchecker = MemChecker()

    for i in range(options.num_cpus):
        if options.caches:
            icache = icache_class(size=options.l1i_size,
                                  assoc=options.l1i_assoc)
            dcache = dcache_class(size=options.l1d_size,
                                  assoc=options.l1d_assoc)

            # If we have a walker cache specified, instantiate two
            # instances here
            if walk_cache_class:
                iwalkcache = walk_cache_class()
                dwalkcache = walk_cache_class()
            else:
                iwalkcache = None
                dwalkcache = None

            if options.memchecker:
                dcache_mon = MemCheckerMonitor(warn_only=True)
                dcache_real = dcache

                # Do not pass the memchecker into the constructor of
                # MemCheckerMonitor, as it would create a copy; we require
                # exactly one MemChecker instance.
                dcache_mon.memchecker = system.memchecker

                # Connect monitor
                dcache_mon.mem_side = dcache.cpu_side

                # Let CPU connect to monitors
                dcache = dcache_mon

            if options.l1d_hwp_type:
                hwpClass = HWPConfig.get(options.l1d_hwp_type)
                if dcache.prefetcher != m5.params.NULL:
                    print("Warning: l1d-hwp-type is set (", hwpClass, "), but",
                          "the current l1d has a default Hardware Prefetcher",
                          "of type", type(dcache.prefetcher), ", using the",
                          "specified by the flag option.")
                dcache.prefetcher = hwpClass()

            if options.l1i_hwp_type:
                hwpClass = HWPConfig.get(options.l1i_hwp_type)
                if icache.prefetcher != m5.params.NULL:
                    print("Warning: l1i-hwp-type is set (", hwpClass, "), but",
                          "the current l1i has a default Hardware Prefetcher",
                          "of type", type(icache.prefetcher), ", using the",
                          "specified by the flag option.")
                icache.prefetcher = hwpClass()

            # When connecting the caches, the clock is also inherited
            # from the CPU in question
            system.cpu[i].addPrivateSplitL1Caches(icache, dcache,
                                                  iwalkcache, dwalkcache)

            if options.memchecker:
                # The mem_side ports of the caches haven't been connected yet.
                # Make sure connectAllPorts connects the right objects.
                system.cpu[i].dcache = dcache_real
                system.cpu[i].dcache_mon = dcache_mon

        elif options.external_memory_system:
            # These port names are presented to whatever 'external' system
            # gem5 is connecting to. Its configuration will likely depend
            # on these names. For simplicity, we would advise configuring
            # it to use this naming scheme; if this isn't possible, change
            # the names below.
            if buildEnv['TARGET_ISA'] in ['x86', 'arm']:
                system.cpu[i].addPrivateSplitL1Caches(
                        ExternalCache("cpu%d.icache" % i),
                        ExternalCache("cpu%d.dcache" % i),
                        ExternalCache("cpu%d.itb_walker_cache" % i),
                        ExternalCache("cpu%d.dtb_walker_cache" % i))
            else:
                system.cpu[i].addPrivateSplitL1Caches(
                        ExternalCache("cpu%d.icache" % i),
                        ExternalCache("cpu%d.dcache" % i))

        system.cpu[i].createInterruptController()
        if options.l2cache:
            system.cpu[i].connectAllPorts(system.tol2bus, system.membus)
        elif options.external_memory_system:
            system.cpu[i].connectUncachedPorts(system.membus)
        else:
            system.cpu[i].connectAllPorts(system.membus)

    return system

# ExternalSlave provides a "port", but when that port connects to a cache,
# the connecting CPU SimObject wants to refer to its "cpu_side".
# The 'ExternalCache' class provides this adaptation by rewriting the name,
# eliminating distracting changes elsewhere in the config code.
class ExternalCache(ExternalSlave):
    def __getattr__(cls, attr):
        if (attr == "cpu_side"):
            attr = "port"
        return super(ExternalSlave, cls).__getattr__(attr)

    def __setattr__(cls, attr, value):
        if (attr == "cpu_side"):
            attr = "port"
        return super(ExternalSlave, cls).__setattr__(attr, value)

def ExternalCacheFactory(port_type):
    def make(name):
        return ExternalCache(port_data=name, port_type=port_type,
                             addr_ranges=[AllMemory])
    return make
[ [ [ 2263, 2277 ] ], [ [ 2301, 2316 ] ], [ [ 2325, 2327 ], [ 6586, 6588 ], [ 7085, 7087 ] ], [ [ 2351, 2352 ] ], [ [ 2373, 2374 ], [ 9571, 9584 ], [ 2600, 2603 ], [ 2942, 2945 ], [ 3319, 3322 ], [ 3578, 3587 ], [ 3589, 3598 ], [ 3600, 3607 ], [ 3626, 3634 ], [ 3690, 3710 ], [ 4120, 4125 ], [ 4598, 4604 ], [ 5260, 5270 ], [ 5948, 5965 ], [ 8377, 8385 ], [ 9699, 9712 ], [ 9857, 9870 ], [ 10068, 10077 ] ], [ [ 2389, 2398 ], [ 4796, 4805 ], [ 6510, 6519 ], [ 7009, 7018 ] ], [ [ 2404, 2416 ] ], [ [ 9557, 9570 ], [ 9975, 9988 ] ], [ [ 9907, 9927 ], [ 2676, 2696 ] ] ]
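Judging from the spans above, each def_use_chains entry appears to be a chain of [start, end) character offsets into the record's code string: the first pair marks a definition and the remaining pairs mark its uses (e.g. [2389, 2398] lines up with HWPConfig in the imports, and its three uses line up with the HWPConfig.get calls). A minimal decoding sketch under that assumption; the record variable and field names simply mirror the two columns shown here, and chain_to_names is a hypothetical helper, not part of the dataset:

def chain_to_names(code, chain):
    # chain: [[def_start, def_end], [use_start, use_end], ...]
    # Slice the code string at each [start, end) span to recover the
    # identifier text: names[0] is the definition, names[1:] its uses.
    return [code[start:end] for start, end in chain]

# Hypothetical usage against one record of this dataset:
# record = {"code": "...", "def_use_chains": [[[2389, 2398], [4796, 4805]], ...]}
# for chain in record["def_use_chains"]:
#     names = chain_to_names(record["code"], chain)
#     print("def:", names[0], "uses:", names[1:])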