import logging
from typing import Optional, Any
from opentrons import types
from opentrons.calibration_storage import get
from opentrons.calibration_storage.types import TipLengthCalNotFound
from opentrons.hardware_control.dev_types import PipetteDict
from opentrons.protocol_api.labware import Labware, Well
from opentrons.protocols.api_support.types import APIVersion
from opentrons_shared_data.protocol.dev_types import LiquidHandlingCommand, \
BlowoutLocation
def validate_blowout_location(
api_version: APIVersion,
liquid_handling_command: LiquidHandlingCommand,
blowout_location: Optional[Any]) -> None:
"""Validate the blowout location."""
if blowout_location and api_version < APIVersion(2, 8):
        raise ValueError(
            'Cannot specify blowout location when using API'
            f' version below 2.8; current version is {api_version}')
elif liquid_handling_command == 'consolidate' \
and blowout_location == 'source well':
raise ValueError(
"blowout location for consolidate cannot be source well")
elif liquid_handling_command == 'distribute' \
and blowout_location == 'destination well':
raise ValueError(
"blowout location for distribute cannot be destination well")
elif liquid_handling_command == 'transfer' and \
blowout_location and \
blowout_location not in \
[location.value for location in BlowoutLocation]:
        raise ValueError(
            "blowout location should be either 'source well', "
            "'destination well', or 'trash'"
            f" but it is {blowout_location}")
def tip_length_for(pipette: PipetteDict, tiprack: Labware) -> float:
""" Get the tip length, including overlap, for a tip from this rack """
def _build_length_from_overlap() -> float:
tip_overlap = pipette['tip_overlap'].get(
tiprack.uri,
pipette['tip_overlap']['default'])
tip_length = tiprack.tip_length
return tip_length - tip_overlap
try:
return get.load_tip_length_calibration(
pipette['pipette_id'],
tiprack._implementation.get_definition()
).tip_length
except TipLengthCalNotFound:
return _build_length_from_overlap()
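# A rough worked example of the fallback path above (made-up numbers): with a
# tiprack tip_length of 50 mm and a pipette default tip_overlap of 8.2 mm,
# tip_length_for returns 50 - 8.2 == 41.8 mm when no tip-length calibration
# is stored for this pipette/tiprack pair.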
VALID_PIP_TIPRACK_VOL = {
'p10': [10, 20],
'p20': [10, 20],
'p50': [200, 300],
'p300': [200, 300],
'p1000': [1000]
}
def validate_tiprack(
instrument_name: str,
tiprack: Labware,
log: logging.Logger) -> None:
"""Validate a tiprack logging a warning message."""
# TODO AA 2020-06-24 - we should instead add the acceptable Opentrons
# tipracks to the pipette as a refactor
if tiprack._implementation.get_definition()['namespace'] \
== 'opentrons':
tiprack_vol = tiprack.wells()[0].max_volume
valid_vols = VALID_PIP_TIPRACK_VOL[instrument_name.split('_')[0]]
if tiprack_vol not in valid_vols:
log.warning(
f'The pipette {instrument_name} and its tiprack '
f'{tiprack.load_name} in slot {tiprack.parent} appear to '
'be mismatched. Please check your protocol before running '
'on the robot.')
def determine_drop_target(
api_version: APIVersion,
location: Well,
return_height: float,
        version_breakpoint: Optional[APIVersion] = None) -> types.Location:
"""Determine the drop target based on well and api version."""
version_breakpoint = version_breakpoint or APIVersion(2, 2)
if api_version < version_breakpoint:
bot = location.bottom()
return types.Location(
point=bot.point._replace(z=bot.point.z + 10),
labware=location)
else:
tr = location.parent
assert tr.is_tiprack
z_height = return_height * tr.tip_length
return location.top(-z_height)
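# A worked sketch of the drop-target math above (made-up numbers): with
# return_height = 0.5 and a tiprack tip_length of 50 mm, the target is
# location.top(-25.0), i.e. 25 mm below the top of the tip well; on API
# versions before the 2.2 breakpoint it is instead 10 mm above the well bottom.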
def validate_can_aspirate(location: types.Location) -> None:
""" Can one aspirate on the given `location` or not? This method is
pretty basic and will probably remain so (?) as the future holds neat
ambitions for how validation is implemented. And as robots become more
intelligent more rigorous testing will be possible
Args:
location: target for aspiration
Raises:
RuntimeError:
"""
if _is_tiprack(location):
raise RuntimeError("Cannot aspirate a tiprack")
def validate_can_dispense(location: types.Location) -> None:
""" Can one dispense to the given `location` or not? This method is
pretty basic and will probably remain so (?) as the future holds neat
ambitions for how validation is implemented. And as robots become more
intelligent more rigorous testing will be possible
Args:
location: target for dispense
Raises:
RuntimeError:
"""
if _is_tiprack(location):
raise RuntimeError("Cannot dispense to a tiprack")
def _is_tiprack(location: types.Location) -> bool:
labware = location.labware.as_labware()
return labware.parent and labware.parent.is_tiprack
from django.contrib import admin
from .models import CookiePageText, TOSPageText, StatutPageText
# Register your models here.
class CookieAdmin(admin.ModelAdmin):
pass
class TOSPageTextAdmin(admin.ModelAdmin):
pass
class StatutPageTextAdmin(admin.ModelAdmin):
pass
admin.site.register(CookiePageText, CookieAdmin)
admin.site.register(TOSPageText, TOSPageTextAdmin)
admin.site.register(StatutPageText, StatutPageTextAdmin)
"""
We want to simplify the operations for pandas dataframes assuming we are using timeseries as the main objects.
When we have multiple timeseries, we will:
1) calculate joint index using df_index()
2) reindex each timeseries to the joint index
We then need to worry about multiple columns if there are. If none, each timeseries will be considered as pd.Series
If there are multiple columns, we will perform the calculations columns by columns.
"""
from pyg_base._types import is_df, is_str, is_num, is_tss, is_int, is_arr, is_ts, is_arrs, is_tuples, is_pd
from pyg_base._dictable import dictable
from pyg_base._as_list import as_list
from pyg_base._zip import zipper
from pyg_base._reducer import reducing, reducer
from pyg_base._decorators import wrapper
from pyg_base._loop import loop
from pyg_base._dates import dt
import pandas as pd
import numpy as np
from copy import copy
import inspect
import datetime
from operator import add, mul
__all__ = ['df_fillna', 'df_index', 'df_reindex', 'df_columns', 'presync', 'np_reindex', 'nona', 'df_slice', 'df_unslice', 'min_', 'max_', 'add_', 'mul_', 'sub_', 'div_', 'pow_']
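# A minimal sketch of the two-step workflow from the module docstring
# (drange is pyg's date-range helper, as used in the doctests below):
#
# >>> ts1 = pd.Series([1., 2., 3.], drange(-2))
# >>> ts2 = pd.Series([4., 5., 6.], drange(-3, -1))
# >>> joint = df_index([ts1, ts2], 'inner')    # step 1: joint index
# >>> synced = df_reindex([ts1, ts2], joint)   # step 2: reindex to it
# >>> assert len(synced[0]) == len(synced[1]) == 2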
def _list(values):
"""
>>> assert _list([1,2,[3,4,5,[6,7]],dict(a =[8,9], b=[10,[11,12]])]) == [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12]
>>> assert _list(1) == [1]
>>> assert _list(dict(a=1, b=2)) == [1,2]
"""
if isinstance(values, list):
return sum([_list(df) for df in values], [])
elif isinstance(values, dict):
return _list(list(values.values()))
else:
return [values]
@loop(list, tuple, dict)
def _index(ts):
if isinstance(ts, pd.Index):
return ts
elif is_pd(ts):
return ts.index
elif is_arr(ts):
return len(ts)
else:
raise ValueError('did not provide an index')
def _df_index(indexes, index):
if len(indexes) > 0:
        if is_str(index):
            if index[0].lower() == 'i':  # 'i' for inner
                return reducing('intersection')(indexes)
            elif index[0].lower() == 'o':  # 'o' for outer
                return reducing('union')(indexes)
            elif index[0].lower() == 'l':  # 'l' for left
                return indexes[0]
            elif index[0].lower() == 'r':  # 'r' for right
                return indexes[-1]
else:
return _index(index)
else:
return None
def _np_index(indexes, index):
if len(indexes) > 0:
        if index[0].lower() == 'i':  # 'i' for inner
            return min(indexes)
        elif index[0].lower() == 'o':  # 'o' for outer
            return max(indexes)
        elif index[0].lower() == 'l':  # 'l' for left
            return indexes[0]
        elif index[0].lower() == 'r':  # 'r' for right
            return indexes[-1]
else:
return None
def df_index(seq, index = 'inner'):
"""
Determines a joint index of multiple timeseries objects.
:Parameters:
----------------
    seq : sequence whose index needs to be determined
        a (possibly nested) sequence of timeseries/non-timeseries objects within lists/dicts
index : str, optional
method to determine the index. The default is 'inner'.
:Returns:
-------
pd.Index
The joint index.
:Example:
---------
>>> tss = [pd.Series(np.random.normal(0,1,10), drange(-i, 9-i)) for i in range(5)]
>>> more_tss_as_dict = dict(zip('abcde',[pd.Series(np.random.normal(0,1,10), drange(-i, 9-i)) for i in range(5)]))
>>> res = df_index(tss + [more_tss_as_dict], 'inner')
>>> assert len(res) == 6
>>> res = df_index(more_tss_as_dict, 'outer')
>>> assert len(res) == 14
"""
listed = _list(seq)
indexes = [ts.index for ts in listed if is_pd(ts)]
if len(indexes):
return _df_index(indexes, index)
arrs = [len(ts) for ts in listed if is_arr(ts)]
if len(arrs):
return _np_index(arrs, index)
else:
return None
def df_columns(seq, index = 'inner'):
"""
returns the columns of the joint object
:Example:
---------
>>> a = pd.DataFrame(np.random.normal(0,1,(100,5)), drange(-99), list('abcde'))
>>> b = pd.DataFrame(np.random.normal(0,1,(100,5)), drange(-99), list('bcdef'))
>>> assert list(df_columns([a,b])) == list('bcde')
>>> assert list(df_columns([a,b], 'oj')) == list('abcdef')
>>> assert list(df_columns([a,b], 'lj')) == list('abcde')
>>> assert list(df_columns([a,b], 'rj')) == list('bcdef')
:Parameters:
----------
    seq : sequence of dataframes
        a (possibly nested) sequence of dataframes whose columns are to be joined
    index : str, optional
        how to join the columns. The default is 'inner'.
:Returns:
-------
pd.Index
list of columns.
"""
listed = _list(seq)
    indexes = [ts.columns for ts in listed if is_df(ts) and ts.shape[1] > 1 and len(set(ts.columns)) == ts.shape[1]]  # dataframes with non-unique columns are treated like arrays
if len(indexes):
return _df_index(indexes, index)
arrs = [ts.shape[1] for ts in listed if (is_arr(ts) or is_df(ts)) and len(ts.shape)>1 and ts.shape[1]>1]
if len(arrs):
return _np_index(arrs, index)
return None
@loop(list, tuple, dict)
def _df_fillna(df, method = None, axis = 0, limit = None):
methods = as_list(method)
if len(methods) == 0:
return df
if is_arr(df):
return df_fillna(pd.DataFrame(df) if len(df.shape)==2 else pd.Series(df), method, axis, limit).values
res = df
for m in methods:
if is_num(m):
res = res.fillna(value = m, axis = axis, limit = limit)
elif m in ['backfill', 'bfill', 'pad', 'ffill']:
res = res.fillna(method = m, axis = axis, limit = limit)
elif m in ('fnna', 'nona'):
nonan = ~np.isnan(res)
if len(res.shape)==2:
nonan = nonan.max(axis=1)
if m == 'fnna':
nonan = nonan[nonan.values]
if len(nonan):
res = res[nonan.index[0]:]
else:
res = res.iloc[:0]
elif m == 'nona':
res = res[nonan.values]
else:
if is_num(limit) and limit<0:
res = res.interpolate(method = m, axis = axis, limit = abs(limit),
limit_direction = 'backward')
else:
res = res.interpolate(method = m, axis = axis, limit = limit)
return res
def df_fillna(df, method = None, axis = 0, limit = None):
"""
    Equivalent to df.fillna() except it:
    - supports np.ndarray as well as dataframes
    - supports multiple methods of filling/interpolation
    - supports removal of nans from the start/all of the timeseries
    - supports action on multiple timeseries
:Parameters:
----------------
df : dataframe/numpy array
method : string, list of strings or None, optional
Either a fill method (bfill, ffill, pad)
        Or an interpolation method: 'linear', 'time', 'index', 'values', 'nearest', 'zero', 'slinear', 'quadratic', 'cubic', 'barycentric', 'krogh', 'spline', 'polynomial', 'from_derivatives', 'piecewise_polynomial', 'pchip', 'akima', 'cubicspline'
        Or 'fnna': removes all rows up to the first non-nan
        Or 'nona': removes all nans
axis : int, optional
axis. The default is 0.
    limit : int, optional
        when filling, how many consecutive nans get filled. The default is None (no limit)
:Example: method ffill or bfill
-----------------------------------------------
>>> from pyg import *; import numpy as np
>>> df = np.array([np.nan, 1., np.nan, 9, np.nan, 25])
>>> assert eq(df_fillna(df, 'ffill'), np.array([ np.nan, 1., 1., 9., 9., 25.]))
>>> assert eq(df_fillna(df, ['ffill','bfill']), np.array([ 1., 1., 1., 9., 9., 25.]))
>>> assert eq(df_fillna(df, ['ffill','bfill']), np.array([ 1., 1., 1., 9., 9., 25.]))
>>> df = np.array([np.nan, 1., np.nan, np.nan, np.nan, np.nan, np.nan, np.nan, 9, np.nan, 25])
>>> assert eq(df_fillna(df, 'ffill', limit = 2), np.array([np.nan, 1., 1., 1., np.nan, np.nan, np.nan, np.nan, 9., 9., 25.]))
df_fillna does not maintain state of latest 'prev' value: use ffill_ for that.
:Example: interpolation methods
-----------------------------------------------
>>> from pyg import *; import numpy as np
>>> df = np.array([np.nan, 1., np.nan, 9, np.nan, 25])
>>> assert eq(df_fillna(df, 'linear'), np.array([ np.nan, 1., 5., 9., 17., 25.]))
>>> assert eq(df_fillna(df, 'quadratic'), np.array([ np.nan, 1., 4., 9., 16., 25.]))
:Example: method = fnna and nona
---------------------------------------------
>>> from pyg import *; import numpy as np
>>> ts = np.array([np.nan] * 10 + [1.] * 10 + [np.nan])
>>> assert eq(df_fillna(ts, 'fnna'), np.array([1.]*10 + [np.nan]))
>>> assert eq(df_fillna(ts, 'nona'), np.array([1.]*10))
>>> assert len(df_fillna(np.array([np.nan]), 'nona')) == 0
>>> assert len(df_fillna(np.array([np.nan]), 'fnna')) == 0
:Returns:
-------
array/dataframe with nans removed/filled
"""
return _df_fillna(df, method = method, axis = axis, limit = limit)
@loop(dict, list, tuple)
def _nona(df, value = np.nan):
if np.isnan(value):
mask = np.isnan(df)
elif np.isinf(value):
mask = np.isinf(df)
else:
mask = df == value
if len(mask.shape) == 2:
mask = mask.min(axis=1) == 1
return df[~mask]
def nona(a, value = np.nan):
"""
removes rows that are entirely nan (or a specific other value)
:Parameters:
----------------
a : dataframe/ndarray
value : float, optional
value to be removed. The default is np.nan.
:Example:
----------
>>> from pyg import *
>>> a = np.array([1,np.nan,2,3])
>>> assert eq(nona(a), np.array([1,2,3]))
:Example: multiple columns
---------------------------
>>> a = np.array([[1,np.nan,2,np.nan], [np.nan, np.nan, np.nan, 3]]).T
>>> b = np.array([[1,2,np.nan], [np.nan, np.nan, 3]]).T ## 2nd row has nans across
>>> assert eq(nona(a), b)
"""
return _nona(a)
@loop(list, tuple, dict)
def _df_reindex(ts, index, method = None, limit = None):
methods = as_list(method)
if is_pd(ts):
if is_int(index):
raise ValueError('trying to reindex dataframe %s using numpy interval length %i'%(ts, index))
if len(methods) and methods[0] in ['backfill', 'bfill', 'pad', 'ffill']:
res = _nona(ts).reindex(index, method = methods[0], limit = limit)
res = _df_fillna(res, method = methods[1:], limit = limit)
else:
res = ts.reindex(index)
res = _df_fillna(res, method = method, limit = limit)
return res
elif is_arr(ts):
if isinstance(index, pd.Index):
if len(index) == len(ts):
return ts
else:
raise ValueError('trying to reindex numpy array %s using pandas index %s'%(ts, index))
elif is_int(index):
if index<len(ts):
res = ts[-index:]
elif index>len(ts):
shape = (index - len(ts),) + ts.shape[1:]
res = np.concatenate([np.full(shape, np.nan),ts])
else:
res = ts
return df_fillna(res, method = methods, limit = limit)
else:
return ts
else:
return ts
@loop(list, tuple, dict)
def _df_recolumn(ts, columns):
if columns is not None and is_df(ts) and ts.shape[1] > 1 and len(set(ts.columns)) == ts.shape[1]:
return pd.DataFrame({col: ts[col].values if col in ts.columns else np.nan for col in columns}, index = ts.index)
else:
return ts
def df_recolumn(ts, columns = None):
return _df_recolumn(ts, columns)
def np_reindex(ts, index, columns = None):
"""
    pyg assumes that when working with numpy arrays representing timeseries, you:
    - determine a global timestamp index
    - resample all timeseries to that index, then convert to numpy.array, possibly truncating leading nans
    - do the maths you need to do
    - once done working with numpy arrays, use np_reindex to reindex them back into a dataframe
:Example:
-------
>>> from pyg import *
>>> ts = np.array(np.random.normal(0,1,1000))
>>> index = pd.Index(drange(-1999))
>>> np_reindex(ts, index)
:Parameters:
----------------
ts : numpy array
index : pandas.Index
columns: list/array of columns names
:Returns:
----------
pd.DataFrame/pd.Series
"""
if is_pd(index):
index = index.index
if len(index)>len(ts):
index = index[-len(ts):]
elif len(index)<len(ts):
ts = ts[-len(index):]
res = pd.Series(ts, index) if len(ts.shape)<2 else pd.DataFrame(ts, index)
if columns is not None:
if is_df(columns):
columns = columns.columns
res.columns = columns
return res
def df_reindex(ts, index = None, method = None, limit = None):
"""
A slightly more general version of df.reindex(index)
:Parameters:
----------------
    ts : dataframe or numpy array (or list/dict of these)
timeseries to be reindexed
index : str, timeseries, pd.Index.
The new index
method : str, list of str, float, optional
various methods of handling nans are available. The default is None.
See df_fillna for a full list.
:Returns:
-------
    timeseries/np.ndarray (or list/dict of these)
        the reindexed timeseries.
:Example: index = inner/outer
-----------------------------
>>> tss = [pd.Series(np.random.normal(0,1,10), drange(-i, 9-i)) for i in range(5)]
>>> res = df_reindex(tss, 'inner')
>>> assert len(res[0]) == 6
>>> res = df_reindex(tss, 'outer')
>>> assert len(res[0]) == 14
:Example: index provided
-----------------------------
>>> tss = [pd.Series(np.random.normal(0,1,10), drange(-i, 9-i)) for i in range(5)]
>>> res = df_reindex(tss, tss[0])
>>> assert eq(res[0], tss[0])
>>> res = df_reindex(tss, tss[0].index)
>>> assert eq(res[0], tss[0])
"""
if index is None:
return ts
elif is_str(index):
index = df_index(ts, index)
elif is_ts(index):
index = index.index
elif is_arr(index):
index = pd.Index(index)
return _df_reindex(ts, index = index, method = method, limit = limit)
def df_concat(objs, columns = None, axis=1, join = 'outer'):
"""
simple concatenator,
    - defaults to concatenating by date (for timeseries)
- supports columns renaming
:Parameters:
----------
objs : list/dict
collection of timeseries
columns : str/list
Names of new columns. The default is None.
axis : int, optional
axis to merge. The default is 1.
join : str, optional
join method inner/outer, see pd.concat. The default is 'outer'.
:Returns:
-------
res : pd.DataFrame
joined dataframe
:Example:
---------
>>> objs = [pd.Series([1,2,3], [4,5,6]), pd.Series([3,4,5], [1,2,4])]
>>> columns = ['a', 'b'];
>>> axis = 1; join = 'outer'
>>> res = df_concat(objs, columns)
>>> res
>>> a b
>>> 1 NaN 3.0
>>> 2 NaN 4.0
>>> 4 1.0 5.0
>>> 5 2.0 NaN
>>> 6 3.0 NaN
    >>> res = df_concat(res, dict(a = 'x', b = 'y'))
    >>> res
>>> x y
>>> 1 NaN 3.0
>>> 2 NaN 4.0
>>> 4 1.0 5.0
>>> 5 2.0 NaN
>>> 6 3.0 NaN
"""
if isinstance(objs, dict):
columns = list(objs.keys())
objs = list(objs.values())
if isinstance(objs, list):
df_objs = [o for o in objs if is_pd(o)]
res = pd.concat(df_objs, axis = axis, join = join)
if len(df_objs) < len(objs):
df_objs = [o if is_pd(o) else pd.Series(o, res.index) for o in objs]
res = pd.concat(df_objs, axis = axis, join = join)
elif isinstance(objs, pd.DataFrame):
res = objs.copy() if columns is not None else objs
if columns is not None:
if isinstance(columns, list):
res.columns = columns
else:
res = res.rename(columns = columns)
return res
@loop(list, dict, tuple)
def _df_column(ts, column, i = None, n = None):
"""
This is mostly a helper function to help us loop through multiple columns.
Function grabs a column from a dataframe/2d array
:Parameters:
----------
    ts : dataframe
the original dataframe or 2-d numpy array
column : str
name of the column to grab.
i : int, optional
Can grab the column using its index. The default is None.
n : int, optional
asserting the number of columns, ts.shape[1]. The default is None.
:Returns:
-------
a series or a 1-d numpy array
"""
if is_df(ts):
if ts.shape[1] == 1:
return ts[ts.columns[0]]
elif column in ts.columns:
return ts[column]
elif column is None and i is not None:
if len(set(ts.columns)) == ts.shape[1]: #unique columns, don't call me using i
raise ValueError('trying to grab %ith column from a dataframe with proper columns: %s'%(i, ts.columns))
elif n is not None and ts.shape[1]!=n:
raise ValueError('trying to grab %ith column and asserting must have %i columns but have %i'%(i, n, ts.shape[1]))
else:
if i<ts.shape[1]:
return ts.iloc[:,i]
else:
return np.nan
else:
return np.nan
elif is_arr(ts) and len(ts.shape) == 2:
if ts.shape[1] == 1:
return ts.T[0]
elif i is not None:
if n is not None and ts.shape[1]!=n:
raise ValueError('trying to grab %ith column and asserting must have %i columns but have %i'%(i, n, ts.shape[1]))
elif i<ts.shape[1]:
return ts.T[i]
else:
return np.nan
else:
return ts
else:
return ts
def df_column(ts, column, i = None, n = None):
"""
This is mostly a helper function to help us loop through multiple columns.
Function grabs a column from a dataframe/2d array
:Parameters:
----------
    ts : dataframe
the original dataframe or 2-d numpy array
column : str
name of the column to grab.
i : int, optional
Can grab the column using its index. The default is None.
n : int, optional
asserting the number of columns, ts.shape[1]. The default is None.
:Returns:
-------
a series or a 1-d numpy array
"""
return _df_column(ts = ts, column = column, i = i, n = n)
def _convert(res, columns):
"""
We run a result per each column, now we want to convert it back to objects
----------
res : dict
results run per each column.
"""
values = list(res.values())
if is_tss(values):
return pd.DataFrame(res)
elif is_arrs(values) and is_int(columns):
return np.array(values).T
elif is_tuples(values):
return tuple([_convert(dict(zip(res.keys(), row)), columns) for row in zipper(*values)])
else:
return np.array(values) if is_int(columns) else pd.Series(res)
def df_sync(dfs, join = 'ij', method = None, columns = 'ij'):
"""
df_sync performs a sync of multiple dataframes
:Parameters:
----------
dfs : list or dict of timeseries
dataframes to be synched
join : str, optional
index join method. The default is 'ij'.
method : str/float, optional
how the nan's are to be filled once reindexing occurs. The default is None.
columns : str, optional
how to sync multi-column timeseries. The default is 'ij'.
:Example:
-------
>>> a = pd.DataFrame(np.random.normal(0,1,(100,5)), drange(-100,-1), list('abcde'))
>>> b = pd.DataFrame(np.random.normal(0,1,(100,5)), drange(-99), list('bcdef'))
>>> c = 'not a timeseries'
>>> d = pd.DataFrame(np.random.normal(0,1,(100,1)), drange(-98,1), ['single_column_df'])
>>> s = pd.Series(np.random.normal(0,1,105), drange(-104))
:Example: inner join on index and columns
--------------------------------
>>> dfs = [a,b,c,d,s]
>>> join = 'ij'; method = None; columns = 'ij'
>>> res = df_sync(dfs, 'ij')
>>> assert len(res[0]) == len(res[1]) == len(res[-1]) == 98
>>> assert res[2] == 'not a timeseries'
>>> assert list(res[0].columns) == list('bcde')
:Example: outer join on index and inner join on columns
--------------------------------
>>> res = df_sync(dfs, join = 'oj')
>>> assert len(res[0]) == len(res[1]) == len(res[-1]) == 106; assert res[2] == 'not a timeseries'
>>> assert list(res[0].columns) == list('bcde')
>>> res = df_sync(dfs, join = 'oj', method = 1)
>>> assert res[0].iloc[0].sum() == 4
:Example: outer join on index and columns
-------------------------------------------
>>> res = df_sync(dfs, join = 'oj', method = 1, columns = 'oj')
>>> assert res[0].iloc[0].sum() == 5
>>> assert list(res[0].columns) == list('abcdef')
>>> assert list(res[-2].columns) == ['single_column_df'] # single column unaffected
:Example: synching of dict rather than a list
-------------------------------------------
>>> dfs = Dict(a = a, b = b, c = c, d = d, s = s)
>>> res = df_sync(dfs, join = 'oj', method = 1, columns = 'oj')
>>> assert res.c == 'not a timeseries'
>>> assert res.a.shape == (106,6)
"""
if isinstance(dfs, dict):
values = list(dfs.values())
elif isinstance(dfs, (list, tuple)):
values = list(dfs)
else:
return dfs
listed = _list(values)
tss = [ts for ts in listed if is_ts(ts)]
index = df_index(listed, join)
dfs = df_reindex(dfs, index, method = method)
### now we do the columns
if columns is False or columns is None:
return dfs
else:
cols = df_columns(tss, columns)
dfs = df_recolumn(dfs, cols)
return dfs
class presync(wrapper):
"""
Much of timeseries analysis in Pandas is spent aligning multiple timeseries before feeding them into a function.
    presync allows easy presynching of all parameters of a function.
:Parameters:
----------
function : callable, optional
function to be presynched. The default is None.
index : str, optional
index join policy. The default is 'inner'.
method : str/int/list of these, optional
method of nan handling. The default is None.
columns : str, optional
columns join policy. The default is 'inner'.
default : float, optional
value when no data is available. The default is np.nan.
:Returns:
-------
presynch-decorated function
:Example:
-------
>>> from pyg import *
>>> x = pd.Series([1,2,3,4], drange(-3))
>>> y = pd.Series([1,2,3,4], drange(-4,-1))
>>> z = pd.DataFrame([[1,2],[3,4]], drange(-3,-2), ['a','b'])
>>> addition = lambda a, b: a+b
#We get some nonsensical results:
>>> assert list(addition(x,z).columns) == list(x.index) + ['a', 'b']
#But:
>>> assert list(presync(addition)(x,z).columns) == ['a', 'b']
>>> res = presync(addition, index='outer', method = 'ffill')(x,z)
>>> assert eq(res.a.values, np.array([2,5,6,7]))
:Example 2: alignment works for parameters 'buried' within...
-------------------------------------------------------
>>> function = lambda a, b: a['x'] + a['y'] + b
>>> f = presync(function, 'outer', method = 'ffill')
>>> res = f(dict(x = x, y = y), b = z)
>>> assert eq(res, pd.DataFrame(dict(a = [np.nan, 4, 8, 10, 11], b = [np.nan, 5, 9, 11, 12]), index = drange(-4)))
:Example 3: alignment of numpy arrays
-------------------------------------
>>> addition = lambda a, b: a+b
>>> a = presync(addition)
>>> assert eq(a(pd.Series([1,2,3,4], drange(-3)), np.array([[1,2,3,4]]).T), pd.Series([2,4,6,8], drange(-3)))
>>> assert eq(a(pd.Series([1,2,3,4], drange(-3)), np.array([1,2,3,4])), pd.Series([2,4,6,8], drange(-3)))
>>> assert eq(a(pd.Series([1,2,3,4], drange(-3)), np.array([[1,2,3,4],[5,6,7,8]]).T), pd.DataFrame({0:[2,4,6,8], 1:[6,8,10,12]}, drange(-3)))
>>> assert eq(a(np.array([1,2,3,4]), np.array([[1,2,3,4]]).T), np.array([2,4,6,8]))
:Example 4: inner join alignment of columns in dataframes by default
---------------------------------------------------------------------
>>> x = pd.DataFrame({'a':[2,4,6,8], 'b':[6,8,10,12.]}, drange(-3))
>>> y = pd.DataFrame({'wrong':[2,4,6,8], 'columns':[6,8,10,12]}, drange(-3))
>>> assert len(a(x,y)) == 0
>>> y = pd.DataFrame({'a':[2,4,6,8], 'other':[6,8,10,12.]}, drange(-3))
>>> assert eq(a(x,y),x[['a']]*2)
>>> y = pd.DataFrame({'a':[2,4,6,8], 'b':[6,8,10,12.]}, drange(-3))
>>> assert eq(a(x,y),x*2)
>>> y = pd.DataFrame({'column name for a single column dataframe is ignored':[1,1,1,1]}, drange(-3))
>>> assert eq(a(x,y),x+1)
>>> a = presync(addition, columns = 'outer')
>>> y = pd.DataFrame({'other':[2,4,6,8], 'a':[6,8,10,12]}, drange(-3))
>>> assert sorted(a(x,y).columns) == ['a','b','other']
    :Example 5: ffilling, bfilling
------------------------------
>>> x = pd.Series([1.,np.nan,3.,4.], drange(-3))
>>> y = pd.Series([1.,np.nan,3.,4.], drange(-4,-1))
>>> assert eq(a(x,y), pd.Series([np.nan, np.nan,7], drange(-3,-1)))
but, we provide easy conversion of internal parameters of presync:
>>> assert eq(a.ffill(x,y), pd.Series([2,4,7], drange(-3,-1)))
>>> assert eq(a.bfill(x,y), pd.Series([4,6,7], drange(-3,-1)))
>>> assert eq(a.oj(x,y), pd.Series([np.nan, np.nan, np.nan, 7, np.nan], drange(-4)))
>>> assert eq(a.oj.ffill(x,y), pd.Series([np.nan, 2, 4, 7, 8], drange(-4)))
    :Example 6: indexing to a specific index
----------------------------------------
>>> index = pd.Index([dt(-3), dt(-1)])
>>> a = presync(addition, index = index)
>>> x = pd.Series([1.,np.nan,3.,4.], drange(-3))
>>> y = pd.Series([1.,np.nan,3.,4.], drange(-4,-1))
>>> assert eq(a(x,y), pd.Series([np.nan, 7], index))
    :Example 7: returning complicated stuff
----------------------------------------
>>> from pyg import *
>>> a = pd.DataFrame(np.random.normal(0,1,(100,10)), drange(-99))
>>> b = pd.DataFrame(np.random.normal(0,1,(100,10)), drange(-99))
>>> def f(a, b):
>>> return (a*b, ts_sum(a), ts_sum(b))
>>> old = f(a,b)
>>> self = presync(f)
>>> args = (); kwargs = dict(a = a, b = b)
>>> new = self(*args, **kwargs)
>>> assert eq(new, old)
"""
def __init__(self, function = None, index = 'inner', method = None, columns = 'inner', default = np.nan):
super(presync, self).__init__(function = function, index = index, method = method, columns = columns , default = default)
@property
def ij(self):
return copy(self) + dict(index = 'inner')
@property
def oj(self):
return self + dict(index = 'outer')
@property
def lj(self):
return self + dict(index = 'left')
@property
def rj(self):
return self + dict(index = 'right')
@property
def ffill(self):
return copy(self) + dict(method = 'ffill')
@property
def bfill(self):
return self + dict(method = 'bfill')
def wrapped(self, *args, **kwargs):
_idx = kwargs.pop('join', self.index)
_method = kwargs.pop('method', self.method)
_columns = kwargs.pop('columns', self.columns)
values = list(args) + list(kwargs.values())
listed = _list(values)
tss = [ts for ts in listed if is_ts(ts)]
callargs = inspect.getcallargs(self.function, *args, **kwargs)
if is_str(_idx) and _idx in callargs:
index = _index(callargs[_idx])
else:
index = df_index(listed, _idx)
args_= df_reindex(args, index, method = _method)
kwargs_= df_reindex(kwargs, index, method = _method)
### now we do the columns
if _columns is False:
return self.function(*args_, **kwargs_)
else:
cols = [tuple(ts.columns) for ts in tss if is_df(ts) and ts.shape[1]>1]
if len(set(cols))==1: # special case where all 2-d dataframes have same column headers
columns = cols[0]
n = len(columns)
res = {column: self.function(*df_column(args_,column = column, i = i, n = n), **df_column(kwargs_, column=column, i = i, n = n)) for i, column in enumerate(columns)}
else:
columns = df_columns(listed, _columns)
if is_int(columns):
res = {i: self.function(*df_column(args_, column = None, i = i), **df_column(kwargs_, column=None, i = i)) for i in range(columns)}
elif columns is None:
return self.function(*df_column(args_, column = None), **df_column(kwargs_, column = None))
else:
columns = list(columns) if isinstance(columns, pd.Index) else as_list(columns)
columns = sorted(columns)
res = {column: self.function(*df_column(args_,column = column), **df_column(kwargs_, column=column)) for column in columns}
converted = _convert(res, columns)
return converted
@presync
def _div_(a, b):
"""
division of a by b supporting presynching (inner join) of timeseries
"""
return a/b
@presync
def _sub_(a, b):
"""
subtraction of b from a supporting presynching (inner join) of timeseries
"""
return a-b
@presync
def _add_(a, b):
"""
addition of a and b supporting presynching (inner join) of timeseries
"""
return a + b
@presync
def _mul_(a, b):
"""
multiplication of b and a supporting presynching (inner join) of timeseries
"""
return a * b
@presync
def _pow_(a, b):
"""
equivalent to a**b supporting presynching (inner join) of timeseries
"""
return a**b
def add_(a, b = None, join = 'ij', method = None, columns = 'ij'):
"""
a = pd.Series([1,2,3], drange(-2))
b = pd.Series([1,2,3], drange(-3,-1))
add_(a,b, 'oj', method = 0)
addition of a and b supporting presynching (inner join) of timeseries
"""
dfs = as_list(a) + as_list(b)
f = lambda a, b: _add_(a, b, join = join, method = method, columns = columns)
return reducer(f, dfs)
def mul_(a, b = None, join = 'ij', method = None, columns = 'ij'):
"""
multiplication of a and b supporting presynching (inner join) of timeseries
mul_(a,b,join = 'oj', method = 'ffill')
cell(mul_, a = a, b = b, join = 'oj')()
"""
dfs = as_list(a) + as_list(b)
f = lambda a, b: _mul_(a, b, join = join, method = method, columns = columns)
return reducer(f, dfs)
def div_(a, b, join = 'ij', method = None, columns = 'ij'):
"""
division of a by b supporting presynching (inner join) of timeseries
"""
if isinstance(a, list):
a = mul_(a, join = join, method = method, columns = columns)
if isinstance(b, list):
b = mul_(b, join = join, method = method, columns = columns)
return _div_(a, b, join = join, method = method, columns = columns)
def sub_(a, b, join = 'ij', method = None, columns = 'ij'):
"""
subtraction of b from a supporting presynching (inner join) of timeseries
"""
if isinstance(a, list):
a = add_(a, join = join, method = method, columns = columns)
if isinstance(b, list):
b = add_(b, join = join, method = method, columns = columns)
return _sub_(a, b, join = join, method = method, columns = columns)
def pow_(a, b, join = 'ij', method = None, columns = 'ij'):
"""
equivalent to a**b supporting presynching (inner join) of timeseries
"""
return _pow_(a,b, join = join, method = method, columns = columns)
def min_(a, b = None, join = 'ij', method = None, columns = 'ij'):
"""
    equivalent to a reduced np.minimum operation supporting presynching of timeseries
"""
dfs = as_list(a) + as_list(b)
dfs = df_sync(dfs, join = join, method = method, columns = columns)
return reducer(np.minimum, dfs)
def max_(a, b = None, join = 'ij', method = None, columns = 'ij'):
"""
    equivalent to a reduced np.maximum operation supporting presynching of timeseries
"""
dfs = as_list(a) + as_list(b)
dfs = df_sync(dfs, join = join, method = method, columns = columns)
return reducer(np.maximum, dfs)
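# A short sketch of the reduced min_/max_ operations (in the doctest style
# used above):
#
# >>> a = pd.Series([1., 4., 2.], drange(-2))
# >>> b = pd.Series([3., 0., 5.], drange(-2))
# >>> assert eq(min_(a, b), pd.Series([1., 0., 2.], drange(-2)))
# >>> assert eq(max_(a, b), pd.Series([3., 4., 5.], drange(-2)))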
def _closed(oc):
if oc in '()oO':
return False
elif oc in '[]cC':
return True
else:
raise ValueError('not sure how to parse boundary %s'%oc)
def _df_slice(df, lb = None, ub = None, openclose = '[)'):
"""
    Performs a one-time slice of the dataframe. Does not stitch slices together.
    pandas slicing has two issues:
    1) it fails for timeseries quite a bit
    2) for timeseries, df[dt1:dt2] is closed-closed while for a normal dataframe df[lb:ub] is closed-open
"""
if isinstance(df, (pd.Index, pd.Series, pd.DataFrame)) and len(df)>0 and (ub is not None or lb is not None):
l,u = openclose if openclose else '[)'
l = _closed(l); u = _closed(u)
if is_ts(df):
lb = lb if lb is None or isinstance(lb, datetime.time) else dt(lb)
ub = ub if ub is None or isinstance(ub, datetime.time) else dt(ub)
if (l or lb is None) and (u or ub is None):
try:
return df[lb:ub]
except Exception:
pass
elif (l or lb is None) and (ub is None or not u):
try:
return df[lb:ub]
except Exception:
pass
if lb is not None:
index = df if isinstance(df, pd.Index) else df.index
if isinstance(lb, datetime.time):
index = index.time
df = df[index>=lb] if l else df[index>lb]
if ub is not None:
index = df if isinstance(df, pd.Index) else df.index
if isinstance(ub, datetime.time):
index = index.time
df = df[index<=ub] if u else df[index<ub]
return df
def df_slice(df, lb = None, ub = None, openclose = '(]', n = 1):
"""
slices a dataframe/series/index based on lower/upper bounds.
    If multiple timeseries are sliced at different times, they will then be stitched together.
:Parameters:
----------
df : dataframe
Either a single dataframe or a list of dataframes.
lb : single or multiple lower bounds
lower bounds to cut the data.
ub : single or multiple upper bounds
upper bounds to cut the data
openclose : 2-character string
defines how left/right boundary behave.
[,] or c : close
(,) or o : open
' ' : do not cut
:Returns:
-------
    filtered (and possibly stitched) timeseries
:Example: single timeseries filtering
---------
>>> df = pd.Series(np.random.normal(0,1,1000), drange(-999))
>>> df_slice(df, None, '-1m')
>>> df_slice(df, '-1m', None)
:Example: single timeseries, multiple filtering
---------
>>> df = pd.Series(np.random.normal(0,1,1000), drange(-999))
>>> lb = jan1 = drange(2018, None, '1y')
>>> ub = feb1 = drange(dt(2018,2,1), None, '1y')
>>> assert set(df_slice(df, jan1, feb1).index.month) == {1}
:Example: single timeseries time of day filtering
---------
>>> dates = drange(-5, 0, '5n')
>>> df = pd.Series(np.random.normal(0,1,12*24*5+1), dates)
>>> assert len(df_slice(df, None, datetime.time(hour = 10))) == 606
>>> assert len(df_slice(df, datetime.time(hour = 5), datetime.time(hour = 10))) == 300
>>> assert len(df_slice(df, lb = datetime.time(hour = 10), ub = datetime.time(hour = 5))) == len(dates) - 300
:Example: stitching together multiple future contracts for a continuous price
---------
>>> ub = drange(1980, 2000, '3m')
>>> df = [pd.Series(np.random.normal(0,1,1000), drange(-999, date)) for date in ub]
>>> df_slice(df, ub = ub)
:Example: stitching together multiple future contracts for a continuous price in front 5 contracts
---------
>>> ub = drange(1980, 2000, '3m')
>>> df = [pd.Series(np.random.normal(0,1,1000), drange(-999, date)) for date in ub]
>>> df_slice(df, ub = ub, n = 5).iloc[500:]
:Example: stitching together symbols
---------
>>> from pyg import *
>>> ub = drange(1980, 2000, '3m')
>>> df = loop(list)(dt2str)(ub)
>>> df_slice(df, ub = ub, n = 3)
"""
if isinstance(lb, tuple) and len(lb) == 2 and ub is None:
lb, ub = lb
if isinstance(ub, datetime.time) and isinstance(lb, datetime.time) and lb>ub:
pre = df_slice(df, None, ub)
post = df_slice(df, lb, None)
return pd.concat([pre, post]).sort_index()
if isinstance(df, list):
if isinstance(lb, list) and ub is None:
ub = lb[1:] + [None]
elif isinstance(ub, list) and lb is None:
lb = [None] + ub[:-1]
boundaries = sorted(set([date for date in lb + ub if date is not None]))
df = [d if is_pd(d) else pd.Series(d, boundaries) for d in df]
if n > 1:
df = [pd.concat(df[i: i+n], axis = 1) for i in range(len(df))]
for d in df:
d.columns = range(d.shape[1])
dfs = as_list(df)
dlu = zipper(dfs, lb, ub)
res = [_df_slice(d, lb = l, ub = u, openclose = openclose) for d, l, u in dlu]
if len(res) == 0:
return None
elif len(res) == 1:
return res[0]
elif isinstance(lb, list) and isinstance(ub, list):
res = pd.concat(res)
return res
def df_unslice(df, ub):
"""
    If we have a rolled multi-column timeseries, we may want to know which original timeseries each value is associated with.
    As long as you provide the stitching points, forming the upper bound of each original timeseries,
    df_unslice will return a dict from each upper bound to a single-column timeseries
:Example:
---------
>>> ub = drange(1980, 2000, '3m')
>>> dfs = [pd.Series(date.year * 100 + date.month, drange(-999, date)) for date in ub]
>>> df = df_slice(dfs, ub = ub, n = 10)
>>> df.iloc[700:-700:]
>>> 0 1 2 3 4 5 6 7 8 9
>>> 1979-03-08 198001.0 198004.0 198007.0 198010.0 198101.0 198104.0 198107.0 198110.0 NaN NaN
>>> 1979-03-09 198001.0 198004.0 198007.0 198010.0 198101.0 198104.0 198107.0 198110.0 NaN NaN
>>> 1979-03-10 198001.0 198004.0 198007.0 198010.0 198101.0 198104.0 198107.0 198110.0 NaN NaN
>>> 1979-03-11 198001.0 198004.0 198007.0 198010.0 198101.0 198104.0 198107.0 198110.0 NaN NaN
>>> 1979-03-12 198001.0 198004.0 198007.0 198010.0 198101.0 198104.0 198107.0 198110.0 NaN NaN
>>> ... ... ... ... ... ... ... ... .. ..
>>> 1998-01-27 199804.0 199807.0 199810.0 199901.0 199904.0 199907.0 199910.0 200001.0 NaN NaN
>>> 1998-01-28 199804.0 199807.0 199810.0 199901.0 199904.0 199907.0 199910.0 200001.0 NaN NaN
>>> 1998-01-29 199804.0 199807.0 199810.0 199901.0 199904.0 199907.0 199910.0 200001.0 NaN NaN
>>> 1998-01-30 199804.0 199807.0 199810.0 199901.0 199904.0 199907.0 199910.0 200001.0 NaN NaN
>>> 1998-01-31 199804.0 199807.0 199810.0 199901.0 199904.0 199907.0 199910.0 200001.0 NaN NaN
>>> res = df_unslice(df, ub)
>>> res[ub[0]]
>>> 1977-04-07 198001.0
>>> 1977-04-08 198001.0
>>> 1977-04-09 198001.0
>>> 1977-04-10 198001.0
>>> 1977-04-11 198001.0
>>> ...
>>> 1979-12-28 198001.0
>>> 1979-12-29 198001.0
>>> 1979-12-30 198001.0
>>> 1979-12-31 198001.0
>>> 1980-01-01 198001.0
>>> Name: 0, Length: 1000, dtype: float64
We can then even slice the data again:
>>> assert eq(df_slice(list(res.values()), ub = ub, n = 10), df)
"""
n = df.shape[1] if is_df(df) else 1
res = dictable(ub = ub, lb = [None] + ub[:-1], i = range(len(ub)))
res = res(ts = lambda lb, ub: df_slice(df, lb, ub, '(]'))
res = res(rs = lambda i, ts: dictable(u = ub[i: i+n], j = range(len(ub[i: i+n])))(ts = lambda j: ts[j]))
rs = dictable.concat(res.rs).listby('u').do([pd.concat, nona], 'ts')
    return dict(rs['u', 'ts'])
"""Reports views"""
# Django
from django.views.generic import TemplateView
# Shortcuts
from django.shortcuts import render
from django.shortcuts import redirect, reverse, get_object_or_404
from django.contrib.auth import authenticate
from django.http import (
HttpResponse,
HttpResponseNotFound,
HttpResponseServerError,
HttpResponseRedirect,
)
# Rest framework
from rest_framework.views import APIView
from rest_framework import status
from rest_framework.permissions import (
IsAuthenticated,
IsAdminUser,
)
from rest_framework.authentication import SessionAuthentication, BasicAuthentication
# Menus
from ...incubator.helpers.helperDictionaries import getReportsIndexMenus, getReportIndexAnalytics
class ReportsIndex(TemplateView):
    template_name = 'gepiandashboard/pages/reports_index.html'

    def get(self, request):
        if not request.user.is_authenticated:
            return render(request, 'errors/401.html')
        # Build the context per request: a mutable class-level dict would be
        # shared across all requests and could leak state between users.
        context = {
            'menus': getReportsIndexMenus(),
            'analytics': getReportIndexAnalytics(),
        }
        return render(request, self.template_name, context)
import unittest
import random
import subprocess
import signal
import sys
import os
import thread_affinity
# Test results may vary if executed on different systems
# with different numbers of CPUs
def get_random_mask():
"""Return a random, valid affinity mask
Which is a subset of {0, 1, ..., 2 ** num_procs - 1}
"""
num_procs = thread_affinity.get_nprocs()
r = random.randint(1, 2 ** num_procs)
return [i for i in range(num_procs) if (r & (1 << i))]
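# For example, with 4 CPUs and r == 0b0101 == 5, the returned mask is [0, 2]:
# the thread may run on processors 0 and 2.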
class TestThreadAffinityLibrary(unittest.TestCase):
"""Test basic Thread Affinity features.
"""
def test_set_get_affinity(self):
"""Test if a simple set & get works
"""
random.seed(1)
proc_list = get_random_mask()
        thread_affinity.set_affinity(proc_list)
self.assertEqual(proc_list, thread_affinity.get_affinity())
def test_set_get_incorrect_affinity(self):
"""Test if the program sets the default affinity in case of illegal masks
"""
illegal_mask = [-1]
default_affinity = thread_affinity.get_default_affinity()
        thread_affinity.set_affinity(illegal_mask)
self.assertEqual(default_affinity, thread_affinity.get_affinity())
def test_set_get_affinity_subprocess(self):
"""Test if the affinity of a subprocess can be controlled from above
"""
random.seed(3)
proc_list = get_random_mask()
proc = subprocess.Popen(["python", "-c", "while True: pass"])
thread_affinity.set_affinity(proc_list, proc.pid)
self.assertEqual(proc_list, thread_affinity.get_affinity(proc.pid))
proc.send_signal(signal.SIGKILL)
if __name__ == '__main__':
suite = unittest.TestLoader().loadTestsFromTestCase(TestThreadAffinityLibrary)
unittest.TextTestRunner(verbosity = 2).run(suite)
# sqlalchemy/log.py
# Copyright (C) 2006-2022 the SQLAlchemy authors and contributors
# <see AUTHORS file>
# Includes alterations by Vinay Sajip vinay_sajip@yahoo.co.uk
#
# This module is part of SQLAlchemy and is released under
# the MIT License: https://www.opensource.org/licenses/mit-license.php
"""Logging control and utilities.
Control of logging for SA can be performed from the regular python logging
module. The regular dotted module namespace is used, starting at
'sqlalchemy'. For class-level logging, the class name is appended.
The "echo" keyword parameter, available on SQLA :class:`_engine.Engine`
and :class:`_pool.Pool` objects, corresponds to a logger specific to that
instance only.
"""
from __future__ import annotations
import logging
import sys
from typing import Any
from typing import Optional
from typing import overload
from typing import Set
from typing import Type
from typing import TypeVar
from typing import Union
from .util import py311
from .util import py38
from .util.typing import Literal
if py38:
STACKLEVEL = True
# needed as of py3.11.0b1
# #8019
STACKLEVEL_OFFSET = 2 if py311 else 1
else:
STACKLEVEL = False
STACKLEVEL_OFFSET = 0
_IT = TypeVar("_IT", bound="Identified")
_EchoFlagType = Union[None, bool, Literal["debug"]]
# set initial level to WARN. This is so that
# log statements don't occur in the absence of explicit
# logging being enabled for 'sqlalchemy'.
rootlogger = logging.getLogger("sqlalchemy")
if rootlogger.level == logging.NOTSET:
rootlogger.setLevel(logging.WARN)
def _add_default_handler(logger: logging.Logger) -> None:
handler = logging.StreamHandler(sys.stdout)
handler.setFormatter(
logging.Formatter("%(asctime)s %(levelname)s %(name)s %(message)s")
)
logger.addHandler(handler)
_logged_classes: Set[Type["Identified"]] = set()
def _qual_logger_name_for_cls(cls: Type["Identified"]) -> str:
return (
getattr(cls, "_sqla_logger_namespace", None)
or cls.__module__ + "." + cls.__name__
)
def class_logger(cls: Type[_IT]) -> Type[_IT]:
logger = logging.getLogger(_qual_logger_name_for_cls(cls))
cls._should_log_debug = lambda self: logger.isEnabledFor( # type: ignore[assignment] # noqa: E501
logging.DEBUG
)
cls._should_log_info = lambda self: logger.isEnabledFor( # type: ignore[assignment] # noqa: E501
logging.INFO
)
cls.logger = logger
_logged_classes.add(cls)
return cls
_IdentifiedLoggerType = Union[logging.Logger, "InstanceLogger"]
class Identified:
__slots__ = ()
logging_name: Optional[str] = None
logger: _IdentifiedLoggerType
_echo: _EchoFlagType
def _should_log_debug(self) -> bool:
return self.logger.isEnabledFor(logging.DEBUG)
def _should_log_info(self) -> bool:
return self.logger.isEnabledFor(logging.INFO)
class InstanceLogger:
"""A logger adapter (wrapper) for :class:`.Identified` subclasses.
This allows multiple instances (e.g. Engine or Pool instances)
to share a logger, but have its verbosity controlled on a
per-instance basis.
The basic functionality is to return a logging level
which is based on an instance's echo setting.
Default implementation is:
'debug' -> logging.DEBUG
True -> logging.INFO
False -> Effective level of underlying logger (
logging.WARNING by default)
None -> same as False
"""
# Map echo settings to logger levels
_echo_map = {
None: logging.NOTSET,
False: logging.NOTSET,
True: logging.INFO,
"debug": logging.DEBUG,
}
_echo: _EchoFlagType
__slots__ = ("echo", "logger")
def __init__(self, echo: _EchoFlagType, name: str):
self.echo = echo
self.logger = logging.getLogger(name)
# if echo flag is enabled and no handlers,
# add a handler to the list
if self._echo_map[echo] <= logging.INFO and not self.logger.handlers:
_add_default_handler(self.logger)
#
# Boilerplate convenience methods
#
def debug(self, msg: str, *args: Any, **kwargs: Any) -> None:
"""Delegate a debug call to the underlying logger."""
self.log(logging.DEBUG, msg, *args, **kwargs)
def info(self, msg: str, *args: Any, **kwargs: Any) -> None:
"""Delegate an info call to the underlying logger."""
self.log(logging.INFO, msg, *args, **kwargs)
def warning(self, msg: str, *args: Any, **kwargs: Any) -> None:
"""Delegate a warning call to the underlying logger."""
self.log(logging.WARNING, msg, *args, **kwargs)
warn = warning
def error(self, msg: str, *args: Any, **kwargs: Any) -> None:
"""
Delegate an error call to the underlying logger.
"""
self.log(logging.ERROR, msg, *args, **kwargs)
def exception(self, msg: str, *args: Any, **kwargs: Any) -> None:
"""Delegate an exception call to the underlying logger."""
kwargs["exc_info"] = 1
self.log(logging.ERROR, msg, *args, **kwargs)
def critical(self, msg: str, *args: Any, **kwargs: Any) -> None:
"""Delegate a critical call to the underlying logger."""
self.log(logging.CRITICAL, msg, *args, **kwargs)
def log(self, level: int, msg: str, *args: Any, **kwargs: Any) -> None:
"""Delegate a log call to the underlying logger.
The level here is determined by the echo
flag as well as that of the underlying logger, and
logger._log() is called directly.
"""
# inline the logic from isEnabledFor(),
# getEffectiveLevel(), to avoid overhead.
if self.logger.manager.disable >= level:
return
selected_level = self._echo_map[self.echo]
if selected_level == logging.NOTSET:
selected_level = self.logger.getEffectiveLevel()
if level >= selected_level:
if STACKLEVEL:
kwargs["stacklevel"] = (
kwargs.get("stacklevel", 1) + STACKLEVEL_OFFSET
)
self.logger._log(level, msg, args, **kwargs)
def isEnabledFor(self, level: int) -> bool:
"""Is this logger enabled for level 'level'?"""
if self.logger.manager.disable >= level:
return False
return level >= self.getEffectiveLevel()
def getEffectiveLevel(self) -> int:
"""What's the effective level for this logger?"""
level = self._echo_map[self.echo]
if level == logging.NOTSET:
level = self.logger.getEffectiveLevel()
return level
def instance_logger(
instance: Identified, echoflag: _EchoFlagType = None
) -> None:
"""create a logger for an instance that implements :class:`.Identified`."""
if instance.logging_name:
name = "%s.%s" % (
_qual_logger_name_for_cls(instance.__class__),
instance.logging_name,
)
else:
name = _qual_logger_name_for_cls(instance.__class__)
instance._echo = echoflag # type: ignore
logger: Union[logging.Logger, InstanceLogger]
if echoflag in (False, None):
# if no echo setting or False, return a Logger directly,
# avoiding overhead of filtering
logger = logging.getLogger(name)
else:
# if a specified echo flag, return an EchoLogger,
# which checks the flag, overrides normal log
# levels by calling logger._log()
logger = InstanceLogger(echoflag, name)
instance.logger = logger # type: ignore
class echo_property:
__doc__ = """\
When ``True``, enable log output for this element.
This has the effect of setting the Python logging level for the namespace
of this element's class and object reference. A value of boolean ``True``
indicates that the loglevel ``logging.INFO`` will be set for the logger,
whereas the string value ``debug`` will set the loglevel to
``logging.DEBUG``.
"""
@overload
def __get__(
self, instance: Literal[None], owner: Type[Identified]
) -> echo_property:
...
@overload
def __get__(
self, instance: Identified, owner: Type[Identified]
) -> _EchoFlagType:
...
def __get__(
self, instance: Optional[Identified], owner: Type[Identified]
) -> Union[echo_property, _EchoFlagType]:
if instance is None:
return self
else:
return instance._echo
def __set__(self, instance: Identified, value: _EchoFlagType) -> None:
instance_logger(instance, echoflag=value)
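# A minimal usage sketch (not part of SQLAlchemy itself; names below are
# illustrative): classes opt in via the class_logger decorator, and instances
# gain per-instance echo control through instance_logger / echo_property.
#
# >>> @class_logger
# ... class MyComponent(Identified):
# ...     _sqla_logger_namespace = "sqlalchemy.example.MyComponent"
# >>> c = MyComponent()
# >>> instance_logger(c, echoflag=True)  # per-instance INFO-level echo
# >>> c.logger.isEnabledFor(logging.INFO)
# True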
"""
Stream IO interposition
"""
import io
class InterposedStringIO(io.StringIO):
    """A StringIO that hands its buffered contents to a callback on flush."""

    def __init__(self, newline="\n", line_buffering=False, onflush=None):
        super().__init__(newline=newline)
        self._line_buffering = line_buffering
        self._onflush = onflush

    def flush(self):
        s = self.getvalue()
        # Rewind and drop the buffered text so each flush hands over only
        # what was written since the previous flush.
        self.seek(0, io.SEEK_SET)
        self.truncate()
        if self._onflush:
            self._onflush(s)

    def write(self, s):
        super().write(s)
        if self._line_buffering and ('\n' in s or '\r' in s):
            self.flush()
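# A minimal sketch of interposing a stream (assumption: collecting flushed
# chunks in a list; any callable works as `onflush`):
#
#     chunks = []
#     stream = InterposedStringIO(line_buffering=True, onflush=chunks.append)
#     stream.write("hello\n")   # newline triggers a flush via line buffering
#     stream.write("world")
#     stream.flush()
#     print(chunks)             # ['hello\n', 'world']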
| [[[44, 46], [75, 77], [361, 363]], [[56, 74]]] |
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
import functools
from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar, Union
import warnings
from azure.core.async_paging import AsyncItemPaged, AsyncList
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import AsyncHttpResponse
from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod
from azure.core.rest import HttpRequest
from azure.core.tracing.decorator import distributed_trace
from azure.core.tracing.decorator_async import distributed_trace_async
from azure.mgmt.core.exceptions import ARMErrorFormat
from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling
from ... import models as _models
from ..._vendor import _convert_request
from ...operations._virtual_machine_scale_set_vms_operations import build_deallocate_request_initial, build_delete_request_initial, build_get_instance_view_request, build_get_request, build_list_request, build_power_off_request_initial, build_reimage_all_request_initial, build_reimage_request_initial, build_restart_request_initial, build_start_request_initial
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
class VirtualMachineScaleSetVMsOperations:
"""VirtualMachineScaleSetVMsOperations async operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~azure.mgmt.compute.v2017_03_30.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = _models
def __init__(self, client, config, serializer, deserializer) -> None:
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config
async def _reimage_initial(
self,
resource_group_name: str,
vm_scale_set_name: str,
instance_id: str,
**kwargs: Any
) -> Optional["_models.OperationStatusResponse"]:
cls = kwargs.pop('cls', None) # type: ClsType[Optional["_models.OperationStatusResponse"]]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
request = build_reimage_request_initial(
resource_group_name=resource_group_name,
vm_scale_set_name=vm_scale_set_name,
instance_id=instance_id,
subscription_id=self._config.subscription_id,
template_url=self._reimage_initial.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('OperationStatusResponse', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_reimage_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/virtualMachineScaleSets/{vmScaleSetName}/virtualmachines/{instanceId}/reimage'} # type: ignore
@distributed_trace_async
async def begin_reimage(
self,
resource_group_name: str,
vm_scale_set_name: str,
instance_id: str,
**kwargs: Any
) -> AsyncLROPoller["_models.OperationStatusResponse"]:
"""Reimages (upgrade the operating system) a specific virtual machine in a VM scale set.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param vm_scale_set_name: The name of the VM scale set.
:type vm_scale_set_name: str
:param instance_id: The instance ID of the virtual machine.
:type instance_id: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
this operation to not poll, or pass in your own initialized polling object for a personal
polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either OperationStatusResponse or the
result of cls(response)
:rtype:
~azure.core.polling.AsyncLROPoller[~azure.mgmt.compute.v2017_03_30.models.OperationStatusResponse]
:raises: ~azure.core.exceptions.HttpResponseError
"""
polling = kwargs.pop('polling', True) # type: Union[bool, azure.core.polling.AsyncPollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["_models.OperationStatusResponse"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = await self._reimage_initial(
resource_group_name=resource_group_name,
vm_scale_set_name=vm_scale_set_name,
instance_id=instance_id,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
def get_long_running_output(pipeline_response):
response = pipeline_response.http_response
deserialized = self._deserialize('OperationStatusResponse', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
if polling is True: polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'azure-async-operation'}, **kwargs)
elif polling is False: polling_method = AsyncNoPolling()
else: polling_method = polling
if cont_token:
return AsyncLROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_reimage.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/virtualMachineScaleSets/{vmScaleSetName}/virtualmachines/{instanceId}/reimage'} # type: ignore
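    # Usage sketch (hypothetical resource names; assumes an authenticated async
    # client that exposes this operation group). A continuation token lets the
    # poller be rebuilt later, e.g. after a process restart:
    #
    #     poller = await client.virtual_machine_scale_set_vms.begin_reimage(
    #         'my-rg', 'my-vmss', '0')
    #     token = poller.continuation_token()
    #     # later, resume from the saved state:
    #     resumed = await client.virtual_machine_scale_set_vms.begin_reimage(
    #         'my-rg', 'my-vmss', '0', continuation_token=token)
    #     status = await resumed.result()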
async def _reimage_all_initial(
self,
resource_group_name: str,
vm_scale_set_name: str,
instance_id: str,
**kwargs: Any
) -> Optional["_models.OperationStatusResponse"]:
cls = kwargs.pop('cls', None) # type: ClsType[Optional["_models.OperationStatusResponse"]]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
request = build_reimage_all_request_initial(
resource_group_name=resource_group_name,
vm_scale_set_name=vm_scale_set_name,
instance_id=instance_id,
subscription_id=self._config.subscription_id,
template_url=self._reimage_all_initial.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('OperationStatusResponse', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_reimage_all_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/virtualMachineScaleSets/{vmScaleSetName}/virtualmachines/{instanceId}/reimageall'} # type: ignore
@distributed_trace_async
async def begin_reimage_all(
self,
resource_group_name: str,
vm_scale_set_name: str,
instance_id: str,
**kwargs: Any
) -> AsyncLROPoller["_models.OperationStatusResponse"]:
"""Allows you to re-image all the disks ( including data disks ) in the a VM scale set instance.
This operation is only supported for managed disks.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param vm_scale_set_name: The name of the VM scale set.
:type vm_scale_set_name: str
:param instance_id: The instance ID of the virtual machine.
:type instance_id: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
this operation to not poll, or pass in your own initialized polling object for a personal
polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either OperationStatusResponse or the
result of cls(response)
:rtype:
~azure.core.polling.AsyncLROPoller[~azure.mgmt.compute.v2017_03_30.models.OperationStatusResponse]
:raises: ~azure.core.exceptions.HttpResponseError
"""
polling = kwargs.pop('polling', True) # type: Union[bool, azure.core.polling.AsyncPollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["_models.OperationStatusResponse"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = await self._reimage_all_initial(
resource_group_name=resource_group_name,
vm_scale_set_name=vm_scale_set_name,
instance_id=instance_id,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
def get_long_running_output(pipeline_response):
response = pipeline_response.http_response
deserialized = self._deserialize('OperationStatusResponse', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
if polling is True: polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'azure-async-operation'}, **kwargs)
elif polling is False: polling_method = AsyncNoPolling()
else: polling_method = polling
if cont_token:
return AsyncLROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_reimage_all.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/virtualMachineScaleSets/{vmScaleSetName}/virtualmachines/{instanceId}/reimageall'} # type: ignore
async def _deallocate_initial(
self,
resource_group_name: str,
vm_scale_set_name: str,
instance_id: str,
**kwargs: Any
) -> Optional["_models.OperationStatusResponse"]:
cls = kwargs.pop('cls', None) # type: ClsType[Optional["_models.OperationStatusResponse"]]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
request = build_deallocate_request_initial(
resource_group_name=resource_group_name,
vm_scale_set_name=vm_scale_set_name,
instance_id=instance_id,
subscription_id=self._config.subscription_id,
template_url=self._deallocate_initial.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('OperationStatusResponse', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_deallocate_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/virtualMachineScaleSets/{vmScaleSetName}/virtualmachines/{instanceId}/deallocate'} # type: ignore
@distributed_trace_async
async def begin_deallocate(
self,
resource_group_name: str,
vm_scale_set_name: str,
instance_id: str,
**kwargs: Any
) -> AsyncLROPoller["_models.OperationStatusResponse"]:
"""Deallocates a specific virtual machine in a VM scale set. Shuts down the virtual machine and
releases the compute resources it uses. You are not billed for the compute resources of this
virtual machine once it is deallocated.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param vm_scale_set_name: The name of the VM scale set.
:type vm_scale_set_name: str
:param instance_id: The instance ID of the virtual machine.
:type instance_id: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
this operation to not poll, or pass in your own initialized polling object for a personal
polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either OperationStatusResponse or the
result of cls(response)
:rtype:
~azure.core.polling.AsyncLROPoller[~azure.mgmt.compute.v2017_03_30.models.OperationStatusResponse]
:raises: ~azure.core.exceptions.HttpResponseError
"""
polling = kwargs.pop('polling', True) # type: Union[bool, azure.core.polling.AsyncPollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["_models.OperationStatusResponse"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = await self._deallocate_initial(
resource_group_name=resource_group_name,
vm_scale_set_name=vm_scale_set_name,
instance_id=instance_id,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
def get_long_running_output(pipeline_response):
response = pipeline_response.http_response
deserialized = self._deserialize('OperationStatusResponse', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
if polling is True: polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'azure-async-operation'}, **kwargs)
elif polling is False: polling_method = AsyncNoPolling()
else: polling_method = polling
if cont_token:
return AsyncLROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_deallocate.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/virtualMachineScaleSets/{vmScaleSetName}/virtualmachines/{instanceId}/deallocate'} # type: ignore
async def _delete_initial(
self,
resource_group_name: str,
vm_scale_set_name: str,
instance_id: str,
**kwargs: Any
) -> Optional["_models.OperationStatusResponse"]:
cls = kwargs.pop('cls', None) # type: ClsType[Optional["_models.OperationStatusResponse"]]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
request = build_delete_request_initial(
resource_group_name=resource_group_name,
vm_scale_set_name=vm_scale_set_name,
instance_id=instance_id,
subscription_id=self._config.subscription_id,
template_url=self._delete_initial.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202, 204]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('OperationStatusResponse', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_delete_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/virtualMachineScaleSets/{vmScaleSetName}/virtualmachines/{instanceId}'} # type: ignore
@distributed_trace_async
async def begin_delete(
self,
resource_group_name: str,
vm_scale_set_name: str,
instance_id: str,
**kwargs: Any
) -> AsyncLROPoller["_models.OperationStatusResponse"]:
"""Deletes a virtual machine from a VM scale set.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param vm_scale_set_name: The name of the VM scale set.
:type vm_scale_set_name: str
:param instance_id: The instance ID of the virtual machine.
:type instance_id: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
this operation to not poll, or pass in your own initialized polling object for a personal
polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either OperationStatusResponse or the
result of cls(response)
:rtype:
~azure.core.polling.AsyncLROPoller[~azure.mgmt.compute.v2017_03_30.models.OperationStatusResponse]
:raises: ~azure.core.exceptions.HttpResponseError
"""
polling = kwargs.pop('polling', True) # type: Union[bool, azure.core.polling.AsyncPollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["_models.OperationStatusResponse"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = await self._delete_initial(
resource_group_name=resource_group_name,
vm_scale_set_name=vm_scale_set_name,
instance_id=instance_id,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
def get_long_running_output(pipeline_response):
response = pipeline_response.http_response
deserialized = self._deserialize('OperationStatusResponse', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
if polling is True: polling_method = AsyncARMPolling(lro_delay, **kwargs)
elif polling is False: polling_method = AsyncNoPolling()
else: polling_method = polling
if cont_token:
return AsyncLROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/virtualMachineScaleSets/{vmScaleSetName}/virtualmachines/{instanceId}'} # type: ignore
@distributed_trace_async
async def get(
self,
resource_group_name: str,
vm_scale_set_name: str,
instance_id: str,
**kwargs: Any
) -> "_models.VirtualMachineScaleSetVM":
"""Gets a virtual machine from a VM scale set.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param vm_scale_set_name: The name of the VM scale set.
:type vm_scale_set_name: str
:param instance_id: The instance ID of the virtual machine.
:type instance_id: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: VirtualMachineScaleSetVM, or the result of cls(response)
:rtype: ~azure.mgmt.compute.v2017_03_30.models.VirtualMachineScaleSetVM
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.VirtualMachineScaleSetVM"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
request = build_get_request(
resource_group_name=resource_group_name,
vm_scale_set_name=vm_scale_set_name,
instance_id=instance_id,
subscription_id=self._config.subscription_id,
template_url=self.get.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('VirtualMachineScaleSetVM', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/virtualMachineScaleSets/{vmScaleSetName}/virtualmachines/{instanceId}'} # type: ignore
@distributed_trace_async
async def get_instance_view(
self,
resource_group_name: str,
vm_scale_set_name: str,
instance_id: str,
**kwargs: Any
) -> "_models.VirtualMachineScaleSetVMInstanceView":
"""Gets the status of a virtual machine from a VM scale set.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param vm_scale_set_name: The name of the VM scale set.
:type vm_scale_set_name: str
:param instance_id: The instance ID of the virtual machine.
:type instance_id: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: VirtualMachineScaleSetVMInstanceView, or the result of cls(response)
:rtype: ~azure.mgmt.compute.v2017_03_30.models.VirtualMachineScaleSetVMInstanceView
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.VirtualMachineScaleSetVMInstanceView"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
request = build_get_instance_view_request(
resource_group_name=resource_group_name,
vm_scale_set_name=vm_scale_set_name,
instance_id=instance_id,
subscription_id=self._config.subscription_id,
template_url=self.get_instance_view.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('VirtualMachineScaleSetVMInstanceView', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get_instance_view.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/virtualMachineScaleSets/{vmScaleSetName}/virtualmachines/{instanceId}/instanceView'} # type: ignore
@distributed_trace
def list(
self,
resource_group_name: str,
virtual_machine_scale_set_name: str,
filter: Optional[str] = None,
select: Optional[str] = None,
expand: Optional[str] = None,
**kwargs: Any
) -> AsyncIterable["_models.VirtualMachineScaleSetVMListResult"]:
"""Gets a list of all virtual machines in a VM scale sets.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param virtual_machine_scale_set_name: The name of the VM scale set.
:type virtual_machine_scale_set_name: str
:param filter: The filter to apply to the operation. Allowed values are
'startswith(instanceView/statuses/code, 'PowerState') eq true', 'properties/latestModelApplied
eq true', 'properties/latestModelApplied eq false'.
:type filter: str
:param select: The list parameters. Allowed values are 'instanceView', 'instanceView/statuses'.
:type select: str
:param expand: The expand expression to apply to the operation. Allowed values are
'instanceView'.
:type expand: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either VirtualMachineScaleSetVMListResult or the result
of cls(response)
:rtype:
~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.compute.v2017_03_30.models.VirtualMachineScaleSetVMListResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.VirtualMachineScaleSetVMListResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
def prepare_request(next_link=None):
if not next_link:
request = build_list_request(
resource_group_name=resource_group_name,
virtual_machine_scale_set_name=virtual_machine_scale_set_name,
subscription_id=self._config.subscription_id,
filter=filter,
select=select,
expand=expand,
template_url=self.list.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
else:
request = build_list_request(
resource_group_name=resource_group_name,
virtual_machine_scale_set_name=virtual_machine_scale_set_name,
subscription_id=self._config.subscription_id,
filter=filter,
select=select,
expand=expand,
template_url=next_link,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
request.method = "GET"
return request
async def extract_data(pipeline_response):
deserialized = self._deserialize("VirtualMachineScaleSetVMListResult", pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, AsyncList(list_of_elem)
async def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return AsyncItemPaged(
get_next, extract_data
)
list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/virtualMachineScaleSets/{virtualMachineScaleSetName}/virtualMachines'} # type: ignore
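    # Usage sketch (hypothetical names): `list` returns an AsyncItemPaged,
    # which is consumed with `async for`:
    #
    #     async for vm in client.virtual_machine_scale_set_vms.list(
    #             'my-rg', 'my-vmss',
    #             filter="properties/latestModelApplied eq false"):
    #         print(vm.instance_id)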
async def _power_off_initial(
self,
resource_group_name: str,
vm_scale_set_name: str,
instance_id: str,
**kwargs: Any
) -> Optional["_models.OperationStatusResponse"]:
cls = kwargs.pop('cls', None) # type: ClsType[Optional["_models.OperationStatusResponse"]]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
request = build_power_off_request_initial(
resource_group_name=resource_group_name,
vm_scale_set_name=vm_scale_set_name,
instance_id=instance_id,
subscription_id=self._config.subscription_id,
template_url=self._power_off_initial.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('OperationStatusResponse', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_power_off_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/virtualMachineScaleSets/{vmScaleSetName}/virtualmachines/{instanceId}/poweroff'} # type: ignore
@distributed_trace_async
async def begin_power_off(
self,
resource_group_name: str,
vm_scale_set_name: str,
instance_id: str,
**kwargs: Any
) -> AsyncLROPoller["_models.OperationStatusResponse"]:
"""Power off (stop) a virtual machine in a VM scale set. Note that resources are still attached
and you are getting charged for the resources. Instead, use deallocate to release resources and
avoid charges.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param vm_scale_set_name: The name of the VM scale set.
:type vm_scale_set_name: str
:param instance_id: The instance ID of the virtual machine.
:type instance_id: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
this operation to not poll, or pass in your own initialized polling object for a personal
polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either OperationStatusResponse or the
result of cls(response)
:rtype:
~azure.core.polling.AsyncLROPoller[~azure.mgmt.compute.v2017_03_30.models.OperationStatusResponse]
:raises: ~azure.core.exceptions.HttpResponseError
"""
polling = kwargs.pop('polling', True) # type: Union[bool, azure.core.polling.AsyncPollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["_models.OperationStatusResponse"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = await self._power_off_initial(
resource_group_name=resource_group_name,
vm_scale_set_name=vm_scale_set_name,
instance_id=instance_id,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
def get_long_running_output(pipeline_response):
response = pipeline_response.http_response
deserialized = self._deserialize('OperationStatusResponse', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
if polling is True: polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'azure-async-operation'}, **kwargs)
elif polling is False: polling_method = AsyncNoPolling()
else: polling_method = polling
if cont_token:
return AsyncLROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_power_off.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/virtualMachineScaleSets/{vmScaleSetName}/virtualmachines/{instanceId}/poweroff'} # type: ignore
async def _restart_initial(
self,
resource_group_name: str,
vm_scale_set_name: str,
instance_id: str,
**kwargs: Any
) -> Optional["_models.OperationStatusResponse"]:
cls = kwargs.pop('cls', None) # type: ClsType[Optional["_models.OperationStatusResponse"]]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
request = build_restart_request_initial(
resource_group_name=resource_group_name,
vm_scale_set_name=vm_scale_set_name,
instance_id=instance_id,
subscription_id=self._config.subscription_id,
template_url=self._restart_initial.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('OperationStatusResponse', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_restart_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/virtualMachineScaleSets/{vmScaleSetName}/virtualmachines/{instanceId}/restart'} # type: ignore
@distributed_trace_async
async def begin_restart(
self,
resource_group_name: str,
vm_scale_set_name: str,
instance_id: str,
**kwargs: Any
) -> AsyncLROPoller["_models.OperationStatusResponse"]:
"""Restarts a virtual machine in a VM scale set.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param vm_scale_set_name: The name of the VM scale set.
:type vm_scale_set_name: str
:param instance_id: The instance ID of the virtual machine.
:type instance_id: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
this operation to not poll, or pass in your own initialized polling object for a personal
polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either OperationStatusResponse or the
result of cls(response)
:rtype:
~azure.core.polling.AsyncLROPoller[~azure.mgmt.compute.v2017_03_30.models.OperationStatusResponse]
:raises: ~azure.core.exceptions.HttpResponseError
"""
polling = kwargs.pop('polling', True) # type: Union[bool, azure.core.polling.AsyncPollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["_models.OperationStatusResponse"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = await self._restart_initial(
resource_group_name=resource_group_name,
vm_scale_set_name=vm_scale_set_name,
instance_id=instance_id,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
def get_long_running_output(pipeline_response):
response = pipeline_response.http_response
deserialized = self._deserialize('OperationStatusResponse', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
if polling is True: polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'azure-async-operation'}, **kwargs)
elif polling is False: polling_method = AsyncNoPolling()
else: polling_method = polling
if cont_token:
return AsyncLROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_restart.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/virtualMachineScaleSets/{vmScaleSetName}/virtualmachines/{instanceId}/restart'} # type: ignore
async def _start_initial(
self,
resource_group_name: str,
vm_scale_set_name: str,
instance_id: str,
**kwargs: Any
) -> Optional["_models.OperationStatusResponse"]:
cls = kwargs.pop('cls', None) # type: ClsType[Optional["_models.OperationStatusResponse"]]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
request = build_start_request_initial(
resource_group_name=resource_group_name,
vm_scale_set_name=vm_scale_set_name,
instance_id=instance_id,
subscription_id=self._config.subscription_id,
template_url=self._start_initial.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('OperationStatusResponse', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_start_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/virtualMachineScaleSets/{vmScaleSetName}/virtualmachines/{instanceId}/start'} # type: ignore
@distributed_trace_async
async def begin_start(
self,
resource_group_name: str,
vm_scale_set_name: str,
instance_id: str,
**kwargs: Any
) -> AsyncLROPoller["_models.OperationStatusResponse"]:
"""Starts a virtual machine in a VM scale set.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param vm_scale_set_name: The name of the VM scale set.
:type vm_scale_set_name: str
:param instance_id: The instance ID of the virtual machine.
:type instance_id: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
this operation to not poll, or pass in your own initialized polling object for a personal
polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either OperationStatusResponse or the
result of cls(response)
:rtype:
~azure.core.polling.AsyncLROPoller[~azure.mgmt.compute.v2017_03_30.models.OperationStatusResponse]
:raises: ~azure.core.exceptions.HttpResponseError
"""
polling = kwargs.pop('polling', True) # type: Union[bool, azure.core.polling.AsyncPollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["_models.OperationStatusResponse"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = await self._start_initial(
resource_group_name=resource_group_name,
vm_scale_set_name=vm_scale_set_name,
instance_id=instance_id,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
def get_long_running_output(pipeline_response):
response = pipeline_response.http_response
deserialized = self._deserialize('OperationStatusResponse', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
if polling is True: polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'azure-async-operation'}, **kwargs)
elif polling is False: polling_method = AsyncNoPolling()
else: polling_method = polling
if cont_token:
return AsyncLROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_start.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/virtualMachineScaleSets/{vmScaleSetName}/virtualmachines/{instanceId}/start'} # type: ignore
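# A minimal end-to-end sketch for this operation group (assumptions: the
# `azure-identity` package is installed, ambient credentials are configured,
# the subscription ID below is a placeholder, and the aio client path mirrors
# the sync one):
#
#     import asyncio
#     from azure.identity.aio import DefaultAzureCredential
#     from azure.mgmt.compute.v2017_03_30.aio import ComputeManagementClient
#
#     async def main():
#         async with DefaultAzureCredential() as credential:
#             async with ComputeManagementClient(credential, '<subscription-id>') as client:
#                 poller = await client.virtual_machine_scale_set_vms.begin_power_off(
#                     'my-rg', 'my-vmss', '0')
#                 await poller.result()
#
#     asyncio.run(main())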
| (def_use_chains: per-identifier lists of [start, end] character offsets into the snippet above; values omitted) |
# -*- coding: utf-8 -*-
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# --------------------------------------------------------------------------------
# Written By: Ekhtiar Syed
# Last Update: 8th April 2016
# Caveat: This DAG will not run because of missing scripts.
# The purpose of this is to give you a sample of a real world example DAG!
# --------------------------------------------------------------------------------
# --------------------------------------------------------------------------------
# Load The Dependencies
# --------------------------------------------------------------------------------
"""
This is an example DAG for managing twitter data.
"""
from datetime import date, timedelta
import airflow
from airflow import DAG
from airflow.operators.bash_operator import BashOperator
from airflow.operators.hive_operator import HiveOperator
from airflow.operators.python_operator import PythonOperator
# --------------------------------------------------------------------------------
# Create a few placeholder scripts. In practice these would be different python
# script files, which are imported in this section with absolute or relative imports.
# --------------------------------------------------------------------------------
def fetchtweets():
"""
This is a placeholder for fetchtweets.
"""
def cleantweets():
"""
This is a placeholder for cleantweets.
"""
def analyzetweets():
"""
This is a placeholder for analyzetweets.
"""
def transfertodb():
"""
This is a placeholder for transfertodb.
"""
# --------------------------------------------------------------------------------
# set default arguments
# --------------------------------------------------------------------------------
default_args = {
'owner': 'Ekhtiar',
'depends_on_past': False,
'start_date': airflow.utils.dates.days_ago(5),
'email': ['airflow@example.com'],
'email_on_failure': False,
'email_on_retry': False,
'retries': 1,
'retry_delay': timedelta(minutes=5),
# 'queue': 'bash_queue',
# 'pool': 'backfill',
# 'priority_weight': 10,
# 'end_date': datetime(2016, 1, 1),
}
with DAG(
dag_id='example_twitter_dag',
default_args=default_args,
schedule_interval="@daily"
) as dag:
# --------------------------------------------------------------------------------
    # This task should call the Twitter API and retrieve yesterday's tweets sent from
    # and to each of the four Twitter users (Twitter_A, .., Twitter_D). There should be
    # eight csv output files generated by this task; the naming convention is
    # direction(from or to)_twitterHandle_date.csv
# --------------------------------------------------------------------------------
fetch_tweets = PythonOperator(
task_id='fetch_tweets',
python_callable=fetchtweets
)
# --------------------------------------------------------------------------------
    # Clean the eight files. In this step you can get rid of or cherry-pick columns
    # and different parts of the text.
# --------------------------------------------------------------------------------
clean_tweets = PythonOperator(
task_id='clean_tweets',
python_callable=cleantweets
)
clean_tweets << fetch_tweets
# --------------------------------------------------------------------------------
    # In this section you can use a script to analyze the twitter data. This could
    # simply be a sentiment analysis through algorithms like bag of words, or
    # something more complicated. You could also look at web services for such tasks.
# --------------------------------------------------------------------------------
analyze_tweets = PythonOperator(
task_id='analyze_tweets',
python_callable=analyzetweets
)
analyze_tweets << clean_tweets
# --------------------------------------------------------------------------------
    # Although this is the last task, we need to declare it before the next tasks
    # since we set it downstream of them. This task will extract a summary from the
    # Hive data and store it in MySQL.
# --------------------------------------------------------------------------------
hive_to_mysql = PythonOperator(
task_id='hive_to_mysql',
python_callable=transfertodb
)
# --------------------------------------------------------------------------------
    # The following tasks are generated using a for loop. The first task puts the
    # eight csv files into HDFS. The second task loads these files from HDFS into the
    # respective Hive tables. These two for loops could be combined into one loop.
    # However, in most cases you will be running different analyses on your incoming
    # and outgoing tweets, and hence they are kept separate in this example.
# --------------------------------------------------------------------------------
from_channels = ['fromTwitter_A', 'fromTwitter_B', 'fromTwitter_C', 'fromTwitter_D']
to_channels = ['toTwitter_A', 'toTwitter_B', 'toTwitter_C', 'toTwitter_D']
yesterday = date.today() - timedelta(days=1)
dt = yesterday.strftime("%Y-%m-%d")
# define where you want to store the tweets csv file in your local directory
local_dir = "/tmp/"
    # define the location in HDFS where you want to store the files
    hdfs_dir = "/tmp/"
    for channel in to_channels:
        file_name = "to_" + channel + "_" + dt + ".csv"
        load_to_hdfs = BashOperator(
            task_id="put_" + channel + "_to_hdfs",
            bash_command="HADOOP_USER_NAME=hdfs hadoop fs -put -f " +
                         local_dir + file_name + " " +  # space separates source and destination
                         hdfs_dir + channel + "/"
        )
        load_to_hdfs << analyze_tweets
        load_to_hive = HiveOperator(
            task_id="load_" + channel + "_to_hive",
            hql="LOAD DATA INPATH '" +
                hdfs_dir + channel + "/" + file_name + "' "
                "INTO TABLE " + channel + " "
                "PARTITION(dt='" + dt + "')"
        )
        load_to_hive << load_to_hdfs
        load_to_hive >> hive_to_mysql
    for channel in from_channels:
        file_name = "from_" + channel + "_" + dt + ".csv"
        load_to_hdfs = BashOperator(
            task_id="put_" + channel + "_to_hdfs",
            bash_command="HADOOP_USER_NAME=hdfs hadoop fs -put -f " +
                         local_dir + file_name + " " +  # space separates source and destination
                         hdfs_dir + channel + "/"
        )
        load_to_hdfs << analyze_tweets
        load_to_hive = HiveOperator(
            task_id="load_" + channel + "_to_hive",
            hql="LOAD DATA INPATH '" +
                hdfs_dir + channel + "/" + file_name + "' "
                "INTO TABLE " + channel + " "
                "PARTITION(dt='" + dt + "')"
        )
        load_to_hive << load_to_hdfs
        load_to_hive >> hive_to_mysql
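    # To exercise a single task from this DAG locally (assumes an Airflow 1.x
    # installation with this file on the DAGs path; the date is arbitrary):
    #
    #     airflow test example_twitter_dag fetch_tweets 2016-04-08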
| (def_use_chains: per-identifier lists of [start, end] character offsets into the snippet above; values omitted) |
"""
========================================================
06. Remove epochs based on peak-to-peak (PTP) amplitudes
========================================================
Epochs containing peak-to-peak above the thresholds defined
in the 'reject' parameter are removed from the data.
This step will drop epochs containing non-biological artifacts
but also epochs containing biological artifacts not sufficiently
corrected by the ICA or the SSP processing.
"""
import itertools
import logging
from typing import Optional
import mne
from mne.utils import BunchConst
from mne.parallel import parallel_func
from mne_bids import BIDSPath
import config
from config import gen_log_kwargs, on_error, failsafe_run
logger = logging.getLogger('mne-bids-pipeline')
@failsafe_run(on_error=on_error, script_path=__file__)
def drop_ptp(*, cfg, subject, session=None):
    """Drop epochs whose peak-to-peak amplitudes exceed the rejection thresholds."""
bids_path = BIDSPath(subject=subject,
session=session,
task=cfg.task,
acquisition=cfg.acq,
run=None,
recording=cfg.rec,
space=cfg.space,
suffix='epo',
extension='.fif',
datatype=cfg.datatype,
root=cfg.deriv_root,
check=False)
infile_processing = cfg.spatial_filter
fname_in = bids_path.copy().update(processing=infile_processing)
fname_out = bids_path.copy().update(processing='clean')
msg = f'Input: {fname_in}, Output: {fname_out}'
logger.info(**gen_log_kwargs(message=msg, subject=subject,
session=session))
# Get rejection parameters and drop bad epochs
epochs = mne.read_epochs(fname_in, preload=True)
reject = config.get_reject(epochs=epochs)
if cfg.ica_reject is not None:
for ch_type, threshold in cfg.ica_reject.items():
if (ch_type in reject and
threshold < reject[ch_type]):
# This can only ever happen in case of
# reject = 'autoreject_global'
msg = (f'Adjusting PTP rejection threshold proposed by '
f'autoreject, as it is greater than ica_reject: '
f'{ch_type}: {reject[ch_type]} -> {threshold}')
logger.info(**gen_log_kwargs(message=msg,
subject=subject, session=session))
reject[ch_type] = threshold
msg = f'Using PTP rejection thresholds: {reject}'
logger.info(**gen_log_kwargs(message=msg, subject=subject,
session=session))
n_epochs_before_reject = len(epochs)
epochs.reject_tmin = cfg.reject_tmin
epochs.reject_tmax = cfg.reject_tmax
epochs.drop_bad(reject=reject)
n_epochs_after_reject = len(epochs)
if 0 < n_epochs_after_reject < 0.5 * n_epochs_before_reject:
msg = ('More than 50% of all epochs rejected. Please check the '
'rejection thresholds.')
logger.warning(**gen_log_kwargs(message=msg, subject=subject,
session=session))
elif n_epochs_after_reject == 0:
raise RuntimeError('No epochs remaining after peak-to-peak-based '
'rejection. Cannot continue.')
    msg = 'Saving cleaned, baseline-corrected epochs …'
    logger.info(**gen_log_kwargs(message=msg, subject=subject,
                                 session=session))
epochs.apply_baseline(cfg.baseline)
epochs.save(fname_out, overwrite=True)
def get_config(
subject: Optional[str] = None,
session: Optional[str] = None
) -> BunchConst:
cfg = BunchConst(
task=config.get_task(),
datatype=config.get_datatype(),
acq=config.acq,
rec=config.rec,
space=config.space,
baseline=config.baseline,
reject_tmin=config.reject_tmin,
reject_tmax=config.reject_tmax,
spatial_filter=config.spatial_filter,
ica_reject=config.get_ica_reject(),
deriv_root=config.get_deriv_root(),
decim=config.decim
)
return cfg
def main():
"""Run epochs."""
parallel, run_func, _ = parallel_func(drop_ptp, n_jobs=config.get_n_jobs())
logs = parallel(
run_func(cfg=get_config(), subject=subject, session=session)
for subject, session in
itertools.product(config.get_subjects(),
config.get_sessions())
)
config.save_logs(logs)
if __name__ == '__main__':
main()
| [
[
[
474,
483
],
[
4346,
4355
]
],
[
[
491,
498
],
[
724,
731
]
],
[
[
518,
526
],
[
3561,
3569
],
[
3596,
3604
]
],
[
[
535,
538
],
[
1775,
1778
]
],
[
[
561,
571
],
[
3622,
3632
],
[
3644,
3654
]
],
[
[
597,
610
],
[
4164,
4177
]
],
[
[
632,
640
],
[
881,
889
]
],
[
[
649,
655
],
[
1828,
1834
],
[
3669,
3675
],
[
3705,
3711
],
[
3740,
3746
],
[
3764,
3770
],
[
3790,
3796
],
[
3821,
3827
],
[
3858,
3864
],
[
3898,
3904
],
[
3941,
3947
],
[
3983,
3989
],
[
4027,
4033
],
[
4066,
4072
],
[
4195,
4201
],
[
4364,
4370
],
[
4413,
4419
],
[
4447,
4453
]
],
[
[
675,
689
],
[
1614,
1628
],
[
2392,
2406
],
[
2617,
2631
],
[
3116,
3130
]
],
[
[
691,
699
],
[
788,
796
]
],
[
[
701,
713
],
[
766,
778
]
],
[
[
715,
721
],
[
1600,
1606
],
[
2378,
2384
],
[
2603,
2609
],
[
3099,
3105
]
],
[
[
824,
832
],
[
4178,
4186
]
],
[
[
3536,
3546
],
[
4258,
4268
]
],
[
[
4106,
4110
],
[
4503,
4507
]
]
] |
import datetime
import hashlib
import json
import numpy as np
import pandas as pd
import tifffile
def timestamp():
return datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S')
class MicroManagerTIFF:
def __init__(self, src_filepath, verbose=True):
        '''
        Wrapper around a MicroManager TIFF stack: opens the file and sets up
        event logging and global (file-level) metadata
        '''
self.verbose = verbose
self.src_filepath = src_filepath
self.events = []
self.global_metadata = {'processing_timestamp': timestamp()}
self.open_tiff()
def event_logger(self, message):
        '''
        Record a timestamped event message (printed when verbose is True)
        '''
if self.verbose:
print('EVENT: %s' % message)
self.events.append({'message': message, 'timestamp': timestamp()})
def save_events(self, dst_filepath):
if not self.events:
return
pd.DataFrame(data=self.events).to_csv(dst_filepath, index=False)
def save_global_metadata(self, dst_filepath):
with open(dst_filepath, 'w') as file:
json.dump(self.global_metadata, file)
def save_mm_metadata(self, dst_filepath):
self.mm_metadata.to_csv(dst_filepath, index=False)
def calc_hash(self):
'''
Calculate the sha1 hash from the file contents
'''
sha1 = hashlib.sha1()
with open(self.src_filepath, 'rb') as file:
sha1.update(file.read())
hash_value = sha1.hexdigest()
self.global_metadata['sha1_hash'] = hash_value
return hash_value
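    # Note: file.read() above loads the whole file into memory to hash it;
    # for very large stacks, a chunked loop is a common alternative (sketch):
    #     for chunk in iter(lambda: file.read(2 ** 20), b''):
    #         sha1.update(chunk)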
def open_tiff(self):
'''
Open the stack using tifffile.TiffFile
'''
self.tiff = tifffile.TiffFile(self.src_filepath)
@staticmethod
def _parse_mm_tag_schema_v1(mm_tag):
'''
Parse a MicroManagerMetadata tag in the 'old' schema
(KC: I believe this schema corresponds to MicroManager 1.x)
'''
metadata = {
'slice_ind': mm_tag['SliceIndex'],
'frame_ind': mm_tag['FrameIndex'],
'channel_ind': mm_tag['ChannelIndex'],
'position_ind': mm_tag['PositionIndex'],
'exposure_time': mm_tag['AndorEMCCD-Exposure'],
'laser_status_405': mm_tag['AndorILE-A-Laser 405-Power Enable'],
'laser_power_405': mm_tag['AndorILE-A-Laser 405-Power Setpoint'],
'laser_status_488': mm_tag['AndorILE-A-Laser 488-Power Enable'],
'laser_power_488': mm_tag['AndorILE-A-Laser 488-Power Setpoint'],
}
return metadata
@staticmethod
def _parse_mm_tag_schema_v2(mm_tag):
'''
Parse a MicroManagerMetadata tag in the 'new' schema
(KC: I believe this schema corresponds to MicroManager 2.x)
'''
metadata = {
'slice_ind': mm_tag['SliceIndex'],
'frame_ind': mm_tag['FrameIndex'],
'channel_ind': mm_tag['ChannelIndex'],
'position_ind': mm_tag['PositionIndex'],
'exposure_time': mm_tag.get('Andor EMCCD-Exposure')['PropVal'],
'laser_status_405': mm_tag.get('Andor ILE-A-Laser 405-Power Enable')['PropVal'],
'laser_power_405': mm_tag.get('Andor ILE-A-Laser 405-Power Setpoint')['PropVal'],
'laser_status_488': mm_tag.get('Andor ILE-A-Laser 488-Power Enable')['PropVal'],
'laser_power_488': mm_tag.get('Andor ILE-A-Laser 488-Power Setpoint')['PropVal'],
}
return metadata
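    # Illustrative (hypothetical) tag shapes the two parsers expect:
    # v1 stores plain values:
    #   {'SliceIndex': 0, 'AndorEMCCD-Exposure': 50.0, ...}
    # v2 wraps device properties in dicts, hence the ['PropVal'] lookups:
    #   {'SliceIndex': 0, 'Andor EMCCD-Exposure': {'PropVal': '50.0'}, ...}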
def parse_micromanager_metadata(self):
'''
Parse the MicroManager metadata for each page in the TIFF file
'''
# the IJMetadata appears only in the first page
ij_metadata = None
try:
ij_metadata = self.tiff.pages[0].tags['IJMetadata'].value['Info']
except Exception:
self.event_logger('There was no IJMetadata tag found on the first page')
if ij_metadata is not None:
try:
ij_metadata = json.loads(ij_metadata)
except Exception:
self.event_logger('IJMetadata could not be parsed by json.loads')
mm_metadata_rows = []
for ind, page in enumerate(self.tiff.pages):
mm_metadata_row = {
'page_ind': ind,
'error': False
}
mm_tag = page.tags.get('MicroManagerMetadata')
if not isinstance(mm_tag, tifffile.tifffile.TiffTag):
self.event_logger('There was no MicroManagerMetadata tag found on page %s' % ind)
mm_metadata_row['error'] = True
mm_metadata_rows.append(mm_metadata_row)
continue
try:
page_metadata_v1 = self._parse_mm_tag_schema_v1(mm_tag.value)
except Exception:
page_metadata_v1 = None
try:
page_metadata_v2 = self._parse_mm_tag_schema_v2(mm_tag.value)
except Exception:
page_metadata_v2 = None
page_metadata = {}
mm_metadata_version = None
if page_metadata_v1 is not None:
mm_metadata_version = 'v1'
page_metadata = page_metadata_v1
elif page_metadata_v2 is not None:
mm_metadata_version = 'v2'
page_metadata = page_metadata_v2
else:
mm_metadata_row['error'] = True
self.event_logger('Unable to parse MicroManagerMetadata tag from page %s' % ind)
mm_metadata_rows.append({**mm_metadata_row, **page_metadata})
self.mm_metadata = pd.DataFrame(data=mm_metadata_rows)
self.global_metadata['mm_metadata_version'] = mm_metadata_version
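# Minimal usage sketch for MicroManagerTIFF (the filepaths are placeholders):
#   tiff = MicroManagerTIFF('stack.ome.tif')
#   tiff.parse_micromanager_metadata()
#   tiff.calc_hash()
#   tiff.save_mm_metadata('stack_mm_metadata.csv')
#   tiff.save_events('stack_events.csv')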
class RawPipelineTIFF(MicroManagerTIFF):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
# the channels we expect to find in a Pipeline-like TIFF
self.laser_405 = '405'
self.laser_488 = '488'
def validate_micromanager_metadata(self):
'''
Validate the parsed MicroManager metadata tags for a raw Pipeline-like TIFF file
(these are TIFFs found in the 'PlateMicroscopy' directory)
Generates validated_mm_metadata and sets various flags
that determine whether and how to split the pages into the 405 and 488 channels
Steps
------
- drop rows with any NAs
- check that the dropped rows had a parsing error
- check for two channel_inds and an equal number of pages from each
        - if there is only one channel_ind, check for an even number of pages
- if there are two channel_inds, check that slice_inds
and exposure settings are consistent within each channel
'''
# whether the MM metadata has two channel inds with an equal number of slices
self.has_valid_channel_inds = False
# whether the MM metadata for each channel has slice_inds that increment by one
self.has_valid_slice_inds = False
# whether it is safe to split the TIFF stack into channels by splitting the pages in half,
# when there are not valid channel inds
self.safe_to_split_in_half = False
md = self.mm_metadata.copy()
# remove the error flag column
errors = md['error']
md = md.drop(labels='error', axis=1)
# drop rows with NAs in any of the columns parsed from the MicroManagerMetadata tag
parsed_columns = set(md.columns).difference(['page_ind'])
md = md.dropna(how='any', subset=parsed_columns, axis=0)
# check that the dropped rows had an error
# (note that 'error' means either there was no MM tag or it could not be parsed)
num_error_rows = errors.sum()
num_dropped_rows = self.mm_metadata.shape[0] - md.shape[0]
if num_dropped_rows != num_error_rows:
self.event_logger(
'%s rows with NAs were dropped but %s rows had errors'
% (num_dropped_rows, num_error_rows)
)
# check that we can coerce the parsed columns as expected
int_columns = ['slice_ind', 'channel_ind']
for column in int_columns:
md[column] = md[column].apply(int)
float_columns = ['laser_power_405', 'laser_power_488', 'exposure_time']
for column in float_columns:
md[column] = md[column].apply(float)
# if there are two distinct channels, we assign the first to 405 and the second to 488
self.channel_inds = None
unique_channel_inds = sorted(md.channel_ind.unique())
if len(unique_channel_inds) == 2:
self.channel_inds = {
self.laser_405: min(unique_channel_inds),
self.laser_488: max(unique_channel_inds),
}
# if there are three channel_inds, we assume the third channel is brightfield
elif set(unique_channel_inds) == set([0, 1, 2]):
self.event_logger('There were three channel inds')
self.channel_inds = {
self.laser_405: 0,
self.laser_488: 1,
}
# if there's one channel index, check for an even number of pages
elif len(unique_channel_inds) == 1:
if np.mod(md.shape[0], 2) == 0:
self.safe_to_split_in_half = True
else:
self.event_logger('There is one channel_ind and an odd number of pages')
else:
self.event_logger('Unexpected number of channel_inds (%s)' % unique_channel_inds)
# if there were valid channel_inds, check for an equal number of pages from each channel
if self.channel_inds is not None:
num_405 = (md.channel_ind == self.channel_inds[self.laser_405]).sum()
num_488 = (md.channel_ind == self.channel_inds[self.laser_488]).sum()
if num_405 == num_488:
self.has_valid_channel_inds = True
else:
self.event_logger(
'Channels have unequal number of slices: %s and %s' % (num_405, num_488)
)
# in each channel, check that slice_ind increments by 1.0
# and that exposure time and laser power are consistent
for channel_ind in unique_channel_inds:
md_channel = md.loc[md.channel_ind == channel_ind]
steps = np.unique(np.diff(md_channel.slice_ind))
# check that slice inds are contiguous
if len(steps) == 1 and steps[0] == 1:
self.has_valid_slice_inds = True
elif len(steps) == 1:
self.event_logger(
'Unexpected slice_ind increment %s for channel_ind %s'
% (steps[0], channel_ind)
)
elif len(steps) > 1:
self.event_logger(
'The slice_inds are not contiguous for channel_ind %s' % channel_ind
)
for column in float_columns:
steps = np.unique(np.diff(md_channel[column]))
if len(steps) > 1 or steps[0] != 0:
self.event_logger(
'Inconsistent values found in column %s for channel_ind %s'
% (column, channel_ind)
)
self.validated_mm_metadata = md
@staticmethod
def tag_and_coerce_metadata(row, tag):
'''
Transform `row` to a dict, prepend the keys with `tag`,
and do some hackish type coercion
'''
d = {}
for key, val in dict(row).items():
key = '%s_%s' % (key, tag)
try:
val = float(val)
except Exception:
pass
d[key] = val
return d
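    # e.g. tag_and_coerce_metadata({'exposure_time': '50.0'}, tag='405')
    # returns {'exposure_time_405': 50.0} (the string is coerced to float)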
def split_channels(self):
'''
Split the pages of the pipeline-like TIFF into 405 and 488 channels
to construct the z-stack for each channel and, if possible,
extract the channel-specific MM metadata (i.e., exposure time and laser power)
Overview
--------
        In a perfect world, this would be easy: we would simply use the two unique channel_inds
to split the pages by channel (and verify the page order using the slice_inds).
Unfortunately, due to a bug, the MM metadata tag in some TIFFs is the same on every page
(this is notably true for 'disentangled' TIFFs from Plates 16,17,18).
In these cases, we split the tiff into channels simply by splitting the pages in half.
        Note that we use the flags set in self.validate_micromanager_metadata to determine
which of these methods to use.
Assignment of channels
----------------------
When there are two valid channel_inds, the 405 laser is assigned
to the lower channel_ind (which is either 0 or -1).
When there are no channel_inds, the 405 laser is assigned
to the first half of the pages.
'''
self.did_split_channels = True
self.stacks = {}
md = self.validated_mm_metadata.copy()
if self.has_valid_channel_inds:
for channel_name in (self.laser_405, self.laser_488):
channel_md = md.loc[md.channel_ind == self.channel_inds[channel_name]]
self.global_metadata.update(
self.tag_and_coerce_metadata(channel_md.iloc[0], tag=channel_name)
)
self.stacks[channel_name] = self.concat_pages(channel_md.page_ind.values)
elif self.safe_to_split_in_half:
n = int(md.shape[0]/2)
self.stacks[self.laser_405] = self.concat_pages(md.iloc[:n].page_ind.values)
self.stacks[self.laser_488] = self.concat_pages(md.iloc[n:].page_ind.values)
else:
self.event_logger('Unable to safely split pages by channel')
self.did_split_channels = False
def concat_pages(self, page_inds):
        '''
        Concatenate the given TIFF pages into a single (z, y, x) numpy stack
        '''
stack = np.array([self.tiff.pages[ind].asarray() for ind in page_inds])
return stack
def project_stack(self, channel_name, axis, dst_filepath=None):
'''
Generate x-, y-, or z-projections and log the max and min intensities
'''
axis_inds = {'x': 1, 'y': 2, 'z': 0}
if axis not in axis_inds.keys():
raise ValueError("Axis must be one of 'x', 'y', or 'z'")
axis_ind = axis_inds[axis]
try:
proj = self.stacks[channel_name].max(axis=axis_ind)
minmax = {
'min_intensity': int(proj.min()),
'max_intensity': int(proj.max()),
}
self.global_metadata.update(self.tag_and_coerce_metadata(minmax, tag=channel_name))
if dst_filepath is not None:
tifffile.imsave(dst_filepath, proj)
except Exception:
self.event_logger(
                'An error occurred while %s-projecting the %s channel' % (axis, channel_name)
)
def calculate_z_profiles(self, channel):
'''
Calculate various statistics of the intensities for each z-slice
'''
stack = self.stacks[channel]
return {
'min': np.array([zslice.min() for zslice in stack]).astype(int),
'max': np.array([zslice.max() for zslice in stack]).astype(int),
'mean': np.array([zslice.mean() for zslice in stack]).astype(int),
'p9999': np.array([np.percentile(zslice, 99.99) for zslice in stack]).astype(int),
}
@staticmethod
def find_cell_layer(stack):
'''
Estimate the center of the cell layer using the center of mass
of the z-profile of the mean intensity of the Hoechst staining
'''
# z-profile of the mean intensity in the Hoechst channel
raw_profile = np.array([zslice.mean() for zslice in stack]).astype(float)
profile = raw_profile - raw_profile.mean()
profile[profile < 0] = 0
x = np.arange(len(profile))
center_of_mass = (profile * x).sum()/profile.sum()
return center_of_mass, raw_profile
def align_cell_layer(
self, cell_layer_bottom, cell_layer_top, step_size, bottom_wiggle_room=0
):
'''
Approximately align the 405 and 488 stacks to correct for chromatic aberration,
and crop around the cell layer so that it is in the center of the stack
cell_layer_bottom : the position of the bottom of the cell layer, in microns,
relative to the center of the cell layer (should be negative)
cell_layer_top : the position of the top of cell layer, in microns,
relative to the center (should be positive)
step_size : the z-step size of the stack (in microns)
(note that the step size is not included in the MicroManager metadata,
so it must be provided by the user)
bottom_wiggle_room : optional 'wiggle room', in microns, for the cell_layer_bottom;
if the actual bottom of the stack is within this distance of cell_layer_bottom,
the stack is still cropped, and the bottom of the cropped stack padded with zeros.
For example, if cell_layer_bottom is -5um but the actual bottom is at -4.5um,
setting bottom_wiggle_room to 1um would allow the stack to be cropped
(because -4.5 + 5 < 1)
'''
stacks = {}
result = {}
stack_405 = self.stacks[self.laser_405].copy()
stack_488 = self.stacks[self.laser_488].copy()
# hard-coded chromatic aberration offset in microns
# this is an empirically estimated median offset,
# obtained by inspecting z-stacks from nucleus-localized targets
chromatic_aberration_offset = 1.0
offset_ind = int(chromatic_aberration_offset/step_size)
        # guard against offset_ind == 0, since stack[:-0] would yield an empty array
        if offset_ind:
            stack_405 = stack_405[:-offset_ind, :, :]
            stack_488 = stack_488[offset_ind:, :, :]
# estimate the cell layer center and round it the nearest z-slice
cell_layer_center, _ = self.find_cell_layer(stack_405)
cell_layer_center = np.round(cell_layer_center)
# absolute position, in number of z-slices, of the top and bottom of the cell layer
bottom_ind = int(np.floor(cell_layer_center + cell_layer_bottom/step_size))
top_ind = int(np.ceil(cell_layer_center + cell_layer_top/step_size))
# log some parameters (for debugging, mostly)
result['padded'] = False
result['stack_shape'] = stack_405.shape
result['crop_window'] = [bottom_ind, top_ind]
result['cell_layer_center'] = cell_layer_center
result['chromatic_aberration_offset'] = offset_ind
pad_depth = None
if bottom_ind < 0:
if abs(bottom_ind) <= np.round(bottom_wiggle_room/step_size):
pad_depth = abs(bottom_ind)
bottom_ind = 0
else:
result['error'] = 'The cell layer center was too close to the bottom of the stack'
return stacks, result
if top_ind >= stack_405.shape[0]:
result['error'] = 'The cell layer center was too close to the top of the stack'
return stacks, result
stack_405 = stack_405[bottom_ind:top_ind, :, :]
stack_488 = stack_488[bottom_ind:top_ind, :, :]
# pad the bottom of the stack if necessary
if pad_depth:
result['padded'] = True
result['pad_depth'] = pad_depth
padding = np.zeros((pad_depth, *stack_405.shape[1:]), dtype=stack_405.dtype)
stack_405 = np.concatenate((padding, stack_405), axis=0)
stack_488 = np.concatenate((padding, stack_488), axis=0)
stacks = {'405': stack_405, '488': stack_488}
return stacks, result
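# End-to-end sketch for a raw Pipeline-like TIFF (the path and z-step size
# are assumptions; step_size must be supplied, as it is not in the metadata):
#   raw = RawPipelineTIFF('raw_stack.ome.tif')
#   raw.parse_micromanager_metadata()
#   raw.validate_micromanager_metadata()
#   raw.split_channels()
#   if raw.did_split_channels:
#       stacks, result = raw.align_cell_layer(
#           cell_layer_bottom=-5.0, cell_layer_top=5.0, step_size=0.5)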
| [
[
[
7,
15
],
[
128,
136
]
],
[
[
23,
30
],
[
1223,
1230
]
],
[
[
38,
42
],
[
957,
961
],
[
3866,
3870
]
],
[
[
50,
61
],
[
9156,
9158
],
[
10266,
10268
],
[
10276,
10278
],
[
10907,
10909
],
[
10917,
10919
],
[
13881,
13883
],
[
15116,
15118
],
[
15193,
15195
],
[
15271,
15273
],
[
15351,
15353
],
[
15361,
15363
],
[
15741,
15743
],
[
15898,
15900
],
[
18020,
18022
],
[
18166,
18168
],
[
18247,
18249
],
[
18694,
18696
],
[
19422,
19424
],
[
19513,
19515
],
[
19582,
19584
]
],
[
[
69,
81
],
[
782,
784
],
[
5494,
5496
]
],
[
[
89,
97
],
[
1565,
1573
],
[
4294,
4302
],
[
14698,
14706
]
],
[
[
104,
113
],
[
441,
450
],
[
670,
679
]
],
[
[
190,
206
],
[
5629,
5645
]
],
[
[
5613,
5628
]
]
] |
import re
import collections
from enum import Enum
from ydk._core._dm_meta_info import _MetaInfoClassMember, _MetaInfoClass, _MetaInfoEnum
from ydk.types import Empty, YList, YLeafList, DELETE, Decimal64, FixedBitsDict
from ydk._core._dm_meta_info import ATTRIBUTE, REFERENCE_CLASS, REFERENCE_LIST, REFERENCE_LEAFLIST, REFERENCE_IDENTITY_CLASS, REFERENCE_ENUM_CLASS, REFERENCE_BITS, REFERENCE_UNION
from ydk.errors import YPYError, YPYModelError
from ydk.providers._importer import _yang_ns
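# _meta_table maps each entity of the Cisco-IOS-XR-l2vpn-cfg YANG model to
# its meta-information (enum mappings, class members, types, and value
# constraints) consumed by the YDK runtime for validation and encoding.
# (Descriptive note; this module is auto-generated from the YANG model.)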
_meta_table = {
'FlowLabelTlvCodeEnum' : _MetaInfoEnum('FlowLabelTlvCodeEnum', 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg',
{
'17':'Y_17',
'disable':'disable',
}, 'Cisco-IOS-XR-l2vpn-cfg', _yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg']),
'BackupDisableEnum' : _MetaInfoEnum('BackupDisableEnum', 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg',
{
'never':'never',
'delay':'delay',
}, 'Cisco-IOS-XR-l2vpn-cfg', _yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg']),
'BgpRouteTargetFormatEnum' : _MetaInfoEnum('BgpRouteTargetFormatEnum', 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg',
{
'none':'none',
'two-byte-as':'two_byte_as',
'four-byte-as':'four_byte_as',
'ipv4-address':'ipv4_address',
}, 'Cisco-IOS-XR-l2vpn-cfg', _yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg']),
'LoadBalanceEnum' : _MetaInfoEnum('LoadBalanceEnum', 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg',
{
'source-dest-mac':'source_dest_mac',
'source-dest-ip':'source_dest_ip',
'pseudowire-label':'pseudowire_label',
}, 'Cisco-IOS-XR-l2vpn-cfg', _yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg']),
'InterworkingEnum' : _MetaInfoEnum('InterworkingEnum', 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg',
{
'ethernet':'ethernet',
'ipv4':'ipv4',
}, 'Cisco-IOS-XR-l2vpn-cfg', _yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg']),
'PwSwitchingPointTlvEnum' : _MetaInfoEnum('PwSwitchingPointTlvEnum', 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg',
{
'hide':'hide',
}, 'Cisco-IOS-XR-l2vpn-cfg', _yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg']),
'MacAgingEnum' : _MetaInfoEnum('MacAgingEnum', 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg',
{
'absolute':'absolute',
'inactivity':'inactivity',
}, 'Cisco-IOS-XR-l2vpn-cfg', _yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg']),
'L2Tpv3SequencingEnum' : _MetaInfoEnum('L2Tpv3SequencingEnum', 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg',
{
'off':'off',
'both':'both',
}, 'Cisco-IOS-XR-l2vpn-cfg', _yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg']),
'ErpPort1Enum' : _MetaInfoEnum('ErpPort1Enum', 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg',
{
'port0':'port0',
'port1':'port1',
}, 'Cisco-IOS-XR-l2vpn-cfg', _yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg']),
'InterfaceProfileEnum' : _MetaInfoEnum('InterfaceProfileEnum', 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg',
{
'snoop':'snoop',
'dhcp-protocol':'dhcp_protocol',
}, 'Cisco-IOS-XR-l2vpn-cfg', _yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg']),
'L2EncapsulationEnum' : _MetaInfoEnum('L2EncapsulationEnum', 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg',
{
'vlan':'vlan',
'ethernet':'ethernet',
}, 'Cisco-IOS-XR-l2vpn-cfg', _yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg']),
'InterfaceTrafficFloodEnum' : _MetaInfoEnum('InterfaceTrafficFloodEnum', 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg',
{
'traffic-flooding':'traffic_flooding',
'enable-flooding':'enable_flooding',
'disable-flooding':'disable_flooding',
}, 'Cisco-IOS-XR-l2vpn-cfg', _yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg']),
'L2VpnLoggingEnum' : _MetaInfoEnum('L2VpnLoggingEnum', 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg',
{
'enable':'enable',
'disable':'disable',
}, 'Cisco-IOS-XR-l2vpn-cfg', _yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg']),
'BgpRouteTargetRoleEnum' : _MetaInfoEnum('BgpRouteTargetRoleEnum', 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg',
{
'both':'both',
'import':'import_',
'export':'export',
}, 'Cisco-IOS-XR-l2vpn-cfg', _yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg']),
'ErpPortEnum' : _MetaInfoEnum('ErpPortEnum', 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg',
{
'none':'none',
'virtual':'virtual',
'interface':'interface',
}, 'Cisco-IOS-XR-l2vpn-cfg', _yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg']),
'MacWithdrawBehaviorEnum' : _MetaInfoEnum('MacWithdrawBehaviorEnum', 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg',
{
'legacy':'legacy',
'optimized':'optimized',
}, 'Cisco-IOS-XR-l2vpn-cfg', _yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg']),
'L2TpCookieSizeEnum' : _MetaInfoEnum('L2TpCookieSizeEnum', 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg',
{
'zero':'zero',
'four':'four',
'eight':'eight',
}, 'Cisco-IOS-XR-l2vpn-cfg', _yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg']),
'StormControlEnum' : _MetaInfoEnum('StormControlEnum', 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg',
{
'unicast':'unicast',
'multicast':'multicast',
'broadcast':'broadcast',
}, 'Cisco-IOS-XR-l2vpn-cfg', _yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg']),
'L2TpSignalingProtocolEnum' : _MetaInfoEnum('L2TpSignalingProtocolEnum', 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg',
{
'none':'none',
'l2tpv3':'l2tpv3',
}, 'Cisco-IOS-XR-l2vpn-cfg', _yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg']),
'RplRoleEnum' : _MetaInfoEnum('RplRoleEnum', 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg',
{
'owner':'owner',
'neighbor':'neighbor',
'next-neighbor':'next_neighbor',
}, 'Cisco-IOS-XR-l2vpn-cfg', _yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg']),
'MacLimitActionEnum' : _MetaInfoEnum('MacLimitActionEnum', 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg',
{
'none':'none',
'flood':'flood',
'no-flood':'no_flood',
'shutdown':'shutdown',
}, 'Cisco-IOS-XR-l2vpn-cfg', _yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg']),
'TypeOfServiceModeEnum' : _MetaInfoEnum('TypeOfServiceModeEnum', 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg',
{
'none':'none',
'reflect':'reflect',
}, 'Cisco-IOS-XR-l2vpn-cfg', _yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg']),
'MacNotificationEnum' : _MetaInfoEnum('MacNotificationEnum', 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg',
{
'no-notif':'no_notif',
'syslog':'syslog',
'trap':'trap',
'syslog-snmp':'syslog_snmp',
}, 'Cisco-IOS-XR-l2vpn-cfg', _yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg']),
'L2VpnVerificationEnum' : _MetaInfoEnum('L2VpnVerificationEnum', 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg',
{
'enable':'enable',
'disable':'disable',
}, 'Cisco-IOS-XR-l2vpn-cfg', _yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg']),
'LdpVplsIdEnum' : _MetaInfoEnum('LdpVplsIdEnum', 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg',
{
'two-byte-as':'two_byte_as',
'ipv4-address':'ipv4_address',
}, 'Cisco-IOS-XR-l2vpn-cfg', _yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg']),
'MacLearnEnum' : _MetaInfoEnum('MacLearnEnum', 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg',
{
'default-learning':'default_learning',
'enable-learning':'enable_learning',
'disable-learning':'disable_learning',
}, 'Cisco-IOS-XR-l2vpn-cfg', _yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg']),
'PortDownFlushEnum' : _MetaInfoEnum('PortDownFlushEnum', 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg',
{
'port-down-flush':'port_down_flush',
'enable-port-down-flush':'enable_port_down_flush',
'disable-port-down-flush':'disable_port_down_flush',
}, 'Cisco-IOS-XR-l2vpn-cfg', _yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg']),
'L2VpnCapabilityModeEnum' : _MetaInfoEnum('L2VpnCapabilityModeEnum', 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg',
{
'high-mode':'high_mode',
'single-mode':'single_mode',
}, 'Cisco-IOS-XR-l2vpn-cfg', _yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg']),
'MplsSignalingProtocolEnum' : _MetaInfoEnum('MplsSignalingProtocolEnum', 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg',
{
'none':'none',
'ldp':'ldp',
}, 'Cisco-IOS-XR-l2vpn-cfg', _yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg']),
'BgpRouteTargetEnum' : _MetaInfoEnum('BgpRouteTargetEnum', 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg',
{
'no-stitching':'no_stitching',
'stitching':'stitching',
}, 'Cisco-IOS-XR-l2vpn-cfg', _yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg']),
'ControlWordEnum' : _MetaInfoEnum('ControlWordEnum', 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg',
{
'enable':'enable',
'disable':'disable',
}, 'Cisco-IOS-XR-l2vpn-cfg', _yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg']),
'PreferredPathEnum' : _MetaInfoEnum('PreferredPathEnum', 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg',
{
'te-tunnel':'te_tunnel',
'ip-tunnel':'ip_tunnel',
'tp-tunnel':'tp_tunnel',
}, 'Cisco-IOS-XR-l2vpn-cfg', _yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg']),
'BridgeDomainTransportModeEnum' : _MetaInfoEnum('BridgeDomainTransportModeEnum', 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg',
{
'vlan-passthrough':'vlan_passthrough',
}, 'Cisco-IOS-XR-l2vpn-cfg', _yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg']),
'FlowLabelLoadBalanceEnum' : _MetaInfoEnum('FlowLabelLoadBalanceEnum', 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg',
{
'off':'off',
'receive':'receive',
'transmit':'transmit',
'both':'both',
}, 'Cisco-IOS-XR-l2vpn-cfg', _yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg']),
'BgpRouteDistinguisherEnum' : _MetaInfoEnum('BgpRouteDistinguisherEnum', 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg',
{
'auto':'auto',
'two-byte-as':'two_byte_as',
'four-byte-as':'four_byte_as',
'ipv4-address':'ipv4_address',
}, 'Cisco-IOS-XR-l2vpn-cfg', _yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg']),
'ErpapsEnum' : _MetaInfoEnum('ErpapsEnum', 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg',
{
'interface':'interface',
'bridge-domain':'bridge_domain',
'xconnect':'xconnect',
'none':'none',
}, 'Cisco-IOS-XR-l2vpn-cfg', _yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg']),
'VccvVerificationEnum' : _MetaInfoEnum('VccvVerificationEnum', 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg',
{
'none':'none',
'lsp-ping':'lsp_ping',
}, 'Cisco-IOS-XR-l2vpn-cfg', _yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg']),
'TransportModeEnum' : _MetaInfoEnum('TransportModeEnum', 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg',
{
'ethernet':'ethernet',
'vlan':'vlan',
'vlan-passthrough':'vlan_passthrough',
}, 'Cisco-IOS-XR-l2vpn-cfg', _yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg']),
'MplsSequencingEnum' : _MetaInfoEnum('MplsSequencingEnum', 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg',
{
'off':'off',
'transmit':'transmit',
'receive':'receive',
'both':'both',
}, 'Cisco-IOS-XR-l2vpn-cfg', _yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg']),
'MacSecureActionEnum' : _MetaInfoEnum('MacSecureActionEnum', 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg',
{
'restrict':'restrict',
'none':'none',
'shutdown':'shutdown',
}, 'Cisco-IOS-XR-l2vpn-cfg', _yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg']),
'L2Vpn.PwRouting.PwRoutingBgp.EvpnRouteDistinguisher' : {
'meta_info' : _MetaInfoClass('L2Vpn.PwRouting.PwRoutingBgp.EvpnRouteDistinguisher',
False,
[
_MetaInfoClassMember('addr-index', ATTRIBUTE, 'int' , None, None,
[('0', '65535')], [],
''' Addr index
''',
'addr_index',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('address', ATTRIBUTE, 'str' , None, None,
[], ['(([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])\\.){3}([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])(%[\\p{N}\\p{L}]+)?'],
''' IPV4 address
''',
'address',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('as', ATTRIBUTE, 'int' , None, None,
[('1', '4294967295')], [],
''' Two byte or 4 byte AS number
''',
'as_',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('as-index', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' AS:nn (hex or decimal format)
''',
'as_index',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('type', REFERENCE_ENUM_CLASS, 'BgpRouteDistinguisherEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'BgpRouteDistinguisherEnum',
[], [],
''' Router Distinguisher Type
''',
'type',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'evpn-route-distinguisher',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.PwRouting.PwRoutingBgp' : {
'meta_info' : _MetaInfoClass('L2Vpn.PwRouting.PwRoutingBgp',
False,
[
_MetaInfoClassMember('enable', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Enable Autodiscovery BGP
''',
'enable',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('evpn-route-distinguisher', REFERENCE_CLASS, 'EvpnRouteDistinguisher' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.PwRouting.PwRoutingBgp.EvpnRouteDistinguisher',
[], [],
''' Route Distinguisher
''',
'evpn_route_distinguisher',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'pw-routing-bgp',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.PwRouting' : {
'meta_info' : _MetaInfoClass('L2Vpn.PwRouting',
False,
[
_MetaInfoClassMember('pw-routing-bgp', REFERENCE_CLASS, 'PwRoutingBgp' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.PwRouting.PwRoutingBgp',
[], [],
''' Enable Autodiscovery BGP Pseudowire-routing BGP
''',
'pw_routing_bgp',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('pw-routing-global-id', ATTRIBUTE, 'int' , None, None,
[('1', '4294967295')], [],
''' Pseudowire-routing Global ID
''',
'pw_routing_global_id',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'pw-routing',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Neighbor' : {
'meta_info' : _MetaInfoClass('L2Vpn.Neighbor',
False,
[
_MetaInfoClassMember('ldp-flap', ATTRIBUTE, 'Empty' , None, None,
[], [],
            ''' Enable targeted LDP session flap action
''',
'ldp_flap',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'neighbor',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.G8032Rings.G8032Ring.ErpPort0S.ErpPort0' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.G8032Rings.G8032Ring.ErpPort0S.ErpPort0',
False,
[
_MetaInfoClassMember('interface-name', ATTRIBUTE, 'str' , None, None,
[], ['(([a-zA-Z0-9_]*\\d+/){3,4}\\d+)|(([a-zA-Z0-9_]*\\d+/){3,4}\\d+\\.\\d+)|(([a-zA-Z0-9_]*\\d+/){2}([a-zA-Z0-9_]*\\d+))|(([a-zA-Z0-9_]*\\d+/){2}([a-zA-Z0-9_]+))|([a-zA-Z0-9_-]*\\d+)|([a-zA-Z0-9_-]*\\d+\\.\\d+)|(mpls)|(dwdm)'],
''' Port0 interface
''',
'interface_name',
'Cisco-IOS-XR-l2vpn-cfg', True),
_MetaInfoClassMember('monitor', ATTRIBUTE, 'str' , None, None,
[], ['(([a-zA-Z0-9_]*\\d+/){3,4}\\d+)|(([a-zA-Z0-9_]*\\d+/){3,4}\\d+\\.\\d+)|(([a-zA-Z0-9_]*\\d+/){2}([a-zA-Z0-9_]*\\d+))|(([a-zA-Z0-9_]*\\d+/){2}([a-zA-Z0-9_]+))|([a-zA-Z0-9_-]*\\d+)|([a-zA-Z0-9_-]*\\d+\\.\\d+)|(mpls)|(dwdm)'],
''' Ethernet ring protection port0 monitor
''',
'monitor',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'erp-port0',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.G8032Rings.G8032Ring.ErpPort0S' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.G8032Rings.G8032Ring.ErpPort0S',
False,
[
_MetaInfoClassMember('erp-port0', REFERENCE_LIST, 'ErpPort0' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.G8032Rings.G8032Ring.ErpPort0S.ErpPort0',
[], [],
''' Configure ERP main port0
''',
'erp_port0',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'erp-port0s',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.G8032Rings.G8032Ring.ErpInstances.ErpInstance.Rpl' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.G8032Rings.G8032Ring.ErpInstances.ErpInstance.Rpl',
False,
[
_MetaInfoClassMember('port', REFERENCE_ENUM_CLASS, 'ErpPort1Enum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'ErpPort1Enum',
[], [],
''' ERP main port number
''',
'port',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('role', REFERENCE_ENUM_CLASS, 'RplRoleEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'RplRoleEnum',
[], [],
''' RPL role
''',
'role',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'rpl',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.G8032Rings.G8032Ring.ErpInstances.ErpInstance.Aps.Port1' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.G8032Rings.G8032Ring.ErpInstances.ErpInstance.Aps.Port1',
False,
[
_MetaInfoClassMember('aps-channel', ATTRIBUTE, 'str' , None, None,
[], [],
''' Port1 APS channel in the format of
InterfaceName, BDName or XconnectName
''',
'aps_channel',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('aps-type', REFERENCE_ENUM_CLASS, 'ErpapsEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'ErpapsEnum',
[], [],
''' Port1 APS type
''',
'aps_type',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'port1',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.G8032Rings.G8032Ring.ErpInstances.ErpInstance.Aps' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.G8032Rings.G8032Ring.ErpInstances.ErpInstance.Aps',
False,
[
_MetaInfoClassMember('enable', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Enable automatic protection switching
''',
'enable',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('level', ATTRIBUTE, 'int' , None, None,
[('0', '7')], [],
''' Automatic protection switching level
''',
'level',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('port0', ATTRIBUTE, 'str' , None, None,
[], [],
''' Port0 APS channel in the format of
InterfaceName
''',
'port0',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('port1', REFERENCE_CLASS, 'Port1' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.G8032Rings.G8032Ring.ErpInstances.ErpInstance.Aps.Port1',
[], [],
''' APS channel for ERP port1
''',
'port1',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'aps',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.G8032Rings.G8032Ring.ErpInstances.ErpInstance' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.G8032Rings.G8032Ring.ErpInstances.ErpInstance',
False,
[
_MetaInfoClassMember('erp-instance-id', ATTRIBUTE, 'int' , None, None,
[('1', '2')], [],
''' ERP instance number
''',
'erp_instance_id',
'Cisco-IOS-XR-l2vpn-cfg', True),
_MetaInfoClassMember('aps', REFERENCE_CLASS, 'Aps' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.G8032Rings.G8032Ring.ErpInstances.ErpInstance.Aps',
[], [],
''' Automatic protection switching
''',
'aps',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('description', ATTRIBUTE, 'str' , None, None,
[(0, 32)], [],
''' Ethernet ring protection instance
description
''',
'description',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('inclusion-list', ATTRIBUTE, 'str' , None, None,
[], [],
                ''' Associates a set of VLAN IDs with the
                G.8032 instance
                ''',
'inclusion_list',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('profile', ATTRIBUTE, 'str' , None, None,
[(0, 32)], [],
''' Ethernet ring protection instance profile
''',
'profile',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('rpl', REFERENCE_CLASS, 'Rpl' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.G8032Rings.G8032Ring.ErpInstances.ErpInstance.Rpl',
[], [],
''' Ring protection link
''',
'rpl',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'erp-instance',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.G8032Rings.G8032Ring.ErpInstances' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.G8032Rings.G8032Ring.ErpInstances',
False,
[
_MetaInfoClassMember('erp-instance', REFERENCE_LIST, 'ErpInstance' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.G8032Rings.G8032Ring.ErpInstances.ErpInstance',
[], [],
''' Ethernet ring protection instance
''',
'erp_instance',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'erp-instances',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.G8032Rings.G8032Ring.ErpPort1S.ErpPort1.None_' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.G8032Rings.G8032Ring.ErpPort1S.ErpPort1.None_',
False,
[
_MetaInfoClassMember('monitor', ATTRIBUTE, 'str' , None, None,
[], ['(([a-zA-Z0-9_]*\\d+/){3,4}\\d+)|(([a-zA-Z0-9_]*\\d+/){3,4}\\d+\\.\\d+)|(([a-zA-Z0-9_]*\\d+/){2}([a-zA-Z0-9_]*\\d+))|(([a-zA-Z0-9_]*\\d+/){2}([a-zA-Z0-9_]+))|([a-zA-Z0-9_-]*\\d+)|([a-zA-Z0-9_-]*\\d+\\.\\d+)|(mpls)|(dwdm)'],
''' Ethernet ring protection port1 monitor
''',
'monitor',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'none',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.G8032Rings.G8032Ring.ErpPort1S.ErpPort1.VirtualOrInterface' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.G8032Rings.G8032Ring.ErpPort1S.ErpPort1.VirtualOrInterface',
False,
[
_MetaInfoClassMember('interface-name', ATTRIBUTE, 'str' , None, None,
[], ['(([a-zA-Z0-9_]*\\d+/){3,4}\\d+)|(([a-zA-Z0-9_]*\\d+/){3,4}\\d+\\.\\d+)|(([a-zA-Z0-9_]*\\d+/){2}([a-zA-Z0-9_]*\\d+))|(([a-zA-Z0-9_]*\\d+/){2}([a-zA-Z0-9_]+))|([a-zA-Z0-9_-]*\\d+)|([a-zA-Z0-9_-]*\\d+\\.\\d+)|(mpls)|(dwdm)'],
''' Port1 interface
''',
'interface_name',
'Cisco-IOS-XR-l2vpn-cfg', True),
_MetaInfoClassMember('monitor', ATTRIBUTE, 'str' , None, None,
[], ['(([a-zA-Z0-9_]*\\d+/){3,4}\\d+)|(([a-zA-Z0-9_]*\\d+/){3,4}\\d+\\.\\d+)|(([a-zA-Z0-9_]*\\d+/){2}([a-zA-Z0-9_]*\\d+))|(([a-zA-Z0-9_]*\\d+/){2}([a-zA-Z0-9_]+))|([a-zA-Z0-9_-]*\\d+)|([a-zA-Z0-9_-]*\\d+\\.\\d+)|(mpls)|(dwdm)'],
''' Ethernet ring protection port1 monitor
''',
'monitor',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'virtual-or-interface',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.G8032Rings.G8032Ring.ErpPort1S.ErpPort1' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.G8032Rings.G8032Ring.ErpPort1S.ErpPort1',
False,
[
_MetaInfoClassMember('erp-port-type', REFERENCE_ENUM_CLASS, 'ErpPortEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'ErpPortEnum',
[], [],
''' Port1 type
''',
'erp_port_type',
'Cisco-IOS-XR-l2vpn-cfg', True),
_MetaInfoClassMember('none', REFERENCE_CLASS, 'None_' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.G8032Rings.G8032Ring.ErpPort1S.ErpPort1.None_',
[], [],
''' none
''',
'none',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('virtual-or-interface', REFERENCE_LIST, 'VirtualOrInterface' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.G8032Rings.G8032Ring.ErpPort1S.ErpPort1.VirtualOrInterface',
[], [],
''' virtual or interface
''',
'virtual_or_interface',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'erp-port1',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.G8032Rings.G8032Ring.ErpPort1S' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.G8032Rings.G8032Ring.ErpPort1S',
False,
[
_MetaInfoClassMember('erp-port1', REFERENCE_LIST, 'ErpPort1' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.G8032Rings.G8032Ring.ErpPort1S.ErpPort1',
[], [],
''' Ethernet ring protection port1
''',
'erp_port1',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'erp-port1s',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.G8032Rings.G8032Ring' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.G8032Rings.G8032Ring',
False,
[
_MetaInfoClassMember('g8032-ring-name', ATTRIBUTE, 'str' , None, None,
[(0, 32)], [],
''' Name of the G8032 ring
''',
'g8032_ring_name',
'Cisco-IOS-XR-l2vpn-cfg', True),
_MetaInfoClassMember('erp-instances', REFERENCE_CLASS, 'ErpInstances' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.G8032Rings.G8032Ring.ErpInstances',
[], [],
''' List of ethernet ring protection instance
''',
'erp_instances',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('erp-port0s', REFERENCE_CLASS, 'ErpPort0S' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.G8032Rings.G8032Ring.ErpPort0S',
[], [],
''' Ethernet ring protection port0
''',
'erp_port0s',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('erp-port1s', REFERENCE_CLASS, 'ErpPort1S' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.G8032Rings.G8032Ring.ErpPort1S',
[], [],
                ''' Ethernet ring protection port1
''',
'erp_port1s',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('erp-provider-bridge', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Ethernet ring protection provider bridge
''',
'erp_provider_bridge',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('exclusion-list', ATTRIBUTE, 'str' , None, None,
[], [],
                ''' Vlan IDs in the format of
                a-b,c,d,e-f,g,untagged
                ''',
'exclusion_list',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('open-ring', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Specify the G.8032 instance as open ring
''',
'open_ring',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'g8032-ring',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.G8032Rings' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.G8032Rings',
False,
[
_MetaInfoClassMember('g8032-ring', REFERENCE_LIST, 'G8032Ring' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.G8032Rings.G8032Ring',
[], [],
''' G8032 Ring
''',
'g8032_ring',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'g8032-rings',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.BackupAttachmentCircuits.BackupAttachmentCircuit' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.BackupAttachmentCircuits.BackupAttachmentCircuit',
False,
[
_MetaInfoClassMember('interface-name', ATTRIBUTE, 'str' , None, None,
[], ['(([a-zA-Z0-9_]*\\d+/){3,4}\\d+)|(([a-zA-Z0-9_]*\\d+/){3,4}\\d+\\.\\d+)|(([a-zA-Z0-9_]*\\d+/){2}([a-zA-Z0-9_]*\\d+))|(([a-zA-Z0-9_]*\\d+/){2}([a-zA-Z0-9_]+))|([a-zA-Z0-9_-]*\\d+)|([a-zA-Z0-9_-]*\\d+\\.\\d+)|(mpls)|(dwdm)'],
''' Name of the attachment circuit interface
''',
'interface_name',
'Cisco-IOS-XR-l2vpn-cfg', True),
],
'Cisco-IOS-XR-l2vpn-cfg',
'backup-attachment-circuit',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.BackupAttachmentCircuits' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.BackupAttachmentCircuits',
False,
[
_MetaInfoClassMember('backup-attachment-circuit', REFERENCE_LIST, 'BackupAttachmentCircuit' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.BackupAttachmentCircuits.BackupAttachmentCircuit',
[], [],
''' Backup attachment circuit
''',
'backup_attachment_circuit',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'backup-attachment-circuits',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.PseudowireEvpns.PseudowireEvpn' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.PseudowireEvpns.PseudowireEvpn',
False,
[
_MetaInfoClassMember('eviid', ATTRIBUTE, 'int' , None, None,
[('1', '65534')], [],
''' Ethernet VPN ID
''',
'eviid',
'Cisco-IOS-XR-l2vpn-cfg', True),
_MetaInfoClassMember('remote-acid', ATTRIBUTE, 'int' , None, None,
[('1', '16777215')], [],
''' Remote AC ID
''',
'remote_acid',
'Cisco-IOS-XR-l2vpn-cfg', True),
_MetaInfoClassMember('source-acid', ATTRIBUTE, 'int' , None, None,
[('1', '16777215')], [],
''' Source AC ID
''',
'source_acid',
'Cisco-IOS-XR-l2vpn-cfg', True),
],
'Cisco-IOS-XR-l2vpn-cfg',
'pseudowire-evpn',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.PseudowireEvpns' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.PseudowireEvpns',
False,
[
_MetaInfoClassMember('pseudowire-evpn', REFERENCE_LIST, 'PseudowireEvpn' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.PseudowireEvpns.PseudowireEvpn',
[], [],
''' EVPN P2P Service Configuration
''',
'pseudowire_evpn',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'pseudowire-evpns',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.Neighbor.MplsStaticLabels' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.Neighbor.MplsStaticLabels',
False,
[
_MetaInfoClassMember('local-static-label', ATTRIBUTE, 'int' , None, None,
[('16', '1048575')], [],
''' Pseudowire local static label
''',
'local_static_label',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('remote-static-label', ATTRIBUTE, 'int' , None, None,
[('16', '1048575')], [],
''' Pseudowire remote static label
''',
'remote_static_label',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'mpls-static-labels',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.Neighbor.BackupPseudowires.BackupPseudowire.BackupMplsStaticLabels' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.Neighbor.BackupPseudowires.BackupPseudowire.BackupMplsStaticLabels',
False,
[
_MetaInfoClassMember('local-static-label', ATTRIBUTE, 'int' , None, None,
[('16', '1048575')], [],
''' Pseudowire local static label
''',
'local_static_label',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('remote-static-label', ATTRIBUTE, 'int' , None, None,
[('16', '1048575')], [],
''' Pseudowire remote static label
''',
'remote_static_label',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'backup-mpls-static-labels',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.Neighbor.BackupPseudowires.BackupPseudowire' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.Neighbor.BackupPseudowires.BackupPseudowire',
False,
[
_MetaInfoClassMember('neighbor', ATTRIBUTE, 'str' , None, None,
[], ['(([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])\\.){3}([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])(%[\\p{N}\\p{L}]+)?'],
''' Neighbor IP address
''',
'neighbor',
'Cisco-IOS-XR-l2vpn-cfg', True),
_MetaInfoClassMember('pseudowire-id', ATTRIBUTE, 'int' , None, None,
[('1', '4294967295')], [],
''' Pseudowire ID
''',
'pseudowire_id',
'Cisco-IOS-XR-l2vpn-cfg', True),
_MetaInfoClassMember('backup-mpls-static-labels', REFERENCE_CLASS, 'BackupMplsStaticLabels' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.Neighbor.BackupPseudowires.BackupPseudowire.BackupMplsStaticLabels',
[], [],
''' MPLS static labels
''',
'backup_mpls_static_labels',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('backup-pw-class', ATTRIBUTE, 'str' , None, None,
[(0, 32)], [],
''' PW class template name to use for the
backup PW
''',
'backup_pw_class',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'backup-pseudowire',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.Neighbor.BackupPseudowires' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.Neighbor.BackupPseudowires',
False,
[
_MetaInfoClassMember('backup-pseudowire', REFERENCE_LIST, 'BackupPseudowire' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.Neighbor.BackupPseudowires.BackupPseudowire',
[], [],
''' Backup pseudowire for the cross connect
''',
'backup_pseudowire',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'backup-pseudowires',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.Neighbor.L2TpStaticAttributes.L2TpRemoteCookie' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.Neighbor.L2TpStaticAttributes.L2TpRemoteCookie',
False,
[
_MetaInfoClassMember('higher-value', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' Higher remote cookie value
''',
'higher_value',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('lower-value', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' Lower remote cookie value
''',
'lower_value',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('size', REFERENCE_ENUM_CLASS, 'L2TpCookieSizeEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2TpCookieSizeEnum',
[], [],
''' Remote cookie size
''',
'size',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'l2tp-remote-cookie',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.Neighbor.L2TpStaticAttributes.L2TpSecondaryLocalCookie' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.Neighbor.L2TpStaticAttributes.L2TpSecondaryLocalCookie',
False,
[
_MetaInfoClassMember('higher-value', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' Higher local cookie value
''',
'higher_value',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('lower-value', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' Lower local cookie value
''',
'lower_value',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('size', REFERENCE_ENUM_CLASS, 'L2TpCookieSizeEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2TpCookieSizeEnum',
[], [],
''' Local cookie size
''',
'size',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'l2tp-secondary-local-cookie',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.Neighbor.L2TpStaticAttributes.L2TpLocalCookie' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.Neighbor.L2TpStaticAttributes.L2TpLocalCookie',
False,
[
_MetaInfoClassMember('higher-value', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' Higher local cookie value
''',
'higher_value',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('lower-value', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' Lower local cookie value
''',
'lower_value',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('size', REFERENCE_ENUM_CLASS, 'L2TpCookieSizeEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2TpCookieSizeEnum',
[], [],
''' Local cookie size
''',
'size',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'l2tp-local-cookie',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.Neighbor.L2TpStaticAttributes' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.Neighbor.L2TpStaticAttributes',
False,
[
_MetaInfoClassMember('l2tp-local-cookie', REFERENCE_CLASS, 'L2TpLocalCookie' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.Neighbor.L2TpStaticAttributes.L2TpLocalCookie',
[], [],
''' L2TP local cookie
''',
'l2tp_local_cookie',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('l2tp-local-session-id', ATTRIBUTE, 'int' , None, None,
[('1', '65535')], [],
''' L2TP local session ID
''',
'l2tp_local_session_id',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('l2tp-remote-cookie', REFERENCE_CLASS, 'L2TpRemoteCookie' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.Neighbor.L2TpStaticAttributes.L2TpRemoteCookie',
[], [],
''' L2TP remote cookie
''',
'l2tp_remote_cookie',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('l2tp-remote-session-id', ATTRIBUTE, 'int' , None, None,
[('1', '65535')], [],
''' L2TP remote session ID
''',
'l2tp_remote_session_id',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('l2tp-secondary-local-cookie', REFERENCE_CLASS, 'L2TpSecondaryLocalCookie' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.Neighbor.L2TpStaticAttributes.L2TpSecondaryLocalCookie',
[], [],
''' L2TP secondary local cookie
''',
'l2tp_secondary_local_cookie',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'l2tp-static-attributes',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.Neighbor.L2TpStatic' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.Neighbor.L2TpStatic',
False,
[
_MetaInfoClassMember('enable', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Enable pseudowire L2TPv3 static
configuration
''',
'enable',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'l2tp-static',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
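    # Neighbor: per-peer list entry under Pseudowire, keyed by the neighbor's
    # IPv4 address; it bundles backup pseudowires, bandwidth, pseudowire
    # class, MPLS static labels and the L2TPv3 static settings.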
'L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.Neighbor' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.Neighbor',
False,
[
_MetaInfoClassMember('neighbor', ATTRIBUTE, 'str' , None, None,
[], ['(([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])\\.){3}([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])(%[\\p{N}\\p{L}]+)?'],
''' Pseudowire IPv4 address
''',
'neighbor',
'Cisco-IOS-XR-l2vpn-cfg', True),
_MetaInfoClassMember('backup-pseudowires', REFERENCE_CLASS, 'BackupPseudowires' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.Neighbor.BackupPseudowires',
[], [],
                ''' List of backup pseudowires
''',
'backup_pseudowires',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('bandwidth', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' Pseudowire Bandwidth
''',
'bandwidth',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('class', ATTRIBUTE, 'str' , None, None,
[(0, 32)], [],
''' Name of the pseudowire class
''',
'class_',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('l2tp-static', REFERENCE_CLASS, 'L2TpStatic' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.Neighbor.L2TpStatic',
[], [],
''' Pseudowire L2TPv3 static configuration
''',
'l2tp_static',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('l2tp-static-attributes', REFERENCE_CLASS, 'L2TpStaticAttributes' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.Neighbor.L2TpStaticAttributes',
[], [],
''' L2TP Static Attributes
''',
'l2tp_static_attributes',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('mpls-static-labels', REFERENCE_CLASS, 'MplsStaticLabels' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.Neighbor.MplsStaticLabels',
[], [],
''' MPLS static labels
''',
'mpls_static_labels',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('source-address', REFERENCE_UNION, 'str' , None, None,
[], [],
''' Value of the Pseudowire source address.
Must be IPv6 only.
''',
'source_address',
'Cisco-IOS-XR-l2vpn-cfg', False, [
_MetaInfoClassMember('source-address', ATTRIBUTE, 'str' , None, None,
[], ['(([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])\\.){3}([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])(%[\\p{N}\\p{L}]+)?'],
''' Value of the Pseudowire source address.
Must be IPv6 only.
''',
'source_address',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('source-address', ATTRIBUTE, 'str' , None, None,
[], ['((:|[0-9a-fA-F]{0,4}):)([0-9a-fA-F]{0,4}:){0,5}((([0-9a-fA-F]{0,4}:)?(:|[0-9a-fA-F]{0,4}))|(((25[0-5]|2[0-4][0-9]|[01]?[0-9]?[0-9])\\.){3}(25[0-5]|2[0-4][0-9]|[01]?[0-9]?[0-9])))(%[\\p{N}\\p{L}]+)?'],
''' Value of the Pseudowire source address.
Must be IPv6 only.
''',
'source_address',
'Cisco-IOS-XR-l2vpn-cfg', False),
]),
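    # Note: the 'source-address' union above admits both an IPv4 and an IPv6
    # pattern even though its description reads "Must be IPv6 only"; the
    # text appears to be carried over verbatim from the YANG source.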
_MetaInfoClassMember('tag-impose', ATTRIBUTE, 'int' , None, None,
[('1', '4094')], [],
''' Tag Impose vlan tagged mode
''',
'tag_impose',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'neighbor',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
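    # --- PseudowireAddress variant ---
    # The entries below mirror the Neighbor subtree (MPLS static labels,
    # backup pseudowires, L2TPv3 static attributes) but are keyed by an
    # IPv6 'pseudowire-address' instead of an IPv4 neighbor.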
'L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.PseudowireAddress.MplsStaticLabels' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.PseudowireAddress.MplsStaticLabels',
False,
[
_MetaInfoClassMember('local-static-label', ATTRIBUTE, 'int' , None, None,
[('16', '1048575')], [],
''' Pseudowire local static label
''',
'local_static_label',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('remote-static-label', ATTRIBUTE, 'int' , None, None,
[('16', '1048575')], [],
''' Pseudowire remote static label
''',
'remote_static_label',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'mpls-static-labels',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.PseudowireAddress.BackupPseudowires.BackupPseudowire.BackupMplsStaticLabels' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.PseudowireAddress.BackupPseudowires.BackupPseudowire.BackupMplsStaticLabels',
False,
[
_MetaInfoClassMember('local-static-label', ATTRIBUTE, 'int' , None, None,
[('16', '1048575')], [],
''' Pseudowire local static label
''',
'local_static_label',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('remote-static-label', ATTRIBUTE, 'int' , None, None,
[('16', '1048575')], [],
''' Pseudowire remote static label
''',
'remote_static_label',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'backup-mpls-static-labels',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.PseudowireAddress.BackupPseudowires.BackupPseudowire' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.PseudowireAddress.BackupPseudowires.BackupPseudowire',
False,
[
_MetaInfoClassMember('neighbor', ATTRIBUTE, 'str' , None, None,
[], ['(([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])\\.){3}([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])(%[\\p{N}\\p{L}]+)?'],
''' Neighbor IP address
''',
'neighbor',
'Cisco-IOS-XR-l2vpn-cfg', True),
_MetaInfoClassMember('pseudowire-id', ATTRIBUTE, 'int' , None, None,
[('1', '4294967295')], [],
''' Pseudowire ID
''',
'pseudowire_id',
'Cisco-IOS-XR-l2vpn-cfg', True),
_MetaInfoClassMember('backup-mpls-static-labels', REFERENCE_CLASS, 'BackupMplsStaticLabels' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.PseudowireAddress.BackupPseudowires.BackupPseudowire.BackupMplsStaticLabels',
[], [],
''' MPLS static labels
''',
'backup_mpls_static_labels',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('backup-pw-class', ATTRIBUTE, 'str' , None, None,
[(0, 32)], [],
''' PW class template name to use for the
backup PW
''',
'backup_pw_class',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'backup-pseudowire',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.PseudowireAddress.BackupPseudowires' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.PseudowireAddress.BackupPseudowires',
False,
[
_MetaInfoClassMember('backup-pseudowire', REFERENCE_LIST, 'BackupPseudowire' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.PseudowireAddress.BackupPseudowires.BackupPseudowire',
[], [],
''' Backup pseudowire for the cross connect
''',
'backup_pseudowire',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'backup-pseudowires',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.PseudowireAddress.L2TpStaticAttributes.L2TpRemoteCookie' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.PseudowireAddress.L2TpStaticAttributes.L2TpRemoteCookie',
False,
[
_MetaInfoClassMember('higher-value', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' Higher remote cookie value
''',
'higher_value',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('lower-value', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' Lower remote cookie value
''',
'lower_value',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('size', REFERENCE_ENUM_CLASS, 'L2TpCookieSizeEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2TpCookieSizeEnum',
[], [],
''' Remote cookie size
''',
'size',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'l2tp-remote-cookie',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.PseudowireAddress.L2TpStaticAttributes.L2TpSecondaryLocalCookie' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.PseudowireAddress.L2TpStaticAttributes.L2TpSecondaryLocalCookie',
False,
[
_MetaInfoClassMember('higher-value', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' Higher local cookie value
''',
'higher_value',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('lower-value', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' Lower local cookie value
''',
'lower_value',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('size', REFERENCE_ENUM_CLASS, 'L2TpCookieSizeEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2TpCookieSizeEnum',
[], [],
''' Local cookie size
''',
'size',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'l2tp-secondary-local-cookie',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.PseudowireAddress.L2TpStaticAttributes.L2TpLocalCookie' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.PseudowireAddress.L2TpStaticAttributes.L2TpLocalCookie',
False,
[
_MetaInfoClassMember('higher-value', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' Higher local cookie value
''',
'higher_value',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('lower-value', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' Lower local cookie value
''',
'lower_value',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('size', REFERENCE_ENUM_CLASS, 'L2TpCookieSizeEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2TpCookieSizeEnum',
[], [],
''' Local cookie size
''',
'size',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'l2tp-local-cookie',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.PseudowireAddress.L2TpStaticAttributes' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.PseudowireAddress.L2TpStaticAttributes',
False,
[
_MetaInfoClassMember('l2tp-local-cookie', REFERENCE_CLASS, 'L2TpLocalCookie' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.PseudowireAddress.L2TpStaticAttributes.L2TpLocalCookie',
[], [],
''' L2TP local cookie
''',
'l2tp_local_cookie',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('l2tp-local-session-id', ATTRIBUTE, 'int' , None, None,
[('1', '65535')], [],
''' L2TP local session ID
''',
'l2tp_local_session_id',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('l2tp-remote-cookie', REFERENCE_CLASS, 'L2TpRemoteCookie' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.PseudowireAddress.L2TpStaticAttributes.L2TpRemoteCookie',
[], [],
''' L2TP remote cookie
''',
'l2tp_remote_cookie',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('l2tp-remote-session-id', ATTRIBUTE, 'int' , None, None,
[('1', '65535')], [],
''' L2TP remote session ID
''',
'l2tp_remote_session_id',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('l2tp-secondary-local-cookie', REFERENCE_CLASS, 'L2TpSecondaryLocalCookie' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.PseudowireAddress.L2TpStaticAttributes.L2TpSecondaryLocalCookie',
[], [],
''' L2TP secondary local cookie
''',
'l2tp_secondary_local_cookie',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'l2tp-static-attributes',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.PseudowireAddress.L2TpStatic' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.PseudowireAddress.L2TpStatic',
False,
[
_MetaInfoClassMember('enable', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Enable pseudowire L2TPv3 static
configuration
''',
'enable',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'l2tp-static',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.PseudowireAddress' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.PseudowireAddress',
False,
[
_MetaInfoClassMember('pseudowire-address', ATTRIBUTE, 'str' , None, None,
[], ['((:|[0-9a-fA-F]{0,4}):)([0-9a-fA-F]{0,4}:){0,5}((([0-9a-fA-F]{0,4}:)?(:|[0-9a-fA-F]{0,4}))|(((25[0-5]|2[0-4][0-9]|[01]?[0-9]?[0-9])\\.){3}(25[0-5]|2[0-4][0-9]|[01]?[0-9]?[0-9])))(%[\\p{N}\\p{L}]+)?'],
''' Pseudowire IPv6 address. A pseudowire
can have only one address: IPv4 or IPv6
''',
'pseudowire_address',
'Cisco-IOS-XR-l2vpn-cfg', True),
_MetaInfoClassMember('backup-pseudowires', REFERENCE_CLASS, 'BackupPseudowires' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.PseudowireAddress.BackupPseudowires',
[], [],
                ''' List of backup pseudowires
''',
'backup_pseudowires',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('bandwidth', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' Pseudowire Bandwidth
''',
'bandwidth',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('class', ATTRIBUTE, 'str' , None, None,
[(0, 32)], [],
''' Name of the pseudowire class
''',
'class_',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('l2tp-static', REFERENCE_CLASS, 'L2TpStatic' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.PseudowireAddress.L2TpStatic',
[], [],
''' Pseudowire L2TPv3 static configuration
''',
'l2tp_static',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('l2tp-static-attributes', REFERENCE_CLASS, 'L2TpStaticAttributes' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.PseudowireAddress.L2TpStaticAttributes',
[], [],
''' L2TP Static Attributes
''',
'l2tp_static_attributes',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('mpls-static-labels', REFERENCE_CLASS, 'MplsStaticLabels' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.PseudowireAddress.MplsStaticLabels',
[], [],
''' MPLS static labels
''',
'mpls_static_labels',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('source-address', REFERENCE_UNION, 'str' , None, None,
[], [],
''' Value of the Pseudowire source address.
Must be IPv6 only.
''',
'source_address',
'Cisco-IOS-XR-l2vpn-cfg', False, [
_MetaInfoClassMember('source-address', ATTRIBUTE, 'str' , None, None,
[], ['(([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])\\.){3}([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])(%[\\p{N}\\p{L}]+)?'],
''' Value of the Pseudowire source address.
Must be IPv6 only.
''',
'source_address',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('source-address', ATTRIBUTE, 'str' , None, None,
[], ['((:|[0-9a-fA-F]{0,4}):)([0-9a-fA-F]{0,4}:){0,5}((([0-9a-fA-F]{0,4}:)?(:|[0-9a-fA-F]{0,4}))|(((25[0-5]|2[0-4][0-9]|[01]?[0-9]?[0-9])\\.){3}(25[0-5]|2[0-4][0-9]|[01]?[0-9]?[0-9])))(%[\\p{N}\\p{L}]+)?'],
''' Value of the Pseudowire source address.
Must be IPv6 only.
''',
'source_address',
'Cisco-IOS-XR-l2vpn-cfg', False),
]),
_MetaInfoClassMember('tag-impose', ATTRIBUTE, 'int' , None, None,
[('1', '4094')], [],
''' Tag Impose vlan tagged mode
''',
'tag_impose',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'pseudowire-address',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
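    # Pseudowire: list entry keyed by 'pseudowire-id' (1..4294967295). Per
    # the model, a pseudowire is addressed through either the IPv4-keyed
    # 'neighbor' list or the IPv6-keyed 'pseudowire-address' list, not both.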
'L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire',
False,
[
_MetaInfoClassMember('pseudowire-id', ATTRIBUTE, 'int' , None, None,
[('1', '4294967295')], [],
''' Pseudowire ID
''',
'pseudowire_id',
'Cisco-IOS-XR-l2vpn-cfg', True),
_MetaInfoClassMember('neighbor', REFERENCE_LIST, 'Neighbor' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.Neighbor',
[], [],
''' keys: neighbor
''',
'neighbor',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('pseudowire-address', REFERENCE_LIST, 'PseudowireAddress' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.PseudowireAddress',
[], [],
''' keys: pseudowire-address
''',
'pseudowire_address',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'pseudowire',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires',
False,
[
_MetaInfoClassMember('pseudowire', REFERENCE_LIST, 'Pseudowire' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire',
[], [],
''' Pseudowire configuration
''',
'pseudowire',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'pseudowires',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
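    # MonitorSession: named (up to 64 chars) monitor-session segment that
    # can be enabled as a leg of a point-to-point cross connect.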
'L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.MonitorSessions.MonitorSession' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.MonitorSessions.MonitorSession',
False,
[
_MetaInfoClassMember('name', ATTRIBUTE, 'str' , None, None,
[(0, 64)], [],
''' Name of the monitor session
''',
'name',
'Cisco-IOS-XR-l2vpn-cfg', True),
_MetaInfoClassMember('enable', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Enable monitor session segment
''',
'enable',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'monitor-session',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.MonitorSessions' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.MonitorSessions',
False,
[
_MetaInfoClassMember('monitor-session', REFERENCE_LIST, 'MonitorSession' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.MonitorSessions.MonitorSession',
[], [],
''' Monitor session segment
''',
'monitor_session',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'monitor-sessions',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
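    # PseudowireRouted: keyed by the 4-tuple (global-id, prefix, acid,
    # sacid), with optional pseudowire class and VLAN tag-impose leaves.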
'L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.PseudowireRouteds.PseudowireRouted' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.PseudowireRouteds.PseudowireRouted',
False,
[
_MetaInfoClassMember('acid', ATTRIBUTE, 'int' , None, None,
[('1', '4294967295')], [],
''' Target AC ID
''',
'acid',
'Cisco-IOS-XR-l2vpn-cfg', True),
_MetaInfoClassMember('global-id', ATTRIBUTE, 'int' , None, None,
[('1', '4294967295')], [],
''' Target Global ID
''',
'global_id',
'Cisco-IOS-XR-l2vpn-cfg', True),
_MetaInfoClassMember('prefix', ATTRIBUTE, 'str' , None, None,
[], ['(([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])\\.){3}([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])(%[\\p{N}\\p{L}]+)?'],
''' Target Prefix
''',
'prefix',
'Cisco-IOS-XR-l2vpn-cfg', True),
_MetaInfoClassMember('sacid', ATTRIBUTE, 'int' , None, None,
[('1', '4294967295')], [],
''' Source AC ID
''',
'sacid',
'Cisco-IOS-XR-l2vpn-cfg', True),
_MetaInfoClassMember('class', ATTRIBUTE, 'str' , None, None,
[(0, 32)], [],
''' Name of the pseudowire class
''',
'class_',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('tag-impose', ATTRIBUTE, 'int' , None, None,
[('1', '4094')], [],
''' Tag Impose vlan tagged mode
''',
'tag_impose',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'pseudowire-routed',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.PseudowireRouteds' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.PseudowireRouteds',
False,
[
_MetaInfoClassMember('pseudowire-routed', REFERENCE_LIST, 'PseudowireRouted' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.PseudowireRouteds.PseudowireRouted',
[], [],
''' Pseudowire configuration
''',
'pseudowire_routed',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'pseudowire-routeds',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
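    # AttachmentCircuit: keyed by interface name; the pattern accepts the
    # usual IOS XR interface forms plus the literal names 'mpls' and 'dwdm'.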
'L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.AttachmentCircuits.AttachmentCircuit' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.AttachmentCircuits.AttachmentCircuit',
False,
[
_MetaInfoClassMember('name', ATTRIBUTE, 'str' , None, None,
[], ['(([a-zA-Z0-9_]*\\d+/){3,4}\\d+)|(([a-zA-Z0-9_]*\\d+/){3,4}\\d+\\.\\d+)|(([a-zA-Z0-9_]*\\d+/){2}([a-zA-Z0-9_]*\\d+))|(([a-zA-Z0-9_]*\\d+/){2}([a-zA-Z0-9_]+))|([a-zA-Z0-9_-]*\\d+)|([a-zA-Z0-9_-]*\\d+\\.\\d+)|(mpls)|(dwdm)'],
''' Name of the attachment circuit interface
''',
'name',
'Cisco-IOS-XR-l2vpn-cfg', True),
_MetaInfoClassMember('enable', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Enable attachment circuit interface
''',
'enable',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'attachment-circuit',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.AttachmentCircuits' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.AttachmentCircuits',
False,
[
_MetaInfoClassMember('attachment-circuit', REFERENCE_LIST, 'AttachmentCircuit' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.AttachmentCircuits.AttachmentCircuit',
[], [],
''' Attachment circuit interface
''',
'attachment_circuit',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'attachment-circuits',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
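    # P2PXconnect ties a point-to-point cross connect together: name (key,
    # up to 38 chars), attachment circuits, backup attachment circuits,
    # interworking mode, monitor sessions, EVPN/routed/plain pseudowire
    # containers, and a free-form description.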
'L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect',
False,
[
_MetaInfoClassMember('name', ATTRIBUTE, 'str' , None, None,
[(0, 38)], [],
''' Name of the point to point xconnect
''',
'name',
'Cisco-IOS-XR-l2vpn-cfg', True),
_MetaInfoClassMember('attachment-circuits', REFERENCE_CLASS, 'AttachmentCircuits' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.AttachmentCircuits',
[], [],
''' List of attachment circuits
''',
'attachment_circuits',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('backup-attachment-circuits', REFERENCE_CLASS, 'BackupAttachmentCircuits' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.BackupAttachmentCircuits',
[], [],
''' List of backup attachment circuits
''',
'backup_attachment_circuits',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('interworking', REFERENCE_ENUM_CLASS, 'InterworkingEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'InterworkingEnum',
[], [],
''' Interworking
''',
'interworking',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('monitor-sessions', REFERENCE_CLASS, 'MonitorSessions' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.MonitorSessions',
[], [],
''' List of Monitor session segments
''',
'monitor_sessions',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('p2p-description', ATTRIBUTE, 'str' , None, None,
[(0, 64)], [],
                ''' Cross-connect description name
''',
'p2p_description',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('pseudowire-evpns', REFERENCE_CLASS, 'PseudowireEvpns' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.PseudowireEvpns',
[], [],
''' List of EVPN Services
''',
'pseudowire_evpns',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('pseudowire-routeds', REFERENCE_CLASS, 'PseudowireRouteds' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.PseudowireRouteds',
[], [],
''' List of pseudowire-routed
''',
'pseudowire_routeds',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('pseudowires', REFERENCE_CLASS, 'Pseudowires' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires',
[], [],
''' List of pseudowires
''',
'pseudowires',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'p2p-xconnect',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects',
False,
[
_MetaInfoClassMember('p2p-xconnect', REFERENCE_LIST, 'P2PXconnect' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect',
[], [],
''' Point to point xconnect
''',
'p2p_xconnect',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'p2p-xconnects',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
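    # --- MP2MP (multipoint VPWS) subtree ---
    # The entries below describe BGP auto-discovery for Mp2MpXconnect:
    # route distinguisher, import/export route policy, route targets and
    # the signaling protocol with its CE-ID table.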
'L2Vpn.Database.XconnectGroups.XconnectGroup.Mp2MpXconnects.Mp2MpXconnect.Mp2MpAutoDiscovery.RouteDistinguisher' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.XconnectGroups.XconnectGroup.Mp2MpXconnects.Mp2MpXconnect.Mp2MpAutoDiscovery.RouteDistinguisher',
False,
[
_MetaInfoClassMember('addr-index', ATTRIBUTE, 'int' , None, None,
[('0', '65535')], [],
''' Addr index
''',
'addr_index',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('address', ATTRIBUTE, 'str' , None, None,
[], ['(([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])\\.){3}([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])(%[\\p{N}\\p{L}]+)?'],
''' IPV4 address
''',
'address',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('as', ATTRIBUTE, 'int' , None, None,
[('1', '4294967295')], [],
''' Two byte or 4 byte AS number
''',
'as_',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('as-index', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' AS:nn (hex or decimal format)
''',
'as_index',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('type', REFERENCE_ENUM_CLASS, 'BgpRouteDistinguisherEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'BgpRouteDistinguisherEnum',
[], [],
                ''' Route distinguisher type
''',
'type',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'route-distinguisher',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.XconnectGroups.XconnectGroup.Mp2MpXconnects.Mp2MpXconnect.Mp2MpAutoDiscovery.Mp2MpRoutePolicy' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.XconnectGroups.XconnectGroup.Mp2MpXconnects.Mp2MpXconnect.Mp2MpAutoDiscovery.Mp2MpRoutePolicy',
False,
[
_MetaInfoClassMember('export', ATTRIBUTE, 'str' , None, None,
[], [],
''' Export route policy
''',
'export',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('import', ATTRIBUTE, 'str' , None, None,
[], [],
''' Import route policy
''',
'import_',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'mp2mp-route-policy',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.XconnectGroups.XconnectGroup.Mp2MpXconnects.Mp2MpXconnect.Mp2MpAutoDiscovery.Mp2MpRouteTargets.Mp2MpRouteTarget.TwoByteAsOrFourByteAs' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.XconnectGroups.XconnectGroup.Mp2MpXconnects.Mp2MpXconnect.Mp2MpAutoDiscovery.Mp2MpRouteTargets.Mp2MpRouteTarget.TwoByteAsOrFourByteAs',
False,
[
_MetaInfoClassMember('as', ATTRIBUTE, 'int' , None, None,
[('1', '4294967295')], [],
''' Two byte or 4 byte AS number
''',
'as_',
'Cisco-IOS-XR-l2vpn-cfg', True),
_MetaInfoClassMember('as-index', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' AS:nn (hex or decimal format)
''',
'as_index',
'Cisco-IOS-XR-l2vpn-cfg', True),
],
'Cisco-IOS-XR-l2vpn-cfg',
'two-byte-as-or-four-byte-as',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.XconnectGroups.XconnectGroup.Mp2MpXconnects.Mp2MpXconnect.Mp2MpAutoDiscovery.Mp2MpRouteTargets.Mp2MpRouteTarget.Ipv4Address' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.XconnectGroups.XconnectGroup.Mp2MpXconnects.Mp2MpXconnect.Mp2MpAutoDiscovery.Mp2MpRouteTargets.Mp2MpRouteTarget.Ipv4Address',
False,
[
_MetaInfoClassMember('addr-index', ATTRIBUTE, 'int' , None, None,
[('0', '65535')], [],
''' Addr index
''',
'addr_index',
'Cisco-IOS-XR-l2vpn-cfg', True),
_MetaInfoClassMember('address', ATTRIBUTE, 'str' , None, None,
[], ['(([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])\\.){3}([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])(%[\\p{N}\\p{L}]+)?'],
''' IPV4 address
''',
'address',
'Cisco-IOS-XR-l2vpn-cfg', True),
],
'Cisco-IOS-XR-l2vpn-cfg',
'ipv4-address',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.XconnectGroups.XconnectGroup.Mp2MpXconnects.Mp2MpXconnect.Mp2MpAutoDiscovery.Mp2MpRouteTargets.Mp2MpRouteTarget' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.XconnectGroups.XconnectGroup.Mp2MpXconnects.Mp2MpXconnect.Mp2MpAutoDiscovery.Mp2MpRouteTargets.Mp2MpRouteTarget',
False,
[
_MetaInfoClassMember('format', REFERENCE_ENUM_CLASS, 'BgpRouteTargetFormatEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'BgpRouteTargetFormatEnum',
[], [],
''' Format of the route target
''',
'format',
'Cisco-IOS-XR-l2vpn-cfg', True),
_MetaInfoClassMember('role', REFERENCE_ENUM_CLASS, 'BgpRouteTargetRoleEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'BgpRouteTargetRoleEnum',
[], [],
                ''' Role of the route target type
''',
'role',
'Cisco-IOS-XR-l2vpn-cfg', True),
_MetaInfoClassMember('ipv4-address', REFERENCE_LIST, 'Ipv4Address' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.XconnectGroups.XconnectGroup.Mp2MpXconnects.Mp2MpXconnect.Mp2MpAutoDiscovery.Mp2MpRouteTargets.Mp2MpRouteTarget.Ipv4Address',
[], [],
''' ipv4 address
''',
'ipv4_address',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('two-byte-as-or-four-byte-as', REFERENCE_LIST, 'TwoByteAsOrFourByteAs' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.XconnectGroups.XconnectGroup.Mp2MpXconnects.Mp2MpXconnect.Mp2MpAutoDiscovery.Mp2MpRouteTargets.Mp2MpRouteTarget.TwoByteAsOrFourByteAs',
[], [],
''' two byte as or four byte as
''',
'two_byte_as_or_four_byte_as',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'mp2mp-route-target',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.XconnectGroups.XconnectGroup.Mp2MpXconnects.Mp2MpXconnect.Mp2MpAutoDiscovery.Mp2MpRouteTargets' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.XconnectGroups.XconnectGroup.Mp2MpXconnects.Mp2MpXconnect.Mp2MpAutoDiscovery.Mp2MpRouteTargets',
False,
[
_MetaInfoClassMember('mp2mp-route-target', REFERENCE_LIST, 'Mp2MpRouteTarget' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.XconnectGroups.XconnectGroup.Mp2MpXconnects.Mp2MpXconnect.Mp2MpAutoDiscovery.Mp2MpRouteTargets.Mp2MpRouteTarget',
[], [],
''' Name of the Route Target
''',
'mp2mp_route_target',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'mp2mp-route-targets',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.XconnectGroups.XconnectGroup.Mp2MpXconnects.Mp2MpXconnect.Mp2MpAutoDiscovery.Mp2MpSignalingProtocol.FlowLabelLoadBalance' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.XconnectGroups.XconnectGroup.Mp2MpXconnects.Mp2MpXconnect.Mp2MpAutoDiscovery.Mp2MpSignalingProtocol.FlowLabelLoadBalance',
False,
[
_MetaInfoClassMember('flow-label', REFERENCE_ENUM_CLASS, 'FlowLabelLoadBalanceEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'FlowLabelLoadBalanceEnum',
[], [],
''' Flow Label load balance type
''',
'flow_label',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('static', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Static Flow Label
''',
'static',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'flow-label-load-balance',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.XconnectGroups.XconnectGroup.Mp2MpXconnects.Mp2MpXconnect.Mp2MpAutoDiscovery.Mp2MpSignalingProtocol.Ceids.Ceid.RemoteCeidAttachmentCircuits.RemoteCeidAttachmentCircuit' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.XconnectGroups.XconnectGroup.Mp2MpXconnects.Mp2MpXconnect.Mp2MpAutoDiscovery.Mp2MpSignalingProtocol.Ceids.Ceid.RemoteCeidAttachmentCircuits.RemoteCeidAttachmentCircuit',
False,
[
_MetaInfoClassMember('name', ATTRIBUTE, 'str' , None, None,
[], ['(([a-zA-Z0-9_]*\\d+/){3,4}\\d+)|(([a-zA-Z0-9_]*\\d+/){3,4}\\d+\\.\\d+)|(([a-zA-Z0-9_]*\\d+/){2}([a-zA-Z0-9_]*\\d+))|(([a-zA-Z0-9_]*\\d+/){2}([a-zA-Z0-9_]+))|([a-zA-Z0-9_-]*\\d+)|([a-zA-Z0-9_-]*\\d+\\.\\d+)|(mpls)|(dwdm)'],
''' The name of the Attachment Circuit
''',
'name',
'Cisco-IOS-XR-l2vpn-cfg', True),
_MetaInfoClassMember('remote-ce-id', ATTRIBUTE, 'int' , None, None,
[('1', '16384')], [],
''' Remote Customer Edge Identifier
''',
'remote_ce_id',
'Cisco-IOS-XR-l2vpn-cfg', True),
],
'Cisco-IOS-XR-l2vpn-cfg',
'remote-ceid-attachment-circuit',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.XconnectGroups.XconnectGroup.Mp2MpXconnects.Mp2MpXconnect.Mp2MpAutoDiscovery.Mp2MpSignalingProtocol.Ceids.Ceid.RemoteCeidAttachmentCircuits' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.XconnectGroups.XconnectGroup.Mp2MpXconnects.Mp2MpXconnect.Mp2MpAutoDiscovery.Mp2MpSignalingProtocol.Ceids.Ceid.RemoteCeidAttachmentCircuits',
False,
[
_MetaInfoClassMember('remote-ceid-attachment-circuit', REFERENCE_LIST, 'RemoteCeidAttachmentCircuit' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.XconnectGroups.XconnectGroup.Mp2MpXconnects.Mp2MpXconnect.Mp2MpAutoDiscovery.Mp2MpSignalingProtocol.Ceids.Ceid.RemoteCeidAttachmentCircuits.RemoteCeidAttachmentCircuit',
[], [],
''' AC And Remote Customer Edge Identifier
''',
'remote_ceid_attachment_circuit',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'remote-ceid-attachment-circuits',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.XconnectGroups.XconnectGroup.Mp2MpXconnects.Mp2MpXconnect.Mp2MpAutoDiscovery.Mp2MpSignalingProtocol.Ceids.Ceid' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.XconnectGroups.XconnectGroup.Mp2MpXconnects.Mp2MpXconnect.Mp2MpAutoDiscovery.Mp2MpSignalingProtocol.Ceids.Ceid',
False,
[
_MetaInfoClassMember('ce-id', ATTRIBUTE, 'int' , None, None,
[('1', '16384')], [],
''' Local Customer Edge Identifier
''',
'ce_id',
'Cisco-IOS-XR-l2vpn-cfg', True),
_MetaInfoClassMember('remote-ceid-attachment-circuits', REFERENCE_CLASS, 'RemoteCeidAttachmentCircuits' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.XconnectGroups.XconnectGroup.Mp2MpXconnects.Mp2MpXconnect.Mp2MpAutoDiscovery.Mp2MpSignalingProtocol.Ceids.Ceid.RemoteCeidAttachmentCircuits',
[], [],
''' AC And Remote Customer Edge Identifier
Table
''',
'remote_ceid_attachment_circuits',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'ceid',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.XconnectGroups.XconnectGroup.Mp2MpXconnects.Mp2MpXconnect.Mp2MpAutoDiscovery.Mp2MpSignalingProtocol.Ceids' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.XconnectGroups.XconnectGroup.Mp2MpXconnects.Mp2MpXconnect.Mp2MpAutoDiscovery.Mp2MpSignalingProtocol.Ceids',
False,
[
_MetaInfoClassMember('ceid', REFERENCE_LIST, 'Ceid' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.XconnectGroups.XconnectGroup.Mp2MpXconnects.Mp2MpXconnect.Mp2MpAutoDiscovery.Mp2MpSignalingProtocol.Ceids.Ceid',
[], [],
''' Local Customer Edge Identifier
''',
'ceid',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'ceids',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
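    # Mp2MpSignalingProtocol combines the CE-ID table above with the
    # 'ce-range' leaf, an 'enable' switch and flow-label load balancing.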
'L2Vpn.Database.XconnectGroups.XconnectGroup.Mp2MpXconnects.Mp2MpXconnect.Mp2MpAutoDiscovery.Mp2MpSignalingProtocol' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.XconnectGroups.XconnectGroup.Mp2MpXconnects.Mp2MpXconnect.Mp2MpAutoDiscovery.Mp2MpSignalingProtocol',
False,
[
_MetaInfoClassMember('ce-range', ATTRIBUTE, 'int' , None, None,
[('11', '100')], [],
                ''' Local Customer Edge Identifier range
''',
'ce_range',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('ceids', REFERENCE_CLASS, 'Ceids' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.XconnectGroups.XconnectGroup.Mp2MpXconnects.Mp2MpXconnect.Mp2MpAutoDiscovery.Mp2MpSignalingProtocol.Ceids',
[], [],
''' Local Customer Edge Identifier Table
''',
'ceids',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('enable', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Enable signaling protocol
''',
'enable',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('flow-label-load-balance', REFERENCE_CLASS, 'FlowLabelLoadBalance' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.XconnectGroups.XconnectGroup.Mp2MpXconnects.Mp2MpXconnect.Mp2MpAutoDiscovery.Mp2MpSignalingProtocol.FlowLabelLoadBalance',
[], [],
''' Enable Flow Label based load balancing
''',
'flow_label_load_balance',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'mp2mp-signaling-protocol',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.XconnectGroups.XconnectGroup.Mp2MpXconnects.Mp2MpXconnect.Mp2MpAutoDiscovery' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.XconnectGroups.XconnectGroup.Mp2MpXconnects.Mp2MpXconnect.Mp2MpAutoDiscovery',
False,
[
_MetaInfoClassMember('enable', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Enable auto-discovery
''',
'enable',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('mp2mp-route-policy', REFERENCE_CLASS, 'Mp2MpRoutePolicy' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.XconnectGroups.XconnectGroup.Mp2MpXconnects.Mp2MpXconnect.Mp2MpAutoDiscovery.Mp2MpRoutePolicy',
[], [],
''' Route policy
''',
'mp2mp_route_policy',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('mp2mp-route-targets', REFERENCE_CLASS, 'Mp2MpRouteTargets' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.XconnectGroups.XconnectGroup.Mp2MpXconnects.Mp2MpXconnect.Mp2MpAutoDiscovery.Mp2MpRouteTargets',
[], [],
''' Route Target
''',
'mp2mp_route_targets',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('mp2mp-signaling-protocol', REFERENCE_CLASS, 'Mp2MpSignalingProtocol' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.XconnectGroups.XconnectGroup.Mp2MpXconnects.Mp2MpXconnect.Mp2MpAutoDiscovery.Mp2MpSignalingProtocol',
[], [],
''' signaling protocol in this MP2MP
''',
'mp2mp_signaling_protocol',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('route-distinguisher', REFERENCE_CLASS, 'RouteDistinguisher' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.XconnectGroups.XconnectGroup.Mp2MpXconnects.Mp2MpXconnect.Mp2MpAutoDiscovery.RouteDistinguisher',
[], [],
''' Route Distinguisher
''',
'route_distinguisher',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'mp2mp-auto-discovery',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.XconnectGroups.XconnectGroup.Mp2MpXconnects.Mp2MpXconnect' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.XconnectGroups.XconnectGroup.Mp2MpXconnects.Mp2MpXconnect',
False,
[
_MetaInfoClassMember('name', ATTRIBUTE, 'str' , None, None,
[(0, 26)], [],
''' Name of the multi point to multi point
xconnect
''',
'name',
'Cisco-IOS-XR-l2vpn-cfg', True),
_MetaInfoClassMember('mp2mp-auto-discovery', REFERENCE_CLASS, 'Mp2MpAutoDiscovery' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.XconnectGroups.XconnectGroup.Mp2MpXconnects.Mp2MpXconnect.Mp2MpAutoDiscovery',
[], [],
''' auto-discovery in this MP2MP
''',
'mp2mp_auto_discovery',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('mp2mp-control-word', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Disable control word
''',
'mp2mp_control_word',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('mp2mp-interworking', REFERENCE_ENUM_CLASS, 'InterworkingEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'InterworkingEnum',
[], [],
''' Interworking
''',
'mp2mp_interworking',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('mp2mp-shutdown', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' shutdown this MP2MP VPWS instance
''',
'mp2mp_shutdown',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('mp2mpl2-encapsulation', REFERENCE_ENUM_CLASS, 'L2EncapsulationEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2EncapsulationEnum',
[], [],
''' Configure Layer 2 Encapsulation
''',
'mp2mpl2_encapsulation',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('mp2mpmtu', ATTRIBUTE, 'int' , None, None,
[('64', '65535')], [],
''' Maximum transmission unit for this MP2MP
VPWS instance
''',
'mp2mpmtu',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('mp2mpvpn-id', ATTRIBUTE, 'int' , None, None,
[('1', '4294967295')], [],
''' VPN Identifier
''',
'mp2mpvpn_id',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'mp2mp-xconnect',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.XconnectGroups.XconnectGroup.Mp2MpXconnects' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.XconnectGroups.XconnectGroup.Mp2MpXconnects',
False,
[
_MetaInfoClassMember('mp2mp-xconnect', REFERENCE_LIST, 'Mp2MpXconnect' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.XconnectGroups.XconnectGroup.Mp2MpXconnects.Mp2MpXconnect',
[], [],
''' Multi point to multi point xconnect
''',
'mp2mp_xconnect',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'mp2mp-xconnects',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
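    # XconnectGroup: named group (key, up to 32 chars) holding the P2P and
    # MP2MP xconnect lists defined above.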
'L2Vpn.Database.XconnectGroups.XconnectGroup' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.XconnectGroups.XconnectGroup',
False,
[
_MetaInfoClassMember('name', ATTRIBUTE, 'str' , None, None,
[(0, 32)], [],
''' Name of the xconnect group
''',
'name',
'Cisco-IOS-XR-l2vpn-cfg', True),
_MetaInfoClassMember('mp2mp-xconnects', REFERENCE_CLASS, 'Mp2MpXconnects' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.XconnectGroups.XconnectGroup.Mp2MpXconnects',
[], [],
''' List of multi point to multi point xconnects
''',
'mp2mp_xconnects',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('p2p-xconnects', REFERENCE_CLASS, 'P2PXconnects' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects',
[], [],
''' List of point to point xconnects
''',
'p2p_xconnects',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'xconnect-group',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
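    # XconnectGroups is the top-level list container under L2Vpn.Database.
    # A minimal usage sketch of the generated classes this metadata backs;
    # CRUDService and NetconfServiceProvider are standard ydk-py services,
    # but the device address, credentials and object names are illustrative:
    #
    #   from ydk.services import CRUDService
    #   from ydk.providers import NetconfServiceProvider
    #   from ydk.models.cisco_ios_xr import Cisco_IOS_XR_l2vpn_cfg as l2vpn_cfg
    #
    #   l2vpn = l2vpn_cfg.L2Vpn()
    #   group = l2vpn.database.xconnect_groups.XconnectGroup()
    #   group.name = 'XG1'                                # key, 0..32 chars
    #   xconnect = group.p2p_xconnects.P2PXconnect()
    #   xconnect.name = 'XC1'                             # key, 0..38 chars
    #   group.p2p_xconnects.p2p_xconnect.append(xconnect)
    #   l2vpn.database.xconnect_groups.xconnect_group.append(group)
    #
    #   provider = NetconfServiceProvider(address='192.0.2.1',
    #                                     username='user', password='pass')
    #   CRUDService().create(provider, l2vpn)             # push the config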
'L2Vpn.Database.XconnectGroups' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.XconnectGroups',
False,
[
_MetaInfoClassMember('xconnect-group', REFERENCE_LIST, 'XconnectGroup' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.XconnectGroups.XconnectGroup',
[], [],
''' Xconnect group
''',
'xconnect_group',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'xconnect-groups',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
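    # --- Bridge-domain subtree ---
    # The remaining entries cover BridgeDomainGroups: per-bridge-domain
    # storm control, VxLAN member VNIs and MAC management.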
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdStormControls.BdStormControl.StormControlUnit' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdStormControls.BdStormControl.StormControlUnit',
False,
[
_MetaInfoClassMember('kbits-per-sec', ATTRIBUTE, 'int' , None, None,
[('64', '1280000')], [],
''' Kilobits Per Second, PktsPerSec and KbitsPerSec
cannot be configured together
''',
'kbits_per_sec',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('pkts-per-sec', ATTRIBUTE, 'int' , None, None,
[('1', '160000')], [],
''' Packets Per Second, PktsPerSec and KbitsPerSec
cannot be configured together
''',
'pkts_per_sec',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'storm-control-unit',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdStormControls.BdStormControl' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdStormControls.BdStormControl',
False,
[
_MetaInfoClassMember('sctype', REFERENCE_ENUM_CLASS, 'StormControlEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'StormControlEnum',
[], [],
''' Storm Control Type
''',
'sctype',
'Cisco-IOS-XR-l2vpn-cfg', True),
_MetaInfoClassMember('storm-control-unit', REFERENCE_CLASS, 'StormControlUnit' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdStormControls.BdStormControl.StormControlUnit',
[], [],
''' Specify units for Storm Control Configuration
''',
'storm_control_unit',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'bd-storm-control',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdStormControls' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdStormControls',
False,
[
_MetaInfoClassMember('bd-storm-control', REFERENCE_LIST, 'BdStormControl' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdStormControls.BdStormControl',
[], [],
''' Storm Control Type
''',
'bd_storm_control',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'bd-storm-controls',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
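    # --- Member VNI entries ---
    # MemberVni is keyed by a VxLAN Network Identifier (1..16777215) and
    # carries a static MAC table mapping MAC addresses to next-hop IPv4
    # addresses.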
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.MemberVnis.MemberVni.MemberVniStaticMacAddresses.MemberVniStaticMacAddress' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.MemberVnis.MemberVni.MemberVniStaticMacAddresses.MemberVniStaticMacAddress',
False,
[
_MetaInfoClassMember('mac-address', ATTRIBUTE, 'str' , None, None,
[], ['[0-9a-fA-F]{2}(:[0-9a-fA-F]{2}){5}'],
''' Static MAC address
''',
'mac_address',
'Cisco-IOS-XR-l2vpn-cfg', True),
_MetaInfoClassMember('next-hop-ip', ATTRIBUTE, 'str' , None, None,
[], ['(([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])\\.){3}([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])(%[\\p{N}\\p{L}]+)?'],
                ''' Next-hop IPv4 address for this static MAC entry
''',
'next_hop_ip',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'member-vni-static-mac-address',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.MemberVnis.MemberVni.MemberVniStaticMacAddresses' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.MemberVnis.MemberVni.MemberVniStaticMacAddresses',
False,
[
_MetaInfoClassMember('member-vni-static-mac-address', REFERENCE_LIST, 'MemberVniStaticMacAddress' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.MemberVnis.MemberVni.MemberVniStaticMacAddresses.MemberVniStaticMacAddress',
[], [],
''' Static Mac Address Configuration
''',
'member_vni_static_mac_address',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'member-vni-static-mac-addresses',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.MemberVnis.MemberVni' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.MemberVnis.MemberVni',
False,
[
_MetaInfoClassMember('vni', ATTRIBUTE, 'int' , None, None,
[('1', '16777215')], [],
''' VxLAN Network Identifier number
''',
'vni',
'Cisco-IOS-XR-l2vpn-cfg', True),
_MetaInfoClassMember('member-vni-static-mac-addresses', REFERENCE_CLASS, 'MemberVniStaticMacAddresses' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.MemberVnis.MemberVni.MemberVniStaticMacAddresses',
[], [],
''' Static Mac Address Table
''',
'member_vni_static_mac_addresses',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'member-vni',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.MemberVnis' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.MemberVnis',
False,
[
_MetaInfoClassMember('member-vni', REFERENCE_LIST, 'MemberVni' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.MemberVnis.MemberVni',
[], [],
''' Bridge Domain Member VxLAN Network
Identifier
''',
'member_vni',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'member-vnis',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
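    # --- Bridge-domain MAC management ---
    # BdMacLimit (max count, action, notification), BdMacFilters (per-MAC
    # drop), MacSecure (action/enable/logging) and BdMacAging (time, type)
    # all feed the BridgeDomainMac container.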
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainMac.BdMacLimit' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainMac.BdMacLimit',
False,
[
_MetaInfoClassMember('bd-mac-limit-action', REFERENCE_ENUM_CLASS, 'MacLimitActionEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'MacLimitActionEnum',
[], [],
''' MAC address limit enforcement action
''',
'bd_mac_limit_action',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('bd-mac-limit-max', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' Number of MAC addresses after which MAC
limit action is taken
''',
'bd_mac_limit_max',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('bd-mac-limit-notif', REFERENCE_ENUM_CLASS, 'MacNotificationEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'MacNotificationEnum',
[], [],
''' Mac Address Limit Notification
''',
'bd_mac_limit_notif',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'bd-mac-limit',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainMac.BdMacFilters.BdMacFilter' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainMac.BdMacFilters.BdMacFilter',
False,
[
_MetaInfoClassMember('address', ATTRIBUTE, 'str' , None, None,
[], ['[0-9a-fA-F]{2}(:[0-9a-fA-F]{2}){5}'],
''' Static MAC address
''',
'address',
'Cisco-IOS-XR-l2vpn-cfg', True),
_MetaInfoClassMember('drop', ATTRIBUTE, 'Empty' , None, None,
[], [],
                ''' Drop frames matching this MAC address
''',
'drop',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'bd-mac-filter',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainMac.BdMacFilters' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainMac.BdMacFilters',
False,
[
_MetaInfoClassMember('bd-mac-filter', REFERENCE_LIST, 'BdMacFilter' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainMac.BdMacFilters.BdMacFilter',
[], [],
''' Static MAC address
''',
'bd_mac_filter',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'bd-mac-filters',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainMac.MacSecure' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainMac.MacSecure',
False,
[
_MetaInfoClassMember('action', REFERENCE_ENUM_CLASS, 'MacSecureActionEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'MacSecureActionEnum',
[], [],
''' MAC secure enforcement action
''',
'action',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('enable', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Enable MAC Secure
''',
'enable',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('logging', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' MAC Secure Logging
''',
'logging',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'mac-secure',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainMac.BdMacAging' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainMac.BdMacAging',
False,
[
_MetaInfoClassMember('bd-mac-aging-time', ATTRIBUTE, 'int' , None, None,
[('300', '30000')], [],
                ''' MAC Aging Time
''',
'bd_mac_aging_time',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('bd-mac-aging-type', REFERENCE_ENUM_CLASS, 'MacAgingEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'MacAgingEnum',
[], [],
''' MAC address aging type
''',
'bd_mac_aging_type',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'bd-mac-aging',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainMac' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainMac',
False,
[
_MetaInfoClassMember('bd-mac-aging', REFERENCE_CLASS, 'BdMacAging' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainMac.BdMacAging',
[], [],
''' MAC-Aging configuration commands
''',
'bd_mac_aging',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('bd-mac-filters', REFERENCE_CLASS, 'BdMacFilters' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainMac.BdMacFilters',
[], [],
                ''' Filter MAC Address
''',
'bd_mac_filters',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('bd-mac-learn', REFERENCE_ENUM_CLASS, 'MacLearnEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'MacLearnEnum',
[], [],
                ''' Enable MAC Learning
''',
'bd_mac_learn',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('bd-mac-limit', REFERENCE_CLASS, 'BdMacLimit' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainMac.BdMacLimit',
[], [],
''' MAC-Limit configuration commands
''',
'bd_mac_limit',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('bd-mac-port-down-flush', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Disable MAC Flush when Port goes Down
''',
'bd_mac_port_down_flush',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('bd-mac-withdraw', ATTRIBUTE, 'Empty' , None, None,
[], [],
                ''' Disable MAC Withdraw
''',
'bd_mac_withdraw',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('bd-mac-withdraw-access-pw-disable', ATTRIBUTE, 'Empty' , None, None,
[], [],
                ''' Disable MAC withdraw on Access PW
''',
'bd_mac_withdraw_access_pw_disable',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('bd-mac-withdraw-behavior', REFERENCE_ENUM_CLASS, 'MacWithdrawBehaviorEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'MacWithdrawBehaviorEnum',
[], [],
''' MAC withdraw sent on bridge port down
''',
'bd_mac_withdraw_behavior',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('bd-mac-withdraw-relay', ATTRIBUTE, 'Empty' , None, None,
[], [],
                ''' MAC withdraw sent from access PW to access
PW
''',
'bd_mac_withdraw_relay',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('mac-secure', REFERENCE_CLASS, 'MacSecure' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainMac.MacSecure',
[], [],
''' MAC Secure
''',
'mac_secure',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'bridge-domain-mac',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
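    # A minimal usage sketch for the BridgeDomainMac metadata above, assuming
    # the generated ydk-py bindings for Cisco_IOS_XR_l2vpn_cfg. The leaf
    # aliases ('bd_mac_limit_max', 'bd_mac_aging_time') and their ranges are
    # taken from the members recorded above; the 'bridge_domain_mac' attribute
    # name is inferred from the YANG node 'bridge-domain-mac' and is an
    # assumption here:
    #
    #   from ydk.models.cisco_ios_xr import Cisco_IOS_XR_l2vpn_cfg as l2vpn_cfg
    #   bd = (l2vpn_cfg.L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup
    #         .BridgeDomains.BridgeDomain())
    #   bd.bridge_domain_mac.bd_mac_limit.bd_mac_limit_max = 4000   # 0..4294967295
    #   bd.bridge_domain_mac.bd_mac_aging.bd_mac_aging_time = 600   # 300..30000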
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.NvSatellite' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.NvSatellite',
False,
[
_MetaInfoClassMember('enable', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Enable nV Satellite Settings
''',
'enable',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('offload-ipv4-multicast-enable', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Enable IPv4 Multicast Offload to Satellite
Nodes
''',
'offload_ipv4_multicast_enable',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'nv-satellite',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
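    # NvSatellite above consists of two 'Empty' leaves: 'enable' switches on
    # the nV Satellite settings and 'offload-ipv4-multicast-enable' offloads
    # IPv4 multicast to satellite nodes; for 'Empty' leaves the presence of
    # the leaf, not a value, carries the configuration.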
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainPbb.PbbEdges.PbbEdge.PbbStaticMacMappings.PbbStaticMacMapping' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainPbb.PbbEdges.PbbEdge.PbbStaticMacMappings.PbbStaticMacMapping',
False,
[
_MetaInfoClassMember('address', ATTRIBUTE, 'str' , None, None,
[], ['[0-9a-fA-F]{2}(:[0-9a-fA-F]{2}){5}'],
''' Static MAC address
''',
'address',
'Cisco-IOS-XR-l2vpn-cfg', True),
_MetaInfoClassMember('bmac', ATTRIBUTE, 'str' , None, None,
[], ['[0-9a-fA-F]{2}(:[0-9a-fA-F]{2}){5}'],
''' Backbone MAC address
''',
'bmac',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'pbb-static-mac-mapping',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainPbb.PbbEdges.PbbEdge.PbbStaticMacMappings' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainPbb.PbbEdges.PbbEdge.PbbStaticMacMappings',
False,
[
_MetaInfoClassMember('pbb-static-mac-mapping', REFERENCE_LIST, 'PbbStaticMacMapping' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainPbb.PbbEdges.PbbEdge.PbbStaticMacMappings.PbbStaticMacMapping',
[], [],
                ''' PBB Static MAC Address Mapping
Configuration
''',
'pbb_static_mac_mapping',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'pbb-static-mac-mappings',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainPbb.PbbEdges.PbbEdge.PbbEdgeDhcpProfile' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainPbb.PbbEdges.PbbEdge.PbbEdgeDhcpProfile',
False,
[
_MetaInfoClassMember('dhcp-snooping-id', ATTRIBUTE, 'str' , None, None,
[], [],
''' Disable DHCP snooping
''',
'dhcp_snooping_id',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('profile-id', REFERENCE_ENUM_CLASS, 'InterfaceProfileEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'InterfaceProfileEnum',
[], [],
''' Set the snooping profile
''',
'profile_id',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'pbb-edge-dhcp-profile',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainPbb.PbbEdges.PbbEdge.PbbEdgeMac.PbbEdgeMacLimit' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainPbb.PbbEdges.PbbEdge.PbbEdgeMac.PbbEdgeMacLimit',
False,
[
_MetaInfoClassMember('pbb-edge-mac-limit-action', REFERENCE_ENUM_CLASS, 'MacLimitActionEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'MacLimitActionEnum',
[], [],
''' MAC address limit enforcement action
''',
'pbb_edge_mac_limit_action',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('pbb-edge-mac-limit-max', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' Number of MAC addresses after which
MAC limit action is taken
''',
'pbb_edge_mac_limit_max',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('pbb-edge-mac-limit-notif', REFERENCE_ENUM_CLASS, 'MacNotificationEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'MacNotificationEnum',
[], [],
''' MAC address limit notification action
''',
'pbb_edge_mac_limit_notif',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'pbb-edge-mac-limit',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainPbb.PbbEdges.PbbEdge.PbbEdgeMac.PbbEdgeMacAging' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainPbb.PbbEdges.PbbEdge.PbbEdgeMac.PbbEdgeMacAging',
False,
[
_MetaInfoClassMember('pbb-edge-mac-aging-time', ATTRIBUTE, 'int' , None, None,
[('300', '30000')], [],
                ''' MAC Aging Time
''',
'pbb_edge_mac_aging_time',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('pbb-edge-mac-aging-type', REFERENCE_ENUM_CLASS, 'MacAgingEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'MacAgingEnum',
[], [],
''' MAC address aging type
''',
'pbb_edge_mac_aging_type',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'pbb-edge-mac-aging',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainPbb.PbbEdges.PbbEdge.PbbEdgeMac.PbbEdgeMacSecure' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainPbb.PbbEdges.PbbEdge.PbbEdgeMac.PbbEdgeMacSecure',
False,
[
_MetaInfoClassMember('accept-shutdown', ATTRIBUTE, 'Empty' , None, None,
[], [],
                ''' Allow the Virtual instance port to be
                shut down on MAC violation
''',
'accept_shutdown',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('action', REFERENCE_ENUM_CLASS, 'MacSecureActionEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'MacSecureActionEnum',
[], [],
''' MAC secure enforcement action
''',
'action',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('disable', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Disable Virtual instance port MAC
Secure
''',
'disable',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('enable', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Enable MAC Secure
''',
'enable',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('logging', REFERENCE_ENUM_CLASS, 'L2VpnLoggingEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2VpnLoggingEnum',
[], [],
''' MAC Secure Logging
''',
'logging',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'pbb-edge-mac-secure',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainPbb.PbbEdges.PbbEdge.PbbEdgeMac' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainPbb.PbbEdges.PbbEdge.PbbEdgeMac',
False,
[
_MetaInfoClassMember('pbb-edge-mac-aging', REFERENCE_CLASS, 'PbbEdgeMacAging' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainPbb.PbbEdges.PbbEdge.PbbEdgeMac.PbbEdgeMacAging',
[], [],
''' MAC-Aging configuration commands
''',
'pbb_edge_mac_aging',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('pbb-edge-mac-learning', REFERENCE_ENUM_CLASS, 'MacLearnEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'MacLearnEnum',
[], [],
                ''' Enable MAC Learning
''',
'pbb_edge_mac_learning',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('pbb-edge-mac-limit', REFERENCE_CLASS, 'PbbEdgeMacLimit' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainPbb.PbbEdges.PbbEdge.PbbEdgeMac.PbbEdgeMacLimit',
[], [],
''' MAC-Limit configuration commands
''',
'pbb_edge_mac_limit',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('pbb-edge-mac-secure', REFERENCE_CLASS, 'PbbEdgeMacSecure' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainPbb.PbbEdges.PbbEdge.PbbEdgeMac.PbbEdgeMacSecure',
[], [],
''' MAC Secure
''',
'pbb_edge_mac_secure',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'pbb-edge-mac',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainPbb.PbbEdges.PbbEdge' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainPbb.PbbEdges.PbbEdge',
False,
[
_MetaInfoClassMember('core-bd-name', ATTRIBUTE, 'str' , None, None,
[(0, 27)], [],
''' Core BD Name
''',
'core_bd_name',
'Cisco-IOS-XR-l2vpn-cfg', True),
_MetaInfoClassMember('isid', ATTRIBUTE, 'int' , None, None,
[('256', '16777214')], [],
''' ISID
''',
'isid',
'Cisco-IOS-XR-l2vpn-cfg', True),
_MetaInfoClassMember('pbb-edge-dhcp-profile', REFERENCE_CLASS, 'PbbEdgeDhcpProfile' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainPbb.PbbEdges.PbbEdge.PbbEdgeDhcpProfile',
[], [],
''' Attach a DHCP profile
''',
'pbb_edge_dhcp_profile',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('pbb-edge-igmp-profile', ATTRIBUTE, 'str' , None, None,
[(0, 32)], [],
                ''' Attach an IGMP Snooping profile
''',
'pbb_edge_igmp_profile',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('pbb-edge-mac', REFERENCE_CLASS, 'PbbEdgeMac' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainPbb.PbbEdges.PbbEdge.PbbEdgeMac',
[], [],
''' MAC configuration commands
''',
'pbb_edge_mac',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('pbb-static-mac-mappings', REFERENCE_CLASS, 'PbbStaticMacMappings' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainPbb.PbbEdges.PbbEdge.PbbStaticMacMappings',
[], [],
                ''' PBB Static MAC Address Mapping Table
''',
'pbb_static_mac_mappings',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('unknown-unicast-bmac', ATTRIBUTE, 'str' , None, None,
[], ['[0-9a-fA-F]{2}(:[0-9a-fA-F]{2}){5}'],
''' Configure Unknown Unicast BMAC address
for PBB Edge Port
''',
'unknown_unicast_bmac',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'pbb-edge',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainPbb.PbbEdges' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainPbb.PbbEdges',
False,
[
_MetaInfoClassMember('pbb-edge', REFERENCE_LIST, 'PbbEdge' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainPbb.PbbEdges.PbbEdge',
[], [],
''' Configure BD as PBB Edge with ISID and
associated PBB Core BD
''',
'pbb_edge',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'pbb-edges',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
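    # The PBB edge metadata above mirrors the bridge-domain MAC block:
    # per-feature containers (PbbStaticMacMappings, PbbEdgeDhcpProfile,
    # PbbEdgeMac with limit/aging/secure children) are aggregated under
    # PbbEdge, a list entry keyed by 'core-bd-name' and 'isid'.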
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainPbb.PbbCore.PbbCoreMac.PbbCoreMacAging' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainPbb.PbbCore.PbbCoreMac.PbbCoreMacAging',
False,
[
_MetaInfoClassMember('pbb-core-mac-aging-time', ATTRIBUTE, 'int' , None, None,
[('300', '30000')], [],
                ''' MAC Aging Time
''',
'pbb_core_mac_aging_time',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('pbb-core-mac-aging-type', REFERENCE_ENUM_CLASS, 'MacAgingEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'MacAgingEnum',
[], [],
''' MAC address aging type
''',
'pbb_core_mac_aging_type',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'pbb-core-mac-aging',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainPbb.PbbCore.PbbCoreMac.PbbCoreMacLimit' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainPbb.PbbCore.PbbCoreMac.PbbCoreMacLimit',
False,
[
_MetaInfoClassMember('pbb-core-mac-limit-action', REFERENCE_ENUM_CLASS, 'MacLimitActionEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'MacLimitActionEnum',
[], [],
''' MAC address limit enforcement action
''',
'pbb_core_mac_limit_action',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('pbb-core-mac-limit-max', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' Number of MAC addresses after which MAC
limit action is taken
''',
'pbb_core_mac_limit_max',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('pbb-core-mac-limit-notif', REFERENCE_ENUM_CLASS, 'MacNotificationEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'MacNotificationEnum',
[], [],
''' MAC address limit notification action
''',
'pbb_core_mac_limit_notif',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'pbb-core-mac-limit',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainPbb.PbbCore.PbbCoreMac' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainPbb.PbbCore.PbbCoreMac',
False,
[
_MetaInfoClassMember('pbb-core-mac-aging', REFERENCE_CLASS, 'PbbCoreMacAging' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainPbb.PbbCore.PbbCoreMac.PbbCoreMacAging',
[], [],
''' MAC-Aging configuration commands
''',
'pbb_core_mac_aging',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('pbb-core-mac-learning', REFERENCE_ENUM_CLASS, 'MacLearnEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'MacLearnEnum',
[], [],
                ''' Enable MAC Learning
''',
'pbb_core_mac_learning',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('pbb-core-mac-limit', REFERENCE_CLASS, 'PbbCoreMacLimit' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainPbb.PbbCore.PbbCoreMac.PbbCoreMacLimit',
[], [],
''' MAC-Limit configuration commands
''',
'pbb_core_mac_limit',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'pbb-core-mac',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainPbb.PbbCore.PbbCoreEvis.PbbCoreEvi' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainPbb.PbbCore.PbbCoreEvis.PbbCoreEvi',
False,
[
_MetaInfoClassMember('eviid', ATTRIBUTE, 'int' , None, None,
[('1', '4294967295')], [],
''' Ethernet VPN ID
''',
'eviid',
'Cisco-IOS-XR-l2vpn-cfg', True),
],
'Cisco-IOS-XR-l2vpn-cfg',
'pbb-core-evi',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainPbb.PbbCore.PbbCoreEvis' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainPbb.PbbCore.PbbCoreEvis',
False,
[
_MetaInfoClassMember('pbb-core-evi', REFERENCE_LIST, 'PbbCoreEvi' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainPbb.PbbCore.PbbCoreEvis.PbbCoreEvi',
[], [],
''' PBB Core EVI
''',
'pbb_core_evi',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'pbb-core-evis',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainPbb.PbbCore.PbbCoreDhcpProfile' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainPbb.PbbCore.PbbCoreDhcpProfile',
False,
[
_MetaInfoClassMember('dhcp-snooping-id', ATTRIBUTE, 'str' , None, None,
[], [],
''' Disable DHCP snooping
''',
'dhcp_snooping_id',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('profile-id', REFERENCE_ENUM_CLASS, 'InterfaceProfileEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'InterfaceProfileEnum',
[], [],
''' Set the snooping profile
''',
'profile_id',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'pbb-core-dhcp-profile',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainPbb.PbbCore' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainPbb.PbbCore',
False,
[
_MetaInfoClassMember('enable', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Enable Bridge Domain PBB Core
Configuration
''',
'enable',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('pbb-core-dhcp-profile', REFERENCE_CLASS, 'PbbCoreDhcpProfile' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainPbb.PbbCore.PbbCoreDhcpProfile',
[], [],
''' Attach a DHCP profile
''',
'pbb_core_dhcp_profile',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('pbb-core-evis', REFERENCE_CLASS, 'PbbCoreEvis' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainPbb.PbbCore.PbbCoreEvis',
[], [],
''' PBB Core EVI Table
''',
'pbb_core_evis',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('pbb-core-igmp-profile', ATTRIBUTE, 'str' , None, None,
[(0, 32)], [],
                ''' Attach an IGMP Snooping profile
''',
'pbb_core_igmp_profile',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('pbb-core-mac', REFERENCE_CLASS, 'PbbCoreMac' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainPbb.PbbCore.PbbCoreMac',
[], [],
''' MAC configuration commands
''',
'pbb_core_mac',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('pbb-core-mmrp-flood-optimization', ATTRIBUTE, 'Empty' , None, None,
[], [],
                ''' Enable MMRP PBB-VPLS Flood Optimization
''',
'pbb_core_mmrp_flood_optimization',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('vlan-id', ATTRIBUTE, 'int' , None, None,
[('1', '4094')], [],
''' VLAN ID to push
''',
'vlan_id',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'pbb-core',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainPbb' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainPbb',
False,
[
_MetaInfoClassMember('pbb-core', REFERENCE_CLASS, 'PbbCore' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainPbb.PbbCore',
[], [],
''' PBB Core
''',
'pbb_core',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('pbb-edges', REFERENCE_CLASS, 'PbbEdges' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainPbb.PbbEdges',
[], [],
''' PBB Edge
''',
'pbb_edges',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'bridge-domain-pbb',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
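    # Sketch of creating a PBB edge list entry from the metadata above
    # (assumes the generated ydk-py bindings; the 'pbb_edges'/'pbb_edge'
    # aliases come from the members recorded above, while 'bridge_domain_pbb'
    # is inferred from the YANG node name 'bridge-domain-pbb'):
    #
    #   edge = (l2vpn_cfg.L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup
    #           .BridgeDomains.BridgeDomain.BridgeDomainPbb.PbbEdges.PbbEdge())
    #   edge.isid = 300                 # key, range 256..16777214
    #   edge.core_bd_name = 'core-bd1'  # key, at most 27 characters
    #   bd.bridge_domain_pbb.pbb_edges.pbb_edge.append(edge)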
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainEvis.BridgeDomainEvi' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainEvis.BridgeDomainEvi',
False,
[
_MetaInfoClassMember('eviid', ATTRIBUTE, 'int' , None, None,
[('1', '4294967295')], [],
''' Ethernet VPN ID
''',
'eviid',
'Cisco-IOS-XR-l2vpn-cfg', True),
],
'Cisco-IOS-XR-l2vpn-cfg',
'bridge-domain-evi',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainEvis' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainEvis',
False,
[
_MetaInfoClassMember('bridge-domain-evi', REFERENCE_LIST, 'BridgeDomainEvi' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainEvis.BridgeDomainEvi',
[], [],
''' Bridge Domain EVI
''',
'bridge_domain_evi',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'bridge-domain-evis',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
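    # BridgeDomainEvis is a plain list wrapper: each BridgeDomainEvi entry is
    # keyed by 'eviid', an Ethernet VPN ID in the range 1..4294967295.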
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire.PseudowireDai.PseudowireDaiAddressValidation' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire.PseudowireDai.PseudowireDaiAddressValidation',
False,
[
_MetaInfoClassMember('destination-mac-verification', REFERENCE_ENUM_CLASS, 'L2VpnVerificationEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2VpnVerificationEnum',
[], [],
''' Destination MAC Verification
''',
'destination_mac_verification',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('ipv4-verification', REFERENCE_ENUM_CLASS, 'L2VpnVerificationEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2VpnVerificationEnum',
[], [],
''' IPv4 Verification
''',
'ipv4_verification',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('source-mac-verification', REFERENCE_ENUM_CLASS, 'L2VpnVerificationEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2VpnVerificationEnum',
[], [],
''' Source MAC Verification
''',
'source_mac_verification',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'pseudowire-dai-address-validation',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire.PseudowireDai' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire.PseudowireDai',
False,
[
_MetaInfoClassMember('disable', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Disable Dynamic ARP Inspection
''',
'disable',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('enable', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Enable Access Pseudowire Dynamic ARP
Inspection
''',
'enable',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('logging', REFERENCE_ENUM_CLASS, 'L2VpnLoggingEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2VpnLoggingEnum',
[], [],
''' Logging Type
''',
'logging',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('pseudowire-dai-address-validation', REFERENCE_CLASS, 'PseudowireDaiAddressValidation' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire.PseudowireDai.PseudowireDaiAddressValidation',
[], [],
''' Address Validation
''',
'pseudowire_dai_address_validation',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'pseudowire-dai',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire.BdpwStormControlTypes.BdpwStormControlType.StormControlUnit' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire.BdpwStormControlTypes.BdpwStormControlType.StormControlUnit',
False,
[
_MetaInfoClassMember('kbits-per-sec', ATTRIBUTE, 'int' , None, None,
[('64', '1280000')], [],
                ''' Kilobits Per Second; PktsPerSec and KbitsPerSec
cannot be configured together
''',
'kbits_per_sec',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('pkts-per-sec', ATTRIBUTE, 'int' , None, None,
[('1', '160000')], [],
                ''' Packets Per Second; PktsPerSec and KbitsPerSec
cannot be configured together
''',
'pkts_per_sec',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'storm-control-unit',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire.BdpwStormControlTypes.BdpwStormControlType' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire.BdpwStormControlTypes.BdpwStormControlType',
False,
[
_MetaInfoClassMember('sctype', REFERENCE_ENUM_CLASS, 'StormControlEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'StormControlEnum',
[], [],
''' Storm Control Type
''',
'sctype',
'Cisco-IOS-XR-l2vpn-cfg', True),
_MetaInfoClassMember('storm-control-unit', REFERENCE_CLASS, 'StormControlUnit' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire.BdpwStormControlTypes.BdpwStormControlType.StormControlUnit',
[], [],
''' Specify units for Storm Control Configuration
''',
'storm_control_unit',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'bdpw-storm-control-type',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire.BdpwStormControlTypes' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire.BdpwStormControlTypes',
False,
[
_MetaInfoClassMember('bdpw-storm-control-type', REFERENCE_LIST, 'BdpwStormControlType' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire.BdpwStormControlTypes.BdpwStormControlType',
[], [],
''' Storm Control Type
''',
'bdpw_storm_control_type',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'bdpw-storm-control-types',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire.PseudowireProfile' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire.PseudowireProfile',
False,
[
_MetaInfoClassMember('dhcp-snooping-id', ATTRIBUTE, 'str' , None, None,
[], [],
''' Disable DHCP snooping
''',
'dhcp_snooping_id',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('profile-id', REFERENCE_ENUM_CLASS, 'InterfaceProfileEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'InterfaceProfileEnum',
[], [],
''' Set the snooping profile
''',
'profile_id',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'pseudowire-profile',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire.BdPwStaticMacAddresses.BdPwStaticMacAddress' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire.BdPwStaticMacAddresses.BdPwStaticMacAddress',
False,
[
_MetaInfoClassMember('address', ATTRIBUTE, 'str' , None, None,
[], ['[0-9a-fA-F]{2}(:[0-9a-fA-F]{2}){5}'],
''' Static MAC address
''',
'address',
'Cisco-IOS-XR-l2vpn-cfg', True),
],
'Cisco-IOS-XR-l2vpn-cfg',
'bd-pw-static-mac-address',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire.BdPwStaticMacAddresses' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire.BdPwStaticMacAddresses',
False,
[
_MetaInfoClassMember('bd-pw-static-mac-address', REFERENCE_LIST, 'BdPwStaticMacAddress' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire.BdPwStaticMacAddresses.BdPwStaticMacAddress',
[], [],
                ''' Static MAC Address Configuration
''',
'bd_pw_static_mac_address',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'bd-pw-static-mac-addresses',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire.PseudowireIpSourceGuard' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire.PseudowireIpSourceGuard',
False,
[
_MetaInfoClassMember('disable', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Disable Dynamic IP source guard
''',
'disable',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('enable', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Enable IP Source Guard
''',
'enable',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('logging', REFERENCE_ENUM_CLASS, 'L2VpnLoggingEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2VpnLoggingEnum',
[], [],
''' Logging Type
''',
'logging',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'pseudowire-ip-source-guard',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire.PseudowireMac.PseudowireMacSecure' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire.PseudowireMac.PseudowireMacSecure',
False,
[
_MetaInfoClassMember('action', REFERENCE_ENUM_CLASS, 'MacSecureActionEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'MacSecureActionEnum',
[], [],
''' MAC secure enforcement action
''',
'action',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('disable', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Disable L2 Pseudowire MAC Secure
''',
'disable',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('enable', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Enable MAC Secure
''',
'enable',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('logging', REFERENCE_ENUM_CLASS, 'L2VpnLoggingEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2VpnLoggingEnum',
[], [],
''' MAC Secure Logging
''',
'logging',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'pseudowire-mac-secure',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire.PseudowireMac.PseudowireMacAging' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire.PseudowireMac.PseudowireMacAging',
False,
[
_MetaInfoClassMember('pseudowire-mac-aging-time', ATTRIBUTE, 'int' , None, None,
[('300', '30000')], [],
''' MAC Aging Time
''',
'pseudowire_mac_aging_time',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('pseudowire-mac-aging-type', REFERENCE_ENUM_CLASS, 'MacAgingEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'MacAgingEnum',
[], [],
''' MAC address aging type
''',
'pseudowire_mac_aging_type',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'pseudowire-mac-aging',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire.PseudowireMac.PseudowireMacLimit' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire.PseudowireMac.PseudowireMacLimit',
False,
[
_MetaInfoClassMember('pseudowire-mac-limit-action', REFERENCE_ENUM_CLASS, 'MacLimitActionEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'MacLimitActionEnum',
[], [],
''' Bridge Access Pseudowire MAC address
limit enforcement action
''',
'pseudowire_mac_limit_action',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('pseudowire-mac-limit-max', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' Number of MAC addresses on a Bridge
Access Pseudowire after which MAC limit
action is taken
''',
'pseudowire_mac_limit_max',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('pseudowire-mac-limit-notif', REFERENCE_ENUM_CLASS, 'MacNotificationEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'MacNotificationEnum',
[], [],
''' MAC address limit notification action
in a Bridge Access Pseudowire
''',
'pseudowire_mac_limit_notif',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'pseudowire-mac-limit',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire.PseudowireMac' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire.PseudowireMac',
False,
[
_MetaInfoClassMember('enable', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Bridge-domain Pseudowire MAC
configuration mode
''',
'enable',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('pseudowire-mac-aging', REFERENCE_CLASS, 'PseudowireMacAging' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire.PseudowireMac.PseudowireMacAging',
[], [],
''' MAC-Aging configuration commands
''',
'pseudowire_mac_aging',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('pseudowire-mac-learning', REFERENCE_ENUM_CLASS, 'MacLearnEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'MacLearnEnum',
[], [],
''' Enable MAC Learning
''',
'pseudowire_mac_learning',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('pseudowire-mac-limit', REFERENCE_CLASS, 'PseudowireMacLimit' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire.PseudowireMac.PseudowireMacLimit',
[], [],
''' MAC-Limit configuration commands
''',
'pseudowire_mac_limit',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('pseudowire-mac-port-down-flush', REFERENCE_ENUM_CLASS, 'PortDownFlushEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'PortDownFlushEnum',
[], [],
                ''' Enable/Disable MAC Flush when Port goes
down
''',
'pseudowire_mac_port_down_flush',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('pseudowire-mac-secure', REFERENCE_CLASS, 'PseudowireMacSecure' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire.PseudowireMac.PseudowireMacSecure',
[], [],
''' MAC Secure
''',
'pseudowire_mac_secure',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'pseudowire-mac',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire.BdPwSplitHorizon.BdPwSplitHorizonGroup' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire.BdPwSplitHorizon.BdPwSplitHorizonGroup',
False,
[
_MetaInfoClassMember('enable', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Enable split horizon group
''',
'enable',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'bd-pw-split-horizon-group',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire.BdPwSplitHorizon' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire.BdPwSplitHorizon',
False,
[
_MetaInfoClassMember('bd-pw-split-horizon-group', REFERENCE_CLASS, 'BdPwSplitHorizonGroup' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire.BdPwSplitHorizon.BdPwSplitHorizonGroup',
[], [],
''' Split Horizon Group
''',
'bd_pw_split_horizon_group',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'bd-pw-split-horizon',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire.BdPwMplsStaticLabels' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire.BdPwMplsStaticLabels',
False,
[
_MetaInfoClassMember('local-static-label', ATTRIBUTE, 'int' , None, None,
[('16', '1048575')], [],
''' Pseudowire local static label
''',
'local_static_label',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('remote-static-label', ATTRIBUTE, 'int' , None, None,
[('16', '1048575')], [],
''' Pseudowire remote static label
''',
'remote_static_label',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'bd-pw-mpls-static-labels',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire.BridgeDomainBackupPseudowires.BridgeDomainBackupPseudowire' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire.BridgeDomainBackupPseudowires.BridgeDomainBackupPseudowire',
False,
[
_MetaInfoClassMember('neighbor', ATTRIBUTE, 'str' , None, None,
[], ['(([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])\\.){3}([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])(%[\\p{N}\\p{L}]+)?'],
''' Neighbor IP address
''',
'neighbor',
'Cisco-IOS-XR-l2vpn-cfg', True),
_MetaInfoClassMember('pseudowire-id', ATTRIBUTE, 'int' , None, None,
[('1', '4294967295')], [],
''' Pseudowire ID
''',
'pseudowire_id',
'Cisco-IOS-XR-l2vpn-cfg', True),
_MetaInfoClassMember('bridge-domain-backup-pw-class', ATTRIBUTE, 'str' , None, None,
[(0, 32)], [],
''' PW class template name to use for this
pseudowire
''',
'bridge_domain_backup_pw_class',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'bridge-domain-backup-pseudowire',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire.BridgeDomainBackupPseudowires' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire.BridgeDomainBackupPseudowires',
False,
[
_MetaInfoClassMember('bridge-domain-backup-pseudowire', REFERENCE_LIST, 'BridgeDomainBackupPseudowire' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire.BridgeDomainBackupPseudowires.BridgeDomainBackupPseudowire',
[], [],
''' Backup pseudowire configuration
''',
'bridge_domain_backup_pseudowire',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'bridge-domain-backup-pseudowires',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire',
False,
[
_MetaInfoClassMember('neighbor', ATTRIBUTE, 'str' , None, None,
[], ['(([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])\\.){3}([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])(%[\\p{N}\\p{L}]+)?'],
''' Neighbor IP address
''',
'neighbor',
'Cisco-IOS-XR-l2vpn-cfg', True),
_MetaInfoClassMember('pseudowire-id', ATTRIBUTE, 'int' , None, None,
[('1', '4294967295')], [],
''' Pseudowire ID
''',
'pseudowire_id',
'Cisco-IOS-XR-l2vpn-cfg', True),
_MetaInfoClassMember('bd-pw-class', ATTRIBUTE, 'str' , None, None,
[(0, 32)], [],
''' PW class template name to use for this
pseudowire
''',
'bd_pw_class',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('bd-pw-mpls-static-labels', REFERENCE_CLASS, 'BdPwMplsStaticLabels' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire.BdPwMplsStaticLabels',
[], [],
''' MPLS static labels
''',
'bd_pw_mpls_static_labels',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('bd-pw-split-horizon', REFERENCE_CLASS, 'BdPwSplitHorizon' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire.BdPwSplitHorizon',
[], [],
''' Split Horizon
''',
'bd_pw_split_horizon',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('bd-pw-static-mac-addresses', REFERENCE_CLASS, 'BdPwStaticMacAddresses' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire.BdPwStaticMacAddresses',
[], [],
                ''' Static MAC Address Table
''',
'bd_pw_static_mac_addresses',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('bdpw-storm-control-types', REFERENCE_CLASS, 'BdpwStormControlTypes' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire.BdpwStormControlTypes',
[], [],
''' Storm Control
''',
'bdpw_storm_control_types',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('bridge-domain-backup-pseudowires', REFERENCE_CLASS, 'BridgeDomainBackupPseudowires' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire.BridgeDomainBackupPseudowires',
[], [],
                ''' List of backup pseudowires
''',
'bridge_domain_backup_pseudowires',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('pseudowire-dai', REFERENCE_CLASS, 'PseudowireDai' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire.PseudowireDai',
[], [],
''' Access Pseudowire Dynamic ARP Inspection
''',
'pseudowire_dai',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('pseudowire-flooding', REFERENCE_ENUM_CLASS, 'InterfaceTrafficFloodEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'InterfaceTrafficFloodEnum',
[], [],
''' Bridge-domain Pseudowire flooding
''',
'pseudowire_flooding',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('pseudowire-flooding-unknown-unicast', REFERENCE_ENUM_CLASS, 'InterfaceTrafficFloodEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'InterfaceTrafficFloodEnum',
[], [],
''' Bridge-domain Pseudowire flooding Unknown
Unicast
''',
'pseudowire_flooding_unknown_unicast',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('pseudowire-igmp-snoop', ATTRIBUTE, 'str' , None, None,
[(0, 32)], [],
                ''' Attach an IGMP Snooping profile
''',
'pseudowire_igmp_snoop',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('pseudowire-ip-source-guard', REFERENCE_CLASS, 'PseudowireIpSourceGuard' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire.PseudowireIpSourceGuard',
[], [],
''' IP Source Guard
''',
'pseudowire_ip_source_guard',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('pseudowire-mac', REFERENCE_CLASS, 'PseudowireMac' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire.PseudowireMac',
[], [],
''' Bridge-domain Pseudowire MAC
configuration commands
''',
'pseudowire_mac',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('pseudowire-mld-snoop', ATTRIBUTE, 'str' , None, None,
[(0, 32)], [],
                ''' Attach an MLD Snooping profile
''',
'pseudowire_mld_snoop',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('pseudowire-profile', REFERENCE_CLASS, 'PseudowireProfile' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire.PseudowireProfile',
[], [],
''' Attach a DHCP profile
''',
'pseudowire_profile',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'bd-pseudowire',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
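    # Sketch of adding a bridge-domain access pseudowire from the BdPseudowire
    # metadata above (assumes the generated ydk-py bindings; 'bd_pseudowire'
    # is the list alias recorded in BdPseudowires below, while
    # 'bd_pseudowires' on BridgeDomain is inferred from the YANG node name):
    #
    #   pw = (l2vpn_cfg.L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup
    #         .BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire())
    #   pw.neighbor = '192.0.2.1'   # IPv4 neighbor key
    #   pw.pseudowire_id = 100      # key, range 1..4294967295
    #   bd.bd_pseudowires.bd_pseudowire.append(pw)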
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires',
False,
[
_MetaInfoClassMember('bd-pseudowire', REFERENCE_LIST, 'BdPseudowire' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire',
[], [],
''' Pseudowire configuration
''',
'bd_pseudowire',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'bd-pseudowires',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.MulticastP2Mp.Transports.Transport' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.MulticastP2Mp.Transports.Transport',
False,
[
_MetaInfoClassMember('transport-name', ATTRIBUTE, 'str' , None, None,
[], ['(RSVP_TE)'],
''' Transport Type
''',
'transport_name',
'Cisco-IOS-XR-l2vpn-cfg', True),
_MetaInfoClassMember('attribute-set-name', ATTRIBUTE, 'str' , None, None,
[(0, 64)], [],
''' Multicast P2MP TE Attribute Set Name
''',
'attribute_set_name',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'transport',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.MulticastP2Mp.Transports' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.MulticastP2Mp.Transports',
False,
[
_MetaInfoClassMember('transport', REFERENCE_LIST, 'Transport' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.MulticastP2Mp.Transports.Transport',
[], [],
''' Multicast P2MP Transport Type
''',
'transport',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'transports',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.MulticastP2Mp.Signalings.Signaling' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.MulticastP2Mp.Signalings.Signaling',
False,
[
_MetaInfoClassMember('signaling-name', ATTRIBUTE, 'str' , None, None,
[], ['(BGP)'],
''' Signaling Type
''',
'signaling_name',
'Cisco-IOS-XR-l2vpn-cfg', True),
],
'Cisco-IOS-XR-l2vpn-cfg',
'signaling',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.MulticastP2Mp.Signalings' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.MulticastP2Mp.Signalings',
False,
[
_MetaInfoClassMember('signaling', REFERENCE_LIST, 'Signaling' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.MulticastP2Mp.Signalings.Signaling',
[], [],
''' Multicast P2MP Signaling Type
''',
'signaling',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'signalings',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.MulticastP2Mp' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.MulticastP2Mp',
False,
[
_MetaInfoClassMember('enable', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Enable Autodiscovery P2MP
''',
'enable',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('signalings', REFERENCE_CLASS, 'Signalings' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.MulticastP2Mp.Signalings',
[], [],
''' Multicast P2MP Signaling Type
''',
'signalings',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('transports', REFERENCE_CLASS, 'Transports' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.MulticastP2Mp.Transports',
[], [],
''' Multicast P2MP Transport
''',
'transports',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'multicast-p2mp',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
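    # In the MulticastP2Mp block above the list keys are pattern-restricted
    # strings rather than free-form names: Transport's 'transport-name'
    # accepts only 'RSVP_TE' and Signaling's 'signaling-name' accepts only
    # 'BGP'.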
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.VfiPseudowires.VfiPseudowire.VfiPwDhcpSnoop' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.VfiPseudowires.VfiPseudowire.VfiPwDhcpSnoop',
False,
[
_MetaInfoClassMember('dhcp-snooping-id', ATTRIBUTE, 'str' , None, None,
[], [],
''' Disable DHCP snooping
''',
'dhcp_snooping_id',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('profile-id', REFERENCE_ENUM_CLASS, 'InterfaceProfileEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'InterfaceProfileEnum',
[], [],
''' Set the snooping profile
''',
'profile_id',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'vfi-pw-dhcp-snoop',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.VfiPseudowires.VfiPseudowire.VfiPwMplsStaticLabels' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.VfiPseudowires.VfiPseudowire.VfiPwMplsStaticLabels',
False,
[
_MetaInfoClassMember('local-static-label', ATTRIBUTE, 'int' , None, None,
[('16', '1048575')], [],
''' Pseudowire local static label
''',
'local_static_label',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('remote-static-label', ATTRIBUTE, 'int' , None, None,
[('16', '1048575')], [],
''' Pseudowire remote static label
''',
'remote_static_label',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'vfi-pw-mpls-static-labels',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.VfiPseudowires.VfiPseudowire.PseudowireStaticMacAddresses.PseudowireStaticMacAddress' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.VfiPseudowires.VfiPseudowire.PseudowireStaticMacAddresses.PseudowireStaticMacAddress',
False,
[
_MetaInfoClassMember('address', ATTRIBUTE, 'str' , None, None,
[], ['[0-9a-fA-F]{2}(:[0-9a-fA-F]{2}){5}'],
''' Static MAC address
''',
'address',
'Cisco-IOS-XR-l2vpn-cfg', True),
],
'Cisco-IOS-XR-l2vpn-cfg',
'pseudowire-static-mac-address',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.VfiPseudowires.VfiPseudowire.PseudowireStaticMacAddresses' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.VfiPseudowires.VfiPseudowire.PseudowireStaticMacAddresses',
False,
[
_MetaInfoClassMember('pseudowire-static-mac-address', REFERENCE_LIST, 'PseudowireStaticMacAddress' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.VfiPseudowires.VfiPseudowire.PseudowireStaticMacAddresses.PseudowireStaticMacAddress',
[], [],
''' Static Mac Address Configuration
''',
'pseudowire_static_mac_address',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'pseudowire-static-mac-addresses',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.VfiPseudowires.VfiPseudowire' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.VfiPseudowires.VfiPseudowire',
False,
[
_MetaInfoClassMember('neighbor', ATTRIBUTE, 'str' , None, None,
[], ['(([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])\\.){3}([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])(%[\\p{N}\\p{L}]+)?'],
''' Neighbor IP address
''',
'neighbor',
'Cisco-IOS-XR-l2vpn-cfg', True),
_MetaInfoClassMember('pseudowire-id', ATTRIBUTE, 'int' , None, None,
[('1', '4294967295')], [],
''' Pseudowire ID
''',
'pseudowire_id',
'Cisco-IOS-XR-l2vpn-cfg', True),
_MetaInfoClassMember('pseudowire-static-mac-addresses', REFERENCE_CLASS, 'PseudowireStaticMacAddresses' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.VfiPseudowires.VfiPseudowire.PseudowireStaticMacAddresses',
[], [],
''' Static Mac Address Table
''',
'pseudowire_static_mac_addresses',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('vfi-pw-class', ATTRIBUTE, 'str' , None, None,
[(0, 32)], [],
''' PW class template name to use for this
pseudowire
''',
'vfi_pw_class',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('vfi-pw-dhcp-snoop', REFERENCE_CLASS, 'VfiPwDhcpSnoop' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.VfiPseudowires.VfiPseudowire.VfiPwDhcpSnoop',
[], [],
''' Attach a DHCP Snooping profile
''',
'vfi_pw_dhcp_snoop',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('vfi-pw-igmp-snoop', ATTRIBUTE, 'str' , None, None,
[(0, 32)], [],
''' Attach an IGMP Snooping profile
''',
'vfi_pw_igmp_snoop',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('vfi-pw-mld-snoop', ATTRIBUTE, 'str' , None, None,
[(0, 32)], [],
''' Attach an MLD Snooping profile
''',
'vfi_pw_mld_snoop',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('vfi-pw-mpls-static-labels', REFERENCE_CLASS, 'VfiPwMplsStaticLabels' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.VfiPseudowires.VfiPseudowire.VfiPwMplsStaticLabels',
[], [],
''' MPLS static labels
''',
'vfi_pw_mpls_static_labels',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'vfi-pseudowire',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
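# Illustrative usage sketch (comments only, not part of the generated
# metadata): the VfiPseudowire entries above describe a list node keyed by
# 'neighbor' and 'pseudowire-id'. Assuming the usual ydk-py object model for
# this module, a pseudowire could be built roughly like this (all names and
# values below are example assumptions):
#
#   from ydk.models.cisco_ios_xr import Cisco_IOS_XR_l2vpn_cfg as l2vpn_cfg
#   vfi = l2vpn_cfg.L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup \
#       .BridgeDomains.BridgeDomain.Vfis.Vfi()
#   pw = vfi.vfi_pseudowires.VfiPseudowire()
#   pw.neighbor = '10.0.0.1'        # key: neighbor IPv4 address
#   pw.pseudowire_id = 100          # key: range 1..4294967295
#   pw.vfi_pw_class = 'PW_CLASS_1'  # optional PW class template (max 32 chars)
#   vfi.vfi_pseudowires.vfi_pseudowire.append(pw)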
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.VfiPseudowires' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.VfiPseudowires',
False,
[
_MetaInfoClassMember('vfi-pseudowire', REFERENCE_LIST, 'VfiPseudowire' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.VfiPseudowires.VfiPseudowire',
[], [],
''' Pseudowire configuration
''',
'vfi_pseudowire',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'vfi-pseudowires',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.BgpAutoDiscovery.LdpSignalingProtocol.Vplsid' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.BgpAutoDiscovery.LdpSignalingProtocol.Vplsid',
False,
[
_MetaInfoClassMember('address', ATTRIBUTE, 'str' , None, None,
[], ['(([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])\\.){3}([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])(%[\\p{N}\\p{L}]+)?'],
''' IPV4 address
''',
'address',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('address-index', ATTRIBUTE, 'int' , None, None,
[('0', '32767')], [],
''' Address index
''',
'address_index',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('as', ATTRIBUTE, 'int' , None, None,
[('1', '65535')], [],
''' Two byte AS number
''',
'as_',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('as-index', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' AS index
''',
'as_index',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('type', REFERENCE_ENUM_CLASS, 'LdpVplsIdEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'LdpVplsIdEnum',
[], [],
''' VPLS-ID Type
''',
'type',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'vplsid',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.BgpAutoDiscovery.LdpSignalingProtocol.FlowLabelLoadBalance' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.BgpAutoDiscovery.LdpSignalingProtocol.FlowLabelLoadBalance',
False,
[
_MetaInfoClassMember('flow-label', REFERENCE_ENUM_CLASS, 'FlowLabelLoadBalanceEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'FlowLabelLoadBalanceEnum',
[], [],
''' Flow Label load balance type
''',
'flow_label',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('static', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Static Flow Label
''',
'static',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'flow-label-load-balance',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.BgpAutoDiscovery.LdpSignalingProtocol' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.BgpAutoDiscovery.LdpSignalingProtocol',
False,
[
_MetaInfoClassMember('enable', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Enable LDP as Signaling Protocol.
Deletion of this object also causes
deletion of all objects under
LDPSignalingProtocol.
''',
'enable',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('flow-label-load-balance', REFERENCE_CLASS, 'FlowLabelLoadBalance' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.BgpAutoDiscovery.LdpSignalingProtocol.FlowLabelLoadBalance',
[], [],
''' Enable Flow Label based load balancing
''',
'flow_label_load_balance',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('vplsid', REFERENCE_CLASS, 'Vplsid' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.BgpAutoDiscovery.LdpSignalingProtocol.Vplsid',
[], [],
''' VPLS ID
''',
'vplsid',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'ldp-signaling-protocol',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.BgpAutoDiscovery.BgpRoutePolicy' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.BgpAutoDiscovery.BgpRoutePolicy',
False,
[
_MetaInfoClassMember('export', ATTRIBUTE, 'str' , None, None,
[], [],
''' Export route policy
''',
'export',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'bgp-route-policy',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.BgpAutoDiscovery.RouteDistinguisher' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.BgpAutoDiscovery.RouteDistinguisher',
False,
[
_MetaInfoClassMember('addr-index', ATTRIBUTE, 'int' , None, None,
[('0', '65535')], [],
''' Addr index
''',
'addr_index',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('address', ATTRIBUTE, 'str' , None, None,
[], ['(([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])\\.){3}([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])(%[\\p{N}\\p{L}]+)?'],
''' IPV4 address
''',
'address',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('as', ATTRIBUTE, 'int' , None, None,
[('1', '4294967295')], [],
''' Two byte or 4 byte AS number
''',
'as_',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('as-index', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' AS:nn (hex or decimal format)
''',
'as_index',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('type', REFERENCE_ENUM_CLASS, 'BgpRouteDistinguisherEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'BgpRouteDistinguisherEnum',
[], [],
''' Route Distinguisher Type
''',
'type',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'route-distinguisher',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.BgpAutoDiscovery.BgpSignalingProtocol.FlowLabelLoadBalance' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.BgpAutoDiscovery.BgpSignalingProtocol.FlowLabelLoadBalance',
False,
[
_MetaInfoClassMember('flow-label', REFERENCE_ENUM_CLASS, 'FlowLabelLoadBalanceEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'FlowLabelLoadBalanceEnum',
[], [],
''' Flow Label load balance type
''',
'flow_label',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('static', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Static Flow Label
''',
'static',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'flow-label-load-balance',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.BgpAutoDiscovery.BgpSignalingProtocol' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.BgpAutoDiscovery.BgpSignalingProtocol',
False,
[
_MetaInfoClassMember('enable', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Enable BGP as Signaling Protocol
''',
'enable',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('flow-label-load-balance', REFERENCE_CLASS, 'FlowLabelLoadBalance' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.BgpAutoDiscovery.BgpSignalingProtocol.FlowLabelLoadBalance',
[], [],
''' Enable Flow Label based load balancing
''',
'flow_label_load_balance',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('ve-range', ATTRIBUTE, 'int' , None, None,
[('11', '100')], [],
''' Local Virtual Edge Block Configurable
Range
''',
've_range',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('veid', ATTRIBUTE, 'int' , None, None,
[('1', '16384')], [],
''' Local Virtual Edge Identifier
''',
'veid',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'bgp-signaling-protocol',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.BgpAutoDiscovery.RouteTargets.RouteTarget.TwoByteAsOrFourByteAs' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.BgpAutoDiscovery.RouteTargets.RouteTarget.TwoByteAsOrFourByteAs',
False,
[
_MetaInfoClassMember('as', ATTRIBUTE, 'int' , None, None,
[('1', '4294967295')], [],
''' Two byte or 4 byte AS number
''',
'as_',
'Cisco-IOS-XR-l2vpn-cfg', True),
_MetaInfoClassMember('as-index', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' AS:nn (hex or decimal format)
''',
'as_index',
'Cisco-IOS-XR-l2vpn-cfg', True),
],
'Cisco-IOS-XR-l2vpn-cfg',
'two-byte-as-or-four-byte-as',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.BgpAutoDiscovery.RouteTargets.RouteTarget.Ipv4Address' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.BgpAutoDiscovery.RouteTargets.RouteTarget.Ipv4Address',
False,
[
_MetaInfoClassMember('addr-index', ATTRIBUTE, 'int' , None, None,
[('0', '65535')], [],
''' Addr index
''',
'addr_index',
'Cisco-IOS-XR-l2vpn-cfg', True),
_MetaInfoClassMember('address', ATTRIBUTE, 'str' , None, None,
[], ['(([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])\\.){3}([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])(%[\\p{N}\\p{L}]+)?'],
''' IPV4 address
''',
'address',
'Cisco-IOS-XR-l2vpn-cfg', True),
],
'Cisco-IOS-XR-l2vpn-cfg',
'ipv4-address',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.BgpAutoDiscovery.RouteTargets.RouteTarget' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.BgpAutoDiscovery.RouteTargets.RouteTarget',
False,
[
_MetaInfoClassMember('format', REFERENCE_ENUM_CLASS, 'BgpRouteTargetFormatEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'BgpRouteTargetFormatEnum',
[], [],
''' Format of the route target
''',
'format',
'Cisco-IOS-XR-l2vpn-cfg', True),
_MetaInfoClassMember('role', REFERENCE_ENUM_CLASS, 'BgpRouteTargetRoleEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'BgpRouteTargetRoleEnum',
[], [],
''' Role of the route target type
''',
'role',
'Cisco-IOS-XR-l2vpn-cfg', True),
_MetaInfoClassMember('ipv4-address', REFERENCE_LIST, 'Ipv4Address' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.BgpAutoDiscovery.RouteTargets.RouteTarget.Ipv4Address',
[], [],
''' ipv4 address
''',
'ipv4_address',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('two-byte-as-or-four-byte-as', REFERENCE_LIST, 'TwoByteAsOrFourByteAs' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.BgpAutoDiscovery.RouteTargets.RouteTarget.TwoByteAsOrFourByteAs',
[], [],
''' two byte as or four byte as
''',
'two_byte_as_or_four_byte_as',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'route-target',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.BgpAutoDiscovery.RouteTargets' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.BgpAutoDiscovery.RouteTargets',
False,
[
_MetaInfoClassMember('route-target', REFERENCE_LIST, 'RouteTarget' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.BgpAutoDiscovery.RouteTargets.RouteTarget',
[], [],
''' Name of the Route Target
''',
'route_target',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'route-targets',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.BgpAutoDiscovery' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.BgpAutoDiscovery',
False,
[
_MetaInfoClassMember('ad-control-word', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Enable control-word for this VFI
''',
'ad_control_word',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('bgp-route-policy', REFERENCE_CLASS, 'BgpRoutePolicy' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.BgpAutoDiscovery.BgpRoutePolicy',
[], [],
''' Route policy
''',
'bgp_route_policy',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('bgp-signaling-protocol', REFERENCE_CLASS, 'BgpSignalingProtocol' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.BgpAutoDiscovery.BgpSignalingProtocol',
[], [],
''' Enable Signaling Protocol BGP in this
VFI
''',
'bgp_signaling_protocol',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('enable', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Enable Autodiscovery BGP
''',
'enable',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('ldp-signaling-protocol', REFERENCE_CLASS, 'LdpSignalingProtocol' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.BgpAutoDiscovery.LdpSignalingProtocol',
[], [],
''' Signaling Protocol LDP in this VFI
configuration
''',
'ldp_signaling_protocol',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('route-distinguisher', REFERENCE_CLASS, 'RouteDistinguisher' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.BgpAutoDiscovery.RouteDistinguisher',
[], [],
''' Route Distinguisher
''',
'route_distinguisher',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('route-targets', REFERENCE_CLASS, 'RouteTargets' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.BgpAutoDiscovery.RouteTargets',
[], [],
''' Route Target
''',
'route_targets',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('table-policy', ATTRIBUTE, 'str' , None, None,
[], [],
''' Table Policy for installation of
forwarding data to L2FIB
''',
'table_policy',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'bgp-auto-discovery',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
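# Illustrative sketch (comments only), continuing the example after the
# VfiPseudowire entry above: the BgpAutoDiscovery entry wires together the
# route-distinguisher, route-targets, and signaling-protocol containers. The
# enum member name below is an assumption about the generated bindings:
#
#   from ydk.types import Empty
#   bgp_ad = vfi.bgp_auto_discovery
#   bgp_ad.enable = Empty()                   # presence leaf
#   bgp_ad.route_distinguisher.type = \
#       l2vpn_cfg.BgpRouteDistinguisherEnum.two_byte_as  # assumed member name
#   bgp_ad.route_distinguisher.as_ = 65000    # note trailing underscore
#   bgp_ad.route_distinguisher.as_index = 1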
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi',
False,
[
_MetaInfoClassMember('name', ATTRIBUTE, 'str' , None, None,
[(0, 32)], [],
''' Name of the Virtual Forwarding Interface
''',
'name',
'Cisco-IOS-XR-l2vpn-cfg', True),
_MetaInfoClassMember('bgp-auto-discovery', REFERENCE_CLASS, 'BgpAutoDiscovery' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.BgpAutoDiscovery',
[], [],
''' Enable Autodiscovery BGP in this VFI
''',
'bgp_auto_discovery',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('multicast-p2mp', REFERENCE_CLASS, 'MulticastP2Mp' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.MulticastP2Mp',
[], [],
''' Enable Multicast P2MP in this VFI
''',
'multicast_p2mp',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('vfi-pseudowires', REFERENCE_CLASS, 'VfiPseudowires' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.VfiPseudowires',
[], [],
''' List of pseudowires
''',
'vfi_pseudowires',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('vfi-shutdown', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Enabling Shutdown
''',
'vfi_shutdown',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('vpnid', ATTRIBUTE, 'int' , None, None,
[('1', '4294967295')], [],
''' VPN Identifier
''',
'vpnid',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'vfi',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis',
False,
[
_MetaInfoClassMember('vfi', REFERENCE_LIST, 'Vfi' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi',
[], [],
''' Name of the Virtual Forwarding Interface
''',
'vfi',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'vfis',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
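# Illustrative sketch (comments only): per the Vfis entry above, each Vfi is
# keyed by 'name' and hangs off a BridgeDomain. Given a BridgeDomain instance
# 'bd' (sketched after the BridgeDomain entry further below):
#
#   vfi.name = 'VFI_1'            # key: VFI name (max 32 chars)
#   bd.vfis.vfi.append(vfi)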
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdAttachmentCircuits.BdAttachmentCircuit.InterfaceIpSourceGuard' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdAttachmentCircuits.BdAttachmentCircuit.InterfaceIpSourceGuard',
False,
[
_MetaInfoClassMember('disable', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Disable L2 Interface Dynamic IP source
guard
''',
'disable',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('enable', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Enable IP Source Guard
''',
'enable',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('logging', REFERENCE_ENUM_CLASS, 'L2VpnLoggingEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2VpnLoggingEnum',
[], [],
''' Logging Type
''',
'logging',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'interface-ip-source-guard',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdAttachmentCircuits.BdAttachmentCircuit.InterfaceDai.InterfaceDaiAddressValidation' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdAttachmentCircuits.BdAttachmentCircuit.InterfaceDai.InterfaceDaiAddressValidation',
False,
[
_MetaInfoClassMember('destination-mac-verification', REFERENCE_ENUM_CLASS, 'L2VpnVerificationEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2VpnVerificationEnum',
[], [],
''' Destination MAC Verification
''',
'destination_mac_verification',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('enable', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Enable Address Validation
''',
'enable',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('ipv4-verification', REFERENCE_ENUM_CLASS, 'L2VpnVerificationEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2VpnVerificationEnum',
[], [],
''' IPv4 Verification
''',
'ipv4_verification',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('source-mac-verification', REFERENCE_ENUM_CLASS, 'L2VpnVerificationEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2VpnVerificationEnum',
[], [],
''' Source MAC Verification
''',
'source_mac_verification',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'interface-dai-address-validation',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdAttachmentCircuits.BdAttachmentCircuit.InterfaceDai' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdAttachmentCircuits.BdAttachmentCircuit.InterfaceDai',
False,
[
_MetaInfoClassMember('disable', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Disable L2 Interface Dynamic ARP
Inspection
''',
'disable',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('enable', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Enable L2 Interface Dynamic ARP
Inspection
''',
'enable',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('interface-dai-address-validation', REFERENCE_CLASS, 'InterfaceDaiAddressValidation' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdAttachmentCircuits.BdAttachmentCircuit.InterfaceDai.InterfaceDaiAddressValidation',
[], [],
''' Address Validation
''',
'interface_dai_address_validation',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('logging', REFERENCE_ENUM_CLASS, 'L2VpnLoggingEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2VpnLoggingEnum',
[], [],
''' Logging Type
''',
'logging',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'interface-dai',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdAttachmentCircuits.BdAttachmentCircuit.InterfaceProfile' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdAttachmentCircuits.BdAttachmentCircuit.InterfaceProfile',
False,
[
_MetaInfoClassMember('dhcp-snooping-id', ATTRIBUTE, 'str' , None, None,
[], [],
''' Disable DHCP snooping
''',
'dhcp_snooping_id',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('profile-id', REFERENCE_ENUM_CLASS, 'InterfaceProfileEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'InterfaceProfileEnum',
[], [],
''' Set the snooping profile
''',
'profile_id',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'interface-profile',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdAttachmentCircuits.BdAttachmentCircuit.BdacStormControlTypes.BdacStormControlType.StormControlUnit' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdAttachmentCircuits.BdAttachmentCircuit.BdacStormControlTypes.BdacStormControlType.StormControlUnit',
False,
[
_MetaInfoClassMember('kbits-per-sec', ATTRIBUTE, 'int' , None, None,
[('64', '1280000')], [],
''' Kilobits Per Second, PktsPerSec and KbitsPerSec
cannot be configured together
''',
'kbits_per_sec',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('pkts-per-sec', ATTRIBUTE, 'int' , None, None,
[('1', '160000')], [],
''' Packets Per Second, PktsPerSec and KbitsPerSec
cannot be configured together
''',
'pkts_per_sec',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'storm-control-unit',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdAttachmentCircuits.BdAttachmentCircuit.BdacStormControlTypes.BdacStormControlType' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdAttachmentCircuits.BdAttachmentCircuit.BdacStormControlTypes.BdacStormControlType',
False,
[
_MetaInfoClassMember('sctype', REFERENCE_ENUM_CLASS, 'StormControlEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'StormControlEnum',
[], [],
''' Storm Control Type
''',
'sctype',
'Cisco-IOS-XR-l2vpn-cfg', True),
_MetaInfoClassMember('storm-control-unit', REFERENCE_CLASS, 'StormControlUnit' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdAttachmentCircuits.BdAttachmentCircuit.BdacStormControlTypes.BdacStormControlType.StormControlUnit',
[], [],
''' Specify units for Storm Control Configuration
''',
'storm_control_unit',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'bdac-storm-control-type',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdAttachmentCircuits.BdAttachmentCircuit.BdacStormControlTypes' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdAttachmentCircuits.BdAttachmentCircuit.BdacStormControlTypes',
False,
[
_MetaInfoClassMember('bdac-storm-control-type', REFERENCE_LIST, 'BdacStormControlType' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdAttachmentCircuits.BdAttachmentCircuit.BdacStormControlTypes.BdacStormControlType',
[], [],
''' Storm Control Type
''',
'bdac_storm_control_type',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'bdac-storm-control-types',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
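# Illustrative sketch (comments only): each BdacStormControlType above is
# keyed by 'sctype' and carries one StormControlUnit, whose pkts_per_sec and
# kbits_per_sec leaves are mutually exclusive per their descriptions. Given a
# BdAttachmentCircuit instance 'ac' (sketched below); the enum member name is
# an assumption:
#
#   sc = ac.bdac_storm_control_types.BdacStormControlType()
#   sc.sctype = l2vpn_cfg.StormControlEnum.broadcast    # assumed member name
#   sc.storm_control_unit.pkts_per_sec = 1000           # 1..160000; do not
#                                                       # also set kbits_per_sec
#   ac.bdac_storm_control_types.bdac_storm_control_type.append(sc)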
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdAttachmentCircuits.BdAttachmentCircuit.SplitHorizon.SplitHorizonGroupId' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdAttachmentCircuits.BdAttachmentCircuit.SplitHorizon.SplitHorizonGroupId',
False,
[
_MetaInfoClassMember('enable', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Enable split horizon group
''',
'enable',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'split-horizon-group-id',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdAttachmentCircuits.BdAttachmentCircuit.SplitHorizon' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdAttachmentCircuits.BdAttachmentCircuit.SplitHorizon',
False,
[
_MetaInfoClassMember('split-horizon-group-id', REFERENCE_CLASS, 'SplitHorizonGroupId' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdAttachmentCircuits.BdAttachmentCircuit.SplitHorizon.SplitHorizonGroupId',
[], [],
''' Split Horizon Group ID
''',
'split_horizon_group_id',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'split-horizon',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdAttachmentCircuits.BdAttachmentCircuit.StaticMacAddresses.StaticMacAddress' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdAttachmentCircuits.BdAttachmentCircuit.StaticMacAddresses.StaticMacAddress',
False,
[
_MetaInfoClassMember('address', ATTRIBUTE, 'str' , None, None,
[], ['[0-9a-fA-F]{2}(:[0-9a-fA-F]{2}){5}'],
''' Static MAC address
''',
'address',
'Cisco-IOS-XR-l2vpn-cfg', True),
],
'Cisco-IOS-XR-l2vpn-cfg',
'static-mac-address',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdAttachmentCircuits.BdAttachmentCircuit.StaticMacAddresses' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdAttachmentCircuits.BdAttachmentCircuit.StaticMacAddresses',
False,
[
_MetaInfoClassMember('static-mac-address', REFERENCE_LIST, 'StaticMacAddress' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdAttachmentCircuits.BdAttachmentCircuit.StaticMacAddresses.StaticMacAddress',
[], [],
''' Static Mac Address Configuration
''',
'static_mac_address',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'static-mac-addresses',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdAttachmentCircuits.BdAttachmentCircuit.InterfaceMac.InterfaceMacAging' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdAttachmentCircuits.BdAttachmentCircuit.InterfaceMac.InterfaceMacAging',
False,
[
_MetaInfoClassMember('interface-mac-aging-time', ATTRIBUTE, 'int' , None, None,
[('300', '30000')], [],
''' Mac Aging Time
''',
'interface_mac_aging_time',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('interface-mac-aging-type', REFERENCE_ENUM_CLASS, 'MacAgingEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'MacAgingEnum',
[], [],
''' MAC address aging type
''',
'interface_mac_aging_type',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'interface-mac-aging',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdAttachmentCircuits.BdAttachmentCircuit.InterfaceMac.InterfaceMacSecure' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdAttachmentCircuits.BdAttachmentCircuit.InterfaceMac.InterfaceMacSecure',
False,
[
_MetaInfoClassMember('action', REFERENCE_ENUM_CLASS, 'MacSecureActionEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'MacSecureActionEnum',
[], [],
''' MAC secure enforcement action
''',
'action',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('disable', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Disable L2 Interface MAC Secure
''',
'disable',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('enable', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Enable MAC Secure
''',
'enable',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('logging', REFERENCE_ENUM_CLASS, 'L2VpnLoggingEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2VpnLoggingEnum',
[], [],
''' MAC Secure Logging
''',
'logging',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'interface-mac-secure',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdAttachmentCircuits.BdAttachmentCircuit.InterfaceMac.InterfaceMacLimit' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdAttachmentCircuits.BdAttachmentCircuit.InterfaceMac.InterfaceMacLimit',
False,
[
_MetaInfoClassMember('interface-mac-limit-action', REFERENCE_ENUM_CLASS, 'MacLimitActionEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'MacLimitActionEnum',
[], [],
''' Interface MAC address limit enforcement
action
''',
'interface_mac_limit_action',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('interface-mac-limit-max', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' Number of MAC addresses on an Interface
after which MAC limit action is taken
''',
'interface_mac_limit_max',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('interface-mac-limit-notif', REFERENCE_ENUM_CLASS, 'MacNotificationEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'MacNotificationEnum',
[], [],
''' MAC address limit notification action
on an Interface
''',
'interface_mac_limit_notif',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'interface-mac-limit',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdAttachmentCircuits.BdAttachmentCircuit.InterfaceMac' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdAttachmentCircuits.BdAttachmentCircuit.InterfaceMac',
False,
[
_MetaInfoClassMember('interface-mac-aging', REFERENCE_CLASS, 'InterfaceMacAging' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdAttachmentCircuits.BdAttachmentCircuit.InterfaceMac.InterfaceMacAging',
[], [],
''' MAC-Aging configuration commands
''',
'interface_mac_aging',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('interface-mac-learning', REFERENCE_ENUM_CLASS, 'MacLearnEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'MacLearnEnum',
[], [],
''' Enable Mac Learning
''',
'interface_mac_learning',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('interface-mac-limit', REFERENCE_CLASS, 'InterfaceMacLimit' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdAttachmentCircuits.BdAttachmentCircuit.InterfaceMac.InterfaceMacLimit',
[], [],
''' MAC-Limit configuration commands
''',
'interface_mac_limit',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('interface-mac-port-down-flush', REFERENCE_ENUM_CLASS, 'PortDownFlushEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'PortDownFlushEnum',
[], [],
''' Enable/Disable MAC Flush When Port goes
down
''',
'interface_mac_port_down_flush',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('interface-mac-secure', REFERENCE_CLASS, 'InterfaceMacSecure' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdAttachmentCircuits.BdAttachmentCircuit.InterfaceMac.InterfaceMacSecure',
[], [],
''' MAC Secure
''',
'interface_mac_secure',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'interface-mac',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdAttachmentCircuits.BdAttachmentCircuit' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdAttachmentCircuits.BdAttachmentCircuit',
False,
[
_MetaInfoClassMember('name', ATTRIBUTE, 'str' , None, None,
[], ['(([a-zA-Z0-9_]*\\d+/){3,4}\\d+)|(([a-zA-Z0-9_]*\\d+/){3,4}\\d+\\.\\d+)|(([a-zA-Z0-9_]*\\d+/){2}([a-zA-Z0-9_]*\\d+))|(([a-zA-Z0-9_]*\\d+/){2}([a-zA-Z0-9_]+))|([a-zA-Z0-9_-]*\\d+)|([a-zA-Z0-9_-]*\\d+\\.\\d+)|(mpls)|(dwdm)'],
''' The name of the Attachment Circuit
''',
'name',
'Cisco-IOS-XR-l2vpn-cfg', True),
_MetaInfoClassMember('bdac-storm-control-types', REFERENCE_CLASS, 'BdacStormControlTypes' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdAttachmentCircuits.BdAttachmentCircuit.BdacStormControlTypes',
[], [],
''' Storm Control
''',
'bdac_storm_control_types',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('interface-dai', REFERENCE_CLASS, 'InterfaceDai' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdAttachmentCircuits.BdAttachmentCircuit.InterfaceDai',
[], [],
''' L2 Interface Dynamic ARP Inspection
''',
'interface_dai',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('interface-flooding', REFERENCE_ENUM_CLASS, 'InterfaceTrafficFloodEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'InterfaceTrafficFloodEnum',
[], [],
''' Enable or Disable Flooding
''',
'interface_flooding',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('interface-flooding-unknown-unicast', REFERENCE_ENUM_CLASS, 'InterfaceTrafficFloodEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'InterfaceTrafficFloodEnum',
[], [],
''' Enable or Disable Unknown Unicast
Flooding
''',
'interface_flooding_unknown_unicast',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('interface-igmp-snoop', ATTRIBUTE, 'str' , None, None,
[(0, 32)], [],
''' Attach an IGMP Snooping profile
''',
'interface_igmp_snoop',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('interface-ip-source-guard', REFERENCE_CLASS, 'InterfaceIpSourceGuard' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdAttachmentCircuits.BdAttachmentCircuit.InterfaceIpSourceGuard',
[], [],
''' IP Source Guard
''',
'interface_ip_source_guard',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('interface-mac', REFERENCE_CLASS, 'InterfaceMac' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdAttachmentCircuits.BdAttachmentCircuit.InterfaceMac',
[], [],
''' MAC configuration commands
''',
'interface_mac',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('interface-mld-snoop', ATTRIBUTE, 'str' , None, None,
[(0, 32)], [],
''' Attach an MLD Snooping profile
''',
'interface_mld_snoop',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('interface-profile', REFERENCE_CLASS, 'InterfaceProfile' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdAttachmentCircuits.BdAttachmentCircuit.InterfaceProfile',
[], [],
''' Attach a DHCP profile
''',
'interface_profile',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('split-horizon', REFERENCE_CLASS, 'SplitHorizon' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdAttachmentCircuits.BdAttachmentCircuit.SplitHorizon',
[], [],
''' Split Horizon
''',
'split_horizon',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('static-mac-addresses', REFERENCE_CLASS, 'StaticMacAddresses' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdAttachmentCircuits.BdAttachmentCircuit.StaticMacAddresses',
[], [],
''' Static Mac Address Table
''',
'static_mac_addresses',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'bd-attachment-circuit',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
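# Illustrative sketch (comments only): the BdAttachmentCircuit entry above is
# keyed by an interface name that must match the interface-name pattern.
# Given a BridgeDomain instance 'bd' (sketched below):
#
#   ac = bd.bd_attachment_circuits.BdAttachmentCircuit()
#   ac.name = 'GigabitEthernet0/0/0/1'   # key: AC interface name
#   bd.bd_attachment_circuits.bd_attachment_circuit.append(ac)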
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdAttachmentCircuits' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdAttachmentCircuits',
False,
[
_MetaInfoClassMember('bd-attachment-circuit', REFERENCE_LIST, 'BdAttachmentCircuit' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdAttachmentCircuits.BdAttachmentCircuit',
[], [],
''' Name of the Attachment Circuit
''',
'bd_attachment_circuit',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'bd-attachment-circuits',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowireEvpns.BdPseudowireEvpn' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowireEvpns.BdPseudowireEvpn',
False,
[
_MetaInfoClassMember('acid', ATTRIBUTE, 'int' , None, None,
[('1', '4294967295')], [],
''' AC ID
''',
'acid',
'Cisco-IOS-XR-l2vpn-cfg', True),
_MetaInfoClassMember('eviid', ATTRIBUTE, 'int' , None, None,
[('1', '65534')], [],
''' Ethernet VPN ID
''',
'eviid',
'Cisco-IOS-XR-l2vpn-cfg', True),
],
'Cisco-IOS-XR-l2vpn-cfg',
'bd-pseudowire-evpn',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowireEvpns' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowireEvpns',
False,
[
_MetaInfoClassMember('bd-pseudowire-evpn', REFERENCE_LIST, 'BdPseudowireEvpn' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowireEvpns.BdPseudowireEvpn',
[], [],
''' EVPN Pseudowire configuration
''',
'bd_pseudowire_evpn',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'bd-pseudowire-evpns',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.IpSourceGuard' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.IpSourceGuard',
False,
[
_MetaInfoClassMember('enable', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Enable IP Source Guard
''',
'enable',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('logging', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Enable Logging
''',
'logging',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'ip-source-guard',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Dai.DaiAddressValidation' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Dai.DaiAddressValidation',
False,
[
_MetaInfoClassMember('destination-mac-verification', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Enable Destination MAC Verification
''',
'destination_mac_verification',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('enable', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Enable Address Validation
''',
'enable',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('ipv4-verification', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Enable IPv4 Verification
''',
'ipv4_verification',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('source-mac-verification', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Enable Source MAC Verification
''',
'source_mac_verification',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'dai-address-validation',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Dai' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Dai',
False,
[
_MetaInfoClassMember('dai-address-validation', REFERENCE_CLASS, 'DaiAddressValidation' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Dai.DaiAddressValidation',
[], [],
''' Address Validation
''',
'dai_address_validation',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('enable', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Enable Dynamic ARP Inspection
''',
'enable',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('logging', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Enable Logging
''',
'logging',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'dai',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.RoutedInterfaces.RoutedInterface' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.RoutedInterfaces.RoutedInterface',
False,
[
_MetaInfoClassMember('interface-name', ATTRIBUTE, 'str' , None, None,
[], ['(([a-zA-Z0-9_]*\\d+/){3,4}\\d+)|(([a-zA-Z0-9_]*\\d+/){3,4}\\d+\\.\\d+)|(([a-zA-Z0-9_]*\\d+/){2}([a-zA-Z0-9_]*\\d+))|(([a-zA-Z0-9_]*\\d+/){2}([a-zA-Z0-9_]+))|([a-zA-Z0-9_-]*\\d+)|([a-zA-Z0-9_-]*\\d+\\.\\d+)|(mpls)|(dwdm)'],
''' The name of the Routed Interface
''',
'interface_name',
'Cisco-IOS-XR-l2vpn-cfg', True),
],
'Cisco-IOS-XR-l2vpn-cfg',
'routed-interface',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.RoutedInterfaces' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.RoutedInterfaces',
False,
[
_MetaInfoClassMember('routed-interface', REFERENCE_LIST, 'RoutedInterface' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.RoutedInterfaces.RoutedInterface',
[], [],
''' Bridge Domain Routed Interface
''',
'routed_interface',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'routed-interfaces',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain',
False,
[
_MetaInfoClassMember('name', ATTRIBUTE, 'str' , None, None,
[(0, 27)], [],
''' Name of the bridge domain
''',
'name',
'Cisco-IOS-XR-l2vpn-cfg', True),
_MetaInfoClassMember('bd-attachment-circuits', REFERENCE_CLASS, 'BdAttachmentCircuits' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdAttachmentCircuits',
[], [],
''' Attachment Circuit table
''',
'bd_attachment_circuits',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('bd-pseudowire-evpns', REFERENCE_CLASS, 'BdPseudowireEvpns' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowireEvpns',
[], [],
''' List of EVPN pseudowires
''',
'bd_pseudowire_evpns',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('bd-pseudowires', REFERENCE_CLASS, 'BdPseudowires' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires',
[], [],
''' List of pseudowires
''',
'bd_pseudowires',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('bd-storm-controls', REFERENCE_CLASS, 'BdStormControls' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdStormControls',
[], [],
''' Storm Control
''',
'bd_storm_controls',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('bridge-domain-evis', REFERENCE_CLASS, 'BridgeDomainEvis' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainEvis',
[], [],
''' Bridge Domain EVI Table
''',
'bridge_domain_evis',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('bridge-domain-mac', REFERENCE_CLASS, 'BridgeDomainMac' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainMac',
[], [],
''' MAC configuration commands
''',
'bridge_domain_mac',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('bridge-domain-mtu', ATTRIBUTE, 'int' , None, None,
[('46', '65535')], [],
''' Maximum transmission unit for this Bridge
Domain
''',
'bridge_domain_mtu',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('bridge-domain-pbb', REFERENCE_CLASS, 'BridgeDomainPbb' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainPbb',
[], [],
''' Bridge Domain PBB
''',
'bridge_domain_pbb',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('coupled-mode', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Coupled-mode configuration
''',
'coupled_mode',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('dai', REFERENCE_CLASS, 'Dai' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Dai',
[], [],
''' Dynamic ARP Inspection
''',
'dai',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('dhcp', ATTRIBUTE, 'str' , None, None,
[(0, 32)], [],
''' DHCPv4 Snooping profile name
''',
'dhcp',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('flooding', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Disable flooding
''',
'flooding',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('flooding-unknown-unicast', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Disable Unknown Unicast flooding
''',
'flooding_unknown_unicast',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('igmp-snooping', ATTRIBUTE, 'str' , None, None,
[(0, 32)], [],
''' Attach IGMP Snooping Profile Name
''',
'igmp_snooping',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('igmp-snooping-disable', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Disable IGMP Snooping
''',
'igmp_snooping_disable',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('ip-source-guard', REFERENCE_CLASS, 'IpSourceGuard' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.IpSourceGuard',
[], [],
''' IP Source Guard
''',
'ip_source_guard',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('member-vnis', REFERENCE_CLASS, 'MemberVnis' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.MemberVnis',
[], [],
''' Bridge Domain VxLAN Network Identifier
Table
''',
'member_vnis',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('mld-snooping', ATTRIBUTE, 'str' , None, None,
[(0, 32)], [],
''' Attach MLD Snooping Profile Name
''',
'mld_snooping',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('nv-satellite', REFERENCE_CLASS, 'NvSatellite' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.NvSatellite',
[], [],
''' nV Satellite
''',
'nv_satellite',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('routed-interfaces', REFERENCE_CLASS, 'RoutedInterfaces' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.RoutedInterfaces',
[], [],
''' Bridge Domain Routed Interface Table
''',
'routed_interfaces',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('shutdown', ATTRIBUTE, 'Empty' , None, None,
[], [],
                ''' Shutdown the Bridge Domain
''',
'shutdown',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('transport-mode', REFERENCE_ENUM_CLASS, 'BridgeDomainTransportModeEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'BridgeDomainTransportModeEnum',
[], [],
''' Bridge Domain Transport mode
''',
'transport_mode',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('vfis', REFERENCE_CLASS, 'Vfis' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis',
[], [],
''' Specify the virtual forwarding interface
name
''',
'vfis',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'bridge-domain',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains',
False,
[
_MetaInfoClassMember('bridge-domain', REFERENCE_LIST, 'BridgeDomain' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain',
[], [],
''' bridge domain
''',
'bridge_domain',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'bridge-domains',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup',
False,
[
_MetaInfoClassMember('name', ATTRIBUTE, 'str' , None, None,
[(0, 32)], [],
''' Name of the Bridge group
''',
'name',
'Cisco-IOS-XR-l2vpn-cfg', True),
_MetaInfoClassMember('bridge-domains', REFERENCE_CLASS, 'BridgeDomains' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains',
[], [],
                ''' List of Bridge Domains
''',
'bridge_domains',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'bridge-domain-group',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.BridgeDomainGroups' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.BridgeDomainGroups',
False,
[
_MetaInfoClassMember('bridge-domain-group', REFERENCE_LIST, 'BridgeDomainGroup' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup',
[], [],
''' Bridge group
''',
'bridge_domain_group',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'bridge-domain-groups',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
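    # Usage sketch: the bridge-domain entries above describe the tree
    # group -> domains -> domain. A minimal, hedged example of populating it
    # through ydk-py's usual CRUD workflow; the device address, credentials,
    # and leaf values are illustrative placeholders, not part of the model.
    #
    #     from ydk.services import CRUDService
    #     from ydk.providers import NetconfServiceProvider
    #     from ydk.models.cisco_ios_xr import Cisco_IOS_XR_l2vpn_cfg as l2vpn_cfg
    #
    #     provider = NetconfServiceProvider(address='192.0.2.1', port=830,
    #                                       username='admin', password='admin')
    #     crud = CRUDService()
    #
    #     l2vpn = l2vpn_cfg.L2Vpn()
    #     group = l2vpn.database.bridge_domain_groups.BridgeDomainGroup()
    #     group.name = 'bg1'                         # key, str(0..32)
    #     bd = group.bridge_domains.BridgeDomain()
    #     bd.name = 'bd1'                            # list key (metadata earlier in this table)
    #     bd.bridge_domain_mtu = 1514                # int, range 46..65535
    #     group.bridge_domains.bridge_domain.append(bd)
    #     l2vpn.database.bridge_domain_groups.bridge_domain_group.append(group)
    #     crud.create(provider, l2vpn)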
'L2Vpn.Database.PseudowireClasses.PseudowireClass.L2Tpv3Encapsulation.Sequencing' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.PseudowireClasses.PseudowireClass.L2Tpv3Encapsulation.Sequencing',
False,
[
_MetaInfoClassMember('resync-threshold', ATTRIBUTE, 'int' , None, None,
[('5', '65535')], [],
''' Out of sequence threshold
''',
'resync_threshold',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('sequencing', REFERENCE_ENUM_CLASS, 'L2Tpv3SequencingEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Tpv3SequencingEnum',
[], [],
''' Sequencing
''',
'sequencing',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'sequencing',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.PseudowireClasses.PseudowireClass.L2Tpv3Encapsulation.TypeOfService' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.PseudowireClasses.PseudowireClass.L2Tpv3Encapsulation.TypeOfService',
False,
[
_MetaInfoClassMember('type-of-service-mode', REFERENCE_ENUM_CLASS, 'TypeOfServiceModeEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'TypeOfServiceModeEnum',
[], [],
''' Type of service mode
''',
'type_of_service_mode',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('type-of-service-value', ATTRIBUTE, 'int' , None, None,
[('0', '255')], [],
''' Type of service value
''',
'type_of_service_value',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'type-of-service',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.PseudowireClasses.PseudowireClass.L2Tpv3Encapsulation.SignalingProtocol' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.PseudowireClasses.PseudowireClass.L2Tpv3Encapsulation.SignalingProtocol',
False,
[
_MetaInfoClassMember('l2tpv3-class-name', ATTRIBUTE, 'str' , None, None,
[(0, 32)], [],
''' Name of the L2TPv3 class name
''',
'l2tpv3_class_name',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('protocol', REFERENCE_ENUM_CLASS, 'L2TpSignalingProtocolEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2TpSignalingProtocolEnum',
[], [],
''' L2TPv3 signaling protocol
''',
'protocol',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'signaling-protocol',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.PseudowireClasses.PseudowireClass.L2Tpv3Encapsulation.PathMtu' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.PseudowireClasses.PseudowireClass.L2Tpv3Encapsulation.PathMtu',
False,
[
_MetaInfoClassMember('enable', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Enable path MTU
''',
'enable',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('max-path-mtu', ATTRIBUTE, 'int' , None, None,
[('68', '65535')], [],
''' Maximum path maximum transmission unit
''',
'max_path_mtu',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'path-mtu',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.PseudowireClasses.PseudowireClass.L2Tpv3Encapsulation' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.PseudowireClasses.PseudowireClass.L2Tpv3Encapsulation',
False,
[
_MetaInfoClassMember('cookie-size', REFERENCE_ENUM_CLASS, 'L2TpCookieSizeEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2TpCookieSizeEnum',
[], [],
''' Cookie size
''',
'cookie_size',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('df-bit-set', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Set the do not fragment bit to 1
''',
'df_bit_set',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('enable', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Enable L2TPv3 encapsulation
''',
'enable',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('path-mtu', REFERENCE_CLASS, 'PathMtu' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.PseudowireClasses.PseudowireClass.L2Tpv3Encapsulation.PathMtu',
[], [],
''' Path maximum transmission unit
''',
'path_mtu',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('sequencing', REFERENCE_CLASS, 'Sequencing' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.PseudowireClasses.PseudowireClass.L2Tpv3Encapsulation.Sequencing',
[], [],
''' Sequencing
''',
'sequencing',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('signaling-protocol', REFERENCE_CLASS, 'SignalingProtocol' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.PseudowireClasses.PseudowireClass.L2Tpv3Encapsulation.SignalingProtocol',
[], [],
''' L2TPv3 signaling protocol
''',
'signaling_protocol',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('source-address', ATTRIBUTE, 'str' , None, None,
[], ['(([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])\\.){3}([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])(%[\\p{N}\\p{L}]+)?'],
''' Source IP address
''',
'source_address',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('time-to-live', ATTRIBUTE, 'int' , None, None,
[('1', '255')], [],
''' Time to live
''',
'time_to_live',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('transport-mode', REFERENCE_ENUM_CLASS, 'TransportModeEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'TransportModeEnum',
[], [],
''' Transport mode
''',
'transport_mode',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('type-of-service', REFERENCE_CLASS, 'TypeOfService' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.PseudowireClasses.PseudowireClass.L2Tpv3Encapsulation.TypeOfService',
[], [],
''' Type of service
''',
'type_of_service',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'l2tpv3-encapsulation',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
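    # Usage sketch: the L2TPv3 encapsulation entries above are reached through
    # a pseudowire class (metadata below). A hedged fragment reusing `l2vpn`
    # from the bridge-domain sketch; addresses and values are placeholders.
    #
    #     from ydk.types import Empty
    #
    #     pw_class = l2vpn.database.pseudowire_classes.PseudowireClass()
    #     pw_class.name = 'pwc-l2tp'                 # key, str(0..32)
    #     l2tp = pw_class.l2tpv3_encapsulation
    #     l2tp.enable = Empty()                      # presence leaf
    #     l2tp.source_address = '192.0.2.1'          # IPv4, pattern above
    #     l2tp.time_to_live = 64                     # int, 1..255
    #     l2tp.path_mtu.enable = Empty()
    #     l2tp.path_mtu.max_path_mtu = 1460          # int, 68..65535
    #     l2vpn.database.pseudowire_classes.pseudowire_class.append(pw_class)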
'L2Vpn.Database.PseudowireClasses.PseudowireClass.BackupDisableDelay' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.PseudowireClasses.PseudowireClass.BackupDisableDelay',
False,
[
_MetaInfoClassMember('disable-backup', ATTRIBUTE, 'int' , None, None,
[('0', '180')], [],
''' Disable backup delay
''',
'disable_backup',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('type', REFERENCE_ENUM_CLASS, 'BackupDisableEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'BackupDisableEnum',
[], [],
''' Delay or Never
''',
'type',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'backup-disable-delay',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.PseudowireClasses.PseudowireClass.MplsEncapsulation.Sequencing' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.PseudowireClasses.PseudowireClass.MplsEncapsulation.Sequencing',
False,
[
_MetaInfoClassMember('resync-threshold', ATTRIBUTE, 'int' , None, None,
[('5', '65535')], [],
''' Out of sequence threshold
''',
'resync_threshold',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('sequencing', REFERENCE_ENUM_CLASS, 'MplsSequencingEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'MplsSequencingEnum',
[], [],
''' Sequencing
''',
'sequencing',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'sequencing',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.PseudowireClasses.PseudowireClass.MplsEncapsulation.MplsRedundancy' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.PseudowireClasses.PseudowireClass.MplsEncapsulation.MplsRedundancy',
False,
[
_MetaInfoClassMember('redundancy-initial-delay', ATTRIBUTE, 'int' , None, None,
[('0', '120')], [],
''' Initial delay before activating the
redundant PW, in seconds
''',
'redundancy_initial_delay',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('redundancy-one-way', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Force one-way PW redundancy behavior in
Redundancy Group
''',
'redundancy_one_way',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'mpls-redundancy',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.PseudowireClasses.PseudowireClass.MplsEncapsulation.PreferredPath' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.PseudowireClasses.PseudowireClass.MplsEncapsulation.PreferredPath',
False,
[
_MetaInfoClassMember('fallback-disable', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Fallback disable
''',
'fallback_disable',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('interface-tunnel-number', ATTRIBUTE, 'int' , None, None,
[('0', '65535')], [],
''' Interface Tunnel number for preferred path
''',
'interface_tunnel_number',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('type', REFERENCE_ENUM_CLASS, 'PreferredPathEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'PreferredPathEnum',
[], [],
''' Preferred Path Type
''',
'type',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'preferred-path',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.PseudowireClasses.PseudowireClass.MplsEncapsulation.LoadBalanceGroup.FlowLabelLoadBalance' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.PseudowireClasses.PseudowireClass.MplsEncapsulation.LoadBalanceGroup.FlowLabelLoadBalance',
False,
[
_MetaInfoClassMember('flow-label', REFERENCE_ENUM_CLASS, 'FlowLabelLoadBalanceEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'FlowLabelLoadBalanceEnum',
[], [],
''' Flow Label load balance type
''',
'flow_label',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('static', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Static Flow Label
''',
'static',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'flow-label-load-balance',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.PseudowireClasses.PseudowireClass.MplsEncapsulation.LoadBalanceGroup' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.PseudowireClasses.PseudowireClass.MplsEncapsulation.LoadBalanceGroup',
False,
[
_MetaInfoClassMember('flow-label-load-balance', REFERENCE_CLASS, 'FlowLabelLoadBalance' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.PseudowireClasses.PseudowireClass.MplsEncapsulation.LoadBalanceGroup.FlowLabelLoadBalance',
[], [],
''' Enable Flow Label based load balancing
''',
'flow_label_load_balance',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('flow-label-load-balance-code', REFERENCE_ENUM_CLASS, 'FlowLabelTlvCodeEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'FlowLabelTlvCodeEnum',
[], [],
''' Enable Legacy Flow Label TLV code
''',
'flow_label_load_balance_code',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('pw-label-load-balance', REFERENCE_ENUM_CLASS, 'LoadBalanceEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'LoadBalanceEnum',
[], [],
''' Enable PW Label based Load Balancing
''',
'pw_label_load_balance',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'load-balance-group',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.PseudowireClasses.PseudowireClass.MplsEncapsulation' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.PseudowireClasses.PseudowireClass.MplsEncapsulation',
False,
[
_MetaInfoClassMember('control-word', REFERENCE_ENUM_CLASS, 'ControlWordEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'ControlWordEnum',
[], [],
''' Enable control word
''',
'control_word',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('enable', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Enable MPLS encapsulation
''',
'enable',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('load-balance-group', REFERENCE_CLASS, 'LoadBalanceGroup' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.PseudowireClasses.PseudowireClass.MplsEncapsulation.LoadBalanceGroup',
[], [],
''' Load Balancing
''',
'load_balance_group',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('mpls-redundancy', REFERENCE_CLASS, 'MplsRedundancy' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.PseudowireClasses.PseudowireClass.MplsEncapsulation.MplsRedundancy',
[], [],
''' Redundancy options for MPLS encapsulation
''',
'mpls_redundancy',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('preferred-path', REFERENCE_CLASS, 'PreferredPath' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.PseudowireClasses.PseudowireClass.MplsEncapsulation.PreferredPath',
[], [],
''' Preferred path
''',
'preferred_path',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('pw-switching-tlv', REFERENCE_ENUM_CLASS, 'PwSwitchingPointTlvEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'PwSwitchingPointTlvEnum',
[], [],
                ''' Pseudowire Switching Point TLV
''',
'pw_switching_tlv',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('sequencing', REFERENCE_CLASS, 'Sequencing' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.PseudowireClasses.PseudowireClass.MplsEncapsulation.Sequencing',
[], [],
''' Sequencing
''',
'sequencing',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('signaling-protocol', REFERENCE_ENUM_CLASS, 'MplsSignalingProtocolEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'MplsSignalingProtocolEnum',
[], [],
''' MPLS signaling protocol
''',
'signaling_protocol',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('source-address', ATTRIBUTE, 'str' , None, None,
[], ['(([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])\\.){3}([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])(%[\\p{N}\\p{L}]+)?'],
''' Source IP address
''',
'source_address',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('static-tag-rewrite', ATTRIBUTE, 'int' , None, None,
[('1', '4094')], [],
''' Static Tag rewrite
''',
'static_tag_rewrite',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('transport-mode', REFERENCE_ENUM_CLASS, 'TransportModeEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'TransportModeEnum',
[], [],
''' Transport mode
''',
'transport_mode',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('vccv-type', REFERENCE_ENUM_CLASS, 'VccvVerificationEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'VccvVerificationEnum',
[], [],
''' VCCV verification type
''',
'vccv_type',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'mpls-encapsulation',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.PseudowireClasses.PseudowireClass' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.PseudowireClasses.PseudowireClass',
False,
[
_MetaInfoClassMember('name', ATTRIBUTE, 'str' , None, None,
[(0, 32)], [],
''' Name of the pseudowire class
''',
'name',
'Cisco-IOS-XR-l2vpn-cfg', True),
_MetaInfoClassMember('backup-disable-delay', REFERENCE_CLASS, 'BackupDisableDelay' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.PseudowireClasses.PseudowireClass.BackupDisableDelay',
[], [],
''' Back Up Pseudowire class
''',
'backup_disable_delay',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('enable', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Enable pseudowire class
''',
'enable',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('l2tpv3-encapsulation', REFERENCE_CLASS, 'L2Tpv3Encapsulation' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.PseudowireClasses.PseudowireClass.L2Tpv3Encapsulation',
[], [],
''' L2TPv3 encapsulation
''',
'l2tpv3_encapsulation',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('mac-withdraw', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Enable backup MAC withdraw
''',
'mac_withdraw',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('mpls-encapsulation', REFERENCE_CLASS, 'MplsEncapsulation' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.PseudowireClasses.PseudowireClass.MplsEncapsulation',
[], [],
''' MPLS encapsulation
''',
'mpls_encapsulation',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'pseudowire-class',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.PseudowireClasses' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.PseudowireClasses',
False,
[
_MetaInfoClassMember('pseudowire-class', REFERENCE_LIST, 'PseudowireClass' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.PseudowireClasses.PseudowireClass',
[], [],
''' Pseudowire class
''',
'pseudowire_class',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'pseudowire-classes',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
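    # Usage sketch: an MPLS-encapsulated pseudowire class, exercising the
    # MplsEncapsulation entries above. Hedged fragment reusing `l2vpn`,
    # `provider`, `crud`, and `Empty` from the earlier sketches; values are
    # placeholders.
    #
    #     pw_class = l2vpn.database.pseudowire_classes.PseudowireClass()
    #     pw_class.name = 'pwc-mpls'                 # key, str(0..32)
    #     mpls = pw_class.mpls_encapsulation
    #     mpls.enable = Empty()
    #     mpls.source_address = '192.0.2.1'
    #     mpls.mpls_redundancy.redundancy_initial_delay = 10   # 0..120 seconds
    #     l2vpn.database.pseudowire_classes.pseudowire_class.append(pw_class)
    #     crud.create(provider, l2vpn)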
'L2Vpn.Database.FlexibleXconnectServiceTable.VlanUnawareFlexibleXconnectServices.VlanUnawareFlexibleXconnectService.VlanUnawareFxcAttachmentCircuits.VlanUnawareFxcAttachmentCircuit' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.FlexibleXconnectServiceTable.VlanUnawareFlexibleXconnectServices.VlanUnawareFlexibleXconnectService.VlanUnawareFxcAttachmentCircuits.VlanUnawareFxcAttachmentCircuit',
False,
[
_MetaInfoClassMember('name', ATTRIBUTE, 'str' , None, None,
[], ['(([a-zA-Z0-9_]*\\d+/){3,4}\\d+)|(([a-zA-Z0-9_]*\\d+/){3,4}\\d+\\.\\d+)|(([a-zA-Z0-9_]*\\d+/){2}([a-zA-Z0-9_]*\\d+))|(([a-zA-Z0-9_]*\\d+/){2}([a-zA-Z0-9_]+))|([a-zA-Z0-9_-]*\\d+)|([a-zA-Z0-9_-]*\\d+\\.\\d+)|(mpls)|(dwdm)'],
''' Name of the attachment circuit interface
''',
'name',
'Cisco-IOS-XR-l2vpn-cfg', True),
],
'Cisco-IOS-XR-l2vpn-cfg',
'vlan-unaware-fxc-attachment-circuit',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.FlexibleXconnectServiceTable.VlanUnawareFlexibleXconnectServices.VlanUnawareFlexibleXconnectService.VlanUnawareFxcAttachmentCircuits' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.FlexibleXconnectServiceTable.VlanUnawareFlexibleXconnectServices.VlanUnawareFlexibleXconnectService.VlanUnawareFxcAttachmentCircuits',
False,
[
_MetaInfoClassMember('vlan-unaware-fxc-attachment-circuit', REFERENCE_LIST, 'VlanUnawareFxcAttachmentCircuit' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.FlexibleXconnectServiceTable.VlanUnawareFlexibleXconnectServices.VlanUnawareFlexibleXconnectService.VlanUnawareFxcAttachmentCircuits.VlanUnawareFxcAttachmentCircuit',
[], [],
''' Attachment circuit interface
''',
'vlan_unaware_fxc_attachment_circuit',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'vlan-unaware-fxc-attachment-circuits',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.FlexibleXconnectServiceTable.VlanUnawareFlexibleXconnectServices.VlanUnawareFlexibleXconnectService.VlanUnawareFxcPseudowireEvpns.VlanUnawareFxcPseudowireEvpn' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.FlexibleXconnectServiceTable.VlanUnawareFlexibleXconnectServices.VlanUnawareFlexibleXconnectService.VlanUnawareFxcPseudowireEvpns.VlanUnawareFxcPseudowireEvpn',
False,
[
_MetaInfoClassMember('acid', ATTRIBUTE, 'int' , None, None,
[('1', '4294967295')], [],
''' AC ID
''',
'acid',
'Cisco-IOS-XR-l2vpn-cfg', True),
_MetaInfoClassMember('eviid', ATTRIBUTE, 'int' , None, None,
[('1', '65534')], [],
''' Ethernet VPN ID
''',
'eviid',
'Cisco-IOS-XR-l2vpn-cfg', True),
],
'Cisco-IOS-XR-l2vpn-cfg',
'vlan-unaware-fxc-pseudowire-evpn',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.FlexibleXconnectServiceTable.VlanUnawareFlexibleXconnectServices.VlanUnawareFlexibleXconnectService.VlanUnawareFxcPseudowireEvpns' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.FlexibleXconnectServiceTable.VlanUnawareFlexibleXconnectServices.VlanUnawareFlexibleXconnectService.VlanUnawareFxcPseudowireEvpns',
False,
[
_MetaInfoClassMember('vlan-unaware-fxc-pseudowire-evpn', REFERENCE_LIST, 'VlanUnawareFxcPseudowireEvpn' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.FlexibleXconnectServiceTable.VlanUnawareFlexibleXconnectServices.VlanUnawareFlexibleXconnectService.VlanUnawareFxcPseudowireEvpns.VlanUnawareFxcPseudowireEvpn',
[], [],
''' EVPN FXC Service Configuration
''',
'vlan_unaware_fxc_pseudowire_evpn',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'vlan-unaware-fxc-pseudowire-evpns',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.FlexibleXconnectServiceTable.VlanUnawareFlexibleXconnectServices.VlanUnawareFlexibleXconnectService' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.FlexibleXconnectServiceTable.VlanUnawareFlexibleXconnectServices.VlanUnawareFlexibleXconnectService',
False,
[
_MetaInfoClassMember('name', ATTRIBUTE, 'str' , None, None,
[(0, 23)], [],
''' Name of the Flexible XConnect Service
''',
'name',
'Cisco-IOS-XR-l2vpn-cfg', True),
_MetaInfoClassMember('vlan-unaware-fxc-attachment-circuits', REFERENCE_CLASS, 'VlanUnawareFxcAttachmentCircuits' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.FlexibleXconnectServiceTable.VlanUnawareFlexibleXconnectServices.VlanUnawareFlexibleXconnectService.VlanUnawareFxcAttachmentCircuits',
[], [],
''' List of attachment circuits
''',
'vlan_unaware_fxc_attachment_circuits',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('vlan-unaware-fxc-pseudowire-evpns', REFERENCE_CLASS, 'VlanUnawareFxcPseudowireEvpns' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.FlexibleXconnectServiceTable.VlanUnawareFlexibleXconnectServices.VlanUnawareFlexibleXconnectService.VlanUnawareFxcPseudowireEvpns',
[], [],
''' List of EVPN Services
''',
'vlan_unaware_fxc_pseudowire_evpns',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'vlan-unaware-flexible-xconnect-service',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.FlexibleXconnectServiceTable.VlanUnawareFlexibleXconnectServices' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.FlexibleXconnectServiceTable.VlanUnawareFlexibleXconnectServices',
False,
[
_MetaInfoClassMember('vlan-unaware-flexible-xconnect-service', REFERENCE_LIST, 'VlanUnawareFlexibleXconnectService' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.FlexibleXconnectServiceTable.VlanUnawareFlexibleXconnectServices.VlanUnawareFlexibleXconnectService',
[], [],
''' Flexible XConnect Service
''',
'vlan_unaware_flexible_xconnect_service',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'vlan-unaware-flexible-xconnect-services',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.FlexibleXconnectServiceTable' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.FlexibleXconnectServiceTable',
False,
[
_MetaInfoClassMember('vlan-unaware-flexible-xconnect-services', REFERENCE_CLASS, 'VlanUnawareFlexibleXconnectServices' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.FlexibleXconnectServiceTable.VlanUnawareFlexibleXconnectServices',
[], [],
''' List of Vlan-Unaware Flexible XConnect
Services
''',
'vlan_unaware_flexible_xconnect_services',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'flexible-xconnect-service-table',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
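    # Usage sketch: a VLAN-unaware flexible xconnect service binding an
    # attachment circuit to an EVPN endpoint, per the FXC entries above.
    # Hedged fragment reusing `l2vpn`; the interface name is a placeholder
    # that satisfies the pattern shown above.
    #
    #     fxc = l2vpn.database.flexible_xconnect_service_table
    #     svc = fxc.vlan_unaware_flexible_xconnect_services.VlanUnawareFlexibleXconnectService()
    #     svc.name = 'fxc1'                          # key, str(0..23)
    #     ac = svc.vlan_unaware_fxc_attachment_circuits.VlanUnawareFxcAttachmentCircuit()
    #     ac.name = 'GigabitEthernet0/0/0/0.100'
    #     svc.vlan_unaware_fxc_attachment_circuits.vlan_unaware_fxc_attachment_circuit.append(ac)
    #     pw = svc.vlan_unaware_fxc_pseudowire_evpns.VlanUnawareFxcPseudowireEvpn()
    #     pw.eviid = 100                             # key, 1..65534
    #     pw.acid = 1                                # key, 1..4294967295
    #     svc.vlan_unaware_fxc_pseudowire_evpns.vlan_unaware_fxc_pseudowire_evpn.append(pw)
    #     fxc.vlan_unaware_flexible_xconnect_services.vlan_unaware_flexible_xconnect_service.append(svc)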
'L2Vpn.Database.Redundancy.IccpRedundancyGroups.IccpRedundancyGroup.IccpInterfaces.IccpInterface' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.Redundancy.IccpRedundancyGroups.IccpRedundancyGroup.IccpInterfaces.IccpInterface',
False,
[
_MetaInfoClassMember('interface-name', ATTRIBUTE, 'str' , None, None,
[], ['(([a-zA-Z0-9_]*\\d+/){3,4}\\d+)|(([a-zA-Z0-9_]*\\d+/){3,4}\\d+\\.\\d+)|(([a-zA-Z0-9_]*\\d+/){2}([a-zA-Z0-9_]*\\d+))|(([a-zA-Z0-9_]*\\d+/){2}([a-zA-Z0-9_]+))|([a-zA-Z0-9_-]*\\d+)|([a-zA-Z0-9_-]*\\d+\\.\\d+)|(mpls)|(dwdm)'],
''' Interface name
''',
'interface_name',
'Cisco-IOS-XR-l2vpn-cfg', True),
_MetaInfoClassMember('mac-flush-tcn', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Enable STP-TCN MAC flushing
''',
'mac_flush_tcn',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('primary-vlan-range', ATTRIBUTE, 'str' , None, None,
[], [],
                ''' Primary VLAN range, in the form of
                    1-3,5,8-11
                ''',
'primary_vlan_range',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('recovery-delay', ATTRIBUTE, 'int' , None, None,
[('30', '3600')], [],
''' Failure clear recovery delay
''',
'recovery_delay',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('secondary-vlan-range', ATTRIBUTE, 'str' , None, None,
[], [],
                ''' Secondary VLAN range, in the form of
                    1-3,5,8-11
                ''',
'secondary_vlan_range',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'iccp-interface',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.Redundancy.IccpRedundancyGroups.IccpRedundancyGroup.IccpInterfaces' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.Redundancy.IccpRedundancyGroups.IccpRedundancyGroup.IccpInterfaces',
False,
[
_MetaInfoClassMember('iccp-interface', REFERENCE_LIST, 'IccpInterface' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.Redundancy.IccpRedundancyGroups.IccpRedundancyGroup.IccpInterfaces.IccpInterface',
[], [],
''' Interface name
''',
'iccp_interface',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'iccp-interfaces',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.Redundancy.IccpRedundancyGroups.IccpRedundancyGroup' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.Redundancy.IccpRedundancyGroups.IccpRedundancyGroup',
False,
[
_MetaInfoClassMember('group-id', ATTRIBUTE, 'int' , None, None,
[('-2147483648', '2147483647')], [],
''' Group ID
''',
'group_id',
'Cisco-IOS-XR-l2vpn-cfg', True),
_MetaInfoClassMember('iccp-interfaces', REFERENCE_CLASS, 'IccpInterfaces' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.Redundancy.IccpRedundancyGroups.IccpRedundancyGroup.IccpInterfaces',
[], [],
''' List of interfaces
''',
'iccp_interfaces',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('multi-homing-node-id', ATTRIBUTE, 'int' , None, None,
[('0', '254')], [],
''' ICCP-based service multi-homing node ID
''',
'multi_homing_node_id',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'iccp-redundancy-group',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.Redundancy.IccpRedundancyGroups' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.Redundancy.IccpRedundancyGroups',
False,
[
_MetaInfoClassMember('iccp-redundancy-group', REFERENCE_LIST, 'IccpRedundancyGroup' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.Redundancy.IccpRedundancyGroups.IccpRedundancyGroup',
[], [],
''' ICCP Redundancy group
''',
'iccp_redundancy_group',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'iccp-redundancy-groups',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Database.Redundancy' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database.Redundancy',
False,
[
_MetaInfoClassMember('enable', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Enable redundancy groups
''',
'enable',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('iccp-redundancy-groups', REFERENCE_CLASS, 'IccpRedundancyGroups' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.Redundancy.IccpRedundancyGroups',
[], [],
''' List of Inter-Chassis Communication Protocol
redundancy groups
''',
'iccp_redundancy_groups',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'redundancy',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
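    # Usage sketch: an ICCP redundancy group with one monitored interface,
    # per the Redundancy entries above. Hedged fragment reusing `l2vpn` and
    # `Empty`; the interface name and timer values are placeholders.
    #
    #     red = l2vpn.database.redundancy
    #     red.enable = Empty()
    #     group = red.iccp_redundancy_groups.IccpRedundancyGroup()
    #     group.group_id = 100                       # key, int32
    #     group.multi_homing_node_id = 1             # int, 0..254
    #     iface = group.iccp_interfaces.IccpInterface()
    #     iface.interface_name = 'Bundle-Ether10'
    #     iface.recovery_delay = 60                  # int, 30..3600 seconds
    #     iface.primary_vlan_range = '1-100'
    #     group.iccp_interfaces.iccp_interface.append(iface)
    #     red.iccp_redundancy_groups.iccp_redundancy_group.append(group)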
'L2Vpn.Database' : {
'meta_info' : _MetaInfoClass('L2Vpn.Database',
False,
[
_MetaInfoClassMember('bridge-domain-groups', REFERENCE_CLASS, 'BridgeDomainGroups' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.BridgeDomainGroups',
[], [],
''' List of bridge groups
''',
'bridge_domain_groups',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('flexible-xconnect-service-table', REFERENCE_CLASS, 'FlexibleXconnectServiceTable' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.FlexibleXconnectServiceTable',
[], [],
''' List of Flexible XConnect Services
''',
'flexible_xconnect_service_table',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('g8032-rings', REFERENCE_CLASS, 'G8032Rings' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.G8032Rings',
[], [],
                ''' List of G8032 Rings
''',
'g8032_rings',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('pseudowire-classes', REFERENCE_CLASS, 'PseudowireClasses' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.PseudowireClasses',
[], [],
''' List of pseudowire classes
''',
'pseudowire_classes',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('redundancy', REFERENCE_CLASS, 'Redundancy' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.Redundancy',
[], [],
''' Redundancy groups
''',
'redundancy',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('xconnect-groups', REFERENCE_CLASS, 'XconnectGroups' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database.XconnectGroups',
[], [],
''' List of xconnect groups
''',
'xconnect_groups',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'database',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Pbb' : {
'meta_info' : _MetaInfoClass('L2Vpn.Pbb',
False,
[
_MetaInfoClassMember('backbone-source-mac', ATTRIBUTE, 'str' , None, None,
[], ['[0-9a-fA-F]{2}(:[0-9a-fA-F]{2}){5}'],
''' Backbone Source MAC
''',
'backbone_source_mac',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'pbb',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.AutoDiscovery.BgpSignaling' : {
'meta_info' : _MetaInfoClass('L2Vpn.AutoDiscovery.BgpSignaling',
False,
[
_MetaInfoClassMember('mtu-mismatch-ignore', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Ignore MTU mismatch for auto-discovered
pseudowires
''',
'mtu_mismatch_ignore',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'bgp-signaling',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.AutoDiscovery' : {
'meta_info' : _MetaInfoClass('L2Vpn.AutoDiscovery',
False,
[
_MetaInfoClassMember('bgp-signaling', REFERENCE_CLASS, 'BgpSignaling' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.AutoDiscovery.BgpSignaling',
[], [],
''' Global bgp signaling attributes
''',
'bgp_signaling',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'auto-discovery',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Utility.Logging' : {
'meta_info' : _MetaInfoClass('L2Vpn.Utility.Logging',
False,
[
_MetaInfoClassMember('bridge-domain-state-change', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Enable Bridge Domain state change logging
''',
'bridge_domain_state_change',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('nsr-state-change', ATTRIBUTE, 'Empty' , None, None,
[], [],
                ''' Enable Non-Stop Routing state change logging
''',
'nsr_state_change',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('pseudowire-state-change', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Enable pseudowire state change logging
''',
'pseudowire_state_change',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('pwhe-replication-state-change', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Enable PW-HE Replication state change logging
''',
'pwhe_replication_state_change',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('vfi', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Enable VFI state change logging
''',
'vfi',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'logging',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Utility' : {
'meta_info' : _MetaInfoClass('L2Vpn.Utility',
False,
[
_MetaInfoClassMember('logging', REFERENCE_CLASS, 'Logging' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Utility.Logging',
[], [],
''' L2VPN logging utility
''',
'logging',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'utility',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Snmp.Mib.MibInterface.Format' : {
'meta_info' : _MetaInfoClass('L2Vpn.Snmp.Mib.MibInterface.Format',
False,
[
_MetaInfoClassMember('external-interface-format', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Set MIB interface name output in slash
format (/)
''',
'external_interface_format',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'format',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Snmp.Mib.MibInterface' : {
'meta_info' : _MetaInfoClass('L2Vpn.Snmp.Mib.MibInterface',
False,
[
_MetaInfoClassMember('format', REFERENCE_CLASS, 'Format' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Snmp.Mib.MibInterface.Format',
[], [],
''' MIB interface name output format
''',
'format',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'mib-interface',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Snmp.Mib.MibPseudowire' : {
'meta_info' : _MetaInfoClass('L2Vpn.Snmp.Mib.MibPseudowire',
False,
[
_MetaInfoClassMember('statistics', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Enable pseudowire statistics in MIB output
''',
'statistics',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'mib-pseudowire',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Snmp.Mib' : {
'meta_info' : _MetaInfoClass('L2Vpn.Snmp.Mib',
False,
[
_MetaInfoClassMember('mib-interface', REFERENCE_CLASS, 'MibInterface' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Snmp.Mib.MibInterface',
[], [],
''' Interface related configuration for MIB
''',
'mib_interface',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('mib-pseudowire', REFERENCE_CLASS, 'MibPseudowire' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Snmp.Mib.MibPseudowire',
[], [],
''' Pseudowire related configuration for MIB
''',
'mib_pseudowire',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'mib',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn.Snmp' : {
'meta_info' : _MetaInfoClass('L2Vpn.Snmp',
False,
[
_MetaInfoClassMember('mib', REFERENCE_CLASS, 'Mib' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Snmp.Mib',
[], [],
''' MIB related configuration
''',
'mib',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'snmp',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'L2Vpn' : {
'meta_info' : _MetaInfoClass('L2Vpn',
False,
[
_MetaInfoClassMember('auto-discovery', REFERENCE_CLASS, 'AutoDiscovery' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.AutoDiscovery',
[], [],
''' Global auto-discovery attributes
''',
'auto_discovery',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('capability', REFERENCE_ENUM_CLASS, 'L2VpnCapabilityModeEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2VpnCapabilityModeEnum',
[], [],
''' L2VPN Capability Mode
''',
'capability',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('database', REFERENCE_CLASS, 'Database' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Database',
[], [],
''' L2VPN databases
''',
'database',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('enable', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Enable L2VPN feature
''',
'enable',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('l2vpn-router-id', ATTRIBUTE, 'str' , None, None,
[], ['(([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])\\.){3}([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])(%[\\p{N}\\p{L}]+)?'],
''' Global L2VPN Router ID
''',
'l2vpn_router_id',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('load-balance', REFERENCE_ENUM_CLASS, 'LoadBalanceEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'LoadBalanceEnum',
[], [],
''' Enable flow load balancing on l2vpn bridges
''',
'load_balance',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('mspw-description', ATTRIBUTE, 'str' , None, None,
[(0, 64)], [],
''' MS-PW global description
''',
'mspw_description',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('mtu-mismatch-ignore', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Ignore MTU Mismatch for XCs
''',
'mtu_mismatch_ignore',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('neighbor', REFERENCE_CLASS, 'Neighbor' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Neighbor',
[], [],
''' L2VPN neighbor submode
''',
'neighbor',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('nsr', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Enable Non-Stop Routing
''',
'nsr',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('pbb', REFERENCE_CLASS, 'Pbb' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Pbb',
[], [],
''' L2VPN PBB Global
''',
'pbb',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('pw-grouping', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Enable PW grouping
''',
'pw_grouping',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('pw-routing', REFERENCE_CLASS, 'PwRouting' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.PwRouting',
[], [],
''' Pseudowire-routing attributes
''',
'pw_routing',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('pw-status-disable', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Disable PW status
''',
'pw_status_disable',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('pwoam-refresh', ATTRIBUTE, 'int' , None, None,
[('1', '4095')], [],
''' Configure PW OAM refresh interval
''',
'pwoam_refresh',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('snmp', REFERENCE_CLASS, 'Snmp' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Snmp',
[], [],
''' SNMP related configuration
''',
'snmp',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('tcn-propagation', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Topology change notification propagation
''',
'tcn_propagation',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('utility', REFERENCE_CLASS, 'Utility' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'L2Vpn.Utility',
[], [],
''' L2VPN utilities
''',
'utility',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'l2vpn',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
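    # Usage sketch: global L2VPN knobs at the root of the tree, followed by a
    # read-back, per the L2Vpn entry above. Hedged; reuses `provider`, `crud`,
    # `l2vpn`, `l2vpn_cfg`, and `Empty` from the earlier sketches.
    #
    #     l2vpn.enable = Empty()
    #     l2vpn.nsr = Empty()                        # Non-Stop Routing
    #     l2vpn.l2vpn_router_id = '192.0.2.1'        # IPv4, pattern above
    #     crud.create(provider, l2vpn)
    #
    #     running = crud.read(provider, l2vpn_cfg.L2Vpn())   # fetch it back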
'GenericInterfaceLists.GenericInterface.Interfaces.Interface' : {
'meta_info' : _MetaInfoClass('GenericInterfaceLists.GenericInterface.Interfaces.Interface',
False,
[
_MetaInfoClassMember('interface-name', ATTRIBUTE, 'str' , None, None,
[], ['(([a-zA-Z0-9_]*\\d+/){3,4}\\d+)|(([a-zA-Z0-9_]*\\d+/){3,4}\\d+\\.\\d+)|(([a-zA-Z0-9_]*\\d+/){2}([a-zA-Z0-9_]*\\d+))|(([a-zA-Z0-9_]*\\d+/){2}([a-zA-Z0-9_]+))|([a-zA-Z0-9_-]*\\d+)|([a-zA-Z0-9_-]*\\d+\\.\\d+)|(mpls)|(dwdm)'],
''' Name of the interface
''',
'interface_name',
'Cisco-IOS-XR-l2vpn-cfg', True),
_MetaInfoClassMember('enable', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Enable interface
''',
'enable',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'interface',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'GenericInterfaceLists.GenericInterface.Interfaces' : {
'meta_info' : _MetaInfoClass('GenericInterfaceLists.GenericInterface.Interfaces',
False,
[
_MetaInfoClassMember('interface', REFERENCE_LIST, 'Interface' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'GenericInterfaceLists.GenericInterface.Interfaces.Interface',
[], [],
''' Interface
''',
'interface',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'interfaces',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'GenericInterfaceLists.GenericInterface' : {
'meta_info' : _MetaInfoClass('GenericInterfaceLists.GenericInterface',
False,
[
_MetaInfoClassMember('generic-interface-list-name', ATTRIBUTE, 'str' , None, None,
[(0, 32)], [],
''' Name of the interface list
''',
'generic_interface_list_name',
'Cisco-IOS-XR-l2vpn-cfg', True),
_MetaInfoClassMember('enable', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Enable interface list
''',
'enable',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('interfaces', REFERENCE_CLASS, 'Interfaces' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'GenericInterfaceLists.GenericInterface.Interfaces',
[], [],
''' Interface table
''',
'interfaces',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'generic-interface',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'GenericInterfaceLists' : {
'meta_info' : _MetaInfoClass('GenericInterfaceLists',
False,
[
_MetaInfoClassMember('generic-interface', REFERENCE_LIST, 'GenericInterface' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'GenericInterfaceLists.GenericInterface',
[], [],
                ''' Generic interface list
''',
'generic_interface',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'generic-interface-lists',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
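    # Usage sketch: a generic interface list (a top-level container, separate
    # from L2Vpn), per the GenericInterfaceLists entries above. Hedged;
    # list and interface names are placeholders.
    #
    #     gil = l2vpn_cfg.GenericInterfaceLists()
    #     entry = gil.GenericInterface()
    #     entry.generic_interface_list_name = 'gil1' # key, str(0..32)
    #     entry.enable = Empty()
    #     iface = entry.interfaces.Interface()
    #     iface.interface_name = 'GigabitEthernet0/0/0/1'
    #     iface.enable = Empty()
    #     entry.interfaces.interface.append(iface)
    #     gil.generic_interface.append(entry)
    #     crud.create(provider, gil)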
'Evpn.EvpnTables.EvpnTimers' : {
'meta_info' : _MetaInfoClass('Evpn.EvpnTables.EvpnTimers',
False,
[
_MetaInfoClassMember('enable', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Enable EVPN timers
''',
'enable',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('evpn-peering', ATTRIBUTE, 'int' , None, None,
[('0', '300')], [],
''' Global Peering timer
''',
'evpn_peering',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('evpn-recovery', ATTRIBUTE, 'int' , None, None,
[('20', '3600')], [],
''' Global Recovery timer
''',
'evpn_recovery',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'evpn-timers',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'Evpn.EvpnTables.Evpnevis.Evpnevi.EviLoadBalancing' : {
'meta_info' : _MetaInfoClass('Evpn.EvpnTables.Evpnevis.Evpnevi.EviLoadBalancing',
False,
[
_MetaInfoClassMember('enable', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Enable EVI Loadbalancing
''',
'enable',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('evi-flow-label', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Enable Flow Label based load balancing
''',
'evi_flow_label',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'evi-load-balancing',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'Evpn.EvpnTables.Evpnevis.Evpnevi.EvpnevibgpAutoDiscovery.EvpnRouteTargets.EvpnRouteTarget.TwoByteAsOrFourByteAs' : {
'meta_info' : _MetaInfoClass('Evpn.EvpnTables.Evpnevis.Evpnevi.EvpnevibgpAutoDiscovery.EvpnRouteTargets.EvpnRouteTarget.TwoByteAsOrFourByteAs',
False,
[
_MetaInfoClassMember('as', ATTRIBUTE, 'int' , None, None,
[('1', '4294967295')], [],
                ''' Two-byte or four-byte AS number
''',
'as_',
'Cisco-IOS-XR-l2vpn-cfg', True),
_MetaInfoClassMember('as-index', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' AS:nn (hex or decimal format)
''',
'as_index',
'Cisco-IOS-XR-l2vpn-cfg', True),
],
'Cisco-IOS-XR-l2vpn-cfg',
'two-byte-as-or-four-byte-as',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'Evpn.EvpnTables.Evpnevis.Evpnevi.EvpnevibgpAutoDiscovery.EvpnRouteTargets.EvpnRouteTarget.Ipv4Address' : {
'meta_info' : _MetaInfoClass('Evpn.EvpnTables.Evpnevis.Evpnevi.EvpnevibgpAutoDiscovery.EvpnRouteTargets.EvpnRouteTarget.Ipv4Address',
False,
[
_MetaInfoClassMember('addr-index', ATTRIBUTE, 'int' , None, None,
[('0', '65535')], [],
''' Addr index
''',
'addr_index',
'Cisco-IOS-XR-l2vpn-cfg', True),
_MetaInfoClassMember('address', ATTRIBUTE, 'str' , None, None,
[], ['(([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])\\.){3}([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])(%[\\p{N}\\p{L}]+)?'],
''' IPV4 address
''',
'address',
'Cisco-IOS-XR-l2vpn-cfg', True),
],
'Cisco-IOS-XR-l2vpn-cfg',
'ipv4-address',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'Evpn.EvpnTables.Evpnevis.Evpnevi.EvpnevibgpAutoDiscovery.EvpnRouteTargets.EvpnRouteTarget' : {
'meta_info' : _MetaInfoClass('Evpn.EvpnTables.Evpnevis.Evpnevi.EvpnevibgpAutoDiscovery.EvpnRouteTargets.EvpnRouteTarget',
False,
[
_MetaInfoClassMember('format', REFERENCE_ENUM_CLASS, 'BgpRouteTargetFormatEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'BgpRouteTargetFormatEnum',
[], [],
''' Format of the route target
''',
'format',
'Cisco-IOS-XR-l2vpn-cfg', True),
_MetaInfoClassMember('role', REFERENCE_ENUM_CLASS, 'BgpRouteTargetRoleEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'BgpRouteTargetRoleEnum',
[], [],
                ''' Role of the route target type
''',
'role',
'Cisco-IOS-XR-l2vpn-cfg', True),
_MetaInfoClassMember('stitching', REFERENCE_ENUM_CLASS, 'BgpRouteTargetEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'BgpRouteTargetEnum',
[], [],
                ''' Whether the RT is a Stitching RT
''',
'stitching',
'Cisco-IOS-XR-l2vpn-cfg', True),
_MetaInfoClassMember('ipv4-address', REFERENCE_LIST, 'Ipv4Address' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'Evpn.EvpnTables.Evpnevis.Evpnevi.EvpnevibgpAutoDiscovery.EvpnRouteTargets.EvpnRouteTarget.Ipv4Address',
[], [],
''' ipv4 address
''',
'ipv4_address',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('two-byte-as-or-four-byte-as', REFERENCE_LIST, 'TwoByteAsOrFourByteAs' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'Evpn.EvpnTables.Evpnevis.Evpnevi.EvpnevibgpAutoDiscovery.EvpnRouteTargets.EvpnRouteTarget.TwoByteAsOrFourByteAs',
[], [],
''' two byte as or four byte as
''',
'two_byte_as_or_four_byte_as',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'evpn-route-target',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'Evpn.EvpnTables.Evpnevis.Evpnevi.EvpnevibgpAutoDiscovery.EvpnRouteTargets' : {
'meta_info' : _MetaInfoClass('Evpn.EvpnTables.Evpnevis.Evpnevi.EvpnevibgpAutoDiscovery.EvpnRouteTargets',
False,
[
_MetaInfoClassMember('evpn-route-target', REFERENCE_LIST, 'EvpnRouteTarget' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'Evpn.EvpnTables.Evpnevis.Evpnevi.EvpnevibgpAutoDiscovery.EvpnRouteTargets.EvpnRouteTarget',
[], [],
''' Name of the Route Target
''',
'evpn_route_target',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'evpn-route-targets',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'Evpn.EvpnTables.Evpnevis.Evpnevi.EvpnevibgpAutoDiscovery.EvpnRouteDistinguisher' : {
'meta_info' : _MetaInfoClass('Evpn.EvpnTables.Evpnevis.Evpnevi.EvpnevibgpAutoDiscovery.EvpnRouteDistinguisher',
False,
[
_MetaInfoClassMember('addr-index', ATTRIBUTE, 'int' , None, None,
[('0', '65535')], [],
''' Addr index
''',
'addr_index',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('address', ATTRIBUTE, 'str' , None, None,
[], ['(([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])\\.){3}([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])(%[\\p{N}\\p{L}]+)?'],
''' IPV4 address
''',
'address',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('as', ATTRIBUTE, 'int' , None, None,
[('1', '4294967295')], [],
                ''' Two-byte or four-byte AS number
''',
'as_',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('as-index', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' AS:nn (hex or decimal format)
''',
'as_index',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('type', REFERENCE_ENUM_CLASS, 'BgpRouteDistinguisherEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'BgpRouteDistinguisherEnum',
[], [],
                ''' Route Distinguisher Type
''',
'type',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'evpn-route-distinguisher',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'Evpn.EvpnTables.Evpnevis.Evpnevi.EvpnevibgpAutoDiscovery' : {
'meta_info' : _MetaInfoClass('Evpn.EvpnTables.Evpnevis.Evpnevi.EvpnevibgpAutoDiscovery',
False,
[
_MetaInfoClassMember('enable', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Enable Autodiscovery BGP
''',
'enable',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('evpn-route-distinguisher', REFERENCE_CLASS, 'EvpnRouteDistinguisher' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'Evpn.EvpnTables.Evpnevis.Evpnevi.EvpnevibgpAutoDiscovery.EvpnRouteDistinguisher',
[], [],
''' Route Distinguisher
''',
'evpn_route_distinguisher',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('evpn-route-targets', REFERENCE_CLASS, 'EvpnRouteTargets' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'Evpn.EvpnTables.Evpnevis.Evpnevi.EvpnevibgpAutoDiscovery.EvpnRouteTargets',
[], [],
''' Route Target
''',
'evpn_route_targets',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('table-policy', ATTRIBUTE, 'str' , None, None,
[], [],
''' Table Policy for installation of forwarding
data to L2FIB
''',
'table_policy',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'evpnevibgp-auto-discovery',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'Evpn.EvpnTables.Evpnevis.Evpnevi' : {
'meta_info' : _MetaInfoClass('Evpn.EvpnTables.Evpnevis.Evpnevi',
False,
[
_MetaInfoClassMember('eviid', ATTRIBUTE, 'int' , None, None,
[('1', '65534')], [],
''' EVI ID
''',
'eviid',
'Cisco-IOS-XR-l2vpn-cfg', True),
_MetaInfoClassMember('evi-load-balancing', REFERENCE_CLASS, 'EviLoadBalancing' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'Evpn.EvpnTables.Evpnevis.Evpnevi.EviLoadBalancing',
[], [],
''' Enter EVI Loadbalancing configuration submode
''',
'evi_load_balancing',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('evpn-evi-cw-disable', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' CW disable for EVPN EVI
''',
'evpn_evi_cw_disable',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('evpnevi-description', ATTRIBUTE, 'str' , None, None,
[(0, 64)], [],
''' Description for EVPN EVI
''',
'evpnevi_description',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('evpnevibgp-auto-discovery', REFERENCE_CLASS, 'EvpnevibgpAutoDiscovery' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'Evpn.EvpnTables.Evpnevis.Evpnevi.EvpnevibgpAutoDiscovery',
[], [],
''' Enable Autodiscovery BGP in EVPN EVI
''',
'evpnevibgp_auto_discovery',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'evpnevi',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'Evpn.EvpnTables.Evpnevis' : {
'meta_info' : _MetaInfoClass('Evpn.EvpnTables.Evpnevis',
False,
[
_MetaInfoClassMember('evpnevi', REFERENCE_LIST, 'Evpnevi' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'Evpn.EvpnTables.Evpnevis.Evpnevi',
[], [],
''' Enter EVPN EVI configuration submode
''',
'evpnevi',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'evpnevis',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'Evpn.EvpnTables.EvpnLoadBalancing' : {
'meta_info' : _MetaInfoClass('Evpn.EvpnTables.EvpnLoadBalancing',
False,
[
_MetaInfoClassMember('enable', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Enable EVPN Loadbalancing
''',
'enable',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('evpn-flow-label', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Enable Flow Label based load balancing
''',
'evpn_flow_label',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'evpn-load-balancing',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'Evpn.EvpnTables.EvpnbgpAutoDiscovery.EvpnRouteDistinguisher' : {
'meta_info' : _MetaInfoClass('Evpn.EvpnTables.EvpnbgpAutoDiscovery.EvpnRouteDistinguisher',
False,
[
_MetaInfoClassMember('addr-index', ATTRIBUTE, 'int' , None, None,
[('0', '65535')], [],
''' Addr index
''',
'addr_index',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('address', ATTRIBUTE, 'str' , None, None,
[], ['(([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])\\.){3}([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])(%[\\p{N}\\p{L}]+)?'],
''' IPV4 address
''',
'address',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('as', ATTRIBUTE, 'int' , None, None,
[('1', '4294967295')], [],
                ''' Two-byte or four-byte AS number
''',
'as_',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('as-index', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' AS:nn (hex or decimal format)
''',
'as_index',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('type', REFERENCE_ENUM_CLASS, 'BgpRouteDistinguisherEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'BgpRouteDistinguisherEnum',
[], [],
                ''' Route Distinguisher Type
''',
'type',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'evpn-route-distinguisher',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'Evpn.EvpnTables.EvpnbgpAutoDiscovery' : {
'meta_info' : _MetaInfoClass('Evpn.EvpnTables.EvpnbgpAutoDiscovery',
False,
[
_MetaInfoClassMember('enable', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Enable Autodiscovery BGP
''',
'enable',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('evpn-route-distinguisher', REFERENCE_CLASS, 'EvpnRouteDistinguisher' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'Evpn.EvpnTables.EvpnbgpAutoDiscovery.EvpnRouteDistinguisher',
[], [],
''' Route Distinguisher
''',
'evpn_route_distinguisher',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'evpnbgp-auto-discovery',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'Evpn.EvpnTables.EvpnInterfaces.EvpnInterface.EvpnacTimers' : {
'meta_info' : _MetaInfoClass('Evpn.EvpnTables.EvpnInterfaces.EvpnInterface.EvpnacTimers',
False,
[
_MetaInfoClassMember('enable', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Enable Interface-specific timers
''',
'enable',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('evpnac-peering', ATTRIBUTE, 'int' , None, None,
[('0', '300')], [],
''' Interface-specific Peering timer
''',
'evpnac_peering',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('evpnac-recovery', ATTRIBUTE, 'int' , None, None,
[('20', '3600')], [],
''' Interface-specific Recovery timer
''',
'evpnac_recovery',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'evpnac-timers',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'Evpn.EvpnTables.EvpnInterfaces.EvpnInterface.EthernetSegment.IdentifierType0' : {
'meta_info' : _MetaInfoClass('Evpn.EvpnTables.EvpnInterfaces.EvpnInterface.EthernetSegment.IdentifierType0',
False,
[
_MetaInfoClassMember('bytes1', ATTRIBUTE, 'str' , None, None,
[], ['[0-9a-fA-F]{1,8}'],
''' Type 0's 1st Byte
''',
'bytes1',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('bytes23', ATTRIBUTE, 'str' , None, None,
[], ['[0-9a-fA-F]{1,8}'],
''' Type 0's 2nd and 3rd Bytes
''',
'bytes23',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('bytes45', ATTRIBUTE, 'str' , None, None,
[], ['[0-9a-fA-F]{1,8}'],
''' Type 0's 4th and 5th Bytes
''',
'bytes45',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('bytes67', ATTRIBUTE, 'str' , None, None,
[], ['[0-9a-fA-F]{1,8}'],
''' Type 0's 6th and 7th Bytes
''',
'bytes67',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('bytes89', ATTRIBUTE, 'str' , None, None,
[], ['[0-9a-fA-F]{1,8}'],
''' Type 0's 8th and 9th Bytes
''',
'bytes89',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'identifier-type0',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'Evpn.EvpnTables.EvpnInterfaces.EvpnInterface.EthernetSegment.ManualServiceCarving.ServiceList' : {
'meta_info' : _MetaInfoClass('Evpn.EvpnTables.EvpnInterfaces.EvpnInterface.EthernetSegment.ManualServiceCarving.ServiceList',
False,
[
_MetaInfoClassMember('primary', ATTRIBUTE, 'str' , None, None,
[(0, 150)], [],
''' Primary services list
''',
'primary',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('secondary', ATTRIBUTE, 'str' , None, None,
[(0, 150)], [],
''' Secondary services list
''',
'secondary',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'service-list',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'Evpn.EvpnTables.EvpnInterfaces.EvpnInterface.EthernetSegment.ManualServiceCarving' : {
'meta_info' : _MetaInfoClass('Evpn.EvpnTables.EvpnInterfaces.EvpnInterface.EthernetSegment.ManualServiceCarving',
False,
[
_MetaInfoClassMember('enable', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Enable Manual service carving
''',
'enable',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('service-list', REFERENCE_CLASS, 'ServiceList' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'Evpn.EvpnTables.EvpnInterfaces.EvpnInterface.EthernetSegment.ManualServiceCarving.ServiceList',
[], [],
''' Manual service carving primary and secondary
lists
''',
'service_list',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'manual-service-carving',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'Evpn.EvpnTables.EvpnInterfaces.EvpnInterface.EthernetSegment' : {
'meta_info' : _MetaInfoClass('Evpn.EvpnTables.EvpnInterfaces.EvpnInterface.EthernetSegment',
False,
[
_MetaInfoClassMember('backbone-source-mac', ATTRIBUTE, 'str' , None, None,
[], ['[0-9a-fA-F]{2}(:[0-9a-fA-F]{2}){5}'],
''' Backbone Source MAC
''',
'backbone_source_mac',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('enable', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Enable Ethernet Segment
''',
'enable',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('es-import-route-target', ATTRIBUTE, 'str' , None, None,
[], ['[0-9a-fA-F]{2}(:[0-9a-fA-F]{2}){5}'],
''' ES-Import Route Target
''',
'es_import_route_target',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('force-single-homed', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Force ethernet segment to remain
single-homed
''',
'force_single_homed',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('identifier-type0', REFERENCE_CLASS, 'IdentifierType0' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'Evpn.EvpnTables.EvpnInterfaces.EvpnInterface.EthernetSegment.IdentifierType0',
[], [],
''' Ethernet segment identifier (Type 0)
''',
'identifier_type0',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('load-balancing-per-service', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Enable per-service load balancing mode
''',
'load_balancing_per_service',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('manual-service-carving', REFERENCE_CLASS, 'ManualServiceCarving' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'Evpn.EvpnTables.EvpnInterfaces.EvpnInterface.EthernetSegment.ManualServiceCarving',
[], [],
''' Enter Manual service carving configuration
submode
''',
'manual_service_carving',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'ethernet-segment',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'Evpn.EvpnTables.EvpnInterfaces.EvpnInterface' : {
'meta_info' : _MetaInfoClass('Evpn.EvpnTables.EvpnInterfaces.EvpnInterface',
False,
[
_MetaInfoClassMember('interface-name', ATTRIBUTE, 'str' , None, None,
[], ['(([a-zA-Z0-9_]*\\d+/){3,4}\\d+)|(([a-zA-Z0-9_]*\\d+/){3,4}\\d+\\.\\d+)|(([a-zA-Z0-9_]*\\d+/){2}([a-zA-Z0-9_]*\\d+))|(([a-zA-Z0-9_]*\\d+/){2}([a-zA-Z0-9_]+))|([a-zA-Z0-9_-]*\\d+)|([a-zA-Z0-9_-]*\\d+\\.\\d+)|(mpls)|(dwdm)'],
''' Name of the attachment circuit interface
''',
'interface_name',
'Cisco-IOS-XR-l2vpn-cfg', True),
_MetaInfoClassMember('ethernet-segment', REFERENCE_CLASS, 'EthernetSegment' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'Evpn.EvpnTables.EvpnInterfaces.EvpnInterface.EthernetSegment',
[], [],
''' Enter Ethernet Segment configuration submode
''',
'ethernet_segment',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('evpnac-timers', REFERENCE_CLASS, 'EvpnacTimers' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'Evpn.EvpnTables.EvpnInterfaces.EvpnInterface.EvpnacTimers',
[], [],
''' Enter Interface-specific timers configuration
submode
''',
'evpnac_timers',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('mac-flush', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Enable MVRP MAC Flush mode
''',
'mac_flush',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'evpn-interface',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
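# Editorial note: in the EvpnInterface entry above, 'interface-name' is the
# only member whose trailing flag is True. That flag appears to mark the YANG
# list key, which is why every other member in these tables carries False.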
'Evpn.EvpnTables.EvpnInterfaces' : {
'meta_info' : _MetaInfoClass('Evpn.EvpnTables.EvpnInterfaces',
False,
[
_MetaInfoClassMember('evpn-interface', REFERENCE_LIST, 'EvpnInterface' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'Evpn.EvpnTables.EvpnInterfaces.EvpnInterface',
[], [],
''' Attachment circuit interface
''',
'evpn_interface',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'evpn-interfaces',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'Evpn.EvpnTables' : {
'meta_info' : _MetaInfoClass('Evpn.EvpnTables',
False,
[
_MetaInfoClassMember('evpn-interfaces', REFERENCE_CLASS, 'EvpnInterfaces' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'Evpn.EvpnTables.EvpnInterfaces',
[], [],
''' Attachment Circuit interfaces
''',
'evpn_interfaces',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('evpn-load-balancing', REFERENCE_CLASS, 'EvpnLoadBalancing' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'Evpn.EvpnTables.EvpnLoadBalancing',
[], [],
''' Enter EVPN Load Balancing configuration submode
''',
'evpn_load_balancing',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('evpn-timers', REFERENCE_CLASS, 'EvpnTimers' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'Evpn.EvpnTables.EvpnTimers',
[], [],
''' Enter EVPN timers configuration submode
''',
'evpn_timers',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('evpnbgp-auto-discovery', REFERENCE_CLASS, 'EvpnbgpAutoDiscovery' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'Evpn.EvpnTables.EvpnbgpAutoDiscovery',
[], [],
''' Enable BGP Autodiscovery in EVPN
''',
'evpnbgp_auto_discovery',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('evpnevis', REFERENCE_CLASS, 'Evpnevis' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'Evpn.EvpnTables.Evpnevis',
[], [],
''' Enter EVPN EVI configuration submode
''',
'evpnevis',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'evpn-tables',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
'Evpn' : {
'meta_info' : _MetaInfoClass('Evpn',
False,
[
_MetaInfoClassMember('enable', ATTRIBUTE, 'Empty' , None, None,
[], [],
''' Enable EVPN feature
''',
'enable',
'Cisco-IOS-XR-l2vpn-cfg', False),
_MetaInfoClassMember('evpn-tables', REFERENCE_CLASS, 'EvpnTables' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg', 'Evpn.EvpnTables',
[], [],
''' EVPN submodes
''',
'evpn_tables',
'Cisco-IOS-XR-l2vpn-cfg', False),
],
'Cisco-IOS-XR-l2vpn-cfg',
'evpn',
_yang_ns._namespaces['Cisco-IOS-XR-l2vpn-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_l2vpn_cfg'
),
},
}
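# The assignments below (generated, like the tables above) wire each child
# class's meta_info to its parent's, reconstructing the YANG containment
# hierarchy at import time. The helper that follows is an illustrative
# editorial sketch only, not generator output; its name `_meta_ancestry` is
# invented here, and it assumes no API beyond the `parent` attribute that the
# assignments below set.
def _meta_ancestry(path):
    """Return meta_info objects from the root container down to `path`."""
    meta = _meta_table[path]['meta_info']
    chain = [meta]
    # Root entries (e.g. 'Evpn') never receive a parent assignment, so stop
    # when the attribute is absent or None.
    while getattr(meta, 'parent', None) is not None:
        meta = meta.parent
        chain.append(meta)
    return chain[::-1]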
_meta_table['L2Vpn.PwRouting.PwRoutingBgp.EvpnRouteDistinguisher']['meta_info'].parent =_meta_table['L2Vpn.PwRouting.PwRoutingBgp']['meta_info']
_meta_table['L2Vpn.PwRouting.PwRoutingBgp']['meta_info'].parent =_meta_table['L2Vpn.PwRouting']['meta_info']
_meta_table['L2Vpn.Database.G8032Rings.G8032Ring.ErpPort0S.ErpPort0']['meta_info'].parent =_meta_table['L2Vpn.Database.G8032Rings.G8032Ring.ErpPort0S']['meta_info']
_meta_table['L2Vpn.Database.G8032Rings.G8032Ring.ErpInstances.ErpInstance.Aps.Port1']['meta_info'].parent =_meta_table['L2Vpn.Database.G8032Rings.G8032Ring.ErpInstances.ErpInstance.Aps']['meta_info']
_meta_table['L2Vpn.Database.G8032Rings.G8032Ring.ErpInstances.ErpInstance.Rpl']['meta_info'].parent =_meta_table['L2Vpn.Database.G8032Rings.G8032Ring.ErpInstances.ErpInstance']['meta_info']
_meta_table['L2Vpn.Database.G8032Rings.G8032Ring.ErpInstances.ErpInstance.Aps']['meta_info'].parent =_meta_table['L2Vpn.Database.G8032Rings.G8032Ring.ErpInstances.ErpInstance']['meta_info']
_meta_table['L2Vpn.Database.G8032Rings.G8032Ring.ErpInstances.ErpInstance']['meta_info'].parent =_meta_table['L2Vpn.Database.G8032Rings.G8032Ring.ErpInstances']['meta_info']
_meta_table['L2Vpn.Database.G8032Rings.G8032Ring.ErpPort1S.ErpPort1.None_']['meta_info'].parent =_meta_table['L2Vpn.Database.G8032Rings.G8032Ring.ErpPort1S.ErpPort1']['meta_info']
_meta_table['L2Vpn.Database.G8032Rings.G8032Ring.ErpPort1S.ErpPort1.VirtualOrInterface']['meta_info'].parent =_meta_table['L2Vpn.Database.G8032Rings.G8032Ring.ErpPort1S.ErpPort1']['meta_info']
_meta_table['L2Vpn.Database.G8032Rings.G8032Ring.ErpPort1S.ErpPort1']['meta_info'].parent =_meta_table['L2Vpn.Database.G8032Rings.G8032Ring.ErpPort1S']['meta_info']
_meta_table['L2Vpn.Database.G8032Rings.G8032Ring.ErpPort0S']['meta_info'].parent =_meta_table['L2Vpn.Database.G8032Rings.G8032Ring']['meta_info']
_meta_table['L2Vpn.Database.G8032Rings.G8032Ring.ErpInstances']['meta_info'].parent =_meta_table['L2Vpn.Database.G8032Rings.G8032Ring']['meta_info']
_meta_table['L2Vpn.Database.G8032Rings.G8032Ring.ErpPort1S']['meta_info'].parent =_meta_table['L2Vpn.Database.G8032Rings.G8032Ring']['meta_info']
_meta_table['L2Vpn.Database.G8032Rings.G8032Ring']['meta_info'].parent =_meta_table['L2Vpn.Database.G8032Rings']['meta_info']
_meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.BackupAttachmentCircuits.BackupAttachmentCircuit']['meta_info'].parent =_meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.BackupAttachmentCircuits']['meta_info']
_meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.PseudowireEvpns.PseudowireEvpn']['meta_info'].parent =_meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.PseudowireEvpns']['meta_info']
_meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.Neighbor.BackupPseudowires.BackupPseudowire.BackupMplsStaticLabels']['meta_info'].parent =_meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.Neighbor.BackupPseudowires.BackupPseudowire']['meta_info']
_meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.Neighbor.BackupPseudowires.BackupPseudowire']['meta_info'].parent =_meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.Neighbor.BackupPseudowires']['meta_info']
_meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.Neighbor.L2TpStaticAttributes.L2TpRemoteCookie']['meta_info'].parent =_meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.Neighbor.L2TpStaticAttributes']['meta_info']
_meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.Neighbor.L2TpStaticAttributes.L2TpSecondaryLocalCookie']['meta_info'].parent =_meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.Neighbor.L2TpStaticAttributes']['meta_info']
_meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.Neighbor.L2TpStaticAttributes.L2TpLocalCookie']['meta_info'].parent =_meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.Neighbor.L2TpStaticAttributes']['meta_info']
_meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.Neighbor.MplsStaticLabels']['meta_info'].parent =_meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.Neighbor']['meta_info']
_meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.Neighbor.BackupPseudowires']['meta_info'].parent =_meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.Neighbor']['meta_info']
_meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.Neighbor.L2TpStaticAttributes']['meta_info'].parent =_meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.Neighbor']['meta_info']
_meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.Neighbor.L2TpStatic']['meta_info'].parent =_meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.Neighbor']['meta_info']
_meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.PseudowireAddress.BackupPseudowires.BackupPseudowire.BackupMplsStaticLabels']['meta_info'].parent =_meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.PseudowireAddress.BackupPseudowires.BackupPseudowire']['meta_info']
_meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.PseudowireAddress.BackupPseudowires.BackupPseudowire']['meta_info'].parent =_meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.PseudowireAddress.BackupPseudowires']['meta_info']
_meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.PseudowireAddress.L2TpStaticAttributes.L2TpRemoteCookie']['meta_info'].parent =_meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.PseudowireAddress.L2TpStaticAttributes']['meta_info']
_meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.PseudowireAddress.L2TpStaticAttributes.L2TpSecondaryLocalCookie']['meta_info'].parent =_meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.PseudowireAddress.L2TpStaticAttributes']['meta_info']
_meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.PseudowireAddress.L2TpStaticAttributes.L2TpLocalCookie']['meta_info'].parent =_meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.PseudowireAddress.L2TpStaticAttributes']['meta_info']
_meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.PseudowireAddress.MplsStaticLabels']['meta_info'].parent =_meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.PseudowireAddress']['meta_info']
_meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.PseudowireAddress.BackupPseudowires']['meta_info'].parent =_meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.PseudowireAddress']['meta_info']
_meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.PseudowireAddress.L2TpStaticAttributes']['meta_info'].parent =_meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.PseudowireAddress']['meta_info']
_meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.PseudowireAddress.L2TpStatic']['meta_info'].parent =_meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.PseudowireAddress']['meta_info']
_meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.Neighbor']['meta_info'].parent =_meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire']['meta_info']
_meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire.PseudowireAddress']['meta_info'].parent =_meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire']['meta_info']
_meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires.Pseudowire']['meta_info'].parent =_meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires']['meta_info']
_meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.MonitorSessions.MonitorSession']['meta_info'].parent =_meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.MonitorSessions']['meta_info']
_meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.PseudowireRouteds.PseudowireRouted']['meta_info'].parent =_meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.PseudowireRouteds']['meta_info']
_meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.AttachmentCircuits.AttachmentCircuit']['meta_info'].parent =_meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.AttachmentCircuits']['meta_info']
_meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.BackupAttachmentCircuits']['meta_info'].parent =_meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect']['meta_info']
_meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.PseudowireEvpns']['meta_info'].parent =_meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect']['meta_info']
_meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.Pseudowires']['meta_info'].parent =_meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect']['meta_info']
_meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.MonitorSessions']['meta_info'].parent =_meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect']['meta_info']
_meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.PseudowireRouteds']['meta_info'].parent =_meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect']['meta_info']
_meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect.AttachmentCircuits']['meta_info'].parent =_meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect']['meta_info']
_meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects.P2PXconnect']['meta_info'].parent =_meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects']['meta_info']
_meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.Mp2MpXconnects.Mp2MpXconnect.Mp2MpAutoDiscovery.Mp2MpRouteTargets.Mp2MpRouteTarget.TwoByteAsOrFourByteAs']['meta_info'].parent =_meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.Mp2MpXconnects.Mp2MpXconnect.Mp2MpAutoDiscovery.Mp2MpRouteTargets.Mp2MpRouteTarget']['meta_info']
_meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.Mp2MpXconnects.Mp2MpXconnect.Mp2MpAutoDiscovery.Mp2MpRouteTargets.Mp2MpRouteTarget.Ipv4Address']['meta_info'].parent =_meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.Mp2MpXconnects.Mp2MpXconnect.Mp2MpAutoDiscovery.Mp2MpRouteTargets.Mp2MpRouteTarget']['meta_info']
_meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.Mp2MpXconnects.Mp2MpXconnect.Mp2MpAutoDiscovery.Mp2MpRouteTargets.Mp2MpRouteTarget']['meta_info'].parent =_meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.Mp2MpXconnects.Mp2MpXconnect.Mp2MpAutoDiscovery.Mp2MpRouteTargets']['meta_info']
_meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.Mp2MpXconnects.Mp2MpXconnect.Mp2MpAutoDiscovery.Mp2MpSignalingProtocol.Ceids.Ceid.RemoteCeidAttachmentCircuits.RemoteCeidAttachmentCircuit']['meta_info'].parent =_meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.Mp2MpXconnects.Mp2MpXconnect.Mp2MpAutoDiscovery.Mp2MpSignalingProtocol.Ceids.Ceid.RemoteCeidAttachmentCircuits']['meta_info']
_meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.Mp2MpXconnects.Mp2MpXconnect.Mp2MpAutoDiscovery.Mp2MpSignalingProtocol.Ceids.Ceid.RemoteCeidAttachmentCircuits']['meta_info'].parent =_meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.Mp2MpXconnects.Mp2MpXconnect.Mp2MpAutoDiscovery.Mp2MpSignalingProtocol.Ceids.Ceid']['meta_info']
_meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.Mp2MpXconnects.Mp2MpXconnect.Mp2MpAutoDiscovery.Mp2MpSignalingProtocol.Ceids.Ceid']['meta_info'].parent =_meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.Mp2MpXconnects.Mp2MpXconnect.Mp2MpAutoDiscovery.Mp2MpSignalingProtocol.Ceids']['meta_info']
_meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.Mp2MpXconnects.Mp2MpXconnect.Mp2MpAutoDiscovery.Mp2MpSignalingProtocol.FlowLabelLoadBalance']['meta_info'].parent =_meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.Mp2MpXconnects.Mp2MpXconnect.Mp2MpAutoDiscovery.Mp2MpSignalingProtocol']['meta_info']
_meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.Mp2MpXconnects.Mp2MpXconnect.Mp2MpAutoDiscovery.Mp2MpSignalingProtocol.Ceids']['meta_info'].parent =_meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.Mp2MpXconnects.Mp2MpXconnect.Mp2MpAutoDiscovery.Mp2MpSignalingProtocol']['meta_info']
_meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.Mp2MpXconnects.Mp2MpXconnect.Mp2MpAutoDiscovery.RouteDistinguisher']['meta_info'].parent =_meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.Mp2MpXconnects.Mp2MpXconnect.Mp2MpAutoDiscovery']['meta_info']
_meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.Mp2MpXconnects.Mp2MpXconnect.Mp2MpAutoDiscovery.Mp2MpRoutePolicy']['meta_info'].parent =_meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.Mp2MpXconnects.Mp2MpXconnect.Mp2MpAutoDiscovery']['meta_info']
_meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.Mp2MpXconnects.Mp2MpXconnect.Mp2MpAutoDiscovery.Mp2MpRouteTargets']['meta_info'].parent =_meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.Mp2MpXconnects.Mp2MpXconnect.Mp2MpAutoDiscovery']['meta_info']
_meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.Mp2MpXconnects.Mp2MpXconnect.Mp2MpAutoDiscovery.Mp2MpSignalingProtocol']['meta_info'].parent =_meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.Mp2MpXconnects.Mp2MpXconnect.Mp2MpAutoDiscovery']['meta_info']
_meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.Mp2MpXconnects.Mp2MpXconnect.Mp2MpAutoDiscovery']['meta_info'].parent =_meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.Mp2MpXconnects.Mp2MpXconnect']['meta_info']
_meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.Mp2MpXconnects.Mp2MpXconnect']['meta_info'].parent =_meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.Mp2MpXconnects']['meta_info']
_meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.P2PXconnects']['meta_info'].parent =_meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup']['meta_info']
_meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup.Mp2MpXconnects']['meta_info'].parent =_meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup']['meta_info']
_meta_table['L2Vpn.Database.XconnectGroups.XconnectGroup']['meta_info'].parent =_meta_table['L2Vpn.Database.XconnectGroups']['meta_info']
_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdStormControls.BdStormControl.StormControlUnit']['meta_info'].parent =_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdStormControls.BdStormControl']['meta_info']
_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdStormControls.BdStormControl']['meta_info'].parent =_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdStormControls']['meta_info']
_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.MemberVnis.MemberVni.MemberVniStaticMacAddresses.MemberVniStaticMacAddress']['meta_info'].parent =_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.MemberVnis.MemberVni.MemberVniStaticMacAddresses']['meta_info']
_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.MemberVnis.MemberVni.MemberVniStaticMacAddresses']['meta_info'].parent =_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.MemberVnis.MemberVni']['meta_info']
_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.MemberVnis.MemberVni']['meta_info'].parent =_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.MemberVnis']['meta_info']
_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainMac.BdMacFilters.BdMacFilter']['meta_info'].parent =_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainMac.BdMacFilters']['meta_info']
_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainMac.BdMacLimit']['meta_info'].parent =_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainMac']['meta_info']
_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainMac.BdMacFilters']['meta_info'].parent =_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainMac']['meta_info']
_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainMac.MacSecure']['meta_info'].parent =_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainMac']['meta_info']
_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainMac.BdMacAging']['meta_info'].parent =_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainMac']['meta_info']
_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainPbb.PbbEdges.PbbEdge.PbbStaticMacMappings.PbbStaticMacMapping']['meta_info'].parent =_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainPbb.PbbEdges.PbbEdge.PbbStaticMacMappings']['meta_info']
_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainPbb.PbbEdges.PbbEdge.PbbEdgeMac.PbbEdgeMacLimit']['meta_info'].parent =_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainPbb.PbbEdges.PbbEdge.PbbEdgeMac']['meta_info']
_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainPbb.PbbEdges.PbbEdge.PbbEdgeMac.PbbEdgeMacAging']['meta_info'].parent =_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainPbb.PbbEdges.PbbEdge.PbbEdgeMac']['meta_info']
_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainPbb.PbbEdges.PbbEdge.PbbEdgeMac.PbbEdgeMacSecure']['meta_info'].parent =_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainPbb.PbbEdges.PbbEdge.PbbEdgeMac']['meta_info']
_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainPbb.PbbEdges.PbbEdge.PbbStaticMacMappings']['meta_info'].parent =_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainPbb.PbbEdges.PbbEdge']['meta_info']
_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainPbb.PbbEdges.PbbEdge.PbbEdgeDhcpProfile']['meta_info'].parent =_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainPbb.PbbEdges.PbbEdge']['meta_info']
_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainPbb.PbbEdges.PbbEdge.PbbEdgeMac']['meta_info'].parent =_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainPbb.PbbEdges.PbbEdge']['meta_info']
_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainPbb.PbbEdges.PbbEdge']['meta_info'].parent =_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainPbb.PbbEdges']['meta_info']
_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainPbb.PbbCore.PbbCoreMac.PbbCoreMacAging']['meta_info'].parent =_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainPbb.PbbCore.PbbCoreMac']['meta_info']
_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainPbb.PbbCore.PbbCoreMac.PbbCoreMacLimit']['meta_info'].parent =_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainPbb.PbbCore.PbbCoreMac']['meta_info']
_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainPbb.PbbCore.PbbCoreEvis.PbbCoreEvi']['meta_info'].parent =_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainPbb.PbbCore.PbbCoreEvis']['meta_info']
_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainPbb.PbbCore.PbbCoreMac']['meta_info'].parent =_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainPbb.PbbCore']['meta_info']
_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainPbb.PbbCore.PbbCoreEvis']['meta_info'].parent =_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainPbb.PbbCore']['meta_info']
_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainPbb.PbbCore.PbbCoreDhcpProfile']['meta_info'].parent =_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainPbb.PbbCore']['meta_info']
_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainPbb.PbbEdges']['meta_info'].parent =_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainPbb']['meta_info']
_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainPbb.PbbCore']['meta_info'].parent =_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainPbb']['meta_info']
_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainEvis.BridgeDomainEvi']['meta_info'].parent =_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainEvis']['meta_info']
_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire.PseudowireDai.PseudowireDaiAddressValidation']['meta_info'].parent =_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire.PseudowireDai']['meta_info']
_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire.BdpwStormControlTypes.BdpwStormControlType.StormControlUnit']['meta_info'].parent =_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire.BdpwStormControlTypes.BdpwStormControlType']['meta_info']
_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire.BdpwStormControlTypes.BdpwStormControlType']['meta_info'].parent =_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire.BdpwStormControlTypes']['meta_info']
_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire.BdPwStaticMacAddresses.BdPwStaticMacAddress']['meta_info'].parent =_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire.BdPwStaticMacAddresses']['meta_info']
_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire.PseudowireMac.PseudowireMacSecure']['meta_info'].parent =_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire.PseudowireMac']['meta_info']
_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire.PseudowireMac.PseudowireMacAging']['meta_info'].parent =_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire.PseudowireMac']['meta_info']
_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire.PseudowireMac.PseudowireMacLimit']['meta_info'].parent =_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire.PseudowireMac']['meta_info']
_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire.BdPwSplitHorizon.BdPwSplitHorizonGroup']['meta_info'].parent =_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire.BdPwSplitHorizon']['meta_info']
_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire.BridgeDomainBackupPseudowires.BridgeDomainBackupPseudowire']['meta_info'].parent =_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire.BridgeDomainBackupPseudowires']['meta_info']
_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire.PseudowireDai']['meta_info'].parent =_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire']['meta_info']
_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire.BdpwStormControlTypes']['meta_info'].parent =_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire']['meta_info']
_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire.PseudowireProfile']['meta_info'].parent =_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire']['meta_info']
_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire.BdPwStaticMacAddresses']['meta_info'].parent =_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire']['meta_info']
_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire.PseudowireIpSourceGuard']['meta_info'].parent =_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire']['meta_info']
_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire.PseudowireMac']['meta_info'].parent =_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire']['meta_info']
_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire.BdPwSplitHorizon']['meta_info'].parent =_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire']['meta_info']
_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire.BdPwMplsStaticLabels']['meta_info'].parent =_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire']['meta_info']
_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire.BridgeDomainBackupPseudowires']['meta_info'].parent =_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire']['meta_info']
_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires.BdPseudowire']['meta_info'].parent =_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires']['meta_info']
_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.MulticastP2Mp.Transports.Transport']['meta_info'].parent =_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.MulticastP2Mp.Transports']['meta_info']
_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.MulticastP2Mp.Signalings.Signaling']['meta_info'].parent =_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.MulticastP2Mp.Signalings']['meta_info']
_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.MulticastP2Mp.Transports']['meta_info'].parent =_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.MulticastP2Mp']['meta_info']
_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.MulticastP2Mp.Signalings']['meta_info'].parent =_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.MulticastP2Mp']['meta_info']
_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.VfiPseudowires.VfiPseudowire.PseudowireStaticMacAddresses.PseudowireStaticMacAddress']['meta_info'].parent =_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.VfiPseudowires.VfiPseudowire.PseudowireStaticMacAddresses']['meta_info']
_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.VfiPseudowires.VfiPseudowire.VfiPwDhcpSnoop']['meta_info'].parent =_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.VfiPseudowires.VfiPseudowire']['meta_info']
_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.VfiPseudowires.VfiPseudowire.VfiPwMplsStaticLabels']['meta_info'].parent =_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.VfiPseudowires.VfiPseudowire']['meta_info']
_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.VfiPseudowires.VfiPseudowire.PseudowireStaticMacAddresses']['meta_info'].parent =_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.VfiPseudowires.VfiPseudowire']['meta_info']
_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.VfiPseudowires.VfiPseudowire']['meta_info'].parent =_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.VfiPseudowires']['meta_info']
_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.BgpAutoDiscovery.LdpSignalingProtocol.Vplsid']['meta_info'].parent =_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.BgpAutoDiscovery.LdpSignalingProtocol']['meta_info']
_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.BgpAutoDiscovery.LdpSignalingProtocol.FlowLabelLoadBalance']['meta_info'].parent =_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.BgpAutoDiscovery.LdpSignalingProtocol']['meta_info']
_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.BgpAutoDiscovery.BgpSignalingProtocol.FlowLabelLoadBalance']['meta_info'].parent =_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.BgpAutoDiscovery.BgpSignalingProtocol']['meta_info']
_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.BgpAutoDiscovery.RouteTargets.RouteTarget.TwoByteAsOrFourByteAs']['meta_info'].parent =_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.BgpAutoDiscovery.RouteTargets.RouteTarget']['meta_info']
_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.BgpAutoDiscovery.RouteTargets.RouteTarget.Ipv4Address']['meta_info'].parent =_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.BgpAutoDiscovery.RouteTargets.RouteTarget']['meta_info']
_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.BgpAutoDiscovery.RouteTargets.RouteTarget']['meta_info'].parent =_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.BgpAutoDiscovery.RouteTargets']['meta_info']
_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.BgpAutoDiscovery.LdpSignalingProtocol']['meta_info'].parent =_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.BgpAutoDiscovery']['meta_info']
_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.BgpAutoDiscovery.BgpRoutePolicy']['meta_info'].parent =_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.BgpAutoDiscovery']['meta_info']
_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.BgpAutoDiscovery.RouteDistinguisher']['meta_info'].parent =_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.BgpAutoDiscovery']['meta_info']
_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.BgpAutoDiscovery.BgpSignalingProtocol']['meta_info'].parent =_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.BgpAutoDiscovery']['meta_info']
_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.BgpAutoDiscovery.RouteTargets']['meta_info'].parent =_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.BgpAutoDiscovery']['meta_info']
_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.MulticastP2Mp']['meta_info'].parent =_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi']['meta_info']
_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.VfiPseudowires']['meta_info'].parent =_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi']['meta_info']
_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi.BgpAutoDiscovery']['meta_info'].parent =_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi']['meta_info']
_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis.Vfi']['meta_info'].parent =_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis']['meta_info']
_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdAttachmentCircuits.BdAttachmentCircuit.InterfaceDai.InterfaceDaiAddressValidation']['meta_info'].parent =_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdAttachmentCircuits.BdAttachmentCircuit.InterfaceDai']['meta_info']
_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdAttachmentCircuits.BdAttachmentCircuit.BdacStormControlTypes.BdacStormControlType.StormControlUnit']['meta_info'].parent =_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdAttachmentCircuits.BdAttachmentCircuit.BdacStormControlTypes.BdacStormControlType']['meta_info']
_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdAttachmentCircuits.BdAttachmentCircuit.BdacStormControlTypes.BdacStormControlType']['meta_info'].parent =_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdAttachmentCircuits.BdAttachmentCircuit.BdacStormControlTypes']['meta_info']
_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdAttachmentCircuits.BdAttachmentCircuit.SplitHorizon.SplitHorizonGroupId']['meta_info'].parent =_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdAttachmentCircuits.BdAttachmentCircuit.SplitHorizon']['meta_info']
_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdAttachmentCircuits.BdAttachmentCircuit.StaticMacAddresses.StaticMacAddress']['meta_info'].parent =_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdAttachmentCircuits.BdAttachmentCircuit.StaticMacAddresses']['meta_info']
_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdAttachmentCircuits.BdAttachmentCircuit.InterfaceMac.InterfaceMacAging']['meta_info'].parent =_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdAttachmentCircuits.BdAttachmentCircuit.InterfaceMac']['meta_info']
_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdAttachmentCircuits.BdAttachmentCircuit.InterfaceMac.InterfaceMacSecure']['meta_info'].parent =_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdAttachmentCircuits.BdAttachmentCircuit.InterfaceMac']['meta_info']
_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdAttachmentCircuits.BdAttachmentCircuit.InterfaceMac.InterfaceMacLimit']['meta_info'].parent =_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdAttachmentCircuits.BdAttachmentCircuit.InterfaceMac']['meta_info']
_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdAttachmentCircuits.BdAttachmentCircuit.InterfaceIpSourceGuard']['meta_info'].parent =_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdAttachmentCircuits.BdAttachmentCircuit']['meta_info']
_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdAttachmentCircuits.BdAttachmentCircuit.InterfaceDai']['meta_info'].parent =_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdAttachmentCircuits.BdAttachmentCircuit']['meta_info']
_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdAttachmentCircuits.BdAttachmentCircuit.InterfaceProfile']['meta_info'].parent =_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdAttachmentCircuits.BdAttachmentCircuit']['meta_info']
_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdAttachmentCircuits.BdAttachmentCircuit.BdacStormControlTypes']['meta_info'].parent =_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdAttachmentCircuits.BdAttachmentCircuit']['meta_info']
_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdAttachmentCircuits.BdAttachmentCircuit.SplitHorizon']['meta_info'].parent =_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdAttachmentCircuits.BdAttachmentCircuit']['meta_info']
_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdAttachmentCircuits.BdAttachmentCircuit.StaticMacAddresses']['meta_info'].parent =_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdAttachmentCircuits.BdAttachmentCircuit']['meta_info']
_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdAttachmentCircuits.BdAttachmentCircuit.InterfaceMac']['meta_info'].parent =_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdAttachmentCircuits.BdAttachmentCircuit']['meta_info']
_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdAttachmentCircuits.BdAttachmentCircuit']['meta_info'].parent =_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdAttachmentCircuits']['meta_info']
_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowireEvpns.BdPseudowireEvpn']['meta_info'].parent =_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowireEvpns']['meta_info']
_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Dai.DaiAddressValidation']['meta_info'].parent =_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Dai']['meta_info']
_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.RoutedInterfaces.RoutedInterface']['meta_info'].parent =_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.RoutedInterfaces']['meta_info']
_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdStormControls']['meta_info'].parent =_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain']['meta_info']
_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.MemberVnis']['meta_info'].parent =_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain']['meta_info']
_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainMac']['meta_info'].parent =_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain']['meta_info']
_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.NvSatellite']['meta_info'].parent =_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain']['meta_info']
_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainPbb']['meta_info'].parent =_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain']['meta_info']
_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BridgeDomainEvis']['meta_info'].parent =_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain']['meta_info']
_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowires']['meta_info'].parent =_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain']['meta_info']
_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Vfis']['meta_info'].parent =_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain']['meta_info']
_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdAttachmentCircuits']['meta_info'].parent =_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain']['meta_info']
_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.BdPseudowireEvpns']['meta_info'].parent =_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain']['meta_info']
_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.IpSourceGuard']['meta_info'].parent =_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain']['meta_info']
_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.Dai']['meta_info'].parent =_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain']['meta_info']
_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain.RoutedInterfaces']['meta_info'].parent =_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain']['meta_info']
_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains.BridgeDomain']['meta_info'].parent =_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains']['meta_info']
_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup.BridgeDomains']['meta_info'].parent =_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup']['meta_info']
_meta_table['L2Vpn.Database.BridgeDomainGroups.BridgeDomainGroup']['meta_info'].parent =_meta_table['L2Vpn.Database.BridgeDomainGroups']['meta_info']
_meta_table['L2Vpn.Database.PseudowireClasses.PseudowireClass.L2Tpv3Encapsulation.Sequencing']['meta_info'].parent =_meta_table['L2Vpn.Database.PseudowireClasses.PseudowireClass.L2Tpv3Encapsulation']['meta_info']
_meta_table['L2Vpn.Database.PseudowireClasses.PseudowireClass.L2Tpv3Encapsulation.TypeOfService']['meta_info'].parent =_meta_table['L2Vpn.Database.PseudowireClasses.PseudowireClass.L2Tpv3Encapsulation']['meta_info']
_meta_table['L2Vpn.Database.PseudowireClasses.PseudowireClass.L2Tpv3Encapsulation.SignalingProtocol']['meta_info'].parent =_meta_table['L2Vpn.Database.PseudowireClasses.PseudowireClass.L2Tpv3Encapsulation']['meta_info']
_meta_table['L2Vpn.Database.PseudowireClasses.PseudowireClass.L2Tpv3Encapsulation.PathMtu']['meta_info'].parent =_meta_table['L2Vpn.Database.PseudowireClasses.PseudowireClass.L2Tpv3Encapsulation']['meta_info']
_meta_table['L2Vpn.Database.PseudowireClasses.PseudowireClass.MplsEncapsulation.LoadBalanceGroup.FlowLabelLoadBalance']['meta_info'].parent =_meta_table['L2Vpn.Database.PseudowireClasses.PseudowireClass.MplsEncapsulation.LoadBalanceGroup']['meta_info']
_meta_table['L2Vpn.Database.PseudowireClasses.PseudowireClass.MplsEncapsulation.Sequencing']['meta_info'].parent =_meta_table['L2Vpn.Database.PseudowireClasses.PseudowireClass.MplsEncapsulation']['meta_info']
_meta_table['L2Vpn.Database.PseudowireClasses.PseudowireClass.MplsEncapsulation.MplsRedundancy']['meta_info'].parent =_meta_table['L2Vpn.Database.PseudowireClasses.PseudowireClass.MplsEncapsulation']['meta_info']
_meta_table['L2Vpn.Database.PseudowireClasses.PseudowireClass.MplsEncapsulation.PreferredPath']['meta_info'].parent =_meta_table['L2Vpn.Database.PseudowireClasses.PseudowireClass.MplsEncapsulation']['meta_info']
_meta_table['L2Vpn.Database.PseudowireClasses.PseudowireClass.MplsEncapsulation.LoadBalanceGroup']['meta_info'].parent =_meta_table['L2Vpn.Database.PseudowireClasses.PseudowireClass.MplsEncapsulation']['meta_info']
_meta_table['L2Vpn.Database.PseudowireClasses.PseudowireClass.L2Tpv3Encapsulation']['meta_info'].parent =_meta_table['L2Vpn.Database.PseudowireClasses.PseudowireClass']['meta_info']
_meta_table['L2Vpn.Database.PseudowireClasses.PseudowireClass.BackupDisableDelay']['meta_info'].parent =_meta_table['L2Vpn.Database.PseudowireClasses.PseudowireClass']['meta_info']
_meta_table['L2Vpn.Database.PseudowireClasses.PseudowireClass.MplsEncapsulation']['meta_info'].parent =_meta_table['L2Vpn.Database.PseudowireClasses.PseudowireClass']['meta_info']
_meta_table['L2Vpn.Database.PseudowireClasses.PseudowireClass']['meta_info'].parent =_meta_table['L2Vpn.Database.PseudowireClasses']['meta_info']
_meta_table['L2Vpn.Database.FlexibleXconnectServiceTable.VlanUnawareFlexibleXconnectServices.VlanUnawareFlexibleXconnectService.VlanUnawareFxcAttachmentCircuits.VlanUnawareFxcAttachmentCircuit']['meta_info'].parent =_meta_table['L2Vpn.Database.FlexibleXconnectServiceTable.VlanUnawareFlexibleXconnectServices.VlanUnawareFlexibleXconnectService.VlanUnawareFxcAttachmentCircuits']['meta_info']
_meta_table['L2Vpn.Database.FlexibleXconnectServiceTable.VlanUnawareFlexibleXconnectServices.VlanUnawareFlexibleXconnectService.VlanUnawareFxcPseudowireEvpns.VlanUnawareFxcPseudowireEvpn']['meta_info'].parent =_meta_table['L2Vpn.Database.FlexibleXconnectServiceTable.VlanUnawareFlexibleXconnectServices.VlanUnawareFlexibleXconnectService.VlanUnawareFxcPseudowireEvpns']['meta_info']
_meta_table['L2Vpn.Database.FlexibleXconnectServiceTable.VlanUnawareFlexibleXconnectServices.VlanUnawareFlexibleXconnectService.VlanUnawareFxcAttachmentCircuits']['meta_info'].parent =_meta_table['L2Vpn.Database.FlexibleXconnectServiceTable.VlanUnawareFlexibleXconnectServices.VlanUnawareFlexibleXconnectService']['meta_info']
_meta_table['L2Vpn.Database.FlexibleXconnectServiceTable.VlanUnawareFlexibleXconnectServices.VlanUnawareFlexibleXconnectService.VlanUnawareFxcPseudowireEvpns']['meta_info'].parent =_meta_table['L2Vpn.Database.FlexibleXconnectServiceTable.VlanUnawareFlexibleXconnectServices.VlanUnawareFlexibleXconnectService']['meta_info']
_meta_table['L2Vpn.Database.FlexibleXconnectServiceTable.VlanUnawareFlexibleXconnectServices.VlanUnawareFlexibleXconnectService']['meta_info'].parent =_meta_table['L2Vpn.Database.FlexibleXconnectServiceTable.VlanUnawareFlexibleXconnectServices']['meta_info']
_meta_table['L2Vpn.Database.FlexibleXconnectServiceTable.VlanUnawareFlexibleXconnectServices']['meta_info'].parent =_meta_table['L2Vpn.Database.FlexibleXconnectServiceTable']['meta_info']
_meta_table['L2Vpn.Database.Redundancy.IccpRedundancyGroups.IccpRedundancyGroup.IccpInterfaces.IccpInterface']['meta_info'].parent =_meta_table['L2Vpn.Database.Redundancy.IccpRedundancyGroups.IccpRedundancyGroup.IccpInterfaces']['meta_info']
_meta_table['L2Vpn.Database.Redundancy.IccpRedundancyGroups.IccpRedundancyGroup.IccpInterfaces']['meta_info'].parent =_meta_table['L2Vpn.Database.Redundancy.IccpRedundancyGroups.IccpRedundancyGroup']['meta_info']
_meta_table['L2Vpn.Database.Redundancy.IccpRedundancyGroups.IccpRedundancyGroup']['meta_info'].parent =_meta_table['L2Vpn.Database.Redundancy.IccpRedundancyGroups']['meta_info']
_meta_table['L2Vpn.Database.Redundancy.IccpRedundancyGroups']['meta_info'].parent =_meta_table['L2Vpn.Database.Redundancy']['meta_info']
_meta_table['L2Vpn.Database.G8032Rings']['meta_info'].parent =_meta_table['L2Vpn.Database']['meta_info']
_meta_table['L2Vpn.Database.XconnectGroups']['meta_info'].parent =_meta_table['L2Vpn.Database']['meta_info']
_meta_table['L2Vpn.Database.BridgeDomainGroups']['meta_info'].parent =_meta_table['L2Vpn.Database']['meta_info']
_meta_table['L2Vpn.Database.PseudowireClasses']['meta_info'].parent =_meta_table['L2Vpn.Database']['meta_info']
_meta_table['L2Vpn.Database.FlexibleXconnectServiceTable']['meta_info'].parent =_meta_table['L2Vpn.Database']['meta_info']
_meta_table['L2Vpn.Database.Redundancy']['meta_info'].parent =_meta_table['L2Vpn.Database']['meta_info']
_meta_table['L2Vpn.AutoDiscovery.BgpSignaling']['meta_info'].parent =_meta_table['L2Vpn.AutoDiscovery']['meta_info']
_meta_table['L2Vpn.Utility.Logging']['meta_info'].parent =_meta_table['L2Vpn.Utility']['meta_info']
_meta_table['L2Vpn.Snmp.Mib.MibInterface.Format']['meta_info'].parent =_meta_table['L2Vpn.Snmp.Mib.MibInterface']['meta_info']
_meta_table['L2Vpn.Snmp.Mib.MibInterface']['meta_info'].parent =_meta_table['L2Vpn.Snmp.Mib']['meta_info']
_meta_table['L2Vpn.Snmp.Mib.MibPseudowire']['meta_info'].parent =_meta_table['L2Vpn.Snmp.Mib']['meta_info']
_meta_table['L2Vpn.Snmp.Mib']['meta_info'].parent =_meta_table['L2Vpn.Snmp']['meta_info']
_meta_table['L2Vpn.PwRouting']['meta_info'].parent =_meta_table['L2Vpn']['meta_info']
_meta_table['L2Vpn.Neighbor']['meta_info'].parent =_meta_table['L2Vpn']['meta_info']
_meta_table['L2Vpn.Database']['meta_info'].parent =_meta_table['L2Vpn']['meta_info']
_meta_table['L2Vpn.Pbb']['meta_info'].parent =_meta_table['L2Vpn']['meta_info']
_meta_table['L2Vpn.AutoDiscovery']['meta_info'].parent =_meta_table['L2Vpn']['meta_info']
_meta_table['L2Vpn.Utility']['meta_info'].parent =_meta_table['L2Vpn']['meta_info']
_meta_table['L2Vpn.Snmp']['meta_info'].parent =_meta_table['L2Vpn']['meta_info']
_meta_table['GenericInterfaceLists.GenericInterface.Interfaces.Interface']['meta_info'].parent =_meta_table['GenericInterfaceLists.GenericInterface.Interfaces']['meta_info']
_meta_table['GenericInterfaceLists.GenericInterface.Interfaces']['meta_info'].parent =_meta_table['GenericInterfaceLists.GenericInterface']['meta_info']
_meta_table['GenericInterfaceLists.GenericInterface']['meta_info'].parent =_meta_table['GenericInterfaceLists']['meta_info']
_meta_table['Evpn.EvpnTables.Evpnevis.Evpnevi.EvpnevibgpAutoDiscovery.EvpnRouteTargets.EvpnRouteTarget.TwoByteAsOrFourByteAs']['meta_info'].parent =_meta_table['Evpn.EvpnTables.Evpnevis.Evpnevi.EvpnevibgpAutoDiscovery.EvpnRouteTargets.EvpnRouteTarget']['meta_info']
_meta_table['Evpn.EvpnTables.Evpnevis.Evpnevi.EvpnevibgpAutoDiscovery.EvpnRouteTargets.EvpnRouteTarget.Ipv4Address']['meta_info'].parent =_meta_table['Evpn.EvpnTables.Evpnevis.Evpnevi.EvpnevibgpAutoDiscovery.EvpnRouteTargets.EvpnRouteTarget']['meta_info']
_meta_table['Evpn.EvpnTables.Evpnevis.Evpnevi.EvpnevibgpAutoDiscovery.EvpnRouteTargets.EvpnRouteTarget']['meta_info'].parent =_meta_table['Evpn.EvpnTables.Evpnevis.Evpnevi.EvpnevibgpAutoDiscovery.EvpnRouteTargets']['meta_info']
_meta_table['Evpn.EvpnTables.Evpnevis.Evpnevi.EvpnevibgpAutoDiscovery.EvpnRouteTargets']['meta_info'].parent =_meta_table['Evpn.EvpnTables.Evpnevis.Evpnevi.EvpnevibgpAutoDiscovery']['meta_info']
_meta_table['Evpn.EvpnTables.Evpnevis.Evpnevi.EvpnevibgpAutoDiscovery.EvpnRouteDistinguisher']['meta_info'].parent =_meta_table['Evpn.EvpnTables.Evpnevis.Evpnevi.EvpnevibgpAutoDiscovery']['meta_info']
_meta_table['Evpn.EvpnTables.Evpnevis.Evpnevi.EviLoadBalancing']['meta_info'].parent =_meta_table['Evpn.EvpnTables.Evpnevis.Evpnevi']['meta_info']
_meta_table['Evpn.EvpnTables.Evpnevis.Evpnevi.EvpnevibgpAutoDiscovery']['meta_info'].parent =_meta_table['Evpn.EvpnTables.Evpnevis.Evpnevi']['meta_info']
_meta_table['Evpn.EvpnTables.Evpnevis.Evpnevi']['meta_info'].parent =_meta_table['Evpn.EvpnTables.Evpnevis']['meta_info']
_meta_table['Evpn.EvpnTables.EvpnbgpAutoDiscovery.EvpnRouteDistinguisher']['meta_info'].parent =_meta_table['Evpn.EvpnTables.EvpnbgpAutoDiscovery']['meta_info']
_meta_table['Evpn.EvpnTables.EvpnInterfaces.EvpnInterface.EthernetSegment.ManualServiceCarving.ServiceList']['meta_info'].parent =_meta_table['Evpn.EvpnTables.EvpnInterfaces.EvpnInterface.EthernetSegment.ManualServiceCarving']['meta_info']
_meta_table['Evpn.EvpnTables.EvpnInterfaces.EvpnInterface.EthernetSegment.IdentifierType0']['meta_info'].parent =_meta_table['Evpn.EvpnTables.EvpnInterfaces.EvpnInterface.EthernetSegment']['meta_info']
_meta_table['Evpn.EvpnTables.EvpnInterfaces.EvpnInterface.EthernetSegment.ManualServiceCarving']['meta_info'].parent =_meta_table['Evpn.EvpnTables.EvpnInterfaces.EvpnInterface.EthernetSegment']['meta_info']
_meta_table['Evpn.EvpnTables.EvpnInterfaces.EvpnInterface.EvpnacTimers']['meta_info'].parent =_meta_table['Evpn.EvpnTables.EvpnInterfaces.EvpnInterface']['meta_info']
_meta_table['Evpn.EvpnTables.EvpnInterfaces.EvpnInterface.EthernetSegment']['meta_info'].parent =_meta_table['Evpn.EvpnTables.EvpnInterfaces.EvpnInterface']['meta_info']
_meta_table['Evpn.EvpnTables.EvpnInterfaces.EvpnInterface']['meta_info'].parent =_meta_table['Evpn.EvpnTables.EvpnInterfaces']['meta_info']
_meta_table['Evpn.EvpnTables.EvpnTimers']['meta_info'].parent =_meta_table['Evpn.EvpnTables']['meta_info']
_meta_table['Evpn.EvpnTables.Evpnevis']['meta_info'].parent =_meta_table['Evpn.EvpnTables']['meta_info']
_meta_table['Evpn.EvpnTables.EvpnLoadBalancing']['meta_info'].parent =_meta_table['Evpn.EvpnTables']['meta_info']
_meta_table['Evpn.EvpnTables.EvpnbgpAutoDiscovery']['meta_info'].parent =_meta_table['Evpn.EvpnTables']['meta_info']
_meta_table['Evpn.EvpnTables.EvpnInterfaces']['meta_info'].parent =_meta_table['Evpn.EvpnTables']['meta_info']
_meta_table['Evpn.EvpnTables']['meta_info'].parent =_meta_table['Evpn']['meta_info']
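The assignments above do one thing: they wire a .parent link from each generated metadata entry to the entry for its enclosing YANG container, so every 'meta_info' object can be traced back to the schema root. A minimal sketch of walking such a chain, under the assumption that the table maps path strings to dicts holding a 'meta_info' object; the names _MetaInfo, _demo_table and ancestry are illustrative and not part of the generated module:

class _MetaInfo:
    # Stand-in for the generated meta-info objects; only the fields the
    # parent wiring above relies on are modelled here.
    def __init__(self, name):
        self.name = name
        self.parent = None

_demo_table = {
    'L2Vpn': {'meta_info': _MetaInfo('L2Vpn')},
    'L2Vpn.Database': {'meta_info': _MetaInfo('L2Vpn.Database')},
    'L2Vpn.Database.G8032Rings': {'meta_info': _MetaInfo('L2Vpn.Database.G8032Rings')},
}
# The same wiring pattern as the generated assignments above:
_demo_table['L2Vpn.Database']['meta_info'].parent = _demo_table['L2Vpn']['meta_info']
_demo_table['L2Vpn.Database.G8032Rings']['meta_info'].parent = _demo_table['L2Vpn.Database']['meta_info']

def ancestry(table, path):
    # Follow .parent links from one entry up to the schema root.
    node = table[path]['meta_info']
    chain = []
    while node is not None:
        chain.append(node.name)
        node = node.parent
    return chain

print(ancestry(_demo_table, 'L2Vpn.Database.G8032Rings'))
# -> ['L2Vpn.Database.G8032Rings', 'L2Vpn.Database', 'L2Vpn']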
| [
[
[
10,
12
]
],
[
[
20,
31
]
],
[
[
50,
54
]
],
[
[
92,
112
],
[
12825,
12845
],
[
13090,
13110
],
[
13466,
13486
],
[
13739,
13759
],
[
14024,
14044
],
[
14748,
14768
],
[
15007,
15027
],
[
15747,
15767
],
[
16128,
16148
],
[
16764,
16784
],
[
17449,
17469
],
[
17933,
17953
],
[
18816,
18836
],
[
19613,
19633
],
[
19936,
19956
],
[
20678,
20698
],
[
21009,
21029
],
[
21753,
21773
],
[
22025,
22045
],
[
22302,
22322
],
[
22597,
22617
],
[
23386,
23406
],
[
23665,
23685
],
[
24034,
24054
],
[
24345,
24365
],
[
24664,
24684
],
[
24947,
24967
],
[
25704,
25724
],
[
26520,
26540
],
[
27454,
27474
],
[
27938,
27958
],
[
28850,
28870
],
[
29178,
29198
],
[
29521,
29541
],
[
30326,
30346
],
[
31071,
31091
],
[
31350,
31370
],
[
31743,
31763
],
[
32113,
32133
],
[
32483,
32503
],
[
32784,
32804
],
[
33098,
33118
],
[
33729,
33749
],
[
34603,
34623
],
[
35613,
35633
],
[
36607,
36627
],
[
36866,
36886
],
[
37137,
37157
],
[
37881,
37901
],
[
38859,
38879
],
[
39162,
39182
],
[
40092,
40112
],
[
40395,
40415
],
[
41286,
41306
],
[
41670,
41690
],
[
41948,
41968
],
[
42462,
42482
],
[
43326,
43346
],
[
44399,
44419
],
[
44689,
44709
],
[
44976,
44996
],
[
45909,
45929
],
[
46198,
46218
],
[
46484,
46504
],
[
47407,
47427
],
[
47696,
47716
],
[
47982,
48002
],
[
48863,
48883
],
[
49332,
49352
],
[
49630,
49650
],
[
50104,
50124
],
[
50405,
50425
],
[
51456,
51476
],
[
52253,
52273
],
[
52641,
52661
],
[
53097,
53117
],
[
53375,
53395
],
[
53642,
53662
],
[
54089,
54109
],
[
54562,
54582
],
[
55015,
55035
],
[
55353,
55373
],
[
55869,
55889
],
[
56467,
56487
],
[
57298,
57318
],
[
57601,
57621
],
[
58549,
58569
],
[
58852,
58872
],
[
59761,
59781
],
[
60145,
60165
],
[
60423,
60443
],
[
60946,
60966
],
[
61828,
61848
],
[
62928,
62948
],
[
63218,
63238
],
[
63505,
63525
],
[
64456,
64476
],
[
64745,
64765
],
[
65031,
65051
],
[
65972,
65992
],
[
66261,
66281
],
[
66547,
66567
],
[
67446,
67466
],
[
67924,
67944
],
[
68222,
68242
],
[
68705,
68725
],
[
69006,
69026
],
[
70084,
70104
],
[
70899,
70919
],
[
71447,
71467
],
[
71912,
71932
],
[
72190,
72210
],
[
72457,
72477
],
[
72913,
72933
],
[
73395,
73415
],
[
73857,
73877
],
[
74195,
74215
],
[
74711,
74731
],
[
75309,
75329
],
[
76080,
76100
],
[
76358,
76378
],
[
76761,
76781
],
[
77672,
77692
],
[
78581,
78601
],
[
78843,
78863
],
[
79582,
79602
],
[
80525,
80545
],
[
80784,
80804
],
[
81057,
81077
],
[
81431,
81451
],
[
81692,
81712
],
[
81959,
81979
],
[
82719,
82739
],
[
83679,
83699
],
[
84168,
84188
],
[
84920,
84940
],
[
85816,
85836
],
[
86086,
86106
],
[
86522,
86542
],
[
86991,
87011
],
[
87330,
87350
],
[
87759,
87779
],
[
88047,
88067
],
[
88465,
88485
],
[
88895,
88915
],
[
89707,
89727
],
[
90621,
90641
],
[
90886,
90906
],
[
91262,
91282
],
[
91535,
91555
],
[
91820,
91840
],
[
92699,
92719
],
[
92951,
92971
],
[
93808,
93828
],
[
94080,
94100
],
[
94957,
94977
],
[
95221,
95241
],
[
96150,
96170
],
[
96506,
96526
],
[
96858,
96878
],
[
97308,
97328
],
[
98349,
98369
],
[
99395,
99415
],
[
99762,
99782
],
[
100691,
100711
],
[
101174,
101194
],
[
102091,
102111
],
[
103234,
103254
],
[
103508,
103528
],
[
104612,
104632
],
[
105568,
105588
],
[
105848,
105868
],
[
106285,
106305
],
[
106545,
106565
],
[
107546,
107566
],
[
107802,
107822
],
[
108240,
108260
],
[
108682,
108702
],
[
109164,
109184
],
[
110069,
110089
],
[
110367,
110387
],
[
110810,
110830
],
[
111089,
111109
],
[
111440,
111460
],
[
111724,
111744
],
[
112106,
112126
],
[
112428,
112448
],
[
113124,
113144
],
[
113928,
113948
],
[
114189,
114209
],
[
114601,
114621
],
[
115355,
115375
],
[
116275,
116295
],
[
116632,
116652
],
[
117510,
117530
],
[
117842,
117862
],
[
118825,
118845
],
[
119876,
119896
],
[
120172,
120192
],
[
121156,
121176
],
[
122208,
122228
],
[
122482,
122502
],
[
123466,
123486
],
[
124432,
124452
],
[
124813,
124833
],
[
125162,
125182
],
[
126077,
126097
],
[
126365,
126385
],
[
127138,
127158
],
[
128089,
128109
],
[
128439,
128459
],
[
128691,
128711
],
[
129456,
129476
],
[
129741,
129761
],
[
130582,
130602
],
[
131019,
131039
],
[
131450,
131470
],
[
131788,
131808
],
[
132225,
132245
],
[
132529,
132549
],
[
132802,
132822
],
[
133116,
133136
],
[
133518,
133538
],
[
133844,
133864
],
[
134740,
134760
],
[
135003,
135023
],
[
135954,
135974
],
[
136242,
136262
],
[
137103,
137123
],
[
138216,
138236
],
[
138490,
138510
],
[
139432,
139452
],
[
139825,
139845
],
[
140186,
140206
],
[
141164,
141184
],
[
141461,
141481
],
[
142410,
142430
],
[
142739,
142759
],
[
143089,
143109
],
[
143382,
143402
],
[
143634,
143654
],
[
144522,
144542
],
[
145009,
145029
],
[
145365,
145385
],
[
145852,
145872
],
[
146845,
146865
],
[
147108,
147128
],
[
147359,
147379
],
[
147836,
147856
],
[
148136,
148156
],
[
148584,
148604
],
[
149084,
149104
],
[
149957,
149977
],
[
150990,
151010
],
[
151287,
151307
],
[
152216,
152236
],
[
152609,
152629
],
[
152970,
152990
],
[
153898,
153918
],
[
154376,
154396
],
[
154732,
154752
],
[
155762,
155782
],
[
156556,
156576
],
[
157537,
157557
],
[
157811,
157831
],
[
158681,
158701
],
[
158975,
158995
],
[
159443,
159463
],
[
159878,
159898
],
[
160178,
160198
],
[
160617,
160637
],
[
160945,
160965
],
[
161694,
161714
],
[
162093,
162113
],
[
163025,
163045
],
[
163786,
163806
],
[
164836,
164856
],
[
165233,
165253
],
[
165597,
165617
],
[
166540,
166560
],
[
166807,
166827
],
[
167105,
167125
],
[
167434,
167454
],
[
168598,
168618
],
[
168955,
168975
],
[
169911,
169931
],
[
170243,
170263
],
[
171338,
171358
],
[
172395,
172415
],
[
172669,
172689
],
[
173630,
173650
],
[
174488,
174508
],
[
175576,
175596
],
[
175844,
175864
],
[
176101,
176121
],
[
177024,
177044
],
[
177374,
177394
],
[
177643,
177663
],
[
177895,
177915
],
[
178817,
178837
],
[
179118,
179138
],
[
180071,
180091
],
[
180509,
180529
],
[
180920,
180940
],
[
181912,
181932
],
[
182210,
182230
],
[
182704,
182724
],
[
183064,
183084
],
[
183558,
183578
],
[
183983,
184003
],
[
185051,
185071
],
[
185871,
185891
],
[
186932,
186952
],
[
187235,
187255
],
[
188183,
188203
],
[
188567,
188587
],
[
188845,
188865
],
[
189787,
189807
],
[
190869,
190889
],
[
191253,
191273
],
[
191531,
191551
],
[
191846,
191866
],
[
192324,
192344
],
[
192779,
192799
],
[
193271,
193291
],
[
193746,
193766
],
[
194259,
194279
],
[
194725,
194745
],
[
195117,
195137
],
[
195573,
195593
],
[
195873,
195893
],
[
196358,
196378
],
[
196851,
196871
],
[
197148,
197168
],
[
198098,
198118
],
[
199078,
199098
],
[
199351,
199371
],
[
200174,
200194
],
[
201161,
201181
],
[
201953,
201973
],
[
202898,
202918
],
[
203158,
203178
],
[
203595,
203615
],
[
204593,
204613
],
[
204867,
204887
],
[
205805,
205825
],
[
206108,
206128
],
[
207073,
207093
],
[
207970,
207990
],
[
209076,
209096
],
[
209460,
209480
],
[
209738,
209758
],
[
210263,
210283
],
[
210580,
210600
],
[
211055,
211075
],
[
211347,
211367
],
[
211636,
211656
],
[
212637,
212657
],
[
213652,
213672
],
[
214028,
214048
],
[
214302,
214322
],
[
214560,
214580
],
[
214824,
214844
],
[
215729,
215749
],
[
216096,
216116
],
[
216911,
216931
],
[
217315,
217335
],
[
217831,
217851
],
[
218804,
218824
],
[
219608,
219628
],
[
219873,
219893
],
[
220249,
220269
],
[
220522,
220542
],
[
220807,
220827
],
[
221762,
221782
],
[
222129,
222149
],
[
222944,
222964
],
[
223211,
223231
],
[
223727,
223747
],
[
224036,
224056
],
[
224922,
224942
],
[
225194,
225214
],
[
226077,
226097
],
[
226341,
226361
],
[
227276,
227296
],
[
227632,
227652
],
[
227984,
228004
],
[
228437,
228457
],
[
229491,
229511
],
[
230455,
230475
],
[
230740,
230760
],
[
231183,
231203
],
[
231695,
231715
],
[
231954,
231974
],
[
232473,
232493
],
[
232937,
232957
],
[
233370,
233390
],
[
234170,
234190
],
[
234445,
234465
],
[
234903,
234923
],
[
235344,
235364
],
[
235775,
235795
],
[
236039,
236059
],
[
236762,
236782
],
[
237741,
237761
],
[
238038,
238058
],
[
238295,
238315
],
[
239263,
239283
],
[
239660,
239680
],
[
239920,
239940
],
[
240284,
240304
],
[
241252,
241272
],
[
241548,
241568
],
[
241841,
241861
],
[
242380,
242400
],
[
243284,
243304
],
[
243558,
243578
],
[
244578,
244598
],
[
244935,
244955
],
[
245919,
245939
],
[
246251,
246271
],
[
247388,
247408
],
[
248517,
248537
],
[
249354,
249374
],
[
250470,
250490
],
[
251342,
251362
],
[
252456,
252476
],
[
252755,
252775
],
[
253731,
253751
],
[
254081,
254101
],
[
254349,
254369
],
[
254601,
254621
],
[
255546,
255566
],
[
255967,
255987
],
[
256346,
256366
],
[
257346,
257366
],
[
257849,
257869
],
[
258207,
258227
],
[
258710,
258730
],
[
259133,
259153
],
[
260159,
260179
],
[
260642,
260662
],
[
261131,
261151
],
[
261602,
261622
],
[
261985,
262005
],
[
262432,
262452
],
[
262730,
262750
],
[
263225,
263245
],
[
263687,
263707
],
[
263982,
264002
],
[
264455,
264475
],
[
264904,
264924
],
[
265899,
265919
],
[
266912,
266932
],
[
267164,
267184
],
[
267923,
267943
],
[
268875,
268895
],
[
269132,
269152
],
[
269894,
269914
],
[
270208,
270228
],
[
270468,
270488
],
[
270749,
270769
],
[
271524,
271544
],
[
271975,
271995
],
[
272239,
272259
],
[
273005,
273025
],
[
274002,
274022
],
[
274918,
274938
],
[
275178,
275198
],
[
275631,
275651
],
[
276072,
276092
],
[
276490,
276510
],
[
276912,
276932
],
[
277348,
277368
],
[
277783,
277803
],
[
278117,
278137
],
[
278543,
278563
],
[
278816,
278836
],
[
279195,
279215
],
[
279459,
279479
],
[
279714,
279734
],
[
280017,
280037
],
[
280304,
280324
],
[
280590,
280610
],
[
281006,
281026
],
[
281453,
281473
],
[
281737,
281757
],
[
282140,
282160
],
[
282587,
282607
],
[
282852,
282872
],
[
283237,
283257
],
[
284092,
284112
],
[
284892,
284912
],
[
285151,
285171
],
[
285919,
285939
],
[
286770,
286790
],
[
287062,
287082
],
[
287867,
287887
],
[
288240,
288260
],
[
289013,
289033
],
[
289304,
289324
],
[
290126,
290146
],
[
290376,
290396
],
[
291108,
291128
],
[
291448,
291468
],
[
291723,
291743
],
[
291985,
292005
],
[
292380,
292400
],
[
292765,
292785
],
[
293195,
293215
],
[
293590,
293610
],
[
293859,
293879
],
[
294206,
294226
],
[
295056,
295076
],
[
295337,
295357
],
[
296128,
296148
],
[
296420,
296440
],
[
297219,
297239
],
[
297576,
297596
],
[
298372,
298392
],
[
298643,
298663
],
[
298966,
298986
],
[
299810,
299830
],
[
300177,
300197
],
[
300908,
300928
],
[
301382,
301402
],
[
301782,
301802
],
[
302601,
302621
],
[
302945,
302965
],
[
303205,
303225
],
[
303620,
303640
],
[
304052,
304072
],
[
304453,
304473
],
[
304832,
304852
],
[
305215,
305235
],
[
305595,
305615
],
[
305990,
306010
],
[
306278,
306298
],
[
306625,
306645
],
[
307380,
307400
],
[
307643,
307663
],
[
308058,
308078
],
[
308316,
308336
],
[
308729,
308749
],
[
309002,
309022
],
[
309775,
309795
],
[
310817,
310837
],
[
311925,
311945
],
[
313170,
313190
],
[
313422,
313442
],
[
314291,
314311
],
[
315402,
315422
],
[
315674,
315694
],
[
316218,
316238
],
[
317230,
317250
],
[
318159,
318179
],
[
319191,
319211
],
[
319674,
319694
],
[
319950,
319970
],
[
320269,
320289
],
[
320560,
320580
],
[
321351,
321371
],
[
322203,
322223
],
[
322476,
322496
],
[
322885,
322905
],
[
323600,
323620
],
[
324375,
324395
],
[
324634,
324654
],
[
325416,
325436
],
[
325795,
325815
],
[
326228,
326248
],
[
326569,
326589
],
[
326946,
326966
],
[
327284,
327304
],
[
327962,
327982
],
[
328633,
328653
],
[
329302,
329322
],
[
330015,
330035
],
[
330331,
330351
],
[
330630,
330650
],
[
330937,
330957
],
[
331263,
331283
],
[
331846,
331866
],
[
332540,
332560
],
[
333228,
333248
],
[
333937,
333957
],
[
334554,
334574
],
[
334924,
334944
],
[
335612,
335632
],
[
336230,
336250
],
[
336588,
336608
],
[
336946,
336966
],
[
337265,
337285
],
[
337520,
337540
],
[
337922,
337942
],
[
338290,
338310
],
[
338574,
338594
],
[
338862,
338882
],
[
339188,
339208
],
[
339440,
339460
],
[
339740,
339760
],
[
340003,
340023
],
[
340342,
340362
],
[
340616,
340636
],
[
340909,
340929
],
[
341223,
341243
],
[
341516,
341536
],
[
342244,
342264
],
[
342734,
342754
],
[
343382,
343402
],
[
344118,
344138
],
[
344425,
344445
],
[
344681,
344701
],
[
345390,
345410
],
[
346120,
346140
],
[
346373,
346393
],
[
346650,
346670
],
[
347331,
347351
],
[
347590,
347610
],
[
348409,
348429
],
[
348681,
348701
],
[
349484,
349504
],
[
349748,
349768
],
[
350603,
350623
],
[
350959,
350979
],
[
351311,
351331
],
[
351661,
351681
],
[
352074,
352094
],
[
353005,
353025
],
[
353898,
353918
],
[
354163,
354183
],
[
354539,
354559
],
[
354812,
354832
],
[
355097,
355117
],
[
355877,
355897
],
[
356136,
356156
],
[
356570,
356590
],
[
356973,
356993
],
[
357670,
357690
],
[
357920,
357940
],
[
358332,
358352
],
[
358616,
358636
],
[
358906,
358926
],
[
359682,
359702
],
[
360400,
360420
],
[
360660,
360680
],
[
361378,
361398
],
[
361643,
361663
],
[
362019,
362039
],
[
362292,
362312
],
[
362577,
362597
],
[
363317,
363337
],
[
363576,
363596
],
[
364416,
364436
],
[
364683,
364703
],
[
364976,
364996
],
[
365729,
365749
],
[
365997,
366017
],
[
366276,
366296
],
[
366555,
366575
],
[
366834,
366854
],
[
367605,
367625
],
[
367869,
367889
],
[
368603,
368623
],
[
368867,
368887
],
[
369755,
369775
],
[
370069,
370089
],
[
370327,
370347
],
[
370650,
370670
],
[
370970,
370990
],
[
371395,
371415
],
[
371708,
371728
],
[
372579,
372599
],
[
373088,
373108
],
[
373505,
373525
],
[
373935,
373955
],
[
374566,
374586
],
[
375279,
375299
],
[
375648,
375668
],
[
376048,
376068
],
[
376411,
376431
],
[
376809,
376829
],
[
377468,
377488
],
[
377722,
377742
]
],
[
[
114,
128
],
[
12709,
12723
],
[
14655,
14669
],
[
15667,
15681
],
[
16685,
16699
],
[
17330,
17344
],
[
18706,
18720
],
[
19484,
19498
],
[
20543,
20557
],
[
21624,
21638
],
[
23261,
23275
],
[
25591,
25605
],
[
26395,
26409
],
[
27316,
27330
],
[
28731,
28745
],
[
30216,
30230
],
[
30971,
30985
],
[
33639,
33653
],
[
34421,
34435
],
[
35455,
35469
],
[
36443,
36457
],
[
37732,
37746
],
[
38677,
38691
],
[
39869,
39883
],
[
41086,
41100
],
[
43143,
43157
],
[
44196,
44210
],
[
45698,
45712
],
[
47205,
47219
],
[
48677,
48691
],
[
51280,
51294
],
[
52088,
52102
],
[
57107,
57121
],
[
58317,
58331
],
[
59552,
59566
],
[
61636,
61650
],
[
62716,
62730
],
[
64236,
64250
],
[
65761,
65775
],
[
67251,
67265
],
[
69899,
69913
],
[
70725,
70739
],
[
75924,
75938
],
[
77527,
77541
],
[
78417,
78431
],
[
79433,
79447
],
[
80357,
80371
],
[
82568,
82582
],
[
83509,
83523
],
[
84768,
84782
],
[
85683,
85697
],
[
89586,
89600
],
[
90446,
90460
],
[
92526,
92540
],
[
93595,
93609
],
[
94754,
94768
],
[
95959,
95973
],
[
98175,
98189
],
[
99195,
99209
],
[
100444,
100458
],
[
101872,
101886
],
[
103044,
103058
],
[
104427,
104441
],
[
105389,
105403
],
[
107390,
107404
],
[
109932,
109946
],
[
113001,
113015
],
[
113820,
113834
],
[
115261,
115275
],
[
116084,
116098
],
[
117336,
117350
],
[
118666,
118680
],
[
119658,
119672
],
[
120964,
120978
],
[
122044,
122058
],
[
123312,
123326
],
[
124262,
124276
],
[
125893,
125907
],
[
126966,
126980
],
[
127920,
127934
],
[
129286,
129300
],
[
130423,
130437
],
[
134585,
134599
],
[
135737,
135751
],
[
136906,
136920
],
[
138021,
138035
],
[
139229,
139243
],
[
140961,
140975
],
[
142206,
142220
],
[
144335,
144349
],
[
146669,
146683
],
[
149789,
149803
],
[
150796,
150810
],
[
152022,
152036
],
[
153720,
153734
],
[
155572,
155586
],
[
156377,
156391
],
[
157351,
157365
],
[
158514,
158528
],
[
161535,
161549
],
[
162849,
162863
],
[
163626,
163640
],
[
164621,
164635
],
[
166356,
166370
],
[
168368,
168382
],
[
169698,
169712
],
[
171146,
171160
],
[
172207,
172221
],
[
173416,
173430
],
[
174295,
174309
],
[
175382,
175396
],
[
176820,
176834
],
[
178614,
178628
],
[
179868,
179882
],
[
181728,
181742
],
[
184842,
184856
],
[
185684,
185698
],
[
186741,
186755
],
[
187954,
187968
],
[
189587,
189601
],
[
190699,
190713
],
[
197941,
197955
],
[
198891,
198905
],
[
199997,
200011
],
[
200974,
200988
],
[
201776,
201790
],
[
202732,
202746
],
[
204397,
204411
],
[
205602,
205616
],
[
206836,
206850
],
[
207760,
207774
],
[
208895,
208909
],
[
212470,
212484
],
[
213455,
213469
],
[
215518,
215532
],
[
216721,
216735
],
[
218620,
218634
],
[
219420,
219434
],
[
221551,
221565
],
[
222754,
222768
],
[
224706,
224720
],
[
225871,
225885
],
[
227082,
227096
],
[
229309,
229323
],
[
230286,
230300
],
[
234018,
234032
],
[
236614,
236628
],
[
237534,
237548
],
[
239036,
239050
],
[
241055,
241069
],
[
243083,
243097
],
[
244334,
244348
],
[
245692,
245706
],
[
247182,
247196
],
[
248300,
248314
],
[
249157,
249171
],
[
250250,
250264
],
[
251139,
251153
],
[
252241,
252255
],
[
253515,
253529
],
[
255331,
255345
],
[
257149,
257163
],
[
259975,
259989
],
[
265735,
265749
],
[
266734,
266748
],
[
267762,
267776
],
[
268718,
268732
],
[
269726,
269740
],
[
271377,
271391
],
[
272829,
272843
],
[
273842,
273856
],
[
274775,
274789
],
[
283962,
283976
],
[
284776,
284790
],
[
285821,
285835
],
[
286626,
286640
],
[
287720,
287734
],
[
288862,
288876
],
[
289985,
289999
],
[
290975,
290989
],
[
294924,
294938
],
[
295986,
296000
],
[
297073,
297087
],
[
298227,
298241
],
[
299641,
299655
],
[
300760,
300774
],
[
302470,
302484
],
[
307267,
307281
],
[
309678,
309692
],
[
310573,
310587
],
[
311713,
311727
],
[
312932,
312946
],
[
314082,
314096
],
[
315223,
315237
],
[
317086,
317100
],
[
318051,
318065
],
[
319031,
319045
],
[
321205,
321219
],
[
322072,
322086
],
[
323489,
323503
],
[
324285,
324299
],
[
325337,
325351
],
[
327888,
327902
],
[
328536,
328550
],
[
329218,
329232
],
[
329929,
329943
],
[
331768,
331782
],
[
332441,
332455
],
[
333136,
333150
],
[
333844,
333858
],
[
334475,
334489
],
[
335537,
335551
],
[
336160,
336174
],
[
342120,
342134
],
[
343268,
343282
],
[
344015,
344029
],
[
345304,
345318
],
[
346029,
346043
],
[
347217,
347231
],
[
348233,
348247
],
[
349318,
349332
],
[
350449,
350463
],
[
352867,
352881
],
[
353754,
353768
],
[
355756,
355770
],
[
357573,
357587
],
[
359593,
359607
],
[
360302,
360316
],
[
361254,
361268
],
[
363216,
363230
],
[
364294,
364308
],
[
365588,
365602
],
[
367447,
367461
],
[
368457,
368471
],
[
369630,
369644
],
[
372470,
372484
],
[
374471,
374485
],
[
375199,
375213
],
[
377399,
377413
]
],
[
[
130,
143
],
[
547,
560
],
[
815,
828
],
[
1087,
1100
],
[
1453,
1466
],
[
1804,
1817
],
[
2078,
2091
],
[
2313,
2326
],
[
2592,
2605
],
[
2849,
2862
],
[
3112,
3125
],
[
3398,
3411
],
[
3677,
3690
],
[
4042,
4055
],
[
4317,
4330
],
[
4613,
4626
],
[
4917,
4930
],
[
5199,
5212
],
[
5489,
5502
],
[
5810,
5823
],
[
6077,
6090
],
[
6388,
6401
],
[
6726,
6739
],
[
6999,
7012
],
[
7346,
7359
],
[
7617,
7630
],
[
7899,
7912
],
[
8252,
8265
],
[
8642,
8655
],
[
8941,
8954
],
[
9209,
9222
],
[
9495,
9508
],
[
9764,
9777
],
[
10094,
10107
],
[
10371,
10384
],
[
10713,
10726
],
[
11075,
11088
],
[
11422,
11435
],
[
11694,
11707
],
[
12015,
12028
],
[
12345,
12358
]
],
[
[
166,
171
]
],
[
[
173,
178
]
],
[
[
180,
189
]
],
[
[
191,
197
]
],
[
[
199,
208
]
],
[
[
210,
223
]
],
[
[
260,
269
],
[
12860,
12869
],
[
13122,
13131
],
[
13493,
13502
],
[
13772,
13781
],
[
14779,
14788
],
[
16173,
16182
],
[
16797,
16806
],
[
17488,
17497
],
[
17965,
17974
],
[
20714,
20723
],
[
21784,
21793
],
[
22055,
22064
],
[
22332,
22341
],
[
23426,
23435
],
[
24070,
24079
],
[
24384,
24393
],
[
24696,
24705
],
[
26552,
26561
],
[
27493,
27502
],
[
27970,
27979
],
[
31111,
31120
],
[
32527,
32536
],
[
32823,
32832
],
[
33132,
33141
],
[
34642,
34651
],
[
36637,
36646
],
[
36902,
36911
],
[
37173,
37182
],
[
38902,
38911
],
[
39206,
39215
],
[
40135,
40144
],
[
40439,
40448
],
[
41319,
41328
],
[
41708,
41717
],
[
42502,
42511
],
[
44436,
44445
],
[
44725,
44734
],
[
45946,
45955
],
[
46234,
46243
],
[
47444,
47453
],
[
47732,
47741
],
[
49378,
49387
],
[
50151,
50160
],
[
51487,
51496
],
[
52286,
52295
],
[
53131,
53140
],
[
53405,
53414
],
[
55392,
55401
],
[
55908,
55917
],
[
56502,
56511
],
[
57341,
57350
],
[
57645,
57654
],
[
58592,
58601
],
[
58896,
58905
],
[
59794,
59803
],
[
60183,
60192
],
[
60986,
60995
],
[
62965,
62974
],
[
63254,
63263
],
[
64493,
64502
],
[
64781,
64790
],
[
66009,
66018
],
[
66297,
66306
],
[
67970,
67979
],
[
68752,
68761
],
[
70115,
70124
],
[
70942,
70951
],
[
71946,
71955
],
[
72220,
72229
],
[
74234,
74243
],
[
74750,
74759
],
[
75344,
75353
],
[
76118,
76127
],
[
78610,
78619
],
[
78874,
78883
],
[
80554,
80563
],
[
80818,
80827
],
[
81088,
81097
],
[
81461,
81470
],
[
81722,
81731
],
[
81994,
82003
],
[
83708,
83717
],
[
84199,
84208
],
[
85845,
85854
],
[
87799,
87808
],
[
90656,
90665
],
[
90918,
90927
],
[
91289,
91298
],
[
91568,
91577
],
[
92730,
92739
],
[
92982,
92991
],
[
93835,
93844
],
[
94113,
94122
],
[
94992,
95001
],
[
95253,
95262
],
[
99793,
99802
],
[
100720,
100729
],
[
101211,
101220
],
[
103264,
103273
],
[
105601,
105610
],
[
106316,
106325
],
[
107577,
107586
],
[
110098,
110107
],
[
110853,
110862
],
[
111479,
111488
],
[
112139,
112148
],
[
112464,
112473
],
[
113957,
113966
],
[
116313,
116322
],
[
116669,
116678
],
[
119912,
119921
],
[
120208,
120217
],
[
122236,
122245
],
[
124854,
124863
],
[
126109,
126118
],
[
126394,
126403
],
[
128470,
128479
],
[
128723,
128732
],
[
129498,
129507
],
[
132272,
132281
],
[
132569,
132578
],
[
132860,
132869
],
[
133564,
133573
],
[
134771,
134780
],
[
135057,
135066
],
[
135986,
135995
],
[
136271,
136280
],
[
138257,
138266
],
[
139872,
139881
],
[
141212,
141221
],
[
142450,
142459
],
[
143121,
143130
],
[
143413,
143422
],
[
146882,
146891
],
[
147137,
147146
],
[
147882,
147891
],
[
149129,
149138
],
[
151038,
151047
],
[
152656,
152665
],
[
155792,
155801
],
[
157578,
157587
],
[
158712,
158721
],
[
159924,
159933
],
[
160674,
160683
],
[
160977,
160986
],
[
163055,
163064
],
[
166572,
166581
],
[
166838,
166847
],
[
168636,
168645
],
[
168992,
169001
],
[
172436,
172445
],
[
173662,
173671
],
[
175608,
175617
],
[
175875,
175884
],
[
177406,
177415
],
[
177674,
177683
],
[
178867,
178876
],
[
180558,
180567
],
[
181943,
181952
],
[
185082,
185091
],
[
186975,
186984
],
[
187279,
187288
],
[
188216,
188225
],
[
188605,
188614
],
[
188899,
188908
],
[
190902,
190911
],
[
191291,
191300
],
[
191567,
191576
],
[
195619,
195628
],
[
196896,
196905
],
[
199117,
199126
],
[
199394,
199403
],
[
201200,
201209
],
[
202929,
202938
],
[
204634,
204643
],
[
205848,
205857
],
[
206152,
206161
],
[
207105,
207114
],
[
209109,
209118
],
[
209498,
209507
],
[
210300,
210309
],
[
211097,
211106
],
[
211388,
211397
],
[
213684,
213693
],
[
214066,
214075
],
[
214329,
214338
],
[
214593,
214602
],
[
216127,
216136
],
[
216942,
216951
],
[
218835,
218844
],
[
219643,
219652
],
[
219905,
219914
],
[
220276,
220285
],
[
220555,
220564
],
[
222160,
222169
],
[
222975,
222984
],
[
223760,
223769
],
[
224065,
224074
],
[
224949,
224958
],
[
225227,
225236
],
[
226112,
226121
],
[
226373,
226382
],
[
230495,
230504
],
[
231726,
231735
],
[
233407,
233416
],
[
234199,
234208
],
[
235812,
235821
],
[
236069,
236078
],
[
237773,
237782
],
[
238069,
238078
],
[
239691,
239700
],
[
241284,
241293
],
[
241579,
241588
],
[
243325,
243334
],
[
244616,
244625
],
[
244972,
244981
],
[
248548,
248557
],
[
250502,
250511
],
[
252505,
252514
],
[
254113,
254122
],
[
254380,
254389
],
[
256015,
256024
],
[
260188,
260197
],
[
262477,
262486
],
[
263731,
263740
],
[
266941,
266950
],
[
267194,
267203
],
[
268906,
268915
],
[
269164,
269173
],
[
269947,
269956
],
[
270239,
270248
],
[
270510,
270519
],
[
270797,
270806
],
[
272006,
272015
],
[
272271,
272280
],
[
273044,
273053
],
[
274947,
274956
],
[
277825,
277834
],
[
278580,
278589
],
[
279224,
279233
],
[
279492,
279501
],
[
279763,
279772
],
[
280055,
280064
],
[
280350,
280359
],
[
281490,
281499
],
[
282620,
282629
],
[
284921,
284930
],
[
286811,
286820
],
[
288286,
288295
],
[
289055,
289064
],
[
290157,
290166
],
[
290413,
290422
],
[
291483,
291492
],
[
291754,
291763
],
[
293234,
293243
],
[
293627,
293636
],
[
295095,
295104
],
[
296169,
296178
],
[
297268,
297277
],
[
297619,
297628
],
[
298413,
298422
],
[
298691,
298700
],
[
300208,
300217
],
[
302976,
302985
],
[
305634,
305643
],
[
306033,
306042
],
[
307409,
307418
],
[
308089,
308098
],
[
308766,
308775
],
[
310846,
310855
],
[
313199,
313208
],
[
313452,
313461
],
[
315431,
315440
],
[
319230,
319239
],
[
319712,
319721
],
[
319993,
320002
],
[
320308,
320317
],
[
320605,
320614
],
[
322236,
322245
],
[
322930,
322939
],
[
324406,
324415
],
[
328006,
328015
],
[
328677,
328686
],
[
330066,
330075
],
[
330372,
330381
],
[
330678,
330687
],
[
330991,
331000
],
[
331291,
331300
],
[
332590,
332599
],
[
333972,
333981
],
[
337296,
337305
],
[
337560,
337569
],
[
338331,
338340
],
[
338618,
338627
],
[
339216,
339225
],
[
339776,
339785
],
[
340384,
340393
],
[
340654,
340663
],
[
341263,
341272
],
[
342283,
342292
],
[
342765,
342774
],
[
344170,
344179
],
[
344456,
344465
],
[
346151,
346160
],
[
346410,
346419
],
[
346688,
346697
],
[
347362,
347371
],
[
347629,
347638
],
[
348436,
348445
],
[
348714,
348723
],
[
349519,
349528
],
[
349780,
349789
],
[
353933,
353942
],
[
354195,
354204
],
[
354566,
354575
],
[
354845,
354854
],
[
355908,
355917
],
[
357010,
357019
],
[
357700,
357709
],
[
358376,
358385
],
[
358660,
358669
],
[
360431,
360440
],
[
360700,
360709
],
[
361413,
361422
],
[
361675,
361684
],
[
362046,
362055
],
[
362325,
362334
],
[
363348,
363357
],
[
364447,
364456
],
[
364722,
364731
],
[
365016,
365025
],
[
365760,
365769
],
[
366029,
366038
],
[
366308,
366317
],
[
366587,
366596
],
[
366866,
366875
],
[
367637,
367646
],
[
367903,
367912
],
[
368634,
368643
],
[
369799,
369808
],
[
370100,
370109
],
[
370374,
370383
],
[
370693,
370702
],
[
371446,
371455
],
[
372618,
372627
],
[
373969,
373978
],
[
377499,
377508
]
],
[
[
271,
286
],
[
15056,
15071
],
[
15786,
15801
],
[
22627,
22642
],
[
23693,
23708
],
[
24975,
24990
],
[
29207,
29222
],
[
31388,
31403
],
[
31778,
31793
],
[
32148,
32163
],
[
41998,
42013
],
[
48905,
48920
],
[
49673,
49688
],
[
50457,
50472
],
[
52684,
52699
],
[
53678,
53693
],
[
54136,
54151
],
[
54605,
54620
],
[
60473,
60488
],
[
67488,
67503
],
[
68265,
68280
],
[
69058,
69073
],
[
71490,
71505
],
[
72493,
72508
],
[
72960,
72975
],
[
73438,
73453
],
[
86130,
86145
],
[
86573,
86588
],
[
87371,
87386
],
[
88088,
88103
],
[
88508,
88523
],
[
88931,
88946
],
[
103564,
103579
],
[
105878,
105893
],
[
106593,
106608
],
[
107845,
107860
],
[
108284,
108299
],
[
108731,
108746
],
[
109208,
109223
],
[
110412,
110427
],
[
114229,
114244
],
[
114639,
114654
],
[
117885,
117900
],
[
122538,
122553
],
[
130619,
130634
],
[
131058,
131073
],
[
131825,
131840
],
[
133879,
133894
],
[
144565,
144580
],
[
145408,
145423
],
[
145896,
145911
],
[
147405,
147420
],
[
148173,
148188
],
[
148632,
148647
],
[
153941,
153956
],
[
154775,
154790
],
[
159021,
159036
],
[
159481,
159496
],
[
160215,
160230
],
[
161727,
161742
],
[
162127,
162142
],
[
167492,
167507
],
[
170286,
170301
],
[
182255,
182270
],
[
183109,
183124
],
[
184029,
184044
],
[
185921,
185936
],
[
191895,
191910
],
[
192368,
192383
],
[
192830,
192845
],
[
193320,
193335
],
[
193803,
193818
],
[
194298,
194313
],
[
195924,
195939
],
[
196397,
196412
],
[
197191,
197206
],
[
203193,
203208
],
[
203630,
203645
],
[
209794,
209809
],
[
210622,
210637
],
[
211686,
211701
],
[
217363,
217378
],
[
217862,
217877
],
[
223259,
223274
],
[
230781,
230796
],
[
231230,
231245
],
[
232001,
232016
],
[
232517,
232532
],
[
232975,
232990
],
[
234488,
234503
],
[
234942,
234957
],
[
235384,
235399
],
[
241898,
241913
],
[
246294,
246309
],
[
249401,
249416
],
[
257390,
257405
],
[
258251,
258266
],
[
259178,
259193
],
[
260691,
260706
],
[
261169,
261184
],
[
262780,
262795
],
[
263263,
263278
],
[
264024,
264039
],
[
264493,
264508
],
[
264949,
264964
],
[
271571,
271586
],
[
275225,
275240
],
[
275675,
275690
],
[
276111,
276126
],
[
276532,
276547
],
[
276955,
276970
],
[
277390,
277405
],
[
278159,
278174
],
[
278844,
278859
],
[
280630,
280645
],
[
281042,
281057
],
[
281774,
281789
],
[
282182,
282197
],
[
283266,
283281
],
[
285190,
285205
],
[
292018,
292033
],
[
292415,
292430
],
[
292808,
292823
],
[
294246,
294261
],
[
300956,
300971
],
[
303248,
303263
],
[
303660,
303675
],
[
304091,
304106
],
[
304867,
304882
],
[
307688,
307703
],
[
308361,
308376
],
[
309045,
309060
],
[
315735,
315750
],
[
316276,
316291
],
[
318223,
318238
],
[
322516,
322531
],
[
324681,
324696
],
[
325461,
325476
],
[
325851,
325866
],
[
326264,
326279
],
[
326612,
326627
],
[
326981,
326996
],
[
327324,
327339
],
[
329340,
329355
],
[
331878,
331893
],
[
333259,
333274
],
[
334592,
334607
],
[
334963,
334978
],
[
335640,
335655
],
[
336269,
336284
],
[
336979,
336994
],
[
338895,
338910
],
[
339468,
339483
],
[
340038,
340053
],
[
340938,
340953
],
[
341548,
341563
],
[
344716,
344731
],
[
356185,
356200
],
[
356613,
356628
],
[
357963,
357978
],
[
358956,
358971
],
[
363625,
363640
],
[
368904,
368919
],
[
371011,
371026
],
[
371755,
371770
],
[
373129,
373144
],
[
373543,
373558
],
[
375319,
375334
],
[
375692,
375707
],
[
376084,
376099
],
[
376458,
376473
],
[
376842,
376857
],
[
377758,
377773
]
],
[
[
288,
302
],
[
18850,
18864
],
[
25741,
25755
],
[
29566,
29580
],
[
30360,
30374
],
[
33764,
33778
],
[
35663,
35677
],
[
37921,
37935
],
[
43368,
43382
],
[
61870,
61884
],
[
76391,
76405
],
[
76804,
76818
],
[
77707,
77721
],
[
79622,
79636
],
[
82761,
82775
],
[
84963,
84977
],
[
89744,
89758
],
[
96895,
96909
],
[
97360,
97374
],
[
98392,
98406
],
[
102146,
102160
],
[
104641,
104655
],
[
113163,
113177
],
[
115394,
115408
],
[
118866,
118880
],
[
121210,
121224
],
[
123501,
123515
],
[
127176,
127190
],
[
137150,
137164
],
[
149990,
150004
],
[
156593,
156607
],
[
163828,
163842
],
[
171386,
171400
],
[
174537,
174551
],
[
189843,
189857
],
[
198136,
198150
],
[
200208,
200222
],
[
201987,
202001
],
[
208024,
208038
],
[
212676,
212690
],
[
228021,
228035
],
[
228489,
228503
],
[
229528,
229542
],
[
236790,
236804
],
[
247436,
247450
],
[
251385,
251399
],
[
265945,
265959
],
[
267966,
267980
],
[
274043,
274057
],
[
284130,
284144
],
[
285963,
285977
],
[
309816,
309830
],
[
311985,
311999
],
[
314348,
314362
],
[
317293,
317307
],
[
321390,
321404
],
[
323646,
323660
],
[
343416,
343430
],
[
345432,
345446
],
[
351698,
351712
],
[
352126,
352140
],
[
353047,
353061
],
[
359714,
359728
],
[
374605,
374619
]
],
[
[
304,
322
]
],
[
[
328,
352
]
],
[
[
354,
374
],
[
14053,
14073
],
[
19642,
19662
],
[
19965,
19985
],
[
21042,
21062
],
[
28888,
28908
],
[
45005,
45025
],
[
46513,
46533
],
[
48011,
48031
],
[
63534,
63554
],
[
65060,
65080
],
[
66576,
66596
],
[
87028,
87048
],
[
91849,
91869
],
[
96181,
96201
],
[
96535,
96555
],
[
99430,
99450
],
[
111132,
111152
],
[
111770,
111790
],
[
117541,
117561
],
[
124476,
124496
],
[
125205,
125225
],
[
128120,
128140
],
[
129783,
129803
],
[
131487,
131507
],
[
133165,
133185
],
[
138525,
138545
],
[
139482,
139502
],
[
140235,
140255
],
[
141509,
141529
],
[
142770,
142790
],
[
143666,
143686
],
[
145055,
145075
],
[
151335,
151355
],
[
152266,
152286
],
[
153019,
153039
],
[
154422,
154442
],
[
157846,
157866
],
[
164889,
164909
],
[
165275,
165295
],
[
165645,
165665
],
[
167137,
167157
],
[
169942,
169962
],
[
172704,
172724
],
[
176133,
176153
],
[
177055,
177075
],
[
177927,
177947
],
[
179168,
179188
],
[
180123,
180143
],
[
180971,
180991
],
[
182752,
182772
],
[
183613,
183633
],
[
194769,
194789
],
[
195177,
195197
],
[
204902,
204922
],
[
214853,
214873
],
[
215764,
215784
],
[
220836,
220856
],
[
221797,
221817
],
[
227307,
227327
],
[
227661,
227681
],
[
238327,
238347
],
[
239316,
239336
],
[
239962,
239982
],
[
240332,
240352
],
[
242412,
242432
],
[
243593,
243613
],
[
245950,
245970
],
[
252804,
252824
],
[
253762,
253782
],
[
254633,
254653
],
[
255597,
255617
],
[
256396,
256416
],
[
257896,
257916
],
[
258764,
258784
],
[
261645,
261665
],
[
262044,
262064
],
[
282891,
282911
],
[
287097,
287117
],
[
287912,
287932
],
[
289337,
289357
],
[
291144,
291164
],
[
293898,
293918
],
[
295366,
295386
],
[
296455,
296475
],
[
298995,
299015
],
[
299845,
299865
],
[
301435,
301455
],
[
301828,
301848
],
[
302638,
302658
],
[
304494,
304514
],
[
305258,
305278
],
[
306317,
306337
],
[
306659,
306679
],
[
336623,
336643
],
[
337959,
337979
],
[
350634,
350654
],
[
350988,
351008
],
[
351345,
351365
],
[
355126,
355146
],
[
362606,
362626
]
],
[
[
376,
390
]
],
[
[
392,
407
],
[
55054,
55069
],
[
73896,
73911
]
],
[
[
432,
440
]
],
[
[
442,
455
]
],
[
[
492,
500
],
[
740,
748
],
[
1005,
1013
],
[
1380,
1388
],
[
1730,
1738
],
[
1997,
2005
],
[
2243,
2251
],
[
2514,
2522
],
[
2779,
2787
],
[
3034,
3042
],
[
3321,
3329
],
[
3594,
3602
],
[
3968,
3976
],
[
4237,
4245
],
[
4544,
4552
],
[
4836,
4844
],
[
5123,
5131
],
[
5415,
5423
],
[
5727,
5735
],
[
6008,
6016
],
[
6312,
6320
],
[
6647,
6655
],
[
6922,
6930
],
[
7267,
7275
],
[
7546,
7554
],
[
7829,
7837
],
[
8177,
8185
],
[
8561,
8569
],
[
8858,
8866
],
[
9133,
9141
],
[
9422,
9430
],
[
9689,
9697
],
[
10007,
10015
],
[
10289,
10297
],
[
10630,
10638
],
[
11007,
11015
],
[
11344,
11352
],
[
11619,
11627
],
[
11939,
11947
],
[
12268,
12276
],
[
12576,
12584
],
[
14471,
14479
],
[
15496,
15504
],
[
16515,
16523
],
[
17120,
17128
],
[
18505,
18513
],
[
19264,
19272
],
[
20317,
20325
],
[
21404,
21412
],
[
23045,
23053
],
[
25387,
25395
],
[
26179,
26187
],
[
27087,
27095
],
[
28521,
28529
],
[
30015,
30023
],
[
30780,
30788
],
[
33458,
33466
],
[
34148,
34156
],
[
35206,
35214
],
[
36188,
36196
],
[
37492,
37500
],
[
38404,
38412
],
[
39555,
39563
],
[
40795,
40803
],
[
42869,
42877
],
[
43902,
43910
],
[
45396,
45404
],
[
46912,
46920
],
[
48400,
48408
],
[
51013,
51021
],
[
51832,
51840
],
[
56825,
56833
],
[
57994,
58002
],
[
59252,
59260
],
[
61353,
61361
],
[
62413,
62421
],
[
63925,
63933
],
[
65459,
65467
],
[
66965,
66973
],
[
69623,
69631
],
[
70460,
70468
],
[
75677,
75685
],
[
77291,
77299
],
[
78162,
78170
],
[
79193,
79201
],
[
80098,
80106
],
[
82326,
82334
],
[
83248,
83256
],
[
84525,
84533
],
[
85459,
85467
],
[
89374,
89382
],
[
90180,
90188
],
[
92262,
92270
],
[
93291,
93299
],
[
94460,
94468
],
[
95677,
95685
],
[
97910,
97918
],
[
98904,
98912
],
[
100106,
100114
],
[
101562,
101570
],
[
102763,
102771
],
[
104151,
104159
],
[
105119,
105127
],
[
107143,
107151
],
[
109704,
109712
],
[
112787,
112795
],
[
113621,
113629
],
[
115076,
115084
],
[
115802,
115810
],
[
117071,
117079
],
[
118416,
118424
],
[
119349,
119357
],
[
120681,
120689
],
[
121789,
121797
],
[
123067,
123075
],
[
124001,
124009
],
[
125618,
125626
],
[
126703,
126711
],
[
127660,
127668
],
[
129025,
129033
],
[
130173,
130181
],
[
134339,
134347
],
[
135429,
135437
],
[
136618,
136626
],
[
137735,
137743
],
[
138935,
138943
],
[
140667,
140675
],
[
141911,
141919
],
[
144057,
144065
],
[
146402,
146410
],
[
149530,
149538
],
[
150511,
150519
],
[
151737,
151745
],
[
153451,
153459
],
[
155291,
155299
],
[
156107,
156115
],
[
157074,
157082
],
[
158256,
158264
],
[
161285,
161293
],
[
162582,
162590
],
[
163375,
163383
],
[
164315,
164323
],
[
166081,
166089
],
[
168047,
168055
],
[
169394,
169402
],
[
170863,
170871
],
[
171928,
171936
],
[
173111,
173119
],
[
174011,
174019
],
[
175097,
175105
],
[
176525,
176533
],
[
178320,
178328
],
[
179574,
179582
],
[
181453,
181461
],
[
184542,
184550
],
[
185406,
185414
],
[
186459,
186467
],
[
187634,
187642
],
[
189296,
189304
],
[
190438,
190446
],
[
197693,
197701
],
[
198613,
198621
],
[
199729,
199737
],
[
200696,
200704
],
[
201508,
201516
],
[
202475,
202483
],
[
204110,
204118
],
[
205308,
205316
],
[
206508,
206516
],
[
207459,
207467
],
[
208623,
208631
],
[
212212,
212220
],
[
213167,
213175
],
[
215216,
215224
],
[
216440,
216448
],
[
218345,
218353
],
[
219141,
219149
],
[
221249,
221257
],
[
222473,
222481
],
[
224399,
224407
],
[
225574,
225582
],
[
226797,
226805
],
[
229036,
229044
],
[
230026,
230034
],
[
233775,
233783
],
[
236375,
236383
],
[
237236,
237244
],
[
238718,
238726
],
[
240767,
240775
],
[
242791,
242799
],
[
243999,
244007
],
[
245374,
245382
],
[
246885,
246893
],
[
247992,
248000
],
[
248869,
248877
],
[
249939,
249947
],
[
250845,
250853
],
[
251935,
251943
],
[
253208,
253216
],
[
255025,
255033
],
[
256861,
256869
],
[
259700,
259708
],
[
265480,
265488
],
[
266465,
266473
],
[
267510,
267518
],
[
268470,
268478
],
[
269467,
269475
],
[
271139,
271147
],
[
272562,
272570
],
[
273591,
273599
],
[
274541,
274549
],
[
283741,
283749
],
[
284569,
284577
],
[
285632,
285640
],
[
286391,
286399
],
[
287482,
287490
],
[
288620,
288628
],
[
289753,
289761
],
[
290751,
290759
],
[
294701,
294709
],
[
295753,
295761
],
[
296836,
296844
],
[
297991,
297999
],
[
299381,
299389
],
[
300521,
300529
],
[
302248,
302256
],
[
307063,
307071
],
[
309490,
309498
],
[
310238,
310246
],
[
311410,
311418
],
[
312603,
312611
],
[
313782,
313790
],
[
314953,
314961
],
[
316851,
316859
],
[
317852,
317860
],
[
318780,
318788
],
[
320968,
320976
],
[
321850,
321858
],
[
323287,
323295
],
[
324104,
324112
],
[
325167,
325175
],
[
327723,
327731
],
[
328348,
328356
],
[
329043,
329051
],
[
329752,
329760
],
[
331599,
331607
],
[
332251,
332259
],
[
332953,
332961
],
[
333660,
333668
],
[
334305,
334313
],
[
335371,
335379
],
[
335999,
336007
],
[
341905,
341913
],
[
343063,
343071
],
[
343821,
343829
],
[
345127,
345135
],
[
345847,
345855
],
[
347012,
347020
],
[
347966,
347974
],
[
349061,
349069
],
[
350204,
350212
],
[
352638,
352646
],
[
353519,
353527
],
[
355544,
355552
],
[
357385,
357393
],
[
359413,
359421
],
[
360113,
360121
],
[
361039,
361047
],
[
363024,
363032
],
[
364081,
364089
],
[
365356,
365364
],
[
367198,
367206
],
[
368220,
368228
],
[
369414,
369422
],
[
372270,
372278
],
[
374285,
374293
],
[
375028,
375036
],
[
377239,
377247
],
[
378121,
378129
]
],
[
[
502,
513
],
[
378334,
378345
],
[
378246,
378257
],
[
378456,
378467
],
[
378391,
378402
],
[
378591,
378602
],
[
378500,
378511
],
[
378772,
378783
],
[
378665,
378676
],
[
378966,
378977
],
[
378865,
378876
],
[
379156,
379167
],
[
379055,
379066
],
[
379342,
379353
],
[
379245,
379256
],
[
379516,
379527
],
[
379419,
379430
],
[
379709,
379720
],
[
379599,
379610
],
[
379883,
379894
],
[
379792,
379803
],
[
380039,
380050
],
[
379957,
379968
],
[
380188,
380199
],
[
380103,
380114
],
[
380334,
380345
],
[
380252,
380263
],
[
380470,
380481
],
[
380398,
380409
],
[
380678,
380689
],
[
380524,
380535
],
[
380936,
380947
],
[
380800,
380811
],
[
381244,
381255
],
[
381049,
381060
],
[
381580,
381591
],
[
381408,
381419
],
[
381902,
381913
],
[
381727,
381738
],
[
382235,
382246
],
[
382052,
382063
],
[
382559,
382570
],
[
382385,
382396
],
[
382863,
382874
],
[
382709,
382720
],
[
383147,
383158
],
[
382992,
383003
],
[
383434,
383445
],
[
383276,
383287
],
[
383711,
383722
],
[
383563,
383574
],
[
384044,
384055
],
[
383840,
383851
],
[
384398,
384409
],
[
384217,
384228
],
[
384738,
384749
],
[
384554,
384565
],
[
385089,
385100
],
[
384897,
384908
],
[
385431,
385442
],
[
385248,
385259
],
[
385753,
385764
],
[
385590,
385601
],
[
386055,
386066
],
[
385891,
385902
],
[
386360,
386371
],
[
386193,
386204
],
[
386655,
386666
],
[
386498,
386509
],
[
386930,
386941
],
[
386793,
386804
],
[
387196,
387207
],
[
387050,
387061
],
[
387444,
387455
],
[
387316,
387327
],
[
387689,
387700
],
[
387553,
387564
],
[
387942,
387953
],
[
387802,
387813
],
[
388199,
388210
],
[
388057,
388068
],
[
388445,
388456
],
[
388315,
388326
],
[
388663,
388674
],
[
388542,
388553
],
[
388877,
388888
],
[
388760,
388771
],
[
389095,
389106
],
[
388974,
388985
],
[
389315,
389326
],
[
389192,
389203
],
[
389536,
389547
],
[
389412,
389423
],
[
389738,
389749
],
[
389633,
389644
],
[
390008,
390019
],
[
389823,
389834
],
[
390338,
390349
],
[
390163,
390174
],
[
390656,
390667
],
[
390493,
390504
],
[
391013,
391024
],
[
390794,
390805
],
[
391387,
391398
],
[
391196,
391207
],
[
391703,
391714
],
[
391541,
391552
],
[
392024,
392035
],
[
391852,
391863
],
[
392324,
392335
],
[
392167,
392178
],
[
392614,
392625
],
[
392467,
392478
],
[
392879,
392890
],
[
392734,
392745
],
[
393145,
393156
],
[
392999,
393010
],
[
393416,
393427
],
[
393265,
393276
],
[
393664,
393675
],
[
393536,
393547
],
[
393874,
393885
],
[
393765,
393776
],
[
394054,
394065
],
[
393961,
393972
],
[
394221,
394232
],
[
394126,
394137
],
[
394373,
394384
],
[
394293,
394304
],
[
394594,
394605
],
[
394431,
394442
],
[
394878,
394889
],
[
394732,
394743
],
[
395191,
395202
],
[
395001,
395012
],
[
395511,
395522
],
[
395347,
395358
],
[
395775,
395786
],
[
395639,
395650
],
[
396049,
396060
],
[
395893,
395904
],
[
396327,
396338
],
[
396185,
396196
],
[
396594,
396605
],
[
396450,
396461
],
[
396858,
396869
],
[
396717,
396728
],
[
397123,
397134
],
[
396981,
396992
],
[
397435,
397446
],
[
397246,
397257
],
[
397771,
397782
],
[
397596,
397607
],
[
398097,
398108
],
[
397922,
397933
],
[
398424,
398435
],
[
398248,
398259
],
[
398744,
398755
],
[
398575,
398586
],
[
399051,
399062
],
[
398884,
398895
],
[
399350,
399361
],
[
399191,
399202
],
[
399638,
399649
],
[
399490,
399501
],
[
399936,
399947
],
[
399770,
399781
],
[
400244,
400255
],
[
400078,
400089
],
[
400548,
400559
],
[
400386,
400397
],
[
400841,
400852
],
[
400691,
400702
],
[
401123,
401134
],
[
400972,
400983
],
[
401412,
401423
],
[
401254,
401265
],
[
401683,
401694
],
[
401543,
401554
],
[
401945,
401956
],
[
401806,
401817
],
[
402216,
402227
],
[
402068,
402079
],
[
402527,
402538
],
[
402340,
402351
],
[
402877,
402888
],
[
402675,
402686
],
[
403239,
403250
],
[
403054,
403065
],
[
403581,
403592
],
[
403395,
403406
],
[
403914,
403925
],
[
403738,
403749
],
[
404237,
404248
],
[
404062,
404073
],
[
404560,
404571
],
[
404385,
404396
],
[
404889,
404900
],
[
404708,
404719
],
[
405241,
405252
],
[
405040,
405051
],
[
405561,
405572
],
[
405405,
405416
],
[
405859,
405870
],
[
405695,
405706
],
[
406153,
406164
],
[
405993,
406004
],
[
406452,
406463
],
[
406287,
406298
],
[
406752,
406763
],
[
406586,
406597
],
[
407042,
407053
],
[
406886,
406897
],
[
407335,
407346
],
[
407176,
407187
],
[
407632,
407643
],
[
407469,
407480
],
[
407938,
407949
],
[
407766,
407777
],
[
408214,
408225
],
[
408072,
408083
],
[
408494,
408505
],
[
408335,
408346
],
[
408794,
408805
],
[
408635,
408646
],
[
409084,
409095
],
[
408935,
408946
],
[
409363,
409374
],
[
409214,
409225
],
[
409702,
409713
],
[
409493,
409504
],
[
410044,
410055
],
[
409876,
409887
],
[
410364,
410375
],
[
410189,
410200
],
[
410691,
410702
],
[
410509,
410520
],
[
410989,
411000
],
[
410836,
410847
],
[
411289,
411300
],
[
411120,
411131
],
[
411626,
411637
],
[
411443,
411454
],
[
411963,
411974
],
[
411780,
411791
],
[
412305,
412316
],
[
412117,
412128
],
[
412641,
412652
],
[
412463,
412474
],
[
412965,
412976
],
[
412799,
412810
],
[
413273,
413284
],
[
413111,
413122
],
[
413562,
413573
],
[
413406,
413417
],
[
413855,
413866
],
[
413695,
413706
],
[
414150,
414161
],
[
413988,
413999
],
[
414437,
414448
],
[
414283,
414294
],
[
414708,
414719
],
[
414570,
414581
],
[
414963,
414974
],
[
414824,
414835
],
[
415220,
415231
],
[
415079,
415090
],
[
415460,
415471
],
[
415336,
415347
],
[
415771,
415782
],
[
415572,
415583
],
[
416148,
416159
],
[
415932,
415943
],
[
416538,
416549
],
[
416339,
416350
],
[
416897,
416908
],
[
416708,
416719
],
[
417250,
417261
],
[
417058,
417069
],
[
417604,
417615
],
[
417417,
417428
],
[
417953,
417964
],
[
417765,
417776
],
[
418301,
418312
],
[
418114,
418125
],
[
418641,
418652
],
[
418462,
418473
],
[
418958,
418969
],
[
418789,
418800
],
[
419279,
419290
],
[
419106,
419117
],
[
419605,
419616
],
[
419427,
419438
],
[
419922,
419933
],
[
419753,
419764
],
[
420245,
420256
],
[
420070,
420081
],
[
420562,
420573
],
[
420393,
420404
],
[
420866,
420877
],
[
420710,
420721
],
[
421144,
421155
],
[
420994,
421005
],
[
421409,
421420
],
[
421269,
421280
],
[
421668,
421679
],
[
421520,
421531
],
[
421923,
421934
],
[
421792,
421803
],
[
422156,
422167
],
[
422030,
422041
],
[
422394,
422405
],
[
422263,
422274
],
[
422628,
422639
],
[
422501,
422512
],
[
422866,
422877
],
[
422735,
422746
],
[
423105,
423116
],
[
422973,
422984
],
[
423341,
423352
],
[
423212,
423223
],
[
423568,
423579
],
[
423448,
423459
],
[
423811,
423822
],
[
423675,
423686
],
[
424051,
424062
],
[
423918,
423929
],
[
424287,
424298
],
[
424158,
424169
],
[
424513,
424524
],
[
424394,
424405
],
[
424752,
424763
],
[
424620,
424631
],
[
424974,
424985
],
[
424859,
424870
],
[
425170,
425181
],
[
425068,
425079
],
[
425338,
425349
],
[
425250,
425261
],
[
425516,
425527
],
[
425400,
425411
],
[
425732,
425743
],
[
425613,
425624
],
[
425952,
425963
],
[
425829,
425840
],
[
426162,
426173
],
[
426049,
426060
],
[
426400,
426411
],
[
426259,
426270
],
[
426626,
426637
],
[
426512,
426523
],
[
426839,
426850
],
[
426721,
426732
],
[
427051,
427062
],
[
426934,
426945
],
[
427266,
427277
],
[
427146,
427157
],
[
427466,
427477
],
[
427361,
427372
],
[
427647,
427658
],
[
427543,
427554
],
[
427827,
427838
],
[
427724,
427735
],
[
427989,
428000
],
[
427904,
427915
],
[
428266,
428277
],
[
428050,
428061
],
[
428652,
428663
],
[
428442,
428453
],
[
429009,
429020
],
[
428825,
428836
],
[
429333,
429344
],
[
429152,
429163
],
[
429627,
429638
],
[
429476,
429487
],
[
429851,
429862
],
[
429735,
429746
],
[
430055,
430066
],
[
429923,
429934
],
[
430283,
430294
],
[
430165,
430176
],
[
430481,
430492
],
[
430378,
430389
],
[
430639,
430650
],
[
430556,
430567
],
[
430755,
430766
],
[
430693,
430704
],
[
430864,
430875
],
[
430798,
430809
],
[
430977,
430988
],
[
430907,
430918
],
[
431089,
431100
],
[
431020,
431031
],
[
431212,
431223
],
[
431132,
431143
],
[
431317,
431328
],
[
431255,
431266
],
[
431429,
431440
],
[
431360,
431371
],
[
431535,
431546
],
[
431477,
431488
],
[
431648,
431659
],
[
431577,
431588
],
[
431768,
431779
],
[
431704,
431715
],
[
431876,
431887
],
[
431811,
431822
],
[
431970,
431981
],
[
431919,
431930
],
[
432061,
432072
],
[
432009,
432020
],
[
432146,
432157
],
[
432095,
432106
],
[
432231,
432242
],
[
432180,
432191
],
[
432311,
432322
],
[
432265,
432276
],
[
432401,
432412
],
[
432345,
432356
],
[
432485,
432496
],
[
432435,
432446
],
[
432566,
432577
],
[
432519,
432530
],
[
432696,
432707
],
[
432600,
432611
],
[
432860,
432871
],
[
432774,
432785
],
[
433002,
433013
],
[
432927,
432938
],
[
433200,
433211
],
[
433052,
433063
],
[
433456,
433467
],
[
433318,
433329
],
[
433700,
433711
],
[
433574,
433585
],
[
433912,
433923
],
[
433802,
433813
],
[
434113,
434124
],
[
433997,
434008
],
[
434284,
434295
],
[
434198,
434209
],
[
434438,
434449
],
[
434345,
434356
],
[
434568,
434579
],
[
434499,
434510
],
[
434717,
434728
],
[
434621,
434632
],
[
434912,
434923
],
[
434782,
434793
],
[
435135,
435146
],
[
435022,
435033
],
[
435342,
435353
],
[
435224,
435235
],
[
435525,
435536
],
[
435431,
435442
],
[
435695,
435706
],
[
435598,
435609
],
[
435849,
435860
],
[
435768,
435779
],
[
435971,
435982
],
[
435908,
435919
],
[
436076,
436087
],
[
436015,
436026
],
[
436190,
436201
],
[
436120,
436131
],
[
436307,
436318
],
[
436234,
436245
],
[
436418,
436429
],
[
436351,
436362
],
[
436514,
436525
],
[
436462,
436473
]
]
] |
from . import models
import datetime
from discord import utils, TextChannel
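# IDs are Discord snowflakes derived from the current time via
# discord.utils.time_snowflake, so they are unique and sortable by creation time.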
def generate_id():
return utils.time_snowflake(datetime.datetime.now())
async def add_permanent_role(user_id: int, role_id: int):
await add_dbmember_if_not_exist(user_id)
if not await models.PermanentRole.query.where((models.PermanentRole.user_id == user_id) & (
models.PermanentRole.role_id == role_id)).gino.first():
return await models.PermanentRole.create(user_id=user_id, role_id=role_id)
async def remove_permanent_role(user_id: int, role_id: int):
permanent_role = await models.PermanentRole.query.where((models.PermanentRole.user_id == user_id) & (
models.PermanentRole.role_id == role_id)).gino.first()
if permanent_role:
await permanent_role.delete()
return permanent_role
async def get_permanent_roles(user_id: int):
db_member = await get_dbmember(user_id)
if db_member:
return await models.Role.query.where((models.Role.id == models.PermanentRole.role_id) & (models.PermanentRole.user_id == db_member.id)).gino.all()
async def add_staff(user_id: int, position: str):
await add_dbmember_if_not_exist(user_id)
staff = await get_staff(user_id) or await get_helper(user_id)
if staff:
await staff.update(position=position).apply()
else:
await models.Staff.create(id=user_id, position=position)
async def add_helper(user_id: int, position: str, console: str = None):
await add_dbmember_if_not_exist(user_id)
if staff := await get_staff(user_id):
await staff.update(console=console).apply()
else:
await models.Staff.create(id=user_id, position=position, console=console)
async def remove_staff(user_id: int):
staff = await get_staff(user_id)
if staff:
if staff.console:
await staff.update(position="Helper").apply()
else:
await staff.delete()
async def remove_helper(user_id: int):
helper = await get_helper(user_id)
if helper:
if helper.position != "Helper":
await helper.update(console=None).apply()
else:
await helper.delete()
async def get_staff_all():
return await models.Staff.query.where(models.Staff.position != 'Helper').gino.all()
async def get_staff(user_id: int):
return await models.Staff.query.where(
(models.Staff.position != 'Helper') & (models.Staff.id == user_id)).gino.first()
async def get_helpers():
return await models.Staff.query.where(models.Staff.console.isnot(None)).gino.all()
async def get_helper(user_id: int):
return await models.Staff.query.where(models.Staff.id == user_id).gino.first()
async def add_warn(user_id: int, issuer_id: int, reason: str):
await add_dbmember_if_not_exist(user_id)
await add_dbmember_if_not_exist(issuer_id)
await models.Warn.create(id=generate_id(), user=user_id, issuer=issuer_id, reason=reason)
async def copy_warn(user_id: int, warn: models.Warn):
await add_dbmember_if_not_exist(user_id)
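    # Shift the copied warn's snowflake forward by 1 ms (re-rolling below on
    # collision) so the copy gets a unique ID that preserves ordering.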
warn.id = utils.time_snowflake(utils.snowflake_time(warn.id) + datetime.timedelta(milliseconds=1))
while await get_warn(warn.id):
warn.id = utils.time_snowflake(utils.snowflake_time(warn.id) + datetime.timedelta(milliseconds=1))
warn.user = user_id
await warn.create()
async def get_warn(warn_id: int):
return await models.Warn.get(warn_id)
async def get_warns(user_id: int):
return await models.Warn.query.where(models.Warn.user == user_id).gino.all()
async def remove_warn_id(user_id: int, index: int):
warn = await models.Warn.query.where(models.Warn.user == user_id).offset(index - 1).gino.first()
    if warn:
        await warn.delete()
async def remove_warns(user_id: int):
n_warns = await (models.db.select([models.db.func.count()]).where(models.Warn.user == user_id).gino.scalar())
if n_warns:
await models.Warn.delete.where(models.Warn.user == user_id).gino.status()
return n_warns
async def add_timed_restriction(user_id: int, end_date: datetime.datetime, type: str):
await add_dbmember_if_not_exist(user_id)
await models.TimedRestriction.create(id=generate_id(), user=user_id, type=type,
end_date=end_date)
async def get_time_restrictions_by_user(user_id: int):
return await models.TimedRestriction.query.where(models.TimedRestriction.user == user_id).gino.all()
async def get_time_restrictions_by_user_type(user_id: int, type: str):
return await models.TimedRestriction.query.where((models.TimedRestriction.user == user_id) & (
models.TimedRestriction.type == type)).gino.first()
async def get_time_restrictions_by_type(type: str):
return await models.TimedRestriction.query.where(models.TimedRestriction.type == type).gino.all()
async def remove_timed_restriction(user_id: int, type: str):
time_restriction = await get_time_restrictions_by_user_type(user_id, type)
if time_restriction:
await time_restriction.delete()
async def set_time_restriction_alert(user_id: int, type: str):
time_restriction = await get_time_restrictions_by_user_type(user_id, type)
if time_restriction:
await time_restriction.update(alerted=True).apply()
async def add_timed_role(user_id: int, role_id: int, expiring_date: datetime.datetime):
await add_dbmember_if_not_exist(user_id)
entry = await get_time_role_by_user_type(user_id, role_id)
if not entry:
return await models.TimedRole.create(id=generate_id(), user_id=user_id, role_id=role_id, expiring_date=expiring_date)
await entry.update(expiring_date=expiring_date).apply()
return entry
async def remove_timed_role(user_id: int, role_id: int):
timed_role = await get_time_role_by_user_type(user_id, role_id)
if timed_role:
await timed_role.delete()
async def get_time_role_by_user_type(user_id: int, role_id: int):
return await models.TimedRole.query.where(
(models.TimedRole.user_id == user_id) & (models.TimedRole.role_id == role_id)).gino.first()
async def get_timed_roles():
return await models.TimedRole.query.gino.all()
async def add_flag(name: str):
await models.Flag.create(name=name)
async def get_flag(name: str):
if flag := await models.Flag.get(name):
return flag.value
return None
async def remove_flag(name: str):
    # get_flag returns the stored value rather than the row, so fetch the model directly
    flag = await models.Flag.get(name)
if flag:
await flag.delete()
async def set_flag(name: str, value: bool):
    # get_flag returns the stored value rather than the row, so fetch the model directly
    flag = await models.Flag.get(name)
if flag:
await flag.update(value=value).apply()
async def add_softban(user_id: int, issuer_id: int, reason: str):
await add_dbmember_if_not_exist(user_id)
await models.Softban.create(id=generate_id(), user=user_id, issuer=issuer_id, reason=reason)
async def remove_softban(user_id: int):
softban = await get_softban(user_id)
if softban:
await softban.delete()
async def add_dbmember(user_id: int):
return await models.Member.create(id=user_id)
async def add_dbmember_if_not_exist(user_id: int):
db_member = await get_dbmember(user_id)
if not db_member:
db_member = await add_dbmember(user_id)
return db_member
async def get_dbmember(user_id: int):
return await models.Member.get(user_id)
async def add_dbchannel(channel_id: int, name: str):
return await models.Channel.create(id=channel_id, name=name)
async def get_dbchannel(channel_id: int):
return await models.Channel.get(channel_id)
async def add_dbrole(role_id: int, name: str):
return await models.Role.create(id=role_id, name=name)
async def get_dbrole(role_id: int):
return await models.Role.get(role_id)
async def get_softban(user_id: int):
return await models.Softban.query.where(models.Softban.user == user_id).gino.first()
async def add_watch(user_id: int):
db_member = await add_dbmember_if_not_exist(user_id)
await db_member.update(watched=True).apply()
async def remove_watch(user_id: int):
db_member = await get_dbmember(user_id)
if db_member:
await db_member.update(watched=False).apply()
async def is_watched(user_id: int):
db_member = await get_dbmember(user_id)
return db_member.watched if db_member else False
async def add_nofilter(channel: TextChannel):
db_channel = await get_dbchannel(channel.id)
if not db_channel:
db_channel = await add_dbchannel(channel.id, channel.name)
await db_channel.update(nofilter=True).apply()
async def remove_nofilter(channel: TextChannel):
db_channel = await get_dbchannel(channel.id)
if db_channel:
        await db_channel.update(nofilter=False).apply()
async def check_nofilter(channel: TextChannel):
channel = await models.Channel.get(channel.id)
return channel.nofilter if channel else False
async def add_friendcode_3ds(user_id: int, fc: int):
await add_dbmember_if_not_exist(user_id)
if fcs := await get_friendcode(user_id):
await fcs.update(fc_3ds=fc).apply()
return
await models.FriendCode.create(id=user_id, fc_3ds=fc)
async def add_friendcode_switch(user_id: int, fc: int):
await add_dbmember_if_not_exist(user_id)
if fcs := await get_friendcode(user_id):
await fcs.update(fc_switch=fc).apply()
return
await models.FriendCode.create(id=user_id, fc_switch=fc)
async def get_friendcode(user_id: int):
return await models.FriendCode.get(user_id)
async def delete_friendcode_3ds(user_id: int):
friendcodes = await get_friendcode(user_id)
if friendcodes:
await friendcodes.update(fc_3ds=None).apply()
if friendcodes.fc_3ds is None and friendcodes.fc_switch is None:
await friendcodes.delete()
async def delete_friendcode_switch(user_id: int):
friendcodes = await get_friendcode(user_id)
if friendcodes:
await friendcodes.update(fc_switch=None).apply()
if friendcodes.fc_3ds is None and friendcodes.fc_switch is None:
await friendcodes.delete()
async def add_rule(number: int, description: str):
rule = await get_rule(number)
if not rule:
await models.Rule.create(id=number, description=description)
async def edit_rule(number: int, description: str):
rule = await get_rule(number)
if rule:
await rule.update(description=description).apply()
async def delete_rule(number: int):
rule = await get_rule(number)
if rule:
await rule.delete()
async def get_rules():
return await models.Rule.query.order_by(models.Rule.id).gino.all()
async def get_rule(number: int):
return await models.Rule.get(number)
async def add_reminder(date: datetime.datetime, author: int, reminder: str):
await add_dbmember_if_not_exist(author)
await models.RemindMeEntry.create(id=generate_id(), date=date, author=author, reminder=reminder)
async def get_reminders() -> list[models.RemindMeEntry]:
return await models.RemindMeEntry.query.order_by(models.RemindMeEntry.date).gino.all()
async def remove_reminder(reminder_id: int):
db_reminder = await models.RemindMeEntry.get(reminder_id)
await db_reminder.delete()
async def create_tag(title: str, content: str, author: int):
await add_dbmember_if_not_exist(author)
await models.Tag.create(id=generate_id(), title=title, content=content, author=author)
async def get_tag(title: str) -> models.Tag:
return await models.Tag.query.where(models.Tag.title == title).gino.first()
async def get_tags() -> list[models.Tag]:
return await models.Tag.query.order_by(models.Tag.id).gino.all()
async def search_tags(query: str) -> list[models.Tag]:
return await models.Tag.query.where(models.Tag.title.ilike(f"%{query}%")).limit(10).gino.all()
async def delete_tag(title: str):
db_tag = await get_tag(title)
await db_tag.delete()
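# Usage sketch (illustrative, assuming the Gino database is initialized):
#   await add_warn(user_id=123, issuer_id=456, reason="spam")
#   warns = await get_warns(123)       # -> list of models.Warn
#   removed = await remove_warns(123)  # -> number of warns removed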
| [
[
[
14,
20
],
[
276,
282
],
[
310,
316
],
[
367,
373
],
[
444,
450
],
[
596,
602
],
[
630,
636
],
[
687,
693
],
[
963,
969
],
[
988,
994
],
[
1006,
1012
],
[
1039,
1045
],
[
1352,
1358
],
[
1640,
1646
],
[
2213,
2219
],
[
2238,
2244
],
[
2338,
2344
],
[
2373,
2379
],
[
2411,
2417
],
[
2497,
2503
],
[
2522,
2528
],
[
2622,
2628
],
[
2647,
2653
],
[
2855,
2861
],
[
2981,
2987
],
[
3386,
3392
],
[
3465,
3471
],
[
3489,
3495
],
[
3600,
3606
],
[
3624,
3630
],
[
3769,
3775
],
[
3787,
3793
],
[
3818,
3824
],
[
3892,
3898
],
[
3917,
3923
],
[
4123,
4129
],
[
4331,
4337
],
[
4367,
4373
],
[
4509,
4515
],
[
4546,
4552
],
[
4603,
4609
],
[
4726,
4732
],
[
4762,
4768
],
[
5484,
5490
],
[
5931,
5937
],
[
5970,
5976
],
[
6010,
6016
],
[
6109,
6115
],
[
6186,
6192
],
[
6270,
6276
],
[
6705,
6711
],
[
6979,
6985
],
[
7257,
7263
],
[
7356,
7362
],
[
7465,
7471
],
[
7562,
7568
],
[
7659,
7665
],
[
7740,
7746
],
[
7767,
7773
],
[
8728,
8734
],
[
9023,
9029
],
[
9291,
9297
],
[
9401,
9407
],
[
10122,
10128
],
[
10492,
10498
],
[
10519,
10525
],
[
10598,
10604
],
[
10755,
10761
],
[
10882,
10888
],
[
10922,
10928
],
[
10958,
10964
],
[
11067,
11073
],
[
11253,
11259
],
[
11369,
11375
],
[
11398,
11404
],
[
11421,
11427
],
[
11492,
11498
],
[
11522,
11528
],
[
11548,
11554
],
[
11618,
11624
],
[
11648,
11654
],
[
11671,
11677
]
],
[
[
28,
36
],
[
129,
137
],
[
3107,
3115
],
[
3249,
3257
],
[
4037,
4045
],
[
5317,
5325
],
[
10653,
10661
]
],
[
[
57,
62
],
[
108,
113
],
[
3054,
3059
],
[
3075,
3080
],
[
3196,
3201
],
[
3217,
3222
]
],
[
[
64,
75
],
[
8280,
8291
],
[
8521,
8532
],
[
8694,
8705
]
],
[
[
82,
93
],
[
2877,
2888
],
[
4157,
4168
],
[
5511,
5522
],
[
6730,
6741
],
[
10786,
10797
],
[
11274,
11285
]
],
[
[
156,
505
]
],
[
[
508,
832
]
],
[
[
835,
1096
]
],
[
[
1099,
1402
]
],
[
[
1405,
1707
]
],
[
[
1710,
1929
]
],
[
[
1932,
2166
]
],
[
[
2169,
2283
]
],
[
[
2286,
2452
],
[
1212,
1221
],
[
1544,
1553
],
[
1766,
1775
]
],
[
[
2455,
2566
]
],
[
[
2569,
2687
],
[
1240,
1250
],
[
1990,
2000
]
],
[
[
2690,
2938
]
],
[
[
2941,
3332
]
],
[
[
3335,
3410
],
[
3159,
3167
]
],
[
[
3413,
3528
]
],
[
[
3531,
3707
]
],
[
[
3710,
3978
]
],
[
[
3981,
4256
]
],
[
[
4259,
4418
]
],
[
[
4421,
4654
],
[
4903,
4937
],
[
5112,
5146
]
],
[
[
4657,
4810
]
],
[
[
4813,
5017
]
],
[
[
5020,
5246
]
],
[
[
5249,
5665
]
],
[
[
5668,
5845
]
],
[
[
5848,
6060
],
[
5400,
5426
],
[
5748,
5774
]
],
[
[
6063,
6142
]
],
[
[
6145,
6215
]
],
[
[
6218,
6334
],
[
6388,
6396
],
[
6507,
6515
]
],
[
[
6337,
6443
]
],
[
[
6446,
6581
]
],
[
[
6584,
6791
]
],
[
[
6794,
6921
]
],
[
[
6924,
7011
],
[
7157,
7169
]
],
[
[
7014,
7199
],
[
224,
249
],
[
1159,
1184
],
[
1487,
1512
],
[
2763,
2788
],
[
2808,
2833
],
[
3005,
3030
],
[
4078,
4103
],
[
5347,
5372
],
[
6660,
6685
],
[
7871,
7896
],
[
8874,
8899
],
[
9139,
9164
],
[
10711,
10736
],
[
11209,
11234
]
],
[
[
7202,
7283
],
[
902,
914
],
[
7087,
7099
],
[
8017,
8029
],
[
8171,
8183
]
],
[
[
7286,
7403
],
[
8393,
8406
]
],
[
[
7406,
7495
],
[
8317,
8330
],
[
8558,
8571
]
],
[
[
7498,
7603
]
],
[
[
7606,
7683
]
],
[
[
7686,
7811
],
[
6854,
6865
]
],
[
[
7814,
7954
]
],
[
[
7957,
8110
]
],
[
[
8113,
8245
]
],
[
[
8248,
8483
]
],
[
[
8486,
8657
]
],
[
[
8660,
8808
]
],
[
[
8811,
9070
]
],
[
[
9073,
9341
]
],
[
[
9344,
9431
],
[
8929,
8943
],
[
9194,
9208
],
[
9505,
9519
],
[
9791,
9805
]
],
[
[
9434,
9714
]
],
[
[
9717,
10003
]
],
[
[
10006,
10176
]
],
[
[
10179,
10336
]
],
[
[
10339,
10449
]
],
[
[
10452,
10545
]
],
[
[
10548,
10621
],
[
10074,
10082
],
[
10248,
10256
],
[
10392,
10400
]
],
[
[
10624,
10845
]
],
[
[
10848,
10995
]
],
[
[
10998,
11135
]
],
[
[
11138,
11333
]
],
[
[
11336,
11460
],
[
11785,
11792
]
],
[
[
11463,
11573
]
],
[
[
11576,
11729
]
],
[
[
11732,
11825
]
]
] |
import torch
import torch.nn as nn
from .base import BaseDetector
from .test_mixins import RPNTestMixin, BBoxTestMixin, MaskTestMixin
from .. import builder
from ..registry import DETECTORS
from mmdet.core import bbox2roi, bbox2result, build_assigner, build_sampler
@DETECTORS.register_module
class TwoStageDetector(BaseDetector, RPNTestMixin, BBoxTestMixin,
MaskTestMixin):
def __init__(self,
backbone,
neck=None,
shared_head=None,
rpn_head=None,
bbox_roi_extractor=None,
bbox_head=None,
mask_roi_extractor=None,
mask_head=None,
train_cfg=None,
test_cfg=None,
pretrained=None):
super(TwoStageDetector, self).__init__()
self.backbone = builder.build_backbone(backbone)
if neck is not None:
self.neck = builder.build_neck(neck)
if shared_head is not None:
self.shared_head = builder.build_shared_head(shared_head)
if rpn_head is not None:
self.rpn_head = builder.build_head(rpn_head)
if bbox_head is not None:
self.bbox_roi_extractor = builder.build_roi_extractor(
bbox_roi_extractor)
self.bbox_head = builder.build_head(bbox_head)
if mask_head is not None:
if mask_roi_extractor is not None:
self.mask_roi_extractor = builder.build_roi_extractor(
mask_roi_extractor)
self.share_roi_extractor = False
else:
self.share_roi_extractor = True
self.mask_roi_extractor = self.bbox_roi_extractor
self.mask_head = builder.build_head(mask_head)
self.train_cfg = train_cfg
self.test_cfg = test_cfg
self.init_weights(pretrained=pretrained)
@property
def with_rpn(self):
return hasattr(self, 'rpn_head') and self.rpn_head is not None
def init_weights(self, pretrained=None):
super(TwoStageDetector, self).init_weights(pretrained)
self.backbone.init_weights(pretrained=pretrained)
if self.with_neck:
if isinstance(self.neck, nn.Sequential):
for m in self.neck:
m.init_weights()
else:
self.neck.init_weights()
if self.with_shared_head:
self.shared_head.init_weights(pretrained=pretrained)
if self.with_rpn:
self.rpn_head.init_weights()
if self.with_bbox:
self.bbox_roi_extractor.init_weights()
self.bbox_head.init_weights()
if self.with_mask:
self.mask_head.init_weights()
if not self.share_roi_extractor:
self.mask_roi_extractor.init_weights()
def extract_feat(self, img):
x = self.backbone(img)
if self.with_neck:
x = self.neck(x)
return x
def forward_train(self,
img,
img_meta,
gt_bboxes,
gt_labels,
gt_bboxes_ignore=None,
gt_masks=None,
proposals=None):
x = self.extract_feat(img)
losses = dict()
# RPN forward and loss
if self.with_rpn:
rpn_outs = self.rpn_head(x)
rpn_loss_inputs = rpn_outs + (gt_bboxes, img_meta,
self.train_cfg.rpn)
rpn_losses = self.rpn_head.loss(
*rpn_loss_inputs, gt_bboxes_ignore=gt_bboxes_ignore)
losses.update(rpn_losses)
proposal_cfg = self.train_cfg.get('rpn_proposal',
self.test_cfg.rpn)
proposal_inputs = rpn_outs + (img_meta, proposal_cfg)
proposal_list = self.rpn_head.get_bboxes(*proposal_inputs)
else:
proposal_list = proposals
# assign gts and sample proposals
if self.with_bbox or self.with_mask:
bbox_assigner = build_assigner(self.train_cfg.rcnn.assigner)
bbox_sampler = build_sampler(
self.train_cfg.rcnn.sampler, context=self)
num_imgs = img.size(0)
if gt_bboxes_ignore is None:
gt_bboxes_ignore = [None for _ in range(num_imgs)]
sampling_results = []
for i in range(num_imgs):
assign_result = bbox_assigner.assign(proposal_list[i],
gt_bboxes[i],
gt_bboxes_ignore[i],
gt_labels[i])
sampling_result = bbox_sampler.sample(
assign_result,
proposal_list[i],
gt_bboxes[i],
gt_labels[i],
feats=[lvl_feat[i][None] for lvl_feat in x])
sampling_results.append(sampling_result)
# bbox head forward and loss
if self.with_bbox:
rois = bbox2roi([res.bboxes for res in sampling_results])
# TODO: a more flexible way to decide which feature maps to use
bbox_feats = self.bbox_roi_extractor(
x[:self.bbox_roi_extractor.num_inputs], rois)
if self.with_shared_head:
bbox_feats = self.shared_head(bbox_feats)
cls_score, bbox_pred = self.bbox_head(bbox_feats)
bbox_targets = self.bbox_head.get_target(sampling_results,
gt_bboxes, gt_labels,
self.train_cfg.rcnn)
loss_bbox = self.bbox_head.loss(cls_score, bbox_pred,
*bbox_targets)
losses.update(loss_bbox)
# mask head forward and loss
if self.with_mask:
if not self.share_roi_extractor:
pos_rois = bbox2roi(
[res.pos_bboxes for res in sampling_results])
mask_feats = self.mask_roi_extractor(
x[:self.mask_roi_extractor.num_inputs], pos_rois)
if self.with_shared_head:
mask_feats = self.shared_head(mask_feats)
else:
pos_inds = []
device = bbox_feats.device
for res in sampling_results:
pos_inds.append(
torch.ones(
res.pos_bboxes.shape[0],
device=device,
dtype=torch.uint8))
pos_inds.append(
torch.zeros(
res.neg_bboxes.shape[0],
device=device,
dtype=torch.uint8))
pos_inds = torch.cat(pos_inds)
mask_feats = bbox_feats[pos_inds]
mask_pred = self.mask_head(mask_feats)
mask_targets = self.mask_head.get_target(sampling_results,
gt_masks,
self.train_cfg.rcnn)
pos_labels = torch.cat(
[res.pos_gt_labels for res in sampling_results])
loss_mask = self.mask_head.loss(mask_pred, mask_targets,
pos_labels)
losses.update(loss_mask)
return losses
def simple_test(self, img, img_meta, proposals=None, rescale=False):
"""Test without augmentation."""
assert self.with_bbox, "Bbox head must be implemented."
x = self.extract_feat(img)
proposal_list = self.simple_test_rpn(
x, img_meta, self.test_cfg.rpn) if proposals is None else proposals
det_bboxes, det_labels = self.simple_test_bboxes(
x, img_meta, proposal_list, self.test_cfg.rcnn, rescale=rescale)
bbox_results = bbox2result(det_bboxes, det_labels,
self.bbox_head.num_classes)
if not self.with_mask:
return bbox_results
else:
segm_results = self.simple_test_mask(
x, img_meta, det_bboxes, det_labels, rescale=rescale)
return bbox_results, segm_results
def aug_test(self, imgs, img_metas, rescale=False):
"""Test with augmentations.
If rescale is False, then returned bboxes and masks will fit the scale
of imgs[0].
"""
# recompute feats to save memory
proposal_list = self.aug_test_rpn(
self.extract_feats(imgs), img_metas, self.test_cfg.rpn)
det_bboxes, det_labels = self.aug_test_bboxes(
self.extract_feats(imgs), img_metas, proposal_list,
self.test_cfg.rcnn)
if rescale:
_det_bboxes = det_bboxes
else:
_det_bboxes = det_bboxes.clone()
_det_bboxes[:, :4] *= img_metas[0][0]['scale_factor']
bbox_results = bbox2result(_det_bboxes, det_labels,
self.bbox_head.num_classes)
# det_bboxes always keep the original scale
if self.with_mask:
segm_results = self.aug_test_mask(
self.extract_feats(imgs), img_metas, det_bboxes, det_labels)
return bbox_results, segm_results
else:
return bbox_results
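# Illustrative note: bbox2roi (imported above) concatenates per-image boxes into
# a single (N, 5) tensor whose first column is the image index within the batch,
# which is the layout the RoI extractors expect, e.g.:
#   rois = bbox2roi([torch.rand(3, 4), torch.rand(2, 4)])  # (5, 5); rois[:, 0] in {0., 1.}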
| [
[
[
7,
12
],
[
6614,
6619
],
[
6756,
6761
],
[
6831,
6836
],
[
6974,
6979
],
[
7015,
7020
],
[
7370,
7375
]
],
[
[
20,
34
],
[
2270,
2272
]
],
[
[
54,
66
],
[
319,
331
]
],
[
[
92,
104
],
[
333,
345
]
],
[
[
106,
119
],
[
347,
360
]
],
[
[
121,
134
],
[
385,
398
]
],
[
[
150,
157
],
[
870,
877
],
[
957,
964
],
[
1050,
1057
],
[
1151,
1158
],
[
1253,
1260
],
[
1347,
1354
],
[
1501,
1508
],
[
1780,
1787
]
],
[
[
181,
190
],
[
270,
279
]
],
[
[
214,
222
],
[
5196,
5204
],
[
6113,
6121
]
],
[
[
224,
235
],
[
8132,
8143
],
[
9189,
9200
]
],
[
[
237,
251
],
[
4154,
4168
]
],
[
[
253,
266
],
[
4226,
4239
]
],
[
[
302,
318
],
[
811,
827
],
[
2099,
2115
]
]
] |
from __future__ import division
import discord, math, operator
from discord.ext import commands
from pyparsing import (Literal, CaselessLiteral, Word, Combine, Group, Optional,
                       ZeroOrMore, Forward, nums, alphas, oneOf)
__author__ = 'Paul McGuire'
__version__ = '$Revision: 0.0 $'
__date__ = '$Date: 2009-03-20 $'
__source__ = """http://pyparsing.wikispaces.com/file/view/fourFn.py
http://pyparsing.wikispaces.com/message/view/home/15549426
"""
__note__ = """
This is a re-wrap of Paul McGuire's fourFn.py as a class, so it can
be used easily in other places of the code. Most of the work was done
by corpnewt, all I did was clean it and create the results in embeds.
Also, the messages are deleted after, except for the correct answer.
"""
class NumericStringParserForPython3(object):
"""
Most of this code comes from the fourFn.py pyparsing example
"""
def pushFirst(self, strg, loc, toks):
self.exprStack.append(toks[0])
def pushUMinus(self, strg, loc, toks):
if toks and toks[0]=='-':
self.exprStack.append('unary -')
def __init__(self):
"""
Please use any of the following symbols:
expop :: '^'
multop :: '*' | '/'
addop :: '+' | '-'
integer :: ['+' | '-'] '0'..'9'+
"""
point = Literal(".")
e = CaselessLiteral("E")
fnumber = Combine(Word("+-"+nums, nums) +
Optional(point + Optional(Word(nums))) +
Optional(e + Word("+-"+nums, nums)))
ident = Word(alphas, alphas+nums+"_$")
plus = Literal("+")
minus = Literal("-")
mult = Literal("*")
div = Literal("/")
lpar = Literal("(").suppress()
rpar = Literal(")").suppress()
addop = plus | minus
multop = mult | div
expop = Literal("^")
pi = CaselessLiteral("PI")
expr = Forward()
atom = ((Optional(oneOf("- +")) +
(pi|e|fnumber|ident+lpar+expr+rpar).setParseAction(self.pushFirst))
| Optional(oneOf("- +")) + Group(lpar+expr+rpar)
).setParseAction(self.pushUMinus)
# by defining exponentiation as "atom [ ^ factor ]..." instead of
# "atom [ ^ atom ]...", we get right-to-left exponents, instead of left-to-right
# that is, 2^3^2 = 2^(3^2), not (2^3)^2.
factor = Forward()
factor << atom + ZeroOrMore((expop + factor).setParseAction(self.pushFirst))
term = factor + ZeroOrMore((multop + factor).setParseAction(self.pushFirst))
expr << term + ZeroOrMore((addop + term).setParseAction(self.pushFirst))
# addop_term = (addop + term).setParseAction(self.pushFirst)
# general_term = term + ZeroOrMore(addop_term) | OneOrMore(addop_term)
# expr << general_term
self.bnf = expr
# this will map operator symbols to their corresponding arithmetic operations
epsilon = 1e-12
self.opn = {
"+" : operator.add,
"-" : operator.sub,
"*" : operator.mul,
"/" : operator.truediv,
"^" : operator.pow }
self.fn = {
"sin" : math.sin,
"cos" : math.cos,
"tan" : math.tan,
"abs" : abs,
"trunc" : lambda a: int(a),
"round" : round,
"sgn" : lambda a: abs(a)>epsilon and cmp(a,0) or 0}
def evaluateStack(self, s):
op = s.pop()
if op == 'unary -':
return -self.evaluateStack(s)
if op in "+-*/^":
op2 = self.evaluateStack(s)
op1 = self.evaluateStack(s)
return self.opn[op](op1, op2)
elif op == "PI":
return math.pi # 3.1415926535
elif op == "E":
return math.e # 2.718281828
elif op in self.fn:
return self.fn[op](self.evaluateStack(s))
elif op[0].isalpha():
return 0
else:
return float(op)
def eval(self,num_string,parseAll=True):
self.exprStack=[]
results=self.bnf.parseString(num_string,parseAll)
val=self.evaluateStack(self.exprStack[:])
return val
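# Usage sketch (illustrative):
#   nsp = NumericStringParserForPython3()
#   nsp.eval("2^3^2")          # 512.0 -- exponentiation is right-associative
#   nsp.eval("round(PI + 1)")  # 4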
class Calculator:
# Init with the bot reference, and a reference to the settings var
def __init__(self, bot):
self.bot = bot
self.nsp=NumericStringParserForPython3()
self.user_color = discord.Colour(0xed791d) ## orange
self.mod_color = discord.Colour(0x7289da) ## blurple
@commands.command(description='Scientific calculator', aliases=['calculate', 'maths'])
async def calc(self, ctx, *, formula = None):
""" ✔ Do some math
thanks to Paul McGuire's fourFn.py. """
person = ctx.message.author
        if formula is None:
            # How can it calculate an empty message? Reee!
            # Guard here, before the .replace() chain below, which would
            # raise AttributeError on None.
            msg = f'\u200BUsage: `{ctx.prefix}{ctx.invoked_with} [any maths formula]`'
            e = discord.Embed(color=self.user_color)
            e.description = msg
            try:
                await ctx.send(embed=e, delete_after=23)
            except discord.HTTPException:
                await ctx.send(msg, delete_after=23)
            return
        formula = formula.replace('x', '*').replace(' minus ', '-').replace(' plus ', '+').replace(' into ', '/') \
            .replace(' sub ', '-').replace(' pi ', 'PI').replace(' divide ', '/').replace(' multiply ', '*') \
            .replace(' add ', '+').replace(' div ', '/').replace(' multi ', '*').replace(' mul ', '*') \
            .replace('π', 'PI').replace('÷', '/')
try:
answer=self.nsp.eval(formula)
        except Exception:
# If there's a problem in the input, show examples
msg = f'\N{THINKING FACE} wrong `{formula}` input.\n\nTry any of these:'
e = discord.Embed(color=self.user_color)
e.description = f'\u200B{msg}'
e.add_field(name='multiply', value='`2 * 3 x 5 multiply 7`')
e.add_field(name='divide', value='`91 / 5 divide 3 into 2 ÷ 4`')
e.add_field(name='add', value='`1 + 4 plus 8 add 23`')
e.add_field(name='substract', value='`91 - 35 minus 3 sub 12`')
e.add_field(name='exponential', value="`7 ^ 5`")
e.add_field(name='Supported formulas',
value='```py\nround((cos(45) + (3+7^2)*2 + tan(369.18)) / π - 3)```')
try:
await ctx.send(embed=e, delete_after=23)
except discord.HTTPException:
error = f'\N{THINKING FACE} wrong `{formula}` input.\n\n ' \
f'Try any of these:```py\nround((cos(45) + (3+7^2)*2 + tan(369.18)) / π - 3)```'
await ctx.send(error, delete_after=23)
return
# Correct input prints correct answer
        # ws.latency is the gateway heartbeat latency, in seconds
        duration = f'Calculated in {self.bot.ws.latency * 1000:.2f} ms'
success = round(answer, 2)
e = discord.Embed(color=self.user_color)
e.add_field(name='Input:', value=f'```py\n{formula}```', inline=True)
e.add_field(name='Result:', value=f'```css\n{success}```', inline=True)
e.set_footer(text=duration)
try:
await ctx.send(embed=e)
except discord.Forbidden: # FORBIDDEN (status code: 403): Missing Permissions
await ctx.send(f'```rust\n>Input: {formula}\nResult: {success}```')
def setup(bot):
bot.add_cog(Calculator(bot))
| [
[
[
23,
31
]
],
[
[
39,
46
],
[
4481,
4488
],
[
4541,
4548
],
[
5442,
5449
],
[
5605,
5612
],
[
5936,
5943
],
[
6610,
6617
],
[
7104,
7111
],
[
7400,
7407
]
],
[
[
48,
52
],
[
3234,
3238
],
[
3268,
3272
],
[
3302,
3306
],
[
3801,
3805
],
[
3867,
3871
]
],
[
[
54,
62
],
[
3026,
3034
],
[
3062,
3070
],
[
3098,
3106
],
[
3134,
3142
],
[
3174,
3182
]
],
[
[
88,
96
],
[
4583,
4591
]
],
[
[
120,
127
],
[
1320,
1327
],
[
1609,
1616
],
[
1638,
1645
],
[
1667,
1674
],
[
1696,
1703
],
[
1725,
1732
],
[
1765,
1772
],
[
1863,
1870
]
],
[
[
128,
143
],
[
1349,
1364
],
[
1892,
1907
]
],
[
[
144,
148
],
[
1396,
1400
],
[
1470,
1474
],
[
1522,
1526
],
[
1562,
1566
]
],
[
[
149,
156
],
[
1388,
1395
]
],
[
[
157,
162
],
[
2108,
2113
]
],
[
[
163,
171
],
[
1444,
1452
],
[
1461,
1469
],
[
1509,
1517
],
[
1956,
1964
],
[
2083,
2091
]
],
[
[
193,
203
],
[
2443,
2453
],
[
2527,
2537
],
[
2611,
2621
]
],
[
[
204,
211
],
[
1929,
1936
],
[
2408,
2415
]
],
[
[
212,
216
],
[
1406,
1410
],
[
1412,
1416
],
[
1475,
1479
],
[
1532,
1536
],
[
1538,
1542
],
[
1582,
1586
]
],
[
[
217,
223
],
[
1567,
1573
],
[
1575,
1581
]
],
[
[
224,
229
],
[
1965,
1970
],
[
2092,
2097
]
],
[
[
232,
242
]
],
[
[
258,
269
]
],
[
[
291,
299
]
],
[
[
324,
334
]
],
[
[
455,
463
]
],
[
[
761,
790
],
[
4423,
4452
]
],
[
[
4270,
4280
],
[
7586,
7596
]
],
[
[
7558,
7563
]
]
] |
import onfido
from onfido.regions import Region
import io
api = onfido.Api("<AN_API_TOKEN>", region=Region.EU)
fake_uuid = "58a9c6d2-8661-4dbd-96dc-b9b9d344a7ce"
def test_upload_photo(requests_mock):
mock_upload = requests_mock.post("https://api.eu.onfido.com/v3.2/live_photos/", json=[])
sample_file = open("sample_photo.png", "rb")
request_body = {"advanced_validation": "true"}
api.live_photo.upload(sample_file, request_body)
assert mock_upload.called is True
def test_find_live_photo(requests_mock):
mock_find = requests_mock.get(f"https://api.eu.onfido.com/v3.2/live_photos/{fake_uuid}", json=[])
api.live_photo.find(fake_uuid)
assert mock_find.called is True
def test_list_live_photos(requests_mock):
mock_list = requests_mock.get(f"https://api.eu.onfido.com/v3.2/live_photos/?applicant_id={fake_uuid}", json=[])
api.live_photo.all(fake_uuid)
assert mock_list.called is True
def test_download_live_photo(requests_mock):
mock_download = requests_mock.get(f"https://api.eu.onfido.com/v3.2/live_photos/{fake_uuid}/download", text="FAKE IMAGE BINARY", headers={"Content-type": "image/png"})
onfido_download = api.live_photo.download(fake_uuid)
assert mock_download.called is True
assert onfido_download.content_type == "image/png"
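# Note: `requests_mock` here is the pytest fixture provided by the requests-mock
# plugin; these tests are meant to run under pytest with requests-mock installed.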
| [
[
[
7,
13
],
[
65,
71
]
],
[
[
41,
47
],
[
101,
107
]
],
[
[
55,
57
]
],
[
[
59,
62
],
[
403,
406
],
[
639,
642
],
[
869,
872
],
[
1174,
1177
]
],
[
[
113,
122
],
[
613,
622
],
[
659,
668
],
[
843,
852
],
[
888,
897
],
[
1065,
1074
],
[
1198,
1207
]
],
[
[
170,
187
]
],
[
[
496,
516
]
],
[
[
711,
732
]
],
[
[
940,
964
]
]
] |
#import OpenStack connection class from the SDK
from openstack import connection
# Create a connection object by calling the constructor and pass the security information
conn = connection.Connection(auth_url="http://192.168.0.106/identity",
project_name="demo",
username="admin",
password="manoj",
user_domain_id="default",
project_domain_id="default")
def create_volume(conn):
volume_properties = {'size':'2', 'name':'packtpub-volume-2'}
volume = conn.block_store.create_volume(**volume_properties)
def delete_volume(conn):
volume_id = "3b064701-aaa7-418a-9df7-cad52bd549ee"
conn.block_store.delete_volume(volume_id)
def create_snapshot(conn):
snapshot_properties = {'volume_id':'3b064701-aaa7-418a-9df7-cad52bd549ee'}
snapshot = conn.block_store.create_snapshot(**snapshot_properties)
def delete_snapshot(conn):
snapshot_id = "91ac5916-0baa-469e-ac4e-e37b2a3880dc"
conn.block_store.delete_snapshot(snapshot_id)
#create_snapshot(conn)
#delete_snapshot(conn)
#delete_volume(conn)
create_volume(conn)
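# Illustrative follow-up (openstacksdk): existing volumes can be listed through
# the same block_store proxy, e.g.:
#   for volume in conn.block_store.volumes():
#       print(volume.name, volume.status)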
| [
[
[
70,
80
],
[
179,
189
]
],
[
[
172,
176
],
[
1036,
1040
]
],
[
[
361,
374
],
[
1022,
1035
]
],
[
[
517,
530
]
],
[
[
644,
659
]
],
[
[
823,
838
]
]
] |
# -*- coding: utf-8 -*-
"""Top-level package for {{ cookiecutter.project_name }}"""
__version__ = '0.0.1'
| [
[
[
85,
96
]
]
] |
np.tanh(x) | [] |
import asyncio
import aioredis
async def main():
sentinel = await aioredis.create_sentinel(
["redis://localhost:26379", "redis://sentinel2:26379"]
)
redis = sentinel.master_for("mymaster")
ok = await redis.set("key", "value")
assert ok
val = await redis.get("key", encoding="utf-8")
assert val == "value"
asyncio.run(main())
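# Note (aioredis v1.x): the same sentinel pool can hand out read-only replica
# connections, e.g. `replica = sentinel.slave_for("mymaster")`.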
| [
[
[
7,
14
],
[
346,
353
]
],
[
[
22,
30
],
[
72,
80
]
],
[
[
33,
343
],
[
358,
362
]
]
] |
import logging
from .DatabaseBase import DatabaseBase
logger = logging.getLogger(__name__)
logging.basicConfig(level=logging.DEBUG)
class ImageResource(DatabaseBase):
def __init__(self):
super().__init__()
def get_product_images_by_id(self, id):
search_image_query = """Select * From images where productId = %s """
values = [id]
image_records = self.run_query(search_image_query, values, False)
return image_records
def get_threed_link_by_product_id(self, pid):
select_query = """ Select * from images where is3DModelType = 'Y' and productId = %s"""
values = [str(pid)]
records = self.run_query(select_query, values, True)
if records is None:
return None
return records[0]
def update_threed_link(self, url, id_list):
update_query = """UPDATE images SET threeDModelLocation = %s WHERE productId =%s and is3DModelType = 'Y' """
for id in id_list:
self.run_query(update_query, [url, id], False)
def insert_threed_model(self, url, id_list):
insert_query = """INSERT INTO images(threeDModelLocation, is3DModelType, productId) VALUES (%s, %s, %s)"""
values = []
for id in id_list:
temp = (url, 'Y', str(id))
values.append(temp)
try:
self.run_query_many(insert_query, values, True)
except Exception as e:
self.connection.rollback()
            logger.error('Exception occurred when inserting order: %s', e)
return 0
return 1
| [
[
[
7,
14
],
[
64,
71
],
[
92,
99
],
[
118,
125
]
],
[
[
41,
53
],
[
155,
167
]
],
[
[
55,
61
],
[
1476,
1482
]
],
[
[
141,
154
]
]
] |
"""weather_api URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.9/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.conf.urls import url, include
2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls'))
"""
from django.conf.urls import url, include
from django.views.generic import RedirectView
urlpatterns = [
url(r'^$', RedirectView.as_view(url='https://github.com/brian-duffy/yoyo-test/blob/master/README.md')),
url(r'weather_app/', include('weather_app.urls')),
]
| [
[
[
673,
676
],
[
754,
757
],
[
862,
865
]
],
[
[
678,
685
],
[
883,
890
]
],
[
[
719,
731
],
[
765,
777
]
],
[
[
734,
745
]
]
] |
"""Utility for currying functions."""
from functools import wraps
from inspect import signature, isbuiltin, isclass
def curry(func, args=None, kwargs=None, n=None, use_defaults=False):
if use_defaults:
return CurriedDefault(func, args, kwargs, n)
return Curried(func, args, kwargs, n)
class Curried:
def __init__(self, func, args=None, kwargs=None, target_arg_count=None):
if not callable(func):
raise TypeError('first argument must be callable')
wraps(func)(self)
self.func = func
self.args = or_else(args, tuple())
self.kwargs = or_else(kwargs, dict())
self.target_arg_count = or_else(target_arg_count, get_target_arg_count(func))
def __call__(self, *new_args, **new_kwargs):
args = self.args + new_args
kwargs = self.kwargs.copy()
kwargs.update(new_kwargs)
if self._have_enough_args(args, kwargs):
return self.func(*args, **kwargs)
return self._clone(args, kwargs)
def _clone(self, args, kwargs):
return Curried(self.func, args, kwargs, self.target_arg_count)
def _have_enough_args(self, args, kwargs):
return current_count(args, kwargs) == self.target_arg_count
class CurriedDefault(Curried):
def _clone(self, args, kwargs):
return CurriedDefault(self.func, args, kwargs, self.target_arg_count)
def _have_enough_args(self, args, kwargs):
count = current_count(args, kwargs)
return count == self.target_arg_count or count == (self.target_arg_count - count_defaults(self.func))
def or_else(x, default):
return x if x is not None else default
def current_count(next_args, next_kwargs):
return len(next_args) + len(next_kwargs)
def count_defaults(func):
length = 0
if func.__defaults__ is not None:
length += len(func.__defaults__)
if func.__kwdefaults__ is not None:
length += len(func.__kwdefaults__)
return length
def get_target_arg_count(func):
if isclass(func) or isbuiltin(func):
# builtins, e.g. `map`, refer to class rather than fn
func = func.__call__
sig = signature(func)
return len(sig.parameters)
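# Usage sketch (illustrative):
if __name__ == '__main__':
    def add(a, b, c):
        return a + b + c

    add3 = curry(add)
    # Arguments can be supplied one at a time or in groups; the wrapped
    # function only runs once the target argument count is reached.
    assert add3(1)(2)(3) == 6
    assert add3(1, 2)(3) == 6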
| [
[
[
61,
66
],
[
502,
507
]
],
[
[
87,
96
],
[
2149,
2158
]
],
[
[
98,
107
],
[
2030,
2039
]
],
[
[
109,
116
],
[
2013,
2020
]
],
[
[
123,
128
]
],
[
[
313,
320
],
[
1263,
1270
],
[
274,
281
],
[
1068,
1075
]
],
[
[
1248,
1262
],
[
224,
238
],
[
1324,
1338
]
],
[
[
1595,
1602
],
[
566,
573
],
[
611,
618
],
[
667,
674
]
],
[
[
1665,
1678
],
[
1187,
1200
],
[
1451,
1464
]
],
[
[
1755,
1769
],
[
1562,
1576
]
],
[
[
1978,
1998
],
[
693,
713
]
]
] |
# emacs: -*- mode: python; py-indent-offset: 4; tab-width: 4; indent-tabs-mode: nil -*-
# ex: set sts=4 ts=4 sw=4 et:
# ## ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ##
#
# See COPYING file distributed along with the datalad package for the
# copyright and license terms.
#
# ## ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ##
"""Obsolete module: moved to `local.download_url`
"""
import warnings
warnings.warn(
"DownloadURL has been moved to datalad.local.download_url. "
"This module was deprecated in 0.16.0, and will be removed in a future "
"release. Please adjust the import.",
DeprecationWarning)
# Import command class to ease 3rd-party transitions
from datalad.local.download_url import DownloadURL
| [
[
[
449,
457
],
[
458,
466
]
],
[
[
774,
785
]
]
] |
# -*- coding: utf-8 -*-
'''
flask.ext.login
---------------
This module provides user session management for Flask. It lets you log
your users in and out in a database-independent manner.
:copyright: (c) 2011 by Matthew Frazier.
:license: MIT/X11, see LICENSE for more details.
'''
__version_info__ = ('0', '2', '10')
__version__ = '.'.join(__version_info__)
__author__ = 'Matthew Frazier'
__license__ = 'MIT/X11'
__copyright__ = '(c) 2011 by Matthew Frazier'
__all__ = ['LoginManager']
from flask import (_request_ctx_stack, abort, current_app, flash, redirect,
request, session, url_for, has_request_context)
from flask.signals import Namespace
from werkzeug.local import LocalProxy
from werkzeug.security import safe_str_cmp
from werkzeug.urls import url_decode, url_encode
from datetime import datetime, timedelta
from functools import wraps
from hashlib import sha1, md5
import hmac
import warnings
import sys
if sys.version < '3': # pragma: no cover
from urlparse import urlparse, urlunparse
else: # pragma: no cover
from urllib.parse import urlparse, urlunparse
unicode = str
_signals = Namespace()
#: A proxy for the current user. If no user is logged in, this will be an
#: anonymous user
current_user = LocalProxy(lambda: _get_user())
#: The default name of the "remember me" cookie (``remember_token``)
COOKIE_NAME = 'remember_token'
#: The default time before the "remember me" cookie expires (365 days).
COOKIE_DURATION = timedelta(days=365)
#: Whether the "remember me" cookie requires Secure; defaults to ``None``
COOKIE_SECURE = None
#: Whether the "remember me" cookie uses HttpOnly or not; defaults to ``False``
COOKIE_HTTPONLY = False
#: The default flash message to display when users need to log in.
LOGIN_MESSAGE = u'Please log in to access this page.'
#: The default flash message category to display when users need to log in.
LOGIN_MESSAGE_CATEGORY = 'message'
#: The default flash message to display when users need to reauthenticate.
REFRESH_MESSAGE = u'Please reauthenticate to access this page.'
#: The default flash message category to display when users need to
#: reauthenticate.
REFRESH_MESSAGE_CATEGORY = 'message'
#: The default attribute to retreive the unicode id of the user
ID_ATTRIBUTE = 'get_id'
#: Default name of the auth header (``Authorization``)
AUTH_HEADER_NAME = 'Authorization'
class LoginManager(object):
'''
This object is used to hold the settings used for logging in. Instances of
:class:`LoginManager` are *not* bound to specific apps, so you can create
one in the main body of your code and then bind it to your
app in a factory function.
'''
def __init__(self, app=None, add_context_processor=True):
#: A class or factory function that produces an anonymous user, which
#: is used when no one is logged in.
self.anonymous_user = AnonymousUserMixin
#: The name of the view to redirect to when the user needs to log in.
#: (This can be an absolute URL as well, if your authentication
#: machinery is external to your application.)
self.login_view = None
#: The message to flash when a user is redirected to the login page.
self.login_message = LOGIN_MESSAGE
#: The message category to flash when a user is redirected to the login
#: page.
self.login_message_category = LOGIN_MESSAGE_CATEGORY
#: The name of the view to redirect to when the user needs to
#: reauthenticate.
self.refresh_view = None
#: The message to flash when a user is redirected to the 'needs
#: refresh' page.
self.needs_refresh_message = REFRESH_MESSAGE
#: The message category to flash when a user is redirected to the
#: 'needs refresh' page.
self.needs_refresh_message_category = REFRESH_MESSAGE_CATEGORY
#: The mode to use session protection in. This can be either
#: ``'basic'`` (the default) or ``'strong'``, or ``None`` to disable
#: it.
self.session_protection = 'basic'
#: If present, used to translate flash messages ``self.login_message``
#: and ``self.needs_refresh_message``
self.localize_callback = None
self.token_callback = None
self.user_callback = None
self.unauthorized_callback = None
self.needs_refresh_callback = None
self.id_attribute = ID_ATTRIBUTE
self.header_callback = None
self.request_callback = None
if app is not None:
self.init_app(app, add_context_processor)
def setup_app(self, app, add_context_processor=True): # pragma: no cover
'''
This method has been deprecated. Please use
:meth:`LoginManager.init_app` instead.
'''
warnings.warn('Warning setup_app is deprecated. Please use init_app.',
DeprecationWarning)
self.init_app(app, add_context_processor)
def init_app(self, app, add_context_processor=True):
'''
Configures an application. This registers an `after_request` call, and
attaches this `LoginManager` to it as `app.login_manager`.
:param app: The :class:`flask.Flask` object to configure.
:type app: :class:`flask.Flask`
:param add_context_processor: Whether to add a context processor to
the app that adds a `current_user` variable to the template.
Defaults to ``True``.
:type add_context_processor: bool
'''
app.login_manager = self
app.after_request(self._update_remember_cookie)
self._login_disabled = app.config.get('LOGIN_DISABLED',
app.config.get('TESTING', False))
if add_context_processor:
app.context_processor(_user_context_processor)
def unauthorized(self):
'''
This is called when the user is required to log in. If you register a
callback with :meth:`LoginManager.unauthorized_handler`, then it will
be called. Otherwise, it will take the following actions:
- Flash :attr:`LoginManager.login_message` to the user.
- Redirect the user to `login_view`. (The page they were attempting
to access will be passed in the ``next`` query string variable,
so you can redirect there if present instead of the homepage.)
If :attr:`LoginManager.login_view` is not defined, then it will simply
raise a HTTP 401 (Unauthorized) error instead.
This should be returned from a view or before/after_request function,
otherwise the redirect will have no effect.
'''
user_unauthorized.send(current_app._get_current_object())
if self.unauthorized_callback:
return self.unauthorized_callback()
if not self.login_view:
abort(401)
if self.login_message:
if self.localize_callback is not None:
flash(self.localize_callback(self.login_message),
category=self.login_message_category)
else:
flash(self.login_message, category=self.login_message_category)
return redirect(login_url(self.login_view, request.url))
def user_loader(self, callback):
'''
This sets the callback for reloading a user from the session. The
function you set should take a user ID (a ``unicode``) and return a
user object, or ``None`` if the user does not exist.
:param callback: The callback for retrieving a user object.
:type callback: unicode
'''
self.user_callback = callback
return callback
def header_loader(self, callback):
'''
This sets the callback for loading a user from a header value.
The function you set should take an authentication token and
return a user object, or `None` if the user does not exist.
:param callback: The callback for retrieving a user object.
'''
self.header_callback = callback
return callback
def request_loader(self, callback):
'''
This sets the callback for loading a user from a Flask request.
The function you set should take Flask request object and
return a user object, or `None` if the user does not exist.
:param callback: The callback for retrieving a user object.
'''
self.request_callback = callback
return callback
def token_loader(self, callback):
'''
This sets the callback for loading a user from an authentication
token. The function you set should take an authentication token
(a ``unicode``, as returned by a user's `get_auth_token` method) and
return a user object, or ``None`` if the user does not exist.
:param callback: The callback for retrieving a user object.
:type callback: unicode
'''
self.token_callback = callback
return callback
def unauthorized_handler(self, callback):
'''
This will set the callback for the `unauthorized` method, which among
other things is used by `login_required`. It takes no arguments, and
should return a response to be sent to the user instead of their
normal view.
:param callback: The callback for unauthorized users.
:type callback: function
'''
self.unauthorized_callback = callback
return callback
def needs_refresh_handler(self, callback):
'''
This will set the callback for the `needs_refresh` method, which among
other things is used by `fresh_login_required`. It takes no arguments,
and should return a response to be sent to the user instead of their
normal view.
:param callback: The callback for unauthorized users.
:type callback: function
'''
self.needs_refresh_callback = callback
return callback
def needs_refresh(self):
'''
This is called when the user is logged in, but they need to be
reauthenticated because their session is stale. If you register a
callback with `needs_refresh_handler`, then it will be called.
Otherwise, it will take the following actions:
- Flash :attr:`LoginManager.needs_refresh_message` to the user.
- Redirect the user to :attr:`LoginManager.refresh_view`. (The page
they were attempting to access will be passed in the ``next``
query string variable, so you can redirect there if present
instead of the homepage.)
If :attr:`LoginManager.refresh_view` is not defined, then it will
simply raise a HTTP 403 (Forbidden) error instead.
This should be returned from a view or before/after_request function,
otherwise the redirect will have no effect.
'''
user_needs_refresh.send(current_app._get_current_object())
if self.needs_refresh_callback:
return self.needs_refresh_callback()
if not self.refresh_view:
abort(403)
if self.localize_callback is not None:
flash(self.localize_callback(self.needs_refresh_message),
category=self.needs_refresh_message_category)
else:
flash(self.needs_refresh_message,
category=self.needs_refresh_message_category)
return redirect(login_url(self.refresh_view, request.url))
def reload_user(self, user=None):
ctx = _request_ctx_stack.top
if user is None:
user_id = session.get('user_id')
if user_id is None:
ctx.user = self.anonymous_user()
else:
user = self.user_callback(user_id)
if user is None:
logout_user()
else:
ctx.user = user
else:
ctx.user = user
def _load_user(self):
'''Loads user from session or remember_me cookie as applicable'''
user_accessed.send(current_app._get_current_object())
# first check SESSION_PROTECTION
config = current_app.config
if config.get('SESSION_PROTECTION', self.session_protection):
deleted = self._session_protection()
if deleted:
return self.reload_user()
# If a remember cookie is set, and the session is not, move the
# cookie user ID to the session.
#
# However, the session may have been set if the user has been
# logged out on this request, 'remember' would be set to clear,
# so we should check for that and not restore the session.
is_missing_user_id = 'user_id' not in session
if is_missing_user_id:
cookie_name = config.get('REMEMBER_COOKIE_NAME', COOKIE_NAME)
header_name = config.get('AUTH_HEADER_NAME', AUTH_HEADER_NAME)
has_cookie = (cookie_name in request.cookies and
session.get('remember') != 'clear')
if has_cookie:
return self._load_from_cookie(request.cookies[cookie_name])
elif header_name in request.headers:
return self._load_from_header(request.headers[header_name])
else:
return self._load_from_request(request)
return self.reload_user()
def _session_protection(self):
sess = session._get_current_object()
ident = _create_identifier()
app = current_app._get_current_object()
mode = app.config.get('SESSION_PROTECTION', self.session_protection)
# if there is no '_id', then take the current one for good
if '_id' not in sess:
sess['_id'] = ident
# if the sess is empty, it's an anonymous user, or just logged out
# so we can skip this, unless 'strong' protection is active,
# in which case we need to double check for the remember me token
check_protection = sess or mode == 'strong'
if check_protection and ident != sess.get('_id', None):
if mode == 'basic' or sess.permanent:
sess['_fresh'] = False
session_protected.send(app)
return False
elif mode == 'strong':
sess.clear()
sess['remember'] = 'clear'
session_protected.send(app)
return True
return False
def _load_from_cookie(self, cookie):
if self.token_callback:
user = self.token_callback(cookie)
if user is not None:
session['user_id'] = getattr(user, self.id_attribute)()
session['_fresh'] = False
_request_ctx_stack.top.user = user
else:
self.reload_user()
else:
user_id = decode_cookie(cookie)
if user_id is not None:
session['user_id'] = user_id
session['_fresh'] = False
self.reload_user()
if _request_ctx_stack.top.user is not None:
app = current_app._get_current_object()
user_loaded_from_cookie.send(app, user=_get_user())
def _load_from_header(self, header):
user = None
if self.header_callback:
user = self.header_callback(header)
if user is not None:
self.reload_user(user=user)
app = current_app._get_current_object()
user_loaded_from_header.send(app, user=_get_user())
else:
self.reload_user()
def _load_from_request(self, request):
user = None
if self.request_callback:
user = self.request_callback(request)
if user is not None:
self.reload_user(user=user)
app = current_app._get_current_object()
user_loaded_from_request.send(app, user=_get_user())
else:
self.reload_user()
def _update_remember_cookie(self, response):
# Don't modify the session unless there's something to do.
if 'remember' in session:
operation = session.pop('remember', None)
if operation == 'set' and 'user_id' in session:
self._set_cookie(response)
elif operation == 'clear':
self._clear_cookie(response)
return response
def _set_cookie(self, response):
# cookie settings
config = current_app.config
cookie_name = config.get('REMEMBER_COOKIE_NAME', COOKIE_NAME)
duration = config.get('REMEMBER_COOKIE_DURATION', COOKIE_DURATION)
domain = config.get('REMEMBER_COOKIE_DOMAIN')
secure = config.get('REMEMBER_COOKIE_SECURE', COOKIE_SECURE)
httponly = config.get('REMEMBER_COOKIE_HTTPONLY', COOKIE_HTTPONLY)
# prepare data
if self.token_callback:
data = current_user.get_auth_token()
else:
data = encode_cookie(str(session['user_id']))
expires = datetime.utcnow() + duration
# actually set it
response.set_cookie(cookie_name,
value=data,
expires=expires,
domain=domain,
secure=secure,
httponly=httponly)
def _clear_cookie(self, response):
config = current_app.config
cookie_name = config.get('REMEMBER_COOKIE_NAME', COOKIE_NAME)
domain = config.get('REMEMBER_COOKIE_DOMAIN')
response.delete_cookie(cookie_name, domain=domain)
class UserMixin(object):
'''
This provides default implementations for the methods that Flask-Login
expects user objects to have.
'''
def is_active(self):
return True
def is_authenticated(self):
return True
def is_anonymous(self):
return False
def get_id(self):
try:
return unicode(self.id)
except AttributeError:
raise NotImplementedError('No `id` attribute - override `get_id`')
def __eq__(self, other):
'''
Checks the equality of two `UserMixin` objects using `get_id`.
'''
if isinstance(other, UserMixin):
return self.get_id() == other.get_id()
return NotImplemented
def __ne__(self, other):
'''
Checks the inequality of two `UserMixin` objects using `get_id`.
'''
equal = self.__eq__(other)
if equal is NotImplemented:
return NotImplemented
return not equal
if sys.version_info[0] != 2: # pragma: no cover
# Python 3 implicitly set __hash__ to None if we override __eq__
# We set it back to its default implementation
__hash__ = object.__hash__
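# Sketch (illustrative): a minimal user model for this mixin only needs an
# ``id`` attribute, since the default get_id() returns unicode(self.id):
#   class User(UserMixin):
#       def __init__(self, user_id):
#           self.id = user_id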
class AnonymousUserMixin(object):
'''
This is the default object for representing an anonymous user.
'''
def is_authenticated(self):
return False
def is_active(self):
return False
def is_anonymous(self):
return True
def get_id(self):
return
def encode_cookie(payload):
'''
This will encode a ``unicode`` value into a cookie, and sign that cookie
with the app's secret key.
:param payload: The value to encode, as `unicode`.
:type payload: unicode
'''
return u'{0}|{1}'.format(payload, _cookie_digest(payload))
def decode_cookie(cookie):
'''
This decodes a cookie given by `encode_cookie`. If verification of the
cookie fails, ``None`` will be implicitly returned.
:param cookie: An encoded cookie.
:type cookie: str
'''
try:
payload, digest = cookie.rsplit(u'|', 1)
if hasattr(digest, 'decode'):
digest = digest.decode('ascii') # pragma: no cover
except ValueError:
return
if safe_str_cmp(_cookie_digest(payload), digest):
return payload
def make_next_param(login_url, current_url):
'''
Reduces the scheme and host from a given URL so it can be passed to
the given `login` URL more efficiently.
:param login_url: The login URL being redirected to.
:type login_url: str
:param current_url: The URL to reduce.
:type current_url: str
'''
l = urlparse(login_url)
c = urlparse(current_url)
if (not l.scheme or l.scheme == c.scheme) and \
(not l.netloc or l.netloc == c.netloc):
return urlunparse(('', '', c.path, c.params, c.query, ''))
return current_url
def login_url(login_view, next_url=None, next_field='next'):
'''
Creates a URL for redirecting to a login page. If only `login_view` is
provided, this will just return the URL for it. If `next_url` is provided,
however, this will append a ``next=URL`` parameter to the query string
so that the login view can redirect back to that URL.
:param login_view: The name of the login view. (Alternately, the actual
URL to the login view.)
:type login_view: str
:param next_url: The URL to give the login view for redirection.
:type next_url: str
:param next_field: What field to store the next URL in. (It defaults to
``next``.)
:type next_field: str
'''
if login_view.startswith(('https://', 'http://', '/')):
base = login_view
else:
base = url_for(login_view)
if next_url is None:
return base
parts = list(urlparse(base))
md = url_decode(parts[4])
md[next_field] = make_next_param(base, next_url)
parts[4] = url_encode(md, sort=True)
return urlunparse(parts)
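# Illustrative sketch (hypothetical endpoint name, not part of the original
# module): inside a request/app context, login_url('login', next_url='/secret')
# resolves the endpoint via url_for and appends the reduced next parameter,
# giving something like '/login?next=%2Fsecret'.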
def make_secure_token(*args, **options):
'''
This will create a secure token that you can use as an authentication
    token for your users. It uses a keyed HMAC signature to prevent people
from guessing the information. (To make it even more effective, if you
will never need to regenerate the token, you can pass some random data
as one of the arguments.)
:param \*args: The data to include in the token.
:type args: args
:param \*\*options: To manually specify a secret key, pass ``key=THE_KEY``.
Otherwise, the ``current_app`` secret key will be used.
:type \*\*options: kwargs
'''
key = options.get('key')
key = _secret_key(key)
l = [s if isinstance(s, bytes) else s.encode('utf-8') for s in args]
payload = b'\0'.join(l)
token_value = hmac.new(key, payload, sha1).hexdigest()
if hasattr(token_value, 'decode'): # pragma: no cover
        token_value = token_value.decode('utf-8')  # ensure text, not bytes
return token_value
def login_fresh():
'''
This returns ``True`` if the current login is fresh.
'''
return session.get('_fresh', False)
def login_user(user, remember=False, force=False):
'''
Logs a user in. You should pass the actual user object to this. If the
user's `is_active` method returns ``False``, they will not be logged in
unless `force` is ``True``.
This will return ``True`` if the log in attempt succeeds, and ``False`` if
it fails (i.e. because the user is inactive).
:param user: The user object to log in.
:type user: object
:param remember: Whether to remember the user after their session expires.
Defaults to ``False``.
:type remember: bool
:param force: If the user is inactive, setting this to ``True`` will log
them in regardless. Defaults to ``False``.
:type force: bool
'''
if not force and not user.is_active():
return False
user_id = getattr(user, current_app.login_manager.id_attribute)()
session['user_id'] = user_id
session['_fresh'] = True
session['_id'] = _create_identifier()
if remember:
session['remember'] = 'set'
_request_ctx_stack.top.user = user
user_logged_in.send(current_app._get_current_object(), user=_get_user())
return True
def logout_user():
'''
Logs a user out. (You do not need to pass the actual user.) This will
also clean up the remember me cookie if it exists.
'''
if 'user_id' in session:
session.pop('user_id')
if '_fresh' in session:
session.pop('_fresh')
cookie_name = current_app.config.get('REMEMBER_COOKIE_NAME', COOKIE_NAME)
if cookie_name in request.cookies:
session['remember'] = 'clear'
user = _get_user()
if user and not user.is_anonymous():
user_logged_out.send(current_app._get_current_object(), user=user)
current_app.login_manager.reload_user()
return True
def confirm_login():
'''
This sets the current session as fresh. Sessions become stale when they
are reloaded from a cookie.
'''
session['_fresh'] = True
session['_id'] = _create_identifier()
user_login_confirmed.send(current_app._get_current_object())
def login_required(func):
'''
If you decorate a view with this, it will ensure that the current user is
logged in and authenticated before calling the actual view. (If they are
not, it calls the :attr:`LoginManager.unauthorized` callback.) For
example::
@app.route('/post')
@login_required
def post():
pass
If there are only certain times you need to require that your user is
logged in, you can do so with::
if not current_user.is_authenticated():
return current_app.login_manager.unauthorized()
...which is essentially the code that this function adds to your views.
It can be convenient to globally turn off authentication when unit
testing. To enable this, if either of the application
configuration variables `LOGIN_DISABLED` or `TESTING` is set to
`True`, this decorator will be ignored.
:param func: The view function to decorate.
:type func: function
'''
@wraps(func)
def decorated_view(*args, **kwargs):
if current_app.login_manager._login_disabled:
return func(*args, **kwargs)
elif not current_user.is_authenticated():
return current_app.login_manager.unauthorized()
return func(*args, **kwargs)
return decorated_view
def fresh_login_required(func):
'''
If you decorate a view with this, it will ensure that the current user's
    login is fresh - i.e. their session was not restored from a 'remember me'
cookie. Sensitive operations, like changing a password or e-mail, should
be protected with this, to impede the efforts of cookie thieves.
If the user is not authenticated, :meth:`LoginManager.unauthorized` is
called as normal. If they are authenticated, but their session is not
fresh, it will call :meth:`LoginManager.needs_refresh` instead. (In that
case, you will need to provide a :attr:`LoginManager.refresh_view`.)
Behaves identically to the :func:`login_required` decorator with respect
    to configuration variables.
:param func: The view function to decorate.
:type func: function
'''
@wraps(func)
def decorated_view(*args, **kwargs):
if current_app.login_manager._login_disabled:
return func(*args, **kwargs)
elif not current_user.is_authenticated():
return current_app.login_manager.unauthorized()
elif not login_fresh():
return current_app.login_manager.needs_refresh()
return func(*args, **kwargs)
return decorated_view
def _get_user():
if has_request_context() and not hasattr(_request_ctx_stack.top, 'user'):
current_app.login_manager._load_user()
return getattr(_request_ctx_stack.top, 'user', None)
def _cookie_digest(payload, key=None):
key = _secret_key(key)
return hmac.new(key, payload.encode('utf-8'), sha1).hexdigest()
def _get_remote_addr():
address = request.headers.get('X-Forwarded-For', request.remote_addr)
if address is not None:
address = address.encode('utf-8')
return address
def _create_identifier():
user_agent = request.headers.get('User-Agent')
if user_agent is not None:
user_agent = user_agent.encode('utf-8')
base = '{0}|{1}'.format(_get_remote_addr(), user_agent)
if str is bytes:
base = unicode(base, 'utf-8', errors='replace') # pragma: no cover
h = md5()
h.update(base.encode('utf8'))
return h.hexdigest()
def _user_context_processor():
return dict(current_user=_get_user())
def _secret_key(key=None):
if key is None:
key = current_app.config['SECRET_KEY']
if isinstance(key, unicode): # pragma: no cover
key = key.encode('latin1') # ensure bytes
return key
# Signals
#: Sent when a user is logged in. In addition to the app (which is the
#: sender), it is passed `user`, which is the user being logged in.
user_logged_in = _signals.signal('logged-in')
#: Sent when a user is logged out. In addition to the app (which is the
#: sender), it is passed `user`, which is the user being logged out.
user_logged_out = _signals.signal('logged-out')
#: Sent when the user is loaded from the cookie. In addition to the app (which
#: is the sender), it is passed `user`, which is the user being reloaded.
user_loaded_from_cookie = _signals.signal('loaded-from-cookie')
#: Sent when the user is loaded from the header. In addition to the app (which
#: is the sender), it is passed `user`, which is the user being reloaded.
user_loaded_from_header = _signals.signal('loaded-from-header')
#: Sent when the user is loaded from the request. In addition to the app (which
#: is the sender), it is passed `user`, which is the user being reloaded.
user_loaded_from_request = _signals.signal('loaded-from-request')
#: Sent when a user's login is confirmed, marking it as fresh. (It is not
#: called for a normal login.)
#: It receives no additional arguments besides the app.
user_login_confirmed = _signals.signal('login-confirmed')
#: Sent when the `unauthorized` method is called on a `LoginManager`. It
#: receives no additional arguments besides the app.
user_unauthorized = _signals.signal('unauthorized')
#: Sent when the `needs_refresh` method is called on a `LoginManager`. It
#: receives no additional arguments besides the app.
user_needs_refresh = _signals.signal('needs-refresh')
#: Sent whenever the user is accessed/loaded. It receives no additional
#: arguments besides the app.
user_accessed = _signals.signal('accessed')
#: Sent whenever session protection takes effect, and a session is either
#: marked non-fresh or deleted. It receives no additional arguments besides
#: the app.
session_protected = _signals.signal('session-protected')
| [
[
[
309,
325
],
[
368,
384
]
],
[
[
345,
356
]
],
[
[
386,
396
]
],
[
[
417,
428
]
],
[
[
441,
454
]
],
[
[
487,
494
]
],
[
[
534,
552
],
[
11656,
11674
],
[
14879,
14897
],
[
15192,
15210
],
[
23926,
23944
],
[
27610,
27628
],
[
27710,
27728
]
],
[
[
554,
559
],
[
6946,
6951
],
[
11218,
11223
]
],
[
[
561,
572
],
[
6778,
6789
],
[
11046,
11057
],
[
12195,
12206
],
[
12289,
12300
],
[
13653,
13664
],
[
15251,
15262
],
[
15579,
15590
],
[
15957,
15968
],
[
16600,
16611
],
[
17530,
17541
],
[
23721,
23732
],
[
23985,
23996
],
[
24358,
24369
],
[
24589,
24600
],
[
24640,
24651
],
[
24944,
24955
],
[
27651,
27662
],
[
28600,
28611
],
[
26035,
26046
],
[
26188,
26199
],
[
27196,
27207
],
[
27349,
27360
],
[
27441,
27452
]
],
[
[
574,
579
],
[
7056,
7061
],
[
7200,
7205
],
[
11289,
11294
],
[
11437,
11442
]
],
[
[
581,
589
],
[
7280,
7288
],
[
11551,
11559
]
],
[
[
610,
617
],
[
7316,
7323
],
[
11589,
11596
],
[
13101,
13108
],
[
13256,
13263
],
[
13318,
13325
],
[
13381,
13388
],
[
13476,
13483
],
[
24440,
24447
],
[
27925,
27932
],
[
27964,
27971
],
[
28119,
28126
]
],
[
[
619,
626
],
[
11727,
11734
],
[
12872,
12879
],
[
13147,
13154
],
[
13571,
13578
],
[
14765,
14772
],
[
14837,
14844
],
[
15077,
15084
],
[
15122,
15129
],
[
16243,
16250
],
[
16276,
16283
],
[
16358,
16365
],
[
17119,
17126
],
[
22864,
22871
],
[
23767,
23774
],
[
23800,
23807
],
[
23829,
23836
],
[
23893,
23900
],
[
24240,
24247
],
[
24257,
24264
],
[
24300,
24307
],
[
24317,
24324
],
[
24465,
24472
],
[
24847,
24854
],
[
24876,
24883
]
],
[
[
628,
635
],
[
21495,
21502
]
],
[
[
637,
656
],
[
27572,
27591
]
],
[
[
684,
693
],
[
1160,
1169
]
],
[
[
722,
732
],
[
1280,
1290
]
],
[
[
763,
775
],
[
19984,
19996
]
],
[
[
802,
812
],
[
21604,
21614
]
],
[
[
814,
824
],
[
21693,
21703
]
],
[
[
847,
855
],
[
17158,
17166
]
],
[
[
857,
866
],
[
1504,
1513
]
],
[
[
889,
894
],
[
25971,
25976
],
[
27132,
27137
]
],
[
[
915,
919
],
[
22591,
22595
],
[
27867,
27871
]
],
[
[
921,
924
],
[
28397,
28400
]
],
[
[
933,
937
],
[
22568,
22572
],
[
27828,
27832
]
],
[
[
945,
953
],
[
4850,
4858
]
],
[
[
961,
964
],
[
969,
972
],
[
18726,
18729
]
],
[
[
1033,
1041
],
[
20394,
20402
],
[
20422,
20430
],
[
21579,
21587
]
],
[
[
1043,
1053
],
[
20564,
20574
],
[
21730,
21740
]
],
[
[
1109,
1117
],
[
20394,
20402
],
[
20422,
20430
],
[
21579,
21587
]
],
[
[
1119,
1129
],
[
20564,
20574
],
[
21730,
21740
]
],
[
[
1134,
1141
],
[
18087,
18094
],
[
28328,
28335
],
[
28657,
28664
]
],
[
[
1149,
1157
],
[
28923,
28931
],
[
29112,
29120
],
[
29322,
29330
],
[
29543,
29551
],
[
29766,
29774
],
[
29990,
29998
],
[
30172,
30180
],
[
30353,
30361
],
[
30501,
30509
],
[
30712,
30720
]
],
[
[
1265,
1277
],
[
17038,
17050
],
[
26136,
26148
],
[
27297,
27309
]
],
[
[
1382,
1393
],
[
12972,
12983
],
[
16676,
16687
],
[
17606,
17617
],
[
24405,
24416
]
],
[
[
1486,
1501
],
[
16747,
16762
]
],
[
[
1599,
1612
],
[
16873,
16886
]
],
[
[
1701,
1716
],
[
16946,
16961
]
],
[
[
1793,
1806
],
[
3279,
3292
]
],
[
[
1924,
1946
],
[
3429,
3451
]
],
[
[
2035,
2050
],
[
3719,
3734
]
],
[
[
2187,
2211
],
[
3889,
3913
]
],
[
[
2289,
2301
],
[
4469,
4481
]
],
[
[
2369,
2385
],
[
13042,
13058
]
],
[
[
2412,
2424
]
],
[
[
17740,
17749
],
[
18368,
18377
]
],
[
[
18943,
18961
],
[
2916,
2934
]
],
[
[
19247,
19260
],
[
17101,
17114
]
],
[
[
19547,
19560
],
[
15003,
15016
]
],
[
[
20060,
20075
],
[
21646,
21661
]
],
[
[
20645,
20654
],
[
7289,
7298
],
[
11560,
11569
]
],
[
[
21754,
21771
]
],
[
[
22765,
22776
],
[
27407,
27418
]
],
[
[
22899,
22909
]
],
[
[
24060,
24071
],
[
11953,
11964
]
],
[
[
24702,
24715
]
],
[
[
24985,
24999
]
],
[
[
26298,
26318
]
],
[
[
27552,
27561
],
[
1299,
1308
],
[
15336,
15345
],
[
15664,
15673
],
[
16043,
16052
],
[
24025,
24034
],
[
24507,
24516
],
[
28524,
28533
]
],
[
[
27754,
27768
],
[
19516,
19530
],
[
19997,
20011
]
],
[
[
27891,
27907
],
[
28260,
28276
]
],
[
[
28080,
28098
],
[
13617,
13635
],
[
23846,
23864
],
[
24893,
24911
]
],
[
[
28468,
28491
],
[
5876,
5899
]
],
[
[
28543,
28554
],
[
22429,
22440
],
[
27799,
27810
]
],
[
[
28906,
28920
],
[
23965,
23979
]
],
[
[
29094,
29109
],
[
24568,
24583
]
],
[
[
29296,
29319
],
[
15297,
15320
]
],
[
[
29517,
29540
],
[
15625,
15648
]
],
[
[
29739,
29763
],
[
16003,
16027
]
],
[
[
29967,
29987
],
[
24918,
24938
]
],
[
[
30152,
30169
],
[
6755,
6772
]
],
[
[
30332,
30350
],
[
11022,
11040
]
],
[
[
30485,
30498
],
[
12176,
12189
]
],
[
[
30692,
30709
],
[
14337,
14354
],
[
14517,
14534
]
]
] |
# -*- coding: utf-8 -*-
# PLEASE DO NOT EDIT THIS FILE, IT IS GENERATED AND WILL BE OVERWRITTEN:
# https://github.com/ccxt/ccxt/blob/master/CONTRIBUTING.md#how-to-contribute-code
from ccxt.base.exchange import Exchange
import math
class lykke (Exchange):
def describe(self):
return self.deep_extend(super(lykke, self).describe(), {
'id': 'lykke',
'name': 'Lykke',
'countries': 'CH',
'version': 'v1',
'rateLimit': 200,
'has': {
'CORS': False,
'fetchOHLCV': False,
'fetchTrades': False,
'fetchOpenOrders': True,
'fetchClosedOrders': True,
'fetchOrders': True,
},
'requiredCredentials': {
'apiKey': True,
'secret': False,
},
'urls': {
'logo': 'https://user-images.githubusercontent.com/1294454/34487620-3139a7b0-efe6-11e7-90f5-e520cef74451.jpg',
'api': {
'mobile': 'https://api.lykkex.com/api',
'public': 'https://hft-api.lykke.com/api',
'private': 'https://hft-api.lykke.com/api',
'test': {
'mobile': 'https://api.lykkex.com/api',
'public': 'https://hft-service-dev.lykkex.net/api',
'private': 'https://hft-service-dev.lykkex.net/api',
},
},
'www': 'https://www.lykke.com',
'doc': [
'https://hft-api.lykke.com/swagger/ui/',
'https://www.lykke.com/lykke_api',
],
'fees': 'https://www.lykke.com/trading-conditions',
},
'api': {
'mobile': {
'get': [
'AllAssetPairRates/{market}',
],
},
'public': {
'get': [
'AssetPairs',
'AssetPairs/{id}',
'IsAlive',
'OrderBooks',
'OrderBooks/{AssetPairId}',
],
},
'private': {
'get': [
'Orders',
'Orders/{id}',
'Wallets',
],
'post': [
'Orders/limit',
'Orders/market',
'Orders/{id}/Cancel',
],
},
},
'fees': {
'trading': {
'tierBased': False,
'percentage': True,
'maker': 0.0, # as of 7 Feb 2018, see https://github.com/ccxt/ccxt/issues/1863
'taker': 0.0, # https://www.lykke.com/cp/wallet-fees-and-limits
},
'funding': {
'tierBased': False,
'percentage': False,
'withdraw': {
'BTC': 0.001,
},
'deposit': {
'BTC': 0,
},
},
},
})
def fetch_balance(self, params={}):
self.load_markets()
balances = self.privateGetWallets()
result = {'info': balances}
for i in range(0, len(balances)):
balance = balances[i]
currency = balance['AssetId']
total = balance['Balance']
used = balance['Reserved']
free = total - used
result[currency] = {
'free': free,
'used': used,
'total': total,
}
return self.parse_balance(result)
def cancel_order(self, id, symbol=None, params={}):
return self.privatePostOrdersIdCancel({'id': id})
def create_order(self, symbol, type, side, amount, price=None, params={}):
self.load_markets()
market = self.market(symbol)
query = {
'AssetPairId': market['id'],
'OrderAction': self.capitalize(side),
'Volume': amount,
}
if type == 'market':
query['Asset'] = market['base'] if (side == 'buy') else market['quote']
elif type == 'limit':
query['Price'] = price
method = 'privatePostOrders' + self.capitalize(type)
result = getattr(self, method)(self.extend(query, params))
return {
'id': None,
'info': result,
}
def fetch_markets(self):
markets = self.publicGetAssetPairs()
result = []
for i in range(0, len(markets)):
market = markets[i]
id = market['Id']
base = market['BaseAssetId']
quote = market['QuotingAssetId']
base = self.common_currency_code(base)
quote = self.common_currency_code(quote)
symbol = market['Name']
precision = {
'amount': market['Accuracy'],
'price': market['InvertedAccuracy'],
}
result.append({
'id': id,
'symbol': symbol,
'base': base,
'quote': quote,
'active': True,
'info': market,
'lot': math.pow(10, -precision['amount']),
'precision': precision,
'limits': {
'amount': {
'min': math.pow(10, -precision['amount']),
'max': math.pow(10, precision['amount']),
},
'price': {
'min': math.pow(10, -precision['price']),
'max': math.pow(10, precision['price']),
},
},
})
return result
def parse_ticker(self, ticker, market=None):
timestamp = self.milliseconds()
symbol = None
if market:
symbol = market['symbol']
ticker = ticker['Result']
return {
'symbol': symbol,
'timestamp': timestamp,
'datetime': self.iso8601(timestamp),
'high': None,
'low': None,
'bid': float(ticker['Rate']['Bid']),
'ask': float(ticker['Rate']['Ask']),
'vwap': None,
'open': None,
'close': None,
'first': None,
'last': None,
'change': None,
'percentage': None,
'average': None,
'baseVolume': None,
'quoteVolume': None,
'info': ticker,
}
def fetch_ticker(self, symbol, params={}):
self.load_markets()
market = self.market(symbol)
ticker = self.mobileGetAllAssetPairRatesMarket(self.extend({
'market': market['id'],
}, params))
return self.parse_ticker(ticker, market)
def parse_order_status(self, status):
if status == 'Pending':
return 'open'
elif status == 'InOrderBook':
return 'open'
elif status == 'Processing':
return 'open'
elif status == 'Matched':
return 'closed'
elif status == 'Cancelled':
return 'canceled'
elif status == 'NotEnoughFunds':
return 'NotEnoughFunds'
elif status == 'NoLiquidity':
return 'NoLiquidity'
elif status == 'UnknownAsset':
return 'UnknownAsset'
elif status == 'LeadToNegativeSpread':
return 'LeadToNegativeSpread'
return status
def parse_order(self, order, market=None):
status = self.parse_order_status(order['Status'])
symbol = None
if not market:
if 'AssetPairId' in order:
if order['AssetPairId'] in self.markets_by_id:
market = self.markets_by_id[order['AssetPairId']]
if market:
symbol = market['symbol']
timestamp = None
if 'LastMatchTime' in order:
timestamp = self.parse8601(order['LastMatchTime'])
elif 'Registered' in order:
timestamp = self.parse8601(order['Registered'])
elif 'CreatedAt' in order:
timestamp = self.parse8601(order['CreatedAt'])
price = self.safe_float(order, 'Price')
amount = self.safe_float(order, 'Volume')
remaining = self.safe_float(order, 'RemainingVolume')
filled = amount - remaining
cost = filled * price
result = {
'info': order,
'id': order['Id'],
'timestamp': timestamp,
'datetime': self.iso8601(timestamp),
'symbol': symbol,
'type': None,
'side': None,
'price': price,
'cost': cost,
'average': None,
'amount': amount,
'filled': filled,
'remaining': remaining,
'status': status,
'fee': None,
}
return result
def fetch_order(self, id, symbol=None, params={}):
response = self.privateGetOrdersId(self.extend({
'id': id,
}, params))
return self.parse_order(response)
def fetch_orders(self, symbol=None, since=None, limit=None, params={}):
response = self.privateGetOrders()
return self.parse_orders(response, None, since, limit)
def fetch_open_orders(self, symbol=None, since=None, limit=None, params={}):
response = self.privateGetOrders(self.extend({
'status': 'InOrderBook',
}, params))
return self.parse_orders(response, None, since, limit)
def fetch_closed_orders(self, symbol=None, since=None, limit=None, params={}):
response = self.privateGetOrders(self.extend({
'status': 'Matched',
}, params))
return self.parse_orders(response, None, since, limit)
def fetch_order_book(self, symbol, limit=None, params={}):
self.load_markets()
response = self.publicGetOrderBooksAssetPairId(self.extend({
'AssetPairId': self.market_id(symbol),
}, params))
orderbook = {
'timestamp': None,
'bids': [],
'asks': [],
}
timestamp = None
for i in range(0, len(response)):
side = response[i]
if side['IsBuy']:
orderbook['bids'] = self.array_concat(orderbook['bids'], side['Prices'])
else:
orderbook['asks'] = self.array_concat(orderbook['asks'], side['Prices'])
timestamp = self.parse8601(side['Timestamp'])
if not orderbook['timestamp']:
orderbook['timestamp'] = timestamp
else:
orderbook['timestamp'] = max(orderbook['timestamp'], timestamp)
if not timestamp:
timestamp = self.milliseconds()
return self.parse_order_book(orderbook, orderbook['timestamp'], 'bids', 'asks', 'Price', 'Volume')
def parse_bid_ask(self, bidask, priceKey=0, amountKey=1):
price = float(bidask[priceKey])
amount = float(bidask[amountKey])
if amount < 0:
amount = -amount
return [price, amount]
def sign(self, path, api='public', method='GET', params={}, headers=None, body=None):
url = self.urls['api'][api] + '/' + self.implode_params(path, params)
query = self.omit(params, self.extract_params(path))
if api == 'public':
if query:
url += '?' + self.urlencode(query)
elif api == 'private':
if method == 'GET':
if query:
url += '?' + self.urlencode(query)
self.check_required_credentials()
headers = {
'api-key': self.apiKey,
'Accept': 'application/json',
'Content-Type': 'application/json',
}
if method == 'POST':
if params:
body = self.json(params)
return {'url': url, 'method': method, 'body': body, 'headers': headers}
| [
[
[
212,
220
],
[
248,
256
]
],
[
[
228,
232
],
[
5525,
5529
],
[
5692,
5696
],
[
5759,
5763
],
[
5879,
5883
],
[
5945,
5949
]
],
[
[
241,
246
],
[
322,
327
]
]
] |
from unittest import TestCase, skip
import copy
import numpy as np
from giant import rotations as at
from giant.ray_tracer import kdtree, shapes, rays
class TestKDTree(TestCase):
def setUp(self):
self.max_depth = 4
tri1 = np.array([[-5, -4, -4.5],
[0, 0, 1],
[0, 0, 0]])
tri2 = tri1+np.array([[2.5, 0, 0]]).T
tri3 = tri2+np.array([[2.5, 0, 0]]).T
tri4 = tri3+np.array([[2.5, 0, 0]]).T
self.triangles = shapes.Triangle64(np.hstack([tri1, tri2, tri3, tri4]).T, 1,
np.arange(12).reshape(-1, 3))
self.shapes = self.triangles
self.stacked_tries = shapes.Triangle64(np.hstack([tri1, tri2,
tri1+[[0], [0], [2.5]],
tri2 + [[0], [0], [2.5]]]).T, 1,
np.arange(12).reshape(-1, 3))
def test_creation(self):
tree = kdtree.KDTree(self.shapes, max_depth=self.max_depth)
self.assertEqual(tree.max_depth, self.max_depth)
self.assertEqual(tree.surface, self.shapes)
def test_build(self):
tree = kdtree.KDTree(self.shapes, max_depth=self.max_depth)
tree.build(force=True, print_progress=False)
facets = np.arange(12).reshape(-1, 3)
tris = [shapes.Triangle64(self.triangles.vertices, self.triangles.albedos, face)
for face in facets]
for tri in tris:
tri.bounding_box = None
node20 = kdtree.KDNode(tris[0])
node21 = kdtree.KDNode(tris[1])
node22 = kdtree.KDNode(tris[2])
node23 = kdtree.KDNode(tris[3])
node10 = kdtree.KDNode()
node10.bounding_box = shapes.AxisAlignedBoundingBox([-5, 0, 0], [-1.5, 1, 0])
node10.left = node20
node10.right = node21
node11 = kdtree.KDNode()
node11.bounding_box = shapes.AxisAlignedBoundingBox([0., 0, 0], [3.5, 1, 0])
node11.left = node22
node11.right = node23
node00 = kdtree.KDNode()
node00.bounding_box = self.triangles.bounding_box
node00.left = node10
node00.right = node11
node00.order = 0
self.assertEqual(node00, tree.root)
def test_trace(self):
with self.subTest(stacked=False):
tree = kdtree.KDTree(self.shapes, max_depth=self.max_depth)
tree.build(force=True, print_progress=False)
starts = np.array([[-4.5, -2, 0.5, 3],
[0.5, 0.5, 0.5, 0.5],
[1, 1, 1, 1]])
directions = np.array([[0, 0, 0, 0],
[0, 0, 0, 0],
[-1, -1, -1, -1]], dtype=np.float64)
rays_test = rays.Rays(starts, directions)
ints = tree.trace(rays_test)
nodes = [tree.root.left.left, tree.root.left.right, tree.root.right.left, tree.root.right.right]
with self.subTest(rotation=None, translation=None):
for ind, int_check in enumerate(ints):
with self.subTest(ignore=False, ind=ind):
self.assertTrue(int_check["check"])
np.testing.assert_array_equal(int_check["intersect"], starts[:, ind]-[0, 0, 1])
np.testing.assert_array_equal(int_check["normal"], self.triangles.normals[ind])
self.assertEqual(int_check["albedo"], 1.0)
self.assertEqual(int_check["facet"], 0+nodes[ind].id*(10**(tree.root.order+1)))
ignore_ind = 2
rays_test.ignore = [nodes[ignore_ind].id*(10**(tree.root.order+1))]*rays_test.num_rays
ints = tree.trace(rays_test)
for ind, int_check in enumerate(ints):
with self.subTest(ignore=True, ind=ind):
if ind != ignore_ind:
# int_check = int_check[0]
self.assertTrue(int_check["check"])
np.testing.assert_array_equal(int_check["intersect"], starts[:, ind]-[0, 0, 1])
np.testing.assert_array_equal(int_check["normal"], self.triangles.normals[ind])
self.assertEqual(int_check["albedo"], 1.0)
self.assertEqual(int_check["facet"], 0+nodes[ind].id*(10**(tree.root.order+1)))
else:
self.assertFalse(int_check["check"])
self.assertTrue(np.isnan(int_check["intersect"]).all())
self.assertTrue(np.isnan(int_check["normal"]).all())
self.assertTrue(np.isnan(int_check["albedo"]))
self.assertEqual(int_check["facet"], -1)
rotation = at.Rotation([0, 0, -np.pi / 2])
rays_test.ignore = None
with self.subTest(rotation=rotation, translation=None):
tc = copy.deepcopy(tree)
tc.rotate(rotation)
ints = tc.trace(rays_test)
self.assertFalse(ints["check"].any())
starts2 = np.array([[0.5, 0.5, 0.5, 0.5],
[4.5, 2, -0.5, -3],
[1, 1, 1, 1]])
directions2 = np.array([[0, 0, 0, 0],
[0, 0, 0, 0],
[-1, -1, -1, -1]], dtype=np.float64)
rays_test2 = rays.Rays(starts2, directions2)
ints = tc.trace(rays_test2)
for ind, int_check in enumerate(ints):
# int_check = int_check[0]
self.assertTrue(int_check["check"])
np.testing.assert_array_almost_equal(int_check["intersect"], starts2[:, ind]-[0, 0, 1])
np.testing.assert_array_equal(int_check["normal"], rotation.matrix@self.triangles.normals[ind])
self.assertEqual(int_check["albedo"], 1.0)
self.assertEqual(int_check["facet"], 0+nodes[ind].id*(10**(tc.root.order+1)))
translation = [0, 0, -0.5]
with self.subTest(rotation=None, translation=translation):
tc = copy.deepcopy(tree)
tc.translate(translation)
ints = tc.trace(rays_test)
for ind, int_check in enumerate(ints):
# int_check = int_check[0]
self.assertTrue(int_check["check"])
np.testing.assert_array_almost_equal(int_check["intersect"], starts[:, ind]-[0, 0, 1.5])
np.testing.assert_array_almost_equal(int_check["normal"], self.triangles.normals[ind])
self.assertEqual(int_check["albedo"], 1.0)
self.assertEqual(int_check["facet"], 0+nodes[ind].id*(10**(tc.root.order+1)))
with self.subTest(rotation=rotation, translation=translation):
tc = copy.deepcopy(tree)
tc.rotate(rotation)
tc.translate(translation)
ints = tc.trace(rays_test)
self.assertFalse(ints["check"].any())
starts2 = np.array([[0.5, 0.5, 0.5, 0.5],
[4.5, 2, -0.5, -3],
[1, 1, 1, 1]])
directions2 = np.array([[0, 0, 0, 0],
[0, 0, 0, 0],
[-1, -1, -1, -1]], dtype=np.float64)
rays_test2 = rays.Rays(starts2, directions2)
ints = tc.trace(rays_test2)
for ind, int_check in enumerate(ints):
# int_check = int_check[0]
self.assertTrue(int_check["check"])
np.testing.assert_array_almost_equal(int_check["intersect"], starts2[:, ind]-[0, 0, 1.5])
np.testing.assert_array_equal(int_check["normal"], rotation.matrix@self.triangles.normals[ind])
self.assertEqual(int_check["albedo"], 1.0)
self.assertEqual(int_check["facet"], 0+nodes[ind].id*(10**(tc.root.order+1)))
rotation = at.Rotation([np.pi / 2, 0, 0])
with self.subTest(rotation=rotation, translation=None):
tc = copy.deepcopy(tree)
tc.rotate(rotation)
ints = tc.trace(rays_test)
self.assertFalse(ints["check"].any())
starts2 = np.array([[-4.5, -2, 0.5, 3],
[1, 1, 1, 1],
[0.5, 0.5, 0.5, 0.5]])
directions2 = np.array([[0, 0, 0, 0],
[-1, -1, -1, -1],
[0, 0, 0, 0]], dtype=np.float64)
rays_test2 = rays.Rays(starts2, directions2)
ints = tc.trace(rays_test2)
for ind, int_check in enumerate(ints):
# int_check = int_check[0]
self.assertTrue(int_check["check"])
np.testing.assert_array_almost_equal(int_check["intersect"], starts2[:, ind]-[0, 1, 0])
np.testing.assert_array_equal(int_check["normal"], rotation.matrix@self.triangles.normals[ind])
self.assertEqual(int_check["albedo"], 1.0)
self.assertEqual(int_check["facet"], 0+nodes[ind].id*(10**(tc.root.order+1)))
translation = [2.5, 0, 0]
with self.subTest(rotation=None, translation=translation):
tc = copy.deepcopy(tree)
tc.translate(translation)
ints = tc.trace(rays_test)
self.assertFalse(ints["check"][0])
for ind, int_check in enumerate(ints[1:]):
ind += 1
# int_check = int_check[0]
self.assertTrue(int_check["check"])
np.testing.assert_array_almost_equal(int_check["intersect"], starts[:, ind]-[0, 0, 1])
np.testing.assert_array_almost_equal(int_check["normal"], self.triangles.normals[ind-1])
self.assertEqual(int_check["albedo"], 1.0)
self.assertEqual(int_check["facet"], 0+nodes[ind-1].id*(10**(tc.root.order+1)))
translation = [0, -0.5, 0]
with self.subTest(rotation=rotation, translation=translation):
with self.subTest(order='rt'):
tc = copy.deepcopy(tree)
tc.rotate(rotation)
tc.translate(translation)
ints = tc.trace(rays_test)
self.assertFalse(ints["check"].any())
starts2 = np.array([[-4.5, -2, 0.5, 3],
[1, 1, 1, 1],
[0.5, 0.5, 0.5, 0.5]])
directions2 = np.array([[0, 0, 0, 0],
[-1, -1, -1, -1],
[0, 0, 0, 0]], dtype=np.float64)
rays_test2 = rays.Rays(starts2, directions2)
ints = tc.trace(rays_test2)
for ind, int_check in enumerate(ints):
# int_check = int_check[0]
self.assertTrue(int_check["check"])
np.testing.assert_array_almost_equal(int_check["intersect"], starts2[:, ind]-[0, 1.5, 0])
np.testing.assert_array_equal(int_check["normal"], rotation.matrix@self.triangles.normals[ind])
self.assertEqual(int_check["albedo"], 1.0)
self.assertEqual(int_check["facet"], 0+nodes[ind].id*(10**(tc.root.order+1)))
with self.subTest(order='tr'):
tc = copy.deepcopy(tree)
tc.translate(translation)
tc.rotate(rotation)
ints = tc.trace(rays_test)
self.assertFalse(ints["check"].any())
starts2 = np.array([[-4.5, -2, 0.5, 3],
[1, 1, 1, 1],
[0, 0, 0, 0]])
directions2 = np.array([[0, 0, 0, 0],
[-1, -1, -1, -1],
[0, 0, 0, 0]], dtype=np.float64)
rays_test2 = rays.Rays(starts2, directions2)
ints = tc.trace(rays_test2)
for ind, int_check in enumerate(ints):
# int_check = int_check[0]
self.assertTrue(int_check["check"])
np.testing.assert_array_almost_equal(int_check["intersect"], starts2[:, ind]-[0, 1, 0])
np.testing.assert_array_equal(int_check["normal"], rotation.matrix@self.triangles.normals[ind])
self.assertEqual(int_check["albedo"], 1.0)
self.assertEqual(int_check["facet"], 0+nodes[ind].id*(10**(tc.root.order+1)))
with self.subTest(stacked=True):
tree = kdtree.KDTree(self.stacked_tries, max_depth=self.max_depth)
tree.build(force=True, print_progress=False)
starts = np.array([[-4.5, -2, -4.5, -2],
[0.5, 0.5, 0.5, 0.5],
[1, 1, 5, 5]])
directions = np.array([[0, 0, 0, 0],
[0, 0, 0, 0],
[-1, -1, -1, -1]], dtype=np.float64)
rays_test = rays.Rays(starts, directions)
ints = tree.trace(rays_test)
nodes = [tree.root.left.left, tree.root.right.left, tree.root.left.right, tree.root.right.right]
for ind, int_check in enumerate(ints):
with self.subTest(ignore=False, ind=ind):
self.assertTrue(int_check["check"])
if ind < 2:
np.testing.assert_array_equal(int_check["intersect"], starts[:, ind]-[0, 0, 1])
else:
np.testing.assert_array_equal(int_check["intersect"], starts[:, ind]-[0, 0, 2.5])
np.testing.assert_array_equal(int_check["normal"], self.triangles.normals[ind])
self.assertEqual(int_check["albedo"], 1.0)
self.assertEqual(int_check["facet"], 0+nodes[ind].id*(10**(tree.root.order+1)))
ignore_ind = 2
rays_test.ignore = [nodes[ignore_ind].id*(10**(tree.root.order+1))]*rays_test.num_rays
ints = tree.trace(rays_test)
for ind, int_check in enumerate(ints):
with self.subTest(ignore=True, ind=ind):
if ind != ignore_ind:
# int_check = int_check[0]
self.assertTrue(int_check["check"])
if ind < 2:
np.testing.assert_array_equal(int_check["intersect"], starts[:, ind]-[0, 0, 1])
else:
np.testing.assert_array_equal(int_check["intersect"], starts[:, ind]-[0, 0, 2.5])
np.testing.assert_array_equal(int_check["normal"], self.triangles.normals[ind])
self.assertEqual(int_check["albedo"], 1.0)
self.assertEqual(int_check["facet"], 0+nodes[ind].id*(10**(tree.root.order+1)))
else:
self.assertTrue(int_check["check"])
np.testing.assert_array_equal(int_check["intersect"], starts[:, ind]-[0, 0, 5])
np.testing.assert_array_equal(int_check["normal"], self.triangles.normals[ind])
self.assertEqual(int_check["albedo"], 1.0)
self.assertEqual(int_check["facet"], 0+nodes[0].id*(10**(tree.root.order+1)))
class TestKDNode(TestCase):
def setUp(self):
tri1 = np.array([[-5, -4, -4.5],
[0, 0, 1],
[0, 0, 0]])
tri2 = tri1+np.array([[2.5, 0, 0]]).T
tri3 = tri2+np.array([[2.5, 0, 0]]).T
tri4 = tri3+np.array([[2.5, 0, 0]]).T
self.triangles = shapes.Triangle64(np.hstack([tri1, tri2, tri3, tri4]).T, 1, np.arange(12).reshape(-1, 3))
def test_creation(self):
node = kdtree.KDNode(surface=self.triangles)
self.assertEqual(node.surface, self.triangles)
self.assertEqual(node.bounding_box, self.triangles.bounding_box)
self.assertIsNone(node.left)
self.assertIsNone(node.right)
def test_compute_bounding_box(self):
node = kdtree.KDNode()
node.surface = self.triangles
node.has_surface = True
node.compute_bounding_box()
self.assertEqual(node.bounding_box, self.triangles.bounding_box)
def test_split(self):
node = kdtree.KDNode(surface=self.triangles)
node.split(force=True, print_progress=False)
left_tris = kdtree.KDNode(shapes.Triangle64(self.triangles.vertices, 1, np.arange(6).reshape(3, -1), compute_bounding_box=False))
right_tris = kdtree.KDNode(shapes.Triangle64(self.triangles.vertices, 1, np.arange(6, 12).reshape(3, -1), compute_bounding_box=False))
self.assertEqual(node.left, left_tris)
self.assertEqual(node.right, right_tris)
def test_trace(self):
# TODO: figure out how to implement this
pass
| [
[
[
21,
29
],
[
172,
180
],
[
16294,
16302
]
],
[
[
31,
35
]
],
[
[
43,
47
],
[
5159,
5163
],
[
6455,
6459
],
[
7200,
7204
],
[
8552,
8556
],
[
9847,
9851
],
[
10770,
10774
],
[
12125,
12129
]
],
[
[
56,
67
],
[
249,
251
],
[
369,
371
],
[
416,
418
],
[
463,
465
],
[
533,
535
],
[
633,
635
],
[
749,
751
],
[
982,
984
],
[
1390,
1392
],
[
2570,
2572
],
[
2724,
2726
],
[
2857,
2859
],
[
3345,
3347
],
[
3450,
3452
],
[
4199,
4201
],
[
4308,
4310
],
[
4711,
4713
],
[
4795,
4797
],
[
4876,
4878
],
[
5020,
5022
],
[
5342,
5344
],
[
5511,
5513
],
[
5654,
5656
],
[
5953,
5955
],
[
6062,
6064
],
[
6742,
6744
],
[
6852,
6854
],
[
7425,
7427
],
[
7594,
7596
],
[
7737,
7739
],
[
8036,
8038
],
[
8147,
8149
],
[
8443,
8445
],
[
8735,
8737
],
[
8904,
8906
],
[
9047,
9049
],
[
9346,
9348
],
[
9455,
9457
],
[
10219,
10221
],
[
10327,
10329
],
[
11015,
11017
],
[
11196,
11198
],
[
11347,
11349
],
[
11670,
11672
],
[
11785,
11787
],
[
12370,
12372
],
[
12543,
12545
],
[
12694,
12696
],
[
13017,
13019
],
[
13130,
13132
],
[
13598,
13600
],
[
13754,
13756
],
[
13887,
13889
],
[
14330,
14332
],
[
14460,
14462
],
[
14563,
14565
],
[
15308,
15310
],
[
15446,
15448
],
[
15553,
15555
],
[
15919,
15921
],
[
16024,
16026
],
[
16343,
16345
],
[
16463,
16465
],
[
16510,
16512
],
[
16557,
16559
],
[
16627,
16629
],
[
16669,
16671
],
[
17462,
17464
],
[
17601,
17603
]
],
[
[
87,
102
],
[
5000,
5002
],
[
8430,
8432
]
],
[
[
132,
138
],
[
1059,
1065
],
[
1265,
1271
],
[
1624,
1630
],
[
1665,
1671
],
[
1706,
1712
],
[
1747,
1753
],
[
1788,
1794
],
[
1967,
1973
],
[
2145,
2151
],
[
2437,
2443
],
[
13458,
13464
],
[
16745,
16751
],
[
17047,
17053
],
[
17288,
17294
],
[
17401,
17407
],
[
17541,
17547
]
],
[
[
140,
146
],
[
515,
521
],
[
731,
737
],
[
1435,
1441
],
[
1834,
1840
],
[
2013,
2019
],
[
16609,
16615
],
[
17415,
17421
],
[
17555,
17561
]
],
[
[
148,
152
],
[
2894,
2898
],
[
5696,
5700
],
[
7779,
7783
],
[
9089,
9093
],
[
11393,
11397
],
[
12740,
12744
],
[
13924,
13928
]
],
[
[
161,
171
]
],
[
[
16283,
16293
]
]
] |
#!/usr/bin/python3
from subprocess import call
from sys import argv
from os import path
outPath = argv[1] if len(argv) > 1 else "/etc/dipicar/creds"
duration = 365
rsaLength = 4096
# Generate SSL keys
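# For reference, the subprocess call below amounts to running (with the
# defaults defined above; key.pem/cert.pem land under outPath):
#
#   openssl req -x509 -newkey rsa:4096 \
#       -keyout /etc/dipicar/creds/key.pem -out /etc/dipicar/creds/cert.pem \
#       -days 365 --batch -nodes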
call([
"openssl",
"req",
"-x509",
"-newkey",
"rsa:"+str(rsaLength),
"-keyout", path.join(outPath,"key.pem"),
"-out", path.join(outPath,"cert.pem"),
"-days", str(duration),
"--batch",
"-nodes"
]) | [
[
[
43,
47
],
[
203,
207
]
],
[
[
65,
69
],
[
116,
120
],
[
101,
105
]
],
[
[
85,
89
],
[
311,
315
],
[
354,
358
]
],
[
[
91,
98
],
[
321,
328
],
[
364,
371
]
],
[
[
151,
159
],
[
403,
411
]
],
[
[
166,
175
],
[
283,
292
]
]
] |
import requests
import os
ROOT_URL = 'http://datamall2.mytransport.sg/ltaodataservice'
def get_taxi_availability_request():
result = None
try:
url = '{}/Taxi-Availability'.format(ROOT_URL)
headers = {
'AccountKey': os.getenv('ACCOUNT_KEY'),
'Accept': 'application/json'
}
response = requests.get(url, headers=headers)
print('response status = ', response.status_code)
print('response json = ', response.json())
if response.status_code == 200:
result = response.json()
except Exception as e:
print('error = ', e)
return result
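# Minimal usage sketch (assumes ACCOUNT_KEY is set in the environment and that
# the DataMall response carries its records under a 'value' key, which is not
# validated here):
#
#     data = get_taxi_availability_request()
#     if data is not None:
#         print(len(data.get('value', [])), 'taxi positions reported')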
| [
[
[
7,
15
],
[
351,
359
]
],
[
[
23,
25
],
[
255,
257
]
],
[
[
27,
35
],
[
199,
207
]
],
[
[
94,
123
]
]
] |
# The MIT License (MIT)
# Copyright (c) 2018 by EUMETSAT
#
# Permission is hereby granted, free of charge, to any person obtaining a copy of
# this software and associated documentation files (the "Software"), to deal in
# the Software without restriction, including without limitation the rights to
# use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
# of the Software, and to permit persons to whom the Software is furnished to do
# so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
from ._mappings import MAPPINGS, API_URL_PREFIX
| [
[
[
1139,
1147
]
],
[
[
1149,
1163
]
]
] |
import re
from collections import defaultdict
from datetime import datetime
from elasticsearch_dsl import Keyword, Text
from protean import BaseAggregate, BaseValueObject
from protean.core.model import BaseModel
from protean.fields import DateTime, Integer, String
from protean.fields import Text as ProteanText
from protean.fields import ValueObject
class Person(BaseAggregate):
first_name = String(max_length=50, required=True)
last_name = String(max_length=50, required=True)
age = Integer(default=21)
created_at = DateTime(default=datetime.now())
class Alien(BaseAggregate):
first_name = String(max_length=50, required=True)
last_name = String(max_length=50, required=True)
age = Integer(default=21)
class User(BaseAggregate):
email = String(max_length=255, required=True, unique=True)
password = String(max_length=3026)
class Email(BaseValueObject):
REGEXP = r"\"?([-a-zA-Z0-9.`?{}]+@\w+\.\w+)\"?"
# This is the external facing data attribute
address = String(max_length=254, required=True)
def clean(self):
"""Business rules of Email address"""
errors = defaultdict(list)
if not bool(re.match(Email.REGEXP, self.address)):
errors["address"].append("is invalid")
return errors
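    # Illustrative sketch (hypothetical addresses, not part of the original
    # module): REGEXP accepts a basic local-part@domain.tld shape, so
    #
    #     Email(address="user@example.com").clean()  # -> no errors recorded
    #     Email(address="not-an-email").clean()      # -> {'address': ['is invalid']}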
class ComplexUser(BaseAggregate):
email = ValueObject(Email, required=True)
password = String(required=True, max_length=255)
class Provider(BaseAggregate):
name = ProteanText()
about = ProteanText()
class ProviderCustomModel(BaseModel):
id = Keyword()
name = Text(fields={"raw": Keyword()})
about = Text()
class Meta:
schema = "providers"
class Receiver(BaseAggregate):
name = String()
age = Integer()
| [
[
[
7,
9
],
[
1180,
1182
]
],
[
[
35,
46
],
[
1141,
1152
]
],
[
[
68,
76
],
[
556,
564
]
],
[
[
108,
115
],
[
1561,
1568
],
[
1602,
1609
]
],
[
[
117,
121
],
[
1582,
1586
],
[
1626,
1630
]
],
[
[
143,
156
],
[
369,
382
],
[
586,
599
],
[
752,
765
],
[
1313,
1326
],
[
1445,
1458
],
[
1696,
1709
]
],
[
[
158,
173
],
[
884,
899
]
],
[
[
205,
214
],
[
1540,
1549
]
],
[
[
242,
250
],
[
539,
547
]
],
[
[
252,
259
],
[
502,
509
],
[
719,
726
],
[
1742,
1749
]
],
[
[
261,
267
],
[
402,
408
],
[
455,
461
],
[
619,
625
],
[
672,
678
],
[
780,
786
],
[
846,
852
],
[
1018,
1024
],
[
1390,
1396
],
[
1723,
1729
]
],
[
[
295,
314
],
[
1472,
1483
],
[
1498,
1509
]
],
[
[
342,
353
],
[
1341,
1352
]
],
[
[
362,
368
]
],
[
[
580,
585
]
],
[
[
747,
751
]
],
[
[
878,
883
],
[
1353,
1358
],
[
1189,
1194
]
],
[
[
1301,
1312
]
],
[
[
1436,
1444
]
],
[
[
1520,
1539
]
],
[
[
1687,
1695
]
]
] |
# https://leetcode.com/problems/design-twitter/
#
# algorithms
# Medium (27.98%)
# Total Accepted: 37,655
# Total Submissions: 134,594
from collections import defaultdict
from bisect import insort
class Twitter(object):
def __init__(self):
"""
Initialize your data structure here.
"""
        self.follow_map = defaultdict(set)    # users this user follows
        self.followed_map = defaultdict(set)  # users who follow this user
        self.tweet_map = defaultdict(list)    # merged feed timeline for this user
        self.post_map = defaultdict(list)     # tweets posted by this user
self.tweet_stamp = 0
def postTweet(self, userId, tweetId):
"""
Compose a new tweet.
:type userId: int
:type tweetId: int
:rtype: None
"""
self.post_map[userId].append((self.tweet_stamp, tweetId))
for id in self.followed_map[userId]:
insort(self.tweet_map[id], (self.tweet_stamp, tweetId))
insort(self.tweet_map[userId], (self.tweet_stamp, tweetId))
self.tweet_stamp += 1
def getNewsFeed(self, userId):
"""
Retrieve the 10 most recent tweet ids in the user's news feed. Each item in the news feed must be posted by users who the user followed or by the user herself. Tweets must be ordered from most recent to least recent.
:type userId: int
:rtype: List[int]
"""
length = len(self.tweet_map[userId])
if length <= 10:
arr = self.tweet_map[userId]
else:
arr = self.tweet_map[userId][length - 10:]
return [item[-1] for item in reversed(arr)]
def follow(self, followerId, followeeId):
"""
Follower follows a followee. If the operation is invalid, it should be a no-op.
:type followerId: int
:type followeeId: int
:rtype: None
"""
if followerId == followeeId:
return
if followeeId in self.follow_map[followerId]:
return
self.follow_map[followerId].add(followeeId)
self.followed_map[followeeId].add(followerId)
for stamp, tweetId in self.post_map[followeeId]:
insort(self.tweet_map[followerId], (stamp, tweetId))
def unfollow(self, followerId, followeeId):
"""
Follower unfollows a followee. If the operation is invalid, it should be a no-op.
:type followerId: int
:type followeeId: int
:rtype: None
"""
if followerId == followeeId:
return
if followeeId not in self.follow_map[followerId]:
return
self.follow_map[followerId].remove(followeeId)
self.followed_map[followeeId].remove(followerId)
tweet_map = set()
for stamp, _ in self.post_map[followeeId]:
tweet_map.add(stamp)
tmp = []
for stamp, tweetId in self.tweet_map[followerId]:
if stamp not in tweet_map:
tmp.append((stamp, tweetId))
self.tweet_map[followerId] = tmp
# Your Twitter object will be instantiated and called as such:
# obj = Twitter()
# obj.postTweet(userId,tweetId)
# param_2 = obj.getNewsFeed(userId)
# obj.follow(followerId,followeeId)
# obj.unfollow(followerId,followeeId)
| [
[
[
164,
175
],
[
347,
358
],
[
400,
411
],
[
451,
462
],
[
504,
515
]
],
[
[
195,
201
],
[
853,
859
],
[
918,
924
],
[
2126,
2132
]
],
[
[
210,
217
]
]
] |
from const import result
import random
C, D = True, False
def opponent(r):
if r == result.COOP or r == result.DEFECT:
return True
return False
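# Minimal usage sketch (assumes `const.result` is an enum-like object exposing
# COOP and DEFECT members, as imported above; any other member maps to False):
#
#     opponent(result.COOP)    # -> True
#     opponent(result.DEFECT)  # -> True
#     # any other result value -> False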
# tit for tat
class Tft:
def __init__(self) -> None:
self.score = 0
self.last_reaction = C
def run(self):
return self.last_reaction
def next(self, r):
self.score += r.value
self.last_reaction = opponent(r)
def end(self):
self.last_reaction = C
return self.score
# tit for two tats
class Tftt:
def __init__(self) -> None:
self.score = 0
self.last_reaction = C
self.last_last_reaction = C
def run(self):
        return self.last_reaction or self.last_last_reaction
def next(self, r):
self.score += r.value
self.last_last_reaction = self.last_reaction
self.last_reaction = opponent(r)
def end(self):
self.last_reaction = C
self.last_last_reaction = C
return self.score
# always coop
class AlwaysCoop:
def __init__(self) -> None:
self.score = 0
def run(self):
return C
def next(self, r):
self.score += r.value
def end(self):
return self.score
# always defect
class AlwaysDefect:
def __init__(self) -> None:
self.score = 0
def run(self):
return D
def next(self, r):
self.score += r.value
def end(self):
return self.score
# perfect random(50%)
class Random:
def __init__(self) -> None:
self.score = 0
def run(self):
return random.choice([C, D])
def next(self, r):
self.score += r.value
def end(self):
return self.score
# first defect, opponent coop rate - coop(>50%) / defect(<=50%)
class Downing:
def __init__(self) -> None:
self.score = 0
self.game_count = 0
self.coop_count = 0
def run(self):
if self.game_count == 0:
return D
if self.coop_count / self.game_count > 0.5:
return C
return D
def next(self, r):
self.score += r.value
self.game_count += 1
if opponent(r):
self.coop_count += 1
def end(self):
self.game_count = self.coop_count = 0
return self.score
# first coop, opponent coop rate - coop(>=50%) / defect(<50%)
class Downing2:
def __init__(self) -> None:
self.score = 0
self.game_count = 0
self.coop_count = 0
def run(self):
if self.game_count == 0:
return C
if self.coop_count / self.game_count >= 0.5:
return C
return D
def next(self, r):
self.score += r.value
self.game_count += 1
if opponent(r):
self.coop_count += 1
def end(self):
self.game_count = self.coop_count = 0
return self.score
# coop, always defect once defected
class Grudger:
def __init__(self) -> None:
self.score = 0
self.defected = False
def run(self):
if self.defected:
return D
return C
def next(self, r):
self.score += r.value
if not opponent(r):
self.defected = True
def end(self):
return self.score
# tft, but defects at a 10% rate
class Joss:
def __init__(self) -> None:
self.score = 0
self.last_reaction = C
def run(self):
if random.randint(1, 10) == 1:
return D
return self.last_reaction
def next(self, r):
self.score += r.value
self.last_reaction = opponent(r)
def end(self):
self.last_reaction = C
return self.score
# wip
class Tester:
def __init__(self) -> None:
self.score = 0
self.decision = True
self.test_tft = False
self.game_count = 0
def run(self):
if self.game_count == 0:
return D
return self.decision
def next(self, r):
self.score += r.value
        if self.game_count == 1 and not opponent(r):
self.test_tft = True
elif self.test_tft:
self.decision = opponent(r)
elif self.game_count <= 2:
self.decision = True
else:
self.decision = not self.decision
self.game_count += 1
def end(self):
self.decision = True
self.test_tft = False
self.game_count = 0
return self.score
| [
[
[
18,
24
],
[
90,
96
],
[
110,
116
]
],
[
[
32,
38
],
[
1608,
1614
],
[
3477,
3483
]
],
[
[
41,
42
],
[
273,
274
],
[
477,
478
],
[
621,
622
],
[
657,
658
],
[
940,
941
],
[
976,
977
],
[
1132,
1133
],
[
1623,
1624
],
[
2075,
2076
],
[
2597,
2598
],
[
2671,
2672
],
[
3150,
3151
],
[
3444,
3445
],
[
3708,
3709
]
],
[
[
44,
45
],
[
1370,
1371
],
[
1626,
1627
],
[
2002,
2003
],
[
2092,
2093
],
[
2688,
2689
],
[
3133,
3134
],
[
3524,
3525
],
[
3972,
3973
]
],
[
[
65,
73
],
[
412,
420
],
[
875,
883
],
[
2192,
2200
],
[
2788,
2796
],
[
3225,
3233
],
[
3643,
3651
],
[
4096,
4104
],
[
4199,
4207
]
],
[
[
184,
187
]
],
[
[
531,
535
]
],
[
[
1026,
1036
]
],
[
[
1262,
1274
]
],
[
[
1506,
1512
]
],
[
[
1806,
1813
]
],
[
[
2400,
2408
]
],
[
[
2970,
2977
]
],
[
[
3354,
3358
]
],
[
[
3750,
3756
]
]
] |
import os
import pytest
import sys
import numpy as np
import shutil
import subprocess
try:
import pymake
except:
msg = "Error. Pymake package is not available.\n"
msg += "Try installing using the following command:\n"
msg += " pip install https://github.com/modflowpy/pymake/zipball/master"
raise Exception(msg)
try:
import flopy
except:
msg = "Error. FloPy package is not available.\n"
msg += "Try installing using the following command:\n"
msg += " pip install flopy"
raise Exception(msg)
import targets
mf6_exe = os.path.abspath(targets.target_dict["mf6"])
testname = "uzf_3lay_srfdchk"
testdir = os.path.join("temp", testname)
os.makedirs(testdir, exist_ok=True)
everything_was_successful = True
iuz_cell_dict = {}
cell_iuz_dict = {}
def build_model():
nlay, nrow, ncol = 3, 1, 10
nper = 1
perlen = [20.0]
nstp = [10]
tsmult = len(perlen) * [1.0]
delr = 1.0
delc = 1.0
strt = -25
botm = [
[-5.0, -4.0, -3.0, -3.0, -2.0, -5.0, -4.0, -3.0, -3.0, -2.0],
[-20, -20, -20, -20, -20, -20, -20, -20, -20, -20],
[-30, -30, -30, -30, -30, -30, -30, -30, -30, -30],
]
nouter, ninner = 100, 300
hclose, rclose, relax = 1e-9, 1e-3, 0.97
tdis_rc = []
for i in range(nper):
tdis_rc.append((perlen[i], nstp[i], tsmult[i]))
name = testname
# build MODFLOW 6 files
ws = testdir
sim = flopy.mf6.MFSimulation(
sim_name=name, version="mf6", exe_name=mf6_exe, sim_ws=ws
)
# create tdis package
tdis = flopy.mf6.ModflowTdis(
sim, time_units="DAYS", nper=nper, perioddata=tdis_rc
)
# create gwf model
gwf = flopy.mf6.ModflowGwf(
sim, modelname=name, newtonoptions="NEWTON", save_flows=True
)
# create iterative model solution and register the gwf model with it
ims = flopy.mf6.ModflowIms(
sim,
print_option="SUMMARY",
complexity="MODERATE",
outer_dvclose=hclose,
outer_maximum=nouter,
under_relaxation="DBD",
inner_maximum=ninner,
inner_dvclose=hclose,
rcloserecord=rclose,
linear_acceleration="BICGSTAB",
scaling_method="NONE",
reordering_method="NONE",
relaxation_factor=relax,
)
sim.register_ims_package(ims, [gwf.name])
dis = flopy.mf6.ModflowGwfdis(
gwf,
nlay=nlay,
nrow=nrow,
ncol=ncol,
delr=delr,
delc=delc,
top=0.0,
botm=botm,
)
# initial conditions
ic = flopy.mf6.ModflowGwfic(gwf, strt=strt)
# node property flow
npf = flopy.mf6.ModflowGwfnpf(
gwf, save_flows=True, icelltype=1, k=100.0, k33=10
)
# aquifer storage
sto = flopy.mf6.ModflowGwfsto(
gwf, iconvert=1, ss=1e-5, sy=0.2, transient=True
)
# chd files
chdval = -3.0
chdspd = {0: [[(2, 0, 0), chdval]]}
chd = flopy.mf6.ModflowGwfchd(
gwf, print_flows=True, stress_period_data=chdspd
)
# transient uzf info
# iuzno cellid landflg ivertcn surfdp vks thtr thts thti eps [bndnm]
uzf_pkdat = [
[0, (0, 0, 1), 1, 8, 6, 1, 0.05, 0.35, 0.05, 4, "uzf01"],
[1, (0, 0, 2), 1, 9, 6, 1, 0.05, 0.35, 0.05, 4, "uzf02"],
[2, (0, 0, 3), 1, 10, 6, 1, 0.05, 0.35, 0.05, 4, "uzf03"],
[3, (0, 0, 4), 1, 11, 6, 1, 0.05, 0.35, 0.05, 4, "uzf04"],
[4, (0, 0, 5), 1, 12, 6, 1, 0.05, 0.35, 0.05, 4, "uzf05"],
[5, (0, 0, 6), 1, 13, 6, 1, 0.05, 0.35, 0.05, 4, "uzf06"],
[6, (0, 0, 7), 1, 14, 6, 1, 0.05, 0.35, 0.05, 4, "uzf07"],
[7, (0, 0, 8), 1, 15, 6, 1, 0.05, 0.35, 0.05, 4, "uzf08"],
[8, (1, 0, 1), 0, 16, 0.1, 1, 0.05, 0.35, 0.05, 4, "uzf08"],
[9, (1, 0, 2), 0, 17, 0.1, 1, 0.05, 0.35, 0.05, 4, "uzf10"],
[10, (1, 0, 3), 0, 18, 0.1, 1, 0.05, 0.35, 0.05, 4, "uzf11"],
[11, (1, 0, 4), 0, 19, 0.1, 1, 0.05, 0.35, 0.05, 4, "uzf12"],
[12, (1, 0, 5), 0, 20, 0.1, 1, 0.05, 0.35, 0.05, 4, "uzf13"],
[13, (1, 0, 6), 0, 21, 0.1, 1, 0.05, 0.35, 0.05, 4, "uzf14"],
[14, (1, 0, 7), 0, 22, 0.1, 1, 0.05, 0.35, 0.05, 4, "uzf15"],
[15, (1, 0, 8), 0, 23, 0.1, 1, 0.05, 0.35, 0.05, 4, "uzf16"],
[16, (2, 0, 1), 0, -1, 0.1, 1, 0.05, 0.35, 0.05, 4, "uzf17"],
[17, (2, 0, 2), 0, -1, 0.1, 1, 0.05, 0.35, 0.05, 4, "uzf18"],
[18, (2, 0, 3), 0, -1, 0.1, 1, 0.05, 0.35, 0.05, 4, "uzf19"],
[19, (2, 0, 4), 0, -1, 0.1, 1, 0.05, 0.35, 0.05, 4, "uzf20"],
[20, (2, 0, 5), 0, -1, 0.1, 1, 0.05, 0.35, 0.05, 4, "uzf21"],
[21, (2, 0, 6), 0, -1, 0.1, 1, 0.05, 0.35, 0.05, 4, "uzf22"],
[22, (2, 0, 7), 0, -1, 0.1, 1, 0.05, 0.35, 0.05, 4, "uzf23"],
[23, (2, 0, 8), 0, -1, 0.1, 1, 0.05, 0.35, 0.05, 4, "uzf24"],
]
for itm in uzf_pkdat:
iuz_cell_dict.update({itm[0]: (itm[1][0], itm[1][1], itm[1][2])})
cell_iuz_dict.update({(itm[1][0], itm[1][1], itm[1][2]): itm[0]})
extdp = 15.0
pet = 0.001
zero = 0.0
uzf_spd = {
0: [
[0, 0.01, pet, extdp, 7.0e-02, zero, zero, zero],
[1, 0.01, pet, extdp, 7.0e-02, zero, zero, zero],
[2, 0.01, pet, extdp, 7.0e-02, zero, zero, zero],
[3, 0.01, pet, extdp, 7.0e-02, zero, zero, zero],
[4, 0.01, pet, extdp, 7.0e-02, zero, zero, zero],
[5, 0.01, pet, extdp, 7.0e-02, zero, zero, zero],
[6, 0.01, pet, extdp, 7.0e-02, zero, zero, zero],
[7, 0.01, pet, extdp, 7.0e-02, zero, zero, zero],
[8, zero, pet, extdp, 7.0e-02, zero, zero, zero],
[9, zero, pet, extdp, 7.0e-02, zero, zero, zero],
[10, zero, pet, extdp, 7.0e-02, zero, zero, zero],
[11, zero, pet, extdp, 7.0e-02, zero, zero, zero],
[12, zero, pet, extdp, 7.0e-02, zero, zero, zero],
[13, zero, pet, extdp, 7.0e-02, zero, zero, zero],
[14, zero, pet, extdp, 7.0e-02, zero, zero, zero],
[15, zero, pet, extdp, 7.0e-02, zero, zero, zero],
]
}
uzf = flopy.mf6.ModflowGwfuzf(
gwf,
print_flows=True,
save_flows=True,
simulate_et=True,
simulate_gwseep=True,
linear_gwet=True,
boundnames=True,
ntrailwaves=15,
nwavesets=40,
nuzfcells=len(uzf_pkdat),
packagedata=uzf_pkdat,
perioddata=uzf_spd,
budget_filerecord="{}.uzf.bud".format(name),
filename="{}.uzf".format(name),
)
# output control
oc = flopy.mf6.ModflowGwfoc(
gwf,
budget_filerecord="{}.cbc".format(name),
head_filerecord="{}.hds".format(name),
headprintrecord=[("COLUMNS", 10, "WIDTH", 15, "DIGITS", 6, "GENERAL")],
saverecord=[("HEAD", "LAST"), ("BUDGET", "LAST")],
printrecord=[("HEAD", "LAST"), ("BUDGET", "LAST")],
filename="{}.oc".format(name),
)
return sim
# - No need to change any code below
def test_mf6model():
# build and run the test model
sim = build_model()
sim.write_simulation()
sim.run_simulation()
# ensure that the error msg is contained in the mfsim.lst file
f = open(os.path.join(testdir, "mfsim.lst"), "r")
lines = f.readlines()
error_count = 0
expected_msg = False
for line in lines:
if "SURFDEP" and "cannot" in line:
expected_msg = True
error_count += 1
assert error_count == 8, (
"error count = " + str(error_count) + "but should equal 8"
)
print("Finished running surfdep check")
return
def main():
# build and run the test model
sim = build_model()
sim.write_simulation()
sim.run_simulation()
# ensure that the error msg is contained in the mfsim.lst file
f = open(os.path.join(testdir, "mfsim.lst"), "r")
lines = f.readlines()
error_count = 0
expected_msg = False
for line in lines:
if "SURFDEP" and "cannot" in line:
expected_msg = True
error_count += 1
assert error_count == 8, (
"error count = " + str(error_count) + "but should equal 8"
)
print("Finished running surfdep check")
return
if __name__ == "__main__":
# print message
print("standalone run of {}".format(os.path.basename(__file__)))
# run main routine
main()
| [
[
[
7,
9
],
[
560,
562
],
[
644,
646
],
[
675,
677
],
[
8277,
8279
],
[
7180,
7182
],
[
7787,
7789
]
],
[
[
17,
23
]
],
[
[
31,
34
]
],
[
[
42,
53
]
],
[
[
61,
67
]
],
[
[
75,
85
]
],
[
[
103,
109
]
],
[
[
122,
125
],
[
176,
179
]
],
[
[
350,
355
],
[
1427,
1432
],
[
1561,
1566
],
[
1686,
1691
],
[
1867,
1872
],
[
2347,
2352
],
[
2557,
2562
],
[
2632,
2637
],
[
2755,
2760
],
[
2928,
2933
],
[
6070,
6075
],
[
6535,
6540
]
],
[
[
368,
371
],
[
421,
424
]
],
[
[
541,
548
],
[
576,
583
]
],
[
[
550,
557
],
[
1498,
1505
]
],
[
[
604,
612
],
[
665,
673
],
[
1362,
1370
]
],
[
[
634,
641
],
[
687,
694
],
[
1409,
1416
],
[
7193,
7200
],
[
7800,
7807
]
],
[
[
711,
736
]
],
[
[
745,
758
],
[
4827,
4840
]
],
[
[
764,
777
],
[
4901,
4914
]
],
[
[
789,
800
],
[
7033,
7044
],
[
7640,
7651
]
],
[
[
6971,
6984
]
],
[
[
7587,
7591
],
[
8334,
8338
]
]
] |
import numpy as np
# Read scec input file
fid = open("tpv29_tpv30_geometry_25m_data.txt")
line = fid.readline()
line = fid.readline()
header = [float(a) for a in line.split()]
nx, ny, lx, ly = header
roughness = np.loadtxt(fid)
roughness = roughness[:, 4]
fid.close()
# create x and y vectors
x = np.linspace(-lx / 2, lx / 2, int(nx) + 1)
y = np.linspace(0, ly, int(ny) + 1)
# write mytopo_tpv29
fout = open("mytopo_tpv29", "w")
fout.write("%d %d\n" % (nx + 1, ny + 1))
np.savetxt(fout, x, fmt="%f")
np.savetxt(fout, y, fmt="%f")
np.savetxt(fout, roughness, fmt="%f")
fout.close()
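# Sketch for reading the generated file back (assumes the layout written above:
# an "nx+1 ny+1" header line, then x, y, and roughness, one value per line):
#
#     with open("mytopo_tpv29") as f:
#         nx1, ny1 = (int(v) for v in f.readline().split())
#         data = np.loadtxt(f)
#     x, y, roughness = data[:nx1], data[nx1:nx1 + ny1], data[nx1 + ny1:]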
| [
[
[
7,
18
],
[
213,
215
],
[
299,
301
],
[
345,
347
],
[
473,
475
],
[
503,
505
],
[
533,
535
]
],
[
[
43,
46
],
[
98,
101
],
[
120,
123
],
[
224,
227
],
[
257,
260
]
],
[
[
91,
95
]
],
[
[
113,
117
],
[
163,
167
]
],
[
[
135,
141
],
[
194,
200
]
],
[
[
177,
179
],
[
332,
334
],
[
456,
458
]
],
[
[
181,
183
],
[
368,
370
],
[
464,
466
]
],
[
[
185,
187
],
[
312,
314
],
[
320,
322
]
],
[
[
189,
191
],
[
360,
362
]
],
[
[
201,
210
],
[
241,
250
]
],
[
[
229,
238
],
[
550,
559
]
],
[
[
295,
296
],
[
490,
491
]
],
[
[
341,
342
],
[
520,
521
]
],
[
[
399,
403
],
[
432,
436
],
[
484,
488
],
[
514,
518
],
[
544,
548
],
[
571,
575
]
]
] |
import pytest
from ebonite.client import Ebonite
from tests.client.conftest import create_client_hooks
@pytest.fixture
def inmemory_ebnt():
ebnt = Ebonite.inmemory()
yield ebnt
pytest_runtest_protocol, pytest_collect_file = create_client_hooks(inmemory_ebnt, 'inmemory')
| [
[
[
7,
13
],
[
107,
113
]
],
[
[
42,
49
],
[
154,
161
]
],
[
[
84,
103
],
[
237,
256
]
],
[
[
126,
139
],
[
257,
270
]
],
[
[
190,
213
]
],
[
[
215,
234
]
]
] |
"""
example1.py
"A simple example how to use the CubeSat-Power-Estimation tool."
@author: Johan Monster (https://github.com/Hans-Bananendans/)
"""
# Import packages
import numpy as np
import pandas as pd
from mission import Mission
# Defining the config
config = {
"years_passed" : 0, # How many [years] the satellite has been in space for
"battery_capacity" : 81000, # Battery capacity in [W.s] (or: Joule)
"battery_degradation_factor" : 0.04,
"battery_init" : 0.5, # 0.5 = Battery begins at 50% charge
"panel_degradation_factor" : 0.02,
"blip_period" : 30, # Currently unused, telemetry blip period
"blip_duration" : 1, # Currently unused, telemetry blip duration
"no_blips" : ["downlink"], # Currently unused
"orbital_altitude" : 550 # Orbital altitude in [km]
}
# List of the names of all used EPS channels.
channels = ["None", "5V_1", "5V_2", "5V_3", "5V_4", "3.3V_1", \
"3.3V_2", "3.3V_3", "3.3V_4", "Var_rail"]
# Dict of typical voltage supplied to each channel.
channel_voltages = {
"5V_1" : 5,
"5V_2" : 5,
"5V_3" : 5,
"5V_4" : 5,
"3.3V_1" : 3.3,
"3.3V_2" : 3.3,
"3.3V_3" : 3.3,
"3.3V_4" : 3.3,
"Var_rail" : 6.5 # Can between 6.5-8 VDC, highest current is at 6.5V
}
# Dict specifying which device is on which EPS channel
device_channels = {
"adcs" : "5V_4",
"payload_dice" : "5V_3",
"payload_bitflip" : "3.3V_3",
"antenna" : "3.3V_4",
"obc" : "5V_2",
"obc_board" : "5V_2",
"rx" : "Var_rail",
"tx" : "Var_rail",
"eps" : "None",
"sensors_1" : "3.3V_2",
"sensors_2" : "3.3V_4",
}
# List of all possible OpStates the satellite can be in.
# This list must be consistent with the specified power.xlsx
state_list = ["idle","recharge","dice_payload","wheel_unloading", \
"transponder","downlink","safe_mode","recovery_mode", \
"detumbling_mode"]
# Dict of which colour will be used for each OpState whilst plotting
state_colours = {
"idle" : "#ffffff",
"recharge" : "#2ca02c",
"dice_payload" : "#8000ff",
"wheel_unloading" : "#0080ff",
"transponder" : "#ff8000",
"downlink" : "#ff0000",
"safe_mode" : "#4000ff",
"recovery_mode" : "#777777",
"detumbling_mode" : "#ff00ff"
}
# Baby's first satellite schedule
schedule1 = {
0 : "idle",
50 : "downlink",
100 : "recharge"
}
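# (Inferred from m1.propagate(schedule1, tsim=200, dt=1) below: each key is a
# simulation time in [s] at which the satellite switches to the named OpState.)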
# Loading the power frame, or the device/OpState table
power_frame = pd.read_excel('power.xlsx',index_col=0)
# Loading the two power input vectors, generated by CubeSat-Solar-Estimator
p_sun = np.load("P_sun.npy")
p_alb = np.load("P_alb.npy")
# Assembling the mission object
m1 = Mission(config, device_channels, state_list, channels, \
power_frame, p_sun, p_alb)
# Calling the Mission.propagate() method to start the simulation
results = m1.propagate(schedule1, tsim=200, dt=1)
# Plotting
m1.plot_timeline_power(state_colours)
| [
[
[
175,
186
],
[
2814,
2816
],
[
2843,
2845
]
],
[
[
194,
206
],
[
2689,
2691
]
],
[
[
228,
235
],
[
2902,
2909
]
],
[
[
259,
265
],
[
2910,
2916
]
],
[
[
878,
886
],
[
2947,
2955
]
],
[
[
1049,
1065
]
],
[
[
1358,
1373
],
[
2918,
2933
]
],
[
[
1893,
1903
],
[
2935,
2945
]
],
[
[
2134,
2147
],
[
3150,
3163
]
],
[
[
2526,
2535
],
[
3088,
3097
]
],
[
[
2675,
2686
],
[
2972,
2983
]
],
[
[
2806,
2811
],
[
2985,
2990
]
],
[
[
2835,
2840
],
[
2992,
2997
]
],
[
[
2897,
2899
],
[
3075,
3077
],
[
3127,
3129
]
],
[
[
3065,
3072
]
]
] |
from __future__ import annotations
from coredis.response.callbacks import (
DictCallback,
ResponseCallback,
SimpleStringCallback,
)
from coredis.response.utils import flat_pairs_to_dict
from coredis.typing import Any, AnyStr, Mapping, Tuple, Union
class ACLLogCallback(ResponseCallback):
def transform(
self, response: Any, **options: Any
) -> Union[bool, Tuple[Mapping[AnyStr, AnyStr], ...]]:
if options.get("reset"):
return SimpleStringCallback()(response)
else:
return tuple(
DictCallback(transform_function=flat_pairs_to_dict)(r) for r in response
)
| [
[
[
23,
34
]
],
[
[
81,
93
],
[
566,
578
]
],
[
[
99,
115
],
[
284,
300
]
],
[
[
121,
141
],
[
477,
497
]
],
[
[
180,
198
],
[
598,
616
]
],
[
[
226,
229
],
[
346,
349
],
[
362,
365
]
],
[
[
231,
237
],
[
401,
407
],
[
409,
415
]
],
[
[
239,
246
],
[
393,
400
]
],
[
[
248,
253
],
[
387,
392
]
],
[
[
255,
260
],
[
375,
380
]
],
[
[
269,
283
]
]
] |
from tensornetwork.network_components import Node, CopyNode, Edge
_COMPONENTS = {
"Node": Node,
"CopyNode": CopyNode,
"Edge": Edge,
}
def get_component(name):
if name not in _COMPONENTS:
raise ValueError("Component {} does not exist".format(name))
return _COMPONENTS[name]
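# Example: get_component("Node") returns the Node class for dynamic
# construction; an unknown name raises the ValueError above.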
| [
[
[
45,
49
],
[
96,
100
]
],
[
[
51,
59
],
[
118,
126
]
],
[
[
61,
65
],
[
140,
144
]
],
[
[
68,
79
],
[
192,
203
],
[
279,
290
]
],
[
[
154,
167
]
]
] |
from .util import BitFormat
from . import packet
__all__ = ('ImageF0', 'ImageF1', 'ImageF2')
class ImageMessage:
def __repr__(self):
return '<Image Segment>'
class ImageF0(packet.Packet):
"""Image data
.. py:attribute:: segment_length
.. py:attribute:: iph
.. py:attribute:: sum
* 0 - Less than one complete image
* 1 - One complete image
* 2 - Multiple complete images
* 3 - Multiple incomplete images
.. py:attribute:: parts
    Indicates which piece[s] of the frame are contained in the packet:
"""
csdw_format = BitFormat('''
u27 length
u1 iph
u3 sum
u3 parts''')
class Message(packet.Message, ImageMessage):
"""
.. py:attribute:: ipts
        If IPH is true (see above), contains intra-packet timestamp
"""
def __init__(self, *args, **kwargs):
packet.Packet.__init__(self, *args, **kwargs)
if self.iph:
self.Message.FORMAT = BitFormat('u64 ipts')
class ImageF1(packet.Packet):
"""Still imagery
.. py:attribute:: format
* 0 - MIL-STD-2500 National Imagery Transmission Format
* 1 - JPEG File Interchange Format
* 2 - JPEG 2000 (ISO/IEC 154444-1)
* 3 - Portable Network Graphics Format (PNG)
.. py:attribute:: iph
.. py:attribute:: sum
* 0 - Contains less than one complete image
* 1 - Contains one complete image
* 2 - Contains multiple complete images
        * 3 - Contains multiple incomplete images
.. py:attribute:: parts
* 0 - Doesn't contain first or last segment of the image
* 1 - Contains first segment of image
        * 2 - Contains last segment of image
* 3 - Contains both first and last segment of image
"""
csdw_format = BitFormat('''
p23
u4 format
u1 iph
u2 sum
u2 parts''')
class Message(packet.Message, ImageMessage):
"""
.. py:attribute:: ipts
        If IPH is true (see above), contains intra-packet timestamp
.. py:attribute:: length
Length of image or segment (bytes)
"""
def __init__(self, *args, **kwargs):
packet.Packet.__init__(self, *args, **kwargs)
fmt = ''
if self.iph:
fmt = 'u64 ipts\n'
self.Message.FORMAT = BitFormat(fmt + 'u32 length')
class ImageF2(packet.Packet):
"""Dynamic Imagery
.. py:attribute:: format
Refer to chapter 10 standard
.. py:attribute:: iph
.. py:attribute:: sum
* 0 - Contains less than one complete image (segment)
* 1 - Contains one complete image
* 2 - Contains multiple complete images
.. py:attribute:: parts
* 0 - Doesn't contain first or last segment of the image
* 1 - Contains first segment of image
* 2 - Contains last segment of image
"""
csdw_format = BitFormat('''
p21
u6 format
u1 iph
u2 sum
u2 parts''')
class Message(packet.Message, ImageMessage):
"""
.. py:attribute:: ipts
        If IPH is true (see above), contains intra-packet timestamp
.. py:attribute:: length
Length of image or segment (bytes)
"""
def __init__(self, *args, **kwargs):
packet.Packet.__init__(self, *args, **kwargs)
fmt = ''
if self.iph:
fmt = 'u64 ipts\n'
self.Message.FORMAT = BitFormat(fmt + 'u32 length')
| [
[
[
19,
28
],
[
609,
618
],
[
1859,
1868
],
[
2982,
2991
],
[
1024,
1033
],
[
2410,
2419
],
[
3533,
3542
]
],
[
[
43,
49
],
[
192,
198
],
[
712,
718
],
[
1062,
1068
],
[
1973,
1979
],
[
2456,
2462
],
[
3096,
3102
],
[
922,
928
],
[
2265,
2271
],
[
3388,
3394
]
],
[
[
52,
59
]
],
[
[
104,
116
],
[
728,
740
],
[
1989,
2001
],
[
3112,
3124
]
],
[
[
184,
191
]
],
[
[
1054,
1061
]
],
[
[
2448,
2455
]
]
] |
from SDWLE.cards.base import HeroCard
from SDWLE.constants import CHARACTER_CLASS, MINION_TYPE
from SDWLE.powers import MagePower, DruidPower, HunterPower, PaladinPower, PriestPower, RoguePower,\
ShamanPower, WarlockPower, WarriorPower, JaraxxusPower, DieInsect
class Malfurion(HeroCard):
def __init__(self):
super().__init__("Malfurion Stormrage", CHARACTER_CLASS.DRUID, 30, DruidPower)
class Rexxar(HeroCard):
def __init__(self):
super().__init__("Rexxar", CHARACTER_CLASS.HUNTER, 30, HunterPower)
class Jaina(HeroCard):
def __init__(self):
super().__init__("Jaina Proudmoore", CHARACTER_CLASS.MAGE, 30, MagePower)
class Uther(HeroCard):
def __init__(self):
super().__init__("Uther the Lightbringer", CHARACTER_CLASS.PALADIN, 30, PaladinPower)
class Anduin(HeroCard):
def __init__(self):
super().__init__("Anduin Wrynn", CHARACTER_CLASS.PRIEST, 30, PriestPower)
class Valeera(HeroCard):
def __init__(self):
super().__init__("Valeera Sanguinar", CHARACTER_CLASS.ROGUE, 30, RoguePower)
class Thrall(HeroCard):
def __init__(self):
super().__init__("Thrall", CHARACTER_CLASS.SHAMAN, 30, ShamanPower)
class Guldan(HeroCard):
def __init__(self):
super().__init__("Gul'dan", CHARACTER_CLASS.WARLOCK, 30, WarlockPower)
class Garrosh(HeroCard):
def __init__(self):
super().__init__("Garrosh Hellscream", CHARACTER_CLASS.WARRIOR, 30, WarriorPower)
class Jaraxxus(HeroCard):
def __init__(self):
super().__init__("Lord Jaraxxus", CHARACTER_CLASS.WARLOCK, 15, JaraxxusPower, MINION_TYPE.DEMON,
ref_name="Lord Jarraxus (hero)")
class Ragnaros(HeroCard):
def __init__(self):
super().__init__("Ragnaros the Firelord (hero)", CHARACTER_CLASS.ALL, 8, DieInsect)
def hero_for_class(character_class):
if character_class == CHARACTER_CLASS.DRUID:
return Malfurion()
elif character_class == CHARACTER_CLASS.HUNTER:
return Rexxar()
elif character_class == CHARACTER_CLASS.MAGE:
return Jaina()
elif character_class == CHARACTER_CLASS.PRIEST:
return Anduin()
elif character_class == CHARACTER_CLASS.PALADIN:
return Uther()
elif character_class == CHARACTER_CLASS.ROGUE:
return Valeera()
elif character_class == CHARACTER_CLASS.SHAMAN:
return Thrall()
elif character_class == CHARACTER_CLASS.WARLOCK:
return Guldan()
elif character_class == CHARACTER_CLASS.WARRIOR:
return Garrosh()
else:
return Jaina()
__hero_lookup = {"Jaina": Jaina,
"Malfurion": Malfurion,
"Rexxar": Rexxar,
"Anduin": Anduin,
"Uther": Uther,
"Gul'dan": Guldan,
"Valeera": Valeera,
"Thrall": Thrall,
"Garrosh": Garrosh,
"Jaraxxus": Jaraxxus,
"Ragnaros": Ragnaros,
}
def hero_from_name(name):
return __hero_lookup[name]()
| [
[
[
29,
37
],
[
284,
292
],
[
421,
429
],
[
546,
554
],
[
677,
685
],
[
821,
829
],
[
954,
962
],
[
1089,
1097
],
[
1215,
1223
],
[
1345,
1353
],
[
1487,
1495
],
[
1702,
1710
]
],
[
[
66,
81
],
[
367,
382
],
[
491,
506
],
[
626,
641
],
[
763,
778
],
[
897,
912
],
[
1035,
1050
],
[
1159,
1174
],
[
1286,
1301
],
[
1427,
1442
],
[
1564,
1579
],
[
1794,
1809
],
[
1894,
1909
],
[
1972,
1987
],
[
2048,
2063
],
[
2121,
2136
],
[
2197,
2212
],
[
2273,
2288
],
[
2349,
2364
],
[
2425,
2440
],
[
2502,
2517
]
],
[
[
83,
94
],
[
1608,
1619
]
],
[
[
120,
129
],
[
652,
661
]
],
[
[
131,
141
],
[
394,
404
]
],
[
[
143,
154
],
[
519,
530
]
],
[
[
156,
168
],
[
792,
804
]
],
[
[
170,
181
],
[
925,
936
]
],
[
[
183,
193
],
[
1062,
1072
]
],
[
[
200,
211
],
[
1187,
1198
]
],
[
[
213,
225
],
[
1315,
1327
]
],
[
[
227,
239
],
[
1456,
1468
]
],
[
[
241,
254
],
[
1593,
1606
]
],
[
[
256,
265
],
[
1818,
1827
]
],
[
[
274,
283
],
[
2649,
2658
],
[
1932,
1941
]
],
[
[
414,
420
],
[
2687,
2693
],
[
2011,
2017
]
],
[
[
540,
545
],
[
2612,
2617
],
[
2085,
2090
],
[
2577,
2582
]
],
[
[
671,
676
],
[
2756,
2761
],
[
2237,
2242
]
],
[
[
814,
820
],
[
2722,
2728
],
[
2160,
2166
]
],
[
[
946,
953
],
[
2827,
2834
],
[
2311,
2318
]
],
[
[
1082,
1088
],
[
2863,
2869
],
[
2388,
2394
]
],
[
[
1208,
1214
],
[
2791,
2797
],
[
2465,
2471
]
],
[
[
1337,
1344
],
[
2899,
2906
],
[
2542,
2549
]
],
[
[
1478,
1486
],
[
2937,
2945
]
],
[
[
1693,
1701
],
[
2976,
2984
]
],
[
[
1835,
1849
]
],
[
[
2586,
2599
],
[
3044,
3057
]
],
[
[
3011,
3025
]
]
] |
# Cooccurrence matrix construction tools
# for fitting the GloVe model.
import numpy as np
try:
# Python 2 compat
import cPickle as pickle
except ImportError:
import pickle
from .corpus_cython import construct_cooccurrence_matrix
class Corpus(object):
"""
Class for constructing a cooccurrence matrix
from a corpus.
    A dictionary mapping words to ids can optionally
be supplied. If left None, it will be constructed
from the corpus.
"""
def __init__(self, dictionary=None):
self.dictionary = {}
self.dictionary_supplied = False
self.matrix = None
if dictionary is not None:
self._check_dict(dictionary)
self.dictionary = dictionary
self.dictionary_supplied = True
def _check_dict(self, dictionary):
if (np.max(list(dictionary.values())) != (len(dictionary) - 1)):
raise Exception('The largest id in the dictionary '
'should be equal to its length minus one.')
if np.min(list(dictionary.values())) != 0:
raise Exception('Dictionary ids should start at zero')
def fit(self, corpus, window=10, ignore_missing=False):
"""
Perform a pass through the corpus to construct
the cooccurrence matrix.
Parameters:
- iterable of lists of strings corpus
- int window: the length of the (symmetric)
context window used for cooccurrence.
- bool ignore_missing: whether to ignore words missing from
the dictionary (if it was supplied).
Context window distances will be preserved
even if out-of-vocabulary words are
ignored.
If False, a KeyError is raised.
"""
self.matrix = construct_cooccurrence_matrix(corpus,
self.dictionary,
int(self.dictionary_supplied),
int(window),
int(ignore_missing))
def save(self, filename):
with open(filename, 'wb') as savefile:
pickle.dump((self.dictionary, self.matrix),
savefile,
protocol=pickle.HIGHEST_PROTOCOL)
@classmethod
def load(cls, filename):
instance = cls()
with open(filename, 'rb') as savefile:
instance.dictionary, instance.matrix = pickle.load(savefile)
return instance
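# Illustrative usage sketch (the toy corpus and window size are assumptions;
# fit, save and load are the methods defined above):
#   corpus = Corpus()
#   corpus.fit([["glove", "builds", "a", "cooccurrence", "matrix"]], window=5)
#   corpus.save("corpus.model")
#   corpus = Corpus.load("corpus.model")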
| [
[
[
79,
90
],
[
834,
836
],
[
1043,
1045
]
],
[
[
129,
146
],
[
2312,
2318
],
[
2423,
2429
],
[
2620,
2626
]
],
[
[
178,
184
],
[
2312,
2318
],
[
2423,
2429
],
[
2620,
2626
]
],
[
[
213,
242
],
[
1893,
1922
]
],
[
[
251,
257
]
]
] |
r"""Compute action detection performance for the AVA dataset.
Please send any questions about this code to the Google Group ava-dataset-users:
https://groups.google.com/forum/#!forum/ava-dataset-users
Example usage:
python -O get_ava_performance.py \
-l ava/ava_action_list_v2.1_for_activitynet_2018.pbtxt.txt \
-g ava_val_v2.1.csv \
-e ava_val_excluded_timestamps_v2.1.csv \
-d your_results.csv
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import argparse
from collections import defaultdict
import csv
import heapq
import logging
import pprint
import sys
import time
import numpy as np
from evaluation.ava import object_detection_evaluation
from evaluation.ava import standard_fields
def print_time(message, start):
logging.info("==> %g seconds to %s", time.time() - start, message)
def make_image_key(video_id, timestamp):
"""Returns a unique identifier for a video id & timestamp."""
return "%s,%04d" % (video_id, int(timestamp))
def read_csv(csv_file, class_whitelist=None, capacity=0):
"""Loads boxes and class labels from a CSV file in the AVA format.
CSV file format described at https://research.google.com/ava/download.html.
Args:
csv_file: A file object.
class_whitelist: If provided, boxes corresponding to (integer) class labels
not in this set are skipped.
capacity: Maximum number of labeled boxes allowed for each example.
Default is 0 where there is no limit.
Returns:
boxes: A dictionary mapping each unique image key (string) to a list of
boxes, given as coordinates [y1, x1, y2, x2].
labels: A dictionary mapping each unique image key (string) to a list of
      integer class labels, matching the corresponding box in `boxes`.
scores: A dictionary mapping each unique image key (string) to a list of
      score values, matching the corresponding label in `labels`. If
scores are not provided in the csv, then they will default to 1.0.
"""
start = time.time()
entries = defaultdict(list)
boxes = defaultdict(list)
labels = defaultdict(list)
scores = defaultdict(list)
reader = csv.reader(csv_file)
for row in reader:
    assert len(row) in [7, 8], "Wrong number of columns: " + str(row)
image_key = make_image_key(row[0], row[1])
x1, y1, x2, y2 = [float(n) for n in row[2:6]]
action_id = int(row[6])
if class_whitelist and action_id not in class_whitelist:
continue
score = 1.0
if len(row) == 8:
score = float(row[7])
if capacity < 1 or len(entries[image_key]) < capacity:
heapq.heappush(entries[image_key],
(score, action_id, y1, x1, y2, x2))
elif score > entries[image_key][0][0]:
heapq.heapreplace(entries[image_key],
(score, action_id, y1, x1, y2, x2))
for image_key in entries:
# Evaluation API assumes boxes with descending scores
entry = sorted(entries[image_key], key=lambda tup: -tup[0])
for item in entry:
score, action_id, y1, x1, y2, x2 = item
boxes[image_key].append([y1, x1, y2, x2])
labels[image_key].append(action_id)
scores[image_key].append(score)
print_time("read file " + csv_file.name, start)
return boxes, labels, scores
def read_exclusions(exclusions_file):
"""Reads a CSV file of excluded timestamps.
Args:
exclusions_file: A file object containing a csv of video-id,timestamp.
Returns:
A set of strings containing excluded image keys, e.g. "aaaaaaaaaaa,0904",
or an empty set if exclusions file is None.
"""
excluded = set()
if exclusions_file:
reader = csv.reader(exclusions_file)
for row in reader:
      assert len(row) == 2, "Expected only 2 columns, got: " + str(row)
excluded.add(make_image_key(row[0], row[1]))
return excluded
def read_labelmap(labelmap_file):
"""Reads a labelmap without the dependency on protocol buffers.
Args:
labelmap_file: A file object containing a label map protocol buffer.
Returns:
labelmap: The label map in the form used by the object_detection_evaluation
module - a list of {"id": integer, "name": classname } dicts.
class_ids: A set containing all of the valid class id integers.
"""
labelmap = []
class_ids = set()
name = ""
class_id = ""
for line in labelmap_file:
if line.startswith(" name:"):
name = line.split('"')[1]
elif line.startswith(" id:") or line.startswith(" label_id:"):
class_id = int(line.strip().split(" ")[-1])
labelmap.append({"id": class_id, "name": name})
class_ids.add(class_id)
return labelmap, class_ids
def run_evaluation(labelmap, groundtruth, detections, exclusions):
"""Runs evaluations given input files.
Args:
labelmap: file object containing map of labels to consider, in pbtxt format
groundtruth: file object
detections: file object
exclusions: file object or None.
"""
categories, class_whitelist = read_labelmap(labelmap)
logging.info("CATEGORIES (%d):\n%s", len(categories),
pprint.pformat(categories, indent=2))
excluded_keys = read_exclusions(exclusions)
pascal_evaluator = object_detection_evaluation.PascalDetectionEvaluator(
categories)
# Reads the ground truth data.
boxes, labels, _ = read_csv(groundtruth, class_whitelist, 0)
start = time.time()
for image_key in boxes:
if image_key in excluded_keys:
logging.info(("Found excluded timestamp in ground truth: %s. "
"It will be ignored."), image_key)
continue
pascal_evaluator.add_single_ground_truth_image_info(
image_key, {
standard_fields.InputDataFields.groundtruth_boxes:
np.array(boxes[image_key], dtype=float),
standard_fields.InputDataFields.groundtruth_classes:
np.array(labels[image_key], dtype=int),
standard_fields.InputDataFields.groundtruth_difficult:
np.zeros(len(boxes[image_key]), dtype=bool)
})
print_time("convert groundtruth", start)
# Reads detections data.
boxes, labels, scores = read_csv(detections, class_whitelist, 50)
start = time.time()
for image_key in boxes:
if image_key in excluded_keys:
logging.info(("Found excluded timestamp in detections: %s. "
"It will be ignored."), image_key)
continue
pascal_evaluator.add_single_detected_image_info(
image_key, {
standard_fields.DetectionResultFields.detection_boxes:
np.array(boxes[image_key], dtype=float),
standard_fields.DetectionResultFields.detection_classes:
np.array(labels[image_key], dtype=int),
standard_fields.DetectionResultFields.detection_scores:
np.array(scores[image_key], dtype=float)
})
print_time("convert detections", start)
start = time.time()
metrics = pascal_evaluator.evaluate()
print_time("run_evaluator", start)
pprint.pprint(metrics, indent=2)
def parse_arguments():
"""Parses command-line flags.
Returns:
args: a named tuple containing three file objects args.labelmap,
args.groundtruth, and args.detections.
"""
parser = argparse.ArgumentParser()
parser.add_argument(
"-l",
"--labelmap",
help="Filename of label map",
type=argparse.FileType("r"),
default="./ava/ava_action_list_v2.1_for_activitynet_2018.pbtxt.txt")
parser.add_argument(
"-g",
"--groundtruth",
default='./ava_val_v2.2.csv',
help="CSV file containing ground truth.",
type=argparse.FileType("r"),
# required=True
)
parser.add_argument(
"-d",
"--detections",
default='results.csv',
help="CSV file containing inferred action detections.",
type=argparse.FileType("r"),
# required=True
)
parser.add_argument(
"-e",
"--exclusions",
help=("Optional CSV file containing videoid,timestamp pairs to exclude from evaluation."),
type=argparse.FileType("r"),
required=False)
return parser.parse_args()
def main():
logging.basicConfig(level=logging.INFO)
args = parse_arguments()
run_evaluation(**vars(args))
if __name__ == "__main__":
main()
| [
[
[
434,
449
]
],
[
[
473,
481
]
],
[
[
505,
519
]
],
[
[
528,
536
],
[
7223,
7231
],
[
7361,
7369
],
[
7627,
7635
],
[
7852,
7860
],
[
8081,
8089
]
],
[
[
561,
572
],
[
2056,
2067
],
[
2084,
2095
],
[
2113,
2124
],
[
2142,
2153
]
],
[
[
580,
583
],
[
2171,
2174
],
[
3645,
3648
]
],
[
[
591,
596
],
[
2610,
2615
],
[
2751,
2756
]
],
[
[
604,
611
],
[
803,
810
],
[
4999,
5006
],
[
5432,
5439
],
[
6250,
6257
],
[
8178,
8185
],
[
8204,
8211
]
],
[
[
619,
625
],
[
5068,
5074
],
[
6984,
6990
]
],
[
[
633,
636
]
],
[
[
644,
648
],
[
840,
844
],
[
2032,
2036
],
[
5353,
5357
],
[
6171,
6175
],
[
6893,
6897
]
],
[
[
656,
667
],
[
5722,
5724
],
[
5844,
5846
],
[
5967,
5969
],
[
6538,
6540
],
[
6664,
6666
],
[
6788,
6790
]
],
[
[
696,
723
],
[
5174,
5201
]
],
[
[
751,
766
],
[
5655,
5670
],
[
5775,
5790
],
[
5896,
5911
],
[
6467,
6482
],
[
6591,
6606
],
[
6716,
6731
]
],
[
[
773,
783
],
[
3198,
3208
],
[
6024,
6034
],
[
6842,
6852
],
[
6947,
6957
]
],
[
[
876,
890
],
[
2294,
2308
],
[
3782,
3796
]
],
[
[
1031,
1039
],
[
5301,
5309
],
[
6119,
6127
]
],
[
[
3283,
3298
],
[
5124,
5139
]
],
[
[
3838,
3851
],
[
4973,
4986
]
],
[
[
4648,
4662
],
[
8251,
8265
]
],
[
[
7023,
7038
],
[
8229,
8244
]
],
[
[
8166,
8170
],
[
8313,
8317
]
]
] |
from __future__ import absolute_import
# This will make sure the app is always imported when
# Django starts so that shared_task will use this app.
from .celery import app as celery_app
| [
[
[
23,
38
]
],
[
[
173,
190
]
]
] |
import logging
import asyncio
from asyncio import CancelledError
from aiohttp import ClientSession, WSMsgType, ClientTimeout, WSServerHandshakeError
import json
import datetime
import traceback
import typing
from .const import (
LOGIN_URL,
DEX_URL,
Guid
)
from .space import NoonSpace
from .line import NoonLine
from .entity import NoonEntity
from .scene import NoonScene
from .exceptions import (
NoonAuthenticationError,
NoonUnknownError,
NoonProtocolError,
NoonDuplicateIdError
)
_LOGGER = logging.getLogger(__name__)
class Noon(object):
"""Base object for Noon Home."""
@property
async def spaces(self) -> typing.Dict[Guid, NoonSpace]:
if self._spaces is None:
await self._refreshDevices()
return self._spaces
@property
async def lines(self) -> typing.Dict[Guid, NoonLine]:
if self._lines is None:
await self._refreshDevices()
return self._lines
@property
def session(self) -> ClientSession:
return self._session
@property
def event_stream_connected(self) -> bool:
return self._event_stream_connected
@property
def event_stream_error(self) -> str:
return self._event_stream_error
def __init__(self, session, username, password):
"""Create a PyNoone object.
:param username: Noon username
:param password: Noon password
:returns PyNoon base object
"""
# Properties
self._spaces = None
self._lines = None
self._scenes = None
self._all_entities = {}
self._endpoints = {}
self._event_stream_connected = False
self._event_stream_error = None
# Store credentials
self._username = username
self._password = password
self._token = None
self._token_expires = None
# AIOHTTP
self._session = session
self._websocket_task = None
async def authenticate(self) -> bool:
"""Authenticate with Noon and store the authentication token."""
"""Reuse token if we have one."""
if self._token is not None and self._token_expires > datetime.datetime.now():
_LOGGER.debug("Using cached token, which should still be valid")
return True
""" Authenticate user, and get tokens """
_LOGGER.debug("No valid token or token expired. Authenticating...")
payload = {
"email": self._username,
"password": self._password
}
async with self.session.post(LOGIN_URL, json=payload) as login_response:
parsed_response = await login_response.json()
_LOGGER.debug("Response: {}".format(parsed_response))
# Invalid response from noon
if not isinstance(parsed_response, dict):
_LOGGER.error("Response from authentication was not a dictionary")
raise NoonProtocolError
# Single error from noon
if "error" in parsed_response.keys():
raise NoonAuthenticationError
# Errors from Noon
if parsed_response.get("errors") is not None:
_LOGGER.error("Multiple authentication errors from Noon - {}".format(parsed_response["errors"]))
raise NoonUnknownError
# Must have a token and lifetime
try:
self._token = parsed_response["token"]
self._token_expires = datetime.datetime.now() + datetime.timedelta(seconds = (parsed_response["lifetime"]-30))
_LOGGER.debug("Got token from Noon. Expires at {}".format(self._token_expires))
except KeyError:
_LOGGER.error("Failed to get token or lifetime from {}".format(parsed_response))
raise NoonUnknownError
# Get endpoints if needed
await self._refreshEndpoints()
# Success
return True
async def open_eventstream(self, event_loop=None):
"""Create a background task for the event stream."""
if event_loop is None:
_LOGGER.debug("Using main asyncio event loop")
event_loop = asyncio.get_running_loop()
assert self._websocket_task is None or self._websocket_task.cancelled(), "Already running an event stream task"
self._websocket_task = event_loop.create_task(self._internal_eventstream())
async def close_eventstream(self):
"""Close the event stream background task."""
if self._websocket_task is not None and not self._websocket_task.cancelled():
_LOGGER.debug("Canceling websocket task")
self._websocket_task.cancel()
async def _internal_eventstream(self):
"""Loop for connecting to the Noon notification stream."""
keep_looping = True
while keep_looping:
try:
await self.authenticate()
timeout = ClientTimeout(total=8, connect=20, sock_connect=20, sock_read=8)
event_stream_url = "{}/api/notifications".format(self._endpoints["notification-ws"])
_LOGGER.debug("Connecting to notification stream...")
async with self.session.ws_connect(event_stream_url, timeout=timeout, heartbeat=60, headers={"Authorization": "Token {}".format(self._token)}) as ws:
_LOGGER.debug("Connected to notification stream")
self._event_stream_connected = True
self._event_stream_error = None
async for msg in ws:
if msg.type == WSMsgType.TEXT:
_LOGGER.debug("Got websocket message: {}".format(msg.data))
parsed_data = json.loads(msg.data)
changes = parsed_data["data"].get("changes", [])
for change in changes:
await self._handle_change(change)
elif msg.type == WSMsgType.CLOSED:
_LOGGER.error("Socket closed")
raise NoonProtocolError("Notification stream closed unexpectedly")
elif msg.type == WSMsgType.ERROR:
_LOGGER.error("Websocket error")
raise NoonProtocolError("Unknown error on notification stream")
except CancelledError:
_LOGGER.debug("Loop canceled.")
self._event_stream_error = "Canceled"
keep_looping = False
except WSServerHandshakeError:
_LOGGER.error("Loop Fatal: Handshake error")
self._event_stream_error = "Handshake Error"
keep_looping = False
except Exception:
_LOGGER.exception("Loop Fatal: Generic exception during event loop")
self._event_stream_error = "Unknown exception - {}".format(traceback.format_exc())
keep_looping = False
finally:
_LOGGER.debug("Event stream is disconnected.")
self._event_stream_connected = False
async def _handle_change(self, change):
"""Process a change notification."""
guid = change.get("guid", None)
if guid is None:
_LOGGER.error("Cannot process change - no GUID in {}".format(change))
return
affected_entity = self._all_entities.get(guid, None)
if affected_entity is None:
_LOGGER.debug("UNEXPECTED: Got change notification for {}, but not an expected entity! ({}".format(guid, change))
return
_LOGGER.debug("Got change notification for '{}' - {}".format(affected_entity.name, change))
changed_fields = change.get("fields", [])
return await affected_entity.handle_update(changed_fields)
def get_entity(self, entity_id: Guid) -> NoonEntity:
return self._all_entities.get(entity_id, None)
async def _refreshEndpoints(self):
"""Update the noon endpoints for this account"""
if len(self._endpoints) > 0:
return
await self.authenticate()
async with self.session.get(DEX_URL, headers={
"Authorization": "Token {}".format(self._token)
}) as login_response:
parsed_response = await login_response.json()
# Must be a dictionary
if not isinstance(parsed_response, dict):
_LOGGER.error("Response from get endpoints was not a dictionary - {}".format(parsed_response))
raise NoonProtocolError
# Store
try:
self._endpoints = parsed_response["endpoints"]
except KeyError:
_LOGGER.error("Unexpected endpoints response {}".format(parsed_response))
raise NoonUnknownError
def _registerEntity(self, entity: NoonEntity):
""" EVERYTHING """
self._all_entities[entity.guid] = entity
""" SPACE """
if isinstance(entity, NoonSpace):
existingEntity = self._spaces.get(entity.guid, None)
if existingEntity is not None:
if entity.name != existingEntity.name and False:
_LOGGER.error("New space '{}' has same ID as existing space '{}'".format(entity.name, existingEntity.name))
raise NoonDuplicateIdError
else:
return
else:
self._spaces[entity.guid] = entity
""" LINE """
if isinstance(entity, NoonLine):
existingEntity = self._lines.get(entity.guid, None)
if existingEntity is not None:
if entity.name != existingEntity.name and False:
_LOGGER.error("New line '{}' has same ID as existing line '{}'".format(entity.name, existingEntity.name))
raise NoonDuplicateIdError
else:
return
else:
self._lines[entity.guid] = entity
""" SCENE """
if isinstance(entity, NoonScene):
existingEntity = self._scenes.get(entity.guid, None)
if existingEntity is not None:
if entity.name != existingEntity.name and False:
_LOGGER.error("New scene '{}' has same ID as existing scene '{}'".format(entity.name, existingEntity.name))
raise NoonDuplicateIdError
else:
return
else:
self._scenes[entity.guid] = entity
async def _refreshDevices(self):
"""Load the devices (spaces/lines) on this account."""
# Reset cache
self._spaces = {}
self._scenes = {}
self._lines = {}
# Authenticate if needed
await self.authenticate()
# Load the device details
url = "{}/api/query".format(self._endpoints["query"])
headers = {
"Authorization": "Token {}".format(self._token),
"Content-Type": "application/graphql"
}
data = "{spaces {guid name lightsOn activeScene{guid name} lines{guid lineState displayName dimmingLevel multiwayMaster { guid }} scenes{name guid}}}"
async with self.session.post(url, headers=headers, data=data) as discovery_response:
parsed_response = await discovery_response.json()
# Must be a dictionary
if not isinstance(parsed_response, dict):
_LOGGER.error("Response from discovery was not a dictionary - {}".format(parsed_response))
raise NoonProtocolError
# Parse spaces
for space in parsed_response["spaces"]:
this_space = await NoonSpace.from_json(self, space)
_LOGGER.debug("Discovered space {}".format(this_space.name)) | [
[
[
7,
14
],
[
522,
529
]
],
[
[
22,
29
],
[
4227,
4234
]
],
[
[
50,
64
],
[
6451,
6465
]
],
[
[
85,
98
],
[
1001,
1014
]
],
[
[
100,
109
],
[
5647,
5656
],
[
6049,
6058
],
[
6262,
6271
]
],
[
[
111,
124
],
[
4987,
5000
]
],
[
[
126,
148
],
[
6625,
6647
]
],
[
[
156,
160
],
[
5793,
5797
]
],
[
[
168,
176
],
[
2199,
2207
],
[
3514,
3522
],
[
3540,
3548
]
],
[
[
184,
193
],
[
6998,
7007
]
],
[
[
201,
207
],
[
655,
661
],
[
831,
837
]
],
[
[
233,
242
],
[
2595,
2604
]
],
[
[
248,
255
],
[
8260,
8267
]
],
[
[
261,
265
],
[
667,
671
],
[
843,
847
],
[
7951,
7955
]
],
[
[
287,
296
],
[
673,
682
],
[
9109,
9118
],
[
11819,
11828
]
],
[
[
315,
323
],
[
849,
857
],
[
9640,
9648
]
],
[
[
344,
354
],
[
7960,
7970
],
[
8966,
8976
]
],
[
[
374,
383
],
[
10167,
10176
]
],
[
[
414,
437
],
[
3092,
3115
]
],
[
[
443,
459
],
[
3341,
3357
],
[
3847,
3863
],
[
8910,
8926
]
],
[
[
465,
482
],
[
2964,
2981
],
[
6160,
6177
],
[
6374,
6391
],
[
8650,
8667
],
[
11686,
11703
]
],
[
[
488,
508
],
[
9448,
9468
],
[
9975,
9995
],
[
10506,
10526
]
],
[
[
512,
519
],
[
2236,
2243
],
[
2384,
2391
],
[
2709,
2716
],
[
2875,
2882
],
[
3222,
3229
],
[
3619,
3626
],
[
3744,
3751
],
[
4155,
4162
],
[
4651,
4658
],
[
5169,
5176
],
[
5409,
5416
],
[
5691,
5698
],
[
6095,
6102
],
[
6307,
6314
],
[
6483,
6490
],
[
6665,
6672
],
[
6854,
6861
],
[
7096,
7103
],
[
7364,
7371
],
[
7563,
7570
],
[
7705,
7712
],
[
8533,
8540
],
[
8814,
8821
],
[
9314,
9321
],
[
9843,
9850
],
[
10372,
10379
],
[
11573,
11580
],
[
11868,
11875
]
],
[
[
559,
563
]
]
] |
# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved
# Modified by BaseDetection, Inc. and its affiliates. All Rights Reserved
"""
Detection Training Script.
This scripts reads a given config file and runs the training or evaluation.
It is an entry point that is made to train standard models in cvpods.
In order to let one script support training of many models,
this script contains logic that is specific to these built-in models and therefore
may not be suitable for your own project.
For example, your research project perhaps only needs a single "evaluator".
Therefore, we recommend you use cvpods as a library and take
this file as an example of how to use the library.
You may want to write your own script with your datasets and other customizations.
"""
import logging
import os
import pickle as pkl
import sys
from collections import OrderedDict
from colorama import Fore, Style
import torch
from cvpods.checkpoint import DetectionCheckpointer
from cvpods.engine import DefaultTrainer, default_argument_parser, default_setup, hooks, launch
from cvpods.evaluation import build_evaluator, verify_results
from cvpods.modeling import GeneralizedRCNNWithTTA
from cvpods.utils import comm
sys.path.insert(0, '.')
from config import config # noqa: E402
from net import build_model # noqa: E402
class Trainer(DefaultTrainer):
"""
We use the "DefaultTrainer" which contains pre-defined default logic for
standard training workflow. They may not work for you, especially if you
are working on a new research project. In that case you can use the cleaner
"SimpleTrainer", or write your own training loop. You can use
"tools/plain_train_net.py" as an example.
"""
@classmethod
def build_evaluator(cls, cfg, dataset_name, dataset, output_folder=None):
"""
Create evaluator(s) for a given dataset.
This uses the special metadata "evaluator_type" associated with each builtin dataset.
For your own dataset, you can simply create an evaluator manually in your
script and do not have to worry about the hacky if-else logic here.
"""
dump_train = config.GLOBAL.DUMP_TRAIN
return build_evaluator(cfg, dataset_name, dataset, output_folder, dump=dump_train)
@classmethod
def test_with_TTA(cls, cfg, model):
logger = logging.getLogger("cvpods.trainer")
# In the end of training, run an evaluation with TTA
# Only support some R-CNN models.
logger.info("Running inference with test-time augmentation ...")
model = GeneralizedRCNNWithTTA(cfg, model)
res = cls.test(cfg, model, output_folder=os.path.join(cfg.OUTPUT_DIR, "inference_TTA"))
res = OrderedDict({k + "_TTA": v for k, v in res.items()})
return res
def stage_main(args, cfg, build):
cfg.merge_from_list(args.opts)
cfg, logger = default_setup(cfg, args)
model_build_func = build
"""
If you'd like to do anything fancier than the standard training logic,
consider writing your own training loop or subclassing the trainer.
"""
trainer = Trainer(cfg, model_build_func)
trainer.resume_or_load(resume=args.resume)
if args.eval_only:
DetectionCheckpointer(
trainer.model, save_dir=cfg.OUTPUT_DIR, resume=args.resume).resume_or_load(
cfg.MODEL.WEIGHTS, resume=args.resume)
res = Trainer.test(cfg, trainer.model)
if comm.is_main_process():
verify_results(cfg, res)
if cfg.TEST.AUG.ENABLED:
res.update(Trainer.test_with_TTA(cfg, trainer.model))
return res
    # check whether workspace has enough storage space
# assume that a single dumped model is 700Mb
file_sys = os.statvfs(cfg.OUTPUT_DIR)
free_space_Gb = (file_sys.f_bfree * file_sys.f_frsize) / 2**30
eval_space_Gb = (cfg.SOLVER.LR_SCHEDULER.MAX_ITER // cfg.SOLVER.CHECKPOINT_PERIOD) * 700 / 2**10
if eval_space_Gb > free_space_Gb:
logger.warning(f"{Fore.RED}Remaining space({free_space_Gb}GB) "
f"is less than ({eval_space_Gb}GB){Style.RESET_ALL}")
if cfg.TEST.AUG.ENABLED:
trainer.register_hooks(
[hooks.EvalHook(0, lambda: trainer.test_with_TTA(cfg, trainer.model))]
)
trainer.train()
if comm.is_main_process() and cfg.MODEL.AS_PRETRAIN:
# convert last ckpt to pretrain format
convert_to_pretrained_model(
input=os.path.join(cfg.OUTPUT_DIR, "model_final.pth"),
save_path=os.path.join(cfg.OUTPUT_DIR, "model_final_pretrain_weight.pkl")
)
def convert_to_pretrained_model(input, save_path):
obj = torch.load(input, map_location="cpu")
obj = obj["model"]
newmodel = {}
for k, v in obj.items():
if not k.startswith("encoder_q.") and not k.startswith("network"):
continue
old_k = k
if k.startswith("encoder_q."):
k = k.replace("encoder_q.", "")
elif k.startswith("network"):
k = k.replace("network.", "")
print(old_k, "->", k)
newmodel[k] = v.numpy()
res = {
"model": newmodel,
"__author__": "MOCO" if k.startswith("encoder_q.") else "CLS",
"matching_heuristics": True
}
with open(save_path, "wb") as f:
pkl.dump(res, f)
def main(args):
if isinstance(config, list):
assert isinstance(build_model, list) and len(config) == len(build_model)
for cfg, build in zip(config, build_model):
stage_main(args, cfg, build)
else:
stage_main(args, config, build_model)
if __name__ == "__main__":
args = default_argument_parser().parse_args()
if isinstance(config, list):
assert len(config) > 0
print("soft link first config in list to {}".format(config[0].OUTPUT_DIR))
config[0].link_log()
else:
print("soft link to {}".format(config.OUTPUT_DIR))
config.link_log()
print("Command Line Args:", args)
launch(
main,
args.num_gpus,
num_machines=args.num_machines,
machine_rank=args.machine_rank,
dist_url=args.dist_url,
args=(args,),
)
| [
[
[
797,
804
],
[
2357,
2364
]
],
[
[
812,
814
],
[
2669,
2671
],
[
3758,
3760
],
[
4476,
4478
],
[
4547,
4549
]
],
[
[
822,
835
],
[
5331,
5334
]
],
[
[
843,
846
],
[
1223,
1226
]
],
[
[
871,
882
],
[
2730,
2741
]
],
[
[
904,
908
],
[
4017,
4021
]
],
[
[
910,
915
],
[
4121,
4126
]
],
[
[
924,
929
],
[
4684,
4689
]
],
[
[
961,
982
],
[
3233,
3254
]
],
[
[
1009,
1023
],
[
1345,
1359
]
],
[
[
1025,
1048
],
[
5669,
5692
]
],
[
[
1050,
1063
],
[
2891,
2904
]
],
[
[
1065,
1070
],
[
4215,
4220
]
],
[
[
1072,
1078
],
[
6021,
6027
]
],
[
[
1109,
1124
],
[
2206,
2221
]
],
[
[
1126,
1140
],
[
3493,
3507
]
],
[
[
1169,
1191
],
[
2585,
2607
]
],
[
[
1217,
1221
],
[
3457,
3461
],
[
4324,
4328
]
],
[
[
1266,
1272
],
[
5726,
5732
],
[
5760,
5766
],
[
5832,
5838
],
[
5863,
5869
],
[
5933,
5939
],
[
5961,
5967
],
[
2166,
2172
],
[
5384,
5390
],
[
5452,
5458
],
[
5510,
5516
],
[
5608,
5614
]
],
[
[
1303,
1314
],
[
5425,
5436
],
[
5467,
5478
],
[
5518,
5529
],
[
5616,
5627
]
],
[
[
1337,
1344
],
[
3123,
3130
],
[
3413,
3420
],
[
3574,
3581
]
],
[
[
2808,
2818
],
[
5544,
5554
],
[
5591,
5601
]
],
[
[
4627,
4654
],
[
4429,
4456
]
],
[
[
5354,
5358
],
[
6037,
6041
]
],
[
[
5662,
5666
],
[
6011,
6015
],
[
6051,
6055
],
[
6087,
6091
],
[
6127,
6131
],
[
6163,
6167
],
[
6192,
6196
]
]
] |
# Generated by Django 3.2.3 on 2021-05-17 16:22
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('core', '0072_alter_product_region'),
]
operations = [
migrations.RenameField(
model_name='product',
old_name='name_lt',
new_name='name',
),
]
| [
[
[
71,
81
],
[
100,
110
],
[
226,
236
]
],
[
[
90,
99
]
]
] |
__author__ = 'Alexandre Calil Martins Fonseca, github: xandao6'
# region TUTORIAL
'''
Go to region 'FOR SCRIPTING' and use the methods in your script!
EXAMPLE OF USAGE:
from wplay.pyppeteerUtils import pyppeteerConfig as pypConfig
from wplay.pyppeteerUtils import pyppeteerSearch as pypSearch
async def my_script(target):
    pages, browser = await pyp.configure_browser_and_load_whatsapp(pypConfig.websites['whatsapp'])
await pypSearch.search_for_target_and_get_ready_for_conversation(pages[0], target)
message = pypSearch.ask_user_for_message_breakline_mode()
await pypSearch.send_message(pages[0], message)
message2 = pypSearch.ask_user_for_message()
await pypSearch.send_message(pages[0], message2)
'''
# endregion
# region IMPORTS
from wplay.utils.helpers import whatsapp_selectors_dict
from wplay.utils import Logger
from wplay.utils.helpers import logs_path
from pyppeteer.errors import ElementHandleError
# endregion
# region FOR SCRIPTING
async def search_and_select_target(page, target, hide_groups=False):
await __open_new_chat(page)
await __type_in_new_chat_search_bar(page, target)
contact_list_elements_unchecked = await __get_contacts_elements_filtered(page, target)
group_list_elements_unchecked = await __get_groups_elements_filtered(page, target, hide_groups)
contact_titles_unchecked = await __get_contacts_titles_from_elements_unchecked(page, contact_list_elements_unchecked)
group_titles_unchecked = await __get_groups_titles_from_elements_unchecked(page, group_list_elements_unchecked)
contact_list_unchecked = __zip_contact_titles_and_elements_unchecked(
contact_titles_unchecked, contact_list_elements_unchecked)
group_list_unchecked = __zip_group_titles_and_elements_unchecked(
group_titles_unchecked, group_list_elements_unchecked)
contact_tuple = __check_contact_list(target, contact_list_unchecked)
group_tuple = __check_group_list(target, group_list_unchecked)
target_tuple = __get_target_tuple(contact_tuple, group_tuple)
__print_target_tuple(target_tuple)
target_index_choosed = __ask_user_to_choose_the_filtered_target(target_tuple)
choosed_target = __get_choosed_target(target_tuple, target_index_choosed)
await __navigate_to_target(page, choosed_target)
target_focused_title = await __get_focused_target_title(page, target)
if any(choosed_target[0] in i for i in contact_tuple):
complete_target_info = await get_complete_info_on_target(page)
print_complete_target_info(complete_target_info)
await close_contact_info_page(page)
else:
__print_selected_target_title(target_focused_title)
__check_target_focused_title(page, target, target_focused_title)
await __wait_for_message_area(page)
return target_focused_title
async def search_and_select_target_without_new_chat_button(page,target, hide_groups=False):
await __type_in_chat_or_message_search(page,target)
chats_messages_groups_elements_list = await __get_chats_messages_groups_elements(page)
contact_name_index_tuple_list = await __get_contacts_matched_with_query(chats_messages_groups_elements_list)
group_name_index_tuple_list = await __get_groups_matched_with_query(chats_messages_groups_elements_list,hide_groups)
target_tuple = (contact_name_index_tuple_list,group_name_index_tuple_list)
__print_target_tuple(target_tuple)
target_index_chosen = __ask_user_to_choose_the_filtered_target(target_tuple)
#chosen_target will be a tuple (a,b) such that a is the name of the target and b is the
#index of that element in chats_messages_groups_elements_list
chosen_target = __get_choosed_target(target_tuple, target_index_chosen)
await __open_selected_chat(chosen_target[1],chats_messages_groups_elements_list)
target_name = chosen_target[0]
if any(chosen_target[0] in i for i in contact_name_index_tuple_list):
complete_target_info = await get_complete_info_on_target(page)
print_complete_target_info(complete_target_info)
await close_contact_info_page(page)
else:
__print_selected_target_title(target_name)
await __wait_for_message_area(page)
return target_name
# endregion
#region LOGGER create
logger : Logger = Logger.setup_logger('logs',logs_path/'logs.log')
#endregion
# region SEARCH AND SELECT TARGET
async def __type_in_chat_or_message_search(page,target):
try:
print(f'Looking for: {target}')
await page.waitForSelector(
whatsapp_selectors_dict['chat_or_message_search'],
visible=True,
timeout=0
)
await page.waitFor(500)
await page.type(whatsapp_selectors_dict['chat_or_message_search'], target)
await page.waitFor(3000)
except Exception as e:
print(e)
async def __get_chats_messages_groups_elements(page):
chats_messages_groups_elements_list = [] # type : list[int]
try:
chats_messages_groups_elements_list = await page.querySelectorAll\
(whatsapp_selectors_dict['chats_groups_messages_elements'])
return chats_messages_groups_elements_list
except Exception as e:
print(e)
exit()
async def __get_contacts_matched_with_query(chats_groups_messages_elements_list):
contacts_to_choose_from = [] # type : list[str , int]
get_contact_node_title_function = 'node => node.parentNode.getAttribute("title")'
for idx, element in enumerate(chats_groups_messages_elements_list):
try:
contact_name = await element.querySelectorEval(whatsapp_selectors_dict['contact_element'],get_contact_node_title_function)
contacts_to_choose_from.append((contact_name,idx))
except ElementHandleError:
# if it is not a contact element, move to the next one
continue
except Exception as e:
print(e)
return contacts_to_choose_from
async def __get_groups_matched_with_query(chats_groups_messages_elements_list,hide_groups):
groups_to_choose_from = []
if hide_groups:
return groups_to_choose_from
get_group_node_title_function = 'node => node.parentNode.getAttribute("title")'
for idx, element in enumerate(chats_groups_messages_elements_list):
try:
group_name = await element.querySelectorEval(whatsapp_selectors_dict['group_element'],
get_group_node_title_function)
groups_to_choose_from.append((group_name,idx))
except ElementHandleError:
# if it is not a contact element, move to the next one
continue
except Exception as e:
print(e)
return groups_to_choose_from
async def __open_selected_chat(target_index,chats_messages_groups_elements_list):
try:
await chats_messages_groups_elements_list[target_index].click()
except Exception as e:
print(f"This target doesn't exist! Error: {str(e)}")
exit()
async def get_complete_info_on_target(page):
contact_page_elements = []
try:
await page.waitForSelector(
whatsapp_selectors_dict['target_chat_header'],
visible=True,
timeout=3000
)
await page.click(whatsapp_selectors_dict['target_chat_header'])
contact_page_elements = await get_contact_page_elements(page)
complete_target_info = {}
await get_contact_name_info(contact_page_elements[0], complete_target_info)
await get_contact_about_and_phone(contact_page_elements[3], complete_target_info)
await get_contact_groups_common_with_target(complete_target_info, page)
except Exception as e:
print(e)
return complete_target_info
async def get_contact_page_elements(page):
contact_page_elements = []
try:
await page.waitForSelector(
whatsapp_selectors_dict['contact_info_page_elements'],
visible=True,
timeout=8000
)
contact_page_elements = await page.querySelectorAll(whatsapp_selectors_dict['contact_info_page_elements'])
except Exception as e:
print(e)
return contact_page_elements
async def get_contact_name_info(contact_name_element,complete_target_info):
try:
complete_target_info['Name'] = await contact_name_element.querySelectorEval('span > span', 'element => element.innerText')
complete_target_info['Last_seen'] = await contact_name_element.querySelectorEval('div > span:last-of-type > div > span', 'element => element.getAttribute("title")')
except:
print(f'last seen not available')
async def get_contact_about_and_phone(contact_name_element, complete_target_info):
try:
complete_target_info['About'] = await contact_name_element.querySelectorEval('div:nth-child(2) > div > div > span > span', 'element => element.getAttribute("title")')
complete_target_info['Mobile'] = await contact_name_element.querySelectorEval('div:last-of-type > div > div > span > span', 'element => element.innerText')
except Exception as e:
print(e)
async def get_contact_groups_common_with_target(complete_target_info,page):
try:
await page.waitForSelector(
whatsapp_selectors_dict['contact_info_page_group_element_heading'],
            visible=True,
timeout=3000
)
if (await page.evaluate(f'document.querySelector("{whatsapp_selectors_dict["contact_info_page_group_element_heading"]}").innerText'))\
== "Groups in common":
group_elements = await page.querySelectorAll(whatsapp_selectors_dict['contact_info_page_group_elements'])
complete_target_info['Groups'] = [await ele.querySelectorEval('div>div>div:nth-child(2)>div:first-child>div>div>span', 'e => e.getAttribute("title")') for ele in group_elements]
else:
complete_target_info['Groups'] = []
except:
complete_target_info['Groups'] = []
print(f'No groups in common')
async def close_contact_info_page(page):
try:
await page.waitForSelector(
whatsapp_selectors_dict['contact_info_page_close_button'],
            visible=True,
            timeout=5000
)
await page.click(whatsapp_selectors_dict['contact_info_page_close_button'])
except Exception as e:
print(e)
def print_complete_target_info(complete_target_info):
for key in complete_target_info.keys():
if key == "Groups":
print("Groups:")
print(*complete_target_info[key], sep=",")
else:
print(f'{key}: {complete_target_info[key]} ')
async def __open_new_chat(page):
await page.waitForSelector(
whatsapp_selectors_dict['new_chat_button'],
visible=True,
timeout=0
)
await page.waitFor(500)
await page.click(whatsapp_selectors_dict['new_chat_button'])
async def __type_in_new_chat_search_bar(page, target):
print(f'Looking for: {target}')
logger.info('Searching Target')
await page.waitForSelector(
whatsapp_selectors_dict['search_contact_input_new_chat'],
visible=True
)
await page.type(whatsapp_selectors_dict['search_contact_input_new_chat'], target)
await page.waitFor(3000)
async def __get_contacts_elements_filtered(page, target):
contact_list_elements_unchecked = list()
try:
await page.waitForSelector(
whatsapp_selectors_dict['contact_list_elements_filtered_new_chat'],
visible=True,
timeout=3000
)
contact_list_elements_unchecked = await page.querySelectorAll(
whatsapp_selectors_dict['contact_list_elements_filtered_new_chat']
)
except:
print(f'No contact named by "{target}"!')
logger.info('Target not found')
return contact_list_elements_unchecked
async def __get_groups_elements_filtered(page, target, hide_groups=False):
group_list_elements_unchecked = list()
if hide_groups:
return group_list_elements_unchecked
try:
await page.waitForSelector(
whatsapp_selectors_dict['group_list_elements_filtered_new_chat'],
visible=True,
timeout=3000
)
group_list_elements_unchecked = await page.querySelectorAll(
whatsapp_selectors_dict['group_list_elements_filtered_new_chat']
)
except:
print(f'No group named by "{target}"!')
logger.info('Target not found in groups')
return group_list_elements_unchecked
async def __get_contacts_titles_from_elements_unchecked(page, contact_list_elements_unchecked):
contact_titles_unchecked = []
for i in range(len(contact_list_elements_unchecked)):
contact_titles_unchecked\
.append(await page.evaluate(f'document.querySelectorAll("{whatsapp_selectors_dict["contact_list_elements_filtered_new_chat"]}")[{i}].getAttribute("title")'))
return contact_titles_unchecked
async def __get_groups_titles_from_elements_unchecked(page, group_list_elements_unchecked):
group_titles_unchecked = []
for i in range(len(group_list_elements_unchecked)):
group_titles_unchecked.append(await page.evaluate(f'document.querySelectorAll("{whatsapp_selectors_dict["group_list_elements_filtered_new_chat"]}")[{i}].getAttribute("title")'))
return group_titles_unchecked
# contact_list_unchecked is a zip (list of tuples) of contact_titles and
# contact elements, unchecked.
def __zip_contact_titles_and_elements_unchecked(contact_titles_unchecked, contact_list_elements_unchecked):
contact_list_unchecked = list(zip(contact_titles_unchecked, contact_list_elements_unchecked))
return contact_list_unchecked
def __zip_group_titles_and_elements_unchecked(group_titles_unchecked, group_list_elements_unchecked):
group_list_unchecked = list(zip(group_titles_unchecked, group_list_elements_unchecked))
return group_list_unchecked
# __checking_contact_list verify if target is in title, if not we pop from list
def __check_contact_list(target, contact_list_unchecked):
i = 0
while i < len(contact_list_unchecked):
if len(contact_list_unchecked) <= 0:
break
# we can add more verifications if we are getting false-positive contacts
if contact_list_unchecked[i][0].lower().find(target.lower()) == -1:
try:
contact_list_unchecked.pop(i)
except Exception as e:
print(f'Error: {str(e)}')
i -= 1
i += 1
contact_tuple = tuple(contact_list_unchecked)
return contact_tuple
def __check_group_list(target, group_list_unchecked):
i = 0
while i < len(group_list_unchecked):
if len(group_list_unchecked) <= 0:
break
# we can add more verifications if we are getting false-positive groups
if group_list_unchecked[i][0].lower().find(target.lower()) == -1:
try:
group_list_unchecked.pop(i)
except Exception as e:
print(f'Error: {str(e)}')
i -= 1
i += 1
group_tuple = tuple(group_list_unchecked)
return group_tuple
# target_list is like that: (((0, 'a'), (1, 'b')), ((3, 'c'), (4, 'd'))),
# but instead numbers and letters we have titles and elements
# the first index is the contacts and the second is the groups
def __get_target_tuple(contact_tuple, group_tuple):
target_tuple = (contact_tuple, group_tuple)
return target_tuple
def __print_target_tuple(target_tuple):
lenght_of_contacts_tuple = len(target_tuple[0])
lenght_of_groups_tuple = len(target_tuple[1])
for i in range(lenght_of_contacts_tuple):
if lenght_of_contacts_tuple <= 0:
break
if i == 0:
print("Contacts found:")
logger.info('List of Targets')
print(f'{i}: {target_tuple[0][i][0]}')
for i in range(lenght_of_contacts_tuple, lenght_of_groups_tuple + lenght_of_contacts_tuple):
if lenght_of_groups_tuple <= 0:
break
if i == lenght_of_contacts_tuple:
print("Groups found:")
logger.info('List of Target in groups')
print(f'{i}: {target_tuple[1][i-lenght_of_contacts_tuple][0]}')
def __ask_user_to_choose_the_filtered_target(target_tuple):
    # Default to None so callers can detect an empty result list
    # (__get_choosed_target checks for None before indexing).
    target_index_choosed = None
    if len(target_tuple[0] + target_tuple[1]) > 0:
        logger.info('Input Target Number')
        target_index_choosed = int(
            input('Enter the number of the target you wish to choose: '))
    return target_index_choosed
def __get_choosed_target(target_tuple, target_index_choosed):
lenght_of_contacts_tuple = len(target_tuple[0])
if target_index_choosed is None:
exit()
try:
if target_index_choosed < lenght_of_contacts_tuple:
choosed_target = target_tuple[0][target_index_choosed]
elif target_index_choosed >= lenght_of_contacts_tuple:
choosed_target = target_tuple[1][target_index_choosed - lenght_of_contacts_tuple]
else:
print("This target doesn't exist!")
logger.error('Invalid Target')
exit()
except Exception as e:
print(f"This target doesn't exist! Error: {str(e)}")
logger.error('Invalid Target')
exit()
return choosed_target
async def __navigate_to_target(page, choosed_target):
try:
await choosed_target[1].click()
except Exception as e:
print(f"This target doesn't exist! Error: {str(e)}")
logger.error('Invalid Target')
exit()
async def __get_focused_target_title(page, target):
try:
await page.waitForSelector(whatsapp_selectors_dict['target_focused_title'])
target_focused_title = await page.evaluate(f'document.querySelector("{whatsapp_selectors_dict["target_focused_title"]}").getAttribute("title")')
except Exception as e:
print(f'No target selected! Error: {str(e)}')
logger.error('Target not selected from list')
exit()
return target_focused_title
def __print_selected_target_title(target_focused_title):
print(f"You've selected the target named by: {target_focused_title}")
logger.info('Selected Target')
def __check_target_focused_title(page, target, target_focused_title):
if target_focused_title.lower().find(target.lower()) == -1:
print(f"You're focused in the wrong target, {target_focused_title}")
must_continue = str(input("Do you want to continue (yes/no)? "))
accepted_yes = {'yes', 'y'}
if must_continue.lower() in accepted_yes:
pass
else:
exit()
async def __wait_for_message_area(page):
try:
await page.waitForSelector(whatsapp_selectors_dict['message_area'])
except Exception as e:
print(f"You don't belong this group anymore! Error: {str(e)}")
# endregion
| [
[
[
0,
10
]
],
[
[
795,
818
],
[
4515,
4538
],
[
4680,
4703
],
[
5034,
5057
],
[
5576,
5599
],
[
6336,
6359
],
[
7137,
7160
],
[
7270,
7293
],
[
7884,
7907
],
[
8060,
8083
],
[
9249,
9272
],
[
9439,
9462
],
[
9618,
9641
],
[
10125,
10148
],
[
10274,
10297
],
[
10736,
10759
],
[
10875,
10898
],
[
11088,
11111
],
[
11193,
11216
],
[
11450,
11473
],
[
11663,
11686
],
[
12129,
12152
],
[
12338,
12361
],
[
12858,
12881
],
[
13264,
13287
],
[
17676,
17699
],
[
17803,
17826
],
[
18737,
18760
]
],
[
[
843,
849
],
[
4265,
4271
],
[
4256,
4262
]
],
[
[
882,
891
],
[
4292,
4301
]
],
[
[
921,
939
],
[
5730,
5748
],
[
6540,
6558
]
],
[
[
977,
2808
]
],
[
[
2811,
4208
]
],
[
[
4247,
4253
],
[
4256,
4262
],
[
11016,
11022
],
[
11810,
11816
],
[
12481,
12487
],
[
15844,
15850
],
[
16167,
16173
],
[
16400,
16406
],
[
17113,
17119
],
[
17259,
17265
],
[
17532,
17538
],
[
17967,
17973
],
[
18197,
18203
]
],
[
[
4361,
4815
],
[
2913,
2945
]
],
[
[
4818,
5202
],
[
3007,
3043
]
],
[
[
5205,
5925
],
[
3092,
3125
]
],
[
[
5928,
6733
],
[
3203,
3234
]
],
[
[
6736,
7001
],
[
3729,
3749
]
],
[
[
7004,
7750
],
[
2463,
2490
],
[
3950,
3977
]
],
[
[
7753,
8191
],
[
7355,
7380
]
],
[
[
8194,
8636
],
[
7435,
7456
]
],
[
[
8639,
9113
],
[
7519,
7546
]
],
[
[
9116,
10024
],
[
7609,
7646
]
],
[
[
10027,
10376
],
[
2568,
2591
],
[
4055,
4078
]
],
[
[
10383,
10409
],
[
2505,
2531
],
[
3992,
4018
]
],
[
[
10663,
10918
],
[
1056,
1071
]
],
[
[
10921,
11287
],
[
1088,
1117
]
],
[
[
11290,
11884
],
[
1176,
1208
]
],
[
[
11887,
12563
],
[
1265,
1295
]
],
[
[
12566,
12993
],
[
1360,
1405
]
],
[
[
12996,
13395
],
[
1480,
1523
]
],
[
[
13506,
13549
],
[
1590,
1633
]
],
[
[
13748,
13789
],
[
1729,
1770
]
],
[
[
14056,
14076
],
[
1855,
1875
]
],
[
[
14641,
14659
],
[
1926,
1944
]
],
[
[
15405,
15423
],
[
1994,
2012
]
],
[
[
15531,
15551
],
[
2045,
2065
],
[
3367,
3387
]
],
[
[
16285,
16325
],
[
2107,
2147
],
[
3428,
3468
]
],
[
[
16583,
16603
],
[
2183,
2203
],
[
3663,
3683
]
],
[
[
17333,
17577
],
[
2250,
2270
]
],
[
[
17580,
18059
],
[
2326,
2352
]
],
[
[
18066,
18095
],
[
2616,
2645
],
[
4103,
4132
]
],
[
[
18234,
18262
],
[
2672,
2700
]
],
[
[
18652,
18875
],
[
2747,
2770
],
[
4156,
4179
]
]
] |
def beg(arr):
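    # Bucket the values 0, 1 and 2 into separate lists and concatenate them:
    # a counting-sort-style grouping (any other value would be dropped).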
a = []
b = []
c = []
for i in arr:
if i == 0:
a.append(i)
if i == 1:
b.append(i)
if i == 2:
c.append(i)
return a+b+c
a = []
b = [0,0,0]
c = [1,2,1,1,2,1,2]
d = [0,2,1,0,1,0,2,2,2,1,0,2,1,0,1,2,0]
print(beg(a))
print(beg(b))
print(beg(c))
print(beg(d)) | [
[
[
4,
7
],
[
300,
303
],
[
314,
317
],
[
328,
331
],
[
342,
345
]
],
[
[
214,
215
],
[
304,
305
]
],
[
[
221,
222
],
[
318,
319
]
],
[
[
233,
234
],
[
332,
333
]
],
[
[
253,
254
],
[
346,
347
]
]
] |
from django.apps import AppConfig
class MasterAppConfig(AppConfig):
name = 'msa.contrib.master'
verbose_name = 'Master Service'
| [
[
[
24,
33
],
[
58,
67
]
],
[
[
42,
57
]
]
] |
import importlib
import json
import os
import shutil
import subprocess
from pathlib import Path
from shutil import which
from typing import List, Optional, Tuple
from setuptools import find_packages
from typer import Argument, Option, Typer
from .paths import (
GLOBAL_APP_DIR,
GLOBAL_EXTENSIONS_DIR,
GLOBAL_FRONTEND_DIR,
GLOBAL_QUETZ_DIR,
LOCAL_APP_DIR,
)
from .utils import clean_dir, get_extensions_dir, get_federated_extensions
app = Typer()
@app.command()
def link_frontend(
dev_mode: bool = Option(
False, "--development", help="Whether to install it in dev mode or not"
)
) -> None:
"""Intall the Quetz-Frontend"""
assert LOCAL_APP_DIR.exists()
if not GLOBAL_FRONTEND_DIR.exists():
GLOBAL_FRONTEND_DIR.mkdir(parents=True, exist_ok=True)
if GLOBAL_APP_DIR.exists():
if GLOBAL_APP_DIR.is_symlink():
GLOBAL_APP_DIR.unlink()
else:
shutil.rmtree(GLOBAL_APP_DIR)
if dev_mode:
GLOBAL_APP_DIR.symlink_to(LOCAL_APP_DIR)
print(
f"""Symlink created:
Ori: {LOCAL_APP_DIR}
Dest: {GLOBAL_APP_DIR}
"""
)
else:
shutil.copytree(LOCAL_APP_DIR, GLOBAL_APP_DIR, symlinks=True)
print(
f"""App directory copied:
Ori: {LOCAL_APP_DIR}
Dest: {GLOBAL_APP_DIR}
"""
)
@app.command()
def clean_frontend() -> None:
"""Clean the Quetz-Frontend"""
if GLOBAL_APP_DIR.is_file() or GLOBAL_APP_DIR.is_symlink():
GLOBAL_APP_DIR.unlink()
elif GLOBAL_APP_DIR.is_dir():
shutil.rmtree(GLOBAL_APP_DIR)
@app.command()
def install(ext_path: str = Argument(Path(), help="The path of the extension")) -> None:
"""Build and install an extension"""
if not GLOBAL_EXTENSIONS_DIR.exists():
os.mkdir(GLOBAL_EXTENSIONS_DIR)
extension_path = Path(ext_path).resolve()
assert extension_path.joinpath("package.json").exists()
_build_extension(ext_path, True, False)
module, metadata = _get_extensions_metadata(extension_path)
src = Path(extension_path).joinpath(module.__name__, metadata[0]["src"])
dest = GLOBAL_EXTENSIONS_DIR.joinpath(metadata[0]["dest"])
clean_dir(dest)
shutil.copytree(src, dest, symlinks=True)
print(
f"""
Extension installed:
Path: {dest}
"""
)
@app.command()
def develop(ext_path: str = Argument(Path(), help="The path of the extension")) -> None:
"""Build and install an extension in dev mode"""
if not GLOBAL_EXTENSIONS_DIR.exists():
os.mkdir(GLOBAL_EXTENSIONS_DIR)
extension_path = Path(ext_path).resolve()
assert extension_path.joinpath("package.json").exists()
_build_extension(extension_path, True, False)
_develop_extension(extension_path)
@app.command()
def build(
ext_path: str = Argument(Path(), help="The path of the extension"),
dev_mode: bool = Option(False, "--development", help="Build in development"),
) -> None:
"""Build an extension"""
if not GLOBAL_EXTENSIONS_DIR.exists():
os.mkdir(GLOBAL_EXTENSIONS_DIR)
extension_path = Path(ext_path).resolve()
assert extension_path.joinpath("package.json").exists()
_build_extension(extension_path, dev_mode, False)
@app.command()
def watch(ext_path: str = Argument(Path(), help="The path of the extension")) -> None:
"""Watch an extension"""
if not GLOBAL_EXTENSIONS_DIR.exists():
os.mkdir(GLOBAL_EXTENSIONS_DIR)
extension_path = Path(ext_path).resolve()
assert extension_path.joinpath("package.json").exists()
_develop_extension(extension_path)
_build_extension(extension_path, True, True)
@app.command()
def uninstall(ext_name: str = Argument("", help="The name of the extension")) -> None:
"""Uninstall an extension"""
if not GLOBAL_EXTENSIONS_DIR.exists():
os.mkdir(GLOBAL_EXTENSIONS_DIR)
extension_path = Path(GLOBAL_EXTENSIONS_DIR, ext_name)
clean_dir(extension_path)
@app.command()
def list() -> None:
"""List of extensions"""
print(f"Installed extensions:")
print(f"---------------------")
print(f" Installation path: '{GLOBAL_EXTENSIONS_DIR}'\n")
extensions = get_federated_extensions([get_extensions_dir()])
if not extensions:
print("No installed extensions yet")
for ext in extensions.values():
print(f'\t- {Path(ext["ext_path"]).relative_to(GLOBAL_EXTENSIONS_DIR)}')
print()
@app.command()
def clean() -> None:
"""Clean the extensions directory"""
if GLOBAL_EXTENSIONS_DIR.exists():
shutil.rmtree(GLOBAL_EXTENSIONS_DIR)
@app.command()
def paths() -> None:
"""Quetz installation paths"""
print(
f"""
    System configured paths:
Quetz: {GLOBAL_QUETZ_DIR}
Frontend: {GLOBAL_FRONTEND_DIR}
App: {GLOBAL_APP_DIR}
Extensions: {GLOBAL_EXTENSIONS_DIR}
"""
)
def _develop_extension(ext_path: Path):
with (ext_path / "package.json").open(encoding="utf-8") as fid:
ext_data = json.load(fid)
_, metadata = _get_extensions_metadata(ext_path)
src = ext_path / ext_data["quetz"].get("outputDir", metadata[0]["src"])
dest = GLOBAL_EXTENSIONS_DIR.joinpath(ext_data["name"])
clean_dir(dest)
# Create parent directory if extension name is scoped
dest.parent.mkdir(parents=True, exist_ok=True)
dest.symlink_to(src)
print(
f"""
Symlink created:
Ori: {src!s}
Dest: {dest!s}
"""
)
def _build_extension(ext_path: Path, dev_mode: bool = False, watch: bool = False):
if not GLOBAL_APP_DIR.joinpath("package.json").exists():
print(f"Quetz frontend not fount at '{GLOBAL_APP_DIR!s}'")
builder_path = _find_builder(ext_path)
if builder_path is None:
print(f"Could not find @quetz-frontend/builder at {ext_path!s}")
print(f"Extensions require a devDependency '@quetz-frontend/builder'")
return
exe = "node"
exe_path = which(exe)
if not exe_path:
print(f"Could not find {exe}. Install NodeJS.")
exit(1)
command = [exe, str(builder_path), "--core-path", str(GLOBAL_APP_DIR.resolve())]
if dev_mode:
command.append("--development")
command.append("--source-map")
if watch:
command.append("--watch")
command.append(str(ext_path))
print("Building extension")
subprocess.check_call(command)
def _find_builder(ext_path: Path) -> Optional[Path]:
"""Find the package '@quetz-frontend/builder' in the extension dependencies"""
with (ext_path / "package.json").open(encoding="utf-8") as fid:
ext_data = json.load(fid)
    dep_version = ext_data.get("devDependencies", dict()).get("@quetz-frontend/builder")
    dep_version = dep_version or ext_data.get("dependencies", dict()).get(
        "@quetz-frontend/builder"
    )
    if dep_version is None:
        return None
return None
# Find @quetz-frontend/builder in the node_modules directory
target = ext_path
while not (target / "node_modules" / "@quetz-frontend" / "builder").exists():
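        # target.parent == target only happens at the filesystem root, so stop there.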
if target.parent == target:
return None
target = target.parent
return (
target
/ "node_modules"
/ "@quetz-frontend"
/ "builder"
/ "lib"
/ "build-quetzextension.js"
)
def _get_extensions_metadata(
module_path: Path,
) -> Tuple["importlib.ModuleType", List[str]]:
mod_path = module_path.resolve()
if not mod_path.exists():
raise FileNotFoundError(f"The path `{mod_path!s}` does not exist.")
# TODO: Change function name to match lab
try:
module = importlib.import_module(str(module_path))
if hasattr(module, "js_plugin_paths"):
return module, module.js_plugin_paths()
else:
module = None
except Exception:
module = None
# Looking for modules in the package
packages = find_packages(str(mod_path))
for package in packages:
try:
module = importlib.import_module(package)
if hasattr(module, "js_plugin_paths"):
return module, module.js_plugin_paths()
except Exception:
module = None
    raise ModuleNotFoundError(f"There is no extension at {module_path}")
if __name__ == "__main__":
app()
| [
[
[
7,
16
],
[
7699,
7708
],
[
8073,
8082
]
],
[
[
24,
28
],
[
5074,
5078
],
[
6692,
6696
]
],
[
[
36,
38
],
[
1835,
1837
],
[
2588,
2590
],
[
3092,
3094
],
[
3471,
3473
],
[
3888,
3890
]
],
[
[
46,
52
],
[
941,
947
],
[
1187,
1193
],
[
1606,
1612
],
[
2250,
2256
],
[
4606,
4612
]
],
[
[
60,
70
],
[
6435,
6445
]
],
[
[
91,
95
],
[
1690,
1694
],
[
2431,
2435
],
[
2875,
2879
],
[
3338,
3342
],
[
1889,
1893
],
[
2094,
2098
],
[
2642,
2646
],
[
3146,
3150
],
[
3525,
3529
],
[
3942,
3946
],
[
4407,
4411
],
[
4980,
4984
],
[
5573,
5577
],
[
6514,
6518
],
[
6496,
6500
],
[
7430,
7434
]
],
[
[
115,
120
],
[
6026,
6031
]
],
[
[
140,
144
],
[
7471,
7475
]
],
[
[
146,
154
],
[
6505,
6513
]
],
[
[
156,
161
],
[
7441,
7446
]
],
[
[
186,
199
],
[
7981,
7994
]
],
[
[
218,
226
],
[
1681,
1689
],
[
2422,
2430
],
[
2866,
2874
],
[
3329,
3337
],
[
3746,
3754
]
],
[
[
228,
234
],
[
526,
532
],
[
2939,
2945
]
],
[
[
236,
241
],
[
461,
466
]
],
[
[
268,
282
],
[
814,
828
],
[
850,
864
],
[
891,
905
],
[
955,
969
],
[
997,
1011
],
[
1131,
1145
],
[
1218,
1232
],
[
1347,
1361
],
[
1475,
1489
],
[
1503,
1517
],
[
1540,
1554
],
[
1573,
1587
],
[
1620,
1634
],
[
4871,
4885
],
[
5636,
5650
],
[
5732,
5746
],
[
6190,
6204
]
],
[
[
288,
309
],
[
1795,
1816
],
[
1844,
1865
],
[
2172,
2193
],
[
2548,
2569
],
[
2597,
2618
],
[
3052,
3073
],
[
3101,
3122
],
[
3431,
3452
],
[
3480,
3501
],
[
3848,
3869
],
[
3897,
3918
],
[
3947,
3968
],
[
4184,
4205
],
[
4441,
4462
],
[
4566,
4587
],
[
4620,
4641
],
[
4908,
4929
],
[
5230,
5251
]
],
[
[
315,
334
],
[
713,
732
],
[
751,
770
],
[
4829,
4848
]
],
[
[
340,
356
],
[
4790,
4806
]
],
[
[
362,
375
],
[
678,
691
],
[
1023,
1036
],
[
1101,
1114
],
[
1203,
1216
],
[
1317,
1330
]
],
[
[
398,
407
],
[
2229,
2238
],
[
3984,
3993
],
[
5284,
5293
]
],
[
[
409,
427
],
[
4256,
4274
]
],
[
[
429,
453
],
[
4230,
4254
]
],
[
[
455,
458
],
[
472,
475
],
[
1388,
1391
],
[
1639,
1642
],
[
2380,
2383
],
[
2821,
2824
],
[
3289,
3292
],
[
3702,
3705
],
[
4013,
4016
],
[
4483,
4486
],
[
4646,
4649
],
[
8375,
8378
]
],
[
[
490,
503
]
],
[
[
1406,
1420
]
],
[
[
1657,
1664
]
],
[
[
2398,
2405
]
],
[
[
2839,
2844
]
],
[
[
3307,
3312
]
],
[
[
3720,
3729
]
],
[
[
4031,
4035
]
],
[
[
4501,
4506
]
],
[
[
4664,
4669
]
],
[
[
4951,
4969
],
[
2783,
2801
],
[
3615,
3633
]
],
[
[
5546,
5562
],
[
1979,
1995
],
[
2732,
2748
],
[
3236,
3252
],
[
3654,
3670
]
],
[
[
6472,
6485
],
[
5773,
5786
]
],
[
[
7387,
7411
],
[
2043,
2067
],
[
5108,
5132
]
]
] |
from socorepo.locators import github_tags, nexus3, pypi
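# Dispatch table mapping a locator type name from the config to its parser function.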
LOCATOR_PARSERS = {
"github_tags": github_tags.parse_locator,
"nexus3": nexus3.parse_locator,
"pypi": pypi.parse_locator
}
| [
[
[
30,
41
],
[
96,
107
]
],
[
[
43,
49
],
[
137,
143
]
],
[
[
51,
55
],
[
171,
175
]
],
[
[
57,
72
]
]
] |
from flask import Flask, render_template, request
import json
import os  # used below to read the PORT environment variable
import requests
app = Flask(__name__)
@app.route('/')
def stop_words():
URL_prefix = 'https://api.github.com/search/code?q='
URL_suffix = '+repo:spotify/mkdocs-monorepo-plugin/docs'
reportfile = open('./templates/stopWordsSearch.html', 'w')
reportfile.write('<!DOCTYPE html><html lang="en"><head><meta charset="UTF-8"><meta name="viewport" content="width=device-width, initial-scale=1">')
reportfile.write('<link rel="stylesheet" type="text/css" href="../static/bootstrap.min.css">')
reportfile.write('<link rel="stylesheet" type="text/css" href="../static/common.css">')
reportfile.write('<script src="../static/jquery.min.js"></script>')
reportfile.write('<script src="../static/popper.min.js"></script>')
reportfile.write('<script src="../static/bootstrap.min.js"></script>')
reportfile.write('<title>Stop-words Search</title></head>')
reportfile.write('<body><div class="container"><h1>Stop-words Search</h1>')
fname = './static/wordList.txt'
wordlist = []
explainlist = []
print("\n")
print('Reading the word list ...\n')
fwordlist = open(fname, 'r')
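    # Each line of the word list has the form "word: explanation".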
for line in fwordlist:
        colon = line.find(':')
        word = line[:colon]
        explain = line[colon + 1:]
        explain = explain.rstrip()
print(word)
print(explain)
wordlist.append(word)
explainlist.append(explain)
fwordlist.close()
print(wordlist)
print(explainlist)
x = len(wordlist)
print('\nNo. of words and phrases to search for: ', x)
try:
reportfile.write('<p class="lead">Consider reviewing the occurrences of the following words in the documentation.</p><hr/>')
wordpos = 0
for word in wordlist:
url_string = URL_prefix + word + URL_suffix
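            # Query the GitHub code-search API; unauthenticated requests are
            # heavily rate-limited, which is what the except clause below handles.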
r = requests.get(url_string)
            json_data = r.json()  # r.json() already returns parsed JSON; no dumps/loads round-trip needed
print(json_data)
if len(json_data['items']) != 0:
print(word)
reportfile.write('<div class="container">')
reportfile.write('<h2>' + word + '</h2>')
print(explainlist[wordpos])
reportfile.write('<p>' + explainlist[wordpos] + '</p>')
print(json_data['total_count'], 'instances of', word)
reportfile.write('<p>' + str(json_data['total_count']) + ' instances of <mark>' + word + '</mark> found in the following files:</p>')
reportfile.write('<ul>')
for line in json_data['items']:
for k, v in line.items():
if k == 'path':
print(v)
reportfile.write('<li>' + v + '</li>')
print('--------\n')
reportfile.write('</ul>')
reportfile.write('</div>')
reportfile.write('<hr/>')
wordpos = wordpos + 1
    except Exception:
reportfile.write("<p class='text-danger'>>>>>> If you're seeing these lines, it means you've hit the API rate limits for GitHub search and the Stopwords search was abandoned.</p>")
#reportfile.write("<p class='text-danger'>Had the search been completed, you would've got an output shown in the following image.</p>")
#reportfile.write('<img src="../static/stopWords.png"/>')
reportfile.write("<p class='text-danger'>Maybe choose a smaller documentation repository for your search?</p>")
reportfile.write("<p class='text-danger'>But then, this is just a demo and you get the general idea, I hope? <<<<<")
reportfile.write("</div></body>")
reportfile.write("</html>")
reportfile.close()
return render_template('stopWordsSearch.html')
if __name__ == "__main__":
port = int(os.environ.get("PORT", 5000))
app.run(host='0.0.0.0', port=port)
#app.run()
| [
[
[
18,
23
],
[
85,
90
]
],
[
[
25,
40
],
[
3774,
3789
]
],
[
[
42,
49
]
],
[
[
57,
61
],
[
1919,
1923
],
[
1930,
1934
]
],
[
[
69,
77
],
[
1870,
1878
]
],
[
[
79,
82
],
[
103,
106
],
[
3899,
3902
]
],
[
[
122,
132
]
],
[
[
3850,
3854
],
[
3928,
3932
]
]
] |
from django.apps import apps
from django.db import DEFAULT_DB_ALIAS, router
from django.db.models import signals
from django.utils.encoding import smart_text
from django.utils import six
from django.utils.six.moves import input
def update_contenttypes(app_config, verbosity=2, interactive=True, using=DEFAULT_DB_ALIAS, **kwargs):
"""
Creates content types for models in the given app, removing any model
entries that no longer have a matching model class.
"""
if not app_config.models_module:
return
try:
ContentType = apps.get_model('contenttypes', 'ContentType')
except LookupError:
return
if not router.allow_migrate(using, ContentType):
return
ContentType.objects.clear_cache()
app_label = app_config.label
app_models = dict(
(model._meta.model_name, model)
for model in app_config.get_models())
if not app_models:
return
# Get all the content types
content_types = dict(
(ct.model, ct)
for ct in ContentType.objects.using(using).filter(app_label=app_label)
)
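    # Content types still present in the DB whose model class no longer exists.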
to_remove = [
ct
for (model_name, ct) in six.iteritems(content_types)
if model_name not in app_models
]
cts = [
ContentType(
name=smart_text(model._meta.verbose_name_raw),
app_label=app_label,
model=model_name,
)
for (model_name, model) in six.iteritems(app_models)
if model_name not in content_types
]
ContentType.objects.using(using).bulk_create(cts)
if verbosity >= 2:
for ct in cts:
print("Adding content type '%s | %s'" % (ct.app_label, ct.model))
# Confirm that the content type is stale before deletion.
if to_remove:
if interactive:
content_type_display = '\n'.join(
' %s | %s' % (ct.app_label, ct.model)
for ct in to_remove
)
ok_to_delete = input("""The following content types are stale and need to be deleted:
%s
Any objects related to these content types by a foreign key will also
be deleted. Are you sure you want to delete these content types?
If you're unsure, answer 'no'.
Type 'yes' to continue, or 'no' to cancel: """ % content_type_display)
else:
ok_to_delete = False
if ok_to_delete == 'yes':
for ct in to_remove:
if verbosity >= 2:
print("Deleting stale content type '%s | %s'" % (ct.app_label, ct.model))
ct.delete()
else:
if verbosity >= 2:
print("Stale content types remain.")
def update_all_contenttypes(**kwargs):
for app_config in apps.get_app_configs():
update_contenttypes(app_config, **kwargs)
signals.post_migrate.connect(update_contenttypes)
if __name__ == "__main__":
update_all_contenttypes()
| [
[
[
24,
28
],
[
562,
566
],
[
2731,
2735
]
],
[
[
51,
67
],
[
303,
319
]
],
[
[
69,
75
],
[
659,
665
]
],
[
[
105,
112
],
[
2807,
2814
]
],
[
[
147,
157
],
[
1292,
1302
]
],
[
[
183,
186
],
[
1166,
1169
],
[
1442,
1445
]
],
[
[
222,
227
],
[
1980,
1985
]
],
[
[
234,
253
],
[
2836,
2855
],
[
2763,
2782
]
],
[
[
2674,
2697
],
[
2890,
2913
]
]
] |
#!/usr/bin/env python3
# Copyright (c) 2017-present, Facebook, Inc.
# All rights reserved.
# This source code is licensed under the BSD-style license found in the
# LICENSE file in the root directory of this source tree. An additional grant
# of patent rights can be found in the PATENTS file in the same directory.
from parlai.core.params import ParlaiParser
from parlai.mturk.core.mturk_manager import MTurkManager
from worlds import \
PersonaChatWorld, PersonaProfileWorld, PersonasGenerator
from task_config import task_config
import os
def main():
"""This task consists of one agent, model or MTurk worker, talking to an
MTurk worker to negotiate a deal.
"""
argparser = ParlaiParser(False, False)
argparser.add_parlai_data_path()
argparser.add_mturk_args()
argparser.add_argument('-min_t', '--min_turns', default=5, type=int,
help='minimum number of turns')
argparser.add_argument('-mt', '--max_turns', default=10, type=int,
                           help='maximum number of chat turns')
argparser.add_argument('-mx_rsp_time', '--max_resp_time', default=150,
type=int,
help='time limit for entering a dialog message')
    argparser.add_argument('-mx_psn_time', '--max_persona_time', type=int,
                           default=300, help='time limit for turker'
                           ' entering the persona')
argparser.add_argument('--ag_shutdown_time', default=120,
type=int,
help='time limit for entering a dialog message')
argparser.add_argument('--persona-type', default='both', type=str,
choices=['both', 'self', 'other'],
help='Which personas to load from personachat')
argparser.add_argument('--revised', default=False, type='bool',
help='Whether to use revised personas')
argparser.add_argument('-rt', '--range_turn', default='5,7',
help='sample range of number of turns')
argparser.add_argument('--personas-path', default=None,
help='specify path for personas data')
opt = argparser.parse_args()
directory_path = os.path.dirname(os.path.abspath(__file__))
opt['task'] = os.path.basename(directory_path)
if not opt.get('personas_path'):
opt['personas_path'] = argparser.parlai_home + '/parlai/mturk/personachat_chat/data'
opt.update(task_config)
opt['extract_personas_path'] = os.path.join(opt['datapath'], 'personachat_chat')
mturk_agent_ids = ['PERSON_1', 'PERSON_2']
mturk_manager = MTurkManager(
opt=opt,
mturk_agent_ids=mturk_agent_ids
)
persona_generator = PersonasGenerator(opt)
mturk_manager.setup_server(task_directory_path=directory_path)
try:
mturk_manager.start_new_run()
mturk_manager.create_hits()
if not opt['is_sandbox']:
blocked_worker_list = []
for w in blocked_worker_list:
mturk_manager.block_worker(w, 'We found that you have unexpected behaviors in our previous HITs. For more questions please email us.')
def run_onboard(worker):
worker.persona_generator = persona_generator
world = PersonaProfileWorld(opt, worker)
world.parley()
world.shutdown()
mturk_manager.set_onboard_function(onboard_function=run_onboard)
mturk_manager.ready_to_accept_workers()
def check_worker_eligibility(worker):
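            # Every worker is eligible here; qualification checks could be added in this hook.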
return True
def assign_worker_roles(workers):
for index, worker in enumerate(workers):
worker.id = mturk_agent_ids[index % len(mturk_agent_ids)]
def run_conversation(mturk_manager, opt, workers):
agents = [workers[0], workers[1]]
conv_idx = mturk_manager.conversation_index
world = PersonaChatWorld(
opt=opt,
agents=agents,
range_turn=[int(s) for s in opt['range_turn'].split(',')],
max_turn=opt['max_turns'],
max_resp_time=opt['max_resp_time'],
world_tag='conversation t_{}'.format(conv_idx)
)
world.reset_random()
while not world.episode_done():
world.parley()
world.save_data()
world.shutdown()
world.review_work()
mturk_manager.start_task(
eligibility_function=check_worker_eligibility,
assign_role_function=assign_worker_roles,
task_function=run_conversation
)
except BaseException:
raise
finally:
mturk_manager.expire_all_unassigned_hits()
mturk_manager.shutdown()
if __name__ == '__main__':
main()
| [
[
[
348,
360
],
[
700,
712
]
],
[
[
405,
417
],
[
2685,
2697
]
],
[
[
443,
459
],
[
3970,
3986
]
],
[
[
461,
480
],
[
3337,
3356
]
],
[
[
482,
499
],
[
2787,
2804
]
],
[
[
524,
535
],
[
2517,
2528
]
],
[
[
544,
546
],
[
2276,
2278
],
[
2292,
2294
],
[
2337,
2339
],
[
2566,
2568
]
],
[
[
553,
557
],
[
4863,
4867
]
]
] |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Logistic Regression Gradient Descent
"""
import numpy as np
class LogisticRegressionGD(object):
"""Logistic Regression Classifier using gradient descent.
Parameters
------------
eta : float
Learning rate (between 0.0 and 1.0)
n_iter : int
Passes over the training dataset.
random_state : int
Random number generator seed for random weight
initialization.
Attributes
-----------
w_ : 1d-array
Weights after fitting.
cost_ : list
Logistic cost function value in each epoch.
"""
def __init__(self, eta=0.05, n_iter=100, random_state=1):
self.eta = eta
self.n_iter = n_iter
self.random_state = random_state
def fit(self, X, y):
""" Fit training data.
Parameters
----------
X : {array-like}, shape = [n_examples, n_features]
Training vectors, where n_examples is the number of
examples and n_features is the number of features.
y : array-like, shape = [n_examples]
Target values.
Returns
-------
self : object
"""
rgen = np.random.RandomState(self.random_state)
self.w_ = rgen.normal(loc=0.0, scale=0.01,
size=1 + X.shape[1])
self.cost_ = []
for i in range(self.n_iter):
net_input = self.net_input(X)
output = self.activation(net_input)
errors = (y - output)
self.w_[1:] += self.eta * X.T.dot(errors)
self.w_[0] += self.eta * errors.sum()
# note that we compute the logistic `cost` now
# instead of the sum of squared errors cost
cost = (-y.dot(np.log(output)) -
((1 - y).dot(np.log(1 - output))))
self.cost_.append(cost)
return self
def net_input(self, X):
"""Calculate net input"""
return np.dot(X, self.w_[1:]) + self.w_[0]
def activation(self, z):
"""Compute logistic sigmoid activation"""
return 1. / (1. + np.exp(-np.clip(z, -250, 250)))
def predict(self, X):
"""Return class label after unit step"""
return np.where(self.net_input(X) >= 0.0, 1, 0)
# equivalent to:
# return np.where(self.activation(self.net_input(X))
# >= 0.5, 1, 0) | [
[
[
100,
111
],
[
1261,
1263
],
[
1857,
1859
],
[
1912,
1914
],
[
2076,
2078
],
[
2226,
2228
],
[
2234,
2236
],
[
2357,
2359
]
],
[
[
119,
139
]
]
] |
# -*- coding: utf-8 -*-
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""This module is deprecated. Please use `airflow.providers.mysql.hooks.mysql`."""
import warnings
# pylint: disable=unused-import
from airflow.providers.mysql.hooks.mysql import MySqlHook # noqa
warnings.warn(
"This module is deprecated. Please use `airflow.providers.mysql.hooks.mysql`.",
DeprecationWarning, stacklevel=2
)
| [
[
[
902,
910
],
[
1011,
1019
]
],
[
[
992,
1001
]
]
] |
from plotly.basedatatypes import BaseTraceHierarchyType as _BaseTraceHierarchyType
import copy as _copy
class Tiling(_BaseTraceHierarchyType):
# flip
# ----
@property
def flip(self):
"""
Determines if the positions obtained from solver are flipped on
each axis.
The 'flip' property is a flaglist and may be specified
as a string containing:
- Any combination of ['x', 'y'] joined with '+' characters
(e.g. 'x+y')
Returns
-------
Any
"""
return self["flip"]
@flip.setter
def flip(self, val):
self["flip"] = val
# packing
# -------
@property
def packing(self):
"""
Determines d3 treemap solver. For more info please refer to
https://github.com/d3/d3-hierarchy#treemap-tiling
The 'packing' property is an enumeration that may be specified as:
- One of the following enumeration values:
['squarify', 'binary', 'dice', 'slice', 'slice-dice',
'dice-slice']
Returns
-------
Any
"""
return self["packing"]
@packing.setter
def packing(self, val):
self["packing"] = val
# pad
# ---
@property
def pad(self):
"""
Sets the inner padding (in px).
The 'pad' property is a number and may be specified as:
- An int or float in the interval [0, inf]
Returns
-------
int|float
"""
return self["pad"]
@pad.setter
def pad(self, val):
self["pad"] = val
# squarifyratio
# -------------
@property
def squarifyratio(self):
"""
When using "squarify" `packing` algorithm, according to https:/
/github.com/d3/d3-hierarchy/blob/master/README.md#squarify_rati
o this option specifies the desired aspect ratio of the
generated rectangles. The ratio must be specified as a number
greater than or equal to one. Note that the orientation of the
generated rectangles (tall or wide) is not implied by the
ratio; for example, a ratio of two will attempt to produce a
mixture of rectangles whose width:height ratio is either 2:1 or
1:2. When using "squarify", unlike d3 which uses the Golden
Ratio i.e. 1.618034, Plotly applies 1 to increase squares in
treemap layouts.
The 'squarifyratio' property is a number and may be specified as:
- An int or float in the interval [1, inf]
Returns
-------
int|float
"""
return self["squarifyratio"]
@squarifyratio.setter
def squarifyratio(self, val):
self["squarifyratio"] = val
# property parent name
# --------------------
@property
def _parent_path_str(self):
return "treemap"
# Self properties description
# ---------------------------
@property
def _prop_descriptions(self):
return """\
flip
Determines if the positions obtained from solver are
flipped on each axis.
packing
Determines d3 treemap solver. For more info please
refer to https://github.com/d3/d3-hierarchy#treemap-
tiling
pad
Sets the inner padding (in px).
squarifyratio
When using "squarify" `packing` algorithm, according to
https://github.com/d3/d3-hierarchy/blob/master/README.m
d#squarify_ratio this option specifies the desired
aspect ratio of the generated rectangles. The ratio
must be specified as a number greater than or equal to
one. Note that the orientation of the generated
rectangles (tall or wide) is not implied by the ratio;
for example, a ratio of two will attempt to produce a
mixture of rectangles whose width:height ratio is
either 2:1 or 1:2. When using "squarify", unlike d3
which uses the Golden Ratio i.e. 1.618034, Plotly
applies 1 to increase squares in treemap layouts.
"""
def __init__(
self, arg=None, flip=None, packing=None, pad=None, squarifyratio=None, **kwargs
):
"""
Construct a new Tiling object
Parameters
----------
arg
dict of properties compatible with this constructor or
an instance of plotly.graph_objs.treemap.Tiling
flip
Determines if the positions obtained from solver are
flipped on each axis.
packing
Determines d3 treemap solver. For more info please
refer to https://github.com/d3/d3-hierarchy#treemap-
tiling
pad
Sets the inner padding (in px).
squarifyratio
When using "squarify" `packing` algorithm, according to
https://github.com/d3/d3-hierarchy/blob/master/README.m
d#squarify_ratio this option specifies the desired
aspect ratio of the generated rectangles. The ratio
must be specified as a number greater than or equal to
one. Note that the orientation of the generated
rectangles (tall or wide) is not implied by the ratio;
for example, a ratio of two will attempt to produce a
mixture of rectangles whose width:height ratio is
either 2:1 or 1:2. When using "squarify", unlike d3
which uses the Golden Ratio i.e. 1.618034, Plotly
applies 1 to increase squares in treemap layouts.
Returns
-------
Tiling
"""
super(Tiling, self).__init__("tiling")
# Validate arg
# ------------
if arg is None:
arg = {}
elif isinstance(arg, self.__class__):
arg = arg.to_plotly_json()
elif isinstance(arg, dict):
arg = _copy.copy(arg)
else:
raise ValueError(
"""\
The first argument to the plotly.graph_objs.treemap.Tiling
constructor must be a dict or
an instance of plotly.graph_objs.treemap.Tiling"""
)
# Handle skip_invalid
# -------------------
self._skip_invalid = kwargs.pop("skip_invalid", False)
# Import validators
# -----------------
from plotly.validators.treemap import tiling as v_tiling
# Initialize validators
# ---------------------
self._validators["flip"] = v_tiling.FlipValidator()
self._validators["packing"] = v_tiling.PackingValidator()
self._validators["pad"] = v_tiling.PadValidator()
self._validators["squarifyratio"] = v_tiling.SquarifyratioValidator()
# Populate data dict with properties
# ----------------------------------
_v = arg.pop("flip", None)
self["flip"] = flip if flip is not None else _v
_v = arg.pop("packing", None)
self["packing"] = packing if packing is not None else _v
_v = arg.pop("pad", None)
self["pad"] = pad if pad is not None else _v
_v = arg.pop("squarifyratio", None)
self["squarifyratio"] = squarifyratio if squarifyratio is not None else _v
# Process unknown kwargs
# ----------------------
self._process_kwargs(**dict(arg, **kwargs))
# Reset skip_invalid
# ------------------
self._skip_invalid = False
from plotly.basedatatypes import BaseTraceHierarchyType as _BaseTraceHierarchyType
import copy as _copy
class Textfont(_BaseTraceHierarchyType):
# color
# -----
@property
def color(self):
"""
The 'color' property is a color and may be specified as:
- A hex string (e.g. '#ff0000')
- An rgb/rgba string (e.g. 'rgb(255,0,0)')
- An hsl/hsla string (e.g. 'hsl(0,100%,50%)')
- An hsv/hsva string (e.g. 'hsv(0,100%,100%)')
- A named CSS color:
aliceblue, antiquewhite, aqua, aquamarine, azure,
beige, bisque, black, blanchedalmond, blue,
blueviolet, brown, burlywood, cadetblue,
chartreuse, chocolate, coral, cornflowerblue,
cornsilk, crimson, cyan, darkblue, darkcyan,
darkgoldenrod, darkgray, darkgrey, darkgreen,
darkkhaki, darkmagenta, darkolivegreen, darkorange,
darkorchid, darkred, darksalmon, darkseagreen,
darkslateblue, darkslategray, darkslategrey,
darkturquoise, darkviolet, deeppink, deepskyblue,
dimgray, dimgrey, dodgerblue, firebrick,
floralwhite, forestgreen, fuchsia, gainsboro,
ghostwhite, gold, goldenrod, gray, grey, green,
greenyellow, honeydew, hotpink, indianred, indigo,
ivory, khaki, lavender, lavenderblush, lawngreen,
lemonchiffon, lightblue, lightcoral, lightcyan,
lightgoldenrodyellow, lightgray, lightgrey,
lightgreen, lightpink, lightsalmon, lightseagreen,
lightskyblue, lightslategray, lightslategrey,
lightsteelblue, lightyellow, lime, limegreen,
linen, magenta, maroon, mediumaquamarine,
mediumblue, mediumorchid, mediumpurple,
mediumseagreen, mediumslateblue, mediumspringgreen,
mediumturquoise, mediumvioletred, midnightblue,
mintcream, mistyrose, moccasin, navajowhite, navy,
oldlace, olive, olivedrab, orange, orangered,
orchid, palegoldenrod, palegreen, paleturquoise,
palevioletred, papayawhip, peachpuff, peru, pink,
plum, powderblue, purple, red, rosybrown,
royalblue, rebeccapurple, saddlebrown, salmon,
sandybrown, seagreen, seashell, sienna, silver,
skyblue, slateblue, slategray, slategrey, snow,
springgreen, steelblue, tan, teal, thistle, tomato,
turquoise, violet, wheat, white, whitesmoke,
yellow, yellowgreen
- A list or array of any of the above
Returns
-------
str|numpy.ndarray
"""
return self["color"]
@color.setter
def color(self, val):
self["color"] = val
# colorsrc
# --------
@property
def colorsrc(self):
"""
Sets the source reference on plot.ly for color .
The 'colorsrc' property must be specified as a string or
as a plotly.grid_objs.Column object
Returns
-------
str
"""
return self["colorsrc"]
@colorsrc.setter
def colorsrc(self, val):
self["colorsrc"] = val
# family
# ------
@property
def family(self):
"""
HTML font family - the typeface that will be applied by the web
browser. The web browser will only be able to apply a font if
it is available on the system which it operates. Provide
multiple font families, separated by commas, to indicate the
preference in which to apply fonts if they aren't available on
the system. The plotly service (at https://plot.ly or on-
premise) generates images on a server, where only a select
number of fonts are installed and supported. These include
"Arial", "Balto", "Courier New", "Droid Sans",, "Droid Serif",
"Droid Sans Mono", "Gravitas One", "Old Standard TT", "Open
Sans", "Overpass", "PT Sans Narrow", "Raleway", "Times New
Roman".
The 'family' property is a string and must be specified as:
- A non-empty string
- A tuple, list, or one-dimensional numpy array of the above
Returns
-------
str|numpy.ndarray
"""
return self["family"]
@family.setter
def family(self, val):
self["family"] = val
# familysrc
# ---------
@property
def familysrc(self):
"""
Sets the source reference on plot.ly for family .
The 'familysrc' property must be specified as a string or
as a plotly.grid_objs.Column object
Returns
-------
str
"""
return self["familysrc"]
@familysrc.setter
def familysrc(self, val):
self["familysrc"] = val
# size
# ----
@property
def size(self):
"""
The 'size' property is a number and may be specified as:
- An int or float in the interval [1, inf]
- A tuple, list, or one-dimensional numpy array of the above
Returns
-------
int|float|numpy.ndarray
"""
return self["size"]
@size.setter
def size(self, val):
self["size"] = val
# sizesrc
# -------
@property
def sizesrc(self):
"""
Sets the source reference on plot.ly for size .
The 'sizesrc' property must be specified as a string or
as a plotly.grid_objs.Column object
Returns
-------
str
"""
return self["sizesrc"]
@sizesrc.setter
def sizesrc(self, val):
self["sizesrc"] = val
# property parent name
# --------------------
@property
def _parent_path_str(self):
return "treemap"
# Self properties description
# ---------------------------
@property
def _prop_descriptions(self):
return """\
color
colorsrc
Sets the source reference on plot.ly for color .
family
HTML font family - the typeface that will be applied by
the web browser. The web browser will only be able to
apply a font if it is available on the system which it
operates. Provide multiple font families, separated by
commas, to indicate the preference in which to apply
fonts if they aren't available on the system. The
plotly service (at https://plot.ly or on-premise)
generates images on a server, where only a select
number of fonts are installed and supported. These
include "Arial", "Balto", "Courier New", "Droid Sans",,
"Droid Serif", "Droid Sans Mono", "Gravitas One", "Old
Standard TT", "Open Sans", "Overpass", "PT Sans
Narrow", "Raleway", "Times New Roman".
familysrc
Sets the source reference on plot.ly for family .
size
sizesrc
Sets the source reference on plot.ly for size .
"""
def __init__(
self,
arg=None,
color=None,
colorsrc=None,
family=None,
familysrc=None,
size=None,
sizesrc=None,
**kwargs
):
"""
Construct a new Textfont object
Sets the font used for `textinfo`.
Parameters
----------
arg
dict of properties compatible with this constructor or
an instance of plotly.graph_objs.treemap.Textfont
color
colorsrc
Sets the source reference on plot.ly for color .
family
HTML font family - the typeface that will be applied by
the web browser. The web browser will only be able to
apply a font if it is available on the system which it
operates. Provide multiple font families, separated by
commas, to indicate the preference in which to apply
fonts if they aren't available on the system. The
plotly service (at https://plot.ly or on-premise)
generates images on a server, where only a select
number of fonts are installed and supported. These
include "Arial", "Balto", "Courier New", "Droid Sans",,
"Droid Serif", "Droid Sans Mono", "Gravitas One", "Old
Standard TT", "Open Sans", "Overpass", "PT Sans
Narrow", "Raleway", "Times New Roman".
familysrc
Sets the source reference on plot.ly for family .
size
sizesrc
Sets the source reference on plot.ly for size .
Returns
-------
Textfont
"""
super(Textfont, self).__init__("textfont")
# Validate arg
# ------------
if arg is None:
arg = {}
elif isinstance(arg, self.__class__):
arg = arg.to_plotly_json()
elif isinstance(arg, dict):
arg = _copy.copy(arg)
else:
raise ValueError(
"""\
The first argument to the plotly.graph_objs.treemap.Textfont
constructor must be a dict or
an instance of plotly.graph_objs.treemap.Textfont"""
)
# Handle skip_invalid
# -------------------
self._skip_invalid = kwargs.pop("skip_invalid", False)
# Import validators
# -----------------
from plotly.validators.treemap import textfont as v_textfont
# Initialize validators
# ---------------------
self._validators["color"] = v_textfont.ColorValidator()
self._validators["colorsrc"] = v_textfont.ColorsrcValidator()
self._validators["family"] = v_textfont.FamilyValidator()
self._validators["familysrc"] = v_textfont.FamilysrcValidator()
self._validators["size"] = v_textfont.SizeValidator()
self._validators["sizesrc"] = v_textfont.SizesrcValidator()
# Populate data dict with properties
# ----------------------------------
_v = arg.pop("color", None)
self["color"] = color if color is not None else _v
_v = arg.pop("colorsrc", None)
self["colorsrc"] = colorsrc if colorsrc is not None else _v
_v = arg.pop("family", None)
self["family"] = family if family is not None else _v
_v = arg.pop("familysrc", None)
self["familysrc"] = familysrc if familysrc is not None else _v
_v = arg.pop("size", None)
self["size"] = size if size is not None else _v
_v = arg.pop("sizesrc", None)
self["sizesrc"] = sizesrc if sizesrc is not None else _v
# Process unknown kwargs
# ----------------------
self._process_kwargs(**dict(arg, **kwargs))
# Reset skip_invalid
# ------------------
self._skip_invalid = False
from plotly.basedatatypes import BaseTraceHierarchyType as _BaseTraceHierarchyType
import copy as _copy
class Stream(_BaseTraceHierarchyType):
# maxpoints
# ---------
@property
def maxpoints(self):
"""
Sets the maximum number of points to keep on the plots from an
incoming stream. If `maxpoints` is set to 50, only the newest
50 points will be displayed on the plot.
The 'maxpoints' property is a number and may be specified as:
- An int or float in the interval [0, 10000]
Returns
-------
int|float
"""
return self["maxpoints"]
@maxpoints.setter
def maxpoints(self, val):
self["maxpoints"] = val
# token
# -----
@property
def token(self):
"""
The stream id number links a data trace on a plot with a
stream. See https://plot.ly/settings for more details.
The 'token' property is a string and must be specified as:
- A non-empty string
Returns
-------
str
"""
return self["token"]
@token.setter
def token(self, val):
self["token"] = val
# property parent name
# --------------------
@property
def _parent_path_str(self):
return "treemap"
# Self properties description
# ---------------------------
@property
def _prop_descriptions(self):
return """\
maxpoints
Sets the maximum number of points to keep on the plots
from an incoming stream. If `maxpoints` is set to 50,
only the newest 50 points will be displayed on the
plot.
token
The stream id number links a data trace on a plot with
a stream. See https://plot.ly/settings for more
details.
"""
def __init__(self, arg=None, maxpoints=None, token=None, **kwargs):
"""
Construct a new Stream object
Parameters
----------
arg
dict of properties compatible with this constructor or
an instance of plotly.graph_objs.treemap.Stream
maxpoints
Sets the maximum number of points to keep on the plots
from an incoming stream. If `maxpoints` is set to 50,
only the newest 50 points will be displayed on the
plot.
token
The stream id number links a data trace on a plot with
a stream. See https://plot.ly/settings for more
details.
Returns
-------
Stream
"""
super(Stream, self).__init__("stream")
# Validate arg
# ------------
if arg is None:
arg = {}
elif isinstance(arg, self.__class__):
arg = arg.to_plotly_json()
elif isinstance(arg, dict):
arg = _copy.copy(arg)
else:
raise ValueError(
"""\
The first argument to the plotly.graph_objs.treemap.Stream
constructor must be a dict or
an instance of plotly.graph_objs.treemap.Stream"""
)
# Handle skip_invalid
# -------------------
self._skip_invalid = kwargs.pop("skip_invalid", False)
# Import validators
# -----------------
from plotly.validators.treemap import stream as v_stream
# Initialize validators
# ---------------------
self._validators["maxpoints"] = v_stream.MaxpointsValidator()
self._validators["token"] = v_stream.TokenValidator()
# Populate data dict with properties
# ----------------------------------
_v = arg.pop("maxpoints", None)
self["maxpoints"] = maxpoints if maxpoints is not None else _v
_v = arg.pop("token", None)
self["token"] = token if token is not None else _v
# Process unknown kwargs
# ----------------------
self._process_kwargs(**dict(arg, **kwargs))
# Reset skip_invalid
# ------------------
self._skip_invalid = False
from plotly.basedatatypes import BaseTraceHierarchyType as _BaseTraceHierarchyType
import copy as _copy
class Pathbar(_BaseTraceHierarchyType):
# edgeshape
# ---------
@property
def edgeshape(self):
"""
        Determines which shape is used for edges between `pathbar`
labels.
The 'edgeshape' property is an enumeration that may be specified as:
- One of the following enumeration values:
['>', '<', '|', '\\']
- A string that matches one of the following regular expressions:
['']
Returns
-------
Any
"""
return self["edgeshape"]
@edgeshape.setter
def edgeshape(self, val):
self["edgeshape"] = val
# side
# ----
@property
def side(self):
"""
        Determines on which side of the treemap the `pathbar`
should be presented.
The 'side' property is an enumeration that may be specified as:
- One of the following enumeration values:
['top', 'bottom']
Returns
-------
Any
"""
return self["side"]
@side.setter
def side(self, val):
self["side"] = val
# textfont
# --------
@property
def textfont(self):
"""
Sets the font used inside `pathbar`.
The 'textfont' property is an instance of Textfont
that may be specified as:
- An instance of plotly.graph_objs.treemap.pathbar.Textfont
- A dict of string/value properties that will be passed
to the Textfont constructor
Supported dict properties:
color
colorsrc
Sets the source reference on plot.ly for color
.
family
HTML font family - the typeface that will be
applied by the web browser. The web browser
will only be able to apply a font if it is
available on the system which it operates.
Provide multiple font families, separated by
commas, to indicate the preference in which to
apply fonts if they aren't available on the
system. The plotly service (at https://plot.ly
or on-premise) generates images on a server,
where only a select number of fonts are
installed and supported. These include "Arial",
"Balto", "Courier New", "Droid Sans",, "Droid
Serif", "Droid Sans Mono", "Gravitas One", "Old
Standard TT", "Open Sans", "Overpass", "PT Sans
Narrow", "Raleway", "Times New Roman".
familysrc
Sets the source reference on plot.ly for
family .
size
sizesrc
Sets the source reference on plot.ly for size
.
Returns
-------
plotly.graph_objs.treemap.pathbar.Textfont
"""
return self["textfont"]
@textfont.setter
def textfont(self, val):
self["textfont"] = val
# thickness
# ---------
@property
def thickness(self):
"""
Sets the thickness of `pathbar` (in px). If not specified the
        `pathbar.textfont.size` is used with 3 pixels extra padding on
each side.
The 'thickness' property is a number and may be specified as:
- An int or float in the interval [12, inf]
Returns
-------
int|float
"""
return self["thickness"]
@thickness.setter
def thickness(self, val):
self["thickness"] = val
# visible
# -------
@property
def visible(self):
"""
Determines if the path bar is drawn i.e. outside the trace
`domain` and with one pixel gap.
The 'visible' property must be specified as a bool
(either True, or False)
Returns
-------
bool
"""
return self["visible"]
@visible.setter
def visible(self, val):
self["visible"] = val
# property parent name
# --------------------
@property
def _parent_path_str(self):
return "treemap"
# Self properties description
# ---------------------------
@property
def _prop_descriptions(self):
return """\
edgeshape
Determines which shape is used for edges between
            `pathbar` labels.
side
            Determines on which side of the treemap the
`pathbar` should be presented.
textfont
Sets the font used inside `pathbar`.
thickness
Sets the thickness of `pathbar` (in px). If not
specified the `pathbar.textfont.size` is used with 3
            pixels extra padding on each side.
visible
Determines if the path bar is drawn i.e. outside the
trace `domain` and with one pixel gap.
"""
def __init__(
self,
arg=None,
edgeshape=None,
side=None,
textfont=None,
thickness=None,
visible=None,
**kwargs
):
"""
Construct a new Pathbar object
Parameters
----------
arg
dict of properties compatible with this constructor or
an instance of plotly.graph_objs.treemap.Pathbar
edgeshape
Determines which shape is used for edges between
            `pathbar` labels.
side
            Determines on which side of the treemap the
`pathbar` should be presented.
textfont
Sets the font used inside `pathbar`.
thickness
Sets the thickness of `pathbar` (in px). If not
specified the `pathbar.textfont.size` is used with 3
            pixels extra padding on each side.
visible
Determines if the path bar is drawn i.e. outside the
trace `domain` and with one pixel gap.
Returns
-------
Pathbar
"""
super(Pathbar, self).__init__("pathbar")
# Validate arg
# ------------
if arg is None:
arg = {}
elif isinstance(arg, self.__class__):
arg = arg.to_plotly_json()
elif isinstance(arg, dict):
arg = _copy.copy(arg)
else:
raise ValueError(
"""\
The first argument to the plotly.graph_objs.treemap.Pathbar
constructor must be a dict or
an instance of plotly.graph_objs.treemap.Pathbar"""
)
# Handle skip_invalid
# -------------------
self._skip_invalid = kwargs.pop("skip_invalid", False)
# Import validators
# -----------------
from plotly.validators.treemap import pathbar as v_pathbar
# Initialize validators
# ---------------------
self._validators["edgeshape"] = v_pathbar.EdgeshapeValidator()
self._validators["side"] = v_pathbar.SideValidator()
self._validators["textfont"] = v_pathbar.TextfontValidator()
self._validators["thickness"] = v_pathbar.ThicknessValidator()
self._validators["visible"] = v_pathbar.VisibleValidator()
# Populate data dict with properties
# ----------------------------------
_v = arg.pop("edgeshape", None)
self["edgeshape"] = edgeshape if edgeshape is not None else _v
_v = arg.pop("side", None)
self["side"] = side if side is not None else _v
_v = arg.pop("textfont", None)
self["textfont"] = textfont if textfont is not None else _v
_v = arg.pop("thickness", None)
self["thickness"] = thickness if thickness is not None else _v
_v = arg.pop("visible", None)
self["visible"] = visible if visible is not None else _v
# Process unknown kwargs
# ----------------------
self._process_kwargs(**dict(arg, **kwargs))
# Reset skip_invalid
# ------------------
self._skip_invalid = False
from plotly.basedatatypes import BaseTraceHierarchyType as _BaseTraceHierarchyType
import copy as _copy
class Outsidetextfont(_BaseTraceHierarchyType):
# color
# -----
@property
def color(self):
"""
The 'color' property is a color and may be specified as:
- A hex string (e.g. '#ff0000')
- An rgb/rgba string (e.g. 'rgb(255,0,0)')
- An hsl/hsla string (e.g. 'hsl(0,100%,50%)')
- An hsv/hsva string (e.g. 'hsv(0,100%,100%)')
- A named CSS color:
aliceblue, antiquewhite, aqua, aquamarine, azure,
beige, bisque, black, blanchedalmond, blue,
blueviolet, brown, burlywood, cadetblue,
chartreuse, chocolate, coral, cornflowerblue,
cornsilk, crimson, cyan, darkblue, darkcyan,
darkgoldenrod, darkgray, darkgrey, darkgreen,
darkkhaki, darkmagenta, darkolivegreen, darkorange,
darkorchid, darkred, darksalmon, darkseagreen,
darkslateblue, darkslategray, darkslategrey,
darkturquoise, darkviolet, deeppink, deepskyblue,
dimgray, dimgrey, dodgerblue, firebrick,
floralwhite, forestgreen, fuchsia, gainsboro,
ghostwhite, gold, goldenrod, gray, grey, green,
greenyellow, honeydew, hotpink, indianred, indigo,
ivory, khaki, lavender, lavenderblush, lawngreen,
lemonchiffon, lightblue, lightcoral, lightcyan,
lightgoldenrodyellow, lightgray, lightgrey,
lightgreen, lightpink, lightsalmon, lightseagreen,
lightskyblue, lightslategray, lightslategrey,
lightsteelblue, lightyellow, lime, limegreen,
linen, magenta, maroon, mediumaquamarine,
mediumblue, mediumorchid, mediumpurple,
mediumseagreen, mediumslateblue, mediumspringgreen,
mediumturquoise, mediumvioletred, midnightblue,
mintcream, mistyrose, moccasin, navajowhite, navy,
oldlace, olive, olivedrab, orange, orangered,
orchid, palegoldenrod, palegreen, paleturquoise,
palevioletred, papayawhip, peachpuff, peru, pink,
plum, powderblue, purple, red, rosybrown,
royalblue, rebeccapurple, saddlebrown, salmon,
sandybrown, seagreen, seashell, sienna, silver,
skyblue, slateblue, slategray, slategrey, snow,
springgreen, steelblue, tan, teal, thistle, tomato,
turquoise, violet, wheat, white, whitesmoke,
yellow, yellowgreen
- A list or array of any of the above
Returns
-------
str|numpy.ndarray
"""
return self["color"]
@color.setter
def color(self, val):
self["color"] = val
# colorsrc
# --------
@property
def colorsrc(self):
"""
Sets the source reference on plot.ly for color .
The 'colorsrc' property must be specified as a string or
as a plotly.grid_objs.Column object
Returns
-------
str
"""
return self["colorsrc"]
@colorsrc.setter
def colorsrc(self, val):
self["colorsrc"] = val
# family
# ------
@property
def family(self):
"""
HTML font family - the typeface that will be applied by the web
browser. The web browser will only be able to apply a font if
it is available on the system which it operates. Provide
multiple font families, separated by commas, to indicate the
preference in which to apply fonts if they aren't available on
the system. The plotly service (at https://plot.ly or on-
premise) generates images on a server, where only a select
number of fonts are installed and supported. These include
"Arial", "Balto", "Courier New", "Droid Sans",, "Droid Serif",
"Droid Sans Mono", "Gravitas One", "Old Standard TT", "Open
Sans", "Overpass", "PT Sans Narrow", "Raleway", "Times New
Roman".
The 'family' property is a string and must be specified as:
- A non-empty string
- A tuple, list, or one-dimensional numpy array of the above
Returns
-------
str|numpy.ndarray
"""
return self["family"]
@family.setter
def family(self, val):
self["family"] = val
# familysrc
# ---------
@property
def familysrc(self):
"""
Sets the source reference on plot.ly for family .
The 'familysrc' property must be specified as a string or
as a plotly.grid_objs.Column object
Returns
-------
str
"""
return self["familysrc"]
@familysrc.setter
def familysrc(self, val):
self["familysrc"] = val
# size
# ----
@property
def size(self):
"""
The 'size' property is a number and may be specified as:
- An int or float in the interval [1, inf]
- A tuple, list, or one-dimensional numpy array of the above
Returns
-------
int|float|numpy.ndarray
"""
return self["size"]
@size.setter
def size(self, val):
self["size"] = val
# sizesrc
# -------
@property
def sizesrc(self):
"""
Sets the source reference on plot.ly for size .
The 'sizesrc' property must be specified as a string or
as a plotly.grid_objs.Column object
Returns
-------
str
"""
return self["sizesrc"]
@sizesrc.setter
def sizesrc(self, val):
self["sizesrc"] = val
# property parent name
# --------------------
@property
def _parent_path_str(self):
return "treemap"
# Self properties description
# ---------------------------
@property
def _prop_descriptions(self):
return """\
color
colorsrc
Sets the source reference on plot.ly for color .
family
HTML font family - the typeface that will be applied by
the web browser. The web browser will only be able to
apply a font if it is available on the system which it
operates. Provide multiple font families, separated by
commas, to indicate the preference in which to apply
fonts if they aren't available on the system. The
plotly service (at https://plot.ly or on-premise)
generates images on a server, where only a select
number of fonts are installed and supported. These
include "Arial", "Balto", "Courier New", "Droid Sans",,
"Droid Serif", "Droid Sans Mono", "Gravitas One", "Old
Standard TT", "Open Sans", "Overpass", "PT Sans
Narrow", "Raleway", "Times New Roman".
familysrc
Sets the source reference on plot.ly for family .
size
sizesrc
Sets the source reference on plot.ly for size .
"""
def __init__(
self,
arg=None,
color=None,
colorsrc=None,
family=None,
familysrc=None,
size=None,
sizesrc=None,
**kwargs
):
"""
Construct a new Outsidetextfont object
Sets the font used for `textinfo` lying outside the sector.
Parameters
----------
arg
dict of properties compatible with this constructor or
an instance of
plotly.graph_objs.treemap.Outsidetextfont
color
colorsrc
Sets the source reference on plot.ly for color .
family
HTML font family - the typeface that will be applied by
the web browser. The web browser will only be able to
apply a font if it is available on the system which it
operates. Provide multiple font families, separated by
commas, to indicate the preference in which to apply
fonts if they aren't available on the system. The
plotly service (at https://plot.ly or on-premise)
generates images on a server, where only a select
number of fonts are installed and supported. These
include "Arial", "Balto", "Courier New", "Droid Sans",,
"Droid Serif", "Droid Sans Mono", "Gravitas One", "Old
Standard TT", "Open Sans", "Overpass", "PT Sans
Narrow", "Raleway", "Times New Roman".
familysrc
Sets the source reference on plot.ly for family .
size
sizesrc
Sets the source reference on plot.ly for size .
Returns
-------
Outsidetextfont
"""
super(Outsidetextfont, self).__init__("outsidetextfont")
# Validate arg
# ------------
if arg is None:
arg = {}
elif isinstance(arg, self.__class__):
arg = arg.to_plotly_json()
elif isinstance(arg, dict):
arg = _copy.copy(arg)
else:
raise ValueError(
"""\
The first argument to the plotly.graph_objs.treemap.Outsidetextfont
constructor must be a dict or
an instance of plotly.graph_objs.treemap.Outsidetextfont"""
)
# Handle skip_invalid
# -------------------
self._skip_invalid = kwargs.pop("skip_invalid", False)
# Import validators
# -----------------
from plotly.validators.treemap import outsidetextfont as v_outsidetextfont
# Initialize validators
# ---------------------
self._validators["color"] = v_outsidetextfont.ColorValidator()
self._validators["colorsrc"] = v_outsidetextfont.ColorsrcValidator()
self._validators["family"] = v_outsidetextfont.FamilyValidator()
self._validators["familysrc"] = v_outsidetextfont.FamilysrcValidator()
self._validators["size"] = v_outsidetextfont.SizeValidator()
self._validators["sizesrc"] = v_outsidetextfont.SizesrcValidator()
# Populate data dict with properties
# ----------------------------------
_v = arg.pop("color", None)
self["color"] = color if color is not None else _v
_v = arg.pop("colorsrc", None)
self["colorsrc"] = colorsrc if colorsrc is not None else _v
_v = arg.pop("family", None)
self["family"] = family if family is not None else _v
_v = arg.pop("familysrc", None)
self["familysrc"] = familysrc if familysrc is not None else _v
_v = arg.pop("size", None)
self["size"] = size if size is not None else _v
_v = arg.pop("sizesrc", None)
self["sizesrc"] = sizesrc if sizesrc is not None else _v
# Process unknown kwargs
# ----------------------
self._process_kwargs(**dict(arg, **kwargs))
# Reset skip_invalid
# ------------------
self._skip_invalid = False
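# Usage sketch added for illustration (assumes the public
# plotly.graph_objects API; the labels and font values are invented):
#
#     import plotly.graph_objects as go
#     fig = go.Figure(go.Treemap(
#         labels=["root", "A", "B"],
#         parents=["", "root", "root"],
#         outsidetextfont=dict(family="Open Sans", size=14, color="gray"),
#     ))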
from plotly.basedatatypes import BaseTraceHierarchyType as _BaseTraceHierarchyType
import copy as _copy
class Marker(_BaseTraceHierarchyType):
# autocolorscale
# --------------
@property
def autocolorscale(self):
"""
Determines whether the colorscale is a default palette
(`autocolorscale: true`) or the palette determined by
`marker.colorscale`. Has an effect only if colors is set to a
numerical array. In case `colorscale` is unspecified or
`autocolorscale` is true, the default palette will be chosen
according to whether numbers in the `color` array are all
positive, all negative or mixed.
The 'autocolorscale' property must be specified as a bool
(either True, or False)
Returns
-------
bool
"""
return self["autocolorscale"]
@autocolorscale.setter
def autocolorscale(self, val):
self["autocolorscale"] = val
# cauto
# -----
@property
def cauto(self):
"""
Determines whether or not the color domain is computed with
respect to the input data (here colors) or the bounds set in
`marker.cmin` and `marker.cmax`. Has an effect only if colors is
set to a numerical array. Defaults to `false` when
`marker.cmin` and `marker.cmax` are set by the user.
The 'cauto' property must be specified as a bool
(either True, or False)
Returns
-------
bool
"""
return self["cauto"]
@cauto.setter
def cauto(self, val):
self["cauto"] = val
# cmax
# ----
@property
def cmax(self):
"""
Sets the upper bound of the color domain. Has an effect only if
colors is set to a numerical array. Value should have the same
units as colors and if set, `marker.cmin` must be set as well.
The 'cmax' property is a number and may be specified as:
- An int or float
Returns
-------
int|float
"""
return self["cmax"]
@cmax.setter
def cmax(self, val):
self["cmax"] = val
# cmid
# ----
@property
def cmid(self):
"""
Sets the mid-point of the color domain by scaling `marker.cmin`
and/or `marker.cmax` to be equidistant to this point. Has an
effect only if colors is set to a numerical array. Value should
have the same units as colors. Has no effect when
`marker.cauto` is `false`.
The 'cmid' property is a number and may be specified as:
- An int or float
Returns
-------
int|float
"""
return self["cmid"]
@cmid.setter
def cmid(self, val):
self["cmid"] = val
# cmin
# ----
@property
def cmin(self):
"""
Sets the lower bound of the color domain. Has an effect only if
colors is set to a numerical array. Value should have the same
units as colors and if set, `marker.cmax` must be set as well.
The 'cmin' property is a number and may be specified as:
- An int or float
Returns
-------
int|float
"""
return self["cmin"]
@cmin.setter
def cmin(self, val):
self["cmin"] = val
# coloraxis
# ---------
@property
def coloraxis(self):
"""
Sets a reference to a shared color axis. References to these
shared color axes are "coloraxis", "coloraxis2", "coloraxis3",
etc. Settings for these shared color axes are set in the
layout, under `layout.coloraxis`, `layout.coloraxis2`, etc.
Note that multiple color scales can be linked to the same color
axis.
The 'coloraxis' property is an identifier of a particular
subplot, of type 'coloraxis', that may be specified as the string 'coloraxis'
optionally followed by an integer >= 1
(e.g. 'coloraxis', 'coloraxis1', 'coloraxis2', 'coloraxis3', etc.)
Returns
-------
str
"""
return self["coloraxis"]
@coloraxis.setter
def coloraxis(self, val):
self["coloraxis"] = val
# colorbar
# --------
@property
def colorbar(self):
"""
The 'colorbar' property is an instance of ColorBar
that may be specified as:
- An instance of plotly.graph_objs.treemap.marker.ColorBar
- A dict of string/value properties that will be passed
to the ColorBar constructor
Supported dict properties:
bgcolor
Sets the color of padded area.
bordercolor
Sets the axis line color.
borderwidth
Sets the width (in px) of the border enclosing
this color bar.
dtick
Sets the step in-between ticks on this axis.
Use with `tick0`. Must be a positive number, or
special strings available to "log" and "date"
axes. If the axis `type` is "log", then ticks
are set every 10^(n*dtick) where n is the tick
number. For example, to set a tick mark at 1,
10, 100, 1000, ... set dtick to 1. To set tick
marks at 1, 100, 10000, ... set dtick to 2. To
set tick marks at 1, 5, 25, 125, 625, 3125, ...
set dtick to log_10(5), or 0.69897000433. "log"
has several special values; "L<f>", where `f`
is a positive number, gives ticks linearly
spaced in value (but not position). For example
`tick0` = 0.1, `dtick` = "L0.5" will put ticks
at 0.1, 0.6, 1.1, 1.6 etc. To show powers of 10
plus small digits between, use "D1" (all
digits) or "D2" (only 2 and 5). `tick0` is
ignored for "D1" and "D2". If the axis `type`
is "date", then you must convert the time to
milliseconds. For example, to set the interval
between ticks to one day, set `dtick` to
86400000.0. "date" also has special values
"M<n>" gives ticks spaced by a number of
months. `n` must be a positive integer. To set
ticks on the 15th of every third month, set
`tick0` to "2000-01-15" and `dtick` to "M3". To
set ticks every 4 years, set `dtick` to "M48"
exponentformat
Determines a formatting rule for the tick
exponents. For example, consider the number
1,000,000,000. If "none", it appears as
1,000,000,000. If "e", 1e+9. If "E", 1E+9. If
"power", 1x10^9 (with 9 in a super script). If
"SI", 1G. If "B", 1B.
len
Sets the length of the color bar. This measure
excludes the padding of both ends. That is, the
color bar length is this length minus the
padding on both ends.
lenmode
Determines whether this color bar's length
(i.e. the measure in the color variation
direction) is set in units of plot "fraction"
or in "pixels". Use `len` to set the value.
nticks
Specifies the maximum number of ticks for the
particular axis. The actual number of ticks
will be chosen automatically to be less than or
equal to `nticks`. Has an effect only if
`tickmode` is set to "auto".
outlinecolor
Sets the axis line color.
outlinewidth
Sets the width (in px) of the axis line.
separatethousands
If "true", even 4-digit integers are separated
showexponent
If "all", all exponents are shown besides their
significands. If "first", only the exponent of
the first tick is shown. If "last", only the
exponent of the last tick is shown. If "none",
no exponents appear.
showticklabels
Determines whether or not the tick labels are
drawn.
showtickprefix
If "all", all tick labels are displayed with a
prefix. If "first", only the first tick is
displayed with a prefix. If "last", only the
last tick is displayed with a suffix. If
"none", tick prefixes are hidden.
showticksuffix
Same as `showtickprefix` but for tick suffixes.
thickness
Sets the thickness of the color bar. This
measure excludes the size of the padding, ticks
and labels.
thicknessmode
Determines whether this color bar's thickness
(i.e. the measure in the constant color
direction) is set in units of plot "fraction"
or in "pixels". Use `thickness` to set the
value.
tick0
Sets the placement of the first tick on this
axis. Use with `dtick`. If the axis `type` is
"log", then you must take the log of your
starting tick (e.g. to set the starting tick to
100, set the `tick0` to 2) except when
`dtick`=*L<f>* (see `dtick` for more info). If
the axis `type` is "date", it should be a date
string, like date data. If the axis `type` is
"category", it should be a number, using the
scale where each category is assigned a serial
number from zero in the order it appears.
tickangle
Sets the angle of the tick labels with respect
to the horizontal. For example, a `tickangle`
of -90 draws the tick labels vertically.
tickcolor
Sets the tick color.
tickfont
Sets the color bar's tick label font
tickformat
Sets the tick label formatting rule using d3
formatting mini-languages which are very
similar to those in Python. For numbers, see:
https://github.com/d3/d3-3.x-api-
reference/blob/master/Formatting.md#d3_format
And for dates see:
https://github.com/d3/d3-3.x-api-
reference/blob/master/Time-Formatting.md#format
We add one item to d3's date formatter: "%{n}f"
for fractional seconds with n digits. For
example, *2016-10-13 09:15:23.456* with
tickformat "%H~%M~%S.%2f" would display
"09~15~23.46"
tickformatstops
A tuple of plotly.graph_objects.treemap.marker.
colorbar.Tickformatstop instances or dicts with
compatible properties
tickformatstopdefaults
When used in a template (as layout.template.dat
a.treemap.marker.colorbar.tickformatstopdefault
s), sets the default property values to use for
elements of
treemap.marker.colorbar.tickformatstops
ticklen
Sets the tick length (in px).
tickmode
Sets the tick mode for this axis. If "auto",
the number of ticks is set via `nticks`. If
"linear", the placement of the ticks is
determined by a starting position `tick0` and a
tick step `dtick` ("linear" is the default
value if `tick0` and `dtick` are provided). If
"array", the placement of the ticks is set via
`tickvals` and the tick text is `ticktext`.
("array" is the default value if `tickvals` is
provided).
tickprefix
Sets a tick label prefix.
ticks
Determines whether ticks are drawn or not. If
"", this axis' ticks are not drawn. If
"outside" ("inside"), this axis' are drawn
outside (inside) the axis lines.
ticksuffix
Sets a tick label suffix.
ticktext
Sets the text displayed at the ticks position
via `tickvals`. Only has an effect if
`tickmode` is set to "array". Used with
`tickvals`.
ticktextsrc
Sets the source reference on plot.ly for
ticktext .
tickvals
Sets the values at which ticks on this axis
appear. Only has an effect if `tickmode` is set
to "array". Used with `ticktext`.
tickvalssrc
Sets the source reference on plot.ly for
tickvals .
tickwidth
Sets the tick width (in px).
title
plotly.graph_objects.treemap.marker.colorbar.Ti
tle instance or dict with compatible properties
titlefont
Deprecated: Please use
treemap.marker.colorbar.title.font instead.
Sets this color bar's title font. Note that the
title's font used to be set by the now
deprecated `titlefont` attribute.
titleside
Deprecated: Please use
treemap.marker.colorbar.title.side instead.
Determines the location of color bar's title
with respect to the color bar. Note that the
title's location used to be set by the now
deprecated `titleside` attribute.
x
Sets the x position of the color bar (in plot
fraction).
xanchor
Sets this color bar's horizontal position
anchor. This anchor binds the `x` position to
the "left", "center" or "right" of the color
bar.
xpad
Sets the amount of padding (in px) along the x
direction.
y
Sets the y position of the color bar (in plot
fraction).
yanchor
Sets this color bar's vertical position anchor.
This anchor binds the `y` position to the
"top", "middle" or "bottom" of the color bar.
ypad
Sets the amount of padding (in px) along the y
direction.
Returns
-------
plotly.graph_objs.treemap.marker.ColorBar
"""
return self["colorbar"]
@colorbar.setter
def colorbar(self, val):
self["colorbar"] = val
# colors
# ------
@property
def colors(self):
"""
Sets the color of each sector of this trace. If not specified,
the default trace color set is used to pick the sector colors.
The 'colors' property is an array that may be specified as a tuple,
list, numpy array, or pandas Series
Returns
-------
numpy.ndarray
"""
return self["colors"]
@colors.setter
def colors(self, val):
self["colors"] = val
# colorscale
# ----------
@property
def colorscale(self):
"""
Sets the colorscale. Has an effect only if colors is set to a
numerical array. The colorscale must be an array containing
arrays mapping a normalized value to an rgb, rgba, hex, hsl,
hsv, or named color string. At minimum, a mapping for the
lowest (0) and highest (1) values are required. For example,
`[[0, 'rgb(0,0,255)'], [1, 'rgb(255,0,0)']]`. To control the
bounds of the colorscale in color space, use `marker.cmin` and
`marker.cmax`. Alternatively, `colorscale` may be a palette
name string of the following list: Greys,YlGnBu,Greens,YlOrRd,B
luered,RdBu,Reds,Blues,Picnic,Rainbow,Portland,Jet,Hot,Blackbod
y,Earth,Electric,Viridis,Cividis.
The 'colorscale' property is a colorscale and may be
specified as:
- A list of colors that will be spaced evenly to create the colorscale.
Many predefined colorscale lists are included in the sequential, diverging,
and cyclical modules in the plotly.colors package.
- A list of 2-element lists where the first element is the
normalized color level value (starting at 0 and ending at 1),
and the second item is a valid color string.
(e.g. [[0, 'green'], [0.5, 'red'], [1.0, 'rgb(0, 0, 255)']])
- One of the following named colorscales:
['aggrnyl', 'agsunset', 'algae', 'amp', 'armyrose', 'balance',
'blackbody', 'bluered', 'blues', 'blugrn', 'bluyl', 'brbg',
'brwnyl', 'bugn', 'bupu', 'burg', 'burgyl', 'cividis', 'curl',
'darkmint', 'deep', 'delta', 'dense', 'earth', 'edge', 'electric',
'emrld', 'fall', 'geyser', 'gnbu', 'gray', 'greens', 'greys',
'haline', 'hot', 'hsv', 'ice', 'icefire', 'inferno', 'jet',
'magenta', 'magma', 'matter', 'mint', 'mrybm', 'mygbm', 'oranges',
'orrd', 'oryel', 'peach', 'phase', 'picnic', 'pinkyl', 'piyg',
'plasma', 'plotly3', 'portland', 'prgn', 'pubu', 'pubugn', 'puor',
'purd', 'purp', 'purples', 'purpor', 'rainbow', 'rdbu', 'rdgy',
'rdpu', 'rdylbu', 'rdylgn', 'redor', 'reds', 'solar', 'spectral',
'speed', 'sunset', 'sunsetdark', 'teal', 'tealgrn', 'tealrose',
'tempo', 'temps', 'thermal', 'tropic', 'turbid', 'twilight',
'viridis', 'ylgn', 'ylgnbu', 'ylorbr', 'ylorrd']
Returns
-------
str
"""
return self["colorscale"]
@colorscale.setter
def colorscale(self, val):
self["colorscale"] = val
# colorssrc
# ---------
@property
def colorssrc(self):
"""
Sets the source reference on plot.ly for colors .
The 'colorssrc' property must be specified as a string or
as a plotly.grid_objs.Column object
Returns
-------
str
"""
return self["colorssrc"]
@colorssrc.setter
def colorssrc(self, val):
self["colorssrc"] = val
# depthfade
# ---------
@property
def depthfade(self):
"""
Determines if the sector colors are faded towards the
background from the leaves up to the headers. This option is
unavailable when a `colorscale` is present, defaults to false
when `marker.colors` is set, but otherwise defaults to true.
When set to "reversed", the fading direction is inverted, that
is the top elements within hierarchy are drawn with fully
saturated colors while the leaves are faded towards the
background color.
The 'depthfade' property is an enumeration that may be specified as:
- One of the following enumeration values:
[True, False, 'reversed']
Returns
-------
Any
"""
return self["depthfade"]
@depthfade.setter
def depthfade(self, val):
self["depthfade"] = val
# line
# ----
@property
def line(self):
"""
The 'line' property is an instance of Line
that may be specified as:
- An instance of plotly.graph_objs.treemap.marker.Line
- A dict of string/value properties that will be passed
to the Line constructor
Supported dict properties:
color
Sets the color of the line enclosing each
sector. Defaults to the `paper_bgcolor` value.
colorsrc
Sets the source reference on plot.ly for color
.
width
Sets the width (in px) of the line enclosing
each sector.
widthsrc
Sets the source reference on plot.ly for width
.
Returns
-------
plotly.graph_objs.treemap.marker.Line
"""
return self["line"]
@line.setter
def line(self, val):
self["line"] = val
# pad
# ---
@property
def pad(self):
"""
The 'pad' property is an instance of Pad
that may be specified as:
- An instance of plotly.graph_objs.treemap.marker.Pad
- A dict of string/value properties that will be passed
to the Pad constructor
Supported dict properties:
b
Sets the padding from the bottom (in px).
l
Sets the padding from the left (in px).
r
Sets the padding from the right (in px).
t
Sets the padding from the top (in px).
Returns
-------
plotly.graph_objs.treemap.marker.Pad
"""
return self["pad"]
@pad.setter
def pad(self, val):
self["pad"] = val
# reversescale
# ------------
@property
def reversescale(self):
"""
Reverses the color mapping if true. Has an effect only if
colors is set to a numerical array. If true, `marker.cmin` will
correspond to the last color in the array and `marker.cmax`
will correspond to the first color.
The 'reversescale' property must be specified as a bool
(either True, or False)
Returns
-------
bool
"""
return self["reversescale"]
@reversescale.setter
def reversescale(self, val):
self["reversescale"] = val
# showscale
# ---------
@property
def showscale(self):
"""
Determines whether or not a colorbar is displayed for this
trace. Has an effect only if colors is set to a numerical array.
The 'showscale' property must be specified as a bool
(either True, or False)
Returns
-------
bool
"""
return self["showscale"]
@showscale.setter
def showscale(self, val):
self["showscale"] = val
# property parent name
# --------------------
@property
def _parent_path_str(self):
return "treemap"
# Self properties description
# ---------------------------
@property
def _prop_descriptions(self):
return """\
autocolorscale
Determines whether the colorscale is a default palette
(`autocolorscale: true`) or the palette determined by
`marker.colorscale`. Has an effect only if colors is set
to a numerical array. In case `colorscale` is
unspecified or `autocolorscale` is true, the default
palette will be chosen according to whether numbers in
the `color` array are all positive, all negative or
mixed.
cauto
Determines whether or not the color domain is computed
with respect to the input data (here colors) or the
bounds set in `marker.cmin` and `marker.cmax`. Has an
effect only if colors is set to a numerical array.
Defaults to `false` when `marker.cmin` and
`marker.cmax` are set by the user.
cmax
Sets the upper bound of the color domain. Has an effect
only if colors is set to a numerical array. Value should
have the same units as colors and if set, `marker.cmin`
must be set as well.
cmid
Sets the mid-point of the color domain by scaling
`marker.cmin` and/or `marker.cmax` to be equidistant to
this point. Has an effect only if colors is set to a
numerical array. Value should have the same units as
colors. Has no effect when `marker.cauto` is `false`.
cmin
Sets the lower bound of the color domain. Has an effect
only if colors is set to a numerical array. Value should
have the same units as colors and if set, `marker.cmax`
must be set as well.
coloraxis
Sets a reference to a shared color axis. References to
these shared color axes are "coloraxis", "coloraxis2",
"coloraxis3", etc. Settings for these shared color axes
are set in the layout, under `layout.coloraxis`,
`layout.coloraxis2`, etc. Note that multiple color
scales can be linked to the same color axis.
colorbar
plotly.graph_objects.treemap.marker.ColorBar instance
or dict with compatible properties
colors
Sets the color of each sector of this trace. If not
specified, the default trace color set is used to pick
the sector colors.
colorscale
Sets the colorscale. Has an effect only if colors is set
to a numerical array. The colorscale must be an array
containing arrays mapping a normalized value to an rgb,
rgba, hex, hsl, hsv, or named color string. At minimum,
a mapping for the lowest (0) and highest (1) values are
required. For example, `[[0, 'rgb(0,0,255)'], [1,
'rgb(255,0,0)']]`. To control the bounds of the
colorscale in color space, use `marker.cmin` and
`marker.cmax`. Alternatively, `colorscale` may be a
palette name string of the following list: Greys,YlGnBu
,Greens,YlOrRd,Bluered,RdBu,Reds,Blues,Picnic,Rainbow,P
ortland,Jet,Hot,Blackbody,Earth,Electric,Viridis,Cividi
s.
colorssrc
Sets the source reference on plot.ly for colors .
depthfade
Determines if the sector colors are faded towards the
background from the leaves up to the headers. This
option is unavailable when a `colorscale` is present,
defaults to false when `marker.colors` is set, but
otherwise defaults to true. When set to "reversed", the
fading direction is inverted, that is the top elements
within hierarchy are drawn with fully saturated colors
while the leaves are faded towards the background
color.
line
plotly.graph_objects.treemap.marker.Line instance or
dict with compatible properties
pad
plotly.graph_objects.treemap.marker.Pad instance or
dict with compatible properties
reversescale
Reverses the color mapping if true. Has an effect only
if colors is set to a numerical array. If true,
`marker.cmin` will correspond to the last color in the
array and `marker.cmax` will correspond to the first
color.
showscale
Determines whether or not a colorbar is displayed for
this trace. Has an effect only if colors is set to a
numerical array.
"""
def __init__(
self,
arg=None,
autocolorscale=None,
cauto=None,
cmax=None,
cmid=None,
cmin=None,
coloraxis=None,
colorbar=None,
colors=None,
colorscale=None,
colorssrc=None,
depthfade=None,
line=None,
pad=None,
reversescale=None,
showscale=None,
**kwargs
):
"""
Construct a new Marker object
Parameters
----------
arg
dict of properties compatible with this constructor or
an instance of plotly.graph_objs.treemap.Marker
autocolorscale
Determines whether the colorscale is a default palette
(`autocolorscale: true`) or the palette determined by
`marker.colorscale`. Has an effect only if colors is set
to a numerical array. In case `colorscale` is
unspecified or `autocolorscale` is true, the default
palette will be chosen according to whether numbers in
the `color` array are all positive, all negative or
mixed.
cauto
Determines whether or not the color domain is computed
with respect to the input data (here colors) or the
bounds set in `marker.cmin` and `marker.cmax`. Has an
effect only if colors is set to a numerical array.
Defaults to `false` when `marker.cmin` and
`marker.cmax` are set by the user.
cmax
Sets the upper bound of the color domain. Has an effect
only if colors is set to a numerical array. Value should
have the same units as colors and if set, `marker.cmin`
must be set as well.
cmid
Sets the mid-point of the color domain by scaling
`marker.cmin` and/or `marker.cmax` to be equidistant to
this point. Has an effect only if colors is set to a
numerical array. Value should have the same units as
colors. Has no effect when `marker.cauto` is `false`.
cmin
Sets the lower bound of the color domain. Has an effect
only if colors is set to a numerical array. Value should
have the same units as colors and if set, `marker.cmax`
must be set as well.
coloraxis
Sets a reference to a shared color axis. References to
these shared color axes are "coloraxis", "coloraxis2",
"coloraxis3", etc. Settings for these shared color axes
are set in the layout, under `layout.coloraxis`,
`layout.coloraxis2`, etc. Note that multiple color
scales can be linked to the same color axis.
colorbar
plotly.graph_objects.treemap.marker.ColorBar instance
or dict with compatible properties
colors
Sets the color of each sector of this trace. If not
specified, the default trace color set is used to pick
the sector colors.
colorscale
Sets the colorscale. Has an effect only if colors is set
to a numerical array. The colorscale must be an array
containing arrays mapping a normalized value to an rgb,
rgba, hex, hsl, hsv, or named color string. At minimum,
a mapping for the lowest (0) and highest (1) values are
required. For example, `[[0, 'rgb(0,0,255)'], [1,
'rgb(255,0,0)']]`. To control the bounds of the
colorscale in color space, use `marker.cmin` and
`marker.cmax`. Alternatively, `colorscale` may be a
palette name string of the following list: Greys,YlGnBu
,Greens,YlOrRd,Bluered,RdBu,Reds,Blues,Picnic,Rainbow,P
ortland,Jet,Hot,Blackbody,Earth,Electric,Viridis,Cividi
s.
colorssrc
Sets the source reference on plot.ly for colors .
depthfade
Determines if the sector colors are faded towards the
background from the leaves up to the headers. This
option is unavailable when a `colorscale` is present,
defaults to false when `marker.colors` is set, but
otherwise defaults to true. When set to "reversed", the
fading direction is inverted, that is the top elements
within hierarchy are drawn with fully saturated colors
while the leaves are faded towards the background
color.
line
plotly.graph_objects.treemap.marker.Line instance or
dict with compatible properties
pad
plotly.graph_objects.treemap.marker.Pad instance or
dict with compatible properties
reversescale
Reverses the color mapping if true. Has an effect only
if colors is set to a numerical array. If true,
`marker.cmin` will correspond to the last color in the
array and `marker.cmax` will correspond to the first
color.
showscale
Determines whether or not a colorbar is displayed for
this trace. Has an effect only if colors is set to a
numerical array.
Returns
-------
Marker
"""
super(Marker, self).__init__("marker")
# Validate arg
# ------------
if arg is None:
arg = {}
elif isinstance(arg, self.__class__):
arg = arg.to_plotly_json()
elif isinstance(arg, dict):
arg = _copy.copy(arg)
else:
raise ValueError(
"""\
The first argument to the plotly.graph_objs.treemap.Marker
constructor must be a dict or
an instance of plotly.graph_objs.treemap.Marker"""
)
# Handle skip_invalid
# -------------------
self._skip_invalid = kwargs.pop("skip_invalid", False)
# Import validators
# -----------------
from plotly.validators.treemap import marker as v_marker
# Initialize validators
# ---------------------
self._validators["autocolorscale"] = v_marker.AutocolorscaleValidator()
self._validators["cauto"] = v_marker.CautoValidator()
self._validators["cmax"] = v_marker.CmaxValidator()
self._validators["cmid"] = v_marker.CmidValidator()
self._validators["cmin"] = v_marker.CminValidator()
self._validators["coloraxis"] = v_marker.ColoraxisValidator()
self._validators["colorbar"] = v_marker.ColorBarValidator()
self._validators["colors"] = v_marker.ColorsValidator()
self._validators["colorscale"] = v_marker.ColorscaleValidator()
self._validators["colorssrc"] = v_marker.ColorssrcValidator()
self._validators["depthfade"] = v_marker.DepthfadeValidator()
self._validators["line"] = v_marker.LineValidator()
self._validators["pad"] = v_marker.PadValidator()
self._validators["reversescale"] = v_marker.ReversescaleValidator()
self._validators["showscale"] = v_marker.ShowscaleValidator()
# Populate data dict with properties
# ----------------------------------
_v = arg.pop("autocolorscale", None)
self["autocolorscale"] = autocolorscale if autocolorscale is not None else _v
_v = arg.pop("cauto", None)
self["cauto"] = cauto if cauto is not None else _v
_v = arg.pop("cmax", None)
self["cmax"] = cmax if cmax is not None else _v
_v = arg.pop("cmid", None)
self["cmid"] = cmid if cmid is not None else _v
_v = arg.pop("cmin", None)
self["cmin"] = cmin if cmin is not None else _v
_v = arg.pop("coloraxis", None)
self["coloraxis"] = coloraxis if coloraxis is not None else _v
_v = arg.pop("colorbar", None)
self["colorbar"] = colorbar if colorbar is not None else _v
_v = arg.pop("colors", None)
self["colors"] = colors if colors is not None else _v
_v = arg.pop("colorscale", None)
self["colorscale"] = colorscale if colorscale is not None else _v
_v = arg.pop("colorssrc", None)
self["colorssrc"] = colorssrc if colorssrc is not None else _v
_v = arg.pop("depthfade", None)
self["depthfade"] = depthfade if depthfade is not None else _v
_v = arg.pop("line", None)
self["line"] = line if line is not None else _v
_v = arg.pop("pad", None)
self["pad"] = pad if pad is not None else _v
_v = arg.pop("reversescale", None)
self["reversescale"] = reversescale if reversescale is not None else _v
_v = arg.pop("showscale", None)
self["showscale"] = showscale if showscale is not None else _v
# Process unknown kwargs
# ----------------------
self._process_kwargs(**dict(arg, **kwargs))
# Reset skip_invalid
# ------------------
self._skip_invalid = False
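# Usage sketch added for illustration (assumes the public
# plotly.graph_objects API; values are invented). A numeric `colors`
# array is what activates the continuous-color attributes documented
# above (`colorscale`, `cmin`/`cmax`, `showscale`, ...):
#
#     import plotly.graph_objects as go
#     fig = go.Figure(go.Treemap(
#         labels=["root", "A", "B"],
#         parents=["", "root", "root"],
#         marker=dict(colors=[10, 20, 30], colorscale="Viridis",
#                     cmin=0, cmax=30, showscale=True),
#     ))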
from plotly.basedatatypes import BaseTraceHierarchyType as _BaseTraceHierarchyType
import copy as _copy
class Insidetextfont(_BaseTraceHierarchyType):
# color
# -----
@property
def color(self):
"""
The 'color' property is a color and may be specified as:
- A hex string (e.g. '#ff0000')
- An rgb/rgba string (e.g. 'rgb(255,0,0)')
- An hsl/hsla string (e.g. 'hsl(0,100%,50%)')
- An hsv/hsva string (e.g. 'hsv(0,100%,100%)')
- A named CSS color:
aliceblue, antiquewhite, aqua, aquamarine, azure,
beige, bisque, black, blanchedalmond, blue,
blueviolet, brown, burlywood, cadetblue,
chartreuse, chocolate, coral, cornflowerblue,
cornsilk, crimson, cyan, darkblue, darkcyan,
darkgoldenrod, darkgray, darkgrey, darkgreen,
darkkhaki, darkmagenta, darkolivegreen, darkorange,
darkorchid, darkred, darksalmon, darkseagreen,
darkslateblue, darkslategray, darkslategrey,
darkturquoise, darkviolet, deeppink, deepskyblue,
dimgray, dimgrey, dodgerblue, firebrick,
floralwhite, forestgreen, fuchsia, gainsboro,
ghostwhite, gold, goldenrod, gray, grey, green,
greenyellow, honeydew, hotpink, indianred, indigo,
ivory, khaki, lavender, lavenderblush, lawngreen,
lemonchiffon, lightblue, lightcoral, lightcyan,
lightgoldenrodyellow, lightgray, lightgrey,
lightgreen, lightpink, lightsalmon, lightseagreen,
lightskyblue, lightslategray, lightslategrey,
lightsteelblue, lightyellow, lime, limegreen,
linen, magenta, maroon, mediumaquamarine,
mediumblue, mediumorchid, mediumpurple,
mediumseagreen, mediumslateblue, mediumspringgreen,
mediumturquoise, mediumvioletred, midnightblue,
mintcream, mistyrose, moccasin, navajowhite, navy,
oldlace, olive, olivedrab, orange, orangered,
orchid, palegoldenrod, palegreen, paleturquoise,
palevioletred, papayawhip, peachpuff, peru, pink,
plum, powderblue, purple, red, rosybrown,
royalblue, rebeccapurple, saddlebrown, salmon,
sandybrown, seagreen, seashell, sienna, silver,
skyblue, slateblue, slategray, slategrey, snow,
springgreen, steelblue, tan, teal, thistle, tomato,
turquoise, violet, wheat, white, whitesmoke,
yellow, yellowgreen
- A list or array of any of the above
Returns
-------
str|numpy.ndarray
"""
return self["color"]
@color.setter
def color(self, val):
self["color"] = val
# colorsrc
# --------
@property
def colorsrc(self):
"""
Sets the source reference on plot.ly for color .
The 'colorsrc' property must be specified as a string or
as a plotly.grid_objs.Column object
Returns
-------
str
"""
return self["colorsrc"]
@colorsrc.setter
def colorsrc(self, val):
self["colorsrc"] = val
# family
# ------
@property
def family(self):
"""
HTML font family - the typeface that will be applied by the web
browser. The web browser will only be able to apply a font if
it is available on the system which it operates. Provide
multiple font families, separated by commas, to indicate the
preference in which to apply fonts if they aren't available on
the system. The plotly service (at https://plot.ly or on-
premise) generates images on a server, where only a select
number of fonts are installed and supported. These include
"Arial", "Balto", "Courier New", "Droid Sans",, "Droid Serif",
"Droid Sans Mono", "Gravitas One", "Old Standard TT", "Open
Sans", "Overpass", "PT Sans Narrow", "Raleway", "Times New
Roman".
The 'family' property is a string and must be specified as:
- A non-empty string
- A tuple, list, or one-dimensional numpy array of the above
Returns
-------
str|numpy.ndarray
"""
return self["family"]
@family.setter
def family(self, val):
self["family"] = val
# familysrc
# ---------
@property
def familysrc(self):
"""
Sets the source reference on plot.ly for family .
The 'familysrc' property must be specified as a string or
as a plotly.grid_objs.Column object
Returns
-------
str
"""
return self["familysrc"]
@familysrc.setter
def familysrc(self, val):
self["familysrc"] = val
# size
# ----
@property
def size(self):
"""
The 'size' property is a number and may be specified as:
- An int or float in the interval [1, inf]
- A tuple, list, or one-dimensional numpy array of the above
Returns
-------
int|float|numpy.ndarray
"""
return self["size"]
@size.setter
def size(self, val):
self["size"] = val
# sizesrc
# -------
@property
def sizesrc(self):
"""
Sets the source reference on plot.ly for size .
The 'sizesrc' property must be specified as a string or
as a plotly.grid_objs.Column object
Returns
-------
str
"""
return self["sizesrc"]
@sizesrc.setter
def sizesrc(self, val):
self["sizesrc"] = val
# property parent name
# --------------------
@property
def _parent_path_str(self):
return "treemap"
# Self properties description
# ---------------------------
@property
def _prop_descriptions(self):
return """\
color
colorsrc
Sets the source reference on plot.ly for color .
family
HTML font family - the typeface that will be applied by
the web browser. The web browser will only be able to
apply a font if it is available on the system which it
operates. Provide multiple font families, separated by
commas, to indicate the preference in which to apply
fonts if they aren't available on the system. The
plotly service (at https://plot.ly or on-premise)
generates images on a server, where only a select
number of fonts are installed and supported. These
include "Arial", "Balto", "Courier New", "Droid Sans",,
"Droid Serif", "Droid Sans Mono", "Gravitas One", "Old
Standard TT", "Open Sans", "Overpass", "PT Sans
Narrow", "Raleway", "Times New Roman".
familysrc
Sets the source reference on plot.ly for family .
size
sizesrc
Sets the source reference on plot.ly for size .
"""
def __init__(
self,
arg=None,
color=None,
colorsrc=None,
family=None,
familysrc=None,
size=None,
sizesrc=None,
**kwargs
):
"""
Construct a new Insidetextfont object
Sets the font used for `textinfo` lying inside the sector.
Parameters
----------
arg
dict of properties compatible with this constructor or
an instance of plotly.graph_objs.treemap.Insidetextfont
color
colorsrc
Sets the source reference on plot.ly for color .
family
HTML font family - the typeface that will be applied by
the web browser. The web browser will only be able to
apply a font if it is available on the system which it
operates. Provide multiple font families, separated by
commas, to indicate the preference in which to apply
fonts if they aren't available on the system. The
plotly service (at https://plot.ly or on-premise)
generates images on a server, where only a select
number of fonts are installed and supported. These
include "Arial", "Balto", "Courier New", "Droid Sans",,
"Droid Serif", "Droid Sans Mono", "Gravitas One", "Old
Standard TT", "Open Sans", "Overpass", "PT Sans
Narrow", "Raleway", "Times New Roman".
familysrc
Sets the source reference on plot.ly for family .
size
sizesrc
Sets the source reference on plot.ly for size .
Returns
-------
Insidetextfont
"""
super(Insidetextfont, self).__init__("insidetextfont")
# Validate arg
# ------------
if arg is None:
arg = {}
elif isinstance(arg, self.__class__):
arg = arg.to_plotly_json()
elif isinstance(arg, dict):
arg = _copy.copy(arg)
else:
raise ValueError(
"""\
The first argument to the plotly.graph_objs.treemap.Insidetextfont
constructor must be a dict or
an instance of plotly.graph_objs.treemap.Insidetextfont"""
)
# Handle skip_invalid
# -------------------
self._skip_invalid = kwargs.pop("skip_invalid", False)
# Import validators
# -----------------
from plotly.validators.treemap import insidetextfont as v_insidetextfont
# Initialize validators
# ---------------------
self._validators["color"] = v_insidetextfont.ColorValidator()
self._validators["colorsrc"] = v_insidetextfont.ColorsrcValidator()
self._validators["family"] = v_insidetextfont.FamilyValidator()
self._validators["familysrc"] = v_insidetextfont.FamilysrcValidator()
self._validators["size"] = v_insidetextfont.SizeValidator()
self._validators["sizesrc"] = v_insidetextfont.SizesrcValidator()
# Populate data dict with properties
# ----------------------------------
_v = arg.pop("color", None)
self["color"] = color if color is not None else _v
_v = arg.pop("colorsrc", None)
self["colorsrc"] = colorsrc if colorsrc is not None else _v
_v = arg.pop("family", None)
self["family"] = family if family is not None else _v
_v = arg.pop("familysrc", None)
self["familysrc"] = familysrc if familysrc is not None else _v
_v = arg.pop("size", None)
self["size"] = size if size is not None else _v
_v = arg.pop("sizesrc", None)
self["sizesrc"] = sizesrc if sizesrc is not None else _v
# Process unknown kwargs
# ----------------------
self._process_kwargs(**dict(arg, **kwargs))
# Reset skip_invalid
# ------------------
self._skip_invalid = False
from plotly.basedatatypes import BaseTraceHierarchyType as _BaseTraceHierarchyType
import copy as _copy
class Hoverlabel(_BaseTraceHierarchyType):
# align
# -----
@property
def align(self):
"""
Sets the horizontal alignment of the text content within hover
label box. Has an effect only if the hover label text spans
two or more lines
The 'align' property is an enumeration that may be specified as:
- One of the following enumeration values:
['left', 'right', 'auto']
- A tuple, list, or one-dimensional numpy array of the above
Returns
-------
Any|numpy.ndarray
"""
return self["align"]
@align.setter
def align(self, val):
self["align"] = val
# alignsrc
# --------
@property
def alignsrc(self):
"""
Sets the source reference on plot.ly for align .
The 'alignsrc' property must be specified as a string or
as a plotly.grid_objs.Column object
Returns
-------
str
"""
return self["alignsrc"]
@alignsrc.setter
def alignsrc(self, val):
self["alignsrc"] = val
# bgcolor
# -------
@property
def bgcolor(self):
"""
Sets the background color of the hover labels for this trace
The 'bgcolor' property is a color and may be specified as:
- A hex string (e.g. '#ff0000')
- An rgb/rgba string (e.g. 'rgb(255,0,0)')
- An hsl/hsla string (e.g. 'hsl(0,100%,50%)')
- An hsv/hsva string (e.g. 'hsv(0,100%,100%)')
- A named CSS color:
aliceblue, antiquewhite, aqua, aquamarine, azure,
beige, bisque, black, blanchedalmond, blue,
blueviolet, brown, burlywood, cadetblue,
chartreuse, chocolate, coral, cornflowerblue,
cornsilk, crimson, cyan, darkblue, darkcyan,
darkgoldenrod, darkgray, darkgrey, darkgreen,
darkkhaki, darkmagenta, darkolivegreen, darkorange,
darkorchid, darkred, darksalmon, darkseagreen,
darkslateblue, darkslategray, darkslategrey,
darkturquoise, darkviolet, deeppink, deepskyblue,
dimgray, dimgrey, dodgerblue, firebrick,
floralwhite, forestgreen, fuchsia, gainsboro,
ghostwhite, gold, goldenrod, gray, grey, green,
greenyellow, honeydew, hotpink, indianred, indigo,
ivory, khaki, lavender, lavenderblush, lawngreen,
lemonchiffon, lightblue, lightcoral, lightcyan,
lightgoldenrodyellow, lightgray, lightgrey,
lightgreen, lightpink, lightsalmon, lightseagreen,
lightskyblue, lightslategray, lightslategrey,
lightsteelblue, lightyellow, lime, limegreen,
linen, magenta, maroon, mediumaquamarine,
mediumblue, mediumorchid, mediumpurple,
mediumseagreen, mediumslateblue, mediumspringgreen,
mediumturquoise, mediumvioletred, midnightblue,
mintcream, mistyrose, moccasin, navajowhite, navy,
oldlace, olive, olivedrab, orange, orangered,
orchid, palegoldenrod, palegreen, paleturquoise,
palevioletred, papayawhip, peachpuff, peru, pink,
plum, powderblue, purple, red, rosybrown,
royalblue, rebeccapurple, saddlebrown, salmon,
sandybrown, seagreen, seashell, sienna, silver,
skyblue, slateblue, slategray, slategrey, snow,
springgreen, steelblue, tan, teal, thistle, tomato,
turquoise, violet, wheat, white, whitesmoke,
yellow, yellowgreen
- A list or array of any of the above
Returns
-------
str|numpy.ndarray
"""
return self["bgcolor"]
@bgcolor.setter
def bgcolor(self, val):
self["bgcolor"] = val
# bgcolorsrc
# ----------
@property
def bgcolorsrc(self):
"""
Sets the source reference on plot.ly for bgcolor .
The 'bgcolorsrc' property must be specified as a string or
as a plotly.grid_objs.Column object
Returns
-------
str
"""
return self["bgcolorsrc"]
@bgcolorsrc.setter
def bgcolorsrc(self, val):
self["bgcolorsrc"] = val
# bordercolor
# -----------
@property
def bordercolor(self):
"""
Sets the border color of the hover labels for this trace.
The 'bordercolor' property is a color and may be specified as:
- A hex string (e.g. '#ff0000')
- An rgb/rgba string (e.g. 'rgb(255,0,0)')
- An hsl/hsla string (e.g. 'hsl(0,100%,50%)')
- An hsv/hsva string (e.g. 'hsv(0,100%,100%)')
- A named CSS color:
aliceblue, antiquewhite, aqua, aquamarine, azure,
beige, bisque, black, blanchedalmond, blue,
blueviolet, brown, burlywood, cadetblue,
chartreuse, chocolate, coral, cornflowerblue,
cornsilk, crimson, cyan, darkblue, darkcyan,
darkgoldenrod, darkgray, darkgrey, darkgreen,
darkkhaki, darkmagenta, darkolivegreen, darkorange,
darkorchid, darkred, darksalmon, darkseagreen,
darkslateblue, darkslategray, darkslategrey,
darkturquoise, darkviolet, deeppink, deepskyblue,
dimgray, dimgrey, dodgerblue, firebrick,
floralwhite, forestgreen, fuchsia, gainsboro,
ghostwhite, gold, goldenrod, gray, grey, green,
greenyellow, honeydew, hotpink, indianred, indigo,
ivory, khaki, lavender, lavenderblush, lawngreen,
lemonchiffon, lightblue, lightcoral, lightcyan,
lightgoldenrodyellow, lightgray, lightgrey,
lightgreen, lightpink, lightsalmon, lightseagreen,
lightskyblue, lightslategray, lightslategrey,
lightsteelblue, lightyellow, lime, limegreen,
linen, magenta, maroon, mediumaquamarine,
mediumblue, mediumorchid, mediumpurple,
mediumseagreen, mediumslateblue, mediumspringgreen,
mediumturquoise, mediumvioletred, midnightblue,
mintcream, mistyrose, moccasin, navajowhite, navy,
oldlace, olive, olivedrab, orange, orangered,
orchid, palegoldenrod, palegreen, paleturquoise,
palevioletred, papayawhip, peachpuff, peru, pink,
plum, powderblue, purple, red, rosybrown,
royalblue, rebeccapurple, saddlebrown, salmon,
sandybrown, seagreen, seashell, sienna, silver,
skyblue, slateblue, slategray, slategrey, snow,
springgreen, steelblue, tan, teal, thistle, tomato,
turquoise, violet, wheat, white, whitesmoke,
yellow, yellowgreen
- A list or array of any of the above
Returns
-------
str|numpy.ndarray
"""
return self["bordercolor"]
@bordercolor.setter
def bordercolor(self, val):
self["bordercolor"] = val
# bordercolorsrc
# --------------
@property
def bordercolorsrc(self):
"""
Sets the source reference on plot.ly for bordercolor .
The 'bordercolorsrc' property must be specified as a string or
as a plotly.grid_objs.Column object
Returns
-------
str
"""
return self["bordercolorsrc"]
@bordercolorsrc.setter
def bordercolorsrc(self, val):
self["bordercolorsrc"] = val
# font
# ----
@property
def font(self):
"""
Sets the font used in hover labels.
The 'font' property is an instance of Font
that may be specified as:
- An instance of plotly.graph_objs.treemap.hoverlabel.Font
- A dict of string/value properties that will be passed
to the Font constructor
Supported dict properties:
color
colorsrc
Sets the source reference on plot.ly for color
.
family
HTML font family - the typeface that will be
applied by the web browser. The web browser
will only be able to apply a font if it is
available on the system which it operates.
Provide multiple font families, separated by
commas, to indicate the preference in which to
apply fonts if they aren't available on the
system. The plotly service (at https://plot.ly
or on-premise) generates images on a server,
where only a select number of fonts are
installed and supported. These include "Arial",
"Balto", "Courier New", "Droid Sans",, "Droid
Serif", "Droid Sans Mono", "Gravitas One", "Old
Standard TT", "Open Sans", "Overpass", "PT Sans
Narrow", "Raleway", "Times New Roman".
familysrc
Sets the source reference on plot.ly for
family .
size
sizesrc
Sets the source reference on plot.ly for size
.
Returns
-------
plotly.graph_objs.treemap.hoverlabel.Font
"""
return self["font"]
@font.setter
def font(self, val):
self["font"] = val
# namelength
# ----------
@property
def namelength(self):
"""
Sets the default length (in number of characters) of the trace
name in the hover labels for all traces. -1 shows the whole
name regardless of length. 0-3 shows the first 0-3 characters,
and an integer >3 will show the whole name if it is less than
that many characters, but if it is longer, will truncate to
`namelength - 3` characters and add an ellipsis.
The 'namelength' property is an integer and may be specified as:
- An int (or float that will be cast to an int)
in the interval [-1, 9223372036854775807]
- A tuple, list, or one-dimensional numpy array of the above
Returns
-------
int|numpy.ndarray
"""
return self["namelength"]
@namelength.setter
def namelength(self, val):
self["namelength"] = val
# namelengthsrc
# -------------
@property
def namelengthsrc(self):
"""
Sets the source reference on plot.ly for namelength .
The 'namelengthsrc' property must be specified as a string or
as a plotly.grid_objs.Column object
Returns
-------
str
"""
return self["namelengthsrc"]
@namelengthsrc.setter
def namelengthsrc(self, val):
self["namelengthsrc"] = val
# property parent name
# --------------------
@property
def _parent_path_str(self):
return "treemap"
# Self properties description
# ---------------------------
@property
def _prop_descriptions(self):
return """\
align
Sets the horizontal alignment of the text content
within hover label box. Has an effect only if the hover
label text spans two or more lines
alignsrc
Sets the source reference on plot.ly for align .
bgcolor
Sets the background color of the hover labels for this
trace
bgcolorsrc
Sets the source reference on plot.ly for bgcolor .
bordercolor
Sets the border color of the hover labels for this
trace.
bordercolorsrc
Sets the source reference on plot.ly for bordercolor .
font
Sets the font used in hover labels.
namelength
Sets the default length (in number of characters) of
the trace name in the hover labels for all traces. -1
shows the whole name regardless of length. 0-3 shows
the first 0-3 characters, and an integer >3 will show
the whole name if it is less than that many characters,
but if it is longer, will truncate to `namelength - 3`
characters and add an ellipsis.
namelengthsrc
Sets the source reference on plot.ly for namelength .
"""
def __init__(
self,
arg=None,
align=None,
alignsrc=None,
bgcolor=None,
bgcolorsrc=None,
bordercolor=None,
bordercolorsrc=None,
font=None,
namelength=None,
namelengthsrc=None,
**kwargs
):
"""
Construct a new Hoverlabel object
Parameters
----------
arg
dict of properties compatible with this constructor or
an instance of plotly.graph_objs.treemap.Hoverlabel
align
Sets the horizontal alignment of the text content
within hover label box. Has an effect only if the hover
label text spans two or more lines
alignsrc
Sets the source reference on plot.ly for align .
bgcolor
Sets the background color of the hover labels for this
trace
bgcolorsrc
Sets the source reference on plot.ly for bgcolor .
bordercolor
Sets the border color of the hover labels for this
trace.
bordercolorsrc
Sets the source reference on plot.ly for bordercolor .
font
Sets the font used in hover labels.
namelength
Sets the default length (in number of characters) of
the trace name in the hover labels for all traces. -1
shows the whole name regardless of length. 0-3 shows
the first 0-3 characters, and an integer >3 will show
the whole name if it is less than that many characters,
but if it is longer, will truncate to `namelength - 3`
characters and add an ellipsis.
namelengthsrc
Sets the source reference on plot.ly for namelength .
Returns
-------
Hoverlabel
"""
super(Hoverlabel, self).__init__("hoverlabel")
# Validate arg
# ------------
if arg is None:
arg = {}
elif isinstance(arg, self.__class__):
arg = arg.to_plotly_json()
elif isinstance(arg, dict):
arg = _copy.copy(arg)
else:
raise ValueError(
"""\
The first argument to the plotly.graph_objs.treemap.Hoverlabel
constructor must be a dict or
an instance of plotly.graph_objs.treemap.Hoverlabel"""
)
# Handle skip_invalid
# -------------------
self._skip_invalid = kwargs.pop("skip_invalid", False)
# Import validators
# -----------------
from plotly.validators.treemap import hoverlabel as v_hoverlabel
# Initialize validators
# ---------------------
self._validators["align"] = v_hoverlabel.AlignValidator()
self._validators["alignsrc"] = v_hoverlabel.AlignsrcValidator()
self._validators["bgcolor"] = v_hoverlabel.BgcolorValidator()
self._validators["bgcolorsrc"] = v_hoverlabel.BgcolorsrcValidator()
self._validators["bordercolor"] = v_hoverlabel.BordercolorValidator()
self._validators["bordercolorsrc"] = v_hoverlabel.BordercolorsrcValidator()
self._validators["font"] = v_hoverlabel.FontValidator()
self._validators["namelength"] = v_hoverlabel.NamelengthValidator()
self._validators["namelengthsrc"] = v_hoverlabel.NamelengthsrcValidator()
# Populate data dict with properties
# ----------------------------------
_v = arg.pop("align", None)
self["align"] = align if align is not None else _v
_v = arg.pop("alignsrc", None)
self["alignsrc"] = alignsrc if alignsrc is not None else _v
_v = arg.pop("bgcolor", None)
self["bgcolor"] = bgcolor if bgcolor is not None else _v
_v = arg.pop("bgcolorsrc", None)
self["bgcolorsrc"] = bgcolorsrc if bgcolorsrc is not None else _v
_v = arg.pop("bordercolor", None)
self["bordercolor"] = bordercolor if bordercolor is not None else _v
_v = arg.pop("bordercolorsrc", None)
self["bordercolorsrc"] = bordercolorsrc if bordercolorsrc is not None else _v
_v = arg.pop("font", None)
self["font"] = font if font is not None else _v
_v = arg.pop("namelength", None)
self["namelength"] = namelength if namelength is not None else _v
_v = arg.pop("namelengthsrc", None)
self["namelengthsrc"] = namelengthsrc if namelengthsrc is not None else _v
# Process unknown kwargs
# ----------------------
self._process_kwargs(**dict(arg, **kwargs))
# Reset skip_invalid
# ------------------
self._skip_invalid = False
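# Usage sketch added for illustration (assumes the public
# plotly.graph_objects API; values are invented). `namelength=-1`
# shows the whole trace name in hover labels, per the docstring above:
#
#     import plotly.graph_objects as go
#     fig = go.Figure(go.Treemap(
#         labels=["root", "A"],
#         parents=["", "root"],
#         hoverlabel=dict(bgcolor="white", align="left", namelength=-1),
#     ))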
from plotly.basedatatypes import BaseTraceHierarchyType as _BaseTraceHierarchyType
import copy as _copy
class Domain(_BaseTraceHierarchyType):
# column
# ------
@property
def column(self):
"""
If there is a layout grid, use the domain for this column in
the grid for this treemap trace.
The 'column' property is an integer and may be specified as:
- An int (or float that will be cast to an int)
in the interval [0, 9223372036854775807]
Returns
-------
int
"""
return self["column"]
@column.setter
def column(self, val):
self["column"] = val
# row
# ---
@property
def row(self):
"""
If there is a layout grid, use the domain for this row in the
grid for this treemap trace.
The 'row' property is an integer and may be specified as:
- An int (or float that will be cast to an int)
in the interval [0, 9223372036854775807]
Returns
-------
int
"""
return self["row"]
@row.setter
def row(self, val):
self["row"] = val
# x
# -
@property
def x(self):
"""
Sets the horizontal domain of this treemap trace (in plot
fraction).
The 'x' property is an info array that may be specified as:
* a list or tuple of 2 elements where:
(0) The 'x[0]' property is a number and may be specified as:
- An int or float in the interval [0, 1]
(1) The 'x[1]' property is a number and may be specified as:
- An int or float in the interval [0, 1]
Returns
-------
list
"""
return self["x"]
@x.setter
def x(self, val):
self["x"] = val
# y
# -
@property
def y(self):
"""
Sets the vertical domain of this treemap trace (in plot
fraction).
The 'y' property is an info array that may be specified as:
* a list or tuple of 2 elements where:
(0) The 'y[0]' property is a number and may be specified as:
- An int or float in the interval [0, 1]
(1) The 'y[1]' property is a number and may be specified as:
- An int or float in the interval [0, 1]
Returns
-------
list
"""
return self["y"]
@y.setter
def y(self, val):
self["y"] = val
# property parent name
# --------------------
@property
def _parent_path_str(self):
return "treemap"
# Self properties description
# ---------------------------
@property
def _prop_descriptions(self):
return """\
column
If there is a layout grid, use the domain for this
column in the grid for this treemap trace.
row
If there is a layout grid, use the domain for this row
in the grid for this treemap trace.
x
Sets the horizontal domain of this treemap trace (in
plot fraction).
y
Sets the vertical domain of this treemap trace (in plot
fraction).
"""
def __init__(self, arg=None, column=None, row=None, x=None, y=None, **kwargs):
"""
Construct a new Domain object
Parameters
----------
arg
dict of properties compatible with this constructor or
an instance of plotly.graph_objs.treemap.Domain
column
If there is a layout grid, use the domain for this
column in the grid for this treemap trace.
row
If there is a layout grid, use the domain for this row
in the grid for this treemap trace.
x
Sets the horizontal domain of this treemap trace (in
plot fraction).
y
Sets the vertical domain of this treemap trace (in plot
fraction).
Returns
-------
Domain
"""
super(Domain, self).__init__("domain")
# Validate arg
# ------------
if arg is None:
arg = {}
elif isinstance(arg, self.__class__):
arg = arg.to_plotly_json()
elif isinstance(arg, dict):
arg = _copy.copy(arg)
else:
raise ValueError(
"""\
The first argument to the plotly.graph_objs.treemap.Domain
constructor must be a dict or
an instance of plotly.graph_objs.treemap.Domain"""
)
# Handle skip_invalid
# -------------------
self._skip_invalid = kwargs.pop("skip_invalid", False)
# Import validators
# -----------------
from plotly.validators.treemap import domain as v_domain
# Initialize validators
# ---------------------
self._validators["column"] = v_domain.ColumnValidator()
self._validators["row"] = v_domain.RowValidator()
self._validators["x"] = v_domain.XValidator()
self._validators["y"] = v_domain.YValidator()
# Populate data dict with properties
# ----------------------------------
_v = arg.pop("column", None)
self["column"] = column if column is not None else _v
_v = arg.pop("row", None)
self["row"] = row if row is not None else _v
_v = arg.pop("x", None)
self["x"] = x if x is not None else _v
_v = arg.pop("y", None)
self["y"] = y if y is not None else _v
# Process unknown kwargs
# ----------------------
self._process_kwargs(**dict(arg, **kwargs))
# Reset skip_invalid
# ------------------
self._skip_invalid = False
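# Usage sketch added for illustration (assumes the public
# plotly.graph_objects API): two treemap traces placed side by side via
# explicit x-domains in plot fractions:
#
#     import plotly.graph_objects as go
#     fig = go.Figure()
#     fig.add_trace(go.Treemap(labels=["a"], parents=[""],
#                              domain=dict(x=[0.0, 0.45])))
#     fig.add_trace(go.Treemap(labels=["b"], parents=[""],
#                              domain=dict(x=[0.55, 1.0])))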
__all__ = [
"Domain",
"Hoverlabel",
"Insidetextfont",
"Marker",
"Outsidetextfont",
"Pathbar",
"Stream",
"Textfont",
"Tiling",
"hoverlabel",
"marker",
"pathbar",
]
from plotly.graph_objs.treemap import pathbar
from plotly.graph_objs.treemap import marker
from plotly.graph_objs.treemap import hoverlabel
| [
[[33, 82], [119, 142]],
[[90, 103]],
[[112, 118], [5737, 5743]],
[[7558, 7607], [7646, 7669]],
[[7615, 7628]],
[[7637, 7645], [16404, 16412]],
[[18575, 18624], [18661, 18684]],
[[18632, 18645]],
[[18654, 18660], [21180, 21186]],
[[22669, 22718], [22756, 22779]],
[[22726, 22739]],
[[22748, 22755], [28945, 28952]],
[[30964, 31013], [31059, 31082]],
[[31021, 31034]],
[[31043, 31058], [39875, 39890]],
[[42130, 42179], [42216, 42239]],
[[42187, 42200]],
[[42209, 42215], [75968, 75974]],
[[79684, 79733], [79778, 79801]],
[[79741, 79754]],
[[79763, 79777], [88578, 88592]],
[[90821, 90870], [90911, 90934]],
[[90878, 90891]],
[[90900, 90910], [105544, 105554]],
[[108390, 108439], [108476, 108499]],
[[108447, 108460], [6001, 6006], [16672, 16677], [21444, 21449], [29211, 29216], [40157, 40162], [76232, 76237], [88858, 88863], [105816, 105821], [112696, 112701]],
[[108469, 108475], [112432, 112438]],
[[114124, 114131]],
[[114374, 114381]],
[[114420, 114426]],
[[114465, 114475]]
] |
from django.conf.urls import url,include
from django.contrib import admin
from django.contrib.auth import views
urlpatterns = [
url(r'^admin/', admin.site.urls),
    url(r'', include('instagram.urls')),
url(r'^accounts/', include('registration.backends.simple.urls')),
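    # Note: django.contrib.auth.views.logout (a function-based view) was
    # removed in Django 2.1; on newer Django use LogoutView.as_view() instead.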
url(r'^logout/$', views.logout, {"next_page": '/'}),
] | [
[
[
29,
32
],
[
134,
137
],
[
172,
175
],
[
212,
215
],
[
282,
285
]
],
[
[
33,
40
],
[
180,
187
],
[
231,
238
]
],
[
[
68,
73
],
[
150,
155
]
],
[
[
106,
111
],
[
300,
305
]
],
[
[
114,
125
]
]
] |
import chainer
from chainer import training
from chainer.training import extensions
from chainer.datasets import TupleDataset
from chainer import Chain
from chainer import links as L
from chainer import functions as F
from chainer import reporter
from chainer import cuda
import numpy as np
def dot(a, b):
""" Simple dot product"""
return F.sum(a * b, axis=-1)
def batch_interactions(x):
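    """Form every ordered pair of feature columns of x.
    Returns (left, right) arrays of shape (batchsize, n_features ** 2)
    whose k-th columns together give one ordered (i, j) feature pair.
    """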
xp = cuda.get_array_module(x.data)
batchsize = x.shape[0]
shape = (batchsize, x.shape[1] ** 2)
left = xp.tile(x.data, (1, x.shape[1]))
right = xp.repeat(x.data, x.shape[1]).reshape(shape)
return left, right
class VFM(Chain):
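    """Variational factorization machine (VFM).
    Places Gaussian posteriors (mean and log variance) over the bias,
    the per-feature slopes and the latent interaction vectors, and is
    trained with reparameterized sampling plus KL regularization.
    """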
lv_floor = -100.0
    def __init__(self, n_features=None, n_dim=1, lossfun=F.mean_squared_error,
lambda0=1, lambda1=1, lambda2=1, init_bias_mu=0.0,
init_bias_lv=0.0, intx_term=True, total_nobs=1):
self.n_dim = n_dim
self.n_features = n_features
self.lossfun = lossfun
self.lambda0 = lambda0
self.lambda1 = lambda1
self.lambda2 = lambda2
self.intx_term = intx_term
self.total_nobs = total_nobs
# In contrast to the FM model, the slopes and latent vectors
# will have means (mu) and log variances (lv) for each component.
super(VFM, self).__init__(bias_mu=L.Bias(shape=(1,)),
bias_lv=L.Bias(shape=(1,)),
slop_mu=L.Bias(shape=(1, 1)),
slop_lv=L.Bias(shape=(1, 1)),
slop_delta_mu=L.EmbedID(n_features, 1,
ignore_label=-1),
slop_delta_lv=L.EmbedID(n_features, 1,
ignore_label=-1),
feat_mu_vec=L.Bias(shape=(1, 1, n_dim)),
feat_lv_vec=L.Bias(shape=(1, 1, n_dim)),
feat_delta_mu=L.EmbedID(n_features, n_dim,
ignore_label=-1),
feat_delta_lv=L.EmbedID(n_features, n_dim,
ignore_label=-1))
# Xavier initialize weights
c = np.sqrt(n_features * n_dim) * 1e3
d = np.sqrt(n_features) * 1e3
self.feat_delta_mu.W.data[...] = np.random.randn(n_features, n_dim) / c
self.feat_delta_lv.W.data[...] = np.random.randn(n_features, n_dim) / c
self.slop_delta_mu.W.data[...] = np.random.randn(n_features, 1) / d
self.slop_delta_lv.W.data[...] = np.random.randn(n_features, 1) / d
self.bias_mu.b.data[...] *= 0.0
self.bias_mu.b.data[...] += init_bias_mu
self.bias_lv.b.data[...] *= 0.0
self.bias_lv.b.data[...] += init_bias_lv
def term_bias(self, bs, train=True):
""" Compute overall bias and broadcast to shape of batchsize
"""
shape = (bs, 1,)
# Bias is drawn from a Gaussian with given mu and log variance
bs_mu = F.broadcast_to(self.bias_mu.b, shape)
bs_lv = F.broadcast_to(self.bias_lv.b, shape)
bias = F.flatten(F.gaussian(bs_mu, bs_lv))
# Add a very negative log variance so we're sampling
# from a very narrow distribution about the mean.
# Useful for validation dataset when we want to only guess
# the mean.
if not train:
bs_lv += self.lv_floor
# Compute prior on the bias, so compute the KL div
# from the KL(N(mu_bias, var_bias) | N(0, 1))
kld = F.gaussian_kl_divergence(self.bias_mu.b, self.bias_lv.b)
return bias, kld
def term_slop(self, loc, val, bs, nf, train=True):
""" Compute the slope for each active feature.
"""
shape = (bs, nf)
# Reshape all of our constants
pr_mu = F.broadcast_to(self.slop_mu.b, shape)
pr_lv = F.broadcast_to(self.slop_lv.b, shape)
# This is either zero or a very negative number
# indicating to sample N(mean, logvar) or just draw
        # the mean precisely
if not train:
pr_lv += self.lv_floor
        # The feature slopes are grouped together so that they
        # all share a common mean. The individual per-feature deltas
        # (slop_delta_mu / slop_delta_lv) are then shrunk towards zero,
        # which effectively makes features fall back on the group mean.
sl_mu = F.reshape(self.slop_delta_mu(loc), shape) + pr_mu
sl_lv = F.reshape(self.slop_delta_lv(loc), shape) + pr_lv
coef = F.gaussian(sl_mu, sl_lv)
slop = F.sum(coef * val, axis=1)
# Calculate divergence between group mean and N(0, 1)
kld1 = F.gaussian_kl_divergence(self.slop_mu.b, self.slop_lv.b)
# Calculate divergence of individual delta means and delta vars
args = (self.slop_delta_mu.W, self.slop_delta_lv.W)
kld2 = F.gaussian_kl_divergence(*args)
return slop, kld1 + kld2
def term_feat(self, iloc, jloc, ival, jval, bs, nf, train=True):
# Change all of the shapes to form interaction vectors
shape = (bs, nf * 2, self.n_dim)
feat_mu_vec = F.broadcast_to(self.feat_mu_vec.b, shape)
feat_lv_vec = F.broadcast_to(self.feat_lv_vec.b, shape)
if not train:
feat_lv_vec += self.lv_floor
# Construct the interaction mean and variance
# iloc is (bs, nf), feat(iloc) is (bs, nf, ndim) and
# dot(feat, feat) is (bs, nf)
ivec = F.gaussian(feat_mu_vec + self.feat_delta_mu(iloc),
feat_lv_vec + self.feat_delta_lv(iloc))
jvec = F.gaussian(feat_mu_vec + self.feat_delta_mu(jloc),
feat_lv_vec + self.feat_delta_lv(jloc))
# feat is (bs, )
feat = dot(F.sum(ivec * jvec, axis=2), ival * jval)
# Compute the KLD for the group mean vector and variance vector
kld1 = F.gaussian_kl_divergence(self.feat_mu_vec.b, self.feat_lv_vec.b)
# Compute the KLD for vector deviations from the group mean and var
kld2 = F.gaussian_kl_divergence(self.feat_delta_mu.W,
self.feat_delta_lv.W)
return feat, kld1 + kld2
def forward(self, loc, val, y, train=True):
""" Given the sparse feature vector defined by location
integers for the column index and the value at that index.
y ~ c + sum(w_i x_i) + sum_ij( <v_i, v_j> * x_i * x_j)
Parameters
----------
val : array of float
            Values in the feature array. Should be of shape (batchsize, n_feat_max).
loc : array of int
Location of the non-zero columns in the sparse vector. Should be of
shape (batchsize, n_feat_max)
y : array of float
Array of expected outcome.
train: bool
If True uses the reparameterization trick to estimate variables.
If False, this sets the variance to nearly zero such that
parameters are always set to the mean with no noise, which is useful
at test time.
"""
bs = val.data.shape[0]
nf = val.data.shape[1]
iloc, jloc = batch_interactions(loc)
ival, jval = batch_interactions(val)
# Compute scalar bias term
bias, kld0 = self.term_bias(bs, train=train)
# Compute the feature weights
slop, kld1 = self.term_slop(loc, val, bs, nf, train=train)
# Compute factorized weights on interaction features
feat, kld2 = self.term_feat(iloc, jloc, ival, jval,
bs, nf, train=train)
# Optionally choose to include the interaction term
# without this is linear regression
pred = bias + slop
if self.intx_term:
pred += feat
return pred, kld0, kld1, kld2
def __call__(self, loc, val, y, train=True):
bs = val.data.shape[0]
pred, kld0, kld1, kld2 = self.forward(loc, val, y, train=train)
# Compute MSE loss
mse = F.mean_squared_error(pred, y)
rmse = F.sqrt(mse) # Only used for reporting
# Now compute the total KLD loss
kldt = kld0 * self.lambda0 + kld1 * self.lambda1 + kld2 * self.lambda2
# Total loss is MSE plus regularization losses
loss = mse + kldt * (1.0 / self.total_nobs)
# Log the errors
logs = {'loss': loss, 'rmse': rmse, 'kld0': kld0, 'kld1': kld1,
'kld2': kld2, 'kldt': kldt, 'bias': F.sum(self.bias_mu.b)}
reporter.report(logs, self)
return loss
class TestModeEvaluator(extensions.Evaluator):
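    """Evaluator that toggles the target model's train flag off while
    evaluating, intended to make the VFM predict at its posterior means
    instead of drawing noisy samples.
    """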
def evaluate(self):
model = self.get_target('main')
model.train = False
ret = super(TestModeEvaluator, self).evaluate()
model.train = True
return ret
def fit(model, train, valid, device=-1, batchsize=4096, n_epoch=500,
resume=None, alpha=1e-3):
if device >= 0:
chainer.cuda.get_device(device).use()
model.to_gpu(device)
optimizer = chainer.optimizers.Adam(alpha)
optimizer.setup(model)
# Setup iterators
train_iter = chainer.iterators.SerialIterator(train, batchsize)
valid_iter = chainer.iterators.SerialIterator(valid, batchsize,
repeat=False, shuffle=False)
updater = training.StandardUpdater(train_iter, optimizer, device=device)
trainer = training.Trainer(updater, (n_epoch, 'epoch'),
out='out_' + str(device))
# Setup logging, printing & saving
keys = ['loss', 'rmse', 'bias', 'kld0', 'kld1']
keys += ['kldg', 'kldi', 'hypg', 'hypi']
keys += ['hypglv', 'hypilv']
reports = ['epoch']
reports += ['main/' + key for key in keys]
reports += ['validation/main/rmse']
trainer.extend(TestModeEvaluator(valid_iter, model, device=device))
trainer.extend(extensions.Evaluator(valid_iter, model, device=device))
trainer.extend(extensions.dump_graph('main/loss'))
trainer.extend(extensions.snapshot(), trigger=(10, 'epoch'))
trainer.extend(extensions.LogReport(trigger=(1, 'epoch')))
trainer.extend(extensions.PrintReport(reports))
trainer.extend(extensions.ProgressBar(update_interval=10))
# If previous model detected, resume
if resume:
print("Loading from {}".format(resume))
chainer.serializers.load_npz(resume, trainer)
# Run the model
trainer.run()
| [
[
[
7,
14
],
[
9049,
9056
],
[
9132,
9139
],
[
9230,
9237
],
[
9298,
9305
],
[
10461,
10468
]
],
[
[
35,
43
],
[
9442,
9450
],
[
9519,
9527
]
],
[
[
73,
83
],
[
8699,
8709
],
[
9994,
10004
],
[
10069,
10079
],
[
10124,
10134
],
[
10189,
10199
],
[
10252,
10262
],
[
10304,
10314
]
],
[
[
113,
125
]
],
[
[
147,
152
],
[
644,
649
]
],
[
[
173,
183
],
[
1335,
1336
],
[
1397,
1398
],
[
1459,
1460
],
[
1523,
1524
],
[
1593,
1594
],
[
1742,
1743
],
[
1889,
1890
],
[
1964,
1965
],
[
2041,
2042
],
[
2194,
2195
]
],
[
[
204,
218
],
[
733,
734
],
[
350,
351
],
[
3146,
3147
],
[
3200,
3201
],
[
3253,
3254
],
[
3263,
3264
],
[
3681,
3682
],
[
3967,
3968
],
[
4021,
4022
],
[
4524,
4525
],
[
4590,
4591
],
[
4655,
4656
],
[
4695,
4696
],
[
4799,
4800
],
[
5003,
5004
],
[
5265,
5266
],
[
5329,
5330
],
[
5603,
5604
],
[
5735,
5736
],
[
5896,
5897
],
[
6025,
6026
],
[
6181,
6182
],
[
8131,
8132
],
[
8176,
8177
],
[
8594,
8595
]
],
[
[
239,
247
],
[
8625,
8633
]
],
[
[
268,
272
],
[
410,
414
]
],
[
[
280,
291
],
[
2348,
2350
],
[
2394,
2396
],
[
2461,
2463
],
[
2541,
2543
],
[
2621,
2623
],
[
2697,
2699
]
],
[
[
298,
301
],
[
5892,
5895
]
],
[
[
378,
396
],
[
7272,
7290
],
[
7317,
7335
]
],
[
[
640,
643
],
[
1307,
1310
]
],
[
[
8681,
8698
],
[
8834,
8851
],
[
9922,
9939
]
],
[
[
8922,
8925
]
]
] |
import json
from pathlib import Path
from typing import Optional
import typer
from . import utils
from .utils import example
from .utils.iohelper import AltTemporaryDirectory
@example()
def check(
project_dir: Path = Path("."), checkout: Optional[str] = None, strict: bool = True
) -> bool:
"""Checks to see if there have been any updates to the Cookiecutter template
used to generate this project."""
cruft_file = utils.cruft.get_cruft_file(project_dir)
cruft_state = json.loads(cruft_file.read_text())
with AltTemporaryDirectory() as cookiecutter_template_dir:
with utils.cookiecutter.get_cookiecutter_repo(
cruft_state["template"],
Path(cookiecutter_template_dir),
checkout,
filter="blob:none",
no_checkout=True,
) as repo:
last_commit = repo.head.object.hexsha
if utils.cruft.is_project_updated(repo, cruft_state["commit"], last_commit, strict):
typer.secho(
"SUCCESS: Good work! Project's cruft is up to date "
"and as clean as possible :).",
fg=typer.colors.GREEN,
)
return True
typer.secho(
"FAILURE: Project's cruft is out of date! Run `cruft update` to clean this mess up.",
fg=typer.colors.RED,
)
return False
| [
[
[
7,
11
],
[
493,
497
]
],
[
[
32,
36
],
[
225,
229
],
[
218,
222
],
[
695,
699
]
],
[
[
56,
64
],
[
246,
254
]
],
[
[
73,
78
],
[
995,
1000
],
[
1156,
1161
],
[
1231,
1236
],
[
1357,
1362
]
],
[
[
94,
99
],
[
435,
440
],
[
604,
609
],
[
897,
902
]
],
[
[
119,
126
],
[
180,
187
]
],
[
[
155,
176
],
[
537,
558
]
],
[
[
194,
199
]
]
] |
"""Test zha switch."""
from unittest.mock import call, patch
import pytest
import zigpy.zcl.clusters.general as general
import zigpy.zcl.foundation as zcl_f
from homeassistant.components.switch import DOMAIN
from homeassistant.const import STATE_OFF, STATE_ON, STATE_UNAVAILABLE
from .common import (
async_enable_traffic,
find_entity_id,
make_attribute,
make_zcl_header,
)
from tests.common import mock_coro
ON = 1
OFF = 0
@pytest.fixture
def zigpy_device(zigpy_device_mock):
"""Device tracker zigpy device."""
endpoints = {
1: {
"in_clusters": [general.Basic.cluster_id, general.OnOff.cluster_id],
"out_clusters": [],
"device_type": 0,
}
}
return zigpy_device_mock(endpoints)
async def test_switch(hass, zha_gateway, zha_device_joined_restored, zigpy_device):
"""Test zha switch platform."""
zha_device = await zha_device_joined_restored(zigpy_device)
cluster = zigpy_device.endpoints.get(1).on_off
entity_id = await find_entity_id(DOMAIN, zha_device, hass)
assert entity_id is not None
# test that the switch was created and that its state is unavailable
assert hass.states.get(entity_id).state == STATE_UNAVAILABLE
# allow traffic to flow through the gateway and device
await async_enable_traffic(hass, zha_gateway, [zha_device])
# test that the state has changed from unavailable to off
assert hass.states.get(entity_id).state == STATE_OFF
# turn on at switch
attr = make_attribute(0, 1)
hdr = make_zcl_header(zcl_f.Command.Report_Attributes)
cluster.handle_message(hdr, [[attr]])
await hass.async_block_till_done()
assert hass.states.get(entity_id).state == STATE_ON
# turn off at switch
attr.value.value = 0
cluster.handle_message(hdr, [[attr]])
await hass.async_block_till_done()
assert hass.states.get(entity_id).state == STATE_OFF
# turn on from HA
with patch(
"zigpy.zcl.Cluster.request",
return_value=mock_coro([0x00, zcl_f.Status.SUCCESS]),
):
# turn on via UI
await hass.services.async_call(
DOMAIN, "turn_on", {"entity_id": entity_id}, blocking=True
)
assert len(cluster.request.mock_calls) == 1
assert cluster.request.call_args == call(
False, ON, (), expect_reply=True, manufacturer=None
)
# turn off from HA
with patch(
"zigpy.zcl.Cluster.request",
return_value=mock_coro([0x01, zcl_f.Status.SUCCESS]),
):
# turn off via UI
await hass.services.async_call(
DOMAIN, "turn_off", {"entity_id": entity_id}, blocking=True
)
assert len(cluster.request.mock_calls) == 1
assert cluster.request.call_args == call(
False, OFF, (), expect_reply=True, manufacturer=None
)
# test joining a new switch to the network and HA
cluster.bind.reset_mock()
cluster.configure_reporting.reset_mock()
await zha_gateway.async_device_initialized(zigpy_device)
await hass.async_block_till_done()
assert cluster.bind.call_count == 1
assert cluster.bind.await_count == 1
assert cluster.configure_reporting.call_count == 1
assert cluster.configure_reporting.await_count == 1
| [
[
[
49,
53
],
[
2315,
2319
],
[
2785,
2789
]
],
[
[
55,
60
],
[
1960,
1965
],
[
2428,
2433
]
],
[
[
69,
75
],
[
448,
454
]
],
[
[
83,
120
],
[
598,
605
],
[
624,
631
]
],
[
[
128,
157
],
[
1569,
1574
],
[
2042,
2047
],
[
2510,
2515
]
],
[
[
203,
209
],
[
1044,
1050
],
[
2150,
2156
],
[
2619,
2625
]
],
[
[
242,
251
],
[
1476,
1485
],
[
1918,
1927
]
],
[
[
253,
261
],
[
1730,
1738
]
],
[
[
263,
280
],
[
1224,
1241
]
],
[
[
308,
328
],
[
1312,
1332
]
],
[
[
334,
348
],
[
1029,
1043
]
],
[
[
354,
368
],
[
1522,
1536
]
],
[
[
374,
389
],
[
1553,
1568
]
],
[
[
419,
428
],
[
2025,
2034
],
[
2493,
2502
]
],
[
[
430,
432
],
[
2340,
2342
]
],
[
[
437,
440
],
[
2810,
2813
]
],
[
[
467,
479
]
],
[
[
771,
3287
]
]
] |
"""
This file contains all routes for the /search API
"""
from sanic import Blueprint
from sanic.response import HTTPResponse
from dp4py_sanic.api.response.json_response import json
from dp_conceptual_search.config import CONFIG
from dp_conceptual_search.api.request import ONSRequest
from dp_conceptual_search.ons.search.index import Index
from dp_conceptual_search.ons.search.client.search_engine import SearchEngine
from dp_conceptual_search.ons.search.response.search_result import SearchResult
from dp_conceptual_search.api.search.sanic_search_engine import SanicSearchEngine
from dp_conceptual_search.api.search.conceptual import routes as conceptual_routes
search_blueprint = Blueprint('search', url_prefix='/search')
@search_blueprint.route('/departments', methods=['GET'], strict_slashes=True)
async def ons_departments_query(request: ONSRequest) -> HTTPResponse:
"""
Handles departments queries to the departments index
:param request:
:return:
"""
# Initialise the search engine
sanic_search_engine = SanicSearchEngine(request.app, SearchEngine, Index.DEPARTMENTS)
# Perform the request
search_result: SearchResult = await sanic_search_engine.departments_query(request)
return json(request, search_result.to_dict(), 200)
@search_blueprint.route('/', methods=['GET', 'POST'], strict_slashes=False)
async def search(request: ONSRequest) -> HTTPResponse:
"""
API which combines the content, counts and featured result queries into one
:param request:
:return:
"""
if CONFIG.API.redirect_conceptual_search:
return await conceptual_routes.search(request)
# Initialise the search engine
sanic_search_engine = SanicSearchEngine(request.app, SearchEngine, Index.ONS)
result = await sanic_search_engine.search(request)
return json(request, result, 200)
@search_blueprint.route('/content', methods=['GET', 'POST'], strict_slashes=True)
async def ons_content_query(request: ONSRequest) -> HTTPResponse:
"""
Handles content queries to the API.
:param request:
:return:
"""
if CONFIG.API.redirect_conceptual_search:
return await conceptual_routes.conceptual_content_query(request)
# Initialise the search engine
sanic_search_engine = SanicSearchEngine(request.app, SearchEngine, Index.ONS)
# Perform the request
search_result: SearchResult = await sanic_search_engine.content_query(request)
return json(request, search_result.to_dict(), 200)
@search_blueprint.route('/counts', methods=['GET', 'POST'], strict_slashes=True)
async def ons_counts_query(request: ONSRequest) -> HTTPResponse:
"""
Handles type counts queries to the API.
:param request:
:return:
"""
if CONFIG.API.redirect_conceptual_search:
return await conceptual_routes.conceptual_counts_query(request)
# Initialise the search engine
sanic_search_engine = SanicSearchEngine(request.app, SearchEngine, Index.ONS)
# Perform the request
search_result: SearchResult = await sanic_search_engine.type_counts_query(request)
return json(request, search_result.to_dict(), 200)
@search_blueprint.route('/featured', methods=['GET'], strict_slashes=True)
async def ons_featured_result_query(request: ONSRequest) -> HTTPResponse:
"""
    Handles featured result queries (i.e. product and home page census pages)
:param request:
:return:
"""
# Initialise the search engine
sanic_search_engine = SanicSearchEngine(request.app, SearchEngine, Index.ONS)
# Perform the request
search_result: SearchResult = await sanic_search_engine.featured_result_query(request)
return json(request, search_result.to_dict(), 200)
@search_blueprint.route('/uri/', methods=['GET', 'POST'])
@search_blueprint.route('/uri/<path:path>', methods=['GET', 'POST'])
async def search_by_uri(request: ONSRequest, path: str):
"""
    Search for a page by its URI
:param request:
:param path:
:return:
"""
# Initialise the search engine
sanic_search_engine = SanicSearchEngine(request.app, SearchEngine, Index.ONS)
# Perform the request
search_result: SearchResult = await sanic_search_engine.search_by_uri(request, path)
return json(request, search_result.to_dict(), 200)
| [
[
[
76,
85
],
[
686,
695
]
],
[
[
113,
125
],
[
864,
876
],
[
1398,
1410
],
[
1991,
2003
],
[
2631,
2643
],
[
3281,
3293
]
],
[
[
178,
182
],
[
1235,
1239
],
[
1828,
1832
],
[
2453,
2457
],
[
3100,
3104
],
[
3668,
3672
],
[
4243,
4247
]
],
[
[
224,
230
],
[
1548,
1554
],
[
2101,
2107
],
[
2745,
2751
]
],
[
[
276,
286
],
[
849,
859
],
[
1383,
1393
],
[
1976,
1986
],
[
2616,
2626
],
[
3266,
3276
],
[
3874,
3884
]
],
[
[
337,
342
],
[
1090,
1095
],
[
1749,
1754
],
[
2320,
2325
],
[
2963,
2968
],
[
3527,
3532
],
[
4104,
4109
]
],
[
[
408,
420
],
[
1076,
1088
],
[
1735,
1747
],
[
2306,
2318
],
[
2949,
2961
],
[
3513,
3525
],
[
4090,
4102
]
],
[
[
488,
500
],
[
1155,
1167
],
[
2377,
2389
],
[
3020,
3032
],
[
3584,
3596
],
[
4161,
4173
]
],
[
[
565,
582
],
[
1045,
1062
],
[
1704,
1721
],
[
2275,
2292
],
[
2918,
2935
],
[
3482,
3499
],
[
4059,
4076
]
],
[
[
638,
665
],
[
1608,
1625
],
[
2161,
2178
],
[
2805,
2822
]
],
[
[
667,
683
],
[
731,
747
],
[
1282,
1298
],
[
1858,
1874
],
[
2500,
2516
],
[
3147,
3163
],
[
3715,
3731
],
[
3773,
3789
]
],
[
[
808,
1278
]
],
[
[
1357,
1854
]
],
[
[
1939,
2496
]
],
[
[
2580,
3143
]
],
[
[
3221,
3711
]
],
[
[
3841,
4286
]
]
] |
# -*- coding: UTF-8 -*-
from common_utils.new_log import NewLog
class LogDecorator:
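    """Method decorator that logs each call's name, arguments and result."""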
log = NewLog(__name__)
logger = log.get_log()
def __call__(self, func):
def wrapper(*args, **kw):
self.logger.debug("call method %s ===============" % func.__name__)
self.logger.debug("method [%s] input args: [%s], kw: [%s]" % (func.__name__, args, kw))
result = func(*args, **kw)
self.logger.debug("method [%s] response: [%s]" % (func.__name__, result))
return result
return wrapper
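# Hypothetical usage: trace calls to a method by decorating it, e.g.
#   class Service:
#       @LogDecorator()
#       def handle(self, request): ...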
| [
[
[
58,
64
],
[
98,
104
]
],
[
[
73,
85
]
]
] |
# -*- coding: utf-8 -*-
'''
Covenant Add-on
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
'''
import re,urllib,urlparse
from resources.lib.modules import cleantitle
from resources.lib.modules import client
from resources.lib.modules import debrid
from resources.lib.modules import source_utils
from resources.lib.modules import dom_parser2
class source:
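    """Covenant source scraper for ultrahdindir.com."""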
def __init__(self):
self.priority = 1
self.language = ['en']
self.domains = ['ultrahdindir.com']
self.base_link = 'http://ultrahdindir.com'
self.post_link = '/index.php?do=search'
def movie(self, imdb, title, localtitle, aliases, year):
try:
url = {'imdb': imdb, 'title': title, 'year': year}
url = urllib.urlencode(url)
return url
except:
return
def sources(self, url, hostDict, hostprDict):
try:
sources = []
            if url is None: return sources
if debrid.status() is False: raise Exception()
data = urlparse.parse_qs(url)
data = dict([(i, data[i][0]) if data[i] else (i, '') for i in data])
title = data['title'].replace(':','').lower()
year = data['year']
query = '%s %s' % (data['title'], data['year'])
query = re.sub('(\\\|/| -|:|;|\*|\?|"|\'|<|>|\|)', ' ', query)
url = urlparse.urljoin(self.base_link, self.post_link)
post = 'do=search&subaction=search&search_start=0&full_search=0&result_from=1&story=%s' % urllib.quote_plus(query)
r = client.request(url, post=post)
r = client.parseDOM(r, 'div', attrs={'class': 'box-out margin'})
r = [(dom_parser2.parse_dom(i, 'div', attrs={'class':'news-title'})) for i in r if data['imdb'] in i]
r = [(dom_parser2.parse_dom(i[0], 'a', req='href')) for i in r if i]
r = [(i[0].attrs['href'], i[0].content) for i in r if i]
hostDict = hostprDict + hostDict
for item in r:
try:
name = item[1]
y = re.findall('\((\d{4})\)', name)[0]
if not y == year: raise Exception()
s = re.findall('((?:\d+\.\d+|\d+\,\d+|\d+)\s*(?:GB|GiB|Gb|MB|MiB|Mb))', name)
s = s[0] if s else '0'
data = client.request(item[0])
data = dom_parser2.parse_dom(data, 'div', attrs={'id': 'r-content'})
data = re.findall('\s*<b><a href=.+?>(.+?)</b>.+?<u><b><a href="(.+?)".+?</a></b></u>',
data[0].content, re.DOTALL)
u = [(i[0], i[1], s) for i in data if i]
for name, url, size in u:
try:
if '4K' in name:
quality = '4K'
elif '1080p' in name:
quality = '1080p'
elif '720p' in name:
quality = '720p'
elif any(i in ['dvdscr', 'r5', 'r6'] for i in name):
quality = 'SCR'
elif any(i in ['camrip', 'tsrip', 'hdcam', 'hdts', 'dvdcam', 'dvdts', 'cam', 'telesync', 'ts']
for i in name):
quality = 'CAM'
else: quality = '720p'
info = []
if '3D' in name or '.3D.' in url: info.append('3D'); quality = '1080p'
if any(i in ['hevc', 'h265', 'x265'] for i in name): info.append('HEVC')
try:
size = re.findall('((?:\d+\.\d+|\d+\,\d+|\d+)\s*(?:GB|GiB|Gb|MB|MiB|Mb))', size)[-1]
div = 1 if size.endswith(('Gb', 'GiB', 'GB')) else 1024
size = float(re.sub('[^0-9|/.|/,]', '', size)) / div
size = '%.2f GB' % size
info.append(size)
except:
pass
info = ' | '.join(info)
url = client.replaceHTMLCodes(url)
url = url.encode('utf-8')
if any(x in url for x in ['.rar', '.zip', '.iso', 'turk']):continue
                            if 'ftp' in url: host = 'COV'; direct = True
                            else: direct = False; host = 'turbobit.net'
#if not host in hostDict: continue
host = client.replaceHTMLCodes(host)
host = host.encode('utf-8')
sources.append({'source': host, 'quality': quality, 'language': 'en',
'url': url, 'info': info, 'direct': direct, 'debridonly': True})
except:
pass
except:
pass
return sources
except:
return sources
def resolve(self, url):
return url
| [
[
[
733,
735
],
[
1976,
1978
],
[
2787,
2789
],
[
2906,
2908
],
[
3194,
3196
],
[
3331,
3333
],
[
4506,
4508
],
[
4719,
4721
]
],
[
[
736,
742
],
[
1387,
1393
],
[
2206,
2212
]
],
[
[
743,
751
],
[
1693,
1701
],
[
2052,
2060
]
],
[
[
789,
799
]
],
[
[
835,
841
],
[
2250,
2256
],
[
2298,
2304
],
[
3052,
3058
],
[
5034,
5040
],
[
5466,
5472
]
],
[
[
877,
883
],
[
1627,
1633
]
],
[
[
919,
931
]
],
[
[
967,
978
],
[
2378,
2389
],
[
2493,
2504
],
[
3104,
3115
]
],
[
[
988,
994
]
]
] |
"""Helper functions for the distribution."""
import importlib
import json
import pathlib
import subprocess
import sys
import types
import os
from typing import Optional, List
import requests
import repobee_plug as plug
import _repobee.ext
from _repobee import distinfo
from _repobee import plugin
class DependencyResolutionError(plug.PlugError):
"""Raise when dependency resolution fails during an install."""
def get_installed_plugins_path() -> pathlib.Path:
"""Return the path to the installed_plugins.json file."""
assert distinfo.INSTALL_DIR
return distinfo.INSTALL_DIR / "installed_plugins.json"
def get_installed_plugins(
installed_plugins_path: Optional[pathlib.Path] = None,
) -> dict:
"""Return the public content of the installed_plugins.json file."""
installed_plugins = _get_installed_plugins(installed_plugins_path)
if "_metainfo" in installed_plugins:
del installed_plugins["_metainfo"]
return installed_plugins
def _get_installed_plugins(
installed_plugins_path: Optional[pathlib.Path] = None,
):
"""Return the content of the installed_plugins.json file, with metainfo."""
return json.loads(
(installed_plugins_path or get_installed_plugins_path()).read_text(
"utf8"
)
)
def write_installed_plugins(
installed_plugins: dict,
installed_plugins_path: Optional[pathlib.Path] = None,
) -> None:
"""Write the installed_plugins.json file."""
path = installed_plugins_path or get_installed_plugins_path()
metainfo = _get_installed_plugins(path).get("_metainfo") or {}
metainfo.update(installed_plugins.get("_metainfo") or {})
installed_plugins_write = dict(installed_plugins)
installed_plugins_write["_metainfo"] = metainfo
path.write_text(
json.dumps(installed_plugins_write, indent=4), encoding="utf8"
)
def get_active_plugins(
installed_plugins_path: Optional[pathlib.Path] = None,
) -> List[str]:
"""Read active plugins from the installed_plugins.json file."""
installed_plugins = _get_installed_plugins(installed_plugins_path)
return (installed_plugins.get("_metainfo") or {}).get(
"active_plugins"
) or []
def write_active_plugins(
active_plugins: List[str],
installed_plugins_path: Optional[pathlib.Path] = None,
) -> None:
"""Write the active plugins."""
installed_plugins = _get_installed_plugins(installed_plugins_path)
installed_plugins.setdefault("_metainfo", {})[
"active_plugins"
] = active_plugins
write_installed_plugins(installed_plugins, installed_plugins_path)
def get_pip_path() -> pathlib.Path:
"""Return the path to the installed pip binary."""
assert distinfo.INSTALL_DIR
return distinfo.INSTALL_DIR / "env" / "bin" / "pip"
def get_plugins_json(url: str = "https://repobee.org/plugins.json") -> dict:
"""Fetch and parse the plugins.json file.
Args:
url: URL to the plugins.json file.
Returns:
A dictionary with the contents of the plugins.json file.
"""
resp = requests.get(url)
if resp.status_code != 200:
plug.log.error(resp.content.decode("utf8"))
raise plug.PlugError(f"could not fetch plugins.json from '{url}'")
return resp.json()
def get_builtin_plugins(ext_pkg: types.ModuleType = _repobee.ext) -> dict:
"""Returns a dictionary of builting plugins on the same form as the
plugins.json dict.
"""
def _get_plugin_description(name):
return (
importlib.import_module(f"{ext_pkg.__name__}.{name}").__dict__.get(
"PLUGIN_DESCRIPTION"
)
or "-"
)
return {
name: dict(
description=_get_plugin_description(name),
url=f"https://repobee.readthedocs.io/"
f"en/stable/builtins.html#{name}",
versions={"N/A": {}},
builtin=True,
)
for name in plugin.get_module_names(ext_pkg)
}
def pip(command: str, *args, **kwargs) -> subprocess.CompletedProcess:
"""Thin wrapper around the ``pip`` executable in the distribution's virtual
environment.
Args:
command: The command to execute (e.g. "install" or "list").
args: Positional arguments to ``pip``, passed in order. Flags should
also be passed here (e.g. `--pre`)
kwargs: Keyword arguments to ``pip``, passed as ``--key value`` to the
CLI. If the value is ``True``, the argument is passed as a flag,
i.e. as ``--key``.
Returns:
        The completed process.
Raises:
        DependencyResolutionError: If the 2020-resolver fails to resolve
            dependencies.
"""
cli_kwargs = [
f"--{key.replace('_', '-')}"
# True is interpreted as a flag
+ (f"={val}" if val is not True else "")
for key, val in kwargs.items()
]
env = dict(os.environ)
if command == "install":
# the resolver allows us to avoid installing plugins that are
# incompatible with the current version of RepoBee
cli_kwargs.append("--use-feature=2020-resolver")
# REPOBEE_INSTALL_DIR must be available when upgrading RepoBee,
# or the dist plugins aren't activated
env["REPOBEE_INSTALL_DIR"] = str(distinfo.INSTALL_DIR)
# due to the hack in setup.py to edit the distinfo, we must build
# RepoBee from source
cli_kwargs.append("--no-binary=repobee")
cmd = [str(get_pip_path()), command, *args, *cli_kwargs]
proc = subprocess.run(
cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, env=env
)
if proc.returncode != 0:
stderr = proc.stderr.decode(sys.getdefaultencoding())
plug.log.error(stderr)
if "ResolutionImpossible" in stderr:
raise DependencyResolutionError()
return proc
| [
[
[
52,
61
],
[
3512,
3521
]
],
[
[
69,
73
],
[
1162,
1166
],
[
1795,
1799
]
],
[
[
81,
88
],
[
457,
464
],
[
690,
697
],
[
1046,
1053
],
[
1382,
1389
],
[
1927,
1934
],
[
2296,
2303
],
[
2630,
2637
]
],
[
[
96,
106
],
[
4020,
4030
],
[
5582,
5592
],
[
5618,
5628
],
[
5642,
5652
]
],
[
[
114,
117
],
[
5738,
5741
]
],
[
[
125,
130
],
[
3298,
3303
]
],
[
[
138,
140
],
[
4945,
4947
]
],
[
[
161,
169
],
[
681,
689
],
[
1037,
1045
],
[
1373,
1381
],
[
1918,
1926
],
[
2287,
2295
]
],
[
[
171,
175
],
[
1954,
1958
],
[
2248,
2252
]
],
[
[
184,
192
],
[
3063,
3071
]
],
[
[
200,
220
],
[
334,
338
],
[
3121,
3125
],
[
3179,
3183
],
[
5772,
5776
]
],
[
[
229,
241
],
[
3317,
3325
]
],
[
[
263,
271
],
[
544,
552
],
[
576,
584
],
[
2710,
2718
],
[
2742,
2750
],
[
5333,
5341
]
],
[
[
293,
299
],
[
3937,
3943
]
],
[
[
308,
333
],
[
5859,
5884
]
],
[
[
425,
451
],
[
1209,
1235
],
[
1501,
1527
]
],
[
[
630,
651
]
],
[
[
985,
1007
],
[
819,
841
],
[
1545,
1567
],
[
2057,
2079
],
[
2389,
2411
]
],
[
[
1291,
1314
],
[
2539,
2562
]
],
[
[
1870,
1888
]
],
[
[
2206,
2226
]
],
[
[
2612,
2624
],
[
5525,
5537
]
],
[
[
2793,
2809
]
],
[
[
3269,
3288
]
],
[
[
3982,
3985
]
]
] |
from pyspark import SparkConf, SparkContext
import collections
conf = SparkConf().setMaster("local").setAppName("RatingsHistogram")
sc = SparkContext(conf=conf)
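# Each line of the MovieLens 100k u.data file is "user_id item_id rating
# timestamp", so split()[2] below extracts the rating field.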
lines = sc.textFile("D:/celebal/resources/ml-100k/u.data")
ratings = lines.map(lambda x: x.split()[2])
result = ratings.countByValue()
sortedResults = collections.OrderedDict(sorted(result.items()))
for key, value in sortedResults.items():
print("%s %i" % (key, value))
| [
[
[
20,
29
],
[
71,
80
]
],
[
[
31,
43
],
[
138,
150
]
],
[
[
51,
62
],
[
317,
328
]
],
[
[
64,
68
],
[
158,
162
]
],
[
[
133,
135
],
[
173,
175
]
],
[
[
165,
170
],
[
234,
239
]
],
[
[
224,
231
],
[
277,
284
]
],
[
[
268,
274
],
[
348,
354
]
],
[
[
301,
314
],
[
383,
396
]
],
[
[
369,
372
],
[
427,
430
]
],
[
[
374,
379
],
[
432,
437
]
]
] |
#!/usr/bin/env python
# -*- encoding: utf-8 -*-
# vim: set et sw=4 ts=4 sts=4 ff=unix fenc=utf8:
# Author: Binux<i@binux.me>
# http://binux.me
# Created on 2014-02-08 22:37:13
import os
import time
import shutil
import unittest2 as unittest
import logging
import logging.config
logging.config.fileConfig("pyspider/logging.conf")
from pyspider.scheduler.task_queue import TaskQueue
class TestTaskQueue(unittest.TestCase):
@classmethod
def setUpClass(self):
self.task_queue = TaskQueue()
self.task_queue.rate = 100000
self.task_queue.burst = 100000
self.task_queue.processing_timeout = 0.5
def test_10_put(self):
self.task_queue.put('a3', 0, time.time() + 0.5)
self.task_queue.put('a4', 3, time.time() + 0.2)
self.task_queue.put('a2', 0)
self.task_queue.put('a1', 1)
self.assertEqual(self.task_queue.size(), 4)
def test_20_update(self):
self.task_queue.put('a2', 4)
self.assertEqual(self.task_queue.size(), 4)
self.task_queue.put('a3', 2, 0)
self.assertEqual(self.task_queue.size(), 4)
def test_30_get_from_priority_queue(self):
self.assertEqual(self.task_queue.get(), 'a2')
self.assertEqual(self.task_queue.size(), 4)
def test_40_time_queue_1(self):
self.task_queue.check_update()
self.assertEqual(self.task_queue.get(), 'a3')
self.assertEqual(self.task_queue.size(), 4)
def test_50_time_queue_2(self):
time.sleep(0.3)
self.task_queue.check_update()
self.assertEqual(self.task_queue.get(), 'a4')
self.assertEqual(self.task_queue.get(), 'a1')
self.assertEqual(self.task_queue.size(), 4)
def test_60_processing_queue(self):
time.sleep(0.5)
self.task_queue.check_update()
self.assertEqual(self.task_queue.get(), 'a2')
self.assertEqual(len(self.task_queue), 4)
self.assertEqual(self.task_queue.get(), 'a4')
self.assertEqual(self.task_queue.get(), 'a3')
self.assertEqual(self.task_queue.get(), 'a1')
self.assertEqual(len(self.task_queue), 4)
def test_70_done(self):
self.assertTrue(self.task_queue.done('a2'))
self.assertTrue(self.task_queue.done('a1'))
self.assertEqual(len(self.task_queue), 2)
self.assertTrue(self.task_queue.done('a4'))
self.assertTrue(self.task_queue.done('a3'))
self.assertEqual(len(self.task_queue), 0)
from pyspider.scheduler.token_bucket import Bucket
class TestBucket(unittest.TestCase):
def test_bucket(self):
bucket = Bucket(100, 1000)
self.assertEqual(bucket.get(), 1000)
time.sleep(0.1)
self.assertEqual(bucket.get(), 1000)
bucket.desc(100)
self.assertEqual(bucket.get(), 900)
time.sleep(0.1)
self.assertAlmostEqual(bucket.get(), 910, delta=2)
time.sleep(0.1)
self.assertAlmostEqual(bucket.get(), 920, delta=2)
try:
from six.moves import xmlrpc_client
except ImportError:
import xmlrpclib as xmlrpc_client
from pyspider.scheduler.scheduler import Scheduler
from pyspider.database.sqlite import taskdb, projectdb, resultdb
from pyspider.libs.multiprocessing_queue import Queue
from pyspider.libs.utils import run_in_thread
class TestScheduler(unittest.TestCase):
taskdb_path = './data/tests/task.db'
projectdb_path = './data/tests/project.db'
resultdb_path = './data/tests/result.db'
check_project_time = 1
scheduler_xmlrpc_port = 23333
@classmethod
def setUpClass(self):
shutil.rmtree('./data/tests', ignore_errors=True)
os.makedirs('./data/tests')
def get_taskdb():
return taskdb.TaskDB(self.taskdb_path)
self.taskdb = get_taskdb()
def get_projectdb():
return projectdb.ProjectDB(self.projectdb_path)
self.projectdb = get_projectdb()
def get_resultdb():
return resultdb.ResultDB(self.resultdb_path)
self.resultdb = get_resultdb()
self.newtask_queue = Queue(10)
self.status_queue = Queue(10)
self.scheduler2fetcher = Queue(10)
self.rpc = xmlrpc_client.ServerProxy('http://localhost:%d' % self.scheduler_xmlrpc_port)
def run_scheduler():
scheduler = Scheduler(taskdb=get_taskdb(), projectdb=get_projectdb(),
newtask_queue=self.newtask_queue, status_queue=self.status_queue,
out_queue=self.scheduler2fetcher, data_path="./data/tests/",
resultdb=get_resultdb())
scheduler.UPDATE_PROJECT_INTERVAL = 0.1
scheduler.LOOP_INTERVAL = 0.1
scheduler.INQUEUE_LIMIT = 10
scheduler.DELETE_TIME = 0
scheduler.DEFAULT_RETRY_DELAY = {'': 5}
scheduler._last_tick = int(time.time()) # not dispatch cronjob
run_in_thread(scheduler.xmlrpc_run, port=self.scheduler_xmlrpc_port)
scheduler.run()
self.process = run_in_thread(run_scheduler)
time.sleep(1)
@classmethod
def tearDownClass(self):
if self.process.is_alive():
self.rpc._quit()
self.process.join(5)
assert not self.process.is_alive()
shutil.rmtree('./data/tests', ignore_errors=True)
time.sleep(1)
def test_10_new_task_ignore(self):
self.newtask_queue.put({
'taskid': 'taskid',
'project': 'test_project',
'url': 'url'
})
self.assertEqual(self.rpc.size(), 0)
self.assertEqual(len(self.rpc.get_active_tasks()), 0)
def test_20_new_project(self):
self.projectdb.insert('test_project', {
'name': 'test_project',
'group': 'group',
'status': 'TODO',
'script': 'import time\nprint(time.time())',
'comments': 'test project',
'rate': 1.0,
'burst': 10,
})
def test_30_update_project(self):
from six.moves import queue as Queue
with self.assertRaises(Queue.Empty):
task = self.scheduler2fetcher.get(timeout=1)
self.projectdb.update('test_project', status="DEBUG")
time.sleep(0.1)
self.rpc.update_project()
task = self.scheduler2fetcher.get(timeout=10)
self.assertIsNotNone(task)
self.assertEqual(task['url'], 'data:,_on_get_info')
def test_34_new_not_used_project(self):
self.projectdb.insert('test_project_not_started', {
'name': 'test_project_not_started',
'group': 'group',
'status': 'RUNNING',
'script': 'import time\nprint(time.time())',
'comments': 'test project',
'rate': 1.0,
'burst': 10,
})
task = self.scheduler2fetcher.get(timeout=1)
self.assertEqual(task['taskid'], '_on_get_info')
def test_35_new_task(self):
time.sleep(0.2)
self.newtask_queue.put({
'taskid': 'taskid',
'project': 'test_project',
'url': 'url',
'fetch': {
'data': 'abc',
},
'process': {
'data': 'abc',
},
'schedule': {
'age': 0,
},
})
time.sleep(0.5)
task = self.scheduler2fetcher.get(timeout=10)
self.assertGreater(len(self.rpc.get_active_tasks()), 0)
self.assertIsNotNone(task)
self.assertEqual(task['project'], 'test_project')
self.assertIn('schedule', task)
self.assertIn('fetch', task)
self.assertIn('process', task)
self.assertIn('track', task)
self.assertEqual(task['fetch']['data'], 'abc')
def test_37_force_update_processing_task(self):
self.newtask_queue.put({
'taskid': 'taskid',
'project': 'test_project',
'url': 'url_force_update',
'schedule': {
'age': 10,
'force_update': True,
},
})
time.sleep(0.2)
        # it should not block the next task
def test_40_taskdone_error_no_project(self):
self.status_queue.put({
'taskid': 'taskid',
'project': 'no_project',
'url': 'url'
})
time.sleep(0.1)
self.assertEqual(self.rpc.size(), 1)
def test_50_taskdone_error_no_track(self):
self.status_queue.put({
'taskid': 'taskid',
'project': 'test_project',
'url': 'url'
})
time.sleep(0.1)
self.assertEqual(self.rpc.size(), 1)
self.status_queue.put({
'taskid': 'taskid',
'project': 'test_project',
'url': 'url',
'track': {}
})
time.sleep(0.1)
self.assertEqual(self.rpc.size(), 1)
def test_60_taskdone_failed_retry(self):
self.status_queue.put({
'taskid': 'taskid',
'project': 'test_project',
'url': 'url',
'track': {
'fetch': {
'ok': True
},
'process': {
'ok': False
},
}
})
from six.moves import queue as Queue
with self.assertRaises(Queue.Empty):
task = self.scheduler2fetcher.get(timeout=4)
task = self.scheduler2fetcher.get(timeout=5)
self.assertIsNotNone(task)
def test_70_taskdone_ok(self):
self.status_queue.put({
'taskid': 'taskid',
'project': 'test_project',
'url': 'url',
'track': {
'fetch': {
'ok': True
},
'process': {
'ok': True
},
}
})
time.sleep(0.2)
self.assertEqual(self.rpc.size(), 0)
def test_80_newtask_age_ignore(self):
self.newtask_queue.put({
'taskid': 'taskid',
'project': 'test_project',
'url': 'url',
'fetch': {
'data': 'abc',
},
'process': {
'data': 'abc',
},
'schedule': {
'age': 30,
},
})
time.sleep(0.1)
self.assertEqual(self.rpc.size(), 0)
def test_82_newtask_via_rpc(self):
self.rpc.newtask({
'taskid': 'taskid',
'project': 'test_project',
'url': 'url',
'fetch': {
'data': 'abc',
},
'process': {
'data': 'abc',
},
'schedule': {
'age': 30,
},
})
time.sleep(0.1)
self.assertEqual(self.rpc.size(), 0)
def test_90_newtask_with_itag(self):
time.sleep(0.1)
self.newtask_queue.put({
'taskid': 'taskid',
'project': 'test_project',
'url': 'url',
'fetch': {
'data': 'abc',
},
'process': {
'data': 'abc',
},
'schedule': {
'itag': "abc",
'retries': 1
},
})
task = self.scheduler2fetcher.get(timeout=10)
self.assertIsNotNone(task)
self.test_70_taskdone_ok()
def test_a10_newtask_restart_by_age(self):
self.newtask_queue.put({
'taskid': 'taskid',
'project': 'test_project',
'url': 'url',
'fetch': {
'data': 'abc',
},
'process': {
'data': 'abc',
},
'schedule': {
'age': 0,
'retries': 1
},
})
task = self.scheduler2fetcher.get(timeout=10)
self.assertIsNotNone(task)
def test_a20_failed_retry(self):
self.status_queue.put({
'taskid': 'taskid',
'project': 'test_project',
'url': 'url',
'track': {
'fetch': {
'ok': True
},
'process': {
'ok': False
},
}
})
task = self.scheduler2fetcher.get(timeout=5)
self.assertIsNotNone(task)
self.status_queue.put({
'taskid': 'taskid',
'project': 'test_project',
'url': 'url',
'track': {
'fetch': {
'ok': False
},
'process': {
'ok': False
},
}
})
from six.moves import queue as Queue
with self.assertRaises(Queue.Empty):
self.scheduler2fetcher.get(timeout=5)
def test_a30_task_verify(self):
self.assertFalse(self.rpc.newtask({
#'taskid': 'taskid#',
'project': 'test_project',
'url': 'url',
}))
self.assertFalse(self.rpc.newtask({
'taskid': 'taskid#',
#'project': 'test_project',
'url': 'url',
}))
self.assertFalse(self.rpc.newtask({
'taskid': 'taskid#',
'project': 'test_project',
#'url': 'url',
}))
self.assertFalse(self.rpc.newtask({
'taskid': 'taskid#',
'project': 'not_exist_project',
'url': 'url',
}))
self.assertTrue(self.rpc.newtask({
'taskid': 'taskid#',
'project': 'test_project',
'url': 'url',
}))
def test_a40_success_recrawl(self):
self.newtask_queue.put({
'taskid': 'taskid',
'project': 'test_project',
'url': 'url',
'fetch': {
'data': 'abc',
},
'process': {
'data': 'abc',
},
'schedule': {
'age': 0,
'retries': 1,
'auto_recrawl': True,
},
})
task = self.scheduler2fetcher.get(timeout=10)
self.assertIsNotNone(task)
self.status_queue.put({
'taskid': 'taskid',
'project': 'test_project',
'url': 'url',
'schedule': {
'age': 0,
'retries': 1,
'auto_recrawl': True,
},
'track': {
'fetch': {
'ok': True
},
'process': {
'ok': True
},
}
})
task = self.scheduler2fetcher.get(timeout=10)
self.assertIsNotNone(task)
def test_a50_failed_recrawl(self):
for i in range(3):
self.status_queue.put({
'taskid': 'taskid',
'project': 'test_project',
'url': 'url',
'schedule': {
'age': 0,
'retries': 1,
'auto_recrawl': True,
},
'track': {
'fetch': {
'ok': True
},
'process': {
'ok': False
},
}
})
task = self.scheduler2fetcher.get(timeout=10)
self.assertIsNotNone(task)
def test_a60_disable_recrawl(self):
self.status_queue.put({
'taskid': 'taskid',
'project': 'test_project',
'url': 'url',
'schedule': {
'age': 0,
'retries': 1,
},
'track': {
'fetch': {
'ok': True
},
'process': {
'ok': True
},
}
})
from six.moves import queue as Queue
with self.assertRaises(Queue.Empty):
self.scheduler2fetcher.get(timeout=5)
def test_x10_inqueue_limit(self):
self.projectdb.insert('test_inqueue_project', {
'name': 'test_inqueue_project',
'group': 'group',
'status': 'DEBUG',
'script': 'import time\nprint(time.time())',
'comments': 'test project',
'rate': 0,
'burst': 0,
})
time.sleep(0.1)
pre_size = self.rpc.size()
for i in range(20):
self.newtask_queue.put({
'taskid': 'taskid%d' % i,
'project': 'test_inqueue_project',
'url': 'url',
'schedule': {
'age': 3000,
'force_update': True,
},
})
time.sleep(1)
self.assertEqual(self.rpc.size() - pre_size, 10)
def test_x20_delete_project(self):
self.assertIsNotNone(self.projectdb.get('test_inqueue_project'))
#self.assertIsNotNone(self.taskdb.get_task('test_inqueue_project', 'taskid1'))
self.projectdb.update('test_inqueue_project', status="STOP", group="lock,delete")
time.sleep(1)
self.assertIsNone(self.projectdb.get('test_inqueue_project'))
self.taskdb._list_project()
self.assertIsNone(self.taskdb.get_task('test_inqueue_project', 'taskid1'))
def test_z10_startup(self):
self.assertTrue(self.process.is_alive())
def test_z20_quit(self):
self.rpc._quit()
time.sleep(0.2)
self.assertFalse(self.process.is_alive())
self.assertEqual(
self.taskdb.get_task('test_project', 'taskid')['status'],
self.taskdb.SUCCESS
)
if __name__ == '__main__':
unittest.main()
| [
[
[
192,
194
],
[
3643,
3645
]
],
[
[
202,
206
],
[
706,
710
],
[
762,
766
],
[
1500,
1504
],
[
1764,
1768
],
[
2680,
2684
],
[
2818,
2822
],
[
2901,
2905
],
[
5095,
5099
],
[
5363,
5367
],
[
6258,
6262
],
[
6983,
6987
],
[
7356,
7360
],
[
8112,
8116
],
[
8358,
8362
],
[
8614,
8618
],
[
8847,
8851
],
[
9900,
9904
],
[
10361,
10365
],
[
10813,
10817
],
[
10924,
10928
],
[
16496,
16500
],
[
16882,
16886
],
[
17251,
17255
],
[
17599,
17603
],
[
4888,
4892
]
],
[
[
214,
220
],
[
3585,
3591
],
[
5305,
5311
]
],
[
[
228,
249
],
[
413,
421
],
[
2544,
2552
],
[
3319,
3327
],
[
17835,
17843
]
],
[
[
257,
264
]
],
[
[
272,
286
],
[
287,
294
]
],
[
[
381,
390
],
[
503,
512
]
],
[
[
399,
412
]
],
[
[
2518,
2524
],
[
2609,
2615
]
],
[
[
2533,
2543
]
],
[
[
3009,
3022
],
[
4180,
4193
]
],
[
[
3054,
3080
],
[
4180,
4193
]
],
[
[
3122,
3131
],
[
4312,
4321
]
],
[
[
3169,
3175
],
[
3717,
3723
]
],
[
[
3177,
3186
],
[
3833,
3842
]
],
[
[
3188,
3196
],
[
3963,
3971
]
],
[
[
3245,
3250
],
[
4070,
4075
],
[
4108,
4113
],
[
4151,
4156
]
],
[
[
3283,
3296
],
[
5058,
5071
],
[
4937,
4950
]
],
[
[
3305,
3318
]
]
] |
from django.db import models
from django.contrib.auth.models import AbstractBaseUser
from django.contrib.auth.models import PermissionsMixin
from django.contrib.auth.models import BaseUserManager
from django.conf import settings
class UserProfileManager(BaseUserManager):
"""Manager for user profiles"""
def create_user(self, email, name, password=None):
"""create a new user profile"""
if not email:
raise ValueError("User must have and email address")
email = self.normalize_email(email)
user = self.model(email=email,name=name)
user.set_password(password)
user.save(using=self._db)
return user
def create_superuser(self, email, name, password):
"""create and save new superuser with given details"""
user = self.create_user(email, name, password)
user.is_superuser = True
user.is_staff = True
user.save(using=self._db)
return user
class UserProfile(AbstractBaseUser,PermissionsMixin):
"""Database model for users in the system """
email = models.EmailField(max_length=255, unique=True)
name = models.CharField(max_length=255)
is_active = models.BooleanField(default=True)
is_staff = models.BooleanField(default=False)
objects = UserProfileManager()
USERNAME_FIELD = 'email'
REQUIRED_FIELDS = ['name']
def get_full_name(self):
"""Retrieve full name of user"""
return self.name
def get_short_name(self):
"""Retrieve short name of user"""
return self.name
def __str__(self):
"""Return string representation of our user"""
return self.email
class ProfileFeedItem(models.Model):
"""Profile status update"""
user_profile = models.ForeignKey(
settings.AUTH_USER_MODEL,
on_delete=models.CASCADE
)
status_text = models.CharField(max_length=255)
created_on = models.DateTimeField(auto_now_add=True)
def __str__(self):
"""Retusnt the model as a string"""
return self.status_text
| [
[
[
22,
28
],
[
1089,
1095
],
[
1147,
1153
],
[
1196,
1202
],
[
1245,
1251
],
[
1699,
1705
],
[
1765,
1771
],
[
1836,
1842
],
[
1875,
1881
],
[
1925,
1931
]
],
[
[
68,
84
],
[
991,
1007
]
],
[
[
124,
140
],
[
1008,
1024
]
],
[
[
180,
195
],
[
256,
271
]
],
[
[
220,
228
],
[
1792,
1800
]
],
[
[
237,
255
],
[
1295,
1313
]
],
[
[
979,
990
]
],
[
[
1683,
1698
]
]
] |
"""
factor.py
"""
from functools import wraps
from operator import attrgetter
from numbers import Number
from numpy import inf, where
from toolz import curry
from zipline.errors import UnknownRankMethod
from zipline.lib.normalize import naive_grouped_rowwise_apply
from zipline.lib.rank import masked_rankdata_2d
from zipline.pipeline.classifiers import Classifier, Everything, Quantiles
from zipline.pipeline.mixins import (
CustomTermMixin,
LatestMixin,
PositiveWindowLengthMixin,
RestrictedDTypeMixin,
SingleInputMixin,
)
from zipline.pipeline.term import (
ComputableTerm,
NotSpecified,
NotSpecifiedType,
Term,
)
from zipline.pipeline.expression import (
BadBinaryOperator,
COMPARISONS,
is_comparison,
MATH_BINOPS,
method_name_for_op,
NumericalExpression,
NUMEXPR_MATH_FUNCS,
UNARY_OPS,
unary_op_name,
)
from zipline.pipeline.filters import (
Filter,
NumExprFilter,
PercentileFilter,
NullFilter,
)
from zipline.utils.input_validation import expect_types
from zipline.utils.math_utils import nanmean, nanstd
from zipline.utils.numpy_utils import (
bool_dtype,
coerce_to_dtype,
datetime64ns_dtype,
float64_dtype,
int64_dtype,
)
from zipline.utils.preprocess import preprocess
_RANK_METHODS = frozenset(['average', 'min', 'max', 'dense', 'ordinal'])
def coerce_numbers_to_my_dtype(f):
"""
A decorator for methods whose signature is f(self, other) that coerces
``other`` to ``self.dtype``.
This is used to make comparison operations between numbers and `Factor`
instances work independently of whether the user supplies a float or
integer literal.
For example, if I write::
my_filter = my_factor > 3
my_factor probably has dtype float64, but 3 is an int, so we want to coerce
to float64 before doing the comparison.
"""
@wraps(f)
def method(self, other):
if isinstance(other, Number):
other = coerce_to_dtype(self.dtype, other)
return f(self, other)
return method
@curry
def set_attribute(name, value):
"""
Decorator factory for setting attributes on a function.
Doesn't change the behavior of the wrapped function.
Usage
-----
>>> @set_attribute('__name__', 'foo')
... def bar():
... return 3
...
>>> bar()
3
>>> bar.__name__
'foo'
"""
def decorator(f):
setattr(f, name, value)
return f
return decorator
# Decorators for setting the __name__ and __doc__ properties of a decorated
# function.
# Example:
with_name = set_attribute('__name__')
with_doc = set_attribute('__doc__')
def binop_return_type(op):
if is_comparison(op):
return NumExprFilter
else:
return NumExprFactor
def binop_return_dtype(op, left, right):
"""
Compute the expected return dtype for the given binary operator.
Parameters
----------
op : str
Operator symbol, (e.g. '+', '-', ...).
left : numpy.dtype
Dtype of left hand side.
right : numpy.dtype
Dtype of right hand side.
Returns
-------
outdtype : numpy.dtype
The dtype of the result of `left <op> right`.
"""
if is_comparison(op):
if left != right:
raise TypeError(
"Don't know how to compute {left} {op} {right}.\n"
"Comparisons are only supported between Factors of equal "
"dtypes.".format(left=left, op=op, right=right)
)
return bool_dtype
elif left != float64_dtype or right != float64_dtype:
raise TypeError(
"Don't know how to compute {left} {op} {right}.\n"
"Arithmetic operators are only supported between Factors of "
"dtype 'float64'.".format(
left=left.name,
op=op,
right=right.name,
)
)
return float64_dtype
def binary_operator(op):
"""
Factory function for making binary operator methods on a Factor subclass.
Returns a function, "binary_operator" suitable for implementing functions
like __add__.
"""
# When combining a Factor with a NumericalExpression, we use this
# attrgetter instance to defer to the commuted implementation of the
# NumericalExpression operator.
commuted_method_getter = attrgetter(method_name_for_op(op, commute=True))
@with_doc("Binary Operator: '%s'" % op)
@with_name(method_name_for_op(op))
@coerce_numbers_to_my_dtype
def binary_operator(self, other):
# This can't be hoisted up a scope because the types returned by
# binop_return_type aren't defined when the top-level function is
# invoked in the class body of Factor.
return_type = binop_return_type(op)
if isinstance(self, NumExprFactor):
self_expr, other_expr, new_inputs = self.build_binary_op(
op, other,
)
return return_type(
"({left}) {op} ({right})".format(
left=self_expr,
op=op,
right=other_expr,
),
new_inputs,
dtype=binop_return_dtype(op, self.dtype, other.dtype),
)
elif isinstance(other, NumExprFactor):
# NumericalExpression overrides ops to correctly handle merging of
# inputs. Look up and call the appropriate reflected operator with
# ourself as the input.
return commuted_method_getter(other)(self)
elif isinstance(other, Term):
if self is other:
return return_type(
"x_0 {op} x_0".format(op=op),
(self,),
dtype=binop_return_dtype(op, self.dtype, other.dtype),
)
return return_type(
"x_0 {op} x_1".format(op=op),
(self, other),
dtype=binop_return_dtype(op, self.dtype, other.dtype),
)
elif isinstance(other, Number):
return return_type(
"x_0 {op} ({constant})".format(op=op, constant=other),
binds=(self,),
# .dtype access is safe here because coerce_numbers_to_my_dtype
# will convert any input numbers to numpy equivalents.
dtype=binop_return_dtype(op, self.dtype, other.dtype)
)
raise BadBinaryOperator(op, self, other)
return binary_operator
def reflected_binary_operator(op):
"""
Factory function for making binary operator methods on a Factor.
Returns a function, "reflected_binary_operator" suitable for implementing
functions like __radd__.
"""
assert not is_comparison(op)
@with_name(method_name_for_op(op, commute=True))
@coerce_numbers_to_my_dtype
def reflected_binary_operator(self, other):
if isinstance(self, NumericalExpression):
self_expr, other_expr, new_inputs = self.build_binary_op(
op, other
)
return NumExprFactor(
"({left}) {op} ({right})".format(
left=other_expr,
right=self_expr,
op=op,
),
new_inputs,
dtype=binop_return_dtype(op, other.dtype, self.dtype)
)
# Only have to handle the numeric case because in all other valid cases
# the corresponding left-binding method will be called.
elif isinstance(other, Number):
return NumExprFactor(
"{constant} {op} x_0".format(op=op, constant=other),
binds=(self,),
dtype=binop_return_dtype(op, other.dtype, self.dtype),
)
raise BadBinaryOperator(op, other, self)
return reflected_binary_operator
def unary_operator(op):
"""
Factory function for making unary operator methods for Factors.
"""
# Only negate is currently supported.
valid_ops = {'-'}
if op not in valid_ops:
raise ValueError("Invalid unary operator %s." % op)
@with_doc("Unary Operator: '%s'" % op)
@with_name(unary_op_name(op))
def unary_operator(self):
if self.dtype != float64_dtype:
raise TypeError(
"Can't apply unary operator {op!r} to instance of "
"{typename!r} with dtype {dtypename!r}.\n"
"{op!r} is only supported for Factors of dtype "
"'float64'.".format(
op=op,
typename=type(self).__name__,
dtypename=self.dtype.name,
)
)
# This can't be hoisted up a scope because the types returned by
# unary_op_return_type aren't defined when the top-level function is
# invoked.
if isinstance(self, NumericalExpression):
return NumExprFactor(
"{op}({expr})".format(op=op, expr=self._expr),
self.inputs,
dtype=float64_dtype,
)
else:
return NumExprFactor(
"{op}x_0".format(op=op),
(self,),
dtype=float64_dtype,
)
return unary_operator
def function_application(func):
"""
Factory function for producing function application methods for Factor
subclasses.
"""
if func not in NUMEXPR_MATH_FUNCS:
raise ValueError("Unsupported mathematical function '%s'" % func)
@with_name(func)
def mathfunc(self):
if isinstance(self, NumericalExpression):
return NumExprFactor(
"{func}({expr})".format(func=func, expr=self._expr),
self.inputs,
dtype=float64_dtype,
)
else:
return NumExprFactor(
"{func}(x_0)".format(func=func),
(self,),
dtype=float64_dtype,
)
return mathfunc
def restrict_to_dtype(dtype, message_template):
"""
    A factory for decorators that restrict Factor methods to only be
    callable on Factors with a specific dtype.
This is conceptually similar to
zipline.utils.input_validation.expect_dtypes, but provides more flexibility
for providing error messages that are specifically targeting Factor
methods.
Parameters
----------
dtype : numpy.dtype
The dtype on which the decorated method may be called.
message_template : str
A template for the error message to be raised.
`message_template.format` will be called with keyword arguments
`method_name`, `expected_dtype`, and `received_dtype`.
Usage
-----
@restrict_to_dtype(
dtype=float64_dtype,
message_template=(
"{method_name}() was called on a factor of dtype {received_dtype}."
"{method_name}() requires factors of dtype{expected_dtype}."
),
)
def some_factor_method(self, ...):
self.stuff_that_requires_being_float64(...)
"""
def processor(factor_method, _, factor_instance):
factor_dtype = factor_instance.dtype
if factor_dtype != dtype:
raise TypeError(
message_template.format(
method_name=factor_method.__name__,
expected_dtype=dtype.name,
received_dtype=factor_dtype,
)
)
return factor_instance
return preprocess(self=processor)
# Decorators for Factor methods.
if_not_float64_tell_caller_to_use_isnull = restrict_to_dtype(
dtype=float64_dtype,
message_template=(
"{method_name}() was called on a factor of dtype {received_dtype}.\n"
"{method_name}() is only defined for dtype {expected_dtype}."
"To filter missing data, use isnull() or notnull()."
)
)
float64_only = restrict_to_dtype(
dtype=float64_dtype,
message_template=(
"{method_name}() is only defined on Factors of dtype {expected_dtype},"
" but it was called on a Factor of dtype {received_dtype}."
)
)
FACTOR_DTYPES = frozenset([datetime64ns_dtype, float64_dtype, int64_dtype])
class Factor(RestrictedDTypeMixin, ComputableTerm):
"""
Pipeline API expression producing a numerical or date-valued output.
Factors are the most commonly-used Pipeline term, representing the result
of any computation producing a numerical result.
Factors can be combined, both with other Factors and with scalar values,
via any of the builtin mathematical operators (``+``, ``-``, ``*``, etc).
This makes it easy to write complex expressions that combine multiple
Factors. For example, constructing a Factor that computes the average of
two other Factors is simply::
>>> f1 = SomeFactor(...)
>>> f2 = SomeOtherFactor(...)
>>> average = (f1 + f2) / 2.0
Factors can also be converted into :class:`zipline.pipeline.Filter` objects
via comparison operators: (``<``, ``<=``, ``!=``, ``eq``, ``>``, ``>=``).
There are many natural operators defined on Factors besides the basic
numerical operators. These include methods identifying missing or
extreme-valued outputs (isnull, notnull, isnan, notnan), methods for
normalizing outputs (rank, demean, zscore), and methods for constructing
Filters based on rank-order properties of results (top, bottom,
percentile_between).
"""
ALLOWED_DTYPES = FACTOR_DTYPES # Used by RestrictedDTypeMixin
# Dynamically add functions for creating NumExprFactor/NumExprFilter
# instances.
clsdict = locals()
clsdict.update(
{
method_name_for_op(op): binary_operator(op)
# Don't override __eq__ because it breaks comparisons on tuples of
# Factors.
for op in MATH_BINOPS.union(COMPARISONS - {'=='})
}
)
clsdict.update(
{
method_name_for_op(op, commute=True): reflected_binary_operator(op)
for op in MATH_BINOPS
}
)
clsdict.update(
{
unary_op_name(op): unary_operator(op)
for op in UNARY_OPS
}
)
clsdict.update(
{
funcname: function_application(funcname)
for funcname in NUMEXPR_MATH_FUNCS
}
)
__truediv__ = clsdict['__div__']
__rtruediv__ = clsdict['__rdiv__']
eq = binary_operator('==')
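    # Illustrative sketch (not from the original source): the dynamically
    # generated operators above build NumExprFactor/NumExprFilter instances
    # whose expression strings follow the "x_i" convention used by
    # binary_operator. Assuming two hypothetical factors f1 and f2:
    #
    # >>> (f1 + f2)._expr      # doctest: +SKIP
    # 'x_0 + x_1'
    # >>> (f1 * 2.0)._expr     # doctest: +SKIP
    # 'x_0 * (2.0)'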
@expect_types(
mask=(Filter, NotSpecifiedType),
groupby=(Classifier, NotSpecifiedType),
)
@float64_only
def demean(self, mask=NotSpecified, groupby=NotSpecified):
"""
        Construct a Factor that computes ``self`` and subtracts the mean from
        each row of the result.
If ``mask`` is supplied, ignore values where ``mask`` returns False
when computing row means, and output NaN anywhere the mask is False.
If ``groupby`` is supplied, compute by partitioning each row based on
the values produced by ``groupby``, de-meaning the partitioned arrays,
and stitching the sub-results back together.
Parameters
----------
mask : zipline.pipeline.Filter, optional
A Filter defining values to ignore when computing means.
groupby : zipline.pipeline.Classifier, optional
A classifier defining partitions over which to compute means.
Example
-------
Let ``f`` be a Factor which would produce the following output::
AAPL MSFT MCD BK
2017-03-13 1.0 2.0 3.0 4.0
2017-03-14 1.5 2.5 3.5 1.0
2017-03-15 2.0 3.0 4.0 1.5
2017-03-16 2.5 3.5 1.0 2.0
Let ``c`` be a Classifier producing the following output::
AAPL MSFT MCD BK
2017-03-13 1 1 2 2
2017-03-14 1 1 2 2
2017-03-15 1 1 2 2
2017-03-16 1 1 2 2
Let ``m`` be a Filter producing the following output::
AAPL MSFT MCD BK
2017-03-13 False True True True
2017-03-14 True False True True
2017-03-15 True True False True
2017-03-16 True True True False
Then ``f.demean()`` will subtract the mean from each row produced by
``f``.
::
AAPL MSFT MCD BK
2017-03-13 -1.500 -0.500 0.500 1.500
2017-03-14 -0.625 0.375 1.375 -1.125
2017-03-15 -0.625 0.375 1.375 -1.125
2017-03-16 0.250 1.250 -1.250 -0.250
``f.demean(mask=m)`` will subtract the mean from each row, but means
        will be calculated ignoring values on the diagonal, and NaNs will be
        written to the diagonal in the output. Diagonal values are ignored
because they are the locations where the mask ``m`` produced False.
::
AAPL MSFT MCD BK
2017-03-13 NaN -1.000 0.000 1.000
2017-03-14 -0.500 NaN 1.500 -1.000
2017-03-15 -0.166 0.833 NaN -0.666
2017-03-16 0.166 1.166 -1.333 NaN
``f.demean(groupby=c)`` will subtract the group-mean of AAPL/MSFT and
MCD/BK from their respective entries. The AAPL/MSFT are grouped
together because both assets always produce 1 in the output of the
classifier ``c``. Similarly, MCD/BK are grouped together because they
always produce 2.
::
AAPL MSFT MCD BK
2017-03-13 -0.500 0.500 -0.500 0.500
2017-03-14 -0.500 0.500 1.250 -1.250
2017-03-15 -0.500 0.500 1.250 -1.250
2017-03-16 -0.500 0.500 -0.500 0.500
``f.demean(mask=m, groupby=c)`` will also subtract the group-mean of
AAPL/MSFT and MCD/BK, but means will be calculated ignoring values on
        the diagonal, and NaNs will be written to the diagonal in the output.
::
AAPL MSFT MCD BK
2017-03-13 NaN 0.000 -0.500 0.500
2017-03-14 0.000 NaN 1.250 -1.250
2017-03-15 -0.500 0.500 NaN 0.000
2017-03-16 -0.500 0.500 0.000 NaN
Notes
-----
        Mean is sensitive to the magnitudes of outliers. When working with a
        factor that can potentially produce large outliers, it is often useful
to use the ``mask`` parameter to discard values at the extremes of the
distribution::
>>> base = MyFactor(...)
>>> normalized = base.demean(mask=base.percentile_between(1, 99))
``demean()`` is only supported on Factors of dtype float64.
See Also
--------
:meth:`pandas.DataFrame.groupby`
"""
# This is a named function so that it has a __name__ for use in the
# graph repr of GroupedRowTransform.
def demean(row):
return row - nanmean(row)
return GroupedRowTransform(
transform=demean,
factor=self,
mask=mask,
groupby=groupby,
)
@expect_types(
mask=(Filter, NotSpecifiedType),
groupby=(Classifier, NotSpecifiedType),
)
@float64_only
def zscore(self, mask=NotSpecified, groupby=NotSpecified):
"""
Construct a Factor that Z-Scores each day's results.
The Z-Score of a row is defined as::
(row - row.mean()) / row.stddev()
If ``mask`` is supplied, ignore values where ``mask`` returns False
when computing row means and standard deviations, and output NaN
anywhere the mask is False.
If ``groupby`` is supplied, compute by partitioning each row based on
the values produced by ``groupby``, z-scoring the partitioned arrays,
and stitching the sub-results back together.
Parameters
----------
mask : zipline.pipeline.Filter, optional
A Filter defining values to ignore when Z-Scoring.
groupby : zipline.pipeline.Classifier, optional
A classifier defining partitions over which to compute Z-Scores.
Returns
-------
zscored : zipline.pipeline.Factor
            A Factor that z-scores the output of ``self``.
Notes
-----
        Mean and standard deviation are sensitive to the magnitudes of
        outliers. When working with a factor that can potentially produce large
outliers, it is often useful to use the ``mask`` parameter to discard
values at the extremes of the distribution::
>>> base = MyFactor(...)
>>> normalized = base.zscore(mask=base.percentile_between(1, 99))
``zscore()`` is only supported on Factors of dtype float64.
Example
-------
See :meth:`~zipline.pipeline.factors.Factor.demean` for an in-depth
example of the semantics for ``mask`` and ``groupby``.
See Also
--------
:meth:`pandas.DataFrame.groupby`
"""
# This is a named function so that it has a __name__ for use in the
# graph repr of GroupedRowTransform.
def zscore(row):
return (row - nanmean(row)) / nanstd(row)
return GroupedRowTransform(
transform=zscore,
factor=self,
mask=mask,
groupby=groupby,
)
def rank(self, method='ordinal', ascending=True, mask=NotSpecified):
"""
Construct a new Factor representing the sorted rank of each column
within each row.
Parameters
----------
method : str, {'ordinal', 'min', 'max', 'dense', 'average'}
The method used to assign ranks to tied elements. See
`scipy.stats.rankdata` for a full description of the semantics for
each ranking method. Default is 'ordinal'.
ascending : bool, optional
Whether to return sorted rank in ascending or descending order.
Default is True.
mask : zipline.pipeline.Filter, optional
A Filter representing assets to consider when computing ranks.
If mask is supplied, ranks are computed ignoring any asset/date
pairs for which `mask` produces a value of False.
Returns
-------
ranks : zipline.pipeline.factors.Rank
A new factor that will compute the ranking of the data produced by
`self`.
Notes
-----
The default value for `method` is different from the default for
`scipy.stats.rankdata`. See that function's documentation for a full
description of the valid inputs to `method`.
Missing or non-existent data on a given day will cause an asset to be
given a rank of NaN for that day.
See Also
--------
:func:`scipy.stats.rankdata`
:class:`zipline.pipeline.factors.factor.Rank`
"""
return Rank(self, method=method, ascending=ascending, mask=mask)
@expect_types(bins=int, mask=(Filter, NotSpecifiedType))
def quantiles(self, bins, mask=NotSpecified):
"""
Construct a Classifier computing quantiles of the output of ``self``.
        Every non-NaN data point in the output is labelled with an integer value
from 0 to (bins - 1). NaNs are labelled with -1.
If ``mask`` is supplied, ignore data points in locations for which
``mask`` produces False, and emit a label of -1 at those locations.
Parameters
----------
bins : int
            Number of bin labels to compute.
mask : zipline.pipeline.Filter, optional
Mask of values to ignore when computing quantiles.
Returns
-------
quantiles : zipline.pipeline.classifiers.Quantiles
A Classifier producing integer labels ranging from 0 to (bins - 1).
"""
if mask is NotSpecified:
mask = self.mask
return Quantiles(inputs=(self,), bins=bins, mask=mask)
@expect_types(mask=(Filter, NotSpecifiedType))
def quartiles(self, mask=NotSpecified):
"""
Construct a Classifier computing quartiles over the output of ``self``.
        Every non-NaN data point in the output is labelled with a value of either
0, 1, 2, or 3, corresponding to the first, second, third, or fourth
quartile over each row. NaN data points are labelled with -1.
If ``mask`` is supplied, ignore data points in locations for which
``mask`` produces False, and emit a label of -1 at those locations.
Parameters
----------
mask : zipline.pipeline.Filter, optional
Mask of values to ignore when computing quartiles.
Returns
-------
quartiles : zipline.pipeline.classifiers.Quantiles
A Classifier producing integer labels ranging from 0 to 3.
"""
return self.quantiles(bins=4, mask=mask)
@expect_types(mask=(Filter, NotSpecifiedType))
def quintiles(self, mask=NotSpecified):
"""
Construct a Classifier computing quintile labels on ``self``.
        Every non-NaN data point in the output is labelled with a value of
        0, 1, 2, 3, or 4, corresponding to quintiles over each row. NaN data
points are labelled with -1.
If ``mask`` is supplied, ignore data points in locations for which
``mask`` produces False, and emit a label of -1 at those locations.
Parameters
----------
mask : zipline.pipeline.Filter, optional
Mask of values to ignore when computing quintiles.
Returns
-------
quintiles : zipline.pipeline.classifiers.Quantiles
A Classifier producing integer labels ranging from 0 to 4.
"""
return self.quantiles(bins=5, mask=mask)
@expect_types(mask=(Filter, NotSpecifiedType))
def deciles(self, mask=NotSpecified):
"""
Construct a Classifier computing decile labels on ``self``.
        Every non-NaN data point in the output is labelled with a value from 0
        to 9, corresponding to deciles over each row. NaN data points are labelled
with -1.
If ``mask`` is supplied, ignore data points in locations for which
``mask`` produces False, and emit a label of -1 at those locations.
Parameters
----------
mask : zipline.pipeline.Filter, optional
Mask of values to ignore when computing deciles.
Returns
-------
deciles : zipline.pipeline.classifiers.Quantiles
A Classifier producing integer labels ranging from 0 to 9.
"""
return self.quantiles(bins=10, mask=mask)
def top(self, N, mask=NotSpecified):
"""
Construct a Filter matching the top N asset values of self each day.
Parameters
----------
N : int
Number of assets passing the returned filter each day.
mask : zipline.pipeline.Filter, optional
A Filter representing assets to consider when computing ranks.
If mask is supplied, top values are computed ignoring any
asset/date pairs for which `mask` produces a value of False.
Returns
-------
filter : zipline.pipeline.filters.Filter
"""
return self.rank(ascending=False, mask=mask) <= N
def bottom(self, N, mask=NotSpecified):
"""
Construct a Filter matching the bottom N asset values of self each day.
Parameters
----------
N : int
Number of assets passing the returned filter each day.
mask : zipline.pipeline.Filter, optional
A Filter representing assets to consider when computing ranks.
If mask is supplied, bottom values are computed ignoring any
asset/date pairs for which `mask` produces a value of False.
Returns
-------
filter : zipline.pipeline.Filter
"""
return self.rank(ascending=True, mask=mask) <= N
def percentile_between(self,
min_percentile,
max_percentile,
mask=NotSpecified):
"""
Construct a new Filter representing entries from the output of this
Factor that fall within the percentile range defined by min_percentile
and max_percentile.
Parameters
----------
min_percentile : float [0.0, 100.0]
Return True for assets falling above this percentile in the data.
max_percentile : float [0.0, 100.0]
Return True for assets falling below this percentile in the data.
mask : zipline.pipeline.Filter, optional
            A Filter representing assets to consider when calculating
            percentile thresholds. If mask is supplied, percentile cutoffs
are computed each day using only assets for which ``mask`` returns
True. Assets for which ``mask`` produces False will produce False
in the output of this Factor as well.
Returns
-------
out : zipline.pipeline.filters.PercentileFilter
A new filter that will compute the specified percentile-range mask.
See Also
--------
zipline.pipeline.filters.filter.PercentileFilter
"""
return PercentileFilter(
self,
min_percentile=min_percentile,
max_percentile=max_percentile,
mask=mask,
)
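    # Illustrative usage (hypothetical factor ``f``, not from the original
    # source): keep only assets whose value falls in the middle half of each
    # day's cross-section.
    #
    # >>> middle_half = f.percentile_between(25.0, 75.0)  # doctest: +SKIP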
def isnull(self):
"""
A Filter producing True for values where this Factor has missing data.
        Equivalent to ``self.isnan()`` when ``self.dtype`` is float64.
Otherwise equivalent to ``self.eq(self.missing_value)``.
Returns
-------
filter : zipline.pipeline.filters.Filter
"""
if self.dtype == float64_dtype:
# Using isnan is more efficient when possible because we can fold
# the isnan computation with other NumExpr expressions.
return self.isnan()
else:
return NullFilter(self)
def notnull(self):
"""
A Filter producing True for values where this Factor has complete data.
        Equivalent to ``~self.isnan()`` when ``self.dtype`` is float64.
Otherwise equivalent to ``(self != self.missing_value)``.
"""
return ~self.isnull()
@if_not_float64_tell_caller_to_use_isnull
def isnan(self):
"""
A Filter producing True for all values where this Factor is NaN.
Returns
-------
nanfilter : zipline.pipeline.filters.Filter
"""
return self != self
@if_not_float64_tell_caller_to_use_isnull
def notnan(self):
"""
A Filter producing True for values where this Factor is not NaN.
Returns
-------
nanfilter : zipline.pipeline.filters.Filter
"""
return ~self.isnan()
@if_not_float64_tell_caller_to_use_isnull
def isfinite(self):
"""
A Filter producing True for values where this Factor is anything but
NaN, inf, or -inf.
"""
return (-inf < self) & (self < inf)
class NumExprFactor(NumericalExpression, Factor):
"""
Factor computed from a numexpr expression.
Parameters
----------
expr : string
A string suitable for passing to numexpr. All variables in 'expr'
should be of the form "x_i", where i is the index of the corresponding
factor input in 'binds'.
binds : tuple
A tuple of factors to use as inputs.
Notes
-----
NumExprFactors are constructed by numerical operators like `+` and `-`.
Users should rarely need to construct a NumExprFactor directly.
"""
pass
class GroupedRowTransform(Factor):
"""
A Factor that transforms an input factor by applying a row-wise
shape-preserving transformation on classifier-defined groups of that
Factor.
This is most often useful for normalization operators like ``zscore`` or
``demean``.
Parameters
----------
transform : function[ndarray[ndim=1] -> ndarray[ndim=1]]
Function to apply over each row group.
factor : zipline.pipeline.Factor
The factor providing baseline data to transform.
mask : zipline.pipeline.Filter
Mask of entries to ignore when calculating transforms.
groupby : zipline.pipeline.Classifier
Classifier partitioning ``factor`` into groups to use when calculating
means.
Notes
-----
Users should rarely construct instances of this factor directly. Instead,
they should construct instances via factor normalization methods like
``zscore`` and ``demean``.
See Also
--------
zipline.pipeline.factors.Factor.zscore
zipline.pipeline.factors.Factor.demean
"""
window_length = 0
def __new__(cls, transform, factor, mask, groupby):
if mask is NotSpecified:
mask = factor.mask
else:
mask = mask & factor.mask
if groupby is NotSpecified:
groupby = Everything(mask=mask)
return super(GroupedRowTransform, cls).__new__(
GroupedRowTransform,
transform=transform,
inputs=(factor, groupby),
missing_value=factor.missing_value,
mask=mask,
dtype=factor.dtype,
)
def _init(self, transform, *args, **kwargs):
self._transform = transform
return super(GroupedRowTransform, self)._init(*args, **kwargs)
@classmethod
def static_identity(cls, transform, *args, **kwargs):
return (
super(GroupedRowTransform, cls).static_identity(*args, **kwargs),
transform,
)
def _compute(self, arrays, dates, assets, mask):
data = arrays[0]
null_group_value = self.inputs[1].missing_value
group_labels = where(
mask,
arrays[1],
null_group_value,
)
return where(
group_labels != null_group_value,
naive_grouped_rowwise_apply(
data=data,
group_labels=group_labels,
func=self._transform,
),
self.missing_value,
)
@property
def transform_name(self):
return self._transform.__name__
def short_repr(self):
return type(self).__name__ + '(%r)' % self.transform_name
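# Illustrative sketch (not from the original source) of the grouped row-wise
# application performed by GroupedRowTransform._compute above:
# ``naive_grouped_rowwise_apply`` applies the transform separately to the
# values of each label group within every row. With demean as the transform
# and group labels [1, 1, 2, 2]:
#
# >>> import numpy as np                      # doctest: +SKIP
# >>> row = np.array([1.0, 2.0, 3.0, 4.0])    # doctest: +SKIP
# >>> labels = np.array([1, 1, 2, 2])         # doctest: +SKIP
# >>> out = np.empty_like(row)                # doctest: +SKIP
# >>> for g in np.unique(labels):             # doctest: +SKIP
# ...     vals = row[labels == g]
# ...     out[labels == g] = vals - np.nanmean(vals)
# >>> out                                     # doctest: +SKIP
# array([-0.5,  0.5, -0.5,  0.5])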
class Rank(SingleInputMixin, Factor):
"""
A Factor representing the row-wise rank data of another Factor.
Parameters
----------
factor : zipline.pipeline.factors.Factor
The factor on which to compute ranks.
method : str, {'average', 'min', 'max', 'dense', 'ordinal'}
The method used to assign ranks to tied elements. See
`scipy.stats.rankdata` for a full description of the semantics for each
ranking method.
See Also
--------
:func:`scipy.stats.rankdata`
:class:`Factor.rank`
Notes
-----
Most users should call Factor.rank rather than directly construct an
instance of this class.
"""
window_length = 0
dtype = float64_dtype
def __new__(cls, factor, method, ascending, mask):
return super(Rank, cls).__new__(
cls,
inputs=(factor,),
method=method,
ascending=ascending,
mask=mask,
)
def _init(self, method, ascending, *args, **kwargs):
self._method = method
self._ascending = ascending
return super(Rank, self)._init(*args, **kwargs)
@classmethod
def static_identity(cls, method, ascending, *args, **kwargs):
return (
super(Rank, cls).static_identity(*args, **kwargs),
method,
ascending,
)
def _validate(self):
"""
Verify that the stored rank method is valid.
"""
if self._method not in _RANK_METHODS:
raise UnknownRankMethod(
method=self._method,
choices=set(_RANK_METHODS),
)
return super(Rank, self)._validate()
def _compute(self, arrays, dates, assets, mask):
"""
For each row in the input, compute a like-shaped array of per-row
ranks.
"""
return masked_rankdata_2d(
arrays[0],
mask,
self.inputs[0].missing_value,
self._method,
self._ascending,
)
def __repr__(self):
return "{type}({input_}, method='{method}', mask={mask})".format(
type=type(self).__name__,
input_=self.inputs[0],
method=self._method,
mask=self.mask,
)
class CustomFactor(PositiveWindowLengthMixin, CustomTermMixin, Factor):
'''
Base class for user-defined Factors.
Parameters
----------
inputs : iterable, optional
An iterable of `BoundColumn` instances (e.g. USEquityPricing.close),
        describing the data to load and pass to `self.compute`. If this
        argument is not passed to the CustomFactor constructor, we look for a
        class-level attribute named `inputs`.
window_length : int, optional
Number of rows to pass for each input. If this argument is not passed
to the CustomFactor constructor, we look for a class-level attribute
named `window_length`.
mask : zipline.pipeline.Filter, optional
A Filter describing the assets on which we should compute each day.
Each call to ``CustomFactor.compute`` will only receive assets for
which ``mask`` produced True on the day for which compute is being
called.
Notes
-----
Users implementing their own Factors should subclass CustomFactor and
implement a method named `compute` with the following signature:
.. code-block:: python
def compute(self, today, assets, out, *inputs):
...
On each simulation date, ``compute`` will be called with the current date,
an array of sids, an output array, and an input array for each expression
passed as inputs to the CustomFactor constructor.
The specific types of the values passed to `compute` are as follows::
today : np.datetime64[ns]
Row label for the last row of all arrays passed as `inputs`.
assets : np.array[int64, ndim=1]
        Column labels for `out` and `inputs`.
out : np.array[self.dtype, ndim=1]
Output array of the same shape as `assets`. `compute` should write
its desired return values into `out`.
*inputs : tuple of np.array
Raw data arrays corresponding to the values of `self.inputs`.
``compute`` functions should expect to be passed NaN values for dates on
which no data was available for an asset. This may include dates on which
an asset did not yet exist.
For example, if a CustomFactor requires 10 rows of close price data, and
asset A started trading on Monday June 2nd, 2014, then on Tuesday, June
3rd, 2014, the column of input data for asset A will have 9 leading NaNs
for the preceding days on which data was not yet available.
Examples
--------
A CustomFactor with pre-declared defaults:
.. code-block:: python
class TenDayRange(CustomFactor):
"""
Computes the difference between the highest high in the last 10
days and the lowest low.
Pre-declares high and low as default inputs and `window_length` as
10.
"""
inputs = [USEquityPricing.high, USEquityPricing.low]
window_length = 10
def compute(self, today, assets, out, highs, lows):
from numpy import nanmin, nanmax
highest_highs = nanmax(highs, axis=0)
lowest_lows = nanmin(lows, axis=0)
out[:] = highest_highs - lowest_lows
# Doesn't require passing inputs or window_length because they're
# pre-declared as defaults for the TenDayRange class.
ten_day_range = TenDayRange()
A CustomFactor without defaults:
.. code-block:: python
class MedianValue(CustomFactor):
"""
Computes the median value of an arbitrary single input over an
            arbitrary window.
Does not declare any defaults, so values for `window_length` and
`inputs` must be passed explicitly on every construction.
"""
def compute(self, today, assets, out, data):
from numpy import nanmedian
                out[:] = nanmedian(data, axis=0)
# Values for `inputs` and `window_length` must be passed explicitly to
# MedianValue.
median_close10 = MedianValue([USEquityPricing.close], window_length=10)
median_low15 = MedianValue([USEquityPricing.low], window_length=15)
'''
dtype = float64_dtype
class Latest(LatestMixin, CustomFactor):
"""
Factor producing the most recently-known value of `inputs[0]` on each day.
The `.latest` attribute of DataSet columns returns an instance of this
Factor.
"""
window_length = 1
def compute(self, today, assets, out, data):
out[:] = data[-1]
| [
[
[
40,
45
],
[
1891,
1896
]
],
[
[
67,
77
],
[
4387,
4397
]
],
[
[
98,
104
],
[
1958,
1964
],
[
6096,
6102
],
[
7601,
7607
]
],
[
[
124,
127
],
[
31612,
31615
],
[
31634,
31637
]
],
[
[
129,
134
],
[
34378,
34383
],
[
34482,
34487
]
],
[
[
153,
158
],
[
2073,
2078
]
],
[
[
187,
204
],
[
36452,
36469
]
],
[
[
239,
266
],
[
34547,
34574
]
],
[
[
296,
314
],
[
36793,
36811
]
],
[
[
356,
366
],
[
14615,
14625
],
[
19517,
19527
]
],
[
[
368,
378
],
[
33563,
33573
]
],
[
[
380,
389
],
[
24335,
24344
]
],
[
[
432,
447
],
[
37252,
37267
]
],
[
[
453,
464
],
[
41474,
41485
]
],
[
[
470,
495
],
[
37225,
37250
]
],
[
[
501,
521
],
[
12287,
12307
]
],
[
[
527,
543
],
[
34932,
34948
]
],
[
[
587,
601
],
[
12309,
12323
]
],
[
[
607,
619
],
[
14696,
14708
],
[
14718,
14730
],
[
19598,
19610
],
[
19620,
19632
],
[
21788,
21800
],
[
23464,
23476
],
[
24464,
24476
],
[
25406,
25418
],
[
26303,
26315
],
[
27125,
27137
],
[
27800,
27812
],
[
28594,
28606
],
[
24277,
24289
],
[
33407,
33419
],
[
33527,
33539
]
],
[
[
625,
641
],
[
14579,
14595
],
[
14627,
14643
],
[
19481,
19497
],
[
19529,
19545
],
[
23410,
23426
],
[
24416,
24432
],
[
25358,
25374
],
[
26257,
26273
]
],
[
[
647,
651
],
[
5626,
5630
]
],
[
[
701,
718
],
[
6488,
6505
],
[
7843,
7860
]
],
[
[
724,
735
],
[
13955,
13966
]
],
[
[
741,
754
],
[
2710,
2723
],
[
3243,
3256
],
[
6796,
6809
]
],
[
[
760,
771
],
[
13937,
13948
],
[
14125,
14136
]
],
[
[
777,
795
],
[
13769,
13787
],
[
14035,
14053
],
[
4398,
4416
],
[
4496,
4514
],
[
6830,
6848
]
],
[
[
801,
820
],
[
31661,
31680
],
[
6977,
6996
],
[
8937,
8956
],
[
9655,
9674
]
],
[
[
826,
844
],
[
14393,
14411
],
[
9487,
9505
]
],
[
[
850,
859
],
[
14255,
14264
]
],
[
[
865,
878
],
[
14195,
14208
],
[
8236,
8249
]
],
[
[
925,
931
],
[
14571,
14577
],
[
19473,
19479
],
[
23402,
23408
],
[
24408,
24414
],
[
25350,
25356
],
[
26249,
26255
]
],
[
[
937,
950
],
[
2744,
2757
]
],
[
[
956,
972
],
[
29778,
29794
]
],
[
[
978,
988
],
[
30525,
30535
]
],
[
[
1035,
1047
],
[
14543,
14555
],
[
19445,
19457
],
[
23373,
23385
],
[
24389,
24401
],
[
25331,
25343
],
[
26230,
26242
]
],
[
[
1085,
1092
],
[
19272,
19279
],
[
21547,
21554
]
],
[
[
1094,
1100
],
[
21563,
21569
]
],
[
[
1145,
1155
],
[
3552,
3562
]
],
[
[
1161,
1176
],
[
1987,
2002
]
],
[
[
1182,
1200
],
[
12223,
12241
]
],
[
[
1206,
1219
],
[
11700,
11713
],
[
12000,
12013
],
[
12243,
12256
],
[
35637,
35650
],
[
41445,
41458
],
[
3581,
3594
],
[
3607,
3620
],
[
3947,
3960
],
[
30299,
30312
],
[
8310,
8323
],
[
9107,
9120
],
[
9272,
9285
],
[
9831,
9844
],
[
10004,
10017
]
],
[
[
1225,
1236
],
[
12258,
12269
]
],
[
[
1277,
1287
],
[
11567,
11577
]
],
[
[
1290,
1303
],
[
36419,
36432
],
[
36536,
36549
]
],
[
[
1369,
1395
],
[
4525,
4551
],
[
6873,
6899
]
],
[
[
2083,
2096
],
[
2612,
2625
],
[
2649,
2662
]
],
[
[
2600,
2609
],
[
4486,
4495
],
[
6820,
6829
],
[
8226,
8235
],
[
9587,
9596
]
],
[
[
2638,
2646
],
[
4442,
4450
],
[
8183,
8191
]
],
[
[
2680,
2697
],
[
4806,
4823
]
],
[
[
2803,
2821
],
[
5235,
5253
],
[
5804,
5822
],
[
6002,
6020
],
[
6412,
6430
],
[
7363,
7381
],
[
7766,
7784
]
],
[
[
3967,
3982
],
[
13793,
13808
],
[
14515,
14530
]
],
[
[
6557,
6582
],
[
14073,
14098
]
],
[
[
7921,
7935
],
[
14214,
14228
]
],
[
[
9333,
9353
],
[
14334,
14354
]
],
[
[
10059,
10076
],
[
11671,
11688
],
[
11971,
11988
]
],
[
[
11628,
11668
],
[
30844,
30884
],
[
31122,
31162
],
[
31402,
31442
]
],
[
[
11956,
11968
],
[
14657,
14669
],
[
19559,
19571
]
],
[
[
12196,
12209
],
[
13567,
13580
]
],
[
[
12280,
12286
],
[
31682,
31688
],
[
32251,
32257
],
[
34950,
34956
],
[
37269,
37275
]
],
[
[
31647,
31660
],
[
2783,
2796
],
[
4856,
4869
],
[
5329,
5342
],
[
7128,
7141
],
[
7629,
7642
],
[
8978,
8991
],
[
9169,
9182
],
[
9696,
9709
],
[
9893,
9906
]
],
[
[
32231,
32250
],
[
19301,
19320
],
[
21591,
21610
],
[
33607,
33626
],
[
33654,
33673
],
[
33966,
33985
],
[
34127,
34146
]
],
[
[
34927,
34931
],
[
23309,
23313
],
[
35728,
35732
],
[
36033,
36037
],
[
36187,
36191
],
[
36587,
36591
]
],
[
[
37212,
37224
],
[
41487,
41499
]
],
[
[
41467,
41473
]
]
] |
#!/usr/bin/env python
# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Command line application and sample code for destroying a secret version.
"""
import argparse
# [START secretmanager_destroy_secret_version]
def destroy_secret_version(project_id, secret_id, version_id):
"""
Destroy the given secret version, making the payload irrecoverable. Other
    secret versions are unaffected.
"""
# Import the Secret Manager client library.
from google.cloud import secretmanager_v1beta1 as secretmanager
# Create the Secret Manager client.
client = secretmanager.SecretManagerServiceClient()
# Build the resource name of the secret version
name = client.secret_version_path(project_id, secret_id, version_id)
# Destroy the secret version.
response = client.destroy_secret_version(name)
print('Destroyed secret version: {}'.format(response.name))
# [END secretmanager_destroy_secret_version]
return response
if __name__ == '__main__':
parser = argparse.ArgumentParser(
description=__doc__,
formatter_class=argparse.RawDescriptionHelpFormatter)
parser.add_argument('project_id', help='id of the GCP project')
parser.add_argument('secret_id', help='id of the secret from which to act')
parser.add_argument('version_id', help='id of the version to destroy')
args = parser.parse_args()
destroy_secret_version(args.project_id, args.secret_id, args.version_id)
| [
[
[
654,
662
],
[
1505,
1513
],
[
1583,
1591
]
],
[
[
716,
738
],
[
1880,
1902
]
],
[
[
1496,
1502
],
[
1625,
1631
],
[
1693,
1699
],
[
1773,
1779
],
[
1855,
1861
]
],
[
[
1848,
1852
],
[
1903,
1907
],
[
1920,
1924
],
[
1936,
1940
]
]
] |
"""Polygons and their linear ring components
"""
from ctypes import c_double, c_void_p, cast, POINTER
from ctypes import ArgumentError
import weakref
from shapely.algorithms.cga import signed_area
from shapely.coords import required
from shapely.geos import lgeos
from shapely.geometry.base import BaseGeometry
from shapely.geometry.linestring import LineString, LineStringAdapter
from shapely.geometry.proxy import PolygonProxy
__all__ = ['Polygon', 'asPolygon', 'LinearRing', 'asLinearRing']
class LinearRing(LineString):
"""
A closed one-dimensional feature comprising one or more line segments
A LinearRing that crosses itself or touches itself at a single point is
invalid and operations on it may fail.
"""
def __init__(self, coordinates=None):
"""
Parameters
----------
coordinates : sequence
A sequence of (x, y [,z]) numeric coordinate pairs or triples
        Rings are implicitly closed. There is no need to specify a final
coordinate pair identical to the first.
Example
-------
Construct a square ring.
        >>> ring = LinearRing(((0, 0), (0, 1), (1, 1), (1, 0)))
>>> ring.is_closed
True
>>> ring.length
4.0
"""
BaseGeometry.__init__(self)
if coordinates is not None:
self._set_coords(coordinates)
@property
def __geo_interface__(self):
return {
'type': 'LinearRing',
'coordinates': tuple(self.coords)
}
# Coordinate access
_get_coords = BaseGeometry._get_coords
def _set_coords(self, coordinates):
self.empty()
self._geom, self._ndim = geos_linearring_from_py(coordinates)
coords = property(_get_coords, _set_coords)
@property
def is_ccw(self):
"""True is the ring is oriented counter clock-wise"""
return bool(self.impl['is_ccw'](self))
@property
def is_simple(self):
"""True if the geometry is simple, meaning that any self-intersections
are only at boundary points, else False"""
return LineString(self).is_simple
class LinearRingAdapter(LineStringAdapter):
__p__ = None
def __init__(self, context):
self.context = context
self.factory = geos_linearring_from_py
@property
def __geo_interface__(self):
return {
'type': 'LinearRing',
'coordinates': tuple(self.coords)
}
coords = property(BaseGeometry._get_coords)
def asLinearRing(context):
"""Adapt an object to the LinearRing interface"""
return LinearRingAdapter(context)
class InteriorRingSequence(object):
_factory = None
_geom = None
__p__ = None
_ndim = None
_index = 0
_length = 0
__rings__ = None
_gtag = None
def __init__(self, parent):
self.__p__ = parent
self._geom = parent._geom
self._ndim = parent._ndim
def __iter__(self):
self._index = 0
self._length = self.__len__()
return self
def next(self):
if self._index < self._length:
ring = self._get_ring(self._index)
self._index += 1
return ring
else:
raise StopIteration
def __len__(self):
return lgeos.GEOSGetNumInteriorRings(self._geom)
def __getitem__(self, key):
m = self.__len__()
if isinstance(key, int):
if key + m < 0 or key >= m:
raise IndexError("index out of range")
if key < 0:
i = m + key
else:
i = key
return self._get_ring(i)
elif isinstance(key, slice):
res = []
start, stop, stride = key.indices(m)
for i in xrange(start, stop, stride):
res.append(self._get_ring(i))
return res
else:
raise TypeError("key must be an index or slice")
@property
    def _longest(self):
        # Return the length of the longest ring's coordinate sequence.
        longest = 0
        for g in iter(self):
            n = len(g.coords)
            if n > longest:
                longest = n
        return longest
def gtag(self):
return hash(repr(self.__p__))
def _get_ring(self, i):
gtag = self.gtag()
if gtag != self._gtag:
self.__rings__ = {}
if i not in self.__rings__:
g = lgeos.GEOSGetInteriorRingN(self._geom, i)
ring = LinearRing()
ring.__geom__ = g
ring.__p__ = self
ring._owned = True
ring._ndim = self._ndim
self.__rings__[i] = weakref.ref(ring)
return self.__rings__[i]()
class Polygon(BaseGeometry):
"""
A two-dimensional figure bounded by a linear ring
A polygon has a non-zero area. It may have one or more negative-space
"holes" which are also bounded by linear rings. If any rings cross each
other, the feature is invalid and operations on it may fail.
Attributes
----------
exterior : LinearRing
The ring which bounds the positive space of the polygon.
interiors : sequence
A sequence of rings which bound all existing holes.
"""
_exterior = None
_interiors = []
_ndim = 2
def __init__(self, shell=None, holes=None):
"""
Parameters
----------
shell : sequence
A sequence of (x, y [,z]) numeric coordinate pairs or triples
holes : sequence
A sequence of objects which satisfy the same requirements as the
shell parameters above
Example
-------
Create a square polygon with no holes
>>> coords = ((0., 0.), (0., 1.), (1., 1.), (1., 0.), (0., 0.))
>>> polygon = Polygon(coords)
>>> polygon.area
1.0
"""
BaseGeometry.__init__(self)
if shell is not None:
self._geom, self._ndim = geos_polygon_from_py(shell, holes)
@property
def exterior(self):
if self.is_empty:
return None
elif self._exterior is None or self._exterior() is None:
g = lgeos.GEOSGetExteriorRing(self._geom)
ring = LinearRing()
ring.__geom__ = g
ring.__p__ = self
ring._owned = True
ring._ndim = self._ndim
self._exterior = weakref.ref(ring)
return self._exterior()
@property
def interiors(self):
if self.is_empty:
return []
return InteriorRingSequence(self)
@property
def ctypes(self):
if not self._ctypes_data:
self._ctypes_data = self.exterior.ctypes
return self._ctypes_data
@property
def __array_interface__(self):
raise NotImplementedError(
"A polygon does not itself provide the array interface. Its rings do.")
def _get_coords(self):
raise NotImplementedError(
"Component rings have coordinate sequences, but the polygon does not")
def _set_coords(self, ob):
raise NotImplementedError(
"Component rings have coordinate sequences, but the polygon does not")
@property
def coords(self):
raise NotImplementedError(
"Component rings have coordinate sequences, but the polygon does not")
@property
def __geo_interface__(self):
coords = [tuple(self.exterior.coords)]
for hole in self.interiors:
coords.append(tuple(hole.coords))
return {
'type': 'Polygon',
'coordinates': tuple(coords)
}
class PolygonAdapter(PolygonProxy, Polygon):
def __init__(self, shell, holes=None):
self.shell = shell
self.holes = holes
self.context = (shell, holes)
self.factory = geos_polygon_from_py
@property
def _ndim(self):
try:
# From array protocol
array = self.shell.__array_interface__
n = array['shape'][1]
assert n == 2 or n == 3
return n
except AttributeError:
# Fall back on list
return len(self.shell[0])
def asPolygon(shell, holes=None):
"""Adapt objects to the Polygon interface"""
return PolygonAdapter(shell, holes)
def orient(polygon, sign=1.0):
s = float(sign)
rings = []
ring = polygon.exterior
if signed_area(ring)/s >= 0.0:
rings.append(ring)
else:
rings.append(list(ring.coords)[::-1])
for ring in polygon.interiors:
if signed_area(ring)/s <= 0.0:
rings.append(ring)
else:
rings.append(list(ring.coords)[::-1])
return Polygon(rings[0], rings[1:])
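# Illustrative example (not part of the original module): with sign=1.0,
# orient() returns a polygon whose exterior winds counter-clockwise,
# reversing any ring whose signed area disagrees with the requested
# orientation.
#
# >>> cw = Polygon([(0, 0), (0, 1), (1, 1), (1, 0)])  # clockwise shell
# >>> orient(cw, 1.0).exterior.is_ccw
# True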
def geos_linearring_from_py(ob, update_geom=None, update_ndim=0):
# If numpy is present, we use numpy.require to ensure that we have a
    # C-contiguous array that owns its data. View data will be copied.
ob = required(ob)
try:
# From array protocol
array = ob.__array_interface__
assert len(array['shape']) == 2
m = array['shape'][0]
n = array['shape'][1]
if m < 3:
raise ValueError(
"A LinearRing must have at least 3 coordinate tuples")
assert n == 2 or n == 3
# Make pointer to the coordinate array
if isinstance(array['data'], tuple):
# numpy tuple (addr, read-only)
cp = cast(array['data'][0], POINTER(c_double))
else:
cp = array['data']
# Add closing coordinates to sequence?
if cp[0] != cp[m*n-n] or cp[1] != cp[m*n-n+1]:
M = m + 1
else:
M = m
# Create a coordinate sequence
if update_geom is not None:
cs = lgeos.GEOSGeom_getCoordSeq(update_geom)
if n != update_ndim:
raise ValueError(
"Wrong coordinate dimensions; this geometry has dimensions: %d" \
% update_ndim)
else:
cs = lgeos.GEOSCoordSeq_create(M, n)
# add to coordinate sequence
for i in xrange(m):
# Because of a bug in the GEOS C API,
# always set X before Y
lgeos.GEOSCoordSeq_setX(cs, i, cp[n*i])
lgeos.GEOSCoordSeq_setY(cs, i, cp[n*i+1])
if n == 3:
lgeos.GEOSCoordSeq_setZ(cs, i, cp[n*i+2])
# Add closing coordinates to sequence?
if M > m:
# Because of a bug in the GEOS C API,
# always set X before Y
lgeos.GEOSCoordSeq_setX(cs, M-1, cp[0])
lgeos.GEOSCoordSeq_setY(cs, M-1, cp[1])
if n == 3:
lgeos.GEOSCoordSeq_setZ(cs, M-1, cp[2])
except AttributeError:
# Fall back on list
m = len(ob)
n = len(ob[0])
if m < 3:
raise ValueError(
"A LinearRing must have at least 3 coordinate tuples")
assert (n == 2 or n == 3)
# Add closing coordinates if not provided
if m == 3 or ob[0][0] != ob[-1][0] or ob[0][1] != ob[-1][1]:
M = m + 1
else:
M = m
# Create a coordinate sequence
if update_geom is not None:
cs = lgeos.GEOSGeom_getCoordSeq(update_geom)
if n != update_ndim:
raise ValueError(
"Wrong coordinate dimensions; this geometry has dimensions: %d" \
% update_ndim)
else:
cs = lgeos.GEOSCoordSeq_create(M, n)
# add to coordinate sequence
for i in xrange(m):
coords = ob[i]
# Because of a bug in the GEOS C API,
# always set X before Y
lgeos.GEOSCoordSeq_setX(cs, i, coords[0])
lgeos.GEOSCoordSeq_setY(cs, i, coords[1])
if n == 3:
try:
lgeos.GEOSCoordSeq_setZ(cs, i, coords[2])
except IndexError:
raise ValueError("Inconsistent coordinate dimensionality")
# Add closing coordinates to sequence?
if M > m:
coords = ob[0]
# Because of a bug in the GEOS C API,
# always set X before Y
lgeos.GEOSCoordSeq_setX(cs, M-1, coords[0])
lgeos.GEOSCoordSeq_setY(cs, M-1, coords[1])
if n == 3:
lgeos.GEOSCoordSeq_setZ(cs, M-1, coords[2])
if update_geom is not None:
return None
else:
return lgeos.GEOSGeom_createLinearRing(cs), n
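# Illustrative note (not part of the original module): because rings are
# implicitly closed, an "open" 4-point square gains a fifth, closing
# coordinate when the sequence is built (the M = m + 1 branch above).
#
# >>> ring = LinearRing([(0, 0), (0, 1), (1, 1), (1, 0)])
# >>> len(ring.coords)
# 5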
def update_linearring_from_py(geom, ob):
geos_linearring_from_py(ob, geom._geom, geom._ndim)
def geos_polygon_from_py(shell, holes=None):
if shell is not None:
geos_shell, ndim = geos_linearring_from_py(shell)
if holes:
ob = holes
L = len(ob)
exemplar = ob[0]
try:
N = len(exemplar[0])
except TypeError:
N = exemplar._ndim
assert L >= 1
assert N == 2 or N == 3
# Array of pointers to ring geometries
geos_holes = (c_void_p * L)()
# add to coordinate sequence
for l in xrange(L):
geom, ndim = geos_linearring_from_py(ob[l])
geos_holes[l] = cast(geom, c_void_p)
else:
geos_holes = POINTER(c_void_p)()
L = 0
return (
lgeos.GEOSGeom_createPolygon(
c_void_p(geos_shell),
geos_holes,
L
),
ndim
)
# Test runner
def _test():
import doctest
doctest.testmod()
if __name__ == "__main__":
_test()
| [
[
[
69,
77
],
[
9462,
9470
]
],
[
[
79,
87
],
[
13138,
13146
],
[
13335,
13343
],
[
13392,
13400
],
[
13505,
13513
]
],
[
[
89,
93
],
[
9431,
9435
],
[
13324,
13328
]
],
[
[
95,
102
],
[
9454,
9461
],
[
13384,
13391
]
],
[
[
122,
135
]
],
[
[
143,
150
],
[
4631,
4638
],
[
6389,
6396
]
],
[
[
187,
198
],
[
8394,
8405
],
[
8551,
8562
]
],
[
[
226,
234
],
[
8935,
8943
]
],
[
[
260,
265
],
[
3344,
3349
],
[
4398,
4403
],
[
6163,
6168
],
[
9768,
9773
],
[
10019,
10024
],
[
10216,
10221
],
[
10268,
10273
],
[
10349,
10354
],
[
10564,
10569
],
[
10616,
10621
],
[
10695,
10700
],
[
11266,
11271
],
[
11517,
11522
],
[
11749,
11754
],
[
11803,
11808
],
[
11909,
11914
],
[
12257,
12262
],
[
12313,
12318
],
[
12396,
12401
],
[
12518,
12523
],
[
13451,
13456
]
],
[
[
300,
312
],
[
1614,
1626
],
[
2536,
2548
],
[
4708,
4720
],
[
1305,
1317
],
[
5862,
5874
]
],
[
[
353,
363
],
[
516,
526
],
[
2151,
2161
]
],
[
[
365,
382
],
[
2204,
2221
]
],
[
[
418,
430
],
[
7633,
7645
]
],
[
[
432,
439
]
],
[
[
505,
515
],
[
4459,
4469
],
[
6220,
6230
]
],
[
[
2186,
2203
],
[
2656,
2673
]
],
[
[
2568,
2580
]
],
[
[
2691,
2711
],
[
6542,
6562
]
],
[
[
4700,
4707
],
[
7647,
7654
],
[
8685,
8692
]
],
[
[
7618,
7632
],
[
8263,
8277
]
],
[
[
8173,
8182
]
],
[
[
8297,
8303
]
],
[
[
8719,
8742
],
[
1734,
1757
],
[
2330,
2353
],
[
12603,
12626
],
[
12754,
12777
],
[
13261,
13284
]
],
[
[
12562,
12587
]
],
[
[
12660,
12680
],
[
5958,
5978
],
[
7820,
7840
]
],
[
[
13666,
13671
],
[
13748,
13753
]
]
] |
x=2
print(x == 2)
print(x == 3)
print(x<3)
#Boolean operators
name = "John"
age = 23
if name == "John" and age == 23:
print("Your name is John, and you are also 23 years old.")
if name == "John" or name == "Rick":
print("Your name is either John or Rick.")
# in operator
#The "in" operator could be used to check if a specified object exists within an iterable object container, such as a list:
mylist=["John","Rick"]
if name in mylist:
print("You are here with us")
# if else statement block in python
x=3
if(x==2):
print("x is 2")
elif(x==3):
print("x is 3")
else:
print("value doesnot match")
| [
[
[
0,
1
],
[
11,
12
],
[
26,
27
],
[
41,
42
]
],
[
[
71,
75
],
[
99,
103
],
[
199,
203
],
[
217,
221
],
[
453,
457
]
],
[
[
86,
89
],
[
118,
121
]
],
[
[
424,
430
],
[
461,
467
]
],
[
[
544,
545
],
[
552,
553
],
[
586,
587
]
]
] |
from . import Base
from sqlalchemy import Column, Integer, Text, DateTime, ForeignKey
from datetime import datetime
class Chapter(Base):
__tablename__ = "chapters"
id = Column(Integer, primary_key=True, autoincrement=True)
manga_id = Column(Integer, ForeignKey("manga.id"))
chapter_no = Column(Integer)
chapter_postfix = Column(Text)
ordinal = Column(Integer)
page_count = Column(Integer)
title = Column(Text)
version = Column(Integer)
language_id = Column(Text)
group_id = Column(Integer)
date_added = Column(DateTime)
ipfs_link = Column(Text)
def to_dict(self):
return {
"id" : self.id,
"manga_id" : self.manga_id,
"chapter_no" : self.chapter_no,
"chapter_postfix" : self.chapter_postfix,
"ordinal" : self.ordinal,
"title" : self.title,
"page_count" : self.page_count,
"version" : self.version,
"language_id" : self.language_id,
"group_id" : self.group_id,
"date_added" : int(self.date_added.timestamp()),
"ipfs_link" : self.ipfs_link
} | [
[
[
14,
18
],
[
131,
135
]
],
[
[
42,
48
],
[
179,
185
],
[
248,
254
],
[
305,
311
],
[
343,
349
],
[
370,
376
],
[
403,
409
],
[
431,
437
],
[
458,
464
],
[
492,
498
],
[
520,
526
],
[
553,
559
],
[
586,
592
]
],
[
[
50,
57
],
[
186,
193
],
[
255,
262
],
[
312,
319
],
[
377,
384
],
[
410,
417
],
[
465,
472
],
[
527,
534
]
],
[
[
59,
63
],
[
350,
354
],
[
438,
442
],
[
499,
503
],
[
593,
597
]
],
[
[
65,
73
],
[
560,
568
]
],
[
[
75,
85
],
[
264,
274
]
],
[
[
107,
115
]
],
[
[
123,
130
]
]
] |
from systems.plugins.index import BaseProvider
import re
import shlex
class Provider(BaseProvider('task', 'command')):
def execute(self, results, params):
env = self._env_vars(params)
stdin = params.pop('input', self.field_input)
cwd = params.pop('cwd', self.field_cwd)
display = params.pop('display', self.field_display)
options = self._merge_options(self.field_options, params, self.field_lock)
command = self._interpolate(self.field_command, options)
if self.field_sudo:
command = 'sudo ' + command[0]
else:
command = command[0]
self.command.sh(shlex.split(command),
input = stdin,
display = display,
env = env,
cwd = cwd
)
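# Illustrative note (not from the original source): shlex.split performs
# shell-like tokenization, so quoted arguments in the interpolated command
# survive as single argv entries:
#
# >>> import shlex
# >>> shlex.split("sudo systemctl restart 'my service'")
# ['sudo', 'systemctl', 'restart', 'my service']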
| [
[
[
34,
46
],
[
88,
100
]
],
[
[
55,
57
]
],
[
[
65,
70
],
[
654,
659
]
],
[
[
79,
87
]
]
] |
"""
Tests for `kolibri.utils.cli` module.
"""
from __future__ import absolute_import
from __future__ import print_function
import logging
import os
import tempfile
import pytest
from django.db.utils import OperationalError
from mock import patch
import kolibri
from kolibri.plugins.utils import autoremove_unavailable_plugins
from kolibri.utils import cli
from kolibri.utils import options
logger = logging.getLogger(__name__)
LOG_LOGGER = []
def log_logger(logger_instance, LEVEL, msg, args, **kwargs):
"""
    Monkeypatching for logging.Logger._log to scoop up log messages if we want
    to test that something specific was logged.
"""
LOG_LOGGER.append((LEVEL, msg))
# Call the original function
logger_instance.__log(LEVEL, msg, args, **kwargs)
def activate_log_logger(monkeypatch):
"""
Activates logging everything to ``LOG_LOGGER`` with the monkeypatch pattern
of py.test (test accepts a ``monkeypatch`` argument)
"""
monkeypatch.setattr(logging.Logger, "__log", logging.Logger._log, raising=False)
monkeypatch.setattr(logging.Logger, "_log", log_logger)
@pytest.fixture
def plugins():
from kolibri import plugins
_, config_file = tempfile.mkstemp(suffix="json")
old_config_file = plugins.conf_file
plugins.conf_file = config_file
plugins.config.set_defaults()
yield plugins
plugins.conf_file = old_config_file
def test_bogus_plugin_autoremove(plugins):
"""
Checks that a plugin is auto-removed when it cannot be imported
"""
plugin_name = "giraffe.horse"
plugins.config["INSTALLED_PLUGINS"].add(plugin_name)
plugins.config.save()
autoremove_unavailable_plugins()
assert plugin_name not in plugins.config["INSTALLED_PLUGINS"]
def test_bogus_plugin_autoremove_no_path(plugins):
"""
Checks that a plugin without a dotted path is also auto-removed
"""
plugin_name = "giraffehorse"
plugins.config["INSTALLED_PLUGINS"].add(plugin_name)
plugins.config.save()
autoremove_unavailable_plugins()
assert plugin_name not in plugins.config["INSTALLED_PLUGINS"]
def test_bogus_plugin_disable(plugins):
installed_apps_before = plugins.config["INSTALLED_PLUGINS"].copy()
disabled_apps_before = plugins.config["DISABLED_PLUGINS"].copy()
try:
cli.disable.callback(("i_do_not_exist",), False)
except Exception:
pass
assert installed_apps_before == plugins.config["INSTALLED_PLUGINS"]
assert disabled_apps_before == plugins.config["DISABLED_PLUGINS"]
def test_plugin_cannot_be_imported_disable(plugins):
"""
A plugin may be in plugins.config['INSTALLED_PLUGINS'] but broken or uninstalled
"""
plugin_name = "giraffe.horse"
plugins.config["INSTALLED_PLUGINS"].add(plugin_name)
plugins.config.save()
try:
cli.disable.callback((plugin_name,), False)
except Exception:
pass
assert plugin_name not in plugins.config["INSTALLED_PLUGINS"]
# We also don't want to endlessly add cruft to the disabled apps
assert plugin_name not in plugins.config["DISABLED_PLUGINS"]
def test_real_plugin_disable(plugins):
installed_apps_before = plugins.config["INSTALLED_PLUGINS"].copy()
test_plugin = "kolibri.plugins.media_player"
assert test_plugin in installed_apps_before
# Because RIP example plugin
cli.disable.callback((test_plugin,), False)
assert test_plugin not in plugins.config["INSTALLED_PLUGINS"]
assert test_plugin in plugins.config["DISABLED_PLUGINS"]
def test_real_plugin_disable_twice(plugins):
installed_apps_before = plugins.config["INSTALLED_PLUGINS"].copy()
test_plugin = "kolibri.plugins.media_player"
assert test_plugin in installed_apps_before
cli.disable.callback((test_plugin,), False)
assert test_plugin not in plugins.config.ACTIVE_PLUGINS
assert test_plugin not in plugins.config["INSTALLED_PLUGINS"]
assert test_plugin in plugins.config["DISABLED_PLUGINS"]
installed_apps_before = plugins.config["INSTALLED_PLUGINS"].copy()
cli.disable.callback((test_plugin,), False)
assert test_plugin not in plugins.config.ACTIVE_PLUGINS
assert test_plugin not in plugins.config["INSTALLED_PLUGINS"]
assert test_plugin in plugins.config["DISABLED_PLUGINS"]
def test_plugin_with_no_plugin_class(plugins):
"""
Expected behavior is that nothing blows up with exceptions, user just gets
a warning and nothing is enabled or changed in the configuration.
"""
# For fun, we pass in a system library
installed_apps_before = plugins.config["INSTALLED_PLUGINS"].copy()
try:
cli.enable.callback(("os.path",), False)
except Exception:
pass
assert installed_apps_before == plugins.config["INSTALLED_PLUGINS"]
@pytest.mark.django_db
def test_kolibri_listen_port_env(monkeypatch):
"""
Starts and stops the server, mocking the actual server.start()
Checks that the correct fallback port is used from the environment.
"""
with patch("django.core.management.call_command"), patch(
"kolibri.utils.server.start"
) as start:
from kolibri.utils import server
def start_mock(port, *args, **kwargs):
assert port == test_port
try:
os.remove(server.STARTUP_LOCK)
except OSError:
pass
activate_log_logger(monkeypatch)
start.side_effect = start_mock
test_port = 1234
os.environ["KOLIBRI_HTTP_PORT"] = str(test_port)
# force a reload of plugins.OPTIONS so the environment variable will be read in
from kolibri.utils import conf
conf.OPTIONS.update(options.read_options_file(conf.KOLIBRI_HOME))
cli.start.callback(test_port, False)
with pytest.raises(SystemExit) as excinfo:
cli.stop.callback()
assert excinfo.code == 0
# Stop the server AGAIN, asserting that we can call the stop command
# on an already stopped server and will be gracefully informed about
# it.
with pytest.raises(SystemExit) as excinfo:
cli.stop.callback()
assert excinfo.code == 0
assert "Already stopped" in LOG_LOGGER[-1][1]
def status_starting_up():
raise server.NotRunning(server.STATUS_STARTING_UP)
# Ensure that if a server is reported to be 'starting up', it doesn't
# get killed while doing that.
monkeypatch.setattr(server, "get_status", status_starting_up)
with pytest.raises(SystemExit) as excinfo:
cli.stop.callback()
assert excinfo.code == server.STATUS_STARTING_UP
assert "Not stopped" in LOG_LOGGER[-1][1]
@pytest.mark.django_db
@patch("kolibri.utils.cli.get_version", return_value="")
@patch("kolibri.utils.cli.update")
@patch("kolibri.utils.cli.plugin.callback")
@patch("kolibri.core.deviceadmin.utils.dbbackup")
def test_first_run(dbbackup, plugin, update, get_version):
"""
Tests that the first_run() function performs as expected
"""
cli.initialize()
update.assert_called_once()
dbbackup.assert_not_called()
# Check that it got called for each default plugin
from kolibri import plugins
assert set(plugins.config["INSTALLED_PLUGINS"]) == set(plugins.DEFAULT_PLUGINS)
@pytest.mark.django_db
@patch("kolibri.utils.cli.get_version", return_value="0.0.1")
@patch("kolibri.utils.cli.update")
def test_update(update, get_version):
"""
Tests that update() function performs as expected
"""
cli.initialize()
update.assert_called_once()
@pytest.mark.django_db
@patch("kolibri.utils.cli.get_version", return_value="0.0.1")
def test_update_exits_if_running(get_version):
"""
Tests that update() function performs as expected
"""
with patch("kolibri.utils.cli.server.get_status"):
try:
cli.initialize()
pytest.fail("Update did not exit when Kolibri was already running")
except SystemExit:
pass
@pytest.mark.django_db
def test_version_updated():
"""
Tests our db backup logic: version_updated gets any change, backup gets only non-dev changes
"""
assert cli.version_updated("0.10.0", "0.10.1")
assert not cli.version_updated("0.10.0", "0.10.0")
assert not cli.should_back_up("0.10.0-dev0", "")
assert not cli.should_back_up("0.10.0-dev0", "0.10.0")
assert not cli.should_back_up("0.10.0", "0.10.0-dev0")
assert not cli.should_back_up("0.10.0-dev0", "0.10.0-dev0")
@pytest.mark.django_db
@patch("kolibri.utils.cli.get_version", return_value=kolibri.__version__)
@patch("kolibri.utils.cli.update")
@patch("kolibri.core.deviceadmin.utils.dbbackup")
def test_update_no_version_change(dbbackup, update, get_version):
"""
Tests that when the version doesn't change, we are not doing things we
shouldn't
"""
cli.initialize()
update.assert_not_called()
dbbackup.assert_not_called()
def test_cli_usage():
# Test the -h
with pytest.raises(SystemExit) as excinfo:
cli.main("-h")
assert excinfo.code == 0
with pytest.raises(SystemExit) as excinfo:
cli.main("--version")
assert excinfo.code == 0
@patch("kolibri.utils.cli.click.echo")
def test_list_plugins(echo_mock, plugins):
cli.list.callback()
test_plugin = "kolibri.plugins.media_player"
any(
map(
lambda x: test_plugin in x[0] and "ENABLED" in x[0],
echo_mock.call_args_list,
)
)
@patch("kolibri.utils.cli.click.echo")
def test_list_plugins_disabled(echo_mock, plugins):
cli.list.callback()
test_plugin = "kolibri.plugins.media_player"
cli.disable.callback((test_plugin,), False)
any(
map(
lambda x: test_plugin in x[0] and "DISABLED" in x[0],
echo_mock.call_args_list,
)
)
@patch("kolibri.utils.cli._migrate_databases")
@patch("kolibri.utils.cli.version_updated")
def test_migrate_if_unmigrated(version_updated, _migrate_databases):
# No matter what, ensure that version_updated returns False
version_updated.return_value = False
from morango.models import InstanceIDModel
with patch.object(
InstanceIDModel, "get_or_create_current_instance"
) as get_or_create_current_instance:
get_or_create_current_instance.side_effect = OperationalError("Test")
cli.initialize()
_migrate_databases.assert_called_once()
| [
[
[
69,
84
]
],
[
[
108,
122
]
],
[
[
131,
138
],
[
403,
410
],
[
988,
995
],
[
1013,
1020
],
[
1073,
1080
]
],
[
[
146,
148
],
[
5459,
5461
],
[
5263,
5265
]
],
[
[
156,
164
],
[
1196,
1204
]
],
[
[
173,
179
],
[
1112,
1118
],
[
4764,
4770
],
[
6703,
6709
],
[
7310,
7316
],
[
7593,
7599
],
[
8018,
8024
],
[
8525,
8531
],
[
5770,
5776
],
[
6059,
6065
],
[
6519,
6525
],
[
7903,
7909
],
[
9013,
9019
],
[
9116,
9122
]
],
[
[
208,
224
],
[
10361,
10377
]
],
[
[
242,
247
],
[
6726,
6731
],
[
6783,
6788
],
[
6818,
6823
],
[
6862,
6867
],
[
7333,
7338
],
[
7395,
7400
],
[
7616,
7621
],
[
8548,
8553
],
[
8622,
8627
],
[
8657,
8662
],
[
9220,
9225
],
[
9518,
9523
],
[
9874,
9879
],
[
9921,
9926
],
[
4998,
5003
],
[
5044,
5049
],
[
7803,
7808
],
[
10195,
10200
]
],
[
[
256,
263
],
[
8600,
8607
]
],
[
[
298,
328
],
[
1646,
1676
],
[
2002,
2032
]
],
[
[
355,
358
],
[
2300,
2303
],
[
2816,
2819
],
[
3341,
3344
],
[
3731,
3734
],
[
4037,
4040
],
[
4613,
4616
],
[
5720,
5723
],
[
5820,
5823
],
[
6109,
6112
],
[
6569,
6572
],
[
7052,
7055
],
[
7541,
7544
],
[
7874,
7877
],
[
8192,
8195
],
[
8247,
8250
],
[
8302,
8305
],
[
8355,
8358
],
[
8414,
8417
],
[
8473,
8476
],
[
8881,
8884
],
[
9059,
9062
],
[
9162,
9165
],
[
9305,
9308
],
[
9612,
9615
],
[
9685,
9688
],
[
10394,
10397
]
],
[
[
385,
392
],
[
5665,
5672
]
],
[
[
394,
400
]
],
[
[
433,
443
],
[
652,
662
],
[
6202,
6212
],
[
6682,
6692
]
],
[
[
455,
465
],
[
1097,
1107
]
],
[
[
777,
796
],
[
5352,
5371
]
],
[
[
1131,
1138
]
],
[
[
1402,
1430
]
],
[
[
1751,
1787
]
],
[
[
2107,
2132
]
],
[
[
2532,
2570
]
],
[
[
3101,
3125
]
],
[
[
3518,
3548
]
],
[
[
4274,
4306
]
],
[
[
4790,
4818
]
],
[
[
6915,
6929
]
],
[
[
7433,
7444
]
],
[
[
7681,
7709
]
],
[
[
8044,
8064
]
],
[
[
8710,
8739
]
],
[
[
8968,
8982
]
],
[
[
9262,
9279
]
],
[
[
9560,
9586
]
],
[
[
9968,
9994
]
]
] |
class Person:
    name = 'zhangsan'
    age = 20
p = Person()
print(p)  # no __str__ defined, so the default repr prints: <__main__.Person object at 0x10073e668>
print('⭐️ ' * 20)
class Stu:
    name = 'zhangsan'
    age = 20
    def __str__(self):
        return "name: %s; age: %d" % (self.name, self.age)
s = Stu()
print(s) # name: zhangsan; age: 20 | [
[
[
6,
12
],
[
46,
52
]
],
[
[
42,
43
],
[
61,
62
]
],
[
[
132,
135
],
[
244,
247
]
],
[
[
240,
241
],
[
256,
257
]
]
] |
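# --- Follow-up sketch (illustrative) to the Person/Stu sample above:
# print() uses __str__ when it is defined and falls back to __repr__;
# repr() and containers always use __repr__, so defining both is common.
class Stu2:
    name = 'zhangsan'
    age = 20
    def __str__(self):
        return "name: %s; age: %d" % (self.name, self.age)
    def __repr__(self):
        return "Stu2(name=%r, age=%r)" % (self.name, self.age)
print(str(Stu2()))   # name: zhangsan; age: 20
print(repr(Stu2()))  # Stu2(name='zhangsan', age=20)
print([Stu2()])      # lists render elements with __repr__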
""" Define the sublayers in encoder/decoder layer """
import numpy as np
import torch
import torch.nn as nn
import torch.nn.functional as F
class ScaledDotProductAttention(nn.Module):
""" Scaled Dot-Product Attention """
    def __init__(self, temperature, attn_dropout=0.1):
        super().__init__()
        self.temperature = temperature
        self.dropout = nn.Dropout(attn_dropout)
    def forward(self, q, k, v, mask=None):
        # Scale by the temperature (the caller passes sqrt(d_k)).
        attn = torch.matmul(q / self.temperature, k.transpose(2, 3))
        if mask is not None:
            attn = attn.masked_fill(mask == 0, -1e9)
        # Apply dropout to the attention weights before mixing the values.
        attn = self.dropout(F.softmax(attn, dim=-1))
        output = torch.matmul(attn, v)
        return output, attn
class MultiHeadAttention(nn.Module):
""" Multi-Head Attention module """
def __init__(self, n_head, d_model, d_k, d_v, dropout=0.1):
super().__init__()
self.n_head = n_head
self.d_k = d_k
self.d_v = d_v
self.w_qs = nn.Linear(d_model, n_head * d_k, bias=False)
self.w_ks = nn.Linear(d_model, n_head * d_k, bias=False)
self.w_vs = nn.Linear(d_model, n_head * d_v, bias=False)
self.fc = nn.Linear(n_head * d_v, d_model, bias=False)
self.attention = ScaledDotProductAttention(temperature=d_k ** 0.5)
self.dropout = nn.Dropout(dropout)
self.layer_norm = nn.LayerNorm(d_model, eps=1e-6)
def forward(self, q, k, v, mask=None):
d_k, d_v, n_head = self.d_k, self.d_v, self.n_head
sz_b, len_q, len_k, len_v = q.size(0), q.size(1), k.size(1), v.size(1)
residual = q
# Pass through the pre-attention projection: b x lq x (n*dv)
# Separate different heads: b x lq x n x dv
q = self.w_qs(q).view(sz_b, len_q, n_head, d_k)
k = self.w_ks(k).view(sz_b, len_k, n_head, d_k)
v = self.w_vs(v).view(sz_b, len_v, n_head, d_v)
# Transpose for attention dot product: b x n x lq x dv
q, k, v = q.transpose(1, 2), k.transpose(1, 2), v.transpose(1, 2)
if mask is not None:
mask = mask.unsqueeze(1) # For head axis broadcasting.
q, attn = self.attention(q, k, v, mask=mask)
# Transpose to move the head dimension back: b x lq x n x dv
# Combine the last two dimensions to concatenate all the heads together: b x lq x (n*dv)
q = q.transpose(1, 2).contiguous().view(sz_b, len_q, -1)
q = self.dropout(self.fc(q))
q += residual
q = self.layer_norm(q)
return q, attn
class PositionwiseFeedForward(nn.Module):
""" A two-feed-forward-layer module """
def __init__(self, d_in, d_hid, dropout=0.1):
super().__init__()
self.w_1 = nn.Linear(d_in, d_hid) # position-wise
self.w_2 = nn.Linear(d_hid, d_in) # position-wise
self.layer_norm = nn.LayerNorm(d_in, eps=1e-6)
self.dropout = nn.Dropout(dropout)
def forward(self, x):
residual = x
x = self.w_2(F.relu(self.w_1(x)))
x = self.dropout(x)
x += residual
x = self.layer_norm(x)
return x
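# --- Usage sketch (illustrative, not part of the original module) ---
# Quick shape check for MultiHeadAttention in self-attention mode; the
# sizes below (batch 2, length 10, d_model 512, 8 heads) are arbitrary.
if __name__ == '__main__':
    mha = MultiHeadAttention(n_head=8, d_model=512, d_k=64, d_v=64)
    x = torch.randn(2, 10, 512)   # b x lq x d_model
    out, attn = mha(x, x, x)      # q = k = v -> self-attention
    print(out.shape)              # torch.Size([2, 10, 512])
    print(attn.shape)             # torch.Size([2, 8, 10, 10])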
| [
[
[
61,
72
]
],
[
[
80,
85
],
[
451,
456
],
[
647,
652
]
],
[
[
93,
107
],
[
174,
176
],
[
725,
727
],
[
2546,
2548
],
[
966,
968
],
[
1031,
1033
],
[
1096,
1098
],
[
1159,
1161
],
[
1304,
1306
],
[
1350,
1352
],
[
2699,
2701
],
[
2758,
2760
],
[
2824,
2826
],
[
2876,
2878
]
],
[
[
115,
139
],
[
606,
607
],
[
2967,
2968
]
],
[
[
148,
173
],
[
1230,
1255
]
],
[
[
706,
724
]
],
[
[
2522,
2545
]
]
] |
'''Standard challenge module.'''
import os
import shutil
import fcntl
from cffi import FFI
from tornado import gen, concurrent, process
from tornado.stack_context import StackContext
from tornado.ioloop import IOLoop
import PyExt
import Privilege
import Config
from Utils import FileUtils
STATUS_NONE = 0
STATUS_AC = 1
STATUS_WA = 2
STATUS_RE = 3
STATUS_TLE = 4
STATUS_MLE = 5
STATUS_CE = 6
STATUS_ERR = 7
MS_BIND = 4096
class StdChal:
'''Standard challenge.
Static attributes:
last_uniqid (int): Last ID.
last_standard_uid (int): Last UID for standard tasks.
last_restrict_uid (int): Last UID for restricted tasks.
null_fd (int): File descriptor of /dev/null.
build_cache (dict): Cache information of builds.
build_cache_refcount (dict): Refcount of build caches.
Attributes:
uniqid (int): Unique ID.
code_path (string): Code path.
res_path (string): Resource path.
comp_typ (string): Type of compile.
judge_typ (string): Type of judge.
test_list ([dict]): Test parameter lists.
metadata (dict): Metadata for judge.
chal_id (int): Challenge ID.
chal_path (string): Challenge path.
'''
last_uniqid = 0
last_standard_uid = Config.CONTAINER_STANDARD_UID_BASE
last_restrict_uid = Config.CONTAINER_RESTRICT_UID_BASE
null_fd = None
@staticmethod
def init():
'''Initialize the module.'''
with StackContext(Privilege.fileaccess):
try:
shutil.rmtree('container/standard/home')
except FileNotFoundError:
pass
os.mkdir('container/standard/home', mode=0o771)
try:
shutil.rmtree('container/standard/cache')
except FileNotFoundError:
pass
os.mkdir('container/standard/cache', mode=0o771)
ffi = FFI()
ffi.cdef('''int mount(const char source[], const char target[],
const char filesystemtype[], unsigned long mountflags,
const void *data);''')
ffi.cdef('''int umount(const char *target);''')
libc = ffi.dlopen('libc.so.6')
with StackContext(Privilege.fullaccess):
libc.umount(b'container/standard/dev')
libc.mount(b'/dev', b'container/standard/dev', b'', MS_BIND, \
ffi.NULL)
StdChal.null_fd = os.open('/dev/null', os.O_RDWR | os.O_CLOEXEC)
StdChal.build_cache = {}
StdChal.build_cache_refcount = {}
@staticmethod
def get_standard_ugid():
'''Generate standard UID/GID.
Returns:
(int, int): Standard UID/GID
'''
StdChal.last_standard_uid += 1
return (StdChal.last_standard_uid, StdChal.last_standard_uid)
@staticmethod
def get_restrict_ugid():
'''Generate restrict UID/GID.
Returns:
(int, int): Restrict UID/GID
'''
StdChal.last_restrict_uid += 1
return (StdChal.last_restrict_uid, StdChal.last_restrict_uid)
@staticmethod
def build_cache_find(res_path):
'''Get build cache.
Args:
res_path (string): Resource path.
Returns:
(string, int): (cache hash, GID) or None if not found.
'''
try:
return StdChal.build_cache[res_path]
except KeyError:
return None
@staticmethod
def build_cache_update(res_path, cache_hash, gid):
'''Update build cache.
Args:
res_path (string): Resource path.
cache_hash (int): Cache hash.
gid (int): GID.
Returns:
None
'''
ret = StdChal.build_cache_find(res_path)
if ret is not None:
StdChal.build_cache_decref(ret[0])
del StdChal.build_cache[res_path]
StdChal.build_cache[res_path] = (cache_hash, gid)
StdChal.build_cache_refcount[cache_hash] = 1
@staticmethod
def build_cache_incref(cache_hash):
'''Increment the refcount of the build cache.
Args:
cache_hash (int): Cache hash.
Returns:
None
'''
StdChal.build_cache_refcount[cache_hash] += 1
@staticmethod
def build_cache_decref(cache_hash):
'''Decrement the refcount of the build cache.
Delete the build cache if the refcount = 0.
Args:
cache_hash (int): Cache hash.
Returns:
None
'''
StdChal.build_cache_refcount[cache_hash] -= 1
if StdChal.build_cache_refcount[cache_hash] == 0:
with StackContext(Privilege.fileaccess):
shutil.rmtree('container/standard/cache/%x'%cache_hash)
def __init__(self, chal_id, code_path, comp_typ, judge_typ, res_path, \
test_list, metadata):
'''Initialize.
Args:
chal_id (int): Challenge ID.
code_path (string): Code path.
comp_typ (string): Type of compile.
judge_typ (string): Type of judge.
res_path (string): Resource path.
test_list ([dict]): Test parameter lists.
metadata (dict): Metadata for judge.
'''
StdChal.last_uniqid += 1
self.uniqid = StdChal.last_uniqid
self.code_path = code_path
self.res_path = res_path
self.comp_typ = comp_typ
self.judge_typ = judge_typ
self.test_list = test_list
self.metadata = metadata
self.chal_id = chal_id
self.chal_path = None
StdChal.last_standard_uid += 1
self.compile_uid, self.compile_gid = StdChal.get_standard_ugid()
@gen.coroutine
def prefetch(self):
'''Prefetch files.'''
path_set = set([self.code_path])
for root, _, files in os.walk(self.res_path):
for filename in files:
path_set.add(os.path.abspath(os.path.join(root, filename)))
path_list = list(path_set)
proc_list = []
with StackContext(Privilege.fileaccess):
for idx in range(0, len(path_list), 16):
proc_list.append(process.Subprocess(
['./Prefetch.py'] + path_list[idx:idx + 16],
stdout=process.Subprocess.STREAM))
for proc in proc_list:
yield proc.stdout.read_bytes(2)
@gen.coroutine
def start(self):
'''Start the challenge.
Returns:
dict: Challenge result.
'''
cache_hash = None
cache_gid = None
# Check if special judge needs to rebuild.
if self.judge_typ in ['ioredir']:
hashproc = process.Subprocess( \
['./HashDir.py', self.res_path + '/check'], \
stdout=process.Subprocess.STREAM)
dirhash = yield hashproc.stdout.read_until(b'\n')
dirhash = int(dirhash.decode('utf-8').rstrip('\n'), 16)
ret = StdChal.build_cache_find(self.res_path)
if ret is not None and ret[0] == dirhash:
cache_hash, cache_gid = ret
judge_ioredir = IORedirJudge('container/standard', \
'/cache/%x'%cache_hash)
else:
cache_hash = dirhash
_, cache_gid = StdChal.get_standard_ugid()
build_ugid = StdChal.get_standard_ugid()
build_relpath = '/cache/%x'%cache_hash
build_path = 'container/standard' + build_relpath
judge_ioredir = IORedirJudge('container/standard', \
build_relpath)
if not (yield judge_ioredir.build(build_ugid, self.res_path)):
                    return [(0, 0, STATUS_ERR, '')] * len(self.test_list)
FileUtils.setperm(build_path, \
Privilege.JUDGE_UID, cache_gid, umask=0o750)
with StackContext(Privilege.fullaccess):
os.chmod(build_path, 0o750)
StdChal.build_cache_update(self.res_path, cache_hash, cache_gid)
print('StdChal %d built checker %x'%(self.chal_id, cache_hash))
StdChal.build_cache_incref(cache_hash)
print('StdChal %d started'%self.chal_id)
# Create challenge environment.
self.chal_path = 'container/standard/home/%d'%self.uniqid
with StackContext(Privilege.fileaccess):
os.mkdir(self.chal_path, mode=0o771)
try:
yield self.prefetch()
print('StdChal %d prefetched'%self.chal_id)
if self.comp_typ in ['g++', 'clang++']:
ret, verdict = yield self.comp_cxx()
elif self.comp_typ == 'makefile':
ret, verdict = yield self.comp_make()
elif self.comp_typ == 'python3':
ret, verdict = yield self.comp_python()
if ret != PyExt.DETECT_NONE:
return [(0, 0, STATUS_CE, verdict)] * len(self.test_list)
print('StdChal %d compiled'%self.chal_id)
# Prepare test arguments
if self.comp_typ == 'python3':
                exefile_path = self.chal_path \
                    + '/compile/__pycache__/test.cpython-35.pyc'
exe_path = '/usr/bin/python3.5'
argv = ['./a.out']
envp = ['HOME=/', 'LANG=en_US.UTF-8']
else:
exefile_path = self.chal_path + '/compile/a.out'
exe_path = './a.out'
argv = []
envp = []
# Prepare judge
test_future = []
if self.judge_typ == 'diff':
for test in self.test_list:
test_future.append(self.judge_diff(
exefile_path,
exe_path, argv, envp,
test['in'], test['ans'],
test['timelimit'], test['memlimit']))
elif self.judge_typ == 'ioredir':
for test in self.test_list:
check_uid, _ = StdChal.get_standard_ugid()
test_uid, test_gid = StdChal.get_restrict_ugid()
test_future.append(judge_ioredir.judge( \
exefile_path, exe_path, argv, envp, \
(check_uid, cache_gid), \
(test_uid, test_gid), \
'/home/%d/run_%d'%(self.uniqid, test_uid), \
test, self.metadata))
# Emit tests
test_result = yield gen.multi(test_future)
ret_result = list()
for result in test_result:
test_pass, data, verdict = result
runtime, peakmem, error = data
status = STATUS_ERR
if error == PyExt.DETECT_NONE:
if test_pass is True:
status = STATUS_AC
else:
status = STATUS_WA
elif error == PyExt.DETECT_OOM:
status = STATUS_MLE
elif error == PyExt.DETECT_TIMEOUT \
or error == PyExt.DETECT_FORCETIMEOUT:
status = STATUS_TLE
elif error == PyExt.DETECT_EXITERR:
status = STATUS_RE
else:
status = STATUS_ERR
ret_result.append((runtime, peakmem, status, verdict))
return ret_result
finally:
if cache_hash is not None:
StdChal.build_cache_decref(cache_hash)
with StackContext(Privilege.fileaccess):
shutil.rmtree(self.chal_path)
print('StdChal %d done'%self.chal_id)
@concurrent.return_future
def comp_cxx(self, callback=None):
'''GCC, Clang compile.
Args:
callback (function): Callback of return_future.
Returns:
None
'''
def _started_cb(task_id):
'''Started callback.
Close unused file descriptors after the task is started.
Args:
task_id (int): Task ID.
Returns:
None
'''
nonlocal errpipe_fd
os.close(errpipe_fd)
def _done_cb(task_id, stat):
'''Done callback.
Args:
task_id (int): Task ID.
stat (dict): Task result.
Returns:
None
'''
nonlocal compile_path
with StackContext(Privilege.fileaccess):
verfile = open(compile_path + '/verdict.txt', 'rb')
# To fix decoding error.
# Force convert the binary string to string temporarily.
verdict = ''.join(chr(c) for c in verfile.read(140))
verfile.close()
callback((stat['detect_error'], verdict))
compile_path = self.chal_path + '/compile'
with StackContext(Privilege.fileaccess):
os.mkdir(compile_path, mode=0o770)
shutil.copyfile(self.code_path, compile_path + '/test.cpp', \
follow_symlinks=False)
FileUtils.setperm(compile_path, self.compile_uid, self.compile_gid)
with StackContext(Privilege.fileaccess):
errpipe_fd = os.open(compile_path + '/verdict.txt', \
os.O_WRONLY | os.O_CREAT | os.O_CLOEXEC, mode=0o440)
if self.comp_typ == 'g++':
compiler = '/usr/bin/g++'
elif self.comp_typ == 'clang++':
compiler = '/usr/bin/clang++'
task_id = PyExt.create_task(compiler, \
[
'-O2',
'-std=c++14',
'-o', './a.out',
'./test.cpp',
], \
[
'PATH=/usr/bin:/bin',
'TMPDIR=/home/%d/compile'%self.uniqid,
], \
{
0: StdChal.null_fd,
1: StdChal.null_fd,
2: errpipe_fd,
}, \
'/home/%d/compile'%self.uniqid, 'container/standard', \
self.compile_uid, self.compile_gid, 60000, 1024 * 1024 * 1024, \
PyExt.RESTRICT_LEVEL_LOW)
if task_id is None:
os.close(errpipe_fd)
callback((PyExt.DETECT_INTERNALERR, ''))
return
PyExt.start_task(task_id, _done_cb, _started_cb)
@concurrent.return_future
def comp_make(self, callback=None):
'''Makefile compile.
Args:
callback (function): Callback of return_future.
Returns:
None
'''
def _done_cb(task_id, stat):
'''Done callback.
Args:
task_id (int): Task ID.
stat (dict): Task result.
Returns:
None
'''
callback((stat['detect_error'], ''))
make_path = self.chal_path + '/compile'
FileUtils.copydir(self.res_path + '/make', make_path)
with StackContext(Privilege.fileaccess):
shutil.copyfile(self.code_path, make_path + '/main.cpp', \
follow_symlinks=False)
FileUtils.setperm(make_path, self.compile_uid, self.compile_gid)
with StackContext(Privilege.fullaccess):
os.chmod(make_path, mode=0o770)
task_id = PyExt.create_task('/usr/bin/make', \
[], \
[
'PATH=/usr/bin:/bin',
'TMPDIR=/home/%d/compile'%self.uniqid,
'OUT=./a.out',
], \
{
0: StdChal.null_fd,
1: StdChal.null_fd,
2: StdChal.null_fd,
}, \
'/home/%d/compile'%self.uniqid, 'container/standard', \
self.compile_uid, self.compile_gid, 60000, 1024 * 1024 * 1024, \
PyExt.RESTRICT_LEVEL_LOW)
if task_id is None:
callback((PyExt.DETECT_INTERNALERR, ''))
else:
PyExt.start_task(task_id, _done_cb)
@concurrent.return_future
def comp_python(self, callback=None):
        '''Python 3.5 compile.
Args:
callback (function): Callback of return_future.
Returns:
None
'''
def _started_cb(task_id):
'''Started callback.
Close unused file descriptors after the task is started.
Args:
task_id (int): Task ID.
Returns:
None
'''
nonlocal errpipe_fd
os.close(errpipe_fd)
def _done_cb(task_id, stat):
'''Done callback.
Args:
task_id (int): Task ID.
stat (dict): Task result.
Returns:
None
'''
nonlocal compile_path
with StackContext(Privilege.fileaccess):
verfile = open(compile_path + '/verdict.txt', 'rb')
# To fix decoding error.
# Force convert the binary string to string temporarily.
verdict = ''.join(chr(c) for c in verfile.read(140))
verfile.close()
callback((stat['detect_error'], verdict))
compile_path = self.chal_path + '/compile'
with StackContext(Privilege.fileaccess):
os.mkdir(compile_path, mode=0o770)
shutil.copyfile(self.code_path, compile_path + '/test.py', \
follow_symlinks=False)
FileUtils.setperm(compile_path, self.compile_uid, self.compile_gid)
with StackContext(Privilege.fileaccess):
errpipe_fd = os.open(compile_path + '/verdict.txt', \
os.O_WRONLY | os.O_CREAT | os.O_CLOEXEC, mode=0o440)
task_id = PyExt.create_task('/usr/bin/python3.5', \
[
'-m',
'py_compile',
'./test.py'
], \
[
'HOME=/home/%d/compile'%self.uniqid,
'LANG=en_US.UTF-8'
], \
{
0: StdChal.null_fd,
1: StdChal.null_fd,
2: errpipe_fd,
}, \
'/home/%d/compile'%self.uniqid, 'container/standard', \
self.compile_uid, self.compile_gid, 60000, 1024 * 1024 * 1024, \
PyExt.RESTRICT_LEVEL_LOW)
if task_id is None:
os.close(errpipe_fd)
callback((PyExt.DETECT_INTERNALERR, ''))
return
PyExt.start_task(task_id, _done_cb, _started_cb)
@concurrent.return_future
def judge_diff(self, src_path, exe_path, argv, envp, in_path, ans_path, \
timelimit, memlimit, callback=None):
'''Diff judge.
Args:
src_path (string): Executable source path.
exe_path (string): Executable or interpreter path in the sandbox.
argv ([string]): List of arguments.
envp ([string]): List of environment variables.
in_path (string): Input file path.
ans_path (string): Answer file path.
timelimit (int): Timelimit.
memlimit (int): Memlimit.
callback (function): Callback of return_future.
Returns:
None
'''
def _started_cb(task_id):
'''Started callback.
Close unused file descriptors after the task is started.
Args:
task_id (int): Task ID.
Returns:
None
'''
nonlocal infile_fd
nonlocal outpipe_fd
os.close(infile_fd)
os.close(outpipe_fd[1])
IOLoop.instance().add_handler(outpipe_fd[0], _diff_out, \
IOLoop.READ | IOLoop.ERROR)
def _done_cb(task_id, stat):
'''Done callback.
Args:
task_id (int): Task ID.
stat (dict): Task result.
Returns:
None
'''
nonlocal result_stat
nonlocal result_pass
result_stat = (stat['utime'], stat['peakmem'], stat['detect_error'])
if result_pass is not None:
callback((result_pass, result_stat, ''))
def _diff_out(evfd, events):
'''Diff the output of the task.
Args:
evfd (int): Event file descriptor.
events (int): Event flags.
Returns:
None
'''
nonlocal outpipe_fd
nonlocal ansfile
nonlocal result_stat
nonlocal result_pass
end_flag = False
if events & IOLoop.READ:
while True:
try:
data = os.read(outpipe_fd[0], 65536)
except BlockingIOError:
break
ansdata = ansfile.read(len(data))
if data != ansdata:
result_pass = False
end_flag = True
break
if len(ansdata) == 0:
if len(ansfile.read(1)) == 0:
result_pass = True
else:
result_pass = False
end_flag = True
break
if (events & IOLoop.ERROR) or end_flag:
if result_pass is None:
if len(ansfile.read(1)) == 0:
result_pass = True
else:
result_pass = False
IOLoop.instance().remove_handler(evfd)
os.close(outpipe_fd[0])
ansfile.close()
if result_stat is not None:
callback((result_pass, result_stat, ''))
judge_uid, judge_gid = StdChal.get_restrict_ugid()
# Prepare I/O and stat.
with StackContext(Privilege.fileaccess):
infile_fd = os.open(in_path, os.O_RDONLY | os.O_CLOEXEC)
ansfile = open(ans_path, 'rb')
outpipe_fd = os.pipe2(os.O_CLOEXEC)
fcntl.fcntl(outpipe_fd[0], fcntl.F_SETFL, os.O_NONBLOCK)
result_stat = None
result_pass = None
# Prepare judge environment.
with StackContext(Privilege.fileaccess):
judge_path = self.chal_path + '/run_%d'%judge_uid
os.mkdir(judge_path, mode=0o771)
shutil.copyfile(src_path, judge_path + '/a.out', \
follow_symlinks=False)
with StackContext(Privilege.fullaccess):
os.chown(judge_path + '/a.out', judge_uid, judge_gid)
os.chmod(judge_path + '/a.out', 0o500)
task_id = PyExt.create_task(exe_path, argv, envp, \
{
0: infile_fd,
1: outpipe_fd[1],
2: outpipe_fd[1],
}, \
'/home/%d/run_%d'%(self.uniqid, judge_uid), 'container/standard', \
judge_uid, judge_gid, timelimit, memlimit, \
PyExt.RESTRICT_LEVEL_HIGH)
if task_id is None:
os.close(infile_fd)
os.close(outpipe_fd[0])
os.close(outpipe_fd[1])
ansfile.close()
callback((False, (0, 0, PyExt.DETECT_INTERNALERR), ''))
else:
PyExt.start_task(task_id, _done_cb, _started_cb)
class IORedirJudge:
    '''I/O redirect special judge.
Attributes:
container_path (string): Container path.
build_relpath (string): Relative build path.
build_path (string): Build path.
'''
def __init__(self, container_path, build_relpath):
'''Initialize.
Args:
container_path (string): Container path.
build_relpath (string): Relative build path.
'''
self.container_path = container_path
self.build_relpath = build_relpath
self.build_path = container_path + build_relpath
@concurrent.return_future
def build(self, build_ugid, res_path, callback=None):
'''Build environment.
Args:
build_ugid ((int, int)): Build UID/GID.
res_path (string): Resource path.
callback (function): Callback of return_future.
Returns:
None
'''
def _done_cb(task_id, stat):
'''Done callback.
Args:
task_id (int): Task ID.
stat (dict): Task result.
Returns:
None
'''
if stat['detect_error'] == PyExt.DETECT_NONE:
callback(True)
else:
callback(False)
build_uid, build_gid = build_ugid
# Prepare build environment.
FileUtils.copydir(res_path + '/check', self.build_path)
FileUtils.setperm(self.build_path, build_uid, build_gid)
with StackContext(Privilege.fullaccess):
os.chmod(self.build_path, mode=0o770)
with StackContext(Privilege.fileaccess):
if not os.path.isfile(self.build_path + '/build'):
callback(True)
return
# Make the build file executable.
with StackContext(Privilege.fullaccess):
os.chmod(self.build_path + '/build', mode=0o770)
# Build.
task_id = PyExt.create_task(self.build_relpath + '/build', \
[], \
[
'PATH=/usr/bin:/bin',
'TMPDIR=%s'%self.build_relpath,
'HOME=%s'%self.build_relpath,
'LANG=en_US.UTF-8'
], \
{
0: StdChal.null_fd,
1: StdChal.null_fd,
2: StdChal.null_fd,
}, \
self.build_relpath, 'container/standard', \
build_uid, build_gid, 60000, 1024 * 1024 * 1024, \
PyExt.RESTRICT_LEVEL_LOW)
if task_id is None:
callback(False)
else:
PyExt.start_task(task_id, _done_cb)
@concurrent.return_future
def judge(self, src_path, exe_relpath, argv, envp, check_ugid, test_ugid, \
test_relpath, test_param, metadata, callback=None):
'''I/O redirect special judge.
Args:
src_path (string): Executable source path.
exe_relpath (string): Executable or interpreter path in the sandbox.
argv ([string]): List of arguments.
envp ([string]): List of environment variables.
check_ugid (int, int): Check UID/GID.
test_ugid (int, int): Test UID/GID.
test_relpath (string): Test relative path.
test_param (dict): Test parameters.
metadata (dict): Metadata.
callback (function): Callback of return_future.
Returns:
None
'''
def _check_started_cb(task_id):
'''Check started callback.
Close unused file descriptors after the check is started.
Args:
task_id (int): Task ID.
Returns:
None
'''
nonlocal inpipe_fd
nonlocal outpipe_fd
nonlocal ansfile_fd
nonlocal check_infile_fd
os.close(inpipe_fd[1])
os.close(outpipe_fd[0])
if ansfile_fd is not None:
os.close(ansfile_fd)
if check_infile_fd is not None:
os.close(check_infile_fd)
def _test_started_cb(task_id):
'''Test started callback.
Close unused file descriptors after the test is started.
Args:
task_id (int): Task ID.
Returns:
None
'''
nonlocal inpipe_fd
nonlocal outpipe_fd
nonlocal outfile_fd
nonlocal test_infile_fd
os.close(inpipe_fd[0])
os.close(outpipe_fd[1])
os.close(outfile_fd)
if test_infile_fd is not None:
os.close(test_infile_fd)
def _done_cb():
'''Done callback.'''
nonlocal result_stat
nonlocal result_pass
nonlocal verdict_path
if result_pass is not None and result_stat is not None:
with StackContext(Privilege.fileaccess):
verfile = open(verdict_path, 'r')
verdict = verfile.read(140)
verfile.close()
callback((result_pass, result_stat, verdict))
return
def _check_done_cb(task_id, stat):
'''Check done callback.
Args:
task_id (int): Task ID.
stat (dict): Task result.
Returns:
None
'''
nonlocal result_pass
if stat['detect_error'] == PyExt.DETECT_NONE:
result_pass = True
else:
result_pass = False
_done_cb()
def _test_done_cb(task_id, stat):
'''Test done callback.
Args:
task_id (int): Task ID.
stat (dict): Task result.
Returns:
None
'''
nonlocal result_stat
result_stat = (stat['utime'], stat['peakmem'], stat['detect_error'])
_done_cb()
result_stat = None
result_pass = None
in_path = test_param['in']
ans_path = test_param['ans']
timelimit = test_param['timelimit']
memlimit = test_param['memlimit']
check_uid, check_gid = check_ugid
test_uid, test_gid = test_ugid
test_path = self.container_path + test_relpath
output_relpath = test_relpath + '/output.txt'
output_path = self.container_path + output_relpath
verdict_relpath = test_relpath + '/verdict.txt'
verdict_path = self.container_path + verdict_relpath
# Prepare test environment.
with StackContext(Privilege.fileaccess):
os.mkdir(test_path, mode=0o771)
shutil.copyfile(src_path, test_path + '/a.out', \
follow_symlinks=False)
with StackContext(Privilege.fullaccess):
os.chown(test_path + '/a.out', test_uid, test_gid)
os.chmod(test_path + '/a.out', 0o500)
# Prepare I/O.
with StackContext(Privilege.fileaccess):
try:
check_infile_fd = os.open(in_path, os.O_RDONLY | os.O_CLOEXEC)
test_infile_fd = os.open(in_path, os.O_RDONLY | os.O_CLOEXEC)
except (FileNotFoundError, TypeError):
check_infile_fd = None
test_infile_fd = None
try:
ansfile_fd = os.open(ans_path, os.O_RDONLY | os.O_CLOEXEC)
except (FileNotFoundError, TypeError):
ansfile_fd = None
outfile_fd = os.open(output_path, \
os.O_WRONLY | os.O_CREAT | os.O_CLOEXEC, mode=0o400)
os.close(os.open(verdict_path,
os.O_CREAT | os.O_CLOEXEC, mode=0o640))
with StackContext(Privilege.fullaccess):
os.chown(output_path, check_uid, check_gid)
os.chown(verdict_path, check_uid, check_gid)
inpipe_fd = os.pipe2(os.O_CLOEXEC)
outpipe_fd = os.pipe2(os.O_CLOEXEC)
# Set file descriptor mapping.
check_fdmap = {
0: StdChal.null_fd,
1: StdChal.null_fd,
2: StdChal.null_fd,
}
test_fdmap = {
0: StdChal.null_fd,
1: StdChal.null_fd,
2: StdChal.null_fd,
}
if check_infile_fd is not None:
check_fdmap[metadata['redir_check']['testin']] = check_infile_fd
if ansfile_fd is not None:
check_fdmap[metadata['redir_check']['ansin']] = ansfile_fd
check_fdmap[metadata['redir_check']['pipein']] = inpipe_fd[1]
check_fdmap[metadata['redir_check']['pipeout']] = outpipe_fd[0]
try:
del check_fdmap[-1]
except KeyError:
pass
if test_infile_fd is not None:
test_fdmap[metadata['redir_test']['testin']] = test_infile_fd
test_fdmap[metadata['redir_test']['testout']] = outfile_fd
test_fdmap[metadata['redir_test']['pipein']] = inpipe_fd[0]
test_fdmap[metadata['redir_test']['pipeout']] = outpipe_fd[1]
try:
del test_fdmap[-1]
except KeyError:
pass
check_task_id = PyExt.create_task(self.build_relpath + '/check', \
[], \
[
'PATH=/usr/bin:/bin',
'HOME=%s'%self.build_relpath,
'LANG=en_US.UTF-8',
'OUTPUT=%s'%output_relpath,
'VERDICT=%s'%verdict_relpath,
], \
check_fdmap, \
self.build_relpath, self.container_path, \
check_uid, check_gid, 60000, 1024 * 1024 * 1024, \
PyExt.RESTRICT_LEVEL_LOW)
if check_task_id is None:
callback((False, (0, 0, PyExt.DETECT_INTERNALERR), ''))
return
PyExt.start_task(check_task_id, _check_done_cb, _check_started_cb)
test_task_id = PyExt.create_task(exe_relpath, argv, envp, \
test_fdmap, \
test_relpath, self.container_path, \
test_uid, test_gid, timelimit, memlimit, \
PyExt.RESTRICT_LEVEL_HIGH)
if test_task_id is None:
callback((False, (0, 0, PyExt.DETECT_INTERNALERR), ''))
return
PyExt.start_task(test_task_id, _test_done_cb, _test_started_cb)
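# --- Simplified sketch (illustrative, not part of the module) of the
# streaming comparison performed in judge_diff._diff_out: program output
# arrives in chunks and is matched against the answer file without loading
# either side fully into memory.
def stream_diff(out_chunks, ansfile):
    '''Return True iff the concatenated chunks equal the file contents.'''
    for data in out_chunks:
        ansdata = ansfile.read(len(data))
        if data != ansdata:
            return False
    return len(ansfile.read(1)) == 0  # the answer must be exhausted too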
| [
[
[
41,
43
],
[
1656,
1658
],
[
1850,
1852
],
[
2417,
2419
],
[
2438,
2440
],
[
2450,
2452
],
[
5864,
5866
],
[
5952,
5954
],
[
5968,
5970
],
[
7993,
7995
],
[
8453,
8455
],
[
13088,
13090
],
[
13387,
13389
],
[
13444,
13446
],
[
13458,
13460
],
[
13471,
13473
],
[
14332,
14334
],
[
15389,
15391
],
[
17434,
17436
],
[
17732,
17734
],
[
17789,
17791
],
[
17803,
17805
],
[
17816,
17818
],
[
18491,
18493
],
[
22129,
22131
],
[
22146,
22148
],
[
22160,
22162
],
[
22238,
22240
],
[
22247,
22249
],
[
22311,
22313
],
[
22541,
22543
],
[
22737,
22739
],
[
22803,
22805
],
[
23249,
23251
],
[
23281,
23283
],
[
23317,
23319
],
[
25080,
25082
],
[
25187,
25189
],
[
25397,
25399
],
[
30216,
30218
],
[
30410,
30412
],
[
30473,
30475
],
[
30635,
30637
],
[
30652,
30654
],
[
30666,
30668
],
[
30713,
30715
],
[
30730,
30732
],
[
30744,
30746
],
[
30932,
30934
],
[
30950,
30952
],
[
30964,
30966
],
[
31088,
31090
],
[
31127,
31129
],
[
31141,
31143
],
[
31154,
31156
],
[
31192,
31194
],
[
31201,
31203
],
[
31239,
31241
],
[
31252,
31254
],
[
31340,
31342
],
[
31396,
31398
],
[
31462,
31464
],
[
31471,
31473
],
[
31506,
31508
],
[
31515,
31517
],
[
12299,
12301
],
[
16645,
16647
],
[
19691,
19693
],
[
19723,
19725
],
[
20870,
20872
],
[
21801,
21803
],
[
27397,
27399
],
[
27432,
27434
],
[
27511,
27513
],
[
27592,
27594
],
[
28030,
28032
],
[
28065,
28067
],
[
28101,
28103
],
[
28181,
28183
]
],
[
[
51,
57
],
[
1544,
1550
],
[
1737,
1743
],
[
4722,
4728
],
[
11692,
11698
],
[
13135,
13141
],
[
15157,
15163
],
[
17481,
17487
],
[
22586,
22592
],
[
30260,
30266
]
],
[
[
65,
70
],
[
22269,
22274
],
[
22296,
22301
]
],
[
[
88,
91
],
[
1914,
1917
]
],
[
[
112,
115
],
[
5724,
5727
],
[
6416,
6419
],
[
10588,
10591
]
],
[
[
117,
127
],
[
11778,
11788
],
[
14489,
14499
],
[
16122,
16132
],
[
18648,
18658
],
[
24106,
24116
],
[
26170,
26180
]
],
[
[
129,
136
],
[
6194,
6201
],
[
6306,
6313
],
[
6718,
6725
],
[
6825,
6832
]
],
[
[
171,
183
],
[
1475,
1487
],
[
2202,
2214
],
[
4670,
4682
],
[
6072,
6084
],
[
7937,
7949
],
[
8405,
8417
],
[
11640,
11652
],
[
13040,
13052
],
[
13326,
13338
],
[
15109,
15121
],
[
15341,
15353
],
[
17386,
17398
],
[
17671,
17683
],
[
22069,
22081
],
[
22431,
22443
],
[
22689,
22701
],
[
25032,
25044
],
[
25132,
25144
],
[
25349,
25361
],
[
30168,
30180
],
[
30362,
30374
],
[
30548,
30560
],
[
31292,
31304
],
[
12602,
12614
],
[
16948,
16960
],
[
28455,
28467
]
],
[
[
211,
217
],
[
19759,
19765
],
[
19833,
19839
],
[
19847,
19853
],
[
20773,
20779
],
[
21499,
21505
],
[
21746,
21752
]
],
[
[
225,
230
],
[
8926,
8931
],
[
10843,
10848
],
[
11046,
11051
],
[
11134,
11139
],
[
11189,
11194
],
[
11286,
11291
],
[
13673,
13678
],
[
14265,
14270
],
[
14375,
14380
],
[
14434,
14439
],
[
15440,
15445
],
[
15946,
15951
],
[
16023,
16028
],
[
16080,
16085
],
[
17861,
17866
],
[
18424,
18429
],
[
18534,
18539
],
[
18593,
18598
],
[
22861,
22866
],
[
23181,
23186
],
[
23405,
23410
],
[
23463,
23468
],
[
25482,
25487
],
[
26019,
26024
],
[
26128,
26133
],
[
32709,
32714
],
[
33176,
33181
],
[
33273,
33278
],
[
33332,
33337
],
[
33423,
33428
],
[
33610,
33615
],
[
33707,
33712
],
[
33766,
33771
],
[
24709,
24714
],
[
29029,
29034
]
],
[
[
238,
247
],
[
1488,
1497
],
[
2215,
2224
],
[
4683,
4692
],
[
6085,
6094
],
[
7871,
7880
],
[
7950,
7959
],
[
8418,
8427
],
[
11653,
11662
],
[
13053,
13062
],
[
13339,
13348
],
[
15122,
15131
],
[
15354,
15363
],
[
17399,
17408
],
[
17684,
17693
],
[
22082,
22091
],
[
22444,
22453
],
[
22702,
22711
],
[
25045,
25054
],
[
25145,
25154
],
[
25362,
25371
],
[
30181,
30190
],
[
30375,
30384
],
[
30561,
30570
],
[
31305,
31314
],
[
12615,
12624
],
[
16961,
16970
],
[
28468,
28477
]
],
[
[
255,
261
],
[
1276,
1282
],
[
1335,
1341
]
],
[
[
280,
289
],
[
7819,
7828
],
[
13244,
13253
],
[
15042,
15051
],
[
15263,
15272
],
[
17589,
17598
],
[
24898,
24907
],
[
24962,
24971
]
],
[
[
292,
303
]
],
[
[
308,
317
],
[
10937,
10946
]
],
[
[
322,
331
],
[
11006,
11015
]
],
[
[
336,
345
],
[
11337,
11346
]
],
[
[
350,
360
],
[
11245,
11255
]
],
[
[
365,
375
],
[
11093,
11103
]
],
[
[
380,
389
],
[
8976,
8985
]
],
[
[
394,
404
],
[
7764,
7774
],
[
10804,
10814
],
[
11398,
11408
]
],
[
[
410,
417
],
[
2353,
2360
]
],
[
[
433,
440
],
[
2399,
2406
],
[
2472,
2479
],
[
2505,
2512
],
[
2706,
2713
],
[
2753,
2760
],
[
2780,
2787
],
[
2974,
2981
],
[
3021,
3028
],
[
3048,
3055
],
[
3350,
3357
],
[
3728,
3735
],
[
3803,
3810
],
[
3854,
3861
],
[
3893,
3900
],
[
3951,
3958
],
[
4223,
4230
],
[
4549,
4556
],
[
4606,
4613
],
[
5273,
5280
],
[
5320,
5327
],
[
5614,
5621
],
[
5690,
5697
],
[
7001,
7008
],
[
7339,
7346
],
[
7396,
7403
],
[
8038,
8045
],
[
8196,
8203
],
[
10096,
10103
],
[
10165,
10172
],
[
11584,
11591
],
[
14007,
14014
],
[
14043,
14050
],
[
15683,
15690
],
[
15719,
15726
],
[
15755,
15762
],
[
18166,
18173
],
[
18202,
18209
],
[
21995,
22002
],
[
25782,
25789
],
[
25818,
25825
],
[
25854,
25861
],
[
31608,
31615
],
[
31640,
31647
],
[
31672,
31679
],
[
31737,
31744
],
[
31769,
31776
],
[
31801,
31808
]
],
[
[
23520,
23532
],
[
7171,
7183
],
[
7578,
7590
]
]
] |
# Pick n of the m sorted values so that the spread (max - min) is minimal.
n, m = map(int, input().split())
l = list(map(int, input().split()))
l.sort()
mini = l[m-1] - l[0]  # spread of the full list: a safe upper bound
for i in range(m-n+1):  # every window of n consecutive sorted values
    mini = min(mini, l[i+n-1] - l[i])
print(mini)
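# Example (illustrative): for the input
#   3 7
#   10 100 300 200 1000 20 30
# the sorted values are [10, 20, 30, 100, 200, 300, 1000]; the tightest
# window of 3 is [10, 20, 30], so the program prints 20.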
| [
[
[
0,
1
],
[
116,
117
],
[
147,
148
]
],
[
[
3,
4
],
[
87,
88
],
[
114,
115
]
],
[
[
33,
34
],
[
69,
70
],
[
85,
86
],
[
94,
95
],
[
143,
144
],
[
152,
153
]
],
[
[
78,
82
],
[
137,
141
],
[
164,
168
]
],
[
[
103,
104
],
[
145,
146
],
[
154,
155
]
],
[
[
126,
130
],
[
137,
141
],
[
164,
168
]
]
] |
# global
from typing import Union, Optional, Tuple, Literal
from collections import namedtuple
# local
import ivy
from ivy.framework_handler import current_framework as _cur_framework
inf = float('inf')
# Array API Standard #
# -------------------#
def matrix_transpose(x: Union[ivy.Array, ivy.NativeArray])\
-> ivy.Array:
"""
Transposes a matrix (or a stack of matrices) ``x``.
Parameters
----------
x: array
input array having shape ``(..., M, N)`` and whose innermost two dimensions form ``MxN`` matrices.
Returns
-------
out: array
an array containing the transpose for each matrix and having shape ``(..., N, M)``. The returned array must have the same data type as ``x``.
"""
return _cur_framework(x).matrix_transpose(x)
# noinspection PyShadowingBuiltins
def vector_norm(x: Union[ivy.Array, ivy.NativeArray],
axis: Optional[Union[int, Tuple[int]]] = None,
keepdims: bool = False,
ord: Union[int, float, Literal[inf, -inf]] = 2)\
-> ivy.Array:
"""
Computes the vector norm of a vector (or batch of vectors) ``x``.
Parameters
----------
x:
input array. Should have a floating-point data type.
axis:
If an integer, ``axis`` specifies the axis (dimension) along which to compute vector norms. If an n-tuple, ``axis`` specifies the axes (dimensions) along which to compute batched vector norms. If ``None``, the vector norm must be computed over all array values (i.e., equivalent to computing the vector norm of a flattened array). Negative indices must be supported. Default: ``None``.
keepdims:
If ``True``, the axes (dimensions) specified by ``axis`` must be included in the result as singleton dimensions, and, accordingly, the result must be compatible with the input array (see :ref:`broadcasting`). Otherwise, if ``False``, the axes (dimensions) specified by ``axis`` must not be included in the result. Default: ``False``.
ord:
order of the norm. The following mathematical norms must be supported:
+------------------+----------------------------+
| ord | description |
+==================+============================+
| 1 | L1-norm (Manhattan) |
+------------------+----------------------------+
| 2 | L2-norm (Euclidean) |
+------------------+----------------------------+
| inf | infinity norm |
+------------------+----------------------------+
| (int,float >= 1) | p-norm |
+------------------+----------------------------+
The following non-mathematical "norms" must be supported:
+------------------+--------------------------------+
| ord | description |
+==================+================================+
| 0 | sum(a != 0) |
+------------------+--------------------------------+
| -1 | 1./sum(1./abs(a)) |
+------------------+--------------------------------+
| -2 | 1./sqrt(sum(1./abs(a)\*\*2)) |
+------------------+--------------------------------+
| -inf | min(abs(a)) |
+------------------+--------------------------------+
| (int,float < 1) | sum(abs(a)\*\*ord)\*\*(1./ord) |
+------------------+--------------------------------+
Default: ``2``.
Returns
-------
out:
an array containing the vector norms. If ``axis`` is ``None``, the returned array must be a zero-dimensional array containing a vector norm. If ``axis`` is a scalar value (``int`` or ``float``), the returned array must have a rank which is one less than the rank of ``x``. If ``axis`` is a ``n``-tuple, the returned array must have a rank which is ``n`` less than the rank of ``x``. The returned array must have a floating-point data type determined by :ref:`type-promotion`.
"""
if ord == -float('inf'):
return ivy.reduce_min(ivy.abs(x), axis, keepdims)
elif ord == float('inf'):
return ivy.reduce_max(ivy.abs(x), axis, keepdims)
elif ord == 0:
return ivy.reduce_sum(ivy.cast(x != 0, 'float32'), axis, keepdims)
    # Per the docstring table, the p-norm is sum(abs(a)**ord)**(1./ord).
    x_raised = ivy.abs(x) ** ord
    return ivy.reduce_sum(x_raised, axis, keepdims) ** (1/ord)
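# --- Worked example (illustrative) for vector_norm on x = [3, -4]:
#   ord=1   -> |3| + |-4|          = 7
#   ord=2   -> sqrt(3**2 + 4**2)   = 5
#   ord=inf -> max(|3|, |-4|)      = 4
#   ord=0   -> count of non-zeros  = 2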
def svd(x: Union[ivy.Array, ivy.NativeArray], full_matrices: bool = True) -> Union[ivy.Array, Tuple[ivy.Array, ...]]:
"""
Singular Value Decomposition.
When x is a 2D array, it is factorized as u @ numpy.diag(s) @ vh = (u * s) @ vh, where u and vh are 2D unitary
arrays and s is a 1D array of a’s singular values. When x is higher-dimensional, SVD is applied in batched mode.
:param x: Input array with number of dimensions >= 2.
:type x: array
:return:
u -> { (…, M, M), (…, M, K) } array \n
Unitary array(s). The first (number of dims - 2) dimensions have the same size as those of the input a.
The size of the last two dimensions depends on the value of full_matrices.
s -> (…, K) array \n
        Vector(s) with the singular values, within each vector sorted in descending order.
The first (number of dims - 2) dimensions have the same size as those of the input a.
vh -> { (…, N, N), (…, K, N) } array \n
Unitary array(s). The first (number of dims - 2) dimensions have the same size as those of the input a.
The size of the last two dimensions depends on the value of full_matrices.
"""
return _cur_framework(x).svd(x,full_matrices)
def diagonal(x: ivy.Array,
offset: int = 0,
axis1: int = -2,
axis2: int = -1) -> ivy.Array:
"""
Returns the specified diagonals of a matrix (or a stack of matrices) ``x``.
Parameters
----------
x:
input array having shape ``(..., M, N)`` and whose innermost two dimensions form ``MxN`` matrices.
offset:
offset specifying the off-diagonal relative to the main diagonal.
- ``offset = 0``: the main diagonal.
- ``offset > 0``: off-diagonal above the main diagonal.
- ``offset < 0``: off-diagonal below the main diagonal.
Default: `0`.
    axis1:
        axis to be used as the first axis of the 2-D sub-arrays from which the diagonals should be taken.
        Defaults to the second-to-last axis (-2).
    axis2:
        axis to be used as the second axis of the 2-D sub-arrays from which the diagonals should be taken.
        Defaults to the last axis (-1).
Returns
-------
out:
an array containing the diagonals and whose shape is determined by removing the last two dimensions and appending a dimension equal to the size of the resulting diagonals. The returned array must have the same data type as ``x``.
"""
return _cur_framework(x).diagonal(x, offset, axis1=axis1, axis2=axis2)
def inv(x):
"""
Computes the (multiplicative) inverse of x matrix.
Given a square matrix x, returns the matrix x_inv satisfying dot(x, x_inv) = dot(x_inv, x) = eye(x.shape[0]).
:param x: Matrix to be inverted.
:type x: array
:return: (Multiplicative) inverse of the matrix x.
"""
return _cur_framework(x).inv(x)
def pinv(x):
"""
Computes the pseudo inverse of x matrix.
:param x: Matrix to be pseudo inverted.
:type x: array
:return: pseudo inverse of the matrix x.
"""
return _cur_framework(x).pinv(x)
def qr(x: ivy.Array,
mode: str = 'reduced') -> namedtuple('qr', ['Q', 'R']):
"""
Returns the qr decomposition x = QR of a full column rank matrix (or a stack of matrices), where Q is an orthonormal matrix (or a stack of matrices) and R is an upper-triangular matrix (or a stack of matrices).
Parameters
----------
x:
input array having shape (..., M, N) and whose innermost two dimensions form MxN matrices of rank N. Should have a floating-point data type.
mode:
decomposition mode. Should be one of the following modes:
- 'reduced': compute only the leading K columns of q, such that q and r have dimensions (..., M, K) and (..., K, N), respectively, and where K = min(M, N).
- 'complete': compute q and r with dimensions (..., M, M) and (..., M, N), respectively.
Default: 'reduced'.
Returns
-------
out:
a namedtuple (Q, R) whose
- first element must have the field name Q and must be an array whose shape depends on the value of mode and contain matrices with orthonormal columns. If mode is 'complete', the array must have shape (..., M, M). If mode is 'reduced', the array must have shape (..., M, K), where K = min(M, N). The first x.ndim-2 dimensions must have the same size as those of the input array x.
- second element must have the field name R and must be an array whose shape depends on the value of mode and contain upper-triangular matrices. If mode is 'complete', the array must have shape (..., M, N). If mode is 'reduced', the array must have shape (..., K, N), where K = min(M, N). The first x.ndim-2 dimensions must have the same size as those of the input x.
"""
return _cur_framework(x).qr(x, mode)
def matmul(x1: Union[ivy.Array, ivy.NativeArray],
x2: Union[ivy.Array, ivy.NativeArray]) -> ivy.Array:
"""
Computes the matrix product.
Parameters
----------
x1:
x1 (array) – first input array. Should have a numeric data type. Must have at least one dimension.
x2:
x2 (array) – second input array. Should have a numeric data type. Must have at least one dimension.
Returns
-------
out(array):
if both x1 and x2 are one-dimensional arrays having shape (N,), a zero-dimensional array containing the inner product as its only element.
if x1 is a two-dimensional array having shape (M, K) and x2 is a two-dimensional array having shape (K, N), a two-dimensional array containing the conventional matrix product and having shape (M, N).
if x1 is a one-dimensional array having shape (K,) and x2 is an array having shape (..., K, N), an array having shape (..., N) (i.e., prepended dimensions during vector-to-matrix promotion must be removed) and containing the conventional matrix product.
if x1 is an array having shape (..., M, K) and x2 is a one-dimensional array having shape (K,), an array having shape (..., M) (i.e., appended dimensions during vector-to-matrix promotion must be removed) and containing the conventional matrix product.
if x1 is a two-dimensional array having shape (M, K) and x2 is an array having shape (..., K, N), an array having shape (..., M, N) and containing the conventional matrix product for each stacked matrix.
if x1 is an array having shape (..., M, K) and x2 is a two-dimensional array having shape (K, N), an array having shape (..., M, N) and containing the conventional matrix product for each stacked matrix.
        if either x1 or x2 has more than two dimensions, an array having a shape determined by broadcasting shape(x1)[:-2] against shape(x2)[:-2] and containing the conventional matrix product for each stacked matrix.
Raises
------
if either x1 or x2 is a zero-dimensional array.
if x1 is a one-dimensional array having shape (K,), x2 is a one-dimensional array having shape (L,), and K != L.
if x1 is a one-dimensional array having shape (K,), x2 is an array having shape (..., L, N), and K != L.
if x1 is an array having shape (..., M, K), x2 is a one-dimensional array having shape (L,), and K != L.
if x1 is an array having shape (..., M, K), x2 is an array having shape (..., L, N), and K != L.
"""
return _cur_framework(x1).matmul(x1, x2)
def slodget(x: Union[ivy.Array, ivy.NativeArray],) \
-> ivy.Array:
"""
Computes the sign and natural logarithm of the determinant of an array.
Parameters
----------
x:
This is a 2D array, and it has to be square
    Returns
    -------
    out:
        This function returns two values:
sign:
A number representing the sign of the determinant.
logdet:
The natural log of the absolute value of the determinant.
"""
return _cur_framework(x).slodget(x)
def svdvals(x: Union[ivy.Array, ivy.NativeArray],) \
-> ivy.Array:
"""
Returns the singular values of a matrix (or a stack of matrices) ``x``.
Parameters
----------
x:
input array having shape ``(..., M, N)`` and whose innermost two dimensions form ``MxN`` matrices.
    Returns
    -------
    out:
array with shape ``(..., K)`` that contains the vector(s) of singular values of length ``K``, where K = min(M, N).
The values are sorted in descending order by magnitude.
"""
return _cur_framework(x).svdvals(x)
def trace(x: ivy.Array,
offset: int = 0)\
-> ivy.Array:
"""
Computes the sum of the diagonal of an array.
Parameters
----------
x:
This is an array.
    Returns
    -------
    out:
        the sum of the diagonal elements of ``x``, taken along the diagonal
        selected by ``offset``.
"""
return _cur_framework(x).trace(x, offset)
# Extra #
# ------#
| [
[
[
28,
33
],
[
277,
282
],
[
851,
856
],
[
917,
922
],
[
1010,
1015
],
[
4593,
4598
],
[
4531,
4536
],
[
9399,
9404
],
[
9450,
9455
],
[
11971,
11976
],
[
12527,
12532
]
],
[
[
35,
43
],
[
908,
916
]
],
[
[
45,
50
],
[
928,
933
],
[
4610,
4615
]
],
[
[
52,
59
],
[
1028,
1035
]
],
[
[
84,
94
],
[
7690,
7700
]
],
[
[
111,
114
],
[
324,
327
],
[
283,
286
],
[
294,
297
],
[
1065,
1068
],
[
857,
860
],
[
868,
871
],
[
4207,
4210
],
[
4222,
4225
],
[
4295,
4298
],
[
4310,
4313
],
[
4372,
4375
],
[
4387,
4390
],
[
4467,
4470
],
[
4599,
4602
],
[
4616,
4619
],
[
4537,
4540
],
[
4547,
4550
],
[
5878,
5881
],
[
5774,
5777
],
[
7646,
7649
],
[
9488,
9491
],
[
9405,
9408
],
[
9416,
9419
],
[
9456,
9459
],
[
9467,
9470
],
[
12024,
12027
],
[
11977,
11980
],
[
11988,
11991
],
[
12580,
12583
],
[
12533,
12536
],
[
12544,
12547
],
[
13167,
13170
],
[
13110,
13113
]
],
[
[
149,
184
],
[
757,
771
],
[
5717,
5731
],
[
7001,
7015
],
[
7387,
7401
],
[
7608,
7622
],
[
9350,
9364
],
[
11918,
11932
],
[
12481,
12495
],
[
13066,
13080
],
[
13474,
13488
]
],
[
[
185,
188
],
[
1036,
1039
],
[
1042,
1045
]
],
[
[
257,
273
]
],
[
[
836,
847
]
],
[
[
4525,
4528
]
],
[
[
5762,
5770
]
],
[
[
7071,
7074
]
],
[
[
7418,
7422
]
],
[
[
7640,
7642
]
],
[
[
9388,
9394
]
],
[
[
11960,
11967
]
],
[
[
12516,
12523
]
],
[
[
13101,
13106
]
]
] |
from django.contrib.postgres.fields import JSONField
from django.db import models
from surfsara.models.permission import Permission
class Task(models.Model):
RUNNING = "running"
SUCCESS = "success"
ERROR = "error"
OUTPUT_RELEASED = "output_released"
RELEASE_REJECTED = "release_rejected"
TASK_STATES = (
(RUNNING, "Running"),
(SUCCESS, "Success"),
(ERROR, "Error"),
(OUTPUT_RELEASED, "Output Released"),
(RELEASE_REJECTED, "Release Rejected"),
)
id = models.AutoField(primary_key=True)
state = models.CharField(max_length=255, choices=TASK_STATES)
progress_state = JSONField(null=True)
author_email = models.EmailField()
approver_email = models.EmailField()
algorithm = models.TextField()
algorithm_storage = models.TextField()
dataset = models.TextField()
dataset_storage = models.TextField()
output = models.TextField(null=True)
review_output = models.BooleanField(default=True)
permission = models.ForeignKey(Permission, null=True, on_delete=models.SET_NULL)
registered_on = models.DateTimeField(auto_now_add=True)
updated_on = models.DateTimeField(auto_now=True)
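# --- Usage sketch (illustrative; field values are placeholders and assume
# the app's migrations have been applied):
# task = Task.objects.create(
#     state=Task.RUNNING,
#     author_email='author@example.com',
#     approver_email='approver@example.com',
#     algorithm='print(1)', algorithm_storage='local',
#     dataset='data.csv', dataset_storage='local',
# )
# task.state = Task.SUCCESS
# task.save()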
| [
[
[
43,
52
],
[
649,
658
]
],
[
[
75,
81
],
[
145,
151
],
[
527,
533
],
[
574,
580
],
[
689,
695
],
[
730,
736
],
[
766,
772
],
[
809,
815
],
[
842,
848
],
[
883,
889
],
[
915,
921
],
[
963,
969
],
[
1014,
1020
],
[
1065,
1071
],
[
1102,
1108
],
[
1159,
1165
]
],
[
[
121,
131
],
[
1032,
1042
]
],
[
[
140,
144
]
]
] |
# Author: Tom Dupre la Tour
# Joan Massich <mailsik@gmail.com>
#
# License: BSD 3 clause
import numpy as np
import pytest
import scipy.sparse as sp
from numpy.testing import assert_array_equal
from sklearn.utils._seq_dataset import (
ArrayDataset32, ArrayDataset64, CSRDataset32, CSRDataset64)
from sklearn.datasets import load_iris
from sklearn.utils._testing import assert_allclose
iris = load_iris()
X64 = iris.data.astype(np.float64)
y64 = iris.target.astype(np.float64)
X_csr64 = sp.csr_matrix(X64)
sample_weight64 = np.arange(y64.size, dtype=np.float64)
X32 = iris.data.astype(np.float32)
y32 = iris.target.astype(np.float32)
X_csr32 = sp.csr_matrix(X32)
sample_weight32 = np.arange(y32.size, dtype=np.float32)
def assert_csr_equal_values(current, expected):
current.eliminate_zeros()
expected.eliminate_zeros()
expected = expected.astype(current.dtype)
assert current.shape[0] == expected.shape[0]
assert current.shape[1] == expected.shape[1]
assert_array_equal(current.data, expected.data)
assert_array_equal(current.indices, expected.indices)
assert_array_equal(current.indptr, expected.indptr)
def make_dense_dataset_32():
return ArrayDataset32(X32, y32, sample_weight32, seed=42)
def make_dense_dataset_64():
return ArrayDataset64(X64, y64, sample_weight64, seed=42)
def make_sparse_dataset_32():
return CSRDataset32(X_csr32.data, X_csr32.indptr, X_csr32.indices, y32,
sample_weight32, seed=42)
def make_sparse_dataset_64():
return CSRDataset64(X_csr64.data, X_csr64.indptr, X_csr64.indices, y64,
sample_weight64, seed=42)
@pytest.mark.parametrize('dataset_constructor', [
make_dense_dataset_32,
make_dense_dataset_64,
make_sparse_dataset_32,
make_sparse_dataset_64,
])
def test_seq_dataset_basic_iteration(dataset_constructor):
NUMBER_OF_RUNS = 5
dataset = dataset_constructor()
for _ in range(NUMBER_OF_RUNS):
# next sample
xi_, yi, swi, idx = dataset._next_py()
xi = sp.csr_matrix((xi_), shape=(1, X64.shape[1]))
assert_csr_equal_values(xi, X_csr64[idx])
assert yi == y64[idx]
assert swi == sample_weight64[idx]
# random sample
xi_, yi, swi, idx = dataset._random_py()
xi = sp.csr_matrix((xi_), shape=(1, X64.shape[1]))
assert_csr_equal_values(xi, X_csr64[idx])
assert yi == y64[idx]
assert swi == sample_weight64[idx]
@pytest.mark.parametrize('make_dense_dataset,make_sparse_dataset', [
(make_dense_dataset_32, make_sparse_dataset_32),
(make_dense_dataset_64, make_sparse_dataset_64),
])
def test_seq_dataset_shuffle(make_dense_dataset, make_sparse_dataset):
dense_dataset, sparse_dataset = make_dense_dataset(), make_sparse_dataset()
# not shuffled
for i in range(5):
_, _, _, idx1 = dense_dataset._next_py()
_, _, _, idx2 = sparse_dataset._next_py()
assert idx1 == i
assert idx2 == i
for i in [132, 50, 9, 18, 58]:
_, _, _, idx1 = dense_dataset._random_py()
_, _, _, idx2 = sparse_dataset._random_py()
assert idx1 == i
assert idx2 == i
seed = 77
dense_dataset._shuffle_py(seed)
sparse_dataset._shuffle_py(seed)
idx_next = [63, 91, 148, 87, 29]
idx_shuffle = [137, 125, 56, 121, 127]
for i, j in zip(idx_next, idx_shuffle):
_, _, _, idx1 = dense_dataset._next_py()
_, _, _, idx2 = sparse_dataset._next_py()
assert idx1 == i
assert idx2 == i
_, _, _, idx1 = dense_dataset._random_py()
_, _, _, idx2 = sparse_dataset._random_py()
assert idx1 == j
assert idx2 == j
@pytest.mark.parametrize('make_dataset_32,make_dataset_64', [
(make_dense_dataset_32, make_dense_dataset_64),
(make_sparse_dataset_32, make_sparse_dataset_64),
])
def test_fused_types_consistency(make_dataset_32, make_dataset_64):
dataset_32, dataset_64 = make_dataset_32(), make_dataset_64()
NUMBER_OF_RUNS = 5
for _ in range(NUMBER_OF_RUNS):
# next sample
(xi_data32, _, _), yi32, _, _ = dataset_32._next_py()
(xi_data64, _, _), yi64, _, _ = dataset_64._next_py()
assert xi_data32.dtype == np.float32
assert xi_data64.dtype == np.float64
assert_allclose(xi_data64, xi_data32, rtol=1e-5)
assert_allclose(yi64, yi32, rtol=1e-5)
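# --- Note (illustrative): each dataset's _next_py() yields one sample as
# ((data, indices, indptr), y, sample_weight, idx); the triple can be passed
# directly to sp.csr_matrix with shape (1, n_features), as the tests above do.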
| [
[
[
105,
116
],
[
441,
443
],
[
478,
480
],
[
537,
539
],
[
563,
565
],
[
599,
601
],
[
636,
638
],
[
695,
697
],
[
721,
723
],
[
4258,
4260
],
[
4303,
4305
]
],
[
[
124,
130
],
[
1659,
1665
],
[
2487,
2493
],
[
3714,
3720
]
],
[
[
138,
156
],
[
500,
502
],
[
658,
660
],
[
2057,
2059
],
[
2314,
2316
]
],
[
[
183,
201
],
[
992,
1010
],
[
1044,
1062
],
[
1102,
1120
]
],
[
[
247,
261
],
[
1196,
1210
]
],
[
[
263,
277
],
[
1289,
1303
]
],
[
[
279,
291
],
[
1383,
1395
]
],
[
[
293,
305
],
[
1541,
1553
]
],
[
[
337,
346
],
[
406,
415
]
],
[
[
382,
397
],
[
4323,
4338
],
[
4380,
4395
]
],
[
[
399,
403
],
[
424,
428
],
[
459,
463
],
[
582,
586
],
[
617,
621
]
],
[
[
418,
421
],
[
514,
517
],
[
1304,
1307
],
[
2088,
2091
],
[
2345,
2348
]
],
[
[
453,
456
],
[
547,
550
],
[
1309,
1312
],
[
1601,
1604
],
[
2175,
2178
],
[
2432,
2435
]
],
[
[
490,
497
],
[
1554,
1561
],
[
1568,
1575
],
[
1584,
1591
],
[
2140,
2147
],
[
2397,
2404
]
],
[
[
519,
534
],
[
1314,
1329
],
[
1630,
1645
],
[
2206,
2221
],
[
2463,
2478
]
],
[
[
576,
579
],
[
672,
675
],
[
1211,
1214
]
],
[
[
611,
614
],
[
705,
708
],
[
1216,
1219
],
[
1443,
1446
]
],
[
[
648,
655
],
[
1396,
1403
],
[
1410,
1417
],
[
1426,
1433
]
],
[
[
677,
692
],
[
1221,
1236
],
[
1472,
1487
]
],
[
[
739,
762
],
[
2112,
2135
],
[
2369,
2392
]
],
[
[
1160,
1181
],
[
1712,
1733
],
[
2560,
2581
],
[
3780,
3801
]
],
[
[
1253,
1274
],
[
1739,
1760
],
[
2613,
2634
],
[
3803,
3824
]
],
[
[
1346,
1368
],
[
1766,
1788
],
[
2583,
2605
],
[
3832,
3854
]
],
[
[
1504,
1526
],
[
1794,
1816
],
[
2636,
2658
],
[
3856,
3878
]
],
[
[
1825,
1857
]
],
[
[
2668,
2692
]
],
[
[
3888,
3916
]
]
] |
#!/usr/bin/env python
# -*- coding:utf-8 -*-
# bug-report: feilengcui008@gmail.com
""" api for docker registry """
import urllib2
import urllib
import json
import base64
class RegistryException(Exception):
""" registry api related exception """
pass
class RegistryApi(object):
""" interact with docker registry and harbor """
def __init__(self, username, password, registry_endpoint):
self.username = username
self.password = password
self.basic_token = base64.encodestring("%s:%s" % (str(username), str(password)))[0:-1]
self.registry_endpoint = registry_endpoint.rstrip('/')
#print("%s/v2/_catalog" % (self.registry_endpoint,))
auth = self.pingRegistry("%s/v2/_catalog" % (self.registry_endpoint,))
if auth is None:
raise RegistryException("get token realm and service failed")
self.token_endpoint = auth[0]
self.service = auth[1]
def pingRegistry(self, registry_endpoint):
""" ping v2 registry and get realm and service """
headers = dict()
try:
res = urllib2.urlopen(registry_endpoint)
except urllib2.HTTPError as e:
headers = e.hdrs.dict
try:
(realm, service, _) = headers['www-authenticate'].split(',')
return (realm[14:-1:], service[9:-1])
except Exception as e:
return None
def getBearerTokenForScope(self, scope):
""" get bearer token from harbor """
payload = urllib.urlencode({'service': self.service, 'scope': scope})
url = "%s?%s" % (self.token_endpoint, payload)
req = urllib2.Request(url)
req.add_header('Authorization', 'Basic %s' % (self.basic_token,))
try:
response = urllib2.urlopen(req)
return json.loads(response.read())["token"]
except Exception as e:
return None
def getRepositoryList(self, n=None):
""" get repository list """
scope = "registry:catalog:*"
bear_token = self.getBearerTokenForScope(scope)
if bear_token is None:
return None
url = "%s/v2/_catalog" % (self.registry_endpoint,)
if n is not None:
url = "%s?n=%s" % (url, str(n))
req = urllib2.Request(url)
req.add_header('Authorization', r'Bearer %s' % (bear_token,))
try:
response = urllib2.urlopen(req)
return json.loads(response.read())
except Exception as e:
return None
def getTagList(self, repository):
""" get tag list for repository """
scope = "repository:%s:pull" % (repository,)
bear_token = self.getBearerTokenForScope(scope)
if bear_token is None:
return None
url = "%s/v2/%s/tags/list" % (self.registry_endpoint, repository)
req = urllib2.Request(url)
req.add_header('Authorization', r'Bearer %s' % (bear_token,))
try:
response = urllib2.urlopen(req)
return json.loads(response.read())
except Exception as e:
return None
def getManifest(self, repository, reference="latest", v1=False):
""" get manifest for tag or digest """
scope = "repository:%s:pull" % (repository,)
bear_token = self.getBearerTokenForScope(scope)
if bear_token is None:
return None
url = "%s/v2/%s/manifests/%s" % (self.registry_endpoint, repository, reference)
req = urllib2.Request(url)
req.get_method = lambda: 'GET'
req.add_header('Authorization', r'Bearer %s' % (bear_token,))
req.add_header('Accept', 'application/vnd.docker.distribution.manifest.v2+json')
if v1:
req.add_header('Accept', 'application/vnd.docker.distribution.manifest.v1+json')
try:
response = urllib2.urlopen(req)
return json.loads(response.read())
except Exception as e:
return None
def existManifest(self, repository, reference, v1=False):
""" check to see it manifest exist """
scope = "repository:%s:pull" % (repository,)
bear_token = self.getBearerTokenForScope(scope)
if bear_token is None:
raise RegistryException("manifestExist failed due to token error")
url = "%s/v2/%s/manifests/%s" % (self.registry_endpoint, repository, reference)
req = urllib2.Request(url)
req.get_method = lambda: 'HEAD'
req.add_header('Authorization', r'Bearer %s' % (bear_token,))
req.add_header('Accept', 'application/vnd.docker.distribution.manifest.v2+json')
if v1:
req.add_header('Accept', 'application/vnd.docker.distribution.manifest.v1+json')
try:
response = urllib2.urlopen(req)
return (True, response.headers.dict["docker-content-digest"])
except Exception as e:
return (False, None)
def deleteManifest(self, repository, reference):
""" delete manifest by tag """
(is_exist, digest) = self.existManifest(repository, reference)
if not is_exist:
raise RegistryException("manifest not exist")
scope = "repository:%s:pull,push" % (repository,)
bear_token = self.getBearerTokenForScope(scope)
if bear_token is None:
raise RegistryException("delete manifest failed due to token error")
url = "%s/v2/%s/manifests/%s" % (self.registry_endpoint, repository, digest)
req = urllib2.Request(url)
req.get_method = lambda: 'DELETE'
req.add_header('Authorization', r'Bearer %s' % (bear_token,))
try:
urllib2.urlopen(req)
except Exception as e:
return False
return True
def getManifestWithConf(self, repository, reference="latest"):
""" get manifest for tag or digest """
manifest = self.getManifest(repository, reference)
if manifest is None:
raise RegistryException("manifest for %s %s not exist" % (repository, reference))
config_digest = manifest["config"]["digest"]
scope = "repository:%s:pull" % (repository,)
bear_token = self.getBearerTokenForScope(scope)
if bear_token is None:
return None
url = "%s/v2/%s/blobs/%s" % (self.registry_endpoint, repository, config_digest)
req = urllib2.Request(url)
req.get_method = lambda: 'GET'
req.add_header('Authorization', r'Bearer %s' % (bear_token,))
req.add_header('Accept', 'application/vnd.docker.distribution.manifest.v2+json')
try:
response = urllib2.urlopen(req)
manifest["configContent"] = json.loads(response.read())
return manifest
except Exception as e:
return None
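# Usage sketch (the enclosing class name and constructor are not shown above,
# so `HarborRegistry(...)` below is a hypothetical stand-in):
#
# registry = HarborRegistry('https://harbor.example.com', basic_token)
# repos = registry.getRepositoryList(n=10)
# if repos:
#     for repo in repos['repositories']:
#         print(repo, registry.getTagList(repo))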
| [
[
[
124,
131
],
[
1102,
1109
],
[
1152,
1159
],
[
1639,
1646
],
[
1770,
1777
],
[
2271,
2278
],
[
2398,
2405
],
[
2856,
2863
],
[
2983,
2990
],
[
3489,
3496
],
[
3852,
3859
],
[
4406,
4413
],
[
4770,
4777
],
[
5501,
5508
],
[
5659,
5666
],
[
6372,
6379
],
[
6627,
6634
]
],
[
[
139,
145
],
[
1510,
1516
]
],
[
[
153,
157
],
[
1810,
1814
],
[
2438,
2442
],
[
3023,
3027
],
[
3892,
3896
],
[
6688,
6692
]
],
[
[
165,
171
],
[
500,
506
]
],
[
[
180,
197
],
[
814,
831
],
[
4243,
4260
],
[
5136,
5153
],
[
5339,
5356
],
[
5977,
5994
]
],
[
[
270,
281
]
]
] |
#!/usr/bin/env python3
#
# Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Camera image classification demo code.
Runs continuous image classification on camera frames and prints detected object
classes.
Example:
image_classification_camera.py --num_frames 10
"""
import argparse
import contextlib
from aiy.vision.inference import CameraInference
from aiy.vision.models import image_classification
from picamera import PiCamera
def classes_info(classes):
return ', '.join('%s (%.2f)' % pair for pair in classes)
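# e.g. classes_info([('cat', 0.87), ('dog', 0.05)]) -> 'cat (0.87), dog (0.05)'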
@contextlib.contextmanager
def CameraPreview(camera, enabled):
if enabled:
camera.start_preview()
try:
yield
finally:
if enabled:
camera.stop_preview()
def main():
parser = argparse.ArgumentParser('Image classification camera inference example.')
parser.add_argument('--num_frames', '-n', type=int, default=None,
help='Sets the number of frames to run for, otherwise runs forever.')
parser.add_argument('--num_objects', '-c', type=int, default=3,
help='Sets the number of object inferences to print.')
parser.add_argument('--nopreview', dest='preview', action='store_false', default=True,
help='Disable camera preview')
args = parser.parse_args()
with PiCamera(sensor_mode=4, framerate=30) as camera, \
CameraPreview(camera, enabled=args.preview), \
CameraInference(image_classification.model()) as inference:
for result in inference.run(args.num_frames):
classes = image_classification.get_classes(result, top_k=args.num_objects)
print(classes_info(classes))
if classes:
camera.annotate_text = '%s (%.2f)' % classes[0]
if __name__ == '__main__':
main()
| [
[
[
801,
809
],
[
1275,
1283
]
],
[
[
817,
827
],
[
1050,
1060
]
],
[
[
862,
877
],
[
1914,
1929
]
],
[
[
908,
928
],
[
1930,
1950
],
[
2050,
2070
]
],
[
[
950,
958
],
[
1798,
1806
]
],
[
[
964,
976
],
[
2133,
2145
]
],
[
[
1080,
1093
],
[
1858,
1871
]
],
[
[
1254,
1258
],
[
2276,
2280
]
]
] |
#
# Copyright (c) 2016-2021 Deephaven Data Labs and Patent Pending
#
""" This module provides access to the Deephaven server configuration. """
import jpy
from deephaven import DHError
from deephaven.time import TimeZone
_JDHConfig = jpy.get_type("io.deephaven.configuration.Configuration")
_JDateTimeZone = jpy.get_type("org.joda.time.DateTimeZone")
def get_log_dir() -> str:
""" Returns the server's log directory. """
try:
return _JDHConfig.getInstance().getLogDir()
except Exception as e:
raise DHError(e, "failed to get the server's log directory.") from e
def get_server_timezone() -> TimeZone:
""" Returns the server's time zone. """
try:
j_timezone = _JDateTimeZone.forTimeZone(_JDHConfig.getInstance().getServerTimezone())
for tz in TimeZone:
if j_timezone == tz.value.getTimeZone():
return tz
raise NotImplementedError("can't find the time zone in the TImeZone Enum.")
except Exception as e:
raise DHError(e, message=f"failed to find a recognized time zone") from e
| [
[
[
153,
156
],
[
238,
241
],
[
312,
315
]
],
[
[
180,
187
],
[
533,
540
],
[
1016,
1023
]
],
[
[
215,
223
],
[
627,
635
],
[
802,
810
]
],
[
[
225,
235
],
[
455,
465
],
[
738,
748
]
],
[
[
295,
309
],
[
711,
725
]
],
[
[
361,
372
]
],
[
[
602,
621
]
]
] |
import torch
from torch import nn, optim, multiprocessing
from torch.utils.data import DataLoader
from torch.utils.tensorboard.writer import SummaryWriter
from tqdm import tqdm
from time import time
from collections import defaultdict
from utils.run_utils import get_logger
from utils.train_utils import CheckpointManager, make_k_grid, make_img_grid, make_rss_slice, standardize_image
from data.data_transforms import complex_abs
from metrics.new_1d_ssim import SSIM
from metrics.custom_losses import psnr, nmse
# Send this somewhere else soon...
def get_class_name(obj):
return 'None' if obj is None else str(obj.__class__).split("'")[1]
class ModelTrainerIMG:
"""
Model trainer for real-valued image domain losses.
This model trainer can accept k-space and semi-k-space data, regardless of weighting.
Both complex and real-valued image domain losses can be calculated.
"""
def __init__(self, args, model, optimizer, train_loader, val_loader, input_train_transform, input_val_transform,
output_train_transform, output_val_transform, losses, scheduler=None):
# Allow multiple processes to access tensors on GPU. Add checking for multiple continuous runs.
if multiprocessing.get_start_method(allow_none=True) is None:
multiprocessing.set_start_method(method='spawn')
self.logger = get_logger(name=__name__, save_file=args.log_path / args.run_name)
# Checking whether inputs are correct.
assert isinstance(model, nn.Module), '`model` must be a PyTorch Module.'
assert isinstance(optimizer, optim.Optimizer), '`optimizer` must be a PyTorch Optimizer.'
assert isinstance(train_loader, DataLoader) and isinstance(val_loader, DataLoader), \
'`train_loader` and `val_loader` must be PyTorch DataLoader objects.'
assert callable(input_train_transform) and callable(input_val_transform), \
'input_transforms must be callable functions.'
# I think this would be best practice.
assert isinstance(output_train_transform, nn.Module) and isinstance(output_val_transform, nn.Module), \
'`output_train_transform` and `output_val_transform` must be PyTorch Modules.'
# 'losses' is expected to be a dictionary.
# Even composite losses should be a single loss module with a tuple as its output.
losses = nn.ModuleDict(losses)
if scheduler is not None:
if isinstance(scheduler, optim.lr_scheduler.ReduceLROnPlateau):
self.metric_scheduler = True
elif isinstance(scheduler, optim.lr_scheduler._LRScheduler):
self.metric_scheduler = False
else:
raise TypeError('`scheduler` must be a PyTorch learning-rate scheduler.')
# Display interval of 0 means no display of validation images on TensorBoard.
if args.max_images <= 0:
self.display_interval = 0
else:
self.display_interval = int(len(val_loader.dataset) // (args.max_images * args.batch_size))
self.manager = CheckpointManager(model, optimizer, mode='min', save_best_only=args.save_best_only,
ckpt_dir=args.ckpt_path, max_to_keep=args.max_to_keep)
# loading from checkpoint if specified.
if vars(args).get('prev_model_ckpt'):
self.manager.load(load_dir=args.prev_model_ckpt, load_optimizer=False)
self.model = model
self.optimizer = optimizer
self.train_loader = train_loader
self.val_loader = val_loader
self.input_train_transform = input_train_transform
self.input_val_transform = input_val_transform
self.output_train_transform = output_train_transform
self.output_val_transform = output_val_transform
self.losses = losses
self.scheduler = scheduler
self.writer = SummaryWriter(str(args.log_path))
self.verbose = args.verbose
self.num_epochs = args.num_epochs
self.smoothing_factor = args.smoothing_factor
self.shrink_scale = args.shrink_scale
self.use_slice_metrics = args.use_slice_metrics
# This part should get SSIM, not 1 - SSIM.
self.ssim = SSIM(filter_size=7).to(device=args.device) # Needed to cache the kernel.
# Logging all components of the Model Trainer.
# Train and Val input and output transforms are assumed to use the same input transform class.
self.logger.info(f'''
Summary of Model Trainer Components:
Model: {get_class_name(model)}.
Optimizer: {get_class_name(optimizer)}.
Input Transforms: {get_class_name(input_val_transform)}.
Output Transform: {get_class_name(output_val_transform)}.
Image Domain Loss: {get_class_name(losses['img_loss'])}.
Learning-Rate Scheduler: {get_class_name(scheduler)}.
''')  # Note: this summary differs slightly between the IMG and CMG loss trainers.
def train_model(self):
tic_tic = time()
self.logger.info('Beginning Training Loop.')
for epoch in range(1, self.num_epochs + 1): # 1 based indexing of epochs.
tic = time() # Training
train_epoch_loss, train_epoch_metrics = self._train_epoch(epoch=epoch)
toc = int(time() - tic)
self._log_epoch_outputs(epoch, train_epoch_loss, train_epoch_metrics, elapsed_secs=toc, training=True)
tic = time() # Validation
val_epoch_loss, val_epoch_metrics = self._val_epoch(epoch=epoch)
toc = int(time() - tic)
self._log_epoch_outputs(epoch, val_epoch_loss, val_epoch_metrics, elapsed_secs=toc, training=False)
self.manager.save(metric=val_epoch_loss, verbose=True)
if self.scheduler is not None:
if self.metric_scheduler: # If the scheduler is a metric based scheduler, include metrics.
self.scheduler.step(metrics=val_epoch_loss)
else:
self.scheduler.step()
self.writer.close() # Flushes remaining data to TensorBoard.
toc_toc = int(time() - tic_tic)
self.logger.info(f'Finishing Training Loop. Total elapsed time: '
f'{toc_toc // 3600} hr {(toc_toc // 60) % 60} min {toc_toc % 60} sec.')
def _train_epoch(self, epoch):
self.model.train()
torch.autograd.set_grad_enabled(True)
epoch_loss = list() # Appending values to list due to numerical underflow and NaN values.
epoch_metrics = defaultdict(list)
data_loader = enumerate(self.train_loader, start=1)
if not self.verbose: # tqdm has to be on the outermost iterator to function properly.
data_loader = tqdm(data_loader, total=len(self.train_loader.dataset)) # Should divide by batch size.
for step, data in data_loader:
# Data pre-processing is expected to have gradient calculations removed inside already.
inputs, targets, extra_params = self.input_train_transform(*data)
# 'recons' is a dictionary containing k-space, complex image, and real image reconstructions.
recons, step_loss, step_metrics = self._train_step(inputs, targets, extra_params)
epoch_loss.append(step_loss.detach()) # Perhaps not elegant, but underflow makes this necessary.
# Gradients are not calculated so as to boost speed and remove weird errors.
with torch.no_grad(): # Update epoch loss and metrics
if self.use_slice_metrics:
slice_metrics = self._get_slice_metrics(recons, targets, extra_params)
step_metrics.update(slice_metrics)
[epoch_metrics[key].append(value.detach()) for key, value in step_metrics.items()]
if self.verbose:
self._log_step_outputs(epoch, step, step_loss, step_metrics, training=True)
# Converted to scalar and dict with scalar values respectively.
return self._get_epoch_outputs(epoch, epoch_loss, epoch_metrics, training=True)
def _train_step(self, inputs, targets, extra_params):
self.optimizer.zero_grad()
outputs = self.model(inputs)
recons = self.output_train_transform(outputs, targets, extra_params)
step_loss, step_metrics = self._step(recons, targets, extra_params)
step_loss.backward()
self.optimizer.step()
return recons, step_loss, step_metrics
def _val_epoch(self, epoch):
self.model.eval()
torch.autograd.set_grad_enabled(False)
epoch_loss = list()
epoch_metrics = defaultdict(list)
# 1 based indexing for steps.
data_loader = enumerate(self.val_loader, start=1)
if not self.verbose:
data_loader = tqdm(data_loader, total=len(self.val_loader.dataset))
for step, data in data_loader:
inputs, targets, extra_params = self.input_val_transform(*data)
recons, step_loss, step_metrics = self._val_step(inputs, targets, extra_params)
epoch_loss.append(step_loss.detach())
if self.use_slice_metrics:
slice_metrics = self._get_slice_metrics(recons, targets, extra_params)
step_metrics.update(slice_metrics)
[epoch_metrics[key].append(value.detach()) for key, value in step_metrics.items()]
if self.verbose:
self._log_step_outputs(epoch, step, step_loss, step_metrics, training=False)
# Visualize images on TensorBoard.
self._visualize_images(recons, targets, extra_params, epoch, step, training=False)
# Converted to scalar and dict with scalar values respectively.
return self._get_epoch_outputs(epoch, epoch_loss, epoch_metrics, training=False)
def _val_step(self, inputs, targets, extra_params):
outputs = self.model(inputs)
recons = self.output_val_transform(outputs, targets, extra_params)
step_loss, step_metrics = self._step(recons, targets, extra_params)
return recons, step_loss, step_metrics
def _step(self, recons, targets, extra_params):
step_loss = self.losses['img_loss'](recons['img_recons'], targets['img_targets'])
# If img_loss is a tuple, it is expected to contain all its component losses as a dict in its second element.
step_metrics = dict()
if isinstance(step_loss, tuple):
step_loss, step_metrics = step_loss
acc = extra_params["acceleration"]
if step_metrics: # This has to be checked before anything is added to step_metrics.
for key, value in step_metrics.items():
step_metrics[f'acc_{acc}_{key}'] = value
step_metrics[f'acc_{acc}_loss'] = step_loss
return step_loss, step_metrics
def _visualize_images(self, recons, targets, extra_params, epoch, step, training=False):
mode = 'Training' if training else 'Validation'
# This numbering scheme seems to have issues for certain numbers.
# Please check cases when there is no remainder.
if self.display_interval and (step % self.display_interval == 0):
img_recon_grid = make_img_grid(recons['img_recons'], self.shrink_scale)
# The delta image is obtained by subtracting at the complex image, not the real valued image.
delta_image = complex_abs(targets['cmg_targets'] - recons['cmg_recons'])
delta_img_grid = make_img_grid(delta_image, self.shrink_scale)
acc = extra_params['acceleration']
kwargs = dict(global_step=epoch, dataformats='HW')
self.writer.add_image(f'{mode} Image Recons/{acc}/{step}', img_recon_grid, **kwargs)
self.writer.add_image(f'{mode} Delta Image/{acc}/{step}', delta_img_grid, **kwargs)
if 'kspace_recons' in recons:
kspace_recon_grid = make_k_grid(recons['kspace_recons'], self.smoothing_factor, self.shrink_scale)
self.writer.add_image(f'{mode} k-space Recons/{acc}/{step}', kspace_recon_grid, **kwargs)
# Adding RSS images of reconstructions and targets.
if 'rss_recons' in recons:
recon_rss = standardize_image(recons['rss_recons'])
delta_rss = standardize_image(make_rss_slice(delta_image))
self.writer.add_image(f'{mode} RSS Recons/{acc}/{step}', recon_rss, **kwargs)
self.writer.add_image(f'{mode} RSS Delta/{acc}/{step}', delta_rss, **kwargs)
if 'semi_kspace_recons' in recons:
semi_kspace_recon_grid = make_k_grid(
recons['semi_kspace_recons'], self.smoothing_factor, self.shrink_scale)
self.writer.add_image(f'{mode} semi-k-space Recons/{acc}/{step}', semi_kspace_recon_grid, **kwargs)
if epoch == 1: # Maybe add input images too later on.
img_target_grid = make_img_grid(targets['img_targets'], self.shrink_scale)
self.writer.add_image(f'{mode} Image Targets/{acc}/{step}', img_target_grid, **kwargs)
if 'kspace_targets' in targets:
kspace_target_grid = \
make_k_grid(targets['kspace_targets'], self.smoothing_factor, self.shrink_scale)
self.writer.add_image(f'{mode} k-space Targets/{acc}/{step}', kspace_target_grid, **kwargs)
if 'img_inputs' in targets:
# Not actually the input but what the input looks like as an image.
img_grid = make_img_grid(targets['img_inputs'], self.shrink_scale)
self.writer.add_image(f'{mode} Inputs as Images/{acc}/{step}', img_grid, **kwargs)
if 'rss_targets' in targets:
target_rss = standardize_image(targets['rss_targets'])
self.writer.add_image(f'{mode} RSS Targets/{acc}/{step}', target_rss, **kwargs)
if 'semi_kspace_targets' in targets:
semi_kspace_target_grid = make_k_grid(
targets['semi_kspace_targets'], self.smoothing_factor, self.shrink_scale)
self.writer.add_image(f'{mode} semi-k-space Targets/{acc}/{step}',
semi_kspace_target_grid, **kwargs)
def _get_slice_metrics(self, recons, targets, extra_params):
img_recons = recons['img_recons'].detach() # Just in case.
img_targets = targets['img_targets'].detach()
max_range = img_targets.max() - img_targets.min()
slice_ssim = self.ssim(img_recons, img_targets)
slice_psnr = psnr(img_recons, img_targets, data_range=max_range)
slice_nmse = nmse(img_recons, img_targets)
slice_metrics = {'slice/ssim': slice_ssim, 'slice/nmse': slice_nmse, 'slice/psnr': slice_psnr}
if 'rss_recons' in recons:
rss_recons = recons['rss_recons'].detach()
rss_targets = targets['rss_targets'].detach()
max_range = rss_targets.max() - rss_targets.min()
rss_ssim = self.ssim(rss_recons, rss_targets)
rss_psnr = psnr(rss_recons, rss_targets, data_range=max_range)
rss_nmse = nmse(rss_recons, rss_targets)
slice_metrics['rss/ssim'] = rss_ssim
slice_metrics['rss/psnr'] = rss_psnr
slice_metrics['rss/nmse'] = rss_nmse
else:
rss_ssim = rss_psnr = rss_nmse = 0
# Additional metrics for separating between acceleration factors.
if 'acceleration' in extra_params:
acc = extra_params["acceleration"]
slice_metrics[f'slice_acc_{acc}/ssim'] = slice_ssim
slice_metrics[f'slice_acc_{acc}/psnr'] = slice_psnr
slice_metrics[f'slice_acc_{acc}/nmse'] = slice_nmse
if 'rss_recons' in recons:
slice_metrics[f'rss_acc_{acc}/ssim'] = rss_ssim
slice_metrics[f'rss_acc_{acc}/psnr'] = rss_psnr
slice_metrics[f'rss_acc_{acc}/nmse'] = rss_nmse
return slice_metrics
def _get_epoch_outputs(self, epoch, epoch_loss, epoch_metrics, training=True):
mode = 'Training' if training else 'Validation'
num_slices = len(self.train_loader.dataset) if training else len(self.val_loader.dataset)
# Checking for nan values.
epoch_loss = torch.stack(epoch_loss)
is_finite = torch.isfinite(epoch_loss)
num_nans = (is_finite.size(0) - is_finite.sum()).item()
if num_nans > 0:
self.logger.warning(f'Epoch {epoch} {mode}: {num_nans} NaN values present in {num_slices} slices. '
                    f'Turning on anomaly detection.')
# Turn on anomaly detection for finding where the nan values are.
torch.autograd.set_detect_anomaly(True)
epoch_loss = torch.mean(epoch_loss[is_finite]).item()
else:
epoch_loss = torch.mean(epoch_loss).item()
for key, value in epoch_metrics.items():
epoch_metric = torch.stack(value)
is_finite = torch.isfinite(epoch_metric)
num_nans = (is_finite.size(0) - is_finite.sum()).item()
if num_nans > 0:
self.logger.warning(f'Epoch {epoch} {mode} {key}: {num_nans} NaN values present in {num_slices} slices. '
                    f'Turning on anomaly detection.')
epoch_metrics[key] = torch.mean(epoch_metric[is_finite]).item()
else:
epoch_metrics[key] = torch.mean(epoch_metric).item()
return epoch_loss, epoch_metrics
def _log_step_outputs(self, epoch, step, step_loss, step_metrics, training=True):
mode = 'Training' if training else 'Validation'
self.logger.info(f'Epoch {epoch:03d} Step {step:03d} {mode} loss: {step_loss.item():.4e}')
for key, value in step_metrics.items():
self.logger.info(f'Epoch {epoch:03d} Step {step:03d}: {mode} {key}: {value.item():.4e}')
def _log_epoch_outputs(self, epoch, epoch_loss, epoch_metrics, elapsed_secs, training=True):
mode = 'Training' if training else 'Validation'
self.logger.info(f'Epoch {epoch:03d} {mode}. loss: {epoch_loss:.4e}, '
f'Time: {elapsed_secs // 60} min {elapsed_secs % 60} sec')
self.writer.add_scalar(f'{mode} epoch_loss', scalar_value=epoch_loss, global_step=epoch)
for key, value in epoch_metrics.items():
self.logger.info(f'Epoch {epoch:03d} {mode}. {key}: {value:.4e}')
# Note: whether the tag is 'mode_...' or 'mode/...' matters for TensorBoard grouping.
if 'loss' in key:
self.writer.add_scalar(f'{mode}/epoch_{key}', scalar_value=value, global_step=epoch)
else:
self.writer.add_scalar(f'{mode}_epoch_{key}', scalar_value=value, global_step=epoch)
if not training: # Record learning rate.
for idx, group in enumerate(self.optimizer.param_groups, start=1):
self.writer.add_scalar(f'learning_rate_{idx}', group['lr'], global_step=epoch)
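# Construction sketch (everything below is hypothetical; `args` must provide the
# attributes read in __init__, e.g. log_path, run_name, max_images, batch_size,
# save_best_only, ckpt_path, max_to_keep, verbose, num_epochs, smoothing_factor,
# shrink_scale, use_slice_metrics, and device):
#
# trainer = ModelTrainerIMG(args, model, optimizer, train_loader, val_loader,
#                           input_train_transform, input_val_transform,
#                           output_train_transform, output_val_transform,
#                           losses={'img_loss': nn.MSELoss()})
# trainer.train_model()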
| [
[
[
7,
12
],
[
6403,
6408
],
[
7489,
7494
],
[
8577,
8582
],
[
16427,
16432
],
[
16471,
16476
],
[
16855,
16860
],
[
16920,
16925
],
[
17000,
17005
],
[
17107,
17112
],
[
17150,
17155
],
[
17505,
17510
],
[
17603,
17608
]
],
[
[
31,
33
],
[
1513,
1515
],
[
2076,
2078
],
[
2124,
2126
],
[
2389,
2391
]
],
[
[
35,
40
],
[
1598,
1603
],
[
2483,
2488
],
[
2606,
2611
]
],
[
[
42,
57
],
[
1222,
1237
],
[
1293,
1308
]
],
[
[
87,
97
],
[
1699,
1709
],
[
1738,
1748
]
],
[
[
141,
154
],
[
3911,
3924
]
],
[
[
173,
177
],
[
6765,
6769
],
[
8839,
8843
]
],
[
[
196,
200
],
[
5023,
5027
],
[
5184,
5188
],
[
5308,
5312
],
[
5456,
5460
],
[
5576,
5580
],
[
6143,
6147
]
],
[
[
225,
236
],
[
6565,
6576
],
[
8669,
8680
]
],
[
[
266,
276
],
[
1365,
1375
]
],
[
[
307,
324
],
[
3094,
3111
]
],
[
[
326,
337
],
[
11954,
11965
],
[
12662,
12673
],
[
13262,
13273
],
[
14099,
14110
]
],
[
[
339,
352
],
[
11248,
11261
],
[
11524,
11537
],
[
12986,
12999
],
[
13619,
13632
]
],
[
[
354,
368
],
[
12357,
12371
]
],
[
[
370,
387
],
[
12271,
12288
],
[
12339,
12356
],
[
13857,
13874
]
],
[
[
421,
432
],
[
11436,
11447
]
],
[
[
465,
469
],
[
4252,
4256
]
],
[
[
504,
508
],
[
14699,
14703
],
[
15199,
15203
]
],
[
[
510,
514
],
[
14772,
14776
],
[
15274,
15278
]
],
[
[
556,
570
],
[
4576,
4590
],
[
4620,
4634
],
[
4675,
4689
],
[
4740,
4754
],
[
4807,
4821
],
[
4878,
4892
]
],
[
[
656,
671
]
]
] |
from nodes import *
from tokens import Token, TokenType
class Interpreter:
def __init__(self, ast):
self.ast = ast
def eval(self):
return self.evalHelper(self.ast)
def evalHelper(self, ast):
if isinstance(ast, NumberNode):
return ast.node
elif isinstance(ast, AddNode):
return self.evalHelper(ast.node_a) + self.evalHelper(ast.node_b)
elif isinstance(ast, SubtractNode):
return self.evalHelper(ast.node_a) - self.evalHelper(ast.node_b)
elif isinstance(ast, MultiplyNode):
return self.evalHelper(ast.node_a) * self.evalHelper(ast.node_b)
elif isinstance(ast, DivideNode):
eval_b = self.evalHelper(ast.node_b)
if eval_b == 0:
raise ZeroDivisionError("Cannot divide by zero")
return self.evalHelper(ast.node_a) / eval_b
elif isinstance(ast, ModuloNode):
eval_b = self.evalHelper(ast.node_b)
if eval_b == 0:
raise ZeroDivisionError("Cannot divide by zero")
return self.evalHelper(ast.node_a) % eval_b
elif isinstance(ast, PowerNode):
return self.evalHelper(ast.node_a) ** self.evalHelper(ast.node_b)
elif isinstance(ast, PositiveNode):
return self.evalHelper(ast.node)
elif isinstance(ast, NegativeNode):
return -self.evalHelper(ast.node)
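# Usage sketch (assumes the node constructors mirror the attributes read above,
# i.e. NumberNode(node) and binary nodes taking (node_a, node_b)):
#
# tree = AddNode(NumberNode(2), MultiplyNode(NumberNode(3), NumberNode(4)))
# print(Interpreter(tree).eval()) # -> 14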
def postfix_eval(tokens):
stack = []
for t in tokens:
if t.type == TokenType.PLUS:
a = stack.pop().value
b = stack.pop().value
stack.append(Token(TokenType.NUMBER, a + b))
elif t.type == TokenType.MINUS:
a = stack.pop().value
b = stack.pop().value
stack.append(Token(TokenType.NUMBER, b - a))
elif t.type == TokenType.MULTIPLY:
a = stack.pop().value
b = stack.pop().value
stack.append(Token(TokenType.NUMBER, a * b))
elif t.type == TokenType.DIVIDE:
a = stack.pop().value
b = stack.pop().value
stack.append(Token(TokenType.NUMBER, b / a))
elif t.type == TokenType.MODULO:
a = stack.pop().value
b = stack.pop().value
stack.append(Token(TokenType.NUMBER, b % a))
elif t.type == TokenType.POWER:
a = stack.pop().value
b = stack.pop().value
stack.append(Token(TokenType.NUMBER, b ** a))
else:
stack.append(t)
return stack[0].value
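# Usage sketch: evaluate "3 4 2 * +" given in postfix order. Whether operator
# tokens take a value argument depends on Token's signature, so None is assumed:
#
# tokens = [Token(TokenType.NUMBER, 3), Token(TokenType.NUMBER, 4),
#           Token(TokenType.NUMBER, 2), Token(TokenType.MULTIPLY, None),
#           Token(TokenType.PLUS, None)]
# postfix_eval(tokens) # -> 11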
| [
[
[
18,
19
],
[
250,
260
],
[
320,
327
],
[
436,
448
],
[
557,
569
],
[
678,
688
],
[
918,
928
],
[
1158,
1167
],
[
1277,
1289
],
[
1366,
1378
]
],
[
[
39,
44
],
[
1620,
1625
],
[
1785,
1790
],
[
1953,
1958
],
[
2119,
2124
],
[
2310,
2315
],
[
2475,
2480
]
],
[
[
46,
55
],
[
1511,
1520
],
[
1626,
1635
],
[
1675,
1684
],
[
1791,
1800
],
[
1840,
1849
],
[
1959,
1968
],
[
2008,
2017
],
[
2125,
2134
],
[
2174,
2183
],
[
2316,
2325
],
[
2365,
2374
],
[
2481,
2490
]
],
[
[
63,
74
]
],
[
[
1432,
1444
]
]
] |
import hashlib
import json
import os
import boto3
from .retry import retry_on_aws_too_many_requests
batch = boto3.client('batch')
class JobDefinition:
@classmethod
def clear_all(cls):
deleted_count = 0
for jobdef in batch.describe_job_definitions(status='ACTIVE')['jobDefinitions']:
cls(metadata=jobdef).delete()
deleted_count += 1
return deleted_count
def __init__(self, docker_image=None, deployment=None, arn=None, metadata=None):
self.deployment = deployment if deployment else os.environ['DEPLOYMENT_STAGE']
if not docker_image and not metadata:
raise RuntimeError("you must provide docker_image or metadata")
self.metadata = metadata
self.docker_image = docker_image if docker_image else metadata['containerProperties']['image']
self.name = self._job_definition_name() if docker_image else metadata['jobDefinitionName']
if arn:
self.arn = arn
elif metadata:
self.arn = metadata['jobDefinitionArn']
print(f"Job definition {self.name} for {self.docker_image}:")
def find_or_create(self, job_role_arn):
if self.load():
print(f"\tfound {self.arn}")
else:
self.create(job_role_arn)
return self
def load(self):
jobdefs = self._describe_job_definitions(jobDefinitionName=self.name, status='ACTIVE')['jobDefinitions']
if len(jobdefs) > 0:
self.metadata = jobdefs[0]
self.arn = self.metadata['jobDefinitionArn']
return self
else:
return None
@retry_on_aws_too_many_requests
def create(self, job_role_arn):
self.metadata = batch.register_job_definition(
jobDefinitionName=self.name,
type='container',
parameters={},
containerProperties={
'image': self.docker_image,
'vcpus': 4,
'memory': 15000,
'command': [],
'jobRoleArn': job_role_arn,
'volumes': [
{
'host': {'sourcePath': '/data'},
'name': 'data'
},
],
'mountPoints': [
{
'containerPath': '/data',
'readOnly': False,
'sourceVolume': 'data'
},
]
},
retryStrategy={
'attempts': 3
}
)
self.arn = self.metadata['jobDefinitionArn']
print(f"\tcreated {self.arn}")
print(json.dumps(self.metadata, indent=4))
def delete(self):
print(f"Deleting job definition {self.name} ({self.docker_image})")
batch.deregister_job_definition(jobDefinition=self.arn)
@retry_on_aws_too_many_requests
def _describe_job_definitions(self, *args, **kwargs):
return batch.describe_job_definitions(*args, **kwargs)
def _job_definition_name(self):
"""
We create Job Definitions for each unique docker image we are given.
As there is no way to search for job definitions with a particular Docker image,
we must put the Docker image name in the job definition name (the only thing we can search on).
We hash the image name as it will contain characters that aren't allowed in a job definition name.
"""
hasher = hashlib.sha1()
hasher.update(bytes(self.docker_image, 'utf8'))
return f"upload-{self.deployment}-{hasher.hexdigest()}"
| [
[
[
7,
14
],
[
3499,
3506
]
],
[
[
22,
26
],
[
2687,
2691
]
],
[
[
34,
36
],
[
557,
559
]
],
[
[
45,
50
],
[
111,
116
]
],
[
[
71,
101
],
[
1625,
1655
],
[
2893,
2923
]
],
[
[
103,
108
],
[
246,
251
],
[
1716,
1721
],
[
2831,
2836
],
[
2997,
3002
]
],
[
[
141,
154
]
]
] |
n = int(input())
total_sum = 0
for i in range(1,n+1):
letter = input()
total_sum += ord(letter)
print(f"The sum equals: {total_sum}") | [
[
[
0,
1
],
[
50,
51
]
],
[
[
18,
27
],
[
81,
90
],
[
131,
140
]
],
[
[
37,
38
]
],
[
[
60,
66
],
[
98,
104
]
]
] |
# -*- coding: utf-8 -*-
# PLEASE DO NOT EDIT THIS FILE, IT IS GENERATED AND WILL BE OVERWRITTEN:
# https://github.com/ccxt/ccxt/blob/master/CONTRIBUTING.md#how-to-contribute-code
from ccxt.base.exchange import Exchange
import hashlib
from ccxt.base.errors import ExchangeError
from ccxt.base.errors import PermissionDenied
from ccxt.base.errors import ArgumentsRequired
from ccxt.base.errors import BadRequest
from ccxt.base.errors import BadSymbol
from ccxt.base.errors import InsufficientFunds
from ccxt.base.errors import InvalidOrder
from ccxt.base.errors import RateLimitExceeded
from ccxt.base.precise import Precise
class wazirx(Exchange):
def describe(self):
return self.deep_extend(super(wazirx, self).describe(), {
'id': 'wazirx',
'name': 'WazirX',
'countries': ['IN'],
'version': 'v2',
'rateLimit': 100,
'has': {
'cancelAllOrders': True,
'cancelOrder': True,
'CORS': False,
'createOrder': True,
'fetchCurrencies': False,
'fetchBalance': True,
'fetchBidsAsks': False,
'fetchClosedOrders': False,
'fetchDepositAddress': False,
'fetchDeposits': True,
'fetchFundingFees': False,
'fetchFundingHistory': False,
'fetchFundingRate': False,
'fetchFundingRates': False,
'fetchMarkets': True,
'fetchMyTrades': False,
'fetchOHLCV': False,
'fetchOpenOrders': True,
'fetchOrder': True,
'fetchOrders': True,
'fetchOrderBook': True,
'fetchPositions': False,
'fetchStatus': True,
'fetchTicker': True,
'fetchTickers': True,
'fetchTime': True,
'fetchTrades': True,
'fetchTradingFee': False,
'fetchTradingFees': False,
'fetchTransactions': False,
'fetchWithdrawals': False,
'setLeverage': False,
'withdraw': False,
'fetchDepositAddressesByNetwork': False,
'transfer': False,
'fetchTransfers': False,
},
'urls': {
'logo': 'https://user-images.githubusercontent.com/1294454/148647666-c109c20b-f8ac-472f-91c3-5f658cb90f49.jpeg',
'api': 'https://api.wazirx.com/sapi/v1',
'www': 'https://wazirx.com',
'doc': 'https://docs.wazirx.com/#public-rest-api-for-wazirx',
'fees': 'https://wazirx.com/fees',
},
'api': {
'public': {
'get': {
'exchangeInfo': 1,
'depth': 1,
'ping': 1,
'systemStatus': 1,
'tickers/24hr': 1,
'ticker/24hr': 1,
'time': 1,
'trades': 1,
},
},
'private': {
'get': {
'account': 1,
'allOrders': 1,
'funds': 1,
'historicalTrades': 1,
'openOrders': 1,
'order': 1,
},
'post': {
'order': 1,
'order/test': 1,
},
'delete': {
'order': 1,
'openOrders': 1,
},
},
},
'fees': {
'WRX': {'maker': self.parse_number('0.0'), 'taker': self.parse_number('0.0')},
},
'exceptions': {
'exact': {
'-1121': BadSymbol, # {"code": -1121, "message": "Invalid symbol."}
'1999': BadRequest, # {"code":1999,"message":"symbol is missing, symbol does not have a valid value"} message varies depending on the error
'2002': InsufficientFunds, # {"code":2002,"message":"Not enough USDT balance to execute self order"}
'2005': BadRequest, # {"code":2005,"message":"Signature is incorrect."}
'2078': PermissionDenied, # {"code":2078,"message":"Permission denied."}
'2098': BadRequest, # {"code":2098,"message":"Request out of receiving window."}
'2031': InvalidOrder, # {"code":2031,"message":"Minimum buy amount must be worth 2.0 USDT"}
'2113': BadRequest, # {"code":2113,"message":"RecvWindow must be in range 1..60000"}
'2115': BadRequest, # {"code":2115,"message":"Signature not found."}
'2136': RateLimitExceeded, # {"code":2136,"message":"Too many api request"}
'94001': InvalidOrder, # {"code":94001,"message":"Stop price not found."}
},
},
'options': {
# 'fetchTradesMethod': 'privateGetHistoricalTrades',
'recvWindow': 10000,
},
})
def fetch_markets(self, params={}):
response = self.publicGetExchangeInfo(params)
#
# {
# "timezone":"UTC",
# "serverTime":1641336850932,
# "symbols":[
# {
# "symbol":"btcinr",
# "status":"trading",
# "baseAsset":"btc",
# "quoteAsset":"inr",
# "baseAssetPrecision":5,
# "quoteAssetPrecision":0,
# "orderTypes":[
# "limit",
# "stop_limit"
# ],
# "isSpotTradingAllowed":true,
# "filters":[
# {
# "filterType":"PRICE_FILTER",
# "minPrice":"1",
# "tickSize":"1"
# }
# ]
# },
#
markets = self.safe_value(response, 'symbols', [])
result = []
for i in range(0, len(markets)):
entry = markets[i]
id = self.safe_string(entry, 'symbol')
baseId = self.safe_string(entry, 'baseAsset')
quoteId = self.safe_string(entry, 'quoteAsset')
base = self.safe_currency_code(baseId)
quote = self.safe_currency_code(quoteId)
symbol = base + '/' + quote
isSpot = self.safe_value(entry, 'isSpotTradingAllowed')
filters = self.safe_value(entry, 'filters')
minPrice = None
for j in range(0, len(filters)):
filter = filters[j]
filterType = self.safe_string(filter, 'filterType')
if filterType == 'PRICE_FILTER':
minPrice = self.safe_number(filter, 'minPrice')
fee = self.safe_value(self.fees, quote, {})
takerString = self.safe_string(fee, 'taker', '0.2')
takerString = Precise.string_div(takerString, '100')
taker = self.parse_number(takerString)
makerString = self.safe_string(fee, 'maker', '0.2')
makerString = Precise.string_div(makerString, '100')
maker = self.parse_number(makerString)
status = self.safe_string(entry, 'status')
active = status == 'trading'
limits = {
'price': {
'min': minPrice,
'max': None,
},
'amount': {
'min': None,
'max': None,
},
'cost': {
'min': None,
'max': None,
},
}
precision = {
'price': self.safe_integer(entry, 'quoteAssetPrecision'),
'amount': self.safe_integer(entry, 'baseAssetPrecision'),
}
result.append({
'info': entry,
'symbol': symbol,
'id': id,
'base': base,
'quote': quote,
'baseId': baseId,
'maker': maker,
'taker': taker,
'quoteId': quoteId,
'limits': limits,
'precision': precision,
'type': 'spot',
'spot': isSpot,
'active': active,
})
return result
def fetch_order_book(self, symbol, limit=None, params={}):
self.load_markets()
market = self.market(symbol)
request = {
'symbol': market['id'],
}
if limit is not None:
request['limit'] = limit # [1, 5, 10, 20, 50, 100, 500, 1000]
response = self.publicGetDepth(self.extend(request, params))
#
# {
# "timestamp":1559561187,
# "asks":[
# ["8540.0","1.5"],
# ["8541.0","0.0042"]
# ],
# "bids":[
# ["8530.0","0.8814"],
# ["8524.0","1.4"]
# ]
# }
#
timestamp = self.safe_integer(response, 'timestamp')
return self.parse_order_book(response, symbol, timestamp)
def fetch_ticker(self, symbol, params={}):
self.load_markets()
market = self.market(symbol)
request = {
'symbol': market['id'],
}
ticker = self.publicGetTicker24hr(self.extend(request, params))
#
# {
# "symbol":"wrxinr",
# "baseAsset":"wrx",
# "quoteAsset":"inr",
# "openPrice":"94.77",
# "lowPrice":"92.7",
# "highPrice":"95.17",
# "lastPrice":"94.03",
# "volume":"1118700.0",
# "bidPrice":"94.02",
# "askPrice":"94.03",
# "at":1641382455000
# }
#
return self.parse_ticker(ticker, market)
def fetch_tickers(self, symbols=None, params={}):
self.load_markets()
tickers = self.publicGetTickers24hr()
#
# [
# {
# "symbol":"btcinr",
# "baseAsset":"btc",
# "quoteAsset":"inr",
# "openPrice":"3698486",
# "lowPrice":"3641155.0",
# "highPrice":"3767999.0",
# "lastPrice":"3713212.0",
# "volume":"254.11582",
# "bidPrice":"3715021.0",
# "askPrice":"3715022.0",
# }
# ...
# ]
#
result = {}
for i in range(0, len(tickers)):
ticker = tickers[i]
parsedTicker = self.parse_ticker(ticker)
symbol = parsedTicker['symbol']
result[symbol] = parsedTicker
return result
def fetch_trades(self, symbol, since=None, limit=None, params={}):
self.load_markets()
market = self.market(symbol)
request = {
'symbol': market['id'],
}
if limit is not None:
request['limit'] = limit # Default 500; max 1000.
method = self.safe_string(self.options, 'fetchTradesMethod', 'publicGetTrades')
response = getattr(self, method)(self.extend(request, params))
# [
# {
# "id":322307791,
# "price":"93.7",
# "qty":"0.7",
# "quoteQty":"65.59",
# "time":1641386701000,
# "isBuyerMaker":false
# },
# ]
return self.parse_trades(response, market, since, limit)
def parse_trade(self, trade, market=None):
#
# {
# "id":322307791,
# "price":"93.7",
# "qty":"0.7",
# "quoteQty":"65.59",
# "time":1641386701000,
# "isBuyerMaker":false
# }
#
id = self.safe_string(trade, 'id')
timestamp = self.safe_integer(trade, 'time')  # 'time' is epoch milliseconds per the sample above
datetime = self.iso8601(timestamp)
symbol = None
if market is not None:
symbol = market['symbol']
isBuyerMaker = self.safe_value(trade, 'isBuyerMaker')
side = 'sell' if isBuyerMaker else 'buy'
price = self.safe_number(trade, 'price')
amount = self.safe_number(trade, 'qty')
cost = self.safe_number(trade, 'quoteQty')
return self.safe_trade({
'info': trade,
'id': id,
'timestamp': timestamp,
'datetime': datetime,
'symbol': symbol,
'order': id,
'type': None,
'side': side,
'takerOrMaker': None,
'price': price,
'amount': amount,
'cost': cost,
'fee': None,
})
def fetch_status(self, params={}):
response = self.publicGetSystemStatus(params)
#
# {"status":"normal","message":"System is running normally."}
#
status = self.safe_string(response, 'status')
status = 'ok' if (status == 'normal') else 'maintenance'
self.status = self.extend(self.status, {
'status': status,
'updated': self.milliseconds(),
})
return self.status
def fetch_time(self, params={}):
response = self.publicGetTime(params)
#
# {
# "serverTime":1635467280514
# }
#
return self.safe_integer(response, 'serverTime')
def parse_ticker(self, ticker, market=None):
#
# {
# "symbol":"btcinr",
# "baseAsset":"btc",
# "quoteAsset":"inr",
# "openPrice":"3698486",
# "lowPrice":"3641155.0",
# "highPrice":"3767999.0",
# "lastPrice":"3713212.0",
# "volume":"254.11582", # base volume
# "bidPrice":"3715021.0",
# "askPrice":"3715022.0",
# "at":1641382455000 # only on fetchTicker
# }
#
marketId = self.safe_string(ticker, 'symbol')
market = self.safe_market(marketId, market)
symbol = market['symbol']
last = self.safe_number(ticker, 'lastPrice')
open = self.safe_number(ticker, 'openPrice')
high = self.safe_number(ticker, 'highPrice')
low = self.safe_number(ticker, 'lowPrice')
baseVolume = self.safe_number(ticker, 'volume')
bid = self.safe_number(ticker, 'bidPrice')
ask = self.safe_number(ticker, 'askPrice')
timestamp = self.safe_integer(ticker, 'at')  # 'at' is epoch milliseconds per the sample above
return self.safe_ticker({
'symbol': symbol,
'timestamp': timestamp,
'datetime': self.iso8601(timestamp),
'high': high,
'low': low,
'bid': bid,
'bidVolume': None,
'ask': ask,
'askVolume': None,
'vwap': None,
'open': open,
'close': last,
'last': last,
'previousClose': None,
'change': None,
'percentage': None,
'average': None,
'baseVolume': baseVolume,
'quoteVolume': None,
'info': ticker,
}, market)
def parse_balance(self, response):
result = {}
for i in range(0, len(response)):
balance = response[i]
id = self.safe_string(balance, 'asset')
code = self.safe_currency_code(id)
account = self.account()
account['free'] = self.safe_string(balance, 'free')
account['used'] = self.safe_string(balance, 'locked')
result[code] = account
return self.safe_balance(result)
def fetch_balance(self, params={}):
self.load_markets()
response = self.privateGetFunds(params)
#
# [
# {
# "asset":"inr",
# "free":"0.0",
# "locked":"0.0"
# },
# ]
#
return self.parse_balance(response)
def fetch_orders(self, symbol=None, since=None, limit=None, params={}):
if symbol is None:
raise ArgumentsRequired(self.id + ' fetchOrders requires a `symbol` argument')
self.load_markets()
market = self.market(symbol)
request = {
'symbol': market['id'],
}
if since is not None:
request['startTime'] = since
if limit is not None:
request['limit'] = limit
response = self.privateGetAllOrders(self.extend(request, params))
# [
# {
# "id": 28,
# "symbol": "wrxinr",
# "price": "9293.0",
# "origQty": "10.0",
# "executedQty": "8.2",
# "status": "cancel",
# "type": "limit",
# "side": "sell",
# "createdTime": 1499827319559,
# "updatedTime": 1499827319559
# },
# {
# "id": 30,
# "symbol": "wrxinr",
# "price": "9293.0",
# "stopPrice": "9200.0",
# "origQty": "10.0",
# "executedQty": "0.0",
# "status": "cancel",
# "type": "stop_limit",
# "side": "sell",
# "createdTime": 1499827319559,
# "updatedTime": 1507725176595
# }
# ]
orders = self.parse_orders(response, market, since, limit)
orders = self.filter_by(orders, 'symbol', symbol)
return orders
def fetch_open_orders(self, symbol=None, since=None, limit=None, params={}):
self.load_markets()
request = {}
market = None
if symbol is not None:
market = self.market(symbol)
request['symbol'] = market['id']
response = self.privateGetOpenOrders(self.extend(request, params))
# [
# {
# "id": 28,
# "symbol": "wrxinr",
# "price": "9293.0",
# "origQty": "10.0",
# "executedQty": "8.2",
# "status": "cancel",
# "type": "limit",
# "side": "sell",
# "createdTime": 1499827319559,
# "updatedTime": 1499827319559
# },
# {
# "id": 30,
# "symbol": "wrxinr",
# "price": "9293.0",
# "stopPrice": "9200.0",
# "origQty": "10.0",
# "executedQty": "0.0",
# "status": "cancel",
# "type": "stop_limit",
# "side": "sell",
# "createdTime": 1499827319559,
# "updatedTime": 1507725176595
# }
# ]
orders = self.parse_orders(response, market, since, limit)
return orders
def cancel_all_orders(self, symbol=None, params={}):
if symbol is None:
raise ArgumentsRequired(self.id + ' cancelAllOrders requires a `symbol` argument')
self.load_markets()
market = self.market(symbol)
request = {
'symbol': market['id'],
}
return self.privateDeleteOpenOrders(self.extend(request, params))
def cancel_order(self, id, symbol=None, params={}):
if symbol is None:
raise ArgumentsRequired(self.id + ' cancelOrder requires a `symbol` argument')
self.load_markets()
market = self.market(symbol)
request = {
'symbol': market['id'],
'orderId': id,
}
response = self.privateDeleteOrder(self.extend(request, params))
return self.parse_order(response)
def create_order(self, symbol, type, side, amount, price=None, params={}):
if (type != 'limit') and (type != 'stop_limit'):
raise ExchangeError(self.id + ' createOrder() supports limit and stop_limit orders only')
if price is None:
raise ExchangeError(self.id + ' createOrder() requires a price argument')
self.load_markets()
market = self.market(symbol)
request = {
'symbol': market['id'],
'side': side,
'quantity': amount,
'type': 'limit',
}
request['price'] = self.price_to_precision(symbol, price)
stopPrice = self.safe_string(params, 'stopPrice')
if stopPrice is not None:
request['type'] = 'stop_limit'
response = self.privatePostOrder(self.extend(request, params))
# {
# "id": 28,
# "symbol": "wrxinr",
# "price": "9293.0",
# "origQty": "10.0",
# "executedQty": "8.2",
# "status": "wait",
# "type": "limit",
# "side": "sell",
# "createdTime": 1499827319559,
# "updatedTime": 1499827319559
# }
return self.parse_order(response, market)
def parse_order(self, order, market=None):
# {
# "id":1949417813,
# "symbol":"ltcusdt",
# "type":"limit",
# "side":"sell",
# "status":"done",
# "price":"146.2",
# "origQty":"0.05",
# "executedQty":"0.05",
# "createdTime":1641252564000,
# "updatedTime":1641252564000
# },
created = self.safe_integer(order, 'createdTime')
updated = self.safe_integer(order, 'updatedTime')
marketId = self.safe_string(order, 'symbol')
symbol = self.safe_symbol(marketId, market)
amount = self.safe_string(order, 'origQty')  # field name per the sample responses above
filled = self.safe_string(order, 'executedQty')
status = self.parse_order_status(self.safe_string(order, 'status'))
id = self.safe_string(order, 'id')
price = self.safe_string(order, 'price')
type = self.safe_string_lower(order, 'type')
side = self.safe_string_lower(order, 'side')
return self.safe_order({
'info': order,
'id': id,
'clientOrderId': None,
'timestamp': created,
'datetime': self.iso8601(created),
'lastTradeTimestamp': updated,
'status': status,
'symbol': symbol,
'type': type,
'timeInForce': None,
'postOnly': None,
'side': side,
'price': price,
'amount': amount,
'filled': filled,
'remaining': None,
'cost': None,
'fee': None,
'average': None,
'trades': [],
}, market)
def parse_order_status(self, status):
statuses = {
'wait': 'open',
'done': 'closed',
'cancel': 'canceled',
}
return self.safe_string(statuses, status, status)
def sign(self, path, api='public', method='GET', params={}, headers=None, body=None):
url = self.urls['api'] + '/' + path
if api == 'public':
if params:
url += '?' + self.urlencode(params)
if api == 'private':
self.check_required_credentials()
timestamp = self.milliseconds()
data = self.extend({'recvWindow': self.options['recvWindow'], 'timestamp': timestamp}, params)
data = self.keysort(data)
signature = self.hmac(self.encode(self.urlencode(data)), self.encode(self.secret), hashlib.sha256)
url += '?' + self.urlencode(data)
url += '&signature=' + signature
headers = {
'Content-Type': 'application/x-www-form-urlencoded',
'X-Api-Key': self.apiKey,
}
return {'url': url, 'method': method, 'body': body, 'headers': headers}
def handle_errors(self, code, reason, url, method, headers, body, response, requestHeaders, requestBody):
#
# {"code":2098,"message":"Request out of receiving window."}
#
if response is None:
return
errorCode = self.safe_string(response, 'code')
if errorCode is not None:
feedback = self.id + ' ' + body
self.throw_exactly_matched_exception(self.exceptions['exact'], errorCode, feedback)
raise ExchangeError(feedback)
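# Usage sketch (credentials are placeholders; private endpoints require real keys):
#
# exchange = wazirx({'apiKey': 'YOUR_API_KEY', 'secret': 'YOUR_SECRET'})
# print(exchange.fetch_time())
# print(exchange.fetch_ticker('BTC/INR'))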
| [
[
[
212,
220
],
[
640,
648
]
],
[
[
228,
235
],
[
24071,
24078
]
],
[
[
265,
278
],
[
20474,
20487
],
[
20602,
20615
],
[
24902,
24915
]
],
[
[
308,
324
],
[
4497,
4513
]
],
[
[
354,
371
],
[
16688,
16705
],
[
19586,
19603
],
[
19970,
19987
]
],
[
[
401,
411
],
[
4121,
4131
],
[
4404,
4414
],
[
4591,
4601
],
[
4806,
4816
],
[
4912,
4922
]
],
[
[
441,
450
],
[
4033,
4042
]
],
[
[
480,
497
],
[
4282,
4299
]
],
[
[
527,
539
],
[
4693,
4705
],
[
5100,
5112
]
],
[
[
569,
586
],
[
5002,
5019
]
],
[
[
617,
624
],
[
7283,
7290
],
[
7463,
7470
]
],
[
[
633,
639
],
[
714,
720
]
]
] |
# -*- coding: utf-8 -*-
"""
Created on Wed Oct 23 14:16:27 2013
@author: Lucio
Program for assessing the memory footprint of the simulation. Needs the
memory_profiler module (installed on the milano cluster).
"""
import os
import time
import sys
import simpactpurple
from memory_profiler import profile
@profile
def run_single(pop):
s = simpactpurple.Community()
s.INITIAL_POPULATION = pop
#Simulate a run of the simulation
s.start() # initialize data structures
#a few timesteps
s.update_recruiting(s.RECRUIT_INITIAL)
for i in range(s.RECRUIT_WARM_UP):
s.time = i
s.time_operator.step() # 1. Time progresses
s.relationship_operator.step() # 2. Form and dissolve relationships
s.infection_operator.step() # 3. HIV transmission
s.update_recruiting(s.RECRUIT_RATE)
for i in range(s.RECRUIT_WARM_UP, int(s.NUMBER_OF_YEARS*52)):
s.time = i
s.time_operator.step() # 1. Time progresses
s.relationship_operator.step() # 2. Form and dissolve relationships
s.infection_operator.step() # 3. HIV transmission
#post-process / clean-up
for pipe in s.pipes.values():
pipe.send("terminate")
if __name__ == '__main__':
run_single(int(sys.argv[1]))
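# Invocation sketch (script name and population size are placeholders):
# python memory_footprint.py 1000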
| [
[
[
225,
227
]
],
[
[
235,
239
]
],
[
[
247,
250
],
[
1281,
1284
]
],
[
[
258,
271
],
[
347,
360
]
],
[
[
300,
307
],
[
310,
317
]
],
[
[
322,
332
],
[
1266,
1276
]
]
] |
# Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
math functions
"""
from __future__ import print_function
import numpy as np
from paddle.common_ops_import import VarDesc
from paddle.common_ops_import import dygraph_only
from paddle.common_ops_import import OpProtoHolder
from paddle.common_ops_import import templatedoc
from paddle.common_ops_import import dygraph_utils
from paddle.tensor import cast
from paddle.tensor.attribute import _complex_to_real_dtype
import paddle
from ..fluid import layers
from ..fluid.framework import core, _varbase_creator, in_dygraph_mode, Variable, convert_np_dtype_to_dtype_
from ..fluid.layer_helper import LayerHelper
from ..fluid.data_feeder import check_variable_and_dtype, check_type, check_dtype, convert_dtype
from ..fluid.layers.layer_function_generator import _generate_doc_string_, generate_activation_fn, generate_layer_fn
from ..fluid.dygraph.inplace_utils import inplace_apis_in_dygraph_only
# TODO: define math functions
# yapf: disable
from ..fluid.layers import abs # noqa: F401
from ..fluid.layers import acos # noqa: F401
from ..fluid.layers import asin # noqa: F401
from ..fluid.layers import ceil # noqa: F401
from ..fluid.layers import ceil_ # noqa: F401
from ..fluid.layers import cos # noqa: F401
from ..fluid.layers import tan # noqa: F401
from ..fluid.layers import sinh # noqa: F401
from ..fluid.layers import cosh # noqa: F401
from ..fluid.layers import exp # noqa: F401
from ..fluid.layers import exp_ # noqa: F401
from ..fluid.layers import expm1 # noqa: F401
from ..fluid.layers import floor # noqa: F401
from ..fluid.layers import floor_ # noqa: F401
from ..fluid.layers import log # noqa: F401
from ..fluid.layers import reciprocal # noqa: F401
from ..fluid.layers import reciprocal_ # noqa: F401
from ..fluid.layers import round # noqa: F401
from ..fluid.layers import round_ # noqa: F401
from ..fluid.layers import rsqrt # noqa: F401
from ..fluid.layers import rsqrt_ # noqa: F401
from ..fluid.layers import scale # noqa: F401
from ..fluid.layers import square # noqa: F401
from ..fluid.layers import stanh # noqa: F401
from ..fluid.layers import atan # noqa: F401
from ..fluid.layers import erf # noqa: F401
from ..fluid.layers import sqrt # noqa: F401
from ..fluid.layers import sqrt_ # noqa: F401
from ..fluid.layers import sin # noqa: F401
from ..fluid.layers import lgamma # noqa: F401
from ..fluid.layers import multiplex # noqa: F401
from ..fluid import layers
from paddle import _C_ops
__all__ = []
_supported_int_dtype_ = [
VarDesc.VarType.UINT8,
VarDesc.VarType.INT8,
VarDesc.VarType.INT16,
VarDesc.VarType.INT32,
VarDesc.VarType.INT64,
]
_supported_float_dtype_ = [
VarDesc.VarType.FP32,
VarDesc.VarType.FP64,
]
@inplace_apis_in_dygraph_only
def scale_(x, scale=1.0, bias=0.0, bias_after_scale=True, act=None, name=None):
"""
Inplace version of ``scale`` API, the output Tensor will be inplaced with input ``x``.
Please refer to :ref:`api_tensor_scale`.
"""
_scale = scale.numpy().item(0) if isinstance(scale, Variable) else scale
return _C_ops.scale_(x, 'scale',
float(_scale), 'bias',
float(bias), 'bias_after_scale', bias_after_scale)
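# Quick sketch of the inplace behavior (out = scale * x + bias when
# bias_after_scale is True):
#
# import paddle
# x = paddle.to_tensor([1.0, 2.0])
# scale_(x, scale=2.0, bias=1.0) # x becomes [3., 5.]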
def pow(x, y, name=None):
"""
Compute the power of tensor elements. The equation is:
.. math::
out = x^{y}
**Note**:
``paddle.pow`` supports broadcasting. If you want to know more about broadcasting, please refer to :ref:`user_guide_broadcasting` .
Args:
x (Tensor): An N-D Tensor, the data type is float32, float64, int32 or int64.
y (float|int|Tensor): If it is an N-D Tensor, its data type should be the same as `x`.
name (str, optional): Name for the operation (optional, default is None). For more information, please refer to :ref:`api_guide_Name`.
Returns:
N-D Tensor. A location into which the result is stored. Its dimension and data type are the same as `x`.
Examples:
.. code-block:: python
import paddle
x = paddle.to_tensor([1, 2, 3], dtype='float32')
# example 1: y is a float or int
res = paddle.pow(x, 2)
print(res)
# Tensor(shape=[3], dtype=float32, place=CUDAPlace(0), stop_gradient=True,
# [1., 4., 9.])
res = paddle.pow(x, 2.5)
print(res)
# Tensor(shape=[3], dtype=float32, place=CUDAPlace(0), stop_gradient=True,
# [1. , 5.65685415 , 15.58845711])
# example 2: y is a Tensor
y = paddle.to_tensor([2], dtype='float32')
res = paddle.pow(x, y)
print(res)
# Tensor(shape=[3], dtype=float32, place=CUDAPlace(0), stop_gradient=True,
# [1., 4., 9.])
"""
# in dynamic graph mode
if in_dygraph_mode():
if isinstance(y, (int, float)):
return _C_ops.pow(x, 'factor', y)
elif isinstance(y, (paddle.Tensor, Variable)):
return _elementwise_op_in_dygraph(
x, y, axis=-1, act=None, op_name='elementwise_pow')
else:
raise TypeError('y must be scalar or tensor type, but received: %s '% (y.dtype))
# in static graph mode
else:
if isinstance(y, (int, float)):
helper = LayerHelper('pow', **locals())
inputs = {'X': x}
attrs = {'factor': y}
out = helper.create_variable_for_type_inference(dtype=x.dtype)
helper.append_op(
type='pow', inputs=inputs, outputs={'Out': out}, attrs=attrs)
return out
elif isinstance(y, (paddle.Tensor, Variable)):
# TODO A potential speed improvement is supporting different types in C++ and removing the cast ops here
helper = LayerHelper('elementwise_pow', **locals())
out = helper.create_variable_for_type_inference(dtype=x.dtype)
return _elementwise_op(LayerHelper('elementwise_pow', **locals()))
else:
raise TypeError('y must be scalar or tensor type, but received: %s '% (type(y)))
@dygraph_only
def _elementwise_op_in_dygraph(x,
y,
axis=-1,
act=None,
use_mkldnn=False,
op_name=None):
op = getattr(_C_ops, op_name)
out = op(x, y, 'axis', axis, 'use_mkldnn', use_mkldnn)
return dygraph_utils._append_activation_in_dygraph(
out, act, use_mkldnn=use_mkldnn)
def _elementwise_op(helper):
op_type = helper.layer_type
original_op_type = helper.kwargs.get('original_op_type', op_type)
x = helper.kwargs.get('x', None)
y = helper.kwargs.get('y', None)
out = helper.kwargs.get('out', None)
assert x is not None, 'x cannot be None in {}'.format(original_op_type)
assert y is not None, 'y cannot be None in {}'.format(original_op_type)
check_variable_and_dtype(
x, 'x', ['float16', 'float32', 'float64', 'int32', 'int64', 'bool'],
original_op_type)
check_variable_and_dtype(
y, 'y', ['float16', 'float32', 'float64', 'int32', 'int64', 'bool'],
original_op_type)
axis = helper.kwargs.get('axis', -1)
use_mkldnn = helper.kwargs.get('use_mkldnn', False)
name = helper.kwargs.get('name', None)
if out is None:
if name is None:
out = helper.create_variable_for_type_inference(dtype=x.dtype)
else:
out = helper.create_variable(name=name, dtype=x.dtype, persistable=False)
helper.append_op(
type=op_type,
inputs={'X': x,
'Y': y},
outputs={'Out': out},
attrs={'axis': axis,
'use_mkldnn': use_mkldnn})
return helper.append_activation(out)
def add(x, y, name=None):
"""
Examples:
.. code-block:: python
import paddle
x = paddle.to_tensor([2, 3, 4], 'float64')
y = paddle.to_tensor([1, 5, 2], 'float64')
z = paddle.add(x, y)
print(z) # [3., 8., 6. ]
"""
if in_dygraph_mode():
return _C_ops.elementwise_add(x, y)
return _elementwise_op(LayerHelper('elementwise_add', **locals()))
@inplace_apis_in_dygraph_only
def add_(x, y, name=None):
"""
Inplace version of ``add`` API, the output Tensor will be inplaced with input ``x``.
Please refer to :ref:`api_tensor_add`.
"""
op_type = 'elementwise_add_'
axis = -1
out_shape = broadcast_shape(x.shape, y.shape)
if out_shape != x.shape:
raise ValueError("The shape of broadcast output {} is different from that of inplace tensor {} in the Inplace operation.".format(out_shape, x.shape))
out = _elementwise_op_in_dygraph(
x, y, axis=axis, op_name=op_type)
return out
def subtract(x, y, name=None):
"""
Subtract two tensors element-wise. The equation is:
.. math::
out = x - y
**Note**:
``paddle.subtract`` supports broadcasting. If you want to know more about broadcasting, please refer to :ref:`user_guide_broadcasting` .
Args:
x (Tensor): the input tensor, it's data type should be float32, float64, int32, int64.
y (Tensor): the input tensor, it's data type should be float32, float64, int32, int64.
name (str, optional): Name for the operation (optional, default is None). For more information, please refer to :ref:`api_guide_Name`.
Returns:
N-D Tensor. A location into which the result is stored. If x, y have different shapes and are "broadcastable", the resulting tensor shape is the shape of x and y after broadcasting. If x, y have the same shape, its shape is the same as x and y.
Examples:
.. code-block:: python
import numpy as np
import paddle
x = paddle.to_tensor([[1, 2], [7, 8]])
y = paddle.to_tensor([[5, 6], [3, 4]])
res = paddle.subtract(x, y)
print(res)
# [[-4, -4],
# [4, 4]]
x = paddle.to_tensor([[[1, 2, 3], [1, 2, 3]]])
y = paddle.to_tensor([1, 0, 4])
res = paddle.subtract(x, y)
print(res)
# [[[ 0, 2, -1],
# [ 0, 2, -1]]]
x = paddle.to_tensor([2, np.nan, 5], dtype='float32')
y = paddle.to_tensor([1, 4, np.nan], dtype='float32')
res = paddle.subtract(x, y)
print(res)
# [ 1., nan, nan]
x = paddle.to_tensor([5, np.inf, -np.inf], dtype='float64')
y = paddle.to_tensor([1, 4, 5], dtype='float64')
res = paddle.subtract(x, y)
print(res)
# [ 4., inf., -inf.]
"""
op_type = 'elementwise_sub'
axis = -1
act = None
if in_dygraph_mode():
return _elementwise_op_in_dygraph(
x, y, axis=axis, act=act, op_name=op_type)
return _elementwise_op(LayerHelper(op_type, **locals()))
@inplace_apis_in_dygraph_only
def subtract_(x, y, name=None):
"""
Inplace version of ``subtract`` API, the output Tensor will be inplaced with input ``x``.
Please refer to :ref:`api_tensor_subtract`.
"""
axis = -1
act = None
out_shape = broadcast_shape(x.shape, y.shape)
if out_shape != x.shape:
raise ValueError("The shape of broadcast output {} is different from that of inplace tensor {} in the Inplace operation.".format(out_shape, x.shape))
out = _elementwise_op_in_dygraph(
x, y, axis=axis, act=act, op_name='elementwise_sub_')
return out
def divide(x, y, name=None):
"""
Divide two tensors element-wise. The equation is:
.. math::
out = x / y
**Note**:
``paddle.divide`` supports broadcasting. If you want to know more about broadcasting, please refer to :ref:`user_guide_broadcasting` .
Args:
x (Tensor): the input tensor, its data type should be float32, float64, int32, int64.
y (Tensor): the input tensor, its data type should be float32, float64, int32, int64.
name (str, optional): Name for the operation (optional, default is None). For more information, please refer to :ref:`api_guide_Name`.
Returns:
N-D Tensor. A location into which the result is stored. If x, y have different shapes and are "broadcastable", the resulting tensor shape is the shape of x and y after broadcasting. If x, y have the same shape, its shape is the same as x and y.
Examples:
.. code-block:: python
import paddle
x = paddle.to_tensor([2, 3, 4], dtype='float64')
y = paddle.to_tensor([1, 5, 2], dtype='float64')
z = paddle.divide(x, y)
print(z) # [2., 0.6, 2.]
"""
op_type = 'elementwise_div'
axis = -1
act = None
if in_dygraph_mode():
return _elementwise_op_in_dygraph(
x, y, axis=axis, act=act, op_name=op_type)
return _elementwise_op(LayerHelper(op_type, **locals()))
def floor_divide(x, y, name=None):
"""
Floor divide two tensors element-wise. The equation is:
.. math::
out = x // y
**Note**:
``paddle.floor_divide`` supports broadcasting. If you want to know more about broadcasting, please refer to :ref:`user_guide_broadcasting` .
Args:
x (Tensor): the input tensor, its data type should be int32, int64.
y (Tensor): the input tensor, its data type should be int32, int64.
name (str, optional): Name for the operation (optional, default is None). For more information, please refer to :ref:`api_guide_Name`.
Returns:
N-D Tensor. A location into which the result is stored. Its dimension equals that of $x$.
Examples:
.. code-block:: python
import paddle
x = paddle.to_tensor([2, 3, 8, 7])
y = paddle.to_tensor([1, 5, 3, 3])
z = paddle.floor_divide(x, y)
print(z) # [2, 0, 2, 2]
"""
op_type = 'elementwise_floordiv'
axis = -1
if in_dygraph_mode():
return _elementwise_op_in_dygraph(
x, y, axis=axis, op_name=op_type)
return _elementwise_op(LayerHelper(op_type, **locals()))
def remainder(x, y, name=None):
r"""
Mod two tensors element-wise. The equation is:
.. math::
out = x \% y
**Note**:
``paddle.remainder`` supports broadcasting. If you want to know more about broadcasting, please refer to :ref:`user_guide_broadcasting` .
Args:
x (Tensor): the input tensor, its data type should be float32, float64, int32, int64.
y (Tensor): the input tensor, its data type should be float32, float64, int32, int64.
name (str, optional): Name for the operation (optional, default is None). For more information, please refer to :ref:`api_guide_Name`.
Returns:
N-D Tensor. A location into which the result is stored. If x, y have different shapes and are "broadcastable", the resulting tensor shape is the shape of x and y after broadcasting. If x, y have the same shape, its shape is the same as x and y.
Examples:
.. code-block:: python
import paddle
x = paddle.to_tensor([2, 3, 8, 7])
y = paddle.to_tensor([1, 5, 3, 3])
z = paddle.remainder(x, y)
print(z) # [0, 3, 2, 1]
"""
op_type = 'elementwise_mod'
axis = -1
if in_dygraph_mode():
return _elementwise_op_in_dygraph(
x, y, axis=axis, op_name=op_type)
return _elementwise_op(LayerHelper(op_type, **locals()))
mod = remainder # noqa: F841
floor_mod = remainder # noqa: F841
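# Usage note for the aliases above: paddle.mod and paddle.floor_mod are the
# very same callable as paddle.remainder, so all three are interchangeable:
#
#   z1 = paddle.remainder(x, y)
#   z2 = paddle.mod(x, y)        # identical op
#   z3 = paddle.floor_mod(x, y)  # identical op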
def multiply(x, y, name=None):
"""
Multiply two tensors element-wise. The equation is:
.. math::
out = x * y
**Note**:
``paddle.multiply`` supports broadcasting. If you would like to know more about broadcasting, please refer to :ref:`user_guide_broadcasting` .
Args:
x (Tensor): the input tensor, its data type should be one of float32, float64, int32, int64, bool.
y (Tensor): the input tensor, its data type should be one of float32, float64, int32, int64, bool.
name (str, optional): Name for the operation (optional, default is None). For more information, please refer to :ref:`api_guide_Name`.
Returns:
N-D Tensor. A location into which the result is stored. If x, y have different shapes and are "broadcastable", the resulting tensor shape is the shape of x and y after broadcasting. If x, y have the same shape, its shape is the same as x and y.
Examples:
.. code-block:: python
import paddle
x = paddle.to_tensor([[1, 2], [3, 4]])
y = paddle.to_tensor([[5, 6], [7, 8]])
res = paddle.multiply(x, y)
print(res) # [[5, 12], [21, 32]]
x = paddle.to_tensor([[[1, 2, 3], [1, 2, 3]]])
y = paddle.to_tensor([2])
res = paddle.multiply(x, y)
print(res) # [[[2, 4, 6], [2, 4, 6]]]
"""
op_type = 'elementwise_mul'
act = None
axis = -1
if in_dygraph_mode():
return _elementwise_op_in_dygraph(
x, y, axis=axis, act=act, op_name=op_type)
if x.dtype != y.dtype:
raise TypeError(
'Input tensors must be of the same type, but received type of x: %s, type of y: %s '
% (x.dtype, y.dtype))
return _elementwise_op(LayerHelper(op_type, **locals()))
def maximum(x, y, name=None):
"""
Compares two tensors and returns a new tensor containing the element-wise maxima. The equation is:
.. math::
out = max(x, y)
**Note**:
``paddle.maximum`` supports broadcasting. If you want to know more about broadcasting, please refer to :ref:`user_guide_broadcasting` .
Args:
x (Tensor): the input tensor, its data type should be float32, float64, int32, int64.
y (Tensor): the input tensor, its data type should be float32, float64, int32, int64.
name (str, optional): Name for the operation (optional, default is None). For more information, please refer to :ref:`api_guide_Name`.
Returns:
N-D Tensor. A location into which the result is stored. If x, y have different shapes and are "broadcastable", the resulting tensor shape is the shape of x and y after broadcasting. If x, y have the same shape, its shape is the same as x and y.
Examples:
.. code-block:: python
import numpy as np
import paddle
x = paddle.to_tensor([[1, 2], [7, 8]])
y = paddle.to_tensor([[3, 4], [5, 6]])
res = paddle.maximum(x, y)
print(res)
# [[3, 4],
# [7, 8]]
x = paddle.to_tensor([[1, 2, 3], [1, 2, 3]])
y = paddle.to_tensor([3, 0, 4])
res = paddle.maximum(x, y)
print(res)
# [[3, 2, 4],
# [3, 2, 4]]
x = paddle.to_tensor([2, 3, 5], dtype='float32')
y = paddle.to_tensor([1, np.nan, np.nan], dtype='float32')
res = paddle.maximum(x, y)
print(res)
# [ 2., nan, nan]
x = paddle.to_tensor([5, 3, np.inf], dtype='float32')
y = paddle.to_tensor([1, -np.inf, 5], dtype='float32')
res = paddle.maximum(x, y)
print(res)
# [ 5., 3., inf.]
"""
op_type = 'elementwise_max'
axis = -1
act = None
if in_dygraph_mode():
return _elementwise_op_in_dygraph(
x, y, axis=axis, act=act, op_name=op_type)
return _elementwise_op(LayerHelper(op_type, **locals()))
def minimum(x, y, name=None):
"""
Compares two tensors and returns a new tensor containing the element-wise minima. The equation is:
.. math::
out = min(x, y)
**Note**:
``paddle.minimum`` supports broadcasting. If you want to know more about broadcasting, please refer to :ref:`user_guide_broadcasting` .
Args:
x (Tensor): the input tensor, its data type should be float32, float64, int32, int64.
y (Tensor): the input tensor, its data type should be float32, float64, int32, int64.
name (str, optional): Name for the operation (optional, default is None). For more information, please refer to :ref:`api_guide_Name`.
Returns:
N-D Tensor. A location into which the result is stored. If x, y have different shapes and are "broadcastable", the resulting tensor shape is the shape of x and y after broadcasting. If x, y have the same shape, its shape is the same as x and y.
Examples:
.. code-block:: python
import numpy as np
import paddle
x = paddle.to_tensor([[1, 2], [7, 8]])
y = paddle.to_tensor([[3, 4], [5, 6]])
res = paddle.minimum(x, y)
print(res)
# [[1, 2],
# [5, 6]]
x = paddle.to_tensor([[[1, 2, 3], [1, 2, 3]]])
y = paddle.to_tensor([3, 0, 4])
res = paddle.minimum(x, y)
print(res)
# [[[1, 0, 3],
# [1, 0, 3]]]
x = paddle.to_tensor([2, 3, 5], dtype='float32')
y = paddle.to_tensor([1, np.nan, np.nan], dtype='float32')
res = paddle.minimum(x, y)
print(res)
# [ 1., nan, nan]
x = paddle.to_tensor([5, 3, np.inf], dtype='float64')
y = paddle.to_tensor([1, -np.inf, 5], dtype='float64')
res = paddle.minimum(x, y)
print(res)
# [ 1., -inf., 5.]
"""
op_type = 'elementwise_min'
axis = -1
act = None
if in_dygraph_mode():
return _elementwise_op_in_dygraph(
x, y, axis=axis, act=act, op_name=op_type)
return _elementwise_op(LayerHelper(op_type, **locals()))
for func in [
add,
multiply
]:
proto_dict = {'add': 'elementwise_add', 'multiply': 'elementwise_mul'}
op_proto = OpProtoHolder.instance().get_op_proto(proto_dict[func.__name__])
additional_args_lines = [
"name (string, optional): Name of the output. \
Default is None. It's used to print debug info for developers. Details: \
:ref:`api_guide_Name` "
]
func.__doc__ = _generate_doc_string_(
op_proto,
additional_args_lines=additional_args_lines,
skip_attrs_set={"x_data_format", "y_data_format", "axis",
"use_quantizer", "mkldnn_data_type", "Scale_x", "Scale_y", "Scale_out"
}) + """\n""" + str(func.__doc__)
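# The loop above rewrites add.__doc__ and multiply.__doc__ at import time: it
# prepends the OpProto-derived docstring produced by _generate_doc_string_
# (minus internal attrs such as "use_quantizer") to the hand-written Examples
# section, so the rendered docs show the generated Args first and the
# original examples last.
#
#   # e.g. (hypothetical check) the original examples survive at the end:
#   # assert paddle.add.__doc__.strip().endswith('[3., 8., 6. ]')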
def sum(x, axis=None, dtype=None, keepdim=False, name=None):
"""
Computes the sum of tensor elements over the given dimension.
Args:
x (Tensor): An N-D Tensor, the data type is bool, float16, float32, float64, int32 or int64.
axis (int|list|tuple, optional): The dimensions along which the sum is performed. If
:attr:`None`, sum all elements of :attr:`x` and return a
Tensor with a single element, otherwise must be in the
range :math:`[-rank(x), rank(x))`. If :math:`axis[i] < 0`,
the dimension to reduce is :math:`rank + axis[i]`.
dtype (str, optional): The dtype of output Tensor. The default value is None, the dtype
of output is the same as input Tensor `x`.
keepdim (bool, optional): Whether to reserve the reduced dimension in the
output Tensor. The result Tensor will have one fewer dimension
than the :attr:`x` unless :attr:`keepdim` is true, default
value is False.
name (str, optional): The default value is None. Normally there is no need for
user to set this property. For more information, please refer to :ref:`api_guide_Name`
Returns:
Tensor: Results of summation operation on the specified axis of input Tensor `x`,
if `x.dtype='bool'` or `x.dtype='int32'`, its data type is `'int64'`,
otherwise its data type is the same as `x`.
Raises:
TypeError: The type of :attr:`axis` must be int, list or tuple.
Examples:
.. code-block:: python
import paddle
# x is a Tensor with following elements:
# [[0.2, 0.3, 0.5, 0.9]
# [0.1, 0.2, 0.6, 0.7]]
# Each example is followed by the corresponding output tensor.
x = paddle.to_tensor([[0.2, 0.3, 0.5, 0.9],
[0.1, 0.2, 0.6, 0.7]])
out1 = paddle.sum(x) # [3.5]
out2 = paddle.sum(x, axis=0) # [0.3, 0.5, 1.1, 1.6]
out3 = paddle.sum(x, axis=-1) # [1.9, 1.6]
out4 = paddle.sum(x, axis=1, keepdim=True) # [[1.9], [1.6]]
# y is a Tensor with shape [2, 2, 2] and elements as below:
# [[[1, 2], [3, 4]],
# [[5, 6], [7, 8]]]
# Each example is followed by the corresponding output tensor.
y = paddle.to_tensor([[[1, 2], [3, 4]],
[[5, 6], [7, 8]]])
out5 = paddle.sum(y, axis=[1, 2]) # [10, 26]
out6 = paddle.sum(y, axis=[0, 1]) # [16, 20]
# x is a Tensor with following elements:
# [[True, True, True, True]
# [False, False, False, False]]
# Each example is followed by the corresponding output tensor.
x = paddle.to_tensor([[True, True, True, True],
[False, False, False, False]])
out7 = paddle.sum(x) # [4]
out8 = paddle.sum(x, axis=0) # [1, 1, 1, 1]
out9 = paddle.sum(x, axis=1) # [4, 0]
"""
if axis is not None and not isinstance(axis, (list, tuple)):
axis = [axis]
if not axis:
reduce_all_flag = True
else:
if len(axis) == len(x.shape):
reduce_all_flag = True
else:
reduce_all_flag = False
def get_dtype(x, dtype):
if dtype is not None:
return (True, dtype)
src_type = convert_dtype(x.dtype)
if src_type in ['bool', 'int32', 'int64']:
return (True, 'int64')
return (False, src_type)
dtype_flag, dtype = get_dtype(x, dtype)
if in_dygraph_mode():
axis = axis if axis is not None and axis != [] else [0]
if dtype_flag:
return _C_ops.reduce_sum(x, 'dim', axis, 'keep_dim', keepdim,
'reduce_all', reduce_all_flag, 'in_dtype',
x.dtype, 'out_dtype',
convert_np_dtype_to_dtype_(dtype))
else:
return _C_ops.reduce_sum(x, 'dim', axis, 'keep_dim', keepdim,
'reduce_all', reduce_all_flag)
attrs = {
'dim': axis if axis is not None and axis != [] and axis != () else [0],
'keep_dim': keepdim,
'reduce_all': reduce_all_flag
}
if dtype_flag:
attrs.update({
'in_dtype': x.dtype,
'out_dtype': convert_np_dtype_to_dtype_(dtype)
})
check_variable_and_dtype(
x, 'x', ['bool', 'float16', 'float32', 'float64',
'int32', 'int64', 'complex64', 'complex128',
u'bool', u'float16', u'float32', u'float64',
u'int32', u'int64', u'complex64', u'complex128'], 'sum')
check_type(axis, 'axis', (int, list, tuple, type(None)), 'sum')
helper = LayerHelper('sum', **locals())
if dtype_flag:
out = helper.create_variable_for_type_inference(
dtype=convert_np_dtype_to_dtype_(dtype))
else:
out = helper.create_variable_for_type_inference(dtype=x.dtype)
helper.append_op(
type='reduce_sum',
inputs={'X': x},
outputs={'Out': out},
attrs=attrs)
return out
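# Dtype promotion implemented by get_dtype above: bool/int32/int64 inputs are
# summed as int64 unless an explicit dtype is passed, in which case the
# explicit dtype wins; float inputs keep their type. E.g. (the first case
# matches the docstring above, the second is an illustrative assumption):
#
#   x = paddle.to_tensor([[True, True], [False, False]])
#   paddle.sum(x).dtype                   # paddle.int64, promoted from bool
#   paddle.sum(x, dtype='float32').dtype  # paddle.float32, explicit override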
@templatedoc(op_type="sum")
def add_n(inputs, name=None):
"""
This OP is used to sum one or more Tensor of the input.
For example:
.. code-block:: text
Case 1:
Input:
input.shape = [2, 3]
input = [[1, 2, 3],
[4, 5, 6]]
Output:
output.shape = [2, 3]
output = [[1, 2, 3],
[4, 5, 6]]
Case 2:
Input:
First input:
input1.shape = [2, 3]
Input1 = [[1, 2, 3],
[4, 5, 6]]
The second input:
input2.shape = [2, 3]
input2 = [[7, 8, 9],
[10, 11, 12]]
Output:
output.shape = [2, 3]
output = [[8, 10, 12],
[14, 16, 18]]
Args:
inputs (Tensor|list[Tensor]|tuple[Tensor]): A Tensor or a list/tuple of Tensors. The shape and data type of the list/tuple elements should be consistent.
Input can be multi-dimensional Tensor, and data types can be: float32, float64, int32, int64.
name(str, optional): The default value is None. Normally there is no need for
user to set this property. For more information, please refer to :ref:`api_guide_Name`
Returns:
Tensor, the sum of input :math:`inputs` , its shape and data types are consistent with :math:`inputs`.
Examples:
.. code-block:: python
import paddle
input0 = paddle.to_tensor([[1, 2, 3], [4, 5, 6]], dtype='float32')
input1 = paddle.to_tensor([[7, 8, 9], [10, 11, 12]], dtype='float32')
output = paddle.add_n([input0, input1])
# [[8., 10., 12.],
# [14., 16., 18.]]
"""
if in_dygraph_mode():
if isinstance(inputs, Variable):
inputs = [inputs]
return _C_ops.sum(inputs, 'use_mkldnn', False)
helper = LayerHelper('add_n', **locals())
check_type(inputs, 'inputs', (Variable, tuple, list), 'add_n')
if isinstance(inputs, list) or isinstance(inputs, tuple):
if len(inputs) > 0:
for input in inputs:
check_variable_and_dtype(input, "inputs", \
['float32', 'float64', 'int32', 'int64'], 'add_n')
else:
check_variable_and_dtype(inputs, "inputs", \
['float32', 'float64', 'int32', 'int64'], 'add_n')
out = helper.create_variable_for_type_inference(
dtype=helper.input_dtype('inputs'))
helper.append_op(
type='sum',
inputs={'X': inputs},
outputs={'Out': out},
attrs={'use_mkldnn': False})
return out
def trunc(input, name=None):
'''
This API returns a new tensor with the truncated integer values of the input.
Args:
input (Tensor): The input tensor, its data type should be int32, int64, float32, float64.
name (str, optional): Name for the operation (optional, default is None). For more information, please refer to :ref:`api_guide_Name`.
Returns:
Tensor: The output Tensor of trunc.
Examples:
.. code-block:: python
import paddle
input = paddle.rand([2,2],'float32')
print(input)
# Tensor(shape=[2, 2], dtype=float32, place=CUDAPlace(0), stop_gradient=True,
# [[0.02331470, 0.42374918],
# [0.79647720, 0.74970269]])
output = paddle.trunc(input)
print(output)
# Tensor(shape=[2, 2], dtype=float32, place=CUDAPlace(0), stop_gradient=True,
# [[0., 0.],
# [0., 0.]])
'''
if in_dygraph_mode():
return _C_ops.trunc(input)
else:
inputs = {"X": input}
attrs = {}
helper = LayerHelper("trunc", **locals())
check_variable_and_dtype(input, 'X', ['int32', 'int64', 'float32', 'float64'], 'trunc')
out = helper.create_variable_for_type_inference(dtype=input.dtype)
helper.append_op(
type="trunc", inputs=inputs, attrs=attrs, outputs={"Out": out})
return out
def mm(input, mat2, name=None):
"""
Applies matrix multiplication to two tensors.
Currently, the input tensors may have any rank, but when the rank of either
input is larger than 3, the two inputs must have the same rank.
Also note that if the raw tensor :math:`input` or :math:`mat2` is rank-1 and
nontransposed, the prepended or appended dimension :math:`1` will be
removed after matrix multiplication.
Args:
input (Tensor): The first input Tensor for the matrix multiplication.
mat2 (Tensor): The second input Tensor for the matrix multiplication.
name(str, optional): The default value is None. Normally there is no need for
user to set this property. For more information, please refer to :ref:`api_guide_Name`
Returns:
Tensor: The product Tensor.
Examples:
.. code-block:: python
import paddle
input = paddle.arange(1, 7).reshape((3, 2)).astype('float32')
mat2 = paddle.arange(1, 9).reshape((2, 4)).astype('float32')
out = paddle.mm(input, mat2)
print(out)
# [[11., 14., 17., 20.],
# [23., 30., 37., 44.],
# [35., 46., 57., 68.]]
"""
if in_dygraph_mode():
return _C_ops.matmul_v2(input, mat2)
def __check_input(x, y):
var_names = {'x': x, 'y': y}
for name, val in var_names.items():
check_variable_and_dtype(val, name,
['float16', 'float32', 'float64'], 'mm')
x_shape = list(x.shape)
y_shape = list(y.shape)
if len(x_shape) == 1:
x_shape = [1] + x_shape
if len(y_shape) == 1:
y_shape = y_shape + [1]
# check the inner 2 dimensions
if x_shape[-1] != y_shape[-2]:
if not ((x_shape[-1] == -1) or (y_shape[-2] == -1)):
raise ValueError(
"After performing an optional transpose, Input X's width should be "
"equal to Y's width for multiplication "
"prerequisites. But received X's shape: %s, Y's shape: %s\n"
% (x_shape, y_shape))
if len(y_shape) > 2 and len(x_shape) > 2:
for i, dim_x in enumerate(x_shape[:-2]):
# don't check neg shape
if dim_x < 0 or y_shape[i] < 0:
continue
if dim_x != y_shape[i]:
raise ValueError(
"When the matrix is larger than 2 dimensions, the higher "
"dimensional values of the two matrices need to be equal. "
"But received x_shape[%d] != y_shape[%d]. X's shape: %s, "
"Y's shape: %s.\n" % (i, i, x_shape, y_shape))
__check_input(input, mat2)
helper = LayerHelper('mm', **locals())
out = helper.create_variable_for_type_inference(dtype=input.dtype)
helper.append_op(
type='matmul_v2', inputs={'X': input,
'Y': mat2}, outputs={'Out': out})
return out
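# Shape-check intuition for __check_input above: after padding rank-1 inputs
# to 2-D, X's last dim must equal Y's second-to-last dim, and for >2-D inputs
# every leading (batch) dim must match (negative/unknown dims are skipped).
#
#   x = paddle.rand([2, 3, 4]); y = paddle.rand([2, 4, 5])
#   paddle.mm(x, y).shape   # [2, 3, 5]
#   # paddle.mm(paddle.rand([2, 3, 4]), paddle.rand([3, 4, 5])) -> ValueError,
#   # batch dims 2 != 3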
def addmm(input, x, y, beta=1.0, alpha=1.0, name=None):
"""
**addmm**
This operator is used to perform matrix multiplication for input $x$ and $y$.
$input$ is added to the final result.
The equation is:
.. math::
Out = alpha * x * y + beta * input
$Input$, $x$ and $y$ can carry the LoD (Level of Details) information, or not. But the output only shares the LoD information with input $input$.
Args:
input (Tensor): The input Tensor to be added to the final result.
x (Tensor): The first input Tensor for matrix multiplication.
y (Tensor): The second input Tensor for matrix multiplication.
beta (float): Coefficient of $input$.
alpha (float): Coefficient of $x*y$.
name (str, optional): Name of the output. Normally there is no need for user to set this property. For more information, please refer to :ref:`api_guide_Name`. Default is None.
Returns:
Tensor: The output Tensor of addmm op.
Examples:
.. code-block:: python
import paddle
x = paddle.ones([2,2])
y = paddle.ones([2,2])
input = paddle.ones([2,2])
out = paddle.addmm( input=input, x=x, y=y, beta=0.5, alpha=5.0 )
print(out)
# [[10.5 10.5]
# [10.5 10.5]]
"""
input_shape = input.shape
x_shape = x.shape
y_shape = y.shape
if not len(input_shape) == len(x_shape) == len(y_shape) == 2:
raise ValueError("The dimention of input, x, y should be 2 but receive input's shape: {}, x's shape: {}, y's shape: {}".format(input_shape, x_shape, y_shape))
if input_shape[0] != x_shape[0]:
if input_shape[0] != 1:
raise ValueError( "When x's dimension[0] is not equal with input's dimension[0], input's dimension[0] must be 1 but got {}".format(input_shape[0]))
if input_shape[1] != y_shape[1] and input_shape[1] != 1:
raise ValueError( "When y's dimension[1] is not equal with input's dimension[1], input's dimension[1] must be 1 but got {}".format(input_shape[1]))
if input_shape[1] != y_shape[1]:
if input_shape[1] != 1:
raise ValueError( "When y's dimension[1] is not equal with input's dimension[1], input's dimension[1] must be 1 but got {}".format(input_shape[1]))
if input_shape[0] != x_shape[0] and input_shape[0] != 1:
raise ValueError( "When x's dimension[0] is not equal with input's dimension[0], input's dimension[0] must be 1 but got {}".format(input_shape[0]))
if x_shape[1] != y_shape[0]:
raise ValueError("The input Variable x's width must be equal with Variable y' height. But received x's shape = {}, y's shape = {}.".format(x_shape, y_shape))
if in_dygraph_mode():
out = _C_ops.addmm(input, x, y, "Alpha", alpha, "Beta", beta)
return out
inputs = {'Input': input, "X": x, "Y": y}
attrs = {'Alpha': alpha, 'Beta': beta}
helper = LayerHelper("addmm", **locals())
check_variable_and_dtype(input, 'Input', ['float32', 'float64'], 'addmm')
check_variable_and_dtype(x, 'X', ['float32', 'float64'], 'addmm')
check_variable_and_dtype(y, 'Y', ['float32', 'float64'], 'addmm')
out = helper.create_variable_for_type_inference(dtype=x.dtype)
helper.append_op(
type="addmm", inputs=inputs, attrs=attrs, outputs={"Out": out})
return out
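# Worked instance of Out = alpha * x * y + beta * input from the docstring
# above: with x = y = input = ones([2, 2]), each (x * y) entry is 2, so with
# alpha=5.0 and beta=0.5 every output entry is 5.0 * 2 + 0.5 * 1 = 10.5,
# matching the example output.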
def logsumexp(x, axis=None, keepdim=False, name=None):
r"""
This OP calculates the log of the sum of exponentials of ``x`` along ``axis`` .
.. math::
logsumexp(x) = \\log \\sum \\exp(x)
Args:
x (Tensor): The input Tensor with data type float32 or float64, which
has no more than 4 dimensions.
axis (int|list|tuple, optional): The axis along which to perform
logsumexp calculations. ``axis`` should be int, list(int) or
tuple(int). If ``axis`` is a list/tuple of dimension(s), logsumexp
is calculated along all element(s) of ``axis`` . ``axis`` or
element(s) of ``axis`` should be in range [-D, D), where D is the
dimensions of ``x`` . If ``axis`` or element(s) of ``axis`` is
less than 0, it works the same way as :math:`axis + D` . If
``axis`` is None, logsumexp is calculated along all elements of
``x``. Default is None.
keepdim (bool, optional): Whether to reserve the reduced dimension(s)
in the output Tensor. If ``keepdim`` is True, the dimensions of
the output Tensor is the same as ``x`` except in the reduced
dimensions(it is of size 1 in this case). Otherwise, the shape of
the output Tensor is squeezed in ``axis`` . Default is False.
name (str, optional): Name for the operation (optional, default is None).
For more information, please refer to :ref:`api_guide_Name`.
Returns:
Tensor, results of logsumexp along ``axis`` of ``x``, with the same data
type as ``x``.
Examples:
.. code-block:: python
import paddle
x = paddle.to_tensor([[-1.5, 0., 2.], [3., 1.2, -2.4]])
out1 = paddle.logsumexp(x) # [3.4691226]
out2 = paddle.logsumexp(x, 1) # [2.15317821, 3.15684602]
"""
if isinstance(axis, int):
axis = [axis]
reduce_all = True if axis is None \
or len(axis) == 0 \
or len(axis) == len(x.shape) else False
if axis is None or len(axis) == 0:
axis = [0]
if in_dygraph_mode():
return _C_ops.logsumexp(x, 'axis', axis, 'keepdim', keepdim, 'reduce_all', reduce_all)
check_variable_and_dtype(x, 'x',
['float32', 'float64'],
'logsumexp')
helper = LayerHelper('logsumexp', **locals())
attrs = {'axis': axis, 'keepdim': keepdim, 'reduce_all': reduce_all}
out = helper.create_variable_for_type_inference(x.dtype)
helper.append_op(
type='logsumexp', inputs={'X': x}, outputs={'Out': out}, attrs=attrs)
return out
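# Numerical note: logsumexp(x) equals m + log(sum(exp(x - m))) for m = max(x),
# the standard overflow-safe formulation. A pure-paddle reference sketch
# (illustrative only; not necessarily how the fused op above is implemented):
#
#   def logsumexp_ref(x):  # hypothetical helper, reduces over all elements
#       m = paddle.max(x)
#       return m + paddle.log(paddle.sum(paddle.exp(x - m)))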
def inverse(x, name=None):
"""
Takes the inverse of a square matrix. A square matrix is a matrix with
the same number of rows and columns. The input can be a square matrix
(2-D Tensor) or batches of square matrices.
Args:
x (Tensor): The input tensor. The last two
dimensions should be equal. When the number of dimensions is
greater than 2, it is treated as batches of square matrix. The data
type can be float32 and float64.
name (str, optional): The default value is None. Normally there is no need for
user to set this property. For more information,
please refer to :ref:`api_guide_Name`
Returns:
Tensor: A Tensor holding the inverse of x. The shape and data type
are the same as x.
Examples:
.. code-block:: python
import paddle
mat = paddle.to_tensor([[2, 0], [0, 2]], dtype='float32')
inv = paddle.inverse(mat)
print(inv) # [[0.5, 0], [0, 0.5]]
"""
if in_dygraph_mode():
return _C_ops.inverse(x)
def _check_input(x):
check_variable_and_dtype(x, 'x',
['float32', 'float64'], 'inverse')
if len(x.shape) < 2:
raise ValueError(
"The input of inverse is expected to be a Tensor whose number "
"of dimensions is no less than 2. But reviced: %d, "
"x's shape: %s." % (len(x.shape), x.shape))
_check_input(x)
helper = LayerHelper('inverse', **locals())
out = helper.create_variable_for_type_inference(dtype=x.dtype)
helper.append_op(
type='inverse', inputs={'Input': [x] }, outputs={'Output': [out]})
return out
def max(x, axis=None, keepdim=False, name=None):
"""
Computes the maximum of tensor elements over the given axis.
Args:
x(Tensor): A tensor, the data type is float32,
float64, int32, int64.
axis(int|list|tuple, optional): The axis along which the maximum is computed.
If :attr:`None`, compute the maximum over all elements of
`x` and return a Tensor with a single element,
otherwise must be in the range :math:`[-x.ndim, x.ndim)`.
If :math:`axis[i] < 0`, the axis to reduce is :math:`x.ndim + axis[i]`.
keepdim(bool, optional): Whether to reserve the reduced dimension in the
output Tensor. The result tensor will have one fewer dimension
than the `x` unless :attr:`keepdim` is true, default
value is False.
name(str, optional): The default value is None. Normally there is no need for
user to set this property. For more information, please refer to :ref:`api_guide_Name`
Returns:
Tensor, results of maximum on the specified axis of input tensor,
its data type is the same as `x`.
Examples:
.. code-block:: python
import paddle
# data_x is a Tensor with shape [2, 4]
# the axis is a int element
x = paddle.to_tensor([[0.2, 0.3, 0.5, 0.9],
[0.1, 0.2, 0.6, 0.7]])
result1 = paddle.max(x)
print(result1)
#[0.9]
result2 = paddle.max(x, axis=0)
print(result2)
#[0.2 0.3 0.6 0.9]
result3 = paddle.max(x, axis=-1)
print(result3)
#[0.9 0.7]
result4 = paddle.max(x, axis=1, keepdim=True)
print(result4)
#[[0.9]
# [0.7]]
# data_y is a Tensor with shape [2, 2, 2]
# the axis is list
y = paddle.to_tensor([[[1.0, 2.0], [3.0, 4.0]],
[[5.0, 6.0], [7.0, 8.0]]])
result5 = paddle.max(y, axis=[1, 2])
print(result5)
#[4. 8.]
result6 = paddle.max(y, axis=[0, 1])
print(result6)
#[7. 8.]
"""
if axis is not None and not isinstance(axis, list):
if isinstance(axis, tuple):
axis = list(axis)
elif isinstance(axis, int):
axis = [axis]
else:
raise TypeError(
"The type of axis must be int, list or tuple, but received {}".format(type(axis)))
reduce_all = True if axis is None or axis == [] else False
axis = axis if axis is not None and axis != [] else [0]
if in_dygraph_mode():
return _C_ops.reduce_max(x, 'dim', axis, 'keep_dim', keepdim,
'reduce_all', reduce_all)
helper = LayerHelper('max', **locals())
check_variable_and_dtype(
x, 'x', ['float32', 'float64', 'int32', 'int64'], 'max')
out = helper.create_variable_for_type_inference(
dtype=x.dtype)
helper.append_op(
type='reduce_max',
inputs={'X': x},
outputs={'Out': out},
attrs={
'dim': axis,
'keep_dim': keepdim,
'reduce_all': reduce_all
})
return out
def min(x, axis=None, keepdim=False, name=None):
"""
Computes the minimum of tensor elements over the given axis.
Args:
x(Tensor): A tensor, the data type is float32, float64, int32, int64.
axis(int|list|tuple, optional): The axis along which the minimum is computed.
If :attr:`None`, compute the minimum over all elements of
`x` and return a Tensor with a single element,
otherwise must be in the range :math:`[-x.ndim, x.ndim)`.
If :math:`axis[i] < 0`, the axis to reduce is :math:`x.ndim + axis[i]`.
keepdim(bool, optional): Whether to reserve the reduced dimension in the
output Tensor. The result tensor will have one fewer dimension
than the `x` unless :attr:`keepdim` is true, default
value is False.
name(str, optional): The default value is None. Normally there is no need for
user to set this property. For more information, please refer to :ref:`api_guide_Name`
Returns:
Tensor, results of minimum on the specified axis of input tensor,
its data type is the same as the input Tensor's.
Examples:
.. code-block:: python
import paddle
# x is a tensor with shape [2, 4]
# the axis is a int element
x = paddle.to_tensor([[0.2, 0.3, 0.5, 0.9],
[0.1, 0.2, 0.6, 0.7]])
result1 = paddle.min(x)
print(result1)
#[0.1]
result2 = paddle.min(x, axis=0)
print(result2)
#[0.1 0.2 0.5 0.7]
result3 = paddle.min(x, axis=-1)
print(result3)
#[0.2 0.1]
result4 = paddle.min(x, axis=1, keepdim=True)
print(result4)
#[[0.2]
# [0.1]]
# y is a Tensor with shape [2, 2, 2]
# the axis is list
y = paddle.to_tensor([[[1.0, 2.0], [3.0, 4.0]],
[[5.0, 6.0], [7.0, 8.0]]])
result5 = paddle.min(y, axis=[1, 2])
print(result5)
#[1. 5.]
result6 = paddle.min(y, axis=[0, 1])
print(result6)
#[1. 2.]
"""
if axis is not None and not isinstance(axis, list):
if isinstance(axis, tuple):
axis = list(axis)
elif isinstance(axis, int):
axis = [axis]
else:
raise TypeError(
"The type of axis must be int, list or tuple, but received {}".format(type(axis)))
reduce_all = True if axis is None or axis == [] else False
axis = axis if axis is not None and axis != [] else [0]
if in_dygraph_mode():
return _C_ops.reduce_min(x, 'dim', axis, 'keep_dim', keepdim,
'reduce_all', reduce_all)
helper = LayerHelper('min', **locals())
check_variable_and_dtype(
x, 'x', ['float32', 'float64', 'int32', 'int64'], 'min')
out = helper.create_variable_for_type_inference(
dtype=x.dtype)
helper.append_op(
type='reduce_min',
inputs={'X': x},
outputs={'Out': out},
attrs={
'dim': axis,
'keep_dim': keepdim,
'reduce_all': reduce_all
})
return out
def log1p(x, name=None):
r"""
Calculates the natural log of one plus the given input tensor, element-wise.
.. math::
Out = \\ln(x+1)
Args:
x (Tensor): Input Tensor. Must be one of the following types: float32, float64.
name(str, optional): The default value is None. Normally there is no need for
user to set this property. For more information, please refer to :ref:`api_guide_Name`
Returns:
Tensor, the natural log of the input Tensor computed element-wise.
Examples:
.. code-block:: python
import paddle
data = paddle.to_tensor([[0], [1]], dtype='float32')
res = paddle.log1p(data)
# [[0.], [0.6931472]]
"""
if in_dygraph_mode():
return _C_ops.log1p(x)
check_variable_and_dtype(x, 'x', ['float32', 'float64'], "log1p")
inputs = {'X': [x]}
helper = LayerHelper('log1p', **locals())
dtype = helper.input_dtype(input_param_name='x')
out = helper.create_variable_for_type_inference(dtype)
helper.append_op(type="log1p", inputs={"X": x}, outputs={"Out": out})
return out
def log2(x, name=None):
r"""
Calculates the log to the base 2 of the given input tensor, element-wise.
.. math::
Out = \\log_2 x
Args:
x (Tensor): Input tensor must be one of the following types: float32, float64.
name (str|None): The default value is None. Normally there is no need for user to set this property. For more information, please refer to :ref:`api_guide_Name`
Returns:
Tensor: The log to the base 2 of the input Tensor computed element-wise.
Examples:
.. code-block:: python
import paddle
# example 1: x is a float
x_i = paddle.to_tensor([[1.0], [2.0]])
res = paddle.log2(x_i) # [[0.], [1.0]]
# example 2: x is float32
x_i = paddle.full(shape=[1], fill_value=2, dtype='float32')
res = paddle.log2(x_i)
print(res) # [1.0]
# example 3: x is float64
x_i = paddle.full(shape=[1], fill_value=2, dtype='float64')
res = paddle.log2(x_i)
print(res) # [1.0]
"""
if in_dygraph_mode():
return _C_ops.log2(x)
check_variable_and_dtype(x, 'x', ['float16', 'float32', 'float64'], "log2")
inputs = {'X': [x]}
helper = LayerHelper('log2', **locals())
dtype = helper.input_dtype(input_param_name='x')
out = helper.create_variable_for_type_inference(dtype)
helper.append_op(type="log2", inputs={"X": x}, outputs={"Out": out})
return out
def log10(x, name=None):
r"""
Calculates the log to the base 10 of the given input tensor, element-wise.
.. math::
Out = \\log_{10} x
Args:
x (Tensor): Input tensor must be one of the following types: float32, float64.
name (str|None): The default value is None. Normally there is no need for user to set this property. For more information, please refer to :ref:`api_guide_Name`
Returns:
Tensor: The log to the base 10 of the input Tensor computed element-wise.
Examples:
.. code-block:: python
import paddle
# example 1: x is a float
x_i = paddle.to_tensor([[1.0], [10.0]])
res = paddle.log10(x_i) # [[0.], [1.0]]
# example 2: x is float32
x_i = paddle.full(shape=[1], fill_value=10, dtype='float32')
res = paddle.log10(x_i)
print(res) # [1.0]
# example 3: x is float64
x_i = paddle.full(shape=[1], fill_value=10, dtype='float64')
res = paddle.log10(x_i)
print(res) # [1.0]
"""
if in_dygraph_mode():
return _C_ops.log10(x)
check_variable_and_dtype(x, 'x', ['float16', 'float32', 'float64'], "log10")
inputs = {'X': [x]}
helper = LayerHelper('log10', **locals())
dtype = helper.input_dtype(input_param_name='x')
out = helper.create_variable_for_type_inference(dtype)
helper.append_op(type="log10", inputs={"X": x}, outputs={"Out": out})
return out
def clip(x, min=None, max=None, name=None):
"""
This operator clips all elements of the input into the range [min, max] and returns
a resulting tensor as the following equation:
.. math::
Out = MIN(MAX(x, min), max)
Args:
x (Tensor): An N-D Tensor with data type float32, float64, int32 or int64.
min (float|int|Tensor): The lower bound with type ``float`` , ``int`` or a ``Tensor``
with shape [1] and type ``int32``, ``float32``, ``float64``.
max (float|int|Tensor): The upper bound with type ``float``, ``int`` or a ``Tensor``
with shape [1] and type ``int32``, ``float32``, ``float64``.
name (str, optional): The default value is None. Normally there is no
need for user to set this property. For more information, please
refer to :ref:`api_guide_Name`.
Returns:
Tensor: A Tensor with the same data type and data shape as input.
Examples:
.. code-block:: python
import paddle
x1 = paddle.to_tensor([[1.2, 3.5], [4.5, 6.4]], 'float32')
out1 = paddle.clip(x1, min=3.5, max=5.0)
out2 = paddle.clip(x1, min=2.5)
print(out1)
# [[3.5, 3.5]
# [4.5, 5.0]]
print(out2)
# [[2.5, 3.5]
# [4.5, 6.4]]
"""
x_dtype = str(x.dtype)
if x_dtype == 'paddle.int32':
min_ = np.iinfo(np.int32).min
max_ = np.iinfo(np.int32).max - 2**7
elif x_dtype == 'paddle.int64':
min_ = np.iinfo(np.int64).min
max_ = np.iinfo(np.int64).max - 2**39
else:
min_ = float(np.finfo(np.float32).min)
max_ = float(np.finfo(np.float32).max)
if in_dygraph_mode():
if isinstance(min, Variable):
min = min.numpy().item(0)
if isinstance(max, Variable):
max = max.numpy().item(0)
min = min_ if min is None else min
max = max_ if max is None else max
return _C_ops.clip(x, "min", min, "max", max)
if min is not None:
check_type(min, 'min', (float, int, Variable), 'clip')
if isinstance(min, Variable):
check_dtype(min.dtype, 'min', ['float32', 'float64', 'int32'],
'clip', '(When the type of min in clip is Variable.)')
if max is not None:
check_type(max, 'max', (float, int, Variable), 'clip')
if isinstance(max, Variable):
check_dtype(max.dtype, 'max', ['float32', 'float64', 'int32'],
'clip', '(When the type of max in clip is Variable.)')
check_variable_and_dtype(x, 'x', ['float32', 'float64', 'int32', 'int64'], 'clip')
inputs = {'X': x}
attrs = {'min': min_, 'max': max_}
if isinstance(min, Variable):
min.stop_gradient = True
inputs['Min'] = min
elif min is not None:
attrs['min'] = min
if isinstance(max, Variable):
max.stop_gradient = True
inputs['Max'] = max
elif max is not None:
attrs['max'] = max
helper = LayerHelper('clip', **locals())
output = helper.create_variable_for_type_inference(
dtype=helper.input_dtype('x'))
helper.append_op(
type='clip', inputs=inputs, outputs={'Out': [output]}, attrs=attrs)
return output
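# Behavior summary for clip() above: Variable bounds are wired in as
# 'Min'/'Max' inputs (with stop_gradient=True), Python scalars travel as
# attrs, and a missing bound falls back to the dtype-dependent min_/max_
# sentinels computed at the top of the function.
#
#   out = paddle.clip(paddle.to_tensor([1.2, 3.5, 6.4]), min=2.5)
#   # -> [2.5, 3.5, 6.4]; max defaults to the float32 sentinel, so no upper clip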
@inplace_apis_in_dygraph_only
def clip_(x, min=None, max=None, name=None):
"""
Inplace version of ``clip`` API, the output Tensor will be inplaced with input ``x``.
Please refer to :ref:`api_tensor_clip`.
"""
fmin = float(np.finfo(np.float32).min)
fmax = float(np.finfo(np.float32).max)
if isinstance(min, Variable):
min = min.numpy().item(0)
if isinstance(max, Variable):
max = max.numpy().item(0)
min = fmin if min is None else min
max = fmax if max is None else max
return _C_ops.clip_(x, "min", min, "max", max)
def trace(x, offset=0, axis1=0, axis2=1, name=None):
"""
**trace**
This OP computes the sum along diagonals of the input tensor x.
If ``x`` is 2D, returns the sum of diagonal.
If ``x`` has larger dimensions, then returns an tensor of diagonals sum, diagonals be taken from
the 2D planes specified by axis1 and axis2. By default, the 2D planes formed by the first and second axes
of the input tensor x.
The argument ``offset`` determines where diagonals are taken from input tensor x:
- If offset = 0, it is the main diagonal.
- If offset > 0, it is above the main diagonal.
- If offset < 0, it is below the main diagonal.
- Note that if offset is out of input's shape indicated by axis1 and axis2, 0 will be returned.
Args:
x(Tensor): The input tensor x. Must be at least 2-dimensional. The input data type should be float32, float64, int32, int64.
offset(int, optional): Which diagonals in input tensor x will be taken. Default: 0 (main diagonals).
axis1(int, optional): The first axis with respect to which the diagonal is taken. Default: 0.
axis2(int, optional): The second axis with respect to which the diagonal is taken. Default: 1.
name (str, optional): Normally there is no need for user to set this property. For more information, please refer to :ref:`api_guide_Name`. Default: None.
Returns:
Tensor: the output data type is the same as input data type.
Examples:
.. code-block:: python
import paddle
case1 = paddle.randn([2, 3])
case2 = paddle.randn([3, 10, 10])
case3 = paddle.randn([3, 10, 5, 10])
data1 = paddle.trace(case1) # data1.shape = [1]
data2 = paddle.trace(case2, offset=1, axis1=1, axis2=2) # data2.shape = [3]
data3 = paddle.trace(case3, offset=-3, axis1=1, axis2=-1) # data3.shape = [3, 5]
"""
def __check_input(input, offset, dim1, dim2):
check_dtype(x.dtype, 'Input',
['int32', 'int64', 'float16', 'float32', 'float64'],
'trace')
input_shape = list(x.shape)
assert len(input_shape) >= 2, \
"The x must be at least 2-dimensional, " \
"But received Input x's dimensional: %s.\n" % \
len(input_shape)
axis1_ = axis1 if axis1 >= 0 else len(input_shape) + axis1
axis2_ = axis2 if axis2 >= 0 else len(input_shape) + axis2
assert ((0 <= axis1_) and (axis1_ < len(input_shape))), \
"The argument axis1 is out of range (expected to be in range of [%d, %d], but got %d).\n" \
% (-(len(input_shape)), len(input_shape) - 1, axis1)
assert ((0 <= axis2_) and (axis2_ < len(input_shape))), \
"The argument axis2 is out of range (expected to be in range of [%d, %d], but got %d).\n" \
% (-(len(input_shape)), len(input_shape) - 1, axis2)
assert axis1_ != axis2_, \
"axis1 and axis2 cannot be the same axis." \
"But received axis1 = %d, axis2 = %d\n"%(axis1, axis2)
__check_input(input, offset, axis1, axis2)
if in_dygraph_mode():
return _C_ops.trace(x, 'offset', offset, 'axis1', axis1, 'axis2', axis2)
inputs = {'Input': [x]}
attrs = {'offset': offset, 'axis1': axis1, 'axis2': axis2}
helper = LayerHelper('trace', **locals())
out = helper.create_variable_for_type_inference(dtype=x.dtype)
helper.append_op(
type='trace',
inputs={'Input': [x]},
attrs={'offset': offset,
'axis1': axis1,
'axis2': axis2},
outputs={'Out': [out]})
return out
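# Offset semantics for trace() above, on a small 2-D example:
#
#   m = paddle.to_tensor([[1., 2.], [3., 4.]])
#   paddle.trace(m)             # [5.]  main diagonal: 1 + 4
#   paddle.trace(m, offset=1)   # [2.]  one above the main diagonal
#   paddle.trace(m, offset=-1)  # [3.]  one below the main diagonal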
def diagonal(x, offset=0, axis1=0, axis2=1, name=None):
"""
This OP computes the diagonals of the input tensor x.
If ``x`` is 2D, returns the diagonal.
If ``x`` has larger dimensions, the diagonals are taken from the 2D planes specified by axis1 and axis2.
By default, the 2D planes are formed by the first and second axes of the input tensor x.
The argument ``offset`` determines where diagonals are taken from input tensor x:
- If offset = 0, it is the main diagonal.
- If offset > 0, it is above the main diagonal.
- If offset < 0, it is below the main diagonal.
Args:
x(Tensor): The input tensor x. Must be at least 2-dimensional. The input data type should be bool, int32, int64, float16, float32, float64.
offset(int, optional): Which diagonals in input tensor x will be taken. Default: 0 (main diagonals).
axis1(int, optional): The first axis with respect to which the diagonal is taken. Default: 0.
axis2(int, optional): The second axis with respect to which the diagonal is taken. Default: 1.
name (str, optional): Normally there is no need for user to set this property. For more information, please refer to :ref:`api_guide_Name`. Default: None.
Returns:
Tensor: a partial view of the input tensor in the specified two dimensions; the output data type is the same as the input data type.
Examples:
.. code-block:: python
import paddle
x = paddle.rand([2,2,3],'float32')
print(x)
# Tensor(shape=[2, 2, 3], dtype=float32, place=CUDAPlace(0), stop_gradient=True,
# [[[0.45661032, 0.03751532, 0.90191704],
# [0.43760979, 0.86177313, 0.65221709]],
# [[0.17020577, 0.00259554, 0.28954273],
# [0.51795638, 0.27325270, 0.18117726]]])
out1 = paddle.diagonal(x)
print(out1)
#Tensor(shape=[3, 2], dtype=float32, place=CUDAPlace(0), stop_gradient=True,
# [[0.45661032, 0.51795638],
# [0.03751532, 0.27325270],
# [0.90191704, 0.18117726]])
out2 = paddle.diagonal(x, offset=0, axis1=2, axis2=1)
print(out2)
#Tensor(shape=[2, 2], dtype=float32, place=CUDAPlace(0), stop_gradient=True,
# [[0.45661032, 0.86177313],
# [0.17020577, 0.27325270]])
out3 = paddle.diagonal(x, offset=1, axis1=0, axis2=1)
print(out3)
#Tensor(shape=[3, 1], dtype=float32, place=CUDAPlace(0), stop_gradient=True,
# [[0.43760979],
# [0.86177313],
# [0.65221709]])
out4 = paddle.diagonal(x, offset=0, axis1=1, axis2=2)
print(out4)
#Tensor(shape=[2, 2], dtype=float32, place=CUDAPlace(0), stop_gradient=True,
# [[0.45661032, 0.86177313],
# [0.17020577, 0.27325270]])
"""
if in_dygraph_mode():
return _C_ops.diagonal(x, 'offset', offset, 'axis1', axis1, 'axis2', axis2)
def __check_input(input, offset, dim1, dim2):
check_dtype(x.dtype, 'Input',
['bool', 'int32', 'int64', 'float16', 'float32', 'float64'],
'diagonal')
input_shape = list(x.shape)
assert len(input_shape) >= 2, \
"The x must be at least 2-dimensional, " \
"But received Input x's dimensional: %s.\n" % \
len(input_shape)
axis1_ = axis1 if axis1 >= 0 else len(input_shape) + axis1
axis2_ = axis2 if axis2 >= 0 else len(input_shape) + axis2
assert axis1_ < len(input_shape), \
"The argument axis1 is out of range (expected to be in range of [%d, %d], but got %d).\n" \
% (-(len(input_shape)), len(input_shape) - 1, axis1)
assert axis2_ < len(input_shape), \
"The argument axis2 is out of range (expected to be in range of [%d, %d], but got %d).\n" \
% (-(len(input_shape)), len(input_shape) - 1, axis2)
assert axis1_ != axis2_, \
"axis1 and axis2 cannot be the same axis." \
"But received axis1 = %d, axis2 = %d\n"%(axis1, axis2)
__check_input(input, offset, axis1, axis2)
helper = LayerHelper('diagonal', **locals())
out = helper.create_variable_for_type_inference(dtype=x.dtype)
helper.append_op(
type='diagonal',
inputs={'Input': [x]},
attrs={'offset': offset,
'axis1': axis1,
'axis2': axis2},
outputs={'Out': [out]})
return out
@templatedoc(op_type="kron")
def kron(x, y, name=None):
"""
${comment}
Args:
x (Tensor): the first operand of kron op, data type: float16, float32,
float64, int32 or int64.
y (Tensor): the second operand of kron op, data type: float16,
float32, float64, int32 or int64. Its data type should be the same
as that of x.
name(str, optional): The default value is None. Normally there is no
need for user to set this property. For more information, please
refer to :ref:`api_guide_Name`.
Returns:
Tensor: The output of kron op, data type: float16, float32, float64, int32 or int64. Its data type is the same as that of x.
Examples:
.. code-block:: python
import paddle
x = paddle.to_tensor([[1, 2], [3, 4]], dtype='int64')
y = paddle.to_tensor([[1, 2, 3], [4, 5, 6], [7, 8, 9]], dtype='int64')
out = paddle.kron(x, y)
print(out)
# [[1, 2, 3, 2, 4, 6],
# [ 4, 5, 6, 8, 10, 12],
# [ 7, 8, 9, 14, 16, 18],
# [ 3, 6, 9, 4, 8, 12],
# [12, 15, 18, 16, 20, 24],
# [21, 24, 27, 28, 32, 36]]
"""
if in_dygraph_mode():
return _C_ops.kron(x, y)
helper = LayerHelper('kron', **locals())
check_variable_and_dtype(x, 'x', ['float16', 'float32', 'float64', 'int32', 'int64'], 'kron')
check_variable_and_dtype(y, 'y', ['float16', 'float32', 'float64', 'int32', 'int64'], 'kron')
out = helper.create_variable_for_type_inference(dtype=x.dtype)
helper.append_op(type="kron", inputs={"X": x, "Y": y}, outputs={"Out": out})
return out
def cumsum(x, axis=None, dtype=None, name=None):
"""
The cumulative sum of the elements along a given axis.
**Note**:
The first element of the result is the same as the first element of the input.
Args:
x (Tensor): The input tensor over which the cumulative sum is computed.
axis (int, optional): The dimension to accumulate along. -1 means the last dimension. The default (None) is to compute the cumsum over the flattened array.
dtype (str, optional): The data type of the output tensor, can be float32, float64, int32, int64. If specified, the input tensor is cast to dtype before the operation is performed. This is useful for preventing data type overflows. The default value is None.
name (str, optional): Name for the operation (optional, default is None). For more information, please refer to :ref:`api_guide_Name`.
Returns:
Tensor, the result of cumsum operator.
Examples:
.. code-block:: python
import paddle
data = paddle.arange(12)
data = paddle.reshape(data, (3, 4))
y = paddle.cumsum(data)
# [ 0 1 3 6 10 15 21 28 36 45 55 66]
y = paddle.cumsum(data, axis=0)
# [[ 0 1 2 3]
# [ 4 6 8 10]
# [12 15 18 21]]
y = paddle.cumsum(data, axis=-1)
# [[ 0 1 3 6]
# [ 4 9 15 22]
# [ 8 17 27 38]]
y = paddle.cumsum(data, dtype='float64')
print(y.dtype)
# VarType.FP64
"""
if axis is None:
flatten = True
else:
flatten = False
if dtype is not None and x.dtype != convert_np_dtype_to_dtype_(dtype):
x = layers.cast(x, dtype)
if in_dygraph_mode():
if axis is None:
return _C_ops.cumsum(x, 'flatten', flatten)
else:
return _C_ops.cumsum(x, 'axis', axis, 'flatten', flatten)
check_type(x, 'x', (Variable), 'cumsum')
locals_var = locals().copy()
kwargs = dict()
for name, val in locals_var.items():
if val is not None:
kwargs[name] = val
_cum_sum_ = generate_layer_fn('cumsum')
return _cum_sum_(**kwargs)
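# Note on the axis=None path above: cumsum sets flatten=True, so the result
# is 1-D regardless of x's shape; the static-graph branch defers to the
# auto-generated 'cumsum' layer built by generate_layer_fn.
#
#   paddle.cumsum(paddle.ones([2, 2]))  # [1., 2., 3., 4.], shape [4]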
def cumprod(x, dim=None, dtype=None, name=None):
"""
Compute the cumulative product of the input tensor x along a given dimension dim.
**Note**:
The first element of the result is the same as the first element of the input.
Args:
x (Tensor): the input tensor over which the cumulative product is computed.
dim (int): the dimension along which the input tensor will be accumulated. It needs to be in the range of [-x.rank, x.rank), where x.rank means the dimensions of the input tensor x and -1 means the last dimension.
dtype (str, optional): The data type of the output tensor, can be float32, float64, int32, int64, complex64, complex128. If specified, the input tensor is cast to dtype before the operation is performed. This is useful for preventing data type overflows. The default value is None.
name (str, optional): Name for the operation (optional, default is None). For more information, please refer to :ref:`api_guide_Name`.
Returns:
Tensor, the result of cumprod operator.
Examples:
.. code-block:: python
import paddle
data = paddle.arange(12)
data = paddle.reshape(data, (3, 4))
# [[ 0 1 2 3 ]
# [ 4 5 6 7 ]
# [ 8 9 10 11]]
y = paddle.cumprod(data, dim=0)
# [[ 0 1 2 3]
# [ 0 5 12 21]
# [ 0 45 120 231]]
y = paddle.cumprod(data, dim=-1)
# [[ 0 0 0 0]
# [ 4 20 120 840]
# [ 8 72 720 7920]]
y = paddle.cumprod(data, dim=1, dtype='float64')
# [[ 0. 0. 0. 0.]
# [ 4. 20. 120. 840.]
# [ 8. 72. 720. 7920.]]
print(y.dtype)
# paddle.float64
"""
if dtype is not None and x.dtype != convert_np_dtype_to_dtype_(dtype):
x = layers.cast(x, dtype)
if in_dygraph_mode():
return _C_ops.cumprod(x, 'dim', dim)
check_variable_and_dtype(x, "x", ['complex64', 'complex128', 'float32', 'float64', 'int32', 'int64'], 'cumprod')
check_type(dim, 'dim', int, 'cumprod')
helper = LayerHelper('cumprod', **locals())
out = helper.create_variable_for_type_inference(x.dtype)
helper.append_op(type='cumprod', inputs={'X': x}, outputs={'Out': out}, attrs={'dim': dim})
return out
def isfinite(x, name=None):
"""
Return whether every element of the input tensor is a finite number or not.
Args:
x (Tensor): The input tensor, its data type should be float16, float32, float64, int32, int64.
name (str, optional): Name for the operation (optional, default is None). For more information, please refer to :ref:`api_guide_Name`.
Returns:
`Tensor`, the bool result which shows whether every element of `x` is a finite number or not.
Examples:
.. code-block:: python
import paddle
x = paddle.to_tensor([float('-inf'), -2, 3.6, float('inf'), 0, float('-nan'), float('nan')])
out = paddle.tensor.isfinite(x)
print(out) # [False True True False True False False]
"""
if in_dygraph_mode():
return _C_ops.isfinite_v2(x)
helper = LayerHelper("isfinite_v2", **locals())
check_variable_and_dtype(x, 'x', ['float16', 'float32', 'float64', 'int32', 'int64'], 'isfinite')
out = helper.create_variable_for_type_inference('bool')
helper.append_op(type="isfinite_v2", inputs={"X": x}, outputs={"Out": out})
return out
def isinf(x, name=None):
"""
Return whether every element of the input tensor is `+/-INF` or not.
Args:
x (Tensor): The input tensor, its data type should be float16, float32, float64, int32, int64.
name (str, optional): Name for the operation (optional, default is None). For more information, please refer to :ref:`api_guide_Name`.
Returns:
`Tensor`, the bool result which shows whether every element of `x` is `+/-INF` or not.
Examples:
.. code-block:: python
import paddle
x = paddle.to_tensor([float('-inf'), -2, 3.6, float('inf'), 0, float('-nan'), float('nan')])
out = paddle.tensor.isinf(x)
print(out) # [ True False False True False False False]
"""
if in_dygraph_mode():
return _C_ops.isinf_v2(x)
helper = LayerHelper("isinf_v2", **locals())
check_variable_and_dtype(x, 'x', ['float16', 'float32', 'float64', 'int32', 'int64'], 'isinf')
out = helper.create_variable_for_type_inference(dtype='bool')
helper.append_op(type="isinf_v2", inputs={"X": x}, outputs={"Out": out})
return out
def isnan(x, name=None):
"""
Return whether every element of the input tensor is `NaN` or not.
Args:
x (Tensor): The input tensor, its data type should be float16, float32, float64, int32, int64.
name (str, optional): Name for the operation (optional, default is None). For more information, please refer to :ref:`api_guide_Name`.
Returns:
`Tensor`, the bool result which shows whether every element of `x` is `NaN` or not.
Examples:
.. code-block:: python
import paddle
x = paddle.to_tensor([float('-inf'), -2, 3.6, float('inf'), 0, float('-nan'), float('nan')])
out = paddle.tensor.isnan(x)
print(out) # [False False False False False True True]
"""
if in_dygraph_mode():
return _C_ops.isnan_v2(x)
helper = LayerHelper("isnan_v2", **locals())
check_variable_and_dtype(x, 'x', ['float16', 'float32', 'float64', 'int32', 'int64'], 'isnan')
out = helper.create_variable_for_type_inference(dtype='bool')
helper.append_op(type="isnan_v2", inputs={"X": x}, outputs={"Out": out})
return out
def prod(x, axis=None, keepdim=False, dtype=None, name=None):
"""
Compute the product of tensor elements over the given axis.
Args:
x(Tensor): The input tensor, its data type should be float32, float64, int32, int64.
axis(int|list|tuple, optional): The axis along which the product is computed. If :attr:`None`,
multiply all elements of `x` and return a Tensor with a single element,
otherwise must be in the range :math:`[-x.ndim, x.ndim)`. If :math:`axis[i]<0`,
the axis to reduce is :math:`x.ndim + axis[i]`. Default is None.
dtype(str|np.dtype, optional): The desired data type of the returned tensor, can be float32, float64,
int32, int64. If specified, the input tensor is cast to dtype before the operator is performed.
This is very useful for avoiding data type overflows. The default value is None, the dtype
of output is the same as input Tensor `x`.
keepdim(bool, optional): Whether to reserve the reduced dimension in the output Tensor. The result
tensor will have one fewer dimension than the input unless `keepdim` is true. Default is False.
name(string, optional): The default value is None. Normally there is no need for user to set this property.
For more information, please refer to :ref:`api_guide_Name` .
Returns:
Tensor, result of product on the specified dim of input tensor.
Raises:
ValueError: The :attr:`dtype` must be float32, float64, int32 or int64.
TypeError: The type of :attr:`axis` must be int, list or tuple.
Examples:
.. code-block:: python
import paddle
# the axis is a int element
x = paddle.to_tensor([[0.2, 0.3, 0.5, 0.9],
[0.1, 0.2, 0.6, 0.7]])
out1 = paddle.prod(x)
# [0.0002268]
out2 = paddle.prod(x, -1)
# [0.027 0.0084]
out3 = paddle.prod(x, 0)
# [0.02 0.06 0.3 0.63]
out4 = paddle.prod(x, 0, keepdim=True)
# [[0.02 0.06 0.3 0.63]]
out5 = paddle.prod(x, 0, dtype='int64')
# [0 0 0 0]
# the axis is list
y = paddle.to_tensor([[[1.0, 2.0], [3.0, 4.0]],
[[5.0, 6.0], [7.0, 8.0]]])
out6 = paddle.prod(y, [0, 1])
# [105. 384.]
out7 = paddle.prod(y, (1, 2))
# [ 24. 1680.]
"""
if dtype is not None:
check_dtype(dtype, 'dtype', ['float32', 'float64', 'int32', 'int64'], 'prod')
if x.dtype != convert_np_dtype_to_dtype_(dtype):
x = layers.cast(x, dtype)
return layers.reduce_prod(input=x, dim=axis, keep_dim=keepdim, name=name)
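# Implementation note: unlike sum() above, prod() does no implicit integer
# promotion; it only casts when an explicit dtype is given and then delegates
# to layers.reduce_prod. That is why the docstring's dtype='int64' example
# yields zeros: the float inputs are truncated to int64 before multiplying.
#
#   paddle.prod(paddle.to_tensor([[0.2, 0.9]]), 0, dtype='int64')  # [0, 0]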
def sign(x, name=None):
"""
This OP returns the sign of every element in `x`: 1 for positive, -1 for negative and 0 for zero.
Args:
x(Tensor): The input tensor. The data type can be float16, float32 or float64.
name (str, optional): The default value is None. Normally there is no need for user to
set this property. For more information, please refer to :ref:`api_guide_Name`
Returns:
Tensor: The output sign tensor with identical shape and data type to the input :attr:`x`.
Examples:
.. code-block:: python
import paddle
x = paddle.to_tensor([3.0, 0.0, -2.0, 1.7], dtype='float32')
out = paddle.sign(x=x)
print(out) # [1.0, 0.0, -1.0, 1.0]
"""
if in_dygraph_mode():
return _C_ops.sign(x)
check_variable_and_dtype(x, 'x', ['float16', 'float32', 'float64'], 'sign')
helper = LayerHelper("sign", **locals())
out = helper.create_variable_for_type_inference(dtype=x.dtype)
helper.append_op(type='sign', inputs={'X': [x]}, outputs={'Out': [out]})
return out
def tanh(x, name=None):
r"""
Tanh Activation Operator.
.. math::
out = \\frac{e^{x} - e^{-x}}{e^{x} + e^{-x}}
Args:
x (Tensor): Input of Tanh operator, an N-D Tensor, with data type float32, float64 or float16.
name (str, optional): Name for the operation (optional, default is None). For more information, please refer to :ref:`api_guide_Name`.
Returns:
Output of Tanh operator, a Tensor with same data type and shape as input.
Examples:
.. code-block:: python
import paddle
x = paddle.to_tensor([-0.4, -0.2, 0.1, 0.3])
out = paddle.tanh(x)
print(out)
# [-0.37994896 -0.19737532 0.09966799 0.29131261]
"""
if in_dygraph_mode():
return _C_ops.tanh(x)
check_variable_and_dtype(x, 'x', ['float16', 'float32', 'float64'], 'tanh')
check_type(x, 'x', (Variable), 'tanh')
helper = LayerHelper('tanh', **locals())
out = helper.create_variable_for_type_inference(x.dtype)
helper.append_op(type='tanh', inputs={'X': x}, outputs={'Out': out})
return out
@inplace_apis_in_dygraph_only
def tanh_(x, name=None):
r"""
Inplace version of ``tanh`` API, the output Tensor will be inplaced with input ``x``.
Please refer to :ref:`api_tensor_tanh`.
"""
return _C_ops.tanh_(x)
def increment(x, value=1.0, name=None):
"""
The OP is usually used for control flow to increment the data of :attr:`x` by an amount :attr:`value`.
Notice that the number of elements in :attr:`x` must be equal to 1.
Args:
x (Tensor): A tensor that must always contain only one element, its data type supports float32, float64, int32 and int64.
value(float, optional): The amount to increment the data of :attr:`x`. Default: 1.0.
name (str, optional): Name for the operation (optional, default is None). For more information, please refer to :ref:`api_guide_Name`.
Returns:
Tensor, the elementwise-incremented tensor with the same shape and data type as :attr:`x`.
Examples:
.. code-block:: python
import paddle
data = paddle.zeros(shape=[1], dtype='float32')
counter = paddle.increment(data)
# [1.]
"""
if in_dygraph_mode():
return _C_ops.increment(x, 'step', value)
check_variable_and_dtype(x, 'x', ['float32', 'float64', 'int32', 'int64'],
'increment')
helper = LayerHelper("increment", **locals())
helper.append_op(
type='increment',
inputs={'X': [x]},
outputs={'Out': [x]},
attrs={'step': float(value)})
return x
def all(x, axis=None, keepdim=False, name=None):
"""
Computes the ``logical and`` of tensor elements over the given dimension.
Args:
x (Tensor): An N-D Tensor, the input data type should be `bool`.
axis (int|list|tuple, optional): The dimensions along which the ``logical and`` is computed. If
    :attr:`None`, all elements of :attr:`x` are reduced and a
    Tensor with a single element is returned; otherwise each value must be in the
    range :math:`[-rank(x), rank(x))`. If :math:`axis[i] < 0`,
    the dimension to reduce is :math:`rank + axis[i]`.
keepdim (bool, optional): Whether to reserve the reduced dimension in the
output Tensor. The result Tensor will have one fewer dimension
than the :attr:`x` unless :attr:`keepdim` is true, default
value is False.
name (str, optional): The default value is None. Normally there is no need for
user to set this property. For more information, please refer to :ref:`api_guide_Name`
Returns:
Tensor: Result of the ``logical and`` on the specified axis of input Tensor `x`; its data type is bool.
Raises:
ValueError: If the data type of `x` is not bool.
TypeError: The type of :attr:`axis` must be int, list or tuple.
Examples:
.. code-block:: python
import paddle
import numpy as np
# x is a bool Tensor with following elements:
# [[True, False]
# [True, True]]
x = paddle.assign(np.array([[1, 0], [1, 1]], dtype='int32'))
print(x)
x = paddle.cast(x, 'bool')
# out1 should be [False]
out1 = paddle.all(x) # [False]
print(out1)
# out2 should be [True, False]
out2 = paddle.all(x, axis=0) # [True, False]
print(out2)
# keep_dim=False, out3 should be [False, True], out.shape should be (2,)
out3 = paddle.all(x, axis=-1) # [False, True]
print(out3)
# keep_dim=True, out4 should be [[False], [True]], out.shape should be (2,1)
out4 = paddle.all(x, axis=1, keepdim=True)
out4 = paddle.cast(out4, 'int32') # [[False], [True]]
print(out4)
"""
if axis is not None and not isinstance(axis, (list, tuple)):
axis = [axis]
if not axis:
reduce_all_flag = True
else:
if len(axis) == len(x.shape):
reduce_all_flag = True
else:
reduce_all_flag = False
if in_dygraph_mode():
axis = axis if axis != None and axis != [] else [0]
return _C_ops.reduce_all(x, 'dim', axis, 'keep_dim', keepdim,
'reduce_all', reduce_all_flag)
attrs = {
'dim': axis if axis != None and axis != [] and axis != () else [0],
'keep_dim': keepdim,
'reduce_all': reduce_all_flag
}
check_variable_and_dtype(x, 'x', ['bool'], 'all')
check_type(axis, 'axis', (int, list, tuple, type(None)), 'all')
helper = LayerHelper('all', **locals())
out = helper.create_variable_for_type_inference(dtype=x.dtype)
helper.append_op(
type='reduce_all',
inputs={'X': x},
outputs={'Out': out},
attrs=attrs)
return out
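# Note: ``all`` above and ``any`` below share the same axis-normalization logic.
# A minimal standalone sketch of that decision (plain Python; the helper name is
# illustrative, not part of Paddle's API):
def _normalize_reduce_axis(axis, ndim):
    # Wrap a scalar axis in a list and decide whether the reduction
    # should collapse every dimension.
    if axis is not None and not isinstance(axis, (list, tuple)):
        axis = [axis]
    reduce_all = not axis or len(axis) == ndim
    # The underlying op still expects a concrete 'dim' attribute, so fall back to [0].
    return (list(axis) if axis else [0]), reduce_all
# _normalize_reduce_axis(None, 2)   -> ([0], True)
# _normalize_reduce_axis(1, 2)      -> ([1], False)
# _normalize_reduce_axis([0, 1], 2) -> ([0, 1], True)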
def any(x, axis=None, keepdim=False, name=None):
"""
Computes the ``logical or`` of tensor elements over the given dimension.
Args:
x (Tensor): An N-D Tensor, the input data type should be `bool`.
axis (int|list|tuple, optional): The dimensions along which the ``logical or`` is computed. If
    :attr:`None`, all elements of :attr:`x` are reduced and a
    Tensor with a single element is returned; otherwise each value must be in the
    range :math:`[-rank(x), rank(x))`. If :math:`axis[i] < 0`,
    the dimension to reduce is :math:`rank + axis[i]`.
keepdim (bool, optional): Whether to reserve the reduced dimension in the
output Tensor. The result Tensor will have one fewer dimension
than the :attr:`x` unless :attr:`keepdim` is true, default
value is False.
name (str, optional): The default value is None. Normally there is no need for
user to set this property. For more information, please refer to :ref:`api_guide_Name`
Returns:
Tensor: Result of the ``logical or`` on the specified axis of input Tensor `x`; its data type is bool.
Raises:
ValueError: If the data type of `x` is not bool.
TypeError: The type of :attr:`axis` must be int, list or tuple.
Examples:
.. code-block:: python
import paddle
import numpy as np
# x is a bool Tensor with following elements:
# [[True, False]
# [False, False]]
x = paddle.assign(np.array([[1, 0], [1, 1]], dtype='int32'))
print(x)
x = paddle.cast(x, 'bool')
# out1 should be [True]
out1 = paddle.any(x) # [True]
print(out1)
# out2 should be [True, True]
out2 = paddle.any(x, axis=0) # [True, True]
print(out2)
# keep_dim=False, out3 should be [True, True], out.shape should be (2,)
out3 = paddle.any(x, axis=-1) # [True, True]
print(out3)
# keep_dim=True, result should be [[True], [True]], out.shape should be (2,1)
out4 = paddle.any(x, axis=1, keepdim=True)
out4 = paddle.cast(out4, 'int32') # [[True], [True]]
print(out4)
"""
if axis is not None and not isinstance(axis, (list, tuple)):
axis = [axis]
if not axis:
reduce_all_flag = True
else:
if len(axis) == len(x.shape):
reduce_all_flag = True
else:
reduce_all_flag = False
if in_dygraph_mode():
axis = axis if axis != None and axis != [] else [0]
return _C_ops.reduce_any(x, 'dim', axis, 'keep_dim', keepdim,
'reduce_all', reduce_all_flag)
attrs = {
'dim': axis if axis != None and axis != [] and axis != () else [0],
'keep_dim': keepdim,
'reduce_all': reduce_all_flag
}
check_variable_and_dtype(x, 'x', ['bool'], 'any')
check_type(axis, 'axis', (int, list, tuple, type(None)), 'any')
helper = LayerHelper('any', **locals())
out = helper.create_variable_for_type_inference(dtype=x.dtype)
helper.append_op(
type='reduce_any',
inputs={'X': x},
outputs={'Out': out},
attrs=attrs)
return out
def broadcast_shape(x_shape, y_shape):
"""
The function returns the shape that results from a broadcasting operation on tensors of shape x_shape and y_shape; please refer to :ref:`user_guide_broadcasting` for more details.
Args:
x_shape (list[int]|tuple[int]): A shape of tensor.
y_shape (list[int]|tuple[int]): A shape of tensor.
Returns:
list[int], the result shape.
Examples:
.. code-block:: python
import paddle
shape = paddle.broadcast_shape([2, 1, 3], [1, 3, 1])
# [2, 3, 3]
# shape = paddle.broadcast_shape([2, 1, 3], [3, 3, 1])
# ValueError (terminated with error message).
"""
return core.broadcast_shape(x_shape, y_shape)
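# For reference, the NumPy-style rule that ``core.broadcast_shape`` implements
# can be sketched in plain Python; this illustrates the rule and is not the
# actual C++ implementation:
def broadcast_shape_py(x_shape, y_shape):
    # Align shapes from the trailing dimension; a dimension of size 1 stretches.
    out = []
    for i in range(1, max(len(x_shape), len(y_shape)) + 1):
        a = x_shape[-i] if i <= len(x_shape) else 1
        b = y_shape[-i] if i <= len(y_shape) else 1
        if a != b and a != 1 and b != 1:
            raise ValueError("incompatible dimensions %d and %d" % (a, b))
        out.append(max(a, b))
    return list(reversed(out))
# broadcast_shape_py([2, 1, 3], [1, 3, 1]) -> [2, 3, 3], matching the example above.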
def conj(x, name=None):
r"""
This function computes the conjugate of the Tensor element-wise.
Args:
x (Tensor): The input tensor which hold the complex numbers.
Optional data types are: complex64, complex128, float32, float64, int32 or int64.
name (str, optional): The default value is None. Normally there is no need for
user to set this property. For more information, please refer to :ref:`api_guide_Name`
Returns:
out (Tensor): The conjugate of the input. The shape and data type are the same as the input's.
    If the elements of the tensor are of a real type such as float32, float64, int32 or int64, the output equals the input.
Examples:
.. code-block:: python
import paddle
data=paddle.to_tensor([[1+1j, 2+2j, 3+3j], [4+4j, 5+5j, 6+6j]])
#Tensor(shape=[2, 3], dtype=complex64, place=CUDAPlace(0), stop_gradient=True,
# [[(1+1j), (2+2j), (3+3j)],
# [(4+4j), (5+5j), (6+6j)]])
conj_data=paddle.conj(data)
#Tensor(shape=[2, 3], dtype=complex64, place=CUDAPlace(0), stop_gradient=True,
# [[(1-1j), (2-2j), (3-3j)],
# [(4-4j), (5-5j), (6-6j)]])
"""
if in_dygraph_mode():
return _C_ops.conj(x)
check_variable_and_dtype(x, "x", ['complex64', 'complex128', 'float32', 'float64', 'int32', 'int64'], 'conj')
helper = LayerHelper('conj', **locals())
out = helper.create_variable_for_type_inference(
dtype=helper.input_dtype())
helper.append_op(type='conj', inputs={'X': x}, outputs={'Out': [out]})
return out
def digamma(x, name=None):
r"""
Calculates the digamma of the given input tensor, element-wise.
.. math::
Out = \Psi(x) = \frac{ \Gamma^{'}(x) }{ \Gamma(x) }
Args:
x (Tensor): Input Tensor. Must be one of the following types: float32, float64.
name(str, optional): The default value is None. Normally there is no need for
user to set this property. For more information, please refer to :ref:`api_guide_Name`
Returns:
Tensor, the digamma of the input Tensor; the shape and data type are the same as the input's.
Examples:
.. code-block:: python
import paddle
data = paddle.to_tensor([[1, 1.5], [0, -2.2]], dtype='float32')
res = paddle.digamma(data)
print(res)
# Tensor(shape=[2, 2], dtype=float32, place=CUDAPlace(0), stop_gradient=True,
# [[-0.57721591, 0.03648996],
# [ nan , 5.32286835]])
"""
if in_dygraph_mode():
return _C_ops.digamma(x)
check_variable_and_dtype(x, 'x', ['float32', 'float64'], 'digamma')
helper = LayerHelper('digamma', **locals())
out = helper.create_variable_for_type_inference(x.dtype)
helper.append_op(type='digamma', inputs={'X': x}, outputs={'Out': out})
return out
def neg(x, name=None):
"""
This function computes the negative of the Tensor element-wise.
Args:
x (Tensor): Input of neg operator, an N-D Tensor, with data type float32, float64, int8, int16, int32, or int64.
name (str, optional): Name for the operation (optional, default is None). For more information, please refer to :ref:`api_guide_Name`.
Returns:
out (Tensor): The negative of input Tensor. The shape and data type are the same with input Tensor.
Examples:
.. code-block:: python
import paddle
x = paddle.to_tensor([-0.4, -0.2, 0.1, 0.3])
out = paddle.neg(x)
print(out)
# [0.4 0.2 -0.1 -0.3]
"""
return layers.scale(x, scale=-1.0, bias=0.0, bias_after_scale=True, act=None, name=name)
def atan2(x, y, name=None):
r"""
Element-wise arctangent of x/y with consideration of the quadrant.
Equation:
.. math::
atan2(x,y)=\left\{\begin{matrix}
& tan^{-1}(\frac{x}{y}) & y > 0 \\
& tan^{-1}(\frac{x}{y}) + \pi & x>=0, y < 0 \\
& tan^{-1}(\frac{x}{y}) - \pi & x<0, y < 0 \\
& +\frac{\pi}{2} & x>0, y = 0 \\
& -\frac{\pi}{2} & x<0, y = 0 \\
&\text{undefined} & x=0, y = 0
\end{matrix}\right.
Args:
x (Tensor): An N-D Tensor, the data type is int32, int64, float16, float32, float64.
y (Tensor): An N-D Tensor, must have the same type as `x`.
name (str, optional): Name for the operation (optional, default is None). For more information, please refer to :ref:`api_guide_Name`.
Returns:
out (Tensor): An N-D Tensor whose shape and data type are the same as the input's (the output data type is float64 when the input data type is int).
Examples:
.. code-block:: python
import paddle
x = paddle.to_tensor([-1, +1, +1, -1]).astype('float32')
#Tensor(shape=[4], dtype=float32, place=CUDAPlace(0), stop_gradient=True,
# [-1, 1, 1, -1])
y = paddle.to_tensor([-1, -1, +1, +1]).astype('float32')
#Tensor(shape=[4], dtype=float32, place=CUDAPlace(0), stop_gradient=True,
# [-1, -1, 1, 1])
out = paddle.atan2(x, y)
#Tensor(shape=[4], dtype=float32, place=CUDAPlace(0), stop_gradient=True,
# [-2.35619450, 2.35619450, 0.78539819, -0.78539819])
"""
if in_dygraph_mode():
return _C_ops.atan2(x, y)
else:
check_variable_and_dtype(x, 'x', ['int32', 'int64', 'float16', 'float32', 'float64'], 'atan2')
check_variable_and_dtype(y, 'y', ['int32', 'int64', 'float16', 'float32', 'float64'], 'atan2')
helper = LayerHelper('atan2', **locals())
inputs = {'X1' : x, 'X2' : y}
out = helper.create_variable_for_type_inference(dtype=x.dtype)
helper.append_op(
type='atan2', inputs=inputs, outputs={'Out': out})
return out
def lerp(x, y, weight, name=None):
r"""
Does a linear interpolation between x and y based on weight.
Equation:
.. math::
lerp(x, y, weight) = x + weight * (y - x).
Args:
x (Tensor): An N-D Tensor, the data type is float32, float64.
y (Tensor): An N-D Tensor, the data type is float32, float64.
weight (float|Tensor): the weight for the interpolation formula.
name (str, optional): Name for the operation (optional, default is None). For more information, please refer to :ref:`api_guide_Name`.
Returns:
out (Tensor): An N-D Tensor whose shape and data type are the same as the input's.
Example:
.. code-block:: python
import paddle
x = paddle.arange(1., 5., dtype='float32')
y = paddle.empty([4], dtype='float32')
y.fill_(10.)
out = paddle.lerp(x, y, 0.5)
# out: [5.5, 6., 6.5, 7.]
"""
if in_dygraph_mode():
check_type(weight, 'weight', (float, paddle.Tensor, Variable), 'lerp')
if isinstance(weight, float):
weight = paddle.to_tensor(weight, dtype=x.dtype)
return _C_ops.lerp(x, y, weight)
check_variable_and_dtype(x, 'x', ['float32', 'float64'], 'lerp')
check_variable_and_dtype(y, 'y', ['float32', 'float64'], 'lerp')
check_variable_and_dtype(weight, 'weight', ['float32', 'float64'], 'lerp')
helper = LayerHelper('lerp', **locals())
inputs = {'X': x, 'Y': y, 'Weight': weight}
out = helper.create_variable_for_type_inference(dtype=x.dtype)
helper.append_op(type='lerp', inputs=inputs, outputs={'Out': out})
return out
@inplace_apis_in_dygraph_only
def lerp_(x, y, weight, name=None):
r"""
Inplace version of ``lerp`` API, the output Tensor will be inplaced with input ``x``.
Please refer to :ref:`api_tensor_lerp`.
"""
out_shape = broadcast_shape(x.shape, y.shape)
check_type(weight, 'weight', (float, paddle.Tensor, Variable), 'lerp')
if isinstance(weight, float):
weight = paddle.to_tensor([weight], dtype=x.dtype)
elif isinstance(weight, (paddle.Tensor, Variable)):
out_shape = broadcast_shape(out_shape, weight.shape)
if out_shape != x.shape:
raise ValueError("The shape of broadcast output {} is different from that of inplace tensor {} in the Inplace operation.".format(out_shape, x.shape))
return _C_ops.lerp_(x, y, weight)
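# The inplace variant above refuses to run whenever broadcasting would change
# the shape of ``x``, since the result must be written back into ``x``'s own
# storage. A small illustration of that guard, reusing the hypothetical
# ``broadcast_shape_py`` sketch from the ``broadcast_shape`` section above:
def _inplace_lerp_shape_ok(x_shape, y_shape, weight_shape=None):
    out_shape = broadcast_shape_py(x_shape, y_shape)
    if weight_shape is not None:
        out_shape = broadcast_shape_py(out_shape, weight_shape)
    # The broadcast result must equal x's shape, or the inplace write is invalid.
    return out_shape == list(x_shape)
# _inplace_lerp_shape_ok([4], [4])       -> True:  safe to run lerp_
# _inplace_lerp_shape_ok([1, 4], [3, 4]) -> False: result [3, 4] outgrows x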
def rad2deg(x, name=None):
"""
Convert each of the elements of input x from angles in radians to degrees.
Equation:
.. math::
rad2deg(x)=180/ \pi * x
Args:
x (Tensor): An N-D Tensor, the data type is float32, float64, int32, int64.
name (str, optional): Name for the operation (optional, default is None). For more information, please refer to :ref:`api_guide_Name`.
Returns:
out (Tensor): An N-D Tensor whose shape and data type are the same as the input's (the output data type is float32 when the input data type is int).
Examples:
.. code-block:: python
import paddle
import numpy as np
x1 = paddle.to_tensor([3.142, -3.142, 6.283, -6.283, 1.570, -1.570])
result1 = paddle.rad2deg(x1)
print(result1)
# Tensor(shape=[6], dtype=float32, place=CUDAPlace(0), stop_gradient=True,
# [180.02334595, -180.02334595, 359.98937988, -359.98937988,
# 89.95437622 , -89.95437622])
x2 = paddle.to_tensor(np.pi/2)
result2 = paddle.rad2deg(x2)
print(result2)
# Tensor(shape=[1], dtype=float32, place=CUDAPlace(0), stop_gradient=True,
# [90.])
x3 = paddle.to_tensor(1)
result3 = paddle.rad2deg(x3)
print(result3)
# Tensor(shape=[1], dtype=float32, place=CUDAPlace(0), stop_gradient=True,
# [57.29578018])
"""
rad2deg_scale = 180 / np.pi
if in_dygraph_mode():
if convert_dtype(x.dtype) in ['int32', 'int64']:
x = cast(x, dtype="float32")
return _C_ops.scale(x, 'scale', rad2deg_scale)
else:
check_variable_and_dtype(x, 'x', ['int32', 'int64', 'float32', 'float64'], 'rad2deg')
helper = LayerHelper('rad2deg', **locals())
out_cast = x
if convert_dtype(x.dtype) in ['int32', 'int64']:
out_cast = helper.create_variable_for_type_inference(dtype=paddle.float32)
helper.append_op(
type='cast', inputs={'X':x}, outputs={'Out': out_cast}, attrs={'in_dtype': x.dtype,'out_dtype': paddle.float32})
out = helper.create_variable_for_type_inference(dtype=out_cast.dtype)
helper.append_op(
type='scale', inputs={'X':out_cast}, outputs={'Out': out}, attrs={'scale': rad2deg_scale})
return out
def deg2rad(x, name=None):
"""
Convert each of the elements of input x from degrees to angles in radians.
Equation:
.. math::
deg2rad(x)=\pi * x / 180
Args:
x (Tensor): An N-D Tensor, the data type is float32, float64, int32, int64.
name (str, optional): Name for the operation (optional, default is None). For more information, please refer to :ref:`api_guide_Name`.
Returns:
out (Tensor): An N-D Tensor whose shape and data type are the same as the input's (the output data type is float32 when the input data type is int).
Examples:
.. code-block:: python
import paddle
import numpy as np
x1 = paddle.to_tensor([180.0, -180.0, 360.0, -360.0, 90.0, -90.0])
result1 = paddle.deg2rad(x1)
print(result1)
# Tensor(shape=[6], dtype=float32, place=CUDAPlace(0), stop_gradient=True,
# [3.14159274, -3.14159274, 6.28318548, -6.28318548, 1.57079637,
# -1.57079637])
x2 = paddle.to_tensor(180)
result2 = paddle.deg2rad(x2)
print(result2)
# Tensor(shape=[1], dtype=float32, place=CUDAPlace(0), stop_gradient=True,
# [3.14159274])
"""
deg2rad_scale = np.pi / 180.0
if in_dygraph_mode():
if convert_dtype(x.dtype) in ['int32', 'int64']:
x = cast(x, dtype="float32")
return _C_ops.scale(x, 'scale', deg2rad_scale)
else:
check_variable_and_dtype(x, 'x', ['int32', 'int64', 'float32', 'float64'], 'deg2rad')
helper = LayerHelper('deg2rad', **locals())
out_cast = x
if convert_dtype(x.dtype) in ['int32', 'int64']:
out_cast = helper.create_variable_for_type_inference(dtype=paddle.float32)
helper.append_op(
type='cast', inputs={'X':x}, outputs={'Out': out_cast}, attrs={'in_dtype': x.dtype,'out_dtype': paddle.float32})
out = helper.create_variable_for_type_inference(dtype=out_cast.dtype)
helper.append_op(
type='scale', inputs={'X':out_cast}, outputs={'Out': out}, attrs={'scale': deg2rad_scale})
return out
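# Both conversion ops above reduce to an element-wise multiplication by a
# constant scale; for a scalar, the same arithmetic is what math.degrees and
# math.radians compute, which makes a handy sanity check:
import math
assert abs(math.pi / 2 * (180 / math.pi) - math.degrees(math.pi / 2)) < 1e-12  # 90.0
assert abs(180.0 * (math.pi / 180.0) - math.radians(180.0)) < 1e-12            # pi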
def diff(x, n=1, axis=-1, prepend=None, append=None, name=None):
r"""
Computes the n-th forward difference along the given axis.
The first-order differences is computed by using the following formula:
.. math::
out[i] = x[i+1] - x[i]
Higher-order differences are computed by using paddle.diff() recursively.
Only n=1 is currently supported.
Args:
x(Tensor): The input tensor to compute the forward difference on.
n(int, optional): The number of times to recursively compute the difference.
    Only n=1 is supported. Default: 1.
axis(int, optional): The axis to compute the difference along. Default: -1.
prepend(Tensor, optional): The tensor to prepend to input along axis before computing the difference.
    Its number of dimensions must match that of x,
    and its shape must match x's shape except along axis.
append(Tensor, optional): The tensor to append to input along axis before computing the difference.
    Its number of dimensions must match that of x,
    and its shape must match x's shape except along axis.
name(str|None): A name for this layer (optional). If set to None,
    the layer will be named automatically.
Returns:
Tensor: The output tensor with the same dtype as x.
Examples:
.. code-block:: python
import paddle
x = paddle.to_tensor([1, 4, 5, 2])
out = paddle.diff(x)
print(out)
# out:
# [3, 1, -3]
y = paddle.to_tensor([7, 9])
out = paddle.diff(x, append=y)
print(out)
# out:
# [3, 1, -3, 5, 2]
z = paddle.to_tensor([[1, 2, 3], [4, 5, 6]])
out = paddle.diff(z, axis=0)
print(out)
# out:
# [[3, 3, 3]]
out = paddle.diff(z, axis=1)
print(out)
# out:
# [[1, 1], [1, 1]]
"""
if axis < 0:
axis = axis + len(x.shape)
if axis > len(x.shape):
axis = len(x.shape)
if axis < 0:
axis = 0
dtype = x.dtype
axes = [axis]
infer_flags = list(1 for i in range(len(axes)))
if in_dygraph_mode():
has_pend = False
input_list = []
if prepend is not None and append is not None:
input_list = [prepend, x, append]
has_pend = True
elif prepend is not None:
input_list = [prepend, x]
has_pend = True
elif append is not None:
input_list = [x, append]
has_pend = True
if has_pend:
new_input = _C_ops.concat(input_list, 'axis', axis)
else:
new_input = x
attrs_1 = ()
attrs_2 = ()
dim_len = new_input.shape[axis]
starts_1 = [0]
attrs_1 += ('starts', starts_1)
ends_1 = [dim_len - 1]
attrs_1 += ('ends', ends_1)
input_front = _C_ops.slice(new_input, None, None, 'axes', axes, \
'infer_flags', infer_flags, *attrs_1)
starts_2 = [1]
attrs_2 += ('starts', starts_2)
ends_2 = [dim_len]
attrs_2 += ('ends', ends_2)
input_back = _C_ops.slice(new_input, None, None, 'axes', axes, \
'infer_flags', infer_flags, *attrs_2)
if x.dtype == paddle.bool:
op = getattr(_C_ops, "logical_xor")
out = op(input_back, input_front)
else:
out = layers.elementwise_sub(input_back, input_front, axis=axis)
return out
else:
check_variable_and_dtype(x, 'x', ['float32', 'float64', 'bool', 'int32', 'int64'], 'diff')
check_type(axis, 'axis', (int), 'diff')
helper = LayerHelper('diff', **locals())
has_pend = False
input_list = []
if prepend is not None and append is not None:
input_list = [prepend, x, append]
has_pend = True
elif prepend is not None:
input_list = [prepend, x]
has_pend = True
elif append is not None:
input_list = [x, append]
has_pend = True
if has_pend:
new_input = helper.create_variable_for_type_inference(dtype)
helper.append_op(
type='concat', inputs={'X': input_list}, outputs={'Out': [new_input]}, attrs={'axis': axis}
)
else:
new_input = x
dim_len = new_input.shape[axis]
attrs_1 = {'axes': axes}
starts_1 = [0]
ends_1 = [dim_len - 1]
attrs_1['starts'] = starts_1
attrs_1['ends'] = ends_1
input_front = helper.create_variable_for_type_inference(dtype)
helper.append_op(
type='slice', inputs={'Input': new_input}, attrs=attrs_1, outputs={'Out': input_front}
)
attrs_2 = {'axes': axes}
starts_2 = [1]
ends_2 = [dim_len]
attrs_2['starts'] = starts_2
attrs_2['ends'] = ends_2
input_back = helper.create_variable_for_type_inference(dtype)
helper.append_op(
type='slice', inputs={'Input': new_input}, attrs=attrs_2, outputs={'Out': input_back}
)
if dtype == paddle.bool:
out = helper.create_variable_for_type_inference(dtype)
helper.append_op(
type='logical_xor', inputs={"X": input_back, "Y": input_front}, outputs={"Out": out}
)
else:
out = layers.elementwise_sub(input_back, input_front, axis=axis)
return out
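# The implementation above builds the first-order difference from two shifted
# slices of the (optionally prepended/appended) input and subtracts them. The
# same idea on a plain Python list, as an illustration only:
def _diff1(x, prepend=None, append=None):
    data = list(prepend or []) + list(x) + list(append or [])
    front = data[:-1]  # data[0 : n-1]
    back = data[1:]    # data[1 : n]
    return [b - f for f, b in zip(front, back)]
# _diff1([1, 4, 5, 2])                -> [3, 1, -3]
# _diff1([1, 4, 5, 2], append=[7, 9]) -> [3, 1, -3, 5, 2]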
def angle(x, name=None):
r"""
Element-wise angle of complex numbers. For non-negative real numbers, the angle is 0 while
for negative real numbers, the angle is :math:`\pi`.
Equation:
.. math::
angle(x)=arctan2(x.imag, x.real)
Args:
x (Tensor): An N-D Tensor, the data type is complex64, complex128, or float32, float64 .
name (str, optional): Name for the operation (optional, default is None). For more information, please refer to :ref:`api_guide_Name`.
Returns:
out (Tensor): An N-D Tensor of real data type with the same precision as that of x's data type.
Examples:
.. code-block:: python
import paddle
x = paddle.to_tensor([-2, -1, 0, 1]).unsqueeze(-1).astype('float32')
y = paddle.to_tensor([-2, -1, 0, 1]).astype('float32')
z = x + 1j * y
print(z.numpy())
# [[-2.-2.j -2.-1.j -2.+0.j -2.+1.j]
# [-1.-2.j -1.-1.j -1.+0.j -1.+1.j]
# [ 0.-2.j 0.-1.j 0.+0.j 0.+1.j]
# [ 1.-2.j 1.-1.j 1.+0.j 1.+1.j]]
theta = paddle.angle(z)
print(theta.numpy())
# [[-2.3561945 -2.6779451 3.1415927 2.6779451]
# [-2.0344439 -2.3561945 3.1415927 2.3561945]
# [-1.5707964 -1.5707964 0. 1.5707964]
# [-1.1071488 -0.7853982 0. 0.7853982]]
"""
if in_dygraph_mode():
return _C_ops.angle(x)
check_variable_and_dtype(x, 'x',
['float32', 'float64', 'complex64', 'complex128'], 'angle')
op_type = "angle"
helper = LayerHelper(op_type, **locals())
inputs = {"X": x}
out = helper.create_variable_for_type_inference(
dtype=_complex_to_real_dtype(x.dtype))
outputs = {"Out": out}
helper.append_op(type=op_type, inputs=inputs, outputs=outputs)
return out
| [ …def_use_chains character-offset pairs elided… ] |
# -*- coding: utf-8 -*-
from airtest.utils.logger import get_logger
from airtest.utils.safesocket import SafeSocket
from airtest.utils.nbsp import NonBlockingStreamReader
from airtest.utils.snippet import on_method_ready, reg_cleanup
from airtest.core.android.yosemite import Yosemite
import struct
LOGGING = get_logger(__name__)
class Javacap(Yosemite):
"""
This is another screencap class. It is slower than minicap, but provides better compatibility.
"""
APP_PKG = "com.netease.nie.yosemite"
SCREENCAP_SERVICE = "com.netease.nie.yosemite.Capture"
RECVTIMEOUT = None
def __init__(self, adb):
super(Javacap, self).__init__(adb)
self.frame_gen = None
@on_method_ready('install_or_upgrade')
def _setup_stream_server(self):
"""
Setup stream server
Returns:
adb shell process, non-blocking stream reader and local port
"""
# setup_forward accepts a callable here: it is invoked with the chosen
# local port to build the device-side abstract socket name
localport, deviceport = self.adb.setup_forward("localabstract:javacap_{}".format)
deviceport = deviceport[len("localabstract:"):]
# setup agent proc
apkpath = self.adb.path_app(self.APP_PKG)
cmds = ["CLASSPATH=" + apkpath, 'exec', 'app_process', '/system/bin', self.SCREENCAP_SERVICE,
"--scale", "100", "--socket", "%s" % deviceport, "-lazy", "2>&1"]
proc = self.adb.start_shell(cmds)
# check proc output
nbsp = NonBlockingStreamReader(proc.stdout, print_output=True, name="javacap_server")
while True:
line = nbsp.readline(timeout=5.0)
if line is None:
raise RuntimeError("javacap server setup timeout")
if b"Capture server listening on" in line:
break
if b"Address already in use" in line:
raise RuntimeError("javacap server setup error: %s" % line)
reg_cleanup(proc.kill)
return proc, nbsp, localport
def get_frames(self):
"""
Get the screen frames
Returns:
None
"""
proc, nbsp, localport = self._setup_stream_server()
s = SafeSocket()
s.connect((self.adb.host, localport))
t = s.recv(24)
# javacap header
LOGGING.debug(struct.unpack("<2B5I2B", t))
stopping = False
while not stopping:
s.send(b"1")
# recv frame header, count frame_size
if self.RECVTIMEOUT is not None:
header = s.recv_with_timeout(4, self.RECVTIMEOUT)
else:
header = s.recv(4)
if header is None:
LOGGING.error("javacap header is None")
# recv timeout, if not frame updated, maybe screen locked
stopping = yield None
else:
frame_size = struct.unpack("<I", header)[0]
frame_data = s.recv(frame_size)
stopping = yield frame_data
LOGGING.debug("javacap stream ends")
s.close()
nbsp.kill()
proc.kill()
self.adb.remove_forward("tcp:%s" % localport)
def get_frame_from_stream(self):
"""
Get frame from the stream
Returns:
frame
"""
if self.frame_gen is None:
self.frame_gen = self.get_frames()
return self.frame_gen.send(None)
def teardown_stream(self):
"""
End stream
Returns:
None
"""
if not self.frame_gen:
return
try:
self.frame_gen.send(1)
except (TypeError, StopIteration):
pass
else:
LOGGING.warn("%s tear down failed" % self.frame_gen)
self.frame_gen = None
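# The stream protocol consumed by get_frames() above is simple length-prefixed
# framing: the client sends b"1" to request a frame, reads a 4-byte
# little-endian size header, then reads that many bytes of image data. A
# minimal standalone sketch of one framing round trip (the callables are
# stand-ins for the socket operations above):
import io
def read_one_frame(request, recv_exact):
    request()                                    # ask the server for the next frame
    header = recv_exact(4)                       # 4-byte frame-size header
    (frame_size,) = struct.unpack("<I", header)  # little-endian uint32
    return recv_exact(frame_size)                # the frame payload itself
# Toy demonstration against an in-memory "stream":
_buf = io.BytesIO(struct.pack("<I", 3) + b"jpg")
assert read_one_frame(lambda: None, _buf.read) == b"jpg"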
| [ …def_use_chains character-offset pairs elided… ] |
#
# @lc app=leetcode id=447 lang=python
#
# [447] Number of Boomerangs
#
# https://leetcode.com/problems/number-of-boomerangs/description/
#
# algorithms
# Easy (49.20%)
# Likes: 296
# Dislikes: 447
# Total Accepted: 54.7K
# Total Submissions: 109.6K
# Testcase Example: '[[0,0],[1,0],[2,0]]'
#
# Given n points in the plane that are all pairwise distinct, a "boomerang" is
# a tuple of points (i, j, k) such that the distance between i and j equals the
# distance between i and k (the order of the tuple matters).
#
# Find the number of boomerangs. You may assume that n will be at most 500 and
# coordinates of points are all in the range [-10000, 10000] (inclusive).
#
# Example:
#
#
# Input:
# [[0,0],[1,0],[2,0]]
#
# Output:
# 2
#
# Explanation:
# The two boomerangs are [[1,0],[0,0],[2,0]] and [[1,0],[2,0],[0,0]]
#
#
#
#
#
import math
class Solution(object):
def _numberOfBoomerangs(self, points):
"""
:type points: List[List[int]]
:rtype: int
"""
# Time Limit
result = []
distance = [[0] * len(points) for _ in range(len(points))]
for i in range(len(points)):
for j in range(i):
distance[i][j] = (points[i][0]-points[j][0])**2 + (points[i][1]-points[j][1])**2
# distance[i][j] = math.sqrt((points[i][0]-points[j][0])**2
# + (points[i][1]-points[j][1])**2)
distance[j][i] = distance[i][j]
for m in range(i):
if distance[i][j] == distance[i-1-m][j]:
result.append([points[i], points[j], points[i-1-m]])
result.append([points[i-1-m], points[j], points[i]])
for m in range(j):
if distance[i][j] == distance[i][j-1-m]:
result.append([points[j], points[i], points[j-1-m]])
result.append([points[j-1-m], points[i], points[j]])
return len(result)
def numberOfBoomerangs(self, points):
"""
:type points: List[List[int]]
:rtype: int
"""
count = 0
data = {}
for i in range(len(points)):
for j in range(i):
distance = (points[i][0]-points[j][0])**2 + (points[i][1]-points[j][1])**2
exts = data.get(distance)
if not exts:
data[distance] = [[i,j]]
else:
for ext in exts:
if ext[0] == i or ext[0] == j or ext[1] == i or ext[1] == j:
count += 2
data[distance].append([i,j])
return count
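# For reference, the usual O(n^2) approach anchors on each point i, counts how
# many points lie at each squared distance from i, and adds k*(k-1) ordered
# (j, k) tuples per group of size k. A sketch, not part of the submission above:
def _number_of_boomerangs(points):
    total = 0
    for xi, yi in points:
        counts = {}
        for xj, yj in points:
            d = (xi - xj) ** 2 + (yi - yj) ** 2
            counts[d] = counts.get(d, 0) + 1
        # k points at the same distance from the anchor give k*(k-1) boomerangs.
        total += sum(k * (k - 1) for k in counts.values())
    return total
# _number_of_boomerangs([[0, 0], [1, 0], [2, 0]]) -> 2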
# if __name__ == '__main__':
# s = Solution()
# print s.numberOfBoomerangs([[0, 0], [1, 0], [2, 0]])
# print s.numberOfBoomerangs([[3327,-549],[9196,-8118],[7610,-9506],[5098,8392],[8582,7953],[1053,5802],[3847,2652],[7654,8355],[1614,-9409],[9986,5538],[4660,2944],[4528,-9512],[7483,-1455],[3422,-3966],[2037,-4456],[5107,-4635],[4996,655],[7247,2606],[1149,8697],[7350,6083],[3002,8403],[8238,6850],[1055,5892],[5205,9021],[2835,5191],[911,-2505],[4488,-4561],[7983,-1677],[336,-2243],[4358,-1274],[3302,9465],[4091,-5350],[120,7690],[3608,7622],[6388,-9042],[57,-610],[9361,8295],[6240,-3232],[540,7797],[2141,-6625],[9341,3053],[7223,3829],[4844,1558],[2152,-8467],[9316,6510],[259,-1030],[2327,-5650],[9972,8800],[2040,-6420],[2774,4780],[4538,-7169],[4171,-6101],[7479,-3237],[7019,-1981],[4561,-4488],[7746,254],[4917,4969],[4083,-238],[6528,-7413],[1295,-7804],[5450,-8446],[1166,-5871],[2256,-8862],[2929,-5704],[4718,2055],[5429,-4392],[4887,9600],[9507,-1282],[2715,2878],[6737,-6372],[8390,-9165],[3882,3308],[5805,4317],[9422,8685],[3257,-2931],[881,-1293],[8623,-1601],[2836,879],[5889,2118],[1527,607],[4173,-3044],[6215,5412],[2908,-7926],[4130,-8024],[1304,7219],[1956,-3954],[8055,5839],[5706,212],[6508,5128],[8897,9765],[2197,-3870],[8472,-2828],[4529,7661],[4403,-9582],[6131,-7717],[7377,-3344],[5591,9944],[2069,-5148],[8370,-7449],[6828,-3974],[6123,-1216],[2072,530],[975,-2221],[7094,-2516],[9259,-4009],[7249,7809],[8473,2074],[4981,-6998],[9735,5737],[9772,5866],[8020,-6499],[8874,-6389],[3445,-9057],[4815,8167],[9847,1643],[4193,2322],[6780,2617],[9204,4107],[396,6298],[1591,6008],[2289,-4807],[3817,762],[7267,5150],[116,-6646],[887,-3760],[5572,-4741],[9741,4446],[5223,-462],[1742,38],[7705,1589],[1682,-1750],[263,4814],[867,9467],[8921,7616],[5765,-3135],[3624,4406],[2058,-2559],[1520,-675],[2591,-2012],[2679,-169],[4228,-1749],[5090,-6031],[2697,-9687],[9859,791],[352,3916],[8732,-1614],[2166,8995],[3200,9385],[4814,-1527],[7001,579],[5338,-3023],[1337,-2604],[4418,-7143],[3073,3362],[845,-7896],[3193,-8575],[6707,4635],[1746,-595],[4949,1605],[6548,-8347],[1873,5281],[39,-5961],[4276,-409],[9777,-909],[8064,3130],[6022,-245],[108,7360],[7151,4526],[6569,-3423],[4240,-2585],[8681,-2567],[5192,5389],[2069,-3061],[1146,3370],[4896,7694],[5023,6770],[2975,-8586],[7161,-6396],[1005,6938],[2695,-4579],[69,-4931],[5176,177],[2429,-1320],[1055,8999],[5257,-4704],[2766,-6062],[9081,-2042],[5679,-2498],[1249,6825],[7224,-3854],[872,2247],[2916,-6153],[3661,-9923],[7451,-8982],[7016,6498],[6440,-6563],[1568,-8384],[9966,-9651],[296,1021],[9348,-8095],[2669,8466],[2196,-8249],[2777,7875],[5605,4026],[1053,-7170],[172,-8075],[1429,-6912],[5772,-8557],[9518,-424],[2461,2886],[2426,-1099],[6323,-6006],[6870,-3711],[696,3518],[3662,6396],[5424,-3668],[4863,7620],[4435,7640],[1847,-3608],[8018,-7100],[9222,-5457],[4825,7004],[3983,-3050],[8447,-6499],[2878,-9092],[6387,5304],[6162,-938],[5651,3032],[5351,6347],[2902,-4634],[2743,8326],[8050,-6042],[2298,-1163],[7950,-9502],[5229,-4031],[3398,-9196],[512,-5424],[7808,847],[7878,6255],[4349,7108],[7163,736],[8764,9677],[6151,-5585],[2709,-2146],[7114,5612],[3220,-3790],[290,-8730],[168,8941],[107,-5529],[9439,-8311],[440,9189],[2493,7304],[117,6653],[8151,-5653],[2908,8852],[1455,-3577],[5941,-3428],[6101,-7908],[7339,5162],[9946,-5546],[7126,9519],[7016,3769],[789,7184],[2710,-2751],[1655,-1499],[5290,-1553],[4042,-2217],[2103,-9488],[788,-3393],[1211,3696],[1811,9019],[6471,-2248],[5591,8924],[6196,2930],[4087,6143],[3736,7565],[5662,-9248],[1334,2803],[4289,-9604],[6404,2296],[8897,-8306],[7096,-708],[5829,9199],[6156,-3383],[2158,-2633],[6665,-
# 9678],[6386,3137],[8074,1977],[2061,4271],[4908,-7500],[6766,4996],[66,8780],[5749,1400],[7935,38],[1797,-5660],[2334,7046],[2386,9430],[2690,-1784],[4982,-1154],[1185,3492],[6214,-2149],[3814,8952],[7340,8241],[930,-4247],[8864,2190],[8254,5630],[7186,-5328],[762,9287],[6072,8697],[9325,-5779],[9389,1660],[7620,-8224],[7442,-9690],[9992,-7576],[5509,7529],[2269,8075],[5380,-3917],[7027,-7280],[4324,-5691],[8474,3188],[6499,3080],[5170,-9962],[7752,5932],[9325,176],[982,-1349],[4398,371],[6663,-1630],[2147,-9543],[5032,8491],[9234,541],[6021,1503],[8616,7753],[3938,-8004],[6826,8263],[6305,-8348],[7803,9157],[4732,-674],[9195,-1164],[5258,8520],[9012,2592],[3523,-238],[2964,6538],[8132,1463],[3348,-6835],[6307,2582],[58,-7672],[437,5027],[6433,4375],[7023,3259],[8990,-6672],[4911,3146],[2485,-4005],[2472,8032],[4831,-5918],[2905,196],[6675,6428],[9958,9639],[9319,4443],[7454,-7333],[3960,3761],[1601,-9630],[2441,2038],[5397,-1125],[6413,2420],[8486,1756],[2101,3398],[4902,938],[5745,-2626],[5323,-3071],[1456,8228],[7125,-1869],[1008,3435],[4122,6679],[4230,1577],[9346,8190],[1690,947],[4913,4132],[9337,310],[3007,-4249],[9083,-8507],[7507,-2464],[1243,-7591],[4826,-3011],[6135,-9851],[3918,7591],[8377,-2605],[5723,-4262],[830,-3803],[2417,-8587],[7774,8116],[5955,9465],[5415,868],[9949,-5247],[1179,2956],[6856,6614],[801,-9285],[4150,8397],[9476,8976],[1738,-4389],[9126,2008],[3202,3855],[9403,-4723],[9593,6585],[1475,-7989],[7998,-4399],[127,306],[1418,-4458],[1174,1367],[6647,-7647],[4323,3503],[8967,1477],[4218,9469],[6226,3694],[8446,-2036],[9305,3924],[9972,8860],[7779,5727],[4137,-6275],[8664,1964],[5736,-6985],[7566,-7785],[3321,8984],[4109,4495],[352,757],[3201,1027],[4260,-1480],[8856,4831],[7990,-4918],[8525,-7212],[3046,-5817],[6712,-630],[3043,-5509],[1449,-6468],[8216,-3534],[5497,304],[9481,3063],[8871,9154],[8399,2981],[1,8751],[90,-6798],[6131,-9298],[8075,-5013],[5533,6065],[70,-9589],[5205,9468],[946,1917],[5191,-6011],[2760,-7008],[3873,7329],[9458,9370],[7633,5291],[8785,2857],[797,3537],[2190,-9201],[2288,-7720],[353,4771],[9334,-1572],[9759,1220],[845,-3819],[7983,6050],[2001,-1071],[4319,-2808],[9270,7080],[6537,3143],[4409,2347],[8866,8394],[7639,4003],[7603,4788],[7540,-207],[5587,6181],[8425,5941],[952,-5888],[721,-2937],[5332,-8433],[3244,-6685],[3969,5246],[2244,8289],[8790,-8486],[1721,-4673],[1009,-3870],[7675,9875],[876,-8334],[231,-1520],[6454,7771],[4625,2042],[304,9403],[4335,-8743],[3515,-4944],[4672,8847],[2975,7917],[8514,6945],[3163,758],[1586,1953],[8624,-6693],[7281,9633],[5789,1308],[5861,-6983],[2974,-3908],[7849,-572],[215,-7525]])
| [ …def_use_chains character-offset pairs elided… ] |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from conans import ConanFile, CMake, tools
import os
import shutil
class LibtiffConan(ConanFile):
name = "libtiff"
description = "Library for Tag Image File Format (TIFF)"
url = "https://github.com/conan-io/conan-center-index"
author = "Bincrafters <bincrafters@gmail.com>"
license = "MIT"
homepage = "http://www.simplesystems.org/libtiff"
topics = ("tiff", "image", "bigtiff", "tagged-image-file-format")
exports_sources = ["CMakeLists.txt"]
generators = "cmake"
settings = "os", "compiler", "build_type", "arch"
options = {"shared": [True, False], "fPIC": [True, False]}
default_options = {'shared': False, 'fPIC': True}
requires = "zlib/1.2.11"
_source_subfolder = "source_subfolder"
def config_options(self):
if self.settings.os == "Windows":
self.options.remove("fPIC")
del self.settings.compiler.libcxx
del self.settings.compiler.cppstd
def source(self):
tools.get(**self.conan_data["sources"][self.version])
os.rename('tiff-' + self.version, self._source_subfolder)
os.rename(os.path.join(self._source_subfolder, "CMakeLists.txt"),
os.path.join(self._source_subfolder, "CMakeListsOriginal.txt"))
shutil.copy("CMakeLists.txt",
os.path.join(self._source_subfolder, "CMakeLists.txt"))
def build(self):
cmake = CMake(self)
cmake.definitions['CMAKE_INSTALL_LIBDIR'] = 'lib'
cmake.definitions['CMAKE_INSTALL_BINDIR'] = 'bin'
cmake.definitions['CMAKE_INSTALL_INCLUDEDIR'] = 'include'
cmake.definitions["lzma"] = False
cmake.definitions["jpeg"] = False
cmake.definitions["jbig"] = False
if self.options.shared and self.settings.compiler == "Visual Studio":
# https://github.com/Microsoft/vcpkg/blob/master/ports/tiff/fix-cxx-shared-libs.patch
tools.replace_in_file(os.path.join(self._source_subfolder, 'libtiff', 'CMakeLists.txt'),
r'set_target_properties(tiffxx PROPERTIES SOVERSION ${SO_COMPATVERSION})',
r'set_target_properties(tiffxx PROPERTIES SOVERSION ${SO_COMPATVERSION} '
r'WINDOWS_EXPORT_ALL_SYMBOLS ON)')
if self.settings.os == "Windows" and self.settings.compiler != "Visual Studio":
tools.replace_in_file(os.path.join(self._source_subfolder, "CMakeListsOriginal.txt"),
"find_library(M_LIBRARY m)",
"if (NOT MINGW)\n find_library(M_LIBRARY m)\nendif()")
if self.version == '4.0.8':
# only one occurrence must be patched. fixed in 4.0.9
tools.replace_in_file(os.path.join(self._source_subfolder, "CMakeListsOriginal.txt"),
"if (UNIX)",
"if (UNIX OR MINGW)")
tools.replace_in_file(os.path.join(self._source_subfolder, "CMakeListsOriginal.txt"),
"add_subdirectory(tools)\nadd_subdirectory(test)\nadd_subdirectory(contrib)\nadd_subdirectory(build)\n"
"add_subdirectory(man)\nadd_subdirectory(html)", "")
cmake.definitions["BUILD_SHARED_LIBS"] = self.options.shared
cmake.configure(source_folder=self._source_subfolder)
cmake.build()
cmake.install()
def package(self):
self.copy("COPYRIGHT", src=self._source_subfolder, dst="licenses", ignore_case=True, keep_path=False)
tools.rmdir(os.path.join(self.package_folder, 'lib', 'pkgconfig'))
def package_info(self):
self.cpp_info.libs = ["tiff", "tiffxx"]
if self.settings.os == "Windows" and self.settings.build_type == "Debug" and self.settings.compiler == 'Visual Studio':
self.cpp_info.libs = [lib+'d' for lib in self.cpp_info.libs]
if self.options.shared and self.settings.os == "Windows" and self.settings.compiler != 'Visual Studio':
self.cpp_info.libs = [lib+'.dll' for lib in self.cpp_info.libs]
if self.settings.os == "Linux":
self.cpp_info.libs.append("m")
| [ …def_use_chains character-offset pairs elided… ] |
#! /usr/bin/env python3
#Copyright 2018 Google LLC
#Licensed under the Apache License, Version 2.0 (the "License");
#you may not use this file except in compliance with the License.
#You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
#Unless required by applicable law or agreed to in writing, software
#distributed under the License is distributed on an "AS IS" BASIS,
#WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
#See the License for the specific language governing permissions and
#limitations under the License.
import random
def makeMatches(profile_dict, player_pools):
###########################################################################
# This is the exciting part, and where most of your custom code would go! #
###########################################################################
# The python3 MMF harness passes this function the filtered players and their
# filtered attributes in the player_pools dictionary. If we wanted to evaluate
# other player attributes, we could connect to redis directly and query the
# players by their ID to get the entire 'properties' player JSON passed in
# to the frontend API when they entered matchmaking.
# This basic example just pulls players at random from the specified pools in the
# profile. It only serves to show how the dictionaries are accessed; you
# should write your own rigorous logic here.
for roster in profile_dict['properties']['rosters']:
for player in roster['players']:
if 'pool' in player:
player['id'] = random.choice(list(player_pools[player['pool']]))
del player_pools[player['pool']][player['id']]
print("Selected player %s from pool %s (strategy: RANDOM)" % (player['id'], player['pool']))
else:
print(player)
return profile_dict
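# A toy invocation showing the shapes this function expects; the pool and
# roster layout below is made up for illustration:
if __name__ == "__main__":
    profile = {
        "properties": {
            "rosters": [
                {"players": [{"pool": "everyone"}, {"pool": "everyone"}]},
            ]
        }
    }
    pools = {"everyone": {"alice": {}, "bob": {}, "carol": {}}}
    print(makeMatches(profile, pools))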
| [ …def_use_chains character-offset pairs elided… ] |
import re
from typing import Any, List, Match, Optional
from markdown import Markdown
from markdown.extensions import Extension
from markdown.preprocessors import Preprocessor
from zerver.lib.markdown.preprocessor_priorities import PREPROCESSOR_PRIORITES
# There is a lot of duplicated code between this file and
# help_settings_links.py. So if you're making a change here consider making
# it there as well.
REGEXP = re.compile(r"\{relative\|(?P<link_type>.*?)\|(?P<key>.*?)\}")
gear_info = {
# The pattern is key: [name, link]
# key is from REGEXP: `{relative|gear|key}`
# name is what the item is called in the gear menu: `Select **name**.`
# link is used for relative links: `Select [name](link).`
"manage-streams": ["Manage streams", "/#streams/subscribed"],
"settings": ["Personal Settings", "/#settings/profile"],
"manage-organization": ["Manage organization", "/#organization/organization-profile"],
"integrations": ["Integrations", "/integrations"],
"stats": ["Usage statistics", "/stats"],
"plans": ["Plans and pricing", "/plans"],
"billing": ["Billing", "/billing"],
"invite": ["Invite users", "/#invite"],
}
gear_instructions = """
1. Click on the **gear** (<i class="fa fa-cog"></i>) icon in the upper
right corner of the web or desktop app.
1. Select {item}.
"""
def gear_handle_match(key: str) -> str:
if relative_help_links:
item = f"[{gear_info[key][0]}]({gear_info[key][1]})"
else:
item = f"**{gear_info[key][0]}**"
return gear_instructions.format(item=item)
stream_info = {
"all": ["All streams", "/#streams/all"],
"subscribed": ["Subscribed", "/#streams/subscribed"],
}
stream_instructions_no_link = """
1. Click on the **gear** (<i class="fa fa-cog"></i>) icon in the upper
right corner of the web or desktop app.
1. Click **Manage streams**.
"""
def stream_handle_match(key: str) -> str:
if relative_help_links:
return f"1. Go to [{stream_info[key][0]}]({stream_info[key][1]})."
if key == "all":
return stream_instructions_no_link + "\n\n1. Click **All streams** in the upper left."
return stream_instructions_no_link
LINK_TYPE_HANDLERS = {
"gear": gear_handle_match,
"stream": stream_handle_match,
}
class RelativeLinksHelpExtension(Extension):
def extendMarkdown(self, md: Markdown) -> None:
"""Add RelativeLinksHelpExtension to the Markdown instance."""
md.registerExtension(self)
md.preprocessors.register(
RelativeLinks(), "help_relative_links", PREPROCESSOR_PRIORITES["help_relative_links"]
)
relative_help_links: Optional[bool] = None
def set_relative_help_links(value: bool) -> None:
global relative_help_links
relative_help_links = value
class RelativeLinks(Preprocessor):
def run(self, lines: List[str]) -> List[str]:
done = False
while not done:
for loc, line in enumerate(lines):
match = REGEXP.search(line)
if match:
text = [self.handleMatch(match)]
# The line that contains the directive to include the macro
# may be preceded or followed by text or tags, in that case
# we need to make sure that any preceding or following text
# stays the same.
line_split = REGEXP.split(line, maxsplit=0)
preceding = line_split[0]
following = line_split[-1]
text = [preceding, *text, following]
lines = lines[:loc] + text + lines[loc + 1 :]
break
else:
done = True
return lines
def handleMatch(self, match: Match[str]) -> str:
return LINK_TYPE_HANDLERS[match.group("link_type")](match.group("key"))
def makeExtension(*args: Any, **kwargs: Any) -> RelativeLinksHelpExtension:
return RelativeLinksHelpExtension(*args, **kwargs)
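# A sketch of how the extension is wired into a Markdown instance (run inside
# Zulip's environment, since the priorities import above is Zulip-specific);
# the input line is made up for illustration:
if __name__ == "__main__":
    import markdown
    set_relative_help_links(True)
    md_engine = markdown.Markdown(extensions=[RelativeLinksHelpExtension()])
    print(md_engine.convert("{relative|gear|settings}"))
    # Expands into the numbered gear-menu instructions with
    # [Personal Settings](/#settings/profile) substituted for {item}.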
| [ …def_use_chains character-offset pairs elided… ] |
import math
class Robo:
    def __init__(self, nome):
        self.__nome = nome
        self.__posicao = [0.0, 0.0]
        self.__em_op = False
    @property
    def nome(self):
        return self.__nome
    @nome.setter
    def nome(self, alterar_nome):
        self.__nome = alterar_nome
    @property
    def posicao(self):
        return self.__posicao
    @property
    def em_op(self):
        # read-only view of the busy flag, so other classes do not have to
        # reach into the name-mangled attribute
        return self.__em_op
    def __str__(self):
        return f'Robot: {self.__nome}, {self.__em_op} at {self.__posicao}'
    def distancia(self, nposicao):
        # Euclidean distance from the current position to nposicao
        return math.sqrt((self.__posicao[0] - nposicao[0]) ** 2 +
                         (self.__posicao[1] - nposicao[1]) ** 2)
    def move(self, nposicao):
        # moving a robot puts it in operation
        self.__posicao = list(nposicao)
        self.__em_op = True
class SistemaMultiRobos:
    def __init__(self, quantidade):
        self.__robos = []
        for i in range(quantidade):
            self.__robos.append(Robo(i))
    def _acha_robo_ocioso(self):
        # returns the first idle robot, or None when every robot is busy
        for robo in self.__robos:
            if not robo.em_op:
                return robo
        return None
    def imprime_robos(self):
        for robo in self.__robos:
            print(robo)
    def despacha(self, coordenadas):
        # dispatches an idle robot, if any, to the given coordinates
        robo = self._acha_robo_ocioso()
        if robo is not None:
            robo.move(coordenadas)
if __name__ == '__main__':
    smr = SistemaMultiRobos(3)  # system with 3 robots
    smr.imprime_robos()
    smr.despacha((5.0, 5.0))
    smr.imprime_robos()
    smr.despacha((-5.0, -5.0))
    smr.imprime_robos()
    smr.despacha((0.0, -10.0))
    smr.imprime_robos()
    smr.despacha((15.0, 15.0))
smr.imprime_robos()
| [ …def_use_chains character-offset pairs elided… ] |
# from JumpScale.baselib.codeexecutor.CodeExecutor import CodeExecutor
import inspect
from JumpScale import j
from ClassBase import ClassBase, JSModelBase, JSRootModelBase
from TemplateEngineWrapper import TemplateEngineWrapper
from JumpScale.data.regex.RegexTools import RegexTools
from TextFileEditor import TextFileEditor
from WordReplacer import WordReplacer
# ujson.dumps does not support some arguments like separators, indent ...etc
def isPrimAttribute(obj, key):
if key[-1] == "s":
funcprop = "new_%s" % key[:-1]
else:
funcprop = "new_%s" % key
isprimtype = not hasattr(obj, funcprop)
return isprimtype, funcprop
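# isPrimAttribute decides whether a plural attribute holds primitives or
# managed child objects by probing for a new_<singular> factory on the parent.
# A tiny demonstration with a stand-in object:
class _PrimDemo:
    def new_item(self):
        return object()
# isPrimAttribute(_PrimDemo(), "items")  -> (False, 'new_item'): managed children
# isPrimAttribute(_PrimDemo(), "colors") -> (True, 'new_color'): plain primitive list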
class Struct:
def __init__(self, **kwargs):
self.__dict__.update(kwargs)
class CodeTools:
def __init__(self):
self.__jslocation__ = "j.tools.code"
self._templateengine = None
# self.executor = CodeExecutor()
self._regex = None
self._wordreplacer = None
self._codemanager = None
self._texteditor = None
@property
def codemanager(self):
if self._codemanager is None:
from CodeManager import CodeManager
self._codemanager = CodeManager()
return self._codemanager
@property
def regex(self):
if self._regex is None:
self._regex = RegexTools()
return self._regex
@property
def templateengine(self):
if self._templateengine is None:
self._templateengine = TemplateEngineWrapper()
return self._templateengine
@property
def texteditor(self):
if self._texteditor is None:
self._texteditor = TextFileEditor()
return self._texteditor
@property
def wordreplacer(self):
if self._wordreplacer is None:
self._wordreplacer = WordReplacer()
return self._wordreplacer
def textToTitle(self, text, maxnrchars=60):
"""
try to create a title out of text, ignoring irrelevant words and making lower case and removing
not needed chars
"""
ignore = "for in yes no after up down the"
ignoreitems = ignore.split(" ")
keepchars = "abcdefghijklmnopqrstuvwxyz1234567890 "
out = ""
text = text.lower().strip()
for char in text:
if char in keepchars:
out += char
        text = out
        # collapse runs of spaces left by the stripped characters
        while "  " in text:
            text = text.replace("  ", " ")
        out = ""
nr = 0
for item in text.split(" "):
if item not in ignoreitems:
nr += len(item)
if nr < maxnrchars:
out += item + " "
        text = out.strip()
if len(text) > maxnrchars:
text = text[:maxnrchars]
return text
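    # Hypothetical example (given the whitespace collapsing above):
    #   textToTitle("The Best Guide for Robots!") -> "best guide robots"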
def classInfoPrint(self, classs):
"""
print info like source code of class
"""
filepath, linenr, sourcecode = self.classInfoGet(classs)
        print("line:%s in path:%s" % (linenr, filepath))
print(sourcecode)
def classInfoGet(self, classs):
"""
returns filepath,linenr,sourcecode
"""
code, nr = inspect.getsourcelines(classs.__class__)
code = "".join(code)
path = inspect.getsourcefile(classs.__class__)
return path, nr, code
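    # Note: inspect operates on classs.__class__, so passing an instance
    # returns the defining file path, the class's first line number, and
    # its full source text.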
def classEditGeany(self, classs):
"""
look for editor (uses geany) and then edit the file
"""
filepath, linenr, sourcecode = self.classInfoGet(classs)
j.sal.process.executeWithoutPipe("geany %s" % filepath)
def classGetBase(self):
return ClassBase
# def classGetAppserver6GreenletSchedule(self):
# return Appserver6GreenletScheduleBase
# def classGetAppserver6Greenlet(self):
# return Appserver6GreenletBase
# def classGetAppserver6GreenletTasklets(self):
# return Appserver6GreenletTaskletsBase
def dict2object(self, obj, data):
if obj is None:
return Struct(**data)
if hasattr(obj, "_dict2obj"):
return obj._dict2obj(data)
if isinstance(data, dict):
for key, value in list(data.items()):
# is for new obj functionname
objpropname = "%s" % key
if isinstance(value, dict) and isinstance(obj.__dict__[objpropname], dict):
# is a real dict (not a dict as representation of an object)
isprimtype, funcprop = isPrimAttribute(obj, key)
if not isprimtype:
raise j.exceptions.RuntimeError("not supported")
else:
for valkey, valval in list(value.items()):
attr = getattr(obj, key)
attr[valkey] = valval
elif isinstance(data[key], list):
isprimtype, funcprop = isPrimAttribute(obj, key)
if not isprimtype:
method = getattr(obj, funcprop)
for valval in value:
newobj = method()
self.dict2object(newobj, valval)
else:
                        for valval in value:
attr = getattr(obj, key)
attr.append(valval)
elif isinstance(value, dict) and not isinstance(obj.__dict__[objpropname], dict):
# is a dict which represents another object
raise j.exceptions.RuntimeError("not supported, only 1 level deep objects")
else:
obj.__dict__[objpropname] = value
return obj
else:
return data
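    # Hypothetical usage (values assumed): with obj=None the data is wrapped
    # in a generic Struct, so
    #   j.tools.code.dict2object(None, {"host": "x"}).host -> "x"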
def dict2JSModelobject(self, obj, data):
if isinstance(data, dict):
for key, value in list(data.items()):
# is for new obj functionname
objpropname = "_P_%s" % key if not key.startswith('_P_') else key
if isinstance(value, dict) and isinstance(obj.__dict__[objpropname], dict):
# is a real dict (not a dict as representation of an object)
isprimtype, funcprop = isPrimAttribute(obj, key)
if not isprimtype:
method = getattr(obj, funcprop)
for valkey, valval in list(value.items()):
newobj = method(valkey)
self.dict2JSModelobject(newobj, valval)
else:
for valkey, valval in list(value.items()):
attr = getattr(obj, key)
attr[valkey] = valval
elif isinstance(value, list):
if key == '_meta':
# we do not duplicate meta
continue
isprimtype, funcprop = isPrimAttribute(obj, key)
if not isprimtype:
method = getattr(obj, funcprop)
for valval in value:
newobj = method()
self.dict2JSModelobject(newobj, valval)
else:
for valval in value:
attr = getattr(obj, key)
attr.append(valval)
elif isinstance(value, dict) and not isinstance(obj.__dict__[objpropname], dict):
# is a dict which represents another object
obj.__dict__[objpropname] = self.dict2JSModelobject(obj.__dict__[objpropname], value)
else:
obj.__dict__[objpropname] = value
return obj
else:
return data
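    # Note: JSModel objects store their fields under a "_P_" prefix
    # (e.g. key "name" lives in obj.__dict__["_P_name"]), which is why the
    # property name is rewritten above before assignment.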
# def dict2object2(self,d):
# if isinstance(d, dict):
#n = {}
# for item in d:
# if isinstance(d[item], dict):
#n[item] = dict2obj(d[item])
# elif isinstance(d[item], (list, tuple)):
#n[item] = [dict2obj(elem) for elem in d[item]]
# else:
#n[item] = d[item]
# return type('obj_from_dict', (object,), n)
# else:
# return d
def object2dict4index(self, obj):
"""
convert object to a dict
only properties on first level are considered
and properties of basic types like int,str,float,bool,dict,list
ideal to index the basics of an object
"""
result = {}
def toStr(obj, possibleList=True):
if isinstance(obj, (str, int, float, bool)) or obj is None:
return str(obj)
            elif possibleList and j.data.types.list.check(obj):
                r = ""
                for item in obj:
                    rr = toStr(item, possibleList=False)
if rr != "":
r += "%s," % rr
r = r.rstrip(",")
return r
return ""
if isinstance(obj, ClassBase):
for key, value in list(obj.__dict__.items()):
if key[0:3] == "_P_":
key = key[3:]
elif key[0] == "_":
continue
if j.data.types.dict.check(value):
for key2 in list(value.keys()):
r = toStr(value[key2])
if r != "":
result["%s.%s" (key, key2)] = r
else:
r = toStr(value)
if r != "":
result[key] = r
return result
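    # Hypothetical illustration: an object with _P_name = "db1" and
    # tags = {"env": "prod"} indexes as {"name": "db1", "tags.env": "prod"}.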
def object2dict(self, obj, dieOnUnknown=False, ignoreKeys=[], ignoreUnderscoreKeys=False):
if j.data.types.dict.check(obj):
return obj
data = {}
def todict(obj, data, ignoreKeys):
if isinstance(obj, dict):
value = {}
for key in list(obj.keys()):
if key in ignoreKeys:
continue
if ignoreUnderscoreKeys and key and key[0] == "_":
continue
value[key] = todict(obj[key], {}, ignoreKeys)
return value
elif isinstance(obj, (tuple, list)):
value = []
for item in obj:
value.append(todict(item, {}, ignoreKeys))
return value
            elif isinstance(obj, str):
                return obj
            elif isinstance(obj, (int, float, bool)) or obj is None:
                return obj
            elif isinstance(obj, bytes):
                return obj.decode('utf-8', 'ignore')
elif isinstance(obj, ClassBase):
if hasattr(obj, "_obj2dict"):
return obj._obj2dict()
else:
for key, value in list(obj.__dict__.items()):
if key[0:3] == "_P_":
key = key[3:]
if key in ignoreKeys:
continue
elif ignoreUnderscoreKeys and key[0] == "_":
continue
data[key] = todict(value, {}, ignoreKeys)
return data
else:
#from JumpScale.core.Shell import ipshellDebug,ipshell
# print "DEBUG NOW Can only convert object to dict with properties basic types or inherited of ClassBase"
# ipshell()
if dieOnUnknown:
raise j.exceptions.RuntimeError(
"Can only convert object to dict with properties basic types or inherited of ClassBase")
                try:
                    val = str(obj)
                except Exception:
                    val = "__UNKNOWN__"
                return val
out = todict(obj, data, ignoreKeys)
# print out
return out
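    # Hypothetical usage:
    #   j.tools.code.object2dict(myobj, ignoreUnderscoreKeys=True)
    # returns a plain dict with "_"-prefixed attributes skipped.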
def object2yaml(self, obj):
return j.data.serializer.yaml.dumps(self.object2dict(obj))
def object2json(self, obj, pretty=False, skiperrors=False, ignoreKeys=[], ignoreUnderscoreKeys=False):
obj = self.object2dict(obj, dieOnUnknown=not skiperrors, ignoreKeys=ignoreKeys,
ignoreUnderscoreKeys=ignoreUnderscoreKeys)
if pretty:
return j.data.serializer.json.dumps(obj, indent=2, sort_keys=True)
else:
return j.data.serializer.json.dumps(obj)
def pprint(self, obj):
result = self.object2yaml(obj)
result = result.replace("!!python/unicode", "")
print(result)
def deIndent(self, content, level=1):
for i in range(0, level):
content = self._deIndent(content)
return content
def indent(self, content, level=1):
if not content:
return content
if content[-1] == "\n":
content = content[:-1]
lines = list()
for line in content.splitlines():
indent = " " * 4 * level
lines.append("%s%s\n" % (indent, line))
return "".join(lines)
def _deIndent(self, content):
# remove garbage & fix identation
content2 = ""
for line in content.split("\n"):
if line.strip() == "":
content2 += "\n"
else:
if line.find(" ") != 0:
raise j.exceptions.RuntimeError("identation error for %s." % content)
content2 += "%s\n" % line[4:]
return content2
| [
[
[
79,
86
],
[
3227,
3234
],
[
3312,
3319
]
],
[
[
109,
110
],
[
3578,
3579
],
[
4633,
4634
],
[
5601,
5602
],
[
9350,
9351
],
[
9837,
9838
],
[
12140,
12141
],
[
12500,
12501
],
[
12593,
12594
],
[
13556,
13557
],
[
8801,
8802
],
[
11719,
11720
]
],
[
[
134,
143
],
[
3678,
3687
],
[
9124,
9133
],
[
10861,
10870
]
],
[
[
145,
156
]
],
[
[
158,
173
]
],
[
[
208,
229
],
[
1500,
1521
]
],
[
[
274,
284
],
[
1339,
1349
]
],
[
[
312,
326
],
[
1669,
1683
]
],
[
[
352,
364
],
[
1833,
1845
]
],
[
[
449,
464
],
[
4538,
4553
],
[
4966,
4981
],
[
6284,
6299
],
[
7001,
7016
]
],
[
[
666,
672
],
[
4057,
4063
]
],
[
[
754,
763
]
]
] |