text
stringlengths
28
881k
""" fprime version handling and reporting """NEWLINEimport osNEWLINEfrom setuptools_scm import get_versionNEWLINENEWLINEROOT_PARENT_COUNT = 5NEWLINENEWLINENEWLINEdef get_fprime_version():NEWLINE """Gets the fprime version using setuptools_scm"""NEWLINE # First try to read the SCM versionNEWLINE try:NEWLINE return get_version(NEWLINE root=os.sep.join([".."] * ROOT_PARENT_COUNT), relative_to=__file__NEWLINE )NEWLINE # Fallback to a specified version when SCM is unavailableNEWLINE except LookupError:NEWLINE return "1.5.4" # Must be kept up-to-date when taggingNEWLINE
# Generated by Django 3.2.3 on 2021-07-09 11:22NEWLINENEWLINEfrom django.db import migrations, modelsNEWLINENEWLINENEWLINEclass Migration(migrations.Migration):NEWLINENEWLINE dependencies = [NEWLINE ('core', '0001_initial'),NEWLINE ]NEWLINENEWLINE operations = [NEWLINE migrations.AlterField(NEWLINE model_name='article',NEWLINE name='id',NEWLINE field=models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'),NEWLINE ),NEWLINE ]NEWLINE
from __future__ import annotationsNEWLINENEWLINEfrom typing import (NEWLINE TYPE_CHECKING,NEWLINE Callable,NEWLINE Dict,NEWLINE Hashable,NEWLINE List,NEWLINE Optional,NEWLINE Sequence,NEWLINE Set,NEWLINE Tuple,NEWLINE Union,NEWLINE cast,NEWLINE)NEWLINENEWLINEimport numpy as npNEWLINENEWLINEfrom pandas._typing import (NEWLINE AggFuncType,NEWLINE AggFuncTypeBase,NEWLINE AggFuncTypeDict,NEWLINE FrameOrSeriesUnion,NEWLINE IndexLabel,NEWLINE)NEWLINEfrom pandas.util._decorators import (NEWLINE Appender,NEWLINE Substitution,NEWLINE)NEWLINENEWLINEfrom pandas.core.dtypes.cast import maybe_downcast_to_dtypeNEWLINEfrom pandas.core.dtypes.common import (NEWLINE is_integer_dtype,NEWLINE is_list_like,NEWLINE is_scalar,NEWLINE)NEWLINEfrom pandas.core.dtypes.generic import (NEWLINE ABCDataFrame,NEWLINE ABCSeries,NEWLINE)NEWLINENEWLINEimport pandas.core.common as comNEWLINEfrom pandas.core.frame import _shared_docsNEWLINEfrom pandas.core.groupby import GrouperNEWLINEfrom pandas.core.indexes.api import (NEWLINE Index,NEWLINE MultiIndex,NEWLINE get_objs_combined_axis,NEWLINE)NEWLINEfrom pandas.core.reshape.concat import concatNEWLINEfrom pandas.core.reshape.util import cartesian_productNEWLINEfrom pandas.core.series import SeriesNEWLINENEWLINEif TYPE_CHECKING:NEWLINE from pandas import DataFrameNEWLINENEWLINENEWLINE# Note: We need to make sure `frame` is imported before `pivot`, otherwiseNEWLINE# _shared_docs['pivot_table'] will not yet exist. 
TODO: Fix this dependencyNEWLINE@Substitution("\ndata : DataFrame")NEWLINE@Appender(_shared_docs["pivot_table"], indents=1)NEWLINEdef pivot_table(NEWLINE data: DataFrame,NEWLINE values=None,NEWLINE index=None,NEWLINE columns=None,NEWLINE aggfunc: AggFuncType = "mean",NEWLINE fill_value=None,NEWLINE margins=False,NEWLINE dropna=True,NEWLINE margins_name="All",NEWLINE observed=False,NEWLINE) -> DataFrame:NEWLINE index = _convert_by(index)NEWLINE columns = _convert_by(columns)NEWLINENEWLINE if isinstance(aggfunc, list):NEWLINE pieces: List[DataFrame] = []NEWLINE keys = []NEWLINE for func in aggfunc:NEWLINE _table = __internal_pivot_table(NEWLINE data,NEWLINE values=values,NEWLINE index=index,NEWLINE columns=columns,NEWLINE fill_value=fill_value,NEWLINE aggfunc=func,NEWLINE margins=margins,NEWLINE dropna=dropna,NEWLINE margins_name=margins_name,NEWLINE observed=observed,NEWLINE )NEWLINE pieces.append(_table)NEWLINE keys.append(getattr(func, "__name__", func))NEWLINENEWLINE table = concat(pieces, keys=keys, axis=1)NEWLINE return table.__finalize__(data, method="pivot_table")NEWLINENEWLINE table = __internal_pivot_table(NEWLINE data,NEWLINE values,NEWLINE index,NEWLINE columns,NEWLINE aggfunc,NEWLINE fill_value,NEWLINE margins,NEWLINE dropna,NEWLINE margins_name,NEWLINE observed,NEWLINE )NEWLINE return table.__finalize__(data, method="pivot_table")NEWLINENEWLINENEWLINEdef __internal_pivot_table(NEWLINE data: DataFrame,NEWLINE values,NEWLINE index,NEWLINE columns,NEWLINE aggfunc: Union[AggFuncTypeBase, AggFuncTypeDict],NEWLINE fill_value,NEWLINE margins: bool,NEWLINE dropna: bool,NEWLINE margins_name: str,NEWLINE observed: bool,NEWLINE) -> DataFrame:NEWLINE """NEWLINE Helper of :func:`pandas.pivot_table` for any non-list ``aggfunc``.NEWLINE """NEWLINE keys = index + columnsNEWLINENEWLINE values_passed = values is not NoneNEWLINE if values_passed:NEWLINE if is_list_like(values):NEWLINE values_multi = TrueNEWLINE values = list(values)NEWLINE else:NEWLINE values_multi = 
FalseNEWLINE values = [values]NEWLINENEWLINE # GH14938 Make sure value labels are in dataNEWLINE for i in values:NEWLINE if i not in data:NEWLINE raise KeyError(i)NEWLINENEWLINE to_filter = []NEWLINE for x in keys + values:NEWLINE if isinstance(x, Grouper):NEWLINE x = x.keyNEWLINE try:NEWLINE if x in data:NEWLINE to_filter.append(x)NEWLINE except TypeError:NEWLINE passNEWLINE if len(to_filter) < len(data.columns):NEWLINE data = data[to_filter]NEWLINENEWLINE else:NEWLINE values = data.columnsNEWLINE for key in keys:NEWLINE try:NEWLINE values = values.drop(key)NEWLINE except (TypeError, ValueError, KeyError):NEWLINE passNEWLINE values = list(values)NEWLINENEWLINE grouped = data.groupby(keys, observed=observed)NEWLINE agged = grouped.agg(aggfunc)NEWLINE if dropna and isinstance(agged, ABCDataFrame) and len(agged.columns):NEWLINE agged = agged.dropna(how="all")NEWLINENEWLINE # gh-21133NEWLINE # we want to down cast ifNEWLINE # the original values are intsNEWLINE # as we grouped with a NaN valueNEWLINE # and then dropped, coercing to floatsNEWLINE for v in values:NEWLINE if (NEWLINE v in dataNEWLINE and is_integer_dtype(data[v])NEWLINE and v in aggedNEWLINE and not is_integer_dtype(agged[v])NEWLINE ):NEWLINE agged[v] = maybe_downcast_to_dtype(agged[v], data[v].dtype)NEWLINENEWLINE table = aggedNEWLINENEWLINE # GH17038, this check should only happen if index is defined (not None)NEWLINE if table.index.nlevels > 1 and index:NEWLINE # Related GH #17123NEWLINE # If index_names are integers, determine whether the integers referNEWLINE # to the level position or name.NEWLINE index_names = agged.index.names[: len(index)]NEWLINE to_unstack = []NEWLINE for i in range(len(index), len(keys)):NEWLINE name = agged.index.names[i]NEWLINE if name is None or name in index_names:NEWLINE to_unstack.append(i)NEWLINE else:NEWLINE to_unstack.append(name)NEWLINE table = agged.unstack(to_unstack)NEWLINENEWLINE if not dropna:NEWLINE if isinstance(table.index, MultiIndex):NEWLINE m = 
MultiIndex.from_arrays(NEWLINE cartesian_product(table.index.levels), names=table.index.namesNEWLINE )NEWLINE table = table.reindex(m, axis=0)NEWLINENEWLINE if isinstance(table.columns, MultiIndex):NEWLINE m = MultiIndex.from_arrays(NEWLINE cartesian_product(table.columns.levels), names=table.columns.namesNEWLINE )NEWLINE table = table.reindex(m, axis=1)NEWLINENEWLINE if isinstance(table, ABCDataFrame):NEWLINE table = table.sort_index(axis=1)NEWLINENEWLINE if fill_value is not None:NEWLINE _table = table.fillna(fill_value, downcast="infer")NEWLINE assert _table is not None # needed for mypyNEWLINE table = _tableNEWLINENEWLINE if margins:NEWLINE if dropna:NEWLINE data = data[data.notna().all(axis=1)]NEWLINE table = _add_margins(NEWLINE table,NEWLINE data,NEWLINE values,NEWLINE rows=index,NEWLINE cols=columns,NEWLINE aggfunc=aggfunc,NEWLINE observed=dropna,NEWLINE margins_name=margins_name,NEWLINE fill_value=fill_value,NEWLINE )NEWLINENEWLINE # discard the top levelNEWLINE if values_passed and not values_multi and table.columns.nlevels > 1:NEWLINE table = table.droplevel(0, axis=1)NEWLINE if len(index) == 0 and len(columns) > 0:NEWLINE table = table.TNEWLINENEWLINE # GH 15193 Make sure empty columns are removed if dropna=TrueNEWLINE if isinstance(table, ABCDataFrame) and dropna:NEWLINE table = table.dropna(how="all", axis=1)NEWLINENEWLINE return tableNEWLINENEWLINENEWLINEdef _add_margins(NEWLINE table: FrameOrSeriesUnion,NEWLINE data,NEWLINE values,NEWLINE rows,NEWLINE cols,NEWLINE aggfunc,NEWLINE observed=None,NEWLINE margins_name: str = "All",NEWLINE fill_value=None,NEWLINE):NEWLINE if not isinstance(margins_name, str):NEWLINE raise ValueError("margins_name argument must be a string")NEWLINENEWLINE msg = f'Conflicting name "{margins_name}" in margins'NEWLINE for level in table.index.names:NEWLINE if margins_name in table.index.get_level_values(level):NEWLINE raise ValueError(msg)NEWLINENEWLINE grand_margin = _compute_grand_margin(data, values, aggfunc, 
margins_name)NEWLINENEWLINE if table.ndim == 2:NEWLINE # i.e. DataFrameNEWLINE for level in table.columns.names[1:]:NEWLINE if margins_name in table.columns.get_level_values(level):NEWLINE raise ValueError(msg)NEWLINENEWLINE key: Union[str, Tuple[str, ...]]NEWLINE if len(rows) > 1:NEWLINE key = (margins_name,) + ("",) * (len(rows) - 1)NEWLINE else:NEWLINE key = margins_nameNEWLINENEWLINE if not values and isinstance(table, ABCSeries):NEWLINE # If there are no values and the table is a series, then there is onlyNEWLINE # one column in the data. Compute grand margin and return it.NEWLINE return table.append(Series({key: grand_margin[margins_name]}))NEWLINENEWLINE elif values:NEWLINE marginal_result_set = _generate_marginal_results(NEWLINE table, data, values, rows, cols, aggfunc, observed, margins_nameNEWLINE )NEWLINE if not isinstance(marginal_result_set, tuple):NEWLINE return marginal_result_setNEWLINE result, margin_keys, row_margin = marginal_result_setNEWLINE else:NEWLINE # no values, and table is a DataFrameNEWLINE assert isinstance(table, ABCDataFrame)NEWLINE marginal_result_set = _generate_marginal_results_without_values(NEWLINE table, data, rows, cols, aggfunc, observed, margins_nameNEWLINE )NEWLINE if not isinstance(marginal_result_set, tuple):NEWLINE return marginal_result_setNEWLINE result, margin_keys, row_margin = marginal_result_setNEWLINENEWLINE row_margin = row_margin.reindex(result.columns, fill_value=fill_value)NEWLINE # populate grand marginNEWLINE for k in margin_keys:NEWLINE if isinstance(k, str):NEWLINE row_margin[k] = grand_margin[k]NEWLINE else:NEWLINE row_margin[k] = grand_margin[k[0]]NEWLINENEWLINE from pandas import DataFrameNEWLINENEWLINE margin_dummy = DataFrame(row_margin, columns=[key]).TNEWLINENEWLINE row_names = result.index.namesNEWLINE # check the result column and leave floatsNEWLINE for dtype in set(result.dtypes):NEWLINE cols = result.select_dtypes([dtype]).columnsNEWLINE margin_dummy[cols] = margin_dummy[cols].apply(NEWLINE 
maybe_downcast_to_dtype, args=(dtype,)NEWLINE )NEWLINE result = result.append(margin_dummy)NEWLINE result.index.names = row_namesNEWLINENEWLINE return resultNEWLINENEWLINENEWLINEdef _compute_grand_margin(data, values, aggfunc, margins_name: str = "All"):NEWLINENEWLINE if values:NEWLINE grand_margin = {}NEWLINE for k, v in data[values].items():NEWLINE try:NEWLINE if isinstance(aggfunc, str):NEWLINE grand_margin[k] = getattr(v, aggfunc)()NEWLINE elif isinstance(aggfunc, dict):NEWLINE if isinstance(aggfunc[k], str):NEWLINE grand_margin[k] = getattr(v, aggfunc[k])()NEWLINE else:NEWLINE grand_margin[k] = aggfunc[k](v)NEWLINE else:NEWLINE grand_margin[k] = aggfunc(v)NEWLINE except TypeError:NEWLINE passNEWLINE return grand_marginNEWLINE else:NEWLINE return {margins_name: aggfunc(data.index)}NEWLINENEWLINENEWLINEdef _generate_marginal_results(NEWLINE table, data, values, rows, cols, aggfunc, observed, margins_name: str = "All"NEWLINE):NEWLINE if len(cols) > 0:NEWLINE # need to "interleave" the marginsNEWLINE table_pieces = []NEWLINE margin_keys = []NEWLINENEWLINE def _all_key(key):NEWLINE return (key, margins_name) + ("",) * (len(cols) - 1)NEWLINENEWLINE if len(rows) > 0:NEWLINE margin = data[rows + values].groupby(rows, observed=observed).agg(aggfunc)NEWLINE cat_axis = 1NEWLINENEWLINE for key, piece in table.groupby(level=0, axis=cat_axis, observed=observed):NEWLINE all_key = _all_key(key)NEWLINENEWLINE # we are going to mutate this, so need to copy!NEWLINE piece = piece.copy()NEWLINE piece[all_key] = margin[key]NEWLINENEWLINE table_pieces.append(piece)NEWLINE margin_keys.append(all_key)NEWLINE else:NEWLINE from pandas import DataFrameNEWLINENEWLINE cat_axis = 0NEWLINE for key, piece in table.groupby(level=0, axis=cat_axis, observed=observed):NEWLINE if len(cols) > 1:NEWLINE all_key = _all_key(key)NEWLINE else:NEWLINE all_key = margins_nameNEWLINE table_pieces.append(piece)NEWLINE # GH31016 this is to calculate margin for each group, and assignNEWLINE # corresponded key 
as indexNEWLINE transformed_piece = DataFrame(piece.apply(aggfunc)).TNEWLINE transformed_piece.index = Index([all_key], name=piece.index.name)NEWLINENEWLINE # append piece for margin into table_pieceNEWLINE table_pieces.append(transformed_piece)NEWLINE margin_keys.append(all_key)NEWLINENEWLINE result = concat(table_pieces, axis=cat_axis)NEWLINENEWLINE if len(rows) == 0:NEWLINE return resultNEWLINE else:NEWLINE result = tableNEWLINE margin_keys = table.columnsNEWLINENEWLINE if len(cols) > 0:NEWLINE row_margin = data[cols + values].groupby(cols, observed=observed).agg(aggfunc)NEWLINE row_margin = row_margin.stack()NEWLINENEWLINE # slight hackNEWLINE new_order = [len(cols)] + list(range(len(cols)))NEWLINE row_margin.index = row_margin.index.reorder_levels(new_order)NEWLINE else:NEWLINE row_margin = Series(np.nan, index=result.columns)NEWLINENEWLINE return result, margin_keys, row_marginNEWLINENEWLINENEWLINEdef _generate_marginal_results_without_values(NEWLINE table: DataFrame, data, rows, cols, aggfunc, observed, margins_name: str = "All"NEWLINE):NEWLINE if len(cols) > 0:NEWLINE # need to "interleave" the marginsNEWLINE margin_keys: Union[List, Index] = []NEWLINENEWLINE def _all_key():NEWLINE if len(cols) == 1:NEWLINE return margins_nameNEWLINE return (margins_name,) + ("",) * (len(cols) - 1)NEWLINENEWLINE if len(rows) > 0:NEWLINE margin = data[rows].groupby(rows, observed=observed).apply(aggfunc)NEWLINE all_key = _all_key()NEWLINE table[all_key] = marginNEWLINE result = tableNEWLINE margin_keys.append(all_key)NEWLINENEWLINE else:NEWLINE margin = data.groupby(level=0, axis=0, observed=observed).apply(aggfunc)NEWLINE all_key = _all_key()NEWLINE table[all_key] = marginNEWLINE result = tableNEWLINE margin_keys.append(all_key)NEWLINE return resultNEWLINE else:NEWLINE result = tableNEWLINE margin_keys = table.columnsNEWLINENEWLINE if len(cols):NEWLINE row_margin = data[cols].groupby(cols, observed=observed).apply(aggfunc)NEWLINE else:NEWLINE row_margin = Series(np.nan, 
index=result.columns)NEWLINENEWLINE return result, margin_keys, row_marginNEWLINENEWLINENEWLINEdef _convert_by(by):NEWLINE if by is None:NEWLINE by = []NEWLINE elif (NEWLINE is_scalar(by)NEWLINE or isinstance(by, (np.ndarray, Index, ABCSeries, Grouper))NEWLINE or hasattr(by, "__call__")NEWLINE ):NEWLINE by = [by]NEWLINE else:NEWLINE by = list(by)NEWLINE return byNEWLINENEWLINENEWLINE@Substitution("\ndata : DataFrame")NEWLINE@Appender(_shared_docs["pivot"], indents=1)NEWLINEdef pivot(NEWLINE data: DataFrame,NEWLINE index: Optional[IndexLabel] = None,NEWLINE columns: Optional[IndexLabel] = None,NEWLINE values: Optional[IndexLabel] = None,NEWLINE) -> DataFrame:NEWLINE if columns is None:NEWLINE raise TypeError("pivot() missing 1 required argument: 'columns'")NEWLINENEWLINE columns = com.convert_to_list_like(columns)NEWLINENEWLINE if values is None:NEWLINE if index is not None:NEWLINE cols = com.convert_to_list_like(index)NEWLINE else:NEWLINE cols = []NEWLINENEWLINE append = index is NoneNEWLINE indexed = data.set_index(cols + columns, append=append)NEWLINE else:NEWLINE if index is None:NEWLINE index = [Series(data.index, name=data.index.name)]NEWLINE else:NEWLINE index = com.convert_to_list_like(index)NEWLINE index = [data[idx] for idx in index]NEWLINENEWLINE data_columns = [data[col] for col in columns]NEWLINE index.extend(data_columns)NEWLINE index = MultiIndex.from_arrays(index)NEWLINENEWLINE if is_list_like(values) and not isinstance(values, tuple):NEWLINE # Exclude tuple because it is seen as a single column nameNEWLINE values = cast(Sequence[Hashable], values)NEWLINE indexed = data._constructor(NEWLINE data[values]._values, index=index, columns=valuesNEWLINE )NEWLINE else:NEWLINE indexed = data._constructor_sliced(data[values]._values, index=index)NEWLINE return indexed.unstack(columns)NEWLINENEWLINENEWLINEdef crosstab(NEWLINE index,NEWLINE columns,NEWLINE values=None,NEWLINE rownames=None,NEWLINE colnames=None,NEWLINE aggfunc=None,NEWLINE margins=False,NEWLINE 
margins_name: str = "All",NEWLINE dropna: bool = True,NEWLINE normalize=False,NEWLINE) -> DataFrame:NEWLINE """NEWLINE Compute a simple cross tabulation of two (or more) factors. By defaultNEWLINE computes a frequency table of the factors unless an array of values and anNEWLINE aggregation function are passed.NEWLINENEWLINE ParametersNEWLINE ----------NEWLINE index : array-like, Series, or list of arrays/SeriesNEWLINE Values to group by in the rows.NEWLINE columns : array-like, Series, or list of arrays/SeriesNEWLINE Values to group by in the columns.NEWLINE values : array-like, optionalNEWLINE Array of values to aggregate according to the factors.NEWLINE Requires `aggfunc` be specified.NEWLINE rownames : sequence, default NoneNEWLINE If passed, must match number of row arrays passed.NEWLINE colnames : sequence, default NoneNEWLINE If passed, must match number of column arrays passed.NEWLINE aggfunc : function, optionalNEWLINE If specified, requires `values` be specified as well.NEWLINE margins : bool, default FalseNEWLINE Add row/column margins (subtotals).NEWLINE margins_name : str, default 'All'NEWLINE Name of the row/column that will contain the totalsNEWLINE when margins is True.NEWLINE dropna : bool, default TrueNEWLINE Do not include columns whose entries are all NaN.NEWLINE normalize : bool, {'all', 'index', 'columns'}, or {0,1}, default FalseNEWLINE Normalize by dividing all values by the sum of values.NEWLINENEWLINE - If passed 'all' or `True`, will normalize over all values.NEWLINE - If passed 'index' will normalize over each row.NEWLINE - If passed 'columns' will normalize over each column.NEWLINE - If margins is `True`, will also normalize margin values.NEWLINENEWLINE ReturnsNEWLINE -------NEWLINE DataFrameNEWLINE Cross tabulation of the data.NEWLINENEWLINE See AlsoNEWLINE --------NEWLINE DataFrame.pivot : Reshape data based on column values.NEWLINE pivot_table : Create a pivot table as a DataFrame.NEWLINENEWLINE NotesNEWLINE -----NEWLINE Any Series 
passed will have their name attributes used unless row or columnNEWLINE names for the cross-tabulation are specified.NEWLINENEWLINE Any input passed containing Categorical data will have **all** of itsNEWLINE categories included in the cross-tabulation, even if the actual data doesNEWLINE not contain any instances of a particular category.NEWLINENEWLINE In the event that there aren't overlapping indexes an empty DataFrame willNEWLINE be returned.NEWLINENEWLINE ExamplesNEWLINE --------NEWLINE >>> a = np.array(["foo", "foo", "foo", "foo", "bar", "bar",NEWLINE ... "bar", "bar", "foo", "foo", "foo"], dtype=object)NEWLINE >>> b = np.array(["one", "one", "one", "two", "one", "one",NEWLINE ... "one", "two", "two", "two", "one"], dtype=object)NEWLINE >>> c = np.array(["dull", "dull", "shiny", "dull", "dull", "shiny",NEWLINE ... "shiny", "dull", "shiny", "shiny", "shiny"],NEWLINE ... dtype=object)NEWLINE >>> pd.crosstab(a, [b, c], rownames=['a'], colnames=['b', 'c'])NEWLINE b one twoNEWLINE c dull shiny dull shinyNEWLINE aNEWLINE bar 1 2 1 0NEWLINE foo 2 2 1 2NEWLINENEWLINE Here 'c' and 'f' are not represented in the data and will not beNEWLINE shown in the output because dropna is True by default. 
SetNEWLINE dropna=False to preserve categories with no data.NEWLINENEWLINE >>> foo = pd.Categorical(['a', 'b'], categories=['a', 'b', 'c'])NEWLINE >>> bar = pd.Categorical(['d', 'e'], categories=['d', 'e', 'f'])NEWLINE >>> pd.crosstab(foo, bar)NEWLINE col_0 d eNEWLINE row_0NEWLINE a 1 0NEWLINE b 0 1NEWLINE >>> pd.crosstab(foo, bar, dropna=False)NEWLINE col_0 d e fNEWLINE row_0NEWLINE a 1 0 0NEWLINE b 0 1 0NEWLINE c 0 0 0NEWLINE """NEWLINE if values is None and aggfunc is not None:NEWLINE raise ValueError("aggfunc cannot be used without values.")NEWLINENEWLINE if values is not None and aggfunc is None:NEWLINE raise ValueError("values cannot be used without an aggfunc.")NEWLINENEWLINE index = com.maybe_make_list(index)NEWLINE columns = com.maybe_make_list(columns)NEWLINENEWLINE common_idx = NoneNEWLINE pass_objs = [x for x in index + columns if isinstance(x, (ABCSeries, ABCDataFrame))]NEWLINE if pass_objs:NEWLINE common_idx = get_objs_combined_axis(pass_objs, intersect=True, sort=False)NEWLINENEWLINE rownames = _get_names(index, rownames, prefix="row")NEWLINE colnames = _get_names(columns, colnames, prefix="col")NEWLINENEWLINE # duplicate names mapped to unique names for pivot opNEWLINE (NEWLINE rownames_mapper,NEWLINE unique_rownames,NEWLINE colnames_mapper,NEWLINE unique_colnames,NEWLINE ) = _build_names_mapper(rownames, colnames)NEWLINENEWLINE from pandas import DataFrameNEWLINENEWLINE data = {NEWLINE **dict(zip(unique_rownames, index)),NEWLINE **dict(zip(unique_colnames, columns)),NEWLINE }NEWLINE df = DataFrame(data, index=common_idx)NEWLINENEWLINE if values is None:NEWLINE df["__dummy__"] = 0NEWLINE kwargs = {"aggfunc": len, "fill_value": 0}NEWLINE else:NEWLINE df["__dummy__"] = valuesNEWLINE kwargs = {"aggfunc": aggfunc}NEWLINENEWLINE table = df.pivot_table(NEWLINE "__dummy__",NEWLINE index=unique_rownames,NEWLINE columns=unique_colnames,NEWLINE margins=margins,NEWLINE margins_name=margins_name,NEWLINE dropna=dropna,NEWLINE **kwargs,NEWLINE )NEWLINENEWLINE # 
Post-processNEWLINE if normalize is not False:NEWLINE table = _normalize(NEWLINE table, normalize=normalize, margins=margins, margins_name=margins_nameNEWLINE )NEWLINENEWLINE table = table.rename_axis(index=rownames_mapper, axis=0)NEWLINE table = table.rename_axis(columns=colnames_mapper, axis=1)NEWLINENEWLINE return tableNEWLINENEWLINENEWLINEdef _normalize(table, normalize, margins: bool, margins_name="All"):NEWLINENEWLINE if not isinstance(normalize, (bool, str)):NEWLINE axis_subs = {0: "index", 1: "columns"}NEWLINE try:NEWLINE normalize = axis_subs[normalize]NEWLINE except KeyError as err:NEWLINE raise ValueError("Not a valid normalize argument") from errNEWLINENEWLINE if margins is False:NEWLINENEWLINE # Actual NormalizationsNEWLINE normalizers: Dict[Union[bool, str], Callable] = {NEWLINE "all": lambda x: x / x.sum(axis=1).sum(axis=0),NEWLINE "columns": lambda x: x / x.sum(),NEWLINE "index": lambda x: x.div(x.sum(axis=1), axis=0),NEWLINE }NEWLINENEWLINE normalizers[True] = normalizers["all"]NEWLINENEWLINE try:NEWLINE f = normalizers[normalize]NEWLINE except KeyError as err:NEWLINE raise ValueError("Not a valid normalize argument") from errNEWLINENEWLINE table = f(table)NEWLINE table = table.fillna(0)NEWLINENEWLINE elif margins is True:NEWLINE # keep index and column of pivoted tableNEWLINE table_index = table.indexNEWLINE table_columns = table.columnsNEWLINE last_ind_or_col = table.iloc[-1, :].nameNEWLINENEWLINE # check if margin name is not in (for MI cases) and not equal to lastNEWLINE # index/column and save the column and index marginNEWLINE if (margins_name not in last_ind_or_col) & (margins_name != last_ind_or_col):NEWLINE raise ValueError(f"{margins_name} not in pivoted DataFrame")NEWLINE column_margin = table.iloc[:-1, -1]NEWLINE index_margin = table.iloc[-1, :-1]NEWLINENEWLINE # keep the core tableNEWLINE table = table.iloc[:-1, :-1]NEWLINENEWLINE # Normalize coreNEWLINE table = _normalize(table, normalize=normalize, margins=False)NEWLINENEWLINE # Fix 
MarginsNEWLINE if normalize == "columns":NEWLINE column_margin = column_margin / column_margin.sum()NEWLINE table = concat([table, column_margin], axis=1)NEWLINE table = table.fillna(0)NEWLINE table.columns = table_columnsNEWLINENEWLINE elif normalize == "index":NEWLINE index_margin = index_margin / index_margin.sum()NEWLINE table = table.append(index_margin)NEWLINE table = table.fillna(0)NEWLINE table.index = table_indexNEWLINENEWLINE elif normalize == "all" or normalize is True:NEWLINE column_margin = column_margin / column_margin.sum()NEWLINE index_margin = index_margin / index_margin.sum()NEWLINE index_margin.loc[margins_name] = 1NEWLINE table = concat([table, column_margin], axis=1)NEWLINE table = table.append(index_margin)NEWLINENEWLINE table = table.fillna(0)NEWLINE table.index = table_indexNEWLINE table.columns = table_columnsNEWLINENEWLINE else:NEWLINE raise ValueError("Not a valid normalize argument")NEWLINENEWLINE else:NEWLINE raise ValueError("Not a valid margins argument")NEWLINENEWLINE return tableNEWLINENEWLINENEWLINEdef _get_names(arrs, names, prefix: str = "row"):NEWLINE if names is None:NEWLINE names = []NEWLINE for i, arr in enumerate(arrs):NEWLINE if isinstance(arr, ABCSeries) and arr.name is not None:NEWLINE names.append(arr.name)NEWLINE else:NEWLINE names.append(f"{prefix}_{i}")NEWLINE else:NEWLINE if len(names) != len(arrs):NEWLINE raise AssertionError("arrays and names must have the same length")NEWLINE if not isinstance(names, list):NEWLINE names = list(names)NEWLINENEWLINE return namesNEWLINENEWLINENEWLINEdef _build_names_mapper(NEWLINE rownames: List[str], colnames: List[str]NEWLINE) -> Tuple[Dict[str, str], List[str], Dict[str, str], List[str]]:NEWLINE """NEWLINE Given the names of a DataFrame's rows and columns, returns a set of unique rowNEWLINE and column names and mappers that convert to original names.NEWLINENEWLINE A row or column name is replaced if it is duplicate among the rows of the inputs,NEWLINE among the columns of the 
inputs or between the rows and the columns.NEWLINENEWLINE ParametersNEWLINE ----------NEWLINE rownames: list[str]NEWLINE colnames: list[str]NEWLINENEWLINE ReturnsNEWLINE -------NEWLINE Tuple(Dict[str, str], List[str], Dict[str, str], List[str])NEWLINENEWLINE rownames_mapper: dict[str, str]NEWLINE a dictionary with new row names as keys and original rownames as valuesNEWLINE unique_rownames: list[str]NEWLINE a list of rownames with duplicate names replaced by dummy namesNEWLINE colnames_mapper: dict[str, str]NEWLINE a dictionary with new column names as keys and original column names as valuesNEWLINE unique_colnames: list[str]NEWLINE a list of column names with duplicate names replaced by dummy namesNEWLINENEWLINE """NEWLINENEWLINE def get_duplicates(names):NEWLINE seen: Set = set()NEWLINE return {name for name in names if name not in seen}NEWLINENEWLINE shared_names = set(rownames).intersection(set(colnames))NEWLINE dup_names = get_duplicates(rownames) | get_duplicates(colnames) | shared_namesNEWLINENEWLINE rownames_mapper = {NEWLINE f"row_{i}": name for i, name in enumerate(rownames) if name in dup_namesNEWLINE }NEWLINE unique_rownames = [NEWLINE f"row_{i}" if name in dup_names else name for i, name in enumerate(rownames)NEWLINE ]NEWLINENEWLINE colnames_mapper = {NEWLINE f"col_{i}": name for i, name in enumerate(colnames) if name in dup_namesNEWLINE }NEWLINE unique_colnames = [NEWLINE f"col_{i}" if name in dup_names else name for i, name in enumerate(colnames)NEWLINE ]NEWLINENEWLINE return rownames_mapper, unique_rownames, colnames_mapper, unique_colnamesNEWLINE
# -*- coding: utf-8 -*-NEWLINE"""NEWLINEPreprocessors.NEWLINE"""NEWLINEimport reNEWLINENEWLINEimport numpy as npNEWLINEfrom allennlp.modules.elmo import Elmo, batch_to_idsNEWLINEfrom sklearn.base import BaseEstimator, TransformerMixinNEWLINEfrom sklearn.externals import joblibNEWLINEfrom keras.utils.np_utils import to_categoricalNEWLINEfrom keras.preprocessing.sequence import pad_sequencesNEWLINENEWLINEfrom anago.utils import VocabularyNEWLINENEWLINEoptions_file = 'https://s3-us-west-2.amazonaws.com/allennlp/models/elmo/2x4096_512_2048cnn_2xhighway/elmo_2x4096_512_2048cnn_2xhighway_options.json'NEWLINEweight_file = 'https://s3-us-west-2.amazonaws.com/allennlp/models/elmo/2x4096_512_2048cnn_2xhighway/elmo_2x4096_512_2048cnn_2xhighway_weights.hdf5'NEWLINENEWLINENEWLINEdef normalize_number(text):NEWLINE return re.sub(r'[0-90123456789]', r'0', text)NEWLINENEWLINENEWLINEclass IndexTransformer(BaseEstimator, TransformerMixin):NEWLINE """Convert a collection of raw documents to a document id matrix.NEWLINENEWLINE Attributes:NEWLINE _use_char: boolean. Whether to use char feature.NEWLINE _num_norm: boolean. Whether to normalize text.NEWLINE _word_vocab: dict. A mapping of words to feature indices.NEWLINE _char_vocab: dict. A mapping of chars to feature indices.NEWLINE _label_vocab: dict. A mapping of labels to feature indices.NEWLINE """NEWLINENEWLINE def __init__(self, lower=True, num_norm=True,NEWLINE use_char=True, initial_vocab=None):NEWLINE """Create a preprocessor object.NEWLINENEWLINE Args:NEWLINE lower: boolean. Whether to convert the texts to lowercase.NEWLINE use_char: boolean. Whether to use char feature.NEWLINE num_norm: boolean. Whether to normalize text.NEWLINE initial_vocab: Iterable. 
Initial vocabulary for expanding word_vocab.NEWLINE """NEWLINE self._num_norm = num_normNEWLINE self._use_char = use_charNEWLINE self._word_vocab = Vocabulary(lower=lower)NEWLINE self._char_vocab = Vocabulary(lower=False)NEWLINE self._label_vocab = Vocabulary(lower=False, unk_token=False)NEWLINENEWLINE if initial_vocab:NEWLINE self._word_vocab.add_documents([initial_vocab])NEWLINE self._char_vocab.add_documents(initial_vocab)NEWLINENEWLINE def fit(self, X, y):NEWLINE """Learn vocabulary from training set.NEWLINENEWLINE Args:NEWLINE X : iterable. An iterable which yields either str, unicode or file objects.NEWLINENEWLINE Returns:NEWLINE self : IndexTransformer.NEWLINE """NEWLINE self._word_vocab.add_documents(X)NEWLINE self._label_vocab.add_documents(y)NEWLINE if self._use_char:NEWLINE for doc in X:NEWLINE self._char_vocab.add_documents(doc)NEWLINENEWLINE self._word_vocab.build()NEWLINE self._char_vocab.build()NEWLINE self._label_vocab.build()NEWLINENEWLINE return selfNEWLINENEWLINE def transform(self, X, y=None):NEWLINE """Transform documents to document ids.NEWLINENEWLINE Uses the vocabulary learned by fit.NEWLINENEWLINE Args:NEWLINE X : iterableNEWLINE an iterable which yields either str, unicode or file objects.NEWLINE y : iterabl, label strings.NEWLINENEWLINE Returns:NEWLINE features: document id matrix.NEWLINE y: label id matrix.NEWLINE """NEWLINE word_ids = [self._word_vocab.doc2id(doc) for doc in X]NEWLINE word_ids = pad_sequences(word_ids, padding='post')NEWLINENEWLINE if self._use_char:NEWLINE char_ids = [[self._char_vocab.doc2id(w) for w in doc] for doc in X]NEWLINE char_ids = pad_nested_sequences(char_ids)NEWLINE features = [word_ids, char_ids]NEWLINE else:NEWLINE features = word_idsNEWLINENEWLINE if y is not None:NEWLINE y = [self._label_vocab.doc2id(doc) for doc in y]NEWLINE y = pad_sequences(y, padding='post')NEWLINE y = to_categorical(y, self.label_size).astype(int)NEWLINE # In 2018/06/01, to_categorical is a bit strange.NEWLINE # >>> 
to_categorical([[1,3]], num_classes=4).shapeNEWLINE # (1, 2, 4)NEWLINE # >>> to_categorical([[1]], num_classes=4).shapeNEWLINE # (1, 4)NEWLINE # So, I expand dimensions when len(y.shape) == 2.NEWLINE y = y if len(y.shape) == 3 else np.expand_dims(y, axis=0)NEWLINE return features, yNEWLINE else:NEWLINE return featuresNEWLINENEWLINE def fit_transform(self, X, y=None, **params):NEWLINE """Learn vocabulary and return document id matrix.NEWLINENEWLINE This is equivalent to fit followed by transform.NEWLINENEWLINE Args:NEWLINE X : iterableNEWLINE an iterable which yields either str, unicode or file objects.NEWLINENEWLINE Returns:NEWLINE list : document id matrix.NEWLINE list: label id matrix.NEWLINE """NEWLINE return self.fit(X, y).transform(X, y)NEWLINENEWLINE def inverse_transform(self, y, lengths=None):NEWLINE """Return label strings.NEWLINENEWLINE Args:NEWLINE y: label id matrix.NEWLINE lengths: sentences length.NEWLINENEWLINE Returns:NEWLINE list: list of list of strings.NEWLINE """NEWLINE y = np.argmax(y, -1)NEWLINE inverse_y = [self._label_vocab.id2doc(ids) for ids in y]NEWLINE if lengths is not None:NEWLINE inverse_y = [iy[:l] for iy, l in zip(inverse_y, lengths)]NEWLINENEWLINE return inverse_yNEWLINENEWLINE @propertyNEWLINE def word_vocab_size(self):NEWLINE return len(self._word_vocab)NEWLINENEWLINE @propertyNEWLINE def char_vocab_size(self):NEWLINE return len(self._char_vocab)NEWLINENEWLINE @propertyNEWLINE def label_size(self):NEWLINE return len(self._label_vocab)NEWLINENEWLINE def save(self, file_path):NEWLINE joblib.dump(self, file_path)NEWLINENEWLINE @classmethodNEWLINE def load(cls, file_path):NEWLINE p = joblib.load(file_path)NEWLINENEWLINE return pNEWLINENEWLINENEWLINEdef pad_nested_sequences(sequences, dtype='int32'):NEWLINE """Pads nested sequences to the same length.NEWLINENEWLINE This function transforms a list of list sequencesNEWLINE into a 3D Numpy array of shape `(num_samples, max_sent_len, max_word_len)`.NEWLINENEWLINE Args:NEWLINE sequences: 
List of lists of lists.NEWLINE dtype: Type of the output sequences.NEWLINENEWLINE # ReturnsNEWLINE x: Numpy array.NEWLINE """NEWLINE max_sent_len = 0NEWLINE max_word_len = 0NEWLINE for sent in sequences:NEWLINE max_sent_len = max(len(sent), max_sent_len)NEWLINE for word in sent:NEWLINE max_word_len = max(len(word), max_word_len)NEWLINENEWLINE x = np.zeros((len(sequences), max_sent_len, max_word_len)).astype(dtype)NEWLINE for i, sent in enumerate(sequences):NEWLINE for j, word in enumerate(sent):NEWLINE x[i, j, :len(word)] = wordNEWLINENEWLINE return xNEWLINENEWLINENEWLINEclass ELMoTransformer(IndexTransformer):NEWLINENEWLINE def __init__(self, lower=True, num_norm=True,NEWLINE use_char=True, initial_vocab=None):NEWLINE super(ELMoTransformer, self).__init__(lower, num_norm, use_char, initial_vocab)NEWLINE self._elmo = Elmo(options_file, weight_file, 2, dropout=0)NEWLINENEWLINE def transform(self, X, y=None):NEWLINE """Transform documents to document ids.NEWLINENEWLINE Uses the vocabulary learned by fit.NEWLINENEWLINE Args:NEWLINE X : iterableNEWLINE an iterable which yields either str, unicode or file objects.NEWLINE y : iterabl, label strings.NEWLINENEWLINE Returns:NEWLINE features: document id matrix.NEWLINE y: label id matrix.NEWLINE """NEWLINE word_ids = [self._word_vocab.doc2id(doc) for doc in X]NEWLINE word_ids = pad_sequences(word_ids, padding='post')NEWLINENEWLINE char_ids = [[self._char_vocab.doc2id(w) for w in doc] for doc in X]NEWLINE char_ids = pad_nested_sequences(char_ids)NEWLINENEWLINE character_ids = batch_to_ids(X)NEWLINE elmo_embeddings = self._elmo(character_ids)['elmo_representations'][1]NEWLINE elmo_embeddings = elmo_embeddings.detach().numpy()NEWLINENEWLINE features = [word_ids, char_ids, elmo_embeddings]NEWLINENEWLINE if y is not None:NEWLINE y = [self._label_vocab.doc2id(doc) for doc in y]NEWLINE y = pad_sequences(y, padding='post')NEWLINE y = to_categorical(y, self.label_size).astype(int)NEWLINE # In 2018/06/01, to_categorical is a bit 
strange.NEWLINE # >>> to_categorical([[1,3]], num_classes=4).shapeNEWLINE # (1, 2, 4)NEWLINE # >>> to_categorical([[1]], num_classes=4).shapeNEWLINE # (1, 4)NEWLINE # So, I expand dimensions when len(y.shape) == 2.NEWLINE y = y if len(y.shape) == 3 else np.expand_dims(y, axis=0)NEWLINE return features, yNEWLINE else:NEWLINE return featuresNEWLINE
from flask import Flask, request, Response, abort, redirectNEWLINEimport osNEWLINEimport requestsNEWLINEimport loggingNEWLINEimport jsonNEWLINEimport reNEWLINEimport sysNEWLINEfrom time import sleepNEWLINENEWLINEapp = Flask(__name__)NEWLINEformat_string = '%(asctime)s - %(name)s - %(levelname)s - %(message)s'NEWLINElogger = logging.getLogger('freshdesk-rest-service')NEWLINENEWLINE# Log to stdoutNEWLINEstdout_handler = logging.StreamHandler()NEWLINEstdout_handler.setFormatter(logging.Formatter(format_string))NEWLINElogger.addHandler(stdout_handler)NEWLINElogger.setLevel(os.getenv("logging_level", logging.WARNING))NEWLINENEWLINEFRESHDESK_DOMAIN = os.getenv("freshdesk_domain")NEWLINEFRESHDESK_API_PATH = os.getenv("freshdesk_api_path", "/api/v2/")NEWLINEFRESHDESK_FILTER_CALL_MAX_PAGE_SIZE = int(NEWLINE os.getenv("freshdesk_filter_call_max_page_size", 30))NEWLINEFRESHDESK_FILTER_CALL_MAX_PAGE_NO = int(NEWLINE os.getenv("freshdesk_filter_call_max_page_no", 10))NEWLINEFRESHDESK_APIKEY = os.getenv('freshdesk_apikey')NEWLINEFRESHDESK_HEADERS = {'Content-Type': 'application/json'}NEWLINEFRESHDESK_URL_ROOT = str(FRESHDESK_DOMAIN) + str(FRESHDESK_API_PATH)NEWLINESESAM_URL=os.getenv("sesam_url", None)NEWLINESESAM_JWT=os.getenv("sesam_jwt", None)NEWLINENEWLINEPAGE_SIZE = int(os.getenv("page_size", 100))NEWLINEDO_GENERATE_SESAM_ID = bool(os.getenv("generate_sesam_id", "True") != "False")NEWLINENEWLINEBLACKLIST_UPDATED_TOKEN_GENERATION = ["surveys"]NEWLINESESAM_CALLBACK_CONFIG = {NEWLINE 'companies': {NEWLINE 'dataset_id': 'freshdesk-company',NEWLINE 'ni_config': {NEWLINE 'from_property': 'custom_fields.customer_code',NEWLINE 'to_property': 'customer_code-ni',NEWLINE 'ni': 'global-customer'NEWLINE }NEWLINE },NEWLINE 'companies/_id_': {NEWLINE 'dataset_id': 'freshdesk-company',NEWLINE 'ni_config': {NEWLINE 'from_property': 'custom_fields.customer_code',NEWLINE 'to_property': 'customer_code-ni',NEWLINE 'ni': 'global-customer'NEWLINE }NEWLINE 
}NEWLINE}NEWLINEPROPERTIES_TO_ANONYMIZE_PER_URI_TEMPLATE = json.loads(os.environ.get(NEWLINE 'properties_to_anonymize_per_uri_template', "{}").replace("'", "\""))NEWLINEANONYMIZATION_STRING = os.environ.get('anonymization_string', "*")NEWLINEVALID_RESPONSE_COMBOS = [("GET", 200), ("POST", 201),NEWLINE ("PUT", 200), ("DELETE", 204)]NEWLINENEWLINErequired_vars = [ FRESHDESK_DOMAIN, FRESHDESK_APIKEY]NEWLINEfor var in required_vars:NEWLINE if var is None or not var:NEWLINE raise SystemExit("Freshdesk-rest service cannot be started:Not all mandatory variables are initialized")NEWLINENEWLINEdef get_uri_template(path):NEWLINE return re.sub(r"\d+", r"_id_", path), re.sub(r"[a-zA-Z\/]+", r"", path)NEWLINENEWLINEdef to_sesam_entity(in_dict, path, ni, method):NEWLINE def get_prop_value(key_path, entity):NEWLINE if len(key_path) == 1:NEWLINE val = entity[key_path[-1]]NEWLINE if type(val) in [int, float, bool, str]:NEWLINE return valNEWLINE else:NEWLINE return get_prop_value(key_path[1:], entity[key_path[0]])NEWLINENEWLINE def add_ni(mydict, ni):NEWLINE if not ni:NEWLINE return mydictNEWLINE namespaced_entity = {}NEWLINE for key, value in mydict.items():NEWLINE if '_id' == key:NEWLINE namespaced_entity[key] = ni + ':' + valueNEWLINE elif key in ['_updated', '_ts', '_hash','_previous', '_deleted']:NEWLINE namespaced_entity[key] = valueNEWLINE else:NEWLINE if type(value) is dict:NEWLINE namespaced_entity[ni + ':' + key] = add_ni(value, ni)NEWLINE else:NEWLINE namespaced_entity[ni + ':' + key] = valueNEWLINE return namespaced_entityNEWLINENEWLINE if DO_GENERATE_SESAM_ID:NEWLINE in_dict['_id'] = str(in_dict['id'])NEWLINE if path not in BLACKLIST_UPDATED_TOKEN_GENERATION:NEWLINE in_dict['_updated'] = str(in_dict['updated_at'])NEWLINE if method in ['PUT', 'POST', 'DELETE'] and re.match(r'^companies', path) and in_dict['custom_fields']['customer_code']:NEWLINE ni_config = SESAM_CALLBACK_CONFIG[path]['ni_config']NEWLINE from_property = ni_config['from_property']NEWLINE val = 
get_prop_value(from_property.split('.'), in_dict)NEWLINE in_dict[ni_config['to_property']] = '~:' + ni_config['ni'] + ':' + valNEWLINENEWLINE return add_ni(in_dict, ni)NEWLINENEWLINEdef sesam_callback(method, callback_config, resource_id, json_data, uri_template):NEWLINE if not SESAM_URL or not SESAM_JWT:NEWLINE returnNEWLINE base_url = SESAM_URL + '/api/datasets/' + callback_config['dataset_id']NEWLINE headers = {'Authorization':'bearer ' + SESAM_JWT, 'Accept': 'application/problem+json', 'content-type' : 'application/json' }NEWLINE entity_to_post = {}NEWLINE if method == 'DELETE':NEWLINE _id = callback_config['dataset_id'] + ':' + resource_idNEWLINE params = {'entity_id' : _id}NEWLINE logger.debug('issuing a %s call url=%s, params=%s' % (method, base_url + '/entity', params))NEWLINE sesam_response = requests.get(url=base_url + '/entity', headers=headers, params=params)NEWLINE if sesam_response.status_code != 200:NEWLINE logger.warn('cannot fetch \'%s\' from dataset \'%s\' to delete: %s' % (_id, callback_config['dataset_id'], sesam_response.text))NEWLINE else:NEWLINE entity_to_post = sesam_response.json()NEWLINE entity_to_post['_deleted'] = TrueNEWLINE else:NEWLINE entity_to_post = to_sesam_entity(json_data, uri_template, callback_config['dataset_id'], method)NEWLINE if entity_to_post:NEWLINE logger.debug('issuing a %s call url=%s, json=%s' % (method, base_url + '/entity', entity_to_post))NEWLINE sesam_response = requests.post(url=base_url + '/entities', headers=headers, json=entity_to_post)NEWLINE if sesam_response.status_code != 200:NEWLINE logger.warn('cannot post entity \'%s\' to dataset \'%s\' : %s' % (entity_to_post.get('_id'), callback_config['dataset_id'], sesam_response.text))NEWLINENEWLINE# Sesam Json Pull Protocol is transformed to Freshdesk API headersNEWLINE# Freshdesk rules:NEWLINE# max page size is 30 for "search" calls, 100 otherwiseNEWLINE# "search" calls accepts only following params: query, per_page, pageNEWLINENEWLINENEWLINEdef 
get_freshdesk_req_params(path, service_params):NEWLINE freshdesk_req_params = service_paramsNEWLINE since_support_config = {NEWLINE 'tickets': {'param': 'updated_since', 'operator': '='},NEWLINE 'contacts': {'param': '_updated_since', 'operator': '='},NEWLINE 'surveys/satisfaction_ratings': {'param': 'created_since',NEWLINE 'operator': '='},NEWLINE 'search/companies': {'param': 'updated_at', 'operator': ':>'},NEWLINE 'search/contacts': {'param': 'updated_at', 'operator': ':>'}NEWLINE }NEWLINENEWLINE uri_template, freshdesk_resource_id = get_uri_template(path)NEWLINE if "search/" not in uri_template:NEWLINE freshdesk_req_params.setdefault(NEWLINE "per_page", service_params.get("limit", PAGE_SIZE))NEWLINENEWLINE if "limit" in freshdesk_req_params:NEWLINE del freshdesk_req_params["limit"]NEWLINENEWLINE if service_params.get("since") is not None and uri_template in since_support_config:NEWLINE if "search/" in uri_template:NEWLINE since_query_segment = since_support_config[uri_template]["param"] + since_support_config[uri_template]["operator"] + "'" + re.sub(NEWLINE r"T.*", r"", freshdesk_req_params["since"]) + "'"NEWLINE if freshdesk_req_params.get("query", None) is not None:NEWLINE freshdesk_req_params["query"] = "\"(" + service_params.get(NEWLINE "query").replace("\"", "") + ") AND " + since_query_segment + "\""NEWLINE else:NEWLINE freshdesk_req_params["query"] = "\"" + \NEWLINE since_query_segment + "\""NEWLINE else:NEWLINE freshdesk_req_params[since_support_config[uri_template]NEWLINE ["param"]] = service_params.get("since")NEWLINE del freshdesk_req_params["since"]NEWLINENEWLINE return freshdesk_req_paramsNEWLINENEWLINEdef call_service(url, params, json):NEWLINE logger.info("Issuing a %s call with url=%s, with param list=%s, headers=%s",NEWLINE request.method, url, params, FRESHDESK_HEADERS)NEWLINE freshdesk_response = requests.request(method=request.method, url=url, headers=FRESHDESK_HEADERS, auth=(NEWLINE FRESHDESK_APIKEY, 'X'), params=params, json=json)NEWLINE # 
status code 429 is returned when rate-limit is achived, and returns retry-after valueNEWLINE if freshdesk_response.status_code in [429]:NEWLINE if freshdesk_response.headers.get('Retry-After') is not None:NEWLINE retry_after = freshdesk_response.headers.get('Retry-After')NEWLINE logger.error("sleeping for %s seconds", retry_after)NEWLINE sleep(float(retry_after))NEWLINE elif (request.method, freshdesk_response.status_code) not in VALID_RESPONSE_COMBOS:NEWLINE logger.error("Unexpected response status code=%d, request-ID=%s, response text=%s" %NEWLINE (freshdesk_response.status_code, freshdesk_response.headers.get('X-Request-Id'), freshdesk_response.text))NEWLINE elif request.method in ['PUT', 'POST', 'DELETE']:NEWLINE uri_template, freshdesk_resource_id = get_uri_template(url.replace(FRESHDESK_URL_ROOT, ''))NEWLINE json_data = {}NEWLINE if request.method != 'DELETE':NEWLINE json_data = freshdesk_response.json()NEWLINE if uri_template in SESAM_CALLBACK_CONFIG:NEWLINE sesam_callback(request.method, SESAM_CALLBACK_CONFIG[uri_template], freshdesk_resource_id, json_data, uri_template)NEWLINE return freshdesk_responseNEWLINENEWLINENEWLINE# fetches data for any GET request, supports paginationNEWLINEdef fetch_data(path, freshdesk_req_params):NEWLINE base_url = FRESHDESK_URL_ROOT + pathNEWLINE page_counter = 0NEWLINENEWLINE data_to_return = []NEWLINE base_url_next_page = base_urlNEWLINE uri_template, freshdesk_resource_id = get_uri_template(path)NEWLINE while base_url_next_page is not None:NEWLINE page_counter += 1NEWLINE freshdesk_response = call_service(NEWLINE base_url_next_page, freshdesk_req_params, None)NEWLINE if freshdesk_response.status_code != 200:NEWLINE return freshdesk_response.text, freshdesk_response.status_codeNEWLINE response_json = freshdesk_response.json()NEWLINE # search calls return entites in "results" propertyNEWLINE if "search/" in uri_template:NEWLINE data_from_freshdesk = response_json.get("results")NEWLINE total_object_count = 
response_json.get("total")NEWLINE if page_counter == FRESHDESK_FILTER_CALL_MAX_PAGE_NO:NEWLINE logger.error("MAX page number reached before fetching all objects: total_object_count=%s, FRESHDESK_FILTER_CALL_MAX_PAGE_NO=%s, page_counter=%s" % (NEWLINE total_object_count, FRESHDESK_FILTER_CALL_MAX_PAGE_NO, page_counter))NEWLINE return {"message": "MAX page number reached before fetching all objects"}, 500NEWLINE if total_object_count > page_counter * FRESHDESK_FILTER_CALL_MAX_PAGE_SIZE:NEWLINE freshdesk_req_params["page"] = page_counter + 1NEWLINE else:NEWLINE base_url_next_page = NoneNEWLINE else:NEWLINE data_from_freshdesk = response_jsonNEWLINE link_text = freshdesk_response.headers.get("Link")NEWLINE if link_text is not None and request.args.get("page") is None:NEWLINE base_url_next_page = link_text[1:link_text.index(">")]NEWLINE else:NEWLINE base_url_next_page = NoneNEWLINE if isinstance(data_from_freshdesk, dict):NEWLINE data_to_return = to_sesam_entity(data_from_freshdesk, uri_template, None, None)NEWLINE elif isinstance(data_from_freshdesk, list):NEWLINE for entity in data_from_freshdesk:NEWLINE data_to_return.append(to_sesam_entity(entity, uri_template, None, None))NEWLINENEWLINE if uri_template in PROPERTIES_TO_ANONYMIZE_PER_URI_TEMPLATE:NEWLINE fields_to_anonymize = PROPERTIES_TO_ANONYMIZE_PER_URI_TEMPLATE[uri_template]NEWLINE for entity in data_from_freshdesk:NEWLINE for prop in fields_to_anonymize:NEWLINE entity[prop] = ANONYMIZATION_STRINGNEWLINENEWLINE # for sub objects, it is only page size that should be sentNEWLINE freshdesk_req_params = {"per_page": 100}NEWLINE # fetch underlying entity types for the resultsetNEWLINE if uri_template == "solutions/categories":NEWLINE for entity in data_to_return:NEWLINE entity["folders"], response_code = fetch_data(NEWLINE uri_template + "/" + str(entity["id"]) + "/folders", freshdesk_req_params)NEWLINE elif uri_template == "solutions/categories/_id_/folders":NEWLINE for entity in data_to_return:NEWLINE 
entity["articles"], response_code = fetch_data(NEWLINE "solutions/folders/" + str(entity["id"]) + "/articles", freshdesk_req_params)NEWLINE elif uri_template in ["tickets", "search/tickets"]:NEWLINE for entity in data_to_return:NEWLINE entity["conversations"], response_code = fetch_data(NEWLINE "tickets/" + str(entity["id"]) + "/conversations", freshdesk_req_params)NEWLINE entity["time_entries"], response_code = fetch_data(NEWLINE "tickets/" + str(entity["id"]) + "/time_entries", freshdesk_req_params)NEWLINENEWLINE logger.debug("returning %s entities" % len(data_to_return))NEWLINENEWLINE return data_to_return, 200NEWLINENEWLINENEWLINE@app.route("/<path:path>", methods=["GET"])NEWLINEdef get(path):NEWLINE freshdesk_req_params = get_freshdesk_req_params(NEWLINE path, request.args.to_dict(True))NEWLINE data_to_return, status_code = fetch_data(path, freshdesk_req_params)NEWLINE return Response(response=json.dumps(data_to_return), status=status_code, mimetype='application/json', content_type='application/json; charset=utf-8')NEWLINENEWLINENEWLINE@app.route("/<path:path>", methods=["POST", "PUT", "DELETE"])NEWLINEdef push(path):NEWLINE base_url = FRESHDESK_URL_ROOT + pathNEWLINE freshdesk_req_params = get_freshdesk_req_params(NEWLINE base_url, request.args.to_dict(True))NEWLINE freshdesk_response = call_service(NEWLINE base_url, freshdesk_req_params, request.get_json())NEWLINE return Response(response=freshdesk_response, status=freshdesk_response.status_code, mimetype='application/json', content_type='application/json; charset=utf-8')NEWLINENEWLINENEWLINEif __name__ == '__main__':NEWLINE app.run(debug=True, host='0.0.0.0', port=os.getenv('port', 5000))NEWLINE
from rlkit.core import loggerNEWLINEfrom rlkit.core.timer import timerNEWLINEfrom rlkit.data_management.online_vae_replay_buffer import \NEWLINE OnlineVaeRelabelingBufferNEWLINEfrom rlkit.data_management.shared_obs_dict_replay_buffer \NEWLINE import SharedObsDictRelabelingBufferNEWLINEimport rlkit.torch.vae.vae_schedules as vae_schedulesNEWLINEfrom rlkit.misc.eval_util import create_stats_ordered_dictNEWLINEfrom rlkit.torch.torch_rl_algorithm import (NEWLINE TorchBatchRLAlgorithm,NEWLINE)NEWLINEimport rlkit.torch.pytorch_util as ptuNEWLINEfrom torch.multiprocessing import Process, PipeNEWLINEfrom threading import ThreadNEWLINEimport numpy as npNEWLINEfrom rlkit.core.logging import add_prefixNEWLINENEWLINEclass OnlineVaeAlgorithm(TorchBatchRLAlgorithm):NEWLINENEWLINE def __init__(NEWLINE self,NEWLINE vae,NEWLINE vae_trainer,NEWLINE *base_args,NEWLINE vae_save_period=1,NEWLINE vae_training_schedule=vae_schedules.never_train,NEWLINE oracle_data=False,NEWLINE parallel_vae_train=True,NEWLINE vae_min_num_steps_before_training=0,NEWLINE uniform_dataset=None,NEWLINE **base_kwargsNEWLINE ):NEWLINE super().__init__(*base_args, **base_kwargs)NEWLINE assert isinstance(self.replay_buffer, OnlineVaeRelabelingBuffer)NEWLINE self.vae = vaeNEWLINE self.vae_trainer = vae_trainerNEWLINE self.vae_trainer.model = self.vaeNEWLINE self.vae_save_period = vae_save_periodNEWLINE self.vae_training_schedule = vae_training_scheduleNEWLINE self.oracle_data = oracle_dataNEWLINENEWLINE self.parallel_vae_train = parallel_vae_trainNEWLINE self.vae_min_num_steps_before_training = vae_min_num_steps_before_trainingNEWLINE self.uniform_dataset = uniform_datasetNEWLINENEWLINE self._vae_training_process = NoneNEWLINE self._update_subprocess_vae_thread = NoneNEWLINE self._vae_conn_pipe = NoneNEWLINENEWLINE def _end_epoch(self):NEWLINE timer.start_timer('vae training')NEWLINE self._train_vae(self.epoch)NEWLINE timer.stop_timer('vae training')NEWLINE super()._end_epoch()NEWLINENEWLINE def 
_get_diagnostics(self):NEWLINE vae_log = self._get_vae_diagnostics().copy()NEWLINE vae_log.update(super()._get_diagnostics())NEWLINE return vae_logNEWLINENEWLINE def to(self, device):NEWLINE self.vae.to(device)NEWLINE super().to(device)NEWLINENEWLINE """NEWLINE VAE-specific CodeNEWLINE """NEWLINE def _train_vae(self, epoch):NEWLINE if self.parallel_vae_train and self._vae_training_process is None:NEWLINE self.init_vae_training_subprocess()NEWLINE should_train, amount_to_train = self.vae_training_schedule(epoch)NEWLINE rl_start_epoch = int(self.min_num_steps_before_training / (NEWLINE self.num_expl_steps_per_train_loop * self.num_train_loops_per_epochNEWLINE ))NEWLINE if should_train: # or epoch <= (rl_start_epoch - 1):NEWLINE if self.parallel_vae_train:NEWLINE assert self._vae_training_process.is_alive()NEWLINE # Make sure the last vae update has finished before startingNEWLINE # another oneNEWLINE if self._update_subprocess_vae_thread is not None:NEWLINE self._update_subprocess_vae_thread.join()NEWLINE self._update_subprocess_vae_thread = Thread(NEWLINE target=OnlineVaeAlgorithm.update_vae_in_training_subprocess,NEWLINE args=(self, epoch, ptu.device)NEWLINE )NEWLINE self._update_subprocess_vae_thread.start()NEWLINE self._vae_conn_pipe.send((amount_to_train, epoch))NEWLINE else:NEWLINE _train_vae(NEWLINE self.vae_trainer,NEWLINE epoch,NEWLINE self.replay_buffer,NEWLINE amount_to_trainNEWLINE )NEWLINE self.replay_buffer.refresh_latents(epoch)NEWLINE _test_vae(NEWLINE self.vae_trainer,NEWLINE epoch,NEWLINE self.replay_buffer,NEWLINE vae_save_period=self.vae_save_period,NEWLINE uniform_dataset=self.uniform_dataset,NEWLINE )NEWLINENEWLINE def _get_vae_diagnostics(self):NEWLINE return add_prefix(NEWLINE self.vae_trainer.get_diagnostics(),NEWLINE prefix='vae_trainer/',NEWLINE )NEWLINENEWLINE def _cleanup(self):NEWLINE if self.parallel_vae_train:NEWLINE self._vae_conn_pipe.close()NEWLINE self._vae_training_process.terminate()NEWLINENEWLINE def 
init_vae_training_subprocess(self):NEWLINE assert isinstance(self.replay_buffer, SharedObsDictRelabelingBuffer)NEWLINENEWLINE self._vae_conn_pipe, process_pipe = Pipe()NEWLINE self._vae_training_process = Process(NEWLINE target=subprocess_train_vae_loop,NEWLINE args=(NEWLINE process_pipe,NEWLINE self.vae,NEWLINE self.vae.state_dict(),NEWLINE self.replay_buffer,NEWLINE self.replay_buffer.get_mp_info(),NEWLINE ptu.device,NEWLINE )NEWLINE )NEWLINE self._vae_training_process.start()NEWLINE self._vae_conn_pipe.send(self.vae_trainer)NEWLINENEWLINE def update_vae_in_training_subprocess(self, epoch, device):NEWLINE self.vae.__setstate__(self._vae_conn_pipe.recv())NEWLINE self.vae.to(device)NEWLINE _test_vae(NEWLINE self.vae_trainer,NEWLINE epoch,NEWLINE self.replay_buffer,NEWLINE vae_save_period=self.vae_save_period,NEWLINE uniform_dataset=self.uniform_dataset,NEWLINE )NEWLINENEWLINENEWLINEdef _train_vae(vae_trainer, epoch, replay_buffer, batches=50, oracle_data=False):NEWLINE for b in range(batches):NEWLINE batch = replay_buffer.random_vae_training_data(vae_trainer.batch_size, epoch)NEWLINE vae_trainer.train_batch(NEWLINE epoch,NEWLINE batch,NEWLINE )NEWLINE # replay_buffer.train_dynamics_model(batches=batches)NEWLINENEWLINEdef _test_vae(vae_trainer, epoch, replay_buffer, batches=10, vae_save_period=1, uniform_dataset=None):NEWLINE save_imgs = epoch % vae_save_period == 0NEWLINE log_fit_skew_stats = replay_buffer._prioritize_vae_samples and uniform_dataset is not NoneNEWLINE if uniform_dataset is not None:NEWLINE replay_buffer.log_loss_under_uniform(uniform_dataset, vae_trainer.batch_size, rl_logger=vae_trainer.vae_logger_stats_for_rl)NEWLINE for b in range(batches):NEWLINE batch = replay_buffer.random_vae_training_data(vae_trainer.batch_size, epoch)NEWLINE vae_trainer.test_batch(NEWLINE epoch,NEWLINE batch,NEWLINE )NEWLINE if save_imgs:NEWLINE vae_trainer.dump_samples(epoch)NEWLINE vae_trainer.dump_reconstructions(epoch)NEWLINE if log_fit_skew_stats:NEWLINE 
vae_trainer.dump_best_reconstruction(epoch)NEWLINE vae_trainer.dump_worst_reconstruction(epoch)NEWLINE vae_trainer.dump_sampling_histogram(epoch, batch_size=vae_trainer.batch_size)NEWLINE if uniform_dataset is not None:NEWLINE vae_trainer.dump_uniform_imgs_and_reconstructions(dataset=uniform_dataset, epoch=epoch)NEWLINENEWLINENEWLINEdef subprocess_train_vae_loop(NEWLINE conn_pipe,NEWLINE vae,NEWLINE vae_params,NEWLINE replay_buffer,NEWLINE mp_info,NEWLINE device,NEWLINE):NEWLINE """NEWLINE The observations and next_observations of the replay buffer are stored inNEWLINE shared memory. This loop waits until the parent signals to start vaeNEWLINE training, trains and sends the vae back, and then refreshes the latents.NEWLINE Refreshing latents in the subprocess reflects in the main process as wellNEWLINE since the latents are in shared memory. Since this is does asynchronously,NEWLINE it is possible for the main process to see half the latents updated and halfNEWLINE not.NEWLINE """NEWLINE ptu.device = deviceNEWLINE vae_trainer = conn_pipe.recv()NEWLINE vae.load_state_dict(vae_params)NEWLINE vae.to(device)NEWLINE vae_trainer.set_vae(vae)NEWLINE replay_buffer.init_from_mp_info(mp_info)NEWLINE replay_buffer.env.vae = vaeNEWLINE while True:NEWLINE amount_to_train, epoch = conn_pipe.recv()NEWLINE _train_vae(vae_trainer, replay_buffer, epoch, amount_to_train)NEWLINE conn_pipe.send(vae_trainer.model.__getstate__())NEWLINE replay_buffer.refresh_latents(epoch)NEWLINE
"""Test whether all elements of cls.args are instances of Basic. """NEWLINENEWLINE# NOTE: keep tests sorted by (module, class name) key. If a class can'tNEWLINE# be instantiated, add it here anyway with @SKIP("abstract class) (seeNEWLINE# e.g. Function).NEWLINENEWLINEimport osNEWLINEimport reNEWLINEimport warningsNEWLINEimport ioNEWLINENEWLINEfrom sympy import (Basic, S, symbols, sqrt, sin, oo, Interval, exp, Lambda, pi,NEWLINE Eq, log)NEWLINENEWLINEfrom sympy.core.compatibility import rangeNEWLINEfrom sympy.utilities.pytest import XFAIL, SKIPNEWLINEfrom sympy.utilities.exceptions import SymPyDeprecationWarningNEWLINENEWLINEx, y, z = symbols('x,y,z')NEWLINENEWLINENEWLINEdef test_all_classes_are_tested():NEWLINE this = os.path.split(__file__)[0]NEWLINE path = os.path.join(this, os.pardir, os.pardir)NEWLINE sympy_path = os.path.abspath(path)NEWLINE prefix = os.path.split(sympy_path)[0] + os.sepNEWLINENEWLINE re_cls = re.compile(r"^class ([A-Za-z][A-Za-z0-9_]*)\s*\(", re.MULTILINE)NEWLINENEWLINE modules = {}NEWLINENEWLINE for root, dirs, files in os.walk(sympy_path):NEWLINE module = root.replace(prefix, "").replace(os.sep, ".")NEWLINENEWLINE for file in files:NEWLINE if file.startswith(("_", "test_", "bench_")):NEWLINE continueNEWLINE if not file.endswith(".py"):NEWLINE continueNEWLINENEWLINE with io.open(os.path.join(root, file), "r", encoding='utf-8') as f:NEWLINE text = f.read()NEWLINENEWLINE submodule = module + '.' 
+ file[:-3]NEWLINE names = re_cls.findall(text)NEWLINENEWLINE if not names:NEWLINE continueNEWLINENEWLINE try:NEWLINE mod = __import__(submodule, fromlist=names)NEWLINE except ImportError:NEWLINE continueNEWLINENEWLINE def is_Basic(name):NEWLINE cls = getattr(mod, name)NEWLINE if hasattr(cls, '_sympy_deprecated_func'):NEWLINE cls = cls._sympy_deprecated_funcNEWLINE return issubclass(cls, Basic)NEWLINENEWLINE names = list(filter(is_Basic, names))NEWLINENEWLINE if names:NEWLINE modules[submodule] = namesNEWLINENEWLINE ns = globals()NEWLINE failed = []NEWLINENEWLINE for module, names in modules.items():NEWLINE mod = module.replace('.', '__')NEWLINENEWLINE for name in names:NEWLINE test = 'test_' + mod + '__' + nameNEWLINENEWLINE if test not in ns:NEWLINE failed.append(module + '.' + name)NEWLINENEWLINE # reset all SymPyDeprecationWarning into errorsNEWLINE warnings.simplefilter("error", category=SymPyDeprecationWarning)NEWLINENEWLINE assert not failed, "Missing classes: %s. Please add tests for these to sympy/core/tests/test_args.py." 
% ", ".join(failed)NEWLINENEWLINENEWLINEdef _test_args(obj):NEWLINE return all(isinstance(arg, Basic) for arg in obj.args)NEWLINENEWLINENEWLINEdef test_sympy__assumptions__assume__AppliedPredicate():NEWLINE from sympy.assumptions.assume import AppliedPredicate, PredicateNEWLINE assert _test_args(AppliedPredicate(Predicate("test"), 2))NEWLINENEWLINEdef test_sympy__assumptions__assume__Predicate():NEWLINE from sympy.assumptions.assume import PredicateNEWLINE assert _test_args(Predicate("test"))NEWLINENEWLINEdef test_sympy__assumptions__sathandlers__UnevaluatedOnFree():NEWLINE from sympy.assumptions.sathandlers import UnevaluatedOnFreeNEWLINE from sympy import QNEWLINE assert _test_args(UnevaluatedOnFree(Q.positive))NEWLINE assert _test_args(UnevaluatedOnFree(Q.positive(x)))NEWLINE assert _test_args(UnevaluatedOnFree(Q.positive(x*y)))NEWLINENEWLINEdef test_sympy__assumptions__sathandlers__AllArgs():NEWLINE from sympy.assumptions.sathandlers import AllArgsNEWLINE from sympy import QNEWLINE assert _test_args(AllArgs(Q.positive))NEWLINE assert _test_args(AllArgs(Q.positive(x)))NEWLINE assert _test_args(AllArgs(Q.positive(x*y)))NEWLINENEWLINEdef test_sympy__assumptions__sathandlers__AnyArgs():NEWLINE from sympy.assumptions.sathandlers import AnyArgsNEWLINE from sympy import QNEWLINE assert _test_args(AnyArgs(Q.positive))NEWLINE assert _test_args(AnyArgs(Q.positive(x)))NEWLINE assert _test_args(AnyArgs(Q.positive(x*y)))NEWLINENEWLINEdef test_sympy__assumptions__sathandlers__ExactlyOneArg():NEWLINE from sympy.assumptions.sathandlers import ExactlyOneArgNEWLINE from sympy import QNEWLINE assert _test_args(ExactlyOneArg(Q.positive))NEWLINE assert _test_args(ExactlyOneArg(Q.positive(x)))NEWLINE assert _test_args(ExactlyOneArg(Q.positive(x*y)))NEWLINENEWLINEdef test_sympy__assumptions__sathandlers__CheckOldAssump():NEWLINE from sympy.assumptions.sathandlers import CheckOldAssumpNEWLINE from sympy import QNEWLINE assert _test_args(CheckOldAssump(Q.positive))NEWLINE assert 
_test_args(CheckOldAssump(Q.positive(x)))NEWLINE assert _test_args(CheckOldAssump(Q.positive(x*y)))NEWLINENEWLINEdef test_sympy__assumptions__sathandlers__CheckIsPrime():NEWLINE from sympy.assumptions.sathandlers import CheckIsPrimeNEWLINE from sympy import QNEWLINE # Input must be a numberNEWLINE assert _test_args(CheckIsPrime(Q.positive))NEWLINE assert _test_args(CheckIsPrime(Q.positive(5)))NEWLINENEWLINE@SKIP("abstract Class")NEWLINEdef test_sympy__codegen__ast__AugmentedAssignment():NEWLINE from sympy.codegen.ast import AugmentedAssignmentNEWLINE assert _test_args(AugmentedAssignment(x, 1))NEWLINENEWLINEdef test_sympy__codegen__ast__AddAugmentedAssignment():NEWLINE from sympy.codegen.ast import AddAugmentedAssignmentNEWLINE assert _test_args(AddAugmentedAssignment(x, 1))NEWLINENEWLINEdef test_sympy__codegen__ast__SubAugmentedAssignment():NEWLINE from sympy.codegen.ast import SubAugmentedAssignmentNEWLINE assert _test_args(SubAugmentedAssignment(x, 1))NEWLINENEWLINEdef test_sympy__codegen__ast__MulAugmentedAssignment():NEWLINE from sympy.codegen.ast import MulAugmentedAssignmentNEWLINE assert _test_args(MulAugmentedAssignment(x, 1))NEWLINENEWLINEdef test_sympy__codegen__ast__DivAugmentedAssignment():NEWLINE from sympy.codegen.ast import DivAugmentedAssignmentNEWLINE assert _test_args(DivAugmentedAssignment(x, 1))NEWLINENEWLINEdef test_sympy__codegen__ast__ModAugmentedAssignment():NEWLINE from sympy.codegen.ast import ModAugmentedAssignmentNEWLINE assert _test_args(ModAugmentedAssignment(x, 1))NEWLINENEWLINEdef test_sympy__codegen__ast__CodeBlock():NEWLINE from sympy.codegen.ast import CodeBlock, AssignmentNEWLINE assert _test_args(CodeBlock(Assignment(x, 1), Assignment(y, 2)))NEWLINENEWLINEdef test_sympy__codegen__ast__For():NEWLINE from sympy.codegen.ast import For, CodeBlock, AddAugmentedAssignmentNEWLINE from sympy import RangeNEWLINE assert _test_args(For(x, Range(10), CodeBlock(AddAugmentedAssignment(y, 1))))NEWLINENEWLINENEWLINEdef 
test_sympy__codegen__ast__Token():NEWLINE from sympy.codegen.ast import TokenNEWLINE assert _test_args(Token())NEWLINENEWLINENEWLINEdef test_sympy__codegen__ast__Type():NEWLINE from sympy.codegen.ast import TypeNEWLINE assert _test_args(Type('float128'))NEWLINENEWLINENEWLINEdef test_sympy__codegen__ast__IntBaseType():NEWLINE from sympy.codegen.ast import IntBaseTypeNEWLINE assert _test_args(IntBaseType('bigint'))NEWLINENEWLINENEWLINEdef test_sympy__codegen__ast___SizedIntType():NEWLINE from sympy.codegen.ast import _SizedIntTypeNEWLINE assert _test_args(_SizedIntType('int128', 128))NEWLINENEWLINENEWLINEdef test_sympy__codegen__ast__SignedIntType():NEWLINE from sympy.codegen.ast import SignedIntTypeNEWLINE assert _test_args(SignedIntType('int128_with_sign', 128))NEWLINENEWLINENEWLINEdef test_sympy__codegen__ast__UnsignedIntType():NEWLINE from sympy.codegen.ast import UnsignedIntTypeNEWLINE assert _test_args(UnsignedIntType('unt128', 128))NEWLINENEWLINENEWLINEdef test_sympy__codegen__ast__FloatType():NEWLINE from sympy.codegen.ast import FloatTypeNEWLINE assert _test_args(FloatType('float242', 242, nmant=142, nexp=99))NEWLINENEWLINENEWLINEdef test_sympy__codegen__ast__ComplexType():NEWLINE from sympy.codegen.ast import ComplexTypeNEWLINE assert _test_args(ComplexType('complex42', 42, nmant=15, nexp=5))NEWLINENEWLINENEWLINEdef test_sympy__codegen__ast__Attribute():NEWLINE from sympy.codegen.ast import AttributeNEWLINE assert _test_args(Attribute('noexcept'))NEWLINENEWLINENEWLINEdef test_sympy__codegen__ast__Variable():NEWLINE from sympy.codegen.ast import Variable, Type, value_constNEWLINE assert _test_args(Variable(x))NEWLINE assert _test_args(Variable(y, {value_const}, Type('float32')))NEWLINE assert _test_args(Variable(z, type_=Type('float64')))NEWLINENEWLINENEWLINEdef test_sympy__codegen__ast__Pointer():NEWLINE from sympy.codegen.ast import Pointer, Type, pointer_constNEWLINE assert _test_args(Pointer(x))NEWLINE assert _test_args(Pointer(y, 
                                      type_=Type('float32')))
    assert _test_args(Pointer(z, {pointer_const}, Type('float64')))


# Each test below instantiates the named sympy class and feeds it to the
# module-level _test_args() helper (defined earlier in this file), which
# checks that .args round-trips the constructor arguments.  @XFAIL marks
# classes with known arg-handling gaps; @SKIP marks abstract classes and
# classes deliberately left untested.

def test_sympy__codegen__ast__Declaration():
    from sympy.codegen.ast import Declaration, Variable, Type
    vx = Variable(x, type_=Type('float'))
    assert _test_args(Declaration(vx))
    assert _test_args(Declaration(vx, 3.0))


@XFAIL
def test_sympy__combinatorics__graycode__GrayCode():
    from sympy.combinatorics.graycode import GrayCode
    # an integer is given and returned from GrayCode as the arg
    assert _test_args(GrayCode(3, start='100'))
    assert _test_args(GrayCode(3, rank=1))


def test_sympy__combinatorics__subsets__Subset():
    from sympy.combinatorics.subsets import Subset
    assert _test_args(Subset([0, 1], [0, 1, 2, 3]))
    assert _test_args(Subset(['c', 'd'], ['a', 'b', 'c', 'd']))


@XFAIL
def test_sympy__combinatorics__permutations__Permutation():
    from sympy.combinatorics.permutations import Permutation
    assert _test_args(Permutation([0, 1, 2, 3]))


def test_sympy__combinatorics__perm_groups__PermutationGroup():
    from sympy.combinatorics.permutations import Permutation
    from sympy.combinatorics.perm_groups import PermutationGroup
    assert _test_args(PermutationGroup([Permutation([0, 1])]))


def test_sympy__combinatorics__polyhedron__Polyhedron():
    from sympy.combinatorics.permutations import Permutation
    from sympy.combinatorics.polyhedron import Polyhedron
    from sympy.abc import w, x, y, z
    # the rotation group of the tetrahedron, given as vertex permutations
    pgroup = [Permutation([[0, 1, 2], [3]]),
              Permutation([[0, 1, 3], [2]]),
              Permutation([[0, 2, 3], [1]]),
              Permutation([[1, 2, 3], [0]]),
              Permutation([[0, 1], [2, 3]]),
              Permutation([[0, 2], [1, 3]]),
              Permutation([[0, 3], [1, 2]]),
              Permutation([[0, 1, 2, 3]])]
    corners = [w, x, y, z]
    faces = [(w, x, y), (w, y, z), (w, z, x), (x, y, z)]
    assert _test_args(Polyhedron(corners, faces, pgroup))


@XFAIL
def test_sympy__combinatorics__prufer__Prufer():
    from sympy.combinatorics.prufer import Prufer
    assert _test_args(Prufer([[0, 1], [0, 2], [0, 3]], 4))


def test_sympy__combinatorics__partitions__Partition():
    from sympy.combinatorics.partitions import Partition
    assert _test_args(Partition([1]))


@XFAIL
def test_sympy__combinatorics__partitions__IntegerPartition():
    from sympy.combinatorics.partitions import IntegerPartition
    assert _test_args(IntegerPartition([1]))


def test_sympy__concrete__products__Product():
    from sympy.concrete.products import Product
    assert _test_args(Product(x, (x, 0, 10)))
    assert _test_args(Product(x, (x, 0, y), (y, 0, 10)))


@SKIP("abstract Class")
def test_sympy__concrete__expr_with_limits__ExprWithLimits():
    from sympy.concrete.expr_with_limits import ExprWithLimits
    assert _test_args(ExprWithLimits(x, (x, 0, 10)))
    assert _test_args(ExprWithLimits(x*y, (x, 0, 10.),(y,1.,3)))


@SKIP("abstract Class")
def test_sympy__concrete__expr_with_limits__AddWithLimits():
    from sympy.concrete.expr_with_limits import AddWithLimits
    assert _test_args(AddWithLimits(x, (x, 0, 10)))
    assert _test_args(AddWithLimits(x*y, (x, 0, 10),(y,1,3)))


@SKIP("abstract Class")
def test_sympy__concrete__expr_with_intlimits__ExprWithIntLimits():
    from sympy.concrete.expr_with_intlimits import ExprWithIntLimits
    assert _test_args(ExprWithIntLimits(x, (x, 0, 10)))
    assert _test_args(ExprWithIntLimits(x*y, (x, 0, 10),(y,1,3)))


def test_sympy__concrete__summations__Sum():
    from sympy.concrete.summations import Sum
    assert _test_args(Sum(x, (x, 0, 10)))
    assert _test_args(Sum(x, (x, 0, y), (y, 0, 10)))


def test_sympy__core__add__Add():
    from sympy.core.add import Add
    assert _test_args(Add(x, y, z, 2))


def test_sympy__core__basic__Atom():
    from sympy.core.basic import Atom
    assert _test_args(Atom())


def test_sympy__core__basic__Basic():
    from sympy.core.basic import Basic
    assert _test_args(Basic())


def test_sympy__core__containers__Dict():
    from sympy.core.containers import Dict
    assert _test_args(Dict({x: y, y: z}))


def test_sympy__core__containers__Tuple():
    from sympy.core.containers import Tuple
    assert _test_args(Tuple(x, y, z, 2))


def test_sympy__core__expr__AtomicExpr():
    from sympy.core.expr import AtomicExpr
    assert _test_args(AtomicExpr())


def test_sympy__core__expr__Expr():
    from sympy.core.expr import Expr
    assert _test_args(Expr())


def test_sympy__core__expr__UnevaluatedExpr():
    from sympy.core.expr import UnevaluatedExpr
    from sympy.abc import x
    assert _test_args(UnevaluatedExpr(x))


def test_sympy__core__function__Application():
    from sympy.core.function import Application
    assert _test_args(Application(1, 2, 3))


def test_sympy__core__function__AppliedUndef():
    from sympy.core.function import AppliedUndef
    assert _test_args(AppliedUndef(1, 2, 3))


def test_sympy__core__function__Derivative():
    from sympy.core.function import Derivative
    assert _test_args(Derivative(2, x, y, 3))


@SKIP("abstract class")
def test_sympy__core__function__Function():
    pass


def test_sympy__core__function__Lambda():
    # Lambda is imported at module level, hence no local import here
    assert _test_args(Lambda((x, y), x + y + z))


def test_sympy__core__function__Subs():
    from sympy.core.function import Subs
    assert _test_args(Subs(x + y, x, 2))


def test_sympy__core__function__WildFunction():
    from sympy.core.function import WildFunction
    assert _test_args(WildFunction('f'))


def test_sympy__core__mod__Mod():
    from sympy.core.mod import Mod
    assert _test_args(Mod(x, 2))


def test_sympy__core__mul__Mul():
    from sympy.core.mul import Mul
    assert _test_args(Mul(2, x, y, z))


def test_sympy__core__numbers__Catalan():
    from sympy.core.numbers import Catalan
    assert _test_args(Catalan())


def test_sympy__core__numbers__ComplexInfinity():
    from sympy.core.numbers import ComplexInfinity
    assert _test_args(ComplexInfinity())


def test_sympy__core__numbers__EulerGamma():
    from sympy.core.numbers import EulerGamma
    assert _test_args(EulerGamma())


def test_sympy__core__numbers__Exp1():
    from sympy.core.numbers import Exp1
    assert _test_args(Exp1())


def test_sympy__core__numbers__Float():
    from sympy.core.numbers import Float
    assert _test_args(Float(1.23))


def test_sympy__core__numbers__GoldenRatio():
    from sympy.core.numbers import GoldenRatio
    assert _test_args(GoldenRatio())


def test_sympy__core__numbers__Half():
    from sympy.core.numbers import Half
    assert _test_args(Half())


def test_sympy__core__numbers__ImaginaryUnit():
    from sympy.core.numbers import ImaginaryUnit
    assert _test_args(ImaginaryUnit())


def test_sympy__core__numbers__Infinity():
    from sympy.core.numbers import Infinity
    assert _test_args(Infinity())


def test_sympy__core__numbers__Integer():
    from sympy.core.numbers import Integer
    assert _test_args(Integer(7))


@SKIP("abstract class")
def test_sympy__core__numbers__IntegerConstant():
    pass


def test_sympy__core__numbers__NaN():
    from sympy.core.numbers import NaN
    assert _test_args(NaN())


def test_sympy__core__numbers__NegativeInfinity():
    from sympy.core.numbers import NegativeInfinity
    assert _test_args(NegativeInfinity())


def test_sympy__core__numbers__NegativeOne():
    from sympy.core.numbers import NegativeOne
    assert _test_args(NegativeOne())


def test_sympy__core__numbers__Number():
    from sympy.core.numbers import Number
    assert _test_args(Number(1, 7))


def test_sympy__core__numbers__NumberSymbol():
    from sympy.core.numbers import NumberSymbol
    assert _test_args(NumberSymbol())


def test_sympy__core__numbers__One():
    from sympy.core.numbers import One
    assert _test_args(One())


def test_sympy__core__numbers__Pi():
    from sympy.core.numbers import Pi
    assert _test_args(Pi())


def test_sympy__core__numbers__Rational():
    from sympy.core.numbers import Rational
    assert _test_args(Rational(1, 7))


@SKIP("abstract class")
def test_sympy__core__numbers__RationalConstant():
    pass


def test_sympy__core__numbers__Zero():
    from sympy.core.numbers import Zero
    assert _test_args(Zero())


@SKIP("abstract class")
def test_sympy__core__operations__AssocOp():
    pass


@SKIP("abstract class")
def test_sympy__core__operations__LatticeOp():
    pass


def test_sympy__core__power__Pow():
    from sympy.core.power import Pow
    assert _test_args(Pow(x, 2))


def test_sympy__algebras__quaternion__Quaternion():
    from sympy.algebras.quaternion import Quaternion
    assert _test_args(Quaternion(x, 1, 2, 3))


def test_sympy__core__relational__Equality():
    from sympy.core.relational import Equality
    assert _test_args(Equality(x, 2))


def test_sympy__core__relational__GreaterThan():
    from sympy.core.relational import GreaterThan
    assert _test_args(GreaterThan(x, 2))


def test_sympy__core__relational__LessThan():
    from sympy.core.relational import LessThan
    assert _test_args(LessThan(x, 2))


@SKIP("abstract class")
def test_sympy__core__relational__Relational():
    pass


def test_sympy__core__relational__StrictGreaterThan():
    from sympy.core.relational import StrictGreaterThan
    assert _test_args(StrictGreaterThan(x, 2))


def test_sympy__core__relational__StrictLessThan():
    from sympy.core.relational import StrictLessThan
    assert _test_args(StrictLessThan(x, 2))


def test_sympy__core__relational__Unequality():
    from sympy.core.relational import Unequality
    assert _test_args(Unequality(x, 2))


def test_sympy__sandbox__indexed_integrals__IndexedIntegral():
    from sympy.tensor import IndexedBase, Idx
    from sympy.sandbox.indexed_integrals import IndexedIntegral
    A = IndexedBase('A')
    i, j = symbols('i j', integer=True)
    a1, a2 = symbols('a1:3', cls=Idx)
    assert _test_args(IndexedIntegral(A[a1], A[a2]))
    assert _test_args(IndexedIntegral(A[i], A[j]))


def test_sympy__calculus__util__AccumulationBounds():
    from sympy.calculus.util import AccumulationBounds
    assert _test_args(AccumulationBounds(0, 1))


def test_sympy__sets__sets__EmptySet():
    from sympy.sets.sets import EmptySet
    assert _test_args(EmptySet())


def test_sympy__sets__sets__UniversalSet():
    from sympy.sets.sets import UniversalSet
    assert _test_args(UniversalSet())


def test_sympy__sets__sets__FiniteSet():
    from sympy.sets.sets import FiniteSet
    assert _test_args(FiniteSet(x, y, z))


def test_sympy__sets__sets__Interval():
    from sympy.sets.sets import Interval
    assert _test_args(Interval(0, 1))


def test_sympy__sets__sets__ProductSet():
    from sympy.sets.sets import ProductSet, Interval
    assert _test_args(ProductSet(Interval(0, 1), Interval(0, 1)))


@SKIP("does it make sense to test this?")
def test_sympy__sets__sets__Set():
    from sympy.sets.sets import Set
    assert _test_args(Set())


def test_sympy__sets__sets__Intersection():
    from sympy.sets.sets import Intersection, Interval
    # evaluate=False keeps the unevaluated Intersection node for arg checking
    assert _test_args(Intersection(Interval(0, 3), Interval(2, 4),
                      evaluate=False))


def test_sympy__sets__sets__Union():
    from sympy.sets.sets import Union, Interval
    assert _test_args(Union(Interval(0, 1), Interval(2, 3)))


def test_sympy__sets__sets__Complement():
    from sympy.sets.sets import Complement
    assert _test_args(Complement(Interval(0, 2), Interval(0, 1)))


def test_sympy__sets__sets__SymmetricDifference():
    from sympy.sets.sets import FiniteSet, SymmetricDifference
    assert _test_args(SymmetricDifference(FiniteSet(1, 2, 3), \
           FiniteSet(2, 3, 4)))


def test_sympy__core__trace__Tr():
    from sympy.core.trace import Tr
    a, b = symbols('a b')
    assert _test_args(Tr(a + b))


def test_sympy__sets__fancysets__Naturals():
    from sympy.sets.fancysets import Naturals
    assert _test_args(Naturals())

def test_sympy__sets__fancysets__Naturals0():
    from sympy.sets.fancysets import Naturals0
    assert _test_args(Naturals0())

def test_sympy__sets__fancysets__Integers():
    from sympy.sets.fancysets import Integers
    assert _test_args(Integers())


def test_sympy__sets__fancysets__Reals():
    from sympy.sets.fancysets import Reals
    assert _test_args(Reals())


def test_sympy__sets__fancysets__Complexes():
    from sympy.sets.fancysets import Complexes
    assert _test_args(Complexes())


def test_sympy__sets__fancysets__ComplexRegion():
    from sympy.sets.fancysets import ComplexRegion
    from sympy import S
    from sympy.sets import Interval
    a = Interval(0, 1)
    b = Interval(2, 3)
    theta = Interval(0, 2*S.Pi)
    assert _test_args(ComplexRegion(a*b))
    assert _test_args(ComplexRegion(a*theta, polar=True))


def test_sympy__sets__fancysets__ImageSet():
    from sympy.sets.fancysets import ImageSet
    from sympy import S, Symbol
    x = Symbol('x')
    assert _test_args(ImageSet(Lambda(x, x**2), S.Naturals))


def test_sympy__sets__fancysets__Range():
    from sympy.sets.fancysets import Range
    assert _test_args(Range(1, 5, 1))


def test_sympy__sets__conditionset__ConditionSet():
    from sympy.sets.conditionset import ConditionSet
    from sympy import S, Symbol
    x = Symbol('x')
    assert _test_args(ConditionSet(x, Eq(x**2, 1), S.Reals))


def test_sympy__sets__contains__Contains():
    from sympy.sets.fancysets import Range
    from sympy.sets.contains import Contains
    assert _test_args(Contains(x, Range(0, 10, 2)))


# STATS

# Module-level fixtures shared by the stats tests below: a standard normal
# distribution and a six-sided die distribution.
from sympy.stats.crv_types import NormalDistribution
nd = NormalDistribution(0, 1)
from sympy.stats.frv_types import DieDistribution
die = DieDistribution(6)


def test_sympy__stats__crv__ContinuousDomain():
    from sympy.stats.crv import ContinuousDomain
    assert _test_args(ContinuousDomain({x}, Interval(-oo, oo)))


def test_sympy__stats__crv__SingleContinuousDomain():
    from sympy.stats.crv import SingleContinuousDomain
    assert _test_args(SingleContinuousDomain(x, Interval(-oo, oo)))


def test_sympy__stats__crv__ProductContinuousDomain():
    from sympy.stats.crv import SingleContinuousDomain, ProductContinuousDomain
    D = SingleContinuousDomain(x, Interval(-oo, oo))
    E = SingleContinuousDomain(y, Interval(0, oo))
    assert _test_args(ProductContinuousDomain(D, E))


def test_sympy__stats__crv__ConditionalContinuousDomain():
    from sympy.stats.crv import (SingleContinuousDomain,
            ConditionalContinuousDomain)
    D = SingleContinuousDomain(x, Interval(-oo, oo))
    assert _test_args(ConditionalContinuousDomain(D, x > 0))


def test_sympy__stats__crv__ContinuousPSpace():
    from sympy.stats.crv import ContinuousPSpace, SingleContinuousDomain
    D = SingleContinuousDomain(x, Interval(-oo, oo))
    assert _test_args(ContinuousPSpace(D, nd))


def test_sympy__stats__crv__SingleContinuousPSpace():
    from sympy.stats.crv import SingleContinuousPSpace
    assert _test_args(SingleContinuousPSpace(x, nd))


def test_sympy__stats__crv__ProductContinuousPSpace():
    from sympy.stats.crv import ProductContinuousPSpace, SingleContinuousPSpace
    A = SingleContinuousPSpace(x, nd)
    B = SingleContinuousPSpace(y, nd)
    assert _test_args(ProductContinuousPSpace(A, B))

@SKIP("abstract class")
def test_sympy__stats__crv__SingleContinuousDistribution():
    pass

def test_sympy__stats__drv__SingleDiscreteDomain():
    from sympy.stats.drv import SingleDiscreteDomain
    assert _test_args(SingleDiscreteDomain(x, S.Naturals))

def test_sympy__stats__drv__SingleDiscretePSpace():
    from sympy.stats.drv import SingleDiscretePSpace
    from sympy.stats.drv_types import PoissonDistribution
    assert _test_args(SingleDiscretePSpace(x, PoissonDistribution(1)))

@SKIP("abstract class")
def test_sympy__stats__drv__SingleDiscreteDistribution():
    pass

def test_sympy__stats__rv__RandomDomain():
    from sympy.stats.rv import RandomDomain
    from sympy.sets.sets import FiniteSet
    assert _test_args(RandomDomain(FiniteSet(x), FiniteSet(1, 2, 3)))


def test_sympy__stats__rv__SingleDomain():
    from sympy.stats.rv import SingleDomain
    from sympy.sets.sets import FiniteSet
    assert _test_args(SingleDomain(x, FiniteSet(1, 2, 3)))


def test_sympy__stats__rv__ConditionalDomain():
    from sympy.stats.rv import ConditionalDomain, RandomDomain
    from sympy.sets.sets import FiniteSet
    D = RandomDomain(FiniteSet(x), FiniteSet(1, 2))
    assert _test_args(ConditionalDomain(D, x > 1))


def test_sympy__stats__rv__PSpace():
    from sympy.stats.rv import PSpace, RandomDomain
    from sympy import FiniteSet
    D = RandomDomain(FiniteSet(x), FiniteSet(1, 2, 3, 4, 5, 6))
    assert _test_args(PSpace(D, die))


@SKIP("abstract Class")
def test_sympy__stats__rv__SinglePSpace():
    pass


def test_sympy__stats__rv__RandomSymbol():
    from sympy.stats.rv import RandomSymbol
    from sympy.stats.crv import SingleContinuousPSpace
    A = SingleContinuousPSpace(x, nd)
    assert _test_args(RandomSymbol(x, A))


def test_sympy__stats__rv__ProductPSpace():
    from sympy.stats.rv import ProductPSpace
    from sympy.stats.crv import SingleContinuousPSpace
    A = SingleContinuousPSpace(x, nd)
    B = SingleContinuousPSpace(y, nd)
    assert _test_args(ProductPSpace(A, B))


def test_sympy__stats__rv__ProductDomain():
    from sympy.stats.rv import ProductDomain, SingleDomain
    D = SingleDomain(x, Interval(-oo, oo))
    E = SingleDomain(y, Interval(0, oo))
    assert _test_args(ProductDomain(D, E))


def test_sympy__stats__symbolic_probability__Probability():
    from sympy.stats.symbolic_probability import Probability
    from sympy.stats import Normal
    X = Normal('X', 0, 1)
    assert _test_args(Probability(X > 0))


def test_sympy__stats__symbolic_probability__Expectation():
    from sympy.stats.symbolic_probability import Expectation
    from sympy.stats import Normal
    X = Normal('X', 0, 1)
    assert _test_args(Expectation(X > 0))


def test_sympy__stats__symbolic_probability__Covariance():
    from sympy.stats.symbolic_probability import Covariance
    from sympy.stats import Normal
    X = Normal('X', 0, 1)
    Y = Normal('Y', 0, 3)
    assert _test_args(Covariance(X, Y))


def test_sympy__stats__symbolic_probability__Variance():
    from sympy.stats.symbolic_probability import Variance
    from sympy.stats import Normal
    X = Normal('X', 0, 1)
    assert _test_args(Variance(X))


def test_sympy__stats__frv_types__DiscreteUniformDistribution():
    from sympy.stats.frv_types import DiscreteUniformDistribution
    from sympy.core.containers import Tuple
    assert _test_args(DiscreteUniformDistribution(Tuple(*list(range(6)))))


def test_sympy__stats__frv_types__DieDistribution():
    from sympy.stats.frv_types import DieDistribution
    assert _test_args(DieDistribution(6))


def test_sympy__stats__frv_types__BernoulliDistribution():
    from sympy.stats.frv_types import BernoulliDistribution
    assert _test_args(BernoulliDistribution(S.Half, 0, 1))


def test_sympy__stats__frv_types__BinomialDistribution():
    from sympy.stats.frv_types import BinomialDistribution
    assert _test_args(BinomialDistribution(5, S.Half, 1, 0))


def test_sympy__stats__frv_types__HypergeometricDistribution():
    from sympy.stats.frv_types import HypergeometricDistribution
    assert _test_args(HypergeometricDistribution(10, 5, 3))


def test_sympy__stats__frv_types__RademacherDistribution():
    from sympy.stats.frv_types import RademacherDistribution
    assert _test_args(RademacherDistribution())


def test_sympy__stats__frv__FiniteDomain():
    from sympy.stats.frv import FiniteDomain
    assert _test_args(FiniteDomain({(x, 1), (x, 2)}))  # x can be 1 or 2


def test_sympy__stats__frv__SingleFiniteDomain():
    from sympy.stats.frv import SingleFiniteDomain
    assert _test_args(SingleFiniteDomain(x, {1, 2}))  # x can be 1 or 2


def test_sympy__stats__frv__ProductFiniteDomain():
    from sympy.stats.frv import SingleFiniteDomain, ProductFiniteDomain
    xd = SingleFiniteDomain(x, {1, 2})
    yd = SingleFiniteDomain(y, {1, 2})
    assert _test_args(ProductFiniteDomain(xd, yd))


def test_sympy__stats__frv__ConditionalFiniteDomain():
    from sympy.stats.frv import SingleFiniteDomain, ConditionalFiniteDomain
    xd = SingleFiniteDomain(x, {1, 2})
    assert _test_args(ConditionalFiniteDomain(xd, x > 1))


def test_sympy__stats__frv__FinitePSpace():
    from sympy.stats.frv import FinitePSpace, SingleFiniteDomain
    # NOTE(review): the first xd and p are dead code — xd is immediately
    # rebound to a two-element domain and p is never used; presumably
    # leftovers from an earlier die-based version of this test.
    xd = SingleFiniteDomain(x, {1, 2, 3, 4, 5, 6})
    p = 1.0/6
    xd = SingleFiniteDomain(x, {1, 2})
    assert _test_args(FinitePSpace(xd, {(x, 1): S.Half, (x, 2): S.Half}))


def test_sympy__stats__frv__SingleFinitePSpace():
    from sympy.stats.frv import SingleFinitePSpace
    from sympy import Symbol

    assert _test_args(SingleFinitePSpace(Symbol('x'), die))


def test_sympy__stats__frv__ProductFinitePSpace():
    from sympy.stats.frv import SingleFinitePSpace, ProductFinitePSpace
    from sympy import Symbol
    xp = SingleFinitePSpace(Symbol('x'), die)
    yp = SingleFinitePSpace(Symbol('y'), die)
    assert _test_args(ProductFinitePSpace(xp, yp))

@SKIP("abstract class")
def test_sympy__stats__frv__SingleFiniteDistribution():
    pass

@SKIP("abstract class")
def test_sympy__stats__crv__ContinuousDistribution():
    pass


def test_sympy__stats__frv_types__FiniteDistributionHandmade():
    from sympy.stats.frv_types import FiniteDistributionHandmade
    assert _test_args(FiniteDistributionHandmade({1: 1}))


def test_sympy__stats__crv__ContinuousDistributionHandmade():
    from sympy.stats.crv import ContinuousDistributionHandmade
    from sympy import Symbol, Interval
    assert _test_args(ContinuousDistributionHandmade(Symbol('x'),
                                                     Interval(0, 2)))

def test_sympy__stats__rv__Density():
    from sympy.stats.rv import Density
    from sympy.stats.crv_types import Normal
    assert _test_args(Density(Normal('x', 0, 1)))


def test_sympy__stats__crv_types__ArcsinDistribution():
    from sympy.stats.crv_types import ArcsinDistribution
    assert _test_args(ArcsinDistribution(0, 1))


def test_sympy__stats__crv_types__BeniniDistribution():
    from sympy.stats.crv_types import BeniniDistribution
    assert _test_args(BeniniDistribution(1, 1, 1))


def test_sympy__stats__crv_types__BetaDistribution():
    from sympy.stats.crv_types import BetaDistribution
    assert _test_args(BetaDistribution(1, 1))


def test_sympy__stats__crv_types__BetaPrimeDistribution():
    from sympy.stats.crv_types import BetaPrimeDistribution
    assert _test_args(BetaPrimeDistribution(1, 1))


def test_sympy__stats__crv_types__CauchyDistribution():
    from sympy.stats.crv_types import CauchyDistribution
    assert _test_args(CauchyDistribution(0, 1))


def test_sympy__stats__crv_types__ChiDistribution():
    from sympy.stats.crv_types import ChiDistribution
    assert _test_args(ChiDistribution(1))


def test_sympy__stats__crv_types__ChiNoncentralDistribution():
    from sympy.stats.crv_types import ChiNoncentralDistribution
    assert _test_args(ChiNoncentralDistribution(1,1))


def test_sympy__stats__crv_types__ChiSquaredDistribution():
    from sympy.stats.crv_types import ChiSquaredDistribution
    assert _test_args(ChiSquaredDistribution(1))


def test_sympy__stats__crv_types__DagumDistribution():
    from sympy.stats.crv_types import DagumDistribution
    assert _test_args(DagumDistribution(1, 1, 1))


def test_sympy__stats__crv_types__ExponentialDistribution():
    from sympy.stats.crv_types import ExponentialDistribution
    assert _test_args(ExponentialDistribution(1))


def test_sympy__stats__crv_types__FDistributionDistribution():
    from sympy.stats.crv_types import FDistributionDistribution
    assert _test_args(FDistributionDistribution(1, 1))


def test_sympy__stats__crv_types__FisherZDistribution():
    from sympy.stats.crv_types import FisherZDistribution
    assert _test_args(FisherZDistribution(1, 1))


def test_sympy__stats__crv_types__FrechetDistribution():
    from sympy.stats.crv_types import FrechetDistribution
    assert _test_args(FrechetDistribution(1, 1, 1))


def test_sympy__stats__crv_types__GammaInverseDistribution():
    from sympy.stats.crv_types import GammaInverseDistribution
    assert _test_args(GammaInverseDistribution(1, 1))


def test_sympy__stats__crv_types__GammaDistribution():
    from sympy.stats.crv_types import GammaDistribution
    assert _test_args(GammaDistribution(1, 1))

def test_sympy__stats__crv_types__GumbelDistribution():
    from sympy.stats.crv_types import GumbelDistribution
    assert _test_args(GumbelDistribution(1, 1))

def test_sympy__stats__crv_types__GompertzDistribution():
    from sympy.stats.crv_types import GompertzDistribution
    assert _test_args(GompertzDistribution(1, 1))

def test_sympy__stats__crv_types__KumaraswamyDistribution():
    from sympy.stats.crv_types import KumaraswamyDistribution
    assert _test_args(KumaraswamyDistribution(1, 1))

def test_sympy__stats__crv_types__LaplaceDistribution():
    from sympy.stats.crv_types import LaplaceDistribution
    assert _test_args(LaplaceDistribution(0, 1))


def test_sympy__stats__crv_types__LogisticDistribution():
    from sympy.stats.crv_types import LogisticDistribution
    assert _test_args(LogisticDistribution(0, 1))


def test_sympy__stats__crv_types__LogNormalDistribution():
    from sympy.stats.crv_types import LogNormalDistribution
    assert _test_args(LogNormalDistribution(0, 1))


def test_sympy__stats__crv_types__MaxwellDistribution():
    from sympy.stats.crv_types import MaxwellDistribution
    assert _test_args(MaxwellDistribution(1))


def test_sympy__stats__crv_types__NakagamiDistribution():
    from sympy.stats.crv_types import NakagamiDistribution
    assert _test_args(NakagamiDistribution(1, 1))


def test_sympy__stats__crv_types__NormalDistribution():
    from sympy.stats.crv_types import NormalDistribution
    assert _test_args(NormalDistribution(0, 1))


def test_sympy__stats__crv_types__ParetoDistribution():
    from sympy.stats.crv_types import ParetoDistribution
    assert _test_args(ParetoDistribution(1, 1))


def test_sympy__stats__crv_types__QuadraticUDistribution():
    from sympy.stats.crv_types import QuadraticUDistribution
    assert _test_args(QuadraticUDistribution(1, 2))

def test_sympy__stats__crv_types__RaisedCosineDistribution():
    from sympy.stats.crv_types import RaisedCosineDistribution
    assert _test_args(RaisedCosineDistribution(1, 1))

def test_sympy__stats__crv_types__RayleighDistribution():
    from sympy.stats.crv_types import RayleighDistribution
    assert _test_args(RayleighDistribution(1))

def test_sympy__stats__crv_types__ShiftedGompertzDistribution():
    from sympy.stats.crv_types import ShiftedGompertzDistribution
    assert _test_args(ShiftedGompertzDistribution(1, 1))

def test_sympy__stats__crv_types__StudentTDistribution():
    from sympy.stats.crv_types import StudentTDistribution
    assert _test_args(StudentTDistribution(1))


def test_sympy__stats__crv_types__TriangularDistribution():
    from sympy.stats.crv_types import TriangularDistribution
    assert _test_args(TriangularDistribution(-1, 0, 1))


def test_sympy__stats__crv_types__UniformDistribution():
    from sympy.stats.crv_types import UniformDistribution
    assert _test_args(UniformDistribution(0, 1))


def test_sympy__stats__crv_types__UniformSumDistribution():
    from sympy.stats.crv_types import UniformSumDistribution
    assert _test_args(UniformSumDistribution(1))


def test_sympy__stats__crv_types__VonMisesDistribution():
    from sympy.stats.crv_types import VonMisesDistribution
    assert _test_args(VonMisesDistribution(1, 1))


def test_sympy__stats__crv_types__WeibullDistribution():
    from sympy.stats.crv_types import WeibullDistribution
    assert _test_args(WeibullDistribution(1, 1))


def test_sympy__stats__crv_types__WignerSemicircleDistribution():
    from sympy.stats.crv_types import WignerSemicircleDistribution
    assert _test_args(WignerSemicircleDistribution(1))

def test_sympy__stats__drv_types__PoissonDistribution():
    from sympy.stats.drv_types import PoissonDistribution
    assert _test_args(PoissonDistribution(1))

def test_sympy__stats__drv_types__GeometricDistribution():
    from sympy.stats.drv_types import GeometricDistribution
    assert _test_args(GeometricDistribution(.5))

def test_sympy__core__symbol__Dummy():
    from sympy.core.symbol import Dummy
    assert _test_args(Dummy('t'))


def test_sympy__core__symbol__Symbol():
    from sympy.core.symbol import Symbol
    assert _test_args(Symbol('t'))


def test_sympy__core__symbol__Wild():
    from sympy.core.symbol import Wild
    assert _test_args(Wild('x', exclude=[x]))


@SKIP("abstract class")
def test_sympy__functions__combinatorial__factorials__CombinatorialFunction():
    pass


def test_sympy__functions__combinatorial__factorials__FallingFactorial():
    from sympy.functions.combinatorial.factorials import FallingFactorial
    assert _test_args(FallingFactorial(2, x))


def test_sympy__functions__combinatorial__factorials__MultiFactorial():
    from sympy.functions.combinatorial.factorials import MultiFactorial
    assert _test_args(MultiFactorial(x))


def test_sympy__functions__combinatorial__factorials__RisingFactorial():
    from sympy.functions.combinatorial.factorials import RisingFactorial
    assert _test_args(RisingFactorial(2, x))


def test_sympy__functions__combinatorial__factorials__binomial():
    from sympy.functions.combinatorial.factorials import binomial
    assert _test_args(binomial(2, x))


def test_sympy__functions__combinatorial__factorials__subfactorial():
    from sympy.functions.combinatorial.factorials import subfactorial
    assert _test_args(subfactorial(1))


def test_sympy__functions__combinatorial__factorials__factorial():
    from sympy.functions.combinatorial.factorials import factorial
    assert _test_args(factorial(x))


def test_sympy__functions__combinatorial__factorials__factorial2():
    from sympy.functions.combinatorial.factorials import factorial2
    assert _test_args(factorial2(x))


def test_sympy__functions__combinatorial__numbers__bell():
    from sympy.functions.combinatorial.numbers import bell
    assert _test_args(bell(x, y))


def test_sympy__functions__combinatorial__numbers__bernoulli():
    from sympy.functions.combinatorial.numbers import bernoulli
    assert _test_args(bernoulli(x))


def test_sympy__functions__combinatorial__numbers__catalan():
    from sympy.functions.combinatorial.numbers import catalan
    assert _test_args(catalan(x))


def test_sympy__functions__combinatorial__numbers__genocchi():
    from sympy.functions.combinatorial.numbers import genocchi
    assert _test_args(genocchi(x))


def test_sympy__functions__combinatorial__numbers__euler():
    from sympy.functions.combinatorial.numbers import euler
    assert _test_args(euler(x))


def test_sympy__functions__combinatorial__numbers__fibonacci():
    from sympy.functions.combinatorial.numbers import fibonacci
    assert _test_args(fibonacci(x))


def test_sympy__functions__combinatorial__numbers__harmonic():
    from sympy.functions.combinatorial.numbers import harmonic
    assert _test_args(harmonic(x, 2))


def test_sympy__functions__combinatorial__numbers__lucas():
    from sympy.functions.combinatorial.numbers import lucas
    assert _test_args(lucas(x))


def test_sympy__functions__elementary__complexes__Abs():
    from sympy.functions.elementary.complexes import Abs
    assert _test_args(Abs(x))


def test_sympy__functions__elementary__complexes__adjoint():
    from sympy.functions.elementary.complexes import adjoint
    assert _test_args(adjoint(x))


def test_sympy__functions__elementary__complexes__arg():
    from sympy.functions.elementary.complexes import arg
    assert _test_args(arg(x))


def test_sympy__functions__elementary__complexes__conjugate():
    from sympy.functions.elementary.complexes import conjugate
    assert _test_args(conjugate(x))


def test_sympy__functions__elementary__complexes__im():
    from sympy.functions.elementary.complexes import im
    assert _test_args(im(x))


def test_sympy__functions__elementary__complexes__re():
    from sympy.functions.elementary.complexes import re
    assert _test_args(re(x))


def test_sympy__functions__elementary__complexes__sign():
    from sympy.functions.elementary.complexes import sign
    assert _test_args(sign(x))


def test_sympy__functions__elementary__complexes__polar_lift():
    from sympy.functions.elementary.complexes import polar_lift
    assert _test_args(polar_lift(x))


def test_sympy__functions__elementary__complexes__periodic_argument():
    from sympy.functions.elementary.complexes import periodic_argument
    assert _test_args(periodic_argument(x, y))


def test_sympy__functions__elementary__complexes__principal_branch():
    from sympy.functions.elementary.complexes import principal_branch
    assert _test_args(principal_branch(x, y))


def test_sympy__functions__elementary__complexes__transpose():
    from sympy.functions.elementary.complexes import transpose
    assert _test_args(transpose(x))


def test_sympy__functions__elementary__exponential__LambertW():
    from sympy.functions.elementary.exponential import LambertW
    assert _test_args(LambertW(2))


@SKIP("abstract class")
def test_sympy__functions__elementary__exponential__ExpBase():
    pass


def test_sympy__functions__elementary__exponential__exp():
    from sympy.functions.elementary.exponential import exp
    assert _test_args(exp(2))


def test_sympy__functions__elementary__exponential__exp_polar():
    from sympy.functions.elementary.exponential import exp_polar
    assert _test_args(exp_polar(2))


def test_sympy__functions__elementary__exponential__log():
    from sympy.functions.elementary.exponential import log
    assert _test_args(log(2))


@SKIP("abstract class")
def test_sympy__functions__elementary__hyperbolic__HyperbolicFunction():
    pass


@SKIP("abstract class")
def test_sympy__functions__elementary__hyperbolic__ReciprocalHyperbolicFunction():
    pass


@SKIP("abstract class")
def test_sympy__functions__elementary__hyperbolic__InverseHyperbolicFunction():
    pass


def test_sympy__functions__elementary__hyperbolic__acosh():
    from sympy.functions.elementary.hyperbolic import acosh
    assert _test_args(acosh(2))


def test_sympy__functions__elementary__hyperbolic__acoth():
    from sympy.functions.elementary.hyperbolic import acoth
    assert _test_args(acoth(2))


def test_sympy__functions__elementary__hyperbolic__asinh():
    from sympy.functions.elementary.hyperbolic import asinh
    assert _test_args(asinh(2))


def test_sympy__functions__elementary__hyperbolic__atanh():
    from sympy.functions.elementary.hyperbolic import atanh
    assert _test_args(atanh(2))


def test_sympy__functions__elementary__hyperbolic__asech():
    from sympy.functions.elementary.hyperbolic import asech
    assert _test_args(asech(2))

def test_sympy__functions__elementary__hyperbolic__acsch():
    from sympy.functions.elementary.hyperbolic import acsch
    assert _test_args(acsch(2))

def test_sympy__functions__elementary__hyperbolic__cosh():
    from sympy.functions.elementary.hyperbolic import cosh
    assert _test_args(cosh(2))


def test_sympy__functions__elementary__hyperbolic__coth():
    from sympy.functions.elementary.hyperbolic import coth
    assert _test_args(coth(2))


def test_sympy__functions__elementary__hyperbolic__csch():
    from sympy.functions.elementary.hyperbolic import csch
    assert _test_args(csch(2))


def test_sympy__functions__elementary__hyperbolic__sech():
    from sympy.functions.elementary.hyperbolic import sech
    assert _test_args(sech(2))


def test_sympy__functions__elementary__hyperbolic__sinh():
    from sympy.functions.elementary.hyperbolic import sinh
    assert _test_args(sinh(2))


def test_sympy__functions__elementary__hyperbolic__tanh():
    from sympy.functions.elementary.hyperbolic import tanh
    assert _test_args(tanh(2))


@SKIP("does this work at all?")
def test_sympy__functions__elementary__integers__RoundFunction():
    from sympy.functions.elementary.integers import RoundFunction
    assert
_test_args(RoundFunction())NEWLINENEWLINENEWLINEdef test_sympy__functions__elementary__integers__ceiling():NEWLINE from sympy.functions.elementary.integers import ceilingNEWLINE assert _test_args(ceiling(x))NEWLINENEWLINENEWLINEdef test_sympy__functions__elementary__integers__floor():NEWLINE from sympy.functions.elementary.integers import floorNEWLINE assert _test_args(floor(x))NEWLINENEWLINENEWLINEdef test_sympy__functions__elementary__integers__frac():NEWLINE from sympy.functions.elementary.integers import fracNEWLINE assert _test_args(frac(x))NEWLINENEWLINENEWLINEdef test_sympy__functions__elementary__miscellaneous__IdentityFunction():NEWLINE from sympy.functions.elementary.miscellaneous import IdentityFunctionNEWLINE assert _test_args(IdentityFunction())NEWLINENEWLINENEWLINEdef test_sympy__functions__elementary__miscellaneous__Max():NEWLINE from sympy.functions.elementary.miscellaneous import MaxNEWLINE assert _test_args(Max(x, 2))NEWLINENEWLINENEWLINEdef test_sympy__functions__elementary__miscellaneous__Min():NEWLINE from sympy.functions.elementary.miscellaneous import MinNEWLINE assert _test_args(Min(x, 2))NEWLINENEWLINENEWLINE@SKIP("abstract class")NEWLINEdef test_sympy__functions__elementary__miscellaneous__MinMaxBase():NEWLINE passNEWLINENEWLINENEWLINEdef test_sympy__functions__elementary__piecewise__ExprCondPair():NEWLINE from sympy.functions.elementary.piecewise import ExprCondPairNEWLINE assert _test_args(ExprCondPair(1, True))NEWLINENEWLINENEWLINEdef test_sympy__functions__elementary__piecewise__Piecewise():NEWLINE from sympy.functions.elementary.piecewise import PiecewiseNEWLINE assert _test_args(Piecewise((1, x >= 0), (0, True)))NEWLINENEWLINENEWLINE@SKIP("abstract class")NEWLINEdef test_sympy__functions__elementary__trigonometric__TrigonometricFunction():NEWLINE passNEWLINENEWLINE@SKIP("abstract class")NEWLINEdef test_sympy__functions__elementary__trigonometric__ReciprocalTrigonometricFunction():NEWLINE passNEWLINENEWLINE@SKIP("abstract 
class")NEWLINEdef test_sympy__functions__elementary__trigonometric__InverseTrigonometricFunction():NEWLINE passNEWLINENEWLINEdef test_sympy__functions__elementary__trigonometric__acos():NEWLINE from sympy.functions.elementary.trigonometric import acosNEWLINE assert _test_args(acos(2))NEWLINENEWLINENEWLINEdef test_sympy__functions__elementary__trigonometric__acot():NEWLINE from sympy.functions.elementary.trigonometric import acotNEWLINE assert _test_args(acot(2))NEWLINENEWLINENEWLINEdef test_sympy__functions__elementary__trigonometric__asin():NEWLINE from sympy.functions.elementary.trigonometric import asinNEWLINE assert _test_args(asin(2))NEWLINENEWLINENEWLINEdef test_sympy__functions__elementary__trigonometric__asec():NEWLINE from sympy.functions.elementary.trigonometric import asecNEWLINE assert _test_args(asec(2))NEWLINENEWLINENEWLINEdef test_sympy__functions__elementary__trigonometric__acsc():NEWLINE from sympy.functions.elementary.trigonometric import acscNEWLINE assert _test_args(acsc(2))NEWLINENEWLINENEWLINEdef test_sympy__functions__elementary__trigonometric__atan():NEWLINE from sympy.functions.elementary.trigonometric import atanNEWLINE assert _test_args(atan(2))NEWLINENEWLINENEWLINEdef test_sympy__functions__elementary__trigonometric__atan2():NEWLINE from sympy.functions.elementary.trigonometric import atan2NEWLINE assert _test_args(atan2(2, 3))NEWLINENEWLINENEWLINEdef test_sympy__functions__elementary__trigonometric__cos():NEWLINE from sympy.functions.elementary.trigonometric import cosNEWLINE assert _test_args(cos(2))NEWLINENEWLINENEWLINEdef test_sympy__functions__elementary__trigonometric__csc():NEWLINE from sympy.functions.elementary.trigonometric import cscNEWLINE assert _test_args(csc(2))NEWLINENEWLINENEWLINEdef test_sympy__functions__elementary__trigonometric__cot():NEWLINE from sympy.functions.elementary.trigonometric import cotNEWLINE assert _test_args(cot(2))NEWLINENEWLINENEWLINEdef test_sympy__functions__elementary__trigonometric__sin():NEWLINE 
assert _test_args(sin(2))NEWLINENEWLINENEWLINEdef test_sympy__functions__elementary__trigonometric__sinc():NEWLINE from sympy.functions.elementary.trigonometric import sincNEWLINE assert _test_args(sinc(2))NEWLINENEWLINENEWLINEdef test_sympy__functions__elementary__trigonometric__sec():NEWLINE from sympy.functions.elementary.trigonometric import secNEWLINE assert _test_args(sec(2))NEWLINENEWLINENEWLINEdef test_sympy__functions__elementary__trigonometric__tan():NEWLINE from sympy.functions.elementary.trigonometric import tanNEWLINE assert _test_args(tan(2))NEWLINENEWLINENEWLINE@SKIP("abstract class")NEWLINEdef test_sympy__functions__special__bessel__BesselBase():NEWLINE passNEWLINENEWLINENEWLINE@SKIP("abstract class")NEWLINEdef test_sympy__functions__special__bessel__SphericalBesselBase():NEWLINE passNEWLINENEWLINENEWLINE@SKIP("abstract class")NEWLINEdef test_sympy__functions__special__bessel__SphericalHankelBase():NEWLINE passNEWLINENEWLINENEWLINEdef test_sympy__functions__special__bessel__besseli():NEWLINE from sympy.functions.special.bessel import besseliNEWLINE assert _test_args(besseli(x, 1))NEWLINENEWLINENEWLINEdef test_sympy__functions__special__bessel__besselj():NEWLINE from sympy.functions.special.bessel import besseljNEWLINE assert _test_args(besselj(x, 1))NEWLINENEWLINENEWLINEdef test_sympy__functions__special__bessel__besselk():NEWLINE from sympy.functions.special.bessel import besselkNEWLINE assert _test_args(besselk(x, 1))NEWLINENEWLINENEWLINEdef test_sympy__functions__special__bessel__bessely():NEWLINE from sympy.functions.special.bessel import besselyNEWLINE assert _test_args(bessely(x, 1))NEWLINENEWLINENEWLINEdef test_sympy__functions__special__bessel__hankel1():NEWLINE from sympy.functions.special.bessel import hankel1NEWLINE assert _test_args(hankel1(x, 1))NEWLINENEWLINENEWLINEdef test_sympy__functions__special__bessel__hankel2():NEWLINE from sympy.functions.special.bessel import hankel2NEWLINE assert _test_args(hankel2(x, 
1))NEWLINENEWLINENEWLINEdef test_sympy__functions__special__bessel__jn():NEWLINE from sympy.functions.special.bessel import jnNEWLINE assert _test_args(jn(0, x))NEWLINENEWLINENEWLINEdef test_sympy__functions__special__bessel__yn():NEWLINE from sympy.functions.special.bessel import ynNEWLINE assert _test_args(yn(0, x))NEWLINENEWLINENEWLINEdef test_sympy__functions__special__bessel__hn1():NEWLINE from sympy.functions.special.bessel import hn1NEWLINE assert _test_args(hn1(0, x))NEWLINENEWLINENEWLINEdef test_sympy__functions__special__bessel__hn2():NEWLINE from sympy.functions.special.bessel import hn2NEWLINE assert _test_args(hn2(0, x))NEWLINENEWLINENEWLINEdef test_sympy__functions__special__bessel__AiryBase():NEWLINE passNEWLINENEWLINENEWLINEdef test_sympy__functions__special__bessel__airyai():NEWLINE from sympy.functions.special.bessel import airyaiNEWLINE assert _test_args(airyai(2))NEWLINENEWLINENEWLINEdef test_sympy__functions__special__bessel__airybi():NEWLINE from sympy.functions.special.bessel import airybiNEWLINE assert _test_args(airybi(2))NEWLINENEWLINENEWLINEdef test_sympy__functions__special__bessel__airyaiprime():NEWLINE from sympy.functions.special.bessel import airyaiprimeNEWLINE assert _test_args(airyaiprime(2))NEWLINENEWLINENEWLINEdef test_sympy__functions__special__bessel__airybiprime():NEWLINE from sympy.functions.special.bessel import airybiprimeNEWLINE assert _test_args(airybiprime(2))NEWLINENEWLINENEWLINEdef test_sympy__functions__special__elliptic_integrals__elliptic_k():NEWLINE from sympy.functions.special.elliptic_integrals import elliptic_k as KNEWLINE assert _test_args(K(x))NEWLINENEWLINENEWLINEdef test_sympy__functions__special__elliptic_integrals__elliptic_f():NEWLINE from sympy.functions.special.elliptic_integrals import elliptic_f as FNEWLINE assert _test_args(F(x, y))NEWLINENEWLINENEWLINEdef test_sympy__functions__special__elliptic_integrals__elliptic_e():NEWLINE from sympy.functions.special.elliptic_integrals import elliptic_e as 
ENEWLINE assert _test_args(E(x))NEWLINE assert _test_args(E(x, y))NEWLINENEWLINENEWLINEdef test_sympy__functions__special__elliptic_integrals__elliptic_pi():NEWLINE from sympy.functions.special.elliptic_integrals import elliptic_pi as PNEWLINE assert _test_args(P(x, y))NEWLINE assert _test_args(P(x, y, z))NEWLINENEWLINENEWLINEdef test_sympy__functions__special__delta_functions__DiracDelta():NEWLINE from sympy.functions.special.delta_functions import DiracDeltaNEWLINE assert _test_args(DiracDelta(x, 1))NEWLINENEWLINENEWLINEdef test_sympy__functions__special__singularity_functions__SingularityFunction():NEWLINE from sympy.functions.special.singularity_functions import SingularityFunctionNEWLINE assert _test_args(SingularityFunction(x, y, z))NEWLINENEWLINENEWLINEdef test_sympy__functions__special__delta_functions__Heaviside():NEWLINE from sympy.functions.special.delta_functions import HeavisideNEWLINE assert _test_args(Heaviside(x))NEWLINENEWLINENEWLINEdef test_sympy__functions__special__error_functions__erf():NEWLINE from sympy.functions.special.error_functions import erfNEWLINE assert _test_args(erf(2))NEWLINENEWLINEdef test_sympy__functions__special__error_functions__erfc():NEWLINE from sympy.functions.special.error_functions import erfcNEWLINE assert _test_args(erfc(2))NEWLINENEWLINEdef test_sympy__functions__special__error_functions__erfi():NEWLINE from sympy.functions.special.error_functions import erfiNEWLINE assert _test_args(erfi(2))NEWLINENEWLINEdef test_sympy__functions__special__error_functions__erf2():NEWLINE from sympy.functions.special.error_functions import erf2NEWLINE assert _test_args(erf2(2, 3))NEWLINENEWLINEdef test_sympy__functions__special__error_functions__erfinv():NEWLINE from sympy.functions.special.error_functions import erfinvNEWLINE assert _test_args(erfinv(2))NEWLINENEWLINEdef test_sympy__functions__special__error_functions__erfcinv():NEWLINE from sympy.functions.special.error_functions import erfcinvNEWLINE assert 
_test_args(erfcinv(2))NEWLINENEWLINEdef test_sympy__functions__special__error_functions__erf2inv():NEWLINE from sympy.functions.special.error_functions import erf2invNEWLINE assert _test_args(erf2inv(2, 3))NEWLINENEWLINE@SKIP("abstract class")NEWLINEdef test_sympy__functions__special__error_functions__FresnelIntegral():NEWLINE passNEWLINENEWLINENEWLINEdef test_sympy__functions__special__error_functions__fresnels():NEWLINE from sympy.functions.special.error_functions import fresnelsNEWLINE assert _test_args(fresnels(2))NEWLINENEWLINENEWLINEdef test_sympy__functions__special__error_functions__fresnelc():NEWLINE from sympy.functions.special.error_functions import fresnelcNEWLINE assert _test_args(fresnelc(2))NEWLINENEWLINENEWLINEdef test_sympy__functions__special__error_functions__erfs():NEWLINE from sympy.functions.special.error_functions import _erfsNEWLINE assert _test_args(_erfs(2))NEWLINENEWLINENEWLINEdef test_sympy__functions__special__error_functions__Ei():NEWLINE from sympy.functions.special.error_functions import EiNEWLINE assert _test_args(Ei(2))NEWLINENEWLINENEWLINEdef test_sympy__functions__special__error_functions__li():NEWLINE from sympy.functions.special.error_functions import liNEWLINE assert _test_args(li(2))NEWLINENEWLINENEWLINEdef test_sympy__functions__special__error_functions__Li():NEWLINE from sympy.functions.special.error_functions import LiNEWLINE assert _test_args(Li(2))NEWLINENEWLINENEWLINE@SKIP("abstract class")NEWLINEdef test_sympy__functions__special__error_functions__TrigonometricIntegral():NEWLINE passNEWLINENEWLINENEWLINEdef test_sympy__functions__special__error_functions__Si():NEWLINE from sympy.functions.special.error_functions import SiNEWLINE assert _test_args(Si(2))NEWLINENEWLINENEWLINEdef test_sympy__functions__special__error_functions__Ci():NEWLINE from sympy.functions.special.error_functions import CiNEWLINE assert _test_args(Ci(2))NEWLINENEWLINENEWLINEdef test_sympy__functions__special__error_functions__Shi():NEWLINE from 
sympy.functions.special.error_functions import ShiNEWLINE assert _test_args(Shi(2))NEWLINENEWLINENEWLINEdef test_sympy__functions__special__error_functions__Chi():NEWLINE from sympy.functions.special.error_functions import ChiNEWLINE assert _test_args(Chi(2))NEWLINENEWLINENEWLINEdef test_sympy__functions__special__error_functions__expint():NEWLINE from sympy.functions.special.error_functions import expintNEWLINE assert _test_args(expint(y, x))NEWLINENEWLINENEWLINEdef test_sympy__functions__special__gamma_functions__gamma():NEWLINE from sympy.functions.special.gamma_functions import gammaNEWLINE assert _test_args(gamma(x))NEWLINENEWLINENEWLINEdef test_sympy__functions__special__gamma_functions__loggamma():NEWLINE from sympy.functions.special.gamma_functions import loggammaNEWLINE assert _test_args(loggamma(2))NEWLINENEWLINENEWLINEdef test_sympy__functions__special__gamma_functions__lowergamma():NEWLINE from sympy.functions.special.gamma_functions import lowergammaNEWLINE assert _test_args(lowergamma(x, 2))NEWLINENEWLINENEWLINEdef test_sympy__functions__special__gamma_functions__polygamma():NEWLINE from sympy.functions.special.gamma_functions import polygammaNEWLINE assert _test_args(polygamma(x, 2))NEWLINENEWLINENEWLINEdef test_sympy__functions__special__gamma_functions__uppergamma():NEWLINE from sympy.functions.special.gamma_functions import uppergammaNEWLINE assert _test_args(uppergamma(x, 2))NEWLINENEWLINENEWLINEdef test_sympy__functions__special__beta_functions__beta():NEWLINE from sympy.functions.special.beta_functions import betaNEWLINE assert _test_args(beta(x, x))NEWLINENEWLINENEWLINEdef test_sympy__functions__special__mathieu_functions__MathieuBase():NEWLINE passNEWLINENEWLINENEWLINEdef test_sympy__functions__special__mathieu_functions__mathieus():NEWLINE from sympy.functions.special.mathieu_functions import mathieusNEWLINE assert _test_args(mathieus(1, 1, 1))NEWLINENEWLINENEWLINEdef test_sympy__functions__special__mathieu_functions__mathieuc():NEWLINE from 
sympy.functions.special.mathieu_functions import mathieucNEWLINE assert _test_args(mathieuc(1, 1, 1))NEWLINENEWLINENEWLINEdef test_sympy__functions__special__mathieu_functions__mathieusprime():NEWLINE from sympy.functions.special.mathieu_functions import mathieusprimeNEWLINE assert _test_args(mathieusprime(1, 1, 1))NEWLINENEWLINENEWLINEdef test_sympy__functions__special__mathieu_functions__mathieucprime():NEWLINE from sympy.functions.special.mathieu_functions import mathieucprimeNEWLINE assert _test_args(mathieucprime(1, 1, 1))NEWLINENEWLINENEWLINE@SKIP("abstract class")NEWLINEdef test_sympy__functions__special__hyper__TupleParametersBase():NEWLINE passNEWLINENEWLINENEWLINE@SKIP("abstract class")NEWLINEdef test_sympy__functions__special__hyper__TupleArg():NEWLINE passNEWLINENEWLINENEWLINEdef test_sympy__functions__special__hyper__hyper():NEWLINE from sympy.functions.special.hyper import hyperNEWLINE assert _test_args(hyper([1, 2, 3], [4, 5], x))NEWLINENEWLINENEWLINEdef test_sympy__functions__special__hyper__meijerg():NEWLINE from sympy.functions.special.hyper import meijergNEWLINE assert _test_args(meijerg([1, 2, 3], [4, 5], [6], [], x))NEWLINENEWLINENEWLINE@SKIP("abstract class")NEWLINEdef test_sympy__functions__special__hyper__HyperRep():NEWLINE passNEWLINENEWLINENEWLINEdef test_sympy__functions__special__hyper__HyperRep_power1():NEWLINE from sympy.functions.special.hyper import HyperRep_power1NEWLINE assert _test_args(HyperRep_power1(x, y))NEWLINENEWLINENEWLINEdef test_sympy__functions__special__hyper__HyperRep_power2():NEWLINE from sympy.functions.special.hyper import HyperRep_power2NEWLINE assert _test_args(HyperRep_power2(x, y))NEWLINENEWLINENEWLINEdef test_sympy__functions__special__hyper__HyperRep_log1():NEWLINE from sympy.functions.special.hyper import HyperRep_log1NEWLINE assert _test_args(HyperRep_log1(x))NEWLINENEWLINENEWLINEdef test_sympy__functions__special__hyper__HyperRep_atanh():NEWLINE from sympy.functions.special.hyper import 
HyperRep_atanhNEWLINE assert _test_args(HyperRep_atanh(x))NEWLINENEWLINENEWLINEdef test_sympy__functions__special__hyper__HyperRep_asin1():NEWLINE from sympy.functions.special.hyper import HyperRep_asin1NEWLINE assert _test_args(HyperRep_asin1(x))NEWLINENEWLINENEWLINEdef test_sympy__functions__special__hyper__HyperRep_asin2():NEWLINE from sympy.functions.special.hyper import HyperRep_asin2NEWLINE assert _test_args(HyperRep_asin2(x))NEWLINENEWLINENEWLINEdef test_sympy__functions__special__hyper__HyperRep_sqrts1():NEWLINE from sympy.functions.special.hyper import HyperRep_sqrts1NEWLINE assert _test_args(HyperRep_sqrts1(x, y))NEWLINENEWLINENEWLINEdef test_sympy__functions__special__hyper__HyperRep_sqrts2():NEWLINE from sympy.functions.special.hyper import HyperRep_sqrts2NEWLINE assert _test_args(HyperRep_sqrts2(x, y))NEWLINENEWLINENEWLINEdef test_sympy__functions__special__hyper__HyperRep_log2():NEWLINE from sympy.functions.special.hyper import HyperRep_log2NEWLINE assert _test_args(HyperRep_log2(x))NEWLINENEWLINENEWLINEdef test_sympy__functions__special__hyper__HyperRep_cosasin():NEWLINE from sympy.functions.special.hyper import HyperRep_cosasinNEWLINE assert _test_args(HyperRep_cosasin(x, y))NEWLINENEWLINENEWLINEdef test_sympy__functions__special__hyper__HyperRep_sinasin():NEWLINE from sympy.functions.special.hyper import HyperRep_sinasinNEWLINE assert _test_args(HyperRep_sinasin(x, y))NEWLINENEWLINENEWLINE@SKIP("abstract class")NEWLINEdef test_sympy__functions__special__polynomials__OrthogonalPolynomial():NEWLINE passNEWLINENEWLINENEWLINEdef test_sympy__functions__special__polynomials__jacobi():NEWLINE from sympy.functions.special.polynomials import jacobiNEWLINE assert _test_args(jacobi(x, 2, 2, 2))NEWLINENEWLINENEWLINEdef test_sympy__functions__special__polynomials__gegenbauer():NEWLINE from sympy.functions.special.polynomials import gegenbauerNEWLINE assert _test_args(gegenbauer(x, 2, 2))NEWLINENEWLINENEWLINEdef 
test_sympy__functions__special__polynomials__chebyshevt():NEWLINE from sympy.functions.special.polynomials import chebyshevtNEWLINE assert _test_args(chebyshevt(x, 2))NEWLINENEWLINENEWLINEdef test_sympy__functions__special__polynomials__chebyshevt_root():NEWLINE from sympy.functions.special.polynomials import chebyshevt_rootNEWLINE assert _test_args(chebyshevt_root(3, 2))NEWLINENEWLINENEWLINEdef test_sympy__functions__special__polynomials__chebyshevu():NEWLINE from sympy.functions.special.polynomials import chebyshevuNEWLINE assert _test_args(chebyshevu(x, 2))NEWLINENEWLINENEWLINEdef test_sympy__functions__special__polynomials__chebyshevu_root():NEWLINE from sympy.functions.special.polynomials import chebyshevu_rootNEWLINE assert _test_args(chebyshevu_root(3, 2))NEWLINENEWLINENEWLINEdef test_sympy__functions__special__polynomials__hermite():NEWLINE from sympy.functions.special.polynomials import hermiteNEWLINE assert _test_args(hermite(x, 2))NEWLINENEWLINENEWLINEdef test_sympy__functions__special__polynomials__legendre():NEWLINE from sympy.functions.special.polynomials import legendreNEWLINE assert _test_args(legendre(x, 2))NEWLINENEWLINENEWLINEdef test_sympy__functions__special__polynomials__assoc_legendre():NEWLINE from sympy.functions.special.polynomials import assoc_legendreNEWLINE assert _test_args(assoc_legendre(x, 0, y))NEWLINENEWLINENEWLINEdef test_sympy__functions__special__polynomials__laguerre():NEWLINE from sympy.functions.special.polynomials import laguerreNEWLINE assert _test_args(laguerre(x, 2))NEWLINENEWLINENEWLINEdef test_sympy__functions__special__polynomials__assoc_laguerre():NEWLINE from sympy.functions.special.polynomials import assoc_laguerreNEWLINE assert _test_args(assoc_laguerre(x, 0, y))NEWLINENEWLINENEWLINEdef test_sympy__functions__special__spherical_harmonics__Ynm():NEWLINE from sympy.functions.special.spherical_harmonics import YnmNEWLINE assert _test_args(Ynm(1, 1, x, y))NEWLINENEWLINENEWLINEdef 
test_sympy__functions__special__spherical_harmonics__Znm():NEWLINE from sympy.functions.special.spherical_harmonics import ZnmNEWLINE assert _test_args(Znm(1, 1, x, y))NEWLINENEWLINENEWLINEdef test_sympy__functions__special__tensor_functions__LeviCivita():NEWLINE from sympy.functions.special.tensor_functions import LeviCivitaNEWLINE assert _test_args(LeviCivita(x, y, 2))NEWLINENEWLINENEWLINEdef test_sympy__functions__special__tensor_functions__KroneckerDelta():NEWLINE from sympy.functions.special.tensor_functions import KroneckerDeltaNEWLINE assert _test_args(KroneckerDelta(x, y))NEWLINENEWLINENEWLINEdef test_sympy__functions__special__zeta_functions__dirichlet_eta():NEWLINE from sympy.functions.special.zeta_functions import dirichlet_etaNEWLINE assert _test_args(dirichlet_eta(x))NEWLINENEWLINENEWLINEdef test_sympy__functions__special__zeta_functions__zeta():NEWLINE from sympy.functions.special.zeta_functions import zetaNEWLINE assert _test_args(zeta(101))NEWLINENEWLINENEWLINEdef test_sympy__functions__special__zeta_functions__lerchphi():NEWLINE from sympy.functions.special.zeta_functions import lerchphiNEWLINE assert _test_args(lerchphi(x, y, z))NEWLINENEWLINENEWLINEdef test_sympy__functions__special__zeta_functions__polylog():NEWLINE from sympy.functions.special.zeta_functions import polylogNEWLINE assert _test_args(polylog(x, y))NEWLINENEWLINENEWLINEdef test_sympy__functions__special__zeta_functions__stieltjes():NEWLINE from sympy.functions.special.zeta_functions import stieltjesNEWLINE assert _test_args(stieltjes(x, y))NEWLINENEWLINENEWLINEdef test_sympy__integrals__integrals__Integral():NEWLINE from sympy.integrals.integrals import IntegralNEWLINE assert _test_args(Integral(2, (x, 0, 1)))NEWLINENEWLINENEWLINEdef test_sympy__integrals__risch__NonElementaryIntegral():NEWLINE from sympy.integrals.risch import NonElementaryIntegralNEWLINE assert _test_args(NonElementaryIntegral(exp(-x**2), x))NEWLINENEWLINENEWLINE@SKIP("abstract class")NEWLINEdef 
test_sympy__integrals__transforms__IntegralTransform():NEWLINE passNEWLINENEWLINENEWLINEdef test_sympy__integrals__transforms__MellinTransform():NEWLINE from sympy.integrals.transforms import MellinTransformNEWLINE assert _test_args(MellinTransform(2, x, y))NEWLINENEWLINENEWLINEdef test_sympy__integrals__transforms__InverseMellinTransform():NEWLINE from sympy.integrals.transforms import InverseMellinTransformNEWLINE assert _test_args(InverseMellinTransform(2, x, y, 0, 1))NEWLINENEWLINENEWLINEdef test_sympy__integrals__transforms__LaplaceTransform():NEWLINE from sympy.integrals.transforms import LaplaceTransformNEWLINE assert _test_args(LaplaceTransform(2, x, y))NEWLINENEWLINENEWLINEdef test_sympy__integrals__transforms__InverseLaplaceTransform():NEWLINE from sympy.integrals.transforms import InverseLaplaceTransformNEWLINE assert _test_args(InverseLaplaceTransform(2, x, y, 0))NEWLINENEWLINENEWLINE@SKIP("abstract class")NEWLINEdef test_sympy__integrals__transforms__FourierTypeTransform():NEWLINE passNEWLINENEWLINENEWLINEdef test_sympy__integrals__transforms__InverseFourierTransform():NEWLINE from sympy.integrals.transforms import InverseFourierTransformNEWLINE assert _test_args(InverseFourierTransform(2, x, y))NEWLINENEWLINENEWLINEdef test_sympy__integrals__transforms__FourierTransform():NEWLINE from sympy.integrals.transforms import FourierTransformNEWLINE assert _test_args(FourierTransform(2, x, y))NEWLINENEWLINENEWLINE@SKIP("abstract class")NEWLINEdef test_sympy__integrals__transforms__SineCosineTypeTransform():NEWLINE passNEWLINENEWLINENEWLINEdef test_sympy__integrals__transforms__InverseSineTransform():NEWLINE from sympy.integrals.transforms import InverseSineTransformNEWLINE assert _test_args(InverseSineTransform(2, x, y))NEWLINENEWLINENEWLINEdef test_sympy__integrals__transforms__SineTransform():NEWLINE from sympy.integrals.transforms import SineTransformNEWLINE assert _test_args(SineTransform(2, x, y))NEWLINENEWLINENEWLINEdef 
test_sympy__integrals__transforms__InverseCosineTransform():NEWLINE from sympy.integrals.transforms import InverseCosineTransformNEWLINE assert _test_args(InverseCosineTransform(2, x, y))NEWLINENEWLINENEWLINEdef test_sympy__integrals__transforms__CosineTransform():NEWLINE from sympy.integrals.transforms import CosineTransformNEWLINE assert _test_args(CosineTransform(2, x, y))NEWLINENEWLINENEWLINE@SKIP("abstract class")NEWLINEdef test_sympy__integrals__transforms__HankelTypeTransform():NEWLINE passNEWLINENEWLINENEWLINEdef test_sympy__integrals__transforms__InverseHankelTransform():NEWLINE from sympy.integrals.transforms import InverseHankelTransformNEWLINE assert _test_args(InverseHankelTransform(2, x, y, 0))NEWLINENEWLINENEWLINEdef test_sympy__integrals__transforms__HankelTransform():NEWLINE from sympy.integrals.transforms import HankelTransformNEWLINE assert _test_args(HankelTransform(2, x, y, 0))NEWLINENEWLINE@XFAILNEWLINEdef test_sympy__liealgebras__cartan_type__CartanType_generator():NEWLINE from sympy.liealgebras.cartan_type import CartanType_generatorNEWLINE assert _test_args(CartanType_generator("A2"))NEWLINENEWLINE@XFAILNEWLINEdef test_sympy__liealgebras__cartan_type__Standard_Cartan():NEWLINE from sympy.liealgebras.cartan_type import Standard_CartanNEWLINE assert _test_args(Standard_Cartan("A", 2))NEWLINENEWLINE@XFAILNEWLINEdef test_sympy__liealgebras__weyl_group__WeylGroup():NEWLINE from sympy.liealgebras.weyl_group import WeylGroupNEWLINE assert _test_args(WeylGroup("B4"))NEWLINENEWLINE@XFAILNEWLINEdef test_sympy__liealgebras__root_system__RootSystem():NEWLINE from sympy.liealgebras.root_system import RootSystemNEWLINE assert _test_args(RootSystem("A2"))NEWLINENEWLINE@XFAILNEWLINEdef test_sympy__liealgebras__type_a__TypeA():NEWLINE from sympy.liealgebras.type_a import TypeANEWLINE assert _test_args(TypeA(2))NEWLINENEWLINE@XFAILNEWLINEdef test_sympy__liealgebras__type_b__TypeB():NEWLINE from sympy.liealgebras.type_b import TypeBNEWLINE assert 
_test_args(TypeB(4))NEWLINENEWLINE@XFAILNEWLINEdef test_sympy__liealgebras__type_c__TypeC():NEWLINE from sympy.liealgebras.type_c import TypeCNEWLINE assert _test_args(TypeC(4))NEWLINENEWLINE@XFAILNEWLINEdef test_sympy__liealgebras__type_d__TypeD():NEWLINE from sympy.liealgebras.type_d import TypeDNEWLINE assert _test_args(TypeD(4))NEWLINENEWLINE@XFAILNEWLINEdef test_sympy__liealgebras__type_e__TypeE():NEWLINE from sympy.liealgebras.type_e import TypeENEWLINE assert _test_args(TypeE(6))NEWLINENEWLINE@XFAILNEWLINEdef test_sympy__liealgebras__type_f__TypeF():NEWLINE from sympy.liealgebras.type_f import TypeFNEWLINE assert _test_args(TypeF(4))NEWLINENEWLINE@XFAILNEWLINEdef test_sympy__liealgebras__type_g__TypeG():NEWLINE from sympy.liealgebras.type_g import TypeGNEWLINE assert _test_args(TypeG(2))NEWLINENEWLINENEWLINEdef test_sympy__logic__boolalg__And():NEWLINE from sympy.logic.boolalg import AndNEWLINE assert _test_args(And(x, y, 2))NEWLINENEWLINENEWLINE@SKIP("abstract class")NEWLINEdef test_sympy__logic__boolalg__Boolean():NEWLINE passNEWLINENEWLINENEWLINEdef test_sympy__logic__boolalg__BooleanFunction():NEWLINE from sympy.logic.boolalg import BooleanFunctionNEWLINE assert _test_args(BooleanFunction(1, 2, 3))NEWLINENEWLINE@SKIP("abstract class")NEWLINEdef test_sympy__logic__boolalg__BooleanAtom():NEWLINE passNEWLINENEWLINEdef test_sympy__logic__boolalg__BooleanTrue():NEWLINE from sympy.logic.boolalg import trueNEWLINE assert _test_args(true)NEWLINENEWLINEdef test_sympy__logic__boolalg__BooleanFalse():NEWLINE from sympy.logic.boolalg import falseNEWLINE assert _test_args(false)NEWLINENEWLINEdef test_sympy__logic__boolalg__Equivalent():NEWLINE from sympy.logic.boolalg import EquivalentNEWLINE assert _test_args(Equivalent(x, 2))NEWLINENEWLINENEWLINEdef test_sympy__logic__boolalg__ITE():NEWLINE from sympy.logic.boolalg import ITENEWLINE assert _test_args(ITE(x, y, 2))NEWLINENEWLINENEWLINEdef test_sympy__logic__boolalg__Implies():NEWLINE from sympy.logic.boolalg import 
ImpliesNEWLINE assert _test_args(Implies(x, y))NEWLINENEWLINENEWLINEdef test_sympy__logic__boolalg__Nand():NEWLINE from sympy.logic.boolalg import NandNEWLINE assert _test_args(Nand(x, y, 2))NEWLINENEWLINENEWLINEdef test_sympy__logic__boolalg__Nor():NEWLINE from sympy.logic.boolalg import NorNEWLINE assert _test_args(Nor(x, y))NEWLINENEWLINENEWLINEdef test_sympy__logic__boolalg__Not():NEWLINE from sympy.logic.boolalg import NotNEWLINE assert _test_args(Not(x))NEWLINENEWLINENEWLINEdef test_sympy__logic__boolalg__Or():NEWLINE from sympy.logic.boolalg import OrNEWLINE assert _test_args(Or(x, y))NEWLINENEWLINENEWLINEdef test_sympy__logic__boolalg__Xor():NEWLINE from sympy.logic.boolalg import XorNEWLINE assert _test_args(Xor(x, y, 2))NEWLINENEWLINEdef test_sympy__logic__boolalg__Xnor():NEWLINE from sympy.logic.boolalg import XnorNEWLINE assert _test_args(Xnor(x, y, 2))NEWLINENEWLINENEWLINEdef test_sympy__matrices__matrices__DeferredVector():NEWLINE from sympy.matrices.matrices import DeferredVectorNEWLINE assert _test_args(DeferredVector("X"))NEWLINENEWLINENEWLINE@SKIP("abstract class")NEWLINEdef test_sympy__matrices__expressions__matexpr__MatrixBase():NEWLINE passNEWLINENEWLINENEWLINEdef test_sympy__matrices__immutable__ImmutableDenseMatrix():NEWLINE from sympy.matrices.immutable import ImmutableDenseMatrixNEWLINE m = ImmutableDenseMatrix([[1, 2], [3, 4]])NEWLINE assert _test_args(m)NEWLINE assert _test_args(Basic(*list(m)))NEWLINE m = ImmutableDenseMatrix(1, 1, [1])NEWLINE assert _test_args(m)NEWLINE assert _test_args(Basic(*list(m)))NEWLINE m = ImmutableDenseMatrix(2, 2, lambda i, j: 1)NEWLINE assert m[0, 0] is S.OneNEWLINE m = ImmutableDenseMatrix(2, 2, lambda i, j: 1/(1 + i) + 1/(1 + j))NEWLINE assert m[1, 1] is S.One # true div. 
will give 1.0 if i,j not sympifiedNEWLINE assert _test_args(m)NEWLINE assert _test_args(Basic(*list(m)))NEWLINENEWLINENEWLINEdef test_sympy__matrices__immutable__ImmutableSparseMatrix():NEWLINE from sympy.matrices.immutable import ImmutableSparseMatrixNEWLINE m = ImmutableSparseMatrix([[1, 2], [3, 4]])NEWLINE assert _test_args(m)NEWLINE assert _test_args(Basic(*list(m)))NEWLINE m = ImmutableSparseMatrix(1, 1, {(0, 0): 1})NEWLINE assert _test_args(m)NEWLINE assert _test_args(Basic(*list(m)))NEWLINE m = ImmutableSparseMatrix(1, 1, [1])NEWLINE assert _test_args(m)NEWLINE assert _test_args(Basic(*list(m)))NEWLINE m = ImmutableSparseMatrix(2, 2, lambda i, j: 1)NEWLINE assert m[0, 0] is S.OneNEWLINE m = ImmutableSparseMatrix(2, 2, lambda i, j: 1/(1 + i) + 1/(1 + j))NEWLINE assert m[1, 1] is S.One # true div. will give 1.0 if i,j not sympifiedNEWLINE assert _test_args(m)NEWLINE assert _test_args(Basic(*list(m)))NEWLINENEWLINENEWLINEdef test_sympy__matrices__expressions__slice__MatrixSlice():NEWLINE from sympy.matrices.expressions.slice import MatrixSliceNEWLINE from sympy.matrices.expressions import MatrixSymbolNEWLINE X = MatrixSymbol('X', 4, 4)NEWLINE assert _test_args(MatrixSlice(X, (0, 2), (0, 2)))NEWLINENEWLINENEWLINEdef test_sympy__matrices__expressions__blockmatrix__BlockDiagMatrix():NEWLINE from sympy.matrices.expressions.blockmatrix import BlockDiagMatrixNEWLINE from sympy.matrices.expressions import MatrixSymbolNEWLINE X = MatrixSymbol('X', x, x)NEWLINE Y = MatrixSymbol('Y', y, y)NEWLINE assert _test_args(BlockDiagMatrix(X, Y))NEWLINENEWLINENEWLINEdef test_sympy__matrices__expressions__blockmatrix__BlockMatrix():NEWLINE from sympy.matrices.expressions.blockmatrix import BlockMatrixNEWLINE from sympy.matrices.expressions import MatrixSymbol, ZeroMatrixNEWLINE X = MatrixSymbol('X', x, x)NEWLINE Y = MatrixSymbol('Y', y, y)NEWLINE Z = MatrixSymbol('Z', x, y)NEWLINE O = ZeroMatrix(y, x)NEWLINE assert _test_args(BlockMatrix([[X, Z], [O, Y]]))NEWLINENEWLINENEWLINEdef 
test_sympy__matrices__expressions__inverse__Inverse():NEWLINE from sympy.matrices.expressions.inverse import InverseNEWLINE from sympy.matrices.expressions import MatrixSymbolNEWLINE assert _test_args(Inverse(MatrixSymbol('A', 3, 3)))NEWLINENEWLINENEWLINEdef test_sympy__matrices__expressions__matadd__MatAdd():NEWLINE from sympy.matrices.expressions.matadd import MatAddNEWLINE from sympy.matrices.expressions import MatrixSymbolNEWLINE X = MatrixSymbol('X', x, y)NEWLINE Y = MatrixSymbol('Y', x, y)NEWLINE assert _test_args(MatAdd(X, Y))NEWLINENEWLINENEWLINEdef test_sympy__matrices__expressions__matexpr__Identity():NEWLINE from sympy.matrices.expressions.matexpr import IdentityNEWLINE assert _test_args(Identity(3))NEWLINENEWLINENEWLINE@SKIP("abstract class")NEWLINEdef test_sympy__matrices__expressions__matexpr__MatrixExpr():NEWLINE passNEWLINENEWLINEdef test_sympy__matrices__expressions__matexpr__MatrixElement():NEWLINE from sympy.matrices.expressions.matexpr import MatrixSymbol, MatrixElementNEWLINE from sympy import SNEWLINE assert _test_args(MatrixElement(MatrixSymbol('A', 3, 5), S(2), S(3)))NEWLINENEWLINE@XFAILNEWLINEdef test_sympy__matrices__expressions__matexpr__MatrixSymbol():NEWLINE from sympy.matrices.expressions.matexpr import MatrixSymbolNEWLINE assert _test_args(MatrixSymbol('A', 3, 5))NEWLINENEWLINENEWLINEdef test_sympy__matrices__expressions__matexpr__ZeroMatrix():NEWLINE from sympy.matrices.expressions.matexpr import ZeroMatrixNEWLINE assert _test_args(ZeroMatrix(3, 5))NEWLINENEWLINENEWLINEdef test_sympy__matrices__expressions__matmul__MatMul():NEWLINE from sympy.matrices.expressions.matmul import MatMulNEWLINE from sympy.matrices.expressions import MatrixSymbolNEWLINE X = MatrixSymbol('X', x, y)NEWLINE Y = MatrixSymbol('Y', y, x)NEWLINE assert _test_args(MatMul(X, Y))NEWLINENEWLINENEWLINEdef test_sympy__matrices__expressions__dotproduct__DotProduct():NEWLINE from sympy.matrices.expressions.dotproduct import DotProductNEWLINE from 
sympy.matrices.expressions import MatrixSymbolNEWLINE X = MatrixSymbol('X', x, 1)NEWLINE Y = MatrixSymbol('Y', x, 1)NEWLINE assert _test_args(DotProduct(X, Y))NEWLINENEWLINEdef test_sympy__matrices__expressions__diagonal__DiagonalMatrix():NEWLINE from sympy.matrices.expressions.diagonal import DiagonalMatrixNEWLINE from sympy.matrices.expressions import MatrixSymbolNEWLINE x = MatrixSymbol('x', 10, 1)NEWLINE assert _test_args(DiagonalMatrix(x))NEWLINENEWLINEdef test_sympy__matrices__expressions__diagonal__DiagonalOf():NEWLINE from sympy.matrices.expressions.diagonal import DiagonalOfNEWLINE from sympy.matrices.expressions import MatrixSymbolNEWLINE X = MatrixSymbol('x', 10, 10)NEWLINE assert _test_args(DiagonalOf(X))NEWLINENEWLINEdef test_sympy__matrices__expressions__hadamard__HadamardProduct():NEWLINE from sympy.matrices.expressions.hadamard import HadamardProductNEWLINE from sympy.matrices.expressions import MatrixSymbolNEWLINE X = MatrixSymbol('X', x, y)NEWLINE Y = MatrixSymbol('Y', x, y)NEWLINE assert _test_args(HadamardProduct(X, Y))NEWLINENEWLINENEWLINEdef test_sympy__matrices__expressions__matpow__MatPow():NEWLINE from sympy.matrices.expressions.matpow import MatPowNEWLINE from sympy.matrices.expressions import MatrixSymbolNEWLINE X = MatrixSymbol('X', x, x)NEWLINE assert _test_args(MatPow(X, 2))NEWLINENEWLINENEWLINEdef test_sympy__matrices__expressions__transpose__Transpose():NEWLINE from sympy.matrices.expressions.transpose import TransposeNEWLINE from sympy.matrices.expressions import MatrixSymbolNEWLINE assert _test_args(Transpose(MatrixSymbol('A', 3, 5)))NEWLINENEWLINENEWLINEdef test_sympy__matrices__expressions__adjoint__Adjoint():NEWLINE from sympy.matrices.expressions.adjoint import AdjointNEWLINE from sympy.matrices.expressions import MatrixSymbolNEWLINE assert _test_args(Adjoint(MatrixSymbol('A', 3, 5)))NEWLINENEWLINENEWLINEdef test_sympy__matrices__expressions__trace__Trace():NEWLINE from sympy.matrices.expressions.trace import TraceNEWLINE from 
sympy.matrices.expressions import MatrixSymbolNEWLINE assert _test_args(Trace(MatrixSymbol('A', 3, 3)))NEWLINENEWLINEdef test_sympy__matrices__expressions__determinant__Determinant():NEWLINE from sympy.matrices.expressions.determinant import DeterminantNEWLINE from sympy.matrices.expressions import MatrixSymbolNEWLINE assert _test_args(Determinant(MatrixSymbol('A', 3, 3)))NEWLINENEWLINENEWLINEdef test_sympy__matrices__expressions__funcmatrix__FunctionMatrix():NEWLINE from sympy.matrices.expressions.funcmatrix import FunctionMatrixNEWLINE from sympy import symbolsNEWLINE i, j = symbols('i,j')NEWLINE assert _test_args(FunctionMatrix(3, 3, Lambda((i, j), i - j) ))NEWLINENEWLINEdef test_sympy__matrices__expressions__fourier__DFT():NEWLINE from sympy.matrices.expressions.fourier import DFTNEWLINE from sympy import SNEWLINE assert _test_args(DFT(S(2)))NEWLINENEWLINEdef test_sympy__matrices__expressions__fourier__IDFT():NEWLINE from sympy.matrices.expressions.fourier import IDFTNEWLINE from sympy import SNEWLINE assert _test_args(IDFT(S(2)))NEWLINENEWLINEfrom sympy.matrices.expressions import MatrixSymbolNEWLINEX = MatrixSymbol('X', 10, 10)NEWLINENEWLINEdef test_sympy__matrices__expressions__factorizations__LofLU():NEWLINE from sympy.matrices.expressions.factorizations import LofLUNEWLINE assert _test_args(LofLU(X))NEWLINENEWLINEdef test_sympy__matrices__expressions__factorizations__UofLU():NEWLINE from sympy.matrices.expressions.factorizations import UofLUNEWLINE assert _test_args(UofLU(X))NEWLINENEWLINEdef test_sympy__matrices__expressions__factorizations__QofQR():NEWLINE from sympy.matrices.expressions.factorizations import QofQRNEWLINE assert _test_args(QofQR(X))NEWLINENEWLINEdef test_sympy__matrices__expressions__factorizations__RofQR():NEWLINE from sympy.matrices.expressions.factorizations import RofQRNEWLINE assert _test_args(RofQR(X))NEWLINENEWLINEdef test_sympy__matrices__expressions__factorizations__LofCholesky():NEWLINE from 
sympy.matrices.expressions.factorizations import LofCholeskyNEWLINE assert _test_args(LofCholesky(X))NEWLINENEWLINEdef test_sympy__matrices__expressions__factorizations__UofCholesky():NEWLINE from sympy.matrices.expressions.factorizations import UofCholeskyNEWLINE assert _test_args(UofCholesky(X))NEWLINENEWLINEdef test_sympy__matrices__expressions__factorizations__EigenVectors():NEWLINE from sympy.matrices.expressions.factorizations import EigenVectorsNEWLINE assert _test_args(EigenVectors(X))NEWLINENEWLINEdef test_sympy__matrices__expressions__factorizations__EigenValues():NEWLINE from sympy.matrices.expressions.factorizations import EigenValuesNEWLINE assert _test_args(EigenValues(X))NEWLINENEWLINEdef test_sympy__matrices__expressions__factorizations__UofSVD():NEWLINE from sympy.matrices.expressions.factorizations import UofSVDNEWLINE assert _test_args(UofSVD(X))NEWLINENEWLINEdef test_sympy__matrices__expressions__factorizations__VofSVD():NEWLINE from sympy.matrices.expressions.factorizations import VofSVDNEWLINE assert _test_args(VofSVD(X))NEWLINENEWLINEdef test_sympy__matrices__expressions__factorizations__SofSVD():NEWLINE from sympy.matrices.expressions.factorizations import SofSVDNEWLINE assert _test_args(SofSVD(X))NEWLINENEWLINE@SKIP("abstract class")NEWLINEdef test_sympy__matrices__expressions__factorizations__Factorization():NEWLINE passNEWLINENEWLINEdef test_sympy__physics__vector__frame__CoordinateSym():NEWLINE from sympy.physics.vector import CoordinateSymNEWLINE from sympy.physics.vector import ReferenceFrameNEWLINE assert _test_args(CoordinateSym('R_x', ReferenceFrame('R'), 0))NEWLINENEWLINENEWLINEdef test_sympy__physics__paulialgebra__Pauli():NEWLINE from sympy.physics.paulialgebra import PauliNEWLINE assert _test_args(Pauli(1))NEWLINENEWLINENEWLINEdef test_sympy__physics__quantum__anticommutator__AntiCommutator():NEWLINE from sympy.physics.quantum.anticommutator import AntiCommutatorNEWLINE assert _test_args(AntiCommutator(x, 
y))NEWLINENEWLINENEWLINEdef test_sympy__physics__quantum__cartesian__PositionBra3D():NEWLINE from sympy.physics.quantum.cartesian import PositionBra3DNEWLINE assert _test_args(PositionBra3D(x, y, z))NEWLINENEWLINENEWLINEdef test_sympy__physics__quantum__cartesian__PositionKet3D():NEWLINE from sympy.physics.quantum.cartesian import PositionKet3DNEWLINE assert _test_args(PositionKet3D(x, y, z))NEWLINENEWLINENEWLINEdef test_sympy__physics__quantum__cartesian__PositionState3D():NEWLINE from sympy.physics.quantum.cartesian import PositionState3DNEWLINE assert _test_args(PositionState3D(x, y, z))NEWLINENEWLINENEWLINEdef test_sympy__physics__quantum__cartesian__PxBra():NEWLINE from sympy.physics.quantum.cartesian import PxBraNEWLINE assert _test_args(PxBra(x, y, z))NEWLINENEWLINENEWLINEdef test_sympy__physics__quantum__cartesian__PxKet():NEWLINE from sympy.physics.quantum.cartesian import PxKetNEWLINE assert _test_args(PxKet(x, y, z))NEWLINENEWLINENEWLINEdef test_sympy__physics__quantum__cartesian__PxOp():NEWLINE from sympy.physics.quantum.cartesian import PxOpNEWLINE assert _test_args(PxOp(x, y, z))NEWLINENEWLINENEWLINEdef test_sympy__physics__quantum__cartesian__XBra():NEWLINE from sympy.physics.quantum.cartesian import XBraNEWLINE assert _test_args(XBra(x))NEWLINENEWLINENEWLINEdef test_sympy__physics__quantum__cartesian__XKet():NEWLINE from sympy.physics.quantum.cartesian import XKetNEWLINE assert _test_args(XKet(x))NEWLINENEWLINENEWLINEdef test_sympy__physics__quantum__cartesian__XOp():NEWLINE from sympy.physics.quantum.cartesian import XOpNEWLINE assert _test_args(XOp(x))NEWLINENEWLINENEWLINEdef test_sympy__physics__quantum__cartesian__YOp():NEWLINE from sympy.physics.quantum.cartesian import YOpNEWLINE assert _test_args(YOp(x))NEWLINENEWLINENEWLINEdef test_sympy__physics__quantum__cartesian__ZOp():NEWLINE from sympy.physics.quantum.cartesian import ZOpNEWLINE assert _test_args(ZOp(x))NEWLINENEWLINENEWLINEdef test_sympy__physics__quantum__cg__CG():NEWLINE from 
sympy.physics.quantum.cg import CGNEWLINE from sympy import SNEWLINE assert _test_args(CG(S(3)/2, S(3)/2, S(1)/2, -S(1)/2, 1, 1))NEWLINENEWLINENEWLINEdef test_sympy__physics__quantum__cg__Wigner3j():NEWLINE from sympy.physics.quantum.cg import Wigner3jNEWLINE assert _test_args(Wigner3j(6, 0, 4, 0, 2, 0))NEWLINENEWLINENEWLINEdef test_sympy__physics__quantum__cg__Wigner6j():NEWLINE from sympy.physics.quantum.cg import Wigner6jNEWLINE assert _test_args(Wigner6j(1, 2, 3, 2, 1, 2))NEWLINENEWLINENEWLINEdef test_sympy__physics__quantum__cg__Wigner9j():NEWLINE from sympy.physics.quantum.cg import Wigner9jNEWLINE assert _test_args(Wigner9j(2, 1, 1, S(3)/2, S(1)/2, 1, S(1)/2, S(1)/2, 0))NEWLINENEWLINEdef test_sympy__physics__quantum__circuitplot__Mz():NEWLINE from sympy.physics.quantum.circuitplot import MzNEWLINE assert _test_args(Mz(0))NEWLINENEWLINEdef test_sympy__physics__quantum__circuitplot__Mx():NEWLINE from sympy.physics.quantum.circuitplot import MxNEWLINE assert _test_args(Mx(0))NEWLINENEWLINEdef test_sympy__physics__quantum__commutator__Commutator():NEWLINE from sympy.physics.quantum.commutator import CommutatorNEWLINE A, B = symbols('A,B', commutative=False)NEWLINE assert _test_args(Commutator(A, B))NEWLINENEWLINENEWLINEdef test_sympy__physics__quantum__constants__HBar():NEWLINE from sympy.physics.quantum.constants import HBarNEWLINE assert _test_args(HBar())NEWLINENEWLINENEWLINEdef test_sympy__physics__quantum__dagger__Dagger():NEWLINE from sympy.physics.quantum.dagger import DaggerNEWLINE from sympy.physics.quantum.state import KetNEWLINE assert _test_args(Dagger(Dagger(Ket('psi'))))NEWLINENEWLINENEWLINEdef test_sympy__physics__quantum__gate__CGate():NEWLINE from sympy.physics.quantum.gate import CGate, GateNEWLINE assert _test_args(CGate((0, 1), Gate(2)))NEWLINENEWLINENEWLINEdef test_sympy__physics__quantum__gate__CGateS():NEWLINE from sympy.physics.quantum.gate import CGateS, GateNEWLINE assert _test_args(CGateS((0, 1), Gate(2)))NEWLINENEWLINENEWLINEdef 
test_sympy__physics__quantum__gate__CNotGate():NEWLINE from sympy.physics.quantum.gate import CNotGateNEWLINE assert _test_args(CNotGate(0, 1))NEWLINENEWLINENEWLINEdef test_sympy__physics__quantum__gate__Gate():NEWLINE from sympy.physics.quantum.gate import GateNEWLINE assert _test_args(Gate(0))NEWLINENEWLINENEWLINEdef test_sympy__physics__quantum__gate__HadamardGate():NEWLINE from sympy.physics.quantum.gate import HadamardGateNEWLINE assert _test_args(HadamardGate(0))NEWLINENEWLINENEWLINEdef test_sympy__physics__quantum__gate__IdentityGate():NEWLINE from sympy.physics.quantum.gate import IdentityGateNEWLINE assert _test_args(IdentityGate(0))NEWLINENEWLINENEWLINEdef test_sympy__physics__quantum__gate__OneQubitGate():NEWLINE from sympy.physics.quantum.gate import OneQubitGateNEWLINE assert _test_args(OneQubitGate(0))NEWLINENEWLINENEWLINEdef test_sympy__physics__quantum__gate__PhaseGate():NEWLINE from sympy.physics.quantum.gate import PhaseGateNEWLINE assert _test_args(PhaseGate(0))NEWLINENEWLINENEWLINEdef test_sympy__physics__quantum__gate__SwapGate():NEWLINE from sympy.physics.quantum.gate import SwapGateNEWLINE assert _test_args(SwapGate(0, 1))NEWLINENEWLINENEWLINEdef test_sympy__physics__quantum__gate__TGate():NEWLINE from sympy.physics.quantum.gate import TGateNEWLINE assert _test_args(TGate(0))NEWLINENEWLINENEWLINEdef test_sympy__physics__quantum__gate__TwoQubitGate():NEWLINE from sympy.physics.quantum.gate import TwoQubitGateNEWLINE assert _test_args(TwoQubitGate(0))NEWLINENEWLINENEWLINEdef test_sympy__physics__quantum__gate__UGate():NEWLINE from sympy.physics.quantum.gate import UGateNEWLINE from sympy.matrices.immutable import ImmutableDenseMatrixNEWLINE from sympy import Integer, TupleNEWLINE assert _test_args(NEWLINE UGate(Tuple(Integer(1)), ImmutableDenseMatrix([[1, 0], [0, 2]])))NEWLINENEWLINENEWLINEdef test_sympy__physics__quantum__gate__XGate():NEWLINE from sympy.physics.quantum.gate import XGateNEWLINE assert 
_test_args(XGate(0))NEWLINENEWLINENEWLINEdef test_sympy__physics__quantum__gate__YGate():NEWLINE from sympy.physics.quantum.gate import YGateNEWLINE assert _test_args(YGate(0))NEWLINENEWLINENEWLINEdef test_sympy__physics__quantum__gate__ZGate():NEWLINE from sympy.physics.quantum.gate import ZGateNEWLINE assert _test_args(ZGate(0))NEWLINENEWLINENEWLINE@SKIP("TODO: sympy.physics")NEWLINEdef test_sympy__physics__quantum__grover__OracleGate():NEWLINE from sympy.physics.quantum.grover import OracleGateNEWLINE assert _test_args(OracleGate())NEWLINENEWLINENEWLINEdef test_sympy__physics__quantum__grover__WGate():NEWLINE from sympy.physics.quantum.grover import WGateNEWLINE assert _test_args(WGate(1))NEWLINENEWLINENEWLINEdef test_sympy__physics__quantum__hilbert__ComplexSpace():NEWLINE from sympy.physics.quantum.hilbert import ComplexSpaceNEWLINE assert _test_args(ComplexSpace(x))NEWLINENEWLINENEWLINEdef test_sympy__physics__quantum__hilbert__DirectSumHilbertSpace():NEWLINE from sympy.physics.quantum.hilbert import DirectSumHilbertSpace, ComplexSpace, FockSpaceNEWLINE c = ComplexSpace(2)NEWLINE f = FockSpace()NEWLINE assert _test_args(DirectSumHilbertSpace(c, f))NEWLINENEWLINENEWLINEdef test_sympy__physics__quantum__hilbert__FockSpace():NEWLINE from sympy.physics.quantum.hilbert import FockSpaceNEWLINE assert _test_args(FockSpace())NEWLINENEWLINENEWLINEdef test_sympy__physics__quantum__hilbert__HilbertSpace():NEWLINE from sympy.physics.quantum.hilbert import HilbertSpaceNEWLINE assert _test_args(HilbertSpace())NEWLINENEWLINENEWLINEdef test_sympy__physics__quantum__hilbert__L2():NEWLINE from sympy.physics.quantum.hilbert import L2NEWLINE from sympy import oo, IntervalNEWLINE assert _test_args(L2(Interval(0, oo)))NEWLINENEWLINENEWLINEdef test_sympy__physics__quantum__hilbert__TensorPowerHilbertSpace():NEWLINE from sympy.physics.quantum.hilbert import TensorPowerHilbertSpace, FockSpaceNEWLINE f = FockSpace()NEWLINE assert _test_args(TensorPowerHilbertSpace(f, 
2))NEWLINENEWLINENEWLINEdef test_sympy__physics__quantum__hilbert__TensorProductHilbertSpace():NEWLINE from sympy.physics.quantum.hilbert import TensorProductHilbertSpace, FockSpace, ComplexSpaceNEWLINE c = ComplexSpace(2)NEWLINE f = FockSpace()NEWLINE assert _test_args(TensorProductHilbertSpace(f, c))NEWLINENEWLINENEWLINEdef test_sympy__physics__quantum__innerproduct__InnerProduct():NEWLINE from sympy.physics.quantum import Bra, Ket, InnerProductNEWLINE b = Bra('b')NEWLINE k = Ket('k')NEWLINE assert _test_args(InnerProduct(b, k))NEWLINENEWLINENEWLINEdef test_sympy__physics__quantum__operator__DifferentialOperator():NEWLINE from sympy.physics.quantum.operator import DifferentialOperatorNEWLINE from sympy import Derivative, FunctionNEWLINE f = Function('f')NEWLINE assert _test_args(DifferentialOperator(1/x*Derivative(f(x), x), f(x)))NEWLINENEWLINENEWLINEdef test_sympy__physics__quantum__operator__HermitianOperator():NEWLINE from sympy.physics.quantum.operator import HermitianOperatorNEWLINE assert _test_args(HermitianOperator('H'))NEWLINENEWLINENEWLINEdef test_sympy__physics__quantum__operator__IdentityOperator():NEWLINE from sympy.physics.quantum.operator import IdentityOperatorNEWLINE assert _test_args(IdentityOperator(5))NEWLINENEWLINENEWLINEdef test_sympy__physics__quantum__operator__Operator():NEWLINE from sympy.physics.quantum.operator import OperatorNEWLINE assert _test_args(Operator('A'))NEWLINENEWLINENEWLINEdef test_sympy__physics__quantum__operator__OuterProduct():NEWLINE from sympy.physics.quantum.operator import OuterProductNEWLINE from sympy.physics.quantum import Ket, BraNEWLINE b = Bra('b')NEWLINE k = Ket('k')NEWLINE assert _test_args(OuterProduct(k, b))NEWLINENEWLINENEWLINEdef test_sympy__physics__quantum__operator__UnitaryOperator():NEWLINE from sympy.physics.quantum.operator import UnitaryOperatorNEWLINE assert _test_args(UnitaryOperator('U'))NEWLINENEWLINENEWLINEdef test_sympy__physics__quantum__piab__PIABBra():NEWLINE from 
sympy.physics.quantum.piab import PIABBraNEWLINE assert _test_args(PIABBra('B'))NEWLINENEWLINENEWLINEdef test_sympy__physics__quantum__boson__BosonOp():NEWLINE from sympy.physics.quantum.boson import BosonOpNEWLINE assert _test_args(BosonOp('a'))NEWLINE assert _test_args(BosonOp('a', False))NEWLINENEWLINENEWLINEdef test_sympy__physics__quantum__boson__BosonFockKet():NEWLINE from sympy.physics.quantum.boson import BosonFockKetNEWLINE assert _test_args(BosonFockKet(1))NEWLINENEWLINENEWLINEdef test_sympy__physics__quantum__boson__BosonFockBra():NEWLINE from sympy.physics.quantum.boson import BosonFockBraNEWLINE assert _test_args(BosonFockBra(1))NEWLINENEWLINENEWLINEdef test_sympy__physics__quantum__boson__BosonCoherentKet():NEWLINE from sympy.physics.quantum.boson import BosonCoherentKetNEWLINE assert _test_args(BosonCoherentKet(1))NEWLINENEWLINENEWLINEdef test_sympy__physics__quantum__boson__BosonCoherentBra():NEWLINE from sympy.physics.quantum.boson import BosonCoherentBraNEWLINE assert _test_args(BosonCoherentBra(1))NEWLINENEWLINENEWLINEdef test_sympy__physics__quantum__fermion__FermionOp():NEWLINE from sympy.physics.quantum.fermion import FermionOpNEWLINE assert _test_args(FermionOp('c'))NEWLINE assert _test_args(FermionOp('c', False))NEWLINENEWLINENEWLINEdef test_sympy__physics__quantum__fermion__FermionFockKet():NEWLINE from sympy.physics.quantum.fermion import FermionFockKetNEWLINE assert _test_args(FermionFockKet(1))NEWLINENEWLINENEWLINEdef test_sympy__physics__quantum__fermion__FermionFockBra():NEWLINE from sympy.physics.quantum.fermion import FermionFockBraNEWLINE assert _test_args(FermionFockBra(1))NEWLINENEWLINENEWLINEdef test_sympy__physics__quantum__pauli__SigmaOpBase():NEWLINE from sympy.physics.quantum.pauli import SigmaOpBaseNEWLINE assert _test_args(SigmaOpBase())NEWLINENEWLINENEWLINEdef test_sympy__physics__quantum__pauli__SigmaX():NEWLINE from sympy.physics.quantum.pauli import SigmaXNEWLINE assert _test_args(SigmaX())NEWLINENEWLINENEWLINEdef 
test_sympy__physics__quantum__pauli__SigmaY():NEWLINE from sympy.physics.quantum.pauli import SigmaYNEWLINE assert _test_args(SigmaY())NEWLINENEWLINENEWLINEdef test_sympy__physics__quantum__pauli__SigmaZ():NEWLINE from sympy.physics.quantum.pauli import SigmaZNEWLINE assert _test_args(SigmaZ())NEWLINENEWLINENEWLINEdef test_sympy__physics__quantum__pauli__SigmaMinus():NEWLINE from sympy.physics.quantum.pauli import SigmaMinusNEWLINE assert _test_args(SigmaMinus())NEWLINENEWLINENEWLINEdef test_sympy__physics__quantum__pauli__SigmaPlus():NEWLINE from sympy.physics.quantum.pauli import SigmaPlusNEWLINE assert _test_args(SigmaPlus())NEWLINENEWLINENEWLINEdef test_sympy__physics__quantum__pauli__SigmaZKet():NEWLINE from sympy.physics.quantum.pauli import SigmaZKetNEWLINE assert _test_args(SigmaZKet(0))NEWLINENEWLINENEWLINEdef test_sympy__physics__quantum__pauli__SigmaZBra():NEWLINE from sympy.physics.quantum.pauli import SigmaZBraNEWLINE assert _test_args(SigmaZBra(0))NEWLINENEWLINENEWLINEdef test_sympy__physics__quantum__piab__PIABHamiltonian():NEWLINE from sympy.physics.quantum.piab import PIABHamiltonianNEWLINE assert _test_args(PIABHamiltonian('P'))NEWLINENEWLINENEWLINEdef test_sympy__physics__quantum__piab__PIABKet():NEWLINE from sympy.physics.quantum.piab import PIABKetNEWLINE assert _test_args(PIABKet('K'))NEWLINENEWLINENEWLINEdef test_sympy__physics__quantum__qexpr__QExpr():NEWLINE from sympy.physics.quantum.qexpr import QExprNEWLINE assert _test_args(QExpr(0))NEWLINENEWLINENEWLINEdef test_sympy__physics__quantum__qft__Fourier():NEWLINE from sympy.physics.quantum.qft import FourierNEWLINE assert _test_args(Fourier(0, 1))NEWLINENEWLINENEWLINEdef test_sympy__physics__quantum__qft__IQFT():NEWLINE from sympy.physics.quantum.qft import IQFTNEWLINE assert _test_args(IQFT(0, 1))NEWLINENEWLINENEWLINEdef test_sympy__physics__quantum__qft__QFT():NEWLINE from sympy.physics.quantum.qft import QFTNEWLINE assert _test_args(QFT(0, 1))NEWLINENEWLINENEWLINEdef 
test_sympy__physics__quantum__qft__RkGate():NEWLINE from sympy.physics.quantum.qft import RkGateNEWLINE assert _test_args(RkGate(0, 1))NEWLINENEWLINENEWLINEdef test_sympy__physics__quantum__qubit__IntQubit():NEWLINE from sympy.physics.quantum.qubit import IntQubitNEWLINE assert _test_args(IntQubit(0))NEWLINENEWLINENEWLINEdef test_sympy__physics__quantum__qubit__IntQubitBra():NEWLINE from sympy.physics.quantum.qubit import IntQubitBraNEWLINE assert _test_args(IntQubitBra(0))NEWLINENEWLINENEWLINEdef test_sympy__physics__quantum__qubit__IntQubitState():NEWLINE from sympy.physics.quantum.qubit import IntQubitState, QubitStateNEWLINE assert _test_args(IntQubitState(QubitState(0, 1)))NEWLINENEWLINENEWLINEdef test_sympy__physics__quantum__qubit__Qubit():NEWLINE from sympy.physics.quantum.qubit import QubitNEWLINE assert _test_args(Qubit(0, 0, 0))NEWLINENEWLINENEWLINEdef test_sympy__physics__quantum__qubit__QubitBra():NEWLINE from sympy.physics.quantum.qubit import QubitBraNEWLINE assert _test_args(QubitBra('1', 0))NEWLINENEWLINENEWLINEdef test_sympy__physics__quantum__qubit__QubitState():NEWLINE from sympy.physics.quantum.qubit import QubitStateNEWLINE assert _test_args(QubitState(0, 1))NEWLINENEWLINENEWLINEdef test_sympy__physics__quantum__density__Density():NEWLINE from sympy.physics.quantum.density import DensityNEWLINE from sympy.physics.quantum.state import KetNEWLINE assert _test_args(Density([Ket(0), 0.5], [Ket(1), 0.5]))NEWLINENEWLINENEWLINE@SKIP("TODO: sympy.physics.quantum.shor: Cmod Not Implemented")NEWLINEdef test_sympy__physics__quantum__shor__CMod():NEWLINE from sympy.physics.quantum.shor import CModNEWLINE assert _test_args(CMod())NEWLINENEWLINENEWLINEdef test_sympy__physics__quantum__spin__CoupledSpinState():NEWLINE from sympy.physics.quantum.spin import CoupledSpinStateNEWLINE assert _test_args(CoupledSpinState(1, 0, (1, 1)))NEWLINE assert _test_args(CoupledSpinState(1, 0, (1, S(1)/2, S(1)/2)))NEWLINE assert _test_args(CoupledSpinState(NEWLINE 1, 0, (1, 
S(1)/2, S(1)/2), ((2, 3, S(1)/2), (1, 2, 1)) ))NEWLINE j, m, j1, j2, j3, j12, x = symbols('j m j1:4 j12 x')NEWLINE assert CoupledSpinState(NEWLINE j, m, (j1, j2, j3)).subs(j2, x) == CoupledSpinState(j, m, (j1, x, j3))NEWLINE assert CoupledSpinState(j, m, (j1, j2, j3), ((1, 3, j12), (1, 2, j)) ).subs(j12, x) == \NEWLINE CoupledSpinState(j, m, (j1, j2, j3), ((1, 3, x), (1, 2, j)) )NEWLINENEWLINENEWLINEdef test_sympy__physics__quantum__spin__J2Op():NEWLINE from sympy.physics.quantum.spin import J2OpNEWLINE assert _test_args(J2Op('J'))NEWLINENEWLINENEWLINEdef test_sympy__physics__quantum__spin__JminusOp():NEWLINE from sympy.physics.quantum.spin import JminusOpNEWLINE assert _test_args(JminusOp('J'))NEWLINENEWLINENEWLINEdef test_sympy__physics__quantum__spin__JplusOp():NEWLINE from sympy.physics.quantum.spin import JplusOpNEWLINE assert _test_args(JplusOp('J'))NEWLINENEWLINENEWLINEdef test_sympy__physics__quantum__spin__JxBra():NEWLINE from sympy.physics.quantum.spin import JxBraNEWLINE assert _test_args(JxBra(1, 0))NEWLINENEWLINENEWLINEdef test_sympy__physics__quantum__spin__JxBraCoupled():NEWLINE from sympy.physics.quantum.spin import JxBraCoupledNEWLINE assert _test_args(JxBraCoupled(1, 0, (1, 1)))NEWLINENEWLINENEWLINEdef test_sympy__physics__quantum__spin__JxKet():NEWLINE from sympy.physics.quantum.spin import JxKetNEWLINE assert _test_args(JxKet(1, 0))NEWLINENEWLINENEWLINEdef test_sympy__physics__quantum__spin__JxKetCoupled():NEWLINE from sympy.physics.quantum.spin import JxKetCoupledNEWLINE assert _test_args(JxKetCoupled(1, 0, (1, 1)))NEWLINENEWLINENEWLINEdef test_sympy__physics__quantum__spin__JxOp():NEWLINE from sympy.physics.quantum.spin import JxOpNEWLINE assert _test_args(JxOp('J'))NEWLINENEWLINENEWLINEdef test_sympy__physics__quantum__spin__JyBra():NEWLINE from sympy.physics.quantum.spin import JyBraNEWLINE assert _test_args(JyBra(1, 0))NEWLINENEWLINENEWLINEdef test_sympy__physics__quantum__spin__JyBraCoupled():NEWLINE from sympy.physics.quantum.spin import 
JyBraCoupledNEWLINE assert _test_args(JyBraCoupled(1, 0, (1, 1)))NEWLINENEWLINENEWLINEdef test_sympy__physics__quantum__spin__JyKet():NEWLINE from sympy.physics.quantum.spin import JyKetNEWLINE assert _test_args(JyKet(1, 0))NEWLINENEWLINENEWLINEdef test_sympy__physics__quantum__spin__JyKetCoupled():NEWLINE from sympy.physics.quantum.spin import JyKetCoupledNEWLINE assert _test_args(JyKetCoupled(1, 0, (1, 1)))NEWLINENEWLINENEWLINEdef test_sympy__physics__quantum__spin__JyOp():NEWLINE from sympy.physics.quantum.spin import JyOpNEWLINE assert _test_args(JyOp('J'))NEWLINENEWLINENEWLINEdef test_sympy__physics__quantum__spin__JzBra():NEWLINE from sympy.physics.quantum.spin import JzBraNEWLINE assert _test_args(JzBra(1, 0))NEWLINENEWLINENEWLINEdef test_sympy__physics__quantum__spin__JzBraCoupled():NEWLINE from sympy.physics.quantum.spin import JzBraCoupledNEWLINE assert _test_args(JzBraCoupled(1, 0, (1, 1)))NEWLINENEWLINENEWLINEdef test_sympy__physics__quantum__spin__JzKet():NEWLINE from sympy.physics.quantum.spin import JzKetNEWLINE assert _test_args(JzKet(1, 0))NEWLINENEWLINENEWLINEdef test_sympy__physics__quantum__spin__JzKetCoupled():NEWLINE from sympy.physics.quantum.spin import JzKetCoupledNEWLINE assert _test_args(JzKetCoupled(1, 0, (1, 1)))NEWLINENEWLINENEWLINEdef test_sympy__physics__quantum__spin__JzOp():NEWLINE from sympy.physics.quantum.spin import JzOpNEWLINE assert _test_args(JzOp('J'))NEWLINENEWLINENEWLINEdef test_sympy__physics__quantum__spin__Rotation():NEWLINE from sympy.physics.quantum.spin import RotationNEWLINE assert _test_args(Rotation(pi, 0, pi/2))NEWLINENEWLINENEWLINEdef test_sympy__physics__quantum__spin__SpinState():NEWLINE from sympy.physics.quantum.spin import SpinStateNEWLINE assert _test_args(SpinState(1, 0))NEWLINENEWLINENEWLINEdef test_sympy__physics__quantum__spin__WignerD():NEWLINE from sympy.physics.quantum.spin import WignerDNEWLINE assert _test_args(WignerD(0, 1, 2, 3, 4, 5))NEWLINENEWLINENEWLINEdef 
test_sympy__physics__quantum__state__Bra():NEWLINE from sympy.physics.quantum.state import BraNEWLINE assert _test_args(Bra(0))NEWLINENEWLINENEWLINEdef test_sympy__physics__quantum__state__BraBase():NEWLINE from sympy.physics.quantum.state import BraBaseNEWLINE assert _test_args(BraBase(0))NEWLINENEWLINENEWLINEdef test_sympy__physics__quantum__state__Ket():NEWLINE from sympy.physics.quantum.state import KetNEWLINE assert _test_args(Ket(0))NEWLINENEWLINENEWLINEdef test_sympy__physics__quantum__state__KetBase():NEWLINE from sympy.physics.quantum.state import KetBaseNEWLINE assert _test_args(KetBase(0))NEWLINENEWLINENEWLINEdef test_sympy__physics__quantum__state__State():NEWLINE from sympy.physics.quantum.state import StateNEWLINE assert _test_args(State(0))NEWLINENEWLINENEWLINEdef test_sympy__physics__quantum__state__StateBase():NEWLINE from sympy.physics.quantum.state import StateBaseNEWLINE assert _test_args(StateBase(0))NEWLINENEWLINENEWLINEdef test_sympy__physics__quantum__state__TimeDepBra():NEWLINE from sympy.physics.quantum.state import TimeDepBraNEWLINE assert _test_args(TimeDepBra('psi', 't'))NEWLINENEWLINENEWLINEdef test_sympy__physics__quantum__state__TimeDepKet():NEWLINE from sympy.physics.quantum.state import TimeDepKetNEWLINE assert _test_args(TimeDepKet('psi', 't'))NEWLINENEWLINENEWLINEdef test_sympy__physics__quantum__state__TimeDepState():NEWLINE from sympy.physics.quantum.state import TimeDepStateNEWLINE assert _test_args(TimeDepState('psi', 't'))NEWLINENEWLINENEWLINEdef test_sympy__physics__quantum__state__Wavefunction():NEWLINE from sympy.physics.quantum.state import WavefunctionNEWLINE from sympy.functions import sinNEWLINE from sympy import PiecewiseNEWLINE n = 1NEWLINE L = 1NEWLINE g = Piecewise((0, x < 0), (0, x > L), (sqrt(2//L)*sin(n*pi*x/L), True))NEWLINE assert _test_args(Wavefunction(g, x))NEWLINENEWLINENEWLINEdef test_sympy__physics__quantum__tensorproduct__TensorProduct():NEWLINE from sympy.physics.quantum.tensorproduct import 
TensorProductNEWLINE assert _test_args(TensorProduct(x, y))NEWLINENEWLINENEWLINEdef test_sympy__physics__quantum__identitysearch__GateIdentity():NEWLINE from sympy.physics.quantum.gate import XNEWLINE from sympy.physics.quantum.identitysearch import GateIdentityNEWLINE assert _test_args(GateIdentity(X(0), X(0)))NEWLINENEWLINENEWLINEdef test_sympy__physics__quantum__sho1d__SHOOp():NEWLINE from sympy.physics.quantum.sho1d import SHOOpNEWLINE assert _test_args(SHOOp('a'))NEWLINENEWLINENEWLINEdef test_sympy__physics__quantum__sho1d__RaisingOp():NEWLINE from sympy.physics.quantum.sho1d import RaisingOpNEWLINE assert _test_args(RaisingOp('a'))NEWLINENEWLINENEWLINEdef test_sympy__physics__quantum__sho1d__LoweringOp():NEWLINE from sympy.physics.quantum.sho1d import LoweringOpNEWLINE assert _test_args(LoweringOp('a'))NEWLINENEWLINENEWLINEdef test_sympy__physics__quantum__sho1d__NumberOp():NEWLINE from sympy.physics.quantum.sho1d import NumberOpNEWLINE assert _test_args(NumberOp('N'))NEWLINENEWLINENEWLINEdef test_sympy__physics__quantum__sho1d__Hamiltonian():NEWLINE from sympy.physics.quantum.sho1d import HamiltonianNEWLINE assert _test_args(Hamiltonian('H'))NEWLINENEWLINENEWLINEdef test_sympy__physics__quantum__sho1d__SHOState():NEWLINE from sympy.physics.quantum.sho1d import SHOStateNEWLINE assert _test_args(SHOState(0))NEWLINENEWLINENEWLINEdef test_sympy__physics__quantum__sho1d__SHOKet():NEWLINE from sympy.physics.quantum.sho1d import SHOKetNEWLINE assert _test_args(SHOKet(0))NEWLINENEWLINENEWLINEdef test_sympy__physics__quantum__sho1d__SHOBra():NEWLINE from sympy.physics.quantum.sho1d import SHOBraNEWLINE assert _test_args(SHOBra(0))NEWLINENEWLINENEWLINEdef test_sympy__physics__secondquant__AnnihilateBoson():NEWLINE from sympy.physics.secondquant import AnnihilateBosonNEWLINE assert _test_args(AnnihilateBoson(0))NEWLINENEWLINENEWLINEdef test_sympy__physics__secondquant__AnnihilateFermion():NEWLINE from sympy.physics.secondquant import AnnihilateFermionNEWLINE assert 
_test_args(AnnihilateFermion(0))NEWLINENEWLINENEWLINE@SKIP("abstract class")NEWLINEdef test_sympy__physics__secondquant__Annihilator():NEWLINE passNEWLINENEWLINENEWLINEdef test_sympy__physics__secondquant__AntiSymmetricTensor():NEWLINE from sympy.physics.secondquant import AntiSymmetricTensorNEWLINE i, j = symbols('i j', below_fermi=True)NEWLINE a, b = symbols('a b', above_fermi=True)NEWLINE assert _test_args(AntiSymmetricTensor('v', (a, i), (b, j)))NEWLINENEWLINENEWLINEdef test_sympy__physics__secondquant__BosonState():NEWLINE from sympy.physics.secondquant import BosonStateNEWLINE assert _test_args(BosonState((0, 1)))NEWLINENEWLINENEWLINE@SKIP("abstract class")NEWLINEdef test_sympy__physics__secondquant__BosonicOperator():NEWLINE passNEWLINENEWLINENEWLINEdef test_sympy__physics__secondquant__Commutator():NEWLINE from sympy.physics.secondquant import CommutatorNEWLINE assert _test_args(Commutator(x, y))NEWLINENEWLINENEWLINEdef test_sympy__physics__secondquant__CreateBoson():NEWLINE from sympy.physics.secondquant import CreateBosonNEWLINE assert _test_args(CreateBoson(0))NEWLINENEWLINENEWLINEdef test_sympy__physics__secondquant__CreateFermion():NEWLINE from sympy.physics.secondquant import CreateFermionNEWLINE assert _test_args(CreateFermion(0))NEWLINENEWLINENEWLINE@SKIP("abstract class")NEWLINEdef test_sympy__physics__secondquant__Creator():NEWLINE passNEWLINENEWLINENEWLINEdef test_sympy__physics__secondquant__Dagger():NEWLINE from sympy.physics.secondquant import DaggerNEWLINE from sympy import INEWLINE assert _test_args(Dagger(2*I))NEWLINENEWLINENEWLINEdef test_sympy__physics__secondquant__FermionState():NEWLINE from sympy.physics.secondquant import FermionStateNEWLINE assert _test_args(FermionState((0, 1)))NEWLINENEWLINENEWLINEdef test_sympy__physics__secondquant__FermionicOperator():NEWLINE from sympy.physics.secondquant import FermionicOperatorNEWLINE assert _test_args(FermionicOperator(0))NEWLINENEWLINENEWLINEdef 
test_sympy__physics__secondquant__FockState():NEWLINE from sympy.physics.secondquant import FockStateNEWLINE assert _test_args(FockState((0, 1)))NEWLINENEWLINENEWLINEdef test_sympy__physics__secondquant__FockStateBosonBra():NEWLINE from sympy.physics.secondquant import FockStateBosonBraNEWLINE assert _test_args(FockStateBosonBra((0, 1)))NEWLINENEWLINENEWLINEdef test_sympy__physics__secondquant__FockStateBosonKet():NEWLINE from sympy.physics.secondquant import FockStateBosonKetNEWLINE assert _test_args(FockStateBosonKet((0, 1)))NEWLINENEWLINENEWLINEdef test_sympy__physics__secondquant__FockStateBra():NEWLINE from sympy.physics.secondquant import FockStateBraNEWLINE assert _test_args(FockStateBra((0, 1)))NEWLINENEWLINENEWLINEdef test_sympy__physics__secondquant__FockStateFermionBra():NEWLINE from sympy.physics.secondquant import FockStateFermionBraNEWLINE assert _test_args(FockStateFermionBra((0, 1)))NEWLINENEWLINENEWLINEdef test_sympy__physics__secondquant__FockStateFermionKet():NEWLINE from sympy.physics.secondquant import FockStateFermionKetNEWLINE assert _test_args(FockStateFermionKet((0, 1)))NEWLINENEWLINENEWLINEdef test_sympy__physics__secondquant__FockStateKet():NEWLINE from sympy.physics.secondquant import FockStateKetNEWLINE assert _test_args(FockStateKet((0, 1)))NEWLINENEWLINENEWLINEdef test_sympy__physics__secondquant__InnerProduct():NEWLINE from sympy.physics.secondquant import InnerProductNEWLINE from sympy.physics.secondquant import FockStateKet, FockStateBraNEWLINE assert _test_args(InnerProduct(FockStateBra((0, 1)), FockStateKet((0, 1))))NEWLINENEWLINENEWLINEdef test_sympy__physics__secondquant__NO():NEWLINE from sympy.physics.secondquant import NO, F, FdNEWLINE assert _test_args(NO(Fd(x)*F(y)))NEWLINENEWLINENEWLINEdef test_sympy__physics__secondquant__PermutationOperator():NEWLINE from sympy.physics.secondquant import PermutationOperatorNEWLINE assert _test_args(PermutationOperator(0, 1))NEWLINENEWLINENEWLINEdef 
test_sympy__physics__secondquant__SqOperator():NEWLINE from sympy.physics.secondquant import SqOperatorNEWLINE assert _test_args(SqOperator(0))NEWLINENEWLINENEWLINEdef test_sympy__physics__secondquant__TensorSymbol():NEWLINE from sympy.physics.secondquant import TensorSymbolNEWLINE assert _test_args(TensorSymbol(x))NEWLINENEWLINENEWLINEdef test_sympy__physics__units__dimensions__Dimension():NEWLINE from sympy.physics.units.dimensions import DimensionNEWLINE assert _test_args(Dimension("length", "L"))NEWLINENEWLINENEWLINEdef test_sympy__physics__units__dimensions__DimensionSystem():NEWLINE from sympy.physics.units.dimensions import DimensionSystemNEWLINE from sympy.physics.units.dimensions import length, time, velocityNEWLINE assert _test_args(DimensionSystem((length, time), (velocity,)))NEWLINENEWLINENEWLINEdef test_sympy__physics__units__quantities__Quantity():NEWLINE from sympy.physics.units.quantities import QuantityNEWLINE from sympy.physics.units import lengthNEWLINE assert _test_args(Quantity("dam", length, 10))NEWLINENEWLINENEWLINEdef test_sympy__physics__units__prefixes__Prefix():NEWLINE from sympy.physics.units.prefixes import PrefixNEWLINE assert _test_args(Prefix('kilo', 'k', 3))NEWLINENEWLINENEWLINEdef test_sympy__core__numbers__AlgebraicNumber():NEWLINE from sympy.core.numbers import AlgebraicNumberNEWLINE assert _test_args(AlgebraicNumber(sqrt(2), [1, 2, 3]))NEWLINENEWLINENEWLINEdef test_sympy__polys__polytools__GroebnerBasis():NEWLINE from sympy.polys.polytools import GroebnerBasisNEWLINE assert _test_args(GroebnerBasis([x, y, z], x, y, z))NEWLINENEWLINENEWLINEdef test_sympy__polys__polytools__Poly():NEWLINE from sympy.polys.polytools import PolyNEWLINE assert _test_args(Poly(2, x, y))NEWLINENEWLINENEWLINEdef test_sympy__polys__polytools__PurePoly():NEWLINE from sympy.polys.polytools import PurePolyNEWLINE assert _test_args(PurePoly(2, x, y))NEWLINENEWLINENEWLINE@SKIP('abstract class')NEWLINEdef test_sympy__polys__rootoftools__RootOf():NEWLINE 
passNEWLINENEWLINENEWLINEdef test_sympy__polys__rootoftools__ComplexRootOf():NEWLINE from sympy.polys.rootoftools import ComplexRootOfNEWLINE assert _test_args(ComplexRootOf(x**3 + x + 1, 0))NEWLINENEWLINENEWLINEdef test_sympy__polys__rootoftools__RootSum():NEWLINE from sympy.polys.rootoftools import RootSumNEWLINE assert _test_args(RootSum(x**3 + x + 1, sin))NEWLINENEWLINENEWLINEdef test_sympy__series__limits__Limit():NEWLINE from sympy.series.limits import LimitNEWLINE assert _test_args(Limit(x, x, 0, dir='-'))NEWLINENEWLINENEWLINEdef test_sympy__series__order__Order():NEWLINE from sympy.series.order import OrderNEWLINE assert _test_args(Order(1, x, y))NEWLINENEWLINENEWLINE@SKIP('Abstract Class')NEWLINEdef test_sympy__series__sequences__SeqBase():NEWLINE passNEWLINENEWLINENEWLINEdef test_sympy__series__sequences__EmptySequence():NEWLINE from sympy.series.sequences import EmptySequenceNEWLINE assert _test_args(EmptySequence())NEWLINENEWLINENEWLINE@SKIP('Abstract Class')NEWLINEdef test_sympy__series__sequences__SeqExpr():NEWLINE passNEWLINENEWLINENEWLINEdef test_sympy__series__sequences__SeqPer():NEWLINE from sympy.series.sequences import SeqPerNEWLINE assert _test_args(SeqPer((1, 2, 3), (0, 10)))NEWLINENEWLINENEWLINEdef test_sympy__series__sequences__SeqFormula():NEWLINE from sympy.series.sequences import SeqFormulaNEWLINE assert _test_args(SeqFormula(x**2, (0, 10)))NEWLINENEWLINENEWLINEdef test_sympy__series__sequences__SeqExprOp():NEWLINE from sympy.series.sequences import SeqExprOp, sequenceNEWLINE s1 = sequence((1, 2, 3))NEWLINE s2 = sequence(x**2)NEWLINE assert _test_args(SeqExprOp(s1, s2))NEWLINENEWLINENEWLINEdef test_sympy__series__sequences__SeqAdd():NEWLINE from sympy.series.sequences import SeqAdd, sequenceNEWLINE s1 = sequence((1, 2, 3))NEWLINE s2 = sequence(x**2)NEWLINE assert _test_args(SeqAdd(s1, s2))NEWLINENEWLINENEWLINEdef test_sympy__series__sequences__SeqMul():NEWLINE from sympy.series.sequences import SeqMul, sequenceNEWLINE s1 = sequence((1, 2, 
3))NEWLINE s2 = sequence(x**2)NEWLINE assert _test_args(SeqMul(s1, s2))NEWLINENEWLINENEWLINE@SKIP('Abstract Class')NEWLINEdef test_sympy__series__series_class__SeriesBase():NEWLINE passNEWLINENEWLINENEWLINEdef test_sympy__series__fourier__FourierSeries():NEWLINE from sympy.series.fourier import fourier_seriesNEWLINE assert _test_args(fourier_series(x, (x, -pi, pi)))NEWLINENEWLINENEWLINEdef test_sympy__series__formal__FormalPowerSeries():NEWLINE from sympy.series.formal import fpsNEWLINE assert _test_args(fps(log(1 + x), x))NEWLINENEWLINENEWLINEdef test_sympy__simplify__hyperexpand__Hyper_Function():NEWLINE from sympy.simplify.hyperexpand import Hyper_FunctionNEWLINE assert _test_args(Hyper_Function([2], [1]))NEWLINENEWLINENEWLINEdef test_sympy__simplify__hyperexpand__G_Function():NEWLINE from sympy.simplify.hyperexpand import G_FunctionNEWLINE assert _test_args(G_Function([2], [1], [], []))NEWLINENEWLINENEWLINE@SKIP("abstract class")NEWLINEdef test_sympy__tensor__array__ndim_array__ImmutableNDimArray():NEWLINE passNEWLINENEWLINENEWLINEdef test_sympy__tensor__array__dense_ndim_array__ImmutableDenseNDimArray():NEWLINE from sympy.tensor.array.dense_ndim_array import ImmutableDenseNDimArrayNEWLINE densarr = ImmutableDenseNDimArray(range(10, 34), (2, 3, 4))NEWLINE assert _test_args(densarr)NEWLINENEWLINENEWLINEdef test_sympy__tensor__array__sparse_ndim_array__ImmutableSparseNDimArray():NEWLINE from sympy.tensor.array.sparse_ndim_array import ImmutableSparseNDimArrayNEWLINE sparr = ImmutableSparseNDimArray(range(10, 34), (2, 3, 4))NEWLINE assert _test_args(sparr)NEWLINENEWLINENEWLINEdef test_sympy__tensor__indexed__Idx():NEWLINE from sympy.tensor.indexed import IdxNEWLINE assert _test_args(Idx('test'))NEWLINE assert _test_args(Idx(1, (0, 10)))NEWLINENEWLINENEWLINEdef test_sympy__tensor__indexed__Indexed():NEWLINE from sympy.tensor.indexed import Indexed, IdxNEWLINE assert _test_args(Indexed('A', Idx('i'), Idx('j')))NEWLINENEWLINENEWLINEdef 
test_sympy__tensor__indexed__IndexedBase():NEWLINE from sympy.tensor.indexed import IndexedBaseNEWLINE assert _test_args(IndexedBase('A', shape=(x, y)))NEWLINE assert _test_args(IndexedBase('A', 1))NEWLINE assert _test_args(IndexedBase('A')[0, 1])NEWLINENEWLINENEWLINEdef test_sympy__tensor__tensor__TensorIndexType():NEWLINE from sympy.tensor.tensor import TensorIndexTypeNEWLINE assert _test_args(TensorIndexType('Lorentz', metric=False))NEWLINENEWLINENEWLINEdef test_sympy__tensor__tensor__TensorSymmetry():NEWLINE from sympy.tensor.tensor import TensorSymmetry, get_symmetric_group_sgsNEWLINE assert _test_args(TensorSymmetry(get_symmetric_group_sgs(2)))NEWLINENEWLINENEWLINEdef test_sympy__tensor__tensor__TensorType():NEWLINE from sympy.tensor.tensor import TensorIndexType, TensorSymmetry, get_symmetric_group_sgs, TensorTypeNEWLINE Lorentz = TensorIndexType('Lorentz', dummy_fmt='L')NEWLINE sym = TensorSymmetry(get_symmetric_group_sgs(1))NEWLINE assert _test_args(TensorType([Lorentz], sym))NEWLINENEWLINENEWLINEdef test_sympy__tensor__tensor__TensorHead():NEWLINE from sympy.tensor.tensor import TensorIndexType, TensorSymmetry, TensorType, get_symmetric_group_sgs, TensorHeadNEWLINE Lorentz = TensorIndexType('Lorentz', dummy_fmt='L')NEWLINE sym = TensorSymmetry(get_symmetric_group_sgs(1))NEWLINE S1 = TensorType([Lorentz], sym)NEWLINE assert _test_args(TensorHead('p', S1, 0))NEWLINENEWLINENEWLINEdef test_sympy__tensor__tensor__TensorIndex():NEWLINE from sympy.tensor.tensor import TensorIndexType, TensorIndexNEWLINE Lorentz = TensorIndexType('Lorentz', dummy_fmt='L')NEWLINE assert _test_args(TensorIndex('i', Lorentz))NEWLINENEWLINE@SKIP("abstract class")NEWLINEdef test_sympy__tensor__tensor__TensExpr():NEWLINE passNEWLINENEWLINEdef test_sympy__tensor__tensor__TensAdd():NEWLINE from sympy.tensor.tensor import TensorIndexType, TensorSymmetry, TensorType, get_symmetric_group_sgs, tensor_indices, TensAddNEWLINE Lorentz = TensorIndexType('Lorentz', dummy_fmt='L')NEWLINE a, b = 
tensor_indices('a,b', Lorentz)NEWLINE sym = TensorSymmetry(get_symmetric_group_sgs(1))NEWLINE S1 = TensorType([Lorentz], sym)NEWLINE p, q = S1('p,q')NEWLINE t1 = p(a)NEWLINE t2 = q(a)NEWLINE assert _test_args(TensAdd(t1, t2))NEWLINENEWLINENEWLINEdef test_sympy__tensor__tensor__Tensor():NEWLINE from sympy.core import SNEWLINE from sympy.tensor.tensor import TensorIndexType, TensorSymmetry, TensorType, get_symmetric_group_sgs, tensor_indices, TensMul, TIDSNEWLINE Lorentz = TensorIndexType('Lorentz', dummy_fmt='L')NEWLINE a, b = tensor_indices('a,b', Lorentz)NEWLINE sym = TensorSymmetry(get_symmetric_group_sgs(1))NEWLINE S1 = TensorType([Lorentz], sym)NEWLINE p = S1('p')NEWLINE assert _test_args(p(a))NEWLINENEWLINENEWLINEdef test_sympy__tensor__tensor__TensMul():NEWLINE from sympy.core import SNEWLINE from sympy.tensor.tensor import TensorIndexType, TensorSymmetry, TensorType, get_symmetric_group_sgs, tensor_indices, TensMul, TIDSNEWLINE Lorentz = TensorIndexType('Lorentz', dummy_fmt='L')NEWLINE a, b = tensor_indices('a,b', Lorentz)NEWLINE sym = TensorSymmetry(get_symmetric_group_sgs(1))NEWLINE S1 = TensorType([Lorentz], sym)NEWLINE p = S1('p')NEWLINE q = S1('q')NEWLINE assert _test_args(3*p(a)*q(b))NEWLINENEWLINENEWLINEdef test_as_coeff_add():NEWLINE assert (7, (3*x, 4*x**2)) == (7 + 3*x + 4*x**2).as_coeff_add()NEWLINENEWLINENEWLINEdef test_sympy__geometry__curve__Curve():NEWLINE from sympy.geometry.curve import CurveNEWLINE assert _test_args(Curve((x, 1), (x, 0, 1)))NEWLINENEWLINENEWLINEdef test_sympy__geometry__point__Point():NEWLINE from sympy.geometry.point import PointNEWLINE assert _test_args(Point(0, 1))NEWLINENEWLINENEWLINEdef test_sympy__geometry__point__Point2D():NEWLINE from sympy.geometry.point import Point2DNEWLINE assert _test_args(Point2D(0, 1))NEWLINENEWLINENEWLINEdef test_sympy__geometry__point__Point3D():NEWLINE from sympy.geometry.point import Point3DNEWLINE assert _test_args(Point3D(0, 1, 2))NEWLINENEWLINENEWLINEdef 
test_sympy__geometry__ellipse__Ellipse():NEWLINE from sympy.geometry.ellipse import EllipseNEWLINE assert _test_args(Ellipse((0, 1), 2, 3))NEWLINENEWLINENEWLINEdef test_sympy__geometry__ellipse__Circle():NEWLINE from sympy.geometry.ellipse import CircleNEWLINE assert _test_args(Circle((0, 1), 2))NEWLINENEWLINENEWLINEdef test_sympy__geometry__parabola__Parabola():NEWLINE from sympy.geometry.parabola import ParabolaNEWLINE from sympy.geometry.line import LineNEWLINE assert _test_args(Parabola((0, 0), Line((2, 3), (4, 3))))NEWLINENEWLINENEWLINE@SKIP("abstract class")NEWLINEdef test_sympy__geometry__line__LinearEntity():NEWLINE passNEWLINENEWLINENEWLINEdef test_sympy__geometry__line__Line():NEWLINE from sympy.geometry.line import LineNEWLINE assert _test_args(Line((0, 1), (2, 3)))NEWLINENEWLINENEWLINEdef test_sympy__geometry__line__Ray():NEWLINE from sympy.geometry.line import RayNEWLINE assert _test_args(Ray((0, 1), (2, 3)))NEWLINENEWLINENEWLINEdef test_sympy__geometry__line__Segment():NEWLINE from sympy.geometry.line import SegmentNEWLINE assert _test_args(Segment((0, 1), (2, 3)))NEWLINENEWLINE@SKIP("abstract class")NEWLINEdef test_sympy__geometry__line__LinearEntity2D():NEWLINE passNEWLINENEWLINENEWLINEdef test_sympy__geometry__line__Line2D():NEWLINE from sympy.geometry.line import Line2DNEWLINE assert _test_args(Line2D((0, 1), (2, 3)))NEWLINENEWLINENEWLINEdef test_sympy__geometry__line__Ray2D():NEWLINE from sympy.geometry.line import Ray2DNEWLINE assert _test_args(Ray2D((0, 1), (2, 3)))NEWLINENEWLINENEWLINEdef test_sympy__geometry__line__Segment2D():NEWLINE from sympy.geometry.line import Segment2DNEWLINE assert _test_args(Segment2D((0, 1), (2, 3)))NEWLINENEWLINENEWLINE@SKIP("abstract class")NEWLINEdef test_sympy__geometry__line__LinearEntity3D():NEWLINE passNEWLINENEWLINENEWLINEdef test_sympy__geometry__line__Line3D():NEWLINE from sympy.geometry.line import Line3DNEWLINE assert _test_args(Line3D((0, 1, 1), (2, 3, 4)))NEWLINENEWLINENEWLINEdef 
test_sympy__geometry__line__Segment3D():NEWLINE from sympy.geometry.line import Segment3DNEWLINE assert _test_args(Segment3D((0, 1, 1), (2, 3, 4)))NEWLINENEWLINENEWLINEdef test_sympy__geometry__line__Ray3D():NEWLINE from sympy.geometry.line import Ray3DNEWLINE assert _test_args(Ray3D((0, 1, 1), (2, 3, 4)))NEWLINENEWLINENEWLINEdef test_sympy__geometry__plane__Plane():NEWLINE from sympy.geometry.plane import PlaneNEWLINE assert _test_args(Plane((1, 1, 1), (-3, 4, -2), (1, 2, 3)))NEWLINENEWLINENEWLINEdef test_sympy__geometry__polygon__Polygon():NEWLINE from sympy.geometry.polygon import PolygonNEWLINE assert _test_args(Polygon((0, 1), (2, 3), (4, 5), (6, 7)))NEWLINENEWLINENEWLINEdef test_sympy__geometry__polygon__RegularPolygon():NEWLINE from sympy.geometry.polygon import RegularPolygonNEWLINE assert _test_args(RegularPolygon((0, 1), 2, 3, 4))NEWLINENEWLINENEWLINEdef test_sympy__geometry__polygon__Triangle():NEWLINE from sympy.geometry.polygon import TriangleNEWLINE assert _test_args(Triangle((0, 1), (2, 3), (4, 5)))NEWLINENEWLINENEWLINEdef test_sympy__geometry__entity__GeometryEntity():NEWLINE from sympy.geometry.entity import GeometryEntityNEWLINE from sympy.geometry.point import PointNEWLINE assert _test_args(GeometryEntity(Point(1, 0), 1, [1, 2]))NEWLINENEWLINE@SKIP("abstract class")NEWLINEdef test_sympy__geometry__entity__GeometrySet():NEWLINE passNEWLINENEWLINEdef test_sympy__diffgeom__diffgeom__Manifold():NEWLINE from sympy.diffgeom import ManifoldNEWLINE assert _test_args(Manifold('name', 3))NEWLINENEWLINENEWLINEdef test_sympy__diffgeom__diffgeom__Patch():NEWLINE from sympy.diffgeom import Manifold, PatchNEWLINE assert _test_args(Patch('name', Manifold('name', 3)))NEWLINENEWLINENEWLINEdef test_sympy__diffgeom__diffgeom__CoordSystem():NEWLINE from sympy.diffgeom import Manifold, Patch, CoordSystemNEWLINE assert _test_args(CoordSystem('name', Patch('name', Manifold('name', 3))))NEWLINENEWLINENEWLINE@XFAILNEWLINEdef test_sympy__diffgeom__diffgeom__Point():NEWLINE 
from sympy.diffgeom import Manifold, Patch, CoordSystem, PointNEWLINE assert _test_args(Point(NEWLINE CoordSystem('name', Patch('name', Manifold('name', 3))), [x, y]))NEWLINENEWLINENEWLINEdef test_sympy__diffgeom__diffgeom__BaseScalarField():NEWLINE from sympy.diffgeom import Manifold, Patch, CoordSystem, BaseScalarFieldNEWLINE cs = CoordSystem('name', Patch('name', Manifold('name', 3)))NEWLINE assert _test_args(BaseScalarField(cs, 0))NEWLINENEWLINENEWLINEdef test_sympy__diffgeom__diffgeom__BaseVectorField():NEWLINE from sympy.diffgeom import Manifold, Patch, CoordSystem, BaseVectorFieldNEWLINE cs = CoordSystem('name', Patch('name', Manifold('name', 3)))NEWLINE assert _test_args(BaseVectorField(cs, 0))NEWLINENEWLINENEWLINEdef test_sympy__diffgeom__diffgeom__Differential():NEWLINE from sympy.diffgeom import Manifold, Patch, CoordSystem, BaseScalarField, DifferentialNEWLINE cs = CoordSystem('name', Patch('name', Manifold('name', 3)))NEWLINE assert _test_args(Differential(BaseScalarField(cs, 0)))NEWLINENEWLINENEWLINEdef test_sympy__diffgeom__diffgeom__Commutator():NEWLINE from sympy.diffgeom import Manifold, Patch, CoordSystem, BaseVectorField, CommutatorNEWLINE cs = CoordSystem('name', Patch('name', Manifold('name', 3)))NEWLINE cs1 = CoordSystem('name1', Patch('name', Manifold('name', 3)))NEWLINE v = BaseVectorField(cs, 0)NEWLINE v1 = BaseVectorField(cs1, 0)NEWLINE assert _test_args(Commutator(v, v1))NEWLINENEWLINENEWLINEdef test_sympy__diffgeom__diffgeom__TensorProduct():NEWLINE from sympy.diffgeom import Manifold, Patch, CoordSystem, BaseScalarField, Differential, TensorProductNEWLINE cs = CoordSystem('name', Patch('name', Manifold('name', 3)))NEWLINE d = Differential(BaseScalarField(cs, 0))NEWLINE assert _test_args(TensorProduct(d, d))NEWLINENEWLINENEWLINEdef test_sympy__diffgeom__diffgeom__WedgeProduct():NEWLINE from sympy.diffgeom import Manifold, Patch, CoordSystem, BaseScalarField, Differential, WedgeProductNEWLINE cs = CoordSystem('name', Patch('name', 
Manifold('name', 3)))NEWLINE d = Differential(BaseScalarField(cs, 0))NEWLINE d1 = Differential(BaseScalarField(cs, 1))NEWLINE assert _test_args(WedgeProduct(d, d1))NEWLINENEWLINENEWLINEdef test_sympy__diffgeom__diffgeom__LieDerivative():NEWLINE from sympy.diffgeom import Manifold, Patch, CoordSystem, BaseScalarField, Differential, BaseVectorField, LieDerivativeNEWLINE cs = CoordSystem('name', Patch('name', Manifold('name', 3)))NEWLINE d = Differential(BaseScalarField(cs, 0))NEWLINE v = BaseVectorField(cs, 0)NEWLINE assert _test_args(LieDerivative(v, d))NEWLINENEWLINENEWLINE@XFAILNEWLINEdef test_sympy__diffgeom__diffgeom__BaseCovarDerivativeOp():NEWLINE from sympy.diffgeom import Manifold, Patch, CoordSystem, BaseCovarDerivativeOpNEWLINE cs = CoordSystem('name', Patch('name', Manifold('name', 3)))NEWLINE assert _test_args(BaseCovarDerivativeOp(cs, 0, [[[0, ]*3, ]*3, ]*3))NEWLINENEWLINENEWLINEdef test_sympy__diffgeom__diffgeom__CovarDerivativeOp():NEWLINE from sympy.diffgeom import Manifold, Patch, CoordSystem, BaseVectorField, CovarDerivativeOpNEWLINE cs = CoordSystem('name', Patch('name', Manifold('name', 3)))NEWLINE v = BaseVectorField(cs, 0)NEWLINE _test_args(CovarDerivativeOp(v, [[[0, ]*3, ]*3, ]*3))NEWLINENEWLINENEWLINEdef test_sympy__categories__baseclasses__Class():NEWLINE from sympy.categories.baseclasses import ClassNEWLINE assert _test_args(Class())NEWLINENEWLINENEWLINEdef test_sympy__categories__baseclasses__Object():NEWLINE from sympy.categories import ObjectNEWLINE assert _test_args(Object("A"))NEWLINENEWLINENEWLINE@XFAILNEWLINEdef test_sympy__categories__baseclasses__Morphism():NEWLINE from sympy.categories import Object, MorphismNEWLINE assert _test_args(Morphism(Object("A"), Object("B")))NEWLINENEWLINENEWLINEdef test_sympy__categories__baseclasses__IdentityMorphism():NEWLINE from sympy.categories import Object, IdentityMorphismNEWLINE assert _test_args(IdentityMorphism(Object("A")))NEWLINENEWLINENEWLINEdef 
test_sympy__categories__baseclasses__NamedMorphism():NEWLINE from sympy.categories import Object, NamedMorphismNEWLINE assert _test_args(NamedMorphism(Object("A"), Object("B"), "f"))NEWLINENEWLINENEWLINEdef test_sympy__categories__baseclasses__CompositeMorphism():NEWLINE from sympy.categories import Object, NamedMorphism, CompositeMorphismNEWLINE A = Object("A")NEWLINE B = Object("B")NEWLINE C = Object("C")NEWLINE f = NamedMorphism(A, B, "f")NEWLINE g = NamedMorphism(B, C, "g")NEWLINE assert _test_args(CompositeMorphism(f, g))NEWLINENEWLINENEWLINEdef test_sympy__categories__baseclasses__Diagram():NEWLINE from sympy.categories import Object, NamedMorphism, DiagramNEWLINE A = Object("A")NEWLINE B = Object("B")NEWLINE C = Object("C")NEWLINE f = NamedMorphism(A, B, "f")NEWLINE d = Diagram([f])NEWLINE assert _test_args(d)NEWLINENEWLINENEWLINEdef test_sympy__categories__baseclasses__Category():NEWLINE from sympy.categories import Object, NamedMorphism, Diagram, CategoryNEWLINE A = Object("A")NEWLINE B = Object("B")NEWLINE C = Object("C")NEWLINE f = NamedMorphism(A, B, "f")NEWLINE g = NamedMorphism(B, C, "g")NEWLINE d1 = Diagram([f, g])NEWLINE d2 = Diagram([f])NEWLINE K = Category("K", commutative_diagrams=[d1, d2])NEWLINE assert _test_args(K)NEWLINENEWLINENEWLINEdef test_sympy__ntheory__factor___totient():NEWLINE from sympy.ntheory.factor_ import totientNEWLINE k = symbols('k', integer=True)NEWLINE t = totient(k)NEWLINE assert _test_args(t)NEWLINENEWLINENEWLINEdef test_sympy__ntheory__factor___reduced_totient():NEWLINE from sympy.ntheory.factor_ import reduced_totientNEWLINE k = symbols('k', integer=True)NEWLINE t = reduced_totient(k)NEWLINE assert _test_args(t)NEWLINENEWLINENEWLINEdef test_sympy__ntheory__factor___divisor_sigma():NEWLINE from sympy.ntheory.factor_ import divisor_sigmaNEWLINE k = symbols('k', integer=True)NEWLINE n = symbols('n', integer=True)NEWLINE t = divisor_sigma(n, k)NEWLINE assert _test_args(t)NEWLINENEWLINENEWLINEdef 
test_sympy__ntheory__factor___udivisor_sigma():NEWLINE from sympy.ntheory.factor_ import udivisor_sigmaNEWLINE k = symbols('k', integer=True)NEWLINE n = symbols('n', integer=True)NEWLINE t = udivisor_sigma(n, k)NEWLINE assert _test_args(t)NEWLINENEWLINENEWLINEdef test_sympy__ntheory__factor___primenu():NEWLINE from sympy.ntheory.factor_ import primenuNEWLINE n = symbols('n', integer=True)NEWLINE t = primenu(n)NEWLINE assert _test_args(t)NEWLINENEWLINENEWLINEdef test_sympy__ntheory__factor___primeomega():NEWLINE from sympy.ntheory.factor_ import primeomegaNEWLINE n = symbols('n', integer=True)NEWLINE t = primeomega(n)NEWLINE assert _test_args(t)NEWLINENEWLINENEWLINEdef test_sympy__ntheory__residue_ntheory__mobius():NEWLINE from sympy.ntheory import mobiusNEWLINE assert _test_args(mobius(2))NEWLINENEWLINENEWLINEdef test_sympy__physics__optics__waves__TWave():NEWLINE from sympy.physics.optics import TWaveNEWLINE A, f, phi = symbols('A, f, phi')NEWLINE assert _test_args(TWave(A, f, phi))NEWLINENEWLINENEWLINEdef test_sympy__physics__optics__gaussopt__BeamParameter():NEWLINE from sympy.physics.optics import BeamParameterNEWLINE assert _test_args(BeamParameter(530e-9, 1, w=1e-3))NEWLINENEWLINENEWLINEdef test_sympy__physics__optics__medium__Medium():NEWLINE from sympy.physics.optics import MediumNEWLINE assert _test_args(Medium('m'))NEWLINENEWLINENEWLINEdef test_sympy__codegen__ast__Assignment():NEWLINE from sympy.codegen.ast import AssignmentNEWLINE assert _test_args(Assignment(x, y))NEWLINENEWLINENEWLINEdef test_sympy__codegen__cfunctions__expm1():NEWLINE from sympy.codegen.cfunctions import expm1NEWLINE assert _test_args(expm1(x))NEWLINENEWLINENEWLINEdef test_sympy__codegen__cfunctions__log1p():NEWLINE from sympy.codegen.cfunctions import log1pNEWLINE assert _test_args(log1p(x))NEWLINENEWLINENEWLINEdef test_sympy__codegen__cfunctions__exp2():NEWLINE from sympy.codegen.cfunctions import exp2NEWLINE assert _test_args(exp2(x))NEWLINENEWLINENEWLINEdef 
test_sympy__codegen__cfunctions__log2():NEWLINE from sympy.codegen.cfunctions import log2NEWLINE assert _test_args(log2(x))NEWLINENEWLINENEWLINEdef test_sympy__codegen__cfunctions__fma():NEWLINE from sympy.codegen.cfunctions import fmaNEWLINE assert _test_args(fma(x, y, z))NEWLINENEWLINENEWLINEdef test_sympy__codegen__cfunctions__log10():NEWLINE from sympy.codegen.cfunctions import log10NEWLINE assert _test_args(log10(x))NEWLINENEWLINENEWLINEdef test_sympy__codegen__cfunctions__Sqrt():NEWLINE from sympy.codegen.cfunctions import SqrtNEWLINE assert _test_args(Sqrt(x))NEWLINENEWLINEdef test_sympy__codegen__cfunctions__Cbrt():NEWLINE from sympy.codegen.cfunctions import CbrtNEWLINE assert _test_args(Cbrt(x))NEWLINENEWLINEdef test_sympy__codegen__cfunctions__hypot():NEWLINE from sympy.codegen.cfunctions import hypotNEWLINE assert _test_args(hypot(x, y))NEWLINENEWLINENEWLINEdef test_sympy__codegen__ffunctions__FFunction():NEWLINE from sympy.codegen.ffunctions import FFunctionNEWLINE assert _test_args(FFunction('f'))NEWLINENEWLINENEWLINEdef test_sympy__codegen__ffunctions__F95Function():NEWLINE from sympy.codegen.ffunctions import F95FunctionNEWLINE assert _test_args(F95Function('f'))NEWLINENEWLINENEWLINEdef test_sympy__codegen__ffunctions__isign():NEWLINE from sympy.codegen.ffunctions import isignNEWLINE assert _test_args(isign(1, x))NEWLINENEWLINENEWLINEdef test_sympy__codegen__ffunctions__dsign():NEWLINE from sympy.codegen.ffunctions import dsignNEWLINE assert _test_args(dsign(1, x))NEWLINENEWLINENEWLINEdef test_sympy__codegen__ffunctions__cmplx():NEWLINE from sympy.codegen.ffunctions import cmplxNEWLINE assert _test_args(cmplx(x, y))NEWLINENEWLINENEWLINEdef test_sympy__codegen__ffunctions__kind():NEWLINE from sympy.codegen.ffunctions import kindNEWLINE assert _test_args(kind(x))NEWLINENEWLINENEWLINEdef test_sympy__codegen__ffunctions__merge():NEWLINE from sympy.codegen.ffunctions import mergeNEWLINE assert _test_args(merge(1, 2, Eq(x, 0)))NEWLINENEWLINENEWLINEdef 
test_sympy__codegen__ffunctions___literal():NEWLINE from sympy.codegen.ffunctions import _literalNEWLINE assert _test_args(_literal(1))NEWLINENEWLINENEWLINEdef test_sympy__codegen__ffunctions__literal_sp():NEWLINE from sympy.codegen.ffunctions import literal_spNEWLINE assert _test_args(literal_sp(1))NEWLINENEWLINENEWLINEdef test_sympy__codegen__ffunctions__literal_dp():NEWLINE from sympy.codegen.ffunctions import literal_dpNEWLINE assert _test_args(literal_dp(1))NEWLINENEWLINENEWLINEdef test_sympy__vector__coordsysrect__CoordSys3D():NEWLINE from sympy.vector.coordsysrect import CoordSys3DNEWLINE assert _test_args(CoordSys3D('C'))NEWLINENEWLINENEWLINEdef test_sympy__vector__point__Point():NEWLINE from sympy.vector.point import PointNEWLINE assert _test_args(Point('P'))NEWLINENEWLINENEWLINEdef test_sympy__vector__basisdependent__BasisDependent():NEWLINE from sympy.vector.basisdependent import BasisDependentNEWLINE #These classes have been created to maintain an OOP hierarchyNEWLINE #for Vectors and Dyadics. Are NOT meant to be initializedNEWLINENEWLINENEWLINEdef test_sympy__vector__basisdependent__BasisDependentMul():NEWLINE from sympy.vector.basisdependent import BasisDependentMulNEWLINE #These classes have been created to maintain an OOP hierarchyNEWLINE #for Vectors and Dyadics. Are NOT meant to be initializedNEWLINENEWLINENEWLINEdef test_sympy__vector__basisdependent__BasisDependentAdd():NEWLINE from sympy.vector.basisdependent import BasisDependentAddNEWLINE #These classes have been created to maintain an OOP hierarchyNEWLINE #for Vectors and Dyadics. Are NOT meant to be initializedNEWLINENEWLINENEWLINEdef test_sympy__vector__basisdependent__BasisDependentZero():NEWLINE from sympy.vector.basisdependent import BasisDependentZeroNEWLINE #These classes have been created to maintain an OOP hierarchyNEWLINE #for Vectors and Dyadics. 
Are NOT meant to be initializedNEWLINENEWLINENEWLINEdef test_sympy__vector__vector__BaseVector():NEWLINE from sympy.vector.vector import BaseVectorNEWLINE from sympy.vector.coordsysrect import CoordSys3DNEWLINE C = CoordSys3D('C')NEWLINE assert _test_args(BaseVector(0, C, ' ', ' '))NEWLINENEWLINENEWLINEdef test_sympy__vector__vector__VectorAdd():NEWLINE from sympy.vector.vector import VectorAdd, VectorMulNEWLINE from sympy.vector.coordsysrect import CoordSys3DNEWLINE C = CoordSys3D('C')NEWLINE from sympy.abc import a, b, c, x, y, zNEWLINE v1 = a*C.i + b*C.j + c*C.kNEWLINE v2 = x*C.i + y*C.j + z*C.kNEWLINE assert _test_args(VectorAdd(v1, v2))NEWLINE assert _test_args(VectorMul(x, v1))NEWLINENEWLINENEWLINEdef test_sympy__vector__vector__VectorMul():NEWLINE from sympy.vector.vector import VectorMulNEWLINE from sympy.vector.coordsysrect import CoordSys3DNEWLINE C = CoordSys3D('C')NEWLINE from sympy.abc import aNEWLINE assert _test_args(VectorMul(a, C.i))NEWLINENEWLINENEWLINEdef test_sympy__vector__vector__VectorZero():NEWLINE from sympy.vector.vector import VectorZeroNEWLINE assert _test_args(VectorZero())NEWLINENEWLINENEWLINEdef test_sympy__vector__vector__Vector():NEWLINE from sympy.vector.vector import VectorNEWLINE #Vector is never to be initialized using argsNEWLINE passNEWLINENEWLINENEWLINEdef test_sympy__vector__vector__Cross():NEWLINE from sympy.vector.vector import CrossNEWLINE from sympy.vector.coordsysrect import CoordSys3DNEWLINE C = CoordSys3D('C')NEWLINE _test_args(Cross(C.i, C.j))NEWLINENEWLINENEWLINEdef test_sympy__vector__vector__Dot():NEWLINE from sympy.vector.vector import DotNEWLINE from sympy.vector.coordsysrect import CoordSys3DNEWLINE C = CoordSys3D('C')NEWLINE _test_args(Dot(C.i, C.j))NEWLINENEWLINENEWLINEdef test_sympy__vector__dyadic__Dyadic():NEWLINE from sympy.vector.dyadic import DyadicNEWLINE #Dyadic is never to be initialized using argsNEWLINE passNEWLINENEWLINENEWLINEdef test_sympy__vector__dyadic__BaseDyadic():NEWLINE from 
sympy.vector.dyadic import BaseDyadicNEWLINE from sympy.vector.coordsysrect import CoordSys3DNEWLINE C = CoordSys3D('C')NEWLINE assert _test_args(BaseDyadic(C.i, C.j))NEWLINENEWLINENEWLINEdef test_sympy__vector__dyadic__DyadicMul():NEWLINE from sympy.vector.dyadic import BaseDyadic, DyadicMulNEWLINE from sympy.vector.coordsysrect import CoordSys3DNEWLINE C = CoordSys3D('C')NEWLINE assert _test_args(DyadicMul(3, BaseDyadic(C.i, C.j)))NEWLINENEWLINENEWLINEdef test_sympy__vector__dyadic__DyadicAdd():NEWLINE from sympy.vector.dyadic import BaseDyadic, DyadicAddNEWLINE from sympy.vector.coordsysrect import CoordSys3DNEWLINE C = CoordSys3D('C')NEWLINE assert _test_args(2 * DyadicAdd(BaseDyadic(C.i, C.i),NEWLINE BaseDyadic(C.i, C.j)))NEWLINENEWLINENEWLINEdef test_sympy__vector__dyadic__DyadicZero():NEWLINE from sympy.vector.dyadic import DyadicZeroNEWLINE assert _test_args(DyadicZero())NEWLINENEWLINENEWLINEdef test_sympy__vector__deloperator__Del():NEWLINE from sympy.vector.deloperator import DelNEWLINE assert _test_args(Del())NEWLINENEWLINENEWLINEdef test_sympy__vector__operators__Curl():NEWLINE from sympy.vector.operators import CurlNEWLINE from sympy.vector.coordsysrect import CoordSys3DNEWLINE C = CoordSys3D('C')NEWLINE assert _test_args(Curl(C.i))NEWLINENEWLINENEWLINEdef test_sympy__vector__operators__Divergence():NEWLINE from sympy.vector.operators import DivergenceNEWLINE from sympy.vector.coordsysrect import CoordSys3DNEWLINE C = CoordSys3D('C')NEWLINE assert _test_args(Divergence(C.i))NEWLINENEWLINENEWLINEdef test_sympy__vector__operators__Gradient():NEWLINE from sympy.vector.operators import GradientNEWLINE from sympy.vector.coordsysrect import CoordSys3DNEWLINE C = CoordSys3D('C')NEWLINE assert _test_args(Gradient(C.x))NEWLINENEWLINENEWLINEdef test_sympy__vector__orienters__Orienter():NEWLINE from sympy.vector.orienters import OrienterNEWLINE #Not to be initializedNEWLINENEWLINENEWLINEdef test_sympy__vector__orienters__ThreeAngleOrienter():NEWLINE from 
sympy.vector.orienters import ThreeAngleOrienterNEWLINE #Not to be initializedNEWLINENEWLINENEWLINEdef test_sympy__vector__orienters__AxisOrienter():NEWLINE from sympy.vector.orienters import AxisOrienterNEWLINE from sympy.vector.coordsysrect import CoordSys3DNEWLINE C = CoordSys3D('C')NEWLINE assert _test_args(AxisOrienter(x, C.i))NEWLINENEWLINENEWLINEdef test_sympy__vector__orienters__BodyOrienter():NEWLINE from sympy.vector.orienters import BodyOrienterNEWLINE assert _test_args(BodyOrienter(x, y, z, '123'))NEWLINENEWLINENEWLINEdef test_sympy__vector__orienters__SpaceOrienter():NEWLINE from sympy.vector.orienters import SpaceOrienterNEWLINE assert _test_args(SpaceOrienter(x, y, z, '123'))NEWLINENEWLINENEWLINEdef test_sympy__vector__orienters__QuaternionOrienter():NEWLINE from sympy.vector.orienters import QuaternionOrienterNEWLINE a, b, c, d = symbols('a b c d')NEWLINE assert _test_args(QuaternionOrienter(a, b, c, d))NEWLINENEWLINENEWLINEdef test_sympy__vector__scalar__BaseScalar():NEWLINE from sympy.vector.scalar import BaseScalarNEWLINE from sympy.vector.coordsysrect import CoordSys3DNEWLINE C = CoordSys3D('C')NEWLINE assert _test_args(BaseScalar(0, C, ' ', ' '))NEWLINENEWLINENEWLINEdef test_sympy__physics__wigner__Wigner3j():NEWLINE from sympy.physics.wigner import Wigner3jNEWLINE assert _test_args(Wigner3j(0, 0, 0, 0, 0, 0))NEWLINENEWLINEdef test_sympy__integrals__rubi__symbol__matchpyWC():NEWLINE from sympy.integrals.rubi.symbol import matchpyWCNEWLINE assert _test_args(matchpyWC(1, True, 'a'))NEWLINE
import setuptoolsNEWLINENEWLINEwith open("README.md", "r", encoding="utf-8") as f:NEWLINE long_description = f.read()NEWLINENEWLINEsetuptools.setup(NEWLINE name="sunnyvale",NEWLINE version="0.0.1",NEWLINE author="Gunhoon Lee",NEWLINE author_email="gunhoon@gmail.com",NEWLINE description="A small example package",NEWLINE long_description=long_description,NEWLINE long_description_content_type="text/markdown",NEWLINE url="https://github.com/gunhoon/sunnyvale",NEWLINE packages=setuptools.find_packages(),NEWLINE classifiers=[NEWLINE "Programming Language :: Python :: 3",NEWLINE "License :: OSI Approved :: MIT License",NEWLINE "Operating System :: OS Independent",NEWLINE ],NEWLINE python_requires='>=3.6',NEWLINE)NEWLINE
from test_common import get_sample_layer, get_opsNEWLINEfrom nose.tools import assert_raisesNEWLINEimport tvmNEWLINENEWLINEdef test_set_tiling_wrong_inputs():NEWLINE layer = get_sample_layer()NEWLINE with assert_raises(Exception):NEWLINE # wrong iv nameNEWLINE layer.set_tiling("n", [4, 1, 1, 1])NEWLINENEWLINE with assert_raises(Exception):NEWLINE # wrong tiling lengthNEWLINE layer.set_tiling("N", [4, 1, 1, 1, 1])NEWLINENEWLINE with assert_raises(Exception):NEWLINE # wrong tiling valueNEWLINE layer.set_tiling("N", [4, 2, 1, 1])NEWLINENEWLINE # correct caseNEWLINE layer.set_tiling("N", [4, 1, 1, 1])NEWLINENEWLINENEWLINEdef test_set_tiling():NEWLINE layer = get_sample_layer()NEWLINE assert layer._loop_TCs["N_DRAM"] == 4NEWLINE assert layer._loop_TCs["N_SPM"] == 1NEWLINE assert layer._loop_TCs["N_RF"] == 1NEWLINE assert layer._loop_TCs["N_Spatial"] == 1NEWLINENEWLINE layer.set_tiling("N", [1, 1, 2, 2])NEWLINE assert layer._loop_TCs["N_DRAM"] == 1NEWLINE assert layer._loop_TCs["N_SPM"] == 1NEWLINE assert layer._loop_TCs["N_RF"] == 2NEWLINE assert layer._loop_TCs["N_Spatial"] == 2NEWLINENEWLINENEWLINEdef test_set_ordering():NEWLINE layer = get_sample_layer()NEWLINE new_order = ["M", "C", "Ox", "N", "Oy", "Fx", "Fy"]NEWLINE layer.set_ordering("DRAM", new_order)NEWLINE assert layer._loop_IVs["DRAM"] == [ x+"_DRAM" for x in new_order ]NEWLINENEWLINENEWLINEdef test_get_loop():NEWLINE layer = get_sample_layer()NEWLINE loop = layer._get_loop()NEWLINENEWLINENEWLINEdef test_get_stores():NEWLINE layer = get_sample_layer()NEWLINE stores = layer._get_stores()NEWLINE assert len(stores) == 1NEWLINE store = stores[0]NEWLINE assert isinstance(store, tvm.stmt.Store)NEWLINENEWLINE stores = layer._get_stores(pass_init=False)NEWLINE assert len(stores) == 2NEWLINE assert store in storesNEWLINE for store in stores:NEWLINE assert isinstance(store, tvm.stmt.Store)NEWLINENEWLINENEWLINEdef test_get_reads_writes():NEWLINE layer = get_sample_layer()NEWLINE reads, writes = 
layer._get_reads_writes()NEWLINE assert len(reads) == 3NEWLINE assert len(writes) == 1NEWLINE for read in reads:NEWLINE assert isinstance(read, tvm.expr.Load)NEWLINE for write in writes:NEWLINE assert isinstance(write, tvm.stmt.Store)NEWLINENEWLINENEWLINEdef test_get_reads_writes_of_operand():NEWLINE layer = get_sample_layer()NEWLINE reads, writes = layer._get_reads_writes_of_operand(layer._O.name)NEWLINE assert len(reads) == 1 and len(writes) == 1NEWLINE reads, writes = layer._get_reads_writes_of_operand(layer._W.name)NEWLINE assert len(reads) == 1 and len(writes) == 0NEWLINE reads, writes = layer._get_reads_writes_of_operand(layer._I.name)NEWLINE assert len(reads) == 1 and len(writes) == 0NEWLINENEWLINENEWLINEdef test_get_operands():NEWLINE layer = get_sample_layer()NEWLINE O_write, O_read, I, W = get_ops(layer)NEWLINE operands = layer._get_operands()NEWLINENEWLINE assert O_write in operands[layer._O.name]NEWLINE assert O_read in operands[layer._O.name]NEWLINE assert I in operands[layer._I.name]NEWLINE assert W in operands[layer._W.name]NEWLINENEWLINENEWLINEdef test_get_num_different_pixels():NEWLINE layer = get_sample_layer()NEWLINE reads, writes = layer._get_reads_writes_of_operand(layer._I.name)NEWLINE assert layer._get_num_different_pixels(reads[0], [1, 2, 2, 14, 14, 3, 3]) == 512NEWLINENEWLINENEWLINEdef test_get_index_vars():NEWLINE layer = get_sample_layer()NEWLINE O_write, O_read, I, W = get_ops(layer)NEWLINE assert layer._get_index_vars(O_write) == ["N", "M", "Ox", "Oy"]NEWLINE assert layer._get_index_vars(O_read) == ["N", "M", "Ox", "Oy"]NEWLINE assert set(layer._get_index_vars(I)) == set(["N", "C", "Ox", "Oy", "Fx", "Fy"])NEWLINE assert layer._get_index_vars(W) == ["M", "C", "Fx", "Fy"]NEWLINENEWLINENEWLINEdef test_get_index_exprs():NEWLINE layer = get_sample_layer()NEWLINE O_write, O_read, I, W = get_ops(layer)NEWLINE keys = ["n", "m", "c", "ox", "oy", "fx", "fy"]NEWLINE values = [ int(layer.get_TripCounts(key.title(), "loop") / 
layer.get_TripCounts(key.title(), "DRAM")) for key in layer.base_TCs ]NEWLINE local_vars = dict(zip(keys, values))NEWLINE assert layer._get_index_expr_evaluated(I, 0, local_vars) == 16NEWLINE assert layer._get_index_expr_evaluated(I, 1, local_vars) == 16NEWLINE assert layer._get_index_expr_evaluated(I, 2, local_vars) == 32NEWLINE assert layer._get_index_expr_evaluated(I, 3, local_vars) == 1NEWLINENEWLINENEWLINEdef test_get_tensor_from_name():NEWLINE layer = get_sample_layer()NEWLINE assert layer._get_tensor_from_name("I") == layer._INEWLINE assert layer._get_tensor_from_name("O") == layer._ONEWLINE assert layer._get_tensor_from_name("W") == layer._W
import audiovisuaaliNEWLINEfrom urllib.request import quoteNEWLINEfrom requests import get as rgetNEWLINEfrom json import loadsNEWLINENEWLINE# trumpNEWLINEasync def trump(message, client, arguments):NEWLINENEWLINE #Starting to fetch a CatfactNEWLINE query = "https://api.tronalddump.io/search/quote?query={}".format(quote(arguments))NEWLINE response = loads(rget(query).text)NEWLINENEWLINE # Checking if name is valid from responseNEWLINE if response["total"] == 0:NEWLINE letter = "No quotes found"NEWLINE returnNEWLINENEWLINE # Creating if existsNEWLINE else:NEWLINE letter = ":rofl: **| Found " + str(response["total"]) + " hits\n```" + response["_embedded"]["quotes"][0]["value"] + "```"NEWLINE try:NEWLINE letter = letter + "```" + response["_embedded"]["quotes"][1]["value"] + "```"NEWLINE except:NEWLINE passNEWLINE try:NEWLINE letter = letter + "```" + response["_embedded"]["quotes"][2]["value"] + "```"NEWLINE except:NEWLINE passNEWLINENEWLINE # Sending messageNEWLINE await client.send_message(message.channel, letter+"**")NEWLINE
from __future__ import unicode_literalsNEWLINENEWLINEfrom prompt_toolkit.buffer import BufferNEWLINEfrom prompt_toolkit.cache import SimpleCacheNEWLINEfrom prompt_toolkit.clipboard import Clipboard, InMemoryClipboardNEWLINEfrom prompt_toolkit.enums import EditingModeNEWLINEfrom prompt_toolkit.eventloop import get_event_loop, ensure_future, Return, run_in_executor, run_until_complete, call_from_executor, FromNEWLINEfrom prompt_toolkit.eventloop.base import get_traceback_from_contextNEWLINEfrom prompt_toolkit.filters import to_filter, ConditionNEWLINEfrom prompt_toolkit.input.base import InputNEWLINEfrom prompt_toolkit.input.defaults import get_default_inputNEWLINEfrom prompt_toolkit.input.typeahead import store_typeahead, get_typeaheadNEWLINEfrom prompt_toolkit.key_binding.bindings.page_navigation import load_page_navigation_bindingsNEWLINEfrom prompt_toolkit.key_binding.defaults import load_key_bindingsNEWLINEfrom prompt_toolkit.key_binding.key_bindings import KeyBindings, ConditionalKeyBindings, KeyBindingsBase, merge_key_bindings, GlobalOnlyKeyBindingsNEWLINEfrom prompt_toolkit.key_binding.key_processor import KeyProcessorNEWLINEfrom prompt_toolkit.key_binding.emacs_state import EmacsStateNEWLINEfrom prompt_toolkit.key_binding.vi_state import ViStateNEWLINEfrom prompt_toolkit.keys import KeysNEWLINEfrom prompt_toolkit.layout.controls import BufferControlNEWLINEfrom prompt_toolkit.layout.dummy import create_dummy_layoutNEWLINEfrom prompt_toolkit.layout.layout import Layout, walkNEWLINEfrom prompt_toolkit.output import Output, ColorDepthNEWLINEfrom prompt_toolkit.output.defaults import get_default_outputNEWLINEfrom prompt_toolkit.renderer import Renderer, print_formatted_textNEWLINEfrom prompt_toolkit.search import SearchStateNEWLINEfrom prompt_toolkit.styles import BaseStyle, default_ui_style, default_pygments_style, merge_styles, DynamicStyle, DummyStyle, StyleTransformation, DummyStyleTransformationNEWLINEfrom prompt_toolkit.utils import Event, 
in_main_threadNEWLINEfrom .current import set_appNEWLINEfrom .run_in_terminal import run_in_terminal, run_coroutine_in_terminalNEWLINENEWLINEfrom subprocess import PopenNEWLINEfrom traceback import format_tbNEWLINEimport osNEWLINEimport reNEWLINEimport signalNEWLINEimport sixNEWLINEimport sysNEWLINEimport timeNEWLINENEWLINE__all__ = [NEWLINE 'Application',NEWLINE]NEWLINENEWLINENEWLINEclass Application(object):NEWLINE """NEWLINE The main Application class!NEWLINE This glues everything together.NEWLINENEWLINE :param layout: A :class:`~prompt_toolkit.layout.Layout` instance.NEWLINE :param key_bindings:NEWLINE :class:`~prompt_toolkit.key_binding.KeyBindingsBase` instance forNEWLINE the key bindings.NEWLINE :param clipboard: :class:`~prompt_toolkit.clipboard.Clipboard` to use.NEWLINE :param on_abort: What to do when Control-C is pressed.NEWLINE :param on_exit: What to do when Control-D is pressed.NEWLINE :param full_screen: When True, run the application on the alternate screen buffer.NEWLINE :param color_depth: Any :class:`~.ColorDepth` value, a callable thatNEWLINE returns a :class:`~.ColorDepth` or `None` for default.NEWLINE :param erase_when_done: (bool) Clear the application output when it finishes.NEWLINE :param reverse_vi_search_direction: Normally, in Vi mode, a '/' searchesNEWLINE forward and a '?' searches backward. In Readline mode, this is usuallyNEWLINE reversed.NEWLINE :param min_redraw_interval: Number of seconds to wait between redraws. UseNEWLINE this for applications where `invalidate` is called a lot. 
This could causeNEWLINE a lot of terminal output, which some terminals are not able to process.NEWLINENEWLINE `None` means that every `invalidate` will be scheduled right awayNEWLINE (which is usually fine).NEWLINENEWLINE When one `invalidate` is called, but a scheduled redraw of a previousNEWLINE `invalidate` call has not been executed yet, nothing will happen in anyNEWLINE case.NEWLINENEWLINE :param max_render_postpone_time: When there is high CPU (a lot of otherNEWLINE scheduled calls), postpone the rendering max x seconds. '0' means:NEWLINE don't postpone. '.5' means: try to draw at least twice a second.NEWLINENEWLINE Filters:NEWLINENEWLINE :param mouse_support: (:class:`~prompt_toolkit.filters.Filter` orNEWLINE boolean). When True, enable mouse support.NEWLINE :param paste_mode: :class:`~prompt_toolkit.filters.Filter` or boolean.NEWLINE :param editing_mode: :class:`~prompt_toolkit.enums.EditingMode`.NEWLINENEWLINE :param enable_page_navigation_bindings: When `True`, enable the pageNEWLINE navigation key bindings. These include both Emacs and Vi bindings likeNEWLINE page-up, page-down and so on to scroll through pages. Mostly useful forNEWLINE creating an editor or other full screen applications. Probably, youNEWLINE don't want this for the implementation of a REPL. By default, this isNEWLINE enabled if `full_screen` is set.NEWLINENEWLINE Callbacks (all of these should accept aNEWLINE :class:`~prompt_toolkit.application.Application` object as input.)NEWLINENEWLINE :param on_reset: Called during reset.NEWLINE :param on_invalidate: Called when the UI has been invalidated.NEWLINE :param before_render: Called right before rendering.NEWLINE :param after_render: Called right after rendering.NEWLINENEWLINE I/O:NEWLINENEWLINE :param input: :class:`~prompt_toolkit.input.Input` instance.NEWLINE :param output: :class:`~prompt_toolkit.output.Output` instance. 
(ProbablyNEWLINE Vt100_Output or Win32Output.)NEWLINENEWLINE Usage:NEWLINENEWLINE app = Application(...)NEWLINE app.run()NEWLINE """NEWLINE def __init__(self, layout=None,NEWLINE style=None,NEWLINE include_default_pygments_style=True,NEWLINE style_transformation=None,NEWLINE key_bindings=None, clipboard=None,NEWLINE full_screen=False, color_depth=None,NEWLINE mouse_support=False,NEWLINENEWLINE enable_page_navigation_bindings=None, # Can be None, True or False.NEWLINENEWLINE paste_mode=False,NEWLINE editing_mode=EditingMode.EMACS,NEWLINE erase_when_done=False,NEWLINE reverse_vi_search_direction=False,NEWLINE min_redraw_interval=None,NEWLINE max_render_postpone_time=0,NEWLINENEWLINE on_reset=None, on_invalidate=None,NEWLINE before_render=None, after_render=None,NEWLINENEWLINE # I/O.NEWLINE input=None, output=None):NEWLINENEWLINE # If `enable_page_navigation_bindings` is not specified, enable it inNEWLINE # case of full screen applications only. This can be overridden by the user.NEWLINE if enable_page_navigation_bindings is None:NEWLINE enable_page_navigation_bindings = Condition(lambda: self.full_screen)NEWLINENEWLINE paste_mode = to_filter(paste_mode)NEWLINE mouse_support = to_filter(mouse_support)NEWLINE reverse_vi_search_direction = to_filter(reverse_vi_search_direction)NEWLINE enable_page_navigation_bindings = to_filter(enable_page_navigation_bindings)NEWLINE include_default_pygments_style = to_filter(include_default_pygments_style)NEWLINENEWLINE assert layout is None or isinstance(layout, Layout), 'Got layout: %r' % (layout, )NEWLINE assert key_bindings is None or isinstance(key_bindings, KeyBindingsBase)NEWLINE assert clipboard is None or isinstance(clipboard, Clipboard)NEWLINE assert isinstance(full_screen, bool)NEWLINE assert (color_depth is None or callable(color_depth) orNEWLINE color_depth in ColorDepth._ALL), 'Got color_depth: %r' % (color_depth, )NEWLINE assert isinstance(editing_mode, six.string_types)NEWLINE assert style is None or isinstance(style, 
BaseStyle)NEWLINE assert style_transformation is None or isinstance(style_transformation, StyleTransformation)NEWLINE assert isinstance(erase_when_done, bool)NEWLINE assert min_redraw_interval is None or isinstance(min_redraw_interval, (float, int))NEWLINE assert max_render_postpone_time is None or isinstance(max_render_postpone_time, (float, int))NEWLINENEWLINE assert on_reset is None or callable(on_reset)NEWLINE assert on_invalidate is None or callable(on_invalidate)NEWLINE assert before_render is None or callable(before_render)NEWLINE assert after_render is None or callable(after_render)NEWLINENEWLINE assert output is None or isinstance(output, Output)NEWLINE assert input is None or isinstance(input, Input)NEWLINENEWLINE if layout is None:NEWLINE layout = create_dummy_layout()NEWLINENEWLINE if style_transformation is None:NEWLINE style_transformation = DummyStyleTransformation()NEWLINENEWLINE self.style = styleNEWLINE self.style_transformation = style_transformationNEWLINENEWLINE # Key bindings.NEWLINE self.key_bindings = key_bindingsNEWLINE self._default_bindings = load_key_bindings()NEWLINE self._page_navigation_bindings = load_page_navigation_bindings()NEWLINENEWLINE self.layout = layoutNEWLINE self.clipboard = clipboard or InMemoryClipboard()NEWLINE self.full_screen = full_screenNEWLINE self._color_depth = color_depthNEWLINE self.mouse_support = mouse_supportNEWLINENEWLINE self.paste_mode = paste_modeNEWLINE self.editing_mode = editing_modeNEWLINE self.erase_when_done = erase_when_doneNEWLINE self.reverse_vi_search_direction = reverse_vi_search_directionNEWLINE self.enable_page_navigation_bindings = enable_page_navigation_bindingsNEWLINE self.min_redraw_interval = min_redraw_intervalNEWLINE self.max_render_postpone_time = max_render_postpone_timeNEWLINENEWLINE # Events.NEWLINE self.on_invalidate = Event(self, on_invalidate)NEWLINE self.on_reset = Event(self, on_reset)NEWLINE self.before_render = Event(self, before_render)NEWLINE self.after_render = 
Event(self, after_render)NEWLINENEWLINE # I/O.NEWLINE self.output = output or get_default_output()NEWLINE self.input = input or get_default_input()NEWLINENEWLINE # List of 'extra' functions to execute before a Application.run.NEWLINE self.pre_run_callables = []NEWLINENEWLINE self._is_running = FalseNEWLINE self.future = NoneNEWLINENEWLINE #: Quoted insert. This flag is set if we go into quoted insert mode.NEWLINE self.quoted_insert = FalseNEWLINENEWLINE #: Vi state. (For Vi key bindings.)NEWLINE self.vi_state = ViState()NEWLINE self.emacs_state = EmacsState()NEWLINENEWLINE #: When to flush the input (For flushing escape keys.) This is importantNEWLINE #: on terminals that use vt100 input. We can't distinguish the escapeNEWLINE #: key from for instance the left-arrow key, if we don't know what followsNEWLINE #: after "\x1b". This little timer will consider "\x1b" to be escape ifNEWLINE #: nothing did follow in this time span.NEWLINE #: This seems to work like the `ttimeoutlen` option in Vim.NEWLINE self.ttimeoutlen = .5 # Seconds.NEWLINENEWLINE #: Like Vim's `timeoutlen` option. This can be `None` or a float. ForNEWLINE #: instance, suppose that we have a key binding AB and a second keyNEWLINE #: binding A. If the uses presses A and then waits, we don't handleNEWLINE #: this binding yet (unless it was marked 'eager'), because we don'tNEWLINE #: know what will follow. This timeout is the maximum amount of timeNEWLINE #: that we wait until we call the handlers anyway. 
Pass `None` toNEWLINE #: disable this timeout.NEWLINE self.timeoutlen = 1.0NEWLINENEWLINE #: The `Renderer` instance.NEWLINE # Make sure that the same stdout is used, when a custom renderer has been passed.NEWLINE self._merged_style = self._create_merged_style(include_default_pygments_style)NEWLINENEWLINE self.renderer = Renderer(NEWLINE self._merged_style,NEWLINE self.output,NEWLINE self.input,NEWLINE full_screen=full_screen,NEWLINE mouse_support=mouse_support,NEWLINE cpr_not_supported_callback=self.cpr_not_supported_callback)NEWLINENEWLINE #: Render counter. This one is increased every time the UI is rendered.NEWLINE #: It can be used as a key for caching certain information during oneNEWLINE #: rendering.NEWLINE self.render_counter = 0NEWLINENEWLINE # Invalidate flag. When 'True', a repaint has been scheduled.NEWLINE self._invalidated = FalseNEWLINE self._invalidate_events = [] # Collection of 'invalidate' Event objects.NEWLINE self._last_redraw_time = 0 # Unix timestamp of last redraw. Used whenNEWLINE # `min_redraw_interval` is given.NEWLINENEWLINE #: The `InputProcessor` instance.NEWLINE self.key_processor = KeyProcessor(_CombinedRegistry(self))NEWLINENEWLINE # If `run_in_terminal` was called. 
This will point to a `Future` what will beNEWLINE # set at the point when the previous run finishes.NEWLINE self._running_in_terminal = FalseNEWLINE self._running_in_terminal_f = NoneNEWLINENEWLINE # Trigger initialize callback.NEWLINE self.reset()NEWLINENEWLINE def _create_merged_style(self, include_default_pygments_style):NEWLINE """NEWLINE Create a `Style` object that merges the default UI style, the defaultNEWLINE pygments style, and the custom user style.NEWLINE """NEWLINE dummy_style = DummyStyle()NEWLINE pygments_style = default_pygments_style()NEWLINENEWLINE @DynamicStyleNEWLINE def conditional_pygments_style():NEWLINE if include_default_pygments_style():NEWLINE return pygments_styleNEWLINE else:NEWLINE return dummy_styleNEWLINENEWLINE return merge_styles([NEWLINE default_ui_style(),NEWLINE conditional_pygments_style,NEWLINE DynamicStyle(lambda: self.style),NEWLINE ])NEWLINENEWLINE @propertyNEWLINE def color_depth(self):NEWLINE """NEWLINE Active :class:`.ColorDepth`.NEWLINE """NEWLINE depth = self._color_depthNEWLINENEWLINE if callable(depth):NEWLINE depth = depth()NEWLINENEWLINE if depth is None:NEWLINE depth = ColorDepth.default()NEWLINENEWLINE return depthNEWLINENEWLINE @propertyNEWLINE def current_buffer(self):NEWLINE """NEWLINE The currently focused :class:`~.Buffer`.NEWLINENEWLINE (This returns a dummy :class:`.Buffer` when none of the actual buffersNEWLINE has the focus. In this case, it's really not practical to check forNEWLINE `None` values or catch exceptions every time.)NEWLINE """NEWLINE return self.layout.current_buffer or Buffer(name='dummy-buffer') # Dummy buffer.NEWLINENEWLINE @propertyNEWLINE def current_search_state(self):NEWLINE """NEWLINE Return the current :class:`.SearchState`. 
(The one for the focusedNEWLINE :class:`.BufferControl`.)NEWLINE """NEWLINE ui_control = self.layout.current_controlNEWLINE if isinstance(ui_control, BufferControl):NEWLINE return ui_control.search_stateNEWLINE else:NEWLINE return SearchState() # Dummy search state. (Don't return None!)NEWLINENEWLINE def reset(self):NEWLINE """NEWLINE Reset everything, for reading the next input.NEWLINE """NEWLINE # Notice that we don't reset the buffers. (This happens just beforeNEWLINE # returning, and when we have multiple buffers, we clearly want theNEWLINE # content in the other buffers to remain unchanged between severalNEWLINE # calls of `run`. (And the same is true for the focus stack.)NEWLINENEWLINE self.exit_style = ''NEWLINENEWLINE self.renderer.reset()NEWLINE self.key_processor.reset()NEWLINE self.layout.reset()NEWLINE self.vi_state.reset()NEWLINE self.emacs_state.reset()NEWLINENEWLINE # Trigger reset event.NEWLINE self.on_reset.fire()NEWLINENEWLINE # Make sure that we have a 'focusable' widget focused.NEWLINE # (The `Layout` class can't determine this.)NEWLINE layout = self.layoutNEWLINENEWLINE if not layout.current_control.is_focusable():NEWLINE for w in layout.find_all_windows():NEWLINE if w.content.is_focusable():NEWLINE layout.current_window = wNEWLINE breakNEWLINENEWLINE def invalidate(self):NEWLINE """NEWLINE Thread safe way of sending a repaint trigger to the input event loop.NEWLINE """NEWLINE # Never schedule a second redraw, when a previous one has not yet beenNEWLINE # executed. 
(This should protect against other threads callingNEWLINE # 'invalidate' many times, resulting in 100% CPU.)NEWLINE if self._invalidated:NEWLINE returnNEWLINE else:NEWLINE self._invalidated = TrueNEWLINENEWLINE # Trigger event.NEWLINE self.on_invalidate.fire()NEWLINENEWLINE def redraw():NEWLINE self._invalidated = FalseNEWLINE self._redraw()NEWLINENEWLINE def schedule_redraw():NEWLINE # Call redraw in the eventloop (thread safe).NEWLINE # Usually with the high priority, in order to make the applicationNEWLINE # feel responsive, but this can be tuned by changing the value ofNEWLINE # `max_render_postpone_time`.NEWLINE if self.max_render_postpone_time:NEWLINE _max_postpone_until = time.time() + self.max_render_postpone_timeNEWLINE else:NEWLINE _max_postpone_until = NoneNEWLINENEWLINE call_from_executor(NEWLINE redraw, _max_postpone_until=_max_postpone_until)NEWLINENEWLINE if self.min_redraw_interval:NEWLINE # When a minimum redraw interval is set, wait minimum this amountNEWLINE # of time between redraws.NEWLINE diff = time.time() - self._last_redraw_timeNEWLINE if diff < self.min_redraw_interval:NEWLINE def redraw_in_future():NEWLINE time.sleep(self.min_redraw_interval - diff)NEWLINE schedule_redraw()NEWLINE run_in_executor(redraw_in_future)NEWLINE else:NEWLINE schedule_redraw()NEWLINE else:NEWLINE schedule_redraw()NEWLINENEWLINE @propertyNEWLINE def invalidated(self):NEWLINE " True when a redraw operation has been scheduled. "NEWLINE return self._invalidatedNEWLINENEWLINE def _redraw(self, render_as_done=False):NEWLINE """NEWLINE Render the command line again. (Not thread safe!) 
(From other threads,NEWLINE or if unsure, use :meth:`.Application.invalidate`.)NEWLINENEWLINE :param render_as_done: make sure to put the cursor after the UI.NEWLINE """NEWLINE # Only draw when no sub application was started.NEWLINE if self._is_running and not self._running_in_terminal:NEWLINE if self.min_redraw_interval:NEWLINE self._last_redraw_time = time.time()NEWLINENEWLINE # Clear the 'rendered_ui_controls' list. (The `Window` class willNEWLINE # populate this during the next rendering.)NEWLINE self.rendered_user_controls = []NEWLINENEWLINE # RenderNEWLINE self.render_counter += 1NEWLINE self.before_render.fire()NEWLINENEWLINE # NOTE: We want to make sure this Application is the active one, ifNEWLINE # we have a situation with multiple concurrent running apps.NEWLINE # We had the case with pymux where `invalidate()` was calledNEWLINE # at the point where another Application was active. ThisNEWLINE # would cause prompt_toolkit to render the wrong applicationNEWLINE # to this output device.NEWLINE with set_app(self):NEWLINE if render_as_done:NEWLINE if self.erase_when_done:NEWLINE self.renderer.erase()NEWLINE else:NEWLINE # Draw in 'done' state and reset renderer.NEWLINE self.renderer.render(self, self.layout, is_done=render_as_done)NEWLINE else:NEWLINE self.renderer.render(self, self.layout)NEWLINENEWLINE self.layout.update_parents_relations()NEWLINENEWLINE # Fire render event.NEWLINE self.after_render.fire()NEWLINENEWLINE self._update_invalidate_events()NEWLINENEWLINE def _update_invalidate_events(self):NEWLINE """NEWLINE Make sure to attach 'invalidate' handlers to all invalidate events inNEWLINE the UI.NEWLINE """NEWLINE # Remove all the original event handlers. 
(Components can be removedNEWLINE # from the UI.)NEWLINE for ev in self._invalidate_events:NEWLINE ev -= self._invalidate_handlerNEWLINENEWLINE # Gather all new events.NEWLINE # (All controls are able to invalidate themselves.)NEWLINE def gather_events():NEWLINE for c in self.layout.find_all_controls():NEWLINE for ev in c.get_invalidate_events():NEWLINE yield evNEWLINENEWLINE self._invalidate_events = list(gather_events())NEWLINENEWLINE for ev in self._invalidate_events:NEWLINE ev += self._invalidate_handlerNEWLINENEWLINE def _invalidate_handler(self, sender):NEWLINE """NEWLINE Handler for invalidate events coming from UIControls.NEWLINENEWLINE (This handles the difference in signature between event handler andNEWLINE `self.invalidate`. It also needs to be a method -not a nestedNEWLINE function-, so that we can remove it again .)NEWLINE """NEWLINE self.invalidate()NEWLINENEWLINE def _on_resize(self):NEWLINE """NEWLINE When the window size changes, we erase the current output and requestNEWLINE again the cursor position. When the CPR answer arrives, the output isNEWLINE drawn again.NEWLINE """NEWLINE # Erase, request position (when cursor is at the start position)NEWLINE # and redraw again. -- The order is important.NEWLINE self.renderer.erase(leave_alternate_screen=False)NEWLINE self._request_absolute_cursor_position()NEWLINE self._redraw()NEWLINENEWLINE def _pre_run(self, pre_run=None):NEWLINE " Called during `run`. "NEWLINE if pre_run:NEWLINE pre_run()NEWLINENEWLINE # Process registered "pre_run_callables" and clear list.NEWLINE for c in self.pre_run_callables:NEWLINE c()NEWLINE del self.pre_run_callables[:]NEWLINENEWLINE def run_async(self, pre_run=None):NEWLINE """NEWLINE Run asynchronous. Return a prompt_toolkitNEWLINE :class:`~prompt_toolkit.eventloop.Future` object.NEWLINENEWLINE If you wish to run on top of asyncio, remember that a prompt_toolkitNEWLINE `Future` needs to be converted to an asyncio `Future`. 
The cleanest wayNEWLINE is to call :meth:`~prompt_toolkit.eventloop.Future.to_asyncio_future`.NEWLINE Also make sure to tell prompt_toolkit to use the asyncio event loop.NEWLINENEWLINE .. code:: pythonNEWLINENEWLINE from prompt_toolkit.eventloop import use_asyncio_event_loopNEWLINE from asyncio import get_event_loopNEWLINENEWLINE use_asyncio_event_loop()NEWLINE get_event_loop().run_until_complete(NEWLINE application.run_async().to_asyncio_future())NEWLINENEWLINE """NEWLINE assert not self._is_running, 'Application is already running.'NEWLINENEWLINE def _run_async():NEWLINE " Coroutine. "NEWLINE loop = get_event_loop()NEWLINE f = loop.create_future()NEWLINE self.future = f # XXX: make sure to set this before calling '_redraw'.NEWLINENEWLINE # Counter for cancelling 'flush' timeouts. Every time when a key isNEWLINE # pressed, we start a 'flush' timer for flushing our escape key. ButNEWLINE # when any subsequent input is received, a new timer is started andNEWLINE # the current timer will be ignored.NEWLINE flush_counter = [0] # Non local.NEWLINENEWLINE # Reset.NEWLINE self.reset()NEWLINE self._pre_run(pre_run)NEWLINENEWLINE # Feed type ahead input first.NEWLINE self.key_processor.feed_multiple(get_typeahead(self.input))NEWLINE self.key_processor.process_keys()NEWLINENEWLINE def read_from_input():NEWLINE # Ignore when we aren't running anymore. This callback willNEWLINE # removed from the loop next time. 
(It could be that it wasNEWLINE # still in the 'tasks' list of the loop.)NEWLINE # Except: if we need to process incoming CPRs.NEWLINE if not self._is_running and not self.renderer.waiting_for_cpr:NEWLINE returnNEWLINENEWLINE # Get keys from the input object.NEWLINE keys = self.input.read_keys()NEWLINENEWLINE # Feed to key processor.NEWLINE self.key_processor.feed_multiple(keys)NEWLINE self.key_processor.process_keys()NEWLINENEWLINE # Quit when the input stream was closed.NEWLINE if self.input.closed:NEWLINE f.set_exception(EOFError)NEWLINE else:NEWLINE # Increase this flush counter.NEWLINE flush_counter[0] += 1NEWLINE counter = flush_counter[0]NEWLINENEWLINE # Automatically flush keys.NEWLINE # (_daemon needs to be set, otherwise, this will hang theNEWLINE # application for .5 seconds before exiting.)NEWLINE run_in_executor(NEWLINE lambda: auto_flush_input(counter), _daemon=True)NEWLINENEWLINE def auto_flush_input(counter):NEWLINE # Flush input after timeout.NEWLINE # (Used for flushing the enter key.)NEWLINE time.sleep(self.ttimeoutlen)NEWLINENEWLINE if flush_counter[0] == counter:NEWLINE call_from_executor(flush_input)NEWLINENEWLINE def flush_input():NEWLINE if not self.is_done:NEWLINE # Get keys, and feed to key processor.NEWLINE keys = self.input.flush_keys()NEWLINE self.key_processor.feed_multiple(keys)NEWLINE self.key_processor.process_keys()NEWLINENEWLINE if self.input.closed:NEWLINE f.set_exception(EOFError)NEWLINENEWLINE # Enter raw mode.NEWLINE with self.input.raw_mode():NEWLINE with self.input.attach(read_from_input):NEWLINE # Draw UI.NEWLINE self._request_absolute_cursor_position()NEWLINE self._redraw()NEWLINENEWLINE has_sigwinch = hasattr(signal, 'SIGWINCH') and in_main_thread()NEWLINE if has_sigwinch:NEWLINE previous_winch_handler = loop.add_signal_handler(NEWLINE signal.SIGWINCH, self._on_resize)NEWLINENEWLINE # Wait for UI to finish.NEWLINE try:NEWLINE result = yield From(f)NEWLINE finally:NEWLINE # In any case, when the application finishes. 
(Successful,NEWLINE # or because of an error.)NEWLINE try:NEWLINE self._redraw(render_as_done=True)NEWLINE finally:NEWLINE # _redraw has a good chance to fail if it calls widgetsNEWLINE # with bad code. Make sure to reset the renderer anyway.NEWLINE self.renderer.reset()NEWLINENEWLINE # Unset `is_running`, this ensures that possiblyNEWLINE # scheduled draws won't paint during the followingNEWLINE # yield.NEWLINE self._is_running = FalseNEWLINENEWLINE # Detach event handlers for invalidate events.NEWLINE # (Important when a UIControl is embedded inNEWLINE # multiple applications, like ptterm in pymux. AnNEWLINE # invalidate should not trigger a repaint inNEWLINE # terminated applications.)NEWLINE for ev in self._invalidate_events:NEWLINE ev -= self._invalidate_handlerNEWLINE self._invalidate_events = []NEWLINENEWLINE # Wait for CPR responses.NEWLINE if self.input.responds_to_cpr:NEWLINE yield From(self.renderer.wait_for_cpr_responses())NEWLINENEWLINE if has_sigwinch:NEWLINE loop.add_signal_handler(signal.SIGWINCH, previous_winch_handler)NEWLINENEWLINE # Wait for the run-in-terminals to terminate.NEWLINE previous_run_in_terminal_f = self._running_in_terminal_fNEWLINENEWLINE if previous_run_in_terminal_f:NEWLINE yield From(previous_run_in_terminal_f)NEWLINENEWLINE # Store unprocessed input as typeahead for next time.NEWLINE store_typeahead(self.input, self.key_processor.empty_queue())NEWLINENEWLINE raise Return(result)NEWLINENEWLINE def _run_async2():NEWLINE self._is_running = TrueNEWLINE with set_app(self):NEWLINE try:NEWLINE f = From(_run_async())NEWLINE result = yield fNEWLINE finally:NEWLINE # Set the `_is_running` flag to `False`. 
Normally thisNEWLINE # happened already in the finally block in `run_async`NEWLINE # above, but in case of exceptions, that's not always theNEWLINE # case.NEWLINE self._is_running = FalseNEWLINE raise Return(result)NEWLINENEWLINE return ensure_future(_run_async2())NEWLINENEWLINE def run(self, pre_run=None, set_exception_handler=True, inputhook=None):NEWLINE """NEWLINE A blocking 'run' call that waits until the UI is finished.NEWLINENEWLINE :param set_exception_handler: When set, in case of an exception, go outNEWLINE of the alternate screen and hide the application, display theNEWLINE exception, and wait for the user to press ENTER.NEWLINE :param inputhook: None or a callable that takes an `InputHookContext`.NEWLINE """NEWLINE loop = get_event_loop()NEWLINENEWLINE def run():NEWLINE f = self.run_async(pre_run=pre_run)NEWLINE run_until_complete(f, inputhook=inputhook)NEWLINE return f.result()NEWLINENEWLINE def handle_exception(context):NEWLINE " Print the exception, using run_in_terminal. "NEWLINE # For Python 2: we have to get traceback at this point, becauseNEWLINE # we're still in the 'except:' block of the event loop where theNEWLINE # traceback is still available. Moving this code in theNEWLINE # 'print_exception' coroutine will loose the exception.NEWLINE tb = get_traceback_from_context(context)NEWLINE formatted_tb = ''.join(format_tb(tb))NEWLINENEWLINE def print_exception():NEWLINE # Print output. Similar to 'loop.default_exception_handler',NEWLINE # but don't use logger. 
(This works better on Python 2.)NEWLINE print('\nUnhandled exception in event loop:')NEWLINE print(formatted_tb)NEWLINE print('Exception %s' % (context.get('exception'), ))NEWLINENEWLINE yield From(_do_wait_for_enter('Press ENTER to continue...'))NEWLINE run_coroutine_in_terminal(print_exception)NEWLINENEWLINE if set_exception_handler:NEWLINE # Run with patched exception handler.NEWLINE previous_exc_handler = loop.get_exception_handler()NEWLINE loop.set_exception_handler(handle_exception)NEWLINE try:NEWLINE return run()NEWLINE finally:NEWLINE loop.set_exception_handler(previous_exc_handler)NEWLINE else:NEWLINE run()NEWLINENEWLINE def cpr_not_supported_callback(self):NEWLINE """NEWLINE Called when we don't receive the cursor position response in time.NEWLINE """NEWLINE if not self.input.responds_to_cpr:NEWLINE return # We know about this already.NEWLINENEWLINE def in_terminal():NEWLINE self.output.write(NEWLINE "WARNING: your terminal doesn't support cursor position requests (CPR).\r\n")NEWLINE self.output.flush()NEWLINE run_in_terminal(in_terminal)NEWLINENEWLINE def exit(self, result=None, exception=None, style=''):NEWLINE """NEWLINE Exit application.NEWLINENEWLINE :param result: Set this result for the application.NEWLINE :param exception: Set this exception as the result for an application. ForNEWLINE a prompt, this is often `EOFError` or `KeyboardInterrupt`.NEWLINE :param style: Apply this style on the whole content when quitting,NEWLINE often this is 'class:exiting' for a prompt. (Used whenNEWLINE `erase_when_done` is not set.)NEWLINE """NEWLINE assert result is None or exception is NoneNEWLINENEWLINE if self.future is None:NEWLINE raise Exception(NEWLINE 'Application is not running. Application.exit() failed.')NEWLINENEWLINE if self.future.done():NEWLINE raise Exception(NEWLINE 'Return value already set. 
Application.exit() failed.')NEWLINENEWLINE self.exit_style = styleNEWLINENEWLINE if exception is not None:NEWLINE self.future.set_exception(exception)NEWLINE else:NEWLINE self.future.set_result(result)NEWLINENEWLINE def _request_absolute_cursor_position(self):NEWLINE """NEWLINE Send CPR request.NEWLINE """NEWLINE # Note: only do this if the input queue is not empty, and a returnNEWLINE # value has not been set. Otherwise, we won't be able to read theNEWLINE # response anyway.NEWLINE if not self.key_processor.input_queue and not self.is_done:NEWLINE self.renderer.request_absolute_cursor_position()NEWLINENEWLINE def run_system_command(self, command, wait_for_enter=True,NEWLINE display_before_text='',NEWLINE wait_text='Press ENTER to continue...'):NEWLINE """NEWLINE Run system command (While hiding the prompt. When finished, all theNEWLINE output will scroll above the prompt.)NEWLINENEWLINE :param command: Shell command to be executed.NEWLINE :param wait_for_enter: FWait for the user to press enter, when theNEWLINE command is finished.NEWLINE :param display_before_text: If given, text to be displayed before theNEWLINE command executes.NEWLINE :return: A `Future` object.NEWLINE """NEWLINE assert isinstance(wait_for_enter, bool)NEWLINENEWLINE def run():NEWLINE # Try to use the same input/output file descriptors as the one,NEWLINE # used to run this application.NEWLINE try:NEWLINE input_fd = self.input.fileno()NEWLINE except AttributeError:NEWLINE input_fd = sys.stdin.fileno()NEWLINE try:NEWLINE output_fd = self.output.fileno()NEWLINE except AttributeError:NEWLINE output_fd = sys.stdout.fileno()NEWLINENEWLINE # Run sub process.NEWLINE def run_command():NEWLINE self.print_text(display_before_text)NEWLINE p = Popen(command, shell=True,NEWLINE stdin=input_fd, stdout=output_fd)NEWLINE p.wait()NEWLINE yield run_in_executor(run_command)NEWLINENEWLINE # Wait for the user to press enter.NEWLINE if wait_for_enter:NEWLINE yield From(_do_wait_for_enter(wait_text))NEWLINENEWLINE 
return run_coroutine_in_terminal(run)NEWLINENEWLINE def suspend_to_background(self, suspend_group=True):NEWLINE """NEWLINE (Not thread safe -- to be called from inside the key bindings.)NEWLINE Suspend process.NEWLINENEWLINE :param suspend_group: When true, suspend the whole process group.NEWLINE (This is the default, and probably what you want.)NEWLINE """NEWLINE # Only suspend when the operating system supports it.NEWLINE # (Not on Windows.)NEWLINE if hasattr(signal, 'SIGTSTP'):NEWLINE def run():NEWLINE # Send `SIGSTP` to own process.NEWLINE # This will cause it to suspend.NEWLINENEWLINE # Usually we want the whole process group to be suspended. ThisNEWLINE # handles the case when input is piped from another process.NEWLINE if suspend_group:NEWLINE os.kill(0, signal.SIGTSTP)NEWLINE else:NEWLINE os.kill(os.getpid(), signal.SIGTSTP)NEWLINENEWLINE run_in_terminal(run)NEWLINENEWLINE def print_text(self, text, style=None):NEWLINE """NEWLINE Print a list of (style_str, text) tuples to the output.NEWLINE (When the UI is running, this method has to be called throughNEWLINE `run_in_terminal`, otherwise it will destroy the UI.)NEWLINENEWLINE :param text: List of ``(style_str, text)`` tuples.NEWLINE :param style: Style class to use. Defaults to the active style in the CLI.NEWLINE """NEWLINE print_formatted_text(NEWLINE output=self.output,NEWLINE formatted_text=text,NEWLINE style=style or self._merged_style,NEWLINE color_depth=self.color_depth,NEWLINE style_transformation=self.style_transformation)NEWLINENEWLINE @propertyNEWLINE def is_running(self):NEWLINE " `True` when the application is currently active/running. "NEWLINE return self._is_runningNEWLINENEWLINE @propertyNEWLINE def is_done(self):NEWLINE return self.future and self.future.done()NEWLINENEWLINE def get_used_style_strings(self):NEWLINE """NEWLINE Return a list of used style strings. 
This is helpful for debugging, andNEWLINE for writing a new `Style`.NEWLINE """NEWLINE return sorted([NEWLINE re.sub(r'\s+', ' ', style_str).strip()NEWLINE for style_str in self.renderer._attrs_for_style.keys()])NEWLINENEWLINENEWLINEclass _CombinedRegistry(KeyBindingsBase):NEWLINE """NEWLINE The `KeyBindings` of key bindings for a `Application`.NEWLINE This merges the global key bindings with the one of the current userNEWLINE control.NEWLINE """NEWLINE def __init__(self, app):NEWLINE self.app = appNEWLINE self._cache = SimpleCache()NEWLINENEWLINE @propertyNEWLINE def _version(self):NEWLINE """ Not needed - this object is not going to be wrapped in anotherNEWLINE KeyBindings object. """NEWLINE raise NotImplementedErrorNEWLINENEWLINE def _create_key_bindings(self, current_window, other_controls):NEWLINE """NEWLINE Create a `KeyBindings` object that merges the `KeyBindings` from theNEWLINE `UIControl` with all the parent controls and the global key bindings.NEWLINE """NEWLINE key_bindings = []NEWLINE collected_containers = set()NEWLINENEWLINE # Collect key bindings from currently focused control and all parentNEWLINE # controls. 
Don't include key bindings of container parent controls.NEWLINE container = current_windowNEWLINE while True:NEWLINE collected_containers.add(container)NEWLINE kb = container.get_key_bindings()NEWLINE if kb is not None:NEWLINE key_bindings.append(kb)NEWLINENEWLINE if container.is_modal():NEWLINE breakNEWLINENEWLINE parent = self.app.layout.get_parent(container)NEWLINE if parent is None:NEWLINE breakNEWLINE else:NEWLINE container = parentNEWLINENEWLINE # Include global bindings (starting at the top-model container).NEWLINE for c in walk(container):NEWLINE if c not in collected_containers:NEWLINE kb = c.get_key_bindings()NEWLINE if kb is not None:NEWLINE key_bindings.append(GlobalOnlyKeyBindings(kb))NEWLINENEWLINE # Add App key bindingsNEWLINE if self.app.key_bindings:NEWLINE key_bindings.append(self.app.key_bindings)NEWLINENEWLINE # Add mouse bindings.NEWLINE key_bindings.append(ConditionalKeyBindings(NEWLINE self.app._page_navigation_bindings,NEWLINE self.app.enable_page_navigation_bindings))NEWLINE key_bindings.append(self.app._default_bindings)NEWLINENEWLINE # Reverse this list. The current control's key bindings should comeNEWLINE # last. 
They need priority.NEWLINE key_bindings = key_bindings[::-1]NEWLINENEWLINE return merge_key_bindings(key_bindings)NEWLINENEWLINE @propertyNEWLINE def _key_bindings(self):NEWLINE current_window = self.app.layout.current_windowNEWLINE other_controls = list(self.app.layout.find_all_controls())NEWLINE key = current_window, frozenset(other_controls)NEWLINENEWLINE return self._cache.get(NEWLINE key, lambda: self._create_key_bindings(current_window, other_controls))NEWLINENEWLINE def get_bindings_for_keys(self, keys):NEWLINE return self._key_bindings.get_bindings_for_keys(keys)NEWLINENEWLINE def get_bindings_starting_with_keys(self, keys):NEWLINE return self._key_bindings.get_bindings_starting_with_keys(keys)NEWLINENEWLINENEWLINEdef _do_wait_for_enter(wait_text):NEWLINE """NEWLINE Create a sub application to wait for the enter key press.NEWLINE This has two advantages over using 'input'/'raw_input':NEWLINE - This will share the same input/output I/O.NEWLINE - This doesn't block the event loop.NEWLINE """NEWLINE from prompt_toolkit.shortcuts import PromptSessionNEWLINENEWLINE key_bindings = KeyBindings()NEWLINENEWLINE @key_bindings.add('enter')NEWLINE def _(event):NEWLINE event.app.exit()NEWLINENEWLINE @key_bindings.add(Keys.Any)NEWLINE def _(event):NEWLINE " Disallow typing. "NEWLINE passNEWLINENEWLINE session = PromptSession(NEWLINE message=wait_text,NEWLINE key_bindings=key_bindings)NEWLINE yield From(session.app.run_async())NEWLINE
import csv
import json
import numpy as np
import os
import pandas as pd
import pickle

def get_paths():
    """Read Settings.json from the working directory and return the path map.

    Environment variables embedded in each path (e.g. ``$HOME``) are
    expanded before the dict is returned.
    """
    # 'with' guarantees the settings file handle is closed (the original
    # version leaked it).
    with open("Settings.json") as settings_file:
        paths = json.loads(settings_file.read())
    for key in paths:
        paths[key] = os.path.expandvars(paths[key])
    return paths

def identity(x):
    """Return *x* unchanged; used as a no-op pandas column converter."""
    return x

# For pandas >= 10.1 this will trigger the columns to be parsed as strings
converters = { "FullDescription" : identity
             , "Title": identity
             , "LocationRaw": identity
             , "LocationNormalized": identity
             }

def get_train_df():
    """Load the training CSV (path taken from Settings.json) as a DataFrame."""
    train_path = get_paths()["train_data_path"]
    return pd.read_csv(train_path, converters=converters)

def get_valid_df():
    """Load the validation CSV (path taken from Settings.json) as a DataFrame."""
    valid_path = get_paths()["valid_data_path"]
    return pd.read_csv(valid_path, converters=converters)

def save_model(model):
    """Pickle *model* to the configured model_path.

    Bug fix: pickle streams are bytes, so the file must be opened in
    binary mode ("wb", not "w"); the handle is also closed deterministically.
    """
    out_path = get_paths()["model_path"]
    with open(out_path, "wb") as model_file:
        pickle.dump(model, model_file)

def load_model():
    """Unpickle and return the model stored at the configured model_path.

    Bug fix: opened in binary mode ("rb") to match the pickled byte stream.
    """
    in_path = get_paths()["model_path"]
    with open(in_path, "rb") as model_file:
        return pickle.load(model_file)

def write_submission(predictions):
    """Write an (Id, SalaryNormalized) CSV pairing validation ids with *predictions*.

    *predictions* is expected to expose ``.flatten()`` (e.g. a numpy array).
    """
    prediction_path = get_paths()["prediction_path"]
    valid = get_valid_df()
    # newline="" is the documented way to open files for the csv module;
    # the explicit lineterminator keeps the original "\n"-only row endings.
    with open(prediction_path, "w", newline="") as pred_file:
        writer = csv.writer(pred_file, lineterminator="\n")
        writer.writerow(("Id", "SalaryNormalized"))
        writer.writerows(zip(valid["Id"], predictions.flatten()))
#!/usr/bin/env python2
"""Build a look-ahead vector field over a 2D map from a recorded path.

Loads path points via path_parser, scales them into a map_size_x x map_size_y
(cm) grid, and for every grid cell stores the (dx, dy) offset toward a
look-ahead point on the path (saved to 'TrayA1.npy') plus the distance to the
nearest path point (plotted as a 3D surface).

NOTE(review): Python 2 only (print statements). np.int was removed in
NumPy 1.24, and float-valued shapes in np.zeros are rejected by modern
NumPy — this script needs a legacy Python 2 / old-NumPy stack to run.
"""
import numpy as np
import path_parser
from mpl_toolkits.mplot3d import Axes3D
import matplotlib.pyplot as plt
from matplotlib import cm
from matplotlib.ticker import LinearLocator, FormatStrFormatter
from scipy.spatial import KDTree
ruta='sample_map_origin_map_1.txt'  # input path file ("ruta" = route)
#ruta='Trayectoria2.txt'

map_size_x=250.0 #cm
map_size_y=250.0 #cm
resolution = 1.0 #cm
# Per-cell (dx, dy) look-ahead vectors; filled by near()/show_nearest().
matrix = np.zeros( (map_size_x/resolution,map_size_y/resolution,2),dtype='f' )
# Per-cell distance to the nearest path point; filled by near().
matrix_dist = np.zeros( (map_size_x/resolution,map_size_y/resolution),dtype='f' )

def show_nearest(target,tree,xy):
    """Plot the nearest path point and look-ahead arrow for *target*.

    Also writes the (dx, dy) look-ahead vector into the global `matrix`
    at index (x*10, y*10). Apparently unused: the only call site in
    main() is commented out.
    """
    dist, index = tree.query(target) # nearest path point to *target*
    # NOTE(review): `global` here makes lookahead_offset a module-level
    # variable, but nothing else in this file reads it — likely leftover.
    global lookahead_offset
    # Look farther ahead when close to the path, less when far from it.
    lookahead_offset = np.int(2 + (5/(5*dist+1)))
    lookahead_target = xy[(index + lookahead_offset) % len(xy)]

    x1, y1 = target
    x3, y3 = lookahead_target

    plt.scatter(*target, color='r')
    plt.scatter(*xy[index], color='g')
    ax = plt.axes()
    ax.arrow(x1, y1, (x3-x1)/5, (y3-y1)/5 , head_width=0.01, head_length=0.01, fc='k', ec='k')
    plt.scatter(*lookahead_target, color='m')
    plt.show(block=False)
    global matrix
    # Map world coordinates to grid indices (assumes 0.1-unit cells — TODO confirm).
    x_index=np.int(x1*10)
    y_index=np.int(y1*10)

    matrix[x_index,y_index,0]=x3-x1
    matrix[x_index,y_index,1]=y3-y1

def near(initial_position,xind,yind,tree,xy,ax):
    """Fill matrix[xind, yind] with the look-ahead vector for one grid cell.

    Also records the distance from *initial_position* to the nearest path
    point in matrix_dist[xind, yind]. The plotting calls are kept but
    commented out for speed.
    """
    dist, index = tree.query(initial_position)
    global matrix_dist
    matrix_dist[xind,yind]=dist
    # Find the path point to steer toward (closer to the path => look farther ahead).
    lookahead_offset = np.int(1 + (5/(5*dist+1)))
    lookahead_target = xy[(index + lookahead_offset) % len(xy)]
    x1, y1 = initial_position
    x3, y3 = lookahead_target
    #print x1,y1,x3,y3
    #plt.scatter(*initial_position, color='r')
    #plt.scatter(*xy[index], color='g')
    #ax.arrow(x1, y1, (x3-x1), (y3-y1) , head_width=0.01, head_length=0.01, fc='k', ec='k')
    #plt.scatter(*lookahead_target, color='m')
    x_index=xind
    y_index=yind
    matrix[x_index,y_index,0]=x3-x1 # x distance to the look-ahead point
    matrix[x_index,y_index,1]=y3-y1 # y distance to the look-ahead point
def main():
    """Load the path, scale it into the map, sweep every grid cell, save results."""
    arr_in=np.array(list(path_parser.read_points(ruta)))
    ax,ay=arr_in.T
    min_x=np.min(ax)
    min_y=np.min(ay)
    max_x=np.max(ax)
    max_y=np.max(ay)
    print 'Minimo en x',min_x
    print 'Minimo en y',min_y
    print 'Maximo en x',max_x
    print 'Maximo en y',max_y
    # Shift the path into positive coordinates if it crosses an axis.
    # NOTE(review): when both minima are negative this picks the larger
    # (less negative) of the two as the offset — confirm that is intended.
    if (min_x>0 and min_y>0):
        offsetg=0
    else:
        if min_x>min_y:
            offsetg=min_x
        else:
            offsetg=min_y
    # Scale the path to 70% of the map (map sizes are in cm, /100 -> meters),
    # then shift by 0.30 in each axis to keep a border.
    scale_x=0.7*(map_size_x/100)/(max_x-offsetg)
    scale_y=0.7*(map_size_y/100)/(max_y-offsetg)
    scale=np.array([scale_x,scale_y])
    xy = np.multiply(scale,arr_in)+np.array([0.30,0.30])
    x,y = xy.T
    fig = plt.figure(figsize=(7,7), facecolor='w')
    fig.canvas.set_window_title('Trayectoria')
    plt.plot(x,y)
    tree = KDTree(xy)
    plt.plot(x, y, ':o', markersize=4)
    plt.tight_layout()# Adjust subplot titles so they do not spill outside the figure.

    print('please wait ...')
    X=np.arange(0,map_size_x/100,resolution/100)
    Y=np.arange(0,map_size_y/100,resolution/100)
    X,Y=np.meshgrid(X,Y)
    lim_x=int(map_size_x/resolution);
    lim_y=int(map_size_y/resolution);
    print lim_x,lim_y
    fig = plt.figure(figsize=(7,7), facecolor='w')
    fig.canvas.set_window_title('Puntos de prueba')
    ax = plt.axes()
    # Sweep every grid cell; prints progress as a percentage per column.
    for xi in range(0, lim_x):
        print float(xi)/lim_x*100
        for yi in range(0, lim_y):
            #show_nearest((x,y))
            near((xi*resolution/100,yi*resolution/100),xi,yi,tree,xy,ax)
    Z=matrix_dist;
    fig = plt.figure(figsize=(7,7), facecolor='w')
    ax = fig.gca(projection='3d')
    fig.canvas.set_window_title('Distancias')
    surf = ax.plot_surface(X, Y, Z, cmap=cm.coolwarm,linewidth=0, antialiased=False)
    plt.show()
    np.save('TrayA1.npy', matrix)
    print('matrixForce is saved.')

if __name__ == '__main__':
    main()
    cadena='Hola'  # NOTE(review): dead assignment after main(); likely leftover debug.
# Generated by Django 3.2.5 on 2021-07-18 23:06

from django.db import migrations, models


class Migration(migrations.Migration):
    """Adjust integer column types in the ``login`` app.

    * ``config.level`` becomes a plain small integer.
    * ``user.age`` becomes a non-negative small integer that may be left
      blank in forms.
    """

    dependencies = [
        ("login", "0001_initial"),
    ]

    operations = [
        migrations.AlterField(
            model_name="config",
            name="level",
            field=models.SmallIntegerField(),
        ),
        migrations.AlterField(
            model_name="user",
            name="age",
            field=models.PositiveSmallIntegerField(blank=True),
        ),
    ]
import numpy as np
import unittest
import sys
sys.path.append("tests/python")
# Don't import the test class, otherwise they will run twice.
import test_interaction_constraints as test_ic
rng = np.random.RandomState(1994)


class TestGPUInteractionConstraints(unittest.TestCase):
    """Re-run the CPU interaction-constraint suite with the GPU tree method."""

    # A single shared instance of the CPU suite; each test below delegates
    # to it, overriding only the tree method.
    cputest = test_ic.TestInteractionConstraints()

    def _delegate(self, cpu_case):
        """Run one CPU test case with tree_method forced to 'gpu_hist'."""
        cpu_case(tree_method='gpu_hist')

    def test_interaction_constraints(self):
        self._delegate(self.cputest.test_interaction_constraints)

    def test_training_accuracy(self):
        self._delegate(self.cputest.test_training_accuracy)
import open3d as o3d


FilterScope=o3d.geometry.FilterScope

def removePCDOutlier(pcd,voxel_size=0.001,nb_points=32,radius=0.004):
    '''
    Try to remove outlier points from a point cloud.

    The cloud is first voxel-down-sampled, then points with fewer than
    ``nb_points`` neighbours inside ``radius`` are dropped.  Returns the
    inlier cloud and the indices of the points that survived.
    '''
    downsampled = pcd.voxel_down_sample(voxel_size=voxel_size)
    inliers, kept_indices = downsampled.remove_radius_outlier(
        nb_points=nb_points, radius=radius)
    return inliers, kept_indices

def smoothMeshSimple(mesh,iterTimes=1):
    """Uniform-average smoothing applied to mesh vertices only."""
    return mesh.filter_smooth_simple(
        number_of_iterations=iterTimes,
        filter_scope=FilterScope.Vertex,
    )

def smoothMeshLaplacian(mesh,iterTimes=10,nLambda=0.85):
    """Laplacian vertex smoothing.

    'lambda' is a Python keyword, so it is passed through a dict.
    """
    extra = {"lambda": nLambda}
    return mesh.filter_smooth_laplacian(
        number_of_iterations=iterTimes,
        filter_scope=FilterScope.Vertex,
        **extra,
    )

def smoothMeshTaubin(mesh,iterTimes=30,nLambda=0.85,nMu=-0.25):
    """Taubin vertex smoothing ('lambda' keyword passed through a dict)."""
    extra = {"lambda": nLambda, "mu": nMu}
    return mesh.filter_smooth_taubin(
        number_of_iterations=iterTimes,
        filter_scope=FilterScope.Vertex,
        **extra,
    )

def postProcessMesh(mesh,smoothFunc,*args,**kw):
    """Clean up mesh topology, sharpen colors, smooth, and recompute normals.

    ``smoothFunc`` is one of the smoothMesh* helpers above; extra
    positional/keyword arguments are forwarded to it.
    """
    # Topology cleanup: each step returns a new mesh, applied in the
    # same order as before.
    for cleanup in (
        "remove_non_manifold_edges",
        "remove_degenerate_triangles",
        "remove_duplicated_triangles",
        "remove_unreferenced_vertices",
        "remove_duplicated_vertices",
    ):
        mesh = getattr(mesh, cleanup)()
    # Sharpen only the colors, then copy them back onto the cleaned mesh.
    sharpened = mesh.filter_sharpen(
        number_of_iterations=1, strength=0.05, filter_scope=FilterScope.Color)
    mesh.vertex_colors = sharpened.vertex_colors
    mesh = smoothFunc(mesh, *args, **kw)
    mesh.compute_vertex_normals()
    return mesh
import timeNEWLINEimport jsonNEWLINEimport datetimeNEWLINEimport reNEWLINEimport randomNEWLINEimport csvNEWLINEimport osNEWLINEimport subprocessNEWLINEimport requestsNEWLINENEWLINEfrom django.shortcuts import render, get_object_or_404NEWLINEfrom django.db.models import Count, Q, SumNEWLINEfrom django.http import HttpResponse, JsonResponseNEWLINEfrom django.core.cache import cacheNEWLINEfrom django.core.mail import send_mailNEWLINEfrom django.conf import settings as conf_settingsNEWLINEfrom requests.sessions import default_headersNEWLINENEWLINENEWLINEfrom apps.data.models import (NEWLINE Dataset,NEWLINE DATA_MAPPING,NEWLINE DatasetOrganization,NEWLINE Taxon,NEWLINE #RawDataOccurrence,NEWLINE #SimpleData,NEWLINE)NEWLINEfrom apps.data.helpers.mod_search import (NEWLINE OccurrenceSearch,NEWLINE DatasetSearch,NEWLINE PublisherSearch,NEWLINE SpeciesSearch,NEWLINE filter_occurrence,NEWLINE)NEWLINENEWLINEfrom utils.decorators import json_retNEWLINEfrom utils.general import get_cache_or_setNEWLINEfrom utils.solr_query import (NEWLINE SolrQuery,NEWLINE get_init_menu,NEWLINE)NEWLINEfrom utils.map_data import convert_grid_to_coor, get_geojson, convert_x_coor_to_grid, convert_y_coor_to_gridNEWLINENEWLINENEWLINEfrom .cached import COUNTRY_ROWS, YEAR_ROWSNEWLINENEWLINEfrom conf.settings import ENVNEWLINENEWLINE#----------------- defaul map geojson -----------------#NEWLINEdefault_solr = SolrQuery('taibif_occurrence')NEWLINEdefault_solr_url = default_solr.generate_solr_url()NEWLINEdefault_map_geojson = get_geojson(default_solr_url)NEWLINEcache.set('default_map_geojson', default_map_geojson, 2592000)NEWLINENEWLINEreq = default_solr.request()NEWLINEresp = default_solr.get_response()NEWLINEcache.set('default_solr_count', resp['count'] if resp else 0, 2592000)NEWLINENEWLINE#----------------- defaul map geojson -----------------#NEWLINENEWLINENEWLINENEWLINEdef search_occurrence_v1_charts(request):NEWLINE year_start = 1000NEWLINE year_end = 2021NEWLINE lat_query, lng_query = '', 
''NEWLINENEWLINE solr_q_fq_list=[]NEWLINE solr_fq = ''NEWLINE solr_q_list = []NEWLINE solr_q = '*:*'NEWLINE # print (list(request.GET.lists()))NEWLINE for term, values in list(request.GET.lists()):NEWLINE if term !='q' :NEWLINE if term != 'menu':NEWLINE if term =='year':NEWLINE val = values[0].replace(",", " TO ")NEWLINE solr_q_fq_list.append('{}:[{}]'.format(term,val))NEWLINE year_start =values[0].split(',',1)NEWLINE year_end =values[0].split(',',2)NEWLINE elif term =='dataset':NEWLINE solr_q_fq_list.append('{}:"{}"'.format('taibif_dataset_name', '" OR "'.join(values)))NEWLINE elif term =='month':NEWLINE solr_q_fq_list.append('{}:{}'.format(term, ' OR '.join(values)))NEWLINE elif term =='country':NEWLINE solr_q_fq_list.append('{}:{}'.format(term, ' OR '.join(values)))NEWLINE elif term =='publisher':NEWLINE solr_q_fq_list.append('{}:{}'.format(term, ' OR '.join(values)))NEWLINE #-----map------#NEWLINE elif term == 'lat':NEWLINE coor_list = [ float(c) for c in values]NEWLINE y1 = convert_y_coor_to_grid(min(coor_list))NEWLINE y2 = convert_y_coor_to_grid(max(coor_list))NEWLINE lat_query = "&fq={!frange l=" + str(y1) + " u=" + str(y2) + "}grid_y"NEWLINE elif term == 'lng':NEWLINE coor_list = [ float(c) for c in values]NEWLINE x1 = convert_x_coor_to_grid(min(coor_list))NEWLINE x2 = convert_x_coor_to_grid(max(coor_list))NEWLINE lng_query = "&fq={!frange l=" + str(x1) + " u=" + str(x2) + "}grid_x"NEWLINENEWLINE else:NEWLINE solr_q_list.append('{}:{}'.format('_text_', ' OR '.join(values)))NEWLINENEWLINENEWLINE if len(solr_q_list) > 0:NEWLINE solr_q = ' AND '.join(solr_q_list)NEWLINENEWLINE if len(solr_q_fq_list) > 0:NEWLINE solr_fq = ' AND '.join(solr_q_fq_list)NEWLINENEWLINE charts_year = []NEWLINE charts_month = []NEWLINE charts_dataset = []NEWLINENEWLINE search_count = 0NEWLINE search_offset = 0NEWLINE search_results = []NEWLINE NEWLINE facet_dataset = 'dataset:{type:terms,field:taibif_dataset_name_zh,limit:-1,mincount:0}'NEWLINE facet_month = 
'month:{type:range,field:month,start:1,end:13,gap:1}'NEWLINE facet_year = 'year:{type:terms,field:year,limit:-1,mincount:0}'NEWLINE facet_json = 'json.facet={'+facet_dataset + ',' +facet_month+ ',' +facet_year+'}'NEWLINENEWLINE url = f'http://solr:8983/solr/taibif_occurrence/select?facet=true&q.op=AND&q={solr_q}&fq={solr_fq}&{facet_json}{lng_query}{lat_query}'NEWLINE r = requests.get(url)NEWLINENEWLINE if r.status_code == 200:NEWLINE data = r.json()NEWLINE search_count = data['response']['numFound']NEWLINE if search_count != 0:NEWLINE search_offset = data['response']['start']NEWLINE search_results = data['response']['docs']NEWLINE charts_year =[{'key': x['val'], 'label': x['val'], 'count': x['count']} for x in data['facets']['year']['buckets']]NEWLINE charts_month = [{'key': x['val'], 'label': x['val'], 'count': x['count']} for x in data['facets']['month']['buckets']]NEWLINE charts_dataset = [{'key': x['val'], 'label': x['val'], 'count': x['count']} for x in data['facets']['dataset']['buckets']]NEWLINE else:NEWLINE charts_year = [{'key': 0, 'label': 0, 'count': 0}]NEWLINE charts_month = [{'key': 0, 'label': 0, 'count': 0}]NEWLINE charts_dataset = [{'key': 0, 'label': 0, 'count': 0}]NEWLINENEWLINE ret = {NEWLINE 'charts': [NEWLINE {NEWLINE 'key': 'year',NEWLINE 'label': '年份',NEWLINE 'rows': charts_year,NEWLINE },NEWLINE {NEWLINE 'key': 'month',NEWLINE 'label': '月份',NEWLINE 'rows': charts_month,NEWLINE },NEWLINE {NEWLINE 'key': 'dataset',NEWLINE 'label': '資料集',NEWLINE 'rows': charts_dataset,NEWLINE },NEWLINE ],NEWLINE }NEWLINE return JsonResponse(ret)NEWLINENEWLINENEWLINENEWLINEdef get_map_species(request):NEWLINE query_list = []NEWLINE for key, values in request.GET.lists():NEWLINE if key != 'facet':NEWLINE query_list.append((key, values))NEWLINE solr = SolrQuery('taibif_occurrence')NEWLINE solr_url = solr.generate_solr_url(request.GET.lists())NEWLINE map_url = solr_url.replace('rows=20','rows=10')NEWLINE r = requests.get(map_url)NEWLINE resp = {NEWLINE 'count' 
:0NEWLINE }NEWLINE if r.status_code == 200:NEWLINE data = r.json()NEWLINE resp.update({'count':data['response']['numFound']})NEWLINE resp['results'] = data['response']['docs']NEWLINE NEWLINE return JsonResponse(resp)NEWLINENEWLINENEWLINEdef occurrence_search_v2(request):NEWLINE time_start = time.time()NEWLINE facet_values = []NEWLINE facet_selected = {}NEWLINE query_list = []NEWLINE for key, values in request.GET.lists():NEWLINE if key == 'facet':NEWLINE facet_values = valuesNEWLINE else:NEWLINE query_list.append((key, values))NEWLINENEWLINE for key, values in request.GET.lists():NEWLINE if key in facet_values:NEWLINE facet_selected[key] = valuesNEWLINENEWLINE solr = SolrQuery('taibif_occurrence', facet_values)NEWLINE req = solr.request(query_list)NEWLINE #response = req['solr_response']NEWLINE resp = solr.get_response()NEWLINE if not resp:NEWLINE return JsonResponse({NEWLINE 'results': 0,NEWLINE 'solr_error_msg': solr.solr_error,NEWLINE 'solr_url': solr.solr_url,NEWLINE 'solr_tuples': solr.solr_tuples,NEWLINE })NEWLINE # for frontend menu data sturctNEWLINE menus = solr.get_menus()NEWLINENEWLINE # get full menu if no facet returnNEWLINE if len(menus) == 0:NEWLINE menus = get_init_menu(facet_values)NEWLINENEWLINE new_menus = []NEWLINE selected_facet_menu = {}NEWLINE if len(facet_selected) >= 1:NEWLINE for key, values in facet_selected.items():NEWLINE # get each facet, countNEWLINE solr_menu = SolrQuery('taibif_occurrence', facet_values)NEWLINE tmp_query_list = query_list[:]NEWLINE tmp_query_list.remove((key, values))NEWLINE solr_menu.request(tmp_query_list)NEWLINE if submenu := solr_menu.get_menus(key):NEWLINE selected_facet_menu[key] = submenuNEWLINE # reset menus (prevent too less count will filter out by solr facet default limit)NEWLINE for i, v in enumerate(menus):NEWLINE key = v['key']NEWLINE if key in selected_facet_menu:NEWLINE #print ('--------', i, facet_selected[key], selected_facet_menu[key], menus[i])NEWLINE print(i, key, 
selected_facet_menu,'xxxx')NEWLINE tmp_menu = selected_facet_menu[key].copy()NEWLINE tmp_menu_add = []NEWLINE for selected in facet_selected[key]:NEWLINE filtered = list(filter(lambda x: x['key'] == selected, tmp_menu['rows']))NEWLINE if len(filtered) == 0 and len(tmp_menu['rows']) > 0:NEWLINE #print(key, selected, tmp_menu)NEWLINE tmp_menu['rows'].pop()NEWLINE count = 0NEWLINE for item in menus[i]['rows']:NEWLINE #print (key, item['key'], selected, item['count'])NEWLINE if str(item['key']) == str(selected):NEWLINE count = item['count']NEWLINE breakNEWLINE tmp_menu_add.append((selected, count))NEWLINE for x in tmp_menu_add:NEWLINE tmp_menu['rows'].append({NEWLINE 'key': x[0],NEWLINE 'label': x[0],NEWLINE 'count': x[1],NEWLINE })NEWLINE # resort add add fixed menu backNEWLINE tmp_menu['rows'] = sorted(tmp_menu['rows'], key=lambda x: x['count'], reverse=True)NEWLINE new_menus.append(tmp_menu)NEWLINE else:NEWLINE new_menus.append(menus[i])NEWLINENEWLINE # month hackNEWLINE #print(new_menus)NEWLINE for menu in new_menus:NEWLINE if menu['key'] == 'month':NEWLINE month_rows = []NEWLINE for month in range(1, 13):NEWLINE count = 0NEWLINE for x in menu['rows']:NEWLINE if str(x['key']) == str(month):NEWLINE count = x['count']NEWLINE breakNEWLINE month_rows.append({NEWLINE 'key': str(month),NEWLINE 'label': str(month),NEWLINE 'count': countNEWLINE })NEWLINE menu['rows'] = month_rowsNEWLINENEWLINE # HACK, for menu items all zero:NEWLINE for menu in new_menus:NEWLINE menu_default = NoneNEWLINE if menu['key'] not in['month', 'year']:NEWLINE #print(menu['key'], sum([x.get('count', 0) for x in menu['rows']]))NEWLINE total = sum([x.get('count', 0) for x in menu['rows']])NEWLINE if total == 0:NEWLINE if not menu_default:NEWLINE menu_default = get_init_menu(facet_values)NEWLINE found = filter(lambda x: x['key'] == menu['key'], menu_default)NEWLINE if submenu := list(found):NEWLINE # replace submenu !!NEWLINE menu['rows'] = submenu[0]['rows']NEWLINENEWLINE resp['menus'] = 
new_menusNEWLINENEWLINE # TODO, init taxon_keyNEWLINE req_dict = dict(request.GET)NEWLINE taxon_key = ''NEWLINE if tkey := req_dict.get('taxon_key', ''):NEWLINE taxon_key = tkeyNEWLINE # treeNEWLINE treeRoot = Taxon.objects.filter(rank='kingdom').all()NEWLINE treeData = [{NEWLINE 'id': x.id,NEWLINE 'data': {NEWLINE 'name': x.get_name(),NEWLINE 'count': x.count,NEWLINE },NEWLINE } for x in treeRoot]NEWLINE resp['tree'] = treeDataNEWLINE # TODO, init taxon_keyNEWLINE #resp['taxon_checked'] = tkeyNEWLINE if request.GET.get('debug_solr', ''):NEWLINE resp['solr_resp'] = solr.solr_responseNEWLINE resp['solr_url'] = solr.solr_urlNEWLINE resp['solr_tuples'] = solr.solr_tuplesNEWLINENEWLINE resp['solr_qtime'] = req['solr_response']['responseHeader']['QTime']NEWLINENEWLINE #--------------- map ---------------#NEWLINE # check if solr data has been updatedNEWLINE solr_updated = False if cache.get('default_solr_count') == resp['count'] else TrueNEWLINE if query_list: # 如果有帶篩選條件NEWLINE resp['map_geojson'] = get_geojson(solr.solr_url)NEWLINE elif solr_updated or not cache.get('default_map_geojson'):NEWLINE # 如果沒有篩選條件且solr資料有更新 或 如果沒有篩選條件且cache沒有default_map_geojsonNEWLINE resp['map_geojson'] = get_geojson(solr.solr_url)NEWLINE cache.set('default_map_geojson', resp['map_geojson'])NEWLINE cache.set('default_solr_count', resp['count'])NEWLINE else: # 如果沒有篩選條件且solr沒更新且cache有default_map_geojsonNEWLINE resp['map_geojson'] = default_map_geojsonNEWLINE resp['elapsed'] = time.time() - time_startNEWLINE #print('final', time.time() - time_start)NEWLINE return JsonResponse(resp)NEWLINENEWLINENEWLINEdef taxon_tree_node(request, pk):NEWLINE taxon = Taxon.objects.get(pk=pk)NEWLINE children = [{NEWLINE 'id':x.id,NEWLINE 'data': {NEWLINE 'name': x.get_name(),NEWLINE 'count': x.count,NEWLINE 'rank': x.rank,NEWLINE }NEWLINE } for x in taxon.children]NEWLINENEWLINE data = {NEWLINE 'rank': taxon.rank,NEWLINE 'id': taxon.id,NEWLINE 'data': {NEWLINE 'name': taxon.get_name(),NEWLINE 'count': 
taxon.count,NEWLINE 'rank': taxon.rank,NEWLINE },NEWLINE 'children': children,NEWLINE }NEWLINE return HttpResponse(json.dumps(data), content_type="application/json")NEWLINENEWLINE# DEPRICATEDNEWLINE#@json_retNEWLINEdef search_occurrence(request, cat=''):NEWLINE has_menu = True if request.GET.get('menu', '') else FalseNEWLINENEWLINE has_filter, q = SimpleData.public_objects.filter_by_key_values(list(request.GET.lists()))NEWLINE menu_list = []NEWLINE if has_menu:NEWLINE # TODO, normalize country data?NEWLINE #country_code_list = [x['country'] for x in q.exclude(country__isnull=True).annotate(count=Count('country')).order_by('-count')]NEWLINE #year_list = [x['year'] for x in q.exclude(year__isnull=True).annotate(count=Count('year')).all()]NEWLINE #print (country_code_list)NEWLINE #print (year_list)NEWLINE #print (q.exclude(year__isnull=True).annotate(count=Count('year')).query)NEWLINE ## yearNEWLINE #q = SimpleData.objects.values('year').exclude(year__isnull=True).annotate(count=Count('year')).order_by('-count')NEWLINENEWLINE dataset_menu = []NEWLINE if has_filter:NEWLINE #q_by_dataset = q.group_by_dataset(request.GET)NEWLINE q_by_dataset = q.values('taibif_dataset_name').\NEWLINE exclude(taibif_dataset_name__isnull=True).\NEWLINE annotate(count=Count('taibif_dataset_name')).\NEWLINE order_by('-count')NEWLINE dataset_menu = [{NEWLINE 'label': x['taibif_dataset_name'],NEWLINE 'key': x['taibif_dataset_name'],NEWLINE 'count': x['count']NEWLINE } for x in q_by_dataset.all()]NEWLINE else:NEWLINE ds_list = Dataset.public_objects.values('name', 'title', 'num_occurrence').all()NEWLINE dataset_menu = [{NEWLINE 'label': x['title'],NEWLINE 'key': x['name'],NEWLINE 'count': x['num_occurrence']NEWLINE } for x in ds_list]NEWLINENEWLINE publisher_query = Dataset.objects\NEWLINE .values('organization','organization_verbatim')\NEWLINE .exclude(organization__isnull=True)\NEWLINE .annotate(count=Count('organization'))\NEWLINE .order_by('-count')NEWLINE publisher_rows = [{NEWLINE 
'key':x['organization'],NEWLINE 'label':x['organization_verbatim'],NEWLINE 'count': x['count']NEWLINE } for x in publisher_query]NEWLINENEWLINE menu_list = [NEWLINE {NEWLINE 'key': 'countrycode',NEWLINE 'label': '國家/區域',NEWLINE 'rows': [{'label': x['label'], 'key': x['label'] } for x in COUNTRY_ROWS]NEWLINE },NEWLINE {NEWLINE 'key': 'year',NEWLINE 'label': '年份',NEWLINE 'rows': YEAR_ROWSNEWLINE },NEWLINE {NEWLINE 'key': 'month',NEWLINE 'label': '月份',NEWLINE 'rows': [{'label': '{}月'.format(x),'key': x} for x in range(1, 13)]NEWLINE },NEWLINE {NEWLINE 'key': 'dataset',NEWLINE 'label': '資料集',NEWLINE 'rows': dataset_menu,NEWLINE },NEWLINE {NEWLINE 'key':'publisher',NEWLINE 'label': '發布者',NEWLINE 'rows': publisher_rowsNEWLINE }NEWLINE ]NEWLINENEWLINE # searchNEWLINE occur_search = OccurrenceSearch(list(request.GET.lists()), using='')NEWLINE res = {}NEWLINENEWLINE if cat in ['search', 'download']:NEWLINE res = occur_search.get_results()NEWLINE elif cat == 'taxonomy':NEWLINE taxon_num_list = []NEWLINE def _get_taxon_num(q):NEWLINE rank_list = ['kingdom', 'phylum', 'class', 'order', 'family', 'genus', 'species']NEWLINE data = []NEWLINE for r in rank_list:NEWLINE field_name = 'taxon_{}_id'.format(r)NEWLINE # TODO: exclude is nullNEWLINE num = q.values(field_name).exclude().annotate(count=Count(field_name)).count()NEWLINE #print (r, num)NEWLINE data.append(num)NEWLINE return dataNEWLINENEWLINE if not has_filter:NEWLINE key = 'occurrence_all_taxonomy'NEWLINE if value:= cache.get(key):NEWLINE taxon_num_list = valueNEWLINE else:NEWLINE taxon_num_list = _get_taxon_num(q)NEWLINE cache.set(key, taxon_num_list, 2592000)NEWLINE #taxon_num_list = get_cache_or_set(, ) # 無效 why?NEWLINE else:NEWLINE taxon_num_list = _get_taxon_num(q)NEWLINENEWLINE res['taxon_num_list'] = taxon_num_listNEWLINENEWLINE elif cat == 'gallery':NEWLINE passNEWLINE #taibif_ids = q.values('taibif_id').all()NEWLINE #RawOccurrenceData.public_objects.values('associatedmedia').filter(id__in=taibif_ids).all()NEWLINE 
elif cat == 'charts':NEWLINE #occur_search.limit = 1 #NEWLINE #res = occur_search.get_results()NEWLINE chart = request.GET.get('chart', '')NEWLINE data = []NEWLINE #print (chart, has_filter)NEWLINE if chart == 'year':NEWLINE def _group_by_year(q):NEWLINE q_by_cat = q.values('year') \NEWLINE .exclude(year__isnull=True) \NEWLINE .annotate(count=Count('year')) \NEWLINE .order_by('-year')NEWLINE return list(q_by_cat.all())NEWLINENEWLINE if not has_filter:NEWLINE #data = get_cache_or_set('occurrence_all_by_year', _group_by_year(q))NEWLINE key = 'occurrence_all_by_year'NEWLINE if value:= cache.get(key):NEWLINE data = valueNEWLINE else:NEWLINE data = _group_by_year(q)NEWLINE cache.set(key, data, 2592000)NEWLINE else:NEWLINE data = _group_by_year(q)NEWLINENEWLINE res = [NEWLINE [i['year'] for i in data],NEWLINE [i['count'] for i in data]NEWLINE ]NEWLINE elif chart == 'month':NEWLINE def _group_by_month(q):NEWLINE q_by_cat = q.values('month') \NEWLINE .filter(month__in=list(range(1,13))) \NEWLINE .exclude(year__isnull=True) \NEWLINE .annotate(count=Count('month')) \NEWLINE .order_by('month')NEWLINE return list(q_by_cat.all())NEWLINENEWLINE if not has_filter:NEWLINE #data = get_cache_or_set('occurrence_all_by_month', _group_by_month(q))NEWLINE key = 'occurrence_all_by_month'NEWLINE if value:= cache.get(key):NEWLINE data = valueNEWLINE else:NEWLINE data = _group_by_month(q)NEWLINE cache.set(key, data, 2592000)NEWLINE else:NEWLINE data = _group_by_month(q)NEWLINENEWLINE res = [NEWLINE [i['month'] for i in data],NEWLINE [i['count'] for i in data]NEWLINE ]NEWLINENEWLINE elif chart == 'dataset':NEWLINE if not has_filter:NEWLINE data = Dataset.public_objects.values('title', 'num_occurrence').order_by('-num_occurrence').all()[0:10]NEWLINE res = list(data)NEWLINENEWLINE else:NEWLINE #if cached := cache.get('occurrence_all_by_dataset'):NEWLINE # res = cachedNEWLINE #else:NEWLINE q_by_cat = q.values('taibif_dataset_name') \NEWLINE .annotate(count=Count('taibif_dataset_name')) \NEWLINE 
.order_by('-count')NEWLINE dataset = Dataset.public_objects.values('title', 'name').all()NEWLINE dataset_map = {i['name']: i['title']for i in dataset}NEWLINE data = list(q_by_cat.all()[0:10])NEWLINE print (q_by_cat.query)NEWLINE res = [{NEWLINE 'title': dataset_map[i['taibif_dataset_name']],NEWLINE 'num_occurrence': i['count']} for i in data]NEWLINE #cache.set('occurrence_all_by_dataset', res)NEWLINENEWLINE #elif cat == 'taxonomy':NEWLINE # occur_taxonomy = OccurrenceSearch(list(request.GET.lists()), using='')NEWLINE #occur_taxonomy.limit = 200NEWLINE #q = occur_taxonomy.queryNEWLINE #q = q.values('year').annotate(count=Count('year')).order_by('year')NEWLINE #print (len(q.all()))NEWLINE #q = q.values('month').annotate(count=Count('month')).order_by('month')NEWLINE #print (q.all())NEWLINE #res = occur_taxonomy.get_results()NEWLINE data = {NEWLINE 'search': res,NEWLINE }NEWLINENEWLINE if has_menu:NEWLINE data['menus'] = menu_listNEWLINE # treeNEWLINE treeRoot = Taxon.objects.filter(rank='kingdom').all()NEWLINE treeData = [{NEWLINE 'id': x.id,NEWLINE 'data': {NEWLINE 'name': x.get_name(),NEWLINE 'count': x.count,NEWLINE },NEWLINE } for x in treeRoot]NEWLINE data['tree'] = treeDataNEWLINENEWLINE #return {'data': data}NEWLINE return HttpResponse(json.dumps(data), content_type="application/json")NEWLINENEWLINE#@json_retNEWLINEdef search_dataset(request):NEWLINE has_menu = True if request.GET.get('menu', '') else FalseNEWLINE menu_list = []NEWLINENEWLINE ds_search = DatasetSearch(list(request.GET.lists()))NEWLINE if has_menu:NEWLINENEWLINE #publisher_query = Dataset.objects\NEWLINE publisher_query = ds_search.query\NEWLINE .values('organization','organization_name')\NEWLINE .exclude(organization__isnull=True)\NEWLINE .annotate(count=Count('organization'))\NEWLINE .order_by('-count')NEWLINE #publisher_query = publisher_query.filter()NEWLINE #publisher_query = ds_search.query.values('organization','organization_verbatim')\NEWLINE # 
.exclude(organization__isnull=True)\NEWLINE # .annotate(count=Count('organization'))\NEWLINE # .order_by('-count')NEWLINE # NEWLINE #print (publisher_query) NEWLINE #for x in publisher_query : NEWLINE # print('===========',x)NEWLINE publisher_rows = [{NEWLINE 'key':x['organization'],NEWLINE 'label':x['organization_name'],NEWLINE 'count': x['count']NEWLINE } for x in publisher_query]NEWLINENEWLINENEWLINE rights_query = ds_search.query\NEWLINE .values('data_license')\NEWLINE .exclude(data_license__exact='')\NEWLINE .annotate(count=Count('data_license'))\NEWLINE .order_by('-count')NEWLINE rights_rows = [{NEWLINE 'key': DATA_MAPPING['rights'][x['data_license']],NEWLINE 'label':DATA_MAPPING['rights'][x['data_license']],NEWLINE 'count': x['count']NEWLINE } for x in rights_query]NEWLINENEWLINENEWLINE country_query = ds_search.query\NEWLINE .values('country')\NEWLINE .exclude(country__exact='')\NEWLINE .annotate(count=Count('country'))\NEWLINE .order_by('-count')NEWLINE country_rows = [{NEWLINE 'key':x['country'],NEWLINE 'label':DATA_MAPPING['country'][x['country']],NEWLINE 'count': x['count']NEWLINE } for x in country_query]NEWLINENEWLINE menu_list = [NEWLINE {NEWLINE 'key':'publisher',NEWLINE 'label': '發布者',NEWLINE 'rows': publisher_rowsNEWLINE },NEWLINE {NEWLINE 'key': 'country',NEWLINE 'label': '分布地區/國家',NEWLINE 'rows': country_rowsNEWLINE },NEWLINE {NEWLINE 'key': 'rights',NEWLINE 'label': '授權狀態',NEWLINE 'rows': rights_rowsNEWLINE }NEWLINE ]NEWLINENEWLINE # searchNEWLINE res = ds_search.get_results()NEWLINENEWLINE data = {NEWLINE 'search': res,NEWLINE }NEWLINE if has_menu:NEWLINE data['menus'] = menu_listNEWLINE #return {'data': data}NEWLINE return HttpResponse(json.dumps(data), content_type="application/json")NEWLINENEWLINE#@json_retNEWLINEdef search_publisher(request):NEWLINE has_menu = True if request.GET.get('menu', '') else FalseNEWLINE menu_list = []NEWLINENEWLINE if has_menu:NEWLINE country_list = DatasetOrganization.objects\NEWLINE 
.values('country_code')\NEWLINE .exclude(country_code__isnull=True)\NEWLINE .annotate(count=Count('country_code'))\NEWLINE .order_by('-count').all()NEWLINE menu_list = [NEWLINE {NEWLINE 'key': 'countrycode',NEWLINE 'label': '國家/區域',NEWLINE 'rows': [{'label': DATA_MAPPING['country'][x['country_code']], 'count': x['count'], 'key': x['country_code'] } for x in country_list]NEWLINE },NEWLINE ]NEWLINENEWLINE menus = [NEWLINE {NEWLINE 'key': 'country_code',NEWLINE 'label': '國家/區域',NEWLINE 'rows': [{'label': DATA_MAPPING['country'][x['country_code']], 'key': x['country_code'], 'count': x['count']} for x in country_list]NEWLINE },NEWLINE ]NEWLINENEWLINE # searchNEWLINE publisher_search = PublisherSearch(list(request.GET.lists()))NEWLINE res = publisher_search.get_results()NEWLINENEWLINE data = {NEWLINE 'search': res,NEWLINE }NEWLINENEWLINE if has_menu:NEWLINE data['menus'] = menu_listNEWLINENEWLINE #return {'data': data }NEWLINE return HttpResponse(json.dumps(data), content_type="application/json")NEWLINENEWLINE#@json_retNEWLINEdef search_species(request):NEWLINE status = request.GET.get('status', '')NEWLINE rank = request.GET.get('rank', '')NEWLINE print (status)NEWLINENEWLINE species_search = SpeciesSearch(list(request.GET.lists()))NEWLINE #species_ids = list(species_search.query.values('id').all())NEWLINE #print (species_ids, len(species_ids))NEWLINE has_menu = True if request.GET.get('menu', '') else FalseNEWLINE menu_list = []NEWLINE if has_menu:NEWLINE menus = [NEWLINE # {NEWLINE # 'key': 'highertaxon',NEWLINE # 'label': '高階分類群',NEWLINE # 'rows': [{NEWLINE # 'key': x.id,NEWLINE # 'label': x.get_name(),NEWLINE # } for x in Taxon.objects.filter(rank='kingdom')],NEWLINE # },NEWLINE {NEWLINE 'key': 'rank',NEWLINE 'label': '分類位階',NEWLINE 'rows': [{NEWLINE 'key': x['key'],NEWLINE 'label': x['label'],NEWLINE } for x in Taxon.get_tree(rank=rank, status=status)]NEWLINE },NEWLINE {NEWLINE 'key': 'status',NEWLINE 'label': '狀態',NEWLINE 'rows': [NEWLINE {'label': '有效的', 'key': 
'accepted'},NEWLINE {'label': '同物異名', 'key': 'synonym'}NEWLINE ]NEWLINE },NEWLINE NEWLINE ]NEWLINENEWLINE # searchNEWLINE res = species_search.get_results()NEWLINE data = {NEWLINE 'search': res,NEWLINE }NEWLINE if has_menu:NEWLINE data['menus'] = menusNEWLINENEWLINE #return {'data': data }NEWLINE return HttpResponse(json.dumps(data), content_type="application/json")NEWLINENEWLINEdef data_stats(request):NEWLINE '''for D3 charts'''NEWLINE is_most = request.GET.get('most', '')NEWLINE current_year = datetime.datetime.now().yearNEWLINENEWLINE query = Dataset.objectsNEWLINE if is_most:NEWLINE query = query.filter(is_most_project=True)NEWLINE rows = query.all()NEWLINENEWLINE hdata = {}NEWLINE current_year_data = {NEWLINE 'dataset': [{'x': '{}月'.format(x), 'y': 0} for x in range(1, 13)],NEWLINE 'occurrence': [{'x': '{}月'.format(x), 'y': 0} for x in range(1, 13)]NEWLINE }NEWLINE history_data = {NEWLINE 'dataset': [],NEWLINE 'occurrence': []NEWLINE }NEWLINE for i in rows:NEWLINE if not i.pub_date:NEWLINE continueNEWLINENEWLINE y = str(i.pub_date.year)NEWLINE if str(current_year) == y:NEWLINE m = i.pub_date.monthNEWLINE current_year_data['dataset'][m-1]['y'] += 1NEWLINE current_year_data['occurrence'][m-1]['y'] += i.num_occurrenceNEWLINE if y not in hdata:NEWLINE hdata[y] = {NEWLINE 'dataset': 0,NEWLINE 'occurrence': 0NEWLINE }NEWLINE else:NEWLINE hdata[y]['dataset'] += 1NEWLINE hdata[y]['occurrence'] += i.num_occurrenceNEWLINENEWLINE #print (hdata)NEWLINE sorted_year = sorted(hdata)NEWLINE accu_ds = 0NEWLINE accu_occur = 0NEWLINE for y in sorted_year:NEWLINE accu_occur += hdata[y]['occurrence']NEWLINE accu_ds += hdata[y]['dataset']NEWLINE history_data['dataset'].append({NEWLINE 'year': int(y),NEWLINE 'y1': hdata[y]['dataset'],NEWLINE 'y2': accu_dsNEWLINE })NEWLINE history_data['occurrence'].append({NEWLINE 'year': int(y),NEWLINE 'y1': hdata[y]['occurrence'],NEWLINE 'y2': accu_occurNEWLINE })NEWLINE data = {NEWLINE 'current_year': current_year_data,NEWLINE 'history': 
history_data,NEWLINE }NEWLINENEWLINE return HttpResponse(json.dumps(data), content_type="application/json")NEWLINENEWLINENEWLINE@json_retNEWLINEdef species_detail(request, pk):NEWLINE taxon = Taxon.objects.get(pk=pk)NEWLINE #rows = RawDataOccurrence.objects.values('taibif_dataset_name', 'decimallatitude', 'decimallongitude').filter(scientificname=taxon.name).all()NEWLINE scname = '{} {}'.format(taxon.parent.name, taxon.name)NEWLINE return {'data': {} }NEWLINENEWLINENEWLINENEWLINENEWLINE## Kuan-Yu added for API occurence recordNEWLINENEWLINE###exampleNEWLINEfilt1 = 'speices'NEWLINEfilt2 = 'database'NEWLINEpk1 = 'Rana latouchii'NEWLINEpk2 = 'manager_17_15'NEWLINEpk3 = 'Rana longicrus'NEWLINEpk4 = 'e10100001_4_10'NEWLINENEWLINENEWLINENEWLINEdef ChartYear(request):NEWLINENEWLINE if filt1 == 'hi':NEWLINE species = SimpleData.objects.filter(Q(scientific_name=pk1) | Q(scientific_name=pk3))NEWLINE sp_year = species.values('year') \NEWLINE .exclude(year__isnull=True) \NEWLINE .annotate(count=Count('year')) \NEWLINE .order_by('-year')NEWLINENEWLINE chart_year = [NEWLINE {NEWLINE "page": 1,NEWLINE "pages": 1,NEWLINE "per_page": "50",NEWLINE "total": 1NEWLINE },NEWLINE [NEWLINE {NEWLINE 'year': x['year'],NEWLINE 'count': x['count']NEWLINE } for x in sp_yearNEWLINE ]NEWLINE ]NEWLINENEWLINE if filt2 == 'you':NEWLINENEWLINE dataset = SimpleData.objects.filter(Q(taibif_dataset_name=pk2) | Q(taibif_dataset_name=pk4))NEWLINE data_year = dataset.values( 'year') \NEWLINE .exclude(year__isnull=True) \NEWLINE .annotate(count=Count('year')) \NEWLINE .order_by('-year')NEWLINE chart_year = [NEWLINE {NEWLINE "page": 1,NEWLINE "pages": 1,NEWLINE "per_page": "50",NEWLINE "total": 1NEWLINE },NEWLINE [NEWLINE {NEWLINE 'year': x['year'],NEWLINE 'count': x['count']NEWLINE } for x in data_yearNEWLINE ]NEWLINE ]NEWLINENEWLINE if (filt2 == filt2 and filt1 == filt1):NEWLINENEWLINE data_sp = SimpleData.objects.filter(Q(scientific_name=pk1) | Q(scientific_name=pk3)) \NEWLINE 
.filter(Q(taibif_dataset_name=pk2) | Q(taibif_dataset_name=pk4))NEWLINENEWLINE data_sp_month = data_sp.values('year') \NEWLINE .exclude(year__isnull=True) \NEWLINE .annotate(count=Count('year')) \NEWLINE .order_by('-year')NEWLINENEWLINE chart_year = [NEWLINE {NEWLINE "page": 1,NEWLINE "pages": 1,NEWLINE "per_page": "50",NEWLINE "total": 1NEWLINE },NEWLINE [NEWLINE {NEWLINE 'year': x['year'],NEWLINE 'count': x['count']NEWLINE } for x in data_sp_monthNEWLINE ]NEWLINE ]NEWLINENEWLINENEWLINE return HttpResponse(json.dumps(chart_year), content_type="application/json")NEWLINENEWLINENEWLINEdef ChartMonth(request):NEWLINENEWLINE if filt1 == 'hi':NEWLINENEWLINE species = SimpleData.objects.filter(Q(scientific_name=pk1) | Q(scientific_name=pk3))NEWLINE sp_month = species.values( 'month') \NEWLINE .exclude(month__isnull=True) \NEWLINE .annotate(count=Count('month')) \NEWLINE .order_by('-month')NEWLINENEWLINENEWLINE chart_month = [NEWLINE {NEWLINE "page": 1,NEWLINE "pages": 1,NEWLINE "per_page": "50",NEWLINE "total": 1NEWLINE },NEWLINE [NEWLINE {NEWLINE 'month': x['month'],NEWLINE 'count': x['count']NEWLINE } for x in sp_monthNEWLINE ]NEWLINE ]NEWLINENEWLINENEWLINENEWLINE if filt2 == 'you':NEWLINENEWLINE dataset = SimpleData.objects.filter(Q(taibif_dataset_name=pk2) | Q(taibif_dataset_name=pk4))NEWLINENEWLINE data_month = dataset.values('month') \NEWLINE .exclude(month__isnull=True) \NEWLINE .annotate(count=Count('month')) \NEWLINE .order_by('-month')NEWLINENEWLINE chart_month = [NEWLINE {NEWLINE "page": 1,NEWLINE "pages": 1,NEWLINE "per_page": "50",NEWLINE "total": 1NEWLINE },NEWLINE [NEWLINE {NEWLINE 'month': x['month'],NEWLINE 'count': x['count']NEWLINE } for x in data_monthNEWLINE ]NEWLINE ]NEWLINENEWLINE if (filt2 == filt2 and filt1 == filt1):NEWLINENEWLINE data_sp = SimpleData.objects.filter(Q(scientific_name=pk1) | Q(scientific_name=pk3)) \NEWLINE .filter(Q(taibif_dataset_name=pk2) | Q(taibif_dataset_name=pk4))NEWLINENEWLINE data_sp_month = data_sp.values('month') 
\NEWLINE .exclude(month__isnull=True) \NEWLINE .annotate(count=Count('month')) \NEWLINE .order_by('-month')NEWLINENEWLINE chart_month = [NEWLINE {NEWLINE "page": 1,NEWLINE "pages": 1,NEWLINE "per_page": "50",NEWLINE "total": 1NEWLINE },NEWLINE [NEWLINE {NEWLINE 'month': x['month'],NEWLINE 'count': x['count']NEWLINE } for x in data_sp_monthNEWLINE ]NEWLINE ]NEWLINENEWLINENEWLINENEWLINE return HttpResponse(json.dumps(chart_month), content_type="application/json")NEWLINENEWLINENEWLINEdef taxon_bar(request):NEWLINENEWLINE ## Use species findNEWLINE if filt1 == 'hi':NEWLINENEWLINE species = SimpleData.objects.filter(Q(scientific_name=pk1)).values('taxon_genus_id','taxon_species_id')NEWLINE sp_id = species.annotate(sp_c=Count('taxon_species_id')) \NEWLINE .aggregate(Sum('sp_c')).get('sp_c__sum')NEWLINE sp_none = species.exclude(Q(taxon_genus_id__isnull=True) | Q(taxon_species_id__isnull=True))[:1]NEWLINE ss = {'count': sp_id}NEWLINENEWLINENEWLINENEWLINE genus = SimpleData.objects.filter(Q(taxon_genus_id=sp_none[0]['taxon_genus_id'])).values('taxon_family_id','taxon_genus_id')NEWLINE ge_id = genus.annotate(genus_c=Count('taxon_genus_id')) \NEWLINE .aggregate(Sum('genus_c')).get('genus_c__sum')NEWLINE ge_none = genus.exclude(Q(taxon_genus_id__isnull=True) | Q(taxon_family_id__isnull=True))[:1]NEWLINE gg = {'count':ge_id}NEWLINENEWLINENEWLINENEWLINE family = SimpleData.objects.filter(Q(taxon_family_id=ge_none[0]['taxon_family_id'])).values('taxon_order_id','taxon_family_id')NEWLINE fam_id = family.annotate(fam_c=Count('taxon_family_id')) \NEWLINE .aggregate(Sum('fam_c')).get('fam_c__sum')NEWLINE fam_none = family.exclude(Q(taxon_order_id__isnull=True) | Q(taxon_family_id__isnull=True))[:1]NEWLINE ff = {'count': fam_id}NEWLINENEWLINENEWLINE order = SimpleData.objects.filter(Q(taxon_order_id=fam_none[0]['taxon_order_id'])).values('taxon_class_id', 'taxon_order_id')NEWLINE ord_id = order.annotate(ord_c=Count('taxon_order_id')) \NEWLINE 
.aggregate(Sum('ord_c')).get('ord_c__sum')NEWLINE ord_none = order.exclude(Q(taxon_order_id__isnull=True) | Q(taxon_class_id__isnull=True))[:1]NEWLINE oo = {'count': ord_id}NEWLINENEWLINENEWLINE clas = SimpleData.objects.filter(Q(taxon_class_id=ord_none[0]['taxon_class_id'])).values( 'taxon_phylum_id', 'taxon_class_id')NEWLINE clas_id = clas.annotate(clas_c=Count('taxon_class_id')) \NEWLINE .aggregate(Sum('clas_c')).get('clas_c__sum')NEWLINE clas_none = clas.exclude(Q(taxon_order_id__isnull=True) | Q(taxon_phylum_id__isnull=True))[:1]NEWLINENEWLINE cc = {'count': clas_id}NEWLINENEWLINENEWLINE phylum = SimpleData.objects.filter(Q(taxon_phylum_id=clas_none[0]['taxon_phylum_id'])).values('taxon_kingdom_id', 'taxon_phylum_id')NEWLINE phy_id = phylum.annotate(phyl_c=Count('taxon_phylum_id')) \NEWLINE .aggregate(Sum('phyl_c')).get('phyl_c__sum')NEWLINE phy_none = phylum.exclude(Q(taxon_kingdom_id__isnull=True) | Q(taxon_phylum_id__isnull=True))[:1]NEWLINENEWLINE pp = {'count': phy_id}NEWLINENEWLINENEWLINE king = SimpleData.objects.filter(Q(taxon_kingdom_id=phy_none[0]['taxon_kingdom_id'])).values('taxon_kingdom_id')NEWLINE king_id = king.annotate(king_c=Count('taxon_kingdom_id')) \NEWLINE .aggregate(Sum('king_c')).get('king_c__sum')NEWLINE kk = {'count': king_id}NEWLINENEWLINE ## Ude dataset and species searchNEWLINE if (filt2 == filt2 and filt1 == filt1):NEWLINENEWLINE species = SimpleData.objects.filter(Q(scientific_name=pk1) | Q(taibif_dataset_name=pk2)).values('taxon_genus_id','taxon_species_id')NEWLINE sp_id = species.annotate(sp_c=Count('taxon_species_id')) \NEWLINE .aggregate(Sum('sp_c')).get('sp_c__sum')NEWLINE sp_none = species.exclude(Q(taxon_genus_id__isnull=True) | Q(taxon_species_id__isnull=True))[:1]NEWLINE ss = {'count': sp_id}NEWLINENEWLINE genus = SimpleData.objects.filter(Q(taxon_genus_id=sp_none[0]['taxon_genus_id']) | Q(taibif_dataset_name=pk2)).values('taxon_family_id',NEWLINE 'taxon_genus_id')NEWLINE ge_id = 
genus.annotate(genus_c=Count('taxon_genus_id')) \NEWLINE .aggregate(Sum('genus_c')).get('genus_c__sum')NEWLINE ge_none = genus.exclude(Q(taxon_genus_id__isnull=True) | Q(taxon_family_id__isnull=True))[:1]NEWLINE gg = {'count': ge_id}NEWLINENEWLINE family = SimpleData.objects.filter(Q(taxon_family_id=ge_none[0]['taxon_family_id']) | Q(taibif_dataset_name=pk2)).values('taxon_order_id',NEWLINE 'taxon_family_id')NEWLINE fam_id = family.annotate(fam_c=Count('taxon_family_id')) \NEWLINE .aggregate(Sum('fam_c')).get('fam_c__sum')NEWLINE fam_none = family.exclude(Q(taxon_order_id__isnull=True) | Q(taxon_family_id__isnull=True))[:1]NEWLINE ff = {'count': fam_id}NEWLINENEWLINE order = SimpleData.objects.filter(Q(taxon_order_id=fam_none[0]['taxon_order_id']) | Q(taibif_dataset_name=pk2)).values('taxon_class_id',NEWLINE 'taxon_order_id')NEWLINE ord_id = order.annotate(ord_c=Count('taxon_order_id')) \NEWLINE .aggregate(Sum('ord_c')).get('ord_c__sum')NEWLINE ord_none = order.exclude(Q(taxon_order_id__isnull=True) | Q(taxon_class_id__isnull=True))[:1]NEWLINE oo = {'count': ord_id}NEWLINENEWLINE clas = SimpleData.objects.filter(Q(taxon_class_id=ord_none[0]['taxon_class_id']) | Q(taibif_dataset_name=pk2)).values('taxon_phylum_id',NEWLINE 'taxon_class_id')NEWLINE clas_id = clas.annotate(clas_c=Count('taxon_class_id')) \NEWLINE .aggregate(Sum('clas_c')).get('clas_c__sum')NEWLINE clas_none = clas.exclude(Q(taxon_order_id__isnull=True) | Q(taxon_phylum_id__isnull=True))[:1]NEWLINENEWLINE cc = {'count': clas_id}NEWLINENEWLINE phylum = SimpleData.objects.filter(Q(taxon_phylum_id=clas_none[0]['taxon_phylum_id']) | Q(taibif_dataset_name=pk2)).values(NEWLINE 'taxon_kingdom_id', 'taxon_phylum_id')NEWLINE phy_id = phylum.annotate(phyl_c=Count('taxon_phylum_id')) \NEWLINE .aggregate(Sum('phyl_c')).get('phyl_c__sum')NEWLINE phy_none = phylum.exclude(Q(taxon_kingdom_id__isnull=True) | Q(taxon_phylum_id__isnull=True))[:1]NEWLINENEWLINE pp = {'count': phy_id}NEWLINENEWLINE king = 
SimpleData.objects.filter(Q(taxon_kingdom_id=phy_none[0]['taxon_kingdom_id']) | Q(taibif_dataset_name=pk2)).values('taxon_kingdom_id')NEWLINE king_id = king.annotate(king_c=Count('taxon_kingdom_id')) \NEWLINE .aggregate(Sum('king_c')).get('king_c__sum')NEWLINE kk = {'count': king_id}NEWLINENEWLINENEWLINENEWLINENEWLINE data = [NEWLINE {NEWLINE "page": 1,NEWLINE "pages": 1,NEWLINE "per_page": "50",NEWLINE "total": 1NEWLINE },NEWLINE [NEWLINE ss,gg,ff,oo,cc,pp,kkNEWLINE ]NEWLINE ]NEWLINENEWLINENEWLINENEWLINE return HttpResponse(json.dumps(data), content_type="application/json")NEWLINENEWLINE#------- DEPRECATED ------#NEWLINENEWLINEdef search_occurrence_v1(request):NEWLINE year_start = 1000NEWLINE year_end = 2021NEWLINENEWLINE solr_q_fq_list=[]NEWLINE solr_fq = ''NEWLINE solr_q_list = []NEWLINE solr_q = '*:*'NEWLINE for term, values in list(request.GET.lists()):NEWLINE if term !='q' :NEWLINE if term != 'menu':NEWLINE if term =='year':NEWLINE val = values[0].replace(",", " TO ")NEWLINE solr_q_fq_list.append('{}:[{}]'.format(term,val))NEWLINE year_start =values[0].split(',',1)NEWLINE year_end =values[0].split(',',2)NEWLINE elif term =='dataset':NEWLINE solr_q_fq_list.append('{}:"{}"'.format('taibif_dataset_name_zh', '" OR "'.join(values)))NEWLINE elif term =='month':NEWLINE solr_q_fq_list.append('{}:{}'.format(term, ' OR '.join(values)))NEWLINENEWLINE else:NEWLINE solr_q_list.append('{}:{}'.format('_text_', ' OR '.join(values)))NEWLINENEWLINENEWLINE if len(solr_q_list) > 0:NEWLINE solr_q = ' OR '.join(solr_q_list)NEWLINENEWLINE if len(solr_q_fq_list) > 0:NEWLINE solr_fq = ' OR '.join(solr_q_fq_list)NEWLINENEWLINE menu_year = []NEWLINE menu_month = []NEWLINE menu_dataset = []NEWLINE menu_country = []NEWLINE menu_publisher = []NEWLINENEWLINE search_count = 0NEWLINE search_limit = 20NEWLINE search_offset = 0NEWLINE search_results = []NEWLINE #publisher_query = Dataset.objects\NEWLINE # .values('organization','organization_verbatim')\NEWLINE # 
.exclude(organization__isnull=True)\NEWLINE # .annotate(count=Count('organization'))\NEWLINE # .order_by('-count')NEWLINE #menu_publisher = [{NEWLINE # 'key':x['organization'],NEWLINE # 'label':x['organization_verbatim'],NEWLINE # 'count': x['count']NEWLINE #} for x in publisher_query]NEWLINE NEWLINE NEWLINENEWLINE time_start = time.time() NEWLINE facet_dataset = 'dataset:{type:terms,field:taibif_dataset_name_zh}'NEWLINE facet_month = 'month:{type:range,field:month,start:1,end:13,gap:1}'NEWLINE facet_country = 'country:{type:terms,field:country,mincount:0,limit:-1}'NEWLINE facet_publisher = 'publisher:{type:terms,field:publisher}'NEWLINE facet_json = 'json.facet={'+facet_dataset + ',' +facet_month+ ',' +facet_country+','+facet_publisher+'}'NEWLINE r = requests.get(f'http://solr:8983/solr/taibif_occurrence/select?facet=true&q.op=AND&rows={search_limit}&q={solr_q}&fq={solr_fq}&{facet_json}')NEWLINENEWLINE if r.status_code == 200:NEWLINE data = r.json()NEWLINE search_count = data['response']['numFound']NEWLINE if search_count != 0:NEWLINE search_offset = data['response']['start']NEWLINE search_results = data['response']['docs']NEWLINE for i, v in enumerate(search_results):NEWLINE ## copy fieldsNEWLINE date = '{}-{}-{}'.format(v['year'] if v.get('year', '') else '',NEWLINE v['month'] if v.get('month', '') else '',NEWLINE v['day'] if v.get('day', '') else '')NEWLINE search_results[i]['vernacular_name'] = v.get('vernacularName', '')NEWLINE search_results[i]['scientific_name'] = v.get('scientificName', '')NEWLINE search_results[i]['dataset'] = v['taibif_dataset_name']NEWLINE search_results[i]['date'] = dateNEWLINE search_results[i]['taibif_id'] = '{}__{}'.format(v['taibif_dataset_name'], v['_version_'])NEWLINE search_results[i]['kingdom'] = v.get('kingdom_zh', '')NEWLINE search_results[i]['phylum'] = v.get('phylum_zh', '')NEWLINE search_results[i]['class'] = v.get('class_zh', '')NEWLINE search_results[i]['order'] = v.get('order_zh', '')NEWLINE search_results[i]['family'] 
= v.get('family_zh', '')NEWLINE search_results[i]['genus'] = v.get('genus_zh', '')NEWLINE search_results[i]['species'] = v.get('species_zh', '')NEWLINENEWLINE menu_year = [{'key': 0, 'label': 0, 'count': 0,'year_start':year_start,'year_end':year_end}]NEWLINE menu_month = [{'key': x['val'], 'label': x['val'], 'count': x['count']} for x in data['facets']['month']['buckets']]NEWLINE menu_dataset = [{'key': x['val'], 'label': x['val'], 'count': x['count']} for x in data['facets']['dataset']['buckets']]NEWLINE menu_country = [{'key': x['val'], 'label': x['val'], 'count': x['count']} for x in data['facets']['country']['buckets']]NEWLINE menu_publisher = [{'key': x['val'], 'label': x['val'], 'count': x['count']} for x in data['facets']['publisher']['buckets']]NEWLINE else:NEWLINE menu_year = [{'key': 0, 'label': 0, 'count': 0,'year_start':year_start,'year_end':year_end}]NEWLINE menu_month = [{'key': x, 'label': x, 'count': 0} for x in range(12)]NEWLINE menu_dataset = [{'key': 0, 'label': 0, 'count': 0}]NEWLINE menu_country = [{'key': 0, 'label': 0, 'count': 0}]NEWLINE menu_publisher = [{'key': 0, 'label': 0, 'count': 0}]NEWLINE NEWLINENEWLINE #search_limit = 20NEWLINE NEWLINE ret = {NEWLINE 'menus': [NEWLINE {NEWLINE 'key': 'country', #'countrycode',NEWLINE 'label': '國家/區域',NEWLINE 'rows': menu_country,NEWLINE },NEWLINE {NEWLINE 'key': 'year',NEWLINE 'label': '年份',NEWLINE 'rows': menu_year,NEWLINE },NEWLINE {NEWLINE 'key': 'month',NEWLINE 'label': '月份',NEWLINE 'rows': menu_month,NEWLINE },NEWLINE {NEWLINE 'key': 'dataset',NEWLINE 'label': '資料集',NEWLINE 'rows': menu_dataset,NEWLINE },NEWLINE {NEWLINE 'key':'publisher',NEWLINE 'label': '發布者',NEWLINE 'rows': menu_publisher,NEWLINE }NEWLINE ],NEWLINE 'search': {NEWLINE 'elapsed': time.time() - time_start,NEWLINE 'results': search_results,NEWLINE 'offset': search_offset,NEWLINE 'limit': search_limit,NEWLINE 'count': search_count,NEWLINE 'has_more': TrueNEWLINE },NEWLINE }NEWLINENEWLINE # treeNEWLINE treeRoot = 
Taxon.objects.filter(rank='kingdom').all()NEWLINE treeData = [{NEWLINE 'id': x.id,NEWLINE 'data': {NEWLINE 'name': x.get_name(),NEWLINE 'count': x.count,NEWLINE },NEWLINE } for x in treeRoot]NEWLINE ret['tree'] = treeDataNEWLINE return JsonResponse(ret)NEWLINENEWLINEdef export(request):NEWLINE search_count = 0NEWLINE solr = SolrQuery('taibif_occurrence')NEWLINE solr_url = solr.generate_solr_url(request.GET.lists())NEWLINE NEWLINE if len(solr_url) > 0:NEWLINE generateCSV(solr_url,request)NEWLINENEWLINE return JsonResponse({"status":search_count}, safe=False)NEWLINENEWLINEdef generateCSV(solr_url,request):NEWLINENEWLINE #directory = os.path.abspath(os.path.join(os.path.curdir))NEWLINE #taibifVolumesPath = '/taibif-volumes/media/'NEWLINE #csvFolder = directory+taibifVolumesPathNEWLINE CSV_MEDIA_FOLDER = 'csv'NEWLINE csvFolder = os.path.join(conf_settings.MEDIA_ROOT, CSV_MEDIA_FOLDER)NEWLINE timestramp = str(int(time.time()))NEWLINE filename = timestramp +'.csv'NEWLINE downloadURL = '没有任何資料'NEWLINE csvFilePath = os.path.join(csvFolder, filename)NEWLINE dataPolicyURL = request.scheme+"://"+request.META['HTTP_HOST']+'/data-policy'NEWLINE if not os.path.exists(csvFolder):NEWLINE os.makedirs(csvFolder)NEWLINENEWLINE if len(solr_url) > 0:NEWLINENEWLINE downloadURL = request.scheme+"://"+request.META['HTTP_HOST']+conf_settings.MEDIA_URL+os.path.join(CSV_MEDIA_FOLDER, filename)NEWLINE #print("curl "+f'"{solr_url}"'+" > "+csvFolder+filename)NEWLINENEWLINE result = subprocess.Popen("curl "+f'"{solr_url}"'+" > "+csvFilePath, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)NEWLINENEWLINE sendMail(downloadURL,request,dataPolicyURL)NEWLINENEWLINEdef sendMail(downloadURL,request,dataPolicyURL):NEWLINE subject = '出現紀錄搜尋'NEWLINENEWLINENEWLINE currentTime = datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S")NEWLINE searchCondition = request.GET["search_condition"]NEWLINENEWLINE html = f"""\NEWLINE<html>NEWLINE <head></head>NEWLINE <body 
style='text-align:left'>NEWLINE您好,NEWLINE<br/><br/>NEWLINE您在TaiBIF上查詢的檔案已夾帶於附件中,NEWLINENEWLINE<br/><br/>NEWLINE檔案相關的詳細說明為:NEWLINENEWLINE<br/><br/>NEWLINE搜尋條件:{searchCondition}NEWLINENEWLINENEWLINE<br/>NEWLINE搜尋時間:{currentTime}NEWLINENEWLINE<br/><br/>NEWLINE檔案類型:CSVNEWLINENEWLINENEWLINE<br/><br/>NEWLINE使用條款:<a href="{dataPolicyURL}">{dataPolicyURL}</a>NEWLINENEWLINE<br/><br/>NEWLINE下載鏈結:<a href="{downloadURL}">{downloadURL}</a>NEWLINENEWLINE<br/><br/>NEWLINE若有問題再麻煩您回覆至NEWLINENEWLINE<br/><br/>NEWLINEtaibif.brcas@gmail.comNEWLINENEWLINE<br/><br/>NEWLINETaiBIF團隊 敬上NEWLINE </body>NEWLINE</html>NEWLINE"""NEWLINENEWLINE send_mail(NEWLINE subject,NEWLINE None,NEWLINE conf_settings.TAIBIF_SERVICE_EMAIL,NEWLINE [request.GET["email"]],NEWLINE html_message=html)NEWLINENEWLINENEWLINE# def search_occurrence_v2_map(request):NEWLINE# time_start = time.time()NEWLINE# facet_values = []NEWLINE# query_list = []NEWLINE# for key, values in request.GET.lists():NEWLINE# if key == 'facet':NEWLINE# facet_values = valuesNEWLINE# else:NEWLINE# query_list.append((key, values))NEWLINE# solr = SolrQuery('taibif_occurrence')NEWLINE# req = solr.request(query_list)NEWLINE# #response = req['solr_response']NEWLINE# resp = solr.get_response()NEWLINE# if not resp:NEWLINE# return JsonResponse({NEWLINE# 'results': 0,NEWLINE# 'solr_error_msg': solr.solr_error,NEWLINE# 'solr_url': solr.solr_url,NEWLINE# 'solr_tuples': solr.solr_tuples,NEWLINE# })NEWLINENEWLINE# solr_menu = SolrQuery('taibif_occurrence', facet_values)NEWLINE# solr_menu.request()NEWLINE# resp_menu = solr_menu.get_response()NEWLINENEWLINE# # for frontend menu data sturcttNEWLINE# menus = []NEWLINE# if resp_menu['facets']:NEWLINE# if data := resp_menu['facets'].get('country', ''):NEWLINE# rows = [{'key': x['val'], 'label': x['val'], 'count': x['count']} for x in data['buckets']]NEWLINE# menus.append({NEWLINE# 'key': 'country', #'countrycode',NEWLINE# 'label': '國家/區域',NEWLINE# 'rows': rows,NEWLINE# })NEWLINE# if data := 
resp_menu['facets'].get('year', ''):NEWLINE# #menu_year = [{'key': 0, 'label': 0, 'count': 0,'year_start':1990,'year_end':2021}]NEWLINE# # TODONEWLINE# menus.append({NEWLINE# 'key': 'year',NEWLINE# 'label': '年份',NEWLINE# 'rows': ['FAKE_FOR_SPACE',],NEWLINE# })NEWLINE# if data := resp_menu['facets'].get('month', ''):NEWLINE# rows = [{'key': x['val'], 'label': x['val'], 'count': x['count']} for x in sorted(data['buckets'], key=lambda x: x['val'])]NEWLINE# menus.append({NEWLINE# 'key': 'month',NEWLINE# 'label': '月份',NEWLINE# 'rows': rows,NEWLINE# })NEWLINE# if data := resp_menu['facets'].get('dataset', ''):NEWLINE# rows = menu_dataset = [{'key': x['val'], 'label': x['val'], 'count': x['count']} for x in data['buckets']]NEWLINE# menus.append({NEWLINE# 'key': 'dataset',NEWLINE# 'label': '資料集',NEWLINE# 'rows': rows,NEWLINE# })NEWLINE# if data := resp_menu['facets'].get('publisher', ''):NEWLINE# rows = [{'key': x['val'], 'label': x['val'], 'count': x['count']} for x in data['buckets']]NEWLINE# menus.append({NEWLINE# 'key':'publisher',NEWLINE# 'label': '發布者',NEWLINE# 'rows': rows,NEWLINE# })NEWLINENEWLINE# resp['menus'] = menusNEWLINENEWLINE# # treeNEWLINE# treeRoot = Taxon.objects.filter(rank='kingdom').all()NEWLINE# treeData = [{NEWLINE# 'id': x.id,NEWLINE# 'data': {NEWLINE# 'name': x.get_name(),NEWLINE# 'count': x.count,NEWLINE# },NEWLINE# } for x in treeRoot]NEWLINE# resp['tree'] = treeDataNEWLINE# if request.GET.get('debug_solr', ''):NEWLINE# resp['solr_resp'] = solr.solr_responseNEWLINE# resp['solr_url'] = solr.solr_url,NEWLINE# resp['solr_tuples'] = solr.solr_tuples,NEWLINENEWLINE# resp['solr_qtime'] = req['solr_response']['responseHeader']['QTime']NEWLINE# resp['elapsed'] = time.time() - time_startNEWLINENEWLINE# # mapNEWLINE# facet_pivot_map = 'facet.pivot=grid_x,grid_y'NEWLINE# # print(solr.solr_url)NEWLINE# if 'grid_x' in solr.solr_url:NEWLINE# map_url = f'{solr.solr_url}&facet=true&{facet_pivot_map}&facet.limit=-1'NEWLINE# else:NEWLINE# map_url = 
f'{solr.solr_url}&facet=true&fq=grid_x%3A%5B0%20TO%20*%5D&fq=grid_y%3A%5B0%20TO%20*%5D&{facet_pivot_map}&facet.limit=-1'NEWLINENEWLINE# map_url = map_url.replace('rows=20','rows=0')NEWLINE# r = requests.get(map_url)NEWLINE# data_c = {}NEWLINE# if r.status_code == 200:NEWLINE# data = r.json()NEWLINE# data_c = data['facet_counts']['facet_pivot']['grid_x,grid_y']NEWLINENEWLINE# map_geojson = {"type":"FeatureCollection","features":[]}NEWLINE# for i in data_c:NEWLINE# current_grid_x = i['value']NEWLINE# for j in i['pivot']:NEWLINE# current_grid_y = j['value']NEWLINE# current_count = j['count']NEWLINE# current_center_x, current_center_y = convert_grid_to_coor(current_grid_x, current_grid_y)NEWLINE# tmp = [{NEWLINE# "type": "Feature",NEWLINE# "geometry":{"type":"Point","coordinates":[current_center_x,current_center_y]},NEWLINE# "properties": {NEWLINE# "counts": current_countNEWLINE# }NEWLINE# }]NEWLINE# map_geojson['features'] += tmpNEWLINE# resp['map_geojson'] = map_geojsonNEWLINENEWLINE# return JsonResponse(resp)NEWLINE
"""Gets the next song in the playlistNEWLINENEWLINERun with --help for more info.NEWLINE"""NEWLINENEWLINEfrom nextsong.cli import nextsongNEWLINENEWLINEnextsong()NEWLINE
""" fprime version handling and reporting """NEWLINEimport osNEWLINEfrom setuptools_scm import get_versionNEWLINENEWLINEROOT_PARENT_COUNT = 5NEWLINENEWLINENEWLINEdef get_fprime_version():NEWLINE """Gets the fprime version using setuptools_scm"""NEWLINE # First try to read the SCM versionNEWLINE try:NEWLINE return get_version(NEWLINE root=os.sep.join([".."] * ROOT_PARENT_COUNT), relative_to=__file__NEWLINE )NEWLINE # Fallback to a specified version when SCM is unavailableNEWLINE except LookupError:NEWLINE return "1.5.4" # Must be kept up-to-date when taggingNEWLINE
# NOTE: Python 2 source (print statements); __future__.division makes all
# `/` operators true division.
from __future__ import division
from features import mfcc
from operator import add
import scipy.io.wavfile as wav
import numpy as np

# Vocabulary of spoken words; one feature file is produced per word.
words = ['apple','banana','kiwi','lime','orange']


for x in range(len(words)):
    fileString = words[x]+"_mfcc"
    data = []
    # Ten recorded samples per word: training_sets/<word>-<1..10>.wav
    for i in range(10):
        (rate,sig) = wav.read("training_sets/"+ words[x] + "-" + str(i+1) + ".wav")
        print "Reading: " + words[x] + "-" + str(i+1) + ".wav"
        # Window length/step are a twentieth of the clip duration, so every
        # sample yields the same number of MFCC frames regardless of length.
        duration = len(sig)/rate
        mfcc_feat = mfcc(sig,rate,winlen=duration/20,winstep=duration/20)
        s = mfcc_feat[:20]
        # Flatten the first 20 frames of coefficients into one feature vector.
        st = []
        for elem in s:
            st.extend(elem)
        
        # Normalize to [-1, 1]; the list is promoted to an ndarray by numpy's
        # reflected division here.
        st /= np.max(np.abs(st),axis=0)
        data.append(st)
        print st
        
    # Persist all ten normalized feature vectors for this word.
    with open("mfccData/" + fileString+ ".npy", 'w') as outfile:
        np.save(outfile,data)
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
In this example, we are going to make a dark code editor widget and make it show visual
whitespaces.

"""
import sys
import os
os.environ['QT_API'] = 'pyside2'
# os.environ['QT_API'] = 'pyqt5'
from pyqode.qt import QtWidgets, QtGui
from pyqode.core import api
from pyqode.core import modes
from pyqode.core import panels


def main():
    """Build a dark code editor that shows whitespaces and run the Qt loop."""
    qt_app = QtWidgets.QApplication(sys.argv)
    main_window = QtWidgets.QMainWindow()

    # Basic editor setup: open this very file in a CodeEdit widget.
    editor = api.CodeEdit()
    editor.file.open(__file__)
    editor.modes.append(modes.CaretLineHighlighterMode())
    highlighter = modes.PygmentsSyntaxHighlighter(editor.document())
    editor.modes.append(highlighter)
    editor.panels.append(panels.SearchAndReplacePanel(),
                         api.Panel.Position.TOP)

    # Render visual whitespaces in dark gray.
    editor.show_white_spaces = True
    editor.whitespaces_foreground = QtGui.QColor('#606020')

    # Dark editor look via the monokai pygments theme.
    highlighter.pygments_style = 'monokai'

    main_window.setCentralWidget(editor)
    main_window.show()

    qt_app.exec_()

    # Tear down explicitly once the event loop exits.
    editor.file.close()
    del editor
    del main_window
    del qt_app


if __name__ == "__main__":
    main()
from .mnist_LeNet import MNIST_LeNet, MNIST_LeNet_Autoencoder, MNIST_Discriminator_L, MNIST_Discriminator_S, MNIST_Generator
from .fmnist_LeNet import FashionMNIST_LeNet, FashionMNIST_LeNet_Autoencoder, FashionMNIST_Discriminator_L, FashionMNIST_Discriminator_S
from .cifar10_LeNet import CIFAR10_LeNet, CIFAR10_LeNet_Autoencoder
from .mlp import MLP, MLP_Autoencoder
from .vae import VariationalAutoencoder
from .dgm import DeepGenerativeModel, StackedDeepGenerativeModel


def build_network(net_name, ae_net=None):
    """Builds the neural network for ``net_name``.

    :param str net_name: identifier of the network architecture; must be one
        of ``implemented_networks`` below (enforced by the assert).
    :param ae_net: pretrained feature network, used only by the stacked
        ``*_DGM_M1M2`` models.
    :return: the instantiated network.
    """

    implemented_networks = ('mnist_LeNet', 'mnist_DGM_M2', 'mnist_DGM_M1M2',
                            'fmnist_LeNet', 'fmnist_DGM_M2', 'fmnist_DGM_M1M2',
                            'cifar10_LeNet', 'cifar10_DGM_M2', 'cifar10_DGM_M1M2',
                            'arrhythmia_mlp', 'cardio_mlp', 'satellite_mlp', 'satimage-2_mlp', 'shuttle_mlp',
                            'thyroid_mlp',
                            'arrhythmia_DGM_M2', 'cardio_DGM_M2', 'satellite_DGM_M2', 'satimage-2_DGM_M2',
                            'shuttle_DGM_M2', 'thyroid_DGM_M2')
    assert net_name in implemented_networks

    net = None

    # Image datasets. The DGM list argument presumably reads
    # [x_dim, y_dim, z_dim, hidden_dims] -- confirm against dgm.DeepGenerativeModel.
    if net_name == 'mnist_LeNet':
        net = MNIST_LeNet()

    if net_name == 'mnist_DGM_M2':
        net = DeepGenerativeModel([1*28*28, 2, 32, [128, 64]], classifier_net=MNIST_LeNet)

    if net_name == 'mnist_DGM_M1M2':
        net = StackedDeepGenerativeModel([1*28*28, 2, 32, [128, 64]], features=ae_net)

    if net_name == 'fmnist_LeNet':
        net = FashionMNIST_LeNet()

    if net_name == 'fmnist_DGM_M2':
        net = DeepGenerativeModel([1*28*28, 2, 64, [256, 128]], classifier_net=FashionMNIST_LeNet)

    if net_name == 'fmnist_DGM_M1M2':
        net = StackedDeepGenerativeModel([1*28*28, 2, 64, [256, 128]], features=ae_net)

    if net_name == 'cifar10_LeNet':
        net = CIFAR10_LeNet()

    if net_name == 'cifar10_DGM_M2':
        net = DeepGenerativeModel([3*32*32, 2, 128, [512, 256]], classifier_net=CIFAR10_LeNet)

    if net_name == 'cifar10_DGM_M1M2':
        net = StackedDeepGenerativeModel([3*32*32, 2, 128, [512, 256]], features=ae_net)

    # Tabular anomaly-detection datasets (MLP encoders).
    if net_name == 'arrhythmia_mlp':
        net = MLP(x_dim=274, h_dims=[128, 64], rep_dim=32, bias=False)

    if net_name == 'cardio_mlp':
        net = MLP(x_dim=21, h_dims=[32, 16], rep_dim=8, bias=False)

    if net_name == 'satellite_mlp':
        net = MLP(x_dim=36, h_dims=[32, 16], rep_dim=8, bias=False)

    if net_name == 'satimage-2_mlp':
        net = MLP(x_dim=36, h_dims=[32, 16], rep_dim=8, bias=False)

    if net_name == 'shuttle_mlp':
        net = MLP(x_dim=9, h_dims=[32, 16], rep_dim=8, bias=False)

    if net_name == 'thyroid_mlp':
        net = MLP(x_dim=6, h_dims=[32, 16], rep_dim=4, bias=False)

    # Tabular deep generative models (no classifier network).
    if net_name == 'arrhythmia_DGM_M2':
        net = DeepGenerativeModel([274, 2, 32, [128, 64]])

    if net_name == 'cardio_DGM_M2':
        net = DeepGenerativeModel([21, 2, 8, [32, 16]])

    if net_name == 'satellite_DGM_M2':
        net = DeepGenerativeModel([36, 2, 8, [32, 16]])

    if net_name == 'satimage-2_DGM_M2':
        net = DeepGenerativeModel([36, 2, 8, [32, 16]])

    if net_name == 'shuttle_DGM_M2':
        net = DeepGenerativeModel([9, 2, 8, [32, 16]])

    if net_name == 'thyroid_DGM_M2':
        net = DeepGenerativeModel([6, 2, 4, [32, 16]])

    return net


def build_GANs(net_name):
    """Builds the GAN networks for ``net_name``.

    :return: tuple ``(D_l, D_s, D_g, G)`` of three discriminators and the
        generator.
    :raises NotImplementedError: for names accepted by the assert that have
        no GAN architecture yet. (Previously these fell through and crashed
        with ``UnboundLocalError`` at the return statement.)
    """

    implemented_networks = ('mnist_LeNet', 'fmnist_LeNet', 'cifar10_LeNet',
                            'arrhythmia_mlp', 'cardio_mlp', 'satellite_mlp', 'satimage-2_mlp', 'shuttle_mlp',
                            'thyroid_mlp')
    assert net_name in implemented_networks

    if net_name == 'mnist_LeNet':
        D_l = MNIST_Discriminator_L()
        D_s = MNIST_Discriminator_S()
        D_g = MNIST_Discriminator_S()
        G = MNIST_Generator()

    elif net_name == 'fmnist_LeNet':
        D_l = FashionMNIST_Discriminator_L()
        D_s = FashionMNIST_Discriminator_S()
        D_g = FashionMNIST_Discriminator_S()
        # NOTE(review): reuses the MNIST generator here -- presumably the
        # output shape matches FashionMNIST; confirm a FashionMNIST-specific
        # generator is not intended.
        G = MNIST_Generator()

    else:
        # Fail fast with a clear message instead of the former UnboundLocalError.
        raise NotImplementedError('GAN networks are not implemented for %r' % net_name)

    return D_l, D_s, D_g, G


def build_autoencoder(net_name):
    """Builds the corresponding autoencoder network for ``net_name``."""

    implemented_networks = ('mnist_LeNet', 'mnist_DGM_M1M2',
                            'fmnist_LeNet', 'fmnist_DGM_M1M2',
                            'cifar10_LeNet', 'cifar10_DGM_M1M2',
                            'arrhythmia_mlp', 'cardio_mlp', 'satellite_mlp', 'satimage-2_mlp', 'shuttle_mlp',
                            'thyroid_mlp')

    assert net_name in implemented_networks

    ae_net = None

    if net_name == 'mnist_LeNet':
        ae_net = MNIST_LeNet_Autoencoder()

    if net_name == 'mnist_DGM_M1M2':
        # Bugfix: this name was listed as implemented but had no branch and
        # silently returned None. Dimensions mirror build_network's
        # 'mnist_DGM_M1M2' config ([1*28*28, 2, 32, [128, 64]]), following
        # the same DGM->VAE pattern as the fmnist/cifar10 entries.
        ae_net = VariationalAutoencoder([1*28*28, 32, [128, 64]])

    if net_name == 'fmnist_LeNet':
        ae_net = FashionMNIST_LeNet_Autoencoder()

    if net_name == 'fmnist_DGM_M1M2':
        ae_net = VariationalAutoencoder([1*28*28, 64, [256, 128]])

    if net_name == 'cifar10_LeNet':
        ae_net = CIFAR10_LeNet_Autoencoder()

    if net_name == 'cifar10_DGM_M1M2':
        ae_net = VariationalAutoencoder([3*32*32, 128, [512, 256]])

    if net_name == 'arrhythmia_mlp':
        ae_net = MLP_Autoencoder(x_dim=274, h_dims=[128, 64], rep_dim=32, bias=False)

    if net_name == 'cardio_mlp':
        ae_net = MLP_Autoencoder(x_dim=21, h_dims=[32, 16], rep_dim=8, bias=False)

    if net_name == 'satellite_mlp':
        ae_net = MLP_Autoencoder(x_dim=36, h_dims=[32, 16], rep_dim=8, bias=False)

    if net_name == 'satimage-2_mlp':
        ae_net = MLP_Autoencoder(x_dim=36, h_dims=[32, 16], rep_dim=8, bias=False)

    if net_name == 'shuttle_mlp':
        ae_net = MLP_Autoencoder(x_dim=9, h_dims=[32, 16], rep_dim=8, bias=False)

    if net_name == 'thyroid_mlp':
        ae_net = MLP_Autoencoder(x_dim=6, h_dims=[32, 16], rep_dim=4, bias=False)

    return ae_net
# Generated by Django 2.0.2 on 2018-03-14 19:54NEWLINENEWLINEfrom django.db import migrations, modelsNEWLINENEWLINENEWLINEclass Migration(migrations.Migration):NEWLINENEWLINE dependencies = [NEWLINE ('course_api', '0007_course_simple_name'),NEWLINE ]NEWLINENEWLINE operations = [NEWLINE migrations.AddField(NEWLINE model_name='schedule',NEWLINE name='term',NEWLINE field=models.CharField(db_index=True, default='201810', max_length=32, verbose_name='Term'),NEWLINE ),NEWLINE migrations.AddField(NEWLINE model_name='subjectclass',NEWLINE name='term',NEWLINE field=models.CharField(db_index=True, default='201810', max_length=32, verbose_name='Term'),NEWLINE ),NEWLINE ]NEWLINE
# -*- coding: utf-8 -*-
# Copyright 2020 Green Valley Belgium NV
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# @@license_version:1.7@@

import json
import logging

import webapp2
from google.appengine.ext import webapp

from mcfw.properties import azzert
from rogerthat.bizz.communities.communities import get_community
from rogerthat.bizz.friend_helper import FriendHelper
from rogerthat.bizz.service.i18n import excel_export, excel_import
from rogerthat.dal.friend import get_friends_map
from rogerthat.dal.profile import get_service_profile
from rogerthat.dal.service import get_friend_serviceidentity_connection
from rogerthat.models import ProfileHashIndex
from rogerthat.rpc import users
from rogerthat.rpc.service import BusinessException
from rogerthat.templates import render
from rogerthat.to.friends import FriendTO, FRIEND_TYPE_SERVICE
from rogerthat.translations import DEFAULT_LANGUAGE
from rogerthat.utils import safe_file_name, filename_friendly_time
from rogerthat.utils.channel import broadcast_via_iframe_result
from rogerthat.utils.crypto import md5_hex
from rogerthat.utils.service import add_slash_default

# Python 2 StringIO import with C-accelerated fallback.
try:
    from cStringIO import StringIO
except ImportError:
    from StringIO import StringIO


class ServicePageHandler(webapp.RequestHandler):
    """Renders the page of a service the current user is connected to."""

    def get(self):
        service_email = self.request.GET.get('service')
        azzert(service_email)

        # Only users connected to the service may view its page.
        user = users.get_current_user()
        service_identity_user = add_slash_default(users.User(service_email))
        azzert(get_friend_serviceidentity_connection(user, service_identity_user),
               "%s tried to get service page of service %s, but is not connected" % (user.email(), service_identity_user.email()))

        params = {'service_email': service_email, 'container_id': 'servicePageContainer_%s' % md5_hex(service_email)}
        self.response.out.write(render('service_page', [DEFAULT_LANGUAGE], params, 'web'))


class ServiceMenuItemBrandingHandler(webapp.RequestHandler):
    """Renders the branding page for a service menu item."""

    def get(self):
        service_email = self.request.GET.get('service')
        azzert(service_email)

        # Only users connected to the service may view its menu item brandings.
        user = users.get_current_user()
        service_identity_user = add_slash_default(users.User(service_email))
        azzert(get_friend_serviceidentity_connection(user, service_identity_user),
               "%s tried to get a menu item page of service %s, but is not connected" % (user.email(), service_identity_user.email()))

        branding = self.request.GET.get('branding')
        azzert(branding)
        params = {'container_id': 'smi_branding_container_%s' %
                  branding, 'branding': branding, 'service_email': service_email}
        self.response.out.write(render('smi_branding', [DEFAULT_LANGUAGE], params, 'web'))


class ServiceAboutPageHandler(webapp.RequestHandler):
    """Renders the 'About' page of a connected service."""

    def get(self):
        service_email = self.request.GET.get('service')
        azzert(service_email)

        # Only users connected to the service may view its About page.
        user = users.get_current_user()
        service_identity_user = add_slash_default(users.User(service_email))
        azzert(get_friend_serviceidentity_connection(user, service_identity_user),
               "%s tried to get About page of service %s, but is not connected" % (user.email(), service_identity_user.email()))

        helper = FriendHelper.from_data_store(service_identity_user, FRIEND_TYPE_SERVICE)
        service = FriendTO.fromDBFriendMap(helper, get_friends_map(user), service_identity_user,
                                           includeServiceDetails=True, targetUser=user)
        azzert(service.type == FriendTO.TYPE_SERVICE)

        params = {'service': service,
                  'service_name': service.name or service.email,
                  'container_id': 'serviceAboutPageContainer_%s' % md5_hex(service_email)}
        self.response.out.write(render('service_about', [DEFAULT_LANGUAGE], params, 'web'))


class EditableTranslationSetExcelDownloadHandler(webapp2.RequestHandler):
    """Streams the service's editable translation set as an Excel download."""

    def get(self):
        # Browser timezone offset (seconds) is used to localize timestamps in
        # the export; ignore nonsense values and fall back to UTC.
        browser_timezone_str = self.request.get('tz_offset', '0')
        try:
            browser_timezone = int(browser_timezone_str)
        except ValueError:
            logging.warning("Invalid browser timezone offset: [%s]" % browser_timezone_str)
            browser_timezone = 0
        if abs(browser_timezone) > 24 * 3600:
            logging.warning("Invalid browser timezone offset: [%s]" % browser_timezone_str)
            browser_timezone = 0

        service_user = users.get_current_user()
        book, latest_export_timestamp = excel_export(service_user, browser_timezone)

        # Return
        output = StringIO()
        book.save(output)
        output.seek(0)

        filename = "Rogerthat_%s_%s.xls" % (filename_friendly_time(latest_export_timestamp), service_user.email())

        self.response.headers['Content-Type'] = 'application/vnd.ms-excel'
        self.response.headers['Content-Disposition'] = 'attachment; filename=%s' % safe_file_name(filename)
        self.response.out.write(output.getvalue())


class PostEditableTranslationSetExcelHandler(webapp2.RequestHandler):
    """Receives an uploaded translations Excel file and imports it."""

    def post(self):
        import xlrd
        try:
            service_user = users.get_current_user()

            file_ = self.request.POST.get('file').file
            book = xlrd.open_workbook(file_contents=file_.read())

            excel_import(service_user, book)
        except BusinessException as be:
            # Validation errors are reported back to the browser via the
            # iframe-result channel instead of an HTTP error.
            self.response.out.write(broadcast_via_iframe_result(
                u'rogerthat.service.translations.post_result', error=be.message))
            return
        except:
            self.response.out.write(broadcast_via_iframe_result(
                u'rogerthat.service.translations.post_result', error=u"Unknown error has occurred."))
            logging.exception("Failure receiving translations!")
            return
        self.response.out.write(broadcast_via_iframe_result(u'rogerthat.service.translations.post_result'))


class GetServiceAppHandler(webapp2.RequestHandler):
    """Resolves the default app id of a service identified by its profile hash."""

    def get_default_app_id(self, user_hash):
        # type: (str) -> str | None
        index = ProfileHashIndex.get(ProfileHashIndex.create_key(user_hash))
        if not index:
            logging.debug('No profile found with user_hash %s', user_hash)
            return None
        profile = get_service_profile(index.user)
        if not profile:
            logging.debug('Profile not found: %s', index.user)
            # Bugfix: previously fell through and crashed with AttributeError
            # on profile.community_id; return None like the missing-index case
            # so the caller serves {'app_id': None}.
            return None
        community = get_community(profile.community_id)
        return community.default_app

    def get(self):
        user_hash = self.request.GET['user']
        self.response.out.write(json.dumps({'app_id': self.get_default_app_id(user_hash)}))
# -------------------------------------------------------------------------NEWLINE# Copyright (c) Microsoft Corporation. All rights reserved.NEWLINE# Licensed under the MIT License. See License.txt in the project root forNEWLINE# license information.NEWLINE# --------------------------------------------------------------------------NEWLINEfrom enum import EnumNEWLINEfrom typing import TYPE_CHECKING, Any, Dict, ListNEWLINENEWLINEfrom azure.core.exceptions import HttpResponseErrorNEWLINEfrom azure.core.paging import PageIteratorNEWLINE# from azure.core import CaseInsensitiveEnumMetaNEWLINE# from six import with_metaclassNEWLINENEWLINEfrom ._generated.models import TableServiceStats as GenTableServiceStatsNEWLINEfrom ._generated.models import AccessPolicy as GenAccessPolicyNEWLINEfrom ._generated.models import Logging as GeneratedLoggingNEWLINEfrom ._generated.models import Metrics as GeneratedMetricsNEWLINEfrom ._generated.models import RetentionPolicy as GeneratedRetentionPolicyNEWLINEfrom ._generated.models import CorsRule as GeneratedCorsRuleNEWLINEfrom ._generated.models import QueryOptionsNEWLINEfrom ._deserialize import (NEWLINE _convert_to_entity,NEWLINE _return_context_and_deserialized,NEWLINE _extract_continuation_token,NEWLINE)NEWLINEfrom ._error import _process_table_errorNEWLINEfrom ._constants import NEXT_PARTITION_KEY, NEXT_ROW_KEY, NEXT_TABLE_NAMENEWLINENEWLINEif TYPE_CHECKING:NEWLINE from ._generated.models import TableQueryResponseNEWLINE from ._generated.models import TableServiceProperties as GenTableServicePropertiesNEWLINENEWLINENEWLINEclass TableServiceStats(GenTableServiceStats):NEWLINE """Stats for the serviceNEWLINENEWLINE :param geo_replication: Geo-Replication information for the Secondary Storage Service.NEWLINE :type geo_replication: ~azure.data.tables.models.GeoReplicationNEWLINE """NEWLINENEWLINE def __init__( # pylint: disable=super-init-not-calledNEWLINE self, geo_replication=None, **kwargsNEWLINE ):NEWLINE self.geo_replication = 
geo_replicationNEWLINENEWLINENEWLINEclass AccessPolicy(GenAccessPolicy):NEWLINE """Access Policy class used by the set and get access policy methods.NEWLINENEWLINE A stored access policy can specify the start time, expiry time, andNEWLINE permissions for the Shared Access Signatures with which it's associated.NEWLINE Depending on how you want to control access to your resource, you canNEWLINE specify all of these parameters within the stored access policy, and omitNEWLINE them from the URL for the Shared Access Signature. Doing so permits you toNEWLINE modify the associated signature's behavior at any time, as well as to revokeNEWLINE it. Or you can specify one or more of the access policy parameters withinNEWLINE the stored access policy, and the others on the URL. Finally, you canNEWLINE specify all of the parameters on the URL. In this case, you can use theNEWLINE stored access policy to revoke the signature, but not to modify its behavior.NEWLINENEWLINE Together the Shared Access Signature and the stored access policy mustNEWLINE include all fields required to authenticate the signature. If any requiredNEWLINE fields are missing, the request will fail. Likewise, if a field is specifiedNEWLINE both in the Shared Access Signature URL and in the stored access policy, theNEWLINE request will fail with status code 400 (Bad Request).NEWLINENEWLINE :param str permission:NEWLINE The permissions associated with the shared access signature. TheNEWLINE user is restricted to operations allowed by the permissions.NEWLINE Required unless an id is given referencing a stored access policyNEWLINE which contains this field. This field must be omitted if it has beenNEWLINE specified in an associated stored access policy.NEWLINE :param expiry:NEWLINE The time at which the shared access signature becomes invalid.NEWLINE Required unless an id is given referencing a stored access policyNEWLINE which contains this field. 
This field must be omitted if it hasNEWLINE been specified in an associated stored access policy. Azure will alwaysNEWLINE convert values to UTC. If a date is passed in without timezone info, itNEWLINE is assumed to be UTC.NEWLINE :type expiry: ~datetime.datetime or strNEWLINE :param start:NEWLINE The time at which the shared access signature becomes valid. IfNEWLINE omitted, start time for this call is assumed to be the time when theNEWLINE storage service receives the request. Azure will always convert valuesNEWLINE to UTC. If a date is passed in without timezone info, it is assumed toNEWLINE be UTC.NEWLINE :type start: ~datetime.datetime or strNEWLINE """NEWLINENEWLINE def __init__( # pylint: disable=super-init-not-calledNEWLINE self, permission=None, expiry=None, start=None, **kwargsNEWLINE ):NEWLINE self.start = startNEWLINE self.expiry = expiryNEWLINE self.permission = permissionNEWLINENEWLINENEWLINEclass TableAnalyticsLogging(GeneratedLogging):NEWLINE """Azure Analytics Logging settings.NEWLINENEWLINE All required parameters must be populated in order to send to Azure.NEWLINENEWLINE :keyword str version: Required. The version of Storage Analytics to configure.NEWLINE :keyword bool delete: Required. Indicates whether all delete requests should be logged.NEWLINE :keyword bool read: Required. Indicates whether all read requests should be logged.NEWLINE :keyword bool write: Required. 
Indicates whether all write requests should be logged.NEWLINE :keyword ~azure.data.tables.RetentionPolicy retention_policy: Required.NEWLINE The retention policy for the metrics.NEWLINE """NEWLINENEWLINE def __init__( # pylint: disable=super-init-not-calledNEWLINE self, **kwargs # type: AnyNEWLINE ):NEWLINE # type: (...)-> NoneNEWLINENEWLINE self.version = kwargs.get("version", u"1.0")NEWLINE self.delete = kwargs.get("delete", False)NEWLINE self.read = kwargs.get("read", False)NEWLINE self.write = kwargs.get("write", False)NEWLINE self.retention_policy = kwargs.get("retention_policy") or RetentionPolicy()NEWLINENEWLINE @classmethodNEWLINE def _from_generated(cls, generated):NEWLINE if not generated:NEWLINE return cls()NEWLINE return cls(NEWLINE version=generated.version,NEWLINE delete=generated.delete,NEWLINE read=generated.read,NEWLINE write=generated.write,NEWLINE retention_policy=RetentionPolicy._from_generated( # pylint: disable=protected-accessNEWLINE generated.retention_policyNEWLINE )NEWLINE )NEWLINENEWLINENEWLINEclass Metrics(GeneratedMetrics):NEWLINE """A summary of request statistics grouped by API in hour or minute aggregates.NEWLINENEWLINE All required parameters must be populated in order to send to Azure.NEWLINENEWLINE :keyword str version: The version of Storage Analytics to configure.NEWLINE :keyword bool enabled: Required. 
Indicates whether metrics are enabled for the service.NEWLINE :keyword bool include_apis: Indicates whether metrics should generate summaryNEWLINE statistics for called API operations.NEWLINE :keyword ~azure.data.tables.RetentionPolicy retention_policy: Required.NEWLINE The retention policy for the metrics.NEWLINE """NEWLINENEWLINE def __init__( # pylint: disable=super-init-not-calledNEWLINE self,NEWLINE **kwargs # type: AnyNEWLINE ):NEWLINE self.version = kwargs.get("version", u"1.0")NEWLINE self.enabled = kwargs.get("enabled", False)NEWLINE self.include_apis = kwargs.get("include_apis")NEWLINE self.retention_policy = kwargs.get("retention_policy") or RetentionPolicy()NEWLINENEWLINE @classmethodNEWLINE def _from_generated(cls, generated):NEWLINE # type: (...) -> MetricsNEWLINE """A summary of request statistics grouped by API in hour or minute aggregates.NEWLINENEWLINE :param Metrics generated: generated MetricsNEWLINE """NEWLINE if not generated:NEWLINE return cls()NEWLINE return cls(NEWLINE version=generated.version,NEWLINE enabled=generated.enabled,NEWLINE include_apis=generated.include_apis,NEWLINE retention_policy=RetentionPolicy._from_generated( # pylint: disable=protected-accessNEWLINE generated.retention_policyNEWLINE )NEWLINE )NEWLINENEWLINENEWLINEclass RetentionPolicy(GeneratedRetentionPolicy):NEWLINE def __init__( # pylint: disable=super-init-not-calledNEWLINE self,NEWLINE enabled=False, # type: boolNEWLINE days=None, # type: intNEWLINE **kwargs # type: AnyNEWLINE ):NEWLINE # type: (...) ->NoneNEWLINE """The retention policy which determines how long the associated data shouldNEWLINE persist.NEWLINENEWLINE All required parameters must be populated in order to send to Azure.NEWLINENEWLINE :param bool enabled: Required. Indicates whether a retention policy is enabledNEWLINE for the storage service.NEWLINE :param int days: Indicates the number of days that metrics or logging orNEWLINE soft-deleted data should be retained. 
All data older than this value willNEWLINE be deleted.NEWLINE :param Any kwargs:NEWLINE """NEWLINE self.enabled = enabledNEWLINE self.days = daysNEWLINE if self.enabled and (self.days is None):NEWLINE raise ValueError("If policy is enabled, 'days' must be specified.")NEWLINENEWLINE @classmethodNEWLINE def _from_generated(cls, generated, **kwargs): # pylint: disable=unused-argumentNEWLINE # type: (GeneratedRetentionPolicy, Dict[str, Any]) -> RetentionPolicyNEWLINE """The retention policy which determines how long the associated data shouldNEWLINE persist.NEWLINENEWLINE All required parameters must be populated in order to send to Azure.NEWLINENEWLINE :param RetentionPolicy generated: Generated Retention PolicyNEWLINE """NEWLINENEWLINE if not generated:NEWLINE return cls()NEWLINE return cls(NEWLINE enabled=generated.enabled,NEWLINE days=generated.days,NEWLINE )NEWLINENEWLINENEWLINEclass CorsRule(GeneratedCorsRule):NEWLINE """CORS is an HTTP feature that enables a web application running under oneNEWLINE domain to access resources in another domain. Web browsers implement aNEWLINE security restriction known as same-origin policy that prevents a web pageNEWLINE from calling APIs in a different domain; CORS provides a secure way toNEWLINE allow one domain (the origin domain) to call APIs in another domain.NEWLINENEWLINE All required parameters must be populated in order to send to Azure.NEWLINENEWLINE :param list[str] allowed_origins:NEWLINE A list of origin domains that will be allowed via CORS, or "*" to allowNEWLINE all domains. The list of must contain at least one entry. Limited to 64NEWLINE origin domains. Each allowed origin can have up to 256 characters.NEWLINE :param list[str] allowed_methods:NEWLINE A list of HTTP methods that are allowed to be executed by the origin.NEWLINE The list of must contain at least one entry. 
For Azure Storage,NEWLINE permitted methods are DELETE, GET, HEAD, MERGE, POST, OPTIONS or PUT.NEWLINE :keyword int max_age_in_seconds:NEWLINE The number of seconds that the client/browser should cache aNEWLINE pre-flight response.NEWLINE :keyword list[str] exposed_headers:NEWLINE Defaults to an empty list. A list of response headers to expose to CORSNEWLINE clients. Limited to 64 defined headers and two prefixed headers. EachNEWLINE header can be up to 256 characters.NEWLINE :keyword list[str] allowed_headers:NEWLINE Defaults to an empty list. A list of headers allowed to be part ofNEWLINE the cross-origin request. Limited to 64 defined headers and 2 prefixedNEWLINE headers. Each header can be up to 256 characters.NEWLINE """NEWLINENEWLINE def __init__( # pylint: disable=super-init-not-calledNEWLINE self,NEWLINE allowed_origins, # type: List[str]NEWLINE allowed_methods, # type: List[str]NEWLINE **kwargs # type: AnyNEWLINE ):NEWLINE # type: (...)-> NoneNEWLINENEWLINE self.allowed_origins = ",".join(allowed_origins)NEWLINE self.allowed_methods = ",".join(allowed_methods)NEWLINE self.allowed_headers = ",".join(kwargs.get("allowed_headers", []))NEWLINE self.exposed_headers = ",".join(kwargs.get("exposed_headers", []))NEWLINE self.max_age_in_seconds = kwargs.get("max_age_in_seconds", 0)NEWLINENEWLINE @classmethodNEWLINE def _from_generated(cls, generated):NEWLINE return cls(NEWLINE [generated.allowed_origins],NEWLINE [generated.allowed_methods],NEWLINE allowed_headers=[generated.allowed_headers],NEWLINE exposed_headers=[generated.exposed_headers],NEWLINE max_age_in_seconds=generated.max_age_in_seconds,NEWLINE )NEWLINENEWLINENEWLINEclass TablePropertiesPaged(PageIterator):NEWLINE """An iterable of Table properties.NEWLINENEWLINE :param callable command: Function to retrieve the next page of items.NEWLINE :keyword int results_per_page: The maximum number of results retrieved per API call.NEWLINE :keyword str filter: The filter to apply to results.NEWLINE :keyword str 
        continuation_token: An opaque continuation token.
    """

    def __init__(self, command, **kwargs):
        super(TablePropertiesPaged, self).__init__(
            self._get_next_cb,
            self._extract_data_cb,
            continuation_token=kwargs.get("continuation_token") or "",
        )
        self._command = command
        self._headers = None
        self._response = None
        self.results_per_page = kwargs.get("results_per_page")
        self.filter = kwargs.get("filter")
        self._location_mode = None

    def _get_next_cb(self, continuation_token, **kwargs):
        """Fetch the next page of tables; the continuation token is the next table name."""
        query_options = QueryOptions(top=self.results_per_page, filter=self.filter)
        try:
            return self._command(
                query_options=query_options,
                next_table_name=continuation_token or None,
                cls=kwargs.pop("cls", None) or _return_context_and_deserialized,
                use_location=self._location_mode,
            )
        except HttpResponseError as error:
            _process_table_error(error)

    def _extract_data_cb(self, get_next_return):
        """Unpack one service response into (next continuation token, list of TableItem)."""
        self._location_mode, self._response, self._headers = get_next_return
        props_list = [
            TableItem._from_generated(t, **self._headers) for t in self._response.value  # pylint: disable=protected-access
        ]
        # NEXT_TABLE_NAME response header carries the continuation; empty -> iteration done.
        return self._headers[NEXT_TABLE_NAME] or None, props_list


class TableEntityPropertiesPaged(PageIterator):
    """An iterable of TableEntity properties.

    :param callable command: Function to retrieve the next page of items.
    :param str table: The name of the table.
    :keyword int results_per_page: The maximum number of results retrieved per API call.
    :keyword str filter: The filter to apply to results.
    :keyword str select: The select filter to apply to results.
    :keyword str continuation_token: An opaque continuation token.
    """

    def __init__(self, command, table, **kwargs):
        # Entity paging uses a dict continuation token ({PartitionKey, RowKey}),
        # unlike table paging which uses a plain string.
        super(TableEntityPropertiesPaged, self).__init__(
            self._get_next_cb,
            self._extract_data_cb,
            continuation_token=kwargs.get("continuation_token") or {},
        )
        self._command = command
        self._headers = None
        self._response = None
        self.table = table
        self.results_per_page = kwargs.get("results_per_page")
        self.filter = kwargs.get("filter")
        self.select = kwargs.get("select")
        self._location_mode = None

    def _get_next_cb(self, continuation_token, **kwargs):
        """Fetch the next page of entities using the (partition key, row key) continuation pair."""
        next_partition_key, next_row_key = _extract_continuation_token(
            continuation_token
        )
        query_options = QueryOptions(
            top=self.results_per_page, select=self.select, filter=self.filter
        )
        try:
            return self._command(
                query_options=query_options,
                next_row_key=next_row_key,
                next_partition_key=next_partition_key,
                table=self.table,
                cls=kwargs.pop("cls", None) or _return_context_and_deserialized,
                use_location=self._location_mode,
            )
        except HttpResponseError as error:
            _process_table_error(error)

    def _extract_data_cb(self, get_next_return):
        """Unpack one service response into (next continuation dict or None, list of entities)."""
        self._location_mode, self._response, self._headers = get_next_return
        props_list = [_convert_to_entity(t) for t in self._response.value]
        next_entity = {}
        # Both continuation headers may be empty on the final page.
        if self._headers[NEXT_PARTITION_KEY] or self._headers[NEXT_ROW_KEY]:
            next_entity = {
                "PartitionKey": self._headers[NEXT_PARTITION_KEY],
                "RowKey": self._headers[NEXT_ROW_KEY],
            }
        return next_entity or None, props_list


class TableSasPermissions(object):
    """Permissions to include in a table-scoped shared access signature (SAS)."""

    def __init__(
        self,
        _str=None,  # type: str
        **kwargs  # type: Any
    ):
        # type: (...) -> None
        """
        :keyword bool read:
            Get entities and query entities.
        :keyword bool add:
            Add entities. Add and Update permissions are required for upsert operations.
        :keyword bool update:
            Update entities. Add and Update permissions are required for upsert operations.
        :keyword bool delete:
            Delete entities.
        :param str _str:
            A string representing the permissions.
        """
        if not _str:
            _str = ""
        # Explicit keyword flags win; otherwise fall back to the permission string.
        self.read = kwargs.pop("read", None) or ("r" in _str)
        self.add = kwargs.pop("add", None) or ("a" in _str)
        self.update = kwargs.pop("update", None) or ("u" in _str)
        self.delete = kwargs.pop("delete", None) or ("d" in _str)

    def __or__(self, other):
        # type: (TableSasPermissions) -> TableSasPermissions
        return TableSasPermissions(_str=str(self) + str(other))

    def __add__(self, other):
        # type: (TableSasPermissions) -> TableSasPermissions
        return TableSasPermissions(_str=str(self) + str(other))

    def __str__(self):
        # type: () -> str
        return (
            ("r" if self.read else "")
            + ("a" if self.add else "")
            + ("u" if self.update else "")
            + ("d" if self.delete else "")
        )

    @classmethod
    def from_string(
        cls,
        permission,
        **kwargs
    ):
        # type: (str, Dict[str, Any]) -> TableSasPermissions
        """Create TableSasPermissions from a string.

        To specify read, add, update, or delete permissions you need only to
        include the first letter of the word in the string. E.g. for read and
        update permissions you would provide a string "ru".

        :param str permission: Specify permissions in
            the string with the first letter of the word.
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: A TableSasPermissions object
        :rtype: :class:`~azure.data.tables.TableSasPermissions`
        """
        p_read = "r" in permission
        p_add = "a" in permission
        p_delete = "d" in permission
        p_update = "u" in permission

        parsed = cls(
            **dict(kwargs, read=p_read, add=p_add, delete=p_delete, update=p_update)
        )
        # Preserve the caller's exact permission string for SAS signing.
        parsed._str = permission  # pylint: disable=protected-access,attribute-defined-outside-init
        return parsed


def service_stats_deserialize(generated):
    # type: (GenTableServiceStats) -> Dict[str, Any]
    """Deserialize a ServiceStats objects into a dict."""
    return {
        "geo_replication": {
            "status": generated.geo_replication.status,  # type: ignore
            "last_sync_time": generated.geo_replication.last_sync_time,  # type: ignore
        }
    }


def service_properties_deserialize(generated):
    # type: (GenTableServiceProperties) -> Dict[str, Any]
    """Deserialize a ServiceProperties objects into a dict."""
    return {
        "analytics_logging": TableAnalyticsLogging._from_generated(generated.logging),  # pylint: disable=protected-access
        "hour_metrics": Metrics._from_generated(  # pylint: disable=protected-access
            generated.hour_metrics
        ),
        "minute_metrics": Metrics._from_generated(  # pylint: disable=protected-access
            generated.minute_metrics
        ),
        "cors": [
            CorsRule._from_generated(cors)  # pylint: disable=protected-access
            for cors in generated.cors  # type: ignore
        ],
    }


class TableItem(object):
    """
    Represents an Azure TableItem.
    Returned by TableServiceClient.list_tables and TableServiceClient.query_tables.

    :ivar str name: The name of the table.
    """

    def __init__(self, name, **kwargs):  # pylint: disable=unused-argument
        # type: (str, Dict[str, Any]) -> None
        """
        :param str name: Name of the Table
        """
        self.name = name

    # TODO: TableQueryResponse is not the correct type
    @classmethod
    def _from_generated(cls, generated, **kwargs):
        # type: (TableQueryResponse, Dict[str, Any]) -> TableItem
        return cls(generated.table_name, **kwargs)  # type: ignore


class TablePayloadFormat(object):
    """
    Specifies the accepted content type of the response payload. More information
    can be found here: https://msdn.microsoft.com/en-us/library/azure/dn535600.aspx
    """

    JSON_NO_METADATA = "application/json;odata=nometadata"
    """Returns no type information for the entity properties."""

    JSON_MINIMAL_METADATA = "application/json;odata=minimalmetadata"
    """Returns minimal type information for the entity properties."""

    JSON_FULL_METADATA = "application/json;odata=fullmetadata"
    """Returns minimal type information for the entity properties plus some extra odata properties."""


class UpdateMode(str, Enum):
    """Mode used when updating an existing entity: full replace or merge of properties."""
    REPLACE = "replace"
    MERGE = "merge"


class TransactionOperation(str, Enum):
    """Operation kinds accepted in a table transaction batch."""
    CREATE = "create"
    UPSERT = "upsert"
    UPDATE = "update"
    DELETE = "delete"


class SASProtocol(str, Enum):
    """Protocols over which a SAS token is valid."""
    HTTPS = "https"
    HTTP = "http"


class LocationMode(str, Enum):
    """
    Specifies the location the request should be sent to. This mode only applies
    for RA-GRS accounts which allow secondary read access. All other account types
    must use PRIMARY.
    """

    PRIMARY = "primary"  #: Requests should be sent to the primary location.
    SECONDARY = (
        "secondary"  #: Requests should be sent to the secondary location, if possible.
    )


class ResourceTypes(object):
    """
    Specifies the resource types that are accessible with the account SAS.

    :param bool service:
        Access to service-level APIs (e.g., Get/Set Service Properties,
        Get Service Stats, List Tables)
    :param bool object:
        Access to object-level APIs for tables (e.g. Get/Create/Query Entity etc.)
    """

    def __init__(self, service=False, object=False):  # pylint: disable=redefined-builtin
        # type: (bool, bool) -> None
        self.service = service
        self.object = object
        self._str = ("s" if self.service else "") + ("o" if self.object else "")

    def __str__(self):
        return self._str

    @classmethod
    def from_string(cls, string):
        # type: (str) -> ResourceTypes
        """Create a ResourceTypes from a string.

        To specify service or object you need only to
        include the first letter of the word in the string. E.g. for service and
        object you would provide a string "so".

        :param str string: Specify service or object
            in the string with the first letter of the word.
        :return: A ResourceTypes object
        :rtype: :class:`~azure.data.tables.ResourceTypes`
        """
        res_service = "s" in string
        res_object = "o" in string

        parsed = cls(res_service, res_object)
        # Preserve the caller's exact string for SAS signing.
        parsed._str = string  # pylint: disable = protected-access
        return parsed


class AccountSasPermissions(object):
    """
    :class:`~ResourceTypes` class to be used with generate_account_sas
    function and for the AccessPolicies used with set_*_acl. There are two types of
    SAS which may be used to grant resource access. One is to grant access to a
    specific resource (resource-specific). Another is to grant access to the
    entire service for a specific account and allow certain operations based on
    perms found here.

    :ivar bool read:
        Valid for all signed resources types (Service, Container, and Object).
        Permits read permissions to the specified resource type.
    :ivar bool write:
        Valid for all signed resources types (Service, Container, and Object).
        Permits write permissions to the specified resource type.
    :ivar bool delete:
        Valid for Container and Object resource types, except for queue messages.
    :ivar bool list:
        Valid for Service and Container resource types only.
    :ivar bool add:
        Valid for the following Object resource types only: queue messages, and append blobs.
    :ivar bool create:
        Valid for the following Object resource types only: blobs and files.
        Users can create new blobs or files, but may not overwrite existing
        blobs or files.
    :ivar bool update:
        Valid for the following Object resource types only: queue messages.
    :ivar bool process:
        Valid for the following Object resource type only: queue messages.
    """

    def __init__(self, **kwargs):
        self.read = kwargs.pop("read", None)
        self.write = kwargs.pop("write", None)
        self.delete = kwargs.pop("delete", None)
        self.list = kwargs.pop("list", None)
        self.add = kwargs.pop("add", None)
        self.create = kwargs.pop("create", None)
        self.update = kwargs.pop("update", None)
        self.process = kwargs.pop("process", None)
        # Canonical permission-string order required by the service: rwdlacup.
        self._str = (
            ("r" if self.read else "")
            + ("w" if self.write else "")
            + ("d" if self.delete else "")
            + ("l" if self.list else "")
            + ("a" if self.add else "")
            + ("c" if self.create else "")
            + ("u" if self.update else "")
            + ("p" if self.process else "")
        )

    def __str__(self):
        return self._str

    @classmethod
    def from_string(cls, permission, **kwargs):
        # type: (str, Dict[str, Any]) -> AccountSasPermissions
        """Create AccountSasPermissions from a string.

        To specify read, write, delete, etc. permissions you need only to
        include the first letter of the word in the string. E.g. for read and write
        permissions you would provide a string "rw".

        :param permission: Specify permissions in the string with the first letter of the word.
        :type permission: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: An AccountSasPermissions object
        :rtype: :class:`~azure.data.tables.AccountSasPermissions`
        """
        p_read = "r" in permission
        p_write = "w" in permission
        p_delete = "d" in permission
        p_list = "l" in permission
        p_add = "a" in permission
        p_create = "c" in permission
        p_update = "u" in permission
        p_process = "p" in permission

        parsed = cls(
            **dict(
                kwargs,
                read=p_read,
                write=p_write,
                delete=p_delete,
                list=p_list,
                add=p_add,
                create=p_create,
                update=p_update,
                process=p_process,
            )
        )
        # Preserve the caller's exact permission string for SAS signing.
        parsed._str = permission  # pylint: disable = protected-access
        return parsed
# -*- coding: utf-8 -*-

"""Utilities for PyBEL testing."""

import itertools as itt
import random

from .utils import n
from ..dsl import protein
from ..struct import BELGraph

__all__ = [
    'generate_random_graph',
]


def generate_random_graph(n_nodes, n_edges, namespace='NS'):
    """Generate a sub-graph with random nodes and edges.

    :param int n_nodes: Number of nodes to make
    :param int n_edges: Number of edges to make. Must not exceed the number of
     possible node pairs, ``n_nodes * (n_nodes - 1) / 2``, or
     :func:`random.sample` raises a :exc:`ValueError`.
    :param str namespace: The namespace of the nodes to use
    :rtype: pybel.BELGraph
    """
    graph = BELGraph()

    # Fix: was ``range(1, n_nodes)``, which created only ``n_nodes - 1`` nodes,
    # contradicting the documented parameter. Keep names starting at "1" for
    # backward compatibility with existing fixtures.
    nodes = [
        protein(namespace=namespace, name=str(i))
        for i in range(1, n_nodes + 1)
    ]

    # All unordered node pairs, then a uniform sample of n_edges of them.
    edges = list(itt.combinations(nodes, r=2))
    edge_sample = random.sample(edges, n_edges)

    for u, v in edge_sample:
        # Random citation/evidence strings so each edge is independently keyed.
        graph.add_increases(u, v, citation=n(), evidence=n())

    return graph
"""Run HOPE-embedding link prediction on one real network (flat CLI script).

Usage: script.py <network_name> <dimensions> ... <out_path>
Writes a one-row CSV of link-prediction metrics to <out_path>.
"""
import sys
import numpy as np
import pandas as pd
import networkx as nx

import hypercomparison.utils
import hypercomparison.networks
import hypercomparison.embedding_centered
from hypercomparison.link_prediction import LinkPredictionTask
from hypercomparison.network_train_test_splitter import NetworkTrainTestSplitterWithMST

# Force numpy to only use single thread in linear algebra
import os
os.environ['OPENBLAS_NUM_THREADS'] = '1'
os.environ['MKL_NUM_THREADS'] = '1'

logger = hypercomparison.utils.get_logger(__name__)

network_name = sys.argv[1]
dimensions = int(sys.argv[2])

# NOTE(review): out_path is argv[-1]; with exactly 3 args it aliases argv[2] — confirm expected CLI.
out_path = sys.argv[-1]
result_list = []
network = hypercomparison.networks.RealNetwork(network_name)
# Embedding dimension cannot exceed the number of nodes.
if dimensions > len(network.G.nodes()):
    dimensions = len(network.G.nodes())

logger.info("Working on network {} dimension {}".format(network_name, dimensions))
#split test and negative edges
splitter = NetworkTrainTestSplitterWithMST(network.G)
G, test_edges = splitter.train_test_split()
negative_edges = splitter.generate_negative_edges()
#calculate embeddings
adjacency_matrix = nx.to_numpy_array(G)
# Eigenvalues of the (possibly asymmetric-typed) adjacency matrix; eigvals returns complex.
e = np.linalg.eigvals(adjacency_matrix)
# beta is set just below 1/lambda_max (Katz-index decay bound for HOPE).
# NOTE(review): builtin max() over a complex numpy array relies on complex
# ordering, which newer numpy versions reject — consider max(e.real); verify.
beta=1/max(e).real - 0.001
logger.info("network {} dimension {}, beta calculated".format(network_name, dimensions))
embeddings = hypercomparison.embedding_centered.HOPE(dimension=dimensions, beta=beta).train(G)
#perform link prediction
test = LinkPredictionTask(test_edges, negative_edges, embeddings, name=network_name)
roc_auc, aupr, average_precision, precision = test.do_link_prediction()
result_list.append([network_name, dimensions, beta, roc_auc, aupr, average_precision, precision])
df = pd.DataFrame(result_list, columns=['network_name', 'dimensions', 'beta', 'roc_auc', 'aupr', 'average_precision', 'precision'])

df.to_csv(out_path, index=None)
"""NEWLINESync Media to S3NEWLINE================NEWLINENEWLINEDjango command that scans all files in your settings.MEDIA_ROOT andNEWLINEsettings.STATIC_ROOT folders and uploads them to S3 with the same directoryNEWLINEstructure.NEWLINENEWLINEThis command can optionally do the following but it is off by default:NEWLINE* gzip compress any CSS and Javascript files it finds and adds the appropriateNEWLINE 'Content-Encoding' header.NEWLINE* set a far future 'Expires' header for optimal caching.NEWLINE* upload only media or static files.NEWLINE* use any other provider compatible with Amazon S3.NEWLINE* set other than 'public-read' ACL.NEWLINENEWLINENote: This script requires the Python boto library and valid Amazon WebNEWLINEServices API keys.NEWLINENEWLINERequired settings.py variables:NEWLINEAWS_ACCESS_KEY_ID = ''NEWLINEAWS_SECRET_ACCESS_KEY = ''NEWLINEAWS_BUCKET_NAME = ''NEWLINENEWLINEWhen you call this command with the `--renamegzip` param, it will addNEWLINEthe '.gz' extension to the file name. But Safari just doesn't recognizeNEWLINE'.gz' files and your site won't work on it! To fix this problem, you canNEWLINEset any other extension (like .jgz) in the `SYNC_S3_RENAME_GZIP_EXT`NEWLINEvariable.NEWLINENEWLINECommand options are:NEWLINE -p PREFIX, --prefix=PREFIXNEWLINE The prefix to prepend to the path on S3.NEWLINE --gzip Enables gzipping CSS and Javascript files.NEWLINE --expires Enables setting a far future expires header.NEWLINE --force Skip the file mtime check to force upload of allNEWLINE files.NEWLINE --filter-list Override default directory and file exclusionNEWLINE filters. (enter as comma separated line)NEWLINE --renamegzip Enables renaming of gzipped files by appending '.gz'.NEWLINE to the original file name. 
This way your originalNEWLINE assets will not be replaced by the gzipped ones.NEWLINE You can change the extension setting theNEWLINE `SYNC_S3_RENAME_GZIP_EXT` var in your settings.pyNEWLINE file.NEWLINE --invalidate Invalidates the objects in CloudFront after uploadingNEWLINE stuff to s3.NEWLINE --media-only Only MEDIA_ROOT files will be uploaded to S3.NEWLINE --static-only Only STATIC_ROOT files will be uploaded to S3.NEWLINE --s3host Override default s3 host.NEWLINE --acl Override default ACL settings ('public-read' ifNEWLINE settings.AWS_DEFAULT_ACL is not defined).NEWLINENEWLINETODO:NEWLINE * Use fnmatch (or regex) to allow more complex FILTER_LIST rules.NEWLINENEWLINE"""NEWLINEimport datetimeNEWLINEimport emailNEWLINEimport mimetypesNEWLINEfrom optparse import make_optionNEWLINEimport osNEWLINEimport timeNEWLINEimport gzipNEWLINEtry:NEWLINE from cStringIO import StringIONEWLINE assert StringIONEWLINEexcept ImportError:NEWLINE from StringIO import StringIONEWLINENEWLINENEWLINEfrom django.conf import settingsNEWLINEfrom django.core.management.base import BaseCommand, CommandErrorNEWLINENEWLINE# Make sure boto is availableNEWLINEtry:NEWLINE import botoNEWLINE import boto.exceptionNEWLINE HAS_BOTO = TrueNEWLINEexcept ImportError:NEWLINE HAS_BOTO = FalseNEWLINENEWLINENEWLINEclass Command(BaseCommand):NEWLINE # Extra variables to avoid passing these aroundNEWLINE AWS_ACCESS_KEY_ID = ''NEWLINE AWS_SECRET_ACCESS_KEY = ''NEWLINE AWS_BUCKET_NAME = ''NEWLINE AWS_CLOUDFRONT_DISTRIBUTION = ''NEWLINE SYNC_S3_RENAME_GZIP_EXT = ''NEWLINENEWLINE DIRECTORIES = ''NEWLINE FILTER_LIST = ['.DS_Store', '.svn', '.hg', '.git', 'Thumbs.db']NEWLINE GZIP_CONTENT_TYPES = (NEWLINE 'text/css',NEWLINE 'application/javascript',NEWLINE 'application/x-javascript',NEWLINE 'text/javascript'NEWLINE )NEWLINENEWLINE uploaded_files = []NEWLINE upload_count = 0NEWLINE skip_count = 0NEWLINENEWLINE option_list = BaseCommand.option_list + (NEWLINE make_option('-p', '--prefix',NEWLINE 
dest='prefix',NEWLINE default=getattr(settings, 'SYNC_MEDIA_S3_PREFIX', ''),NEWLINE help="The prefix to prepend to the path on S3."),NEWLINE make_option('-d', '--dir',NEWLINE dest='dir',NEWLINE help="Custom static root directory to use"),NEWLINE make_option('--s3host',NEWLINE dest='s3host',NEWLINE default=getattr(settings, 'AWS_S3_HOST', ''),NEWLINE help="The s3 host (enables connecting to other providers/regions)"),NEWLINE make_option('--acl',NEWLINE dest='acl',NEWLINE default=getattr(settings, 'AWS_DEFAULT_ACL', 'public-read'),NEWLINE help="Enables to override default acl (public-read)."),NEWLINE make_option('--gzip',NEWLINE action='store_true', dest='gzip', default=False,NEWLINE help="Enables gzipping CSS and Javascript files."),NEWLINE make_option('--renamegzip',NEWLINE action='store_true', dest='renamegzip', default=False,NEWLINE help="Enables renaming of gzipped assets to have '.gz' appended to the filename."),NEWLINE make_option('--expires',NEWLINE action='store_true', dest='expires', default=False,NEWLINE help="Enables setting a far future expires header."),NEWLINE make_option('--force',NEWLINE action='store_true', dest='force', default=False,NEWLINE help="Skip the file mtime check to force upload of all files."),NEWLINE make_option('--filter-list', dest='filter_list',NEWLINE action='store', default='',NEWLINE help="Override default directory and file exclusion filters. 
(enter as comma seperated line)"),NEWLINE make_option('--invalidate', dest='invalidate', default=False,NEWLINE action='store_true',NEWLINE help='Invalidates the associated objects in CloudFront'),NEWLINE make_option('--media-only', dest='media_only', default='',NEWLINE action='store_true',NEWLINE help="Only MEDIA_ROOT files will be uploaded to S3"),NEWLINE make_option('--static-only', dest='static_only', default='',NEWLINE action='store_true',NEWLINE help="Only STATIC_ROOT files will be uploaded to S3"),NEWLINE )NEWLINENEWLINE help = 'Syncs the complete MEDIA_ROOT structure and files to S3 into the given bucket name.'NEWLINE args = 'bucket_name'NEWLINENEWLINE can_import_settings = TrueNEWLINENEWLINE def handle(self, *args, **options):NEWLINE if not HAS_BOTO:NEWLINE raise ImportError("The boto Python library is not installed.")NEWLINENEWLINE # Check for AWS keys in settingsNEWLINE if not hasattr(settings, 'AWS_ACCESS_KEY_ID') or not hasattr(settings, 'AWS_SECRET_ACCESS_KEY'):NEWLINE raise CommandError('Missing AWS keys from settings file. Please supply both AWS_ACCESS_KEY_ID and AWS_SECRET_ACCESS_KEY.')NEWLINE else:NEWLINE self.AWS_ACCESS_KEY_ID = settings.AWS_ACCESS_KEY_IDNEWLINE self.AWS_SECRET_ACCESS_KEY = settings.AWS_SECRET_ACCESS_KEYNEWLINENEWLINE if not hasattr(settings, 'AWS_BUCKET_NAME'):NEWLINE raise CommandError('Missing bucket name from settings file. 
Please add the AWS_BUCKET_NAME to your settings file.')NEWLINE else:NEWLINE if not settings.AWS_BUCKET_NAME:NEWLINE raise CommandError('AWS_BUCKET_NAME cannot be empty.')NEWLINE self.AWS_BUCKET_NAME = settings.AWS_BUCKET_NAMENEWLINENEWLINE if not hasattr(settings, 'MEDIA_ROOT'):NEWLINE raise CommandError('MEDIA_ROOT must be set in your settings.')NEWLINE else:NEWLINE if not settings.MEDIA_ROOT:NEWLINE raise CommandError('MEDIA_ROOT must be set in your settings.')NEWLINENEWLINE self.AWS_CLOUDFRONT_DISTRIBUTION = getattr(settings, 'AWS_CLOUDFRONT_DISTRIBUTION', '')NEWLINENEWLINE self.SYNC_S3_RENAME_GZIP_EXT = \NEWLINE getattr(settings, 'SYNC_S3_RENAME_GZIP_EXT', '.gz')NEWLINENEWLINE self.verbosity = int(options.get('verbosity'))NEWLINE self.prefix = options.get('prefix')NEWLINE self.do_gzip = options.get('gzip')NEWLINE self.rename_gzip = options.get('renamegzip')NEWLINE self.do_expires = options.get('expires')NEWLINE self.do_force = options.get('force')NEWLINE self.invalidate = options.get('invalidate')NEWLINE self.DIRECTORIES = options.get('dir')NEWLINE self.s3host = options.get('s3host')NEWLINE self.default_acl = options.get('acl')NEWLINE self.FILTER_LIST = getattr(settings, 'FILTER_LIST', self.FILTER_LIST)NEWLINE filter_list = options.get('filter_list')NEWLINE if filter_list:NEWLINE # command line option overrides default filter_list andNEWLINE # settings.filter_listNEWLINE self.FILTER_LIST = filter_list.split(',')NEWLINENEWLINE self.media_only = options.get('media_only')NEWLINE self.static_only = options.get('static_only')NEWLINE # Get directoriesNEWLINE if self.media_only and self.static_only:NEWLINE raise CommandError("Can't use --media-only and --static-only together. 
Better not use anything...")NEWLINE elif self.media_only:NEWLINE self.DIRECTORIES = [settings.MEDIA_ROOT]NEWLINE elif self.static_only:NEWLINE self.DIRECTORIES = [settings.STATIC_ROOT]NEWLINE elif self.DIRECTORIES:NEWLINE self.DIRECTORIES = [self.DIRECTORIES]NEWLINE else:NEWLINE self.DIRECTORIES = [settings.MEDIA_ROOT, settings.STATIC_ROOT]NEWLINENEWLINE # Now call the syncing method to walk the MEDIA_ROOT directory andNEWLINE # upload all files found.NEWLINE self.sync_s3()NEWLINENEWLINE # Sending the invalidation request to CloudFront if the userNEWLINE # requested this actionNEWLINE if self.invalidate:NEWLINE self.invalidate_objects_cf()NEWLINENEWLINE print("")NEWLINE print("%d files uploaded." % self.upload_count)NEWLINE print("%d files skipped." % self.skip_count)NEWLINENEWLINE def open_cf(self):NEWLINE """NEWLINE Returns an open connection to CloudFrontNEWLINE """NEWLINE return boto.connect_cloudfront(NEWLINE self.AWS_ACCESS_KEY_ID, self.AWS_SECRET_ACCESS_KEY)NEWLINENEWLINE def invalidate_objects_cf(self):NEWLINE """NEWLINE Split the invalidation request in groups of 1000 objectsNEWLINE """NEWLINE if not self.AWS_CLOUDFRONT_DISTRIBUTION:NEWLINE raise CommandError(NEWLINE 'An object invalidation was requested but the variable 'NEWLINE 'AWS_CLOUDFRONT_DISTRIBUTION is not present in your settings.')NEWLINENEWLINE # We can't send more than 1000 objects in the same invalidationNEWLINE # request.NEWLINE chunk = 1000NEWLINENEWLINE # Connecting to CloudFrontNEWLINE conn = self.open_cf()NEWLINENEWLINE # Splitting the object listNEWLINE objs = self.uploaded_filesNEWLINE chunks = [objs[i:i + chunk] for i in range(0, len(objs), chunk)]NEWLINENEWLINE # Invalidation requestsNEWLINE for paths in chunks:NEWLINE conn.create_invalidation_request(NEWLINE self.AWS_CLOUDFRONT_DISTRIBUTION, paths)NEWLINENEWLINE def sync_s3(self):NEWLINE """NEWLINE Walks the media/static directories and syncs files to S3NEWLINE """NEWLINE bucket, key = self.open_s3()NEWLINE for directory in 
self.DIRECTORIES:NEWLINE os.path.walk(directory, self.upload_s3, (bucket, key, self.AWS_BUCKET_NAME, directory))NEWLINENEWLINE def compress_string(self, s):NEWLINE """Gzip a given string."""NEWLINE zbuf = StringIO()NEWLINE zfile = gzip.GzipFile(mode='wb', compresslevel=6, fileobj=zbuf)NEWLINE zfile.write(s)NEWLINE zfile.close()NEWLINE return zbuf.getvalue()NEWLINENEWLINE def get_s3connection_kwargs(self):NEWLINE """Returns connection kwargs as a dict"""NEWLINE kwargs = {}NEWLINE if self.s3host:NEWLINE kwargs['host'] = self.s3hostNEWLINE return kwargsNEWLINENEWLINE def open_s3(self):NEWLINE """NEWLINE Opens connection to S3 returning bucket and keyNEWLINE """NEWLINE conn = boto.connect_s3(NEWLINE self.AWS_ACCESS_KEY_ID,NEWLINE self.AWS_SECRET_ACCESS_KEY,NEWLINE **self.get_s3connection_kwargs())NEWLINE try:NEWLINE bucket = conn.get_bucket(self.AWS_BUCKET_NAME)NEWLINE except boto.exception.S3ResponseError:NEWLINE bucket = conn.create_bucket(self.AWS_BUCKET_NAME)NEWLINE return bucket, boto.s3.key.Key(bucket)NEWLINENEWLINE def upload_s3(self, arg, dirname, names):NEWLINE """NEWLINE This is the callback to os.path.walk and where much of the work happensNEWLINE """NEWLINE bucket, key, bucket_name, root_dir = argNEWLINENEWLINE # Skip directories we don't want to syncNEWLINE if os.path.basename(dirname) in self.FILTER_LIST:NEWLINE # prevent walk from processing subfiles/subdirs below the ignored oneNEWLINE del names[:]NEWLINE returnNEWLINENEWLINE # Later we assume the MEDIA_ROOT ends with a trailing slashNEWLINE if not root_dir.endswith(os.path.sep):NEWLINE root_dir = root_dir + os.path.sepNEWLINENEWLINE for file in names:NEWLINE headers = {}NEWLINENEWLINE if file in self.FILTER_LIST:NEWLINE continue # Skip files we don't want to syncNEWLINENEWLINE filename = os.path.join(dirname, file)NEWLINE if os.path.isdir(filename):NEWLINE continue # Don't try to upload directoriesNEWLINENEWLINE file_key = filename[len(root_dir):]NEWLINE if self.prefix:NEWLINE file_key = '%s/%s' % 
(self.prefix, file_key)NEWLINENEWLINE # Check if file on S3 is older than local file, if so, uploadNEWLINE if not self.do_force:NEWLINE s3_key = bucket.get_key(file_key)NEWLINE if s3_key:NEWLINE s3_datetime = datetime.datetime(*time.strptime(NEWLINE s3_key.last_modified, '%a, %d %b %Y %H:%M:%S %Z')[0:6])NEWLINE local_datetime = datetime.datetime.utcfromtimestamp(NEWLINE os.stat(filename).st_mtime)NEWLINE if local_datetime < s3_datetime:NEWLINE self.skip_count += 1NEWLINE if self.verbosity > 1:NEWLINE print("File %s hasn't been modified since last being uploaded" % file_key)NEWLINE continueNEWLINENEWLINE # File is newer, let's process and uploadNEWLINE if self.verbosity > 0:NEWLINE print("Uploading %s..." % file_key)NEWLINENEWLINE content_type = mimetypes.guess_type(filename)[0]NEWLINE if content_type:NEWLINE headers['Content-Type'] = content_typeNEWLINE file_obj = open(filename, 'rb')NEWLINE file_size = os.fstat(file_obj.fileno()).st_sizeNEWLINE filedata = file_obj.read()NEWLINE if self.do_gzip:NEWLINE # Gzipping only if file is large enough (>1K is recommended)NEWLINE # and only if file is a common text type (not a binary file)NEWLINE if file_size > 1024 and content_type in self.GZIP_CONTENT_TYPES:NEWLINE filedata = self.compress_string(filedata)NEWLINE if self.rename_gzip:NEWLINE # If rename_gzip is True, then rename the fileNEWLINE # by appending an extension (like '.gz)' toNEWLINE # original filename.NEWLINE file_key = '%s.%s' % (NEWLINE file_key, self.SYNC_S3_RENAME_GZIP_EXT)NEWLINE headers['Content-Encoding'] = 'gzip'NEWLINE if self.verbosity > 1:NEWLINE print("\tgzipped: %dk to %dk" % (file_size / 1024, len(filedata) / 1024))NEWLINE if self.do_expires:NEWLINE # HTTP/1.0NEWLINE headers['Expires'] = '%s GMT' % (email.Utils.formatdate(time.mktime((datetime.datetime.now() + datetime.timedelta(days=365 * 2)).timetuple())))NEWLINE # HTTP/1.1NEWLINE headers['Cache-Control'] = 'max-age %d' % (3600 * 24 * 365 * 2)NEWLINE if self.verbosity > 1:NEWLINE 
print("\texpires: %s" % headers['Expires'])NEWLINE print("\tcache-control: %s" % headers['Cache-Control'])NEWLINENEWLINE try:NEWLINE key.name = file_keyNEWLINE key.set_contents_from_string(filedata, headers, replace=True,NEWLINE policy=self.default_acl)NEWLINE except boto.exception.S3CreateError as e:NEWLINE print("Failed: %s" % e)NEWLINE except Exception as e:NEWLINE print(e)NEWLINE raiseNEWLINE else:NEWLINE self.upload_count += 1NEWLINE self.uploaded_files.append(file_key)NEWLINENEWLINE file_obj.close()NEWLINE
# =============================================================================== #NEWLINE# #NEWLINE# This file has been generated automatically!! Do not change this manually! #NEWLINE# #NEWLINE# =============================================================================== #NEWLINEfrom __future__ import annotationsNEWLINENEWLINEfrom pydantic import FieldNEWLINENEWLINEfrom ..base_object import BaseObjectNEWLINENEWLINENEWLINEclass EndGroupCallScreenSharing(BaseObject):NEWLINE """NEWLINE Ends screen sharing in a joined group callNEWLINE NEWLINE :param group_call_id: Group call identifierNEWLINE :type group_call_id: :class:`int`NEWLINE NEWLINE """NEWLINENEWLINE ID: str = Field("endGroupCallScreenSharing", alias="@type")NEWLINE group_call_id: intNEWLINENEWLINE @staticmethodNEWLINE def read(q: dict) -> EndGroupCallScreenSharing:NEWLINE return EndGroupCallScreenSharing.construct(**q)NEWLINE
# Copyright (C) 2021 Intel Corporation
#
# SPDX-License-Identifier: MIT

import os
import base64
import uuid

from django.conf import settings
from django.core.cache import cache
from rest_framework import status
from rest_framework.response import Response

from cvat.apps.engine.serializers import DataSerializer

class TusFile:
    # Resumable-upload bookkeeping lives in the Django cache under
    # "tus-uploads/<file_id>/..." keys; the partial file lives on disk.
    _tus_cache_timeout = 3600  # seconds an unfinished upload's metadata survives
    def __init__(self, file_id, upload_dir):
        self.file_id = file_id
        self.upload_dir = upload_dir
        self.file_path = os.path.join(self.upload_dir, self.file_id)
        self.filename = cache.get("tus-uploads/{}/filename".format(file_id))
        self.file_size = int(cache.get("tus-uploads/{}/file_size".format(file_id)))
        self.metadata = cache.get("tus-uploads/{}/metadata".format(file_id))
        self.offset = cache.get("tus-uploads/{}/offset".format(file_id))

    def init_file(self):
        """Pre-allocate the destination file to its final size (sparse write of last byte)."""
        os.makedirs(self.upload_dir, exist_ok=True)
        file_path = os.path.join(self.upload_dir, self.file_id)
        with open(file_path, 'wb') as file:
            file.seek(self.file_size - 1)
            file.write(b'\0')

    def write_chunk(self, chunk):
        """Write one chunk at its declared offset and advance the cached offset atomically."""
        with open(self.file_path, 'r+b') as file:
            file.seek(chunk.offset)
            file.write(chunk.content)
        self.offset = cache.incr("tus-uploads/{}/offset".format(self.file_id), chunk.size)

    def is_complete(self):
        return self.offset == self.file_size

    def rename(self):
        """Move the finished upload from its uuid name to the user-supplied filename."""
        file_id_path = os.path.join(self.upload_dir, self.file_id)
        file_path = os.path.join(self.upload_dir, self.filename)
        file_exists = os.path.lexists(os.path.join(self.upload_dir, self.filename))
        if file_exists:
            raise FileExistsError("File {} is already uploaded".format(self.filename))
        os.rename(file_id_path, file_path)

    def clean(self):
        """Drop all cached bookkeeping for this upload."""
        cache.delete_many([
            "tus-uploads/{}/file_size".format(self.file_id),
            "tus-uploads/{}/filename".format(self.file_id),
            "tus-uploads/{}/offset".format(self.file_id),
            "tus-uploads/{}/metadata".format(self.file_id),
        ])

    @staticmethod
    def get_tusfile(file_id, upload_dir):
        """Return a TusFile for an in-progress upload, or None if unknown/expired."""
        file_exists = cache.get("tus-uploads/{}/filename".format(file_id), None) is not None
        if file_exists:
            return TusFile(file_id, upload_dir)
        return None

    @staticmethod
    def create_file(metadata, file_size, upload_dir):
        """Register a new upload in the cache, pre-allocate its file, and return it."""
        file_id = str(uuid.uuid4())
        cache.add("tus-uploads/{}/filename".format(file_id), "{}".format(metadata.get("filename")), TusFile._tus_cache_timeout)
        cache.add("tus-uploads/{}/file_size".format(file_id), file_size, TusFile._tus_cache_timeout)
        cache.add("tus-uploads/{}/offset".format(file_id), 0, TusFile._tus_cache_timeout)
        cache.add("tus-uploads/{}/metadata".format(file_id), metadata, TusFile._tus_cache_timeout)

        tus_file = TusFile(file_id, upload_dir)
        tus_file.init_file()
        return tus_file

class TusChunk:
    """One PATCH request's worth of upload data, parsed from tus headers."""
    def __init__(self, request):
        self.META = request.META
        self.offset = int(request.META.get("HTTP_UPLOAD_OFFSET", 0))
        self.size = int(request.META.get("CONTENT_LENGTH", settings.TUS_DEFAULT_CHUNK_SIZE))
        self.content = request.body

# This upload mixin is implemented using tus
# tus is open protocol for file uploads (see more https://tus.io/)
class UploadMixin(object):
    _tus_api_version = '1.0.0'
    _tus_api_version_supported = ['1.0.0']
    _tus_api_extensions = []
    _tus_max_file_size = str(settings.TUS_MAX_FILE_SIZE)
    # Headers attached to every tus response (protocol + CORS).
    _base_tus_headers = {
        'Tus-Resumable': _tus_api_version,
        'Tus-Version': ",".join(_tus_api_version_supported),
        'Tus-Extension': ",".join(_tus_api_extensions),
        'Tus-Max-Size': _tus_max_file_size,
        'Access-Control-Allow-Origin': "*",
        'Access-Control-Allow-Methods': "PATCH,HEAD,GET,POST,OPTIONS",
        'Access-Control-Expose-Headers': "Tus-Resumable,upload-length,upload-metadata,Location,Upload-Offset",
        'Access-Control-Allow-Headers': "Tus-Resumable,upload-length,upload-metadata,Location,Upload-Offset,content-type",
        'Cache-Control': 'no-store'
    }
    # Matches a canonical uuid4 used as the upload's file_id in URLs.
    file_id_regex = r'(?P<file_id>\b[0-9a-f]{8}\b-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-\b[0-9a-f]{12}\b)'

    def _tus_response(self, status, data=None, extra_headers=None):
        """Build a DRF Response carrying the standard tus headers plus any extras."""
        response = Response(data, status)
        for key, value in self._base_tus_headers.items():
            response.__setitem__(key, value)
        if extra_headers:
            for key, value in extra_headers.items():
                response.__setitem__(key, value)
        return response

    def _get_metadata(self, request):
        """Parse the Upload-Metadata header: comma-separated 'key base64value' pairs."""
        metadata = {}
        if request.META.get("HTTP_UPLOAD_METADATA"):
            for kv in request.META.get("HTTP_UPLOAD_METADATA").split(","):
                splited_metadata = kv.split(" ")
                if len(splited_metadata) == 2:
                    key, value = splited_metadata
                    value = base64.b64decode(value)
                    if isinstance(value, bytes):
                        value = value.decode()
                    metadata[key] = value
                else:
                    # Key with no value part is recorded as empty string.
                    metadata[splited_metadata[0]] = ""
        return metadata

    def upload_data(self, request):
        """Dispatch an upload request by its custom/tus headers to the right handler."""
        tus_request = request.headers.get('Upload-Length', None) is not None or request.method == 'OPTIONS'
        bulk_file_upload = request.headers.get('Upload-Multiple', None) is not None
        start_upload = request.headers.get('Upload-Start', None) is not None
        finish_upload = request.headers.get('Upload-Finish', None) is not None
        one_request_upload = start_upload and finish_upload
        if one_request_upload or finish_upload:
            return self.upload_finished(request)
        elif start_upload:
            return Response(status=status.HTTP_202_ACCEPTED)
        elif tus_request:
            return self.init_tus_upload(request)
        elif bulk_file_upload:
            return self.append(request)
        else:  # backward compatibility case - no upload headers were found
            return self.upload_finished(request)

    def init_tus_upload(self, request):
        """Handle the tus creation request (POST/OPTIONS): validate and register a new upload."""
        if request.method == 'OPTIONS':
            # NOTE(review): rest_framework.status has no HTTP_204 attribute —
            # this raises AttributeError; should be status.HTTP_204_NO_CONTENT.
            return self._tus_response(status=status.HTTP_204)
        else:
            metadata = self._get_metadata(request)
            filename = metadata.get('filename', '')
            if not self.validate_filename(filename):
                return self._tus_response(status=status.HTTP_400_BAD_REQUEST,
                    data="File name {} is not allowed".format(filename))


            message_id = request.META.get("HTTP_MESSAGE_ID")
            if message_id:
                # NOTE(review): stored as raw bytes, unlike the str values from
                # _get_metadata — confirm downstream consumers expect bytes.
                metadata["message_id"] = base64.b64decode(message_id)

            file_exists = os.path.lexists(os.path.join(self.get_upload_dir(), filename))
            if file_exists:
                return self._tus_response(status=status.HTTP_409_CONFLICT,
                    data="File with same name already exists")

            file_size = int(request.META.get("HTTP_UPLOAD_LENGTH", "0"))
            if file_size > int(self._tus_max_file_size):
                return self._tus_response(status=status.HTTP_413_REQUEST_ENTITY_TOO_LARGE,
                    data="File size exceeds max limit of {} bytes".format(self._tus_max_file_size))

            tus_file = TusFile.create_file(metadata, file_size, self.get_upload_dir())

            location = request.build_absolute_uri()
            if 'HTTP_X_FORWARDED_HOST' not in request.META:
                location = request.META.get('HTTP_ORIGIN') + request.META.get('PATH_INFO')
            return self._tus_response(
                status=status.HTTP_201_CREATED,
                extra_headers={'Location': '{}{}'.format(location, tus_file.file_id)})

    def append_tus_chunk(self, request, file_id):
        if request.method == 'HEAD':
            tus_file = TusFile.get_tusfile(str(file_id), self.get_upload_dir())
            if tus_file:
                return
self._tus_response(status=status.HTTP_200_OK, extra_headers={NEWLINE 'Upload-Offset': tus_file.offset,NEWLINE 'Upload-Length': tus_file.file_size})NEWLINE return self._tus_response(status=status.HTTP_404_NOT_FOUND)NEWLINE else:NEWLINE tus_file = TusFile.get_tusfile(str(file_id), self.get_upload_dir())NEWLINE chunk = TusChunk(request)NEWLINENEWLINE if chunk.offset != tus_file.offset:NEWLINE return self._tus_response(status=status.HTTP_409_CONFLICT)NEWLINENEWLINE if chunk.offset > tus_file.file_size:NEWLINE return self._tus_response(status=status.HTTP_413_REQUEST_ENTITY_TOO_LARGE)NEWLINENEWLINE tus_file.write_chunk(chunk)NEWLINENEWLINE if tus_file.is_complete():NEWLINE tus_file.rename()NEWLINE tus_file.clean()NEWLINENEWLINE return self._tus_response(status=status.HTTP_204_NO_CONTENT,NEWLINE extra_headers={'Upload-Offset': tus_file.offset})NEWLINENEWLINE def validate_filename(self, filename):NEWLINE upload_dir = self.get_upload_dir()NEWLINE file_path = os.path.join(upload_dir, filename)NEWLINE return os.path.commonprefix((os.path.realpath(file_path), upload_dir)) == upload_dirNEWLINENEWLINE def get_upload_dir(self):NEWLINE return self._object.data.get_upload_dirname()NEWLINENEWLINE def get_request_client_files(self, request):NEWLINE serializer = DataSerializer(self._object, data=request.data)NEWLINE serializer.is_valid(raise_exception=True)NEWLINE data = {k: v for k, v in serializer.validated_data.items()}NEWLINE return data.get('client_files', None)NEWLINENEWLINE def append(self, request):NEWLINE client_files = self.get_request_client_files(request)NEWLINE if client_files:NEWLINE upload_dir = self.get_upload_dir()NEWLINE for client_file in client_files:NEWLINE with open(os.path.join(upload_dir, client_file['file'].name), 'ab+') as destination:NEWLINE destination.write(client_file['file'].read())NEWLINE return Response(status=status.HTTP_200_OK)NEWLINENEWLINE # override this to do stuff after uploadNEWLINE def upload_finished(self, request):NEWLINE raise 
NotImplementedError('You need to implement upload_finished in UploadMixin')NEWLINE
import operator


def parse(data):
    """Parse the puzzle input into one tuple of (x, y, z) beacon tuples per scanner."""
    return [
        tuple(tuple(int(c) for c in l.split(",")) for l in s.split("\n")[1:])
        for s in data.split("\n\n")
    ]


def rot_x(cords):
    """Rotate every coordinate 90 degrees around the x axis."""
    return tuple((x, z, -y) for x, y, z in cords)


def rot_y(cords):
    """Rotate every coordinate 90 degrees around the y axis."""
    return tuple((z, y, -x) for x, y, z in cords)


def rot_z(cords):
    """Rotate every coordinate 90 degrees around the z axis."""
    return tuple((y, -x, z) for x, y, z in cords)


def rep(f, n, arg):
    """Apply ``f`` to ``arg`` ``n`` times."""
    for _ in range(n):
        arg = f(arg)
    return arg


def rots(cords):
    """Yield the 24 axis-aligned orientations of ``cords``.

    The (rx, ry) combinations pick which axis faces "up" (ry is restricted to
    {0, 2} when rx == 1 to avoid duplicates); rz spins around that axis.
    """
    for rx in range(2):
        for ry in range(4) if not rx else (0, 2):
            for rz in range(4):
                yield rep(rot_x, rx, rep(rot_y, ry, rep(rot_z, rz, cords)))


def sub(a, b):
    """Component-wise a - b."""
    return tuple(map(operator.sub, a, b))


def add(a, b):
    """Component-wise a + b."""
    return tuple(map(operator.add, a, b))


def find(beacons, s):
    """Try to align scanner ``s`` against the known ``beacons`` set.

    Returns (offset, translated_beacons) when at least 12 beacons coincide,
    otherwise (None, None).
    """
    for r in rots(s):
        for b in beacons:
            # Needing 12 coincidences means at least one of the first
            # len(r) - 11 beacons of r is part of the overlap, so anchoring on
            # those candidates suffices. (Fix: the original sliced r[:-12],
            # i.e. len(r) - 12 candidates — one too few — which could skip
            # every valid anchor and miss an alignment entirely.)
            for bb in r[:len(r) - 11]:
                off = sub(b, bb)
                match = 0
                for bbb in r:
                    if add(bbb, off) in beacons:
                        match += 1
                    if match == 12:
                        break
                else:
                    continue
                return off, tuple(add(bbb, off) for bbb in r)
    return None, None


def slam(data):
    """Greedily merge all scanners into one beacon map (``data`` is consumed).

    Returns (sensor_offsets, beacons), both as sets of (x, y, z) tuples,
    expressed in the frame of the first scanner.
    """
    beacons = set(data.pop(0))
    sensors = set()
    while data:
        for i, s in enumerate(data):
            sensor, found = find(beacons, s)
            # Explicit None check; any offset tuple (even (0, 0, 0)) is a match.
            if sensor is not None:
                sensors.add(sensor)
                beacons.update(found)
                del data[i]
                # Restart the sweep rather than continuing to enumerate a
                # list we just deleted from (the original silently skipped
                # the element that shifted into slot i).
                break
        print(f"Found {len(beacons)} with {len(sensors)}")
    return sensors, beacons


def aoc(data):
    """Part 1: the number of distinct beacons after merging every scanner."""
    return len(slam(parse(data))[1])
#!/usr/bin/env python
#
# __COPYRIGHT__
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#

__revision__ = "__FILE__ __REVISION__ __DATE__ __DEVELOPER__"

"""
Verify basic use of CPPDEFINES with various data types.
"""

import TestSCons

test = TestSCons.TestSCons()

# The generated SConstruct renders CPPDEFINES from five shapes of input:
# a bare string, a flat list, a nested list with (name, value) tuples and
# variable expansion, a dict (emitted in sorted key order), and a string
# substitution that expands to a list. Each shape is rendered twice: first
# with gcc-style '-D'/'' affixes and INTEGER=0, then with '|'/'|' affixes
# and INTEGER=1.
test.write('SConstruct', """\
test_list = [
    'xyz',
    ['x', 'y', 'z'],
    ['x', ['y', 123], 'z', ('int', '$INTEGER')],
    { 'c' : 3, 'b': None, 'a' : 1 },
    "${TESTDEFS}",
]
for i in test_list:
    env = Environment(CPPDEFPREFIX='-D', CPPDEFSUFFIX='', INTEGER=0, TESTDEFS=["FOO", "BAR=1"])
    print(env.Clone(CPPDEFINES=i).subst('$_CPPDEFFLAGS'))
for i in test_list:
    env = Environment(CPPDEFPREFIX='|', CPPDEFSUFFIX='|', INTEGER=1, TESTDEFS=["FOO", "BAR=1"])
    print(env.Clone(CPPDEFINES=i).subst('$_CPPDEFFLAGS'))
""")

# The expected stdout: five lines per affix style, in the order of
# test_list above; nothing is built, so the build step reports up-to-date.
expect = test.wrap_stdout(build_str="scons: `.' is up to date.\n",
    read_str = """\
-Dxyz
-Dx -Dy -Dz
-Dx -Dy=123 -Dz -Dint=0
-Da=1 -Db -Dc=3
-DFOO -DBAR=1
|xyz|
|x| |y| |z|
|x| |y=123| |z| |int=1|
|a=1| |b| |c=3|
|FOO| |BAR=1|
""")

test.run(arguments = '.', stdout=expect)

test.pass_test()

# Local Variables:
# tab-width:4
# indent-tabs-mode:nil
# End:
# vim: set expandtab tabstop=4 shiftwidth=4:
#!/usr/bin/env python
# Jonas Schnelli, 2013
# make sure the Lissomcoin-Qt.app contains the right plist (including the right version)
# fix made because of several bugs in Qt mac deployment (https://bugreports.qt-project.org/browse/QTBUG-21267)

from string import Template
from datetime import date

bitcoinDir = "./"

inFile = bitcoinDir + "/share/qt/Info.plist"
outFile = "Lissomcoin-Qt.app/Contents/Info.plist"

# Scrape "VERSION = x.y.z" out of the qmake project file.
version = "unknown"
fileForGrabbingVersion = bitcoinDir + "bitcoin-qt.pro"
with open(fileForGrabbingVersion) as projectFile:
    for line in projectFile:
        lineArr = line.replace(" ", "").split("=")
        if lineArr[0].startswith("VERSION"):
            version = lineArr[1].replace("\n", "")

# Fill in the Info.plist template and write it into the app bundle.
# (Fix: all three file handles are now closed deterministically via
# `with`; the output file in particular was previously never closed.)
with open(inFile, "r") as fIn:
    fileContent = fIn.read()

newFileContent = Template(fileContent).substitute(VERSION=version, YEAR=date.today().year)

with open(outFile, "w") as fOut:
    fOut.write(newFileContent)

# Parenthesized single-argument print behaves identically under Python 2
# and makes the script parseable by Python 3 (the old `print "..."`
# statement was Python-2-only).
print("Info.plist fresh created")
from __future__ import print_functionNEWLINE# Copyright (c) 2012 Google Inc. All rights reserved.NEWLINE# Use of this source code is governed by a BSD-style license that can beNEWLINE# found in the LICENSE file.NEWLINENEWLINEimport filecmpNEWLINEimport gyp.commonNEWLINEimport gyp.xcodeproj_fileNEWLINEimport gyp.xcode_ninjaNEWLINEimport errnoNEWLINEimport osNEWLINEimport sysNEWLINEimport posixpathNEWLINEimport reNEWLINEimport shutilNEWLINEimport subprocessNEWLINEimport tempfileNEWLINENEWLINENEWLINE# Project files generated by this module will use _intermediate_var as aNEWLINE# custom Xcode setting whose value is a DerivedSources-like directory that'sNEWLINE# project-specific and configuration-specific. The normal choice,NEWLINE# DERIVED_FILE_DIR, is target-specific, which is thought to be too restrictiveNEWLINE# as it is likely that multiple targets within a single project file will wantNEWLINE# to access the same set of generated files. The other option,NEWLINE# PROJECT_DERIVED_FILE_DIR, is unsuitable because while it is project-specific,NEWLINE# it is not configuration-specific. INTERMEDIATE_DIR is defined asNEWLINE# $(PROJECT_DERIVED_FILE_DIR)/$(CONFIGURATION).NEWLINE_intermediate_var = 'INTERMEDIATE_DIR'NEWLINENEWLINE# SHARED_INTERMEDIATE_DIR is the same, except that it is shared among allNEWLINE# targets that share the same BUILT_PRODUCTS_DIR.NEWLINE_shared_intermediate_var = 'SHARED_INTERMEDIATE_DIR'NEWLINENEWLINE_library_search_paths_var = 'LIBRARY_SEARCH_PATHS'NEWLINENEWLINEgenerator_default_variables = {NEWLINE 'EXECUTABLE_PREFIX': '',NEWLINE 'EXECUTABLE_SUFFIX': '',NEWLINE 'STATIC_LIB_PREFIX': 'lib',NEWLINE 'SHARED_LIB_PREFIX': 'lib',NEWLINE 'STATIC_LIB_SUFFIX': '.a',NEWLINE 'SHARED_LIB_SUFFIX': '.dylib',NEWLINE # INTERMEDIATE_DIR is a place for targets to build up intermediate products.NEWLINE # It is specific to each build environment. 
It is only guaranteed to existNEWLINE # and be constant within the context of a project, corresponding to a singleNEWLINE # input file. Some build environments may allow their intermediate directoryNEWLINE # to be shared on a wider scale, but this is not guaranteed.NEWLINE 'INTERMEDIATE_DIR': '$(%s)' % _intermediate_var,NEWLINE 'OS': 'mac',NEWLINE 'PRODUCT_DIR': '$(BUILT_PRODUCTS_DIR)',NEWLINE 'LIB_DIR': '$(BUILT_PRODUCTS_DIR)',NEWLINE 'RULE_INPUT_ROOT': '$(INPUT_FILE_BASE)',NEWLINE 'RULE_INPUT_EXT': '$(INPUT_FILE_SUFFIX)',NEWLINE 'RULE_INPUT_NAME': '$(INPUT_FILE_NAME)',NEWLINE 'RULE_INPUT_PATH': '$(INPUT_FILE_PATH)',NEWLINE 'RULE_INPUT_DIRNAME': '$(INPUT_FILE_DIRNAME)',NEWLINE 'SHARED_INTERMEDIATE_DIR': '$(%s)' % _shared_intermediate_var,NEWLINE 'CONFIGURATION_NAME': '$(CONFIGURATION)',NEWLINE}NEWLINENEWLINE# The Xcode-specific sections that hold paths.NEWLINEgenerator_additional_path_sections = [NEWLINE 'mac_bundle_resources',NEWLINE 'mac_framework_headers',NEWLINE 'mac_framework_private_headers',NEWLINE # 'mac_framework_dirs', input already handles _dirs endings.NEWLINE]NEWLINENEWLINE# The Xcode-specific keys that exist on targets and aren't moved down toNEWLINE# configurations.NEWLINEgenerator_additional_non_configuration_keys = [NEWLINE 'ios_app_extension',NEWLINE 'ios_watch_app',NEWLINE 'ios_watchkit_extension',NEWLINE 'mac_bundle',NEWLINE 'mac_bundle_resources',NEWLINE 'mac_framework_headers',NEWLINE 'mac_framework_private_headers',NEWLINE 'mac_xctest_bundle',NEWLINE 'xcode_create_dependents_test_runner',NEWLINE]NEWLINENEWLINE# We want to let any rules apply to files that are resources also.NEWLINEgenerator_extra_sources_for_rules = [NEWLINE 'mac_bundle_resources',NEWLINE 'mac_framework_headers',NEWLINE 'mac_framework_private_headers',NEWLINE]NEWLINENEWLINEgenerator_filelist_paths = NoneNEWLINENEWLINE# Xcode's standard set of library directories, which don't need to be duplicatedNEWLINE# in LIBRARY_SEARCH_PATHS. 
This list is not exhaustive, but that's okay.NEWLINExcode_standard_library_dirs = frozenset([NEWLINE '$(SDKROOT)/usr/lib',NEWLINE '$(SDKROOT)/usr/local/lib',NEWLINE])NEWLINENEWLINEdef CreateXCConfigurationList(configuration_names):NEWLINE xccl = gyp.xcodeproj_file.XCConfigurationList({'buildConfigurations': []})NEWLINE if len(configuration_names) == 0:NEWLINE configuration_names = ['Default']NEWLINE for configuration_name in configuration_names:NEWLINE xcbc = gyp.xcodeproj_file.XCBuildConfiguration({NEWLINE 'name': configuration_name})NEWLINE xccl.AppendProperty('buildConfigurations', xcbc)NEWLINE xccl.SetProperty('defaultConfigurationName', configuration_names[0])NEWLINE return xcclNEWLINENEWLINENEWLINEclass XcodeProject(object):NEWLINE def __init__(self, gyp_path, path, build_file_dict):NEWLINE self.gyp_path = gyp_pathNEWLINE self.path = pathNEWLINE self.project = gyp.xcodeproj_file.PBXProject(path=path)NEWLINE projectDirPath = gyp.common.RelativePath(NEWLINE os.path.dirname(os.path.abspath(self.gyp_path)),NEWLINE os.path.dirname(path) or '.')NEWLINE self.project.SetProperty('projectDirPath', projectDirPath)NEWLINE self.project_file = \NEWLINE gyp.xcodeproj_file.XCProjectFile({'rootObject': self.project})NEWLINE self.build_file_dict = build_file_dictNEWLINENEWLINE # TODO(mark): add destructor that cleans up self.path if created_dir isNEWLINE # True and things didn't complete successfully. Or do something evenNEWLINE # better with "try"?NEWLINE self.created_dir = FalseNEWLINE try:NEWLINE os.makedirs(self.path)NEWLINE self.created_dir = TrueNEWLINE except OSError as e:NEWLINE if e.errno != errno.EEXIST:NEWLINE raiseNEWLINENEWLINE def Finalize1(self, xcode_targets, serialize_all_tests):NEWLINE # Collect a list of all of the build configuration names used by theNEWLINE # various targets in the file. 
It is very heavily advised to keep eachNEWLINE # target in an entire project (even across multiple project files) usingNEWLINE # the same set of configuration names.NEWLINE configurations = []NEWLINE for xct in self.project.GetProperty('targets'):NEWLINE xccl = xct.GetProperty('buildConfigurationList')NEWLINE xcbcs = xccl.GetProperty('buildConfigurations')NEWLINE for xcbc in xcbcs:NEWLINE name = xcbc.GetProperty('name')NEWLINE if name not in configurations:NEWLINE configurations.append(name)NEWLINENEWLINE # Replace the XCConfigurationList attached to the PBXProject object withNEWLINE # a new one specifying all of the configuration names used by the variousNEWLINE # targets.NEWLINE try:NEWLINE xccl = CreateXCConfigurationList(configurations)NEWLINE self.project.SetProperty('buildConfigurationList', xccl)NEWLINE except:NEWLINE sys.stderr.write("Problem with gyp file %s\n" % self.gyp_path)NEWLINE raiseNEWLINENEWLINE # The need for this setting is explained above where _intermediate_var isNEWLINE # defined. The comments below about wanting to avoid project-wide buildNEWLINE # settings apply here too, but this needs to be set on a project-wide basisNEWLINE # so that files relative to the _intermediate_var setting can be displayedNEWLINE # properly in the Xcode UI.NEWLINE #NEWLINE # Note that for configuration-relative files such as anything relative toNEWLINE # _intermediate_var, for the purposes of UI tree view display, Xcode willNEWLINE # only resolve the configuration name once, when the project file isNEWLINE # opened. 
If the active build configuration is changed, the project fileNEWLINE # must be closed and reopened if it is desired for the tree view to update.NEWLINE # This is filed as Apple radar 6588391.NEWLINE xccl.SetBuildSetting(_intermediate_var,NEWLINE '$(PROJECT_DERIVED_FILE_DIR)/$(CONFIGURATION)')NEWLINE xccl.SetBuildSetting(_shared_intermediate_var,NEWLINE '$(SYMROOT)/DerivedSources/$(CONFIGURATION)')NEWLINENEWLINE # Set user-specified project-wide build settings and config files. ThisNEWLINE # is intended to be used very sparingly. Really, almost everything shouldNEWLINE # go into target-specific build settings sections. The project-wideNEWLINE # settings are only intended to be used in cases where Xcode attempts toNEWLINE # resolve variable references in a project context as opposed to a targetNEWLINE # context, such as when resolving sourceTree references while building upNEWLINE # the tree tree view for UI display.NEWLINE # Any values set globally are applied to all configurations, then anyNEWLINE # per-configuration values are applied.NEWLINE for xck, xcv in self.build_file_dict.get('xcode_settings', {}).items():NEWLINE xccl.SetBuildSetting(xck, xcv)NEWLINE if 'xcode_config_file' in self.build_file_dict:NEWLINE config_ref = self.project.AddOrGetFileInRootGroup(NEWLINE self.build_file_dict['xcode_config_file'])NEWLINE xccl.SetBaseConfiguration(config_ref)NEWLINE build_file_configurations = self.build_file_dict.get('configurations', {})NEWLINE if build_file_configurations:NEWLINE for config_name in configurations:NEWLINE build_file_configuration_named = \NEWLINE build_file_configurations.get(config_name, {})NEWLINE if build_file_configuration_named:NEWLINE xcc = xccl.ConfigurationNamed(config_name)NEWLINE for xck, xcv in build_file_configuration_named.get('xcode_settings',NEWLINE {}).items():NEWLINE xcc.SetBuildSetting(xck, xcv)NEWLINE if 'xcode_config_file' in build_file_configuration_named:NEWLINE config_ref = self.project.AddOrGetFileInRootGroup(NEWLINE 
build_file_configurations[config_name]['xcode_config_file'])NEWLINE xcc.SetBaseConfiguration(config_ref)NEWLINENEWLINE # Sort the targets based on how they appeared in the input.NEWLINE # TODO(mark): Like a lot of other things here, this assumes internalNEWLINE # knowledge of PBXProject - in this case, of its "targets" property.NEWLINENEWLINE # ordinary_targets are ordinary targets that are already in the projectNEWLINE # file. run_test_targets are the targets that run unittests and should beNEWLINE # used for the Run All Tests target. support_targets are the action/ruleNEWLINE # targets used by GYP file targets, just kept for the assert check.NEWLINE ordinary_targets = []NEWLINE run_test_targets = []NEWLINE support_targets = []NEWLINENEWLINE # targets is full list of targets in the project.NEWLINE targets = []NEWLINENEWLINE # does the it define it's own "all"?NEWLINE has_custom_all = FalseNEWLINENEWLINE # targets_for_all is the list of ordinary_targets that should be listedNEWLINE # in this project's "All" target. 
It includes each non_runtest_targetNEWLINE # that does not have suppress_wildcard set.NEWLINE targets_for_all = []NEWLINENEWLINE for target in self.build_file_dict['targets']:NEWLINE target_name = target['target_name']NEWLINE toolset = target['toolset']NEWLINE qualified_target = gyp.common.QualifiedTarget(self.gyp_path, target_name,NEWLINE toolset)NEWLINE xcode_target = xcode_targets[qualified_target]NEWLINE # Make sure that the target being added to the sorted list is already inNEWLINE # the unsorted list.NEWLINE assert xcode_target in self.project._properties['targets']NEWLINE targets.append(xcode_target)NEWLINE ordinary_targets.append(xcode_target)NEWLINE if xcode_target.support_target:NEWLINE support_targets.append(xcode_target.support_target)NEWLINE targets.append(xcode_target.support_target)NEWLINENEWLINE if not int(target.get('suppress_wildcard', False)):NEWLINE targets_for_all.append(xcode_target)NEWLINENEWLINE if target_name.lower() == 'all':NEWLINE has_custom_all = TrueNEWLINENEWLINE # If this target has a 'run_as' attribute, add its target to theNEWLINE # targets, and add it to the test targets.NEWLINE if target.get('run_as'):NEWLINE # Make a target to run something. 
It should have oneNEWLINE # dependency, the parent xcode target.NEWLINE xccl = CreateXCConfigurationList(configurations)NEWLINE run_target = gyp.xcodeproj_file.PBXAggregateTarget({NEWLINE 'name': 'Run ' + target_name,NEWLINE 'productName': xcode_target.GetProperty('productName'),NEWLINE 'buildConfigurationList': xccl,NEWLINE },NEWLINE parent=self.project)NEWLINE run_target.AddDependency(xcode_target)NEWLINENEWLINE command = target['run_as']NEWLINE script = ''NEWLINE if command.get('working_directory'):NEWLINE script = script + 'cd "%s"\n' % \NEWLINE gyp.xcodeproj_file.ConvertVariablesToShellSyntax(NEWLINE command.get('working_directory'))NEWLINENEWLINE if command.get('environment'):NEWLINE script = script + "\n".join(NEWLINE ['export %s="%s"' %NEWLINE (key, gyp.xcodeproj_file.ConvertVariablesToShellSyntax(val))NEWLINE for (key, val) in command.get('environment').items()]) + "\n"NEWLINENEWLINE # Some test end up using sockets, files on disk, etc. and can getNEWLINE # confused if more then one test runs at a time. The generatorNEWLINE # flag 'xcode_serialize_all_test_runs' controls the forcing of allNEWLINE # tests serially. It defaults to True. 
To get serial runs thisNEWLINE # little bit of python does the same as the linux flock utility toNEWLINE # make sure only one runs at a time.NEWLINE command_prefix = ''NEWLINE if serialize_all_tests:NEWLINE command_prefix = \NEWLINE"""python -c "import fcntl, subprocess, sysNEWLINEfile = open('$TMPDIR/GYP_serialize_test_runs', 'a')NEWLINEfcntl.flock(file.fileno(), fcntl.LOCK_EX)NEWLINEsys.exit(subprocess.call(sys.argv[1:]))" """NEWLINENEWLINE # If we were unable to exec for some reason, we want to exitNEWLINE # with an error, and fixup variable references to be shellNEWLINE # syntax instead of xcode syntax.NEWLINE script = script + 'exec ' + command_prefix + '%s\nexit 1\n' % \NEWLINE gyp.xcodeproj_file.ConvertVariablesToShellSyntax(NEWLINE gyp.common.EncodePOSIXShellList(command.get('action')))NEWLINENEWLINE ssbp = gyp.xcodeproj_file.PBXShellScriptBuildPhase({NEWLINE 'shellScript': script,NEWLINE 'showEnvVarsInLog': 0,NEWLINE })NEWLINE run_target.AppendProperty('buildPhases', ssbp)NEWLINENEWLINE # Add the run target to the project file.NEWLINE targets.append(run_target)NEWLINE run_test_targets.append(run_target)NEWLINE xcode_target.test_runner = run_targetNEWLINENEWLINENEWLINE # Make sure that the list of targets being replaced is the same length asNEWLINE # the one replacing it, but allow for the added test runner targets.NEWLINE assert len(self.project._properties['targets']) == \NEWLINE len(ordinary_targets) + len(support_targets)NEWLINENEWLINE self.project._properties['targets'] = targetsNEWLINENEWLINE # Get rid of unnecessary levels of depth in groups like the Source group.NEWLINE self.project.RootGroupsTakeOverOnlyChildren(True)NEWLINENEWLINE # Sort the groups nicely. 
Do this after sorting the targets, because theNEWLINE # Products group is sorted based on the order of the targets.NEWLINE self.project.SortGroups()NEWLINENEWLINE # Create an "All" target if there's more than one target in this projectNEWLINE # file and the project didn't define its own "All" target. Put a generatedNEWLINE # "All" target first so that people opening up the project for the firstNEWLINE # time will build everything by default.NEWLINE if len(targets_for_all) > 1 and not has_custom_all:NEWLINE xccl = CreateXCConfigurationList(configurations)NEWLINE all_target = gyp.xcodeproj_file.PBXAggregateTarget(NEWLINE {NEWLINE 'buildConfigurationList': xccl,NEWLINE 'name': 'All',NEWLINE },NEWLINE parent=self.project)NEWLINENEWLINE for target in targets_for_all:NEWLINE all_target.AddDependency(target)NEWLINENEWLINE # TODO(mark): This is evil because it relies on internal knowledge ofNEWLINE # PBXProject._properties. It's important to get the "All" target first,NEWLINE # though.NEWLINE self.project._properties['targets'].insert(0, all_target)NEWLINENEWLINE # The same, but for run_test_targets.NEWLINE if len(run_test_targets) > 1:NEWLINE xccl = CreateXCConfigurationList(configurations)NEWLINE run_all_tests_target = gyp.xcodeproj_file.PBXAggregateTarget(NEWLINE {NEWLINE 'buildConfigurationList': xccl,NEWLINE 'name': 'Run All Tests',NEWLINE },NEWLINE parent=self.project)NEWLINE for run_test_target in run_test_targets:NEWLINE run_all_tests_target.AddDependency(run_test_target)NEWLINENEWLINE # Insert after the "All" target, which must exist if there is more thanNEWLINE # one run_test_target.NEWLINE self.project._properties['targets'].insert(1, run_all_tests_target)NEWLINENEWLINE def Finalize2(self, xcode_targets, xcode_target_to_target_dict):NEWLINE # Finalize2 needs to happen in a separate step because the process ofNEWLINE # updating references to other projects depends on the ordering of targetsNEWLINE # within remote project files. 
Finalize1 is responsible for sorting duty,NEWLINE # and once all project files are sorted, Finalize2 can come in and updateNEWLINE # these references.NEWLINENEWLINE # To support making a "test runner" target that will run all the testsNEWLINE # that are direct dependents of any given target, we look forNEWLINE # xcode_create_dependents_test_runner being set on an Aggregate target,NEWLINE # and generate a second target that will run the tests runners found underNEWLINE # the marked target.NEWLINE for bf_tgt in self.build_file_dict['targets']:NEWLINE if int(bf_tgt.get('xcode_create_dependents_test_runner', 0)):NEWLINE tgt_name = bf_tgt['target_name']NEWLINE toolset = bf_tgt['toolset']NEWLINE qualified_target = gyp.common.QualifiedTarget(self.gyp_path,NEWLINE tgt_name, toolset)NEWLINE xcode_target = xcode_targets[qualified_target]NEWLINE if isinstance(xcode_target, gyp.xcodeproj_file.PBXAggregateTarget):NEWLINE # Collect all the run test targets.NEWLINE all_run_tests = []NEWLINE pbxtds = xcode_target.GetProperty('dependencies')NEWLINE for pbxtd in pbxtds:NEWLINE pbxcip = pbxtd.GetProperty('targetProxy')NEWLINE dependency_xct = pbxcip.GetProperty('remoteGlobalIDString')NEWLINE if hasattr(dependency_xct, 'test_runner'):NEWLINE all_run_tests.append(dependency_xct.test_runner)NEWLINENEWLINE # Directly depend on all the runners as they depend on the targetNEWLINE # that builds them.NEWLINE if len(all_run_tests) > 0:NEWLINE run_all_target = gyp.xcodeproj_file.PBXAggregateTarget({NEWLINE 'name': 'Run %s Tests' % tgt_name,NEWLINE 'productName': tgt_name,NEWLINE },NEWLINE parent=self.project)NEWLINE for run_test_target in all_run_tests:NEWLINE run_all_target.AddDependency(run_test_target)NEWLINENEWLINE # Insert the test runner after the related target.NEWLINE idx = self.project._properties['targets'].index(xcode_target)NEWLINE self.project._properties['targets'].insert(idx + 1, run_all_target)NEWLINENEWLINE # Update all references to other projects, to make sure that the lists 
ofNEWLINE # remote products are complete. Otherwise, Xcode will fill them in whenNEWLINE # it opens the project file, which will result in unnecessary diffs.NEWLINE # TODO(mark): This is evil because it relies on internal knowledge ofNEWLINE # PBXProject._other_pbxprojects.NEWLINE for other_pbxproject in self.project._other_pbxprojects.keys():NEWLINE self.project.AddOrGetProjectReference(other_pbxproject)NEWLINENEWLINE self.project.SortRemoteProductReferences()NEWLINENEWLINE # Give everything an ID.NEWLINE self.project_file.ComputeIDs()NEWLINENEWLINE # Make sure that no two objects in the project file have the same ID. IfNEWLINE # multiple objects wind up with the same ID, upon loading the file, XcodeNEWLINE # will only recognize one object (the last one in the file?) and theNEWLINE # results are unpredictable.NEWLINE self.project_file.EnsureNoIDCollisions()NEWLINENEWLINE def Write(self):NEWLINE # Write the project file to a temporary location first. Xcode watches forNEWLINE # changes to the project file and presents a UI sheet offering to reloadNEWLINE # the project when it does change. However, in some cases, especially whenNEWLINE # multiple projects are open or when Xcode is busy, things don't work soNEWLINE # seamlessly. Sometimes, Xcode is able to detect that a project file hasNEWLINE # changed but can't unload it because something else is referencing it.NEWLINE # To mitigate this problem, and to avoid even having Xcode present the UINEWLINE # sheet when an open project is rewritten for inconsequential changes, theNEWLINE # project file is written to a temporary file in the xcodeproj directoryNEWLINE # first. The new temporary file is then compared to the existing projectNEWLINE # file, if any. If they differ, the new file replaces the old; otherwise,NEWLINE # the new project file is simply deleted. 
Xcode properly detects a fileNEWLINE # being renamed over an open project file as a change and so it remainsNEWLINE # able to present the "project file changed" sheet under this system.NEWLINE # Writing to a temporary file first also avoids the possible problem ofNEWLINE # Xcode rereading an incomplete project file.NEWLINE (output_fd, new_pbxproj_path) = \NEWLINE tempfile.mkstemp(suffix='.tmp', prefix='project.pbxproj.gyp.',NEWLINE dir=self.path)NEWLINENEWLINE try:NEWLINE output_file = os.fdopen(output_fd, 'wb')NEWLINENEWLINE self.project_file.Print(output_file)NEWLINE output_file.close()NEWLINENEWLINE pbxproj_path = os.path.join(self.path, 'project.pbxproj')NEWLINENEWLINE same = FalseNEWLINE try:NEWLINE same = filecmp.cmp(pbxproj_path, new_pbxproj_path, False)NEWLINE except OSError as e:NEWLINE if e.errno != errno.ENOENT:NEWLINE raiseNEWLINENEWLINE if same:NEWLINE # The new file is identical to the old one, just get rid of the newNEWLINE # one.NEWLINE os.unlink(new_pbxproj_path)NEWLINE else:NEWLINE # The new file is different from the old one, or there is no old one.NEWLINE # Rename the new file to the permanent name.NEWLINE #NEWLINE # tempfile.mkstemp uses an overly restrictive mode, resulting in aNEWLINE # file that can only be read by the owner, regardless of the umask.NEWLINE # There's no reason to not respect the umask here, which means thatNEWLINE # an extra hoop is required to fetch it and reset the new file's mode.NEWLINE #NEWLINE # No way to get the umask without setting a new one? Set a safe oneNEWLINE # and then set it back to the old value.NEWLINE umask = os.umask(0o77)NEWLINE os.umask(umask)NEWLINENEWLINE os.chmod(new_pbxproj_path, 0o666 & ~umask)NEWLINE os.rename(new_pbxproj_path, pbxproj_path)NEWLINENEWLINE except Exception:NEWLINE # Don't leave turds behind. 
 In fact, if this code was responsible for
      # creating the xcodeproj directory, get rid of that too.
      os.unlink(new_pbxproj_path)
      if self.created_dir:
        shutil.rmtree(self.path, True)
      raise


def AddSourceToTarget(source, type, pbxp, xct):
  """Adds source to the appropriate build phase of the Xcode target xct.

  Compilable extensions go into the target's sources phase; linkable
  extensions go into the frameworks phase.  A target of type 'none', or a
  file with any other extension, is only added to the project's root group
  (pbxp) without joining any build phase.
  """
  # TODO(mark): Perhaps source_extensions and library_extensions can be made a
  # little bit fancier.
  source_extensions = ['c', 'cc', 'cpp', 'cxx', 'm', 'mm', 's', 'swift']

  # .o is conceptually more of a "source" than a "library," but Xcode thinks
  # of "sources" as things to compile and "libraries" (or "frameworks") as
  # things to link with.  Adding an object file to an Xcode target's frameworks
  # phase works properly.
  library_extensions = ['a', 'dylib', 'framework', 'o']

  basename = posixpath.basename(source)
  (root, ext) = posixpath.splitext(basename)
  if ext:
    # Strip the leading dot and normalize case for the extension lookup.
    ext = ext[1:].lower()

  if ext in source_extensions and type != 'none':
    xct.SourcesPhase().AddFile(source)
  elif ext in library_extensions and type != 'none':
    xct.FrameworksPhase().AddFile(source)
  else:
    # Files that aren't added to a sources or frameworks build phase can still
    # go into the project file, just not as part of a build phase.
    pbxp.AddOrGetFileInRootGroup(source)


def AddResourceToTarget(resource, pbxp, xct):
  """Adds resource to the resources build phase of the Xcode target xct."""
  # TODO(mark): Combine with AddSourceToTarget above?  Or just inline this call
  # where it's used.
  xct.ResourcesPhase().AddFile(resource)


def AddHeaderToTarget(header, pbxp, xct, is_public):
  """Adds header to the headers build phase of xct, marked with the Public
  attribute when is_public is true and Private otherwise."""
  # TODO(mark): Combine with AddSourceToTarget above?  Or just inline this call
  # where it's used.
  settings = '{ATTRIBUTES = (%s, ); }' % ('Private', 'Public')[is_public]
  xct.HeadersPhase().AddFile(header, settings)


_xcode_variable_re = re.compile(r'(\$\((.*?)\))')
def ExpandXcodeVariables(string, expansions):
  """Expands Xcode-style $(VARIABLES) in string per the expansions dict.

  In some rare cases, it is appropriate to expand Xcode variables when a
  project file is generated.  For any substring $(VAR) in string, if VAR is a
  key in the expansions dict, $(VAR) will be replaced with expansions[VAR].
  Any $(VAR) substring in string for which VAR is not a key in the expansions
  dict will remain in the returned string.
  """

  matches = _xcode_variable_re.findall(string)
  if matches is None:
    # NOTE(review): re.findall never returns None (no match yields []), so
    # this guard is dead code; harmless, but the loop below already handles
    # the empty case.
    return string

  # Process matches right-to-left so earlier replacements don't shift later
  # match positions.
  matches.reverse()
  for match in matches:
    (to_replace, variable) = match
    if not variable in expansions:
      continue

    replacement = expansions[variable]
    # NOTE(review): the replacement string is handed to re.sub unescaped, so
    # an expansion value containing backslashes (or a literal \g<...>) would
    # be misinterpreted by the regex engine; plain str.replace would avoid
    # that -- confirm expansion values never contain backslashes.
    string = re.sub(re.escape(to_replace), replacement, string)

  return string


_xcode_define_re = re.compile(r'([\\\"\' ])')
def EscapeXcodeDefine(s):
  """We must escape the defines that we give to XCode so that it knows not to
  split on spaces and to respect backslash and quote literals.
 However, we
  must not quote the define, or Xcode will incorrectly interpret variables
  especially $(inherited)."""
  return re.sub(_xcode_define_re, r'\\\1', s)


def PerformBuild(data, configurations, params):
  """Invokes xcodebuild for every generated .xcodeproj and each requested
  configuration, printing each command line before running it.

  Raises subprocess.CalledProcessError if any xcodebuild invocation fails.
  """
  options = params['options']

  for build_file, build_file_dict in data.items():
    (build_file_root, build_file_ext) = os.path.splitext(build_file)
    # Only top-level .gyp files produce projects; skip includes etc.
    if build_file_ext != '.gyp':
      continue
    xcodeproj_path = build_file_root + options.suffix + '.xcodeproj'
    if options.generator_output:
      xcodeproj_path = os.path.join(options.generator_output, xcodeproj_path)

    for config in configurations:
      arguments = ['xcodebuild', '-project', xcodeproj_path]
      arguments += ['-configuration', config]
      print("Building [%s]: %s" % (config, arguments))
      subprocess.check_call(arguments)


def CalculateGeneratorInputInfo(params):
  """Computes the directories used for input file lists and records them in
  the module-level generator_filelist_paths dict.

  The 'ninja' flavor places file lists under the ninja output directory;
  otherwise they live under 'xcodebuild' in the toplevel directory.
  """
  toplevel = params['options'].toplevel_dir
  if params.get('flavor') == 'ninja':
    generator_dir = os.path.relpath(params['options'].generator_output or '.')
    output_dir = params.get('generator_flags', {}).get('output_dir', 'out')
    output_dir = os.path.normpath(os.path.join(generator_dir, output_dir))
    qualified_out_dir = os.path.normpath(os.path.join(
        toplevel, output_dir, 'gypfiles-xcode-ninja'))
  else:
    output_dir = os.path.normpath(os.path.join(toplevel, 'xcodebuild'))
    qualified_out_dir = os.path.normpath(os.path.join(
        toplevel, output_dir, 'gypfiles'))

  global generator_filelist_paths
  generator_filelist_paths = {
    'toplevel': toplevel,
    'qualified_out_dir': qualified_out_dir,
  }


def GenerateOutput(target_list, target_dicts, data, params):
  # Optionally configure each spec to use ninja as the external builder.
  ninja_wrapper = params.get('flavor') == 'ninja'
  if
 ninja_wrapper:
    (target_list, target_dicts, data) = \
        gyp.xcode_ninja.CreateWrapper(target_list, target_dicts, data, params)

  options = params['options']
  generator_flags = params.get('generator_flags', {})
  parallel_builds = generator_flags.get('xcode_parallel_builds', True)
  # NOTE(review): serialize_all_tests is not referenced in this portion of
  # the function; presumably consumed later (scheme/test setup) -- verify.
  serialize_all_tests = \
      generator_flags.get('xcode_serialize_all_test_runs', True)
  upgrade_check_project_version = \
      generator_flags.get('xcode_upgrade_check_project_version', None)

  # Format upgrade_check_project_version with leading zeros as needed.
  if upgrade_check_project_version:
    upgrade_check_project_version = str(upgrade_check_project_version)
    while len(upgrade_check_project_version) < 4:
      upgrade_check_project_version = '0' + upgrade_check_project_version

  skip_excluded_files = \
      not generator_flags.get('xcode_list_excluded_files', True)

  # Create an XcodeProject for every top-level .gyp file.
  xcode_projects = {}
  for build_file, build_file_dict in data.items():
    (build_file_root, build_file_ext) = os.path.splitext(build_file)
    if build_file_ext != '.gyp':
      continue
    xcodeproj_path = build_file_root + options.suffix + '.xcodeproj'
    if options.generator_output:
      xcodeproj_path = os.path.join(options.generator_output, xcodeproj_path)
    xcp = XcodeProject(build_file, xcodeproj_path, build_file_dict)
    xcode_projects[build_file] = xcp
    pbxp = xcp.project

    # Set project-level attributes from multiple options
    project_attributes = {}
    if parallel_builds:
      project_attributes['BuildIndependentTargetsInParallel'] = 'YES'
    if upgrade_check_project_version:
      project_attributes['LastUpgradeCheck'] = upgrade_check_project_version
      project_attributes['LastTestingUpgradeCheck'] = \
          upgrade_check_project_version
      project_attributes['LastSwiftUpdateCheck'] = \
          upgrade_check_project_version
    pbxp.SetProperty('attributes', project_attributes)

    # Add gyp/gypi files to project
    if not generator_flags.get('standalone'):
      main_group = pbxp.GetProperty('mainGroup')
      build_group = gyp.xcodeproj_file.PBXGroup({'name': 'Build'})
      main_group.AppendChild(build_group)
      for included_file in build_file_dict['included_files']:
        build_group.AddOrGetFileByPath(included_file, False)

  xcode_targets = {}
  xcode_target_to_target_dict = {}
  for qualified_target in target_list:
    [build_file, target_name, toolset] = \
        gyp.common.ParseQualifiedTarget(qualified_target)

    spec = target_dicts[qualified_target]
    if spec['toolset'] != 'target':
      raise Exception(
          'Multiple toolsets not supported in xcode build (target %s)' %
          qualified_target)
    # The default configuration comes first; the rest follow in sorted order.
    configuration_names = [spec['default_configuration']]
    for configuration_name in sorted(spec['configurations'].keys()):
      if configuration_name not in configuration_names:
        configuration_names.append(configuration_name)
    xcp = xcode_projects[build_file]
    pbxp = xcp.project

    # Set up the configurations for the target according to the list of names
    # supplied.
    xccl = CreateXCConfigurationList(configuration_names)

    # Create an XCTarget subclass object for the target.  The type with
    # "+bundle" appended will be used if the target has "mac_bundle" set.
    # loadable_modules not in a mac_bundle are mapped to
    # com.googlecode.gyp.xcode.bundle, a pseudo-type that xcode.py interprets
    # to create a single-file mh_bundle.
    _types = {
      'executable': 'com.apple.product-type.tool',
      'loadable_module': 'com.googlecode.gyp.xcode.bundle',
      'shared_library': 'com.apple.product-type.library.dynamic',
      'static_library': 'com.apple.product-type.library.static',
      'mac_kernel_extension': 'com.apple.product-type.kernel-extension',
      'executable+bundle': 'com.apple.product-type.application',
      'loadable_module+bundle': 'com.apple.product-type.bundle',
      'loadable_module+xctest': 'com.apple.product-type.bundle.unit-test',
      'shared_library+bundle': 'com.apple.product-type.framework',
      'executable+extension+bundle': 'com.apple.product-type.app-extension',
      'executable+watch+extension+bundle':
          'com.apple.product-type.watchkit-extension',
      'executable+watch+bundle':
          'com.apple.product-type.application.watchapp',
      'mac_kernel_extension+bundle': 'com.apple.product-type.kernel-extension',
    }

    target_properties = {
      'buildConfigurationList': xccl,
      'name': target_name,
    }

    type = spec['type']
    is_xctest = int(spec.get('mac_xctest_bundle', 0))
    is_bundle = int(spec.get('mac_bundle', 0)) or is_xctest
    is_app_extension = int(spec.get('ios_app_extension', 0))
    is_watchkit_extension = int(spec.get('ios_watchkit_extension', 0))
    is_watch_app = int(spec.get('ios_watch_app', 0))
    if type != 'none':
      # Build up the _types lookup key from the base type plus bundle flags.
      type_bundle_key = type
      if is_xctest:
        type_bundle_key += '+xctest'
        assert type == 'loadable_module', (
            'mac_xctest_bundle targets must have type loadable_module '
            '(target %s)' % target_name)
      elif is_app_extension:
        assert is_bundle, ('ios_app_extension flag requires mac_bundle '
            '(target %s)' % target_name)
        type_bundle_key += '+extension+bundle'
      elif is_watchkit_extension:
        assert is_bundle, ('ios_watchkit_extension flag requires mac_bundle '
            '(target %s)' % target_name)
        type_bundle_key += '+watch+extension+bundle'
      elif is_watch_app:
        assert is_bundle, ('ios_watch_app flag requires mac_bundle '
            '(target %s)' % target_name)
        type_bundle_key += '+watch+bundle'
      elif is_bundle:
        type_bundle_key += '+bundle'

      xctarget_type = gyp.xcodeproj_file.PBXNativeTarget
      try:
        target_properties['productType'] = _types[type_bundle_key]
      except KeyError as e:
        gyp.common.ExceptionAppend(e, "-- unknown product type while "
                                   "writing target %s" % target_name)
        raise
    else:
      xctarget_type = gyp.xcodeproj_file.PBXAggregateTarget
      assert not is_bundle, (
          'mac_bundle targets cannot have type none (target "%s")' %
          target_name)
      assert not is_xctest, (
          'mac_xctest_bundle targets cannot have type none (target "%s")' %
          target_name)

    target_product_name = spec.get('product_name')
    if target_product_name is not None:
      target_properties['productName'] = target_product_name

    xct = xctarget_type(target_properties, parent=pbxp,
                        force_outdir=spec.get('product_dir'),
                        force_prefix=spec.get('product_prefix'),
                        force_extension=spec.get('product_extension'))
    pbxp.AppendProperty('targets', xct)
    xcode_targets[qualified_target] = xct
    xcode_target_to_target_dict[xct] = spec

    spec_actions = spec.get('actions', [])
    spec_rules = spec.get('rules', [])

    # Xcode has some "issues" with checking dependencies for the "Compile
    # sources" step with any source files/headers generated by actions/rules.
    # To work around this, if a target is building anything directly (not
    # type "none"), then a second target is used to run the GYP actions/rules
    # and is made a dependency of this target.  This way the work is done
    # before the dependency checks for what should be recompiled.
    support_xct = None
    # The Xcode "issues" don't affect xcode-ninja builds, since the dependency
    # logic all happens in ninja.  Don't bother creating the extra targets in
    # that case.
    if type != 'none' and (spec_actions or spec_rules) and not ninja_wrapper:
      support_xccl = CreateXCConfigurationList(configuration_names)
      support_target_suffix = generator_flags.get(
          'support_target_suffix', ' Support')
      support_target_properties = {
        'buildConfigurationList': support_xccl,
        'name': target_name + support_target_suffix,
      }
      if target_product_name:
        support_target_properties['productName'] = \
            target_product_name + ' Support'
      support_xct = \
          gyp.xcodeproj_file.PBXAggregateTarget(support_target_properties,
                                                parent=pbxp)
      pbxp.AppendProperty('targets', support_xct)
      xct.AddDependency(support_xct)
      # Hang the support target off the main target so it can be tested/found
      # by the generator during Finalize.
      xct.support_target = support_xct

    prebuild_index = 0

    # Add custom shell script phases for "actions" sections.
    for action in spec_actions:
      # There's no need to write anything into the script to ensure that the
      # output directories already exist, because Xcode will look at the
      # declared outputs and automatically ensure that they exist for us.

      # Do we have a message to print when this action runs?
      message = action.get('message')
      if message:
        message = 'echo note: ' + gyp.common.EncodePOSIXShellArgument(message)
      else:
        message = ''

      # Turn the list into a string that can be passed to a shell.
      action_string = gyp.common.EncodePOSIXShellList(action['action'])

      # Convert Xcode-type variable references to sh-compatible environment
      # variable references.
      message_sh = gyp.xcodeproj_file.ConvertVariablesToShellSyntax(message)
      action_string_sh = gyp.xcodeproj_file.ConvertVariablesToShellSyntax(
        action_string)

      script = ''
      # Include the optional message
      if message_sh:
        script += message_sh + '\n'
      # Be sure the script runs in exec, and that if exec fails, the script
      # exits signalling an error.
      script += 'exec ' + action_string_sh + '\nexit 1\n'
      ssbp = gyp.xcodeproj_file.PBXShellScriptBuildPhase({
            'inputPaths': action['inputs'],
            'name': 'Action "' + action['action_name'] + '"',
            'outputPaths': action['outputs'],
            'shellScript': script,
            'showEnvVarsInLog': 0,
          })

      if support_xct:
        support_xct.AppendProperty('buildPhases', ssbp)
      else:
        # TODO(mark): this assumes too much knowledge of the internals of
        # xcodeproj_file; some of these smarts should move into xcodeproj_file
        # itself.
        xct._properties['buildPhases'].insert(prebuild_index, ssbp)
        prebuild_index = prebuild_index + 1

      # TODO(mark): Should verify that at most one of these is specified.
      if int(action.get('process_outputs_as_sources', False)):
        for output in action['outputs']:
          AddSourceToTarget(output, type, pbxp, xct)

      if int(action.get('process_outputs_as_mac_bundle_resources', False)):
        for output in action['outputs']:
          AddResourceToTarget(output, pbxp, xct)

    # tgt_mac_bundle_resources holds the list of bundle resources so
    # the rule processing can check against it.
    if is_bundle:
      tgt_mac_bundle_resources = spec.get('mac_bundle_resources', [])
    else:
      tgt_mac_bundle_resources = []

    # Add custom shell script phases driving "make" for "rules" sections.
    #
    # Xcode's built-in rule support is almost powerful enough to use directly,
    # but there are a few significant deficiencies that render them unusable.
    # There are workarounds for some of its inadequacies, but in aggregate,
    # the workarounds added complexity to the generator, and some workarounds
    # actually require input files to be crafted more carefully than I'd like.
    # Consequently, until Xcode rules are made more capable, "rules" input
    # sections will be handled in Xcode output by shell script build phases
    # performed prior to the compilation phase.
    #
    # The following problems with Xcode rules were found.  The numbers are
    # Apple radar IDs.  I hope that these shortcomings are addressed, I really
    # liked having the rules handled directly in Xcode during the period that
    # I was prototyping this.
    #
    # 6588600 Xcode compiles custom script rule outputs too soon, compilation
    #         fails.  This occurs when rule outputs from distinct inputs are
    #         interdependent.  The only workaround is to put rules and their
    #         inputs in a separate target from the one that compiles the rule
    #         outputs.  This requires input file cooperation and it means that
    #         process_outputs_as_sources is unusable.
    # 6584932 Need to declare that custom rule outputs should be excluded from
    #         compilation.  A possible workaround is to lie to Xcode about a
    #         rule's output, giving it a dummy file it doesn't know how to
    #         compile.  The rule action script would need to touch the dummy.
    # 6584839 I need a way to declare additional inputs to a custom rule.
    #         A possible workaround is a shell script phase prior to
    #         compilation that touches a rule's primary input files if any
    #         would-be additional inputs are newer than the output.  Modifying
    #         the source tree - even just modification times - feels dirty.
    # 6564240 Xcode "custom script" build rules always dump all environment
    #         variables.  This is a low-priority problem and is not a
    #         show-stopper.
    rules_by_ext = {}
    for rule in spec_rules:
      rules_by_ext[rule['extension']] = rule

      # First, some definitions:
      #
      # A "rule source" is a file that was listed in a target's "sources"
      # list and will have a rule applied to it on the basis of matching the
      # rule's "extensions" attribute.  Rule sources are direct inputs to
      # rules.
      #
      # Rule definitions may specify additional inputs in their "inputs"
      # attribute.  These additional inputs are used for dependency tracking
      # purposes.
      #
      # A "concrete output" is a rule output with input-dependent variables
      # resolved.  For example, given a rule with:
      #   'extension': 'ext', 'outputs': ['$(INPUT_FILE_BASE).cc'],
      # if the target's "sources" list contained "one.ext" and "two.ext",
      # the "concrete output" for rule input "two.ext" would be "two.cc".  If
      # a rule specifies multiple outputs, each input file that the rule is
      # applied to will have the same number of concrete outputs.
      #
      # If any concrete outputs are outdated or missing relative to their
      # corresponding rule_source or to any specified additional input, the
      # rule action must be performed to generate the concrete outputs.

      # concrete_outputs_by_rule_source will have an item at the same index
      # as the rule['rule_sources'] that it corresponds to.  Each item is a
      # list of all of the concrete outputs for the rule_source.
      concrete_outputs_by_rule_source = []

      # concrete_outputs_all is a flat list of all concrete outputs that this
      # rule is able to produce, given the known set of input files
      # (rule_sources) that apply to it.
      concrete_outputs_all = []

      # messages & actions are keyed by the same indices as rule['rule_sources']
      # and concrete_outputs_by_rule_source.  They contain the message and
      # action to perform after resolving input-dependent variables.  The
      # message is optional, in which case None is stored for each rule source.
      messages = []
      actions = []

      for rule_source in rule.get('rule_sources', []):
        rule_source_dirname, rule_source_basename = \
            posixpath.split(rule_source)
        (rule_source_root, rule_source_ext) = \
            posixpath.splitext(rule_source_basename)

        # These are the same variable names that Xcode uses for its own native
        # rule support.  Because Xcode's rule engine is not being used, they
        # need to be expanded as they are written to the makefile.
        rule_input_dict = {
          'INPUT_FILE_BASE': rule_source_root,
          'INPUT_FILE_SUFFIX': rule_source_ext,
          'INPUT_FILE_NAME': rule_source_basename,
          'INPUT_FILE_PATH': rule_source,
          'INPUT_FILE_DIRNAME': rule_source_dirname,
        }

        concrete_outputs_for_this_rule_source = []
        for output in rule.get('outputs', []):
          # Fortunately, Xcode and make both use $(VAR) format for their
          # variables, so the expansion is the only transformation necessary.
          # Any remaining $(VAR)-type variables in the string can be given
          # directly to make, which will pick up the correct settings from
          # what Xcode puts into the environment.
          concrete_output = ExpandXcodeVariables(output, rule_input_dict)
          concrete_outputs_for_this_rule_source.append(concrete_output)

          # Add all concrete outputs to the project.
          pbxp.AddOrGetFileInRootGroup(concrete_output)

        concrete_outputs_by_rule_source.append( \
            concrete_outputs_for_this_rule_source)
        concrete_outputs_all.extend(concrete_outputs_for_this_rule_source)

        # TODO(mark): Should verify that at most one of these is specified.
        if int(rule.get('process_outputs_as_sources', False)):
          for output in concrete_outputs_for_this_rule_source:
            AddSourceToTarget(output, type, pbxp, xct)

        # If the file came from the mac_bundle_resources list or if the rule
        # is marked to process outputs as bundle resource, do so.
        was_mac_bundle_resource = rule_source in tgt_mac_bundle_resources
        if was_mac_bundle_resource or \
            int(rule.get('process_outputs_as_mac_bundle_resources', False)):
          for output in concrete_outputs_for_this_rule_source:
            AddResourceToTarget(output, pbxp, xct)

        # Do we have a message to print when this rule runs?
        message = rule.get('message')
        if message:
          message = gyp.common.EncodePOSIXShellArgument(message)
          message = ExpandXcodeVariables(message, rule_input_dict)
        messages.append(message)

        # Turn the list into a string that can be passed to a shell.
        action_string = gyp.common.EncodePOSIXShellList(rule['action'])

        action = ExpandXcodeVariables(action_string, rule_input_dict)
        actions.append(action)

      if len(concrete_outputs_all) > 0:
        # TODO(mark): There's a possibility for collision here.  Consider
        # target "t" rule "A_r" and target "t_A" rule "r".
        makefile_name = '%s.make' % re.sub(
            '[^a-zA-Z0-9_]', '_' , '%s_%s' % (target_name, rule['rule_name']))
        makefile_path = os.path.join(xcode_projects[build_file].path,
                                     makefile_name)
        # TODO(mark): try/close?  Write to a temporary file and swap it only
        # if it's got changes?
        # NOTE(review): opened in binary mode but written with str below;
        # under Python 3 str writes to a 'wb' file raise TypeError -- confirm
        # whether this path still targets Python 2 or should open in 'w'.
        makefile = open(makefile_path, 'wb')

        # make will build the first target in the makefile by default.  By
        # convention, it's called "all".  List all (or at least one)
        # concrete output for each rule source as a prerequisite of the "all"
        # target.
        makefile.write('all: \\\n')
        for concrete_output_index in \
            range(0, len(concrete_outputs_by_rule_source)):
          # Only list the first (index [0]) concrete output of each input
          # in the "all" target.  Otherwise, a parallel make (-j > 1) would
          # attempt to process each input multiple times simultaneously.
          # Otherwise, "all" could just contain the entire list of
          # concrete_outputs_all.
          concrete_output = \
              concrete_outputs_by_rule_source[concrete_output_index][0]
          if concrete_output_index == len(concrete_outputs_by_rule_source) - 1:
            eol = ''
          else:
            eol = ' \\'
          makefile.write('    %s%s\n' % (concrete_output, eol))

        for (rule_source, concrete_outputs, message, action) in \
            zip(rule['rule_sources'], concrete_outputs_by_rule_source,
                messages, actions):
          makefile.write('\n')

          # Add a rule that declares it can build each concrete output of a
          # rule source.  Collect the names of the directories that are
          # required.
          concrete_output_dirs = []
          for concrete_output_index in range(0, len(concrete_outputs)):
            concrete_output = concrete_outputs[concrete_output_index]
            if concrete_output_index == 0:
              bol = ''
            else:
              bol = '     '
            makefile.write('%s%s \\\n' % (bol, concrete_output))

            concrete_output_dir = posixpath.dirname(concrete_output)
            if (concrete_output_dir and
                concrete_output_dir not in concrete_output_dirs):
              concrete_output_dirs.append(concrete_output_dir)

          makefile.write('    : \\\n')

          # The prerequisites for this rule are the rule source itself and
          # the set of additional rule inputs, if any.
          prerequisites = [rule_source]
          prerequisites.extend(rule.get('inputs', []))
          for prerequisite_index in range(0, len(prerequisites)):
            prerequisite = prerequisites[prerequisite_index]
            if prerequisite_index == len(prerequisites) - 1:
              eol = ''
            else:
              eol = ' \\'
            makefile.write('    %s%s\n' % (prerequisite, eol))

          # Make sure that output directories exist before executing the rule
          # action.
          if len(concrete_output_dirs) > 0:
            makefile.write('\t@mkdir -p "%s"\n' %
                           '" "'.join(concrete_output_dirs))

          # The rule message and action have already had the necessary variable
          # substitutions performed.
          if message:
            # Mark it with note: so Xcode picks it up in build output.
            makefile.write('\t@echo note: %s\n' % message)
          makefile.write('\t%s\n' % action)

        makefile.close()

        # It might be nice to ensure that needed output directories exist
        # here rather than in each target in the Makefile, but that wouldn't
        # work if there ever was a concrete output that had an input-dependent
        # variable anywhere other than in the leaf position.

        # Don't declare any inputPaths or outputPaths.  If they're present,
        # Xcode will provide a slight optimization by only running the script
        # phase if any output is missing or outdated relative to any input.
        # Unfortunately, it will also assume that all outputs are touched by
        # the script, and if the outputs serve as files in a compilation
        # phase, they will be unconditionally rebuilt.  Since make might not
        # rebuild everything that could be declared here as an output, this
        # extra compilation activity is unnecessary.  With inputPaths and
        # outputPaths not supplied, make will always be called, but it knows
        # enough to not do anything when everything is up-to-date.

        # To help speed things up, pass -j COUNT to make so it does some work
        # in parallel.  Don't use ncpus because Xcode will build ncpus targets
        # in parallel and if each target happens to have a rules step, there
        # would be ncpus^2 things going.  With a machine that has 2 quad-core
        # Xeons, a build can quickly run out of processes based on
        # scheduling/other tasks, and randomly failing builds are no good.
        script = \
"""JOB_COUNT="$(/usr/sbin/sysctl -n hw.ncpu)"
if [ "${JOB_COUNT}" -gt 4 ]; then
  JOB_COUNT=4
fi
exec xcrun make -f "${PROJECT_FILE_PATH}/%s" -j "${JOB_COUNT}"
exit 1
""" % makefile_name
        ssbp = gyp.xcodeproj_file.PBXShellScriptBuildPhase({
              'name': 'Rule "' + rule['rule_name'] + '"',
              'shellScript': script,
              'showEnvVarsInLog': 0,
            })

        if support_xct:
          support_xct.AppendProperty('buildPhases', ssbp)
        else:
          # TODO(mark): this assumes too much knowledge of the internals of
          # xcodeproj_file; some of these smarts should move into xcodeproj_file
          # itself.
          xct._properties['buildPhases'].insert(prebuild_index, ssbp)
          prebuild_index = prebuild_index + 1

      # Extra rule inputs also go into the project file.  Concrete outputs were
      # already added when they were computed.
      groups = ['inputs', 'inputs_excluded']
      if skip_excluded_files:
        groups = [x for x in groups if not x.endswith('_excluded')]
      for group in groups:
        for item in rule.get(group, []):
          pbxp.AddOrGetFileInRootGroup(item)

    # Add "sources".
    for source in spec.get('sources', []):
      (source_root, source_extension) = posixpath.splitext(source)
      if source_extension[1:] not in rules_by_ext:
        # AddSourceToTarget will add the file to a root group if it's not
        # already there.
        AddSourceToTarget(source, type, pbxp, xct)
      else:
        pbxp.AddOrGetFileInRootGroup(source)

    # Add "mac_bundle_resources" and "mac_framework_private_headers" if
    # it's a bundle of any type.
    if is_bundle:
      for resource in tgt_mac_bundle_resources:
        (resource_root, resource_extension) = posixpath.splitext(resource)
        if resource_extension[1:] not in rules_by_ext:
          AddResourceToTarget(resource, pbxp, xct)
        else:
          pbxp.AddOrGetFileInRootGroup(resource)

      for header in spec.get('mac_framework_private_headers', []):
        AddHeaderToTarget(header, pbxp, xct, False)

    # Add "mac_framework_headers".  These can be valid for both frameworks
    # and static libraries.
    if is_bundle or type == 'static_library':
      for header in spec.get('mac_framework_headers', []):
        AddHeaderToTarget(header, pbxp, xct, True)

    # Add "copies".
    pbxcp_dict = {}
    for copy_group in spec.get('copies', []):
      dest = copy_group['destination']
      if dest[0] not in ('/', '$'):
        # Relative paths are relative to $(SRCROOT).
        dest = '$(SRCROOT)/' + dest

      code_sign = int(copy_group.get('xcode_code_sign', 0))
      settings = (None, '{ATTRIBUTES = (CodeSignOnCopy, ); }')[code_sign]

      # Coalesce multiple "copies" sections in the same target with the same
      # "destination" property into the same PBXCopyFilesBuildPhase, otherwise
      # they'll wind up with ID collisions.
      pbxcp = pbxcp_dict.get(dest, None)
      if pbxcp is None:
        pbxcp = gyp.xcodeproj_file.PBXCopyFilesBuildPhase({
              'name': 'Copy to ' + copy_group['destination']
            },
            parent=xct)
        pbxcp.SetDestination(dest)

        # TODO(mark): The usual comment about this knowing too much about
        # gyp.xcodeproj_file internals applies.
        xct._properties['buildPhases'].insert(prebuild_index, pbxcp)

        pbxcp_dict[dest] = pbxcp

      for file in copy_group['files']:
        pbxcp.AddFile(file, settings)

    # Excluded files can also go into the project file.
    if not skip_excluded_files:
      for key in ['sources', 'mac_bundle_resources', 'mac_framework_headers',
                  'mac_framework_private_headers']:
        excluded_key = key + '_excluded'
        for item in spec.get(excluded_key, []):
          pbxp.AddOrGetFileInRootGroup(item)

    # So can "inputs" and "outputs" sections of "actions" groups.
    groups = ['inputs', 'inputs_excluded', 'outputs', 'outputs_excluded']
    if skip_excluded_files:
      groups = [x for x in groups if not x.endswith('_excluded')]
    for action in spec.get('actions', []):
      for group in groups:
        for item in action.get(group, []):
          # Exclude anything in BUILT_PRODUCTS_DIR.  They're products, not
          # sources.
          if not item.startswith('$(BUILT_PRODUCTS_DIR)/'):
            pbxp.AddOrGetFileInRootGroup(item)

    for postbuild in spec.get('postbuilds', []):
      action_string_sh = gyp.common.EncodePOSIXShellList(postbuild['action'])
      script = 'exec ' + action_string_sh + '\nexit 1\n'

      # Make the postbuild step depend on the output of ld or ar from this
      # target.  Apparently putting the script step after the link step isn't
      # sufficient to ensure proper ordering in all cases.  With an input
      # declared but no outputs, the script step should run every time, as
      # desired.
      ssbp = gyp.xcodeproj_file.PBXShellScriptBuildPhase({
            'inputPaths': ['$(BUILT_PRODUCTS_DIR)/$(EXECUTABLE_PATH)'],
            'name': 'Postbuild "' + postbuild['postbuild_name'] + '"',
            'shellScript': script,
            'showEnvVarsInLog': 0,
          })
      xct.AppendProperty('buildPhases', ssbp)

    # Add dependencies before libraries, because adding a dependency may imply
    # adding a library.  It's preferable to keep dependencies listed first
    # during a link phase so that they can override symbols that would
    # otherwise be provided by libraries, which will usually include system
    # libraries.  On some systems, ld is finicky and even requires the
    # libraries to be ordered in such a way that unresolved symbols in
    # earlier-listed libraries may only be resolved by later-listed libraries.
    # The Mac linker doesn't work that way, but other platforms do, and so
    # their linker invocations need to be constructed in this way.  There's
    # no compelling reason for Xcode's linker invocations to differ.

    if 'dependencies' in spec:
      for dependency in spec['dependencies']:
        xct.AddDependency(xcode_targets[dependency])
        # The support project also gets the dependencies (in case they are
        # needed for the actions/rules to work).
        if support_xct:
          support_xct.AddDependency(xcode_targets[dependency])

    if 'libraries' in spec:
      for library in spec['libraries']:
        xct.FrameworksPhase().AddFile(library)
        # Add the library's directory to LIBRARY_SEARCH_PATHS if necessary.
        # I wish Xcode handled this automatically.
        library_dir = posixpath.dirname(library)
        if library_dir not in xcode_standard_library_dirs and (
            not xct.HasBuildSetting(_library_search_paths_var) or
            library_dir not in xct.GetBuildSetting(_library_search_paths_var)):
          xct.AppendBuildSetting(_library_search_paths_var, library_dir)

    for configuration_name in configuration_names:
      configuration = spec['configurations'][configuration_name]
      xcbc = xct.ConfigurationNamed(configuration_name)
      for include_dir in configuration.get('mac_framework_dirs', []):
        xcbc.AppendBuildSetting('FRAMEWORK_SEARCH_PATHS', include_dir)
      for include_dir in configuration.get('include_dirs', []):
        xcbc.AppendBuildSetting('HEADER_SEARCH_PATHS', include_dir)
      for library_dir in configuration.get('library_dirs', []):
        if library_dir not in xcode_standard_library_dirs and (
            not xcbc.HasBuildSetting(_library_search_paths_var) or
            library_dir not in xcbc.GetBuildSetting(_library_search_paths_var)):
          xcbc.AppendBuildSetting(_library_search_paths_var, library_dir)

      if 'defines' in configuration:
        for define in configuration['defines']:
          set_define = EscapeXcodeDefine(define)
          xcbc.AppendBuildSetting('GCC_PREPROCESSOR_DEFINITIONS',
set_define)NEWLINE if 'xcode_settings' in configuration:NEWLINE for xck, xcv in configuration['xcode_settings'].items():NEWLINE xcbc.SetBuildSetting(xck, xcv)NEWLINE if 'xcode_config_file' in configuration:NEWLINE config_ref = pbxp.AddOrGetFileInRootGroup(NEWLINE configuration['xcode_config_file'])NEWLINE xcbc.SetBaseConfiguration(config_ref)NEWLINENEWLINE build_files = []NEWLINE for build_file, build_file_dict in data.items():NEWLINE if build_file.endswith('.gyp'):NEWLINE build_files.append(build_file)NEWLINENEWLINE for build_file in build_files:NEWLINE xcode_projects[build_file].Finalize1(xcode_targets, serialize_all_tests)NEWLINENEWLINE for build_file in build_files:NEWLINE xcode_projects[build_file].Finalize2(xcode_targets,NEWLINE xcode_target_to_target_dict)NEWLINENEWLINE for build_file in build_files:NEWLINE xcode_projects[build_file].Write()NEWLINE
from lib.base import BaseJiraAction
from lib.formatters import to_attachment_dict

__all__ = [
    'GetJiraIssueAttachmentsAction'
]


class GetJiraIssueAttachmentsAction(BaseJiraAction):
    """Action which returns all attachments of a JIRA issue as dicts."""

    def run(self, issue_key):
        """Fetch the issue identified by ``issue_key`` and serialize each of
        its attachments with ``to_attachment_dict``."""
        issue = self._client.issue(issue_key)
        return [to_attachment_dict(attachment)
                for attachment in issue.fields.attachment]
from django.contrib.contenttypes.models import ContentType
import json

from django.http import Http404, HttpResponse
from django.contrib import messages
from django.contrib.auth import get_user_model
from django.contrib.auth.decorators import login_required, user_passes_test
from django.core.urlresolvers import reverse
from django.shortcuts import get_object_or_404, redirect, render
from guardian.decorators import permission_required

from guardian.shortcuts import get_objects_for_user

from account.models import DepartmentGroup
from backend.tasks import TestConnectionTask
from event.models import NotificationPreferences
from .models import Application, Department, Environment, Server, ServerRole
from task.models import Execution


@login_required
def index(request):
    """Dashboard page; redirects to the first-steps page when the current
    department has no executions yet."""
    data = {}
    executions = Execution.objects.filter(task__application__department_id=request.current_department_id)
    if not executions.count():
        return redirect(reverse('first_steps_page'))
    return render(request, 'page/index.html', data)


@permission_required('core.view_application', (Application, 'id', 'application_id'))
def application_page(request, application_id):
    """Application detail page."""
    data = {}
    data['application'] = get_object_or_404(Application, pk=application_id)
    return render(request, 'page/application.html', data)


@permission_required('core.view_environment', (Environment, 'id', 'environment_id'))
def environment_page(request, environment_id):
    """Environment detail page with its servers (roles prefetched)."""
    data = {}
    data['environment'] = get_object_or_404(Environment, pk=environment_id)
    data['servers'] = list(Server.objects.filter(environment_id=environment_id).prefetch_related('roles'))
    return render(request, 'page/environment.html', data)


@permission_required('core.view_environment', (Environment, 'servers__id', 'server_id'))
def server_test(request, server_id):
    """Start an async connection test for a server and hand the Celery task id
    to the partial template for polling."""
    data = {}
    data['server'] = get_object_or_404(Server, pk=server_id)
    data['task_id'] = TestConnectionTask().delay(server_id).id
    return render(request, 'partial/server_test.html', data)


@login_required
def server_test_ajax(request, task_id):
    """Poll a connection-test task and return its state as JSON.

    ``status`` is True/False once the task finished, None while running.
    """
    data = {}
    task = TestConnectionTask().AsyncResult(task_id)
    if task.status == 'SUCCESS':
        status, output = task.get()
        data['status'] = status
        data['output'] = output
    elif task.status == 'FAILED':
        # NOTE(review): Celery's built-in failure state is named 'FAILURE';
        # this branch may be unreachable as written -- confirm against
        # TestConnectionTask before relying on it.
        data['status'] = False
    else:
        data['status'] = None
    return HttpResponse(json.dumps(data), content_type="application/json")


@login_required
def first_steps_page(request):
    """Static onboarding page."""
    data = {}
    return render(request, 'page/first_steps.html', data)


@login_required
def settings_page(request, section='user', subsection='profile'):
    """Settings hub; dispatches to a `_settings_<section>_<subsection>`
    helper found in this module's globals."""
    data = {}
    data['section'] = section
    data['subsection'] = subsection
    data['department'] = Department(pk=request.current_department_id)
    data['on_settings'] = True
    handler = '_settings_%s_%s' % (section, subsection)
    # Access control: 'system' pages are superuser-only, 'department' pages
    # need the change_department object permission.
    if section == 'system' and request.user.is_superuser is not True:
        return redirect('index')
    if section == 'department' and not request.user.has_perm('core.change_department', obj=data['department']):
        return redirect('index')
    if handler in globals():
        data = globals()[handler](request, data)
    else:
        raise Http404
    return render(request, 'page/settings.html', data)


def _settings_account_profile(request, data):
    """User profile form (email read-only)."""
    data['subsection_template'] = 'partial/account_profile.html'
    from account.forms import account_create_form
    form = account_create_form('user_profile', request, request.user.id)
    form.fields['email'].widget.attrs['readonly'] = True
    data['form'] = form
    if request.method == 'POST':
        if form.is_valid():
            form.save()
            data['user'] = form.instance
            messages.success(request, 'Saved')
    return data


def _settings_account_password(request, data):
    """Password change form; hashes the submitted plain-text password."""
    data['subsection_template'] = 'partial/account_password.html'
    from account.forms import account_create_form
    form = account_create_form('user_password', request, request.user.id)
    data['form'] = form
    if request.method == 'POST':
        if form.is_valid():
            user = form.save(commit=False)
            user.set_password(user.password)
            user.save()
            data['user'] = form.instance
            messages.success(request, 'Saved')
    return data


def _settings_account_notifications(request, data):
    """Per-application ExecutionFinish notification toggles."""
    data['subsection_template'] = 'partial/account_notifications.html'
    data['applications'] = get_objects_for_user(request.user, 'core.view_application')
    content_type = ContentType.objects.get_for_model(Application)
    if request.method == 'POST':
        for application in data['applications']:
            key = 'notification[%s]' % application.id
            notification, created = NotificationPreferences.objects.get_or_create(
                user=request.user,
                event_type='ExecutionFinish',
                content_type=content_type,
                object_id=application.id)
            # Only write when the checkbox state actually changed.
            if notification.is_active != (key in request.POST):
                notification.is_active = key in request.POST
                notification.save()
        messages.success(request, 'Saved')
    data['notifications'] = NotificationPreferences.objects.filter(
        user=request.user,
        event_type='ExecutionFinish',
        content_type=content_type.id).values_list('object_id', 'is_active')
    data['notifications'] = dict(data['notifications'])
    return data


def _settings_department_applications(request, data):
    """Applications of the current department."""
    data['subsection_template'] = 'partial/application_list.html'
    data['applications'] = Application.objects.filter(department_id=request.current_department_id)
    data['empty'] = not bool(data['applications'].count())
    return data


def _settings_department_users(request, data):
    """Users holding permissions on the current department."""
    data['subsection_template'] = 'partial/user_list.html'
    from guardian.shortcuts import get_users_with_perms
    department = Department.objects.get(pk=request.current_department_id)
    data['users'] = get_users_with_perms(department).prefetch_related('groups__departmentgroup').order_by('name')
    data['department_user_list'] = True
    data['form_name'] = 'user'
    return data


def _settings_department_groups(request, data):
    """Groups of the current department."""
    data['subsection_template'] = 'partial/group_list.html'
    data['groups'] = DepartmentGroup.objects.filter(department_id=request.current_department_id)
    return data


def _settings_department_serverroles(request, data):
    """Server roles of the current department."""
    data['subsection_template'] = 'partial/serverrole_list.html'
    data['serverroles'] = ServerRole.objects.filter(department_id=request.current_department_id)
    data['empty'] = not bool(data['serverroles'].count())
    return data


@user_passes_test(lambda u: u.is_superuser)
def _settings_system_departments(request, data):
    """All departments (superuser only)."""
    data['subsection_template'] = 'partial/department_list.html'
    data['departments'] = Department.objects.all()
    return data


@user_passes_test(lambda u: u.is_superuser)
def _settings_system_users(request, data):
    """All users (superuser only)."""
    data['subsection_template'] = 'partial/user_list.html'
    data['users'] = get_user_model().objects.exclude(id=-1).prefetch_related('groups__departmentgroup__department').order_by('name')
    data['form_name'] = 'usersystem'
    return data


def department_switch(request, id):
    """Switch the active department stored in the session, if permitted."""
    department = get_object_or_404(Department, pk=id)
    if request.user.has_perm('core.view_department', department):
        request.session['current_department_id'] = int(id)
    else:
        messages.error(request, 'Access forbidden')
    return redirect('index')


def handle_403(request):
    """403 handler: notify the user and send them back to the dashboard.

    Fixed: removed a leftover debug statement (``print 'aaaaaaaa'``), which
    was also Python-2-only syntax.
    """
    messages.error(request, 'Access forbidden')
    return redirect('index')
# Copyright 2018 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License").
# You may not use this file except in compliance with the License.
# A copy of the License is located at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# or in the "license" file accompanying this file. This file is distributed
# on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
# express or implied. See the License for the specific language governing
# permissions and limitations under the License.

import pytest

import mxnet as mx

from gluonts.distribution.gaussian import Gaussian

DISTR_SHAPE = (3, 4)

DISTR_CASES = [
    Gaussian(
        mu=mx.nd.random.normal(shape=DISTR_SHAPE),
        sigma=mx.nd.random.uniform(shape=DISTR_SHAPE),
    )
]

# Each case is ((axis, begin, end), expected length of the sliced axis).
SLICE_AXIS_CASES = [[(0, 0, None), 3], [(0, 1, 3), 2], [(1, -1, None), 1]]


@pytest.mark.parametrize(
    "slice_axis_args, expected_axis_length", SLICE_AXIS_CASES
)
@pytest.mark.parametrize("distr", DISTR_CASES)
def test_distr_slice_axis(distr, slice_axis_args, expected_axis_length):
    """Slicing a distribution along an axis shrinks its batch shape to the
    expected length on that axis."""
    (axis, begin, end) = slice_axis_args
    sliced = distr.slice_axis(axis, begin, end)
    assert expected_axis_length == sliced.batch_shape[axis]
#!python3
#encoding: utf-8
"""Substitutes for CSS pseudo-classes that BeautifulSoup does not implement.

BeautifulSoup only implements the nth-of-type pseudo-class
(NotImplementedError: Only the following pseudo-classes are implemented:
nth-of-type.), so this module provides replacement helper functions.

Fixed: the module description and the ``Has`` description were bare string
literals placed *after* the imports / *before* the method, where Python
discards them; they are now real docstrings (and translated to English).
"""
import requests
from bs4 import BeautifulSoup
import os.path


class CssPseudoClass(object):
    """Pseudo-class style queries over parsed HTML elements."""

    def __init__(self):
        pass

    def Has(self, parent, child):
        """Return whether ``parent`` has a direct child element named ``child``.

        @param parent: the target HTML element (assumed to expose
            ``.children`` yielding nodes with a ``.name`` -- a bs4 tag)
        @param child: the tag name to look for among ``parent``'s children
        @return: True if such a child exists, False otherwise
        """
        return any(child == c.name for c in parent.children)
#!/usr/bin/env pythonNEWLINE# -*- coding: utf-8 -*-NEWLINE# @Time : 2018/7/31 12:28NEWLINE# @Author : Ma xiaoquanNEWLINE# @Email : xiaoquan.ma@nokia-sbell.comNEWLINE# @Site :NEWLINE# @File : pydbsync.pyNEWLINE# @desc:NEWLINENEWLINEimport MySQLdb as mdbNEWLINEimport reNEWLINEimport sysNEWLINEimport os.pathNEWLINEfrom datetime import datetimeNEWLINENEWLINEcfg = NoneNEWLINENEWLINEif os.path.isfile("pydbconfig.py"):NEWLINE import pydbconfigNEWLINE cfg = pydbconfig.cfgNEWLINE print "Loaded from python config"NEWLINEelse:NEWLINE import inspectNEWLINE import yamlNEWLINE cfg = yaml.load(open(os.path.dirname(os.path.abspath(inspect.getfile(inspect.currentframe()))) + '/config.yaml', 'r'))NEWLINE print "Loaded from yaml"NEWLINENEWLINEtablecfg = cfg["tables"]NEWLINEsrcparams = cfg["parameters"]["source"]NEWLINEdstparams = cfg["parameters"]["destination"]NEWLINEbatchmax = cfg["parameters"]["batchmax"]NEWLINENEWLINENEWLINEdef countval(db, ctable, fromKey=None):NEWLINE cur = db.cursor()NEWLINE if fromKey is None:NEWLINE cur.execute("SELECT COUNT(*) FROM `" + ctable + "`")NEWLINE else:NEWLINE cur.execute("SELECT COUNT(*) FROM `" + ctable + "` WHERE `" + tablecfg[ctable]["updated-field"] + "` >= %s",NEWLINE [fromKey])NEWLINENEWLINE ret = cur.fetchone()[0]NEWLINE return retNEWLINENEWLINENEWLINEdef findAndCompareRow(row, cmpdescribe, showcreate):NEWLINE ret = FalseNEWLINE # Search "row" into row list (from DESCRIBE query)NEWLINE for i in range(len(cmpdescribe)):NEWLINE currow = cmpdescribe[i]NEWLINE # Found a column with the same name, comparing signatureNEWLINE if currow[0] == row[0]:NEWLINE ret = TrueNEWLINE if len(row) != len(currow):NEWLINE print rowNEWLINE print currowNEWLINE ret = FalseNEWLINE else:NEWLINE for j in range(len(row)):NEWLINE if j == 0:NEWLINE continueNEWLINE if row[j] != currow[j]:NEWLINE if row[j] == '' and currow[j] == 'on update CURRENT_TIMESTAMP':NEWLINE # May be a bug in SRC Host (MySQL 5.0)NEWLINE checkexpr = re.search('^\s*`'+row[0]+'`.*on update 
current_timestamp,?$', showcreate, re.MULTILINE|re.I)NEWLINE if checkexpr is None:NEWLINE print rowNEWLINE print currowNEWLINE ret = FalseNEWLINE else:NEWLINE print rowNEWLINE print currowNEWLINE ret = FalseNEWLINE return retNEWLINENEWLINENEWLINEdef synctable(srcdb, dstdb, table, fromkey=None):NEWLINE tabledef = []NEWLINE duplicatedef = []NEWLINE datetimeWorkaround = []NEWLINE cur = srcdb.cursor()NEWLINE cur.execute("DESCRIBE `" + table + "`")NEWLINE for i in range(cur.rowcount):NEWLINE row = cur.fetchone()NEWLINE tabledef.append("%s")NEWLINE duplicatedef.append("`" + row[0] + "`=VALUES(`" + row[0] + "`)")NEWLINE datetimeWorkaround.append(row[1])NEWLINENEWLINE # Sync allNEWLINE recordsToSync = countval(srcdb, table, fromkey)NEWLINE currentRecord = 0NEWLINE while currentRecord < recordsToSync:NEWLINE maxRecordToSync = min(recordsToSync, batchmax)NEWLINE print "Max Batch: " + str(maxRecordToSync) + " values; " + str(recordsToSync) + " in total, remaining: " \NEWLINE + str(recordsToSync - currentRecord)NEWLINE cur = srcdb.cursor()NEWLINE dstcur = dstdb.cursor()NEWLINE if fromkey is None:NEWLINE cur.execute("SELECT * FROM `" + table + "` ORDER BY `" + tablecfg[table]["updated-field"]NEWLINE + "` ASC LIMIT " + str(currentRecord) + "," + str(maxRecordToSync))NEWLINE else:NEWLINE cur.execute("SELECT * FROM `" + table + "` WHERE `" + tablecfg[table]["updated-field"] \NEWLINE + "` >= %s ORDER BY `" + tablecfg[table]["updated-field"]NEWLINE + "` ASC LIMIT " + str(currentRecord) + "," + str(maxRecordToSync), [fromkey])NEWLINENEWLINE for i in range(cur.rowcount):NEWLINE row = list(cur.fetchone())NEWLINE print iNEWLINE for j in range(0, len(row)):NEWLINE if row[j] is None and (datetimeWorkaround[j] == "datetime" or datetimeWorkaround[j] == "timestamp"):NEWLINE row[j] = "0000-00-00 00:00:00"NEWLINENEWLINE q = "INSERT INTO `" + table + "` VALUES (" + ",".join(tabledef) + ") ON DUPLICATE KEY UPDATE " \NEWLINE + ",".join(duplicatedef)NEWLINE dstcur.execute(q, row)NEWLINENEWLINE # 
if len(dstcur.messages) > 0:NEWLINE # print rowNEWLINE currentRecord += cur.rowcountNEWLINENEWLINENEWLINEdef main():NEWLINE currentTable = ""NEWLINE srcdb = NoneNEWLINE dstdb = NoneNEWLINE try:NEWLINE srcdb = mdb.connect(host=srcparams["host"], user=srcparams["user"], passwd=srcparams["pass"], port=srcparams["port"], db=srcparams["schema"])NEWLINE print "Connected to source host"NEWLINE dstdb = mdb.connect(host=dstparams["host"], user=dstparams["user"], passwd=dstparams["pass"], port=dstparams["port"], db=dstparams["schema"])NEWLINE print "Connected to destination host"NEWLINENEWLINE for table in tablecfg:NEWLINE currentTable = tableNEWLINE cur = dstdb.cursor()NEWLINE dstlast = NoneNEWLINE createTable = FalseNEWLINE try:NEWLINE cur.execute("SELECT MAX(`" + tablecfg[table]["updated-field"] + "`) FROM `" + table + "`")NEWLINE dstlast = cur.fetchone()NEWLINENEWLINE # Check table schemaNEWLINE dstdescribe = []NEWLINE cur = dstdb.cursor()NEWLINE cur.execute("DESCRIBE `" + table + "`")NEWLINE for i in range(cur.rowcount):NEWLINE row = cur.fetchone()NEWLINE dstdescribe.append(row)NEWLINENEWLINE cur = dstdb.cursor()NEWLINE cur.execute("SHOW CREATE TABLE " + table)NEWLINE showcreate = cur.fetchone()[1]NEWLINENEWLINE cur = srcdb.cursor()NEWLINE cur.execute("DESCRIBE `" + table + "`")NEWLINENEWLINE # if cur.rowcount != len(dstdescribe):NEWLINE # print "WARNING: structure of \"" + table + "\" table doesn't match between source and destination DB Host, dropping and recreating table"NEWLINE # ## dstdb.cursor().execute("DROP TABLE `" + table + "`")NEWLINE # # dstlast = NoneNEWLINE # # createTable = TrueNEWLINE # else:NEWLINE # for i in range(cur.rowcount):NEWLINE # row = cur.fetchone()NEWLINE # if findAndCompareRow(row, dstdescribe, showcreate) == False:NEWLINE # print "WARNING: structure of \"" + table + "\" table doesn't match between source and destination DB Host, dropping and recreating table"NEWLINE # ## dstdb.cursor().execute("DROP TABLE `" + table + "`")NEWLINE # # 
dstlast = NoneNEWLINE # # createTable = TrueNEWLINENEWLINE except mdb.Error, ex:NEWLINE if ex.args[0] != 1146:NEWLINE # If other error than "table doesn't exists" (eg. errno 1146)NEWLINE raise exNEWLINE else:NEWLINE print "Table " + table + " doesn't exists on destination host, creating now"NEWLINE # If table doesn't exists (eg. errno 1146)NEWLINE createTable = TrueNEWLINE passNEWLINENEWLINENEWLINE # cur = srcdb.cursor()NEWLINE # cur.execute("SHOW CREATE TABLE `" + table + "`")NEWLINE # createrow = cur.fetchone()NEWLINE # dstdb.cursor().execute(createrow[1])NEWLINENEWLINE # Sync only changedNEWLINE cur = srcdb.cursor()NEWLINE cur.execute("SELECT MAX(`" + tablecfg[table]["updated-field"] + "`) FROM `" + table + "`")NEWLINE srclast = cur.fetchone()NEWLINENEWLINE if dstlast[0] != srclast[0]:NEWLINE print table + ": sync from " + str(dstlast[0])NEWLINE synctable(srcdb, dstdb, table, dstlast[0] - 3000)NEWLINE print table + " sync ended"NEWLINE else:NEWLINE print table + " is already sync'ed"NEWLINENEWLINENEWLINE except mdb.Error, e:NEWLINE print "Error %d on table %s: %s" % (e.args[0], currentTable, e.args[1])NEWLINE sys.exit(1)NEWLINE finally:NEWLINE if srcdb is not None:NEWLINE srcdb.close()NEWLINE if dstdb is not None:NEWLINE dstdb.close()NEWLINENEWLINENEWLINEif __name__ == '__main__':NEWLINE t_start = datetime.now() # 起始时间NEWLINE main()NEWLINE t_end = datetime.now() # 关闭时间NEWLINE print "The script run time is:", (t_end - t_start).total_seconds() # 程序运行时间
# -*- coding: utf-8 -*-NEWLINEfrom __future__ import print_functionNEWLINEfrom time import timeNEWLINEimport osNEWLINENEWLINEimport numpy as npNEWLINEfrom pylab import *NEWLINEnp.set_printoptions(threshold=np.nan)NEWLINEnp.set_printoptions(linewidth=150)NEWLINENEWLINEfrom sklearn import preprocessingNEWLINEfrom sklearn import cross_validationNEWLINEfrom sklearn.utils.extmath import densityNEWLINEfrom sklearn import metricsNEWLINEfrom sklearn.feature_selection import SelectKBest, chi2NEWLINENEWLINEfrom status import ReaderNEWLINENEWLINENEWLINEclass ClassifierWrapper(object):NEWLINE NEWLINE NEWLINE def __init__(self, classifier, X, y, label_encoder, transformer):NEWLINE self.classifier = classifierNEWLINE self.label_encoder = label_encoderNEWLINE self.transformer = transformerNEWLINE self.X = XNEWLINE self.y = yNEWLINE NEWLINE #Implementation of standard classifier functionsNEWLINE def fit(self,X=None, y=None):NEWLINE if X == None and y == None:NEWLINE self.classifier.fit(self.X, self.y)NEWLINE else:NEWLINE self.classifier.fit(X,y)NEWLINE NEWLINE def predict(self, data_features):NEWLINE return self.classifier.predict(data_features)NEWLINE NEWLINE NEWLINE #Other functionsNEWLINE def classify(self, datum):NEWLINE guess = self.classifier.predict(self.transformer.transform(datum))NEWLINE return self.label_encoder.inverse_transform(guess[0])NEWLINE NEWLINE def benchmark(self, top_n=0, confusion_matrix=False, report=False, verbose=False):NEWLINE X_train, X_test, y_train, y_test = cross_validation.train_test_split(NEWLINE self.X, self.y, test_size=0.4, random_state=0)NEWLINE #feature_names = np.asarray(self.transformer.get_feature_names())NEWLINE categories = list(self.label_encoder.classes_)NEWLINE if verbose:NEWLINE print('_' * 80)NEWLINE print("Training: ")NEWLINE print(self.classifier)NEWLINE t0 = time()NEWLINE self.classifier.fit(X_train, y_train)NEWLINE train_time = time() - t0NEWLINE if verbose: print("train time: {:.3}s".format(train_time))NEWLINENEWLINE t0 = 
time()NEWLINE pred = self.classifier.predict(X_test)NEWLINE test_time = time() - t0NEWLINE if verbose: print("test time: {:.3}s".format(train_time))NEWLINENEWLINE score = metrics.f1_score(y_test, pred)NEWLINE if verbose: print("f1 score: {:.3}".format(score))NEWLINENEWLINE if hasattr(self.classifier, 'coef_') and verbose and False:#No feature names yetNEWLINE print("dimensionality: %d" % self.classifier.coef_.shape[1])NEWLINE print("density: %f" % density(clf.coef_))NEWLINE if top_n > 0 and feature_names is not None:NEWLINE print("top {0} keywords per class:".format(top_n))NEWLINE for i, category in enumerate(categories):NEWLINE topn = np.argsort(clf.coef_[i])[-top_n:]NEWLINE print("{0}: {1}".format(category, " ".join(feature_names[topn])))NEWLINE print()NEWLINENEWLINE if report:NEWLINE print("classification report:")NEWLINE print(metrics.classification_report(y_test, pred,NEWLINE target_names=categories))NEWLINENEWLINE if confusion_matrix:NEWLINE print("confusion matrix:")NEWLINE print(["{0}:{1}".format(i, category) for (i, category) in enumerate(categories)])NEWLINE conf_arr = metrics.confusion_matrix(y_test, pred) NEWLINE norm_conf = []NEWLINE for i in conf_arr:NEWLINE a = 0NEWLINE tmp_arr = []NEWLINE a = sum(i, 0)NEWLINE for j in i:NEWLINE tmp_arr.append(float(j)/float(a))NEWLINE norm_conf.append(tmp_arr)NEWLINE fig = plt.figure()NEWLINE ax = fig.add_subplot(111)NEWLINE res = ax.imshow(array(norm_conf), cmap=cm.jet, interpolation='nearest')NEWLINE for i, cas in enumerate(conf_arr):NEWLINE for j, c in enumerate(cas):NEWLINE if c>0:NEWLINE plt.text(j-.2, i+.2, c, fontsize=14)NEWLINE cb = fig.colorbar(res)NEWLINE plt.show()NEWLINE print()NEWLINENEWLINE if verbose: print()NEWLINE clf_descr = str(self.classifier).split('(')[0]NEWLINE return clf_descr, score, train_time, test_time #For comparative printoutNEWLINENEWLINE def estimate_accuracy(self, trials, verbose=False):NEWLINE score = 0.0NEWLINE i = 0NEWLINE while i < trials:NEWLINE X_train, X_test, y_train, y_test 
= cross_validation.train_test_split(NEWLINE self.X, self.y, test_size=0.4, random_state=0)NEWLINE self.classifier.fit(X_train, y_train)NEWLINE score +=self.classifier.score(X_test,y_test)NEWLINE i+=1NEWLINE if verbose: print("Average accuracy over {0} iterations: {1} ".format(trials, score/float(i)))NEWLINE return score / float(i)NEWLINE NEWLINENEWLINEclass ClassifierFactory(object):NEWLINE NEWLINE def __init__(self, classifier_dic):NEWLINE self.classifier = classifier_dicNEWLINE classifications = self.classifier['classifications']NEWLINE self.possible = TrueNEWLINE self.unreviewed = TrueNEWLINE self.reviewed = TrueNEWLINE self.data_files = {c : {} for c in classifications}NEWLINE for classification in classifications:NEWLINE seed = [f for f in self.classifier['seed'] if f.split(os.sep)[-1].find(classification) == 0]NEWLINE reviewed = [f for f in self.classifier['reviewed'] if f.split(os.sep)[-1].find(classification) == 0]NEWLINE unreviewed = [f for f in self.classifier['unreviewed'] if f.split(os.sep)[-1].find(classification) == 0]NEWLINE self.data_files[classification]["seed"]=seedNEWLINE self.data_files[classification]["reviewed"]=reviewed NEWLINE self.data_files[classification]["unreviewed"]=unreviewed NEWLINE if len(reviewed) == 0 and len(seed) == 0:NEWLINE self.reviewed = FalseNEWLINE if len(unreviewed) == 0:NEWLINE self.unreviewed = FalseNEWLINE if len(reviewed) == 0 and len(seed) == 0 and len(unreviewed) == 0:NEWLINE del self.data_files[classification]NEWLINE if self.unreviewed == False and self.reviewed == False:NEWLINE self.possible = FalseNEWLINE self.data = [] #Data featuresNEWLINE self.labels = [] #Data labelsNEWLINE NEWLINE def create_data_set(self, data_type):NEWLINE for classification in self.data_files.keys():NEWLINE if data_type == "reviewed" or data_type == "both":NEWLINE for f in self.data_files[classification]["reviewed"]:NEWLINE datum =Reader.read_reviewed(f)NEWLINE if datum:NEWLINE self.data.append(datum)NEWLINE 
self.labels.append(classification)NEWLINE if data_type == "both":NEWLINE for fs in self.data_files[classification]["seed"]:NEWLINE for datum in Reader.read_seed(fs):NEWLINE self.data.append(datum)NEWLINE self.labels.append(classification)NEWLINE if data_type == "unreviewed" or data_type == "both":NEWLINE for f in self.data_files[classification]["unreviewed"]:NEWLINE for datum in Reader.read_seed(fs):NEWLINE self.data.append(datum)NEWLINE self.labels.append(classification)NEWLINE print("{0} items of data".format(len(self.data)))NEWLINE NEWLINE def test_classifier(self, scikit_classifier, transformer, trials):NEWLINE X = transformer.fit_transform(self.data)NEWLINE le = preprocessing.LabelEncoder()NEWLINE y = le.fit_transform(self.labels)NEWLINE cw = ClassifierWrapper(scikt_classifier, X, y, transformer, le)NEWLINE return cw.estimate_accuracy(trials, verbose = True)NEWLINE NEWLINE NEWLINE def create_classifier(self, scikit_classifier, transformer):NEWLINE t = transformerNEWLINE X = t.fit_transform(self.data)NEWLINE le = preprocessing.LabelEncoder()NEWLINE y = le.fit_transform(self.labels)NEWLINE print(le.classes_)NEWLINE cw = ClassifierWrapper(scikit_classifier, X, y, le, t)NEWLINE return cwNEWLINE NEWLINE NEWLINE NEWLINE NEWLINE NEWLINE NEWLINE NEWLINE NEWLINE NEWLINE NEWLINE NEWLINE NEWLINE NEWLINE NEWLINE NEWLINE NEWLINE NEWLINE NEWLINE NEWLINE NEWLINE NEWLINE NEWLINE NEWLINE NEWLINE NEWLINE NEWLINE NEWLINE NEWLINE NEWLINE NEWLINE NEWLINE NEWLINE
# Lint as: python3

# Copyright 2020 Google LLC.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""Tests for client."""

import os
import re
from typing import List

import csv
import tempfile
import unittest

import mock
import pandas as pd
import tensorflow as tf

from tfrecorder import utils
from tfrecorder import beam_pipeline
from tfrecorder import converter
from tfrecorder import dataset_loader
from tfrecorder import test_utils
from tfrecorder import input_schema


# pylint: disable=protected-access


class IsDirectoryTest(unittest.TestCase):
  """Tests `_is_directory`."""

  def test_local_ok(self):
    """Test function returns True on local directory."""

    with tempfile.TemporaryDirectory() as dirname:
      self.assertTrue(converter._is_directory(dirname))

  def test_local_exists_but_not_dir(self):
    """Test function returns False on local (non-directory) file."""

    with tempfile.NamedTemporaryFile(prefix='test_', dir='/tmp') as f:
      self.assertFalse(converter._is_directory(f.name))


# TODO(cezequiel): Refactor to per-function test case classes
class MiscTest(unittest.TestCase):
  """Misc tests for `client` module."""

  def setUp(self):
    # Shared fixtures used by both runner tests.
    self.test_df = test_utils.get_test_df()
    self.test_region = 'us-central1'
    self.test_project = 'foo'
    self.test_wheel = '/my/path/wheel.whl'

  @mock.patch.object(beam_pipeline, 'build_pipeline', autospec=True)
  def test_create_tfrecords_direct_runner(self, _):
    """Tests `create_tfrecords` Direct case."""
    r = converter.convert(
        self.test_df,
        runner='DirectRunner',
        output_dir='/tmp/direct_runner')
    self.assertCountEqual(r.keys(), ['job_id', 'metrics', 'tfrecord_dir'])
    self.assertCountEqual(
        r['metrics'].keys(), ['rows', 'good_images', 'bad_images'])

  @mock.patch.object(converter, '_get_dataflow_url')
  @mock.patch.object(beam_pipeline, 'build_pipeline')
  def test_create_tfrecords_dataflow_runner(self, mock_pipeline, mock_url):
    """Tests `create_tfrecords` Dataflow case."""
    job_id = 'foo_id'
    dataflow_url = 'http://some/job/url'
    mock_pipeline().run().job_id.return_value = job_id
    mock_url.return_value = dataflow_url
    # Dataflow needs cloud-accessible inputs: prefix the local image URIs
    # with a GCS scheme.
    df2 = self.test_df.copy()
    df2['image_uri'] = 'gs://' + df2['image_uri']

    outdir = '/tmp/dataflow_runner'
    os.makedirs(outdir, exist_ok=True)
    r = converter.convert(
        df2,
        runner='DataflowRunner',
        output_dir=outdir,
        region=self.test_region,
        project=self.test_project,
        tfrecorder_wheel=self.test_wheel)

    self.assertCountEqual(r.keys(), ['job_id', 'dataflow_url', 'tfrecord_dir'])
    self.assertEqual(r['job_id'], job_id)
    self.assertEqual(r['dataflow_url'], dataflow_url)
    # Output dir is timestamped: tfrecorder-<timestamp>[-suffix].
    self.assertRegex(r['tfrecord_dir'], fr'{outdir}/tfrecorder-.+-?.*')


class InputValidationTest(unittest.TestCase):
  """'Tests for validation input data."""

  def setUp(self):
    self.test_df = test_utils.get_test_df()
    self.test_region = 'us-central1'
    self.test_project = 'foo'
    self.test_wheel = '/my/path/wheel.whl'
    self.test_schema = input_schema.IMAGE_CSV_SCHEMA

  def test_valid_dataframe(self):
    """Tests valid DataFrame input."""
    self.assertIsNone(converter._validate_data(self.test_df, self.test_schema))

  def test_missing_image(self):
    """Tests missing image column."""
    with self.assertRaises(AttributeError):
      df2 = self.test_df.copy()
      df2.drop('image_uri', inplace=True, axis=1)
      converter._validate_data(df2, self.test_schema)

  def test_missing_label(self):
    """Tests missing label column."""
    with self.assertRaises(AttributeError):
      df2 = self.test_df.copy()
      df2.drop('label', inplace=True, axis=1)
      converter._validate_data(df2, self.test_schema)

  def test_missing_split(self):
    """Tests missing split column."""
    split_key = 'split'
    # The expected error message embeds the schema's key list verbatim, so
    # escape it before interpolating into the regex.
    schema_keys = re.escape(
        str(list(self.test_schema.input_schema_map.keys())))
    regex = fr'^.+column: {split_key}.+keys: {schema_keys}.$'
    with self.assertRaisesRegex(AttributeError, regex):
      df2 = self.test_df.copy()
      df2.drop(split_key, inplace=True, axis=1)
      converter._validate_data(df2, self.test_schema)

  def test_valid_runner(self):
    """Tests valid runner."""
    self.assertIsNone(converter._validate_runner(
        runner='DirectRunner',
        project=self.test_project,
        region=self.test_region,
        tfrecorder_wheel=None))

  def test_invalid_runner(self):
    """Tests invalid runner."""
    with self.assertRaises(AttributeError):
      converter._validate_runner(
          runner='FooRunner',
          project=self.test_project,
          region=self.test_region,
          tfrecorder_wheel=None)


  def test_gcs_path_with_dataflow_runner_missing_param(self):
    """Tests DataflowRunner with missing required parameter."""
for p, r in [NEWLINE (None, self.test_region), (self.test_project, None), (None, None)]:NEWLINE with self.assertRaises(AttributeError) as context:NEWLINE converter._validate_runner(NEWLINE runner='DataflowRunner',NEWLINE project=p,NEWLINE region=r,NEWLINE tfrecorder_wheel=self.test_wheel)NEWLINE self.assertTrue('DataflowRunner requires valid `project` and `region`'NEWLINE in repr(context.exception))NEWLINENEWLINENEWLINE def test_gcs_path_with_dataflow_runner_missing_wheel(self):NEWLINE """Tests DataflowRunner with missing required whl path."""NEWLINE with self.assertRaises(AttributeError) as context:NEWLINE converter._validate_runner(NEWLINE runner='DataflowRunner',NEWLINE project=self.test_project,NEWLINE region=self.test_region,NEWLINE tfrecorder_wheel=None)NEWLINE self.assertTrue('requires a tfrecorder whl file for remote execution.'NEWLINE in repr(context.exception))NEWLINENEWLINENEWLINEdef _make_csv_tempfile(data: List[List[str]]) -> tempfile.NamedTemporaryFile:NEWLINE """Returns `NamedTemporaryFile` representing an image CSV."""NEWLINENEWLINE f = tempfile.NamedTemporaryFile(mode='w+t', suffix='.csv')NEWLINE writer = csv.writer(f, delimiter=',')NEWLINE for row in data:NEWLINE writer.writerow(row)NEWLINE f.seek(0)NEWLINE return fNEWLINENEWLINENEWLINEdef get_sample_image_csv_data() -> List[List[str]]:NEWLINE """Returns sample CSV data in Image CSV format."""NEWLINENEWLINE data = test_utils.get_test_data()NEWLINE header = list(data.keys())NEWLINE content = [list(row) for row in zip(*data.values())]NEWLINE return [header] + contentNEWLINENEWLINENEWLINEclass ReadCSVTest(unittest.TestCase):NEWLINE """Tests `read_csv`."""NEWLINENEWLINE def setUp(self):NEWLINE data = get_sample_image_csv_data()NEWLINE self.header = data.pop(0)NEWLINE self.sample_data = dataNEWLINENEWLINE def test_valid_csv_no_header_no_names_specified(self):NEWLINE """Tests a valid CSV without a header and no header names given."""NEWLINE f = _make_csv_tempfile(self.sample_data)NEWLINE actual = 
converter.read_csv(f.name, header=None)NEWLINE self.assertEqual(NEWLINE list(actual.columns),NEWLINE list(input_schema.IMAGE_CSV_SCHEMA.get_input_keys()))NEWLINE self.assertEqual(actual.values.tolist(), self.sample_data)NEWLINENEWLINE def test_valid_csv_no_header_names_specified(self):NEWLINE """Tests valid CSV without a header, but header names are given."""NEWLINE f = _make_csv_tempfile(self.sample_data)NEWLINE actual = converter.read_csv(f.name, header=None, names=self.header)NEWLINE self.assertEqual(list(actual.columns), self.header)NEWLINE self.assertEqual(actual.values.tolist(), self.sample_data)NEWLINENEWLINE def test_valid_csv_with_header_no_names_specified(self):NEWLINE """Tests valid CSV with header, and no header names given (inferred)."""NEWLINENEWLINE f = _make_csv_tempfile([self.header] + self.sample_data)NEWLINE actual = converter.read_csv(f.name)NEWLINE self.assertEqual(list(actual.columns), self.header)NEWLINE self.assertEqual(actual.values.tolist(), self.sample_data)NEWLINENEWLINE def test_valid_csv_with_header_names_specified(self):NEWLINE """Tests valid CSV with header, and header names given (override)."""NEWLINENEWLINE f = _make_csv_tempfile([self.header] + self.sample_data)NEWLINE actual = converter.read_csv(f.name, names=self.header, header=0)NEWLINE self.assertEqual(list(actual.columns), self.header)NEWLINE self.assertEqual(actual.values.tolist(), self.sample_data)NEWLINENEWLINENEWLINEclass ToDataFrameTest(unittest.TestCase):NEWLINE """Tests `to_dataframe`."""NEWLINENEWLINE def setUp(self) -> None:NEWLINE sample_data = get_sample_image_csv_data()NEWLINE columns = sample_data.pop(0)NEWLINE self.input_df = pd.DataFrame(sample_data, columns=columns)NEWLINENEWLINE @mock.patch.object(converter, 'read_csv', autospec=True)NEWLINE def test_input_csv(self, read_csv):NEWLINE """Tests valid input CSV file."""NEWLINE expected = self.input_dfNEWLINE read_csv.return_value = expectedNEWLINE f = _make_csv_tempfile(get_sample_image_csv_data())NEWLINE actual 
= converter.to_dataframe(f.name)NEWLINE pd.testing.assert_frame_equal(actual, expected)NEWLINENEWLINE def test_input_dataframe_no_names_specified(self):NEWLINE """Tests valid input dataframe with no header names specified."""NEWLINE actual = converter.to_dataframe(self.input_df)NEWLINE pd.testing.assert_frame_equal(actual, self.input_df)NEWLINENEWLINE def test_input_dataframe_with_header(self):NEWLINE """Tests valid input dataframe with header specified."""NEWLINE names = list(self.input_df.columns[0:-1])NEWLINE actual = converter.to_dataframe(self.input_df, names=names)NEWLINE pd.testing.assert_frame_equal(actual, self.input_df[names])NEWLINENEWLINE @mock.patch.object(utils, 'read_image_directory', autospec=True)NEWLINE def test_input_image_dir(self, mock_fn):NEWLINE """Tests valid input image directory."""NEWLINENEWLINE mock_fn.return_value = self.input_dfNEWLINENEWLINE with tempfile.TemporaryDirectory() as input_data:NEWLINE actual = converter.to_dataframe(input_data)NEWLINE pd.testing.assert_frame_equal(actual, self.input_df)NEWLINENEWLINE def test_error_invalid_inputs(self):NEWLINE """Tests error handling with different invalid inputs."""NEWLINE inputs = [0, 'not_a_csv_file', list(), dict()]NEWLINE for input_data in inputs:NEWLINE with self.assertRaises(ValueError):NEWLINE converter.to_dataframe(input_data)NEWLINENEWLINENEWLINEclass ConvertAndLoadTest(unittest.TestCase):NEWLINE """Tests `convert_and_load`."""NEWLINENEWLINE def setUp(self):NEWLINE self.tfrecord_dir = '/path/to/tfrecords'NEWLINE self.dataset = tf.data.Dataset.from_tensor_slices([1, 2, 3])NEWLINE self.datasets = {NEWLINE 'train': self.dataset,NEWLINE 'validation': self.dataset,NEWLINE 'test': self.dataset,NEWLINE }NEWLINENEWLINE @mock.patch.object(dataset_loader, 'load', autospec=True)NEWLINE @mock.patch.object(converter, 'convert', autospec=True)NEWLINE def test_convert_and_load_normal(self, convert_fn, load_fn):NEWLINE """Tests normal case."""NEWLINE convert_fn.return_value = 
dict(tfrecord_dir=self.tfrecord_dir)NEWLINE load_fn.return_value = self.datasetsNEWLINE source = '/path/to/data.csv'NEWLINE datasets = converter.convert_and_load(source)NEWLINE self.assertEqual(datasets, self.datasets)NEWLINENEWLINENEWLINEif __name__ == '__main__':NEWLINE unittest.main()NEWLINE
"""NEWLINE Copyright (c) 2019-2020 Intel CorporationNEWLINE Licensed under the Apache License, Version 2.0 (the "License");NEWLINE you may not use this file except in compliance with the License.NEWLINE You may obtain a copy of the License atNEWLINE http://www.apache.org/licenses/LICENSE-2.0NEWLINE Unless required by applicable law or agreed to in writing, softwareNEWLINE distributed under the License is distributed on an "AS IS" BASIS,NEWLINE WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.NEWLINE See the License for the specific language governing permissions andNEWLINE limitations under the License.NEWLINE"""NEWLINEimport itertoolsNEWLINEfrom collections import CounterNEWLINEfrom pathlib import PathNEWLINEfrom typing import ListNEWLINENEWLINEimport networkx as nxNEWLINEimport pytestNEWLINEimport torchNEWLINEfrom copy import deepcopyNEWLINEfrom torch import nnNEWLINENEWLINEfrom nncf import register_moduleNEWLINEfrom nncf.dynamic_graph.context import ScopeNEWLINEfrom nncf.dynamic_graph.graph import InputAgnosticOperationExecutionContext, NNCFGraph, OperationExecutionContextNEWLINEfrom nncf.dynamic_graph.graph_builder import ModelInputInfoNEWLINEfrom nncf.dynamic_graph.operator_metatypes import NoopMetatypeNEWLINEfrom nncf.dynamic_graph.patch_pytorch import MODEL_INPUT_OP_NAMENEWLINEfrom nncf.dynamic_graph.version_agnostic_op_names import VersionAgnosticNamesNEWLINEfrom nncf.layer_utils import _NNCFModuleMixinNEWLINEfrom nncf.module_operations import BaseOpNEWLINEfrom nncf.nncf_network import NNCFNetwork, InsertionCommand, InsertionPoint, InsertionType, OperationPriority, \NEWLINE InsertionPointGraph, InsertionPointGraphNodeTypeNEWLINEfrom tests.conftest import TEST_ROOTNEWLINEfrom tests.helpers import TwoConvTestModel, BasicConvTestModel, check_correct_nncf_modules_replacementNEWLINENEWLINENEWLINEdef test_disable_shape_matching():NEWLINE class MatMulModel(nn.Module):NEWLINE def __init__(self):NEWLINE super().__init__()NEWLINE 
self.dummy_param = torch.nn.Parameter(torch.ones([1]))NEWLINENEWLINE def forward(self, inputs):NEWLINE half1, half2 = torch.chunk(inputs, 2, dim=2)NEWLINE return torch.bmm(half1, half2.transpose(1, 2))NEWLINENEWLINE model = MatMulModel()NEWLINENEWLINE input_shape_1 = (3, 32, 32)NEWLINE input_shape_2 = (4, 64, 64)NEWLINENEWLINE qnet_no_shape = NNCFNetwork(deepcopy(model), input_infos=[ModelInputInfo(input_shape_1), ],NEWLINE scopes_without_shape_matching=['MatMulModel']) # type: NNCFNetworkNEWLINE _ = qnet_no_shape(torch.zeros(*input_shape_1))NEWLINE graph_1 = deepcopy(qnet_no_shape.get_graph())NEWLINENEWLINE _ = qnet_no_shape(torch.zeros(*input_shape_2))NEWLINE graph_2 = deepcopy(qnet_no_shape.get_graph())NEWLINENEWLINE keys_1 = list(graph_1.get_all_node_keys())NEWLINE keys_2 = list(graph_2.get_all_node_keys())NEWLINE assert len(keys_1) == 2 # 1 input node + 1 operation nodeNEWLINE assert keys_1 == keys_2NEWLINENEWLINENEWLINE qnet = NNCFNetwork(model, input_infos=[ModelInputInfo(input_shape_1), ]) # type: NNCFNetworkNEWLINE _ = qnet(torch.zeros(*input_shape_1))NEWLINE _ = qnet(torch.zeros(*input_shape_2))NEWLINE # The second forward run should have led to an increase in registered node countsNEWLINE # since disable_shape_matching was False and the network was run with a differentNEWLINE # shape of input tensorNEWLINE assert qnet.get_graph().get_nodes_count() > graph_1.get_nodes_count()NEWLINENEWLINENEWLINEdef test_check_correct_modules_replacement():NEWLINE model = TwoConvTestModel()NEWLINE nncf_model = NNCFNetwork(TwoConvTestModel(), input_infos=[ModelInputInfo([1, 1, 4, 4])]) # type: NNCFNetworkNEWLINENEWLINE _, nncf_modules = check_correct_nncf_modules_replacement(model, nncf_model)NEWLINE assert set(nncf_modules) == set(nncf_model.get_nncf_modules())NEWLINENEWLINENEWLINE@register_moduleNEWLINEclass ModuleOfUser(torch.nn.Module):NEWLINE def __init__(self):NEWLINE super().__init__()NEWLINE self.weight = torch.nn.Parameter(torch.ones([1]))NEWLINENEWLINE def 
forward(self, input_):NEWLINE return input_ * self.weightNEWLINENEWLINENEWLINEclass TwoConvTestModelWithUserModule(TwoConvTestModel):NEWLINE def __init__(self):NEWLINE super().__init__()NEWLINE self.user_module = ModuleOfUser()NEWLINENEWLINE def forward(self, x):NEWLINE x = super().forward(x)NEWLINE x = self.user_module(x)NEWLINE return xNEWLINENEWLINENEWLINEdef test_custom_module_registering():NEWLINE model = TwoConvTestModelWithUserModule()NEWLINE nncf_model = NNCFNetwork(model, input_infos=[ModelInputInfo([1, 1, 4, 4])]) # type: NNCFNetworkNEWLINENEWLINE from nncf.layers import UNWRAPPED_USER_MODULESNEWLINE assert ModuleOfUser in UNWRAPPED_USER_MODULES.registry_dict.values()NEWLINENEWLINE # pylint: disable=protected-accessNEWLINE assert isinstance(nncf_model.user_module, ModuleOfUser)NEWLINE assert isinstance(nncf_model.user_module, _NNCFModuleMixin)NEWLINE assert type(nncf_model.user_module).__name__ == "NNCFUserModuleOfUser"NEWLINENEWLINE user_module_attrs = dir(nncf_model.user_module)NEWLINE for attr in dir(_NNCFModuleMixin):NEWLINE assert attr in user_module_attrsNEWLINENEWLINENEWLINE# pylint: disable=protected-accessNEWLINEdef test_find_node_in_nx_graph_by_scope():NEWLINE model = TwoConvTestModel()NEWLINE nncf_model = NNCFNetwork(deepcopy(model), input_infos=[ModelInputInfo([1, 1, 4, 4])]) # type: NNCFNetworkNEWLINE nncf_graph = nncf_model.get_original_graph()NEWLINENEWLINE # Valid scopes should be successfully foundNEWLINE valid_nncf_modules = nncf_model.get_nncf_modules()NEWLINE nodes_list = list(nncf_graph._nx_graph.nodes)NEWLINE for module_scope, _ in valid_nncf_modules.items():NEWLINE graph_node = nncf_graph.find_node_in_nx_graph_by_scope(module_scope)NEWLINE assert graph_node is not NoneNEWLINE assert isinstance(graph_node, dict)NEWLINE assert graph_node['key'] in nodes_listNEWLINENEWLINE fake_model = BasicConvTestModel()NEWLINE fake_nncf_model = NNCFNetwork(deepcopy(fake_model), input_infos=[ModelInputInfo([1, 1, 4, 4])])NEWLINENEWLINE # Not valid 
scopes shouldn't be foundNEWLINE fake_nncf_modules = fake_nncf_model.get_nncf_modules()NEWLINE for module_scope, _ in fake_nncf_modules.items():NEWLINE graph_node = nncf_graph.find_node_in_nx_graph_by_scope(module_scope)NEWLINE assert graph_node is NoneNEWLINENEWLINENEWLINEclass InsertionPointTestModel(nn.Module):NEWLINE def __init__(self):NEWLINE super().__init__()NEWLINE self.conv1 = nn.Conv2d(1, 1, 1, 1)NEWLINE self.linear_wts = nn.Parameter(torch.FloatTensor(size=(100, 100)))NEWLINE self.conv2 = nn.Conv2d(1, 1, 1, 1)NEWLINE self.relu = nn.ReLU()NEWLINENEWLINE def forward(self, input_):NEWLINE x = self.conv1(input_)NEWLINE x = x.flatten()NEWLINE x = nn.functional.linear(x, self.linear_wts)NEWLINE x = x.reshape((1, 1, 10, 10))NEWLINE x = self.conv2(x)NEWLINE x = self.relu(x)NEWLINE return xNEWLINENEWLINENEWLINEclass TestInsertionCommands:NEWLINE @pytest.fixture()NEWLINE def setup(self):NEWLINE self.compressed_model = NNCFNetwork(InsertionPointTestModel(),NEWLINE [ModelInputInfo([1, 1, 10, 10])]) # type: NNCFNetworkNEWLINENEWLINE conv1_module_scope = Scope.from_str('InsertionPointTestModel/NNCFConv2d[conv1]')NEWLINE conv1_module_context = InputAgnosticOperationExecutionContext('', conv1_module_scope, 0)NEWLINE point_for_conv1_weights = InsertionPoint(ia_op_exec_context=conv1_module_context,NEWLINE insertion_type=InsertionType.NNCF_MODULE_PRE_OP)NEWLINE point_for_conv1_inputs = InsertionPoint(ia_op_exec_context=conv1_module_context,NEWLINE insertion_type=InsertionType.NNCF_MODULE_PRE_OP)NEWLINE point_for_conv1_activations = InsertionPoint(ia_op_exec_context=conv1_module_context,NEWLINE insertion_type=InsertionType.NNCF_MODULE_POST_OP)NEWLINENEWLINE conv2_module_scope = Scope.from_str('InsertionPointTestModel/NNCFConv2d[conv2]')NEWLINE conv2_module_context = InputAgnosticOperationExecutionContext('', conv2_module_scope, 0)NEWLINE point_for_conv2_weights = InsertionPoint(ia_op_exec_context=conv2_module_context,NEWLINE 
insertion_type=InsertionType.NNCF_MODULE_PRE_OP)NEWLINE point_for_conv2_inputs = InsertionPoint(ia_op_exec_context=conv2_module_context,NEWLINE insertion_type=InsertionType.NNCF_MODULE_PRE_OP)NEWLINE point_for_conv2_activations = InsertionPoint(ia_op_exec_context=conv2_module_context,NEWLINE insertion_type=InsertionType.NNCF_MODULE_POST_OP)NEWLINENEWLINE linear_op_scope = Scope.from_str('InsertionPointTestModel/linear_0')NEWLINE linear_op_context = InputAgnosticOperationExecutionContext('linear',NEWLINE linear_op_scope,NEWLINE 0)NEWLINE point_for_linear_weight_input = InsertionPoint(ia_op_exec_context=linear_op_context,NEWLINE insertion_type=InsertionType.OPERATOR_PRE_HOOK)NEWLINE point_for_linear_activation = InsertionPoint(ia_op_exec_context=linear_op_context,NEWLINE insertion_type=InsertionType.OPERATOR_POST_HOOK)NEWLINENEWLINE relu_op_scope = Scope.from_str('InsertionPointTestModel/ReLU[relu]/relu')NEWLINE relu_op_context = InputAgnosticOperationExecutionContext('relu',NEWLINE relu_op_scope,NEWLINE 0)NEWLINE point_for_relu_inputs = InsertionPoint(ia_op_exec_context=relu_op_context,NEWLINE insertion_type=InsertionType.OPERATOR_PRE_HOOK)NEWLINE point_for_relu_activations = InsertionPoint(ia_op_exec_context=relu_op_context,NEWLINE insertion_type=InsertionType.OPERATOR_POST_HOOK)NEWLINENEWLINE available_points = [point_for_conv1_weights,NEWLINE point_for_conv2_weights,NEWLINE point_for_conv1_inputs,NEWLINE point_for_conv2_inputs,NEWLINE point_for_conv1_activations,NEWLINE point_for_conv2_activations,NEWLINE point_for_linear_activation,NEWLINE point_for_linear_weight_input,NEWLINE point_for_relu_activations,NEWLINE point_for_relu_inputs]NEWLINENEWLINE @pytest.mark.parametrize("insertion_point", available_points)NEWLINE def test_single_insertions(self, setup, insertion_point):NEWLINE if insertion_point.insertion_type in [InsertionType.OPERATOR_PRE_HOOK, InsertionType.OPERATOR_POST_HOOK]:NEWLINE hook = lambda x: xNEWLINE else:NEWLINE hook = BaseOp(lambda x: 
x)NEWLINENEWLINE command = InsertionCommand(insertion_point, hook)NEWLINE self.compressed_model.register_insertion_command(command)NEWLINE self.compressed_model.commit_compression_changes()NEWLINENEWLINE #pylint:disable=protected-accessNEWLINE if insertion_point.insertion_type == InsertionType.OPERATOR_PRE_HOOK:NEWLINE ctx = self.compressed_model.get_tracing_context()NEWLINE assert ctx._pre_hooks[command.insertion_point.ia_op_exec_context][0] is hookNEWLINE if insertion_point.insertion_type == InsertionType.OPERATOR_POST_HOOK:NEWLINE ctx = self.compressed_model.get_tracing_context()NEWLINE assert ctx._post_hooks[command.insertion_point.ia_op_exec_context][0] is hookNEWLINE if insertion_point.insertion_type == InsertionType.NNCF_MODULE_PRE_OP:NEWLINE module = self.compressed_model.get_module_by_scope(NEWLINE command.insertion_point.ia_op_exec_context.scope_in_model)NEWLINE assert module.pre_ops["0"] is hookNEWLINENEWLINE if insertion_point.insertion_type == InsertionType.NNCF_MODULE_POST_OP:NEWLINE module = self.compressed_model.get_module_by_scope(NEWLINE command.insertion_point.ia_op_exec_context.scope_in_model)NEWLINE assert module.post_ops["0"] is hookNEWLINENEWLINE priority_types = ["same", "different"]NEWLINE insertion_types = InsertionTypeNEWLINE priority_test_cases = list(itertools.product(priority_types, insertion_types))NEWLINENEWLINE @staticmethodNEWLINE def check_order(iterable1: List, iterable2: List, ordering: List):NEWLINE for idx, order in enumerate(ordering):NEWLINE assert iterable1[idx] is iterable2[order]NEWLINENEWLINE # pylint:disable=undefined-variableNEWLINE @pytest.mark.parametrize("case", priority_test_cases, ids=[x[1].name + '-' + x[0] for x in priority_test_cases])NEWLINE def test_priority(self, case, setup):NEWLINE #pylint:disable=too-many-branchesNEWLINE priority_type = case[0]NEWLINE insertion_type = case[1]NEWLINE if insertion_type in [InsertionType.NNCF_MODULE_PRE_OP, InsertionType.NNCF_MODULE_POST_OP]:NEWLINE hook1 = BaseOp(lambda x: 
x)NEWLINE hook2 = BaseOp(lambda x: 2 * x)NEWLINE hook3 = BaseOp(lambda x: 3 * x)NEWLINE else:NEWLINE hook1 = lambda x: xNEWLINE hook2 = lambda x: 2 * xNEWLINE hook3 = lambda x: 3 * xNEWLINENEWLINE if insertion_type == InsertionType.NNCF_MODULE_PRE_OP:NEWLINE point = self.point_for_conv2_weightsNEWLINE elif insertion_type == InsertionType.NNCF_MODULE_POST_OP:NEWLINE point = self.point_for_conv1_activationsNEWLINE elif insertion_type == InsertionType.OPERATOR_PRE_HOOK:NEWLINE point = self.point_for_linear_weight_inputNEWLINE elif insertion_type == InsertionType.OPERATOR_POST_HOOK:NEWLINE point = self.point_for_relu_activationsNEWLINENEWLINE if priority_type == "same":NEWLINE # Same-priority commands will be executed in registration orderNEWLINE command1 = InsertionCommand(point, hook1, OperationPriority.DEFAULT_PRIORITY)NEWLINE command2 = InsertionCommand(point, hook2, OperationPriority.DEFAULT_PRIORITY)NEWLINE command3 = InsertionCommand(point, hook3, OperationPriority.DEFAULT_PRIORITY)NEWLINE else:NEWLINE # Prioritized commands will be executed in ascending priority orderNEWLINE command1 = InsertionCommand(point, hook1, OperationPriority.SPARSIFICATION_PRIORITY)NEWLINE command2 = InsertionCommand(point, hook2, OperationPriority.QUANTIZATION_PRIORITY)NEWLINE command3 = InsertionCommand(point, hook3, OperationPriority.DEFAULT_PRIORITY)NEWLINENEWLINE self.compressed_model.register_insertion_command(command1)NEWLINE self.compressed_model.register_insertion_command(command2)NEWLINE self.compressed_model.register_insertion_command(command3)NEWLINE self.compressed_model.commit_compression_changes()NEWLINENEWLINE hook_list = [hook1, hook2, hook3]NEWLINENEWLINE if priority_type == "same":NEWLINE order = [0, 1, 2]NEWLINE elif priority_type == "different":NEWLINE order = [2, 0, 1]NEWLINENEWLINE #pylint:disable=protected-accessNEWLINE if insertion_type == InsertionType.OPERATOR_PRE_HOOK:NEWLINE ctx = self.compressed_model.get_tracing_context()NEWLINE 
self.check_order(ctx._pre_hooks[point.ia_op_exec_context], hook_list, order)NEWLINE if insertion_type == InsertionType.OPERATOR_POST_HOOK:NEWLINE ctx = self.compressed_model.get_tracing_context()NEWLINE self.check_order(ctx._post_hooks[point.ia_op_exec_context], hook_list, order)NEWLINENEWLINE if insertion_type == InsertionType.NNCF_MODULE_PRE_OP:NEWLINE module = self.compressed_model.get_module_by_scope(point.ia_op_exec_context.scope_in_model)NEWLINE # Works because Pytorch ModuleDict is orderedNEWLINE self.check_order(list(module.pre_ops.values()), hook_list, order)NEWLINENEWLINE if insertion_type == InsertionType.NNCF_MODULE_POST_OP:NEWLINE module = self.compressed_model.get_module_by_scope(point.ia_op_exec_context.scope_in_model)NEWLINE # Works because Pytorch ModuleDict is orderedNEWLINE self.check_order(list(module.post_ops.values()), hook_list, order)NEWLINENEWLINENEWLINEdef get_two_branch_mock_model_graph() -> nx.DiGraph:NEWLINE mock_node_attrs = get_mock_nncf_node_attrs()NEWLINE mock_graph = nx.DiGraph()NEWLINENEWLINE # (A)NEWLINE # |NEWLINE # (B)NEWLINE # / \NEWLINE # (C) (D)NEWLINE # | |NEWLINE # (E) |NEWLINE # \ /NEWLINE # (F)NEWLINE # |NEWLINE # (G)NEWLINE # |NEWLINE # (H)NEWLINENEWLINE node_keys = ['A', 'B', 'C', 'D', 'E', 'F', 'G', 'H']NEWLINE for node_key in node_keys:NEWLINE mock_graph.add_node(node_key, **mock_node_attrs)NEWLINENEWLINE mock_graph.add_edges_from([('A', 'B'), ('B', 'C'), ('B', 'D'), ('C', 'E'), ('E', 'F'),NEWLINE ('D', 'F'), ('F', 'G'), ('G', 'H')])NEWLINE return mock_graphNEWLINENEWLINENEWLINEMOCK_OPERATOR_NAME = "conv_transpose2d"NEWLINENEWLINENEWLINEdef get_mock_nncf_node_attrs(op_name=None):NEWLINE op_name_to_set = op_name if op_name is not None else MOCK_OPERATOR_NAMENEWLINE return {NEWLINE NNCFGraph.OP_EXEC_CONTEXT_NODE_ATTR: OperationExecutionContext(op_name_to_set,NEWLINE Scope(),NEWLINE 0,NEWLINE [None])NEWLINE }NEWLINENEWLINENEWLINEdef get_mock_model_graph_with_mergeable_pattern() -> nx.DiGraph:NEWLINE mock_graph = 
nx.DiGraph()NEWLINENEWLINE # (A)NEWLINE # |NEWLINE # (conv2d)NEWLINE # |NEWLINE # (batch_norm)NEWLINE # |NEWLINE # (RELU)NEWLINE # |NEWLINE # (B)NEWLINENEWLINE node_keys = ['conv2d', 'batch_norm', VersionAgnosticNames.RELU, 'A', 'B']NEWLINE for node_key in node_keys:NEWLINE mock_graph.add_node(node_key, **get_mock_nncf_node_attrs(op_name=node_key))NEWLINENEWLINE mock_graph.add_edges_from([('A', 'conv2d'), ('conv2d', 'batch_norm'),NEWLINE ('batch_norm', VersionAgnosticNames.RELU),NEWLINE (VersionAgnosticNames.RELU, 'B')])NEWLINE return mock_graphNEWLINENEWLINENEWLINEdef get_mock_model_graph_with_no_mergeable_pattern() -> nx.DiGraph:NEWLINE mock_graph = nx.DiGraph()NEWLINENEWLINE # (A)NEWLINE # |NEWLINE # (conv2d)NEWLINE # |NEWLINE # (C)NEWLINE # |NEWLINE # (batch_norm)NEWLINE # |NEWLINE # (D)NEWLINE # |NEWLINE # (RELU)NEWLINE # |NEWLINE # (B)NEWLINENEWLINE node_keys = ['conv2d', 'batch_norm', VersionAgnosticNames.RELU, 'A', 'B', 'C', 'D']NEWLINE for node_key in node_keys:NEWLINE mock_graph.add_node(node_key, **get_mock_nncf_node_attrs(op_name=node_key))NEWLINENEWLINE mock_graph.add_edges_from([('A', 'conv2d'), ('conv2d', 'C'),NEWLINE ('C', 'batch_norm'),NEWLINE ('batch_norm', 'D'),NEWLINE ('D', VersionAgnosticNames.RELU),NEWLINE (VersionAgnosticNames.RELU, 'B')])NEWLINE return mock_graphNEWLINENEWLINENEWLINEdef get_mock_model_graph_with_broken_output_edge_pattern() -> nx.DiGraph:NEWLINE mock_graph = nx.DiGraph()NEWLINENEWLINE # (A)NEWLINE # |NEWLINE # (conv2d)----\NEWLINE # | |NEWLINE # (batch_norm) |NEWLINE # | |NEWLINE # (RELU) |NEWLINE # | |NEWLINE # (C)--------/NEWLINE # |NEWLINE # (B)NEWLINENEWLINE node_keys = ['conv2d', 'batch_norm', VersionAgnosticNames.RELU, 'A', 'B', 'C']NEWLINE for node_key in node_keys:NEWLINE mock_graph.add_node(node_key, **get_mock_nncf_node_attrs(op_name=node_key))NEWLINENEWLINE mock_graph.add_edges_from([('A', 'conv2d'), ('conv2d', 'batch_norm'),NEWLINE ('conv2d', 'C'),NEWLINE ('batch_norm', VersionAgnosticNames.RELU),NEWLINE 
(VersionAgnosticNames.RELU, 'C'),NEWLINE ('C', 'B')])NEWLINE return mock_graphNEWLINENEWLINENEWLINEMERGE_PATTERN_TEST_CASES = (NEWLINE [get_mock_model_graph_with_mergeable_pattern, "basic_pattern"],NEWLINE [get_mock_model_graph_with_no_mergeable_pattern, "no_pattern"],NEWLINE [get_mock_model_graph_with_broken_output_edge_pattern, "broken_output_edges_pattern"]NEWLINE)NEWLINENEWLINENEWLINEclass TestInsertionPointGraph:NEWLINE def test_insertion_point_setup(self):NEWLINE # TODO: Change testing premises when module pre/post-op hooks and input/output nodesNEWLINE # are correctly handledNEWLINE mock_graph = get_two_branch_mock_model_graph()NEWLINENEWLINE ip_graph = InsertionPointGraph(mock_graph)NEWLINENEWLINE ref_node_len = 3 * len(mock_graph.nodes) # 2 additional nodes per each operator nodeNEWLINE ref_edge_len = 3 * len(mock_graph.edges)NEWLINENEWLINE assert len(ip_graph.nodes) == ref_node_lenNEWLINE assert len(ip_graph.edges) == ref_edge_lenNEWLINENEWLINE for node_key, node in mock_graph.nodes.items():NEWLINE ip_graph_op_node = ip_graph.nodes[node_key]NEWLINE assert ip_graph_op_node[InsertionPointGraph.NODE_TYPE_NODE_ATTR] == InsertionPointGraphNodeType.OPERATORNEWLINE preds = list(ip_graph.predecessors(node_key))NEWLINE succs = list(ip_graph.successors(node_key))NEWLINE assert len(preds) == 1NEWLINE assert len(succs) == 1NEWLINE pre_hook_ip_node_key = preds[0]NEWLINE post_hook_ip_node_key = succs[0]NEWLINE pre_hook_ip_node = ip_graph.nodes[preds[0]]NEWLINE post_hook_ip_node = ip_graph.nodes[succs[0]]NEWLINE pre_hook_ip_node_type = pre_hook_ip_node[InsertionPointGraph.NODE_TYPE_NODE_ATTR]NEWLINE post_hook_ip_node_type = post_hook_ip_node[InsertionPointGraph.NODE_TYPE_NODE_ATTR]NEWLINE assert pre_hook_ip_node_type == InsertionPointGraphNodeType.INSERTION_POINTNEWLINE assert post_hook_ip_node_type == InsertionPointGraphNodeType.INSERTION_POINTNEWLINE ref_associated_ip_node_keys_set = {pre_hook_ip_node_key, post_hook_ip_node_key}NEWLINE assert 
ref_associated_ip_node_keys_set == ip_graph_op_node[NEWLINE InsertionPointGraph.ASSOCIATED_IP_NODE_KEYS_NODE_ATTR]NEWLINE original_neighbours = mock_graph.neighbors(node_key)NEWLINE for neighbour in original_neighbours:NEWLINE # IP node insertion should not disrupt the graph superstructureNEWLINE ip_graph_paths = list(nx.all_simple_paths(ip_graph, node_key, neighbour))NEWLINE for path in ip_graph_paths:NEWLINE path = path[1:-1]NEWLINE for path_node_key in path:NEWLINE node = ip_graph.nodes[path_node_key]NEWLINE node_type = node[InsertionPointGraph.NODE_TYPE_NODE_ATTR]NEWLINE assert node_type == InsertionPointGraphNodeType.INSERTION_POINTNEWLINENEWLINE for node_key, node in ip_graph.nodes.items():NEWLINE preds = list(ip_graph.predecessors(node_key))NEWLINE succs = list(ip_graph.successors(node_key))NEWLINE assert len(preds) != 0 or len(succs) != 0NEWLINENEWLINE for from_node_key, to_node_key in ip_graph.edges.keys():NEWLINE assert from_node_key in ip_graph.nodesNEWLINE assert to_node_key in ip_graph.nodesNEWLINENEWLINE def test_insertion_point_data_in_ip_nodes(self):NEWLINE # TODO: extend for modulesNEWLINE mock_graph = nx.DiGraph()NEWLINE ref_op_exec_context = OperationExecutionContext("baz",NEWLINE Scope.from_str("Test/Scope[foo]/bar"),NEWLINE 0,NEWLINE [None])NEWLINE node_attrs = {NEWLINE NNCFGraph.OP_EXEC_CONTEXT_NODE_ATTR: ref_op_exec_contextNEWLINE }NEWLINENEWLINE node_key = 0NEWLINE mock_graph.add_node(node_key, **node_attrs)NEWLINENEWLINE ip_graph = InsertionPointGraph(mock_graph)NEWLINENEWLINE for node_key in mock_graph.nodes.keys():NEWLINE preds = list(ip_graph.predecessors(node_key))NEWLINE succs = list(ip_graph.successors(node_key))NEWLINE pre_hook_ip_node = ip_graph.nodes[preds[0]]NEWLINE post_hook_ip_node = ip_graph.nodes[succs[0]]NEWLINENEWLINE pre_hook_ip = pre_hook_ip_node[InsertionPointGraph.INSERTION_POINT_DATA_NODE_ATTR]NEWLINE post_hook_ip = post_hook_ip_node[InsertionPointGraph.INSERTION_POINT_DATA_NODE_ATTR]NEWLINE assert 
pre_hook_ip.insertion_type == InsertionType.OPERATOR_PRE_HOOKNEWLINE assert post_hook_ip.insertion_type == InsertionType.OPERATOR_POST_HOOKNEWLINENEWLINE assert pre_hook_ip.ia_op_exec_context == ref_op_exec_context.input_agnosticNEWLINE assert post_hook_ip.ia_op_exec_context == ref_op_exec_context.input_agnosticNEWLINENEWLINE def test_operator_metatype_marking(self):NEWLINE from nncf.dynamic_graph.operator_metatypes import Conv2dMetatype, BatchNormMetatype, RELUMetatype, \NEWLINE MaxPool2dMetatype, \NEWLINE ConvTranspose2dMetatype, DepthwiseConv2dSubtype, AddMetatype, AvgPool2dMetatype, LinearMetatypeNEWLINE ref_scope_vs_metatype_dict = {NEWLINE "/" + MODEL_INPUT_OP_NAME + "_0": NoopMetatype,NEWLINE "ModelForMetatypeTesting/NNCFConv2d[conv_regular]/conv2d_0": Conv2dMetatype,NEWLINE "ModelForMetatypeTesting/BatchNorm2d[bn]/batch_norm_0": BatchNormMetatype,NEWLINE "ModelForMetatypeTesting/RELU_0": RELUMetatype,NEWLINE "ModelForMetatypeTesting/MaxPool2d[max_pool2d]/max_pool2d_0": MaxPool2dMetatype,NEWLINE "ModelForMetatypeTesting/NNCFConvTranspose2d[conv_transpose]/conv_transpose2d_0": ConvTranspose2dMetatype,NEWLINE "ModelForMetatypeTesting/NNCFConv2d[conv_depthwise]/conv2d_0": DepthwiseConv2dSubtype,NEWLINE "ModelForMetatypeTesting/__iadd___0": AddMetatype,NEWLINE "ModelForMetatypeTesting/AdaptiveAvgPool2d[adaptive_avg_pool]/adaptive_avg_pool2d_0": AvgPool2dMetatype,NEWLINE "ModelForMetatypeTesting/NNCFLinear[linear]/linear_0": LinearMetatypeNEWLINE }NEWLINE class ModelForMetatypeTesting(torch.nn.Module):NEWLINE def __init__(self):NEWLINE super().__init__()NEWLINE self.conv_regular = torch.nn.Conv2d(in_channels=3,NEWLINE out_channels=16,NEWLINE kernel_size=3)NEWLINE self.bn = torch.nn.BatchNorm2d(num_features=16)NEWLINE self.max_pool2d = torch.nn.MaxPool2d(kernel_size=2)NEWLINE self.conv_transpose = torch.nn.ConvTranspose2d(in_channels=16,NEWLINE out_channels=8,NEWLINE kernel_size=3)NEWLINE self.conv_depthwise = torch.nn.Conv2d(in_channels=8, out_channels=8,NEWLINE 
kernel_size=5, groups=8)NEWLINE self.adaptive_avg_pool = torch.nn.AdaptiveAvgPool2d(output_size=1)NEWLINE self.linear = torch.nn.Linear(in_features=8, out_features=1)NEWLINENEWLINE def forward(self, input_):NEWLINE x = self.conv_regular(input_)NEWLINE x = self.bn(x)NEWLINE x = torch.nn.functional.relu(x)NEWLINE x.transpose_(2, 3)NEWLINE x = self.max_pool2d(x)NEWLINE x = self.conv_transpose(x)NEWLINE x = self.conv_depthwise(x)NEWLINE x += torch.ones_like(x)NEWLINE x = self.adaptive_avg_pool(x)NEWLINE x = self.linear(x.flatten())NEWLINE return xNEWLINENEWLINE model = ModelForMetatypeTesting()NEWLINE nncf_network = NNCFNetwork(model, [ModelInputInfo([1, 3, 300, 300])])NEWLINE ip_graph = nncf_network.get_insertion_point_graph()NEWLINENEWLINE for node in ip_graph.nodes().values():NEWLINE if node[InsertionPointGraph.NODE_TYPE_NODE_ATTR] == InsertionPointGraphNodeType.OPERATOR:NEWLINE nncf_node_ref = node[InsertionPointGraph.REGULAR_NODE_REF_NODE_ATTR]NEWLINE scope_str = str(nncf_node_ref[NNCFGraph.OP_EXEC_CONTEXT_NODE_ATTR].input_agnostic)NEWLINE assert scope_str in ref_scope_vs_metatype_dictNEWLINE ref_metatype = ref_scope_vs_metatype_dict[scope_str]NEWLINE assert node[InsertionPointGraph.OPERATOR_METATYPE_NODE_ATTR] == ref_metatypeNEWLINENEWLINE @pytest.mark.parametrize(("mock_graph_factory", "dot_file_name"),NEWLINE MERGE_PATTERN_TEST_CASES,NEWLINE ids=[x[1] for x in MERGE_PATTERN_TEST_CASES])NEWLINE def test_get_ip_graph_with_merged_operations(self, mock_graph_factory, dot_file_name):NEWLINE mock_graph = mock_graph_factory()NEWLINE ip_graph = InsertionPointGraph(mock_graph)NEWLINE merged_ip_graph = ip_graph.get_ip_graph_with_merged_hw_optimized_operations()NEWLINENEWLINE data_dir = TEST_ROOT / 'data/reference_graphs/pattern_merging' # type: PathNEWLINENEWLINE path_to_dot_file = data_dir / '{}.dot'.format(dot_file_name)NEWLINENEWLINE # validate .dot file manually!NEWLINE if not path_to_dot_file.exists():NEWLINE if not data_dir.exists():NEWLINE 
data_dir.mkdir(parents=True)NEWLINE nx.drawing.nx_pydot.write_dot(merged_ip_graph, str(path_to_dot_file))NEWLINENEWLINE load_graph = nx.drawing.nx_pydot.read_dot(str(path_to_dot_file))NEWLINENEWLINE for key in load_graph.nodes.keys():NEWLINE key.replace(r'\\n', r'\n') # Somehow pydot mangles the \n characters while writing a .dot fileNEWLINENEWLINE sanitized_loaded_keys = [key.replace('\\n', '\n') for key in load_graph.nodes.keys()]NEWLINE sanitized_loaded_edges = [(u.replace('\\n', '\n'),NEWLINE v.replace('\\n', '\n')) for u, v in nx.DiGraph(load_graph).edges]NEWLINENEWLINE assert Counter(sanitized_loaded_keys) == Counter(list(merged_ip_graph.nodes.keys()))NEWLINE assert Counter(sanitized_loaded_edges) == Counter(list(merged_ip_graph.edges))NEWLINE
"""Script to calculate and save teams data for all seasons on file"""NEWLINENEWLINEimport loggingNEWLINENEWLINEfrom pynba import (NEWLINE seasons_on_file,NEWLINE halfgames_from_file,NEWLINE teams_from_halfgames,NEWLINE save_teams,NEWLINE save_team_plots,NEWLINE use_blackontrans_style,NEWLINE)NEWLINENEWLINENEWLINElogger = logging.getLogger(__name__)NEWLINENEWLINEuse_blackontrans_style()NEWLINENEWLINENEWLINEdef main():NEWLINE """Calculate and save teams data for all seasons on file"""NEWLINE logger.info("Loading seasons on file")NEWLINE season_info = seasons_on_file()NEWLINE for league, year, season_type in zip(NEWLINE season_info["league"], season_info["year"], season_info["season_type"]NEWLINE ):NEWLINE logger.info(NEWLINE f"Loading halfgame data for {league} {year} {season_type} from file"NEWLINE )NEWLINE halfgames = halfgames_from_file(league, year, season_type)NEWLINE logger.info(NEWLINE f"Calculating team statistics for {league} {year} {season_type} from halfgames"NEWLINE )NEWLINE teams = teams_from_halfgames(halfgames)NEWLINE logger.info(f"Saving teams data for {league} {year} {season_type}")NEWLINE save_teams(teams)NEWLINE logger.info("Plotting & saving team ratings & pace")NEWLINE save_team_plots(teams)NEWLINE logger.info("Complete")NEWLINENEWLINENEWLINEif __name__ == "__main__":NEWLINE main()NEWLINE
from abc import ABC
import logging

import ci_sdr
import fast_bss_eval
import torch


from espnet2.enh.loss.criterions.abs_loss import AbsEnhLoss


class TimeDomainLoss(AbsEnhLoss, ABC):
    """Base class for all time-domain Enhancement loss modules."""

    pass


# Smallest positive value representable in the default dtype; used below to
# clamp norms away from zero before taking log10.
EPS = torch.finfo(torch.get_default_dtype()).eps


class CISDRLoss(TimeDomainLoss):
    """CI-SDR loss

    Reference:
        Convolutive Transfer Function Invariant SDR Training
        Criteria for Multi-Channel Reverberant Speech Separation;
        C. Boeddeker et al., 2021;
        https://arxiv.org/abs/2011.15003
    Args:
        ref: (Batch, samples)
        inf: (Batch, samples)
        filter_length (int): a time-invariant filter that allows
            slight distortion via filtering
    Returns:
        loss: (Batch,)
    """

    def __init__(self, filter_length=512, name=None):
        super().__init__()
        self.filter_length = filter_length

        self._name = "ci_sdr_loss" if name is None else name

    @property
    def name(self) -> str:
        return self._name

    def forward(
        self,
        ref: torch.Tensor,
        inf: torch.Tensor,
    ) -> torch.Tensor:
        """Compute the (negative-SDR-style) CI-SDR loss per batch element."""
        assert ref.shape == inf.shape, (ref.shape, inf.shape)

        # Permutation is assumed to be resolved by the caller.
        return ci_sdr.pt.ci_sdr_loss(
            inf, ref, compute_permutation=False, filter_length=self.filter_length
        )


class SNRLoss(TimeDomainLoss):
    """Negative signal-to-noise-ratio loss.

    Args:
        eps (float): lower clamp for signal/noise norms, guarding log10(0)
        name (str): optional display name for this criterion
    """

    def __init__(self, eps=EPS, name=None):
        super().__init__()
        self.eps = float(eps)

        self._name = "snr_loss" if name is None else name

    @property
    def name(self) -> str:
        return self._name

    def forward(
        self,
        ref: torch.Tensor,
        inf: torch.Tensor,
    ) -> torch.Tensor:
        """SNR forward.

        Args:
            ref: (Batch, samples) reference signal
            inf: (Batch, samples) estimated signal
        Returns:
            loss: (Batch,) the negative SNR in dB
        """
        # the return tensor should be shape of (batch,)

        noise = inf - ref

        # 20 * log10(||ref|| / ||noise||), clamped to avoid log of zero.
        snr = 20 * (
            torch.log10(torch.norm(ref, p=2, dim=1).clamp(min=self.eps))
            - torch.log10(torch.norm(noise, p=2, dim=1).clamp(min=self.eps))
        )
        return -snr


class SDRLoss(TimeDomainLoss):
    """SDR loss.

    filter_length: int
        The length of the distortion filter allowed (default: ``512``)
    use_cg_iter:
        If provided, an iterative method is used to solve for the distortion
        filter coefficients instead of direct Gaussian elimination.
        This can speed up the computation of the metrics in case the filters
        are long. Using a value of 10 here has been shown to provide
        good accuracy in most cases and is sufficient when using this
        loss to train neural separation networks.
    clamp_db: float
        clamp the output value in [-clamp_db, clamp_db]
    zero_mean: bool
        When set to True, the mean of all signals is subtracted prior.
    load_diag:
        If provided, this small value is added to the diagonal coefficients of
        the system metrics when solving for the filter coefficients.
        This can help stabilize the metric in the case where some of the reference
        signals may sometimes be zero
    """

    def __init__(
        self,
        filter_length=512,
        use_cg_iter=None,
        clamp_db=None,
        zero_mean=True,
        load_diag=None,
        name=None,
    ):
        super().__init__()

        self.filter_length = filter_length
        self.use_cg_iter = use_cg_iter
        self.clamp_db = clamp_db
        self.zero_mean = zero_mean
        self.load_diag = load_diag

        self._name = "sdr_loss" if name is None else name

    @property
    def name(self) -> str:
        return self._name

    def forward(
        self,
        ref: torch.Tensor,
        est: torch.Tensor,
    ) -> torch.Tensor:
        """SDR forward.

        Args:
            ref: Tensor, (..., n_samples)
                reference signal
            est: Tensor (..., n_samples)
                estimated signal

        Returns:
            loss: (...,)
                the SDR loss (negative sdr)
        """
        # pairwise=False: each estimate is compared only with its own
        # reference (no permutation search here).
        sdr_loss = fast_bss_eval.sdr_loss(
            est=est,
            ref=ref,
            filter_length=self.filter_length,
            use_cg_iter=self.use_cg_iter,
            zero_mean=self.zero_mean,
            clamp_db=self.clamp_db,
            load_diag=self.load_diag,
            pairwise=False,
        )

        return sdr_loss


class SISNRLoss(TimeDomainLoss):
    """SI-SNR (or named SI-SDR) loss

    A more stable SI-SNR loss with clamp from `fast_bss_eval`.

    Attributes:
        clamp_db: float
            clamp the output value in [-clamp_db, clamp_db]
        zero_mean: bool
            When set to True, the mean of all signals is subtracted prior.
        eps: float
            Deprecated. Kept for compatibility.
    """

    def __init__(self, clamp_db=None, zero_mean=True, eps=None, name=None):
        super().__init__()
        self.clamp_db = clamp_db
        self.zero_mean = zero_mean
        # ``eps`` is accepted but ignored so older configs keep loading.
        if eps is not None:
            logging.warning("Eps is deprecated in si_snr loss, set clamp_db instead.")

        self._name = "si_snr_loss" if name is None else name

    @property
    def name(self) -> str:
        return self._name

    def forward(
        self,
        ref: torch.Tensor,
        est: torch.Tensor,
    ) -> torch.Tensor:
        """SI-SNR forward.

        Args:

            ref: Tensor, (..., n_samples)
                reference signal
            est: Tensor (..., n_samples)
                estimated signal

        Returns:
            loss: (...,)
                the SI-SDR loss (negative si-sdr)
        """
        si_snr = fast_bss_eval.si_sdr_loss(
            est=est,
            ref=ref,
            zero_mean=self.zero_mean,
            clamp_db=self.clamp_db,
            pairwise=False,
        )

        return si_snr


class TimeDomainMSE(TimeDomainLoss):
    """Mean squared error computed directly on time-domain waveforms."""

    def __init__(self, name=None):
        super().__init__()
        self._name = "TD_MSE_loss" if name is None else name

    @property
    def name(self) -> str:
        return self._name

    def forward(self, ref, inf) -> torch.Tensor:
        """Time-domain MSE loss forward.

        Args:
            ref: (Batch, T) or (Batch, T, C)
            inf: (Batch, T) or (Batch, T, C)
        Returns:
            loss: (Batch,)
        """
        assert ref.shape == inf.shape, (ref.shape, inf.shape)

        mseloss = (ref - inf).pow(2)
        # Reduce over time (and channel, if present), keeping the batch axis.
        if ref.dim() == 3:
            mseloss = mseloss.mean(dim=[1, 2])
        elif ref.dim() == 2:
            mseloss = mseloss.mean(dim=1)
        else:
            raise ValueError(
                "Invalid input shape: ref={}, inf={}".format(ref.shape, inf.shape)
            )
        return mseloss


class TimeDomainL1(TimeDomainLoss):
    """L1 (mean absolute error) computed directly on time-domain waveforms."""

    def __init__(self, name=None):
        super().__init__()
        self._name = "TD_L1_loss" if name is None else name

    @property
    def name(self) -> str:
        return self._name

    def forward(self, ref, inf) -> torch.Tensor:
        """Time-domain L1 loss forward.

        Args:
            ref: (Batch, T) or (Batch, T, C)
            inf: (Batch, T) or (Batch, T, C)
        Returns:
            loss: (Batch,)
        """
        assert ref.shape == inf.shape, (ref.shape, inf.shape)

        l1loss = abs(ref - inf)
        # Reduce over time (and channel, if present), keeping the batch axis.
        if ref.dim() == 3:
            l1loss = l1loss.mean(dim=[1, 2])
        elif ref.dim() == 2:
            l1loss = l1loss.mean(dim=1)
        else:
            raise ValueError(
                "Invalid input shape: ref={}, inf={}".format(ref.shape, inf.shape)
            )
        return l1loss
import itertoolsNEWLINEimport mathNEWLINEimport stringNEWLINEimport sysNEWLINEfrom bisect import bisect_left as bi_lNEWLINEfrom bisect import bisect_right as bi_rNEWLINEfrom collections import Counter, defaultdict, dequeNEWLINEfrom functools import lru_cache, reduceNEWLINEfrom heapq import heapify, heappop, heappushNEWLINEfrom operator import or_, xorNEWLINENEWLINEsys.setrecursionlimit(10**8)NEWLINEinf = float('inf')NEWLINEMOD = 10**9+7NEWLINE# MOD = 998244353NEWLINENEWLINENEWLINEusing_numpy = 1NEWLINEimport networkx as nxNEWLINEimport numpy as npNEWLINEfrom numba import i8, njitNEWLINEfrom scipy import optimizeNEWLINEfrom scipy.ndimage import distance_transform_cdtNEWLINEfrom scipy.sparse import csr_matrixNEWLINEfrom scipy.sparse.csgraph import (NEWLINE connected_components,NEWLINE csgraph_to_dense,NEWLINE maximum_flow,NEWLINE minimum_spanning_tree,NEWLINE shortest_path,NEWLINE)NEWLINEfrom scipy.spatial import ConvexHullNEWLINEfrom scipy.special import combNEWLINENEWLINENEWLINEclass Algebra:NEWLINE class Modular(int):NEWLINE def __init__(self, n, mod=MOD):NEWLINE self.value = nNEWLINE self.mod = modNEWLINENEWLINE def __str__(self): return f'{self.value}'NEWLINENEWLINE def __add__(self, other):NEWLINE return self.__class__((self.value + other.value) % self.mod)NEWLINE def __sub__(self, x): return self.__class__((self.value - x.value) % self.mod)NEWLINE def __mul__(self, x): return self.__class__((self.value * x.value) % self.mod)NEWLINE def __pow__(self, x): return self.__class__(pow(self.value, x.value, self.mod))NEWLINENEWLINE def __lt__(self, x): return self.value < x.valueNEWLINE def __le__(self, x): return self.value <= x.valueNEWLINE def __eq__(self, x): return self.value == x.valueNEWLINE def __ne__(self, x): return self.value != x.valueNEWLINE def __gt__(self, x): return self.value > x.valueNEWLINE def __ge__(self, x): return self.value >= x.valueNEWLINENEWLINENEWLINENEWLINE class SemiGroup:NEWLINE passNEWLINE class Monoid:NEWLINE passNEWLINE class 
Group:NEWLINE passNEWLINE class SemiRing:NEWLINE passNEWLINE class Ring:NEWLINE passNEWLINENEWLINENEWLINE @staticmethodNEWLINE def identity(n):NEWLINE if using_numpy:NEWLINE return np.identity(n, dtype=np.int64)NEWLINE else:NEWLINE a = [[0]*n for _ in range(n)]NEWLINE for i in range(n): a[i][i] = 1NEWLINE return aNEWLINENEWLINE @staticmethodNEWLINE def dot(a, b):NEWLINE if using_numpy:NEWLINE return np.dot(a, b)NEWLINE else:NEWLINE h, w, l = len(a), len(b[0]), len(b)NEWLINE assert len(a[0]) == lNEWLINE c = [[0]*w for _ in range(h)]NEWLINE for i in range(h):NEWLINE for j in range(w):NEWLINE for k in range(l):NEWLINE c[i][j] += a[i][k]*b[k][j]NEWLINE return cNEWLINENEWLINE @classmethodNEWLINE def matrix_pow(cls, a, n, mod=10**9+7):NEWLINE m = len(a)NEWLINE b = cls.identity(m)NEWLINE while n:NEWLINE if n&1: b = cls.dot(b, a)NEWLINE n >>= 1; a = cls.dot(a, a)NEWLINE if using_numpy:NEWLINE a %= mod; b %= modNEWLINE else:NEWLINE for i in range(m):NEWLINE for j in range(m):NEWLINE a[i][j] %= modNEWLINE b[i][j] %= modNEWLINE return bNEWLINENEWLINE @staticmethodNEWLINE def bitwise_dot(a, b):NEWLINE if using_numpy:NEWLINE return np.bitwise_xor.reduce(a[:,None,:] & b.T[None,:,:], axis=-1)NEWLINE else:NEWLINE h, w, l = len(a), len(b[0]), len(b)NEWLINE assert len(a[0]) == lNEWLINE c = [[0]*w for _ in range(h)]NEWLINE for i in range(h):NEWLINE for j in range(w):NEWLINE for k in range(l):NEWLINE c[i][j] ^= a[i][k]&b[k][j]NEWLINE return cNEWLINENEWLINE @classmethodNEWLINE def bitwise_mat_pow(cls, a, n):NEWLINE if n==0: return np.eye(len(a), dtype=np.uint32)*((1<<32)-1)NEWLINE res = cls.bitwise_mat_pow(a, n//2)NEWLINE res = cls.bitwise_dot(res, res)NEWLINE return cls.bitwise_dot(res, a) if n&1 else resNEWLINENEWLINENEWLINE @staticmethodNEWLINE def cumprod(a, mod):NEWLINE l = len(a); sql = int(np.sqrt(l)+1)NEWLINE a = np.resize(a, sql**2).reshape(sql, sql)NEWLINE for i in range(sql-1): a[:, i+1] *= a[:, i]; a[:, i+1] %= modNEWLINE for i in range(sql-1): a[i+1] *= a[i, -1]; a[i+1] %= 
modNEWLINE return np.ravel(a)[:l]NEWLINENEWLINE @classmethodNEWLINE def generate_fac_ifac(cls, n, p=MOD):NEWLINE if using_numpy:NEWLINE fac = np.arange(n+1); fac[0] = 1; fac = cls.cumprod(fac, p)NEWLINE ifac = np.arange(n+1, 0, -1); ifac[0] = pow(int(fac[-1]), p-2, p)NEWLINE ifac = cls.cumprod(ifac, p)[n::-1]NEWLINE else:NEWLINE fac = [None]*(n+1); fac[0] = 1NEWLINE for i in range(n): fac[i+1] = fac[i]*(i+1)%pNEWLINE ifac = [None]*(n+1); ifac[n] = pow(fac[n], p-2, p)NEWLINE for i in range(n, 0, -1): ifac[i-1] = ifac[i]*i%pNEWLINE return fac, ifacNEWLINENEWLINE class Kitamasa:NEWLINE passNEWLINENEWLINENEWLINEmint = Algebra.ModularNEWLINENEWLINENEWLINEclass NumberTheory:NEWLINE class PrimeNumbers: # pnNEWLINE def __init__(self, n=2*10**6):NEWLINE self.is_prime, self.prime_nums = self.find(n)NEWLINENEWLINE def __call__(self, n): return self.is_prime[n]NEWLINE def __iter__(self): return iter(self.prime_nums)NEWLINE def __getitem__(self, key): return self.prime_nums[key]NEWLINENEWLINE @staticmethodNEWLINE def find(n): # Sieve of eratosthenesNEWLINE if using_numpy:NEWLINE is_prime = np.ones(n+1, dtype=np.bool); is_prime[:2] = 0NEWLINE for i in range(2, int(n**.5)+1):NEWLINE if is_prime[i]: is_prime[i*2::i] = 0NEWLINE prime_nums = np.flatnonzero(is_prime)NEWLINE else:NEWLINE is_prime = [True]*(n+1); is_prime[0] = is_prime[1] = 0NEWLINE for i in range(2, int(n**.5)+1):NEWLINE if not is_prime[i]: continueNEWLINE for j in range(i*2, n+1, i): is_prime[j] = 0NEWLINE prime_nums = [i for i in range(2, n+1) if is_prime[i]]NEWLINE return is_prime, prime_numsNEWLINENEWLINE @lru_cache(maxsize=None)NEWLINE def factorize(self, n):NEWLINE res = defaultdict(int)NEWLINE if n < 2: return resNEWLINE for p in self:NEWLINE if p*p > n: breakNEWLINE while n%p == 0: res[p] += 1; n //= pNEWLINE if n == 1: return resNEWLINE res[n] = 1; return resNEWLINENEWLINE def factorize_factorial(self, n):NEWLINE res = defaultdict(int)NEWLINE for i in range(2, n+1):NEWLINE for p, c in 
self.factorize(i).items(): res[p] += cNEWLINE return resNEWLINENEWLINE @classmethodNEWLINE @lru_cache(maxsize=None)NEWLINE def gcd(cls, a, b): return cls.gcd(b, a%b) if b else abs(a)NEWLINE @classmethodNEWLINE def lcm(cls, a, b): return abs(a // cls.gcd(a, b) * b)NEWLINENEWLINE @staticmethodNEWLINE def find_divisors(n):NEWLINE divisors = []NEWLINE for i in range(1, int(n**.5)+1):NEWLINE if n%i: continueNEWLINE divisors.append(i)NEWLINE j = n // iNEWLINE if j != i: divisors.append(j)NEWLINE return sorted(divisors)NEWLINENEWLINE @staticmethodNEWLINE def base_convert(n, b):NEWLINE if not n: return [0]NEWLINE res = []NEWLINE while n:NEWLINE n, r = divmod(n, b)NEWLINE if r < 0: n += 1; r -= bNEWLINE res.append(r)NEWLINE return resNEWLINENEWLINENEWLINENEWLINEclass Combinatorics:NEWLINE @classmethodNEWLINE @lru_cache(maxsize=None)NEWLINE def choose(cls, n, r, mod=None):NEWLINE if r > n or r < 0: return 0NEWLINE if r == 0: return 1NEWLINE res = cls.choose(n-1,r,mod) + cls.choose(n-1,r-1,mod)NEWLINE if mod: res %= modNEWLINE return resNEWLINENEWLINE class CombinationsMod:NEWLINE def __init__(self, n=2*10**6, mod=MOD):NEWLINE self.__mod = modNEWLINE self.fac, self.ifac = Algebra.generate_fac_ifac(n, mod)NEWLINENEWLINE def __call__(self, n, r): return self.__choose(n, r)NEWLINENEWLINE def __choose(self, n, r):NEWLINE bl = (0<=r) & (r<=n)NEWLINE p = self.__modNEWLINE return bl * self.fac[n] * self.ifac[r] % p * self.ifac[n-r] % pNEWLINENEWLINE def make_nchoose_table(self, n):NEWLINE p = self.__modNEWLINE r = len(self.__fac)-1NEWLINE if using_numpy:NEWLINE n_choose = np.arange(n+1, n-r, -1); n_choose[0] = 1NEWLINE n_choose = Algebra.cumprod(n_choose, p)*self.ifac%pNEWLINE else:NEWLINE n_choose = [None]*(r+1); n_choose[0] = 1NEWLINE for i in range(r): n_choose[i+1] = n_choose[i]*(n-i)%pNEWLINE for i in range(1,r+1): n_choose[i] = n_choose[i]*self.ifac[i]%pNEWLINE return n_chooseNEWLINENEWLINE @classmethodNEWLINE def permutations(cls, a, r=None, i=0):NEWLINE a = list(a); n = 
len(a)NEWLINE if r is None: r = nNEWLINE res = []NEWLINE if r > n or i > r: return resNEWLINE if i == r: return [tuple(a[:r])]NEWLINE for j in range(i, n): a[i],a[j] = a[j],a[i]; res += cls.permutations(a, r, i+1)NEWLINE return resNEWLINENEWLINE @staticmethodNEWLINE def combinations(a, r):NEWLINE a = tuple(a)NEWLINE n = len(a)NEWLINE if r > n: returnNEWLINE indices = list(range(r))NEWLINE yield a[:r]NEWLINE while True:NEWLINE for i in range(r-1, -1, -1):NEWLINE if indices[i] != i+n-r: breakNEWLINE else: returnNEWLINE indices[i] += 1NEWLINE for j in range(i+1, r): indices[j] = indices[j-1]+1NEWLINE yield tuple(a[i] for i in indices)NEWLINENEWLINENEWLINENEWLINEclass DP:NEWLINE @staticmethodNEWLINE def LIS(a):NEWLINE res = [inf] * len(a)NEWLINE for x in a: res[bi_l(res, x)] = xNEWLINE return resNEWLINENEWLINENEWLINEclass String:NEWLINE @staticmethodNEWLINE def z_algorithm(s):NEWLINE n = len(s)NEWLINE a = [0] * n; a[0] = nNEWLINE l = r = -1NEWLINE for i in range(1, n):NEWLINE if r >= i: a[i] = min(a[i-l], r-i)NEWLINE while i + a[i] < n and s[i+a[i]] == s[a[i]]: a[i] += 1NEWLINE if i+a[i] >= r: l, r = i, i+a[i]NEWLINE return aNEWLINENEWLINENEWLINEclass GeometryTopology:NEWLINE class Graph:NEWLINE class __Edge:NEWLINE def __init__(self, weight=1, capacity=1, **args):NEWLINE self.weight = weightNEWLINE self.capacity = capacityNEWLINENEWLINE def __str__(self):NEWLINE return f'weight: {self.weight}, cap: {self.capacity}'NEWLINENEWLINE class __Node:NEWLINE def __init__(self, **args):NEWLINE passNEWLINENEWLINE def __init__(self, n=0):NEWLINE self.__N = nNEWLINE self.nodes = [None] * nNEWLINE self.edges = [{} for _ in range(n)]NEWLINENEWLINE def add_node_info(self, v, **args): self.nodes[v] = self.__Node(**args)NEWLINENEWLINE def add_edge(self, u, v, update=False, **args):NEWLINE if not update and v in self.edges[u]: returnNEWLINE self.edges[u][v] = self.__Edge(**args)NEWLINENEWLINE def get_size(self): return self.__NNEWLINENEWLINE def bfs(self, src=0):NEWLINE n = 
self.__NNEWLINE self.depth = self.lv = lv = [None]*n; lv[src] = 0 # depth in tree, or level in general graph.NEWLINE self.dist = dist = [inf]*n; dist[src] = 0 # dist for only tree.NEWLINE self.parent = par = [None]*n; par[src] = srcNEWLINE q = deque([src])NEWLINE while q:NEWLINE u = q.popleft()NEWLINE for v, e in self.edges[u].items():NEWLINE if e.capacity == 0 or lv[v] is not None: continueNEWLINE lv[v], dist[v], par[v] = lv[u]+1, dist[u]+e.weight, uNEWLINE q.append(v)NEWLINE return distNEWLINENEWLINE def dinic(self, src, sink):NEWLINE def flow_to_sink(u, flow_in):NEWLINE if u == sink: return flow_inNEWLINE flow = 0NEWLINE for v, e in self.edges[u].items():NEWLINE if e.capacity == 0 or self.lv[v] <= self.lv[u]: continueNEWLINE f = flow_to_sink(v, min(flow_in, e.capacity))NEWLINE if not f: continueNEWLINE self.edges[u][v].capacity -= fNEWLINE if u in self.edges[v]: self.edges[v][u].capacity += fNEWLINE else: self.add_edge(v, u, capacity=f)NEWLINE flow_in -= fNEWLINE flow += fNEWLINE return flowNEWLINENEWLINE flow = 0NEWLINE while True:NEWLINE self.bfs(src)NEWLINE if self.lv[sink] is None: return flowNEWLINE flow += flow_to_sink(src, inf)NEWLINENEWLINE def ford_fulkerson(self):NEWLINE passNEWLINENEWLINE def push_relabel(self):NEWLINE passNEWLINENEWLINE def floyd_warshall(self):NEWLINE n = self.__NNEWLINE d = [[inf]*n for _ in range(n)]NEWLINE for u in range(n):NEWLINE d[u][u] = 0NEWLINE for v, e in self.edges[u].items(): d[u][v] = e.weightNEWLINE for w in range(n):NEWLINE for u in range(n):NEWLINE for v in range(n):NEWLINE d[u][v] = min(d[u][v], d[u][w]+d[w][v])NEWLINE return dNEWLINENEWLINE def dijkstra(self, src, paths_cnt=False, mod=None):NEWLINE dist = [inf] * self.__N; dist[src] = 0NEWLINE visited = [False] * self.__NNEWLINE paths = [0] * self.__N; paths[src] = 1NEWLINE q = [(0, src)]NEWLINE while q:NEWLINE d, u = heappop(q)NEWLINE if visited[u]: continueNEWLINE visited[u] = TrueNEWLINE for v, e in self.edges[u].items():NEWLINE dv = d + e.weightNEWLINE if dv > 
dist[v]: continueNEWLINE elif dv == dist[v]:NEWLINE paths[v] += paths[u]NEWLINE if mod: paths[v] %= modNEWLINE continueNEWLINE paths[v], dist[v] = paths[u], dvNEWLINE heappush(q, (dv, v))NEWLINE if paths_cnt: return dist, pathsNEWLINE else: return distNEWLINENEWLINE def astar(self, src, tgt, heuristic_func):NEWLINE cost = [inf] * self.__NNEWLINE q = [(heuristic_func(src, tgt), 0, src)]NEWLINE while q:NEWLINE _, c, u = heappop(q)NEWLINE if u == tgt: return cNEWLINE if cost[u] != inf: continueNEWLINE cost[u] = cNEWLINE for v, e in self.edges[u].items():NEWLINE if cost[v] != inf: continueNEWLINE h = heuristic_func(v, tgt)NEWLINE nc = c + e.weightNEWLINE heappush(q, (h+nc, nc, v))NEWLINE return infNEWLINENEWLINE def bellman_ford(self, src):NEWLINE n = self.__NNEWLINE d = [inf] * n; d[src] = 0NEWLINE for _ in range(n-1):NEWLINE for u in range(n):NEWLINE for v, e in self.edges[u].items(): d[v] = min(d[v], d[u]+e.weight)NEWLINE for u in range(n):NEWLINE for v, e in self.edges[u].items():NEWLINE if d[u]+e.weight < d[v]: raise Exception('found negative cycle.')NEWLINE return dNEWLINENEWLINE def bfs01(self, src=0):NEWLINE d = [inf]*self.__N; d[src] = 0NEWLINE q = deque([src])NEWLINE while q:NEWLINE u = q.popleft()NEWLINE for v, e in self.edges[u].items():NEWLINE dv = d[u] + e.weightNEWLINE if d[v] <= dv: continueNEWLINE d[v] = dvNEWLINE if e.weight: q.append(v)NEWLINE else: q.appendleft(v)NEWLINE return dNEWLINENEWLINENEWLINE def find_ancestors(self): # tree doubling.NEWLINE self.__ancestors = ancestors = [self.parent]NEWLINE for _ in range(max(self.depth).bit_length()):NEWLINE ancestors.append([ancestors[-1][u] for u in ancestors[-1]])NEWLINENEWLINENEWLINE def find_dist(self, u, v):NEWLINE return self.dist[u]+self.dist[v]-2*self.dist[self.__find_lca(u, v)]NEWLINENEWLINENEWLINE def __find_lca(self, u, v):NEWLINE du, dv = self.depth[u], self.depth[v]NEWLINE if du > dv:NEWLINE u, v = v, uNEWLINE du, dv = dv, duNEWLINENEWLINE d = dv - duNEWLINE for i in range(d.bit_length()): # 
up-streamNEWLINE if d>>i&1: v = self.__ancestors[i][v]NEWLINE if v == u: return vNEWLINENEWLINE for i in range(du.bit_length()-1, -1, -1): # find direct child of LCA.NEWLINE nu, nv = self.__ancestors[i][u], self.__ancestors[i][v]NEWLINE if nu == nv: continueNEWLINE u, v = nu, nvNEWLINENEWLINE return self.__ancestors[0][u]NEWLINENEWLINE def init_dsu(self): # disjoint set union (union-find)NEWLINE n = self.__NNEWLINE self.parent = list(range(n))NEWLINE self.rank = [0] * nNEWLINE self.size = [1] * nNEWLINENEWLINE def find(self, u):NEWLINE if self.parent[u] == u: return uNEWLINE self.parent[u] = self.find(self.parent[u])NEWLINE return self.parent[u]NEWLINENEWLINE def unite(self, u, v):NEWLINE u, v = self.find(u), self.find(v)NEWLINE if u == v: returnNEWLINE if self.rank[u] < self.rank[v]: u,v = v,uNEWLINE self.parent[v] = uNEWLINE self.size[u] += self.size[v]NEWLINE self.rank[u] = max(self.rank[u], self.rank[v]+1)NEWLINENEWLINE def same(self, u, v): return self.find(u)==self.find(v)NEWLINENEWLINE def groups(self, empty=True):NEWLINE n = self.__NNEWLINE groups = [[] for _ in range(n)]NEWLINE for u in range(n): groups[self.find(u)].append(u)NEWLINE return groups if empty else [g for g in groups if g]NEWLINENEWLINENEWLINE def scc(self): # strongly connected componentsNEWLINE n = self.__NNEWLINE visited, q, root, r = [False]*n, [], [None]*n, 0NEWLINE gg = self.__class__(n)NEWLINE for u in range(n):NEWLINE for v in self.edges[u]: gg.add_edge(v, u)NEWLINENEWLINE def dfs(u):NEWLINE if visited[u]: returnNEWLINE visited[u] = TrueNEWLINE for v in self.edges[u]: dfs(v)NEWLINE q.append(u)NEWLINENEWLINE def rev_dfs(u, r):NEWLINE if root[u] is not None: returnNEWLINE root[u] = rNEWLINE for v in gg.edges[u]: rev_dfs(v, r)NEWLINENEWLINE for u in range(n): dfs(u)NEWLINE for u in q[::-1]: rev_dfs(u, r); r += 1NEWLINE return rootNEWLINENEWLINENEWLINE def kruskal(self): # minimum spanning treeNEWLINE n = self.__NNEWLINE uf = self.__class__(n); uf.init_dsu()NEWLINE edges = 
sorted([(u,v,e.weight) for u in range(n) for v,e in self.edges[u].items()], key=lambda x: x[2])NEWLINE g = self.__class__(n)NEWLINE d = 0NEWLINE for u, v, w in edges:NEWLINE if uf.same(u,v): continueNEWLINE uf.unite(u, v); g.add_edge(u, v, weight=w); d += wNEWLINE return g, dNEWLINENEWLINE def prim(self, src=0, return_parent=False): # minimum spanning treeNEWLINE n = self.__NNEWLINE g = self.__class__(n)NEWLINE parent, visited, dist = [None]*n, [False]*n, 0NEWLINE q = [(0, (src, src))]NEWLINE while q:NEWLINE d, (w, u) = heappop(q)NEWLINE if visited[u]: continueNEWLINE visited[u], parent[u] = True, w; dist += d; g.add_edge(w,u, weight=d)NEWLINE for v, e in self.edges[u].items():NEWLINE if not visited[v]: heappush(q, (e.weight, (u,v)))NEWLINE if return_parent: return g, dist, parentNEWLINE return g, distNEWLINENEWLINE def boruvka(self): # minimum spanning treeNEWLINE n = self.__NNEWLINE uf = self.__class__(n); uf.init_dsu()NEWLINE g = self.__class__(n)NEWLINE d = 0NEWLINENEWLINE def dfs(u):NEWLINE if visited[u]: return (inf, (None, None))NEWLINE visited[u] = TrueNEWLINE cand = []NEWLINE for v, e in self.edges[u].items():NEWLINE if uf.same(u,v): cand.append(dfs(v)); continueNEWLINE cand.append((e.weight, (u,v)))NEWLINE return sorted(cand)[0]NEWLINENEWLINE while len(set(uf.parent))!=1:NEWLINE edges, visited = [], [False]*nNEWLINE for u in range(n):NEWLINE if visited[u]: continueNEWLINE edges.append(dfs(u))NEWLINE for w, (u, v) in edges:NEWLINE if uf.same(u,v): continueNEWLINE g.add_edge(u,v, weight=w); uf.unite(u,v); d += wNEWLINE for u in range(n): uf.find(u)NEWLINENEWLINE return g, dNEWLINENEWLINE def tsp(self): # traveling salesperson problemNEWLINE passNEWLINENEWLINE class FenwickTree: # BIT (Binary Indexed Tree)NEWLINE def __init__(self, n):NEWLINE self.__N = nNEWLINE self.data = [0]*(n+1)NEWLINENEWLINE def add(self, i, x):NEWLINE while i <= self.__N: self.data[i] += x; i += i&-iNEWLINENEWLINE def __sum(self, i):NEWLINE s = 0NEWLINE while i > 0: s += self.data[i]; 
i -= i&-iNEWLINE return sNEWLINENEWLINE def sum(self, l, r): return self.__sum(r) - self.__sum(l-1)NEWLINENEWLINE @staticmethodNEWLINE def triangle_area(p0, p1, p2, signed=False):NEWLINE x1, y1, x2, y2 = p1[0]-p0[0], p1[1]-p0[1], p2[0]-p0[0], p2[1]-p0[1]NEWLINE return (x1*y2 - x2*y1)/2 if signed else abs(x1*y2 - x2*y1)/2NEWLINENEWLINE @classmethodNEWLINE def intersect(cls, seg1, seg2):NEWLINE (p1, p2), (p3, p4) = seg1, seg2NEWLINE t1 = cls.triangle_area(p1, p2, p3, signed=True)NEWLINE t2 = cls.triangle_area(p1, p2, p4, signed=True)NEWLINE t3 = cls.triangle_area(p3, p4, p1, signed=True)NEWLINE t4 = cls.triangle_area(p3, p4, p2, signed=True)NEWLINE return (t1*t2<0) & (t3*t4<0)NEWLINENEWLINENEWLINEdef cumxor(a): return reduce(xor, a, 0)NEWLINEdef cumor(a): return reduce(or_, a, 0)NEWLINENEWLINEdef bit_count(n):NEWLINE cnt = 0NEWLINE while n: cnt += n&1; n >>= 1NEWLINE return cntNEWLINENEWLINENEWLINEclass AtCoder:NEWLINE class ABC001:NEWLINE @staticmethodNEWLINE def a():NEWLINE h1, h2 = map(int, sys.stdin.read().split()); print(h1-h2)NEWLINENEWLINE @staticmethodNEWLINE def d():NEWLINE def to_minuites(x):NEWLINE q, r = divmod(x, 100)NEWLINE return 60*q + rNEWLINENEWLINE def to_hmform(x):NEWLINE q, r = divmod(x, 60)NEWLINE return 100*q + rNEWLINENEWLINE n = int(sys.stdin.readline().rstrip())NEWLINE term = [0] * 2001NEWLINE for _ in range(n):NEWLINE s, e = map(to_minuites, map(int, sys.stdin.readline().rstrip().split('-')))NEWLINE s = s//5 * 5NEWLINE e = (e+4)//5 * 5NEWLINE term[s] += 1NEWLINE term[e+1] -= 1NEWLINE for i in range(2000):NEWLINE term[i+1] += term[i]NEWLINENEWLINE res = []NEWLINE raining = FalseNEWLINE for i in range(2001):NEWLINE if term[i]:NEWLINE if not raining:NEWLINE s = iNEWLINE raining = TrueNEWLINE elif raining:NEWLINE res.append((s, i-1))NEWLINE raining = FalseNEWLINE for s, e in res:NEWLINE print(f'{to_hmform(s):04}-{to_hmform(e):04}')NEWLINENEWLINENEWLINENEWLINENEWLINE class ABC002:NEWLINE @staticmethodNEWLINE def a():NEWLINE print(max(map(int, 
sys.stdin.readline().split())))NEWLINENEWLINE @staticmethodNEWLINE def b():NEWLINE vowels = set('aeiou')NEWLINE print(''.join([c for c in sys.stdin.readline().rstrip() if c not in vowels]))NEWLINENEWLINE @staticmethodNEWLINE def c():NEWLINE print(GeometryTopology.triangle_area(*map(int, sys.stdin.readline().split())))NEWLINENEWLINE @staticmethodNEWLINE def d():NEWLINE n, m = map(int, sys.stdin.readline().split())NEWLINE edges = set((x-1, y-1) for x, y in zip(*[map(int, sys.stdin.read().split())]*2))NEWLINE print(max(len(s) for i in range(1, 1<<n) for s in [[j for j in range(n) if i>>j&1]] if all((x, y) in edges for x, y in itertools.combinations(s, 2))))NEWLINENEWLINE @staticmethodNEWLINE def d_2():NEWLINE n, m = map(int, sys.stdin.readline().split())NEWLINE relations = [1<<i for i in range(n)]NEWLINE for x, y in zip(*[map(int, sys.stdin.read().split())]*2):NEWLINE relations[x] |= 1<<(y-1); relations[y] |= 1<<(x-1)NEWLINE res = 0NEWLINE for i in range(1<<n):NEWLINE s, cnt = (1<<n)-1, 0NEWLINE for j in range(n):NEWLINE if i>>j & 1: t &= relations[j] | 1<<j; cnt += 1NEWLINE if s&i == i: res = max(res, cnt)NEWLINE print(res)NEWLINENEWLINE class ABC003:NEWLINE @staticmethodNEWLINE def a():NEWLINE print((int(sys.stdin.readline().rstrip())+1)*5000)NEWLINE @staticmethodNEWLINE def b():NEWLINE atcoder = set('atcoder')NEWLINE s, t = sys.stdin.read().split()NEWLINE print(all(s[i]==t[i] or s[i]=='@' and t[i] in atcoder or t[i]=='@' and s[i] in atcoder for i in range(len(s))) and 'You can win' or 'You will lose')NEWLINENEWLINE @staticmethodNEWLINE def c():NEWLINE n, k, *r = map(int, sys.stdin.read().split()); print(reduce(lambda x, y: (x+y)/2, sorted(r)[-k:], 0))NEWLINENEWLINE class ABC004:NEWLINE @staticmethodNEWLINE def a():NEWLINE print(int(sys.stdin.readline().rstrip())*2)NEWLINE @staticmethodNEWLINE def b():NEWLINE for l in [sys.stdin.readline().rstrip() for _ in range(4)][::-1]: print(l[::-1])NEWLINE @staticmethodNEWLINE def c():NEWLINE n = 
int(sys.stdin.readline().rstrip())%30NEWLINE res = list(range(1, 7))NEWLINE for i in range(n): i %= 5; res[i], res[i+1] = res[i+1], res[i]NEWLINE print(*res, sep='')NEWLINENEWLINENEWLINENEWLINE class ABC005:NEWLINE @staticmethodNEWLINE def a():NEWLINE x, y = map(int, sys.stdin.readline().split())NEWLINE print(y//x)NEWLINE @staticmethodNEWLINE def b():NEWLINE n, *t = map(int, sys.stdin.read().split())NEWLINE print(min(t))NEWLINE @staticmethodNEWLINE def c():NEWLINE t = int(sys.stdin.readline().rstrip())NEWLINE n = int(sys.stdin.readline().rstrip())NEWLINE a = [int(x) for x in sys.stdin.readline().split()]NEWLINE m = int(sys.stdin.readline().rstrip())NEWLINE b = [int(x) for x in sys.stdin.readline().split()]NEWLINE i = 0NEWLINE for p in b:NEWLINE if i == n: print('no'); returnNEWLINE while p-a[i] > t:NEWLINE i += 1NEWLINE if i == n: print('no'); returnNEWLINE if a[i] > p: print('no'); returnNEWLINE i += 1NEWLINE print('yes')NEWLINE @staticmethodNEWLINE def d():NEWLINE n = int(sys.stdin.readline().rstrip())NEWLINE d = np.array([sys.stdin.readline().split() for _ in range(n)], np.int64)NEWLINE s = d.cumsum(axis=0).cumsum(axis=1)NEWLINE s = np.pad(s, 1)NEWLINE max_del = np.zeros((n+1, n+1), dtype=np.int64)NEWLINE for y in range(1, n+1):NEWLINE for x in range(1, n+1):NEWLINE max_del[y, x] = np.amax(s[y:n+1, x:n+1] - s[0:n-y+1, x:n+1] - s[y:n+1, 0:n-x+1] + s[0:n-y+1, 0:n-x+1])NEWLINE res = np.arange(n**2+1)[:, None]NEWLINE i = np.arange(1, n+1)NEWLINE res = max_del[i, np.minimum(res//i, n)].max(axis=1)NEWLINE q = int(sys.stdin.readline().rstrip())NEWLINE p = np.array(sys.stdin.read().split(), dtype=np.int64)NEWLINE print(*res[p], sep='\n')NEWLINENEWLINENEWLINE class ABC006:NEWLINE @staticmethodNEWLINE def a():NEWLINE n = sys.stdin.readline().rstrip()NEWLINE if '3' in n: print('YES')NEWLINE elif int(n)%3 == 0: print('YES')NEWLINE else: print('NO')NEWLINENEWLINE @staticmethodNEWLINE def b():NEWLINE mod = 10007NEWLINE a = np.eye(N=3, k=-1, dtype=np.int64); a[0] = 1NEWLINE n 
= int(sys.stdin.readline().rstrip())NEWLINE a = Algebra.matrix_pow(a, n-1, mod)NEWLINE print(a[2][0])NEWLINENEWLINENEWLINE @staticmethodNEWLINE def c():NEWLINE n, m = map(int, sys.stdin.readline().split())NEWLINE cnt = [0, 0, 0]NEWLINE if m == 1: cnt = [-1, -1, -1]NEWLINE else:NEWLINE if m & 1: m -= 3; cnt[1] += 1; n -= 1NEWLINE cnt[2] = m//2 - nNEWLINE cnt[0] = n - cnt[2]NEWLINE if cnt[0]<0 or cnt[1]<0 or cnt[2]<0: print(-1, -1, -1)NEWLINE else: print(*cnt, sep=' ')NEWLINENEWLINE @staticmethodNEWLINE def d():NEWLINE n, *c = map(int, sys.stdin.read().split())NEWLINE lis = [inf]*nNEWLINE for x in c: lis[bi_l(lis, x)] = xNEWLINE print(n - bi_l(lis, inf))NEWLINENEWLINE class ABC007:NEWLINE @staticmethodNEWLINE def a():NEWLINE n = int(sys.stdin.readline().rstrip())NEWLINE print(n-1)NEWLINE @staticmethodNEWLINE def b():NEWLINE s = sys.stdin.readline().rstrip()NEWLINE if s == 'a': print(-1)NEWLINE else: print('a')NEWLINE @staticmethodNEWLINE def c():NEWLINE r, c = map(int, sys.stdin.readline().split())NEWLINE sy, sx = map(int, sys.stdin.readline().split())NEWLINE gy, gx = map(int, sys.stdin.readline().split())NEWLINE sy -= 1; sx -=1; gy -= 1; gx -= 1NEWLINE maze = [sys.stdin.readline().rstrip() for _ in range(r)]NEWLINE queue = deque([(sy, sx)])NEWLINE dist = np.full((r, c), np.inf); dist[sy, sx] = 0NEWLINE while queue:NEWLINE y, x = queue.popleft()NEWLINE for i, j in [(-1, 0), (1, 0), (0, -1), (0, 1)]:NEWLINE i += y; j += xNEWLINE if maze[i][j] == '#' or dist[i, j] != np.inf: continueNEWLINE dist[i, j] = dist[y, x] + 1NEWLINE queue.append((i, j))NEWLINE print(int(dist[gy, gx]))NEWLINE @staticmethodNEWLINE def d():NEWLINE ng = set([4, 9])NEWLINE def count(d):NEWLINE return d if d<=4 else d-1NEWLINE def f(n):NEWLINE x = [int(d) for d in str(n)]NEWLINE flg = TrueNEWLINE dp = 0NEWLINE for d in x:NEWLINE dp = dp*8 + flg*count(d)NEWLINE if d in ng: flg = FalseNEWLINE return n-(dp+flg)NEWLINE a, b = map(int, sys.stdin.readline().split())NEWLINE print(f(b) - 
f(a-1))NEWLINENEWLINE class ABC008:NEWLINE @staticmethodNEWLINE def a():NEWLINE s, t = map(int, sys.stdin.readline().split())NEWLINE print(t-s+1)NEWLINE @staticmethodNEWLINE def b():NEWLINE n, *s = sys.stdin.read().split()NEWLINE res = defaultdict(int)NEWLINE for name in s: res[name] += 1NEWLINE print(sorted(res.items(), key=lambda x: x[1])[-1][0])NEWLINE @staticmethodNEWLINE def c():NEWLINE n, *a = map(int, sys.stdin.read().split())NEWLINE a = np.array(a)NEWLINE c = n - np.count_nonzero(a[:, None]%a, axis=1)NEWLINE print(np.sum((c+1)//2/c))NEWLINENEWLINE @staticmethodNEWLINE def d():NEWLINE w, h, n, *xy = map(int, sys.stdin.read().split())NEWLINE *xy, = zip(*([iter(xy)]*2))NEWLINENEWLINE @lru_cache(maxsize=None)NEWLINE def count(x1, y1, x2, y2):NEWLINE res = 0NEWLINE for x, y in xy:NEWLINE if not (x1 <= x <= x2 and y1 <= y <= y2): continueNEWLINE cnt = (x2-x1) + (y2-y1) + 1NEWLINE cnt += count(x1, y1, x-1, y-1)NEWLINE cnt += count(x1, y+1, x-1, y2)NEWLINE cnt += count(x+1, y1, x2, y-1)NEWLINE cnt += count(x+1, y+1, x2, y2)NEWLINE res = max(res, cnt)NEWLINE return resNEWLINE print(count(1, 1, w, h))NEWLINENEWLINENEWLINE class ABC009:NEWLINE @staticmethodNEWLINE def a():NEWLINE n = int(sys.stdin.readline().rstrip())NEWLINE print((n+1)//2)NEWLINE @staticmethodNEWLINE def b():NEWLINE n, *a = map(int, sys.stdin.read().split())NEWLINE print(sorted(set(a))[-2])NEWLINE @staticmethodNEWLINE def c():NEWLINE n, k = map(int, sys.stdin.readline().split())NEWLINE s = list(sys.stdin.readline().rstrip())NEWLINE cost = [1]*nNEWLINE r = kNEWLINE for i in range(n-1):NEWLINE q = []NEWLINE for j in range(i+1, n):NEWLINE if s[j] < s[i] and cost[i]+cost[j] <= r:NEWLINE heappush(q, (s[j], cost[i]+cost[j], -j))NEWLINE if not q: continueNEWLINE _, c, j = heappop(q); j = -jNEWLINE s[i], s[j] = s[j], s[i]NEWLINE r -= cNEWLINE cost[i] = cost[j] = 0NEWLINE print(''.join(s))NEWLINENEWLINE @staticmethodNEWLINE def d():NEWLINE k, m = map(int, sys.stdin.readline().split())NEWLINE a = 
np.array([int(x) for x in sys.stdin.readline().split()])NEWLINE c = np.array([int(x) for x in sys.stdin.readline().split()])NEWLINE mask = (1<<32) - 1NEWLINE d = np.eye(k, k, -1, dtype=np.uint32) * mask; d[0] = cNEWLINE if m <= k: print(a[m-1]); returnNEWLINE # print(Algebra.bitwise_mat_pow(d, m-k))NEWLINE # print(Algebra.bitwise_dot(Algebra.bitwise_mat_pow(d, m-k), a[::-1].reshape(-1, 1))[0].item())NEWLINE print(Algebra.bitwise_dot(Algebra.bitwise_mat_pow(d, m-k), a[::-1].reshape(-1, 1))[0][0])NEWLINENEWLINENEWLINENEWLINE class ABC010:NEWLINE @staticmethodNEWLINE def a():NEWLINE print(sys.stdin.readline().rstrip()+'pp')NEWLINE @staticmethodNEWLINE def b():NEWLINE n, *a = map(int, sys.stdin.read().split())NEWLINE tot = 0NEWLINE for x in a:NEWLINE c = 0NEWLINE while x%2==0 or x%3==2:NEWLINE x -= 1NEWLINE c += 1NEWLINE tot += cNEWLINE print(tot)NEWLINE @staticmethodNEWLINE def c():NEWLINE sx, sy, gx, gy, t, v, n, *xy = map(int, sys.stdin.read().split())NEWLINE x, y = np.array(xy).reshape(-1, 2).TNEWLINE def dist(x1, y1, x2, y2):NEWLINE return np.sqrt((x2-x1)**2 + (y2-y1)**2)NEWLINE ans = 'YES' if (dist(sx, sy, x, y)+dist(x, y, gx, gy) <= v*t).any() else 'NO'NEWLINE print(ans)NEWLINENEWLINE @staticmethodNEWLINE def d():NEWLINE n, g, e = map(int, sys.stdin.readline().split())NEWLINE p = [int(x) for x in sys.stdin.readline().split()]NEWLINE x, y = [], []NEWLINE for _ in range(e):NEWLINE a, b = map(int, sys.stdin.readline().split())NEWLINE x.append(a); y.append(b)NEWLINE x.append(b); y.append(a)NEWLINE for a in p:NEWLINE x.append(a)NEWLINE y.append(n)NEWLINE if not x:NEWLINE print(0)NEWLINE returnNEWLINE c = [1] * len(x)NEWLINE min_cut = maximum_flow(csr_matrix((c, (x, y)), (n+1, n+1)), source=0, sink=n).flow_valueNEWLINE print(min_cut)NEWLINENEWLINE @staticmethodNEWLINE def d_2():NEWLINE n, g, e = map(int, sys.stdin.readline().split())NEWLINE graph = nx.DiGraph()NEWLINE graph.add_nodes_from(range(n+1))NEWLINE for p in [int(x) for x in 
sys.stdin.readline().split()]:NEWLINE graph.add_edge(p, n, capacity=1)NEWLINE for _ in range(e):NEWLINE a, b = map(int, sys.stdin.readline().split())NEWLINE graph.add_edge(a, b, capacity=1)NEWLINE graph.add_edge(b, a, capacity=1)NEWLINE print(nx.minimum_cut_value(graph, 0, n))NEWLINENEWLINE @staticmethodNEWLINE def d_3():NEWLINE n, q, m = map(int, sys.stdin.readline().split())NEWLINE g = GeometryTopology.Graph(n+1)NEWLINE # for i in range(n+1): g.add_node(i)NEWLINE for p in [int(x) for x in sys.stdin.readline().split()]:NEWLINE g.add_edge(p, n, capacity=1)NEWLINE for a, b in zip(*[map(int, sys.stdin.read().split())]*2):NEWLINE g.add_edge(a, b, capacity=1)NEWLINE g.add_edge(b, a, capacity=1)NEWLINE print(g.dinic(0, n))NEWLINENEWLINENEWLINENEWLINE class ABC011:NEWLINE @staticmethodNEWLINE def a():NEWLINE n = int(sys.stdin.readline().rstrip())NEWLINE print(n%12+1)NEWLINE @staticmethodNEWLINE def b():NEWLINE s = sys.stdin.readline().rstrip()NEWLINE print(s[0].upper()+s[1:].lower())NEWLINE @staticmethodNEWLINE def c():NEWLINE n, *ng = map(int, sys.stdin.read().split())NEWLINE ng = set(ng)NEWLINE if n in ng: print('NO')NEWLINE else:NEWLINE r = 100NEWLINE while n > 0:NEWLINE if r == 0: print('NO'); returnNEWLINE for i in range(3, 0, -1):NEWLINE if (n-i) in ng: continueNEWLINE n -= iNEWLINE r -= 1NEWLINE breakNEWLINE else: print('NO'); returnNEWLINE print('YES')NEWLINENEWLINE @staticmethodNEWLINE def d():NEWLINE n, d, x, y = map(int, sys.stdin.read().split())NEWLINE x, y = abs(x), abs(y)NEWLINE if x%d or y%d: print(0); returnNEWLINE x, y = x//d, y//dNEWLINE r = n - (x+y)NEWLINE if r < 0 or r&1: print(0); returnNEWLINENEWLINE res = 0NEWLINE half_p = pow(1/2, n)NEWLINE for d in range(r//2 + 1): # 0 <= d <= r//2, southNEWLINE south, north = d, y+dNEWLINE west = (r - 2*d)//2NEWLINE res += half_p * comb(n, south, exact=True) * comb(n-south, north, exact=True)\NEWLINE * comb(n-south-north, west, exact=True) * half_pNEWLINE print(res)NEWLINENEWLINENEWLINE class ABC012:NEWLINE 
@staticmethodNEWLINE def a():NEWLINE a, b = map(int, sys.stdin.readline().split())NEWLINE print(b, a)NEWLINENEWLINE @staticmethodNEWLINE def b():NEWLINE n = int(sys.stdin.readline().rstrip())NEWLINE h, n = divmod(n, 3600)NEWLINE m, s = divmod(n, 60)NEWLINE print(f'{h:02}:{m:02}:{s:02}')NEWLINENEWLINE @staticmethodNEWLINE def c():NEWLINE n = 2025 - int(sys.stdin.readline().rstrip())NEWLINE res = []NEWLINE for i in range(1, 10):NEWLINE if n%i != 0 or n//i > 9: continueNEWLINE res.append(f'{i} x {n//i}')NEWLINE print(*sorted(res), sep='\n')NEWLINENEWLINE @staticmethodNEWLINE def d():NEWLINE n, m, *abt = map(int, sys.stdin.read().split())NEWLINE a, b, t = np.array(abt).reshape(m, 3).TNEWLINE res = shortest_path(csr_matrix((t, (a-1, b-1)), (n, n)), method='FW', directed=False)NEWLINE print(res.max(axis=-1).min().astype(np.int64))NEWLINENEWLINE @staticmethodNEWLINE def d_2():NEWLINE n, m, *abt = map(int, sys.stdin.read().split())NEWLINE g = GeometryTopology.Graph(n)NEWLINE for a, b, t in zip(*[iter(abt)]*3):NEWLINE a -= 1; b -= 1NEWLINE g.add_edge(a, b, weight=t)NEWLINE g.add_edge(b, a, weight=t)NEWLINENEWLINE print(min(max(d) for d in g.floyd_warshall()))NEWLINENEWLINENEWLINENEWLINE class ABC013:NEWLINE @staticmethodNEWLINE def a():NEWLINE print(ord(sys.stdin.readline().rstrip()) - ord('A') + 1)NEWLINENEWLINE @staticmethodNEWLINE def b():NEWLINE a, b = map(int, sys.stdin.read().split())NEWLINE d = abs(a - b)NEWLINE print(min(d, 10-d))NEWLINENEWLINE @staticmethodNEWLINE def c():NEWLINE n, h, a, b, c, d, e = map(int, sys.stdin.read().split())NEWLINE y = np.arange(n+1)NEWLINE x = (n*e-h-(d+e)*y)//(b+e) + 1NEWLINE np.maximum(x, 0, out=x)NEWLINE np.minimum(x, n-y, out=x)NEWLINE print(np.amin(a*x + c*y))NEWLINENEWLINE @staticmethodNEWLINE def d():NEWLINE n, m, d, *a = map(int, sys.stdin.read().split())NEWLINE res = list(range(n))NEWLINE def swap(i, j): res[i], res[j] = res[j], res[i]NEWLINE for i in a[::-1]: swap(i-1, i)NEWLINE res = np.array(res)NEWLINE def binary_method(a, 
p):NEWLINE b = np.arange(n)NEWLINE while p:NEWLINE if p&1: b = a[b]NEWLINE p >>= 1NEWLINE a = a[a]NEWLINE return bNEWLINE print(*(binary_method(res, d)+1), sep='\n')NEWLINENEWLINE class ABC014:NEWLINE @staticmethodNEWLINE def a():NEWLINE a, b = map(int, sys.stdin.read().split())NEWLINE print((a+b-1)//b * b - a)NEWLINENEWLINE @staticmethodNEWLINE def b():NEWLINE n, x, *a = map(int, sys.stdin.read().split())NEWLINE print(sum(a[i] for i in range(n) if x>>i&1))NEWLINENEWLINE @staticmethodNEWLINE def c():NEWLINE n, *ab = map(int, sys.stdin.read().split())NEWLINE a, b = np.array(ab).reshape(n, 2).TNEWLINE res = np.zeros(10**6+2, dtype=np.int64)NEWLINE np.add.at(res, a, 1)NEWLINE np.subtract.at(res, b+1, 1)NEWLINE np.cumsum(res, out=res)NEWLINE print(res.max())NEWLINENEWLINE @staticmethodNEWLINE def d():NEWLINE n = int(sys.stdin.readline().rstrip())NEWLINE g = GeometryTopology.Graph(n)NEWLINE for _ in range(n-1):NEWLINE x, y = map(int, sys.stdin.readline().split())NEWLINE x -= 1; y -= 1NEWLINE g.add_edge(x, y, weight=1)NEWLINE g.add_edge(y, x, weight=1)NEWLINENEWLINE g.bfs(0)NEWLINE g.find_ancestors()NEWLINENEWLINE q, *ab = map(int, sys.stdin.read().split())NEWLINE for a, b in zip(*[iter(ab)]*2):NEWLINE a -= 1; b -= 1NEWLINE print(g.find_dist(a, b) + 1)NEWLINENEWLINE class ABC015:NEWLINE @staticmethodNEWLINE def a():NEWLINE a, b = sys.stdin.read().split()NEWLINE print(a if len(a) > len(b) else b)NEWLINENEWLINE @staticmethodNEWLINE def b():NEWLINE n, *a = map(int, sys.stdin.read().split())NEWLINE a = np.array(a)NEWLINE print(np.ceil(a[np.nonzero(a)[0]].sum() / np.count_nonzero(a)).astype(np.int8))NEWLINENEWLINE @staticmethodNEWLINE def c():NEWLINE n, k, *t = map(int, sys.stdin.read().split())NEWLINE t = np.array(t).reshape(n, k)NEWLINE x = np.zeros((1, 1), dtype=np.int8)NEWLINE for i in range(n):NEWLINE x = x.reshape(-1, 1) ^ t[i]NEWLINE print('Found' if np.count_nonzero(x==0) > 0 else 'Nothing')NEWLINENEWLINE @staticmethodNEWLINE def d():NEWLINE w, n, k, *ab = map(int, 
sys.stdin.read().split())NEWLINE dp = np.zeros((k+1, w+1), dtype=np.int32)NEWLINE for a, b in zip(*[iter(ab)]*2): np.maximum(dp[1:,a:], dp[:-1,:-a]+b, out=dp[1:,a:])NEWLINE print(dp[k][w])NEWLINENEWLINENEWLINE class ABC016:NEWLINE @staticmethodNEWLINE def a():NEWLINE m, d = map(int, sys.stdin.readline().split())NEWLINE print('YES' if m%d == 0 else 'NO')NEWLINENEWLINE @staticmethodNEWLINE def b():NEWLINE a, b, c = map(int, sys.stdin.readline().split())NEWLINE f1, f2 = a+b==c, a-b==cNEWLINE if f1 & f2: print('?')NEWLINE elif f1 & (~f2): print('+')NEWLINE elif (~f1) & f2: print('-')NEWLINE else: print('!')NEWLINENEWLINE @staticmethodNEWLINE def c():NEWLINE n, _, *ab = map(int, sys.stdin.read().split())NEWLINE f = [0] * nNEWLINE for a, b in zip(*[iter(ab)]*2):NEWLINE a -= 1; b -= 1NEWLINE f[a] |= 1<<bNEWLINE f[b] |= 1<<aNEWLINE res = [bit_count(cumor(f[j] for j in range(n) if f[i]>>j&1) & ~(f[i] | 1<<i)) for i in range(n)]NEWLINE print(*res, sep='\n')NEWLINENEWLINE @staticmethodNEWLINE def d():NEWLINE sx, sy, gx, gy = map(int, sys.stdin.readline().split())NEWLINE seg1 = ((sx, sy), (gx, gy))NEWLINE n = int(sys.stdin.readline().rstrip())NEWLINE p1 = np.array(sys.stdin.read().split(), dtype=np.int64).reshape(n, 2).TNEWLINE p2 = np.hstack((p1[:, 1:], p1[:, :1]))NEWLINE seg2 = (p1, p2)NEWLINE print(np.count_nonzero(GeometryTopology.intersect(seg1, seg2))//2 + 1)NEWLINENEWLINE class ABC017:NEWLINE @staticmethodNEWLINE def a():NEWLINE s, e = np.array(sys.stdin.read().split(), dtype=np.int16).reshape(3, 2).TNEWLINE print((s // 10 * e).sum())NEWLINENEWLINE @staticmethodNEWLINE def b():NEWLINE choku_tail = set('ch, o, k, u'.split(', '))NEWLINE def is_choku(s):NEWLINE if s == '': return TrueNEWLINE if len(s)>=1 and (s[-1] in choku_tail) and is_choku(s[:-1]): return TrueNEWLINE if len(s)>=2 and (s[-2:] in choku_tail) and is_choku(s[:-2]): return TrueNEWLINE return FalseNEWLINE print('YES' if is_choku(sys.stdin.readline().rstrip()) else 'NO')NEWLINENEWLINE @staticmethodNEWLINE def 
c():NEWLINE n, m, *lrs = map(int, sys.stdin.read().split())NEWLINE l, r, s = np.array(lrs).reshape(n, 3).TNEWLINE score = np.zeros((m+1, ), dtype=np.int32)NEWLINE np.add.at(score, l-1, s)NEWLINE np.subtract.at(score, r, s)NEWLINE np.cumsum(score, out=score)NEWLINE print(s.sum() - score[:m].min())NEWLINENEWLINE @staticmethodNEWLINE def d():NEWLINE n, m, *f = map(int, sys.stdin.read().split())NEWLINE prev = [0] * (n+1)NEWLINE tmp = defaultdict(int)NEWLINE for i in range(n):NEWLINE prev[i+1] = tmp[f[i]]NEWLINE tmp[f[i]] = i+1NEWLINENEWLINE dp = [0] * (n+1); dp[0] = 1NEWLINE l, s = 0, dp[0]NEWLINE for i in range(1, n+1):NEWLINE while l < prev[i]:NEWLINE s = (s - dp[l]) % MODNEWLINE l += 1NEWLINE dp[i] = sNEWLINE s = (s + dp[i]) % MODNEWLINE print(dp[n])NEWLINENEWLINE class ABC018:NEWLINE @staticmethodNEWLINE def a():NEWLINE *a, = map(int, sys.stdin.read().split())NEWLINE a = sorted(enumerate(a), key=lambda x: -x[1])NEWLINE res = [None] * 3NEWLINE for i in range(3):NEWLINE res[a[i][0]] = i+1NEWLINE print(*res, sep='\n')NEWLINENEWLINE @staticmethodNEWLINE def b():NEWLINE s = sys.stdin.readline().rstrip()NEWLINE n, *lr = map(int, sys.stdin.read().split())NEWLINE for l, r in zip(*[iter(lr)]*2):NEWLINE l -= 1; r -= 1NEWLINE s = s[:l] + s[l:r+1][::-1] + s[r+1:]NEWLINE print(s)NEWLINENEWLINE @staticmethodNEWLINE def c():NEWLINE r, c, k = map(int, sys.stdin.readline().split())NEWLINE s = np.array([list(s) for s in sys.stdin.read().split()])NEWLINE s = np.pad(s, 1, constant_values='x')NEWLINENEWLINE a = np.zeros_like(s, dtype=np.float64)NEWLINE a[s=='o'] = np.infNEWLINE for i in range(1, r+1): np.minimum(a[i-1,:]+1, a[i,:], out=a[i,:])NEWLINE for i in range(r, 0, -1): np.minimum(a[i+1,:]+1, a[i,:], out=a[i,:])NEWLINE for j in range(1, c+1): np.minimum(a[:,j-1]+1, a[:,j], out=a[:,j])NEWLINE for j in range(c, 0, -1): np.minimum(a[:,j+1]+1, a[:,j], out=a[:,j])NEWLINE print(np.count_nonzero(a>=k))NEWLINENEWLINE @staticmethodNEWLINE def c_2():NEWLINE r, c, k = map(int, 
sys.stdin.readline().split())NEWLINE s = np.array([list(s) for s in sys.stdin.read().split()])NEWLINE s = np.pad(s, 1, constant_values='x')NEWLINE a = (s=='o').astype(np.int16)NEWLINE a = distance_transform_cdt(a, metric='taxicab')NEWLINE print(np.count_nonzero(a>=k))NEWLINENEWLINE @staticmethodNEWLINE def d():NEWLINE n, m, p, q, r, *xyz = map(int, sys.stdin.read().split())NEWLINE x, y, z = np.array(xyz).reshape(r, 3).TNEWLINE h = np.zeros((n, m), dtype=np.int32); h[x-1, y-1] = zNEWLINE g = np.array([*itertools.combinations(range(n), p)])NEWLINE print(np.sort(h[g].sum(axis=1), axis=1)[:,-q:].sum(axis=1).max())NEWLINENEWLINENEWLINE class ABC019:NEWLINE @staticmethodNEWLINE def a():NEWLINE *a, = map(int, sys.stdin.readline().split())NEWLINE print(sorted(a)[1])NEWLINENEWLINE @staticmethodNEWLINE def b():NEWLINE s = sys.stdin.readline().rstrip() + '$'NEWLINE cnt = 0NEWLINE prev = '$'NEWLINE t = ''NEWLINE for c in s:NEWLINE if c == prev: cnt += 1; continueNEWLINE t += prev+str(cnt)NEWLINE prev = c; cnt = 1NEWLINE print(t[2:])NEWLINENEWLINE @staticmethodNEWLINE def c():NEWLINE n, *a = map(int, sys.stdin.read().split())NEWLINE res = set()NEWLINE for x in a:NEWLINE while not x&1:NEWLINE x >>= 1NEWLINE res.add(x)NEWLINE print(len(res))NEWLINENEWLINE @staticmethodNEWLINE def d():NEWLINE def inquire(u, v):NEWLINE print(f'? {u} {v}'.format(u, v), flush=True)NEWLINE return int(sys.stdin.readline().rstrip())NEWLINENEWLINE n = int(sys.stdin.readline().rstrip())NEWLINE u = sorted([(inquire(1, v), v) for v in range(2, n+1)])[-1][1]NEWLINE d = max((inquire(u, v)) for v in range(1, n+1) if u!=v)NEWLINE print(f'! 
{d}')NEWLINENEWLINE class ABC020:NEWLINE @staticmethodNEWLINE def a():NEWLINE print('ABC' if int(sys.stdin.readline().rstrip())==1 else 'chokudai')NEWLINENEWLINE @staticmethodNEWLINE def b():NEWLINE a, b = sys.stdin.readline().split()NEWLINE print(int(a+b) * 2)NEWLINENEWLINE @staticmethodNEWLINE def c():NEWLINE h, w, t = map(int, sys.stdin.readline().split())NEWLINE s = [list(s) for s in sys.stdin.read().split()]NEWLINE for i in range(h):NEWLINE for j in range(w):NEWLINE if s[i][j] == 'S': sy, sx = i, jNEWLINE if s[i][j] == 'G': gy, gx = i, jNEWLINE s[sy][sx] = s[gy][gx] = '.'NEWLINE source, target = sy*w+sx, gy*w+gxNEWLINENEWLINE def heuristic_function(u, v=target):NEWLINE uy, ux = divmod(u, w)NEWLINE vy, vx = divmod(v, w)NEWLINE return abs(vy-uy) + abs(ux-vx)NEWLINENEWLINE def min_time(x):NEWLINE g = GeometryTopology.Graph(h*w)NEWLINE # g = nx.DiGraph()NEWLINENEWLINE for i in range(h):NEWLINE for j in range(w):NEWLINE u = i*w + jNEWLINE if i > 0: g.add_edge(u, (i-1)*w+j, weight=(1 if s[i-1][j]=='.' else x))NEWLINE if i < h-1: g.add_edge(u, (i+1)*w+j, weight=(1 if s[i+1][j]=='.' else x))NEWLINE if j > 0: g.add_edge(u, i*w+j-1, weight=(1 if s[i][j-1]=='.' else x))NEWLINE if j < w-1: g.add_edge(u, i*w+j+1, weight=(1 if s[i][j+1]=='.' 
else x))NEWLINENEWLINE return g.dijkstra(source)[target]NEWLINE return g.astar(source, target, heuristic_function)NEWLINE # return nx.dijkstra_path_length(g, source, target)NEWLINE # return nx.astar_path_length(g, source, target, heuristic_function)NEWLINENEWLINE def binary_search():NEWLINE lo, hi = 1, t+1NEWLINE while lo+1 < hi:NEWLINE x = (lo+hi)//2NEWLINE if min_time(x) > t:NEWLINE hi = xNEWLINE else:NEWLINE lo = xNEWLINE return loNEWLINENEWLINE print(binary_search())NEWLINENEWLINE @staticmethodNEWLINE def d():NEWLINE n, k = map(int, sys.stdin.readline().split())NEWLINE div = sorted(NumberTheory.find_divisors(k))NEWLINE l = len(div)NEWLINE s = [0] * lNEWLINE for i, d in enumerate(div): s[i] = (1+n//d)*(n//d)//2 * d % MODNEWLINE for i in range(l-1, -1, -1):NEWLINE for j in range(i+1, l):NEWLINE if div[j]%div[i]: continueNEWLINE s[i] = (s[i]-s[j])%MODNEWLINENEWLINE print(sum(s[i]*k//div[i]%MOD for i in range(l))%MOD) # ans is LCM.NEWLINENEWLINE class ABC021:NEWLINE @staticmethodNEWLINE def a():NEWLINE n = int(sys.stdin.readline().rstrip())NEWLINE s = [1<<i for i in range(5) if n>>i&1]NEWLINE print(len(s), *s, sep='\n')NEWLINENEWLINE @staticmethodNEWLINE def b():NEWLINE n, a, b, k, *p = map(int, sys.stdin.read().split())NEWLINE print('YES' if len(set(p)|set([a, b])) == k+2 else 'NO')NEWLINENEWLINE @staticmethodNEWLINE def c():NEWLINE n, a, b, m, *xy = map(int, sys.stdin.read().split())NEWLINE x, y = np.array(xy).reshape(m, 2).T - 1NEWLINE a -= 1; b -= 1NEWLINE g = csgraph_to_dense(csr_matrix((np.ones(m), (x, y)), (n, n), dtype=np.int8))NEWLINE g = np.logical_or(g, g.T)NEWLINE paths = np.zeros(n, dtype=np.int64).reshape(-1, 1)NEWLINE paths[a, 0] = 1NEWLINE while not paths[b, 0]:NEWLINE paths = np.dot(g, paths) % MODNEWLINE print(paths[b, 0])NEWLINENEWLINE @staticmethodNEWLINE def c_2():NEWLINE n, a, b, m, *xy = map(int, sys.stdin.read().split())NEWLINE a -= 1; b -= 1NEWLINE g = GeometryTopology.Graph()NEWLINENEWLINE for x, y in zip(*[iter(xy)]*2):NEWLINE x -= 1; y 
-= 1NEWLINE g.add_edge(x, y, weight=1)NEWLINE g.add_edge(y, x, weight=1)NEWLINENEWLINE dist, paths = g.dijkstra(a, paths_cnt=True, mod=MOD)NEWLINE print(paths[b])NEWLINENEWLINE @staticmethodNEWLINE def d():NEWLINE n, k = map(int, sys.stdin.read().split())NEWLINE cn = Combinatorics.CombinationsMod()NEWLINE print(cn(n+k-1, k))NEWLINENEWLINENEWLINE class ABC022:NEWLINE @staticmethodNEWLINE def a():NEWLINE n, s, t, *a = map(int, sys.stdin.read().split())NEWLINE a = np.array(a)NEWLINE np.cumsum(a, out=a)NEWLINE print(((s<=a) & (a<=t)).sum())NEWLINENEWLINE @staticmethodNEWLINE def b():NEWLINE n, *a = map(int, sys.stdin.read().split())NEWLINE c = Counter(a)NEWLINE print(sum(c.values())-len(c))NEWLINENEWLINE @staticmethodNEWLINE def c():NEWLINE n, m, *uvl = map(int, sys.stdin.read().split())NEWLINE u, v, l = np.array(uvl).reshape(m, 3).TNEWLINE u -= 1; v -= 1NEWLINE g = csgraph_to_dense(csr_matrix((l, (u,v)), (n,n)))NEWLINE g += g.TNEWLINE g[g==0] = np.infNEWLINE dist0 = g[0].copy()NEWLINE g[0] = 0; g[:, 0] = 0NEWLINE dist = shortest_path(g, method='FW', directed=False)NEWLINE u, v = np.array([*itertools.combinations(range(1,n), 2)]).TNEWLINE res = (dist0[u]+dist[u,v]+dist0[v]).min()NEWLINE print(-1 if res==np.inf else int(res))NEWLINENEWLINE @staticmethodNEWLINE def d():NEWLINE n, *ab = map(int, sys.stdin.read().split())NEWLINE c = np.array(ab).reshape(2,n,2)NEWLINE g = c.mean(axis=1)NEWLINE d = np.sqrt(((c-g[:,None,:])**2).sum(axis=-1)).sum(axis=1)NEWLINE print(d[1]/d[0])NEWLINENEWLINENEWLINE class ABC023:NEWLINE @staticmethodNEWLINE def a():NEWLINE print(sum(divmod(int(sys.stdin.readline().rstrip()), 10)))NEWLINENEWLINE @staticmethodNEWLINE def b():NEWLINE n, s = sys.stdin.read().split()NEWLINE n = int(n)NEWLINE t = 'b'NEWLINE for i in range(n//2):NEWLINE if i%3==0: t = 'a'+t+'c'NEWLINE elif i%3==1: t = 'c'+t+'a'NEWLINE else: t = 'b'+t+'b'NEWLINE print(n//2 if t==s else -1)NEWLINENEWLINE @staticmethodNEWLINE def b_2():NEWLINE n, s = sys.stdin.read().split()NEWLINE n = 
int(n)NEWLINE if n&1^1: print(-1); returnNEWLINE a = list('abc')NEWLINE i = (1-n//2)%3NEWLINE for c in s:NEWLINE if c != a[i]:NEWLINE print(-1); returnNEWLINE i = (i+1) % 3NEWLINE print(n//2)NEWLINENEWLINE @staticmethodNEWLINE def c():NEWLINE h, w, k, n, *rc = map(int, sys.stdin.read().split())NEWLINE r, c = np.array(rc).reshape(n,2).T - 1NEWLINE rb = np.bincount(r, minlength=h)NEWLINE cb = np.bincount(c, minlength=w)NEWLINE rbb = np.bincount(rb, minlength=k+1)NEWLINE cbb = np.bincount(cb, minlength=k+1)NEWLINE tot = (rbb[:k+1]*cbb[k::-1]).sum()NEWLINE real = np.bincount(rb[r]+cb[c]-1, minlength=k+1)NEWLINE print(tot-real[k-1]+real[k])NEWLINENEWLINE @staticmethodNEWLINE def d():NEWLINE n, *hs = map(int, sys.stdin.read().split())NEWLINE h, s = np.array(hs).reshape(n,2).TNEWLINENEWLINE t = np.arange(n)NEWLINE def is_ok(x): return np.all(np.sort((x-h)//s) >= t)NEWLINE def binary_search():NEWLINE lo, hi = 0, 10**14NEWLINE while lo+1 < hi:NEWLINE x = (lo+hi)//2NEWLINE if is_ok(x): hi = xNEWLINE else: lo = xNEWLINE return hiNEWLINENEWLINE print(binary_search())NEWLINENEWLINE class ABC024:NEWLINE @staticmethodNEWLINE def a():NEWLINE a, b, c, k, s, t = map(int, sys.stdin.read().split())NEWLINE print(a*s + b*t - c*(s+t)*(s+t>=k))NEWLINENEWLINE @staticmethodNEWLINE def b():NEWLINE n, t, *a = map(int, sys.stdin.read().split())NEWLINE a = np.array(a)NEWLINE print(np.minimum(a[1:]-a[:-1], t).sum() + t)NEWLINENEWLINE @staticmethodNEWLINE def c():NEWLINE n, d, k, *lrst = map(int, sys.stdin.read().split())NEWLINE lrst = np.array(lrst)NEWLINE lr = lrst[:2*d].reshape(d,2)NEWLINE s, t = lrst[2*d:].reshape(k,2).TNEWLINE day = np.zeros((k,),dtype=np.int32)NEWLINE for i in range(d):NEWLINE l, r = lr[i]NEWLINE move = (l<=s)&(s<=r)&(s!=t)NEWLINE reach = move&(l<=t)&(t<=r)NEWLINE s[move&(s<t)] = rNEWLINE s[move&(s>t)] = lNEWLINE s[reach] = t[reach]; day[reach] = i+1NEWLINE print(*day, sep='\n')NEWLINENEWLINE @staticmethodNEWLINE def d():NEWLINE a, b, c = map(int, 
sys.stdin.read().split())NEWLINE p = MODNEWLINE denom = pow(a*b%p - b*c%p + c*a%p, p-2, p)NEWLINE w = (b*c-a*b)%p*denom%pNEWLINE h = (b*c-a*c)%p*denom%pNEWLINE print(h,w)NEWLINENEWLINE class ABC025:NEWLINE @staticmethodNEWLINE def a():NEWLINE s, n = sys.stdin.read().split()NEWLINE n = int(n)NEWLINE i, j = divmod(n-1, 5)NEWLINE print(s[i]+s[j])NEWLINENEWLINE @staticmethodNEWLINE def b():NEWLINE n, a, b = map(int, sys.stdin.readline().split())NEWLINE res = defaultdict(int)NEWLINE for _ in range(n):NEWLINE s, d = sys.stdin.readline().split()NEWLINE d = int(d)NEWLINE res[s] += min(max(d,a),b)NEWLINE res = res['East'] - res['West']NEWLINE if res == 0: ans = 0NEWLINE elif res > 0: ans = f'East {res}'NEWLINE else: ans = f'West {-res}'NEWLINE print(ans)NEWLINENEWLINE @staticmethodNEWLINE def c():NEWLINE b = [0] * 6NEWLINE for i in range(2):NEWLINE *row, = map(int, sys.stdin.readline().split())NEWLINE for j in range(3):NEWLINE b[i*3+j] = row[j]NEWLINE c = [0] * 8NEWLINE for i in range(3):NEWLINE *row, = map(int, sys.stdin.readline().split())NEWLINE for j in range(2):NEWLINE c[i*3+j] = row[j]NEWLINE tot = sum(b) + sum(c)NEWLINENEWLINE @lru_cache(maxsize=None)NEWLINE def f(s=tuple(0 for _ in range(9))):NEWLINE if all(s):NEWLINE res = 0NEWLINE for i in range(6): res += (s[i]==s[i+3])*b[i]NEWLINE for i in range(8): res += (s[i]==s[i+1])*c[i]NEWLINE return resNEWLINE cand = [i for i in range(9) if not s[i]]NEWLINE flg = len(cand)&1NEWLINE s = list(s)NEWLINE res = []NEWLINE for i in cand:NEWLINE s[i] = (flg^1)+1NEWLINE res.append(f(tuple(s)))NEWLINE s[i] = 0NEWLINE return sorted(res, reverse=flg)[0]NEWLINENEWLINE a = f(); b = tot-aNEWLINE print(a)NEWLINE print(b)NEWLINENEWLINENEWLINENEWLINE class ABC026:NEWLINE @staticmethodNEWLINE def a():NEWLINE a = int(sys.stdin.readline().rstrip())NEWLINE print(a//2 * (a-a//2))NEWLINENEWLINE @staticmethodNEWLINE def b():NEWLINE n, *r = map(int, sys.stdin.read().split())NEWLINE s = np.pi * np.array([0]+r)**2; s.sort()NEWLINE res = 
s[n::-2].sum() - s[n-1::-2].sum()NEWLINE print(res)NEWLINENEWLINE @staticmethodNEWLINE def c():NEWLINE n, *b = map(int, sys.stdin.read().split())NEWLINE g = GeometryTopology.Graph()NEWLINE for i in range(1, n): g.add_edge(b[i-1]-1, i, weight=1)NEWLINENEWLINE def f(u=0):NEWLINE if not g.edges[u]: return 1NEWLINE s = [f(v) for v in g.edges[u]]NEWLINE return max(s) + min(s) + 1NEWLINENEWLINE print(f())NEWLINENEWLINE @staticmethodNEWLINE def d():NEWLINE a, b, c = map(int, sys.stdin.readline().split())NEWLINE def f(t): return a*t + b*np.sin(c*t*np.pi) - 100NEWLINE print(optimize.brenth(f, 0, 200))NEWLINENEWLINENEWLINE class ABC027:NEWLINE @staticmethodNEWLINE def a():NEWLINE l = [int(l) for l in sys.stdin.readline().split()]NEWLINE l.sort()NEWLINE print(l[2] if l[0]==l[1] else l[0])NEWLINENEWLINE @staticmethodNEWLINE def b():NEWLINE n, *a = map(int, sys.stdin.read().split())NEWLINE m, r = divmod(sum(a), n)NEWLINE if r: print(-1); returnNEWLINE population = 0NEWLINE towns = 0NEWLINE cnt = 0NEWLINE for x in a:NEWLINE population += xNEWLINE towns += 1NEWLINE if population/towns != m: cnt+=1; continueNEWLINE population, towns = 0, 0NEWLINE print(cnt)NEWLINENEWLINE @staticmethodNEWLINE def c():NEWLINE n = int(sys.stdin.readline().rstrip())NEWLINE flg = n.bit_length()&1^1NEWLINE t = 0NEWLINE x = 1NEWLINE while x <= n:NEWLINE t += 1NEWLINE x = 2*x+1 if t&1^flg else 2*xNEWLINE print('Aoki' if t&1 else 'Takahashi')NEWLINENEWLINENEWLINE class ABC028:NEWLINE @staticmethodNEWLINE def a():NEWLINE n = int(sys.stdin.readline().rstrip())NEWLINE print('Bad' if n<60 else 'Good' if n<90 else 'Great' if n<100 else 'Perfect')NEWLINENEWLINE @staticmethodNEWLINE def b():NEWLINE s = sys.stdin.readline().rstrip()NEWLINE cnt = Counter(s)NEWLINE print(*[cnt.get(c, 0) for c in 'ABCDEF'])NEWLINENEWLINE @staticmethodNEWLINE def c():NEWLINE a, b, c, d, e = map(int, sys.stdin.readline().split())NEWLINE print(max(b+c+e, a+d+e))NEWLINENEWLINE @staticmethodNEWLINE def d():NEWLINE n, k = map(int, 
sys.stdin.readline().split())NEWLINE c = 3*2*(n-k)*(k-1) + 3*(n-1) + 1NEWLINE print(c/n**3)NEWLINENEWLINENEWLINE class ABC029:NEWLINE @staticmethodNEWLINE def a():NEWLINE print(sys.stdin.readline().rstrip()+'s')NEWLINENEWLINE @staticmethodNEWLINE def b():NEWLINE print(sum('r' in s for s in sys.stdin.read().split()))NEWLINENEWLINE @staticmethodNEWLINE def c():NEWLINE print(*[''.join(s) for s in itertools.product('abc', repeat=int(sys.stdin.readline().rstrip()))], sep='\n')NEWLINENEWLINE @staticmethodNEWLINE def d():NEWLINE n = int(sys.stdin.readline().rstrip())NEWLINE print(sum(n//10**(i+1)*10**i + min(max((n%10**(i+1)-10**i+1), 0), 10**i) for i in range(9)))NEWLINENEWLINE class ABC030:NEWLINE @staticmethodNEWLINE def a():NEWLINE a, b, c, d = map(int, sys.stdin.readline().split())NEWLINE e, f = b*c, d*aNEWLINE print('TAKAHASHI' if e>f else 'AOKI' if f>e else 'DRAW')NEWLINENEWLINE @staticmethodNEWLINE def b():NEWLINE n, m = map(int, sys.stdin.readline().split())NEWLINE n = (n%12 + m/60)*30; m *= 6NEWLINE d = abs(n-m)NEWLINE print(min(d, 360-d))NEWLINENEWLINE @staticmethodNEWLINE def c():NEWLINE n, m = map(int, sys.stdin.readline().split())NEWLINE x, y = map(int, sys.stdin.readline().split())NEWLINE a = [int(x) for x in sys.stdin.readline().split()]NEWLINE b = [int(x) for x in sys.stdin.readline().split()]NEWLINENEWLINE t = 0NEWLINE p = 1NEWLINE cnt = 0NEWLINE while True:NEWLINE if p:NEWLINE i = bi_l(a, t)NEWLINE if i == n: breakNEWLINE t = a[i] + xNEWLINE else:NEWLINE i = bi_l(b, t)NEWLINE if i == m: breakNEWLINE t = b[i] + yNEWLINE cnt += 1NEWLINE p ^= 1NEWLINE print(cnt)NEWLINENEWLINE @staticmethodNEWLINE def d():NEWLINE n, a = map(int , sys.stdin.readline().split()); a -= 1NEWLINE k = sys.stdin.readline().rstrip()NEWLINE b = [int(x)-1 for x in sys.stdin.readline().split()]NEWLINENEWLINE c = [None] * nNEWLINE for i in range(n+1):NEWLINE if str(i)==k: print(a+1);returnNEWLINE if c[a] is not None: l, d = i-c[a], c[a];breakNEWLINE c[a] = i; a = b[a]NEWLINENEWLINE r = 
[None] * len(k); r[0] = 1NEWLINE for i in range(len(k)-1): r[i+1] = r[i]*10%lNEWLINE k = [int(c) for c in k][::-1]NEWLINE d = (sum(r[i]*k[i] for i in range(len(k)))-d) % lNEWLINE for _ in range(d): a = b[a]NEWLINE print(a+1)NEWLINENEWLINE @staticmethodNEWLINE def d_2():NEWLINE n, a, k, *b = map(int, sys.stdin.read().split())NEWLINE a -= 1; b = [x-1 for x in b]NEWLINE c = [None]*nNEWLINE for i in range(n+1):NEWLINE if i==k: print(a+1); returnNEWLINE if c[a] is not None:NEWLINE for _ in range((k-c[a])%(i-c[a])): a = b[a]NEWLINE print(a+1); returnNEWLINE c[a] = i; a = b[a]NEWLINENEWLINENEWLINE class ABC031:NEWLINE @staticmethodNEWLINE def a():NEWLINE a, d = map(int, sys.stdin.readline().split())NEWLINE if a > d: a,d = d,aNEWLINE print((a+1)*d)NEWLINENEWLINE @staticmethodNEWLINE def b():NEWLINE l, h, n, *a = map(int, sys.stdin.read().split())NEWLINE a = np.array(a)NEWLINE res = np.maximum(l-a, 0)NEWLINE res[a>h] = -1NEWLINE print(*res, sep='\n')NEWLINENEWLINE @staticmethodNEWLINE def c():NEWLINE n, *a = map(int, sys.stdin.read().split())NEWLINE a = np.array(a)NEWLINE np.cumsum(a[::2], out=a[::2])NEWLINE np.cumsum(a[1::2], out=a[1::2])NEWLINE a = list(a) + [0]*2NEWLINENEWLINE def score(i, j):NEWLINE if i > j: i, j = j, iNEWLINE if (j-i)&1: x, y = a[j-1]-a[i-2], a[j]-a[i-1]NEWLINE else: x, y = a[j]-a[i-2], a[j-1]-a[i-1]NEWLINE return x, yNEWLINENEWLINE res = -infNEWLINE for i in range(n):NEWLINE s = -infNEWLINE for j in range(n):NEWLINE if i==j: continueNEWLINE x, y = score(i, j)NEWLINE if y>s: s,t = y,xNEWLINE res = max(res, t)NEWLINE print(res)NEWLINENEWLINE @staticmethodNEWLINE def d():NEWLINE k, m = map(int, sys.stdin.readline().split())NEWLINE *vw, = zip(*[iter(sys.stdin.read().split())]*2)NEWLINE for l in itertools.product((1,2,3), repeat=k):NEWLINE s = dict()NEWLINE for v, w in vw:NEWLINE i = 0NEWLINE for d in v:NEWLINE d = int(d)-1NEWLINE j = i+l[d]NEWLINE if j > len(w): breakNEWLINE t = w[i:j]NEWLINE if d in s and s[d] != t: breakNEWLINE s[d] = tNEWLINE i = 
jNEWLINE else:NEWLINE if i == len(w): continueNEWLINE breakNEWLINE else:NEWLINE for i in range(k): print(s[i])NEWLINE returnNEWLINENEWLINENEWLINE class ABC032:NEWLINE @staticmethodNEWLINE def a():NEWLINE a, b, n = map(int, sys.stdin.read().split())NEWLINE l = NumberTheory.lcm(a, b)NEWLINE print((n+l-1)//l*l)NEWLINENEWLINE @staticmethodNEWLINE def b():NEWLINE s, k = sys.stdin.read().split()NEWLINE k = int(k)NEWLINE res = set()NEWLINE for i in range(len(s)-k+1):NEWLINE res.add(s[i:i+k])NEWLINE print(len(res))NEWLINENEWLINE @staticmethodNEWLINE def c():NEWLINE n, k, *s = map(int, sys.stdin.read().split())NEWLINE if 0 in s: print(n); returnNEWLINE if k == 0: print(0); returnNEWLINE res, tmp, l = 0, 1, 0NEWLINE for r in range(n):NEWLINE tmp *= s[r]NEWLINE while tmp > k: tmp //= s[l]; l+=1NEWLINE res = max(res, r-l+1)NEWLINENEWLINE print(res)NEWLINENEWLINE class ABC033:NEWLINE @staticmethodNEWLINE def a():NEWLINE print('SAME' if len(set(sys.stdin.readline().rstrip()))==1 else 'DIFFERENT')NEWLINENEWLINE @staticmethodNEWLINE def b():NEWLINE n = int(sys.stdin.readline().rstrip())NEWLINE res = dict()NEWLINE for _ in range(n):NEWLINE s, p = sys.stdin.readline().split()NEWLINE res[s] = int(p)NEWLINE tot = sum(res.values())NEWLINE for s, p in res.items():NEWLINE if p > tot/2: print(s); returnNEWLINE print('atcoder')NEWLINENEWLINE @staticmethodNEWLINE def c():NEWLINE s = sys.stdin.readline().rstrip()NEWLINE print(sum(not '0' in f for f in s.split('+')))NEWLINENEWLINENEWLINE class ABC034:NEWLINE @staticmethodNEWLINE def a():NEWLINE x, y = map(int, sys.stdin.readline().split())NEWLINE print('Better' if y>x else 'Worse')NEWLINENEWLINE @staticmethodNEWLINE def b():NEWLINE n = int(sys.stdin.readline().rstrip())NEWLINE print(n+1 if n&1 else n-1)NEWLINENEWLINE @staticmethodNEWLINE def c():NEWLINE h, w = map(int, sys.stdin.read().split())NEWLINE choose = Combinatorics.CombinationsMod()NEWLINE print(choose(h+w-2, h-1))NEWLINENEWLINE @staticmethodNEWLINE def d():NEWLINE n, k, *wp = 
map(int, sys.stdin.read().split())NEWLINE w, p = np.array(wp).reshape(-1, 2).TNEWLINE def f(x):NEWLINE return np.sort(w*(p-x))[-k:].sum()NEWLINE print(optimize.bisect(f, 0, 100))NEWLINENEWLINE class ABC035:NEWLINE @staticmethodNEWLINE def a():NEWLINE w, h = map(int, sys.stdin.readline().split())NEWLINE print('4:3' if 4*h==3*w else '16:9')NEWLINENEWLINE @staticmethodNEWLINE def b():NEWLINE s, t = sys.stdin.read().split()NEWLINE y = x = z = 0NEWLINE for c in s:NEWLINE if c == '?': z += 1NEWLINE elif c == 'L': x -= 1NEWLINE elif c == 'R': x += 1NEWLINE elif c == 'D': y -= 1NEWLINE elif c == 'U': y += 1NEWLINE d = abs(y)+abs(x)NEWLINE print(d+z if t=='1' else max(d-z, (d-z)&1))NEWLINENEWLINE @staticmethodNEWLINE def c():NEWLINE n, q, *lr = map(int, sys.stdin.read().split())NEWLINE l, r = np.array(lr).reshape(q, 2).TNEWLINE res = np.zeros(n+1, dtype=int)NEWLINE np.add.at(res, l-1, 1)NEWLINE np.subtract.at(res, r, 1)NEWLINE np.cumsum(res, out=res)NEWLINE res = res&1NEWLINE print(''.join(map(str, res[:-1])))NEWLINENEWLINE @staticmethodNEWLINE def d():NEWLINE n, m, t = map(int, sys.stdin.readline().split())NEWLINE point = np.array(sys.stdin.readline().split(), dtype=int)NEWLINE a, b, c = np.array(sys.stdin.read().split(), dtype=np.int64).reshape(m, 3).TNEWLINE a -= 1; b -= 1NEWLINE d_1 = shortest_path(csr_matrix((c, (a, b)), (n, n)), method='D', directed=True, indices=0)NEWLINE d_2 = shortest_path(csr_matrix((c, (b, a)), (n, n)), method='D', directed=True, indices=0)NEWLINE print(int(np.amax((t-(d_1+d_2))*point)))NEWLINENEWLINE class ABC036:NEWLINE @staticmethodNEWLINE def a():NEWLINE a, b = map(int, sys.stdin.readline().split())NEWLINE print((b+a-1)//a)NEWLINENEWLINE @staticmethodNEWLINE def b():NEWLINE n, *s = sys.stdin.read().split()NEWLINE n = int(n)NEWLINE for j in range(n):NEWLINE row = ''NEWLINE for i in range(n-1, -1, -1):NEWLINE row += s[i][j]NEWLINE print(row)NEWLINENEWLINE @staticmethodNEWLINE def c():NEWLINE n, *a = map(int, sys.stdin.read().split())NEWLINE b = 
[None]*nNEWLINE prev = NoneNEWLINE j = -1NEWLINE for i, x in sorted(enumerate(a), key=lambda x: x[1]):NEWLINE if x != prev: j += 1NEWLINE b[i] = jNEWLINE prev = xNEWLINE print(*b, sep='\n')NEWLINENEWLINE @staticmethodNEWLINE def d():NEWLINE n, *ab = map(int, sys.stdin.read().split())NEWLINE edges = [[] for _ in range(n)]NEWLINE for a, b in zip(*[iter(ab)]*2):NEWLINE a -= 1; b -= 1NEWLINE edges[a].append(b)NEWLINE edges[b].append(a)NEWLINE parent = [None]*nNEWLINE def count(u):NEWLINE black, white = 1, 1NEWLINE for v in edges[u]:NEWLINE if v == parent[u]: continueNEWLINE parent[v] = uNEWLINE b, w = count(v)NEWLINE black *= w; black %= MODNEWLINE white *= (b+w)%MOD; white %= MODNEWLINE return black, whiteNEWLINE print(sum(count(0))%MOD)NEWLINENEWLINE class ABC037:NEWLINE @staticmethodNEWLINE def a():NEWLINE a, b, c = map(int, sys.stdin.readline().split())NEWLINE print(c//min(a, b))NEWLINENEWLINE @staticmethodNEWLINE def b():NEWLINE n, q, *lrt = map(int, sys.stdin.read().split())NEWLINE a = np.zeros(n, dtype=int)NEWLINE for l, r, t in zip(*[iter(lrt)]*3):NEWLINE a[l-1:r] = tNEWLINE print(*a, sep='\n')NEWLINENEWLINE @staticmethodNEWLINE def c():NEWLINE n, k, *a = map(int, sys.stdin.read().split())NEWLINE a = np.array([0]+a)NEWLINE np.cumsum(a, out=a)NEWLINE s = (a[k:] - a[:-k]).sum()NEWLINE print(s)NEWLINENEWLINE @staticmethodNEWLINE def d():NEWLINE h, w, *a = map(int, sys.stdin.read().split())NEWLINE p = [None]*(h*w)NEWLINE def paths(k):NEWLINE if p[k]: return p[k]NEWLINE p[k] = 1NEWLINE i, j = divmod(k,w)NEWLINE if j>0 and a[k]>a[k-1]: p[k] += paths(k-1)NEWLINE if j<w-1 and a[k]>a[k+1]: p[k] += paths(k+1)NEWLINE if i>0 and a[k]>a[k-w]: p[k] += paths(k-w)NEWLINE if i<h-1 and a[k]>a[k+w]: p[k] += paths(k+w)NEWLINE p[k] %= MOD; return p[k]NEWLINE print(sum(paths(i) for i in range(h*w))%MOD)NEWLINENEWLINENEWLINE class ABC038:NEWLINE @staticmethodNEWLINE def a():NEWLINE s = sys.stdin.readline().rstrip()NEWLINE print('YES' if s[-1]=='T' else 'NO')NEWLINENEWLINE 
@staticmethodNEWLINE def b():NEWLINE a, b, c, d = map(int, sys.stdin.read().split())NEWLINE print('YES' if a==c or b==c or a==d or b==d else 'NO')NEWLINENEWLINE @staticmethodNEWLINE def c():NEWLINE n, *a = map(int, sys.stdin.read().split())NEWLINE a += [-1]NEWLINE cnt = nNEWLINE tmp = 1NEWLINE for i in range(n):NEWLINE if a[i+1] > a[i]:NEWLINE tmp += 1NEWLINE else:NEWLINE cnt += tmp*(tmp-1)//2NEWLINE tmp = 1NEWLINE print(cnt)NEWLINENEWLINE @staticmethodNEWLINE def d():NEWLINE n, *wh = map(int, sys.stdin.read().split())NEWLINE a = [x[1] for x in sorted(zip(*[iter(wh)]*2), key=lambda x: (x[0], -x[1]))]NEWLINE print(bi_l(DP.LIS(a), inf))NEWLINENEWLINE class ABC039:NEWLINE @staticmethodNEWLINE def a():NEWLINE a, b, c = map(int, sys.stdin.readline().split())NEWLINE print((a*b+b*c+c*a)*2)NEWLINENEWLINE @staticmethodNEWLINE def b():NEWLINE x = int(sys.stdin.readline().rstrip())NEWLINE for n in range(1, int(x**0.5)+1):NEWLINE if pow(n, 4)==x:NEWLINE print(n); returnNEWLINENEWLINENEWLINE @staticmethodNEWLINE def c():NEWLINE board = 'WBWBWWBWBWBW' * 3NEWLINE convert = 'Do, *, Re, *, Mi, Fa, *, So, *, La, *, Si'.split(', ')NEWLINE s = sys.stdin.readline().rstrip()NEWLINE print(convert[board.index(s)])NEWLINENEWLINENEWLINE @staticmethodNEWLINE def d():NEWLINE h, w = map(int, sys.stdin.readline().split())NEWLINE s = ''.join(sys.stdin.read().split())NEWLINE white = set()NEWLINE for i in range(h*w):NEWLINE if s[i]=='#': continueNEWLINE l = 0 if i%w==0 else -1NEWLINE r = 0 if (i+1)%w==0 else 1NEWLINE white |= {i+dy+dx for dy in range(-w, w+1, w) for dx in range(l,r+1)}NEWLINE black_before = set(range(h*w)) - whiteNEWLINE black_after = set()NEWLINE for i in black_before:NEWLINE l = 0 if i%w==0 else -1NEWLINE r = 0 if (i+1)%w==0 else 1NEWLINE black_after |= {i+dy+dx for dy in range(-w, w+1, w) for dx in range(l,r+1)}NEWLINE black_after &= set(range(h*w))NEWLINE for i in range(h*w):NEWLINE if s[i]=='#' and not i in black_after: print('impossible'); returnNEWLINE 
print('possible')NEWLINE for i in range(h):NEWLINE print(''.join(['#' if i*w+j in black_before else '.' for j in range(w)]))NEWLINENEWLINENEWLINENEWLINENEWLINE class ABC040:NEWLINE @staticmethodNEWLINE def a():NEWLINE n, x = map(int, sys.stdin.readline().split())NEWLINE print(min(x-1, n-x))NEWLINENEWLINE @staticmethodNEWLINE def b():NEWLINE n = int(sys.stdin.readline().rstrip())NEWLINE res = infNEWLINE for i in range(1, int(n**.5)+1):NEWLINE res = min(res, n//i-i+n%i)NEWLINE print(res)NEWLINENEWLINE @staticmethodNEWLINE def c():NEWLINE n, *h = map(int, sys.stdin.read().split())NEWLINE h = [h[0]]+hNEWLINE cost = [None] * (n+1); cost[0] = cost[1] = 0NEWLINE for i in range(2, n+1):NEWLINE cost[i] = min(NEWLINE cost[i-2] + abs(h[i]-h[i-2]),NEWLINE cost[i-1] + abs(h[i]-h[i-1])NEWLINE )NEWLINE print(cost[n])NEWLINENEWLINE @staticmethodNEWLINE def d():NEWLINE n, m = map(int, sys.stdin.readline().split())NEWLINE uf = GeometryTopology.Graph(n); uf.init_dsu()NEWLINE queue = []NEWLINE for _ in range(m):NEWLINE a, b, y = map(int, sys.stdin.readline().split())NEWLINE heappush(queue, (-(2*y), a-1, b-1))NEWLINE q = int(sys.stdin.readline().rstrip())NEWLINE for i in range(q):NEWLINE v, y = map(int, sys.stdin.readline().split())NEWLINE heappush(queue, (-(2*y+1), v-1, i))NEWLINE res = [None] * qNEWLINE while queue:NEWLINE y, i, j = heappop(queue)NEWLINE if y&1:NEWLINE res[j] = uf.size[uf.find(i)]NEWLINE else:NEWLINE uf.unite(i, j)NEWLINE print(*res, sep='\n')NEWLINENEWLINE class ABC041:NEWLINE @staticmethodNEWLINE def a():NEWLINE s, i = sys.stdin.read().split()NEWLINE i = int(i)NEWLINE print(s[i-1])NEWLINENEWLINE @staticmethodNEWLINE def b():NEWLINE a, b, c = map(int, sys.stdin.readline().split())NEWLINE ans = a * b % MOD * c % MODNEWLINE print(ans)NEWLINENEWLINE @staticmethodNEWLINE def c():NEWLINE n, *a = map(int, sys.stdin.read().split())NEWLINE for i, h in sorted(enumerate(a), key=lambda x: -x[1]):NEWLINE print(i+1)NEWLINENEWLINE @staticmethodNEWLINE def d():NEWLINE n, _, *xy = 
map(int, sys.stdin.read().split())NEWLINE g = [0]*nNEWLINE for x, y in zip(*[iter(xy)]*2): g[x-1] |= 1<<(y-1)NEWLINE res = [0]*(1<<n); res[0] = 1NEWLINE for i in range(1<<n):NEWLINE for j in range(n):NEWLINE if i>>j&1^1: continueNEWLINE if not(g[j]&i): res[i] += res[i&~(1<<j)]NEWLINE print(res[-1])NEWLINENEWLINENEWLINENEWLINE class ABC042:NEWLINE @staticmethodNEWLINE def a():NEWLINE a = [int(x) for x in sys.stdin.readline().split()]NEWLINE c = Counter(a)NEWLINE print('YES' if c[5]==2 and c[7]==1 else 'NO')NEWLINENEWLINE @staticmethodNEWLINE def b():NEWLINE n, l, *s = sys.stdin.read().split()NEWLINE print(''.join(sorted(s)))NEWLINENEWLINE @staticmethodNEWLINE def c():NEWLINE n, k, *d = sys.stdin.read().split()NEWLINE l = len(n)NEWLINE ok = sorted(set(string.digits)-set(d))NEWLINE cand = [int(''.join(p)) for p in itertools.product(ok, repeat=l)] + [int(min(x for x in ok if x > '0')+min(ok)*l)]NEWLINE print(cand[bi_l(cand, int(n))])NEWLINENEWLINE @staticmethodNEWLINE def d():NEWLINE h, w, a, b = map(int, sys.stdin.read().split())NEWLINE combinations = Combinatorics.CombinationsMod(n=2*10**5, mod=MOD)NEWLINE i = np.arange(h-a, h)NEWLINE ng = np.sum(combinations(i+b-1, i) * combinations(h-i+w-b-2, h-1-i) % MOD)NEWLINE print((combinations(h+w-2, h-1)-ng)%MOD)NEWLINENEWLINENEWLINE class ABC043:NEWLINE @staticmethodNEWLINE def a():NEWLINE n = int(sys.stdin.readline().rstrip())NEWLINE print((1+n)*n//2)NEWLINENEWLINE @staticmethodNEWLINE def b():NEWLINE s = sys.stdin.readline().rstrip()NEWLINE t = ''NEWLINE for c in s:NEWLINE if c == 'B': t = t[:-1]NEWLINE else: t += cNEWLINE print(t)NEWLINENEWLINE @staticmethodNEWLINE def c():NEWLINE n, *a = map(int, sys.stdin.read().split())NEWLINE a = np.array(a)NEWLINE x = np.around(a.sum()/n).astype(int)NEWLINE print(np.sum((a-x)**2))NEWLINENEWLINE @staticmethodNEWLINE def d():NEWLINE s = sys.stdin.readline().rstrip()NEWLINE n = len(s)NEWLINE for i in range(n-1):NEWLINE if s[i] == s[i+1]: print(i+1, i+2); returnNEWLINE for i in 
range(n-2):NEWLINE if s[i] == s[i+2]: print(i+1, i+3); returnNEWLINE print(-1, -1)NEWLINENEWLINE class ABC044:NEWLINE @staticmethodNEWLINE def a():NEWLINE n, k, x, y = map(int, sys.stdin.read().split())NEWLINE print(min(n,k)*x + max(0,n-k)*y)NEWLINENEWLINE @staticmethodNEWLINE def b():NEWLINE res = set(c&1 for c in Counter(sys.stdin.readline().rstrip()).values())NEWLINE print('Yes' if len(res)==1 and res.pop()==0 else 'No')NEWLINENEWLINE @staticmethodNEWLINE def c():NEWLINE n, a, *x = map(int, sys.stdin.read().split())NEWLINE dp = np.zeros((n+1, 2501), dtype=np.int64); dp[0,0] = 1NEWLINE for v in x: dp[1:,v:] += dp[:-1,:-v]NEWLINE i = np.arange(1, n+1)NEWLINE print(dp[i, i*a].sum())NEWLINENEWLINE @staticmethodNEWLINE def c_2():NEWLINE n, a, *x = map(int, sys.stdin.read().split())NEWLINE for i in range(n): x[i] -= aNEWLINENEWLINE s = defaultdict(int); s[0] = 1NEWLINE for i in range(n):NEWLINE ns = s.copy()NEWLINE for k, v in s.items(): ns[k+x[i]] += vNEWLINE s = nsNEWLINE print(s[0]-1)NEWLINENEWLINENEWLINENEWLINE @staticmethodNEWLINE def d():NEWLINE passNEWLINENEWLINE class ABC045:NEWLINE @staticmethodNEWLINE def a():NEWLINE a, b, h = map(int, sys.stdin.read().split())NEWLINE print((a+b)*h//2)NEWLINENEWLINE @staticmethodNEWLINE def b():NEWLINE a, b, c = sys.stdin.read().split()NEWLINE d = {'a': a[::-1], 'b': b[::-1], 'c': c[::-1]}NEWLINE nx = 'a'NEWLINE while 1:NEWLINE if not d[nx]: print(nx.upper()); returnNEWLINE d[nx], nx = d[nx][:-1], d[nx][-1]NEWLINENEWLINE @staticmethodNEWLINE def c():NEWLINE def c(l): return pow(2, max(0,l-1))NEWLINE s = sys.stdin.readline().rstrip()NEWLINE n = len(s)NEWLINE print(sum(int(s[i:j+1])*c(i)*c(n-1-j) for i in range(n) for j in range(i, n)))NEWLINENEWLINE @staticmethodNEWLINE def d():NEWLINE h, w, n, *ab = map(int, sys.stdin.read().split())NEWLINE c = defaultdict(int)NEWLINE for y, x in zip(*[iter(ab)] * 2):NEWLINE y -= 1; x -= 1NEWLINE for dy, dx in itertools.product(range(-1, 2), repeat=2):NEWLINE i, j = y+dy, x+dxNEWLINE if 
not(0<i<h-1 and 0<j<w-1): continueNEWLINE c[(i,j)] += 1NEWLINE c = Counter(c.values())NEWLINE c[0] = (h-2)*(w-2)-sum(c.values())NEWLINE for i in range(10): print(c[i])NEWLINENEWLINENEWLINE class ABC046:NEWLINE @staticmethodNEWLINE def a():NEWLINE print(len(set(sys.stdin.readline().split())))NEWLINENEWLINE @staticmethodNEWLINE def b():NEWLINE n, k = map(int, sys.stdin.readline().split())NEWLINE print(k*pow(k-1, n-1))NEWLINENEWLINE @staticmethodNEWLINE def c():NEWLINE n, *xy = map(int, sys.stdin.read().split())NEWLINE a, b = 1, 1NEWLINE for x, y in zip(*[iter(xy)]*2):NEWLINE n = max((a+x-1)//x, (b+y-1)//y)NEWLINE a, b = n*x, n*yNEWLINE print(a+b)NEWLINENEWLINE @staticmethodNEWLINE def d():NEWLINE c = Counter(sys.stdin.readline().rstrip())NEWLINE print((c['g']-c['p'])//2)NEWLINENEWLINENEWLINENEWLINE class ABC047:NEWLINE @staticmethodNEWLINE def a():NEWLINE c = sorted(map(int, sys.stdin.readline().split()))NEWLINE print('Yes' if c[0]+c[1]==c[2] else 'No')NEWLINENEWLINE @staticmethodNEWLINE def b():NEWLINE w, h, n, *xyf = map(int, sys.stdin.read().split())NEWLINE l, r, d, u = 0, w, 0, hNEWLINE for x, y, f in zip(*[iter(xyf)]*3):NEWLINE if f == 1: l = max(l, x)NEWLINE if f == 2: r = min(r, x)NEWLINE if f == 3: d = max(d, y)NEWLINE if f == 4: u = min(u, y)NEWLINE print(max(0, r-l)*max(0, u-d))NEWLINENEWLINE @staticmethodNEWLINE def c():NEWLINE s = sys.stdin.readline().rstrip()NEWLINE print(sum(s[i]!=s[i+1] for i in range(len(s)-1)))NEWLINENEWLINE @staticmethodNEWLINE def d():NEWLINE mn, mx, c = inf, -1, 0NEWLINE n, t, *a = map(int, sys.stdin.read().split())NEWLINE for p in a:NEWLINE if p-mn == mx: c += 1NEWLINE elif p-mn>mx: mx, c = p-mn, 1NEWLINE mn = min(mn, p)NEWLINE print(c)NEWLINENEWLINE class ABC048:NEWLINE @staticmethodNEWLINE def a():NEWLINE def initial(s): return s[0].upper()NEWLINE print(''.join(map(initial, sys.stdin.readline().split())))NEWLINENEWLINE @staticmethodNEWLINE def b():NEWLINE a, b, x = map(int, sys.stdin.readline().split())NEWLINE print(b//x - 
(a-1)//x) # if a=0, (a-1)/x is rounded down to -1.NEWLINENEWLINE @staticmethodNEWLINE def c():NEWLINE n, x, *a = map(int, sys.stdin.read().split())NEWLINE cnt = prev = 0NEWLINE for i in range(n):NEWLINE d = prev+a[i] - xNEWLINE prev = a[i]NEWLINE if d <= 0: continueNEWLINE cnt += d; prev -= dNEWLINE print(cnt)NEWLINENEWLINE @staticmethodNEWLINE def d():NEWLINE s = sys.stdin.readline().rstrip()NEWLINE print('First' if len(s)&1^(s[0]==s[-1]) else 'Second')NEWLINENEWLINENEWLINE class ABC049:NEWLINE @staticmethodNEWLINE def a():NEWLINE vowels = set('aeiou')NEWLINE print('vowel' if sys.stdin.readline().rstrip() in vowels else 'consonant')NEWLINENEWLINE @staticmethodNEWLINE def b():NEWLINE h, w, *s = sys.stdin.read().split()NEWLINE for l in s:NEWLINE for _ in range(2): print(l)NEWLINENEWLINE @staticmethodNEWLINE def c():NEWLINE t = set('dream, dreamer, erase, eraser'.split(', '))NEWLINE def obtainable(s):NEWLINE while True:NEWLINE for i in range(5, 8):NEWLINE if s[-i:] in t:NEWLINE s = s[:-i]NEWLINE if not s: return TrueNEWLINE breakNEWLINE else: return FalseNEWLINENEWLINE s = sys.stdin.readline().rstrip()NEWLINE print('YES' if obtainable(s) else 'NO')NEWLINENEWLINE @staticmethodNEWLINE def d():NEWLINE n, k, l = map(int, sys.stdin.readline().split())NEWLINE uf1 = GeometryTopology.Graph(n); uf1.init_dsu()NEWLINE uf2 = GeometryTopology.Graph(n); uf2.init_dsu()NEWLINENEWLINE def add_edges(uf, m):NEWLINE for _ in range(m):NEWLINE x, y = map(int, sys.stdin.readline().split())NEWLINE x -= 1; y -= 1NEWLINE uf.unite(x, y)NEWLINENEWLINE add_edges(uf1, k); add_edges(uf2, l)NEWLINENEWLINE g = defaultdict(list)NEWLINE for i in range(n): g[(uf1.find(i), uf2.find(i))].append(i)NEWLINENEWLINE res = [None] * nNEWLINE for a in g:NEWLINE for i in g[a]: res[i] = len(g[a])NEWLINENEWLINE print(*res, sep=' ')NEWLINENEWLINENEWLINE class ABC050:NEWLINE @staticmethodNEWLINE def a():NEWLINE print(eval(sys.stdin.readline().rstrip()))NEWLINENEWLINE @staticmethodNEWLINE def b():NEWLINE n = 
int(sys.stdin.readline().rstrip())NEWLINE t = np.array(sys.stdin.readline().split(), dtype=np.int64)NEWLINE m, *px = map(int, sys.stdin.read().split())NEWLINE p, x = np.array(px).reshape(m, 2).T; p -= 1NEWLINE print(*(t.sum()+x-t[p]), sep='\n')NEWLINENEWLINE @staticmethodNEWLINE def c():NEWLINE n, *a = map(int, sys.stdin.read().split())NEWLINE a = Counter(a)NEWLINE if n&1 and not(a[0]==1 and all(a[i]==2 for i in range(2, n, 2))):NEWLINE print(0); returnNEWLINE if ~n&1 and any(a[i]!= 2 for i in range(1, n, 2)):NEWLINE print(0); returnNEWLINE print(pow(2, n//2, MOD))NEWLINENEWLINE @staticmethodNEWLINE def d(): passNEWLINENEWLINENEWLINE class ABC051:NEWLINE @staticmethodNEWLINE def a():NEWLINE print(' '.join(sys.stdin.readline().rstrip().split(',')))NEWLINENEWLINE @staticmethodNEWLINE def b():NEWLINE k, s = map(int, sys.stdin.readline().split())NEWLINE tot = 0NEWLINE for x in range(k+1):NEWLINE if s-x < 0: breakNEWLINE if s-x > 2*k: continueNEWLINE tot += s-x+1 if s-x<=k else 2*k-(s-x)+1NEWLINE print(tot)NEWLINENEWLINE @staticmethodNEWLINE def c():NEWLINE x1, y1, x2, y2 = map(int, sys.stdin.readline().split())NEWLINE dx, dy = x2-x1, y2-y1NEWLINE print('U'*dy+'R'*(dx+1)+'D'*(dy+1)+'L'*(dx+1)+'U'+'L'+'U'*(dy+1)+'R'*(dx+1)+'D'*(dy+1)+'L'*dx)NEWLINENEWLINE @staticmethodNEWLINE def d():NEWLINE n, m, *abc = map(int, sys.stdin.read().split())NEWLINE x = np.arange(n)NEWLINE a, b, c = np.array(abc).reshape(m, 3).T; a -= 1; b -= 1NEWLINE d = shortest_path(csr_matrix((c, (a, b)), shape=(n, n)), method='FW', directed=False).astype(np.int64)NEWLINE print(m-np.any(d[x,a[:,None]]+c[:,None]==d[x,b[:,None]], axis=1).sum())NEWLINENEWLINENEWLINE class ABC052:NEWLINE @staticmethodNEWLINE def a():NEWLINE a, b, c, d = map(int, sys.stdin.readline().split())NEWLINE print(max(a*b, c*d))NEWLINENEWLINE @staticmethodNEWLINE def b():NEWLINE n, s = sys.stdin.read().split()NEWLINE n = int(n)NEWLINE a = [0] * (n+1)NEWLINE for i in range(n):NEWLINE a[i+1] = a[i] + (1 if s[i]=='I' else -1)NEWLINE 
print(max(a))NEWLINENEWLINE @staticmethodNEWLINE def c():NEWLINE n = int(sys.stdin.readline().rstrip())NEWLINE pn = NumberTheory.PrimeNumbers(n)NEWLINE s = 1NEWLINE for c in pn.factorize_factorial(n).values():NEWLINE s = s*(c+1)%MODNEWLINE print(s)NEWLINENEWLINE @staticmethodNEWLINE def d():NEWLINE n, a, b, *x = map(int, sys.stdin.read().split())NEWLINE x = np.array(x)NEWLINE print(np.minimum((x[1:]-x[:-1])*a, b).sum())NEWLINENEWLINE class ABC053:NEWLINE @staticmethodNEWLINE def a():NEWLINE print('ABC' if int(sys.stdin.readline().rstrip())<1200 else 'ARC')NEWLINENEWLINE @staticmethodNEWLINE def b():NEWLINE s = sys.stdin.readline().rstrip()NEWLINE print(len(s)-s.find('A')-s[::-1].find('Z'))NEWLINENEWLINE @staticmethodNEWLINE def c():NEWLINE x = int(sys.stdin.readline().rstrip())NEWLINE q, r = divmod(x, 11)NEWLINE print(2*q + (r+5)//6)NEWLINENEWLINE @staticmethodNEWLINE def d():NEWLINE n, *a = map(int, sys.stdin.read().split())NEWLINE print(n-((n-len(set(a))+1)//2*2))NEWLINENEWLINE class ABC054:NEWLINE @staticmethodNEWLINE def a():NEWLINE def f(x):NEWLINE return (x+11)%13NEWLINE a, b = map(int, sys.stdin.readline().split())NEWLINE print('Alice' if f(a)>f(b) else 'Bob' if f(a)<f(b) else 'Draw')NEWLINENEWLINE @staticmethodNEWLINE def b():NEWLINE n, m = map(int, sys.stdin.readline().split())NEWLINE a = [sys.stdin.readline().rstrip() for _ in range(n)]NEWLINE b = [sys.stdin.readline().rstrip() for _ in range(m)]NEWLINENEWLINE for i in range(n-m+1):NEWLINE for j in range(n-m+1):NEWLINE for y in range(m):NEWLINE for x in range(m):NEWLINE if a[i+y][j+x]==b[y][x]: continueNEWLINE breakNEWLINE else: continueNEWLINE breakNEWLINE else: print('Yes'); returnNEWLINE print('No')NEWLINENEWLINE @staticmethodNEWLINE def c():NEWLINE n, m, *ab = map(int, sys.stdin.read().split())NEWLINE g = GeometryTopology.Graph(n)NEWLINE for a, b in zip(*[iter(ab)]*2):NEWLINE a -= 1; b -= 1NEWLINE g.add_edge(a,b)NEWLINE g.add_edge(b,a)NEWLINENEWLINE cnt = 0NEWLINE stack = [(0, 1)]NEWLINE while 
stack:NEWLINE u, s = stack.pop()NEWLINE if s==(1<<n)-1: cnt+=1; continueNEWLINE for v in g.edges[u]:NEWLINE if s>>v&1: continueNEWLINE stack.append((v, s|1<<v))NEWLINE print(cnt)NEWLINENEWLINE @staticmethodNEWLINE def d():NEWLINE n, ma, mb, *abc = map(int, sys.stdin.read().split())NEWLINE dp = np.full((401, 401), np.inf); dp[0,0] = 0NEWLINE for a, b, c in zip(*[iter(abc)]*3):NEWLINE np.minimum(dp[a:, b:], dp[:-a, :-b]+c, out=dp[a:, b:])NEWLINE i = np.arange(1, 400//max(ma,mb)+1)NEWLINE res = dp[i*ma, i*mb].min()NEWLINE print(int(res) if res != np.inf else -1)NEWLINENEWLINENEWLINE class ABC055:NEWLINE @staticmethodNEWLINE def a():NEWLINE n = int(sys.stdin.readline().rstrip())NEWLINE print(800*n - 200*(n//15))NEWLINENEWLINE @staticmethodNEWLINE def b():NEWLINE n = int(sys.stdin.readline().rstrip())NEWLINE fac, _ = Algebra.generate_fac_ifac(n, MOD)NEWLINE print(fac[-1])NEWLINENEWLINE @staticmethodNEWLINE def c():NEWLINE n, m = map(int, sys.stdin.readline().split())NEWLINE print(m//2 if m<=2*n else n+(m-2*n)//4)NEWLINENEWLINE @staticmethodNEWLINE def d():NEWLINE n, s = sys.stdin.read().split(); n = int(n)NEWLINE s = [1 if c=='o' else 0 for c in s]NEWLINE def possible(t):NEWLINE for i in range(1, n-1): t[i+1] = t[i-1]^t[i]^s[i]NEWLINE return ((t[0]^s[0]^t[1]^t[-1])|(t[-1]^s[-1]^t[-2]^t[0]))^1NEWLINENEWLINE for fst in [(1,0), (0,1), (1,1), (0,0)]:NEWLINE t = [None]*n; t[0], t[1] = fst[0], fst[1]NEWLINE if possible(t): print(''.join('S' if x==1 else 'W' for x in t)); returnNEWLINE print(-1)NEWLINENEWLINENEWLINE class ABC056:NEWLINE @staticmethodNEWLINE def a():NEWLINE def to_i(c):NEWLINE return 1 if c=='H' else 0NEWLINE a, b = map(to_i, sys.stdin.readline().split())NEWLINE print('D' if a^b else 'H')NEWLINENEWLINE @staticmethodNEWLINE def b():NEWLINE w, a, b = map(int, sys.stdin.readline().split())NEWLINE if a>b: a,b = b,aNEWLINE print(max(b-(a+w), 0))NEWLINENEWLINE @staticmethodNEWLINE def c():NEWLINE x = int(sys.stdin.readline().rstrip())NEWLINE 
print(int(math.ceil(math.sqrt(2*x+1/4)-.5)))NEWLINENEWLINENEWLINE @staticmethodNEWLINE def d():NEWLINE n, k, *a = map(int, sys.stdin.read().split())NEWLINE a = sorted(min(x,k) for x in a)NEWLINENEWLINE def necessary(i):NEWLINE dp = np.zeros(k, dtype=np.bool); dp[0] = TrueNEWLINE for j in range(n):NEWLINE if j==i: continueNEWLINE dp[a[j]:] += dp[:-a[j]]NEWLINE return np.any(dp[k-a[i]:])NEWLINENEWLINE def binary_search():NEWLINE lo, hi = -1, nNEWLINE while hi-lo > 1:NEWLINE i = (lo+hi)//2NEWLINE if necessary(i): hi = iNEWLINE else: lo = iNEWLINE return hiNEWLINENEWLINE print(binary_search())NEWLINENEWLINENEWLINENEWLINE class ABC057:NEWLINE @staticmethodNEWLINE def a():NEWLINE a, b = map(int, sys.stdin.readline().split())NEWLINE print((a+b)%24)NEWLINENEWLINE @staticmethodNEWLINE def b():NEWLINE n, m, *I = map(int, sys.stdin.read().split())NEWLINE I = np.array(I).reshape(-1, 2)NEWLINE ab, cd = I[:n], I[n:]NEWLINE print(*(np.argmin(np.absolute(ab[:,None]-cd).sum(axis=-1), axis=-1)+1), sep='\n')NEWLINENEWLINE @staticmethodNEWLINE def c():NEWLINE n = int(sys.stdin.readline().rstrip())NEWLINE divs = NumberTheory.find_divisors(n)NEWLINE print(len(str(divs[bi_l(divs, math.sqrt(n))])))NEWLINENEWLINENEWLINE @staticmethodNEWLINE def d():NEWLINE c = Combinatorics.chooseNEWLINE n, a, b, *v = map(int, sys.stdin.read().split())NEWLINE v.sort()NEWLINE print(sum(v[-a:])/a)NEWLINE l, r = bi_l(v, v[-a]), bi_r(v, v[-a])NEWLINE print(sum(c(r-l, i) for i in range(r-n+a, r-max(l,n-b)+1)) if r==n else c(r-l, r-n+a))NEWLINENEWLINENEWLINE class ABC058:NEWLINE @staticmethodNEWLINE def a():NEWLINE a, b, c = map(int, sys.stdin.readline().split())NEWLINE print('YES' if c-b==b-a else 'NO')NEWLINENEWLINE @staticmethodNEWLINE def b():NEWLINE s, t = sys.stdin.read().split()NEWLINE a = ''NEWLINE for i in range(len(t)): a += s[i]+t[i]NEWLINE if len(s)>len(t): a += s[-1]NEWLINE print(a)NEWLINENEWLINE @staticmethodNEWLINE def c():NEWLINE n, *s = sys.stdin.read().split()NEWLINE res = {c: 100 for c in 
string.ascii_lowercase}NEWLINE for counter in map(Counter, s):NEWLINE for c, x, in res.items(): res[c] = min(x, counter[c])NEWLINE t = ''NEWLINE for c, x in sorted(res.items()): t += c*xNEWLINE print(t)NEWLINENEWLINE @staticmethodNEWLINE def d():NEWLINE n, m, *xy = map(int, sys.stdin.read().split())NEWLINE x, y = np.array(xy[:n]), np.array(xy[n:])NEWLINE print((x*(np.arange(n)+1)-np.cumsum(x)).sum()%MOD*((y*(np.arange(m)+1)-np.cumsum(y)).sum()%MOD)%MOD)NEWLINENEWLINE class ABC059:NEWLINE @staticmethodNEWLINE def a():NEWLINE def initial(s): return s[0].upper()NEWLINE print(''.join(map(initial, sys.stdin.readline().split())))NEWLINENEWLINENEWLINE @staticmethodNEWLINE def b():NEWLINE a, b = sys.stdin.read().split()NEWLINE la, lb = len(a), len(b)NEWLINE print('GREATER' if la>lb else 'LESS' if la<lb else 'GREATER' if a>b else 'LESS' if a<b else 'EQUAL')NEWLINENEWLINE @staticmethodNEWLINE def c():NEWLINE n, *a = map(int, sys.stdin.read().split())NEWLINE c = s = 0NEWLINE for i in range(n):NEWLINE s += a[i]NEWLINE if i&1 and s>=0: c += s+1; s=-1NEWLINE elif i&1^1 and s<=0: c += 1-s; s=1NEWLINE c1 = cNEWLINE c = s = 0NEWLINE for i in range(n):NEWLINE s += a[i]NEWLINE if i&1 and s<=0: c += 1-s; s=1NEWLINE elif i&1^1 and s>=0: c += s+1; s=-1NEWLINE c2 = cNEWLINE print(min(c1, c2))NEWLINENEWLINENEWLINE @staticmethodNEWLINE def d():NEWLINE x, y = map(int, sys.stdin.readline().split())NEWLINE print('Brown' if abs(x-y)<=1 else 'Alice')NEWLINENEWLINENEWLINE class ABC060:NEWLINE @staticmethodNEWLINE def a():NEWLINE a, b, c = sys.stdin.readline().split()NEWLINE print('YES' if a[-1]==b[0] and b[-1]==c[0] else 'NO')NEWLINENEWLINE @staticmethodNEWLINE def b():NEWLINE a, b, c = map(int, sys.stdin.readline().split())NEWLINE print('NO' if c%NumberTheory.gcd(a,b) else 'YES')NEWLINENEWLINENEWLINE @staticmethodNEWLINE def c():NEWLINE n, t, *a = map(int, sys.stdin.read().split())NEWLINE print(sum(min(a[i+1]-a[i], t) for i in range(n-1))+t)NEWLINENEWLINENEWLINE @staticmethodNEWLINE def 
d():NEWLINE n, W, *wv = map(int, sys.stdin.read().split())NEWLINE v, w0 = [[] for _ in range(4)], wv[0]NEWLINE for a, b in zip(*[iter(wv)]*2): v[a-w0].append(b)NEWLINE for i in range(4):NEWLINE v[i] = (sorted(v[i])+[0])[::-1]NEWLINE *v[i], = itertools.accumulate(v[i])NEWLINE global res; res = 0NEWLINE @lru_cache(maxsize=None)NEWLINE def dfs(i,j,k):NEWLINE if i>=len(v[0]) or j>=len(v[1]) or k>=len(v[2]): returnNEWLINE w = j+2*k + (i+j+k)*w0NEWLINE if w > W: returnNEWLINE l = min(len(v[3])-1, (W-w)//(w0+3))NEWLINE global res; res = max(res, v[0][i]+v[1][j]+v[2][k]+v[3][l])NEWLINE dfs(i+1,j,k); dfs(i,j+1,k); dfs(i,j,k+1)NEWLINE dfs(0,0,0)NEWLINE print(res)NEWLINENEWLINE class ABC061:NEWLINE @staticmethodNEWLINE def a():NEWLINE a, b, c = map(int, sys.stdin.readline().split())NEWLINE print('Yes' if a <= c <= b else 'No')NEWLINENEWLINE @staticmethodNEWLINE def b():NEWLINE n, m, *ab = map(int, sys.stdin.read().split())NEWLINE ab = np.array(ab) - 1NEWLINE g = np.zeros(n, dtype=np.int32)NEWLINE np.add.at(g, ab, 1)NEWLINE print(*g, sep='\n')NEWLINENEWLINENEWLINE @staticmethodNEWLINE def c():NEWLINE n, k, *ab = map(int, sys.stdin.read().split())NEWLINE ab = np.transpose(np.array(ab).reshape(n,2))NEWLINE a, b = ab[:, np.argsort(ab[0])]NEWLINE print(a[np.cumsum(b)>=k][0])NEWLINENEWLINE @staticmethodNEWLINE def d():NEWLINE n, m, *abc = map(int, sys.stdin.read().split())NEWLINE a, b, c = np.array(abc).reshape(m, 3).T; a -= 1; b -= 1; c *= -1NEWLINE g = csr_matrix(([1]*(m+1), (np.append(a, n-1), np.append(b, 0))), (n, n))NEWLINE _, labels = connected_components(g, connection='strong')NEWLINE bl = (labels[a]==labels[0]) & (labels[b]==labels[0])NEWLINE g = csr_matrix((c[bl], (a[bl], b[bl])), (n, n))NEWLINE try: print(-shortest_path(g, method='BF', directed=True, indices=0)[-1].astype(int))NEWLINE except: print('inf')NEWLINENEWLINE @staticmethodNEWLINE def d_2():NEWLINE n, m, *abc = map(int, sys.stdin.read().split())NEWLINE a, b, c = np.array(abc).reshape(m, 3).T; a -= 1; b -= 1; c 
*= -1NEWLINE d = np.full(n, np.inf); d[0] = 0NEWLINE for _ in range(n-1): np.minimum.at(d, b, d[a]+c)NEWLINE neg_cycle = np.zeros(n, dtype=np.bool)NEWLINE for _ in range(n):NEWLINE np.logical_or.at(neg_cycle, b, d[a]+c<d[b])NEWLINE np.minimum.at(d, b, d[a]+c)NEWLINE print(inf if neg_cycle[-1] else -d[-1].astype(int))NEWLINENEWLINENEWLINE class ABC062:NEWLINE @staticmethodNEWLINE def a():NEWLINE g = [0, 2, 0, 1, 0, 1, 0, 0, 1, 0, 1, 0]NEWLINE x, y = map(int, sys.stdin.readline().split())NEWLINE print('Yes' if g[x-1]==g[y-1] else 'No')NEWLINENEWLINE @staticmethodNEWLINE def b():NEWLINE h, w = map(int, sys.stdin.readline().split())NEWLINE a = np.array([list(s) for s in sys.stdin.read().split()], dtype='U1')NEWLINE a = np.pad(a, pad_width=1, constant_values='#')NEWLINE for s in a: print(''.join(s))NEWLINENEWLINE @staticmethodNEWLINE def c():NEWLINE h, w = map(int, sys.stdin.readline().split())NEWLINE if h*w%3==0: print(0); returnNEWLINE def minimize(h, w):NEWLINE return min(h, *(s[-1]-s[0] for x in range(w//3, w//3+2) for s in (sorted([h*x, h//2*(w-x), (h+1)//2*(w-x)]),)))NEWLINENEWLINE print(min(minimize(h,w), minimize(w,h)))NEWLINENEWLINENEWLINE @staticmethodNEWLINE def d():NEWLINE n, *a = map(int, sys.stdin.read().split())NEWLINE a = np.array(a)NEWLINENEWLINE def optimize(a):NEWLINE a = list(a)NEWLINE l, r = a[:n], a[n:]; heapify(l)NEWLINE s = [None]*(n+1); s[0] = sum(l)NEWLINE for i in range(n):NEWLINE x = heappop(l)NEWLINE heappush(l, max(x, r[i]))NEWLINE s[i+1] = s[i]+max(0, r[i]-x)NEWLINE return np.array(s)NEWLINENEWLINE print((optimize(a[:2*n]) + optimize(-a[-1:n-1:-1])[::-1]).max())NEWLINENEWLINE class ABC063:NEWLINE @staticmethodNEWLINE def a():NEWLINE a = sum(map(int, sys.stdin.readline().split()))NEWLINE print('error' if a>=10 else a)NEWLINENEWLINENEWLINE @staticmethodNEWLINE def b():NEWLINE s = sys.stdin.readline().rstrip()NEWLINE print('yes' if len(set(s))==len(s) else 'no')NEWLINENEWLINE @staticmethodNEWLINE def c():NEWLINE n, *a = map(int, 
sys.stdin.read().split())NEWLINE a = np.array(a)NEWLINE s = a.sum()NEWLINE if s%10: print(s)NEWLINE elif not np.count_nonzero(a%10): print(0)NEWLINE else: print(s-a[a%10!=0].min())NEWLINENEWLINENEWLINENEWLINE @staticmethodNEWLINE def d():NEWLINE n, a, b, *h = map(int, sys.stdin.read().split())NEWLINE h = np.array(h)NEWLINE d = a-bNEWLINENEWLINE def possible(c):NEWLINE hh = h.copy()NEWLINE np.maximum(hh-b*c, 0, out=hh)NEWLINE return ((hh+d-1)//d).sum() <= cNEWLINENEWLINE def binary_search():NEWLINE lo, hi = 0, 10**9NEWLINE while hi-lo > 1:NEWLINE c = (lo+hi)//2NEWLINE if possible(c): hi = cNEWLINE else: lo = cNEWLINE return hiNEWLINENEWLINE print(binary_search())NEWLINENEWLINE class ABC064:NEWLINE @staticmethodNEWLINE def a():NEWLINE r, g, b = map(int, sys.stdin.readline().split())NEWLINE print('NO' if (10*g+b)%4 else 'YES')NEWLINENEWLINE @staticmethodNEWLINE def b():NEWLINE n, *a = map(int, sys.stdin.read().split())NEWLINE a.sort()NEWLINE print(a[-1]-a[0])NEWLINENEWLINE @staticmethodNEWLINE def c():NEWLINE n, *a = map(int, sys.stdin.read().split())NEWLINE a = np.bincount(np.minimum(np.array(a)//400, 8), minlength=9)NEWLINE mx = np.count_nonzero(a[:-1]) + a[-1]NEWLINE mn = max(mx-a[-1], 1)NEWLINE print(mn, mx)NEWLINENEWLINE @staticmethodNEWLINE def d():NEWLINE n, s = sys.stdin.read().split()NEWLINE l = r = 0NEWLINE for c in s:NEWLINE if c=='(': r += 1NEWLINE else:NEWLINE if r==0: l += 1NEWLINE else: r -= 1NEWLINE print('('*l+s+')'*r)NEWLINENEWLINE class ABC065:NEWLINE @staticmethodNEWLINE def a():NEWLINE x, a, b = map(int, sys.stdin.readline().split())NEWLINE y = -a+bNEWLINE print('delicious' if y<=0 else 'safe' if y<=x else 'dangerous')NEWLINENEWLINE @staticmethodNEWLINE def b():NEWLINE n, *a = [int(x)-1 for x in sys.stdin.read().split()]NEWLINE i = 0NEWLINE for c in range(n):NEWLINE i = a[i]NEWLINE if i == 1: print(c+1); returnNEWLINE print(-1)NEWLINENEWLINE @staticmethodNEWLINE def c():NEWLINE n, m = map(int, sys.stdin.readline().split())NEWLINE d = 
abs(n-m)NEWLINE if d >= 2: print(0); returnNEWLINE fac, _ = Algebra.generate_fac_ifac(10**5)NEWLINE print(fac[n]*fac[m]*(1 if d else 2)%MOD)NEWLINENEWLINE @staticmethodNEWLINE def d():NEWLINE n, *xy = map(int, sys.stdin.read().split())NEWLINE x, y = np.array(xy).reshape(n,2).TNEWLINE i = np.argsort(x); ax, bx, cx = i[:-1], i[1:], x[i[1:],]-x[i[:-1]]NEWLINE i = np.argsort(y); ay, by, cy = i[:-1], i[1:], y[i[1:],]-y[i[:-1]]NEWLINE e = np.vstack([np.hstack([ax,ay]),np.hstack([bx,by]),np.hstack([cx,cy])])NEWLINE e = e[:,np.argsort(e[-1])]NEWLINE _, i = np.unique(e[:-1], return_index=True, axis=1)NEWLINE a, b, c = e[:,i]NEWLINE print(minimum_spanning_tree(csr_matrix((c,(a,b)), (n,n))).astype(np.int64).sum())NEWLINENEWLINENEWLINE @staticmethodNEWLINE def d_2():NEWLINE n, *xy = map(int, sys.stdin.read().split())NEWLINE x, y = xy[::2], xy[1::2]NEWLINE g = GeometryTopology.Graph(n)NEWLINE def make(a):NEWLINE b = sorted(enumerate(a), key=lambda x: x[1])NEWLINE for i in range(n-1):NEWLINE u, v, w = b[i][0], b[i+1][0], b[i+1][1]-b[i][1]NEWLINE for u, v in [(v,u), (u,v)]:NEWLINE if not v in g.edges[u]: g.add_edge(u, v, weight=w)NEWLINE else: g.edges[u][v].weight = min(g.edges[u][v].weight, w)NEWLINE make(x); make(y)NEWLINE _, d = g.kruskal()NEWLINE # _, d = g.prim()NEWLINE # _, d = g.boruvka()NEWLINE print(d)NEWLINENEWLINENEWLINENEWLINE class ABC066:NEWLINE @staticmethodNEWLINE def a():NEWLINE print(sum(sorted(map(int, sys.stdin.readline().split()))[:-1]))NEWLINENEWLINE @staticmethodNEWLINE def b():NEWLINE s = sys.stdin.readline().rstrip()NEWLINE def f(s):NEWLINE n = len(s)//2NEWLINE return s[:n] == s[n:]NEWLINE for i in range(len(s)-2, 0, -2):NEWLINE if f(s[:i]): print(i); returnNEWLINENEWLINE @staticmethodNEWLINE def c():NEWLINE n, *a = map(int, sys.stdin.read().split())NEWLINE b = deque()NEWLINE for i in range(n):NEWLINE if i&1: b.appendleft(a[i])NEWLINE else: b.append(a[i])NEWLINE if n&1: b.reverse()NEWLINE print(*b)NEWLINENEWLINE @staticmethodNEWLINE def d():NEWLINE n, *a 
= map(int, sys.stdin.read().split())NEWLINE tmp = [None]*(n+1)NEWLINE for i in range(n+1):NEWLINE if tmp[a[i]] is not None: d=tmp[a[i]]+n-i; breakNEWLINE tmp[a[i]] = iNEWLINE k = np.arange(1, n+2)NEWLINE c = Combinatorics.CombinationsMod(n+1, MOD)NEWLINE print(*((c(n+1,k)-c(d,k-1))%MOD), sep='\n')NEWLINENEWLINENEWLINE class ABC067:NEWLINE @staticmethodNEWLINE def a():NEWLINE a, b = map(int, sys.stdin.readline().split())NEWLINE print('Impossible' if a%3 and b%3 and (a+b)%3 else 'Possible')NEWLINENEWLINE @staticmethodNEWLINE def b():NEWLINE n, k, *l = map(int, sys.stdin.read().split())NEWLINE print(sum(sorted(l)[-k:]))NEWLINENEWLINENEWLINE @staticmethodNEWLINE def c():NEWLINE n, *a = map(int, sys.stdin.read().split())NEWLINE a = np.array(a)NEWLINE np.cumsum(a, out=a)NEWLINE print(np.absolute(a[-1]-2*a[:-1]).min())NEWLINENEWLINENEWLINE @staticmethodNEWLINE def d():NEWLINE n, *ab = map(int, sys.stdin.read().split())NEWLINE g = GeometryTopology.Graph(n)NEWLINE for a, b in zip(*[iter(ab)]*2):NEWLINE a -= 1; b -= 1NEWLINE g.add_edge(a, b); g.add_edge(b,a)NEWLINE d1, d2 = g.bfs(0), g.bfs(n-1)NEWLINE print('Fennec' if sum(d1[i]<=d2[i] for i in range(n)) > n//2 else 'Snuke')NEWLINENEWLINE class ABC068:NEWLINE @staticmethodNEWLINE def a():NEWLINE print('ABC'+sys.stdin.readline().rstrip())NEWLINENEWLINE @staticmethodNEWLINE def b():NEWLINE print(2**math.floor(math.log2(int(sys.stdin.readline().rstrip()))))NEWLINENEWLINE @staticmethodNEWLINE def c():NEWLINE n, m, *ab = map(int, sys.stdin.read().split())NEWLINE a, b = np.array(ab).reshape(m, 2).TNEWLINE d = shortest_path(csr_matrix(([1]*m, (a-1, b-1)), (n,n)), method='D', directed=False, indices=0).astype(np.int32)NEWLINE print('POSSIBLE' if d[-1]==2 else 'IMPOSSIBLE')NEWLINENEWLINE @staticmethodNEWLINE def d():NEWLINE k = int(sys.stdin.readline().rstrip())NEWLINE n = 50; print(n)NEWLINE q,r = divmod(k,n); a = np.arange(n-1,-1,-1)+q; a[:r]+=1; print(*a)NEWLINENEWLINE class ABC069:NEWLINE @staticmethodNEWLINE def a():NEWLINE n, m 
= map(int, sys.stdin.readline().split())NEWLINE print((n-1)*(m-1))NEWLINENEWLINENEWLINE @staticmethodNEWLINE def b():NEWLINE passNEWLINENEWLINE @staticmethodNEWLINE def c():NEWLINE passNEWLINENEWLINENEWLINE @staticmethodNEWLINE def d():NEWLINE h, w, n, *a = map(int, sys.stdin.read().split())NEWLINE c = [i+1 for i in range(n) for j in range(a[i])]NEWLINE for i in range(h):NEWLINE row = c[i*w:(i+1)*w]NEWLINE if i&1: row = row[::-1]NEWLINE print(*row)NEWLINENEWLINE class ABC070:NEWLINE @staticmethodNEWLINE def d():NEWLINE n = int(sys.stdin.readline().rstrip())NEWLINE g = GeometryTopology.Graph(n)NEWLINE for _ in range(n-1):NEWLINE a, b, c = map(int, sys.stdin.readline().split()); a-=1; b-=1NEWLINE g.add_edge(a, b, weight=c); g.add_edge(b, a, weight=c)NEWLINE q, k = map(int, sys.stdin.readline().split())NEWLINE d = g.bfs(k-1)NEWLINE for _ in range(q):NEWLINE x, y = map(int, sys.stdin.readline().split()); x-=1; y-=1NEWLINE print(d[x]+d[y])NEWLINENEWLINE class ABC071:NEWLINE @staticmethodNEWLINE def d():NEWLINE n, *s = sys.stdin.read().split(); n = int(n)NEWLINE s = list(zip(*s))NEWLINE dp = [0]*n; dp[0] = 3 if s[0][0]==s[0][1] else 6NEWLINE for i in range(1,n):NEWLINE dp[i] = dp[i-1]NEWLINE if s[i][0]==s[i-1][0]: continueNEWLINE dp[i] *= 2 if s[i-1][0]==s[i-1][1] else 3 if s[i][0]!=s[i][1] else 1NEWLINE dp[i] %= MODNEWLINE print(dp[-1])NEWLINENEWLINE class ABC072:NEWLINE @staticmethodNEWLINE def d():NEWLINE n, *p = map(int, sys.stdin.read().split())NEWLINE p += [-1]NEWLINE cnt, i = 0, 0NEWLINE while i < n:NEWLINE if p[i]==i+1:NEWLINE cnt += p[i]==i+1NEWLINE if p[i+1]==i+2: i += 1NEWLINE i += 1NEWLINE print(cnt)NEWLINENEWLINENEWLINE class ABC073:NEWLINE @staticmethodNEWLINE def a():NEWLINE passNEWLINENEWLINE @staticmethodNEWLINE def b():NEWLINE passNEWLINENEWLINE @staticmethodNEWLINE def c():NEWLINE passNEWLINENEWLINE @staticmethodNEWLINE def d():NEWLINE n, m, r, *I = map(int, sys.stdin.read().split())NEWLINE I = np.array(I)NEWLINE a, b, c = I[r:].reshape(m,3).TNEWLINE d 
= shortest_path(csr_matrix((c, (a-1, b-1)), (n,n)), method='FW', directed=False).astype(np.int32)NEWLINE r = np.array([*itertools.permutations(I[:r]-1)])NEWLINE print((d[r[:,:-1], r[:,1:]].sum(axis=1)).min())NEWLINENEWLINE class ABC074:NEWLINE @staticmethodNEWLINE def a():NEWLINE passNEWLINENEWLINE @staticmethodNEWLINE def b():NEWLINE passNEWLINENEWLINE @staticmethodNEWLINE def c():NEWLINE passNEWLINENEWLINE @staticmethodNEWLINE def d():NEWLINE n, *a = map(int, sys.stdin.read().split())NEWLINE a = np.array(a, dtype=np.int32).reshape(n,n)NEWLINE b = shortest_path(a, method='FW').astype(np.int32)NEWLINE if (b < a).any(): print(-1); returnNEWLINE np.fill_diagonal(b, 10**9)NEWLINE a[np.any(b[:,None]+b<=a[:,:,None], axis=2)] = 0NEWLINE print(a.sum()//2)NEWLINENEWLINENEWLINENEWLINE class ABC075:NEWLINE @staticmethodNEWLINE def a():NEWLINE passNEWLINENEWLINE @staticmethodNEWLINE def b():NEWLINE passNEWLINENEWLINE @staticmethodNEWLINE def c():NEWLINE passNEWLINENEWLINE @staticmethodNEWLINE def d():NEWLINE n, k, *xy = map(int, sys.stdin.read().split())NEWLINE xy = np.array(xy).reshape(n,2)NEWLINE x_y = xy.copy()[np.argsort(xy[:,0])]NEWLINE y_x = xy.copy()[np.argsort(xy[:,1])]NEWLINE comb = np.array([*itertools.combinations(range(n),2)])NEWLINE i1, i2 = comb.TNEWLINE j1, j2 = comb[None,:].TNEWLINE s = (y_x[:,1][i2]-y_x[:,1][i1]) * (x_y[:,0][j2]-x_y[:,0][j1])NEWLINE c = np.zeros((n+1,n+1), dtype=np.int64)NEWLINE for i in range(n): c[i+1, 1:] += c[i, 1:] + (y_x[i,0]<=x_y[:,0])NEWLINE a = c[i2+1, j2+1] - c[i2+1, j1] - c[i1, j2+1] + c[i1, j1]NEWLINE print(s[a>=k].min())NEWLINENEWLINENEWLINE class ABC076:NEWLINE @staticmethodNEWLINE def d():NEWLINE n, *tv = map(int, sys.stdin.read().split())NEWLINE t, v = np.array(tv).reshape(2, n)NEWLINE t = np.pad(t, pad_width=[2,1], constant_values=0)NEWLINE np.cumsum(t, out=t)NEWLINE l, r = t[:-1], t[1:]NEWLINE v = np.pad(v, pad_width=[1,1], constant_values=0)NEWLINE x = np.arange(0, r[-1]+0.1, 0.5, dtype=np.float32)[:,None]NEWLINE # y = 
np.stack([v-(x-l), np.zeros(r[-1]*2+1, dtype=np.float32)[:,None]+v, v+(x-r)]).max(axis=0).min(axis=1)NEWLINE mx = v-(x-l); np.maximum(mx, v, out=mx); np.maximum(mx, v+(x-r), out=mx)NEWLINE y = mx.min(axis=1)NEWLINE print(((y[:-1]+y[1:])/4).sum())NEWLINENEWLINENEWLINE class ABC077:NEWLINE @staticmethodNEWLINE def d():NEWLINE k = int(sys.stdin.readline().rstrip())NEWLINE g = GeometryTopology.Graph(k)NEWLINE for i in range(k):NEWLINE g.add_edge(i, i*10%k, weight=0)NEWLINE g.add_edge(i, (i+1)%k, update=False, weight=1)NEWLINE print(1+g.bfs01(1)[0])NEWLINENEWLINENEWLINE class ABC078:NEWLINE @staticmethodNEWLINE def d():NEWLINE n, z, w, *a = map(int, sys.stdin.read().split())NEWLINE print(abs(a[0]-w) if n==1 else max(abs(a[-1]-w), abs(a[-1]-a[-2])))NEWLINENEWLINE class ABC079:NEWLINE @staticmethodNEWLINE def d():NEWLINE h, w, *I = map(int, sys.stdin.read().split())NEWLINE I = np.array(I)NEWLINE c = I[:100].reshape(10,10)NEWLINE a = I[100:].reshape(h,w)NEWLINE c = shortest_path(c.T, method='D', indices=1).astype(np.int32)NEWLINE print(c[a[a!=-1]].sum())NEWLINENEWLINE class ABC080:NEWLINE @staticmethodNEWLINE def d():NEWLINE n, c, *stc = map(int, sys.stdin.read().split())NEWLINE using = np.zeros((c, 10**5+2), dtype=np.int8)NEWLINE s, t, c = np.array(stc).reshape(n,3).TNEWLINE np.add.at(using, (c-1, s), 1)NEWLINE np.subtract.at(using, (c-1, t+1), 1)NEWLINE np.cumsum(using, axis=1, out=using)NEWLINE print(np.count_nonzero(using, axis=0).max())NEWLINENEWLINE class ABC081:NEWLINE @staticmethodNEWLINE def d():NEWLINE n, *a = map(int, sys.stdin.read().split())NEWLINE a = np.array(a)NEWLINE i = np.argmax(np.absolute(a))NEWLINE print(2*n-1)NEWLINE for j in range(n): print(i+1, j+1)NEWLINE if a[i] >= 0:NEWLINE for j in range(n-1): print(j+1, j+2)NEWLINE else:NEWLINE for j in range(n-1, 0, -1): print(j+1, j)NEWLINENEWLINENEWLINE class ABC082:NEWLINE @staticmethodNEWLINE def d():NEWLINE s = [1 if c=='T' else 0 for c in sys.stdin.readline().rstrip()] + [1]NEWLINE x, y = map(int, 
sys.stdin.readline().split())NEWLINE i = j = 0NEWLINE while s[i]==0: x -= 1; i +=1NEWLINE d = [[], []]NEWLINE while i < len(s):NEWLINE if s[i]: j ^= 1; i += 1; continueNEWLINE c = 0NEWLINE while s[i]==0: c += 1; i += 1NEWLINE d[j].append(c)NEWLINENEWLINE def possible(a, s):NEWLINE dp = np.zeros(sum(a)+1, dtype=np.bool)NEWLINE if s >= len(dp): return FalseNEWLINE dp[-1] = TrueNEWLINE for x in a: dp[:-2*x] += dp[2*x:]NEWLINE return dp[s]NEWLINENEWLINE print('Yes' if possible(d[0], abs(x)) & possible(d[1], abs(y)) else 'No')NEWLINENEWLINENEWLINE class ABC083:NEWLINE @staticmethodNEWLINE def d():NEWLINE s = np.array(list(sys.stdin.readline().rstrip()), dtype=np.int8)NEWLINE k = np.argwhere(s[:-1] != s[1:]).ravel()NEWLINE if not k.size: print(len(s)); returnNEWLINE print(np.maximum(k+1, len(s)-1-k).min())NEWLINENEWLINENEWLINE class ABC084:NEWLINE @staticmethodNEWLINE def d():NEWLINE pn = NumberTheory.PrimeNumbers()NEWLINE n = np.arange(10**5+1)NEWLINE cnt = (pn.is_prime[n] & pn.is_prime[(n+1)//2]).astype(np.int32)NEWLINE np.cumsum(cnt, out=cnt)NEWLINE q, *lr = map(int, sys.stdin.read().split())NEWLINE l, r = np.array(lr).reshape(q, 2).TNEWLINE print(*(cnt[r]-cnt[l-1]), sep='\n')NEWLINENEWLINENEWLINE class ABC085:NEWLINE @staticmethodNEWLINE def d():NEWLINE n, h, *ab = map(int, sys.stdin.read().split())NEWLINE a, b = np.array(ab).reshape(n, 2).TNEWLINE a = np.sort(a)[-1]; b = np.sort(b[b>=a])[::-1]NEWLINE np.cumsum(b, out=b)NEWLINE print(np.searchsorted(b, h, side='left')+1 if h<=b[-1] else len(b)+(h-b[-1]+a-1)//a)NEWLINENEWLINE class ABC086:NEWLINE @staticmethodNEWLINE def d():NEWLINE n, k = map(int, sys.stdin.readline().split())NEWLINE xy = []NEWLINE for _ in range(n):NEWLINE a, b, c = sys.stdin.readline().split()NEWLINE a, b = int(a), int(b)NEWLINE b += k*(c=='W')NEWLINE xy.append((a,b))NEWLINE x, y = np.array(xy, dtype=np.int32).T % (2*k)NEWLINE s = np.zeros((3*k, 3*k), dtype=np.int32)NEWLINE np.add.at(s, (y,x), 1); np.add.at(s, (y+k, x+k), 1); np.add.at(s, (y+k, x), 
-1); np.add.at(s, (y, x+k), -1)NEWLINE del x; del yNEWLINE s = s.cumsum(axis=0).cumsum(axis=1)NEWLINE s[:k] += s[-k:]; s[:, :k] += s[:, -k:]; s = s[:-k, :-k]NEWLINE s[:k, :k] += s[-k:, -k:]; s[:k, -k:] += s[-k:, :k]; s = s[:k]NEWLINE print(s.max())NEWLINENEWLINE class ABC087:NEWLINE @staticmethodNEWLINE def d():NEWLINE n, m, *lrd = map(int, sys.stdin.read().split())NEWLINE g = GeometryTopology.Graph(n)NEWLINE for l, r, d in zip(*[iter(lrd)]*3):NEWLINE l -= 1; r -= 1NEWLINE g.add_edge(l, r, weight=d); g.add_edge(r, l, weight=-d)NEWLINENEWLINE x = [None] * nNEWLINE @lru_cache(maxsize=None)NEWLINE def dfs(u, y):NEWLINE if x[u] is not None:NEWLINE if x[u] != y: raise Exception('conflict!')NEWLINE returnNEWLINE x[u] = yNEWLINE for v, e in g.edges[u].items(): dfs(v, y+e.weight)NEWLINENEWLINE for u in range(n):NEWLINE if x[u] is not None: continueNEWLINE # try: dfs(u, 0)NEWLINE # except: print('No'); returnNEWLINE stack = [(u, 0)]NEWLINE while stack:NEWLINE u, y = stack.pop()NEWLINE if x[u] is not None:NEWLINE if x[u] != y: print('No'); returnNEWLINE continueNEWLINE x[u] = yNEWLINE for v, e in g.edges[u].items(): stack.append((v, y+e.weight))NEWLINE print('Yes')NEWLINENEWLINE class ABC088:NEWLINE @staticmethodNEWLINE def d():NEWLINE h, w = map(int, sys.stdin.readline().split())NEWLINE s = ''.join(sys.stdin.read().split())NEWLINE g = GeometryTopology.Graph(h*w)NEWLINE cnt = h*wNEWLINE for u in range(h*w):NEWLINE if s[u] == '#': cnt -= 1; continueNEWLINE i, j = divmod(u, w)NEWLINE if i>0 and s[u-w]=='.': g.add_edge(u, u-w, weight=1)NEWLINE if i<h-1 and s[u+w]=='.': g.add_edge(u, u+w, weight=1)NEWLINE if j>0 and s[u-1]=='.': g.add_edge(u, u-1, weight=1)NEWLINE if j<w-1 and s[u+1]=='.': g.add_edge(u, u+1, weight=1)NEWLINE d = g.bfs(0)NEWLINE print(-1 if d[-1]==inf else cnt-d[-1]-1)NEWLINENEWLINE class ABC089:NEWLINE @staticmethodNEWLINE def d():NEWLINE h, w, d, *I = map(int, sys.stdin.read().split())NEWLINE I = np.array(I)NEWLINE a = I[:h*w].reshape(h,w)NEWLINE l, r = 
I[h*w+1:].reshape(-1,2).T - 1NEWLINE yx = np.pad(np.argwhere(a)[np.argsort(a.ravel())], pad_width=[(0,d), (0,0)], constant_values=0)NEWLINE a = np.zeros(h*w+d, dtype=np.int32)NEWLINE for i in range(0, h*w-d, d):NEWLINE a[i+d:i+2*d] = a[i:i+d] + np.absolute(yx[i+d:i+2*d]-yx[i:i+d]).sum(axis=1)NEWLINE print(*(a[r]-a[l]), sep='\n')NEWLINENEWLINE class ABC090:NEWLINE @staticmethodNEWLINE def d():NEWLINE n, k = map(int, sys.stdin.readline().split())NEWLINE b = np.arange(k+1, n+1)NEWLINE print((n//b*(b-k) + np.maximum(0, (n%b)-k+1*(k!=0))).sum())NEWLINENEWLINE class ABC091:NEWLINE @staticmethodNEWLINE def d():NEWLINE n, *ab = map(int, sys.stdin.read().split())NEWLINE c = 2**np.arange(30)NEWLINE a, b = np.sort(np.array(ab).reshape(2,n)[:, None] % (2*c)[:,None])NEWLINE res = 0NEWLINE for i in range(30):NEWLINE j = np.searchsorted(b[i], np.arange(1, 5)[:,None]*c[i]-a[i]).sum(axis=1)NEWLINE j[1::2] *= -1NEWLINE res += (j.sum()&1) * c[i]NEWLINE print(res)NEWLINENEWLINE class ABC092:NEWLINE @staticmethodNEWLINE def d():NEWLINE a, b = map(int, sys.stdin.readline().split())NEWLINE def make(color, cnt):NEWLINE g = [[color^1]*100 for _ in range(21)]NEWLINE for i in range(1, 21, 2):NEWLINE for j in range(0, 100, 2):NEWLINE if not cnt: return gNEWLINE g[i][j] = color; cnt -= 1NEWLINE g = make(0,a-1) + make(1,b-1)NEWLINE def convert(s): return ''.join('#' if c else '.' 
for c in s)NEWLINE print(42, 100)NEWLINE print(*map(convert, g), sep='\n')NEWLINENEWLINE class ABC093:NEWLINE @staticmethodNEWLINE def d():NEWLINE q, *ab = map(int, sys.stdin.read().split())NEWLINE a, b = np.sort(np.array(ab).reshape(q,2)).TNEWLINE x = np.sqrt(a*b).astype(int)NEWLINE x[x*x==a*b] -= 1NEWLINE res = a-1NEWLINE res += (a-1) * (b-a<=1)NEWLINE res += (x+np.minimum(x-a-1*(x*(x+1)>=a*b), b-x-1)) * (b-a>=2)NEWLINENEWLINE # res = 0NEWLINE # res += 2*(a-1) * (b-a<=1)NEWLINE # res += (2*x-1 - 1*(x*(x+1)>=a*b)) * (b-a >= 2)NEWLINENEWLINE print(*res, sep='\n')NEWLINENEWLINENEWLINENEWLINE class ABC094:NEWLINE @staticmethodNEWLINE def d():NEWLINE n, *a = map(int, sys.stdin.read().split())NEWLINE a.sort()NEWLINE print(a[-1], end=' ')NEWLINE b = (a[-1]+1)//2NEWLINE i = bi_l(a, b)NEWLINE print(a[-2] if i==n-1 else a[i-1] if b-a[i-1]<=a[i]-b else a[i])NEWLINENEWLINENEWLINENEWLINE class ABC095:NEWLINE @staticmethodNEWLINE def d():NEWLINE n, c, *xv = map(int, sys.stdin.read().split())NEWLINENEWLINE def make(xv):NEWLINE x, v = xv.TNEWLINE s = np.cumsum(v)-x; rs = s-xNEWLINE np.maximum.accumulate(s, out=s)NEWLINE np.maximum.accumulate(rs, out=rs)NEWLINE return s, rsNEWLINENEWLINE xv = np.pad(np.array(xv).reshape(n,2), pad_width=[(1,0), (0,0)], constant_values=0)NEWLINE ls, lrs = make(xv)NEWLINE xv[1:, 0] = c-xv[1:, 0]; xv[1:] = xv[-1:0:-1]NEWLINE rs, rrs = make(xv)NEWLINE print(np.maximum(ls+rrs[::-1], rs+lrs[::-1]).max())NEWLINENEWLINE class ABC096:NEWLINE @staticmethodNEWLINE def d():NEWLINE n = int(sys.stdin.readline().rstrip())NEWLINE pn = NumberTheory.PrimeNumbers()NEWLINE a = [p for p in pn if p%5==1]NEWLINE print(*a[:n])NEWLINENEWLINENEWLINE class ABC097:NEWLINE @staticmethodNEWLINE def d():NEWLINE n, m = map(int, sys.stdin.readline().split())NEWLINE p = [int(x)-1 for x in sys.stdin.readline().split()]NEWLINE uf = GeometryTopology.Graph(n); uf.init_dsu()NEWLINE for x, y in zip(*[map(int, sys.stdin.read().split())]*2): uf.unite(x-1, y-1)NEWLINE groups = [set(p[u] 
for u in g) for g in uf.groups()]NEWLINE print(sum(i in groups[uf.find(i)] for i in range(n)))NEWLINENEWLINE class ABC098:NEWLINE @staticmethodNEWLINE def d():NEWLINE n, *a = map(int, sys.stdin.read().split())NEWLINE r = s = cnt = 0NEWLINE for l in range(n):NEWLINE while r<n and not(s&a[r]): s ^= a[r]; r += 1NEWLINE cnt += r-l; s ^= a[l]NEWLINE print(cnt)NEWLINENEWLINENEWLINE class ABC099:NEWLINE @staticmethodNEWLINE def d():NEWLINE n, c, *I = map(int, sys.stdin.read().split())NEWLINE I = np.array(I)NEWLINE d = I[:c*c].reshape(c,c)NEWLINE r = np.arange(n*n); r = (r//n + r%n)%3NEWLINE a = d[I[c*c:]-1, np.arange(c)[:,None]]NEWLINE r = np.arange(n*n); r = (r//n + r%n)%3 == np.arange(3)[:,None]NEWLINE a = np.vstack([a[:,r[i]].sum(axis=1) for i in range(3)])NEWLINE p = np.array([*itertools.permutations(range(c), 3)])NEWLINE print(a[np.arange(3),p].sum(axis=1).min())NEWLINENEWLINENEWLINE class ABC100:NEWLINE @staticmethodNEWLINE def d():NEWLINE n, m, *xyz = map(int, sys.stdin.read().split())NEWLINE xyz = np.array(xyz).reshape(n,3)NEWLINE op = np.array([*itertools.product((-1,1), repeat=3)])NEWLINE print(np.sort((op[:,None]*xyz).sum(axis=-1), axis=-1)[:,n-m:].sum(axis=-1).max())NEWLINENEWLINE class ABC101:NEWLINE @staticmethodNEWLINE def d():NEWLINE def s(n): return sum(int(d) for d in str(n))NEWLINE def f(n):NEWLINE return sorted([pow(10,d)*(n//pow(10,d)+2)-1 for d in range(int(math.log10(n))+2)], key=lambda x: x/s(x))[0]NEWLINE k = int(sys.stdin.readline().rstrip())NEWLINE n = 1NEWLINE for _ in range(k): print(n); n = f(n)NEWLINENEWLINE class ABC102:NEWLINE @staticmethodNEWLINE def d(): # two pointers (online)NEWLINE n, *a = map(int, sys.stdin.read().split())NEWLINE mn = infNEWLINE i, k = 0, 2NEWLINE p,q,r,s = a[0], 0, a[1]+a[2], sum(a[3:])NEWLINE for j in range(1,n-2):NEWLINE q += a[j]; r -= a[j]NEWLINE while i < j-1:NEWLINE if abs(q-p-2*a[i+1]) <= abs(q-p):NEWLINE q -= a[i+1]; p += a[i+1]NEWLINE i += 1; continueNEWLINE breakNEWLINE while k < n-2:NEWLINE if 
abs(s-r-2*a[k+1]) <= abs(s-r):NEWLINE s -= a[k+1]; r += a[k+1]NEWLINE k += 1; continueNEWLINE breakNEWLINE tmp = sorted([p,q,r,s])NEWLINE mn = min(mn, tmp[-1]-tmp[0])NEWLINE print(mn)NEWLINENEWLINE @staticmethodNEWLINE def d_2(): # binary_search (offline)NEWLINE n, *a = map(int, sys.stdin.read().split())NEWLINE a = np.array(a)NEWLINE def f(a):NEWLINE s = np.cumsum(a)NEWLINE i = np.searchsorted(s, s/2)NEWLINE l, r = s[i], s-s[i]NEWLINE bl = np.abs(r-l) > np.abs(r-l+2*a[i])NEWLINE l -= a[i]*bl; r += a[i]*blNEWLINE return l, rNEWLINE (p,q), (s,r) = f(a), f(a[::-1])NEWLINE a = np.sort(np.vstack((p[:-1], q[:-1], r[-2::-1], s[-2::-1])), axis=0)[:,1:-1]NEWLINE print((a[-1]-a[0]).min())NEWLINENEWLINE class ABC103:NEWLINE @staticmethodNEWLINE def d():NEWLINE n, m, *ab = map(int, sys.stdin.read().split())NEWLINE cnt = prev = 0NEWLINE for a, b in sorted(zip(*[iter(ab)]*2), key=lambda x: x[1]):NEWLINE a -= 1; b -= 1NEWLINE if a < prev: continueNEWLINE prev = b; cnt += 1NEWLINE print(cnt)NEWLINENEWLINE class ABC104:NEWLINE @staticmethodNEWLINE def d():NEWLINE s = sys.stdin.readline().rstrip()[::-1]NEWLINE a = b = c = 0; d = 1NEWLINE for i in range(len(s)):NEWLINE if s[i]=='?': a,b,c,d = 3*a+b, 3*b+c, 3*c+d, 3*dNEWLINE elif s[i] == 'A': a += bNEWLINE elif s[i] == 'B': b += cNEWLINE elif s[i] == 'C': c += dNEWLINE a %= MOD; b %= MOD; c %= MOD; d %= MODNEWLINE print(a)NEWLINENEWLINE class ABC105:NEWLINE @staticmethodNEWLINE def d():NEWLINE n, m, *a = map(int, sys.stdin.read().split())NEWLINE c = Counter(np.array(a).cumsum()%m)NEWLINE print(c[0] + sum([v*(v-1)//2 for v in c.values()]))NEWLINENEWLINE class ABC106:NEWLINE @staticmethodNEWLINE def d():NEWLINE n, m, q, *I = map(int, sys.stdin.read().split())NEWLINE I = np.array(I).reshape(-1,2) - 1NEWLINE (l,r), (p,q) = I[:m].T, I[-q:].TNEWLINE c = np.zeros((n+1, n+1), dtype=np.int64)NEWLINE np.add.at(c, (0,r), 1); np.add.at(c, (l+1,-1), 1)NEWLINE np.add.at(c, (l+1,r), -1); c[0,-1] -= mNEWLINE c = c.cumsum(axis=0).cumsum(axis=1)NEWLINE 
print(*c[p,q], sep='\n')NEWLINENEWLINE class ABC107:NEWLINE @staticmethodNEWLINE def d():NEWLINE n, *a = map(int, sys.stdin.read().split())NEWLINE h = (n*(n+1)//2 + 1)//2NEWLINENEWLINE def f(x):NEWLINE *b, = itertools.accumulate([0]+[-1+2*(v>=x) for v in a])NEWLINE mn = min(b)NEWLINE b = [v-mn+1 for v in b]NEWLINE bit = GeometryTopology.FenwickTree(max(b))NEWLINE c = 0NEWLINE for v in b: c += bit.sum(1, v); bit.add(v, 1)NEWLINE return c >= hNEWLINENEWLINE def f_2(x):NEWLINE tot = 0NEWLINE s, cs, c = 0, defaultdict(int), 0; cs[0] = 1NEWLINE for v in a:NEWLINE if v>=x: s += 1; c += cs[s]NEWLINE else: c -= cs[s]; s -= 1NEWLINE tot += c; cs[s] += 1; c += 1NEWLINE # print(tot)NEWLINE return tot >= hNEWLINENEWLINE def binary_search():NEWLINE lo, hi = 1, 10**9+1NEWLINE while hi-lo > 1:NEWLINE x = (hi+lo)//2NEWLINE # if f(x): lo = xNEWLINE if f_2(x): lo = xNEWLINE else: hi = xNEWLINE return loNEWLINE print(binary_search())NEWLINENEWLINENEWLINENEWLINE class ABC108:NEWLINE @staticmethodNEWLINE def d():NEWLINE l = int(sys.stdin.readline().rstrip())NEWLINE n = l.bit_length()NEWLINE m = 2*(n-1) + bit_count(l)-1NEWLINE edges = [(i, i+1, d) for i in range(n-1) for d in [0, 1<<i]]NEWLINE d = 1<<(n-1)NEWLINE for i in range(n-1):NEWLINE if l>>i&1: edges.append((i, n-1, d)); d += 1<<iNEWLINE print(n, m)NEWLINE for u, v, d in edges: print(u+1, v+1, d)NEWLINENEWLINENEWLINE class ABC109:NEWLINE @staticmethodNEWLINE def d():NEWLINE h, w, *a = map(int, sys.stdin.read().split())NEWLINENEWLINE passNEWLINENEWLINE class ABC110:NEWLINE @staticmethodNEWLINE def d():NEWLINE n, m = map(int, sys.stdin.readline().split())NEWLINE c = Combinatorics.CombinationsMod(n=10**6, mod=MOD)NEWLINE pn = NumberTheory.PrimeNumbers(10**5)NEWLINE f = np.array([*pn.factorize(m).values()])NEWLINE print(Algebra.cumprod(c(n+f-1, f), mod=MOD)[-1])NEWLINENEWLINE class ABC111: passNEWLINE class ABC112: passNEWLINE class ABC113: passNEWLINE class ABC114: passNEWLINE class ABC115: passNEWLINE class ABC116: passNEWLINE 
class ABC117: passNEWLINE class ABC118: passNEWLINE class ABC119: passNEWLINE class ABC120: passNEWLINE class ABC121: passNEWLINE class ABC122: passNEWLINE class ABC123: passNEWLINE class ABC124: passNEWLINE class ABC125: passNEWLINE class ABC126: passNEWLINE class ABC127: passNEWLINE class ABC128: passNEWLINE class ABC129: passNEWLINE class ABC130: passNEWLINE class ABC131: passNEWLINE class ABC132: passNEWLINE class ABC133: passNEWLINE class ABC134: passNEWLINE class ABC135: passNEWLINE class ABC136: passNEWLINE class ABC137: passNEWLINE class ABC138: passNEWLINE class ABC139: passNEWLINE class ABC140: passNEWLINE class ABC141: passNEWLINE class ABC142: passNEWLINE class ABC143: passNEWLINE class ABC144: passNEWLINE class ABC145: passNEWLINE class ABC146: passNEWLINE class ABC147: passNEWLINE class ABC148: passNEWLINE class ABC149: passNEWLINE class ABC150: passNEWLINE class ABC151: passNEWLINE class ABC152: passNEWLINE class ABC153: passNEWLINE class ABC154: passNEWLINE class ABC155: passNEWLINE class ABC156: passNEWLINE class ABC157: passNEWLINE class ABC158: passNEWLINE class ABC159: passNEWLINE class ABC160: passNEWLINE class ABC161: passNEWLINE class ABC162: passNEWLINE class ABC163: passNEWLINE class ABC164: passNEWLINE class ABC165: passNEWLINE class ABC166: passNEWLINE class ABC167: passNEWLINE class ABC168: passNEWLINE class ABC169: passNEWLINENEWLINE class ABC170:NEWLINE @staticmethodNEWLINE def a():NEWLINE x = [int(x) for x in sys.stdin.readline().split()]NEWLINE for i in range(5):NEWLINE if x[i] != i+1:NEWLINE print(i+1)NEWLINE breakNEWLINENEWLINE @staticmethodNEWLINE def b():NEWLINE x, y = map(int, sys.stdin.readline().split())NEWLINE print('Yes' if 2*x <= y <= 4*x and y%2 == 0 else 'No')NEWLINE @staticmethodNEWLINE def c():NEWLINE x, n, *p = map(int, sys.stdin.read().split())NEWLINE a = list(set(range(102)) - set(p))NEWLINE a = [(abs(y-x), y) for y in a]NEWLINE print(sorted(a)[0][1])NEWLINENEWLINE @staticmethodNEWLINE def d():NEWLINE n, *a = 
map(int, sys.stdin.read().split())NEWLINE cand = set(a)NEWLINE cnt = 0NEWLINE for x, c in sorted(Counter(a).items()):NEWLINE cnt += c == 1 and x in candNEWLINE cand -= set(range(x*2, 10**6+1, x))NEWLINE print(cnt)NEWLINENEWLINE @staticmethodNEWLINE def e():NEWLINE n, q = map(int, sys.stdin.readline().split())NEWLINE queue = []NEWLINE m = 2*10**5NEWLINE infants = [[] for _ in range(m)]NEWLINE highest_rate = [None] * mNEWLINE where = [None] * nNEWLINE rate = [None] * nNEWLINENEWLINE def entry(i, k):NEWLINE where[i] = kNEWLINE while infants[k]:NEWLINE r, j = heappop(infants[k])NEWLINE if where[j] != k or j == i: continueNEWLINE if rate[i] >= -r:NEWLINE highest_rate[k] = rate[i]NEWLINE heappush(queue, (rate[i], k, i))NEWLINE heappush(infants[k], (r, j))NEWLINE breakNEWLINE else:NEWLINE highest_rate[k] = rate[i]NEWLINE heappush(queue, (rate[i], k, i))NEWLINE heappush(infants[k], (-rate[i], i))NEWLINENEWLINE def transfer(i, k):NEWLINE now = where[i]NEWLINE while infants[now]:NEWLINE r, j = heappop(infants[now])NEWLINE if where[j] != now or j == i: continueNEWLINE if highest_rate[now] != -r:NEWLINE highest_rate[now] = -rNEWLINE heappush(queue, (-r, now, j))NEWLINE heappush(infants[now], (r, j))NEWLINE breakNEWLINE else:NEWLINE highest_rate[now] = NoneNEWLINE entry(i, k)NEWLINENEWLINE def inquire():NEWLINE while True:NEWLINE r, k, i = heappop(queue)NEWLINE if where[i] != k or r != highest_rate[k]: continueNEWLINE heappush(queue, (r, k, i))NEWLINE return rNEWLINENEWLINE for i in range(n):NEWLINE a, b = map(int, sys.stdin.readline().split())NEWLINE rate[i] = aNEWLINE entry(i, b-1)NEWLINE for _ in range(q):NEWLINE c, d = map(int, sys.stdin.readline().split())NEWLINE transfer(c-1, d-1)NEWLINE print(inquire())NEWLINENEWLINENEWLINENEWLINE class ABC171:NEWLINE @staticmethodNEWLINE def a():NEWLINE c = sys.stdin.readline().rstrip()NEWLINE print('A' if c < 'a' else 'a')NEWLINENEWLINE @staticmethodNEWLINE def b():NEWLINE n, k, *p = map(int, sys.stdin.read().split())NEWLINE 
print(sum(sorted(p)[:k]))NEWLINENEWLINENEWLINE @staticmethodNEWLINE def c():NEWLINE n = int(sys.stdin.readline().rstrip())NEWLINE n -= 1NEWLINE l = 1NEWLINE while True:NEWLINE if n < pow(26, l):NEWLINE breakNEWLINE n -= pow(26, l)NEWLINE l += 1NEWLINE res = ''.join([chr(ord('a')+d) for d in NumberTheory.base_convert(n, 26)][::-1])NEWLINE res = 'a'*(l-len(res)) + resNEWLINE print(res)NEWLINENEWLINE @staticmethodNEWLINE def d():NEWLINE n = int(sys.stdin.readline().rstrip())NEWLINE a = [int(x) for x in sys.stdin.readline().split()]NEWLINE s = sum(a)NEWLINE cnt = Counter(a)NEWLINE q = int(sys.stdin.readline().rstrip())NEWLINE for _ in range(q):NEWLINE b, c = map(int, sys.stdin.readline().split())NEWLINE s += (c-b)*cnt[b]NEWLINE print(s)NEWLINE cnt[c] += cnt[b]; cnt[b] = 0NEWLINENEWLINE @staticmethodNEWLINE def e():NEWLINE n, *a = map(int, sys.stdin.read().split())NEWLINE s = 0NEWLINE for x in a: s ^= xNEWLINE b = map(lambda x: x^s, a)NEWLINE print(*b, sep=' ')NEWLINENEWLINENEWLINE class ABC172:NEWLINE @staticmethodNEWLINE def a():NEWLINE a = int(sys.stdin.readline().rstrip()); print(a*(1+a+a**2))NEWLINENEWLINE @staticmethodNEWLINE def b():NEWLINE s, t = sys.stdin.read().split(); print(sum(s[i]!=t[i] for i in range(len(s))))NEWLINENEWLINE @staticmethodNEWLINE def c():NEWLINE n, m, k = map(int, sys.stdin.readline().split())NEWLINE a = [0] + [int(x) for x in sys.stdin.readline().split()]NEWLINE b = [int(x) for x in sys.stdin.readline().split()]NEWLINE *sa, = itertools.accumulate(a)NEWLINE *sb, = itertools.accumulate(b)NEWLINE res = 0NEWLINE for i in range(n+1):NEWLINE r = k - sa[i]NEWLINE if r < 0: breakNEWLINE res = max(res, i+bi_r(sb, r))NEWLINE print(res)NEWLINENEWLINE @staticmethodNEWLINE def d():NEWLINE n = int(sys.stdin.readline().rstrip())NEWLINE f = np.zeros(n+1, dtype=np.int64)NEWLINE for i in range(1, n+1):NEWLINE f[i::i] += 1NEWLINE print((np.arange(1, n+1)*f[1:]).sum())NEWLINENEWLINENEWLINE class ABC173:NEWLINE @staticmethodNEWLINE def a():NEWLINE n = 
int(sys.stdin.readline().rstrip())NEWLINE charge = (n+999)//1000 * 1000 - nNEWLINE print(charge)NEWLINENEWLINE @staticmethodNEWLINE def b():NEWLINE n, *s = sys.stdin.read().split()NEWLINE c = Counter(s)NEWLINE for v in 'AC, WA, TLE, RE'.split(', '):NEWLINE print(f'{v} x {c[v]}')NEWLINENEWLINENEWLINE @staticmethodNEWLINE def c():NEWLINE h, w, k = map(int, sys.stdin.readline().split())NEWLINE c = [sys.stdin.readline().rstrip() for _ in range(h)]NEWLINE tot = 0NEWLINE for i in range(1<<h):NEWLINE for j in range(1<<w):NEWLINE cnt = 0NEWLINE for y in range(h):NEWLINE for x in range(w):NEWLINE if i>>y & 1 or j>>x & 1:NEWLINE continueNEWLINE cnt += c[y][x] == '#'NEWLINE tot += cnt == kNEWLINE print(tot)NEWLINENEWLINE @staticmethodNEWLINE def d():NEWLINE n, *a = map(int, sys.stdin.read().split())NEWLINE a.sort(reverse=True)NEWLINE res = a[0] + sum(a[1:1+(n-2)//2])*2 + a[1+(n-2)//2]*(n & 1)NEWLINE print(res)NEWLINENEWLINE @staticmethodNEWLINE def e():NEWLINE MOD = 10**9+7NEWLINE n, k, *a = map(int, sys.stdin.read().split())NEWLINE minus = [x for x in a if x < 0]NEWLINE plus = [x for x in a if x > 0]NEWLINE if len(plus) + len(minus)//2*2 >= k: # plusNEWLINE *minus, = map(abs, minus)NEWLINE minus.sort(reverse=True)NEWLINE plus.sort(reverse=True)NEWLINE cand = []NEWLINE if len(minus)&1: minus = minus[:-1]NEWLINE for i in range(0, len(minus)-1, 2):NEWLINE cand.append(minus[i]*minus[i+1]%MOD)NEWLINE if k & 1:NEWLINE res = plus[0]NEWLINE plus = plus[1:]NEWLINE else:NEWLINE res = 1NEWLINE if len(plus)&1: plus = plus[:-1]NEWLINE for i in range(0, len(plus)-1, 2):NEWLINE cand.append(plus[i]*plus[i+1]%MOD)NEWLINE cand.sort(reverse=True)NEWLINE for x in cand[:k//2]:NEWLINE res *= xNEWLINE res %= MODNEWLINE print(res)NEWLINE elif 0 in a:NEWLINE print(0)NEWLINE else:NEWLINE cand = sorted(map(abs, a))NEWLINE res = 1NEWLINE for i in range(k):NEWLINE res *= cand[i]NEWLINE res %= MODNEWLINE res = MOD - resNEWLINE print(res)NEWLINE passNEWLINENEWLINENEWLINE class ABC174:NEWLINE 
@staticmethodNEWLINE def a():NEWLINE print('Yes' if int(sys.stdin.readline().rstrip())>=30 else 'No')NEWLINENEWLINENEWLINENEWLINENEWLINE class ABC178:NEWLINE @staticmethodNEWLINE def a(): passNEWLINENEWLINE @staticmethodNEWLINE def b(): passNEWLINENEWLINE @staticmethodNEWLINE def c(): passNEWLINENEWLINE @staticmethodNEWLINE def d():NEWLINE s = int(sys.stdin.readline().rstrip())NEWLINE if s == 0: print(1); returnNEWLINE elif s == 1: print(0); returnNEWLINE c = np.eye(3, k=-1, dtype=np.int64)NEWLINE c[0, 0] = c[0, 2] = 1NEWLINE a = np.array([0, 0, 1])NEWLINE print(Algebra.dot(Algebra.matrix_pow(c, s-2), a)[0])NEWLINENEWLINE class ABC179:NEWLINE @staticmethodNEWLINE def a():NEWLINE s = sys.stdin.readline().rstrip()NEWLINE print(s+'s' if s[-1]!='s' else s+'es')NEWLINENEWLINE @staticmethodNEWLINE def b():NEWLINE n, *d = map(int, sys.stdin.read().split())NEWLINE d = np.array(d).reshape(n, 2).TNEWLINE d = np.equal(d[0], d[1]).astype(int)NEWLINE dd = d.copy()NEWLINE dd[1:] += d[:-1]NEWLINE dd[:-1] += d[1:]NEWLINE print('Yes' if (dd>=3).any() else 'No')NEWLINENEWLINENEWLINENEWLINE @staticmethodNEWLINE def c():NEWLINE n = int(sys.stdin.readline().rstrip())NEWLINE res = (n//np.arange(1, n+1)).sum() - len(NumberTheory.find_divisors(n))NEWLINE print(res)NEWLINENEWLINENEWLINE @staticmethodNEWLINE def d():NEWLINE mod = 998244353NEWLINE n, k, *lr = map(int, sys.stdin.read().split())NEWLINE l, r = np.array(lr).reshape(k, -1).TNEWLINE @njit((i8, i8[:], i8[:]), cache=True)NEWLINE def solve(n, l, r):NEWLINE res = np.zeros(n*2, dtype=np.int64); res[0], res[1] = 1, -1NEWLINE for i in range(n-1):NEWLINE res[i+1] = (res[i+1]+res[i]) % modNEWLINE res[i+l] = (res[i+l]+res[i]) % modNEWLINE res[i+r+1] = (res[i+r+1]-res[i]) % modNEWLINE print(res[n-1])NEWLINE solve(n, l, r)NEWLINENEWLINE @staticmethodNEWLINE def e():NEWLINE n, x, m = map(int, sys.stdin.readline().split())NEWLINE res = [-1 for _ in range(m)]NEWLINE s = 0NEWLINE loop = np.zeros(m, dtype=np.int64)NEWLINE for i in 
range(m+1):NEWLINE if i==n: print(s); returnNEWLINE if res[x] != -1:NEWLINE l, loop = i-res[x], loop[res[x]:i]NEWLINE q, r = divmod(n-i, l)NEWLINE print(s+q*loop.sum()+loop[:r].sum()); returnNEWLINE res[x], loop[i] = i, xNEWLINE s += x; x = x**2 % mNEWLINENEWLINENEWLINE class ABC180:NEWLINE @staticmethodNEWLINE def a():NEWLINE n, a, b = map(int, sys.stdin.readline().split())NEWLINE print(n-a+b)NEWLINENEWLINE @staticmethodNEWLINE def b():NEWLINE n, *x = map(int, sys.stdin.read().split())NEWLINE x = np.absolute(np.array(x))NEWLINE print(x.sum())NEWLINE print(np.sqrt((x**2).sum()))NEWLINE print(x.max())NEWLINENEWLINE @staticmethodNEWLINE def c():NEWLINE n = int(sys.stdin.readline().rstrip())NEWLINE div = NumberTheory.find_divisors(n)NEWLINE print(*div, sep='\n')NEWLINENEWLINE @staticmethodNEWLINE def d():NEWLINE x, y, a, b = map(int, sys.stdin.readline().split())NEWLINE cnt = 0NEWLINE while x*a <= x+b:NEWLINE x *= aNEWLINE if x >= y:NEWLINE print(cnt); returnNEWLINE cnt += 1NEWLINE cnt += (y-x-1) // bNEWLINE print(cnt)NEWLINENEWLINE @staticmethodNEWLINE def e():NEWLINE n, *xyz = map(int, sys.stdin.read().split())NEWLINENEWLINE xyz = list(zip(*[iter(xyz)] * 3))NEWLINE dist = [[0] * n for _ in range(n)]NEWLINE for i in range(n):NEWLINE a, b, c = xyz[i]NEWLINE for j in range(n):NEWLINE p, q, r = xyz[j]NEWLINE dist[i][j] = abs(p-a) + abs(q-b) + max(0, r-c)NEWLINENEWLINE dp = [[inf] * n for _ in range(1<<n)]NEWLINE dp[0][0] = 0NEWLINE for s in range(1<<n):NEWLINE for i in range(n):NEWLINE t = s|(1<<i)NEWLINE for j in range(n):NEWLINE dp[t][i] = min(dp[t][i], dp[s][j]+dist[j][i])NEWLINE print(dp[-1][0])NEWLINENEWLINENEWLINENEWLINE @staticmethodNEWLINE def f(): # rewrite with jit compiling later.NEWLINE n, m, l = map(int, sys.stdin.readline().split())NEWLINE c = Combinatorics.CombinationsMod(n, MOD)NEWLINE path = np.zeros(n+1, dtype=np.int64); path[1] = path[2] = 1NEWLINE for i in range(3, n+1): path[i] = path[i-1]*i%MODNEWLINE cycle = np.zeros(n+1, dtype=np.int64); 
cycle[1:] = path[:-1]NEWLINE dp = np.zeros((n+1, m+1), dtype=np.int64)NEWLINE def f(l):NEWLINE dp[:,:] = 0; dp[0,0] = 1NEWLINE for i in range(n):NEWLINE for j in range(m+1):NEWLINE k = np.arange(1, min(l, n-i, m-j+1)+1)NEWLINE dp[i+k, j+k-1] += dp[i, j]*c(n-i-1, k-1)%MOD*path[k]%MODNEWLINE dp[i+k, j+k-1] %= MODNEWLINE k = np.arange(2, min(l, n-i, m-j)+1)NEWLINE dp[i+k, j+k] += dp[i, j]*c(n-i-1, k-1)%MOD*cycle[k]%MODNEWLINE dp[i+k, j+k] %= MODNEWLINE return dp[n, m]NEWLINE print((f(l)-f(l-1))%MOD)NEWLINENEWLINE @staticmethodNEWLINE def f_2(): # PyPyNEWLINE n, m, l = map(int, sys.stdin.readline().split())NEWLINE c = Combinatorics.CombinationsMod(n, MOD)NEWLINE path = [0] * (n+1); path[1] = path[2] = 1NEWLINE for i in range(3, n+1): path[i] = path[i-1]*i%MODNEWLINE cycle = [0] + path[:-1]NEWLINE def f(l):NEWLINE dp = [[0]*(m+1) for _ in range(n+1)]; dp[0][0] = 1NEWLINE for i in range(n):NEWLINE for j in range(m+1):NEWLINE for k in range(1, min(l, n-i, m-j+1)+1):NEWLINE dp[i+k][j+k-1] += dp[i][j]*c(n-i-1, k-1)%MOD*path[k]%MODNEWLINE dp[i+k][j+k-1] %= MODNEWLINE for k in range(1, min(l, n-i, m-j)+1):NEWLINE dp[i+k][j+k] += dp[i][j]*c(n-i-1, k-1)%MOD*cycle[k]%MODNEWLINE dp[i+k][j+k] %= MODNEWLINE return dp[n][m]NEWLINE print((f(l)-f(l-1))%MOD)NEWLINENEWLINENEWLINE class ABC181:NEWLINE @staticmethodNEWLINE def a():NEWLINE n = int(sys.stdin.readline().rstrip())NEWLINE print('White' if n&1==0 else 'Black')NEWLINENEWLINE @staticmethodNEWLINE def b():NEWLINE n, *ab = map(int, sys.stdin.read().split())NEWLINE a, b = np.array(ab).reshape(n,2).TNEWLINE print(((a+b)*(b-a+1)//2).sum())NEWLINENEWLINE @staticmethodNEWLINE def c():NEWLINE n, *xy = map(int, sys.stdin.read().split())NEWLINE i, j, k = np.array([*itertools.combinations(range(n),3)]).TNEWLINE x, y = np.array(xy).reshape(-1,2).TNEWLINE b = (y[j]-y[i])*(x[k]-x[j]) == (y[k]-y[j])*(x[j]-x[i])NEWLINE print('Yes' if b.any() else 'No')NEWLINENEWLINE @staticmethodNEWLINE def d():NEWLINE n = sys.stdin.readline().rstrip()NEWLINE if 
len(n)<=2:NEWLINE print('Yes' if int(n)%8==0 or int(n[::-1])%8==0 else 'No')NEWLINE returnNEWLINE c = Counter(n)NEWLINE for i in range(112, 1000, 8):NEWLINE if not Counter(str(i))-c: print('Yes'); returnNEWLINE print('No')NEWLINENEWLINE @staticmethodNEWLINE def e():NEWLINE n, m, *I = map(int, sys.stdin.read().split())NEWLINE I = np.array(I)NEWLINE h, w = np.sort(I[:n]), np.sort(I[-m:])NEWLINE tmp = np.pad(h[1:]-h[:-1], 1, constant_values=0)NEWLINE l = tmp.copy(); l[::2] = 0; np.cumsum(l, out=l)NEWLINE r = tmp.copy(); r[1::2] = 0; np.cumsum(r[::-1], out=r[::-1])NEWLINE i = np.searchsorted(w, h)NEWLINE d = np.pad(h[2:]-h[:-2], 1, constant_values=0); d[::2] = 0NEWLINE d += np.minimum(np.abs(h-w[np.maximum(i-1, 0)]), np.abs(h-w[np.minimum(m-1, i)]))NEWLINE print((l[:-1]+r[1:]+d).min())NEWLINENEWLINE @staticmethodNEWLINE def f():NEWLINE n, *xy = map(int, sys.stdin.read().split())NEWLINE xy = np.array(xy).reshape(n,2)NEWLINE y = xy[:, 1]NEWLINE if n == 1: print(np.maximum(100-y, y+100)[0]/2); returnNEWLINE ij = np.array([*itertools.combinations(range(n),2)])NEWLINE d = (np.diff(xy[ij], axis=1)**2).sum(axis=-1).ravel()NEWLINE def f(r):NEWLINE r *= 2NEWLINE uf = GeometryTopology.Graph(n+2); uf.init_dsu()NEWLINE for i in np.argwhere(y+100<=r).ravel(): uf.unite(i, n)NEWLINE for i in np.argwhere(100-y<=r).ravel(): uf.unite(i, n+1)NEWLINE for i, j in ij[np.argwhere(d<=r*r).ravel()]: uf.unite(i, j)NEWLINE return uf.same(n, n+1)NEWLINENEWLINE def binary_search():NEWLINE lo, hi = 0, 200.1NEWLINE while hi-lo > 1e-9:NEWLINE r = (lo+hi)/2NEWLINE if f(r): hi = rNEWLINE else: lo = rNEWLINE return loNEWLINE print(binary_search())NEWLINENEWLINE class ARC106:NEWLINE @staticmethodNEWLINE def a():NEWLINE n = int(sys.stdin.readline().rstrip())NEWLINE a = 1NEWLINE while pow(3,a)<=n:NEWLINE m, b = n-pow(3,a), 1NEWLINE while pow(5,b)<=m:NEWLINE if pow(5,b)==m: print(a, b); returnNEWLINE b += 1NEWLINE a += 1NEWLINE print(-1)NEWLINENEWLINE @staticmethodNEWLINE def b():NEWLINE n, m = map(int, 
sys.stdin.readline().split())NEWLINE a = [int(x) for x in sys.stdin.readline().split()]NEWLINE b = [int(x) for x in sys.stdin.readline().split()]NEWLINE uf = GeometryTopology.Graph(n); uf.init_dsu()NEWLINE for _ in range(m):NEWLINE c, d = map(int, sys.stdin.readline().split()); c -= 1; d -= 1NEWLINE uf.unite(c, d)NEWLINE ga, gb = [[] for _ in range(n)], [[] for _ in range(n)]NEWLINE for i in range(n):NEWLINE r = uf.find(i)NEWLINE ga[r].append(a[i]); gb[r].append(b[i])NEWLINE print('Yes' if all(sum(ga[i])==sum(gb[i]) for i in range(n)) else 'No')NEWLINENEWLINE @staticmethodNEWLINE def c():NEWLINE n, m = map(int, sys.stdin.readline().split())NEWLINE if m < 0: print(-1); returnNEWLINE if n == 1:NEWLINE if m != 0: print(-1); returnNEWLINE print(1, 2); returnNEWLINE if m >= n-1: print(-1); returnNEWLINE l, r = 1, 10**9NEWLINE print(l, r)NEWLINE for _ in range(n-2-m):NEWLINE l += 1; r -= 1; print(l, r)NEWLINE r = lNEWLINE for _ in range(m+1):NEWLINE l, r = r+1, r+2NEWLINE print(l, r)NEWLINENEWLINE @staticmethodNEWLINE def d():NEWLINE mod = 998244353NEWLINE n, k, *a = map(int, sys.stdin.read().split())NEWLINE a = np.array(a)NEWLINE b = np.zeros((k+1, n), dtype=np.int64)NEWLINE b[0] = 1NEWLINE for i in range(k): b[i+1] = b[i]*a%modNEWLINE s = b.sum(axis=1) % modNEWLINE inv_2 = pow(2, mod-2, mod)NEWLINE c = Combinatorics.CombinationsMod(mod=mod)NEWLINE for x in range(1, k+1):NEWLINE l = np.arange(x+1)NEWLINE print(((c(x, l)*s[l]%mod*s[l][::-1]%mod).sum()%mod - pow(2,x,mod)*s[x])%mod*inv_2%mod)NEWLINENEWLINE @staticmethodNEWLINE def e():NEWLINE passNEWLINENEWLINE @staticmethodNEWLINE def f():NEWLINE passNEWLINENEWLINENEWLINE class ARC107:NEWLINE @staticmethodNEWLINE def a():NEWLINE a = np.array(sys.stdin.read().split(), dtype=np.int64)NEWLINE print(Algebra.cumprod((1+a)*a//2%MOD, mod=MOD)[2])NEWLINENEWLINE @staticmethodNEWLINE def b():NEWLINE n, k = map(int, sys.stdin.readline().split())NEWLINE def c(m): return np.minimum(m-1,2*n-m+1)NEWLINE x = np.arange(2, 2*n+1)NEWLINE 
print((c(x)*c(x-k)*((x-k>=2)&(x-k<=2*n))).sum())NEWLINENEWLINENEWLINE @staticmethodNEWLINE def c():NEWLINE n, k, *a = map(int, sys.stdin.read().split())NEWLINE a = np.array(a).reshape(n,n)NEWLINE fac, _ = Algebra.generate_fac_ifac(n=50, p=MOD)NEWLINE def count(a):NEWLINE uf = GeometryTopology.Graph(n); uf.init_dsu()NEWLINE for i, j in itertools.combinations(range(n),2):NEWLINE if (a[i]+a[j] <= k).all(): uf.unite(i,j)NEWLINE c = 1NEWLINE for g in uf.groups():NEWLINE if g: c *= fac[len(g)]; c %= MODNEWLINE return cNEWLINE print(count(a)*count(a.T)%MOD)NEWLINENEWLINE @staticmethodNEWLINE def d():NEWLINE n, k = map(int, sys.stdin.readline().split())NEWLINE @njit((i8,i8), cache=True)NEWLINE def solve(n, k):NEWLINE dp = np.zeros((n+1, 2*n+1), dtype=np.int64); dp[0,0] = 1NEWLINE for i in range(1, n+1):NEWLINE for j in range(i, 0, -1):NEWLINE dp[i,j] = dp[i-1,j-1] + dp[i,2*j]NEWLINE dp[i,j] %= MODNEWLINE return dp[-1,k]NEWLINE print(solve(n,k))NEWLINENEWLINE @staticmethodNEWLINE def e():NEWLINE passNEWLINENEWLINE @staticmethodNEWLINE def f():NEWLINE passNEWLINENEWLINENEWLINENEWLINE class ACL001:NEWLINE @staticmethodNEWLINE def a():NEWLINE n, *xy = map(int, sys.stdin.read().split())NEWLINE *xy, = zip(*[iter(xy)]*2)NEWLINE print(xy)NEWLINE passNEWLINENEWLINENEWLINENEWLINE class TDPC:NEWLINE @staticmethodNEWLINE def t():NEWLINE passNEWLINENEWLINENEWLINE class ChokudaiSpecialRun001:NEWLINE @staticmethodNEWLINE def j():NEWLINE n, *a = map(int, sys.stdin.read().split())NEWLINE bit = GeometryTopology.FenwickTree(n)NEWLINE c = 0NEWLINE for x in a:NEWLINE c += bit.sum(1,n) - bit.sum(1,x)NEWLINE bit.add(x,1)NEWLINE print(c)NEWLINENEWLINE class ALPC: # AtCoder Library Practice Contest\NEWLINE @staticmethodNEWLINE def a():NEWLINE n, q, *tuv = map(int, sys.stdin.read().split())NEWLINE uf = GeometryTopology.Graph(n); uf.init_dsu()NEWLINE for t, u, v in zip(*[iter(tuv)]*3):NEWLINE if t == 0: uf.unite(u,v)NEWLINE else: print(int(uf.same(u,v)))NEWLINENEWLINE @staticmethodNEWLINE def 
b():NEWLINE n, q = map(int, sys.stdin.readline().split())NEWLINE a = [int(x) for x in sys.stdin.readline().split()]NEWLINE bit = GeometryTopology.FenwickTree(n)NEWLINE for i in range(n): bit.add(i+1, a[i])NEWLINE for t, i, j in zip(*[map(int, sys.stdin.read().split())]*3):NEWLINE if t==0: bit.add(i+1,j)NEWLINE else: print(bit.sum(i+1,j))NEWLINENEWLINENEWLINE @staticmethodNEWLINE def g():NEWLINE n, m, *ab = map(int, sys.stdin.read().split())NEWLINE a, b = np.array(ab).reshape(m,2).TNEWLINE _, r = connected_components(csr_matrix(([1]*m, (a,b)), (n,n)), connection='strong')NEWLINE groups = [[] for _ in range(n)]NEWLINE for u in range(n): groups[r[u]].append(u)NEWLINE groups = [group for group in groups if group]NEWLINE print(len(groups))NEWLINE for group in groups[::-1]: print(len(group), *group)NEWLINENEWLINENEWLINE class MSolutions2020:NEWLINE @staticmethodNEWLINE def a():NEWLINE x = int(sys.stdin.readline().rstrip())NEWLINE x -= 400NEWLINE print(8-x//200)NEWLINENEWLINE @staticmethodNEWLINE def b():NEWLINE r, g, b, k = map(int, sys.stdin.read().split())NEWLINE while k and g <= r:NEWLINE g *= 2NEWLINE k -= 1NEWLINE while k and b <= g:NEWLINE b *= 2NEWLINE k -= 1NEWLINE print('Yes' if r < g < b else 'No')NEWLINENEWLINE @staticmethodNEWLINE def c():NEWLINE n, k, *a = map(int, sys.stdin.read().split())NEWLINE for i in range(k, n):NEWLINE print('Yes' if a[i] > a[i-k] else 'No')NEWLINENEWLINE @staticmethodNEWLINE def d():NEWLINE n, *a = map(int, sys.stdin.read().split())NEWLINE a += [-1]NEWLINE m = 1000NEWLINE s = 0NEWLINE for i in range(n):NEWLINE if a[i+1] == a[i]: continueNEWLINE elif a[i+1] > a[i]:NEWLINE cnt = m//a[i]NEWLINE m -= a[i]*cntNEWLINE s += cntNEWLINE else:NEWLINE m += a[i]*sNEWLINE s = 0NEWLINE print(m)NEWLINENEWLINENEWLINEclass Codeforces:NEWLINE class CR676div2:NEWLINE @staticmethodNEWLINE def a():NEWLINE t = int(sys.stdin.readline().rstrip())NEWLINE for _ in range(t):NEWLINE a, b = map(int, sys.stdin.readline().split())NEWLINE print(a^b)NEWLINENEWLINE 
@staticmethodNEWLINE def b():NEWLINE t = int(sys.stdin.readline().rstrip())NEWLINE for _ in range(t):NEWLINE n = int(sys.stdin.readline().rstrip())NEWLINE s = [list(sys.stdin.readline().rstrip()) for _ in range(n)]NEWLINE s[0][0] = s[-1][-1] = '0'NEWLINE for i in range(n):NEWLINE for j in range(n):NEWLINE s[i][j] = int(s[i][j])NEWLINENEWLINENEWLINE def can_goal(g, c=0):NEWLINE visited = [0] * nNEWLINE stack = [(0, 0)]NEWLINE visited[0] |= 1<<0NEWLINE while stack:NEWLINE y, x = stack.pop()NEWLINE for dy, dx in [(-1, 0), (0, -1), (1, 0), (0, 1)]:NEWLINE i, j = y+dy, x+dxNEWLINE if i<0 or i>=n or j<0 or j>=n: continueNEWLINE if i == j == n-1: return TrueNEWLINE if visited[i]>>j&1: continueNEWLINE visited[i] |= 1<<jNEWLINE if g[i][j] != c: continueNEWLINE stack.append((i, j))NEWLINE return FalseNEWLINENEWLINE if not (can_goal(s, 0) or can_goal(s, 1)):NEWLINE print(0)NEWLINE continueNEWLINENEWLINE flg = 0NEWLINE for i in range(n):NEWLINE for j in range(n):NEWLINE if i==j==0 or i==j==n-1: continueNEWLINE s[i][j] ^= 1NEWLINE if not (can_goal(s, 0) or can_goal(s, 1)):NEWLINE print(1)NEWLINE print(i+1, j+1)NEWLINE flg = 1NEWLINE breakNEWLINE s[i][j] ^= 1NEWLINE if flg: breakNEWLINE if flg: continueNEWLINENEWLINE print(2)NEWLINE if s[0][1] == s[1][0]:NEWLINE print(n, n-1)NEWLINE print(n-1, n)NEWLINE continueNEWLINENEWLINE if s[0][1] == s[-1][-2]:NEWLINE print(1, 2)NEWLINE print(n-1, n)NEWLINE else:NEWLINE print(1, 2)NEWLINE print(n, n-1)NEWLINENEWLINENEWLINENEWLINE @staticmethodNEWLINE def c():NEWLINE passNEWLINENEWLINEclass ProjectEuler:NEWLINE @staticmethodNEWLINE def p1():NEWLINE def f(n, x):NEWLINE return (x + n//x*x) * (n//x) // 2NEWLINE n = 1000NEWLINE ans = f(n-1, 3)+f(n-1, 5)-f(n-1, 15)NEWLINE print(ans)NEWLINENEWLINE @staticmethodNEWLINE def p2():NEWLINE fib = [1, 2]NEWLINE while fib[-1] < 4*10**6:NEWLINE fib.append(fib[-1]+fib[-2])NEWLINE print(sum(fib[1:-1:3]))NEWLINENEWLINE @staticmethodNEWLINE def p3():NEWLINE pn = NumberTheory.PrimeNumbers()NEWLINE res = 
pn.factorize(600851475143)NEWLINE print(max(res.keys()))NEWLINENEWLINE @staticmethodNEWLINE def p4():NEWLINE def is_palindrome(n):NEWLINE n = str(n)NEWLINE return n == n[::-1]NEWLINE cand = []NEWLINE for a in range(100, 1000):NEWLINE for b in range(a, 1000):NEWLINE n = a*bNEWLINE if is_palindrome(n): cand.append(n)NEWLINE print(max(cand))NEWLINENEWLINE @staticmethodNEWLINE def p5():NEWLINE pn = NumberTheory.PrimeNumbers()NEWLINE res = defaultdict(int)NEWLINE for i in range(1, 21):NEWLINE for p, c in pn.factorize(i).items():NEWLINE res[p] = max(res[p], c)NEWLINE ans = 1NEWLINE for p, c in res.items(): ans *= pow(p, c)NEWLINE print(ans)NEWLINENEWLINE @staticmethodNEWLINE def p6():NEWLINE a = np.arange(101)NEWLINE b = np.cumsum(a**2)NEWLINE a = a.cumsum()NEWLINE print(a[100]**2 - b[100])NEWLINENEWLINE @staticmethodNEWLINE def p7():NEWLINE nt = NumberTheory.PrimeNumbers()NEWLINE print(sorted(nt)[10000])NEWLINE @staticmethodNEWLINE def p8():NEWLINE n = '7316717653133062491922511967442657474235534919493496983520312774506326239578318016984801869478851843858615607891129494954595017379583319528532088055111254069874715852386305071569329096329522744304355766896648950445244523161731856403098711121722383113622298934233803081353362766142828064444866452387493035890729629049156044077239071381051585930796086670172427121883998797908792274921901699720888093776657273330010533678812202354218097512545405947522435258490771167055601360483958644670632441572215539753697817977846174064955149290862569321978468622482839722413756570560574902614079729686524145351004748216637048440319989000889524345065854122758866688116427171479924442928230863465674813919123162824586178664583591245665294765456828489128831426076900422421902267105562632111110937054421750694165896040807198403850962455444362981230987879927244284909188845801561660979191338754992005240636899125607176060588611646710940507754100225698315520005593572972571636269561882670428252483600823257530420752963450'NEWLINE n = [int(d) for d in 
list(n)]NEWLINE res = 0NEWLINE for i in range(988):NEWLINE x = 1NEWLINE for j in range(13):NEWLINE x *= n[i+j]NEWLINE res = max(res, x)NEWLINE print(res)NEWLINE @staticmethodNEWLINE def p9():NEWLINE for a in range(1, 997):NEWLINE for b in range(a, 998-a):NEWLINE c = 1000 - a - bNEWLINE if a**2 + b**2 == c**2:NEWLINE print(a*b*c)NEWLINE returnNEWLINE @staticmethodNEWLINE def p10():NEWLINE pn = NumberTheory.PrimeNumbers(2*10**6+1)NEWLINE print(sum(pn))NEWLINE @staticmethodNEWLINE def p11():NEWLINE grid = '08 02 22 97 38 15 00 40 00 75 04 05 07 78 52 12 50 77 91 08 49 49 99 40 17 81 18 57 60 87 17 40 98 43 69 48 04 56 62 00 81 49 31 73 55 79 14 29 93 71 40 67 53 88 30 03 49 13 36 65 52 70 95 23 04 60 11 42 69 24 68 56 01 32 56 71 37 02 36 91 22 31 16 71 51 67 63 89 41 92 36 54 22 40 40 28 66 33 13 80 24 47 32 60 99 03 45 02 44 75 33 53 78 36 84 20 35 17 12 50 32 98 81 28 64 23 67 10 26 38 40 67 59 54 70 66 18 38 64 70 67 26 20 68 02 62 12 20 95 63 94 39 63 08 40 91 66 49 94 21 24 55 58 05 66 73 99 26 97 17 78 78 96 83 14 88 34 89 63 72 21 36 23 09 75 00 76 44 20 45 35 14 00 61 33 97 34 31 33 95 78 17 53 28 22 75 31 67 15 94 03 80 04 62 16 14 09 53 56 92 16 39 05 42 96 35 31 47 55 58 88 24 00 17 54 24 36 29 85 57 86 56 00 48 35 71 89 07 05 44 44 37 44 60 21 58 51 54 17 58 19 80 81 68 05 94 47 69 28 73 92 13 86 52 17 77 04 89 55 40 04 52 08 83 97 35 99 16 07 97 57 32 16 26 26 79 33 27 98 66 88 36 68 87 57 62 20 72 03 46 33 67 46 55 12 32 63 93 53 69 04 42 16 73 38 25 39 11 24 94 72 18 08 46 29 32 40 62 76 36 20 69 36 41 72 30 23 88 34 62 99 69 82 67 59 85 74 04 36 16 20 73 35 29 78 31 90 01 74 31 49 71 48 86 81 16 23 57 05 54 01 70 54 71 83 51 54 69 16 92 33 48 61 43 52 01 89 19 67 48'NEWLINE print(grid)NEWLINENEWLINE passNEWLINENEWLINEclass Yukicoder:NEWLINE def __init__(self):NEWLINE passNEWLINENEWLINE def __call__(self):NEWLINE print(1)NEWLINENEWLINENEWLINEclass AOJ:NEWLINE @staticmethodNEWLINE def ALDS1_12_A(): # minimum spanning treeNEWLINE n, *a = map(int, 
sys.stdin.read().split())NEWLINE g = GeometryTopology.Graph(n)NEWLINE for i in range(n-1):NEWLINE for j in range(i+1, n):NEWLINE if a[i*n+j] == -1: continueNEWLINE g.add_edge(i,j, weight=a[i*n+j])NEWLINE g.add_edge(j,i, weight=a[i*n+j])NEWLINE _, d = g.kruskal()NEWLINE # _, d = g.prim()NEWLINE # _, d = g.boruvka()NEWLINE print(d)NEWLINENEWLINENEWLINE @staticmethodNEWLINE def GRL_3_C(): # strongly connected componentsNEWLINE n, m = map(int, sys.stdin.readline().split())NEWLINE _, r = connecteNEWLINE g = GeometryTopology.Graph(n)NEWLINE for _ in range(m): g.add_edge(*map(int, sys.stdin.readline().split()))NEWLINE r = g.scc()NEWLINE q, *uv = map(int, sys.stdin.read().split())NEWLINE for u, v in zip(*[iter(uv)] * 2): print(int(r[u]==r[v]))NEWLINENEWLINENEWLINE @staticmethodNEWLINE def DSL_2_B(): # Binary Indexed Tree (Fenwick Tree)NEWLINE n, q, *txy = map(int, sys.stdin.read().split())NEWLINE bit = GeometryTopology.FenwickTree(n)NEWLINE for t, x, y in zip(*[iter(txy)]*3):NEWLINE if t==0: bit.add(x, y)NEWLINE else: print(bit.sum(x,y))NEWLINENEWLINENEWLINEclass YosupoJudge:NEWLINENEWLINE @staticmethodNEWLINE def PointAddRangeSum():NEWLINE n, q = map(int, sys.stdin.readline().split())NEWLINE a = [int(x) for x in sys.stdin.readline().split()]NEWLINE bit = GeometryTopology.FenwickTree(n)NEWLINE for i in range(n): bit.add(i+1, a[i])NEWLINE for t, i, j in zip(*[map(int, sys.stdin.read().split())]*3):NEWLINE if t==0: bit.add(i+1,j)NEWLINE else: print(bit.sum(i+1,j))NEWLINENEWLINE @staticmethodNEWLINE def Directed_MST():NEWLINE n, m, s, *abc = map(int, sys.stdin.read().split())NEWLINE g = GeometryTopology.Graph(n)NEWLINE for a, b, c in zip(*[iter(abc)]*3):g.add_edge(a, b, weight=c)NEWLINE _, d, p = g.prim(src=s, return_parent=True)NEWLINE print(d)NEWLINE print(*p)NEWLINENEWLINE @staticmethodNEWLINE def Manhattan_MST():NEWLINE n, *xy = map(int, sys.stdin.read().split())NEWLINE g = GeometryTopology.Graph(n)NEWLINENEWLINENEWLINEif __name__ == '__main__':NEWLINE 
AtCoder.ABC110.d()NEWLINE # AtCoder.ARC107.e()NEWLINE passNEWLINE
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------

from msrest.service_client import SDKClient
from msrest import Serializer, Deserializer

from ._configuration import ContainerRegistryManagementClientConfiguration
from .operations import RegistriesOperations
from .operations import Operations
from .operations import ReplicationsOperations
from .operations import WebhooksOperations
from .operations import RunsOperations
from .operations import TasksOperations
from . import models


class ContainerRegistryManagementClient(SDKClient):
    """ContainerRegistryManagementClient

    :ivar config: Configuration for client.
    :vartype config: ContainerRegistryManagementClientConfiguration

    :ivar registries: Registries operations
    :vartype registries: azure.mgmt.containerregistry.v2019_05_01.operations.RegistriesOperations
    :ivar operations: Operations operations
    :vartype operations: azure.mgmt.containerregistry.v2019_05_01.operations.Operations
    :ivar replications: Replications operations
    :vartype replications: azure.mgmt.containerregistry.v2019_05_01.operations.ReplicationsOperations
    :ivar webhooks: Webhooks operations
    :vartype webhooks: azure.mgmt.containerregistry.v2019_05_01.operations.WebhooksOperations
    :ivar runs: Runs operations
    :vartype runs: azure.mgmt.containerregistry.v2019_05_01.operations.RunsOperations
    :ivar tasks: Tasks operations
    :vartype tasks: azure.mgmt.containerregistry.v2019_05_01.operations.TasksOperations

    :param credentials: Credentials needed for the client to connect to Azure.
    :type credentials: :mod:`A msrestazure Credentials
     object<msrestazure.azure_active_directory>`
    :param subscription_id: The Microsoft Azure subscription ID.
    :type subscription_id: str
    :param str base_url: Service URL
    """

    def __init__(
            self, credentials, subscription_id, base_url=None):

        # Build the per-client configuration first; SDKClient's constructor
        # needs both the credentials and the configuration object.
        self.config = ContainerRegistryManagementClientConfiguration(credentials, subscription_id, base_url)
        super(ContainerRegistryManagementClient, self).__init__(self.config.credentials, self.config)

        # Collect every model class exported by the generated ``models``
        # module, keyed by class name, so the (de)serializers can resolve
        # type names found in request and response payloads.
        client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)}
        self._serialize = Serializer(client_models)
        self._deserialize = Deserializer(client_models)

        # Each operation group shares this client's transport, configuration
        # and (de)serializers; they only differ in the REST routes they wrap.
        self.registries = RegistriesOperations(
            self._client, self.config, self._serialize, self._deserialize)
        self.operations = Operations(
            self._client, self.config, self._serialize, self._deserialize)
        self.replications = ReplicationsOperations(
            self._client, self.config, self._serialize, self._deserialize)
        self.webhooks = WebhooksOperations(
            self._client, self.config, self._serialize, self._deserialize)
        self.runs = RunsOperations(
            self._client, self.config, self._serialize, self._deserialize)
        self.tasks = TasksOperations(
            self._client, self.config, self._serialize, self._deserialize)
import torch

from .resnet import Normalization
from .preact_resnet import preact_resnet
from .resnet import resnet
from .wideresnet import wideresnet

from .preact_resnetwithswish import preact_resnetwithswish
from .wideresnetwithswish import wideresnetwithswish

from core.data import DATASETS


# Architectures accepted by ``create_model``.  NOTE(review): this list is
# informational — ``create_model`` dispatches on substrings of ``name``
# ('preact-resnet', 'swish', 'wrn', ...) and does not validate against it.
MODELS = ['resnet18', 'resnet34', 'resnet50', 'resnet101', 
          'preact-resnet18', 'preact-resnet34', 'preact-resnet50', 'preact-resnet101', 
          'wrn-28-10', 'wrn-32-10', 'wrn-34-10', 'wrn-34-20', 
          'preact-resnet18-swish', 'preact-resnet34-swish',
          'wrn-28-10-swish', 'wrn-34-20-swish', 'wrn-70-16-swish']


def create_model(name, normalize, info, device):
    """
    Returns suitable model from its name.
    Arguments:
        name (str): name of resnet architecture.
        normalize (bool): normalize input.
        info (dict): dataset information; must provide 'data' (dataset name),
            'num_classes', and — when ``normalize`` is true — 'mean' and 'std'.
        device (str or torch.device): device to work on.
    Returns:
        torch.nn.Module.
    Raises:
        ValueError: if ``name`` matches no supported family, or the dataset
            in ``info['data']`` is not in ``DATASETS``.
    """
    if info['data'] in ['tiny-imagenet']:
        assert 'preact-resnet' in name, 'Only preact-resnets are supported for this dataset!'
        # Imported lazily so the tiny-imagenet backbone is only loaded when
        # that dataset is actually requested.
        from .ti_preact_resnet import ti_preact_resnet
        backbone = ti_preact_resnet(name, num_classes=info['num_classes'], device=device)
    
    elif info['data'] in DATASETS and info['data'] not in ['tiny-imagenet']:
        # Dispatch on substrings of the architecture name.  Note the swish
        # variants take ``dataset=`` instead of ``pretrained=``/``device=``
        # (preact swish) — their constructors differ from the ReLU versions.
        if 'preact-resnet' in name and 'swish' not in name:
            backbone = preact_resnet(name, num_classes=info['num_classes'], pretrained=False, device=device)
        elif 'preact-resnet' in name and 'swish' in name:
            backbone = preact_resnetwithswish(name, dataset=info['data'], num_classes=info['num_classes'])
        elif 'resnet' in name and 'preact' not in name:
            backbone = resnet(name, num_classes=info['num_classes'], pretrained=False, device=device)
        elif 'wrn' in name and 'swish' not in name:
            backbone = wideresnet(name, num_classes=info['num_classes'], device=device)
        elif 'wrn' in name and 'swish' in name:
            backbone = wideresnetwithswish(name, dataset=info['data'], num_classes=info['num_classes'], device=device)
        else:
            raise ValueError('Invalid model name {}!'.format(name))
    
    else:
        raise ValueError('Models for {} not yet supported!'.format(info['data']))
    
    # Optionally prepend a fixed input-normalization layer; either way the
    # backbone is wrapped in Sequential so the returned module has a uniform
    # structure regardless of ``normalize``.
    if normalize:
        model = torch.nn.Sequential(Normalization(info['mean'], info['std']), backbone)
    else:
        model = torch.nn.Sequential(backbone)
    
    # Always wrapped in DataParallel (even single-GPU), so checkpoints carry
    # the 'module.' prefix consistently across runs.
    model = torch.nn.DataParallel(model)
    model = model.to(device)
    return model
#!/usr/bin/env pythonNEWLINE# Copyright (c) 2011 Google Inc. All rights reserved.NEWLINE#NEWLINE# Redistribution and use in source and binary forms, with or withoutNEWLINE# modification, are permitted provided that the following conditions areNEWLINE# met:NEWLINE#NEWLINE# * Redistributions of source code must retain the above copyrightNEWLINE# notice, this list of conditions and the following disclaimer.NEWLINE# * Redistributions in binary form must reproduce the aboveNEWLINE# copyright notice, this list of conditions and the following disclaimerNEWLINE# in the documentation and/or other materials provided with theNEWLINE# distribution.NEWLINE# * Neither the name of Google Inc. nor the names of itsNEWLINE# contributors may be used to endorse or promote products derived fromNEWLINE# this software without specific prior written permission.NEWLINE#NEWLINE# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORSNEWLINE# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOTNEWLINE# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FORNEWLINE# A PARTICULAR PURPOSE ARE DISCLAIMED. 
# IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

# Generates Closure-compiler extern declarations for the remote-debugging
# protocol from two JSON schema files (see the __main__ guard for usage).

import os
import re

# Prefer the stdlib json module; fall back to simplejson on old interpreters.
try:
    import json
except ImportError:
    import simplejson as json

# Protocol primitive type name -> Closure type expression.
type_traits = {
    "any": "*",
    "string": "string",
    "integer": "number",
    "number": "number",
    "boolean": "boolean",
    "array": "!Array.<*>",
    "object": "!Object",
}

# Domains whose agent commands are exposed promise-style (callback becomes
# optional and the command returns a Promise).
promisified_domains = {
    "Accessibility",
    "Animation",
    "CSS",
    "Emulation",
    "Profiler"
}

# Populated by generate_protocol_externs(): fully-qualified protocol type id
# (e.g. "DOM.NodeId") -> generated Closure type name (e.g. "DOMAgent.NodeId").
ref_types = {}


def full_qualified_type_id(domain_name, type_id):
    """Return `type_id` qualified with `domain_name` unless it already
    contains a dot (i.e. is already fully qualified)."""
    if "." not in type_id:
        return "%s.%s" % (domain_name, type_id)
    return type_id


def fix_camel_case(name):
    """Turn a dashed protocol enum value into a TitleCase identifier,
    upper-casing known acronyms as a whole (e.g. "html" -> "HTML")."""
    refined = re.sub(r'-(\w)', lambda pat: pat.group(1).upper(), name)
    refined = to_title_case(refined)
    return re.sub(r'(?i)HTML|XML|WML|API', lambda pat: pat.group(0).upper(), refined)


def to_title_case(name):
    """Upper-case the first character of `name` (safe on the empty string)."""
    return name[:1].upper() + name[1:]


def generate_enum(name, json):
    """Return Closure "@enum {string}" source for an enum declaration.

    Note: the `json` parameter (kept for interface compatibility) shadows the
    json module, but only the "enum" key of the passed dict is read here.
    """
    enum_members = []
    for member in json["enum"]:
        enum_members.append(" %s: \"%s\"" % (fix_camel_case(member), member))
    return "\n/** @enum {string} */\n%s = {\n%s\n};\n" % (name, (",\n".join(enum_members)))


def param_type(domain_name, param):
    """Return the Closure type expression for a protocol parameter dict.

    Handles inline "type" declarations (recursing into array items) and
    "$ref" lookups against the global `ref_types` table. Returns None for a
    param that declares neither (mirrors the original implicit behavior).
    """
    if "type" in param:
        if param["type"] == "array":
            items = param["items"]
            return "!Array.<%s>" % param_type(domain_name, items)
        return type_traits[param["type"]]
    if "$ref" in param:
        type_id = full_qualified_type_id(domain_name, param["$ref"])
        if type_id in ref_types:
            return ref_types[type_id]
        # Fix: was a Python 2 print statement, a syntax error on Python 3.
        print("Type not found: " + type_id)
        return "!! Type not found: " + type_id


def load_schema(file, domains):
    """Parse the JSON schema in `file` and append its domains to `domains`."""
    # Fix: use a context manager so the handle is closed (was leaked).
    with open(file, "r") as input_file:
        json_string = input_file.read()
    parsed_json = json.loads(json_string)
    domains.extend(parsed_json["domains"])


def generate_protocol_externs(output_path, file1, file2):
    """Write Closure externs for every domain found in the two schema files
    `file1` and `file2` to `output_path`."""
    domains = []
    load_schema(file1, domains)
    load_schema(file2, domains)

    # Fix: context manager guarantees the output file is closed on error too.
    with open(output_path, "w") as output_file:
        output_file.write("""
var InspectorBackend = {}

var Protocol = {};
/** @typedef {string}*/
Protocol.Error;
""")

        # First pass: register every declared type so $ref lookups resolve
        # regardless of declaration order across domains.
        for domain in domains:
            domain_name = domain["domain"]
            if "types" in domain:
                for domain_type in domain["types"]:
                    type_id = full_qualified_type_id(domain_name, domain_type["id"])
                    ref_types[type_id] = "%sAgent.%s" % (domain_name, domain_type["id"])

        for domain in domains:
            domain_name = domain["domain"]
            promisified = domain_name in promisified_domains

            output_file.write("\n\n/**\n * @constructor\n*/\n")
            output_file.write("Protocol.%sAgent = function(){};\n" % domain_name)

            if "commands" in domain:
                for command in domain["commands"]:
                    output_file.write("\n/**\n")
                    params = []
                    has_return_value = "returns" in command
                    explicit_parameters = promisified and has_return_value
                    if "parameters" in command:
                        for in_param in command["parameters"]:
                            # All parameters are not optional in case of promisified domain with return value.
                            if not explicit_parameters and "optional" in in_param:
                                params.append("opt_%s" % in_param["name"])
                                output_file.write(" * @param {%s=} opt_%s\n" % (param_type(domain_name, in_param), in_param["name"]))
                            else:
                                params.append(in_param["name"])
                                output_file.write(" * @param {%s} %s\n" % (param_type(domain_name, in_param), in_param["name"]))
                    # Callback signature always starts with the protocol error.
                    returns = []
                    returns.append("?Protocol.Error")
                    if "error" in command:
                        returns.append("%s=" % param_type(domain_name, command["error"]))
                    if has_return_value:
                        for out_param in command["returns"]:
                            if "optional" in out_param:
                                returns.append("%s=" % param_type(domain_name, out_param))
                            else:
                                returns.append("%s" % param_type(domain_name, out_param))
                    callback_return_type = "void="
                    if explicit_parameters:
                        callback_return_type = "T"
                    elif promisified:
                        callback_return_type = "T="
                    output_file.write(" * @param {function(%s):%s} opt_callback\n" % (", ".join(returns), callback_return_type))
                    if promisified:
                        output_file.write(" * @return {!Promise.<T>}\n")
                        output_file.write(" * @template T\n")
                    params.append("opt_callback")

                    output_file.write(" */\n")
                    output_file.write("Protocol.%sAgent.prototype.%s = function(%s) {}\n" % (domain_name, command["name"], ", ".join(params)))
                    output_file.write("/** @param {function(%s):void=} opt_callback */\n" % ", ".join(returns))
                    output_file.write("Protocol.%sAgent.prototype.invoke_%s = function(obj, opt_callback) {}\n" % (domain_name, command["name"]))

            output_file.write("\n\n\nvar %sAgent = function(){};\n" % domain_name)

            if "types" in domain:
                for domain_type in domain["types"]:
                    if domain_type["type"] == "object":
                        typedef_args = []
                        if "properties" in domain_type:
                            for prop in domain_type["properties"]:
                                suffix = ""
                                if "optional" in prop:
                                    suffix = "|undefined"
                                if "enum" in prop:
                                    # Inline enum properties get their own named enum type.
                                    enum_name = "%sAgent.%s%s" % (domain_name, domain_type["id"], to_title_case(prop["name"]))
                                    output_file.write(generate_enum(enum_name, prop))
                                    typedef_args.append("%s:(%s%s)" % (prop["name"], enum_name, suffix))
                                else:
                                    typedef_args.append("%s:(%s%s)" % (prop["name"], param_type(domain_name, prop), suffix))
                        if typedef_args:
                            output_file.write("\n/** @typedef {!{%s}} */\n%sAgent.%s;\n" % (", ".join(typedef_args), domain_name, domain_type["id"]))
                        else:
                            output_file.write("\n/** @typedef {!Object} */\n%sAgent.%s;\n" % (domain_name, domain_type["id"]))
                    elif domain_type["type"] == "string" and "enum" in domain_type:
                        output_file.write(generate_enum("%sAgent.%s" % (domain_name, domain_type["id"]), domain_type))
                    elif domain_type["type"] == "array":
                        output_file.write("\n/** @typedef {!Array.<!%s>} */\n%sAgent.%s;\n" % (param_type(domain_name, domain_type["items"]), domain_name, domain_type["id"]))
                    else:
                        output_file.write("\n/** @typedef {%s} */\n%sAgent.%s;\n" % (type_traits[domain_type["type"]], domain_name, domain_type["id"]))

            output_file.write("/** @interface */\n")
            output_file.write("%sAgent.Dispatcher = function() {};\n" % domain_name)
            if "events" in domain:
                for event in domain["events"]:
                    params = []
                    if "parameters" in event:
                        output_file.write("/**\n")
                        for param in event["parameters"]:
                            if "optional" in param:
                                params.append("opt_%s" % param["name"])
                                output_file.write(" * @param {%s=} opt_%s\n" % (param_type(domain_name, param), param["name"]))
                            else:
                                params.append(param["name"])
                                output_file.write(" * @param {%s} %s\n" % (param_type(domain_name, param), param["name"]))
                        output_file.write(" */\n")
                    output_file.write("%sAgent.Dispatcher.prototype.%s = function(%s) {};\n" % (domain_name, event["name"], ", ".join(params)))

        # Aggregate Protocol.Agents accessor/registration declarations.
        output_file.write("\n/** @constructor\n * @param {!Object.<string, !Object>} agentsMap\n */\n")
        output_file.write("Protocol.Agents = function(agentsMap){this._agentsMap;};\n")
        output_file.write("/**\n * @param {string} domain\n * @param {!Object} dispatcher\n */\n")
        output_file.write("Protocol.Agents.prototype.registerDispatcher = function(domain, dispatcher){};\n")
        for domain in domains:
            domain_name = domain["domain"]
            # Lower-case the leading run of capitals to build the accessor name
            # (e.g. "CSS" -> "css", "DOMStorage" -> "domStorage").
            uppercase_length = 0
            while uppercase_length < len(domain_name) and domain_name[uppercase_length].isupper():
                uppercase_length += 1

            output_file.write("/** @return {!Protocol.%sAgent}*/\n" % domain_name)
            output_file.write("Protocol.Agents.prototype.%s = function(){};\n" % (domain_name[:uppercase_length].lower() + domain_name[uppercase_length:] + "Agent"))

            output_file.write("/**\n * @param {!%sAgent.Dispatcher} dispatcher\n */\n" % domain_name)
            output_file.write("Protocol.Agents.prototype.register%sDispatcher = function(dispatcher) {}\n" % domain_name)


if __name__ == "__main__":
    import sys
    import os.path
    program_name = os.path.basename(__file__)
    if len(sys.argv) < 5 or sys.argv[1] != "-o":
        sys.stderr.write("Usage: %s -o OUTPUT_FILE INPUT_FILE_1 INPUT_FILE_2\n" % program_name)
        # Fix: sys.exit() instead of the site-module-provided exit().
        sys.exit(1)
    output_path = sys.argv[2]
    input_path_1 = sys.argv[3]
    input_path_2 = sys.argv[4]
    generate_protocol_externs(output_path, input_path_1, input_path_2)
# Flat pool of English words (adjectives, nouns, verbs), roughly alphabetical.
# NOTE(review): no consumer is visible in this chunk — presumably used as a
# source for random word/phrase generation; confirm against callers before
# restructuring or deduplicating.
words =[ "aback","abaft","abandoned","abashed","aberrant","abhorrent","abiding","abject","ablaze","able","abnormal","aboard","aboriginal","abortive","abounding","abrasive","abrupt","absent","absorbed","absorbing","abstracted","absurd","abundant","abusive","accept","acceptable","accessible","accidental","account","accurate","achiever","acid","acidic","acoustic","acoustics","acrid","act","action","activity","actor","actually","ad hoc","adamant","adaptable","add","addicted","addition","adhesive","adjoining","adjustment","admire","admit","adorable","adventurous","advertisement","advice","advise","afford","afraid","aftermath","afternoon","afterthought","aggressive","agonizing","agree","agreeable","agreement","ahead","air","airplane","airport","ajar","alarm","alcoholic","alert","alike","alive","alleged","allow","alluring","aloof","amazing","ambiguous","ambitious","amount","amuck","amuse","amused","amusement","amusing","analyze","ancient","anger","angle","angry","animal","animated","announce","annoy","annoyed","annoying","answer","ants","anxious","apathetic","apologise","apparatus","apparel","appear","applaud","appliance","appreciate","approval","approve","aquatic","arch","argue","argument","arithmetic","arm","army","aromatic","arrange","arrest","arrive","arrogant","art","ashamed","ask","aspiring","assorted","astonishing","attach","attack","attempt","attend","attract","attraction","attractive","aunt","auspicious","authority","automatic","available","average","avoid","awake","aware","awesome","awful","axiomatic","babies","baby","back","bad","badge","bag","bait","bake","balance","ball","ban","bang","barbarous","bare","base","baseball","bashful","basin","basket","basketball","bat","bath","bathe","battle","bawdy","bead","beam","bear","beautiful","bed","bedroom","beds","bee","beef","befitting","beg","beginner","behave","behavior","belief","believe","bell","belligerent","bells","belong","beneficial","bent","berry","berserk","best","better","bewildered","big","bike","bikes","billowy","bird","birds","birth","birthday","bit","bite","bite-sized","bitter","bizarre","black","black-and-white","blade","bleach","bless","blind","blink","blood","bloody","blot","blow","blue","blue-eyed","blush","blushing","board","boast","boat","boil","boiling","bolt","bomb","bone","book","books","boorish","boot","border","bore","bored","boring","borrow","bottle","bounce","bouncy","boundary","boundless","bow","box","boy","brainy","brake","branch","brash","brass","brave","brawny","breakable","breath","breathe","breezy","brick","bridge","brief","bright","broad","broken","brother","brown","bruise","brush","bubble","bucket","building","bulb","bump","bumpy","burly","burn","burst","bury","bushes","business","bustling","busy","butter","button","buzz","cabbage","cable","cactus","cagey","cake","cakes","calculate","calculating","calculator","calendar","call","callous","calm","camera","camp","can","cannon","canvas","cap","capable","capricious","caption","car","card","care","careful","careless","caring","carpenter","carriage","carry","cars","cart","carve","cast","cat","cats","cattle","cause","cautious","cave","ceaseless","celery","cellar","cemetery","cent","certain","chalk","challenge","chance","change","changeable","channel","charge","charming","chase","cheap","cheat","check","cheer","cheerful","cheese","chemical","cherries","cherry","chess","chew","chicken","chickens","chief","childlike","children","chilly","chin","chivalrous","choke","chop","chubby","chunky","church","circle","claim","clam","clammy","clap","class","classy","clean","clear","clever","clip","cloistered","close","closed","cloth","cloudy","clover","club","clumsy","cluttered","coach","coal","coast","coat","cobweb","coherent","coil","cold","collar","collect","color","colorful","colossal","colour","comb","combative","comfortable","command","committee","common","communicate","company","compare","comparison","compete","competition","complain","complete","complex","concentrate","concern","concerned","condemned","condition","confess","confuse","confused","connect","connection","conscious","consider","consist","contain","continue","control","cooing","cook","cool","cooperative","coordinated","copper","copy","corn","correct","cough","count","country","courageous","cover","cow","cowardly","cows","crabby","crack","cracker","crash","crate","craven","crawl","crayon","crazy","cream","creator","creature","credit","creepy","crib","crime","crook","crooked","cross","crow","crowd","crowded","crown","cruel","crush","cry","cub","cuddly","cultured","cumbersome","cup","cure","curious","curl","curly","current","curtain","curve","curved","curvy","cushion","cut","cute","cycle","cynical","dad","daffy","daily","dam","damage","damaged","damaging","damp","dance","dangerous","dapper","dare","dark","dashing","daughter","day","dazzling","dead","deadpan","deafening","dear","death","debonair","debt","decay","deceive","decide","decision","decisive","decorate","decorous","deep","deeply","deer","defeated","defective","defiant","degree","delay","delicate","delicious","delight","delightful","delirious","deliver","demonic","depend","dependent","depressed","deranged","describe","descriptive","desert","deserted","deserve","design","desire","desk","destroy","destruction","detail","detailed","detect","determined","develop","development","devilish","didactic","different","difficult","digestion","diligent","dime","dinner","dinosaurs","direction","direful","dirt","dirty","disagree","disagreeable","disappear","disapprove","disarm","disastrous","discover","discovery","discreet","discussion","disgusted","disgusting","disillusioned","dislike","dispensable","distance","distinct","distribution","disturbed","divergent","divide","division","dizzy","dock","doctor","dog","dogs","doll","dolls","domineering","donkey","door","double","doubt","doubtful","downtown","drab","draconian","drag","drain","dramatic","drawer","dream","dreary","dress","drink","drip","driving","drop","drown","drum","drunk","dry","duck","ducks","dull","dust","dusty","dynamic","dysfunctional","eager","ear","early","earn","earsplitting","earth","earthquake","earthy","easy","eatable","economic","edge","educate","educated","education","effect","efficacious","efficient","egg","eggnog","eggs","eight","elastic","elated","elbow","elderly","electric","elegant","elfin","elite","embarrass","embarrassed","eminent","employ","empty","enchanted","enchanting","encourage","encouraging","end","endurable","energetic","engine","enjoy","enormous","enter","entertain","entertaining","enthusiastic","envious","equable","equal","erect","erratic","error","escape","ethereal","evanescent","evasive","even","event","examine","example","excellent","exchange","excite","excited","exciting","exclusive","excuse","exercise","exist","existence","exotic","expand","expansion","expect","expensive","experience","expert","explain","explode","extend","extra-large","extra-small","exuberant","exultant","eye","eyes","fabulous","face","fact","fade","faded","fail","faint","fair","fairies","faithful","fall","fallacious","false","familiar","famous","fanatical","fancy","fang","fantastic","far","far-flung","farm","fascinated","fast","fasten","fat","faulty","fax","fear","fearful","fearless","feeble","feeling","feigned","female","fence","fertile","festive","fetch","few","field","fierce","file","fill","film","filthy","fine","finger","finicky","fire","fireman","first","fish","fit","five","fix","fixed","flag","flagrant","flaky","flame","flap","flash","flashy","flat","flavor","flawless","flesh","flight","flimsy","flippant","float","flock","flood","floor","flow","flower","flowers","flowery","fluffy","fluttering","fly","foamy","fog","fold","follow","food","fool","foolish","foot","force","foregoing","forgetful","fork","form","fortunate","found","four","fowl","fragile","frail","frame","frantic","free","freezing","frequent","fresh","fretful","friction","friend","friendly","friends","frighten","frightened","frightening","frog","frogs","front","fruit","fry","fuel","full","fumbling","functional","funny","furniture","furry","furtive","future","futuristic","fuzzy","gabby","gainful","gamy","gaping","garrulous","gate","gather","gaudy","gaze","geese","general","gentle","ghost","giant","giants","giddy","gifted","gigantic","giraffe","girl","girls","glamorous","glass","gleaming","glib","glistening","glorious","glossy","glove","glow","glue","godly","gold","good","goofy","gorgeous","government","governor","grab","graceful","grade","grain","grandfather","grandiose","grandmother","grape","grass","grate","grateful","gratis","gray","grease","greasy","great","greedy","green","greet","grey","grieving","grin","grip","groan","groovy","grotesque","grouchy","ground","group","growth","grubby","gruesome","grumpy","guarantee","guard","guarded","guess","guide","guiltless","guitar","gullible","gun","gusty","guttural","habitual","hair","haircut","half","hall","hallowed","halting","hammer","hand","handle","hands","handsome","handsomely","handy","hang","hanging","hapless","happen","happy","harass","harbor","hard","hard-to-find","harm","harmonious","harmony","harsh","hat","hate","hateful","haunt","head","heady","heal","health","healthy","heap","heartbreaking","heat","heavenly","heavy","hellish","help","helpful","helpless","hesitant","hideous","high","high-pitched","highfalutin","hilarious","hill","hissing","historical","history","hobbies","hole","holiday","holistic","hollow","home","homeless","homely","honey","honorable","hook","hop","hope","horn","horrible","horse","horses","hose","hospitable","hospital","hot","hour","house","houses","hover","hug","huge","hulking","hum","humdrum","humor","humorous","hungry","hunt","hurried","hurry","hurt","hushed","husky","hydrant","hypnotic","hysterical","ice","icicle","icky","icy","idea","identify","idiotic","ignorant","ignore","ill","ill-fated","ill-informed","illegal","illustrious","imaginary","imagine","immense","imminent","impartial","imperfect","impolite","important","imported","impossible","impress","improve","impulse","incandescent","include","income","incompetent","inconclusive","increase","incredible","industrious","industry","inexpensive","infamous","influence","inform","inject","injure","ink","innate","innocent","inquisitive","insect","insidious","instinctive","instruct","instrument","insurance","intelligent","intend","interest","interesting","interfere","internal","interrupt","introduce","invent","invention","invincible","invite","irate","iron","irritate","irritating","island","itch","itchy","jaded","jagged","jail","jam","jar","jazzy","jealous","jeans","jelly","jellyfish","jewel","jittery","jobless","jog","join","joke","jolly","joyous","judge","judicious","juggle","juice","juicy","jumbled","jump","jumpy","juvenile","kaput","keen","kettle","key","kick","kill","kind","kindhearted","kindly","kiss","kittens","kitty","knee","kneel","knife","knit","knock","knot","knotty","knowing","knowledge","knowledgeable","known","label","labored","laborer","lace","lackadaisical","lacking","ladybug","lake","lame","lamentable","lamp","land","language","languid","large","last","late","laugh","laughable","launch","lavish","lazy","lean","learn","learned","leather","left","leg","legal","legs","lethal","letter","letters","lettuce","level","lewd","library","license","lick","lie","light","lighten","like","likeable","limit","limping","line","linen","lip","liquid","list","listen","literate","little","live","lively","living","load","loaf","lock","locket","lonely","long","long-term","longing","look","loose","lopsided","loss","loud","loutish","love","lovely","loving","low","lowly","lucky","ludicrous","lumber","lumpy","lunch","lunchroom","lush","luxuriant","lying","lyrical","macabre","machine","macho","maddening","madly","magenta","magic","magical","magnificent","maid","mailbox","majestic","makeshift","male","malicious","mammoth","man","manage","maniacal","many","marble","march","mark","marked","market","married","marry","marvelous","mask","mass","massive","match","mate","material","materialistic","matter","mature","meal","mean","measly","measure","meat","meaty","meddle","medical","meek","meeting","mellow","melodic","melt","melted","memorize","memory","men","mend","merciful","mere","mess up","messy","metal","mice","middle","mighty","military","milk","milky","mind","mindless","mine","miniature","minister","minor","mint","minute","miscreant","miss","mist","misty","mitten","mix","mixed","moan","moaning","modern","moldy","mom","momentous","money","monkey","month","moon","moor","morning","mother","motion","motionless","mountain","mountainous","mourn","mouth","move","muddle","muddled","mug","multiply","mundane","murder","murky","muscle","mushy","mute","mysterious","nail","naive","name","nappy","narrow","nasty","nation","natural","naughty","nauseating","near","neat","nebulous","necessary","neck","need","needle","needless","needy","neighborly","nerve","nervous","nest","new","next","nice","nifty","night","nimble","nine","nippy","nod","noise","noiseless","noisy","nonchalant","nondescript","nonstop","normal","north","nose","nostalgic","nosy","note","notebook","notice","noxious","null","number","numberless","numerous","nut","nutritious","nutty","oafish","oatmeal","obedient","obeisant","obese","obey","object","obnoxious","obscene","obsequious","observant","observation","observe","obsolete","obtain","obtainable","occur","ocean","oceanic","odd","offbeat","offend","offer","office","oil","old","old-fashioned","omniscient","one","onerous","open","opposite","optimal","orange","oranges","order","ordinary","organic","ossified","outgoing","outrageous","outstanding","oval","oven","overconfident","overflow","overjoyed","overrated","overt","overwrought","owe","own","pack","paddle","page","pail","painful","painstaking","paint","pale","paltry","pan","pancake","panicky","panoramic","paper","parallel","parcel","parched","park","parsimonious","part","partner","party","pass","passenger","past","paste","pastoral","pat","pathetic","pause","payment","peace","peaceful","pear","peck","pedal","peel","peep","pen","pencil","penitent","perfect","perform","periodic","permissible","permit","perpetual","person","pest","pet","petite","pets","phobic","phone","physical","picayune","pick","pickle","picture","pie","pies","pig","pigs","pin","pinch","pine","pink","pipe","piquant","pizzas","place","placid","plain","plan","plane","planes","plant","plantation","plants","plastic","plate","plausible","play","playground","pleasant","please","pleasure","plot","plough","plucky","plug","pocket","point","pointless","poised","poison","poke","polish","polite","political","pollution","poor","pop","popcorn","porter","position","possess","possessive","possible","post","pot","potato","pour","powder","power","powerful","practice","pray","preach","precede","precious","prefer","premium","prepare","present","preserve","press","pretend","pretty","prevent","previous","price","pricey","prick","prickly","print","private","probable","produce","productive","profit","profuse","program","promise","property","prose","protect","protective","protest","proud","provide","psychedelic","psychotic","public","puffy","pull","pump","pumped","punch","puncture","punish","punishment","puny","purple","purpose","purring","push","pushy","puzzled","puzzling","quack","quaint","quarrelsome","quarter","quartz","queen","question","questionable","queue","quick","quickest","quicksand","quiet","quill","quilt","quince","quirky","quiver","quixotic","quizzical","rabbit","rabbits","rabid","race","racial","radiate","ragged","rail","railway","rain","rainstorm","rainy","raise","rake","rambunctious","rampant","range","rapid","rare","raspy","rat","rate","ratty","ray","reach","reaction","reading","ready","real","realize","reason","rebel","receipt","receive","receptive","recess","recognise","recondite","record","red","reduce","redundant","reflect","reflective","refuse","regret","regular","reign","reject","rejoice","relation","relax","release","relieved","religion","rely","remain","remarkable","remember","remind","reminiscent","remove","repair","repeat","replace","reply","report","representative","reproduce","repulsive","request","rescue","resolute","resonant","respect","responsible","rest","retire","return","reward","rhetorical","rhyme","rhythm","rice","rich","riddle","rifle","right","righteous","rightful","rigid","ring","rings","rinse","ripe","risk","ritzy","river","road","roasted","rob","robin","robust","rock","rod","roll","romantic","roof","room","roomy","root","rose","rot","rotten","rough","round","route","royal","rub","ruddy","rude","ruin","rule","run","rural","rush","rustic","ruthless","sable","sack","sad","safe","sail","salt","salty","same","sand","sassy","satisfy","satisfying","save","savory","saw","scale","scandalous","scarce","scare","scarecrow","scared","scarf","scary","scatter","scattered","scene","scent","school","science","scientific","scintillating","scissors","scold","scorch","scrape","scratch","scrawny","scream","screeching","screw","scribble","scrub","sea","seal","search","seashore","seat","second","second-hand","secret","secretary","secretive","sedate","seed","seemly","selection","selective","self","selfish","sense","separate","serious","servant","serve","settle","shade","shaggy","shake","shaky","shallow","shame","shape","share","sharp","shave","sheep","sheet","shelf","shelter","shiny","ship","shirt","shiver","shivering","shock","shocking","shoe","shoes","shop","short","show","shrill","shrug","shut","shy","sick","side","sidewalk","sigh","sign","signal","silent","silk","silky","silly","silver","simple","simplistic","sin","sincere","sink","sip","sister","sisters","six","size","skate","ski","skillful","skin","skinny","skip","skirt","sky","slap","slave","sleep","sleepy","sleet","slim","slimy","slip","slippery","slope","sloppy","slow","small","smart","smash","smell","smelly","smile","smiling","smoggy","smoke","smooth","snail","snails","snake","snakes","snatch","sneaky","sneeze","sniff","snobbish","snore","snotty","snow","soak","soap","society","sock","soda","sofa","soft","soggy","solid","somber","son","song","songs","soothe","sophisticated","sordid","sore","sort","sound","soup","sour","space","spade","spare","spark","sparkle","sparkling","special","spectacular","spell","spicy","spiders","spiffy","spiky","spill","spiritual","spiteful","splendid","spoil","sponge","spooky","spoon","spot","spotless","spotted","spotty","spray","spring","sprout","spurious","spy","squalid","square","squash","squeak","squeal","squealing","squeamish","squeeze","squirrel","stage","stain","staking","stale","stamp","standing","star","stare","start","statement","station","statuesque","stay","steadfast","steady","steam","steel","steep","steer","stem","step","stereotyped","stew","stick","sticks","sticky","stiff","stimulating","stingy","stir","stitch","stocking","stomach","stone","stop","store","stormy","story","stove","straight","strange","stranger","strap","straw","stream","street","strengthen","stretch","string","strip","striped","stroke","strong","structure","stuff","stupendous","stupid","sturdy","subdued","subsequent","substance","substantial","subtract","succeed","successful","succinct","suck","sudden","suffer","sugar","suggest","suggestion","suit","sulky","summer","sun","super","superb","superficial","supply","support","suppose","supreme","surprise","surround","suspect","suspend","swanky","sweater","sweet","sweltering","swift","swim","swing","switch","symptomatic","synonymous","system","table","taboo","tacit","tacky","tail","talented","talk","tall","tame","tan","tangible","tangy","tank","tap","tart","taste","tasteful","tasteless","tasty","tawdry","tax","teaching","team","tearful","tease","tedious","teeny","teeny-tiny","teeth","telephone","telling","temper","temporary","tempt","ten","tendency","tender","tense","tent","tenuous","terrible","terrific","terrify","territory","test","tested","testy","texture","thank","thankful","thaw","theory","therapeutic","thick","thin","thing","things","thinkable","third","thirsty","thought","thoughtful","thoughtless","thread","threatening","three","thrill","throat","throne","thumb","thunder","thundering","tick","ticket","tickle","tidy","tie","tiger","tight","tightfisted","time","tin","tiny","tip","tire","tired","tiresome","title","toad","toe","toes","tomatoes","tongue","tooth","toothbrush","toothpaste","toothsome","top","torpid","touch","tough","tour","tow","towering","town","toy","toys","trace","trade","trail","train","trains","tramp","tranquil","transport","trap","trashy","travel","tray","treat","treatment","tree","trees","tremble","tremendous","trick","tricky","trip","trite","trot","trouble","troubled","trousers","truck","trucks","truculent","true","trust","truthful","try","tub","tug","tumble","turkey","turn","twig","twist","two","type","typical","ubiquitous","ugliest","ugly","ultra","umbrella","unable","unaccountable","unadvised","unarmed","unbecoming","unbiased","uncle","uncovered","understood","underwear","undesirable","undress","unequal","unequaled","uneven","unfasten","unhealthy","uninterested","unique","unit","unite","unkempt","unknown","unlock","unnatural","unpack","unruly","unsightly","unsuitable","untidy","unused","unusual","unwieldy","unwritten","upbeat","uppity","upset","uptight","use","used","useful","useless","utopian","utter","uttermost","vacation","vacuous","vagabond","vague","valuable","value","van","vanish","various","vase","vast","vegetable","veil","vein","vengeful","venomous","verdant","verse","versed","vessel","vest","victorious","view","vigorous","violent","violet","visit","visitor","vivacious","voice","voiceless","volatile","volcano","volleyball","voracious","voyage","vulgar","wacky","waggish","wail","wait","waiting","wakeful","walk","wall","wander","wandering","want","wanting","war","warlike","warm","warn","wary","wash","waste","wasteful","watch","water","watery","wave","waves","wax","way","weak","wealth","wealthy","weary","weather","week","weigh","weight","welcome","well-groomed","well-made","well-off","well-to-do","wet","wheel","whimsical","whine","whip","whirl","whisper","whispering","whistle","white","whole","wholesale","wicked","wide","wide-eyed","wiggly","wild","wilderness","willing","wind","window","windy","wine","wing","wink","winter","wipe","wire","wiry","wise","wish","wistful","witty","wobble","woebegone","woman","womanly","women","wonder","wonderful","wood","wooden","wool","woozy","word","work","workable","worm","worried","worry","worthless","wound","wrap","wrathful","wreck","wren","wrench","wrestle","wretched","wriggle","wrist","writer","writing","wrong","wry","x-ray","yak","yam","yard","yarn","yawn","year","yell","yellow","yielding","yoke","young","youthful","yummy","zany","zealous","zebra","zephyr","zesty","zinc","zip","zipper","zippy","zonked","zoo","zoom"]
from typing import Optional

try:
    from .tree_node import Node
except ImportError:
    from tree_node import Node


# auto-balanced-binary-search-tree (AVL tree) implemented by Xanonymous.
# worst case: O(log n)

class AVLTree:
    """A self-balancing binary search tree (AVL tree) of integer keys.

    Heights are rebalanced after every insert/delete, so search, insert and
    delete are O(log n) in the worst case. Duplicate keys are rejected.
    Relies on ``Node`` exposing ``key``, ``left``, ``right``, ``parent``,
    ``height``, ``balance`` and ``display()``.
    """

    def __init__(self):
        # Root of the tree; None means the tree is empty.
        self.__root: Optional[Node] = None

    def __str__(self) -> str:
        # NOTE(review): prints the tree via Node.display() as a side effect
        # and always returns an empty string — str(tree) is not a real
        # serialization; kept as-is to preserve behavior.
        if self.__root is not None:
            self.__root.display()
        return str()

    __repr__ = __str__

    @staticmethod
    def __height(target: Optional[Node]) -> int:
        """Height of a subtree; an absent child counts as -1 by convention."""
        return -1 if target is None else target.height

    @staticmethod
    def __re_height(target: Optional[Node]) -> None:
        """Recompute ``target.height`` from its children's cached heights."""
        if target is not None:
            target.height = max(
                AVLTree.__height(target.left),
                AVLTree.__height(target.right),
            ) + 1

    def __set_balances(self, *nodes: Node) -> None:
        """Refresh height and balance factor (right minus left) for each node."""
        for node in nodes:
            self.__re_height(node)
            node.balance = self.__height(node.right) - self.__height(node.left)

    def __rotate_left(self, a: Node) -> Node:
        """Left-rotate around ``a``; returns the new subtree root (a.right)."""
        b = a.right
        b.parent = a.parent
        a.right = b.left

        if a.right is not None:
            a.right.parent = a

        b.left = a
        a.parent = b

        # Re-attach the rotated subtree to the grandparent, if any.
        if b.parent is not None:
            if b.parent.right == a:
                b.parent.right = b
            else:
                b.parent.left = b

        self.__set_balances(a, b)
        return b

    def __rotate_right(self, a: Node) -> Node:
        """Right-rotate around ``a``; returns the new subtree root (a.left)."""
        b = a.left
        b.parent = a.parent
        a.left = b.right

        if a.left is not None:
            a.left.parent = a

        b.right = a
        a.parent = b

        # Re-attach the rotated subtree to the grandparent, if any.
        if b.parent is not None:
            if b.parent.right != a:
                b.parent.left = b
            else:
                b.parent.right = b

        self.__set_balances(a, b)
        return b

    # LR case: left child is right-heavy → rotate child left, then self right.
    def __rotate_left_then_right(self, target: Node) -> Node:
        target.left = self.__rotate_left(target.left)
        return self.__rotate_right(target)

    # RL case: right child is left-heavy → rotate child right, then self left.
    def __rotate_right_then_left(self, target: Node) -> Node:
        target.right = self.__rotate_right(target.right)
        return self.__rotate_left(target)

    def __re_balance(self, target: Node) -> None:
        """Restore the AVL invariant at ``target`` and recurse up to the root.

        A balance factor of ±2 selects one of the four rotation cases;
        when the recursion reaches a parentless node, that node becomes
        the new tree root.
        """
        self.__set_balances(target)

        if target.balance == -2:
            # Left-heavy: LL case rotates right, LR case double-rotates.
            if self.__height(target.left.left) >= self.__height(target.left.right):
                target = self.__rotate_right(target)
            else:
                target = self.__rotate_left_then_right(target)
        elif target.balance == 2:
            # Right-heavy: RR case rotates left, RL case double-rotates.
            if self.__height(target.right.right) >= self.__height(target.right.left):
                target = self.__rotate_left(target)
            else:
                target = self.__rotate_right_then_left(target)

        if target.parent is not None:
            self.__re_balance(target.parent)
        else:
            self.__root = target

    def __delete(self, node: Node) -> None:
        """Physically remove ``node`` from the tree and rebalance.

        Leaves are unlinked directly; internal nodes swap their key with the
        in-order predecessor (or successor) and delete that leaf-ward node
        instead.
        """
        if node.left is None and node.right is None:
            if node.parent is None:
                # if tree has only root node.
                self.__root = None
            else:
                parent = node.parent

                # disconnect node and its parent.
                if parent.left is node:
                    parent.left = None
                else:
                    parent.right = None

                self.__re_balance(parent)
            return

        if node.left is not None:
            # child is the root of the left sub-tree of node.
            child = node.left

            # find the node which has the biggest key in the sub-tree
            # (the in-order predecessor of `node`).
            while child.right is not None:
                child = child.right

        else:
            # child is the root of the right sub-tree of node.
            child = node.right

            # find the node which has the smallest key in the sub-tree
            # (the in-order successor of `node`).
            while child.left is not None:
                child = child.left

        # copy the child's key of the sub-tree to the deleted place.
        node.key = child.key

        # remove the child whose value has already been copied to another place.
        self.__delete(child)

    def __search(self, root: Optional[Node], key: int) -> Optional[Node]:
        """Recursive BST lookup; returns the node holding ``key`` or None."""
        if root is None or root.key == key:
            return root

        if root.key > key:
            return self.__search(root.left, key)

        return self.__search(root.right, key)

    def __print_property(self, root: Optional[Node]) -> None:
        """In-order dump of each node's key, cached height and balance factor."""
        if root is not None:
            self.__print_property(root.left)
            print('Node {0}, HEIGHT={1}, BF={2}'.format(root.key, root.height, root.balance))
            self.__print_property(root.right)

    def insert(self, key: int) -> bool:
        """Insert ``key``; returns False if the key already exists."""
        if self.__root is None:
            self.__root = Node(key)
            return True

        current = self.__root

        while True:
            if current.key == key:
                # duplicate keys are not allowed in a BST.
                return False

            parent = current

            # happened when the new key is smaller than the root of a sub-tree.
            should_go_left = current.key > key

            if should_go_left:
                current = current.left
            else:
                current = current.right

            if current is None:
                # Empty slot found — attach the new node and rebalance upward.
                new_node = Node(key, parent)

                if should_go_left:
                    parent.left = new_node
                else:
                    parent.right = new_node

                self.__re_balance(parent)
                break

        return True

    def delete(self, key_to_delete: int) -> None:
        """Delete the node holding ``key_to_delete``; no-op if absent."""
        if self.__root is None:
            # tree is empty.
            return

        target = self.__root

        while target is not None:
            if key_to_delete > target.key:
                target = target.right
            elif key_to_delete < target.key:
                target = target.left
            else:
                self.__delete(target)
                return

    def search_key(self, key: int) -> str:
        """Return a human-readable message saying whether ``key`` is present."""
        result = self.__search(self.__root, key)
        return 'Not found.\n' if result is None else '{} is found.\n'.format(key)

    def print_property(self) -> None:
        """Print key/height/balance for every node, in key order."""
        self.__print_property(self.__root)

    def __preorder_traversal(self, root: Optional[Node] = None) -> None:
        if root is not None:
            print(root.key, end=' ')
            self.__preorder_traversal(root.left)
            self.__preorder_traversal(root.right)

    def preorder_traversal(self) -> None:
        """Print keys in root-left-right order on a single line."""
        print('preorder_traversal')
        self.__preorder_traversal(self.__root)
        print()

    def __inorder_traversal(self, root: Optional[Node] = None) -> None:
        if root is not None:
            self.__inorder_traversal(root.left)
            print(root.key, end=' ')
            self.__inorder_traversal(root.right)

    def inorder_traversal(self) -> None:
        """Print keys in ascending (left-root-right) order on a single line."""
        print('inorder_traversal')
        self.__inorder_traversal(self.__root)
        print()

    def __postorder_traversal(self, root: Optional[Node] = None) -> None:
        if root is not None:
            self.__postorder_traversal(root.left)
            self.__postorder_traversal(root.right)
            print(root.key, end=' ')

    def postorder_traversal(self) -> None:
        """Print keys in left-right-root order on a single line."""
        print('postorder_traversal')
        self.__postorder_traversal(self.__root)
        print()


# Backwards-compatible alias: callers may import the tree under either name.
BinarySearchTree = AVLTree
#MenuTitle: Build Circled GlyphsNEWLINE# -*- coding: utf-8 -*-NEWLINEfrom __future__ import division, print_function, unicode_literalsNEWLINE__doc__="""NEWLINEBuilds circled numbers and letters (U+24B6...24EA and U+2460...2473) from _part.circle and the letters and figures.NEWLINE"""NEWLINENEWLINEfrom Foundation import NSPointNEWLINEimport mathNEWLINENEWLINEminDistanceBetweenFigures = 90.0NEWLINENEWLINEthisFont = Glyphs.font # frontmost fontNEWLINEthisFontMaster = thisFont.selectedFontMaster # active masterNEWLINEselectedLayers = thisFont.selectedLayers # active layers of selected glyphsNEWLINEcircledGlyphNames = ["one.circled", "two.circled", "three.circled", "four.circled", "five.circled", "six.circled", "seven.circled", "eight.circled", "nine.circled", "one_zero.circled", "one_one.circled", "one_two.circled", "one_three.circled", "one_four.circled", "one_five.circled", "one_six.circled", "one_seven.circled", "one_eight.circled", "one_nine.circled", "two_zero.circled", "A.circled", "B.circled", "C.circled", "D.circled", "E.circled", "F.circled", "G.circled", "H.circled", "I.circled", "J.circled", "K.circled", "L.circled", "M.circled", "N.circled", "O.circled", "P.circled", "Q.circled", "R.circled", "S.circled", "T.circled", "U.circled", "V.circled", "W.circled", "X.circled", "Y.circled", "Z.circled", "a.circled", "b.circled", "c.circled", "d.circled", "e.circled", "f.circled", "g.circled", "h.circled", "i.circled", "j.circled", "k.circled", "l.circled", "m.circled", "n.circled", "o.circled", "p.circled", "q.circled", "r.circled", "s.circled", "t.circled", "u.circled", "v.circled", "w.circled", "x.circled", "y.circled", "z.circled", "zero.circled"]NEWLINENEWLINENEWLINENEWLINEdef offsetLayer( thisLayer, offset, makeStroke=False, position=0.5, autoStroke=False ):NEWLINE offsetFilter = NSClassFromString("GlyphsFilterOffsetCurve")NEWLINE offsetFilter.offsetLayer_offsetX_offsetY_makeStroke_autoStroke_position_error_shadow_(NEWLINE thisLayer,NEWLINE offset, offset, # 
horizontal and vertical offsetNEWLINE makeStroke, # if True, creates a strokeNEWLINE autoStroke, # if True, distorts resulting shape to vertical metricsNEWLINE position, # stroke distribution to the left and right, 0.5 = middleNEWLINE None, None )NEWLINENEWLINEdef transform(shiftX=0.0, shiftY=0.0, rotate=0.0, skew=0.0, scale=1.0):NEWLINE """NEWLINE Returns an NSAffineTransform object for transforming layers.NEWLINE Apply an NSAffineTransform t object like this:NEWLINE Layer.transform_checkForSelection_doComponents_(t,False,True)NEWLINE Access its transformation matrix like this:NEWLINE tMatrix = t.transformStruct() # returns the 6-float tupleNEWLINE Apply the matrix tuple like this:NEWLINE Layer.applyTransform(tMatrix)NEWLINE Component.applyTransform(tMatrix)NEWLINE Path.applyTransform(tMatrix)NEWLINE Chain multiple NSAffineTransform objects t1, t2 like this:NEWLINE t1.appendTransform_(t2)NEWLINE """NEWLINE myTransform = NSAffineTransform.transform()NEWLINE if rotate:NEWLINE myTransform.rotateByDegrees_(rotate)NEWLINE if scale != 1.0:NEWLINE myTransform.scaleBy_(scale)NEWLINE if not (shiftX == 0.0 and shiftY == 0.0):NEWLINE myTransform.translateXBy_yBy_(shiftX,shiftY)NEWLINE if skew:NEWLINE skewStruct = NSAffineTransformStruct()NEWLINE skewStruct.m11 = 1.0NEWLINE skewStruct.m22 = 1.0NEWLINE skewStruct.m21 = math.tan(math.radians(skew))NEWLINE skewTransform = NSAffineTransform.transform()NEWLINE skewTransform.setTransformStruct_(skewStruct)NEWLINE myTransform.appendTransform_(skewTransform)NEWLINE return myTransformNEWLINENEWLINEdef centerOfRect(rect):NEWLINE """NEWLINE Returns the center of NSRect rect as an NSPoint.NEWLINE """NEWLINE x = rect.origin.x + rect.size.width * 0.5NEWLINE y = rect.origin.y + rect.size.height * 0.5NEWLINE return NSPoint(x,y)NEWLINENEWLINEdef combinedBounds(rects):NEWLINE bottomLeft = NSPoint( 1000.0, 100.0 )NEWLINE topRight = NSPoint( 0.0, 0.0 )NEWLINE for thisRect in rects:NEWLINE bottomLeft.x = min( thisRect.origin.x, bottomLeft.x 
)NEWLINE bottomLeft.y = min( thisRect.origin.y, bottomLeft.y )NEWLINE topRight.x = max( topRight.x, thisRect.origin.x+thisRect.size.width )NEWLINE topRight.y = max( topRight.y, thisRect.origin.y+thisRect.size.height )NEWLINE combinedRect = NSRect()NEWLINE combinedRect.origin = bottomLeftNEWLINE combinedRect.size = NSSize( topRight.x-bottomLeft.x, topRight.y-bottomLeft.y )NEWLINE return combinedRectNEWLINENEWLINEdef measureLayerAtHeightFromLeftOrRight( thisLayer, height, leftSide=True ):NEWLINE leftX = thisLayer.bounds.origin.xNEWLINE rightX = leftX + thisLayer.bounds.size.widthNEWLINE y = heightNEWLINE returnIndex = 1NEWLINE if not leftSide:NEWLINE returnIndex = -2NEWLINE measurement = thisLayer.intersectionsBetweenPoints( NSPoint(leftX,y), NSPoint(rightX,y) )[returnIndex].pointValue().xNEWLINE if leftSide:NEWLINE distance = measurement - leftXNEWLINE else:NEWLINE distance = rightX - measurementNEWLINE return distanceNEWLINENEWLINEdef minDistanceBetweenTwoLayers( comp1, comp2, interval=5.0 ):NEWLINE topY = min( comp1.bounds.origin.y+comp1.bounds.size.height, comp2.bounds.origin.y+comp2.bounds.size.height )NEWLINE bottomY = max( comp1.bounds.origin.y, comp2.bounds.origin.y )NEWLINE distance = topY - bottomYNEWLINE minDist = NoneNEWLINE for i in range(int(distance/interval)):NEWLINE height = bottomY + i * intervalNEWLINE left = measureLayerAtHeightFromLeftOrRight( comp1, height, leftSide=False )NEWLINE right = measureLayerAtHeightFromLeftOrRight( comp2, height, leftSide=True )NEWLINE total = left+rightNEWLINE if minDist == None or minDist > total:NEWLINE minDist = totalNEWLINE return minDistNEWLINENEWLINEdef placeComponentsAtDistance( thisLayer, comp1, comp2, interval=5.0, distance=10.0 ):NEWLINE thisMaster = thisLayer.associatedFontMaster()NEWLINE masterID = thisMaster.idNEWLINE original1 = comp1.component.layers[masterID]NEWLINE original2 = comp2.component.layers[masterID]NEWLINE minDist = minDistanceBetweenTwoLayers( original1, original2, interval=interval 
)NEWLINE comp2shift = distance - minDistNEWLINE addedSBs = original1.RSB + original2.LSBNEWLINE comp2.x = comp1.x + original1.width - addedSBs + comp2shiftNEWLINENEWLINEdef process( thisLayer, compName1, compName2, distance=10.0, interval=5.0 ):NEWLINE if compName1 and compName2:NEWLINE compCount = len(thisLayer.components)NEWLINE for compName in (compName1,compName2):NEWLINE newComp = GSComponent(compName)NEWLINE thisLayer.components.append(newComp)NEWLINE newComp.disableAlignment = TrueNEWLINE placeComponentsAtDistance( thisLayer, thisLayer.components[compCount], thisLayer.components[compCount+1] )NEWLINE else:NEWLINE return FalseNEWLINENEWLINENEWLINEdef buildCircledGlyph( thisGlyph, circleName, scaleFactors ):NEWLINE thisFont = thisGlyph.font()NEWLINE thisGlyph.setWidthMetricsKey_( "=%i" % thisFont.upm )NEWLINE circleGlyph = thisFont.glyphs[circleName]NEWLINE NEWLINE for i, thisMaster in enumerate(thisFont.masters):NEWLINE figureHeight = NoneNEWLINE scaleFactor = scaleFactors[i]NEWLINE thisLayer = thisGlyph.layers[thisMaster.id]NEWLINE circleLayer = circleGlyph.layers[thisMaster.id]NEWLINE circleScaleFactor = thisFont.upm * 0.9 / ( circleLayer.bounds.size.width )NEWLINE thisLayer.clear()NEWLINE thisLayer.syncMetrics()NEWLINE NEWLINE # add circle:NEWLINE assumedCenter = NSPoint( thisFont.upm*0.5, thisFont.upm*0.3 ) # hardcodedNEWLINE circleComponent = GSComponent(circleName)NEWLINE thisLayer.components.append(circleComponent)NEWLINE NEWLINE # scale circle:NEWLINE circleScale = transform( scale=circleScaleFactor ).transformStruct()NEWLINE circleComponent.applyTransform( circleScale )NEWLINE NEWLINE # move circle:NEWLINE circleBounds = thisLayer.components[0].boundsNEWLINE circleCenter = centerOfRect(circleBounds)NEWLINE xShift = assumedCenter.x - circleCenter.xNEWLINE yShift = assumedCenter.y - circleCenter.yNEWLINE circleShift = transform( shiftX=xShift, shiftY=yShift ).transformStruct()NEWLINE circleComponent.applyTransform(circleShift)NEWLINE NEWLINE # find 
components to addNEWLINE suffixlessName = thisGlyph.nameNEWLINE if "." in suffixlessName:NEWLINE suffixlessName = thisGlyph.name[:thisGlyph.name.find(".")]NEWLINE componentNames = suffixlessName.split("_")NEWLINE NEWLINE # add one component in the center:NEWLINE if componentNames:NEWLINE advance = 0NEWLINE for j, compName in enumerate(componentNames):NEWLINE lfName = "%s.lf" % compNameNEWLINE osfName = "%s.osf" % compNameNEWLINE if thisFont.glyphs[lfName]:NEWLINE compName = lfNameNEWLINE elif thisFont.glyphs[osfName]:NEWLINE compName = osfNameNEWLINE NEWLINE innerComponent = GSComponent( compName )NEWLINE thisLayer.components.append( innerComponent )NEWLINE innerComponent.position = NSPoint( advance, 0.0 )NEWLINE NEWLINE if j > 0:NEWLINE innerComponent.disableAlignment = TrueNEWLINE placeComponentsAtDistance( NEWLINE thisLayer,NEWLINE thisLayer.components[-2], NEWLINE thisLayer.components[-1], # same as innerComponentNEWLINE distance = minDistanceBetweenFiguresNEWLINE )NEWLINE NEWLINE originalLayerWidth = thisFont.glyphs[compName].layers[thisMaster.id].widthNEWLINE advance += originalLayerWidthNEWLINE NEWLINE collectedBounds = [ c.bounds for c in thisLayer.components[1:] ]NEWLINE compCenter = centerOfRect( combinedBounds(collectedBounds) )NEWLINE circleCenter = centerOfRect( circleComponent.bounds )NEWLINE NEWLINE # scale and move it in place:NEWLINE shift = transform( shiftX=-compCenter.x, shiftY=-compCenter.y ).transformStruct()NEWLINE scaleToFit = transform( scale=scaleFactor*circleScaleFactor ).transformStruct()NEWLINE backshift = transform( shiftX=circleCenter.x, shiftY=circleCenter.y ).transformStruct()NEWLINE NEWLINE compensateStroke = []NEWLINE for innerComponent in thisLayer.components[1:]:NEWLINE NEWLINE # optically shift so top anchor is in center:NEWLINE originalLayer = topAnchor = innerComponent.component.layers[thisMaster.id]NEWLINE topAnchor = originalLayer.anchors["top"]NEWLINE if topAnchor:NEWLINE anchorCenter = topAnchor.xNEWLINE boundsCenter = 
centerOfRect(originalLayer.bounds).xNEWLINE opticalCorrection = boundsCenter-anchorCenterNEWLINE if opticalCorrection != 0.0:NEWLINE threshold = 35.0NEWLINE if abs(opticalCorrection) > threshold:NEWLINE posNeg = opticalCorrection/abs(opticalCorrection)NEWLINE rest = abs(opticalCorrection) - thresholdNEWLINE opticalCorrection = posNeg * ( threshold + rest * 1/rest**0.3 )NEWLINE print("--", opticalCorrection)NEWLINE opticalShift = transform( shiftX = opticalCorrection ).transformStruct()NEWLINE innerComponent.applyTransform( opticalShift )NEWLINE NEWLINE NEWLINE innerComponent.applyTransform( shift )NEWLINE innerComponent.applyTransform( scaleToFit )NEWLINE innerComponent.applyTransform( backshift )NEWLINE NEWLINE # move components closer to center:NEWLINE #move = 15.0NEWLINE #hOffset = circleCenter.x - centerOfRect(innerComponent.bounds).xNEWLINE #if abs(hOffset) > move:NEWLINE # hOffset = (hOffset/abs(hOffset))*moveNEWLINE #if hOffset != 0.0:NEWLINE # moveCloser = transform( shiftX=hOffset ).transformStruct()NEWLINE # innerComponent.applyTransform( moveCloser )NEWLINE NEWLINE # compensatory shift:NEWLINE if thisGlyph.name in ("two_zero.circled", "one_nine.circled", "one_zero.circled"):NEWLINE compensate = transform( shiftX=10.0 ).transformStruct()NEWLINE innerComponent.applyTransform( compensate )NEWLINE NEWLINE NEWLINE NEWLINE if innerComponent.component.glyphInfo.category == "Number":NEWLINE if figureHeight == None:NEWLINE figureHeight = innerComponent.position.yNEWLINE else:NEWLINE innerComponent.position.y = figureHeightNEWLINE NEWLINE compensateStroke.append(innerComponent)NEWLINE NEWLINE # auffetten:NEWLINE isNumber = FalseNEWLINE for i in range(len(compensateStroke))[::-1]:NEWLINE componentToDecompose = compensateStroke[i]NEWLINE if componentToDecompose.component.category == "Number":NEWLINE isNumber = TrueNEWLINE thisLayer.decomposeComponent_(componentToDecompose)NEWLINE NEWLINE offsetLayer( thisLayer, 4.0 ) #4.0 if isNumber else 3.0 )NEWLINE 
thisLayer.anchors = NoneNEWLINE NEWLINE NEWLINE NEWLINENEWLINENEWLINEdef buildCirclePart( thisFont, glyphName ):NEWLINE partCircle = (NEWLINE (NEWLINE (353.0, 0.0),NEWLINE ((152.0, 0.0),(0.0, 150.0),(0.0, 348.0)),NEWLINE ((0.0, 549.0),(152.0, 700.0),(353.0, 700.0)),NEWLINE ((556.0, 700.0),(708.0, 549.0),(708.0, 348.0)),NEWLINE ((708.0, 149.0),(556.0, 0.0),(353.0, 0.0))NEWLINE ),NEWLINE )NEWLINE NEWLINE thisGlyph = thisFont.glyphs[glyphName]NEWLINE if not thisGlyph:NEWLINE thisGlyph = GSGlyph()NEWLINE thisGlyph.name = glyphNameNEWLINE thisFont.glyphs.append( thisGlyph )NEWLINE print("Generated %s" % glyphName)NEWLINE NEWLINE thisGlyph.export = FalseNEWLINE NEWLINE # find zero for reference:NEWLINE zeroGlyph = thisFont.glyphs["zero.lf"]NEWLINE if not zeroGlyph:NEWLINE zeroGlyph = thisFont.glyphs["zero.tf"]NEWLINE if not zeroGlyph:NEWLINE zeroGlyph = thisFont.glyphs["zero"]NEWLINE NEWLINE # draw in every layer:NEWLINE for thisLayer in thisGlyph.layers:NEWLINE # make sure it is empty:NEWLINE thisLayer.clear()NEWLINE NEWLINE # draw outer circle:NEWLINE for thisPath in partCircle:NEWLINE pen = thisLayer.getPen()NEWLINE pen.moveTo( thisPath[0] )NEWLINE for thisSegment in thisPath[1:]:NEWLINE if len(thisSegment) == 2: # linetoNEWLINE pen.lineTo( thisSegment )NEWLINE elif len(thisSegment) == 3: # curvetoNEWLINE pen.curveTo(NEWLINE thisSegment[0],NEWLINE thisSegment[1],NEWLINE thisSegment[2]NEWLINE )NEWLINE else:NEWLINE print("%s: Path drawing error. 
Could not process this segment:\n" % (glyphName, thisSegment))NEWLINE pen.closePath()NEWLINE pen.endPath()NEWLINE NEWLINE # scale circle to match zero:NEWLINE if zeroGlyph:NEWLINE zeroBounds = zeroGlyph.layers[thisLayer.associatedMasterId].boundsNEWLINE zeroHeight = zeroBounds.size.heightNEWLINE if zeroHeight: # zero could be emptyNEWLINE zeroOvershoot = -zeroBounds.origin.yNEWLINE overshootDiff = zeroOvershoot - 5.0NEWLINE actualHeight = thisLayer.bounds.size.heightNEWLINE correctedHeight = zeroHeight - 2 * overshootDiffNEWLINE if correctedHeight != actualHeight:NEWLINE scaleFactor = correctedHeight/actualHeightNEWLINE correction = transform(shiftY=5.0)NEWLINE correction.appendTransform_( transform(scale=scaleFactor) )NEWLINE correction.appendTransform_( transform(-5.0) )NEWLINE thisLayer.applyTransform( correction.transformStruct() )NEWLINENEWLINE # inner circle, scaled down:NEWLINE currentHeight = thisLayer.bounds.size.heightNEWLINE outerCircle = thisLayer.paths[0]NEWLINE innerCircle = outerCircle.copy()NEWLINE thisLayer.paths.append(innerCircle)NEWLINE NEWLINE # scale down inner circle:NEWLINE stemSize = 50.0NEWLINE hstems = thisLayer.associatedFontMaster().horizontalStemsNEWLINE vstems = thisLayer.associatedFontMaster().verticalStemsNEWLINE if hstems and vstems:NEWLINE stemSize = (hstems[0] + vstems[0]) * 0.25NEWLINE NEWLINE maximumStemSize = currentHeight * 0.28NEWLINE stemSize = min(maximumStemSize,stemSize)NEWLINE smallerBy = stemSize * 2 * 1.06NEWLINE newHeight = currentHeight - smallerByNEWLINE scaleFactor = newHeight/currentHeightNEWLINE scale = transform(scale=scaleFactor).transformStruct()NEWLINE NEWLINE centerX = innerCircle.bounds.origin.x + innerCircle.bounds.size.width * 0.5NEWLINE centerY = innerCircle.bounds.origin.y + innerCircle.bounds.size.height * 0.5NEWLINE shift = transform(shiftX=-centerX, shiftY=-centerY).transformStruct()NEWLINE shiftBack = transform(shiftX=centerX, shiftY=centerY).transformStruct()NEWLINE NEWLINE 
innerCircle.applyTransform( shift )NEWLINE innerCircle.applyTransform( scale )NEWLINE innerCircle.applyTransform( shiftBack )NEWLINENEWLINE # tidy up paths and set width:NEWLINE thisLayer.correctPathDirection()NEWLINE thisLayer.cleanUpPaths()NEWLINE thisLayer.LSB = 40.0NEWLINE thisLayer.RSB = 40.0NEWLINE NEWLINE # add anchor:NEWLINE centerX = thisLayer.bounds.origin.x + thisLayer.bounds.size.width * 0.5NEWLINE centerY = thisLayer.bounds.origin.y + thisLayer.bounds.size.height * 0.5NEWLINE centerAnchor = GSAnchor()NEWLINE centerAnchor.name = "#center"NEWLINE centerAnchor.position = NSPoint( centerX, centerY )NEWLINE thisLayer.anchors.append(centerAnchor)NEWLINENEWLINEdef boxArea(thisLayer):NEWLINE return thisLayer.bounds.size.width * thisLayer.bounds.size.heightNEWLINENEWLINEthisFont.disableUpdateInterface() # suppresses UI updates in Font ViewNEWLINENEWLINENEWLINE# add circle if not present in font already:NEWLINEcircleName = "_part.circle"NEWLINEif not thisFont.glyphs[circleName]:NEWLINE buildCirclePart( thisFont, circleName )NEWLINEcircleGlyph = thisFont.glyphs[circleName]NEWLINENEWLINE# determining scale of inscribed letters:NEWLINEscaleFactors = []NEWLINEfor thisMaster in thisFont.masters:NEWLINE radius = circleGlyph.layers[thisMaster.id].paths[1].bounds.size.width * 0.5NEWLINE maxArea = 0.0NEWLINE biggestLayer = NoneNEWLINE for glyphName in circledGlyphNames:NEWLINE if "." 
in glyphName:NEWLINE glyphName = glyphName[:glyphName.find(".")]NEWLINE thisGlyph = thisFont.glyphs[glyphName]NEWLINE if thisGlyph:NEWLINE thisLayer = thisGlyph.layers[thisMaster.id]NEWLINE thisArea = boxArea(thisLayer)NEWLINE if thisArea > maxArea:NEWLINE maxArea = thisAreaNEWLINE biggestLayer = thisLayerNEWLINE NEWLINE angleInRadians = math.atan2( biggestLayer.bounds.size.height, biggestLayer.bounds.size.width )NEWLINE scaledHeight = math.sin(angleInRadians) * radius * 2 * 0.9NEWLINE scaleFactor = scaledHeight / biggestLayer.bounds.size.heightNEWLINE scaleFactors.append(scaleFactor)NEWLINE NEWLINENEWLINEfor glyphName in circledGlyphNames:NEWLINE thisGlyph = thisFont.glyphs[glyphName]NEWLINE if not thisGlyph:NEWLINE thisGlyph = GSGlyph()NEWLINE thisGlyph.name = glyphNameNEWLINE thisFont.glyphs.append(thisGlyph)NEWLINENEWLINE thisGlyph.beginUndo() # begin undo groupingNEWLINE print("Building %s" % thisGlyph.name)NEWLINE buildCircledGlyph( thisGlyph, circleName, scaleFactors )NEWLINE thisGlyph.endUndo() # end undo groupingNEWLINENEWLINEthisFont.enableUpdateInterface() # re-enables UI updates in Font ViewNEWLINE
import importlib
import inspect
import math

import numpy as np
import pytest

from river import optim


def _is_concrete_loss(obj):
    """Select instantiable loss classes, skipping CrossEntropy variants."""
    return (
        inspect.isclass(obj)
        and not inspect.isabstract(obj)
        and not issubclass(obj, optim.losses.CrossEntropy)
    )


@pytest.mark.parametrize(
    "loss",
    [
        pytest.param(cls(), id=cls_name)
        for cls_name, cls in inspect.getmembers(
            importlib.import_module("river.optim.losses"), _is_concrete_loss
        )
    ],
)
def test_batch_online_equivalence(loss):
    """Batch gradient must equal the per-element (online) gradients."""
    y_true = np.random.randint(2, size=30)
    y_pred = np.random.uniform(-10, 10, size=30)

    batch_gradient = loss.gradient(y_true, y_pred)
    for yt, yp, expected in zip(y_true, y_pred, batch_gradient):
        assert math.isclose(loss.gradient(yt, yp), expected, abs_tol=1e-9)
# pylint: disable=too-many-linesNEWLINE# coding=utf-8NEWLINE# --------------------------------------------------------------------------NEWLINE# Copyright (c) Microsoft Corporation. All rights reserved.NEWLINE# Licensed under the MIT License. See License.txt in the project root for license information.NEWLINE# Code generated by Microsoft (R) AutoRest Code Generator.NEWLINE# Changes may cause incorrect behavior and will be lost if the code is regenerated.NEWLINE# --------------------------------------------------------------------------NEWLINEfrom typing import Any, AsyncIterable, Callable, Dict, Optional, TypeVar, UnionNEWLINENEWLINEfrom azure.core.async_paging import AsyncItemPaged, AsyncListNEWLINEfrom azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_errorNEWLINEfrom azure.core.pipeline import PipelineResponseNEWLINEfrom azure.core.pipeline.transport import AsyncHttpResponseNEWLINEfrom azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethodNEWLINEfrom azure.core.rest import HttpRequestNEWLINEfrom azure.core.tracing.decorator import distributed_traceNEWLINEfrom azure.core.tracing.decorator_async import distributed_trace_asyncNEWLINEfrom azure.mgmt.core.exceptions import ARMErrorFormatNEWLINEfrom azure.mgmt.core.polling.async_arm_polling import AsyncARMPollingNEWLINENEWLINEfrom ... import models as _modelsNEWLINEfrom ..._vendor import _convert_requestNEWLINEfrom ...operations._scope_maps_operations import build_create_request_initial, build_delete_request_initial, build_get_request, build_list_request, build_update_request_initialNEWLINET = TypeVar('T')NEWLINEClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]NEWLINENEWLINEclass ScopeMapsOperations:NEWLINE """ScopeMapsOperations async operations.NEWLINENEWLINE You should not instantiate this class directly. 
Instead, you should create a Client instance thatNEWLINE instantiates it for you and attaches it as an attribute.NEWLINENEWLINE :ivar models: Alias to model classes used in this operation group.NEWLINE :type models: ~azure.mgmt.containerregistry.v2020_11_01_preview.modelsNEWLINE :param client: Client for service requests.NEWLINE :param config: Configuration of service client.NEWLINE :param serializer: An object model serializer.NEWLINE :param deserializer: An object model deserializer.NEWLINE """NEWLINENEWLINE models = _modelsNEWLINENEWLINE def __init__(self, client, config, serializer, deserializer) -> None:NEWLINE self._client = clientNEWLINE self._serialize = serializerNEWLINE self._deserialize = deserializerNEWLINE self._config = configNEWLINENEWLINE @distributed_trace_asyncNEWLINE async def get(NEWLINE self,NEWLINE resource_group_name: str,NEWLINE registry_name: str,NEWLINE scope_map_name: str,NEWLINE **kwargs: AnyNEWLINE ) -> "_models.ScopeMap":NEWLINE """Gets the properties of the specified scope map.NEWLINENEWLINE :param resource_group_name: The name of the resource group to which the container registryNEWLINE belongs.NEWLINE :type resource_group_name: strNEWLINE :param registry_name: The name of the container registry.NEWLINE :type registry_name: strNEWLINE :param scope_map_name: The name of the scope map.NEWLINE :type scope_map_name: strNEWLINE :keyword callable cls: A custom type or function that will be passed the direct responseNEWLINE :return: ScopeMap, or the result of cls(response)NEWLINE :rtype: ~azure.mgmt.containerregistry.v2020_11_01_preview.models.ScopeMapNEWLINE :raises: ~azure.core.exceptions.HttpResponseErrorNEWLINE """NEWLINE cls = kwargs.pop('cls', None) # type: ClsType["_models.ScopeMap"]NEWLINE error_map = {NEWLINE 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsErrorNEWLINE }NEWLINE error_map.update(kwargs.pop('error_map', {}))NEWLINENEWLINE api_version = kwargs.pop('api_version', "2020-11-01-preview") # 
type: str  # NOTE(review): truncated line — the chunk boundary cut the `get` coroutine mid-statement.

        # ----- tail of ScopeMapsOperations.get -----
        # Build the GET request for a single scope map, send it through the
        # client pipeline, and deserialize the 200 response into a ScopeMap.
        request = build_get_request(
            subscription_id=self._config.subscription_id,
            resource_group_name=resource_group_name,
            registry_name=registry_name,
            scope_map_name=scope_map_name,
            api_version=api_version,
            template_url=self.get.metadata['url'],
        )
        request = _convert_request(request)
        request.url = self._client.format_url(request.url)

        pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access
            request,
            stream=False,
            **kwargs
        )
        response = pipeline_response.http_response

        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)

        deserialized = self._deserialize('ScopeMap', pipeline_response)

        if cls:
            # Caller-supplied hook receives the raw response, the model and headers.
            return cls(pipeline_response, deserialized, {})

        return deserialized

    get.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ContainerRegistry/registries/{registryName}/scopeMaps/{scopeMapName}"}  # type: ignore


    async def _create_initial(
        self,
        resource_group_name: str,
        registry_name: str,
        scope_map_name: str,
        scope_map_create_parameters: "_models.ScopeMap",
        **kwargs: Any
    ) -> "_models.ScopeMap":
        """Send the initial PUT of the create long-running operation and return the first payload."""
        cls = kwargs.pop('cls', None) # type: ClsType["_models.ScopeMap"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))

        api_version = kwargs.pop('api_version', "2020-11-01-preview") # type: str
        content_type = kwargs.pop('content_type', "application/json") # type: Optional[str]

        # Serialize the model into the JSON request body.
        _json = self._serialize.body(scope_map_create_parameters, 'ScopeMap')

        request = build_create_request_initial(
            subscription_id=self._config.subscription_id,
            resource_group_name=resource_group_name,
            registry_name=registry_name,
            scope_map_name=scope_map_name,
            api_version=api_version,
            content_type=content_type,
            json=_json,
            template_url=self._create_initial.metadata['url'],
        )
        request = _convert_request(request)
        request.url = self._client.format_url(request.url)

        pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access
            request,
            stream=False,
            **kwargs
        )
        response = pipeline_response.http_response

        # 200 = already-provisioned resource returned, 201 = creation accepted.
        if response.status_code not in [200, 201]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)

        if response.status_code == 200:
            deserialized = self._deserialize('ScopeMap', pipeline_response)

        if response.status_code == 201:
            deserialized = self._deserialize('ScopeMap', pipeline_response)

        if cls:
            return cls(pipeline_response, deserialized, {})

        return deserialized

    _create_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ContainerRegistry/registries/{registryName}/scopeMaps/{scopeMapName}"}  # type: ignore


    @distributed_trace_async
    async def begin_create(
        self,
        resource_group_name: str,
        registry_name: str,
        scope_map_name: str,
        scope_map_create_parameters: "_models.ScopeMap",
        **kwargs: Any
    ) -> AsyncLROPoller["_models.ScopeMap"]:
        """Creates a scope map for a container registry with the specified parameters.

        :param resource_group_name: The name of the resource group to which the container registry
         belongs.
        :type resource_group_name: str
        :param registry_name: The name of the container registry.
        :type registry_name: str
        :param scope_map_name: The name of the scope map.
        :type scope_map_name: str
        :param scope_map_create_parameters: The parameters for creating a scope map.
        :type scope_map_create_parameters:
         ~azure.mgmt.containerregistry.v2020_11_01_preview.models.ScopeMap
        :keyword callable cls: A custom type or function that will be passed the direct response
        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
        :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
         this operation to not poll, or pass in your own initialized polling object for a personal
         polling strategy.
        :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
         Retry-After header is present.
        :return: An instance of AsyncLROPoller that returns either ScopeMap or the result of
         cls(response)
        :rtype:
         ~azure.core.polling.AsyncLROPoller[~azure.mgmt.containerregistry.v2020_11_01_preview.models.ScopeMap]
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        api_version = kwargs.pop('api_version', "2020-11-01-preview") # type: str
        content_type = kwargs.pop('content_type', "application/json") # type: Optional[str]
        polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
        cls = kwargs.pop('cls', None) # type: ClsType["_models.ScopeMap"]
        lro_delay = kwargs.pop(
            'polling_interval',
            self._config.polling_interval
        )
        cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
        if cont_token is None:
            # First call of the LRO; `cls=lambda x,y,z: x` keeps the raw
            # pipeline response so the poller can deserialize the final state.
            raw_result = await self._create_initial(
                resource_group_name=resource_group_name,
                registry_name=registry_name,
                scope_map_name=scope_map_name,
                scope_map_create_parameters=scope_map_create_parameters,
                api_version=api_version,
                content_type=content_type,
                cls=lambda x,y,z: x,
                **kwargs
            )
        # Drop any caller-supplied error_map before kwargs reach the poller.
        kwargs.pop('error_map', None)

        def get_long_running_output(pipeline_response):
            # Deserialize the terminal response of the polling loop.
            response = pipeline_response.http_response
            deserialized = self._deserialize('ScopeMap', pipeline_response)
            if cls:
                return cls(pipeline_response, deserialized, {})
            return deserialized


        if polling is True: polling_method = AsyncARMPolling(lro_delay, **kwargs)
        elif polling is False: polling_method = AsyncNoPolling()
        else: polling_method = polling
        if cont_token:
            # Resume a previously saved poller instead of starting a new operation.
            return AsyncLROPoller.from_continuation_token(
                polling_method=polling_method,
                continuation_token=cont_token,
                client=self._client,
                deserialization_callback=get_long_running_output
            )
        return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)

    begin_create.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ContainerRegistry/registries/{registryName}/scopeMaps/{scopeMapName}"}  # type: ignore

    async def _delete_initial(  # pylint: disable=inconsistent-return-statements
        self,
        resource_group_name: str,
        registry_name: str,
        scope_map_name: str,
        **kwargs: Any
    ) -> None:
        """Send the initial DELETE of the delete long-running operation."""
        cls = kwargs.pop('cls', None) # type: ClsType[None]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))

        api_version = kwargs.pop('api_version', "2020-11-01-preview") # type: str


        request = build_delete_request_initial(
            subscription_id=self._config.subscription_id,
            resource_group_name=resource_group_name,
            registry_name=registry_name,
            scope_map_name=scope_map_name,
            api_version=api_version,
            template_url=self._delete_initial.metadata['url'],
        )
        request = _convert_request(request)
        request.url = self._client.format_url(request.url)

        pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access
            request,
            stream=False,
            **kwargs
        )
        response = pipeline_response.http_response

        # 200/202 = delete accepted, 204 = nothing to delete.
        if response.status_code not in [200, 202, 204]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)

        if cls:
            return cls(pipeline_response, None, {})

    _delete_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ContainerRegistry/registries/{registryName}/scopeMaps/{scopeMapName}"}  # type: ignore


    @distributed_trace_async
    async def begin_delete(  # pylint: disable=inconsistent-return-statements
        self,
        resource_group_name: str,
        registry_name: str,
        scope_map_name: str,
        **kwargs: Any
    ) -> AsyncLROPoller[None]:
        """Deletes a scope map from a container registry.

        :param resource_group_name: The name of the resource group to which the container registry
         belongs.
        :type resource_group_name: str
        :param registry_name: The name of the container registry.
        :type registry_name: str
        :param scope_map_name: The name of the scope map.
        :type scope_map_name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
        :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
         this operation to not poll, or pass in your own initialized polling object for a personal
         polling strategy.
        :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
         Retry-After header is present.
        :return: An instance of AsyncLROPoller that returns either None or the result of cls(response)
        :rtype: ~azure.core.polling.AsyncLROPoller[None]
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        api_version = kwargs.pop('api_version', "2020-11-01-preview") # type: str
        polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
        cls = kwargs.pop('cls', None) # type: ClsType[None]
        lro_delay = kwargs.pop(
            'polling_interval',
            self._config.polling_interval
        )
        cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
        if cont_token is None:
            # `cls=lambda x,y,z: x` hands the raw pipeline response to the poller.
            raw_result = await self._delete_initial(
                resource_group_name=resource_group_name,
                registry_name=registry_name,
                scope_map_name=scope_map_name,
                api_version=api_version,
                cls=lambda x,y,z: x,
                **kwargs
            )
        kwargs.pop('error_map', None)

        def get_long_running_output(pipeline_response):
            # Delete has no body; only invoke the custom hook if supplied.
            if cls:
                return cls(pipeline_response, None, {})


        if polling is True: polling_method = AsyncARMPolling(lro_delay, **kwargs)
        elif polling is False: polling_method = AsyncNoPolling()
        else: polling_method = polling
        if cont_token:
            return AsyncLROPoller.from_continuation_token(
                polling_method=polling_method,
                continuation_token=cont_token,
                client=self._client,
                deserialization_callback=get_long_running_output
            )
        return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)

    begin_delete.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ContainerRegistry/registries/{registryName}/scopeMaps/{scopeMapName}"}  # type: ignore

    async def _update_initial(
        self,
        resource_group_name: str,
        registry_name: str,
        scope_map_name: str,
        scope_map_update_parameters: "_models.ScopeMapUpdateParameters",
        **kwargs: Any
    ) -> "_models.ScopeMap":
        """Send the initial PATCH of the update long-running operation and return the first payload."""
        cls = kwargs.pop('cls', None) # type: ClsType["_models.ScopeMap"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))

        api_version = kwargs.pop('api_version', "2020-11-01-preview") # type: str
        content_type = kwargs.pop('content_type', "application/json") # type: Optional[str]

        _json = self._serialize.body(scope_map_update_parameters, 'ScopeMapUpdateParameters')

        request = build_update_request_initial(
            subscription_id=self._config.subscription_id,
            resource_group_name=resource_group_name,
            registry_name=registry_name,
            scope_map_name=scope_map_name,
            api_version=api_version,
            content_type=content_type,
            json=_json,
            template_url=self._update_initial.metadata['url'],
        )
        request = _convert_request(request)
        request.url = self._client.format_url(request.url)

        pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access
            request,
            stream=False,
            **kwargs
        )
        response = pipeline_response.http_response

        if response.status_code not in [200, 201]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)

        if response.status_code == 200:
            deserialized = self._deserialize('ScopeMap', pipeline_response)

        if response.status_code == 201:
            deserialized = self._deserialize('ScopeMap', pipeline_response)

        if cls:
            return cls(pipeline_response, deserialized, {})

        return deserialized

    _update_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ContainerRegistry/registries/{registryName}/scopeMaps/{scopeMapName}"}  # type: ignore


    @distributed_trace_async
    async def begin_update(
        self,
        resource_group_name: str,
        registry_name: str,
        scope_map_name: str,
        scope_map_update_parameters: "_models.ScopeMapUpdateParameters",
        **kwargs: Any
    ) -> AsyncLROPoller["_models.ScopeMap"]:
        """Updates a scope map with the specified parameters.

        :param resource_group_name: The name of the resource group to which the container registry
         belongs.
        :type resource_group_name: str
        :param registry_name: The name of the container registry.
        :type registry_name: str
        :param scope_map_name: The name of the scope map.
        :type scope_map_name: str
        :param scope_map_update_parameters: The parameters for updating a scope map.
        :type scope_map_update_parameters:
         ~azure.mgmt.containerregistry.v2020_11_01_preview.models.ScopeMapUpdateParameters
        :keyword callable cls: A custom type or function that will be passed the direct response
        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
        :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
         this operation to not poll, or pass in your own initialized polling object for a personal
         polling strategy.
        :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
         Retry-After header is present.
        :return: An instance of AsyncLROPoller that returns either ScopeMap or the result of
         cls(response)
        :rtype:
         ~azure.core.polling.AsyncLROPoller[~azure.mgmt.containerregistry.v2020_11_01_preview.models.ScopeMap]
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        api_version = kwargs.pop('api_version', "2020-11-01-preview") # type: str
        content_type = kwargs.pop('content_type', "application/json") # type: Optional[str]
        polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
        cls = kwargs.pop('cls', None) # type: ClsType["_models.ScopeMap"]
        lro_delay = kwargs.pop(
            'polling_interval',
            self._config.polling_interval
        )
        cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
        if cont_token is None:
            raw_result = await self._update_initial(
                resource_group_name=resource_group_name,
                registry_name=registry_name,
                scope_map_name=scope_map_name,
                scope_map_update_parameters=scope_map_update_parameters,
                api_version=api_version,
                content_type=content_type,
                cls=lambda x,y,z: x,
                **kwargs
            )
        kwargs.pop('error_map', None)

        def get_long_running_output(pipeline_response):
            response = pipeline_response.http_response
            deserialized = self._deserialize('ScopeMap', pipeline_response)
            if cls:
                return cls(pipeline_response, deserialized, {})
            return deserialized


        if polling is True: polling_method = AsyncARMPolling(lro_delay, **kwargs)
        elif polling is False: polling_method = AsyncNoPolling()
        else: polling_method = polling
        if cont_token:
            return AsyncLROPoller.from_continuation_token(
                polling_method=polling_method,
                continuation_token=cont_token,
                client=self._client,
                deserialization_callback=get_long_running_output
            )
        return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)

    begin_update.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ContainerRegistry/registries/{registryName}/scopeMaps/{scopeMapName}"}  # type: ignore

    @distributed_trace
    def list(
        self,
        resource_group_name: str,
        registry_name: str,
        **kwargs: Any
    ) -> AsyncIterable["_models.ScopeMapListResult"]:
        """Lists all the scope maps for the specified container registry.

        :param resource_group_name: The name of the resource group to which the container registry
         belongs.
        :type resource_group_name: str
        :param registry_name: The name of the container registry.
        :type registry_name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: An iterator like instance of either ScopeMapListResult or the result of cls(response)
        :rtype:
         ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.containerregistry.v2020_11_01_preview.models.ScopeMapListResult]
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        api_version = kwargs.pop('api_version', "2020-11-01-preview") # type: str

        cls = kwargs.pop('cls', None) # type: ClsType["_models.ScopeMapListResult"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        def prepare_request(next_link=None):
            # Build the first-page request, or rebuild one targeting next_link
            # for subsequent pages.
            if not next_link:

                request = build_list_request(
                    subscription_id=self._config.subscription_id,
                    resource_group_name=resource_group_name,
                    registry_name=registry_name,
                    api_version=api_version,
                    template_url=self.list.metadata['url'],
                )
                request = _convert_request(request)
                request.url = self._client.format_url(request.url)

            else:

                request = build_list_request(
                    subscription_id=self._config.subscription_id,
                    resource_group_name=resource_group_name,
                    registry_name=registry_name,
                    api_version=api_version,
                    template_url=next_link,
                )
                request = _convert_request(request)
                request.url = self._client.format_url(request.url)
                request.method = "GET"
            return request

        async def extract_data(pipeline_response):
            # Split one page into (link-to-next-page, items-of-this-page).
            deserialized = self._deserialize("ScopeMapListResult", pipeline_response)
            list_of_elem = deserialized.value
            if cls:
                list_of_elem = cls(list_of_elem)
            return deserialized.next_link or None, AsyncList(list_of_elem)

        async def get_next(next_link=None):
            # Fetch one page from the service, raising on non-200 responses.
            request = prepare_request(next_link)

            pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access
                request,
                stream=False,
                **kwargs
            )
            response = pipeline_response.http_response

            if response.status_code not in [200]:
                map_error(status_code=response.status_code, response=response, error_map=error_map)
                raise HttpResponseError(response=response, error_format=ARMErrorFormat)

            return pipeline_response


        return AsyncItemPaged(
            get_next, extract_data
        )
    list.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ContainerRegistry/registries/{registryName}/scopeMaps"}  # type: ignore
"""!
@brief Improved Experiment Argument Parser for SudoRmRf

@author Efthymios Tzinis {etzinis2@illinois.edu}
@copyright University of Illinois at Urbana-Champaign
"""

import argparse


def get_args():
    """! Command line parser.

    :return: An ``argparse.Namespace`` with all experiment settings.
    """
    parser = argparse.ArgumentParser(
        description='Experiment Argument Parser')
    # ===============================================
    # Datasets arguments
    parser.add_argument("--train", type=str, nargs='+',
                        help="Training dataset",
                        default=None,
                        choices=['WHAM', 'LIBRI2MIX', 'MUSDB', 'FUSS'])
    parser.add_argument("--val", type=str, nargs='+',
                        help="Validation dataset",
                        default=None,
                        choices=['WHAM', 'LIBRI2MIX', 'MUSDB', 'FUSS'])
    parser.add_argument("--test", type=str, nargs='+',
                        help="Test dataset",
                        default=None,
                        choices=['WHAM', 'LIBRI2MIX', 'MUSDB', 'FUSS'])
    parser.add_argument("--train_val", type=str, nargs='+',
                        help="Validation on the training data",
                        default=None,
                        choices=['WHAM', 'LIBRI2MIX'])
    parser.add_argument("--n_train", type=int,
                        help="""Reduce the number of training
                        samples to this number.""", default=0)
    parser.add_argument("--n_val", type=int,
                        help="""Reduce the number of evaluation
                        samples to this number.""", default=0)
    parser.add_argument("--n_test", type=int,
                        help="""Reduce the number of test
                        samples to this number.""", default=0)
    parser.add_argument("--n_train_val", type=int,
                        help="""Reduce the number of evaluation
                        samples on the training set.""", default=0)
    parser.add_argument("--audio_timelength", type=float,
                        help="""The timelength of the audio that you want
                        to load in seconds.""",
                        default=4.)
    parser.add_argument("--min_or_max", type=str,
                        help="""Min or max if this applies to the dataset
                        that you use. Min means that the mixture is going to
                        be cropped at the minimum of all sources and for max
                        is going to be zero-padded""",
                        default='min',
                        choices=['min', 'max'])
    parser.add_argument("--zero_pad_audio", action='store_true',
                        help="""If a specific timelength is required all
                        audio sources and mixtures are going to be zero
                        padded in order to have the required length. If not
                        and a specific timelegth is required then the files
                        with less than required legth are not going to be
                        used.""", default=False)
    parser.add_argument("--normalize_audio", action='store_true',
                        help="""Normalize using mean and standard deviation
                        before processing each audio file.""",
                        default=False)
    # ===============================================
    # Separation task arguments
    parser.add_argument("--n_channels", type=int,
                        help="""The number of mixture channels.""",
                        default=1, choices=[1, 2])
    parser.add_argument("--min_num_sources", type=int,
                        help="""The minimum number of sources in a mixture.""",
                        default=1)
    parser.add_argument("--max_num_sources", type=int,
                        help="""The maximum number of sources in a mixture.""",
                        default=4)
    parser.add_argument("--separation_task", type=str,
                        help="The separation task you would like to perform, "
                             "some of the tasks might not be available for "
                             "specific datasets.",
                        default=None,
                        choices=['enhance_single_white_noise',
                                 'enhance_single', 'enhance_both',
                                 'sep_clean', 'sep_noisy'])
    # ===============================================
    # Training params
    parser.add_argument("-bs", "--batch_size", type=int,
                        help="""The number of samples in each batch.
                        Warning: Cannot be less than the number of
                        the validation samples""", default=4)
    parser.add_argument("--n_epochs", type=int,
                        help="""The number of epochs that the
                        experiment should run""", default=500)
    parser.add_argument("-lr", "--learning_rate", type=float,
                        help="""Initial Learning rate""", default=1e-3)
    parser.add_argument("--divide_lr_by", type=float,
                        help="""The factor that the learning rate
                        would be divided by""", default=3.)
    parser.add_argument("--patience", type=int,
                        help="""Patience until reducing the learning rate .""",
                        default=5)
    parser.add_argument("--optimizer", type=str,
                        help="""The optimizer that you want to use""",
                        default="adam",
                        choices=['adam', 'radam'])
    parser.add_argument("--clip_grad_norm", type=float,
                        help="""The norm value which all gradients
                        are going to be clipped, 0 means that no
                        grads are going to be clipped""",
                        default=5.)
    parser.add_argument("-fs", type=int,
                        help="""Sampling rate of the audio.""", default=8000)
    # ===============================================
    # CometML experiment configuration arguments
    parser.add_argument("-tags", "--cometml_tags", type=str,
                        nargs="+", help="""A list of tags for the cometml
                        experiment.""",
                        default=[])
    parser.add_argument("--experiment_name", type=str,
                        help="""Name of current experiment""",
                        default=None)
    # Fix: the help text was a copy-paste of --experiment_name's
    # ("Name of current experiment"); it actually names the cometml project.
    parser.add_argument("--project_name", type=str,
                        help="""Name of the cometml project""",
                        default="yolo_experiment")
    # ===============================================
    # Device params
    parser.add_argument("-cad", "--cuda_available_devices", type=str,
                        nargs="+",
                        help="""A list of Cuda IDs that would be
                        available for running this experiment""",
                        default=['0'],
                        choices=['0', '1', '2', '3'])
    parser.add_argument("--n_jobs", type=int,
                        help="""The number of cpu workers for
                        loading the data, etc.""", default=4)
    # ===============================================
    # Local experiment logging
    parser.add_argument("-elp", "--experiment_logs_path", type=str,
                        help="""Path for logging experiment's audio.""",
                        default=None)
    parser.add_argument("-mlp", "--metrics_logs_path", type=str,
                        help="""Path for logging metrics.""",
                        default=None)
    parser.add_argument("-clp", "--checkpoints_path", type=str,
                        help="""Path for logging checkpoints.""",
                        default=None)
    parser.add_argument("--save_checkpoint_every", type=int,
                        help="""Number of epochs between each model save.""",
                        default=0)
    # ===============================================
    # Separation model (SuDO-RM-RF) params
    parser.add_argument("--out_channels", type=int,
                        help="The number of channels of the internal "
                             "representation outside the U-Blocks.",
                        default=128)
    parser.add_argument("--in_channels", type=int,
                        help="The number of channels of the internal "
                             "representation inside the U-Blocks.",
                        default=512)
    parser.add_argument("--num_blocks", type=int,
                        help="Number of the successive U-Blocks.",
                        default=16)
    parser.add_argument("--upsampling_depth", type=int,
                        help="Number of successive upsamplings and "
                             "effectively downsampling inside each U-Block. "
                             "The aggregation of all scales is performed by "
                             "addition.",
                        default=5)
    parser.add_argument("--group_size", type=int,
                        help="The number of individual computation groups "
                             "applied if group communication module is used.",
                        default=16)
    parser.add_argument("--enc_kernel_size", type=int,
                        help="The width of the encoder and decoder kernels.",
                        default=21)
    parser.add_argument("--enc_num_basis", type=int,
                        help="Number of the encoded basis representations.",
                        default=512)

    # Attentive sudo parameters
    parser.add_argument("--att_dims", type=int,
                        help="The number of attention depth.",
                        default=256)
    parser.add_argument("--att_n_heads", type=int,
                        help="The number of attention heads.",
                        default=4)
    parser.add_argument("--att_dropout", type=float,
                        help="The dropout rate inside the attention layers.",
                        default=0.1)

    parser.add_argument("--model_type", type=str,
                        help="The type of model you would like to use.",
                        default='relu',
                        choices=['relu', 'softmax', 'groupcomm',
                                 'groupcomm_v2', 'causal',
                                 'attention', 'attention_v2',
                                 'attention_v3', 'sepformer'])

    return parser.parse_args()
import audiovisuaali
from urllib.request import quote
from requests import get as rget
from json import loads


# trump
async def trump(message, client, arguments):
    """Search tronalddump.io for Trump quotes matching *arguments* and post up to three.

    :param message: The triggering chat message (its channel receives the reply).
    :param client: The chat client used to send the reply.
    :param arguments: Raw search string typed by the user.
    """
    # Query the quote-search API with the user's (URL-quoted) search string.
    query = "https://api.tronalddump.io/search/quote?query={}".format(quote(arguments))
    response = loads(rget(query).text)

    if response["total"] == 0:
        # Bug fix: the original assigned "No quotes found" to a local and then
        # returned *before* sending it, so the user never got any feedback.
        await client.send_message(message.channel, "No quotes found")
        return

    # Report the hit count and show up to three quotes, each in its own code fence
    # (the original's three copy-pasted try/except blocks did the same thing).
    found = response["_embedded"]["quotes"]
    letter = ":rofl: **| Found " + str(response["total"]) + " hits\n"
    for hit in found[:3]:
        letter = letter + "```" + hit["value"] + "```"

    # Sending message
    await client.send_message(message.channel, letter + "**")
"""
The tsnet.postprocessing.time_history module contains functions
to plot the time history of head and velocity at the start and
end point of a pipe
"""
from __future__ import print_function
import matplotlib.pyplot as plt


def _plot_endpoint_traces(pipe, results, wn, tt, title, ylabel):
    """Shared renderer: draw start/end-node traces of *results* for *pipe*.

    Parameters
    ----------
    pipe : str
        Name of the pipe to report on
    results : list
        Per-pipe result arrays indexed by ``wn.links[pipe].id - 1``
    wn : wntr.network.model.WaterNetworkModel
        Network
    tt : list
        Simulation timestamps
    title : str
        Figure title
    ylabel : str
        Label for the vertical axis
    """
    idx = wn.links[pipe].id - 1
    plt.figure(figsize=(10, 4), dpi=80, facecolor='w', edgecolor='k')
    plt.plot(tt, results[idx][0, :], 'b-', label='Start Node')
    plt.plot(tt, results[idx][-1, :], 'r-', label='End Node')
    plt.xlim([tt[0], tt[-1]])
    plt.title(title)
    plt.xlabel("Time")
    plt.ylabel(ylabel)
    plt.legend(loc='best')
    plt.grid(True)
    plt.show()


def plot_head_history(pipe, H, wn, tt):
    """Plot Head history on the start and end node of a pipe

    Parameters
    ----------
    pipe : str
        Name of the pipe where you want to report the head
    H : list
        Head results
    wn : wntr.network.model.WaterNetworkModel
        Network
    tt : list
        Simulation timestamps
    """
    _plot_endpoint_traces(pipe, H, wn, tt,
                          'Pressure Head of Pipe %s ' % pipe,
                          "Pressure Head (m)")


def plot_velocity_history(pipe, V, wn, tt):
    """Plot Velocity history on the start and end node of a pipe

    Parameters
    ----------
    pipe : str
        Name of the pipe where you want to report the velocity
    V : list
        velocity results
    wn : wntr.network.model.WaterNetworkModel
        Network
    tt : list
        Simulation timestamps
    """
    _plot_endpoint_traces(pipe, V, wn, tt,
                          'Velocity Head of Pipe %s ' % pipe,
                          "Velocity (m/s)")
import argparse

import torch
from torch_geometric.nn import Node2Vec
from torch_geometric.utils import to_undirected

from ogb.nodeproppred import PygNodePropPredDataset


def save_embedding(model):
    """Save the Node2Vec embedding weight matrix to ``embedding.pt`` as a CPU tensor."""
    torch.save(model.embedding.weight.data.cpu(), 'embedding.pt')


def main():
    """Train a Node2Vec embedding on the ogbn-arxiv graph, checkpointing periodically."""
    parser = argparse.ArgumentParser(description='OGBN-Arxiv (Node2Vec)')
    parser.add_argument('--device', type=int, default=0)
    parser.add_argument('--embedding_dim', type=int, default=128)
    parser.add_argument('--walk_length', type=int, default=80)
    parser.add_argument('--context_size', type=int, default=20)
    parser.add_argument('--walks_per_node', type=int, default=10)
    parser.add_argument('--batch_size', type=int, default=256)
    parser.add_argument('--lr', type=float, default=0.01)
    parser.add_argument('--epochs', type=int, default=5)
    parser.add_argument('--log_steps', type=int, default=1)
    args = parser.parse_args()

    # Fall back to CPU when CUDA is unavailable.
    device = f'cuda:{args.device}' if torch.cuda.is_available() else 'cpu'
    device = torch.device(device)

    dataset = PygNodePropPredDataset(name='ogbn-arxiv',
                                     root='/srv/scratch/ogb/datasets/nodeproppred')
    data = dataset[0]
    # Symmetrize the edge index before running random walks.
    data.edge_index = to_undirected(data.edge_index, data.num_nodes)

    # sparse=True so the embedding gradient is sparse (required by SparseAdam).
    model = Node2Vec(data.edge_index, args.embedding_dim, args.walk_length,
                     args.context_size, args.walks_per_node,
                     sparse=True).to(device)

    # The loader yields (positive, negative) random-walk batches.
    loader = model.loader(batch_size=args.batch_size, shuffle=True,
                          num_workers=4)
    optimizer = torch.optim.SparseAdam(list(model.parameters()), lr=args.lr)

    model.train()
    for epoch in range(1, args.epochs + 1):
        for i, (pos_rw, neg_rw) in enumerate(loader):
            optimizer.zero_grad()
            loss = model.loss(pos_rw.to(device), neg_rw.to(device))
            loss.backward()
            optimizer.step()

            if (i + 1) % args.log_steps == 0:
                print(f'Epoch: {epoch:02d}, Step: {i+1:03d}/{len(loader)}, '
                      f'Loss: {loss:.4f}')

            if (i + 1) % 100 == 0:  # Save model every 100 steps.
                save_embedding(model)
        # Also checkpoint once at the end of every epoch.
        save_embedding(model)


if __name__ == "__main__":
    main()
from __future__ import absolute_import, division
__author__ = 'katharine'

from six import indexbytes
from six.moves import range

from libpebble2.events.mixin import EventSourceMixin
from libpebble2.exceptions import ScreenshotError
from libpebble2.protocol.screenshots import *


class Screenshot(EventSourceMixin):
    """
    Takes a screenshot from the watch.

    :param pebble: The pebble of which to take a screenshot.
    :type pebble: .PebbleConnection
    """
    def __init__(self, pebble):
        self._pebble = pebble
        super(Screenshot, self).__init__()

    def grab_image(self):
        """
        Takes a screenshot. Blocks until completion, or raises a :exc:`.ScreenshotError` on failure.

        While this method is executing, "progress" events will periodically be emitted with the following signature: ::

           (downloaded_so_far, total_size)

        :return: A list of bytearrays in RGB8 format, where each bytearray is one row of the image.
        """
        # We have to open this queue before we make the request, to ensure we don't miss the response.
        queue = self._pebble.get_endpoint_queue(ScreenshotResponse)
        self._pebble.send_packet(ScreenshotRequest())
        return self._read_screenshot(queue)

    def _read_screenshot(self, queue):
        """Collect response packets from `queue` until the full image payload
        has arrived, emitting "progress" events along the way, then decode it.
        Closes the queue in both the success and the error path."""
        # The first packet carries the header (dimensions, version, response code).
        data = queue.get().data
        header = ScreenshotHeader.parse(data)[0]
        if header.response_code != ScreenshotHeader.ResponseCode.OK:
            queue.close()
            raise ScreenshotError("Screenshot failed: {!s}".format(header.response_code))
        # Remaining pixel bytes follow the header; accumulate until we have
        # the size implied by the header's version/width/height.
        data = header.data
        expected_size = self._get_expected_bytes(header)
        while len(data) < expected_size:
            data += queue.get().data
            self._broadcast_event("progress", len(data), expected_size)
        queue.close()
        return self._decode_image(header, data)

    @classmethod
    def _get_expected_bytes(cls, header):
        """Return the expected pixel-payload size in bytes.

        Version 1 screenshots are 1 bit per pixel; version 2 are 1 byte
        per pixel. Any other version is rejected."""
        if header.version == 1:
            return (header.width * header.height) // 8
        elif header.version == 2:
            return header.width * header.height
        else:
            raise ScreenshotError("Unknown screenshot version: {}".format(header.version))

    @classmethod
    def _decode_image(cls, header, data):
        """Dispatch to the decoder matching the screenshot format version.

        NOTE(review): returns None for unknown versions; in practice
        _get_expected_bytes has already raised for those."""
        if header.version == 1:
            return cls._decode_1bit(header, data)
        elif header.version == 2:
            return cls._decode_8bit(header, data)

    @classmethod
    def _decode_1bit(cls, header, data):
        """Expand a 1-bit-per-pixel image into rows of RGB8 bytes.

        Bits are taken least-significant-first within each byte; each bit
        becomes an R, G, B triple of either 0 or 255 (monochrome)."""
        output = []
        row_bytes = header.width // 8
        for row in range(header.height):
            row_values = []
            for column in range(header.width):
                pixel = (indexbytes(data, row*row_bytes + column//8) >> (column % 8)) & 1
                row_values.extend([pixel * 255] * 3)
            output.append(bytearray(row_values))
        return output

    @classmethod
    def _decode_8bit(cls, header, data):
        """Expand an 8-bit-per-pixel image into rows of RGB8 bytes.

        Each byte packs 2-bit R, G, B channels (bits 5-4, 3-2, 1-0); each
        2-bit value (0-3) is scaled by 85 to span 0-255."""
        output = []
        for row in range(header.height):
            row_values = []
            for column in range(header.width):
                pixel = indexbytes(data, row*header.width + column)
                row_values.extend([
                    ((pixel >> 4) & 0b11) * 85,
                    ((pixel >> 2) & 0b11) * 85,
                    ((pixel >> 0) & 0b11) * 85,
                ])
            output.append(bytearray(row_values))
        return output
from qrandom.qrandom import QRandom
# -*- coding: utf-8 -*-NEWLINE"""Chemical Engineering Design Library (ChEDL). Utilities for process modeling.NEWLINECopyright (C) 2016, 2017, 2018, 2020 Caleb Bell <Caleb.Andrew.Bell@gmail.com>NEWLINENEWLINEPermission is hereby granted, free of charge, to any person obtaining a copyNEWLINEof this software and associated documentation files (the "Software"), to dealNEWLINEin the Software without restriction, including without limitation the rightsNEWLINEto use, copy, modify, merge, publish, distribute, sublicense, and/or sellNEWLINEcopies of the Software, and to permit persons to whom the Software isNEWLINEfurnished to do so, subject to the following conditions:NEWLINENEWLINEThe above copyright notice and this permission notice shall be included in allNEWLINEcopies or substantial portions of the Software.NEWLINENEWLINETHE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS ORNEWLINEIMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,NEWLINEFITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THENEWLINEAUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHERNEWLINELIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,NEWLINEOUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THENEWLINESOFTWARE.NEWLINENEWLINEThis module contains functionality for calculating rating and designingNEWLINEvapor-liquid separators.NEWLINENEWLINEFor reporting bugs, adding feature requests, or submitting pull requests,NEWLINEplease use the `GitHub issue tracker <https://github.com/CalebBell/fluids/>`_NEWLINEor contact the author at Caleb.Andrew.Bell@gmail.com.NEWLINENEWLINE.. contents:: :local:NEWLINENEWLINEFunctionsNEWLINE---------NEWLINE.. autofunction :: v_Sounders_BrownNEWLINE.. autofunction :: K_separator_WatkinsNEWLINE.. autofunction :: K_separator_demister_YorkNEWLINE.. 
autofunction :: K_Sounders_Brown_theoreticalNEWLINE"""NEWLINENEWLINEfrom __future__ import divisionNEWLINEfrom math import log, exp, pi, sqrtNEWLINEfrom fluids.constants import g, foot, psiNEWLINEfrom fluids.numerics import splev, implementation_optimize_tckNEWLINENEWLINE__all__ = ['v_Sounders_Brown', 'K_separator_Watkins',NEWLINE 'K_separator_demister_York', 'K_Sounders_Brown_theoretical']NEWLINENEWLINENEWLINE# 92 points taken from a 2172x3212 page scan, after dewarping the scan,NEWLINE# digitization with Engauge Digitizer, and extensive checking; every 5th pointNEWLINE# it produced was selected plus the last point. The initial value is adjustedNEWLINE# to be the lower limit of the graph.NEWLINENEWLINENEWLINEtck_Watkins = implementation_optimize_tck([[-5.115995809754082, -5.115995809754082, -5.115995809754082,NEWLINE -5.115995809754082, -4.160106231099973, -3.209113630523477,NEWLINE -1.2175106961204154, 0.4587657198189318, 1.1197669427405068,NEWLINE 1.6925908552310418, 1.6925908552310418, 1.6925908552310418,NEWLINE 1.6925908552310418],NEWLINE [-1.4404286048266364, -1.2375168139385286, -0.9072614905522024,NEWLINE -0.7662335745829165, -0.944537665617708, -1.957339717378027,NEWLINE -3.002614318094637, -3.5936804378352956, -3.8779153181940553,NEWLINE 0.0, 0.0, 0.0, 0.0],NEWLINE 3])NEWLINENEWLINEdef K_separator_Watkins(x, rhol, rhog, horizontal=False, method='spline'):NEWLINE r'''Calculates the Sounders-Brown `K` factor as used in determining maximumNEWLINE allowable gas velocity in a two-phase separator in either a horizontal orNEWLINE vertical orientation. This function approximates a graph published in [1]_NEWLINE to determine `K` as used in the following equation:NEWLINENEWLINE .. math::NEWLINE v_{max} = K_{SB}\sqrt{\frac{\rho_l-\rho_g}{\rho_g}}NEWLINENEWLINE The graph has `K_{SB}` on its y-axis, and the following as its x-axis:NEWLINENEWLINE .. 
math::NEWLINE \frac{m_l}{m_g}\sqrt{\rho_g/\rho_l}NEWLINE = \frac{(1-x)}{x}\sqrt{\rho_g/\rho_l}NEWLINENEWLINE Cubic spline interpolation is the default method of retrieving a valueNEWLINE from the graph, which was digitized with Engauge-Digitizer.NEWLINENEWLINE Also supported are two published curve fits to the graph. The first is thatNEWLINE of Blackwell (1984) [2]_, as follows:NEWLINENEWLINE .. math::NEWLINE K_{SB} = \exp(-1.942936 -0.814894X -0.179390 X^2 -0.0123790 X^3NEWLINE + 0.000386235 X^4 + 0.000259550 X^5)NEWLINENEWLINE X = \ln\left[\frac{(1-x)}{x}\sqrt{\rho_g/\rho_l}\right]NEWLINENEWLINE The second is that of Branan (1999), as follows:NEWLINENEWLINE .. math::NEWLINE K_{SB} = \exp(-1.877478097 -0.81145804597X -0.1870744085 X^2NEWLINE -0.0145228667 X^3 -0.00101148518 X^4)NEWLINENEWLINE X = \ln\left[\frac{(1-x)}{x}\sqrt{\rho_g/\rho_l}\right]NEWLINENEWLINE ParametersNEWLINE ----------NEWLINE x : floatNEWLINE Quality of fluid entering separator, [-]NEWLINE rhol : floatNEWLINE Density of liquid phase [kg/m^3]NEWLINE rhog : floatNEWLINE Density of gas phase [kg/m^3]NEWLINE horizontal : bool, optionalNEWLINE Whether to use the vertical or horizontal value; horizontal is 1.25NEWLINE higherNEWLINE method : strNEWLINE One of 'spline, 'blackwell', or 'branan'NEWLINENEWLINE ReturnsNEWLINE -------NEWLINE K : floatNEWLINE Sounders Brown horizontal or vertical `K` factor for two-phaseNEWLINE separator design only, [m/s]NEWLINENEWLINE NotesNEWLINE -----NEWLINE Both the 'branan' and 'blackwell' models are used frequently. However,NEWLINE the spline is much more accurate.NEWLINENEWLINE No limits checking is enforced. However, the x-axis spans only 0.006 toNEWLINE 5.4, and the function should not be used outside those limits.NEWLINENEWLINE ExamplesNEWLINE --------NEWLINE >>> K_separator_Watkins(0.88, 985.4, 1.3, horizontal=True)NEWLINE 0.07951613600476297NEWLINENEWLINE ReferencesNEWLINE ----------NEWLINE .. [1] Watkins (1967). 
Sizing Separators and Accumulators, HydrocarbonNEWLINE Processing, November 1967.NEWLINE .. [2] Blackwell, W. Wayne. Chemical Process Design on a ProgrammableNEWLINE Calculator. New York: Mcgraw-Hill, 1984.NEWLINE .. [3] Branan, Carl R. Pocket Guide to Chemical Engineering. 1st edition.NEWLINE Houston, Tex: Gulf Professional Publishing, 1999.NEWLINE '''NEWLINE factor = (1. - x)/x*sqrt(rhog/rhol)NEWLINE if method == 'spline':NEWLINE K = exp(float(splev(log(factor), tck_Watkins)))NEWLINE elif method == 'blackwell':NEWLINE X = log(factor)NEWLINE A = -1.877478097NEWLINE B = -0.81145804597NEWLINE C = -0.1870744085NEWLINE D = -0.0145228667NEWLINE E = -0.00101148518NEWLINE K = exp(A + X*(B + X*(C + X*(D + E*X))))NEWLINE elif method == 'branan':NEWLINE X = log(factor)NEWLINE A = -1.942936NEWLINE B = -0.814894NEWLINE C = -0.179390NEWLINE D = -0.0123790NEWLINE E = 0.000386235NEWLINE F = 0.000259550NEWLINE K = exp(A + X*(B + X*(C + X*(D + X*(E + F*X)))))NEWLINE else:NEWLINE raise ValueError("Only methods 'spline', 'branan', and 'blackwell' are supported.")NEWLINE K *= foot # Converts units of ft/s to m/s; the graph and all fits are in ft/sNEWLINE if horizontal:NEWLINE K *= 1.25 # Watkins recommends a factor of 1.25 for horizontal separators over vertical separatorsNEWLINE return KNEWLINENEWLINENEWLINEdef K_separator_demister_York(P, horizontal=False):NEWLINE r'''Calculates the Sounders Brown `K` factor as used in determining maximumNEWLINE permissible gas velocity in a two-phase separator in either a horizontal orNEWLINE vertical orientation, *with a demister*.NEWLINE This function is a curve fit to [1]_ published in [2]_ and is widely used.NEWLINENEWLINE For 1 < P < 15 psia:NEWLINENEWLINE .. math::NEWLINE K = 0.1821 + 0.0029P + 0.0460\ln PNEWLINENEWLINE For 15 <= P <= 40 psia:NEWLINENEWLINE .. math::NEWLINE K = 0.35NEWLINENEWLINE For P < 5500 psia:NEWLINENEWLINE .. 
math::NEWLINE K = 0.430 - 0.023\ln PNEWLINENEWLINE In the above equations, P is in units of psia.NEWLINENEWLINE ParametersNEWLINE ----------NEWLINE P : floatNEWLINE Pressure of separator, [Pa]NEWLINE horizontal : bool, optionalNEWLINE Whether to use the vertical or horizontal value; horizontal is 1.25NEWLINE times higher, [-]NEWLINENEWLINE ReturnsNEWLINE -------NEWLINE K : floatNEWLINE Sounders Brown Horizontal or vertical `K` factor for two-phaseNEWLINE separator design with a demister, [m/s]NEWLINENEWLINE NotesNEWLINE -----NEWLINE If the input pressure is under 1 psia, 1 psia is used. If theNEWLINE input pressure is over 5500 psia, 5500 psia is used.NEWLINENEWLINE ExamplesNEWLINE --------NEWLINE >>> K_separator_demister_York(975*psi)NEWLINE 0.08281536035331669NEWLINENEWLINE ReferencesNEWLINE ----------NEWLINE .. [2] Otto H. York Company, "Mist Elimination in Gas Treatment Plants andNEWLINE Refineries," Engineering, Parsippany, NJ.NEWLINE .. [1] Svrcek, W. Y., and W. D. Monnery. "Design Two-Phase SeparatorsNEWLINE within the Right Limits" Chemical Engineering Progress, (October 1,NEWLINE 1993): 53-60.NEWLINE '''NEWLINE P = P/psi # Correlation in terms of psiaNEWLINE if P < 15:NEWLINE if P < 1:NEWLINE P = 1 # Prevent negative K values, but as a consequence beNEWLINE # optimistic for K values; limit is 0.185 ft/s but real valuesNEWLINE # should probably be lowerNEWLINE K = 0.1821 + 0.0029*P + 0.0460*log(P)NEWLINE elif P < 40:NEWLINE K = 0.35NEWLINE else:NEWLINE if P > 5500:NEWLINE P = 5500 # Do not allow for lower K values above 5500 psia, asNEWLINE # the limit is stated to be 5500NEWLINE K = 0.430 - 0.023*log(P)NEWLINE K *= foot # Converts units of ft/s to m/s; the graph and all fits are in ft/sNEWLINE if horizontal:NEWLINE # Watkins recommends a factor of 1.25 for horizontal separators overNEWLINE # vertical separators as wellNEWLINE K *= 1.25NEWLINE return KNEWLINENEWLINENEWLINEdef v_Sounders_Brown(K, rhol, rhog):NEWLINE r'''Calculates the maximum allowable vapor 
velocity in a two-phaseNEWLINE separator to permit separation between entrained droplets and the gasNEWLINE using an empirical `K` factor, named after Sounders and Brown [1]_.NEWLINE This is a simplifying expression for terminal velocity and drag onNEWLINE particles.NEWLINENEWLINE .. math::NEWLINE v_{max} = K_{SB} \sqrt{\frac{\rho_l-\rho_g}{\rho_g}}NEWLINENEWLINE ParametersNEWLINE ----------NEWLINE K : floatNEWLINE Sounders Brown `K` factor for two-phase separator design, [m/s]NEWLINE rhol : floatNEWLINE Density of liquid phase [kg/m^3]NEWLINE rhog : floatNEWLINE Density of gas phase [kg/m^3]NEWLINENEWLINE ReturnsNEWLINE -------NEWLINE v_max : floatNEWLINE Maximum allowable vapor velocity in a two-phase separator to permitNEWLINE separation between entrained droplets and the gas, [m/s]NEWLINENEWLINE NotesNEWLINE -----NEWLINE The Sounders Brown K factor is related to the terminal velocity as shown inNEWLINE the following expression.NEWLINENEWLINE .. math::NEWLINE v_{term} = v_{max} = \sqrt{\frac{4 g d_p (\rho_p-\rho_f)}{3 C_D \rho_f }}NEWLINENEWLINE v_{term} = \sqrt{\frac{(\rho_p-\rho_f)}{\rho_f}} \sqrt{\frac{4 g d_p}{3 C_D}}NEWLINENEWLINE v_{term} = K_{SB} \sqrt{\frac{4 g d_p}{3 C_D}}NEWLINENEWLINE Note this form corresponds to the Newton's law range (Re > 500), but inNEWLINE reality droplets are normally in the intermediate or Stoke's law regionNEWLINE [2]_. For this reason using the drag coefficient expression directly isNEWLINE cleaner, but identical results can be found with the Sounders BrownNEWLINE equation.NEWLINENEWLINE ExamplesNEWLINE --------NEWLINE >>> v_Sounders_Brown(K=0.08, rhol=985.4, rhog=1.3)NEWLINE 2.2010906387516167NEWLINENEWLINE ReferencesNEWLINE ----------NEWLINE .. [1] Souders, Mott., and George Granger. Brown. "Design of FractionatingNEWLINE Columns I. Entrainment and Capacity." Industrial & Engineering ChemistryNEWLINE 26, no. 1 (January 1, 1934): 98-103. https://doi.org/10.1021/ie50289a025.NEWLINE .. [2] Vasude, Gael D. 
Ulrich and Palligarnai T. Chemical EngineeringNEWLINE Process Design and Economics : A Practical Guide. 2nd edition. Durham,NEWLINE N.H: Process Publishing, 2004.NEWLINE '''NEWLINE return K*sqrt((rhol - rhog)/rhog)NEWLINENEWLINENEWLINEdef K_Sounders_Brown_theoretical(D, Cd, g=g):NEWLINE r'''Converts a known drag coefficient into a Sounders-Brown `K` factorNEWLINE for two-phase separator design. This factor is the traditional way forNEWLINE separator diameters to be obtained although it is unnecessary and theNEWLINE theoretical drag coefficient method can be used instead.NEWLINENEWLINE .. math::NEWLINE K_{SB} = \sqrt{\frac{(\rho_p-\rho_f)}{\rho_f}}NEWLINE = \sqrt{\frac{4 g d_p}{3 C_D}}NEWLINENEWLINE ParametersNEWLINE ----------NEWLINE D : floatNEWLINE Design diameter of the droplets, [m]NEWLINE Cd : floatNEWLINE Drag coefficient [-]NEWLINE g : float, optionalNEWLINE Acceleration due to gravity, [m/s^2]NEWLINENEWLINE ReturnsNEWLINE -------NEWLINE K : floatNEWLINE Sounders Brown `K` factor for two-phase separator design, [m/s]NEWLINENEWLINE NotesNEWLINE -----NEWLINE Drag coefficient is a function of velocity; so iteration is needed toNEWLINE obtain the most correct answer. The following example shows the use ofNEWLINE iteration to obtain the final velocity:NEWLINENEWLINE >>> from fluids import *NEWLINE >>> V = 2.0NEWLINE >>> D = 150E-6NEWLINE >>> rho = 1.3NEWLINE >>> rhol = 700.NEWLINE >>> mu = 1E-5NEWLINE >>> for i in range(10):NEWLINE ... Re = Reynolds(V=V, rho=rho, mu=mu, D=D)NEWLINE ... Cd = drag_sphere(Re)NEWLINE ... K = K_Sounders_Brown_theoretical(D=D, Cd=Cd)NEWLINE ... V = v_Sounders_Brown(K, rhol=rhol, rhog=rho)NEWLINE ... 
print('%.14f' %V)NEWLINE 0.76093307417658NEWLINE 0.56242939340131NEWLINE 0.50732895050696NEWLINE 0.48957142095508NEWLINE 0.48356021946899NEWLINE 0.48149076033622NEWLINE 0.48077414934614NEWLINE 0.48052549959141NEWLINE 0.48043916249756NEWLINE 0.48040917690193NEWLINENEWLINE The use of Sounders-Brown constants can be replaced as follows (theNEWLINE v_terminal method includes its own solver for terminal velocity):NEWLINENEWLINE >>> from fluids.drag import v_terminalNEWLINE >>> v_terminal(D=D, rhop=rhol, rho=rho, mu=mu)NEWLINE 0.4803932186998NEWLINENEWLINE ExamplesNEWLINE --------NEWLINE >>> K_Sounders_Brown_theoretical(D=150E-6, Cd=0.5)NEWLINE 0.06263114241333939NEWLINENEWLINE ReferencesNEWLINE ----------NEWLINE .. [1] Svrcek, W. Y., and W. D. Monnery. "Design Two-Phase SeparatorsNEWLINE within the Right Limits" Chemical Engineering Progress, (October 1,NEWLINE 1993): 53-60.NEWLINE '''NEWLINE return sqrt((4.0/3.0)*g*D/(Cd))NEWLINE
from io import BytesIONEWLINENEWLINEfrom PIL import ImageNEWLINEfrom flask import send_fileNEWLINENEWLINEfrom utils import httpNEWLINEfrom utils.endpoint import Endpoint, setupNEWLINENEWLINENEWLINE@setupNEWLINEclass Rip(Endpoint):NEWLINE params = ['avatar0']NEWLINENEWLINE def generate(self, avatars, text, usernames, kwargs):NEWLINE base = Image.open(self.assets.get('assets/rip/rip.bmp')).convert('RGBA').resize((642, 806))NEWLINE avatar = http.get_image(avatars[0]).resize((300, 300)).convert('RGBA')NEWLINENEWLINE base.paste(avatar, (175, 385), avatar)NEWLINE base = base.convert('RGBA')NEWLINENEWLINE b = BytesIO()NEWLINE base.save(b, format='png')NEWLINE b.seek(0)NEWLINE return send_file(b, mimetype='image/png')NEWLINE
text = """NEWLINE//------------------------------------------------------------------------------NEWLINE// Explicit instantiation.NEWLINE//------------------------------------------------------------------------------NEWLINE#include "Mesh/MeshPolicy.cc"NEWLINE#include "Geometry/Dimension.hh"NEWLINENEWLINEnamespace Spheral {NEWLINE template class MeshPolicy<Dim< %(ndim)s > >;NEWLINE}NEWLINENEWLINE"""NEWLINE
#!/usr/bin/env python2
#
# Distributed under the MIT/X11 software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
#

from test_framework.mininode import *
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import *
import time

'''
Test behavior of -maxuploadtarget.

* Verify that getdata requests for old blocks (>1week) are dropped
if uploadtarget has been reached.
* Verify that getdata requests for recent blocks are respected even
if uploadtarget has been reached.
* Verify that the upload counters are reset after 24 hours.
'''

# TestNode: bare-bones "peer". Used mostly as a conduit for a test to sending
# p2p messages to a node, generating the messages in the main testing logic.
class TestNode(NodeConnCB):
    def __init__(self):
        NodeConnCB.__init__(self)
        self.connection = None
        self.ping_counter = 1       # nonce used for the next ping we send
        self.last_pong = msg_pong()
        self.block_receive_map = {} # block sha256 -> times received

    def add_connection(self, conn):
        self.connection = conn
        self.peer_disconnected = False

    def on_inv(self, conn, message):
        pass

    # Track the last getdata message we receive (used in the test)
    def on_getdata(self, conn, message):
        self.last_getdata = message

    def on_block(self, conn, message):
        # Count every delivery of each block so the test can assert on it.
        message.block.calc_sha256()
        try:
            self.block_receive_map[message.block.sha256] += 1
        except KeyError as e:
            self.block_receive_map[message.block.sha256] = 1

    # Spin until verack message is received from the node.
    # We use this to signal that our test can begin. This
    # is called from the testing thread, so it needs to acquire
    # the global lock.
    def wait_for_verack(self):
        def veracked():
            return self.verack_received
        return wait_until(veracked, timeout=10)

    def wait_for_disconnect(self):
        def disconnected():
            return self.peer_disconnected
        return wait_until(disconnected, timeout=10)

    # Wrapper for the NodeConn's send_message function
    def send_message(self, message):
        self.connection.send_message(message)

    def on_pong(self, conn, message):
        self.last_pong = message

    def on_close(self, conn):
        self.peer_disconnected = True

    # Sync up with the node after delivery of a block
    def sync_with_ping(self, timeout=30):
        def received_pong():
            return (self.last_pong.nonce == self.ping_counter)
        self.connection.send_message(msg_ping(nonce=self.ping_counter))
        success = wait_until(received_pong, timeout)
        self.ping_counter += 1
        return success

class MaxUploadTest(BitcoinTestFramework):
    def __init__(self):
        self.utxo = []
        self.txouts = gen_return_txouts()

    def add_options(self, parser):
        parser.add_option("--testbinary", dest="testbinary",
                          default=os.getenv("PEWD", "brofistd"),
                          help="brofistd binary to test")

    def setup_chain(self):
        initialize_chain_clean(self.options.tmpdir, 2)

    def setup_network(self):
        # Start a node with maxuploadtarget of 200 MB (/24h)
        self.nodes = []
        self.nodes.append(start_node(0, self.options.tmpdir, ["-debug", "-maxuploadtarget=200", "-blockmaxsize=999000"]))

    def mine_full_block(self, node, address):
        # Want to create a full block
        # We'll generate a 66k transaction below, and 14 of them is close to the 1MB block limit
        for j in xrange(14):
            if len(self.utxo) < 14:
                self.utxo = node.listunspent()
            inputs=[]
            outputs = {}
            t = self.utxo.pop()
            inputs.append({ "txid" : t["txid"], "vout" : t["vout"]})
            remchange = t["amount"] - Decimal("0.001000")
            outputs[address]=remchange
            # Create a basic transaction that will send change back to ourself after account for a fee
            # And then insert the 128 generated transaction outs in the middle rawtx[92] is where the #
            # of txouts is stored and is the only thing we overwrite from the original transaction
            rawtx = node.createrawtransaction(inputs, outputs)
            newtx = rawtx[0:92]
            newtx = newtx + self.txouts
            newtx = newtx + rawtx[94:]
            # Appears to be ever so slightly faster to sign with SIGHASH_NONE
            signresult = node.signrawtransaction(newtx,None,None,"NONE")
            txid = node.sendrawtransaction(signresult["hex"], True)
        # Mine a full sized block which will be these transactions we just created
        node.generate(1)

    def run_test(self):
        # Before we connect anything, we first set the time on the node
        # to be in the past, otherwise things break because the CNode
        # time counters can't be reset backward after initialization
        old_time = int(time.time() - 2*60*60*24*7)
        self.nodes[0].setmocktime(old_time)

        # Generate some old blocks
        self.nodes[0].generate(130)

        # test_nodes[0] will only request old blocks
        # test_nodes[1] will only request new blocks
        # test_nodes[2] will test resetting the counters
        test_nodes = []
        connections = []

        for i in xrange(3):
            test_nodes.append(TestNode())
            connections.append(NodeConn('127.0.0.1', p2p_port(0), self.nodes[0], test_nodes[i]))
            test_nodes[i].add_connection(connections[i])

        NetworkThread().start() # Start up network handling in another thread
        [x.wait_for_verack() for x in test_nodes]

        # Test logic begins here

        # Now mine a big block
        self.mine_full_block(self.nodes[0], self.nodes[0].getnewaddress())

        # Store the hash; we'll request this later
        big_old_block = self.nodes[0].getbestblockhash()
        old_block_size = self.nodes[0].getblock(big_old_block, True)['size']
        big_old_block = int(big_old_block, 16)

        # Advance to two days ago
        self.nodes[0].setmocktime(int(time.time()) - 2*60*60*24)

        # Mine one more block, so that the prior block looks old
        self.mine_full_block(self.nodes[0], self.nodes[0].getnewaddress())

        # We'll be requesting this new block too
        big_new_block = self.nodes[0].getbestblockhash()
        new_block_size = self.nodes[0].getblock(big_new_block)['size']
        big_new_block = int(big_new_block, 16)

        # test_nodes[0] will test what happens if we just keep requesting the
        # the same big old block too many times (expect: disconnect)

        getdata_request = msg_getdata()
        getdata_request.inv.append(CInv(2, big_old_block))

        # Budget arithmetic: the node reserves 144 blocks' worth of bytes per
        # day for relaying new blocks; only the remainder serves old blocks.
        max_bytes_per_day = 200*1024*1024
        daily_buffer = 144 * MAX_BLOCK_SIZE
        max_bytes_available = max_bytes_per_day - daily_buffer
        success_count = max_bytes_available // old_block_size

        # 144MB will be reserved for relaying new blocks, so expect this to
        # succeed for ~70 tries.
        for i in xrange(success_count):
            test_nodes[0].send_message(getdata_request)
            test_nodes[0].sync_with_ping()
            assert_equal(test_nodes[0].block_receive_map[big_old_block], i+1)

        assert_equal(len(self.nodes[0].getpeerinfo()), 3)
        # At most a couple more tries should succeed (depending on how long
        # the test has been running so far).
        for i in xrange(3):
            test_nodes[0].send_message(getdata_request)
        test_nodes[0].wait_for_disconnect()
        assert_equal(len(self.nodes[0].getpeerinfo()), 2)
        print "Peer 0 disconnected after downloading old block too many times"

        # Requesting the current block on test_nodes[1] should succeed indefinitely,
        # even when over the max upload target.
        # We'll try 200 times
        getdata_request.inv = [CInv(2, big_new_block)]
        for i in xrange(200):
            test_nodes[1].send_message(getdata_request)
            test_nodes[1].sync_with_ping()
            assert_equal(test_nodes[1].block_receive_map[big_new_block], i+1)

        print "Peer 1 able to repeatedly download new block"

        # But if test_nodes[1] tries for an old block, it gets disconnected too.
        getdata_request.inv = [CInv(2, big_old_block)]
        test_nodes[1].send_message(getdata_request)
        test_nodes[1].wait_for_disconnect()
        assert_equal(len(self.nodes[0].getpeerinfo()), 1)

        print "Peer 1 disconnected after trying to download old block"

        print "Advancing system time on node to clear counters..."

        # If we advance the time by 24 hours, then the counters should reset,
        # and test_nodes[2] should be able to retrieve the old block.
        self.nodes[0].setmocktime(int(time.time()))
        test_nodes[2].sync_with_ping()
        test_nodes[2].send_message(getdata_request)
        test_nodes[2].sync_with_ping()
        assert_equal(test_nodes[2].block_receive_map[big_old_block], 1)

        print "Peer 2 able to download old block"

        [c.disconnect_node() for c in connections]

        #stop and start node 0 with 1MB maxuploadtarget, whitelist 127.0.0.1
        print "Restarting nodes with -whitelist=127.0.0.1"
        stop_node(self.nodes[0], 0)
        self.nodes[0] = start_node(0, self.options.tmpdir, ["-debug", "-whitelist=127.0.0.1", "-maxuploadtarget=1", "-blockmaxsize=999000"])

        #recreate/reconnect 3 test nodes
        test_nodes = []
        connections = []

        for i in xrange(3):
            test_nodes.append(TestNode())
            connections.append(NodeConn('127.0.0.1', p2p_port(0), self.nodes[0], test_nodes[i]))
            test_nodes[i].add_connection(connections[i])

        NetworkThread().start() # Start up network handling in another thread
        [x.wait_for_verack() for x in test_nodes]

        #retrieve 20 blocks which should be enough to break the 1MB limit
        getdata_request.inv = [CInv(2, big_new_block)]
        for i in xrange(20):
            test_nodes[1].send_message(getdata_request)
            test_nodes[1].sync_with_ping()
            assert_equal(test_nodes[1].block_receive_map[big_new_block], i+1)

        getdata_request.inv = [CInv(2, big_old_block)]
        test_nodes[1].send_message(getdata_request)
        test_nodes[1].wait_for_disconnect()
        assert_equal(len(self.nodes[0].getpeerinfo()), 3) #node is still connected because of the whitelist

        print "Peer 1 still connected after trying to download old block (whitelisted)"

        [c.disconnect_node() for c in connections]

if __name__ == '__main__':
    MaxUploadTest().main()
# -*- coding: utf-8 -*-NEWLINEfrom __future__ import unicode_literalsNEWLINENEWLINEfrom django.db import migrations, modelsNEWLINEimport core.utilsNEWLINENEWLINENEWLINEclass Migration(migrations.Migration):NEWLINENEWLINE dependencies = [NEWLINE ('questions', '0007_auto_20151209_1526'),NEWLINE ]NEWLINENEWLINE operations = [NEWLINE migrations.AddField(NEWLINE model_name='quiz',NEWLINE name='image',NEWLINE field=models.ImageField(default=None, upload_to=core.utils.PathAndRename(b'quiz/'), null=True, verbose_name='Image', blank=True),NEWLINE ),NEWLINE ]NEWLINE
import itertoolsNEWLINEimport mathNEWLINEimport stringNEWLINEimport sysNEWLINEfrom bisect import bisect_left as bi_lNEWLINEfrom bisect import bisect_right as bi_rNEWLINEfrom collections import Counter, defaultdict, dequeNEWLINEfrom functools import lru_cache, reduceNEWLINEfrom heapq import heapify, heappop, heappushNEWLINEfrom operator import or_, xorNEWLINENEWLINEsys.setrecursionlimit(10**8)NEWLINEinf = float('inf')NEWLINEMOD = 10**9+7NEWLINE# MOD = 998244353NEWLINENEWLINENEWLINEusing_numpy = 1NEWLINEimport networkx as nxNEWLINEimport numpy as npNEWLINEfrom numba import i8, njitNEWLINEfrom scipy import optimizeNEWLINEfrom scipy.ndimage import distance_transform_cdtNEWLINEfrom scipy.sparse import csr_matrixNEWLINEfrom scipy.sparse.csgraph import (NEWLINE connected_components,NEWLINE csgraph_to_dense,NEWLINE maximum_flow,NEWLINE minimum_spanning_tree,NEWLINE shortest_path,NEWLINE)NEWLINEfrom scipy.spatial import ConvexHullNEWLINEfrom scipy.special import combNEWLINENEWLINENEWLINEclass Algebra:NEWLINE class Modular(int):NEWLINE def __init__(self, n, mod=MOD):NEWLINE self.value = nNEWLINE self.mod = modNEWLINENEWLINE def __str__(self): return f'{self.value}'NEWLINENEWLINE def __add__(self, other):NEWLINE return self.__class__((self.value + other.value) % self.mod)NEWLINE def __sub__(self, x): return self.__class__((self.value - x.value) % self.mod)NEWLINE def __mul__(self, x): return self.__class__((self.value * x.value) % self.mod)NEWLINE def __pow__(self, x): return self.__class__(pow(self.value, x.value, self.mod))NEWLINENEWLINE def __lt__(self, x): return self.value < x.valueNEWLINE def __le__(self, x): return self.value <= x.valueNEWLINE def __eq__(self, x): return self.value == x.valueNEWLINE def __ne__(self, x): return self.value != x.valueNEWLINE def __gt__(self, x): return self.value > x.valueNEWLINE def __ge__(self, x): return self.value >= x.valueNEWLINENEWLINENEWLINENEWLINE class SemiGroup:NEWLINE passNEWLINE class Monoid:NEWLINE passNEWLINE class 
Group:NEWLINE passNEWLINE class SemiRing:NEWLINE passNEWLINE class Ring:NEWLINE passNEWLINENEWLINENEWLINE @staticmethodNEWLINE def identity(n):NEWLINE if using_numpy:NEWLINE return np.identity(n, dtype=np.int64)NEWLINE else:NEWLINE a = [[0]*n for _ in range(n)]NEWLINE for i in range(n): a[i][i] = 1NEWLINE return aNEWLINENEWLINE @staticmethodNEWLINE def dot(a, b):NEWLINE if using_numpy:NEWLINE return np.dot(a, b)NEWLINE else:NEWLINE h, w, l = len(a), len(b[0]), len(b)NEWLINE assert len(a[0]) == lNEWLINE c = [[0]*w for _ in range(h)]NEWLINE for i in range(h):NEWLINE for j in range(w):NEWLINE for k in range(l):NEWLINE c[i][j] += a[i][k]*b[k][j]NEWLINE return cNEWLINENEWLINE @classmethodNEWLINE def matrix_pow(cls, a, n, mod=10**9+7):NEWLINE m = len(a)NEWLINE b = cls.identity(m)NEWLINE while n:NEWLINE if n&1: b = cls.dot(b, a)NEWLINE n >>= 1; a = cls.dot(a, a)NEWLINE if using_numpy:NEWLINE a %= mod; b %= modNEWLINE else:NEWLINE for i in range(m):NEWLINE for j in range(m):NEWLINE a[i][j] %= modNEWLINE b[i][j] %= modNEWLINE return bNEWLINENEWLINE @staticmethodNEWLINE def bitwise_dot(a, b):NEWLINE if using_numpy:NEWLINE return np.bitwise_xor.reduce(a[:,None,:] & b.T[None,:,:], axis=-1)NEWLINE else:NEWLINE h, w, l = len(a), len(b[0]), len(b)NEWLINE assert len(a[0]) == lNEWLINE c = [[0]*w for _ in range(h)]NEWLINE for i in range(h):NEWLINE for j in range(w):NEWLINE for k in range(l):NEWLINE c[i][j] ^= a[i][k]&b[k][j]NEWLINE return cNEWLINENEWLINE @classmethodNEWLINE def bitwise_mat_pow(cls, a, n):NEWLINE if n==0: return np.eye(len(a), dtype=np.uint32)*((1<<32)-1)NEWLINE res = cls.bitwise_mat_pow(a, n//2)NEWLINE res = cls.bitwise_dot(res, res)NEWLINE return cls.bitwise_dot(res, a) if n&1 else resNEWLINENEWLINENEWLINE @staticmethodNEWLINE def cumprod(a, mod):NEWLINE l = len(a); sql = int(np.sqrt(l)+1)NEWLINE a = np.resize(a, sql**2).reshape(sql, sql)NEWLINE for i in range(sql-1): a[:, i+1] *= a[:, i]; a[:, i+1] %= modNEWLINE for i in range(sql-1): a[i+1] *= a[i, -1]; a[i+1] %= 
modNEWLINE return np.ravel(a)[:l]NEWLINENEWLINE @classmethodNEWLINE def generate_fac_ifac(cls, n, p=MOD):NEWLINE if using_numpy:NEWLINE fac = np.arange(n+1); fac[0] = 1; fac = cls.cumprod(fac, p)NEWLINE ifac = np.arange(n+1, 0, -1); ifac[0] = pow(int(fac[-1]), p-2, p)NEWLINE ifac = cls.cumprod(ifac, p)[n::-1]NEWLINE else:NEWLINE fac = [None]*(n+1); fac[0] = 1NEWLINE for i in range(n): fac[i+1] = fac[i]*(i+1)%pNEWLINE ifac = [None]*(n+1); ifac[n] = pow(fac[n], p-2, p)NEWLINE for i in range(n, 0, -1): ifac[i-1] = ifac[i]*i%pNEWLINE return fac, ifacNEWLINENEWLINE class Kitamasa:NEWLINE passNEWLINENEWLINENEWLINEmint = Algebra.ModularNEWLINENEWLINENEWLINEclass NumberTheory:NEWLINE class PrimeNumbers: # pnNEWLINE def __init__(self, n=2*10**6):NEWLINE self.is_prime, self.prime_nums = self.find(n)NEWLINENEWLINE def __call__(self, n): return self.is_prime[n]NEWLINE def __iter__(self): return iter(self.prime_nums)NEWLINE def __getitem__(self, key): return self.prime_nums[key]NEWLINENEWLINE @staticmethodNEWLINE def find(n): # Sieve of eratosthenesNEWLINE if using_numpy:NEWLINE is_prime = np.ones(n+1, dtype=np.bool); is_prime[:2] = 0NEWLINE for i in range(2, int(n**.5)+1):NEWLINE if is_prime[i]: is_prime[i*2::i] = 0NEWLINE prime_nums = np.flatnonzero(is_prime)NEWLINE else:NEWLINE is_prime = [True]*(n+1); is_prime[0] = is_prime[1] = 0NEWLINE for i in range(2, int(n**.5)+1):NEWLINE if not is_prime[i]: continueNEWLINE for j in range(i*2, n+1, i): is_prime[j] = 0NEWLINE prime_nums = [i for i in range(2, n+1) if is_prime[i]]NEWLINE return is_prime, prime_numsNEWLINENEWLINE @lru_cache(maxsize=None)NEWLINE def factorize(self, n):NEWLINE res = defaultdict(int)NEWLINE if n < 2: return resNEWLINE for p in self:NEWLINE if p*p > n: breakNEWLINE while n%p == 0: res[p] += 1; n //= pNEWLINE if n == 1: return resNEWLINE res[n] = 1; return resNEWLINENEWLINE def factorize_factorial(self, n):NEWLINE res = defaultdict(int)NEWLINE for i in range(2, n+1):NEWLINE for p, c in 
self.factorize(i).items(): res[p] += cNEWLINE return resNEWLINENEWLINE @classmethodNEWLINE @lru_cache(maxsize=None)NEWLINE def gcd(cls, a, b): return cls.gcd(b, a%b) if b else abs(a)NEWLINE @classmethodNEWLINE def lcm(cls, a, b): return abs(a // cls.gcd(a, b) * b)NEWLINENEWLINE @staticmethodNEWLINE def find_divisors(n):NEWLINE divisors = []NEWLINE for i in range(1, int(n**.5)+1):NEWLINE if n%i: continueNEWLINE divisors.append(i)NEWLINE j = n // iNEWLINE if j != i: divisors.append(j)NEWLINE return sorted(divisors)NEWLINENEWLINE @staticmethodNEWLINE def base_convert(n, b):NEWLINE if not n: return [0]NEWLINE res = []NEWLINE while n:NEWLINE n, r = divmod(n, b)NEWLINE if r < 0: n += 1; r -= bNEWLINE res.append(r)NEWLINE return resNEWLINENEWLINENEWLINENEWLINEclass Combinatorics:NEWLINE @classmethodNEWLINE @lru_cache(maxsize=None)NEWLINE def choose(cls, n, r, mod=None):NEWLINE if r > n or r < 0: return 0NEWLINE if r == 0: return 1NEWLINE res = cls.choose(n-1,r,mod) + cls.choose(n-1,r-1,mod)NEWLINE if mod: res %= modNEWLINE return resNEWLINENEWLINE class CombinationsMod:NEWLINE def __init__(self, n=2*10**6, mod=MOD):NEWLINE self.__mod = modNEWLINE self.fac, self.ifac = Algebra.generate_fac_ifac(n, mod)NEWLINENEWLINE def __call__(self, n, r): return self.__choose(n, r)NEWLINENEWLINE def __choose(self, n, r):NEWLINE bl = (0<=r) & (r<=n)NEWLINE p = self.__modNEWLINE return bl * self.fac[n] * self.ifac[r] % p * self.ifac[n-r] % pNEWLINENEWLINE def make_nchoose_table(self, n):NEWLINE p = self.__modNEWLINE r = len(self.__fac)-1NEWLINE if using_numpy:NEWLINE n_choose = np.arange(n+1, n-r, -1); n_choose[0] = 1NEWLINE n_choose = Algebra.cumprod(n_choose, p)*self.ifac%pNEWLINE else:NEWLINE n_choose = [None]*(r+1); n_choose[0] = 1NEWLINE for i in range(r): n_choose[i+1] = n_choose[i]*(n-i)%pNEWLINE for i in range(1,r+1): n_choose[i] = n_choose[i]*self.ifac[i]%pNEWLINE return n_chooseNEWLINENEWLINE @classmethodNEWLINE def permutations(cls, a, r=None, i=0):NEWLINE a = list(a); n = 
len(a)NEWLINE if r is None: r = nNEWLINE res = []NEWLINE if r > n or i > r: return resNEWLINE if i == r: return [tuple(a[:r])]NEWLINE for j in range(i, n): a[i],a[j] = a[j],a[i]; res += cls.permutations(a, r, i+1)NEWLINE return resNEWLINENEWLINE @staticmethodNEWLINE def combinations(a, r):NEWLINE a = tuple(a)NEWLINE n = len(a)NEWLINE if r > n: returnNEWLINE indices = list(range(r))NEWLINE yield a[:r]NEWLINE while True:NEWLINE for i in range(r-1, -1, -1):NEWLINE if indices[i] != i+n-r: breakNEWLINE else: returnNEWLINE indices[i] += 1NEWLINE for j in range(i+1, r): indices[j] = indices[j-1]+1NEWLINE yield tuple(a[i] for i in indices)NEWLINENEWLINENEWLINENEWLINEclass DP:NEWLINE @staticmethodNEWLINE def LIS(a):NEWLINE res = [inf] * len(a)NEWLINE for x in a: res[bi_l(res, x)] = xNEWLINE return resNEWLINENEWLINENEWLINEclass String:NEWLINE @staticmethodNEWLINE def z_algorithm(s):NEWLINE n = len(s)NEWLINE a = [0] * n; a[0] = nNEWLINE l = r = -1NEWLINE for i in range(1, n):NEWLINE if r >= i: a[i] = min(a[i-l], r-i)NEWLINE while i + a[i] < n and s[i+a[i]] == s[a[i]]: a[i] += 1NEWLINE if i+a[i] >= r: l, r = i, i+a[i]NEWLINE return aNEWLINENEWLINENEWLINEclass GeometryTopology:NEWLINE class Graph:NEWLINE class __Edge:NEWLINE def __init__(self, weight=1, capacity=1, **args):NEWLINE self.weight = weightNEWLINE self.capacity = capacityNEWLINENEWLINE def __str__(self):NEWLINE return f'weight: {self.weight}, cap: {self.capacity}'NEWLINENEWLINE class __Node:NEWLINE def __init__(self, **args):NEWLINE passNEWLINENEWLINE def __init__(self, n=0):NEWLINE self.__N = nNEWLINE self.nodes = [None] * nNEWLINE self.edges = [{} for _ in range(n)]NEWLINENEWLINE def add_node_info(self, v, **args): self.nodes[v] = self.__Node(**args)NEWLINENEWLINE def add_edge(self, u, v, update=False, **args):NEWLINE if not update and v in self.edges[u]: returnNEWLINE self.edges[u][v] = self.__Edge(**args)NEWLINENEWLINE def get_size(self): return self.__NNEWLINENEWLINE def bfs(self, src=0):NEWLINE n = 
self.__NNEWLINE self.depth = self.lv = lv = [None]*n; lv[src] = 0 # depth in tree, or level in general graph.NEWLINE self.dist = dist = [inf]*n; dist[src] = 0 # dist for only tree.NEWLINE self.parent = par = [None]*n; par[src] = srcNEWLINE q = deque([src])NEWLINE while q:NEWLINE u = q.popleft()NEWLINE for v, e in self.edges[u].items():NEWLINE if e.capacity == 0 or lv[v] is not None: continueNEWLINE lv[v], dist[v], par[v] = lv[u]+1, dist[u]+e.weight, uNEWLINE q.append(v)NEWLINE return distNEWLINENEWLINE def dinic(self, src, sink):NEWLINE def flow_to_sink(u, flow_in):NEWLINE if u == sink: return flow_inNEWLINE flow = 0NEWLINE for v, e in self.edges[u].items():NEWLINE if e.capacity == 0 or self.lv[v] <= self.lv[u]: continueNEWLINE f = flow_to_sink(v, min(flow_in, e.capacity))NEWLINE if not f: continueNEWLINE self.edges[u][v].capacity -= fNEWLINE if u in self.edges[v]: self.edges[v][u].capacity += fNEWLINE else: self.add_edge(v, u, capacity=f)NEWLINE flow_in -= fNEWLINE flow += fNEWLINE return flowNEWLINENEWLINE flow = 0NEWLINE while True:NEWLINE self.bfs(src)NEWLINE if self.lv[sink] is None: return flowNEWLINE flow += flow_to_sink(src, inf)NEWLINENEWLINE def ford_fulkerson(self):NEWLINE passNEWLINENEWLINE def push_relabel(self):NEWLINE passNEWLINENEWLINE def floyd_warshall(self):NEWLINE n = self.__NNEWLINE d = [[inf]*n for _ in range(n)]NEWLINE for u in range(n):NEWLINE d[u][u] = 0NEWLINE for v, e in self.edges[u].items(): d[u][v] = e.weightNEWLINE for w in range(n):NEWLINE for u in range(n):NEWLINE for v in range(n):NEWLINE d[u][v] = min(d[u][v], d[u][w]+d[w][v])NEWLINE return dNEWLINENEWLINE def dijkstra(self, src, paths_cnt=False, mod=None):NEWLINE dist = [inf] * self.__N; dist[src] = 0NEWLINE visited = [False] * self.__NNEWLINE paths = [0] * self.__N; paths[src] = 1NEWLINE q = [(0, src)]NEWLINE while q:NEWLINE d, u = heappop(q)NEWLINE if visited[u]: continueNEWLINE visited[u] = TrueNEWLINE for v, e in self.edges[u].items():NEWLINE dv = d + e.weightNEWLINE if dv > 
dist[v]: continueNEWLINE elif dv == dist[v]:NEWLINE paths[v] += paths[u]NEWLINE if mod: paths[v] %= modNEWLINE continueNEWLINE paths[v], dist[v] = paths[u], dvNEWLINE heappush(q, (dv, v))NEWLINE if paths_cnt: return dist, pathsNEWLINE else: return distNEWLINENEWLINE def astar(self, src, tgt, heuristic_func):NEWLINE cost = [inf] * self.__NNEWLINE q = [(heuristic_func(src, tgt), 0, src)]NEWLINE while q:NEWLINE _, c, u = heappop(q)NEWLINE if u == tgt: return cNEWLINE if cost[u] != inf: continueNEWLINE cost[u] = cNEWLINE for v, e in self.edges[u].items():NEWLINE if cost[v] != inf: continueNEWLINE h = heuristic_func(v, tgt)NEWLINE nc = c + e.weightNEWLINE heappush(q, (h+nc, nc, v))NEWLINE return infNEWLINENEWLINE def bellman_ford(self, src):NEWLINE n = self.__NNEWLINE d = [inf] * n; d[src] = 0NEWLINE for _ in range(n-1):NEWLINE for u in range(n):NEWLINE for v, e in self.edges[u].items(): d[v] = min(d[v], d[u]+e.weight)NEWLINE for u in range(n):NEWLINE for v, e in self.edges[u].items():NEWLINE if d[u]+e.weight < d[v]: raise Exception('found negative cycle.')NEWLINE return dNEWLINENEWLINE def bfs01(self, src=0):NEWLINE d = [inf]*self.__N; d[src] = 0NEWLINE q = deque([src])NEWLINE while q:NEWLINE u = q.popleft()NEWLINE for v, e in self.edges[u].items():NEWLINE dv = d[u] + e.weightNEWLINE if d[v] <= dv: continueNEWLINE d[v] = dvNEWLINE if e.weight: q.append(v)NEWLINE else: q.appendleft(v)NEWLINE return dNEWLINENEWLINENEWLINE def find_ancestors(self): # tree doubling.NEWLINE self.__ancestors = ancestors = [self.parent]NEWLINE for _ in range(max(self.depth).bit_length()):NEWLINE ancestors.append([ancestors[-1][u] for u in ancestors[-1]])NEWLINENEWLINENEWLINE def find_dist(self, u, v):NEWLINE return self.dist[u]+self.dist[v]-2*self.dist[self.__find_lca(u, v)]NEWLINENEWLINENEWLINE def __find_lca(self, u, v):NEWLINE du, dv = self.depth[u], self.depth[v]NEWLINE if du > dv:NEWLINE u, v = v, uNEWLINE du, dv = dv, duNEWLINENEWLINE d = dv - duNEWLINE for i in range(d.bit_length()): # 
up-streamNEWLINE if d>>i&1: v = self.__ancestors[i][v]NEWLINE if v == u: return vNEWLINENEWLINE for i in range(du.bit_length()-1, -1, -1): # find direct child of LCA.NEWLINE nu, nv = self.__ancestors[i][u], self.__ancestors[i][v]NEWLINE if nu == nv: continueNEWLINE u, v = nu, nvNEWLINENEWLINE return self.__ancestors[0][u]NEWLINENEWLINE def init_dsu(self): # disjoint set union (union-find)NEWLINE n = self.__NNEWLINE self.parent = list(range(n))NEWLINE self.rank = [0] * nNEWLINE self.size = [1] * nNEWLINENEWLINE def find(self, u):NEWLINE if self.parent[u] == u: return uNEWLINE self.parent[u] = self.find(self.parent[u])NEWLINE return self.parent[u]NEWLINENEWLINE def unite(self, u, v):NEWLINE u, v = self.find(u), self.find(v)NEWLINE if u == v: returnNEWLINE if self.rank[u] < self.rank[v]: u,v = v,uNEWLINE self.parent[v] = uNEWLINE self.size[u] += self.size[v]NEWLINE self.rank[u] = max(self.rank[u], self.rank[v]+1)NEWLINENEWLINE def same(self, u, v): return self.find(u)==self.find(v)NEWLINENEWLINE def groups(self, empty=True):NEWLINE n = self.__NNEWLINE groups = [[] for _ in range(n)]NEWLINE for u in range(n): groups[self.find(u)].append(u)NEWLINE return groups if empty else [g for g in groups if g]NEWLINENEWLINENEWLINE def scc(self): # strongly connected componentsNEWLINE n = self.__NNEWLINE visited, q, root, r = [False]*n, [], [None]*n, 0NEWLINE gg = self.__class__(n)NEWLINE for u in range(n):NEWLINE for v in self.edges[u]: gg.add_edge(v, u)NEWLINENEWLINE def dfs(u):NEWLINE if visited[u]: returnNEWLINE visited[u] = TrueNEWLINE for v in self.edges[u]: dfs(v)NEWLINE q.append(u)NEWLINENEWLINE def rev_dfs(u, r):NEWLINE if root[u] is not None: returnNEWLINE root[u] = rNEWLINE for v in gg.edges[u]: rev_dfs(v, r)NEWLINENEWLINE for u in range(n): dfs(u)NEWLINE for u in q[::-1]: rev_dfs(u, r); r += 1NEWLINE return rootNEWLINENEWLINENEWLINE def kruskal(self): # minimum spanning treeNEWLINE n = self.__NNEWLINE uf = self.__class__(n); uf.init_dsu()NEWLINE edges = 
sorted([(u,v,e.weight) for u in range(n) for v,e in self.edges[u].items()], key=lambda x: x[2])NEWLINE g = self.__class__(n)NEWLINE d = 0NEWLINE for u, v, w in edges:NEWLINE if uf.same(u,v): continueNEWLINE uf.unite(u, v); g.add_edge(u, v, weight=w); d += wNEWLINE return g, dNEWLINENEWLINE def prim(self, src=0, return_parent=False): # minimum spanning treeNEWLINE n = self.__NNEWLINE g = self.__class__(n)NEWLINE parent, visited, dist = [None]*n, [False]*n, 0NEWLINE q = [(0, (src, src))]NEWLINE while q:NEWLINE d, (w, u) = heappop(q)NEWLINE if visited[u]: continueNEWLINE visited[u], parent[u] = True, w; dist += d; g.add_edge(w,u, weight=d)NEWLINE for v, e in self.edges[u].items():NEWLINE if not visited[v]: heappush(q, (e.weight, (u,v)))NEWLINE if return_parent: return g, dist, parentNEWLINE return g, distNEWLINENEWLINE def boruvka(self): # minimum spanning treeNEWLINE n = self.__NNEWLINE uf = self.__class__(n); uf.init_dsu()NEWLINE g = self.__class__(n)NEWLINE d = 0NEWLINENEWLINE def dfs(u):NEWLINE if visited[u]: return (inf, (None, None))NEWLINE visited[u] = TrueNEWLINE cand = []NEWLINE for v, e in self.edges[u].items():NEWLINE if uf.same(u,v): cand.append(dfs(v)); continueNEWLINE cand.append((e.weight, (u,v)))NEWLINE return sorted(cand)[0]NEWLINENEWLINE while len(set(uf.parent))!=1:NEWLINE edges, visited = [], [False]*nNEWLINE for u in range(n):NEWLINE if visited[u]: continueNEWLINE edges.append(dfs(u))NEWLINE for w, (u, v) in edges:NEWLINE if uf.same(u,v): continueNEWLINE g.add_edge(u,v, weight=w); uf.unite(u,v); d += wNEWLINE for u in range(n): uf.find(u)NEWLINENEWLINE return g, dNEWLINENEWLINE def tsp(self): # traveling salesperson problemNEWLINE passNEWLINENEWLINE class FenwickTree: # BIT (Binary Indexed Tree)NEWLINE def __init__(self, n):NEWLINE self.__N = nNEWLINE self.data = [0]*(n+1)NEWLINENEWLINE def add(self, i, x):NEWLINE while i <= self.__N: self.data[i] += x; i += i&-iNEWLINENEWLINE def __sum(self, i):NEWLINE s = 0NEWLINE while i > 0: s += self.data[i]; 
i -= i&-iNEWLINE return sNEWLINENEWLINE def sum(self, l, r): return self.__sum(r) - self.__sum(l-1)NEWLINENEWLINE @staticmethodNEWLINE def triangle_area(p0, p1, p2, signed=False):NEWLINE x1, y1, x2, y2 = p1[0]-p0[0], p1[1]-p0[1], p2[0]-p0[0], p2[1]-p0[1]NEWLINE return (x1*y2 - x2*y1)/2 if signed else abs(x1*y2 - x2*y1)/2NEWLINENEWLINE @classmethodNEWLINE def intersect(cls, seg1, seg2):NEWLINE (p1, p2), (p3, p4) = seg1, seg2NEWLINE t1 = cls.triangle_area(p1, p2, p3, signed=True)NEWLINE t2 = cls.triangle_area(p1, p2, p4, signed=True)NEWLINE t3 = cls.triangle_area(p3, p4, p1, signed=True)NEWLINE t4 = cls.triangle_area(p3, p4, p2, signed=True)NEWLINE return (t1*t2<0) & (t3*t4<0)NEWLINENEWLINENEWLINEdef cumxor(a): return reduce(xor, a, 0)NEWLINEdef cumor(a): return reduce(or_, a, 0)NEWLINENEWLINEdef bit_count(n):NEWLINE cnt = 0NEWLINE while n: cnt += n&1; n >>= 1NEWLINE return cntNEWLINENEWLINENEWLINEclass AtCoder:NEWLINE class ABC001:NEWLINE @staticmethodNEWLINE def a():NEWLINE h1, h2 = map(int, sys.stdin.read().split()); print(h1-h2)NEWLINENEWLINE @staticmethodNEWLINE def d():NEWLINE def to_minuites(x):NEWLINE q, r = divmod(x, 100)NEWLINE return 60*q + rNEWLINENEWLINE def to_hmform(x):NEWLINE q, r = divmod(x, 60)NEWLINE return 100*q + rNEWLINENEWLINE n = int(sys.stdin.readline().rstrip())NEWLINE term = [0] * 2001NEWLINE for _ in range(n):NEWLINE s, e = map(to_minuites, map(int, sys.stdin.readline().rstrip().split('-')))NEWLINE s = s//5 * 5NEWLINE e = (e+4)//5 * 5NEWLINE term[s] += 1NEWLINE term[e+1] -= 1NEWLINE for i in range(2000):NEWLINE term[i+1] += term[i]NEWLINENEWLINE res = []NEWLINE raining = FalseNEWLINE for i in range(2001):NEWLINE if term[i]:NEWLINE if not raining:NEWLINE s = iNEWLINE raining = TrueNEWLINE elif raining:NEWLINE res.append((s, i-1))NEWLINE raining = FalseNEWLINE for s, e in res:NEWLINE print(f'{to_hmform(s):04}-{to_hmform(e):04}')NEWLINENEWLINENEWLINENEWLINENEWLINE class ABC002:NEWLINE @staticmethodNEWLINE def a():NEWLINE print(max(map(int, 
sys.stdin.readline().split())))NEWLINENEWLINE @staticmethodNEWLINE def b():NEWLINE vowels = set('aeiou')NEWLINE print(''.join([c for c in sys.stdin.readline().rstrip() if c not in vowels]))NEWLINENEWLINE @staticmethodNEWLINE def c():NEWLINE print(GeometryTopology.triangle_area(*map(int, sys.stdin.readline().split())))NEWLINENEWLINE @staticmethodNEWLINE def d():NEWLINE n, m = map(int, sys.stdin.readline().split())NEWLINE edges = set((x-1, y-1) for x, y in zip(*[map(int, sys.stdin.read().split())]*2))NEWLINE print(max(len(s) for i in range(1, 1<<n) for s in [[j for j in range(n) if i>>j&1]] if all((x, y) in edges for x, y in itertools.combinations(s, 2))))NEWLINENEWLINE @staticmethodNEWLINE def d_2():NEWLINE n, m = map(int, sys.stdin.readline().split())NEWLINE relations = [1<<i for i in range(n)]NEWLINE for x, y in zip(*[map(int, sys.stdin.read().split())]*2):NEWLINE relations[x] |= 1<<(y-1); relations[y] |= 1<<(x-1)NEWLINE res = 0NEWLINE for i in range(1<<n):NEWLINE s, cnt = (1<<n)-1, 0NEWLINE for j in range(n):NEWLINE if i>>j & 1: t &= relations[j] | 1<<j; cnt += 1NEWLINE if s&i == i: res = max(res, cnt)NEWLINE print(res)NEWLINENEWLINE class ABC003:NEWLINE @staticmethodNEWLINE def a():NEWLINE print((int(sys.stdin.readline().rstrip())+1)*5000)NEWLINE @staticmethodNEWLINE def b():NEWLINE atcoder = set('atcoder')NEWLINE s, t = sys.stdin.read().split()NEWLINE print(all(s[i]==t[i] or s[i]=='@' and t[i] in atcoder or t[i]=='@' and s[i] in atcoder for i in range(len(s))) and 'You can win' or 'You will lose')NEWLINENEWLINE @staticmethodNEWLINE def c():NEWLINE n, k, *r = map(int, sys.stdin.read().split()); print(reduce(lambda x, y: (x+y)/2, sorted(r)[-k:], 0))NEWLINENEWLINE class ABC004:NEWLINE @staticmethodNEWLINE def a():NEWLINE print(int(sys.stdin.readline().rstrip())*2)NEWLINE @staticmethodNEWLINE def b():NEWLINE for l in [sys.stdin.readline().rstrip() for _ in range(4)][::-1]: print(l[::-1])NEWLINE @staticmethodNEWLINE def c():NEWLINE n = 
int(sys.stdin.readline().rstrip())%30NEWLINE res = list(range(1, 7))NEWLINE for i in range(n): i %= 5; res[i], res[i+1] = res[i+1], res[i]NEWLINE print(*res, sep='')NEWLINENEWLINENEWLINENEWLINE class ABC005:NEWLINE @staticmethodNEWLINE def a():NEWLINE x, y = map(int, sys.stdin.readline().split())NEWLINE print(y//x)NEWLINE @staticmethodNEWLINE def b():NEWLINE n, *t = map(int, sys.stdin.read().split())NEWLINE print(min(t))NEWLINE @staticmethodNEWLINE def c():NEWLINE t = int(sys.stdin.readline().rstrip())NEWLINE n = int(sys.stdin.readline().rstrip())NEWLINE a = [int(x) for x in sys.stdin.readline().split()]NEWLINE m = int(sys.stdin.readline().rstrip())NEWLINE b = [int(x) for x in sys.stdin.readline().split()]NEWLINE i = 0NEWLINE for p in b:NEWLINE if i == n: print('no'); returnNEWLINE while p-a[i] > t:NEWLINE i += 1NEWLINE if i == n: print('no'); returnNEWLINE if a[i] > p: print('no'); returnNEWLINE i += 1NEWLINE print('yes')NEWLINE @staticmethodNEWLINE def d():NEWLINE n = int(sys.stdin.readline().rstrip())NEWLINE d = np.array([sys.stdin.readline().split() for _ in range(n)], np.int64)NEWLINE s = d.cumsum(axis=0).cumsum(axis=1)NEWLINE s = np.pad(s, 1)NEWLINE max_del = np.zeros((n+1, n+1), dtype=np.int64)NEWLINE for y in range(1, n+1):NEWLINE for x in range(1, n+1):NEWLINE max_del[y, x] = np.amax(s[y:n+1, x:n+1] - s[0:n-y+1, x:n+1] - s[y:n+1, 0:n-x+1] + s[0:n-y+1, 0:n-x+1])NEWLINE res = np.arange(n**2+1)[:, None]NEWLINE i = np.arange(1, n+1)NEWLINE res = max_del[i, np.minimum(res//i, n)].max(axis=1)NEWLINE q = int(sys.stdin.readline().rstrip())NEWLINE p = np.array(sys.stdin.read().split(), dtype=np.int64)NEWLINE print(*res[p], sep='\n')NEWLINENEWLINENEWLINE class ABC006:NEWLINE @staticmethodNEWLINE def a():NEWLINE n = sys.stdin.readline().rstrip()NEWLINE if '3' in n: print('YES')NEWLINE elif int(n)%3 == 0: print('YES')NEWLINE else: print('NO')NEWLINENEWLINE @staticmethodNEWLINE def b():NEWLINE mod = 10007NEWLINE a = np.eye(N=3, k=-1, dtype=np.int64); a[0] = 1NEWLINE n 
= int(sys.stdin.readline().rstrip())NEWLINE a = Algebra.matrix_pow(a, n-1, mod)NEWLINE print(a[2][0])NEWLINENEWLINENEWLINE @staticmethodNEWLINE def c():NEWLINE n, m = map(int, sys.stdin.readline().split())NEWLINE cnt = [0, 0, 0]NEWLINE if m == 1: cnt = [-1, -1, -1]NEWLINE else:NEWLINE if m & 1: m -= 3; cnt[1] += 1; n -= 1NEWLINE cnt[2] = m//2 - nNEWLINE cnt[0] = n - cnt[2]NEWLINE if cnt[0]<0 or cnt[1]<0 or cnt[2]<0: print(-1, -1, -1)NEWLINE else: print(*cnt, sep=' ')NEWLINENEWLINE @staticmethodNEWLINE def d():NEWLINE n, *c = map(int, sys.stdin.read().split())NEWLINE lis = [inf]*nNEWLINE for x in c: lis[bi_l(lis, x)] = xNEWLINE print(n - bi_l(lis, inf))NEWLINENEWLINE class ABC007:NEWLINE @staticmethodNEWLINE def a():NEWLINE n = int(sys.stdin.readline().rstrip())NEWLINE print(n-1)NEWLINE @staticmethodNEWLINE def b():NEWLINE s = sys.stdin.readline().rstrip()NEWLINE if s == 'a': print(-1)NEWLINE else: print('a')NEWLINE @staticmethodNEWLINE def c():NEWLINE r, c = map(int, sys.stdin.readline().split())NEWLINE sy, sx = map(int, sys.stdin.readline().split())NEWLINE gy, gx = map(int, sys.stdin.readline().split())NEWLINE sy -= 1; sx -=1; gy -= 1; gx -= 1NEWLINE maze = [sys.stdin.readline().rstrip() for _ in range(r)]NEWLINE queue = deque([(sy, sx)])NEWLINE dist = np.full((r, c), np.inf); dist[sy, sx] = 0NEWLINE while queue:NEWLINE y, x = queue.popleft()NEWLINE for i, j in [(-1, 0), (1, 0), (0, -1), (0, 1)]:NEWLINE i += y; j += xNEWLINE if maze[i][j] == '#' or dist[i, j] != np.inf: continueNEWLINE dist[i, j] = dist[y, x] + 1NEWLINE queue.append((i, j))NEWLINE print(int(dist[gy, gx]))NEWLINE @staticmethodNEWLINE def d():NEWLINE ng = set([4, 9])NEWLINE def count(d):NEWLINE return d if d<=4 else d-1NEWLINE def f(n):NEWLINE x = [int(d) for d in str(n)]NEWLINE flg = TrueNEWLINE dp = 0NEWLINE for d in x:NEWLINE dp = dp*8 + flg*count(d)NEWLINE if d in ng: flg = FalseNEWLINE return n-(dp+flg)NEWLINE a, b = map(int, sys.stdin.readline().split())NEWLINE print(f(b) - 
f(a-1))NEWLINENEWLINE class ABC008:NEWLINE @staticmethodNEWLINE def a():NEWLINE s, t = map(int, sys.stdin.readline().split())NEWLINE print(t-s+1)NEWLINE @staticmethodNEWLINE def b():NEWLINE n, *s = sys.stdin.read().split()NEWLINE res = defaultdict(int)NEWLINE for name in s: res[name] += 1NEWLINE print(sorted(res.items(), key=lambda x: x[1])[-1][0])NEWLINE @staticmethodNEWLINE def c():NEWLINE n, *a = map(int, sys.stdin.read().split())NEWLINE a = np.array(a)NEWLINE c = n - np.count_nonzero(a[:, None]%a, axis=1)NEWLINE print(np.sum((c+1)//2/c))NEWLINENEWLINE @staticmethodNEWLINE def d():NEWLINE w, h, n, *xy = map(int, sys.stdin.read().split())NEWLINE *xy, = zip(*([iter(xy)]*2))NEWLINENEWLINE @lru_cache(maxsize=None)NEWLINE def count(x1, y1, x2, y2):NEWLINE res = 0NEWLINE for x, y in xy:NEWLINE if not (x1 <= x <= x2 and y1 <= y <= y2): continueNEWLINE cnt = (x2-x1) + (y2-y1) + 1NEWLINE cnt += count(x1, y1, x-1, y-1)NEWLINE cnt += count(x1, y+1, x-1, y2)NEWLINE cnt += count(x+1, y1, x2, y-1)NEWLINE cnt += count(x+1, y+1, x2, y2)NEWLINE res = max(res, cnt)NEWLINE return resNEWLINE print(count(1, 1, w, h))NEWLINENEWLINENEWLINE class ABC009:NEWLINE @staticmethodNEWLINE def a():NEWLINE n = int(sys.stdin.readline().rstrip())NEWLINE print((n+1)//2)NEWLINE @staticmethodNEWLINE def b():NEWLINE n, *a = map(int, sys.stdin.read().split())NEWLINE print(sorted(set(a))[-2])NEWLINE @staticmethodNEWLINE def c():NEWLINE n, k = map(int, sys.stdin.readline().split())NEWLINE s = list(sys.stdin.readline().rstrip())NEWLINE cost = [1]*nNEWLINE r = kNEWLINE for i in range(n-1):NEWLINE q = []NEWLINE for j in range(i+1, n):NEWLINE if s[j] < s[i] and cost[i]+cost[j] <= r:NEWLINE heappush(q, (s[j], cost[i]+cost[j], -j))NEWLINE if not q: continueNEWLINE _, c, j = heappop(q); j = -jNEWLINE s[i], s[j] = s[j], s[i]NEWLINE r -= cNEWLINE cost[i] = cost[j] = 0NEWLINE print(''.join(s))NEWLINENEWLINE @staticmethodNEWLINE def d():NEWLINE k, m = map(int, sys.stdin.readline().split())NEWLINE a = 
np.array([int(x) for x in sys.stdin.readline().split()])NEWLINE c = np.array([int(x) for x in sys.stdin.readline().split()])NEWLINE mask = (1<<32) - 1NEWLINE d = np.eye(k, k, -1, dtype=np.uint32) * mask; d[0] = cNEWLINE if m <= k: print(a[m-1]); returnNEWLINE # print(Algebra.bitwise_mat_pow(d, m-k))NEWLINE # print(Algebra.bitwise_dot(Algebra.bitwise_mat_pow(d, m-k), a[::-1].reshape(-1, 1))[0].item())NEWLINE print(Algebra.bitwise_dot(Algebra.bitwise_mat_pow(d, m-k), a[::-1].reshape(-1, 1))[0][0])NEWLINENEWLINENEWLINENEWLINE class ABC010:NEWLINE @staticmethodNEWLINE def a():NEWLINE print(sys.stdin.readline().rstrip()+'pp')NEWLINE @staticmethodNEWLINE def b():NEWLINE n, *a = map(int, sys.stdin.read().split())NEWLINE tot = 0NEWLINE for x in a:NEWLINE c = 0NEWLINE while x%2==0 or x%3==2:NEWLINE x -= 1NEWLINE c += 1NEWLINE tot += cNEWLINE print(tot)NEWLINE @staticmethodNEWLINE def c():NEWLINE sx, sy, gx, gy, t, v, n, *xy = map(int, sys.stdin.read().split())NEWLINE x, y = np.array(xy).reshape(-1, 2).TNEWLINE def dist(x1, y1, x2, y2):NEWLINE return np.sqrt((x2-x1)**2 + (y2-y1)**2)NEWLINE ans = 'YES' if (dist(sx, sy, x, y)+dist(x, y, gx, gy) <= v*t).any() else 'NO'NEWLINE print(ans)NEWLINENEWLINE @staticmethodNEWLINE def d():NEWLINE n, g, e = map(int, sys.stdin.readline().split())NEWLINE p = [int(x) for x in sys.stdin.readline().split()]NEWLINE x, y = [], []NEWLINE for _ in range(e):NEWLINE a, b = map(int, sys.stdin.readline().split())NEWLINE x.append(a); y.append(b)NEWLINE x.append(b); y.append(a)NEWLINE for a in p:NEWLINE x.append(a)NEWLINE y.append(n)NEWLINE if not x:NEWLINE print(0)NEWLINE returnNEWLINE c = [1] * len(x)NEWLINE min_cut = maximum_flow(csr_matrix((c, (x, y)), (n+1, n+1)), source=0, sink=n).flow_valueNEWLINE print(min_cut)NEWLINENEWLINE @staticmethodNEWLINE def d_2():NEWLINE n, g, e = map(int, sys.stdin.readline().split())NEWLINE graph = nx.DiGraph()NEWLINE graph.add_nodes_from(range(n+1))NEWLINE for p in [int(x) for x in 
sys.stdin.readline().split()]:NEWLINE graph.add_edge(p, n, capacity=1)NEWLINE for _ in range(e):NEWLINE a, b = map(int, sys.stdin.readline().split())NEWLINE graph.add_edge(a, b, capacity=1)NEWLINE graph.add_edge(b, a, capacity=1)NEWLINE print(nx.minimum_cut_value(graph, 0, n))NEWLINENEWLINE @staticmethodNEWLINE def d_3():NEWLINE n, q, m = map(int, sys.stdin.readline().split())NEWLINE g = GeometryTopology.Graph(n+1)NEWLINE # for i in range(n+1): g.add_node(i)NEWLINE for p in [int(x) for x in sys.stdin.readline().split()]:NEWLINE g.add_edge(p, n, capacity=1)NEWLINE for a, b in zip(*[map(int, sys.stdin.read().split())]*2):NEWLINE g.add_edge(a, b, capacity=1)NEWLINE g.add_edge(b, a, capacity=1)NEWLINE print(g.dinic(0, n))NEWLINENEWLINENEWLINENEWLINE class ABC011:NEWLINE @staticmethodNEWLINE def a():NEWLINE n = int(sys.stdin.readline().rstrip())NEWLINE print(n%12+1)NEWLINE @staticmethodNEWLINE def b():NEWLINE s = sys.stdin.readline().rstrip()NEWLINE print(s[0].upper()+s[1:].lower())NEWLINE @staticmethodNEWLINE def c():NEWLINE n, *ng = map(int, sys.stdin.read().split())NEWLINE ng = set(ng)NEWLINE if n in ng: print('NO')NEWLINE else:NEWLINE r = 100NEWLINE while n > 0:NEWLINE if r == 0: print('NO'); returnNEWLINE for i in range(3, 0, -1):NEWLINE if (n-i) in ng: continueNEWLINE n -= iNEWLINE r -= 1NEWLINE breakNEWLINE else: print('NO'); returnNEWLINE print('YES')NEWLINENEWLINE @staticmethodNEWLINE def d():NEWLINE n, d, x, y = map(int, sys.stdin.read().split())NEWLINE x, y = abs(x), abs(y)NEWLINE if x%d or y%d: print(0); returnNEWLINE x, y = x//d, y//dNEWLINE r = n - (x+y)NEWLINE if r < 0 or r&1: print(0); returnNEWLINENEWLINE res = 0NEWLINE half_p = pow(1/2, n)NEWLINE for d in range(r//2 + 1): # 0 <= d <= r//2, southNEWLINE south, north = d, y+dNEWLINE west = (r - 2*d)//2NEWLINE res += half_p * comb(n, south, exact=True) * comb(n-south, north, exact=True)\NEWLINE * comb(n-south-north, west, exact=True) * half_pNEWLINE print(res)NEWLINENEWLINENEWLINE class ABC012:NEWLINE 
        @staticmethod
        def a():
            # ABC012 A: read two integers and print them in swapped order.
            a, b = map(int, sys.stdin.readline().split())
            print(b, a)

        @staticmethod
        def b():
            # ABC012 B: convert a second count to zero-padded H:M:S.
            n = int(sys.stdin.readline().rstrip())
            h, n = divmod(n, 3600)
            m, s = divmod(n, 60)
            print(f'{h:02}:{m:02}:{s:02}')

        @staticmethod
        def c():
            # ABC012 C: enumerate single-digit factor pairs "i x j" (1..9 each)
            # with i*j == 2025 - input, printed in sorted order.
            n = 2025 - int(sys.stdin.readline().rstrip())
            res = []
            for i in range(1, 10):
                if n%i != 0 or n//i > 9: continue
                res.append(f'{i} x {n//i}')
            print(*sorted(res), sep='\n')

        @staticmethod
        def d():
            # ABC012 D: all-pairs shortest paths via scipy Floyd-Warshall, then the
            # minimum over sources of the farthest distance (a graph "center").
            # Edges given 1-based as (a, b, t); undirected.
            n, m, *abt = map(int, sys.stdin.read().split())
            a, b, t = np.array(abt).reshape(m, 3).T
            res = shortest_path(csr_matrix((t, (a-1, b-1)), (n, n)), method='FW', directed=False)
            print(res.max(axis=-1).min().astype(np.int64))

        @staticmethod
        def d_2():
            # Alternative to d() using the project's own Floyd-Warshall
            # (GeometryTopology.Graph is defined elsewhere in this file).
            n, m, *abt = map(int, sys.stdin.read().split())
            g = GeometryTopology.Graph(n)
            for a, b, t in zip(*[iter(abt)]*3):
                a -= 1; b -= 1
                g.add_edge(a, b, weight=t)
                g.add_edge(b, a, weight=t)

            print(min(max(d) for d in g.floyd_warshall()))




    class ABC013:
        @staticmethod
        def a():
            # ABC013 A: alphabet position of an uppercase letter (A -> 1).
            print(ord(sys.stdin.readline().rstrip()) - ord('A') + 1)

        @staticmethod
        def b():
            # ABC013 B: minimum steps between digits a and b on a 0-9 dial
            # that wraps around (distance or 10 - distance).
            a, b = map(int, sys.stdin.read().split())
            d = abs(a - b)
            print(min(d, 10-d))

        @staticmethod
        def c():
            # ABC013 C: for every possible count y (0..n) of one meal type,
            # derive the minimum count x of the other type from the stamina
            # inequality, clamp it into [0, n-y], and minimise total cost a*x + c*y.
            # NOTE(review): the closed form for x follows the editorial inequality —
            # verified only against the arithmetic visible here.
            n, h, a, b, c, d, e = map(int, sys.stdin.read().split())
            y = np.arange(n+1)
            x = (n*e-h-(d+e)*y)//(b+e) + 1
            np.maximum(x, 0, out=x)      # x cannot be negative
            np.minimum(x, n-y, out=x)    # at most n-y days remain for x
            print(np.amin(a*x + c*y))

        @staticmethod
        def d():
            # ABC013 D: build the permutation produced by one pass of the m
            # horizontal amidakuji bars (applied bottom-up via adjacent swaps),
            # then (continued below) exponentiate it d times by binary method.
            n, m, d, *a = map(int, sys.stdin.read().split())
            res = list(range(n))
            def swap(i, j): res[i], res[j] = res[j], res[i]
            for i in a[::-1]: swap(i-1, i)
            res = np.array(res)
            def binary_method(a, 
p):NEWLINE b = np.arange(n)NEWLINE while p:NEWLINE if p&1: b = a[b]NEWLINE p >>= 1NEWLINE a = a[a]NEWLINE return bNEWLINE print(*(binary_method(res, d)+1), sep='\n')NEWLINENEWLINE class ABC014:NEWLINE @staticmethodNEWLINE def a():NEWLINE a, b = map(int, sys.stdin.read().split())NEWLINE print((a+b-1)//b * b - a)NEWLINENEWLINE @staticmethodNEWLINE def b():NEWLINE n, x, *a = map(int, sys.stdin.read().split())NEWLINE print(sum(a[i] for i in range(n) if x>>i&1))NEWLINENEWLINE @staticmethodNEWLINE def c():NEWLINE n, *ab = map(int, sys.stdin.read().split())NEWLINE a, b = np.array(ab).reshape(n, 2).TNEWLINE res = np.zeros(10**6+2, dtype=np.int64)NEWLINE np.add.at(res, a, 1)NEWLINE np.subtract.at(res, b+1, 1)NEWLINE np.cumsum(res, out=res)NEWLINE print(res.max())NEWLINENEWLINE @staticmethodNEWLINE def d():NEWLINE n = int(sys.stdin.readline().rstrip())NEWLINE g = GeometryTopology.Graph(n)NEWLINE for _ in range(n-1):NEWLINE x, y = map(int, sys.stdin.readline().split())NEWLINE x -= 1; y -= 1NEWLINE g.add_edge(x, y, weight=1)NEWLINE g.add_edge(y, x, weight=1)NEWLINENEWLINE g.bfs(0)NEWLINE g.find_ancestors()NEWLINENEWLINE q, *ab = map(int, sys.stdin.read().split())NEWLINE for a, b in zip(*[iter(ab)]*2):NEWLINE a -= 1; b -= 1NEWLINE print(g.find_dist(a, b) + 1)NEWLINENEWLINE class ABC015:NEWLINE @staticmethodNEWLINE def a():NEWLINE a, b = sys.stdin.read().split()NEWLINE print(a if len(a) > len(b) else b)NEWLINENEWLINE @staticmethodNEWLINE def b():NEWLINE n, *a = map(int, sys.stdin.read().split())NEWLINE a = np.array(a)NEWLINE print(np.ceil(a[np.nonzero(a)[0]].sum() / np.count_nonzero(a)).astype(np.int8))NEWLINENEWLINE @staticmethodNEWLINE def c():NEWLINE n, k, *t = map(int, sys.stdin.read().split())NEWLINE t = np.array(t).reshape(n, k)NEWLINE x = np.zeros((1, 1), dtype=np.int8)NEWLINE for i in range(n):NEWLINE x = x.reshape(-1, 1) ^ t[i]NEWLINE print('Found' if np.count_nonzero(x==0) > 0 else 'Nothing')NEWLINENEWLINE @staticmethodNEWLINE def d():NEWLINE w, n, k, *ab = map(int, 
sys.stdin.read().split())NEWLINE dp = np.zeros((k+1, w+1), dtype=np.int32)NEWLINE for a, b in zip(*[iter(ab)]*2): np.maximum(dp[1:,a:], dp[:-1,:-a]+b, out=dp[1:,a:])NEWLINE print(dp[k][w])NEWLINENEWLINENEWLINE class ABC016:NEWLINE @staticmethodNEWLINE def a():NEWLINE m, d = map(int, sys.stdin.readline().split())NEWLINE print('YES' if m%d == 0 else 'NO')NEWLINENEWLINE @staticmethodNEWLINE def b():NEWLINE a, b, c = map(int, sys.stdin.readline().split())NEWLINE f1, f2 = a+b==c, a-b==cNEWLINE if f1 & f2: print('?')NEWLINE elif f1 & (~f2): print('+')NEWLINE elif (~f1) & f2: print('-')NEWLINE else: print('!')NEWLINENEWLINE @staticmethodNEWLINE def c():NEWLINE n, _, *ab = map(int, sys.stdin.read().split())NEWLINE f = [0] * nNEWLINE for a, b in zip(*[iter(ab)]*2):NEWLINE a -= 1; b -= 1NEWLINE f[a] |= 1<<bNEWLINE f[b] |= 1<<aNEWLINE res = [bit_count(cumor(f[j] for j in range(n) if f[i]>>j&1) & ~(f[i] | 1<<i)) for i in range(n)]NEWLINE print(*res, sep='\n')NEWLINENEWLINE @staticmethodNEWLINE def d():NEWLINE sx, sy, gx, gy = map(int, sys.stdin.readline().split())NEWLINE seg1 = ((sx, sy), (gx, gy))NEWLINE n = int(sys.stdin.readline().rstrip())NEWLINE p1 = np.array(sys.stdin.read().split(), dtype=np.int64).reshape(n, 2).TNEWLINE p2 = np.hstack((p1[:, 1:], p1[:, :1]))NEWLINE seg2 = (p1, p2)NEWLINE print(np.count_nonzero(GeometryTopology.intersect(seg1, seg2))//2 + 1)NEWLINENEWLINE class ABC017:NEWLINE @staticmethodNEWLINE def a():NEWLINE s, e = np.array(sys.stdin.read().split(), dtype=np.int16).reshape(3, 2).TNEWLINE print((s // 10 * e).sum())NEWLINENEWLINE @staticmethodNEWLINE def b():NEWLINE choku_tail = set('ch, o, k, u'.split(', '))NEWLINE def is_choku(s):NEWLINE if s == '': return TrueNEWLINE if len(s)>=1 and (s[-1] in choku_tail) and is_choku(s[:-1]): return TrueNEWLINE if len(s)>=2 and (s[-2:] in choku_tail) and is_choku(s[:-2]): return TrueNEWLINE return FalseNEWLINE print('YES' if is_choku(sys.stdin.readline().rstrip()) else 'NO')NEWLINENEWLINE @staticmethodNEWLINE def 
c():NEWLINE n, m, *lrs = map(int, sys.stdin.read().split())NEWLINE l, r, s = np.array(lrs).reshape(n, 3).TNEWLINE score = np.zeros((m+1, ), dtype=np.int32)NEWLINE np.add.at(score, l-1, s)NEWLINE np.subtract.at(score, r, s)NEWLINE np.cumsum(score, out=score)NEWLINE print(s.sum() - score[:m].min())NEWLINENEWLINE @staticmethodNEWLINE def d():NEWLINE n, m, *f = map(int, sys.stdin.read().split())NEWLINE prev = [0] * (n+1)NEWLINE tmp = defaultdict(int)NEWLINE for i in range(n):NEWLINE prev[i+1] = tmp[f[i]]NEWLINE tmp[f[i]] = i+1NEWLINENEWLINE dp = [0] * (n+1); dp[0] = 1NEWLINE l, s = 0, dp[0]NEWLINE for i in range(1, n+1):NEWLINE while l < prev[i]:NEWLINE s = (s - dp[l]) % MODNEWLINE l += 1NEWLINE dp[i] = sNEWLINE s = (s + dp[i]) % MODNEWLINE print(dp[n])NEWLINENEWLINE class ABC018:NEWLINE @staticmethodNEWLINE def a():NEWLINE *a, = map(int, sys.stdin.read().split())NEWLINE a = sorted(enumerate(a), key=lambda x: -x[1])NEWLINE res = [None] * 3NEWLINE for i in range(3):NEWLINE res[a[i][0]] = i+1NEWLINE print(*res, sep='\n')NEWLINENEWLINE @staticmethodNEWLINE def b():NEWLINE s = sys.stdin.readline().rstrip()NEWLINE n, *lr = map(int, sys.stdin.read().split())NEWLINE for l, r in zip(*[iter(lr)]*2):NEWLINE l -= 1; r -= 1NEWLINE s = s[:l] + s[l:r+1][::-1] + s[r+1:]NEWLINE print(s)NEWLINENEWLINE @staticmethodNEWLINE def c():NEWLINE r, c, k = map(int, sys.stdin.readline().split())NEWLINE s = np.array([list(s) for s in sys.stdin.read().split()])NEWLINE s = np.pad(s, 1, constant_values='x')NEWLINENEWLINE a = np.zeros_like(s, dtype=np.float64)NEWLINE a[s=='o'] = np.infNEWLINE for i in range(1, r+1): np.minimum(a[i-1,:]+1, a[i,:], out=a[i,:])NEWLINE for i in range(r, 0, -1): np.minimum(a[i+1,:]+1, a[i,:], out=a[i,:])NEWLINE for j in range(1, c+1): np.minimum(a[:,j-1]+1, a[:,j], out=a[:,j])NEWLINE for j in range(c, 0, -1): np.minimum(a[:,j+1]+1, a[:,j], out=a[:,j])NEWLINE print(np.count_nonzero(a>=k))NEWLINENEWLINE @staticmethodNEWLINE def c_2():NEWLINE r, c, k = map(int, 
sys.stdin.readline().split())NEWLINE s = np.array([list(s) for s in sys.stdin.read().split()])NEWLINE s = np.pad(s, 1, constant_values='x')NEWLINE a = (s=='o').astype(np.int16)NEWLINE a = distance_transform_cdt(a, metric='taxicab')NEWLINE print(np.count_nonzero(a>=k))NEWLINENEWLINE @staticmethodNEWLINE def d():NEWLINE n, m, p, q, r, *xyz = map(int, sys.stdin.read().split())NEWLINE x, y, z = np.array(xyz).reshape(r, 3).TNEWLINE h = np.zeros((n, m), dtype=np.int32); h[x-1, y-1] = zNEWLINE g = np.array([*itertools.combinations(range(n), p)])NEWLINE print(np.sort(h[g].sum(axis=1), axis=1)[:,-q:].sum(axis=1).max())NEWLINENEWLINENEWLINE class ABC019:NEWLINE @staticmethodNEWLINE def a():NEWLINE *a, = map(int, sys.stdin.readline().split())NEWLINE print(sorted(a)[1])NEWLINENEWLINE @staticmethodNEWLINE def b():NEWLINE s = sys.stdin.readline().rstrip() + '$'NEWLINE cnt = 0NEWLINE prev = '$'NEWLINE t = ''NEWLINE for c in s:NEWLINE if c == prev: cnt += 1; continueNEWLINE t += prev+str(cnt)NEWLINE prev = c; cnt = 1NEWLINE print(t[2:])NEWLINENEWLINE @staticmethodNEWLINE def c():NEWLINE n, *a = map(int, sys.stdin.read().split())NEWLINE res = set()NEWLINE for x in a:NEWLINE while not x&1:NEWLINE x >>= 1NEWLINE res.add(x)NEWLINE print(len(res))NEWLINENEWLINE @staticmethodNEWLINE def d():NEWLINE def inquire(u, v):NEWLINE print(f'? {u} {v}'.format(u, v), flush=True)NEWLINE return int(sys.stdin.readline().rstrip())NEWLINENEWLINE n = int(sys.stdin.readline().rstrip())NEWLINE u = sorted([(inquire(1, v), v) for v in range(2, n+1)])[-1][1]NEWLINE d = max((inquire(u, v)) for v in range(1, n+1) if u!=v)NEWLINE print(f'! 
{d}')NEWLINENEWLINE class ABC020:NEWLINE @staticmethodNEWLINE def a():NEWLINE print('ABC' if int(sys.stdin.readline().rstrip())==1 else 'chokudai')NEWLINENEWLINE @staticmethodNEWLINE def b():NEWLINE a, b = sys.stdin.readline().split()NEWLINE print(int(a+b) * 2)NEWLINENEWLINE @staticmethodNEWLINE def c():NEWLINE h, w, t = map(int, sys.stdin.readline().split())NEWLINE s = [list(s) for s in sys.stdin.read().split()]NEWLINE for i in range(h):NEWLINE for j in range(w):NEWLINE if s[i][j] == 'S': sy, sx = i, jNEWLINE if s[i][j] == 'G': gy, gx = i, jNEWLINE s[sy][sx] = s[gy][gx] = '.'NEWLINE source, target = sy*w+sx, gy*w+gxNEWLINENEWLINE def heuristic_function(u, v=target):NEWLINE uy, ux = divmod(u, w)NEWLINE vy, vx = divmod(v, w)NEWLINE return abs(vy-uy) + abs(ux-vx)NEWLINENEWLINE def min_time(x):NEWLINE g = GeometryTopology.Graph(h*w)NEWLINE # g = nx.DiGraph()NEWLINENEWLINE for i in range(h):NEWLINE for j in range(w):NEWLINE u = i*w + jNEWLINE if i > 0: g.add_edge(u, (i-1)*w+j, weight=(1 if s[i-1][j]=='.' else x))NEWLINE if i < h-1: g.add_edge(u, (i+1)*w+j, weight=(1 if s[i+1][j]=='.' else x))NEWLINE if j > 0: g.add_edge(u, i*w+j-1, weight=(1 if s[i][j-1]=='.' else x))NEWLINE if j < w-1: g.add_edge(u, i*w+j+1, weight=(1 if s[i][j+1]=='.' 
else x))NEWLINENEWLINE return g.dijkstra(source)[target]NEWLINE return g.astar(source, target, heuristic_function)NEWLINE # return nx.dijkstra_path_length(g, source, target)NEWLINE # return nx.astar_path_length(g, source, target, heuristic_function)NEWLINENEWLINE def binary_search():NEWLINE lo, hi = 1, t+1NEWLINE while lo+1 < hi:NEWLINE x = (lo+hi)//2NEWLINE if min_time(x) > t:NEWLINE hi = xNEWLINE else:NEWLINE lo = xNEWLINE return loNEWLINENEWLINE print(binary_search())NEWLINENEWLINE @staticmethodNEWLINE def d():NEWLINE n, k = map(int, sys.stdin.readline().split())NEWLINE div = sorted(NumberTheory.find_divisors(k))NEWLINE l = len(div)NEWLINE s = [0] * lNEWLINE for i, d in enumerate(div): s[i] = (1+n//d)*(n//d)//2 * d % MODNEWLINE for i in range(l-1, -1, -1):NEWLINE for j in range(i+1, l):NEWLINE if div[j]%div[i]: continueNEWLINE s[i] = (s[i]-s[j])%MODNEWLINENEWLINE print(sum(s[i]*k//div[i]%MOD for i in range(l))%MOD) # ans is LCM.NEWLINENEWLINE class ABC021:NEWLINE @staticmethodNEWLINE def a():NEWLINE n = int(sys.stdin.readline().rstrip())NEWLINE s = [1<<i for i in range(5) if n>>i&1]NEWLINE print(len(s), *s, sep='\n')NEWLINENEWLINE @staticmethodNEWLINE def b():NEWLINE n, a, b, k, *p = map(int, sys.stdin.read().split())NEWLINE print('YES' if len(set(p)|set([a, b])) == k+2 else 'NO')NEWLINENEWLINE @staticmethodNEWLINE def c():NEWLINE n, a, b, m, *xy = map(int, sys.stdin.read().split())NEWLINE x, y = np.array(xy).reshape(m, 2).T - 1NEWLINE a -= 1; b -= 1NEWLINE g = csgraph_to_dense(csr_matrix((np.ones(m), (x, y)), (n, n), dtype=np.int8))NEWLINE g = np.logical_or(g, g.T)NEWLINE paths = np.zeros(n, dtype=np.int64).reshape(-1, 1)NEWLINE paths[a, 0] = 1NEWLINE while not paths[b, 0]:NEWLINE paths = np.dot(g, paths) % MODNEWLINE print(paths[b, 0])NEWLINENEWLINE @staticmethodNEWLINE def c_2():NEWLINE n, a, b, m, *xy = map(int, sys.stdin.read().split())NEWLINE a -= 1; b -= 1NEWLINE g = GeometryTopology.Graph()NEWLINENEWLINE for x, y in zip(*[iter(xy)]*2):NEWLINE x -= 1; y 
-= 1NEWLINE g.add_edge(x, y, weight=1)NEWLINE g.add_edge(y, x, weight=1)NEWLINENEWLINE dist, paths = g.dijkstra(a, paths_cnt=True, mod=MOD)NEWLINE print(paths[b])NEWLINENEWLINE @staticmethodNEWLINE def d():NEWLINE n, k = map(int, sys.stdin.read().split())NEWLINE cn = Combinatorics.CombinationsMod()NEWLINE print(cn(n+k-1, k))NEWLINENEWLINENEWLINE class ABC022:NEWLINE @staticmethodNEWLINE def a():NEWLINE n, s, t, *a = map(int, sys.stdin.read().split())NEWLINE a = np.array(a)NEWLINE np.cumsum(a, out=a)NEWLINE print(((s<=a) & (a<=t)).sum())NEWLINENEWLINE @staticmethodNEWLINE def b():NEWLINE n, *a = map(int, sys.stdin.read().split())NEWLINE c = Counter(a)NEWLINE print(sum(c.values())-len(c))NEWLINENEWLINE @staticmethodNEWLINE def c():NEWLINE n, m, *uvl = map(int, sys.stdin.read().split())NEWLINE u, v, l = np.array(uvl).reshape(m, 3).TNEWLINE u -= 1; v -= 1NEWLINE g = csgraph_to_dense(csr_matrix((l, (u,v)), (n,n)))NEWLINE g += g.TNEWLINE g[g==0] = np.infNEWLINE dist0 = g[0].copy()NEWLINE g[0] = 0; g[:, 0] = 0NEWLINE dist = shortest_path(g, method='FW', directed=False)NEWLINE u, v = np.array([*itertools.combinations(range(1,n), 2)]).TNEWLINE res = (dist0[u]+dist[u,v]+dist0[v]).min()NEWLINE print(-1 if res==np.inf else int(res))NEWLINENEWLINE @staticmethodNEWLINE def d():NEWLINE n, *ab = map(int, sys.stdin.read().split())NEWLINE c = np.array(ab).reshape(2,n,2)NEWLINE g = c.mean(axis=1)NEWLINE d = np.sqrt(((c-g[:,None,:])**2).sum(axis=-1)).sum(axis=1)NEWLINE print(d[1]/d[0])NEWLINENEWLINENEWLINE class ABC023:NEWLINE @staticmethodNEWLINE def a():NEWLINE print(sum(divmod(int(sys.stdin.readline().rstrip()), 10)))NEWLINENEWLINE @staticmethodNEWLINE def b():NEWLINE n, s = sys.stdin.read().split()NEWLINE n = int(n)NEWLINE t = 'b'NEWLINE for i in range(n//2):NEWLINE if i%3==0: t = 'a'+t+'c'NEWLINE elif i%3==1: t = 'c'+t+'a'NEWLINE else: t = 'b'+t+'b'NEWLINE print(n//2 if t==s else -1)NEWLINENEWLINE @staticmethodNEWLINE def b_2():NEWLINE n, s = sys.stdin.read().split()NEWLINE n = 
int(n)NEWLINE if n&1^1: print(-1); returnNEWLINE a = list('abc')NEWLINE i = (1-n//2)%3NEWLINE for c in s:NEWLINE if c != a[i]:NEWLINE print(-1); returnNEWLINE i = (i+1) % 3NEWLINE print(n//2)NEWLINENEWLINE @staticmethodNEWLINE def c():NEWLINE h, w, k, n, *rc = map(int, sys.stdin.read().split())NEWLINE r, c = np.array(rc).reshape(n,2).T - 1NEWLINE rb = np.bincount(r, minlength=h)NEWLINE cb = np.bincount(c, minlength=w)NEWLINE rbb = np.bincount(rb, minlength=k+1)NEWLINE cbb = np.bincount(cb, minlength=k+1)NEWLINE tot = (rbb[:k+1]*cbb[k::-1]).sum()NEWLINE real = np.bincount(rb[r]+cb[c]-1, minlength=k+1)NEWLINE print(tot-real[k-1]+real[k])NEWLINENEWLINE @staticmethodNEWLINE def d():NEWLINE n, *hs = map(int, sys.stdin.read().split())NEWLINE h, s = np.array(hs).reshape(n,2).TNEWLINENEWLINE t = np.arange(n)NEWLINE def is_ok(x): return np.all(np.sort((x-h)//s) >= t)NEWLINE def binary_search():NEWLINE lo, hi = 0, 10**14NEWLINE while lo+1 < hi:NEWLINE x = (lo+hi)//2NEWLINE if is_ok(x): hi = xNEWLINE else: lo = xNEWLINE return hiNEWLINENEWLINE print(binary_search())NEWLINENEWLINE class ABC024:NEWLINE @staticmethodNEWLINE def a():NEWLINE a, b, c, k, s, t = map(int, sys.stdin.read().split())NEWLINE print(a*s + b*t - c*(s+t)*(s+t>=k))NEWLINENEWLINE @staticmethodNEWLINE def b():NEWLINE n, t, *a = map(int, sys.stdin.read().split())NEWLINE a = np.array(a)NEWLINE print(np.minimum(a[1:]-a[:-1], t).sum() + t)NEWLINENEWLINE @staticmethodNEWLINE def c():NEWLINE n, d, k, *lrst = map(int, sys.stdin.read().split())NEWLINE lrst = np.array(lrst)NEWLINE lr = lrst[:2*d].reshape(d,2)NEWLINE s, t = lrst[2*d:].reshape(k,2).TNEWLINE day = np.zeros((k,),dtype=np.int32)NEWLINE for i in range(d):NEWLINE l, r = lr[i]NEWLINE move = (l<=s)&(s<=r)&(s!=t)NEWLINE reach = move&(l<=t)&(t<=r)NEWLINE s[move&(s<t)] = rNEWLINE s[move&(s>t)] = lNEWLINE s[reach] = t[reach]; day[reach] = i+1NEWLINE print(*day, sep='\n')NEWLINENEWLINE @staticmethodNEWLINE def d():NEWLINE a, b, c = map(int, 
sys.stdin.read().split())NEWLINE p = MODNEWLINE denom = pow(a*b%p - b*c%p + c*a%p, p-2, p)NEWLINE w = (b*c-a*b)%p*denom%pNEWLINE h = (b*c-a*c)%p*denom%pNEWLINE print(h,w)NEWLINENEWLINE class ABC025:NEWLINE @staticmethodNEWLINE def a():NEWLINE s, n = sys.stdin.read().split()NEWLINE n = int(n)NEWLINE i, j = divmod(n-1, 5)NEWLINE print(s[i]+s[j])NEWLINENEWLINE @staticmethodNEWLINE def b():NEWLINE n, a, b = map(int, sys.stdin.readline().split())NEWLINE res = defaultdict(int)NEWLINE for _ in range(n):NEWLINE s, d = sys.stdin.readline().split()NEWLINE d = int(d)NEWLINE res[s] += min(max(d,a),b)NEWLINE res = res['East'] - res['West']NEWLINE if res == 0: ans = 0NEWLINE elif res > 0: ans = f'East {res}'NEWLINE else: ans = f'West {-res}'NEWLINE print(ans)NEWLINENEWLINE @staticmethodNEWLINE def c():NEWLINE b = [0] * 6NEWLINE for i in range(2):NEWLINE *row, = map(int, sys.stdin.readline().split())NEWLINE for j in range(3):NEWLINE b[i*3+j] = row[j]NEWLINE c = [0] * 8NEWLINE for i in range(3):NEWLINE *row, = map(int, sys.stdin.readline().split())NEWLINE for j in range(2):NEWLINE c[i*3+j] = row[j]NEWLINE tot = sum(b) + sum(c)NEWLINENEWLINE @lru_cache(maxsize=None)NEWLINE def f(s=tuple(0 for _ in range(9))):NEWLINE if all(s):NEWLINE res = 0NEWLINE for i in range(6): res += (s[i]==s[i+3])*b[i]NEWLINE for i in range(8): res += (s[i]==s[i+1])*c[i]NEWLINE return resNEWLINE cand = [i for i in range(9) if not s[i]]NEWLINE flg = len(cand)&1NEWLINE s = list(s)NEWLINE res = []NEWLINE for i in cand:NEWLINE s[i] = (flg^1)+1NEWLINE res.append(f(tuple(s)))NEWLINE s[i] = 0NEWLINE return sorted(res, reverse=flg)[0]NEWLINENEWLINE a = f(); b = tot-aNEWLINE print(a)NEWLINE print(b)NEWLINENEWLINENEWLINENEWLINE class ABC026:NEWLINE @staticmethodNEWLINE def a():NEWLINE a = int(sys.stdin.readline().rstrip())NEWLINE print(a//2 * (a-a//2))NEWLINENEWLINE @staticmethodNEWLINE def b():NEWLINE n, *r = map(int, sys.stdin.read().split())NEWLINE s = np.pi * np.array([0]+r)**2; s.sort()NEWLINE res = 
s[n::-2].sum() - s[n-1::-2].sum()NEWLINE print(res)NEWLINENEWLINE @staticmethodNEWLINE def c():NEWLINE n, *b = map(int, sys.stdin.read().split())NEWLINE g = GeometryTopology.Graph()NEWLINE for i in range(1, n): g.add_edge(b[i-1]-1, i, weight=1)NEWLINENEWLINE def f(u=0):NEWLINE if not g.edges[u]: return 1NEWLINE s = [f(v) for v in g.edges[u]]NEWLINE return max(s) + min(s) + 1NEWLINENEWLINE print(f())NEWLINENEWLINE @staticmethodNEWLINE def d():NEWLINE a, b, c = map(int, sys.stdin.readline().split())NEWLINE def f(t): return a*t + b*np.sin(c*t*np.pi) - 100NEWLINE print(optimize.brenth(f, 0, 200))NEWLINENEWLINENEWLINE class ABC027:NEWLINE @staticmethodNEWLINE def a():NEWLINE l = [int(l) for l in sys.stdin.readline().split()]NEWLINE l.sort()NEWLINE print(l[2] if l[0]==l[1] else l[0])NEWLINENEWLINE @staticmethodNEWLINE def b():NEWLINE n, *a = map(int, sys.stdin.read().split())NEWLINE m, r = divmod(sum(a), n)NEWLINE if r: print(-1); returnNEWLINE population = 0NEWLINE towns = 0NEWLINE cnt = 0NEWLINE for x in a:NEWLINE population += xNEWLINE towns += 1NEWLINE if population/towns != m: cnt+=1; continueNEWLINE population, towns = 0, 0NEWLINE print(cnt)NEWLINENEWLINE @staticmethodNEWLINE def c():NEWLINE n = int(sys.stdin.readline().rstrip())NEWLINE flg = n.bit_length()&1^1NEWLINE t = 0NEWLINE x = 1NEWLINE while x <= n:NEWLINE t += 1NEWLINE x = 2*x+1 if t&1^flg else 2*xNEWLINE print('Aoki' if t&1 else 'Takahashi')NEWLINENEWLINENEWLINE class ABC028:NEWLINE @staticmethodNEWLINE def a():NEWLINE n = int(sys.stdin.readline().rstrip())NEWLINE print('Bad' if n<60 else 'Good' if n<90 else 'Great' if n<100 else 'Perfect')NEWLINENEWLINE @staticmethodNEWLINE def b():NEWLINE s = sys.stdin.readline().rstrip()NEWLINE cnt = Counter(s)NEWLINE print(*[cnt.get(c, 0) for c in 'ABCDEF'])NEWLINENEWLINE @staticmethodNEWLINE def c():NEWLINE a, b, c, d, e = map(int, sys.stdin.readline().split())NEWLINE print(max(b+c+e, a+d+e))NEWLINENEWLINE @staticmethodNEWLINE def d():NEWLINE n, k = map(int, 
sys.stdin.readline().split())NEWLINE c = 3*2*(n-k)*(k-1) + 3*(n-1) + 1NEWLINE print(c/n**3)NEWLINENEWLINENEWLINE class ABC029:NEWLINE @staticmethodNEWLINE def a():NEWLINE print(sys.stdin.readline().rstrip()+'s')NEWLINENEWLINE @staticmethodNEWLINE def b():NEWLINE print(sum('r' in s for s in sys.stdin.read().split()))NEWLINENEWLINE @staticmethodNEWLINE def c():NEWLINE print(*[''.join(s) for s in itertools.product('abc', repeat=int(sys.stdin.readline().rstrip()))], sep='\n')NEWLINENEWLINE @staticmethodNEWLINE def d():NEWLINE n = int(sys.stdin.readline().rstrip())NEWLINE print(sum(n//10**(i+1)*10**i + min(max((n%10**(i+1)-10**i+1), 0), 10**i) for i in range(9)))NEWLINENEWLINE class ABC030:NEWLINE @staticmethodNEWLINE def a():NEWLINE a, b, c, d = map(int, sys.stdin.readline().split())NEWLINE e, f = b*c, d*aNEWLINE print('TAKAHASHI' if e>f else 'AOKI' if f>e else 'DRAW')NEWLINENEWLINE @staticmethodNEWLINE def b():NEWLINE n, m = map(int, sys.stdin.readline().split())NEWLINE n = (n%12 + m/60)*30; m *= 6NEWLINE d = abs(n-m)NEWLINE print(min(d, 360-d))NEWLINENEWLINE @staticmethodNEWLINE def c():NEWLINE n, m = map(int, sys.stdin.readline().split())NEWLINE x, y = map(int, sys.stdin.readline().split())NEWLINE a = [int(x) for x in sys.stdin.readline().split()]NEWLINE b = [int(x) for x in sys.stdin.readline().split()]NEWLINENEWLINE t = 0NEWLINE p = 1NEWLINE cnt = 0NEWLINE while True:NEWLINE if p:NEWLINE i = bi_l(a, t)NEWLINE if i == n: breakNEWLINE t = a[i] + xNEWLINE else:NEWLINE i = bi_l(b, t)NEWLINE if i == m: breakNEWLINE t = b[i] + yNEWLINE cnt += 1NEWLINE p ^= 1NEWLINE print(cnt)NEWLINENEWLINE @staticmethodNEWLINE def d():NEWLINE n, a = map(int , sys.stdin.readline().split()); a -= 1NEWLINE k = sys.stdin.readline().rstrip()NEWLINE b = [int(x)-1 for x in sys.stdin.readline().split()]NEWLINENEWLINE c = [None] * nNEWLINE for i in range(n+1):NEWLINE if str(i)==k: print(a+1);returnNEWLINE if c[a] is not None: l, d = i-c[a], c[a];breakNEWLINE c[a] = i; a = b[a]NEWLINENEWLINE r = 
[None] * len(k); r[0] = 1NEWLINE for i in range(len(k)-1): r[i+1] = r[i]*10%lNEWLINE k = [int(c) for c in k][::-1]NEWLINE d = (sum(r[i]*k[i] for i in range(len(k)))-d) % lNEWLINE for _ in range(d): a = b[a]NEWLINE print(a+1)NEWLINENEWLINE @staticmethodNEWLINE def d_2():NEWLINE n, a, k, *b = map(int, sys.stdin.read().split())NEWLINE a -= 1; b = [x-1 for x in b]NEWLINE c = [None]*nNEWLINE for i in range(n+1):NEWLINE if i==k: print(a+1); returnNEWLINE if c[a] is not None:NEWLINE for _ in range((k-c[a])%(i-c[a])): a = b[a]NEWLINE print(a+1); returnNEWLINE c[a] = i; a = b[a]NEWLINENEWLINENEWLINE class ABC031:NEWLINE @staticmethodNEWLINE def a():NEWLINE a, d = map(int, sys.stdin.readline().split())NEWLINE if a > d: a,d = d,aNEWLINE print((a+1)*d)NEWLINENEWLINE @staticmethodNEWLINE def b():NEWLINE l, h, n, *a = map(int, sys.stdin.read().split())NEWLINE a = np.array(a)NEWLINE res = np.maximum(l-a, 0)NEWLINE res[a>h] = -1NEWLINE print(*res, sep='\n')NEWLINENEWLINE @staticmethodNEWLINE def c():NEWLINE n, *a = map(int, sys.stdin.read().split())NEWLINE a = np.array(a)NEWLINE np.cumsum(a[::2], out=a[::2])NEWLINE np.cumsum(a[1::2], out=a[1::2])NEWLINE a = list(a) + [0]*2NEWLINENEWLINE def score(i, j):NEWLINE if i > j: i, j = j, iNEWLINE if (j-i)&1: x, y = a[j-1]-a[i-2], a[j]-a[i-1]NEWLINE else: x, y = a[j]-a[i-2], a[j-1]-a[i-1]NEWLINE return x, yNEWLINENEWLINE res = -infNEWLINE for i in range(n):NEWLINE s = -infNEWLINE for j in range(n):NEWLINE if i==j: continueNEWLINE x, y = score(i, j)NEWLINE if y>s: s,t = y,xNEWLINE res = max(res, t)NEWLINE print(res)NEWLINENEWLINE @staticmethodNEWLINE def d():NEWLINE k, m = map(int, sys.stdin.readline().split())NEWLINE *vw, = zip(*[iter(sys.stdin.read().split())]*2)NEWLINE for l in itertools.product((1,2,3), repeat=k):NEWLINE s = dict()NEWLINE for v, w in vw:NEWLINE i = 0NEWLINE for d in v:NEWLINE d = int(d)-1NEWLINE j = i+l[d]NEWLINE if j > len(w): breakNEWLINE t = w[i:j]NEWLINE if d in s and s[d] != t: breakNEWLINE s[d] = tNEWLINE i = 
jNEWLINE else:NEWLINE if i == len(w): continueNEWLINE breakNEWLINE else:NEWLINE for i in range(k): print(s[i])NEWLINE returnNEWLINENEWLINENEWLINE class ABC032:NEWLINE @staticmethodNEWLINE def a():NEWLINE a, b, n = map(int, sys.stdin.read().split())NEWLINE l = NumberTheory.lcm(a, b)NEWLINE print((n+l-1)//l*l)NEWLINENEWLINE @staticmethodNEWLINE def b():NEWLINE s, k = sys.stdin.read().split()NEWLINE k = int(k)NEWLINE res = set()NEWLINE for i in range(len(s)-k+1):NEWLINE res.add(s[i:i+k])NEWLINE print(len(res))NEWLINENEWLINE @staticmethodNEWLINE def c():NEWLINE n, k, *s = map(int, sys.stdin.read().split())NEWLINE if 0 in s: print(n); returnNEWLINE if k == 0: print(0); returnNEWLINE res, tmp, l = 0, 1, 0NEWLINE for r in range(n):NEWLINE tmp *= s[r]NEWLINE while tmp > k: tmp //= s[l]; l+=1NEWLINE res = max(res, r-l+1)NEWLINENEWLINE print(res)NEWLINENEWLINE class ABC033:NEWLINE @staticmethodNEWLINE def a():NEWLINE print('SAME' if len(set(sys.stdin.readline().rstrip()))==1 else 'DIFFERENT')NEWLINENEWLINE @staticmethodNEWLINE def b():NEWLINE n = int(sys.stdin.readline().rstrip())NEWLINE res = dict()NEWLINE for _ in range(n):NEWLINE s, p = sys.stdin.readline().split()NEWLINE res[s] = int(p)NEWLINE tot = sum(res.values())NEWLINE for s, p in res.items():NEWLINE if p > tot/2: print(s); returnNEWLINE print('atcoder')NEWLINENEWLINE @staticmethodNEWLINE def c():NEWLINE s = sys.stdin.readline().rstrip()NEWLINE print(sum(not '0' in f for f in s.split('+')))NEWLINENEWLINENEWLINE class ABC034:NEWLINE @staticmethodNEWLINE def a():NEWLINE x, y = map(int, sys.stdin.readline().split())NEWLINE print('Better' if y>x else 'Worse')NEWLINENEWLINE @staticmethodNEWLINE def b():NEWLINE n = int(sys.stdin.readline().rstrip())NEWLINE print(n+1 if n&1 else n-1)NEWLINENEWLINE @staticmethodNEWLINE def c():NEWLINE h, w = map(int, sys.stdin.read().split())NEWLINE choose = Combinatorics.CombinationsMod()NEWLINE print(choose(h+w-2, h-1))NEWLINENEWLINE @staticmethodNEWLINE def d():NEWLINE n, k, *wp = 
map(int, sys.stdin.read().split())NEWLINE w, p = np.array(wp).reshape(-1, 2).TNEWLINE def f(x):NEWLINE return np.sort(w*(p-x))[-k:].sum()NEWLINE print(optimize.bisect(f, 0, 100))NEWLINENEWLINE class ABC035:NEWLINE @staticmethodNEWLINE def a():NEWLINE w, h = map(int, sys.stdin.readline().split())NEWLINE print('4:3' if 4*h==3*w else '16:9')NEWLINENEWLINE @staticmethodNEWLINE def b():NEWLINE s, t = sys.stdin.read().split()NEWLINE y = x = z = 0NEWLINE for c in s:NEWLINE if c == '?': z += 1NEWLINE elif c == 'L': x -= 1NEWLINE elif c == 'R': x += 1NEWLINE elif c == 'D': y -= 1NEWLINE elif c == 'U': y += 1NEWLINE d = abs(y)+abs(x)NEWLINE print(d+z if t=='1' else max(d-z, (d-z)&1))NEWLINENEWLINE @staticmethodNEWLINE def c():NEWLINE n, q, *lr = map(int, sys.stdin.read().split())NEWLINE l, r = np.array(lr).reshape(q, 2).TNEWLINE res = np.zeros(n+1, dtype=int)NEWLINE np.add.at(res, l-1, 1)NEWLINE np.subtract.at(res, r, 1)NEWLINE np.cumsum(res, out=res)NEWLINE res = res&1NEWLINE print(''.join(map(str, res[:-1])))NEWLINENEWLINE @staticmethodNEWLINE def d():NEWLINE n, m, t = map(int, sys.stdin.readline().split())NEWLINE point = np.array(sys.stdin.readline().split(), dtype=int)NEWLINE a, b, c = np.array(sys.stdin.read().split(), dtype=np.int64).reshape(m, 3).TNEWLINE a -= 1; b -= 1NEWLINE d_1 = shortest_path(csr_matrix((c, (a, b)), (n, n)), method='D', directed=True, indices=0)NEWLINE d_2 = shortest_path(csr_matrix((c, (b, a)), (n, n)), method='D', directed=True, indices=0)NEWLINE print(int(np.amax((t-(d_1+d_2))*point)))NEWLINENEWLINE class ABC036:NEWLINE @staticmethodNEWLINE def a():NEWLINE a, b = map(int, sys.stdin.readline().split())NEWLINE print((b+a-1)//a)NEWLINENEWLINE @staticmethodNEWLINE def b():NEWLINE n, *s = sys.stdin.read().split()NEWLINE n = int(n)NEWLINE for j in range(n):NEWLINE row = ''NEWLINE for i in range(n-1, -1, -1):NEWLINE row += s[i][j]NEWLINE print(row)NEWLINENEWLINE @staticmethodNEWLINE def c():NEWLINE n, *a = map(int, sys.stdin.read().split())NEWLINE b = 
[None]*nNEWLINE prev = NoneNEWLINE j = -1NEWLINE for i, x in sorted(enumerate(a), key=lambda x: x[1]):NEWLINE if x != prev: j += 1NEWLINE b[i] = jNEWLINE prev = xNEWLINE print(*b, sep='\n')NEWLINENEWLINE @staticmethodNEWLINE def d():NEWLINE n, *ab = map(int, sys.stdin.read().split())NEWLINE edges = [[] for _ in range(n)]NEWLINE for a, b in zip(*[iter(ab)]*2):NEWLINE a -= 1; b -= 1NEWLINE edges[a].append(b)NEWLINE edges[b].append(a)NEWLINE parent = [None]*nNEWLINE def count(u):NEWLINE black, white = 1, 1NEWLINE for v in edges[u]:NEWLINE if v == parent[u]: continueNEWLINE parent[v] = uNEWLINE b, w = count(v)NEWLINE black *= w; black %= MODNEWLINE white *= (b+w)%MOD; white %= MODNEWLINE return black, whiteNEWLINE print(sum(count(0))%MOD)NEWLINENEWLINE class ABC037:NEWLINE @staticmethodNEWLINE def a():NEWLINE a, b, c = map(int, sys.stdin.readline().split())NEWLINE print(c//min(a, b))NEWLINENEWLINE @staticmethodNEWLINE def b():NEWLINE n, q, *lrt = map(int, sys.stdin.read().split())NEWLINE a = np.zeros(n, dtype=int)NEWLINE for l, r, t in zip(*[iter(lrt)]*3):NEWLINE a[l-1:r] = tNEWLINE print(*a, sep='\n')NEWLINENEWLINE @staticmethodNEWLINE def c():NEWLINE n, k, *a = map(int, sys.stdin.read().split())NEWLINE a = np.array([0]+a)NEWLINE np.cumsum(a, out=a)NEWLINE s = (a[k:] - a[:-k]).sum()NEWLINE print(s)NEWLINENEWLINE @staticmethodNEWLINE def d():NEWLINE h, w, *a = map(int, sys.stdin.read().split())NEWLINE p = [None]*(h*w)NEWLINE def paths(k):NEWLINE if p[k]: return p[k]NEWLINE p[k] = 1NEWLINE i, j = divmod(k,w)NEWLINE if j>0 and a[k]>a[k-1]: p[k] += paths(k-1)NEWLINE if j<w-1 and a[k]>a[k+1]: p[k] += paths(k+1)NEWLINE if i>0 and a[k]>a[k-w]: p[k] += paths(k-w)NEWLINE if i<h-1 and a[k]>a[k+w]: p[k] += paths(k+w)NEWLINE p[k] %= MOD; return p[k]NEWLINE print(sum(paths(i) for i in range(h*w))%MOD)NEWLINENEWLINENEWLINE class ABC038:NEWLINE @staticmethodNEWLINE def a():NEWLINE s = sys.stdin.readline().rstrip()NEWLINE print('YES' if s[-1]=='T' else 'NO')NEWLINENEWLINE 
@staticmethodNEWLINE def b():NEWLINE a, b, c, d = map(int, sys.stdin.read().split())NEWLINE print('YES' if a==c or b==c or a==d or b==d else 'NO')NEWLINENEWLINE @staticmethodNEWLINE def c():NEWLINE n, *a = map(int, sys.stdin.read().split())NEWLINE a += [-1]NEWLINE cnt = nNEWLINE tmp = 1NEWLINE for i in range(n):NEWLINE if a[i+1] > a[i]:NEWLINE tmp += 1NEWLINE else:NEWLINE cnt += tmp*(tmp-1)//2NEWLINE tmp = 1NEWLINE print(cnt)NEWLINENEWLINE @staticmethodNEWLINE def d():NEWLINE n, *wh = map(int, sys.stdin.read().split())NEWLINE a = [x[1] for x in sorted(zip(*[iter(wh)]*2), key=lambda x: (x[0], -x[1]))]NEWLINE print(bi_l(DP.LIS(a), inf))NEWLINENEWLINE class ABC039:NEWLINE @staticmethodNEWLINE def a():NEWLINE a, b, c = map(int, sys.stdin.readline().split())NEWLINE print((a*b+b*c+c*a)*2)NEWLINENEWLINE @staticmethodNEWLINE def b():NEWLINE x = int(sys.stdin.readline().rstrip())NEWLINE for n in range(1, int(x**0.5)+1):NEWLINE if pow(n, 4)==x:NEWLINE print(n); returnNEWLINENEWLINENEWLINE @staticmethodNEWLINE def c():NEWLINE board = 'WBWBWWBWBWBW' * 3NEWLINE convert = 'Do, *, Re, *, Mi, Fa, *, So, *, La, *, Si'.split(', ')NEWLINE s = sys.stdin.readline().rstrip()NEWLINE print(convert[board.index(s)])NEWLINENEWLINENEWLINE @staticmethodNEWLINE def d():NEWLINE h, w = map(int, sys.stdin.readline().split())NEWLINE s = ''.join(sys.stdin.read().split())NEWLINE white = set()NEWLINE for i in range(h*w):NEWLINE if s[i]=='#': continueNEWLINE l = 0 if i%w==0 else -1NEWLINE r = 0 if (i+1)%w==0 else 1NEWLINE white |= {i+dy+dx for dy in range(-w, w+1, w) for dx in range(l,r+1)}NEWLINE black_before = set(range(h*w)) - whiteNEWLINE black_after = set()NEWLINE for i in black_before:NEWLINE l = 0 if i%w==0 else -1NEWLINE r = 0 if (i+1)%w==0 else 1NEWLINE black_after |= {i+dy+dx for dy in range(-w, w+1, w) for dx in range(l,r+1)}NEWLINE black_after &= set(range(h*w))NEWLINE for i in range(h*w):NEWLINE if s[i]=='#' and not i in black_after: print('impossible'); returnNEWLINE 
print('possible')NEWLINE for i in range(h):NEWLINE print(''.join(['#' if i*w+j in black_before else '.' for j in range(w)]))NEWLINENEWLINENEWLINENEWLINENEWLINE class ABC040:NEWLINE @staticmethodNEWLINE def a():NEWLINE n, x = map(int, sys.stdin.readline().split())NEWLINE print(min(x-1, n-x))NEWLINENEWLINE @staticmethodNEWLINE def b():NEWLINE n = int(sys.stdin.readline().rstrip())NEWLINE res = infNEWLINE for i in range(1, int(n**.5)+1):NEWLINE res = min(res, n//i-i+n%i)NEWLINE print(res)NEWLINENEWLINE @staticmethodNEWLINE def c():NEWLINE n, *h = map(int, sys.stdin.read().split())NEWLINE h = [h[0]]+hNEWLINE cost = [None] * (n+1); cost[0] = cost[1] = 0NEWLINE for i in range(2, n+1):NEWLINE cost[i] = min(NEWLINE cost[i-2] + abs(h[i]-h[i-2]),NEWLINE cost[i-1] + abs(h[i]-h[i-1])NEWLINE )NEWLINE print(cost[n])NEWLINENEWLINE @staticmethodNEWLINE def d():NEWLINE n, m = map(int, sys.stdin.readline().split())NEWLINE uf = GeometryTopology.Graph(n); uf.init_dsu()NEWLINE queue = []NEWLINE for _ in range(m):NEWLINE a, b, y = map(int, sys.stdin.readline().split())NEWLINE heappush(queue, (-(2*y), a-1, b-1))NEWLINE q = int(sys.stdin.readline().rstrip())NEWLINE for i in range(q):NEWLINE v, y = map(int, sys.stdin.readline().split())NEWLINE heappush(queue, (-(2*y+1), v-1, i))NEWLINE res = [None] * qNEWLINE while queue:NEWLINE y, i, j = heappop(queue)NEWLINE if y&1:NEWLINE res[j] = uf.size[uf.find(i)]NEWLINE else:NEWLINE uf.unite(i, j)NEWLINE print(*res, sep='\n')NEWLINENEWLINE class ABC041:NEWLINE @staticmethodNEWLINE def a():NEWLINE s, i = sys.stdin.read().split()NEWLINE i = int(i)NEWLINE print(s[i-1])NEWLINENEWLINE @staticmethodNEWLINE def b():NEWLINE a, b, c = map(int, sys.stdin.readline().split())NEWLINE ans = a * b % MOD * c % MODNEWLINE print(ans)NEWLINENEWLINE @staticmethodNEWLINE def c():NEWLINE n, *a = map(int, sys.stdin.read().split())NEWLINE for i, h in sorted(enumerate(a), key=lambda x: -x[1]):NEWLINE print(i+1)NEWLINENEWLINE @staticmethodNEWLINE def d():NEWLINE n, _, *xy = 
map(int, sys.stdin.read().split())NEWLINE g = [0]*nNEWLINE for x, y in zip(*[iter(xy)]*2): g[x-1] |= 1<<(y-1)NEWLINE res = [0]*(1<<n); res[0] = 1NEWLINE for i in range(1<<n):NEWLINE for j in range(n):NEWLINE if i>>j&1^1: continueNEWLINE if not(g[j]&i): res[i] += res[i&~(1<<j)]NEWLINE print(res[-1])NEWLINENEWLINENEWLINENEWLINE class ABC042:NEWLINE @staticmethodNEWLINE def a():NEWLINE a = [int(x) for x in sys.stdin.readline().split()]NEWLINE c = Counter(a)NEWLINE print('YES' if c[5]==2 and c[7]==1 else 'NO')NEWLINENEWLINE @staticmethodNEWLINE def b():NEWLINE n, l, *s = sys.stdin.read().split()NEWLINE print(''.join(sorted(s)))NEWLINENEWLINE @staticmethodNEWLINE def c():NEWLINE n, k, *d = sys.stdin.read().split()NEWLINE l = len(n)NEWLINE ok = sorted(set(string.digits)-set(d))NEWLINE cand = [int(''.join(p)) for p in itertools.product(ok, repeat=l)] + [int(min(x for x in ok if x > '0')+min(ok)*l)]NEWLINE print(cand[bi_l(cand, int(n))])NEWLINENEWLINE @staticmethodNEWLINE def d():NEWLINE h, w, a, b = map(int, sys.stdin.read().split())NEWLINE combinations = Combinatorics.CombinationsMod(n=2*10**5, mod=MOD)NEWLINE i = np.arange(h-a, h)NEWLINE ng = np.sum(combinations(i+b-1, i) * combinations(h-i+w-b-2, h-1-i) % MOD)NEWLINE print((combinations(h+w-2, h-1)-ng)%MOD)NEWLINENEWLINENEWLINE class ABC043:NEWLINE @staticmethodNEWLINE def a():NEWLINE n = int(sys.stdin.readline().rstrip())NEWLINE print((1+n)*n//2)NEWLINENEWLINE @staticmethodNEWLINE def b():NEWLINE s = sys.stdin.readline().rstrip()NEWLINE t = ''NEWLINE for c in s:NEWLINE if c == 'B': t = t[:-1]NEWLINE else: t += cNEWLINE print(t)NEWLINENEWLINE @staticmethodNEWLINE def c():NEWLINE n, *a = map(int, sys.stdin.read().split())NEWLINE a = np.array(a)NEWLINE x = np.around(a.sum()/n).astype(int)NEWLINE print(np.sum((a-x)**2))NEWLINENEWLINE @staticmethodNEWLINE def d():NEWLINE s = sys.stdin.readline().rstrip()NEWLINE n = len(s)NEWLINE for i in range(n-1):NEWLINE if s[i] == s[i+1]: print(i+1, i+2); returnNEWLINE for i in 
range(n-2):NEWLINE if s[i] == s[i+2]: print(i+1, i+3); returnNEWLINE print(-1, -1)NEWLINENEWLINE class ABC044:NEWLINE @staticmethodNEWLINE def a():NEWLINE n, k, x, y = map(int, sys.stdin.read().split())NEWLINE print(min(n,k)*x + max(0,n-k)*y)NEWLINENEWLINE @staticmethodNEWLINE def b():NEWLINE res = set(c&1 for c in Counter(sys.stdin.readline().rstrip()).values())NEWLINE print('Yes' if len(res)==1 and res.pop()==0 else 'No')NEWLINENEWLINE @staticmethodNEWLINE def c():NEWLINE n, a, *x = map(int, sys.stdin.read().split())NEWLINE dp = np.zeros((n+1, 2501), dtype=np.int64); dp[0,0] = 1NEWLINE for v in x: dp[1:,v:] += dp[:-1,:-v]NEWLINE i = np.arange(1, n+1)NEWLINE print(dp[i, i*a].sum())NEWLINENEWLINE @staticmethodNEWLINE def c_2():NEWLINE n, a, *x = map(int, sys.stdin.read().split())NEWLINE for i in range(n): x[i] -= aNEWLINENEWLINE s = defaultdict(int); s[0] = 1NEWLINE for i in range(n):NEWLINE ns = s.copy()NEWLINE for k, v in s.items(): ns[k+x[i]] += vNEWLINE s = nsNEWLINE print(s[0]-1)NEWLINENEWLINENEWLINENEWLINE @staticmethodNEWLINE def d():NEWLINE passNEWLINENEWLINE class ABC045:NEWLINE @staticmethodNEWLINE def a():NEWLINE a, b, h = map(int, sys.stdin.read().split())NEWLINE print((a+b)*h//2)NEWLINENEWLINE @staticmethodNEWLINE def b():NEWLINE a, b, c = sys.stdin.read().split()NEWLINE d = {'a': a[::-1], 'b': b[::-1], 'c': c[::-1]}NEWLINE nx = 'a'NEWLINE while 1:NEWLINE if not d[nx]: print(nx.upper()); returnNEWLINE d[nx], nx = d[nx][:-1], d[nx][-1]NEWLINENEWLINE @staticmethodNEWLINE def c():NEWLINE def c(l): return pow(2, max(0,l-1))NEWLINE s = sys.stdin.readline().rstrip()NEWLINE n = len(s)NEWLINE print(sum(int(s[i:j+1])*c(i)*c(n-1-j) for i in range(n) for j in range(i, n)))NEWLINENEWLINE @staticmethodNEWLINE def d():NEWLINE h, w, n, *ab = map(int, sys.stdin.read().split())NEWLINE c = defaultdict(int)NEWLINE for y, x in zip(*[iter(ab)] * 2):NEWLINE y -= 1; x -= 1NEWLINE for dy, dx in itertools.product(range(-1, 2), repeat=2):NEWLINE i, j = y+dy, x+dxNEWLINE if 
not(0<i<h-1 and 0<j<w-1): continueNEWLINE c[(i,j)] += 1NEWLINE c = Counter(c.values())NEWLINE c[0] = (h-2)*(w-2)-sum(c.values())NEWLINE for i in range(10): print(c[i])NEWLINENEWLINENEWLINE class ABC046:NEWLINE @staticmethodNEWLINE def a():NEWLINE print(len(set(sys.stdin.readline().split())))NEWLINENEWLINE @staticmethodNEWLINE def b():NEWLINE n, k = map(int, sys.stdin.readline().split())NEWLINE print(k*pow(k-1, n-1))NEWLINENEWLINE @staticmethodNEWLINE def c():NEWLINE n, *xy = map(int, sys.stdin.read().split())NEWLINE a, b = 1, 1NEWLINE for x, y in zip(*[iter(xy)]*2):NEWLINE n = max((a+x-1)//x, (b+y-1)//y)NEWLINE a, b = n*x, n*yNEWLINE print(a+b)NEWLINENEWLINE @staticmethodNEWLINE def d():NEWLINE c = Counter(sys.stdin.readline().rstrip())NEWLINE print((c['g']-c['p'])//2)NEWLINENEWLINENEWLINENEWLINE class ABC047:NEWLINE @staticmethodNEWLINE def a():NEWLINE c = sorted(map(int, sys.stdin.readline().split()))NEWLINE print('Yes' if c[0]+c[1]==c[2] else 'No')NEWLINENEWLINE @staticmethodNEWLINE def b():NEWLINE w, h, n, *xyf = map(int, sys.stdin.read().split())NEWLINE l, r, d, u = 0, w, 0, hNEWLINE for x, y, f in zip(*[iter(xyf)]*3):NEWLINE if f == 1: l = max(l, x)NEWLINE if f == 2: r = min(r, x)NEWLINE if f == 3: d = max(d, y)NEWLINE if f == 4: u = min(u, y)NEWLINE print(max(0, r-l)*max(0, u-d))NEWLINENEWLINE @staticmethodNEWLINE def c():NEWLINE s = sys.stdin.readline().rstrip()NEWLINE print(sum(s[i]!=s[i+1] for i in range(len(s)-1)))NEWLINENEWLINE @staticmethodNEWLINE def d():NEWLINE mn, mx, c = inf, -1, 0NEWLINE n, t, *a = map(int, sys.stdin.read().split())NEWLINE for p in a:NEWLINE if p-mn == mx: c += 1NEWLINE elif p-mn>mx: mx, c = p-mn, 1NEWLINE mn = min(mn, p)NEWLINE print(c)NEWLINENEWLINE class ABC048:NEWLINE @staticmethodNEWLINE def a():NEWLINE def initial(s): return s[0].upper()NEWLINE print(''.join(map(initial, sys.stdin.readline().split())))NEWLINENEWLINE @staticmethodNEWLINE def b():NEWLINE a, b, x = map(int, sys.stdin.readline().split())NEWLINE print(b//x - 
(a-1)//x) # if a=0, (a-1)/x is rounded down to -1.NEWLINENEWLINE @staticmethodNEWLINE def c():NEWLINE n, x, *a = map(int, sys.stdin.read().split())NEWLINE cnt = prev = 0NEWLINE for i in range(n):NEWLINE d = prev+a[i] - xNEWLINE prev = a[i]NEWLINE if d <= 0: continueNEWLINE cnt += d; prev -= dNEWLINE print(cnt)NEWLINENEWLINE @staticmethodNEWLINE def d():NEWLINE s = sys.stdin.readline().rstrip()NEWLINE print('First' if len(s)&1^(s[0]==s[-1]) else 'Second')NEWLINENEWLINENEWLINE class ABC049:NEWLINE @staticmethodNEWLINE def a():NEWLINE vowels = set('aeiou')NEWLINE print('vowel' if sys.stdin.readline().rstrip() in vowels else 'consonant')NEWLINENEWLINE @staticmethodNEWLINE def b():NEWLINE h, w, *s = sys.stdin.read().split()NEWLINE for l in s:NEWLINE for _ in range(2): print(l)NEWLINENEWLINE @staticmethodNEWLINE def c():NEWLINE t = set('dream, dreamer, erase, eraser'.split(', '))NEWLINE def obtainable(s):NEWLINE while True:NEWLINE for i in range(5, 8):NEWLINE if s[-i:] in t:NEWLINE s = s[:-i]NEWLINE if not s: return TrueNEWLINE breakNEWLINE else: return FalseNEWLINENEWLINE s = sys.stdin.readline().rstrip()NEWLINE print('YES' if obtainable(s) else 'NO')NEWLINENEWLINE @staticmethodNEWLINE def d():NEWLINE n, k, l = map(int, sys.stdin.readline().split())NEWLINE uf1 = GeometryTopology.Graph(n); uf1.init_dsu()NEWLINE uf2 = GeometryTopology.Graph(n); uf2.init_dsu()NEWLINENEWLINE def add_edges(uf, m):NEWLINE for _ in range(m):NEWLINE x, y = map(int, sys.stdin.readline().split())NEWLINE x -= 1; y -= 1NEWLINE uf.unite(x, y)NEWLINENEWLINE add_edges(uf1, k); add_edges(uf2, l)NEWLINENEWLINE g = defaultdict(list)NEWLINE for i in range(n): g[(uf1.find(i), uf2.find(i))].append(i)NEWLINENEWLINE res = [None] * nNEWLINE for a in g:NEWLINE for i in g[a]: res[i] = len(g[a])NEWLINENEWLINE print(*res, sep=' ')NEWLINENEWLINENEWLINE class ABC050:NEWLINE @staticmethodNEWLINE def a():NEWLINE print(eval(sys.stdin.readline().rstrip()))NEWLINENEWLINE @staticmethodNEWLINE def b():NEWLINE n = 
int(sys.stdin.readline().rstrip())NEWLINE t = np.array(sys.stdin.readline().split(), dtype=np.int64)NEWLINE m, *px = map(int, sys.stdin.read().split())NEWLINE p, x = np.array(px).reshape(m, 2).T; p -= 1NEWLINE print(*(t.sum()+x-t[p]), sep='\n')NEWLINENEWLINE @staticmethodNEWLINE def c():NEWLINE n, *a = map(int, sys.stdin.read().split())NEWLINE a = Counter(a)NEWLINE if n&1 and not(a[0]==1 and all(a[i]==2 for i in range(2, n, 2))):NEWLINE print(0); returnNEWLINE if ~n&1 and any(a[i]!= 2 for i in range(1, n, 2)):NEWLINE print(0); returnNEWLINE print(pow(2, n//2, MOD))NEWLINENEWLINE @staticmethodNEWLINE def d(): passNEWLINENEWLINENEWLINE class ABC051:NEWLINE @staticmethodNEWLINE def a():NEWLINE print(' '.join(sys.stdin.readline().rstrip().split(',')))NEWLINENEWLINE @staticmethodNEWLINE def b():NEWLINE k, s = map(int, sys.stdin.readline().split())NEWLINE tot = 0NEWLINE for x in range(k+1):NEWLINE if s-x < 0: breakNEWLINE if s-x > 2*k: continueNEWLINE tot += s-x+1 if s-x<=k else 2*k-(s-x)+1NEWLINE print(tot)NEWLINENEWLINE @staticmethodNEWLINE def c():NEWLINE x1, y1, x2, y2 = map(int, sys.stdin.readline().split())NEWLINE dx, dy = x2-x1, y2-y1NEWLINE print('U'*dy+'R'*(dx+1)+'D'*(dy+1)+'L'*(dx+1)+'U'+'L'+'U'*(dy+1)+'R'*(dx+1)+'D'*(dy+1)+'L'*dx)NEWLINENEWLINE @staticmethodNEWLINE def d():NEWLINE n, m, *abc = map(int, sys.stdin.read().split())NEWLINE x = np.arange(n)NEWLINE a, b, c = np.array(abc).reshape(m, 3).T; a -= 1; b -= 1NEWLINE d = shortest_path(csr_matrix((c, (a, b)), shape=(n, n)), method='FW', directed=False).astype(np.int64)NEWLINE print(m-np.any(d[x,a[:,None]]+c[:,None]==d[x,b[:,None]], axis=1).sum())NEWLINENEWLINENEWLINE class ABC052:NEWLINE @staticmethodNEWLINE def a():NEWLINE a, b, c, d = map(int, sys.stdin.readline().split())NEWLINE print(max(a*b, c*d))NEWLINENEWLINE @staticmethodNEWLINE def b():NEWLINE n, s = sys.stdin.read().split()NEWLINE n = int(n)NEWLINE a = [0] * (n+1)NEWLINE for i in range(n):NEWLINE a[i+1] = a[i] + (1 if s[i]=='I' else -1)NEWLINE 
print(max(a))NEWLINENEWLINE @staticmethodNEWLINE def c():NEWLINE n = int(sys.stdin.readline().rstrip())NEWLINE pn = NumberTheory.PrimeNumbers(n)NEWLINE s = 1NEWLINE for c in pn.factorize_factorial(n).values():NEWLINE s = s*(c+1)%MODNEWLINE print(s)NEWLINENEWLINE @staticmethodNEWLINE def d():NEWLINE n, a, b, *x = map(int, sys.stdin.read().split())NEWLINE x = np.array(x)NEWLINE print(np.minimum((x[1:]-x[:-1])*a, b).sum())NEWLINENEWLINE class ABC053:NEWLINE @staticmethodNEWLINE def a():NEWLINE print('ABC' if int(sys.stdin.readline().rstrip())<1200 else 'ARC')NEWLINENEWLINE @staticmethodNEWLINE def b():NEWLINE s = sys.stdin.readline().rstrip()NEWLINE print(len(s)-s.find('A')-s[::-1].find('Z'))NEWLINENEWLINE @staticmethodNEWLINE def c():NEWLINE x = int(sys.stdin.readline().rstrip())NEWLINE q, r = divmod(x, 11)NEWLINE print(2*q + (r+5)//6)NEWLINENEWLINE @staticmethodNEWLINE def d():NEWLINE n, *a = map(int, sys.stdin.read().split())NEWLINE print(n-((n-len(set(a))+1)//2*2))NEWLINENEWLINE class ABC054:NEWLINE @staticmethodNEWLINE def a():NEWLINE def f(x):NEWLINE return (x+11)%13NEWLINE a, b = map(int, sys.stdin.readline().split())NEWLINE print('Alice' if f(a)>f(b) else 'Bob' if f(a)<f(b) else 'Draw')NEWLINENEWLINE @staticmethodNEWLINE def b():NEWLINE n, m = map(int, sys.stdin.readline().split())NEWLINE a = [sys.stdin.readline().rstrip() for _ in range(n)]NEWLINE b = [sys.stdin.readline().rstrip() for _ in range(m)]NEWLINENEWLINE for i in range(n-m+1):NEWLINE for j in range(n-m+1):NEWLINE for y in range(m):NEWLINE for x in range(m):NEWLINE if a[i+y][j+x]==b[y][x]: continueNEWLINE breakNEWLINE else: continueNEWLINE breakNEWLINE else: print('Yes'); returnNEWLINE print('No')NEWLINENEWLINE @staticmethodNEWLINE def c():NEWLINE n, m, *ab = map(int, sys.stdin.read().split())NEWLINE g = GeometryTopology.Graph(n)NEWLINE for a, b in zip(*[iter(ab)]*2):NEWLINE a -= 1; b -= 1NEWLINE g.add_edge(a,b)NEWLINE g.add_edge(b,a)NEWLINENEWLINE cnt = 0NEWLINE stack = [(0, 1)]NEWLINE while 
stack:NEWLINE u, s = stack.pop()NEWLINE if s==(1<<n)-1: cnt+=1; continueNEWLINE for v in g.edges[u]:NEWLINE if s>>v&1: continueNEWLINE stack.append((v, s|1<<v))NEWLINE print(cnt)NEWLINENEWLINE @staticmethodNEWLINE def d():NEWLINE n, ma, mb, *abc = map(int, sys.stdin.read().split())NEWLINE dp = np.full((401, 401), np.inf); dp[0,0] = 0NEWLINE for a, b, c in zip(*[iter(abc)]*3):NEWLINE np.minimum(dp[a:, b:], dp[:-a, :-b]+c, out=dp[a:, b:])NEWLINE i = np.arange(1, 400//max(ma,mb)+1)NEWLINE res = dp[i*ma, i*mb].min()NEWLINE print(int(res) if res != np.inf else -1)NEWLINENEWLINENEWLINE class ABC055:NEWLINE @staticmethodNEWLINE def a():NEWLINE n = int(sys.stdin.readline().rstrip())NEWLINE print(800*n - 200*(n//15))NEWLINENEWLINE @staticmethodNEWLINE def b():NEWLINE n = int(sys.stdin.readline().rstrip())NEWLINE fac, _ = Algebra.generate_fac_ifac(n, MOD)NEWLINE print(fac[-1])NEWLINENEWLINE @staticmethodNEWLINE def c():NEWLINE n, m = map(int, sys.stdin.readline().split())NEWLINE print(m//2 if m<=2*n else n+(m-2*n)//4)NEWLINENEWLINE @staticmethodNEWLINE def d():NEWLINE n, s = sys.stdin.read().split(); n = int(n)NEWLINE s = [1 if c=='o' else 0 for c in s]NEWLINE def possible(t):NEWLINE for i in range(1, n-1): t[i+1] = t[i-1]^t[i]^s[i]NEWLINE return ((t[0]^s[0]^t[1]^t[-1])|(t[-1]^s[-1]^t[-2]^t[0]))^1NEWLINENEWLINE for fst in [(1,0), (0,1), (1,1), (0,0)]:NEWLINE t = [None]*n; t[0], t[1] = fst[0], fst[1]NEWLINE if possible(t): print(''.join('S' if x==1 else 'W' for x in t)); returnNEWLINE print(-1)NEWLINENEWLINENEWLINE class ABC056:NEWLINE @staticmethodNEWLINE def a():NEWLINE def to_i(c):NEWLINE return 1 if c=='H' else 0NEWLINE a, b = map(to_i, sys.stdin.readline().split())NEWLINE print('D' if a^b else 'H')NEWLINENEWLINE @staticmethodNEWLINE def b():NEWLINE w, a, b = map(int, sys.stdin.readline().split())NEWLINE if a>b: a,b = b,aNEWLINE print(max(b-(a+w), 0))NEWLINENEWLINE @staticmethodNEWLINE def c():NEWLINE x = int(sys.stdin.readline().rstrip())NEWLINE 
print(int(math.ceil(math.sqrt(2*x+1/4)-.5)))NEWLINENEWLINENEWLINE @staticmethodNEWLINE def d():NEWLINE n, k, *a = map(int, sys.stdin.read().split())NEWLINE a = sorted(min(x,k) for x in a)NEWLINENEWLINE def necessary(i):NEWLINE dp = np.zeros(k, dtype=np.bool); dp[0] = TrueNEWLINE for j in range(n):NEWLINE if j==i: continueNEWLINE dp[a[j]:] += dp[:-a[j]]NEWLINE return np.any(dp[k-a[i]:])NEWLINENEWLINE def binary_search():NEWLINE lo, hi = -1, nNEWLINE while hi-lo > 1:NEWLINE i = (lo+hi)//2NEWLINE if necessary(i): hi = iNEWLINE else: lo = iNEWLINE return hiNEWLINENEWLINE print(binary_search())NEWLINENEWLINENEWLINENEWLINE class ABC057:NEWLINE @staticmethodNEWLINE def a():NEWLINE a, b = map(int, sys.stdin.readline().split())NEWLINE print((a+b)%24)NEWLINENEWLINE @staticmethodNEWLINE def b():NEWLINE n, m, *I = map(int, sys.stdin.read().split())NEWLINE I = np.array(I).reshape(-1, 2)NEWLINE ab, cd = I[:n], I[n:]NEWLINE print(*(np.argmin(np.absolute(ab[:,None]-cd).sum(axis=-1), axis=-1)+1), sep='\n')NEWLINENEWLINE @staticmethodNEWLINE def c():NEWLINE n = int(sys.stdin.readline().rstrip())NEWLINE divs = NumberTheory.find_divisors(n)NEWLINE print(len(str(divs[bi_l(divs, math.sqrt(n))])))NEWLINENEWLINENEWLINE @staticmethodNEWLINE def d():NEWLINE c = Combinatorics.chooseNEWLINE n, a, b, *v = map(int, sys.stdin.read().split())NEWLINE v.sort()NEWLINE print(sum(v[-a:])/a)NEWLINE l, r = bi_l(v, v[-a]), bi_r(v, v[-a])NEWLINE print(sum(c(r-l, i) for i in range(r-n+a, r-max(l,n-b)+1)) if r==n else c(r-l, r-n+a))NEWLINENEWLINENEWLINE class ABC058:NEWLINE @staticmethodNEWLINE def a():NEWLINE a, b, c = map(int, sys.stdin.readline().split())NEWLINE print('YES' if c-b==b-a else 'NO')NEWLINENEWLINE @staticmethodNEWLINE def b():NEWLINE s, t = sys.stdin.read().split()NEWLINE a = ''NEWLINE for i in range(len(t)): a += s[i]+t[i]NEWLINE if len(s)>len(t): a += s[-1]NEWLINE print(a)NEWLINENEWLINE @staticmethodNEWLINE def c():NEWLINE n, *s = sys.stdin.read().split()NEWLINE res = {c: 100 for c in 
string.ascii_lowercase}NEWLINE for counter in map(Counter, s):NEWLINE for c, x, in res.items(): res[c] = min(x, counter[c])NEWLINE t = ''NEWLINE for c, x in sorted(res.items()): t += c*xNEWLINE print(t)NEWLINENEWLINE @staticmethodNEWLINE def d():NEWLINE n, m, *xy = map(int, sys.stdin.read().split())NEWLINE x, y = np.array(xy[:n]), np.array(xy[n:])NEWLINE print((x*(np.arange(n)+1)-np.cumsum(x)).sum()%MOD*((y*(np.arange(m)+1)-np.cumsum(y)).sum()%MOD)%MOD)NEWLINENEWLINE class ABC059:NEWLINE @staticmethodNEWLINE def a():NEWLINE def initial(s): return s[0].upper()NEWLINE print(''.join(map(initial, sys.stdin.readline().split())))NEWLINENEWLINENEWLINE @staticmethodNEWLINE def b():NEWLINE a, b = sys.stdin.read().split()NEWLINE la, lb = len(a), len(b)NEWLINE print('GREATER' if la>lb else 'LESS' if la<lb else 'GREATER' if a>b else 'LESS' if a<b else 'EQUAL')NEWLINENEWLINE @staticmethodNEWLINE def c():NEWLINE n, *a = map(int, sys.stdin.read().split())NEWLINE c = s = 0NEWLINE for i in range(n):NEWLINE s += a[i]NEWLINE if i&1 and s>=0: c += s+1; s=-1NEWLINE elif i&1^1 and s<=0: c += 1-s; s=1NEWLINE c1 = cNEWLINE c = s = 0NEWLINE for i in range(n):NEWLINE s += a[i]NEWLINE if i&1 and s<=0: c += 1-s; s=1NEWLINE elif i&1^1 and s>=0: c += s+1; s=-1NEWLINE c2 = cNEWLINE print(min(c1, c2))NEWLINENEWLINENEWLINE @staticmethodNEWLINE def d():NEWLINE x, y = map(int, sys.stdin.readline().split())NEWLINE print('Brown' if abs(x-y)<=1 else 'Alice')NEWLINENEWLINENEWLINE class ABC060:NEWLINE @staticmethodNEWLINE def a():NEWLINE a, b, c = sys.stdin.readline().split()NEWLINE print('YES' if a[-1]==b[0] and b[-1]==c[0] else 'NO')NEWLINENEWLINE @staticmethodNEWLINE def b():NEWLINE a, b, c = map(int, sys.stdin.readline().split())NEWLINE print('NO' if c%NumberTheory.gcd(a,b) else 'YES')NEWLINENEWLINENEWLINE @staticmethodNEWLINE def c():NEWLINE n, t, *a = map(int, sys.stdin.read().split())NEWLINE print(sum(min(a[i+1]-a[i], t) for i in range(n-1))+t)NEWLINENEWLINENEWLINE @staticmethodNEWLINE def 
d():NEWLINE n, W, *wv = map(int, sys.stdin.read().split())NEWLINE v, w0 = [[] for _ in range(4)], wv[0]NEWLINE for a, b in zip(*[iter(wv)]*2): v[a-w0].append(b)NEWLINE for i in range(4):NEWLINE v[i] = (sorted(v[i])+[0])[::-1]NEWLINE *v[i], = itertools.accumulate(v[i])NEWLINE global res; res = 0NEWLINE @lru_cache(maxsize=None)NEWLINE def dfs(i,j,k):NEWLINE if i>=len(v[0]) or j>=len(v[1]) or k>=len(v[2]): returnNEWLINE w = j+2*k + (i+j+k)*w0NEWLINE if w > W: returnNEWLINE l = min(len(v[3])-1, (W-w)//(w0+3))NEWLINE global res; res = max(res, v[0][i]+v[1][j]+v[2][k]+v[3][l])NEWLINE dfs(i+1,j,k); dfs(i,j+1,k); dfs(i,j,k+1)NEWLINE dfs(0,0,0)NEWLINE print(res)NEWLINENEWLINE class ABC061:NEWLINE @staticmethodNEWLINE def a():NEWLINE a, b, c = map(int, sys.stdin.readline().split())NEWLINE print('Yes' if a <= c <= b else 'No')NEWLINENEWLINE @staticmethodNEWLINE def b():NEWLINE n, m, *ab = map(int, sys.stdin.read().split())NEWLINE ab = np.array(ab) - 1NEWLINE g = np.zeros(n, dtype=np.int32)NEWLINE np.add.at(g, ab, 1)NEWLINE print(*g, sep='\n')NEWLINENEWLINENEWLINE @staticmethodNEWLINE def c():NEWLINE n, k, *ab = map(int, sys.stdin.read().split())NEWLINE ab = np.transpose(np.array(ab).reshape(n,2))NEWLINE a, b = ab[:, np.argsort(ab[0])]NEWLINE print(a[np.cumsum(b)>=k][0])NEWLINENEWLINE @staticmethodNEWLINE def d():NEWLINE n, m, *abc = map(int, sys.stdin.read().split())NEWLINE a, b, c = np.array(abc).reshape(m, 3).T; a -= 1; b -= 1; c *= -1NEWLINE g = csr_matrix(([1]*(m+1), (np.append(a, n-1), np.append(b, 0))), (n, n))NEWLINE _, labels = connected_components(g, connection='strong')NEWLINE bl = (labels[a]==labels[0]) & (labels[b]==labels[0])NEWLINE g = csr_matrix((c[bl], (a[bl], b[bl])), (n, n))NEWLINE try: print(-shortest_path(g, method='BF', directed=True, indices=0)[-1].astype(int))NEWLINE except: print('inf')NEWLINENEWLINE @staticmethodNEWLINE def d_2():NEWLINE n, m, *abc = map(int, sys.stdin.read().split())NEWLINE a, b, c = np.array(abc).reshape(m, 3).T; a -= 1; b -= 1; c 
*= -1NEWLINE d = np.full(n, np.inf); d[0] = 0NEWLINE for _ in range(n-1): np.minimum.at(d, b, d[a]+c)NEWLINE neg_cycle = np.zeros(n, dtype=np.bool)NEWLINE for _ in range(n):NEWLINE np.logical_or.at(neg_cycle, b, d[a]+c<d[b])NEWLINE np.minimum.at(d, b, d[a]+c)NEWLINE print(inf if neg_cycle[-1] else -d[-1].astype(int))NEWLINENEWLINENEWLINE class ABC062:NEWLINE @staticmethodNEWLINE def a():NEWLINE g = [0, 2, 0, 1, 0, 1, 0, 0, 1, 0, 1, 0]NEWLINE x, y = map(int, sys.stdin.readline().split())NEWLINE print('Yes' if g[x-1]==g[y-1] else 'No')NEWLINENEWLINE @staticmethodNEWLINE def b():NEWLINE h, w = map(int, sys.stdin.readline().split())NEWLINE a = np.array([list(s) for s in sys.stdin.read().split()], dtype='U1')NEWLINE a = np.pad(a, pad_width=1, constant_values='#')NEWLINE for s in a: print(''.join(s))NEWLINENEWLINE @staticmethodNEWLINE def c():NEWLINE h, w = map(int, sys.stdin.readline().split())NEWLINE if h*w%3==0: print(0); returnNEWLINE def minimize(h, w):NEWLINE return min(h, *(s[-1]-s[0] for x in range(w//3, w//3+2) for s in (sorted([h*x, h//2*(w-x), (h+1)//2*(w-x)]),)))NEWLINENEWLINE print(min(minimize(h,w), minimize(w,h)))NEWLINENEWLINENEWLINE @staticmethodNEWLINE def d():NEWLINE n, *a = map(int, sys.stdin.read().split())NEWLINE a = np.array(a)NEWLINENEWLINE def optimize(a):NEWLINE a = list(a)NEWLINE l, r = a[:n], a[n:]; heapify(l)NEWLINE s = [None]*(n+1); s[0] = sum(l)NEWLINE for i in range(n):NEWLINE x = heappop(l)NEWLINE heappush(l, max(x, r[i]))NEWLINE s[i+1] = s[i]+max(0, r[i]-x)NEWLINE return np.array(s)NEWLINENEWLINE print((optimize(a[:2*n]) + optimize(-a[-1:n-1:-1])[::-1]).max())NEWLINENEWLINE class ABC063:NEWLINE @staticmethodNEWLINE def a():NEWLINE a = sum(map(int, sys.stdin.readline().split()))NEWLINE print('error' if a>=10 else a)NEWLINENEWLINENEWLINE @staticmethodNEWLINE def b():NEWLINE s = sys.stdin.readline().rstrip()NEWLINE print('yes' if len(set(s))==len(s) else 'no')NEWLINENEWLINE @staticmethodNEWLINE def c():NEWLINE n, *a = map(int, 
sys.stdin.read().split())NEWLINE a = np.array(a)NEWLINE s = a.sum()NEWLINE if s%10: print(s)NEWLINE elif not np.count_nonzero(a%10): print(0)NEWLINE else: print(s-a[a%10!=0].min())NEWLINENEWLINENEWLINENEWLINE @staticmethodNEWLINE def d():NEWLINE n, a, b, *h = map(int, sys.stdin.read().split())NEWLINE h = np.array(h)NEWLINE d = a-bNEWLINENEWLINE def possible(c):NEWLINE hh = h.copy()NEWLINE np.maximum(hh-b*c, 0, out=hh)NEWLINE return ((hh+d-1)//d).sum() <= cNEWLINENEWLINE def binary_search():NEWLINE lo, hi = 0, 10**9NEWLINE while hi-lo > 1:NEWLINE c = (lo+hi)//2NEWLINE if possible(c): hi = cNEWLINE else: lo = cNEWLINE return hiNEWLINENEWLINE print(binary_search())NEWLINENEWLINE class ABC064:NEWLINE @staticmethodNEWLINE def a():NEWLINE r, g, b = map(int, sys.stdin.readline().split())NEWLINE print('NO' if (10*g+b)%4 else 'YES')NEWLINENEWLINE @staticmethodNEWLINE def b():NEWLINE n, *a = map(int, sys.stdin.read().split())NEWLINE a.sort()NEWLINE print(a[-1]-a[0])NEWLINENEWLINE @staticmethodNEWLINE def c():NEWLINE n, *a = map(int, sys.stdin.read().split())NEWLINE a = np.bincount(np.minimum(np.array(a)//400, 8), minlength=9)NEWLINE mx = np.count_nonzero(a[:-1]) + a[-1]NEWLINE mn = max(mx-a[-1], 1)NEWLINE print(mn, mx)NEWLINENEWLINE @staticmethodNEWLINE def d():NEWLINE n, s = sys.stdin.read().split()NEWLINE l = r = 0NEWLINE for c in s:NEWLINE if c=='(': r += 1NEWLINE else:NEWLINE if r==0: l += 1NEWLINE else: r -= 1NEWLINE print('('*l+s+')'*r)NEWLINENEWLINE class ABC065:NEWLINE @staticmethodNEWLINE def a():NEWLINE x, a, b = map(int, sys.stdin.readline().split())NEWLINE y = -a+bNEWLINE print('delicious' if y<=0 else 'safe' if y<=x else 'dangerous')NEWLINENEWLINE @staticmethodNEWLINE def b():NEWLINE n, *a = [int(x)-1 for x in sys.stdin.read().split()]NEWLINE i = 0NEWLINE for c in range(n):NEWLINE i = a[i]NEWLINE if i == 1: print(c+1); returnNEWLINE print(-1)NEWLINENEWLINE @staticmethodNEWLINE def c():NEWLINE n, m = map(int, sys.stdin.readline().split())NEWLINE d = 
abs(n-m)NEWLINE if d >= 2: print(0); returnNEWLINE fac, _ = Algebra.generate_fac_ifac(10**5)NEWLINE print(fac[n]*fac[m]*(1 if d else 2)%MOD)NEWLINENEWLINE @staticmethodNEWLINE def d():NEWLINE n, *xy = map(int, sys.stdin.read().split())NEWLINE x, y = np.array(xy).reshape(n,2).TNEWLINE i = np.argsort(x); ax, bx, cx = i[:-1], i[1:], x[i[1:],]-x[i[:-1]]NEWLINE i = np.argsort(y); ay, by, cy = i[:-1], i[1:], y[i[1:],]-y[i[:-1]]NEWLINE e = np.vstack([np.hstack([ax,ay]),np.hstack([bx,by]),np.hstack([cx,cy])])NEWLINE e = e[:,np.argsort(e[-1])]NEWLINE _, i = np.unique(e[:-1], return_index=True, axis=1)NEWLINE a, b, c = e[:,i]NEWLINE print(minimum_spanning_tree(csr_matrix((c,(a,b)), (n,n))).astype(np.int64).sum())NEWLINENEWLINENEWLINE @staticmethodNEWLINE def d_2():NEWLINE n, *xy = map(int, sys.stdin.read().split())NEWLINE x, y = xy[::2], xy[1::2]NEWLINE g = GeometryTopology.Graph(n)NEWLINE def make(a):NEWLINE b = sorted(enumerate(a), key=lambda x: x[1])NEWLINE for i in range(n-1):NEWLINE u, v, w = b[i][0], b[i+1][0], b[i+1][1]-b[i][1]NEWLINE for u, v in [(v,u), (u,v)]:NEWLINE if not v in g.edges[u]: g.add_edge(u, v, weight=w)NEWLINE else: g.edges[u][v].weight = min(g.edges[u][v].weight, w)NEWLINE make(x); make(y)NEWLINE _, d = g.kruskal()NEWLINE # _, d = g.prim()NEWLINE # _, d = g.boruvka()NEWLINE print(d)NEWLINENEWLINENEWLINENEWLINE class ABC066:NEWLINE @staticmethodNEWLINE def a():NEWLINE print(sum(sorted(map(int, sys.stdin.readline().split()))[:-1]))NEWLINENEWLINE @staticmethodNEWLINE def b():NEWLINE s = sys.stdin.readline().rstrip()NEWLINE def f(s):NEWLINE n = len(s)//2NEWLINE return s[:n] == s[n:]NEWLINE for i in range(len(s)-2, 0, -2):NEWLINE if f(s[:i]): print(i); returnNEWLINENEWLINE @staticmethodNEWLINE def c():NEWLINE n, *a = map(int, sys.stdin.read().split())NEWLINE b = deque()NEWLINE for i in range(n):NEWLINE if i&1: b.appendleft(a[i])NEWLINE else: b.append(a[i])NEWLINE if n&1: b.reverse()NEWLINE print(*b)NEWLINENEWLINE @staticmethodNEWLINE def d():NEWLINE n, *a 
= map(int, sys.stdin.read().split())NEWLINE tmp = [None]*(n+1)NEWLINE for i in range(n+1):NEWLINE if tmp[a[i]] is not None: d=tmp[a[i]]+n-i; breakNEWLINE tmp[a[i]] = iNEWLINE k = np.arange(1, n+2)NEWLINE c = Combinatorics.CombinationsMod(n+1, MOD)NEWLINE print(*((c(n+1,k)-c(d,k-1))%MOD), sep='\n')NEWLINENEWLINENEWLINE class ABC067:NEWLINE @staticmethodNEWLINE def a():NEWLINE a, b = map(int, sys.stdin.readline().split())NEWLINE print('Impossible' if a%3 and b%3 and (a+b)%3 else 'Possible')NEWLINENEWLINE @staticmethodNEWLINE def b():NEWLINE n, k, *l = map(int, sys.stdin.read().split())NEWLINE print(sum(sorted(l)[-k:]))NEWLINENEWLINENEWLINE @staticmethodNEWLINE def c():NEWLINE n, *a = map(int, sys.stdin.read().split())NEWLINE a = np.array(a)NEWLINE np.cumsum(a, out=a)NEWLINE print(np.absolute(a[-1]-2*a[:-1]).min())NEWLINENEWLINENEWLINE @staticmethodNEWLINE def d():NEWLINE n, *ab = map(int, sys.stdin.read().split())NEWLINE g = GeometryTopology.Graph(n)NEWLINE for a, b in zip(*[iter(ab)]*2):NEWLINE a -= 1; b -= 1NEWLINE g.add_edge(a, b); g.add_edge(b,a)NEWLINE d1, d2 = g.bfs(0), g.bfs(n-1)NEWLINE print('Fennec' if sum(d1[i]<=d2[i] for i in range(n)) > n//2 else 'Snuke')NEWLINENEWLINE class ABC068:NEWLINE @staticmethodNEWLINE def a():NEWLINE print('ABC'+sys.stdin.readline().rstrip())NEWLINENEWLINE @staticmethodNEWLINE def b():NEWLINE print(2**math.floor(math.log2(int(sys.stdin.readline().rstrip()))))NEWLINENEWLINE @staticmethodNEWLINE def c():NEWLINE n, m, *ab = map(int, sys.stdin.read().split())NEWLINE a, b = np.array(ab).reshape(m, 2).TNEWLINE d = shortest_path(csr_matrix(([1]*m, (a-1, b-1)), (n,n)), method='D', directed=False, indices=0).astype(np.int32)NEWLINE print('POSSIBLE' if d[-1]==2 else 'IMPOSSIBLE')NEWLINENEWLINE @staticmethodNEWLINE def d():NEWLINE k = int(sys.stdin.readline().rstrip())NEWLINE n = 50; print(n)NEWLINE q,r = divmod(k,n); a = np.arange(n-1,-1,-1)+q; a[:r]+=1; print(*a)NEWLINENEWLINE class ABC069:NEWLINE @staticmethodNEWLINE def a():NEWLINE n, m 
= map(int, sys.stdin.readline().split())NEWLINE print((n-1)*(m-1))NEWLINENEWLINENEWLINE @staticmethodNEWLINE def b():NEWLINE passNEWLINENEWLINE @staticmethodNEWLINE def c():NEWLINE passNEWLINENEWLINENEWLINE @staticmethodNEWLINE def d():NEWLINE h, w, n, *a = map(int, sys.stdin.read().split())NEWLINE c = [i+1 for i in range(n) for j in range(a[i])]NEWLINE for i in range(h):NEWLINE row = c[i*w:(i+1)*w]NEWLINE if i&1: row = row[::-1]NEWLINE print(*row)NEWLINENEWLINE class ABC070:NEWLINE @staticmethodNEWLINE def d():NEWLINE n = int(sys.stdin.readline().rstrip())NEWLINE g = GeometryTopology.Graph(n)NEWLINE for _ in range(n-1):NEWLINE a, b, c = map(int, sys.stdin.readline().split()); a-=1; b-=1NEWLINE g.add_edge(a, b, weight=c); g.add_edge(b, a, weight=c)NEWLINE q, k = map(int, sys.stdin.readline().split())NEWLINE d = g.bfs(k-1)NEWLINE for _ in range(q):NEWLINE x, y = map(int, sys.stdin.readline().split()); x-=1; y-=1NEWLINE print(d[x]+d[y])NEWLINENEWLINE class ABC071:NEWLINE @staticmethodNEWLINE def d():NEWLINE n, *s = sys.stdin.read().split(); n = int(n)NEWLINE s = list(zip(*s))NEWLINE dp = [0]*n; dp[0] = 3 if s[0][0]==s[0][1] else 6NEWLINE for i in range(1,n):NEWLINE dp[i] = dp[i-1]NEWLINE if s[i][0]==s[i-1][0]: continueNEWLINE dp[i] *= 2 if s[i-1][0]==s[i-1][1] else 3 if s[i][0]!=s[i][1] else 1NEWLINE dp[i] %= MODNEWLINE print(dp[-1])NEWLINENEWLINE class ABC072:NEWLINE @staticmethodNEWLINE def d():NEWLINE n, *p = map(int, sys.stdin.read().split())NEWLINE p += [-1]NEWLINE cnt, i = 0, 0NEWLINE while i < n:NEWLINE if p[i]==i+1:NEWLINE cnt += p[i]==i+1NEWLINE if p[i+1]==i+2: i += 1NEWLINE i += 1NEWLINE print(cnt)NEWLINENEWLINENEWLINE class ABC073:NEWLINE @staticmethodNEWLINE def a():NEWLINE passNEWLINENEWLINE @staticmethodNEWLINE def b():NEWLINE passNEWLINENEWLINE @staticmethodNEWLINE def c():NEWLINE passNEWLINENEWLINE @staticmethodNEWLINE def d():NEWLINE n, m, r, *I = map(int, sys.stdin.read().split())NEWLINE I = np.array(I)NEWLINE a, b, c = I[r:].reshape(m,3).TNEWLINE d 
= shortest_path(csr_matrix((c, (a-1, b-1)), (n,n)), method='FW', directed=False).astype(np.int32)NEWLINE r = np.array([*itertools.permutations(I[:r]-1)])NEWLINE print((d[r[:,:-1], r[:,1:]].sum(axis=1)).min())NEWLINENEWLINE class ABC074:NEWLINE @staticmethodNEWLINE def a():NEWLINE passNEWLINENEWLINE @staticmethodNEWLINE def b():NEWLINE passNEWLINENEWLINE @staticmethodNEWLINE def c():NEWLINE passNEWLINENEWLINE @staticmethodNEWLINE def d():NEWLINE n, *a = map(int, sys.stdin.read().split())NEWLINE a = np.array(a, dtype=np.int32).reshape(n,n)NEWLINE b = shortest_path(a, method='FW').astype(np.int32)NEWLINE if (b < a).any(): print(-1); returnNEWLINE np.fill_diagonal(b, 10**9)NEWLINE a[np.any(b[:,None]+b<=a[:,:,None], axis=2)] = 0NEWLINE print(a.sum()//2)NEWLINENEWLINENEWLINENEWLINE class ABC075:NEWLINE @staticmethodNEWLINE def a():NEWLINE passNEWLINENEWLINE @staticmethodNEWLINE def b():NEWLINE passNEWLINENEWLINE @staticmethodNEWLINE def c():NEWLINE passNEWLINENEWLINE @staticmethodNEWLINE def d():NEWLINE n, k, *xy = map(int, sys.stdin.read().split())NEWLINE xy = np.array(xy).reshape(n,2)NEWLINE x_y = xy.copy()[np.argsort(xy[:,0])]NEWLINE y_x = xy.copy()[np.argsort(xy[:,1])]NEWLINE comb = np.array([*itertools.combinations(range(n),2)])NEWLINE i1, i2 = comb.TNEWLINE j1, j2 = comb[None,:].TNEWLINE s = (y_x[:,1][i2]-y_x[:,1][i1]) * (x_y[:,0][j2]-x_y[:,0][j1])NEWLINE c = np.zeros((n+1,n+1), dtype=np.int64)NEWLINE for i in range(n): c[i+1, 1:] += c[i, 1:] + (y_x[i,0]<=x_y[:,0])NEWLINE a = c[i2+1, j2+1] - c[i2+1, j1] - c[i1, j2+1] + c[i1, j1]NEWLINE print(s[a>=k].min())NEWLINENEWLINENEWLINE class ABC076:NEWLINE @staticmethodNEWLINE def d():NEWLINE n, *tv = map(int, sys.stdin.read().split())NEWLINE t, v = np.array(tv).reshape(2, n)NEWLINE t = np.pad(t, pad_width=[2,1], constant_values=0)NEWLINE np.cumsum(t, out=t)NEWLINE l, r = t[:-1], t[1:]NEWLINE v = np.pad(v, pad_width=[1,1], constant_values=0)NEWLINE x = np.arange(0, r[-1]+0.1, 0.5, dtype=np.float32)[:,None]NEWLINE # y = 
np.stack([v-(x-l), np.zeros(r[-1]*2+1, dtype=np.float32)[:,None]+v, v+(x-r)]).max(axis=0).min(axis=1)NEWLINE mx = v-(x-l); np.maximum(mx, v, out=mx); np.maximum(mx, v+(x-r), out=mx)NEWLINE y = mx.min(axis=1)NEWLINE print(((y[:-1]+y[1:])/4).sum())NEWLINENEWLINENEWLINE class ABC077:NEWLINE @staticmethodNEWLINE def d():NEWLINE k = int(sys.stdin.readline().rstrip())NEWLINE g = GeometryTopology.Graph(k)NEWLINE for i in range(k):NEWLINE g.add_edge(i, i*10%k, weight=0)NEWLINE g.add_edge(i, (i+1)%k, update=False, weight=1)NEWLINE print(1+g.bfs01(1)[0])NEWLINENEWLINENEWLINE class ABC078:NEWLINE @staticmethodNEWLINE def d():NEWLINE n, z, w, *a = map(int, sys.stdin.read().split())NEWLINE print(abs(a[0]-w) if n==1 else max(abs(a[-1]-w), abs(a[-1]-a[-2])))NEWLINENEWLINE class ABC079:NEWLINE @staticmethodNEWLINE def d():NEWLINE h, w, *I = map(int, sys.stdin.read().split())NEWLINE I = np.array(I)NEWLINE c = I[:100].reshape(10,10)NEWLINE a = I[100:].reshape(h,w)NEWLINE c = shortest_path(c.T, method='D', indices=1).astype(np.int32)NEWLINE print(c[a[a!=-1]].sum())NEWLINENEWLINE class ABC080:NEWLINE @staticmethodNEWLINE def d():NEWLINE n, c, *stc = map(int, sys.stdin.read().split())NEWLINE using = np.zeros((c, 10**5+2), dtype=np.int8)NEWLINE s, t, c = np.array(stc).reshape(n,3).TNEWLINE np.add.at(using, (c-1, s), 1)NEWLINE np.subtract.at(using, (c-1, t+1), 1)NEWLINE np.cumsum(using, axis=1, out=using)NEWLINE print(np.count_nonzero(using, axis=0).max())NEWLINENEWLINE class ABC081:NEWLINE @staticmethodNEWLINE def d():NEWLINE n, *a = map(int, sys.stdin.read().split())NEWLINE a = np.array(a)NEWLINE i = np.argmax(np.absolute(a))NEWLINE print(2*n-1)NEWLINE for j in range(n): print(i+1, j+1)NEWLINE if a[i] >= 0:NEWLINE for j in range(n-1): print(j+1, j+2)NEWLINE else:NEWLINE for j in range(n-1, 0, -1): print(j+1, j)NEWLINENEWLINENEWLINE class ABC082:NEWLINE @staticmethodNEWLINE def d():NEWLINE s = [1 if c=='T' else 0 for c in sys.stdin.readline().rstrip()] + [1]NEWLINE x, y = map(int, 
sys.stdin.readline().split())NEWLINE i = j = 0NEWLINE while s[i]==0: x -= 1; i +=1NEWLINE d = [[], []]NEWLINE while i < len(s):NEWLINE if s[i]: j ^= 1; i += 1; continueNEWLINE c = 0NEWLINE while s[i]==0: c += 1; i += 1NEWLINE d[j].append(c)NEWLINENEWLINE def possible(a, s):NEWLINE dp = np.zeros(sum(a)+1, dtype=np.bool)NEWLINE if s >= len(dp): return FalseNEWLINE dp[-1] = TrueNEWLINE for x in a: dp[:-2*x] += dp[2*x:]NEWLINE return dp[s]NEWLINENEWLINE print('Yes' if possible(d[0], abs(x)) & possible(d[1], abs(y)) else 'No')NEWLINENEWLINENEWLINE class ABC083:NEWLINE @staticmethodNEWLINE def d():NEWLINE s = np.array(list(sys.stdin.readline().rstrip()), dtype=np.int8)NEWLINE k = np.argwhere(s[:-1] != s[1:]).ravel()NEWLINE if not k.size: print(len(s)); returnNEWLINE print(np.maximum(k+1, len(s)-1-k).min())NEWLINENEWLINENEWLINE class ABC084:NEWLINE @staticmethodNEWLINE def d():NEWLINE pn = NumberTheory.PrimeNumbers()NEWLINE n = np.arange(10**5+1)NEWLINE cnt = (pn.is_prime[n] & pn.is_prime[(n+1)//2]).astype(np.int32)NEWLINE np.cumsum(cnt, out=cnt)NEWLINE q, *lr = map(int, sys.stdin.read().split())NEWLINE l, r = np.array(lr).reshape(q, 2).TNEWLINE print(*(cnt[r]-cnt[l-1]), sep='\n')NEWLINENEWLINENEWLINE class ABC085:NEWLINE @staticmethodNEWLINE def d():NEWLINE n, h, *ab = map(int, sys.stdin.read().split())NEWLINE a, b = np.array(ab).reshape(n, 2).TNEWLINE a = np.sort(a)[-1]; b = np.sort(b[b>=a])[::-1]NEWLINE np.cumsum(b, out=b)NEWLINE print(np.searchsorted(b, h, side='left')+1 if h<=b[-1] else len(b)+(h-b[-1]+a-1)//a)NEWLINENEWLINE class ABC086:NEWLINE @staticmethodNEWLINE def d():NEWLINE n, k = map(int, sys.stdin.readline().split())NEWLINE xy = []NEWLINE for _ in range(n):NEWLINE a, b, c = sys.stdin.readline().split()NEWLINE a, b = int(a), int(b)NEWLINE b += k*(c=='W')NEWLINE xy.append((a,b))NEWLINE x, y = np.array(xy, dtype=np.int32).T % (2*k)NEWLINE s = np.zeros((3*k, 3*k), dtype=np.int32)NEWLINE np.add.at(s, (y,x), 1); np.add.at(s, (y+k, x+k), 1); np.add.at(s, (y+k, x), 
-1); np.add.at(s, (y, x+k), -1)NEWLINE del x; del yNEWLINE s = s.cumsum(axis=0).cumsum(axis=1)NEWLINE s[:k] += s[-k:]; s[:, :k] += s[:, -k:]; s = s[:-k, :-k]NEWLINE s[:k, :k] += s[-k:, -k:]; s[:k, -k:] += s[-k:, :k]; s = s[:k]NEWLINE print(s.max())NEWLINENEWLINE class ABC087:NEWLINE @staticmethodNEWLINE def d():NEWLINE n, m, *lrd = map(int, sys.stdin.read().split())NEWLINE g = GeometryTopology.Graph(n)NEWLINE for l, r, d in zip(*[iter(lrd)]*3):NEWLINE l -= 1; r -= 1NEWLINE g.add_edge(l, r, weight=d); g.add_edge(r, l, weight=-d)NEWLINENEWLINE x = [None] * nNEWLINE @lru_cache(maxsize=None)NEWLINE def dfs(u, y):NEWLINE if x[u] is not None:NEWLINE if x[u] != y: raise Exception('conflict!')NEWLINE returnNEWLINE x[u] = yNEWLINE for v, e in g.edges[u].items(): dfs(v, y+e.weight)NEWLINENEWLINE for u in range(n):NEWLINE if x[u] is not None: continueNEWLINE # try: dfs(u, 0)NEWLINE # except: print('No'); returnNEWLINE stack = [(u, 0)]NEWLINE while stack:NEWLINE u, y = stack.pop()NEWLINE if x[u] is not None:NEWLINE if x[u] != y: print('No'); returnNEWLINE continueNEWLINE x[u] = yNEWLINE for v, e in g.edges[u].items(): stack.append((v, y+e.weight))NEWLINE print('Yes')NEWLINENEWLINE class ABC088:NEWLINE @staticmethodNEWLINE def d():NEWLINE h, w = map(int, sys.stdin.readline().split())NEWLINE s = ''.join(sys.stdin.read().split())NEWLINE g = GeometryTopology.Graph(h*w)NEWLINE cnt = h*wNEWLINE for u in range(h*w):NEWLINE if s[u] == '#': cnt -= 1; continueNEWLINE i, j = divmod(u, w)NEWLINE if i>0 and s[u-w]=='.': g.add_edge(u, u-w, weight=1)NEWLINE if i<h-1 and s[u+w]=='.': g.add_edge(u, u+w, weight=1)NEWLINE if j>0 and s[u-1]=='.': g.add_edge(u, u-1, weight=1)NEWLINE if j<w-1 and s[u+1]=='.': g.add_edge(u, u+1, weight=1)NEWLINE d = g.bfs(0)NEWLINE print(-1 if d[-1]==inf else cnt-d[-1]-1)NEWLINENEWLINE class ABC089:NEWLINE @staticmethodNEWLINE def d():NEWLINE h, w, d, *I = map(int, sys.stdin.read().split())NEWLINE I = np.array(I)NEWLINE a = I[:h*w].reshape(h,w)NEWLINE l, r = 
I[h*w+1:].reshape(-1,2).T - 1NEWLINE yx = np.pad(np.argwhere(a)[np.argsort(a.ravel())], pad_width=[(0,d), (0,0)], constant_values=0)NEWLINE a = np.zeros(h*w+d, dtype=np.int32)NEWLINE for i in range(0, h*w-d, d):NEWLINE a[i+d:i+2*d] = a[i:i+d] + np.absolute(yx[i+d:i+2*d]-yx[i:i+d]).sum(axis=1)NEWLINE print(*(a[r]-a[l]), sep='\n')NEWLINENEWLINE class ABC090:NEWLINE @staticmethodNEWLINE def d():NEWLINE n, k = map(int, sys.stdin.readline().split())NEWLINE b = np.arange(k+1, n+1)NEWLINE print((n//b*(b-k) + np.maximum(0, (n%b)-k+1*(k!=0))).sum())NEWLINENEWLINE class ABC091:NEWLINE @staticmethodNEWLINE def d():NEWLINE n, *ab = map(int, sys.stdin.read().split())NEWLINE c = 2**np.arange(30)NEWLINE a, b = np.sort(np.array(ab).reshape(2,n)[:, None] % (2*c)[:,None])NEWLINE res = 0NEWLINE for i in range(30):NEWLINE j = np.searchsorted(b[i], np.arange(1, 5)[:,None]*c[i]-a[i]).sum(axis=1)NEWLINE j[1::2] *= -1NEWLINE res += (j.sum()&1) * c[i]NEWLINE print(res)NEWLINENEWLINE class ABC092:NEWLINE @staticmethodNEWLINE def d():NEWLINE a, b = map(int, sys.stdin.readline().split())NEWLINE def make(color, cnt):NEWLINE g = [[color^1]*100 for _ in range(21)]NEWLINE for i in range(1, 21, 2):NEWLINE for j in range(0, 100, 2):NEWLINE if not cnt: return gNEWLINE g[i][j] = color; cnt -= 1NEWLINE g = make(0,a-1) + make(1,b-1)NEWLINE def convert(s): return ''.join('#' if c else '.' 
for c in s)NEWLINE print(42, 100)NEWLINE print(*map(convert, g), sep='\n')NEWLINENEWLINE class ABC093:NEWLINE @staticmethodNEWLINE def d():NEWLINE q, *ab = map(int, sys.stdin.read().split())NEWLINE a, b = np.sort(np.array(ab).reshape(q,2)).TNEWLINE x = np.sqrt(a*b).astype(int)NEWLINE x[x*x==a*b] -= 1NEWLINE res = a-1NEWLINE res += (a-1) * (b-a<=1)NEWLINE res += (x+np.minimum(x-a-1*(x*(x+1)>=a*b), b-x-1)) * (b-a>=2)NEWLINENEWLINE # res = 0NEWLINE # res += 2*(a-1) * (b-a<=1)NEWLINE # res += (2*x-1 - 1*(x*(x+1)>=a*b)) * (b-a >= 2)NEWLINENEWLINE print(*res, sep='\n')NEWLINENEWLINENEWLINENEWLINE class ABC094:NEWLINE @staticmethodNEWLINE def d():NEWLINE n, *a = map(int, sys.stdin.read().split())NEWLINE a.sort()NEWLINE print(a[-1], end=' ')NEWLINE b = (a[-1]+1)//2NEWLINE i = bi_l(a, b)NEWLINE print(a[-2] if i==n-1 else a[i-1] if b-a[i-1]<=a[i]-b else a[i])NEWLINENEWLINENEWLINENEWLINE class ABC095:NEWLINE @staticmethodNEWLINE def d():NEWLINE n, c, *xv = map(int, sys.stdin.read().split())NEWLINENEWLINE def make(xv):NEWLINE x, v = xv.TNEWLINE s = np.cumsum(v)-x; rs = s-xNEWLINE np.maximum.accumulate(s, out=s)NEWLINE np.maximum.accumulate(rs, out=rs)NEWLINE return s, rsNEWLINENEWLINE xv = np.pad(np.array(xv).reshape(n,2), pad_width=[(1,0), (0,0)], constant_values=0)NEWLINE ls, lrs = make(xv)NEWLINE xv[1:, 0] = c-xv[1:, 0]; xv[1:] = xv[-1:0:-1]NEWLINE rs, rrs = make(xv)NEWLINE print(np.maximum(ls+rrs[::-1], rs+lrs[::-1]).max())NEWLINENEWLINE class ABC096:NEWLINE @staticmethodNEWLINE def d():NEWLINE n = int(sys.stdin.readline().rstrip())NEWLINE pn = NumberTheory.PrimeNumbers()NEWLINE a = [p for p in pn if p%5==1]NEWLINE print(*a[:n])NEWLINENEWLINENEWLINE class ABC097:NEWLINE @staticmethodNEWLINE def d():NEWLINE n, m = map(int, sys.stdin.readline().split())NEWLINE p = [int(x)-1 for x in sys.stdin.readline().split()]NEWLINE uf = GeometryTopology.Graph(n); uf.init_dsu()NEWLINE for x, y in zip(*[map(int, sys.stdin.read().split())]*2): uf.unite(x-1, y-1)NEWLINE groups = [set(p[u] 
for u in g) for g in uf.groups()]NEWLINE print(sum(i in groups[uf.find(i)] for i in range(n)))NEWLINENEWLINE class ABC098:NEWLINE @staticmethodNEWLINE def d():NEWLINE n, *a = map(int, sys.stdin.read().split())NEWLINE r = s = cnt = 0NEWLINE for l in range(n):NEWLINE while r<n and not(s&a[r]): s ^= a[r]; r += 1NEWLINE cnt += r-l; s ^= a[l]NEWLINE print(cnt)NEWLINENEWLINENEWLINE class ABC099:NEWLINE @staticmethodNEWLINE def d():NEWLINE n, c, *I = map(int, sys.stdin.read().split())NEWLINE I = np.array(I)NEWLINE d = I[:c*c].reshape(c,c)NEWLINE r = np.arange(n*n); r = (r//n + r%n)%3NEWLINE a = d[I[c*c:]-1, np.arange(c)[:,None]]NEWLINE r = np.arange(n*n); r = (r//n + r%n)%3 == np.arange(3)[:,None]NEWLINE a = np.vstack([a[:,r[i]].sum(axis=1) for i in range(3)])NEWLINE p = np.array([*itertools.permutations(range(c), 3)])NEWLINE print(a[np.arange(3),p].sum(axis=1).min())NEWLINENEWLINENEWLINE class ABC100:NEWLINE @staticmethodNEWLINE def d():NEWLINE n, m, *xyz = map(int, sys.stdin.read().split())NEWLINE xyz = np.array(xyz).reshape(n,3)NEWLINE op = np.array([*itertools.product((-1,1), repeat=3)])NEWLINE print(np.sort((op[:,None]*xyz).sum(axis=-1), axis=-1)[:,n-m:].sum(axis=-1).max())NEWLINENEWLINE class ABC101:NEWLINE @staticmethodNEWLINE def d():NEWLINE def s(n): return sum(int(d) for d in str(n))NEWLINE def f(n):NEWLINE return sorted([pow(10,d)*(n//pow(10,d)+2)-1 for d in range(int(math.log10(n))+2)], key=lambda x: x/s(x))[0]NEWLINE k = int(sys.stdin.readline().rstrip())NEWLINE n = 1NEWLINE for _ in range(k): print(n); n = f(n)NEWLINENEWLINE class ABC102:NEWLINE @staticmethodNEWLINE def d(): # two pointers (online)NEWLINE n, *a = map(int, sys.stdin.read().split())NEWLINE mn = infNEWLINE i, k = 0, 2NEWLINE p,q,r,s = a[0], 0, a[1]+a[2], sum(a[3:])NEWLINE for j in range(1,n-2):NEWLINE q += a[j]; r -= a[j]NEWLINE while i < j-1:NEWLINE if abs(q-p-2*a[i+1]) <= abs(q-p):NEWLINE q -= a[i+1]; p += a[i+1]NEWLINE i += 1; continueNEWLINE breakNEWLINE while k < n-2:NEWLINE if 
abs(s-r-2*a[k+1]) <= abs(s-r):NEWLINE s -= a[k+1]; r += a[k+1]NEWLINE k += 1; continueNEWLINE breakNEWLINE tmp = sorted([p,q,r,s])NEWLINE mn = min(mn, tmp[-1]-tmp[0])NEWLINE print(mn)NEWLINENEWLINE @staticmethodNEWLINE def d_2(): # binary_search (offline)NEWLINE n, *a = map(int, sys.stdin.read().split())NEWLINE a = np.array(a)NEWLINE def f(a):NEWLINE s = np.cumsum(a)NEWLINE i = np.searchsorted(s, s/2)NEWLINE l, r = s[i], s-s[i]NEWLINE bl = np.abs(r-l) > np.abs(r-l+2*a[i])NEWLINE l -= a[i]*bl; r += a[i]*blNEWLINE return l, rNEWLINE (p,q), (s,r) = f(a), f(a[::-1])NEWLINE a = np.sort(np.vstack((p[:-1], q[:-1], r[-2::-1], s[-2::-1])), axis=0)[:,1:-1]NEWLINE print((a[-1]-a[0]).min())NEWLINENEWLINE class ABC103:NEWLINE @staticmethodNEWLINE def d():NEWLINE n, m, *ab = map(int, sys.stdin.read().split())NEWLINE cnt = prev = 0NEWLINE for a, b in sorted(zip(*[iter(ab)]*2), key=lambda x: x[1]):NEWLINE a -= 1; b -= 1NEWLINE if a < prev: continueNEWLINE prev = b; cnt += 1NEWLINE print(cnt)NEWLINENEWLINE class ABC104:NEWLINE @staticmethodNEWLINE def d():NEWLINE s = sys.stdin.readline().rstrip()[::-1]NEWLINE a = b = c = 0; d = 1NEWLINE for i in range(len(s)):NEWLINE if s[i]=='?': a,b,c,d = 3*a+b, 3*b+c, 3*c+d, 3*dNEWLINE elif s[i] == 'A': a += bNEWLINE elif s[i] == 'B': b += cNEWLINE elif s[i] == 'C': c += dNEWLINE a %= MOD; b %= MOD; c %= MOD; d %= MODNEWLINE print(a)NEWLINENEWLINE class ABC105:NEWLINE @staticmethodNEWLINE def d():NEWLINE n, m, *a = map(int, sys.stdin.read().split())NEWLINE c = Counter(np.array(a).cumsum()%m)NEWLINE print(c[0] + sum([v*(v-1)//2 for v in c.values()]))NEWLINENEWLINE class ABC106:NEWLINE @staticmethodNEWLINE def d():NEWLINE n, m, q, *I = map(int, sys.stdin.read().split())NEWLINE I = np.array(I).reshape(-1,2) - 1NEWLINE (l,r), (p,q) = I[:m].T, I[-q:].TNEWLINE c = np.zeros((n+1, n+1), dtype=np.int64)NEWLINE np.add.at(c, (0,r), 1); np.add.at(c, (l+1,-1), 1)NEWLINE np.add.at(c, (l+1,r), -1); c[0,-1] -= mNEWLINE c = c.cumsum(axis=0).cumsum(axis=1)NEWLINE 
# (tail of ABC106.d — the method body begins on the previous physical line)
print(*c[p,q], sep='\n')

class ABC107:
    @staticmethod
    def d():
        """Binary-search the answer value x; appears to compute the median of
        all subarray medians (ABC107 D) — TODO confirm against the statement.

        h is half (rounded up) of the number of subarrays, n*(n+1)/2.
        """
        n, *a = map(int, sys.stdin.read().split())
        h = (n*(n+1)//2 + 1)//2

        def f(x):
            # Count subarrays whose median is >= x via prefix sums of +/-1
            # and a Fenwick tree (pair-counting of prefix values);
            # O(n log n) per check. Kept for reference — f_2 is used instead.
            *b, = itertools.accumulate([0]+[-1+2*(v>=x) for v in a])
            mn = min(b)
            # shift prefix values to 1-based positive indices for the tree
            b = [v-mn+1 for v in b]
            bit = GeometryTopology.FenwickTree(max(b))
            c = 0
            for v in b: c += bit.sum(1, v); bit.add(v, 1)
            return c >= h

        def f_2(x):
            # Same count as f, but tracks how many earlier prefixes are
            # strictly below the running prefix incrementally — O(n) per check.
            tot = 0
            s, cs, c = 0, defaultdict(int), 0; cs[0] = 1
            for v in a:
                if v>=x: s += 1; c += cs[s]
                else: c -= cs[s]; s -= 1
                tot += c; cs[s] += 1; c += 1
            # print(tot)
            return tot >= h

        def binary_search():
            # Largest x with f_2(x) true; values are bounded by 10**9.
            lo, hi = 1, 10**9+1
            while hi-lo > 1:
                x = (hi+lo)//2
                # if f(x): lo = x
                if f_2(x): lo = x
                else: hi = x
            return lo
        print(binary_search())

class ABC108:
    @staticmethod
    def d():
        # Constructive problem: outputs a graph built from the binary
        # representation of L. n = bit length of L; m = 2*(n-1) base edges
        # plus one extra edge per set bit below the top bit
        # (bit_count(l)-1 of them). bit_count is a project helper.
        l = int(sys.stdin.readline().rstrip())
        n = l.bit_length()
        m = 2*(n-1) + bit_count(l)-1
        # chain i -> i+1 with parallel weights 0 and 2^i
        edges = [(i, i+1, d) for i in range(n-1) for d in [0, 1<<i]]
        d = 1<<(n-1)
        for i in range(n-1):
            # one extra edge into the last node for each set bit of L
            if l>>i&1: edges.append((i, n-1, d)); d += 1<<i
        print(n, m)
        for u, v, d in edges: print(u+1, v+1, d)

class ABC109:
    @staticmethod
    def d():
        # Unfinished: input is parsed but no solution is implemented yet.
        h, w, *a = map(int, sys.stdin.read().split())

        pass

class ABC110:
    @staticmethod
    def d():
        # Number of ways to write m as an ordered product of n factors:
        # for each prime exponent e in m's factorization, distribute e over
        # n slots -> C(n+e-1, e); answer is the product of these mod MOD.
        n, m = map(int, sys.stdin.readline().split())
        c = Combinatorics.CombinationsMod(n=10**6, mod=MOD)
        pn = NumberTheory.PrimeNumbers(10**5)
        f = np.array([*pn.factorize(m).values()])
        print(Algebra.cumprod(c(n+f-1, f), mod=MOD)[-1])

# Placeholder classes for contests not yet solved.
class ABC111: pass
class ABC112: pass
class ABC113: pass
class ABC114: pass
class ABC115: pass
class ABC116: pass
class ABC117: passNEWLINE class ABC118: passNEWLINE class ABC119: passNEWLINE class ABC120: passNEWLINE class ABC121: passNEWLINE class ABC122: passNEWLINE class ABC123: passNEWLINE class ABC124: passNEWLINE class ABC125: passNEWLINE class ABC126: passNEWLINE class ABC127: passNEWLINE class ABC128: passNEWLINE class ABC129: passNEWLINE class ABC130: passNEWLINE class ABC131: passNEWLINE class ABC132: passNEWLINE class ABC133: passNEWLINE class ABC134: passNEWLINE class ABC135: passNEWLINE class ABC136: passNEWLINE class ABC137: passNEWLINE class ABC138: passNEWLINE class ABC139: passNEWLINE class ABC140: passNEWLINE class ABC141: passNEWLINE class ABC142: passNEWLINE class ABC143: passNEWLINE class ABC144: passNEWLINE class ABC145: passNEWLINE class ABC146: passNEWLINE class ABC147: passNEWLINE class ABC148: passNEWLINE class ABC149: passNEWLINE class ABC150: passNEWLINE class ABC151: passNEWLINE class ABC152: passNEWLINE class ABC153: passNEWLINE class ABC154: passNEWLINE class ABC155: passNEWLINE class ABC156: passNEWLINE class ABC157: passNEWLINE class ABC158: passNEWLINE class ABC159: passNEWLINE class ABC160: passNEWLINE class ABC161: passNEWLINE class ABC162: passNEWLINE class ABC163: passNEWLINE class ABC164: passNEWLINE class ABC165: passNEWLINE class ABC166: passNEWLINE class ABC167: passNEWLINE class ABC168: passNEWLINE class ABC169: passNEWLINENEWLINE class ABC170:NEWLINE @staticmethodNEWLINE def a():NEWLINE x = [int(x) for x in sys.stdin.readline().split()]NEWLINE for i in range(5):NEWLINE if x[i] != i+1:NEWLINE print(i+1)NEWLINE breakNEWLINENEWLINE @staticmethodNEWLINE def b():NEWLINE x, y = map(int, sys.stdin.readline().split())NEWLINE print('Yes' if 2*x <= y <= 4*x and y%2 == 0 else 'No')NEWLINE @staticmethodNEWLINE def c():NEWLINE x, n, *p = map(int, sys.stdin.read().split())NEWLINE a = list(set(range(102)) - set(p))NEWLINE a = [(abs(y-x), y) for y in a]NEWLINE print(sorted(a)[0][1])NEWLINENEWLINE @staticmethodNEWLINE def d():NEWLINE n, *a = 
map(int, sys.stdin.read().split())NEWLINE cand = set(a)NEWLINE cnt = 0NEWLINE for x, c in sorted(Counter(a).items()):NEWLINE cnt += c == 1 and x in candNEWLINE cand -= set(range(x*2, 10**6+1, x))NEWLINE print(cnt)NEWLINENEWLINE @staticmethodNEWLINE def e():NEWLINE n, q = map(int, sys.stdin.readline().split())NEWLINE queue = []NEWLINE m = 2*10**5NEWLINE infants = [[] for _ in range(m)]NEWLINE highest_rate = [None] * mNEWLINE where = [None] * nNEWLINE rate = [None] * nNEWLINENEWLINE def entry(i, k):NEWLINE where[i] = kNEWLINE while infants[k]:NEWLINE r, j = heappop(infants[k])NEWLINE if where[j] != k or j == i: continueNEWLINE if rate[i] >= -r:NEWLINE highest_rate[k] = rate[i]NEWLINE heappush(queue, (rate[i], k, i))NEWLINE heappush(infants[k], (r, j))NEWLINE breakNEWLINE else:NEWLINE highest_rate[k] = rate[i]NEWLINE heappush(queue, (rate[i], k, i))NEWLINE heappush(infants[k], (-rate[i], i))NEWLINENEWLINE def transfer(i, k):NEWLINE now = where[i]NEWLINE while infants[now]:NEWLINE r, j = heappop(infants[now])NEWLINE if where[j] != now or j == i: continueNEWLINE if highest_rate[now] != -r:NEWLINE highest_rate[now] = -rNEWLINE heappush(queue, (-r, now, j))NEWLINE heappush(infants[now], (r, j))NEWLINE breakNEWLINE else:NEWLINE highest_rate[now] = NoneNEWLINE entry(i, k)NEWLINENEWLINE def inquire():NEWLINE while True:NEWLINE r, k, i = heappop(queue)NEWLINE if where[i] != k or r != highest_rate[k]: continueNEWLINE heappush(queue, (r, k, i))NEWLINE return rNEWLINENEWLINE for i in range(n):NEWLINE a, b = map(int, sys.stdin.readline().split())NEWLINE rate[i] = aNEWLINE entry(i, b-1)NEWLINE for _ in range(q):NEWLINE c, d = map(int, sys.stdin.readline().split())NEWLINE transfer(c-1, d-1)NEWLINE print(inquire())NEWLINENEWLINENEWLINENEWLINE class ABC171:NEWLINE @staticmethodNEWLINE def a():NEWLINE c = sys.stdin.readline().rstrip()NEWLINE print('A' if c < 'a' else 'a')NEWLINENEWLINE @staticmethodNEWLINE def b():NEWLINE n, k, *p = map(int, sys.stdin.read().split())NEWLINE 
print(sum(sorted(p)[:k]))NEWLINENEWLINENEWLINE @staticmethodNEWLINE def c():NEWLINE n = int(sys.stdin.readline().rstrip())NEWLINE n -= 1NEWLINE l = 1NEWLINE while True:NEWLINE if n < pow(26, l):NEWLINE breakNEWLINE n -= pow(26, l)NEWLINE l += 1NEWLINE res = ''.join([chr(ord('a')+d) for d in NumberTheory.base_convert(n, 26)][::-1])NEWLINE res = 'a'*(l-len(res)) + resNEWLINE print(res)NEWLINENEWLINE @staticmethodNEWLINE def d():NEWLINE n = int(sys.stdin.readline().rstrip())NEWLINE a = [int(x) for x in sys.stdin.readline().split()]NEWLINE s = sum(a)NEWLINE cnt = Counter(a)NEWLINE q = int(sys.stdin.readline().rstrip())NEWLINE for _ in range(q):NEWLINE b, c = map(int, sys.stdin.readline().split())NEWLINE s += (c-b)*cnt[b]NEWLINE print(s)NEWLINE cnt[c] += cnt[b]; cnt[b] = 0NEWLINENEWLINE @staticmethodNEWLINE def e():NEWLINE n, *a = map(int, sys.stdin.read().split())NEWLINE s = 0NEWLINE for x in a: s ^= xNEWLINE b = map(lambda x: x^s, a)NEWLINE print(*b, sep=' ')NEWLINENEWLINENEWLINE class ABC172:NEWLINE @staticmethodNEWLINE def a():NEWLINE a = int(sys.stdin.readline().rstrip()); print(a*(1+a+a**2))NEWLINENEWLINE @staticmethodNEWLINE def b():NEWLINE s, t = sys.stdin.read().split(); print(sum(s[i]!=t[i] for i in range(len(s))))NEWLINENEWLINE @staticmethodNEWLINE def c():NEWLINE n, m, k = map(int, sys.stdin.readline().split())NEWLINE a = [0] + [int(x) for x in sys.stdin.readline().split()]NEWLINE b = [int(x) for x in sys.stdin.readline().split()]NEWLINE *sa, = itertools.accumulate(a)NEWLINE *sb, = itertools.accumulate(b)NEWLINE res = 0NEWLINE for i in range(n+1):NEWLINE r = k - sa[i]NEWLINE if r < 0: breakNEWLINE res = max(res, i+bi_r(sb, r))NEWLINE print(res)NEWLINENEWLINE @staticmethodNEWLINE def d():NEWLINE n = int(sys.stdin.readline().rstrip())NEWLINE f = np.zeros(n+1, dtype=np.int64)NEWLINE for i in range(1, n+1):NEWLINE f[i::i] += 1NEWLINE print((np.arange(1, n+1)*f[1:]).sum())NEWLINENEWLINENEWLINE class ABC173:NEWLINE @staticmethodNEWLINE def a():NEWLINE n = 
int(sys.stdin.readline().rstrip())NEWLINE charge = (n+999)//1000 * 1000 - nNEWLINE print(charge)NEWLINENEWLINE @staticmethodNEWLINE def b():NEWLINE n, *s = sys.stdin.read().split()NEWLINE c = Counter(s)NEWLINE for v in 'AC, WA, TLE, RE'.split(', '):NEWLINE print(f'{v} x {c[v]}')NEWLINENEWLINENEWLINE @staticmethodNEWLINE def c():NEWLINE h, w, k = map(int, sys.stdin.readline().split())NEWLINE c = [sys.stdin.readline().rstrip() for _ in range(h)]NEWLINE tot = 0NEWLINE for i in range(1<<h):NEWLINE for j in range(1<<w):NEWLINE cnt = 0NEWLINE for y in range(h):NEWLINE for x in range(w):NEWLINE if i>>y & 1 or j>>x & 1:NEWLINE continueNEWLINE cnt += c[y][x] == '#'NEWLINE tot += cnt == kNEWLINE print(tot)NEWLINENEWLINE @staticmethodNEWLINE def d():NEWLINE n, *a = map(int, sys.stdin.read().split())NEWLINE a.sort(reverse=True)NEWLINE res = a[0] + sum(a[1:1+(n-2)//2])*2 + a[1+(n-2)//2]*(n & 1)NEWLINE print(res)NEWLINENEWLINE @staticmethodNEWLINE def e():NEWLINE MOD = 10**9+7NEWLINE n, k, *a = map(int, sys.stdin.read().split())NEWLINE minus = [x for x in a if x < 0]NEWLINE plus = [x for x in a if x > 0]NEWLINE if len(plus) + len(minus)//2*2 >= k: # plusNEWLINE *minus, = map(abs, minus)NEWLINE minus.sort(reverse=True)NEWLINE plus.sort(reverse=True)NEWLINE cand = []NEWLINE if len(minus)&1: minus = minus[:-1]NEWLINE for i in range(0, len(minus)-1, 2):NEWLINE cand.append(minus[i]*minus[i+1]%MOD)NEWLINE if k & 1:NEWLINE res = plus[0]NEWLINE plus = plus[1:]NEWLINE else:NEWLINE res = 1NEWLINE if len(plus)&1: plus = plus[:-1]NEWLINE for i in range(0, len(plus)-1, 2):NEWLINE cand.append(plus[i]*plus[i+1]%MOD)NEWLINE cand.sort(reverse=True)NEWLINE for x in cand[:k//2]:NEWLINE res *= xNEWLINE res %= MODNEWLINE print(res)NEWLINE elif 0 in a:NEWLINE print(0)NEWLINE else:NEWLINE cand = sorted(map(abs, a))NEWLINE res = 1NEWLINE for i in range(k):NEWLINE res *= cand[i]NEWLINE res %= MODNEWLINE res = MOD - resNEWLINE print(res)NEWLINE passNEWLINENEWLINENEWLINE class ABC174:NEWLINE 
@staticmethodNEWLINE def a():NEWLINE print('Yes' if int(sys.stdin.readline().rstrip())>=30 else 'No')NEWLINENEWLINENEWLINENEWLINENEWLINE class ABC178:NEWLINE @staticmethodNEWLINE def a(): passNEWLINENEWLINE @staticmethodNEWLINE def b(): passNEWLINENEWLINE @staticmethodNEWLINE def c(): passNEWLINENEWLINE @staticmethodNEWLINE def d():NEWLINE s = int(sys.stdin.readline().rstrip())NEWLINE if s == 0: print(1); returnNEWLINE elif s == 1: print(0); returnNEWLINE c = np.eye(3, k=-1, dtype=np.int64)NEWLINE c[0, 0] = c[0, 2] = 1NEWLINE a = np.array([0, 0, 1])NEWLINE print(Algebra.dot(Algebra.matrix_pow(c, s-2), a)[0])NEWLINENEWLINE class ABC179:NEWLINE @staticmethodNEWLINE def a():NEWLINE s = sys.stdin.readline().rstrip()NEWLINE print(s+'s' if s[-1]!='s' else s+'es')NEWLINENEWLINE @staticmethodNEWLINE def b():NEWLINE n, *d = map(int, sys.stdin.read().split())NEWLINE d = np.array(d).reshape(n, 2).TNEWLINE d = np.equal(d[0], d[1]).astype(int)NEWLINE dd = d.copy()NEWLINE dd[1:] += d[:-1]NEWLINE dd[:-1] += d[1:]NEWLINE print('Yes' if (dd>=3).any() else 'No')NEWLINENEWLINENEWLINENEWLINE @staticmethodNEWLINE def c():NEWLINE n = int(sys.stdin.readline().rstrip())NEWLINE res = (n//np.arange(1, n+1)).sum() - len(NumberTheory.find_divisors(n))NEWLINE print(res)NEWLINENEWLINENEWLINE @staticmethodNEWLINE def d():NEWLINE mod = 998244353NEWLINE n, k, *lr = map(int, sys.stdin.read().split())NEWLINE l, r = np.array(lr).reshape(k, -1).TNEWLINE @njit((i8, i8[:], i8[:]), cache=True)NEWLINE def solve(n, l, r):NEWLINE res = np.zeros(n*2, dtype=np.int64); res[0], res[1] = 1, -1NEWLINE for i in range(n-1):NEWLINE res[i+1] = (res[i+1]+res[i]) % modNEWLINE res[i+l] = (res[i+l]+res[i]) % modNEWLINE res[i+r+1] = (res[i+r+1]-res[i]) % modNEWLINE print(res[n-1])NEWLINE solve(n, l, r)NEWLINENEWLINE @staticmethodNEWLINE def e():NEWLINE n, x, m = map(int, sys.stdin.readline().split())NEWLINE res = [-1 for _ in range(m)]NEWLINE s = 0NEWLINE loop = np.zeros(m, dtype=np.int64)NEWLINE for i in 
range(m+1):NEWLINE if i==n: print(s); returnNEWLINE if res[x] != -1:NEWLINE l, loop = i-res[x], loop[res[x]:i]NEWLINE q, r = divmod(n-i, l)NEWLINE print(s+q*loop.sum()+loop[:r].sum()); returnNEWLINE res[x], loop[i] = i, xNEWLINE s += x; x = x**2 % mNEWLINENEWLINENEWLINE class ABC180:NEWLINE @staticmethodNEWLINE def a():NEWLINE n, a, b = map(int, sys.stdin.readline().split())NEWLINE print(n-a+b)NEWLINENEWLINE @staticmethodNEWLINE def b():NEWLINE n, *x = map(int, sys.stdin.read().split())NEWLINE x = np.absolute(np.array(x))NEWLINE print(x.sum())NEWLINE print(np.sqrt((x**2).sum()))NEWLINE print(x.max())NEWLINENEWLINE @staticmethodNEWLINE def c():NEWLINE n = int(sys.stdin.readline().rstrip())NEWLINE div = NumberTheory.find_divisors(n)NEWLINE print(*div, sep='\n')NEWLINENEWLINE @staticmethodNEWLINE def d():NEWLINE x, y, a, b = map(int, sys.stdin.readline().split())NEWLINE cnt = 0NEWLINE while x*a <= x+b:NEWLINE x *= aNEWLINE if x >= y:NEWLINE print(cnt); returnNEWLINE cnt += 1NEWLINE cnt += (y-x-1) // bNEWLINE print(cnt)NEWLINENEWLINE @staticmethodNEWLINE def e():NEWLINE n, *xyz = map(int, sys.stdin.read().split())NEWLINENEWLINE xyz = list(zip(*[iter(xyz)] * 3))NEWLINE dist = [[0] * n for _ in range(n)]NEWLINE for i in range(n):NEWLINE a, b, c = xyz[i]NEWLINE for j in range(n):NEWLINE p, q, r = xyz[j]NEWLINE dist[i][j] = abs(p-a) + abs(q-b) + max(0, r-c)NEWLINENEWLINE dp = [[inf] * n for _ in range(1<<n)]NEWLINE dp[0][0] = 0NEWLINE for s in range(1<<n):NEWLINE for i in range(n):NEWLINE t = s|(1<<i)NEWLINE for j in range(n):NEWLINE dp[t][i] = min(dp[t][i], dp[s][j]+dist[j][i])NEWLINE print(dp[-1][0])NEWLINENEWLINENEWLINENEWLINE @staticmethodNEWLINE def f(): # rewrite with jit compiling later.NEWLINE n, m, l = map(int, sys.stdin.readline().split())NEWLINE c = Combinatorics.CombinationsMod(n, MOD)NEWLINE path = np.zeros(n+1, dtype=np.int64); path[1] = path[2] = 1NEWLINE for i in range(3, n+1): path[i] = path[i-1]*i%MODNEWLINE cycle = np.zeros(n+1, dtype=np.int64); 
cycle[1:] = path[:-1]NEWLINE dp = np.zeros((n+1, m+1), dtype=np.int64)NEWLINE def f(l):NEWLINE dp[:,:] = 0; dp[0,0] = 1NEWLINE for i in range(n):NEWLINE for j in range(m+1):NEWLINE k = np.arange(1, min(l, n-i, m-j+1)+1)NEWLINE dp[i+k, j+k-1] += dp[i, j]*c(n-i-1, k-1)%MOD*path[k]%MODNEWLINE dp[i+k, j+k-1] %= MODNEWLINE k = np.arange(2, min(l, n-i, m-j)+1)NEWLINE dp[i+k, j+k] += dp[i, j]*c(n-i-1, k-1)%MOD*cycle[k]%MODNEWLINE dp[i+k, j+k] %= MODNEWLINE return dp[n, m]NEWLINE print((f(l)-f(l-1))%MOD)NEWLINENEWLINE @staticmethodNEWLINE def f_2(): # PyPyNEWLINE n, m, l = map(int, sys.stdin.readline().split())NEWLINE c = Combinatorics.CombinationsMod(n, MOD)NEWLINE path = [0] * (n+1); path[1] = path[2] = 1NEWLINE for i in range(3, n+1): path[i] = path[i-1]*i%MODNEWLINE cycle = [0] + path[:-1]NEWLINE def f(l):NEWLINE dp = [[0]*(m+1) for _ in range(n+1)]; dp[0][0] = 1NEWLINE for i in range(n):NEWLINE for j in range(m+1):NEWLINE for k in range(1, min(l, n-i, m-j+1)+1):NEWLINE dp[i+k][j+k-1] += dp[i][j]*c(n-i-1, k-1)%MOD*path[k]%MODNEWLINE dp[i+k][j+k-1] %= MODNEWLINE for k in range(1, min(l, n-i, m-j)+1):NEWLINE dp[i+k][j+k] += dp[i][j]*c(n-i-1, k-1)%MOD*cycle[k]%MODNEWLINE dp[i+k][j+k] %= MODNEWLINE return dp[n][m]NEWLINE print((f(l)-f(l-1))%MOD)NEWLINENEWLINENEWLINE class ABC181:NEWLINE @staticmethodNEWLINE def a():NEWLINE n = int(sys.stdin.readline().rstrip())NEWLINE print('White' if n&1==0 else 'Black')NEWLINENEWLINE @staticmethodNEWLINE def b():NEWLINE n, *ab = map(int, sys.stdin.read().split())NEWLINE a, b = np.array(ab).reshape(n,2).TNEWLINE print(((a+b)*(b-a+1)//2).sum())NEWLINENEWLINE @staticmethodNEWLINE def c():NEWLINE n, *xy = map(int, sys.stdin.read().split())NEWLINE i, j, k = np.array([*itertools.combinations(range(n),3)]).TNEWLINE x, y = np.array(xy).reshape(-1,2).TNEWLINE b = (y[j]-y[i])*(x[k]-x[j]) == (y[k]-y[j])*(x[j]-x[i])NEWLINE print('Yes' if b.any() else 'No')NEWLINENEWLINE @staticmethodNEWLINE def d():NEWLINE n = sys.stdin.readline().rstrip()NEWLINE if 
len(n)<=2:NEWLINE print('Yes' if int(n)%8==0 or int(n[::-1])%8==0 else 'No')NEWLINE returnNEWLINE c = Counter(n)NEWLINE for i in range(112, 1000, 8):NEWLINE if not Counter(str(i))-c: print('Yes'); returnNEWLINE print('No')NEWLINENEWLINE @staticmethodNEWLINE def e():NEWLINE n, m, *I = map(int, sys.stdin.read().split())NEWLINE I = np.array(I)NEWLINE h, w = np.sort(I[:n]), np.sort(I[-m:])NEWLINE tmp = np.pad(h[1:]-h[:-1], 1, constant_values=0)NEWLINE l = tmp.copy(); l[::2] = 0; np.cumsum(l, out=l)NEWLINE r = tmp.copy(); r[1::2] = 0; np.cumsum(r[::-1], out=r[::-1])NEWLINE i = np.searchsorted(w, h)NEWLINE d = np.pad(h[2:]-h[:-2], 1, constant_values=0); d[::2] = 0NEWLINE d += np.minimum(np.abs(h-w[np.maximum(i-1, 0)]), np.abs(h-w[np.minimum(m-1, i)]))NEWLINE print((l[:-1]+r[1:]+d).min())NEWLINENEWLINE @staticmethodNEWLINE def f():NEWLINE n, *xy = map(int, sys.stdin.read().split())NEWLINE xy = np.array(xy).reshape(n,2)NEWLINE y = xy[:, 1]NEWLINE if n == 1: print(np.maximum(100-y, y+100)[0]/2); returnNEWLINE ij = np.array([*itertools.combinations(range(n),2)])NEWLINE d = (np.diff(xy[ij], axis=1)**2).sum(axis=-1).ravel()NEWLINE def f(r):NEWLINE r *= 2NEWLINE uf = GeometryTopology.Graph(n+2); uf.init_dsu()NEWLINE for i in np.argwhere(y+100<=r).ravel(): uf.unite(i, n)NEWLINE for i in np.argwhere(100-y<=r).ravel(): uf.unite(i, n+1)NEWLINE for i, j in ij[np.argwhere(d<=r*r).ravel()]: uf.unite(i, j)NEWLINE return uf.same(n, n+1)NEWLINENEWLINE def binary_search():NEWLINE lo, hi = 0, 200.1NEWLINE while hi-lo > 1e-9:NEWLINE r = (lo+hi)/2NEWLINE if f(r): hi = rNEWLINE else: lo = rNEWLINE return loNEWLINE print(binary_search())NEWLINENEWLINE class ARC106:NEWLINE @staticmethodNEWLINE def a():NEWLINE n = int(sys.stdin.readline().rstrip())NEWLINE a = 1NEWLINE while pow(3,a)<=n:NEWLINE m, b = n-pow(3,a), 1NEWLINE while pow(5,b)<=m:NEWLINE if pow(5,b)==m: print(a, b); returnNEWLINE b += 1NEWLINE a += 1NEWLINE print(-1)NEWLINENEWLINE @staticmethodNEWLINE def b():NEWLINE n, m = map(int, 
sys.stdin.readline().split())NEWLINE a = [int(x) for x in sys.stdin.readline().split()]NEWLINE b = [int(x) for x in sys.stdin.readline().split()]NEWLINE uf = GeometryTopology.Graph(n); uf.init_dsu()NEWLINE for _ in range(m):NEWLINE c, d = map(int, sys.stdin.readline().split()); c -= 1; d -= 1NEWLINE uf.unite(c, d)NEWLINE ga, gb = [[] for _ in range(n)], [[] for _ in range(n)]NEWLINE for i in range(n):NEWLINE r = uf.find(i)NEWLINE ga[r].append(a[i]); gb[r].append(b[i])NEWLINE print('Yes' if all(sum(ga[i])==sum(gb[i]) for i in range(n)) else 'No')NEWLINENEWLINE @staticmethodNEWLINE def c():NEWLINE n, m = map(int, sys.stdin.readline().split())NEWLINE if m < 0: print(-1); returnNEWLINE if n == 1:NEWLINE if m != 0: print(-1); returnNEWLINE print(1, 2); returnNEWLINE if m >= n-1: print(-1); returnNEWLINE l, r = 1, 10**9NEWLINE print(l, r)NEWLINE for _ in range(n-2-m):NEWLINE l += 1; r -= 1; print(l, r)NEWLINE r = lNEWLINE for _ in range(m+1):NEWLINE l, r = r+1, r+2NEWLINE print(l, r)NEWLINENEWLINE @staticmethodNEWLINE def d():NEWLINE mod = 998244353NEWLINE n, k, *a = map(int, sys.stdin.read().split())NEWLINE a = np.array(a)NEWLINE b = np.zeros((k+1, n), dtype=np.int64)NEWLINE b[0] = 1NEWLINE for i in range(k): b[i+1] = b[i]*a%modNEWLINE s = b.sum(axis=1) % modNEWLINE inv_2 = pow(2, mod-2, mod)NEWLINE c = Combinatorics.CombinationsMod(mod=mod)NEWLINE for x in range(1, k+1):NEWLINE l = np.arange(x+1)NEWLINE print(((c(x, l)*s[l]%mod*s[l][::-1]%mod).sum()%mod - pow(2,x,mod)*s[x])%mod*inv_2%mod)NEWLINENEWLINE @staticmethodNEWLINE def e():NEWLINE passNEWLINENEWLINE @staticmethodNEWLINE def f():NEWLINE passNEWLINENEWLINENEWLINE class ARC107:NEWLINE @staticmethodNEWLINE def a():NEWLINE a = np.array(sys.stdin.read().split(), dtype=np.int64)NEWLINE print(Algebra.cumprod((1+a)*a//2%MOD, mod=MOD)[2])NEWLINENEWLINE @staticmethodNEWLINE def b():NEWLINE n, k = map(int, sys.stdin.readline().split())NEWLINE def c(m): return np.minimum(m-1,2*n-m+1)NEWLINE x = np.arange(2, 2*n+1)NEWLINE 
print((c(x)*c(x-k)*((x-k>=2)&(x-k<=2*n))).sum())NEWLINENEWLINENEWLINE @staticmethodNEWLINE def c():NEWLINE n, k, *a = map(int, sys.stdin.read().split())NEWLINE a = np.array(a).reshape(n,n)NEWLINE fac, _ = Algebra.generate_fac_ifac(n=50, p=MOD)NEWLINE def count(a):NEWLINE uf = GeometryTopology.Graph(n); uf.init_dsu()NEWLINE for i, j in itertools.combinations(range(n),2):NEWLINE if (a[i]+a[j] <= k).all(): uf.unite(i,j)NEWLINE c = 1NEWLINE for g in uf.groups():NEWLINE if g: c *= fac[len(g)]; c %= MODNEWLINE return cNEWLINE print(count(a)*count(a.T)%MOD)NEWLINENEWLINE @staticmethodNEWLINE def d():NEWLINE n, k = map(int, sys.stdin.readline().split())NEWLINE @njit((i8,i8), cache=True)NEWLINE def solve(n, k):NEWLINE dp = np.zeros((n+1, 2*n+1), dtype=np.int64); dp[0,0] = 1NEWLINE for i in range(1, n+1):NEWLINE for j in range(i, 0, -1):NEWLINE dp[i,j] = dp[i-1,j-1] + dp[i,2*j]NEWLINE dp[i,j] %= MODNEWLINE return dp[-1,k]NEWLINE print(solve(n,k))NEWLINENEWLINE @staticmethodNEWLINE def e():NEWLINE passNEWLINENEWLINE @staticmethodNEWLINE def f():NEWLINE passNEWLINENEWLINENEWLINENEWLINE class ACL001:NEWLINE @staticmethodNEWLINE def a():NEWLINE n, *xy = map(int, sys.stdin.read().split())NEWLINE *xy, = zip(*[iter(xy)]*2)NEWLINE print(xy)NEWLINE passNEWLINENEWLINENEWLINENEWLINE class TDPC:NEWLINE @staticmethodNEWLINE def t():NEWLINE passNEWLINENEWLINENEWLINE class ChokudaiSpecialRun001:NEWLINE @staticmethodNEWLINE def j():NEWLINE n, *a = map(int, sys.stdin.read().split())NEWLINE bit = GeometryTopology.FenwickTree(n)NEWLINE c = 0NEWLINE for x in a:NEWLINE c += bit.sum(1,n) - bit.sum(1,x)NEWLINE bit.add(x,1)NEWLINE print(c)NEWLINENEWLINE class ALPC: # AtCoder Library Practice Contest\NEWLINE @staticmethodNEWLINE def a():NEWLINE n, q, *tuv = map(int, sys.stdin.read().split())NEWLINE uf = GeometryTopology.Graph(n); uf.init_dsu()NEWLINE for t, u, v in zip(*[iter(tuv)]*3):NEWLINE if t == 0: uf.unite(u,v)NEWLINE else: print(int(uf.same(u,v)))NEWLINENEWLINE @staticmethodNEWLINE def 
b():NEWLINE n, q = map(int, sys.stdin.readline().split())NEWLINE a = [int(x) for x in sys.stdin.readline().split()]NEWLINE bit = GeometryTopology.FenwickTree(n)NEWLINE for i in range(n): bit.add(i+1, a[i])NEWLINE for t, i, j in zip(*[map(int, sys.stdin.read().split())]*3):NEWLINE if t==0: bit.add(i+1,j)NEWLINE else: print(bit.sum(i+1,j))NEWLINENEWLINENEWLINE @staticmethodNEWLINE def g():NEWLINE n, m, *ab = map(int, sys.stdin.read().split())NEWLINE a, b = np.array(ab).reshape(m,2).TNEWLINE _, r = connected_components(csr_matrix(([1]*m, (a,b)), (n,n)), connection='strong')NEWLINE groups = [[] for _ in range(n)]NEWLINE for u in range(n): groups[r[u]].append(u)NEWLINE groups = [group for group in groups if group]NEWLINE print(len(groups))NEWLINE for group in groups[::-1]: print(len(group), *group)NEWLINENEWLINENEWLINE class MSolutions2020:NEWLINE @staticmethodNEWLINE def a():NEWLINE x = int(sys.stdin.readline().rstrip())NEWLINE x -= 400NEWLINE print(8-x//200)NEWLINENEWLINE @staticmethodNEWLINE def b():NEWLINE r, g, b, k = map(int, sys.stdin.read().split())NEWLINE while k and g <= r:NEWLINE g *= 2NEWLINE k -= 1NEWLINE while k and b <= g:NEWLINE b *= 2NEWLINE k -= 1NEWLINE print('Yes' if r < g < b else 'No')NEWLINENEWLINE @staticmethodNEWLINE def c():NEWLINE n, k, *a = map(int, sys.stdin.read().split())NEWLINE for i in range(k, n):NEWLINE print('Yes' if a[i] > a[i-k] else 'No')NEWLINENEWLINE @staticmethodNEWLINE def d():NEWLINE n, *a = map(int, sys.stdin.read().split())NEWLINE a += [-1]NEWLINE m = 1000NEWLINE s = 0NEWLINE for i in range(n):NEWLINE if a[i+1] == a[i]: continueNEWLINE elif a[i+1] > a[i]:NEWLINE cnt = m//a[i]NEWLINE m -= a[i]*cntNEWLINE s += cntNEWLINE else:NEWLINE m += a[i]*sNEWLINE s = 0NEWLINE print(m)NEWLINENEWLINENEWLINEclass Codeforces:NEWLINE class CR676div2:NEWLINE @staticmethodNEWLINE def a():NEWLINE t = int(sys.stdin.readline().rstrip())NEWLINE for _ in range(t):NEWLINE a, b = map(int, sys.stdin.readline().split())NEWLINE print(a^b)NEWLINENEWLINE 
@staticmethodNEWLINE def b():NEWLINE t = int(sys.stdin.readline().rstrip())NEWLINE for _ in range(t):NEWLINE n = int(sys.stdin.readline().rstrip())NEWLINE s = [list(sys.stdin.readline().rstrip()) for _ in range(n)]NEWLINE s[0][0] = s[-1][-1] = '0'NEWLINE for i in range(n):NEWLINE for j in range(n):NEWLINE s[i][j] = int(s[i][j])NEWLINENEWLINENEWLINE def can_goal(g, c=0):NEWLINE visited = [0] * nNEWLINE stack = [(0, 0)]NEWLINE visited[0] |= 1<<0NEWLINE while stack:NEWLINE y, x = stack.pop()NEWLINE for dy, dx in [(-1, 0), (0, -1), (1, 0), (0, 1)]:NEWLINE i, j = y+dy, x+dxNEWLINE if i<0 or i>=n or j<0 or j>=n: continueNEWLINE if i == j == n-1: return TrueNEWLINE if visited[i]>>j&1: continueNEWLINE visited[i] |= 1<<jNEWLINE if g[i][j] != c: continueNEWLINE stack.append((i, j))NEWLINE return FalseNEWLINENEWLINE if not (can_goal(s, 0) or can_goal(s, 1)):NEWLINE print(0)NEWLINE continueNEWLINENEWLINE flg = 0NEWLINE for i in range(n):NEWLINE for j in range(n):NEWLINE if i==j==0 or i==j==n-1: continueNEWLINE s[i][j] ^= 1NEWLINE if not (can_goal(s, 0) or can_goal(s, 1)):NEWLINE print(1)NEWLINE print(i+1, j+1)NEWLINE flg = 1NEWLINE breakNEWLINE s[i][j] ^= 1NEWLINE if flg: breakNEWLINE if flg: continueNEWLINENEWLINE print(2)NEWLINE if s[0][1] == s[1][0]:NEWLINE print(n, n-1)NEWLINE print(n-1, n)NEWLINE continueNEWLINENEWLINE if s[0][1] == s[-1][-2]:NEWLINE print(1, 2)NEWLINE print(n-1, n)NEWLINE else:NEWLINE print(1, 2)NEWLINE print(n, n-1)NEWLINENEWLINENEWLINENEWLINE @staticmethodNEWLINE def c():NEWLINE passNEWLINENEWLINEclass ProjectEuler:NEWLINE @staticmethodNEWLINE def p1():NEWLINE def f(n, x):NEWLINE return (x + n//x*x) * (n//x) // 2NEWLINE n = 1000NEWLINE ans = f(n-1, 3)+f(n-1, 5)-f(n-1, 15)NEWLINE print(ans)NEWLINENEWLINE @staticmethodNEWLINE def p2():NEWLINE fib = [1, 2]NEWLINE while fib[-1] < 4*10**6:NEWLINE fib.append(fib[-1]+fib[-2])NEWLINE print(sum(fib[1:-1:3]))NEWLINENEWLINE @staticmethodNEWLINE def p3():NEWLINE pn = NumberTheory.PrimeNumbers()NEWLINE res = 
pn.factorize(600851475143)NEWLINE print(max(res.keys()))NEWLINENEWLINE @staticmethodNEWLINE def p4():NEWLINE def is_palindrome(n):NEWLINE n = str(n)NEWLINE return n == n[::-1]NEWLINE cand = []NEWLINE for a in range(100, 1000):NEWLINE for b in range(a, 1000):NEWLINE n = a*bNEWLINE if is_palindrome(n): cand.append(n)NEWLINE print(max(cand))NEWLINENEWLINE @staticmethodNEWLINE def p5():NEWLINE pn = NumberTheory.PrimeNumbers()NEWLINE res = defaultdict(int)NEWLINE for i in range(1, 21):NEWLINE for p, c in pn.factorize(i).items():NEWLINE res[p] = max(res[p], c)NEWLINE ans = 1NEWLINE for p, c in res.items(): ans *= pow(p, c)NEWLINE print(ans)NEWLINENEWLINE @staticmethodNEWLINE def p6():NEWLINE a = np.arange(101)NEWLINE b = np.cumsum(a**2)NEWLINE a = a.cumsum()NEWLINE print(a[100]**2 - b[100])NEWLINENEWLINE @staticmethodNEWLINE def p7():NEWLINE nt = NumberTheory.PrimeNumbers()NEWLINE print(sorted(nt)[10000])NEWLINE @staticmethodNEWLINE def p8():NEWLINE n = '7316717653133062491922511967442657474235534919493496983520312774506326239578318016984801869478851843858615607891129494954595017379583319528532088055111254069874715852386305071569329096329522744304355766896648950445244523161731856403098711121722383113622298934233803081353362766142828064444866452387493035890729629049156044077239071381051585930796086670172427121883998797908792274921901699720888093776657273330010533678812202354218097512545405947522435258490771167055601360483958644670632441572215539753697817977846174064955149290862569321978468622482839722413756570560574902614079729686524145351004748216637048440319989000889524345065854122758866688116427171479924442928230863465674813919123162824586178664583591245665294765456828489128831426076900422421902267105562632111110937054421750694165896040807198403850962455444362981230987879927244284909188845801561660979191338754992005240636899125607176060588611646710940507754100225698315520005593572972571636269561882670428252483600823257530420752963450'NEWLINE n = [int(d) for d in 
list(n)]NEWLINE res = 0NEWLINE for i in range(988):NEWLINE x = 1NEWLINE for j in range(13):NEWLINE x *= n[i+j]NEWLINE res = max(res, x)NEWLINE print(res)NEWLINE @staticmethodNEWLINE def p9():NEWLINE for a in range(1, 997):NEWLINE for b in range(a, 998-a):NEWLINE c = 1000 - a - bNEWLINE if a**2 + b**2 == c**2:NEWLINE print(a*b*c)NEWLINE returnNEWLINE @staticmethodNEWLINE def p10():NEWLINE pn = NumberTheory.PrimeNumbers(2*10**6+1)NEWLINE print(sum(pn))NEWLINE @staticmethodNEWLINE def p11():NEWLINE grid = '08 02 22 97 38 15 00 40 00 75 04 05 07 78 52 12 50 77 91 08 49 49 99 40 17 81 18 57 60 87 17 40 98 43 69 48 04 56 62 00 81 49 31 73 55 79 14 29 93 71 40 67 53 88 30 03 49 13 36 65 52 70 95 23 04 60 11 42 69 24 68 56 01 32 56 71 37 02 36 91 22 31 16 71 51 67 63 89 41 92 36 54 22 40 40 28 66 33 13 80 24 47 32 60 99 03 45 02 44 75 33 53 78 36 84 20 35 17 12 50 32 98 81 28 64 23 67 10 26 38 40 67 59 54 70 66 18 38 64 70 67 26 20 68 02 62 12 20 95 63 94 39 63 08 40 91 66 49 94 21 24 55 58 05 66 73 99 26 97 17 78 78 96 83 14 88 34 89 63 72 21 36 23 09 75 00 76 44 20 45 35 14 00 61 33 97 34 31 33 95 78 17 53 28 22 75 31 67 15 94 03 80 04 62 16 14 09 53 56 92 16 39 05 42 96 35 31 47 55 58 88 24 00 17 54 24 36 29 85 57 86 56 00 48 35 71 89 07 05 44 44 37 44 60 21 58 51 54 17 58 19 80 81 68 05 94 47 69 28 73 92 13 86 52 17 77 04 89 55 40 04 52 08 83 97 35 99 16 07 97 57 32 16 26 26 79 33 27 98 66 88 36 68 87 57 62 20 72 03 46 33 67 46 55 12 32 63 93 53 69 04 42 16 73 38 25 39 11 24 94 72 18 08 46 29 32 40 62 76 36 20 69 36 41 72 30 23 88 34 62 99 69 82 67 59 85 74 04 36 16 20 73 35 29 78 31 90 01 74 31 49 71 48 86 81 16 23 57 05 54 01 70 54 71 83 51 54 69 16 92 33 48 61 43 52 01 89 19 67 48'NEWLINE print(grid)NEWLINENEWLINE passNEWLINENEWLINEclass Yukicoder:NEWLINE def __init__(self):NEWLINE passNEWLINENEWLINE def __call__(self):NEWLINE print(1)NEWLINENEWLINENEWLINEclass AOJ:NEWLINE @staticmethodNEWLINE def ALDS1_12_A(): # minimum spanning treeNEWLINE n, *a = map(int, 
sys.stdin.read().split())NEWLINE g = GeometryTopology.Graph(n)NEWLINE for i in range(n-1):NEWLINE for j in range(i+1, n):NEWLINE if a[i*n+j] == -1: continueNEWLINE g.add_edge(i,j, weight=a[i*n+j])NEWLINE g.add_edge(j,i, weight=a[i*n+j])NEWLINE _, d = g.kruskal()NEWLINE # _, d = g.prim()NEWLINE # _, d = g.boruvka()NEWLINE print(d)NEWLINENEWLINENEWLINE @staticmethodNEWLINE def GRL_3_C(): # strongly connected componentsNEWLINE n, m = map(int, sys.stdin.readline().split())NEWLINE _, r = connecteNEWLINE g = GeometryTopology.Graph(n)NEWLINE for _ in range(m): g.add_edge(*map(int, sys.stdin.readline().split()))NEWLINE r = g.scc()NEWLINE q, *uv = map(int, sys.stdin.read().split())NEWLINE for u, v in zip(*[iter(uv)] * 2): print(int(r[u]==r[v]))NEWLINENEWLINENEWLINE @staticmethodNEWLINE def DSL_2_B(): # Binary Indexed Tree (Fenwick Tree)NEWLINE n, q, *txy = map(int, sys.stdin.read().split())NEWLINE bit = GeometryTopology.FenwickTree(n)NEWLINE for t, x, y in zip(*[iter(txy)]*3):NEWLINE if t==0: bit.add(x, y)NEWLINE else: print(bit.sum(x,y))NEWLINENEWLINENEWLINEclass YosupoJudge:NEWLINENEWLINE @staticmethodNEWLINE def PointAddRangeSum():NEWLINE n, q = map(int, sys.stdin.readline().split())NEWLINE a = [int(x) for x in sys.stdin.readline().split()]NEWLINE bit = GeometryTopology.FenwickTree(n)NEWLINE for i in range(n): bit.add(i+1, a[i])NEWLINE for t, i, j in zip(*[map(int, sys.stdin.read().split())]*3):NEWLINE if t==0: bit.add(i+1,j)NEWLINE else: print(bit.sum(i+1,j))NEWLINENEWLINE @staticmethodNEWLINE def Directed_MST():NEWLINE n, m, s, *abc = map(int, sys.stdin.read().split())NEWLINE g = GeometryTopology.Graph(n)NEWLINE for a, b, c in zip(*[iter(abc)]*3):g.add_edge(a, b, weight=c)NEWLINE _, d, p = g.prim(src=s, return_parent=True)NEWLINE print(d)NEWLINE print(*p)NEWLINENEWLINE @staticmethodNEWLINE def Manhattan_MST():NEWLINE n, *xy = map(int, sys.stdin.read().split())NEWLINE g = GeometryTopology.Graph(n)NEWLINENEWLINENEWLINEif __name__ == '__main__':NEWLINE 
    # Entry point: run the solver for the problem currently being worked on.
    # Swap the call below to target a different problem/class.
    AtCoder.ABC110.d()
    # AtCoder.ARC107.e()
    pass
import sys
import os
import gzip
import numpy as np
import pickle
import scipy.io as sio


def to_one_hot(x, depth):
    """Return a ``(len(x), depth)`` int32 one-hot encoding of label vector *x*."""
    ret = np.zeros((x.shape[0], depth), dtype=np.int32)
    ret[np.arange(x.shape[0]), x] = 1
    return ret


def get_ssl_data(x_train, y_train, num_labeled, n_y, seed=None):
    """Sub-sample a class-balanced labelled set for semi-supervised learning.

    Shuffles ``(x_train, y_train)`` with *seed*, then keeps the first
    ``num_labeled // n_y`` examples of each of the *n_y* classes.  When
    *num_labeled* is None the inputs are returned unchanged.  Note *seed*
    must be convertible to int whenever *num_labeled* is given.
    """
    if num_labeled is None:
        return x_train, y_train

    seed = int(seed)
    rng_data = np.random.RandomState(seed)
    inds = rng_data.permutation(x_train.shape[0])
    x_train = x_train[inds]
    y_train = y_train[inds]

    x_labelled = []
    y_labelled = []
    for j in range(n_y):
        x_labelled.append(x_train[y_train == j][: num_labeled // n_y])
        y_labelled.append(y_train[y_train == j][: num_labeled // n_y])
    x_train = np.concatenate(x_labelled)
    y_train = np.concatenate(y_labelled)
    return x_train, y_train


def load_mnist_realval(
    path="/home/Data/mnist.pkl.gz",
    asimage=True,
    one_hot=False,
    validation=True,
    isTf=True,
    return_all=False,
    **kwargs
):
    """Load the real-valued MNIST pickle, downloading it first if missing.

    Returns ``(x_train, y_train, x_test, y_test)`` by default.  The
    ``return_all`` flag instead returns all six train/valid/test arrays and
    overrides ``validation``.  ``asimage`` reshapes the flat 784-vectors to
    ``(N, 28, 28, 1)``; ``isTf=False`` additionally transposes to NCHW.
    """
    if not os.path.isfile(path):
        data_dir = os.path.dirname(path)
        if not os.path.exists(data_dir):
            os.makedirs(data_dir)

        def download_dataset(url, _path):
            print("Downloading data from %s" % url)
            # BUG FIX: the previous code gated on ``sys.version_info > (2,)``
            # (which is also true on Python 2.x) and its fallback imported
            # ``urllib2.Request``, which has no ``urlretrieve`` -- the download
            # could never work on either interpreter.  The rest of this module
            # is Python-3 only, so use urllib.request directly.
            import urllib.request as request

            request.urlretrieve(url, _path)

        download_dataset(
            "http://www.iro.umontreal.ca/~lisa/deep/data/mnist" "/mnist.pkl.gz", path
        )

    with gzip.open(path, "rb") as f:
        # The pickle was produced by Python 2; latin1 lets Python 3 decode it.
        train_set, valid_set, test_set = pickle.load(f, encoding="latin1")
    x_train, y_train = train_set[0], train_set[1].astype("int32")
    x_valid, y_valid = valid_set[0], valid_set[1].astype("int32")
    x_test, y_test = test_set[0], test_set[1].astype("int32")

    n_y = y_train.max() + 1
    t_transform = (lambda x: to_one_hot(x, n_y)) if one_hot else (lambda x: x)
    y_train, y_valid = t_transform(y_train), t_transform(y_valid)
    y_test = t_transform(y_test)

    if asimage is True:
        x_train = x_train.reshape([-1, 28, 28, 1])
        x_valid = x_valid.reshape([-1, 28, 28, 1])
        x_test = x_test.reshape([-1, 28, 28, 1])
        # NHWC -> NCHW only makes sense once the data is image-shaped, so the
        # transpose stays nested under ``asimage``.
        if isTf is False:
            x_train = x_train.transpose([0, 3, 1, 2])
            x_valid = x_valid.transpose([0, 3, 1, 2])
            x_test = x_test.transpose([0, 3, 1, 2])

    if return_all is True:
        return x_train, y_train, x_valid, y_valid, x_test, y_test

    if validation is True:
        # Use the validation split as the reported test set.
        x_test = x_valid
        y_test = y_valid
    else:
        # Fold the validation split back into training data.
        x_train = np.concatenate((x_train, x_valid))
        y_train = np.concatenate((y_train, y_valid))

    return x_train, y_train, x_test, y_test


def load_cifar10(data_dir="/home/Data/cifar/", one_hot=False, isTf=True, **kwargs):
    """Load CIFAR-10 from the extracted python-version batches.

    Returns ``(x_train, y_train, x_test, y_test)`` with pixels scaled to
    [0, 1]; ``isTf=True`` yields NHWC, otherwise NCHW.
    """

    def file_name(ind):
        return os.path.join(data_dir, "cifar-10-batches-py/data_batch_" + str(ind))

    def unpickle_cifar_batch(file_):
        # Context manager instead of manual open/close; batches are py2
        # pickles, hence encoding="bytes".
        with open(file_, "rb") as fo:
            tmp_data = pickle.load(fo, encoding="bytes")
        x_ = tmp_data[b"data"].astype(np.float32)
        x_ = x_.reshape((10000, 3, 32, 32)) / 255.0
        y_ = np.array(tmp_data[b"labels"]).astype(np.float32)
        return {"x": x_, "y": y_}

    train_data = [unpickle_cifar_batch(file_name(i)) for i in range(1, 6)]
    x_train = np.concatenate([td["x"] for td in train_data])
    y_train = np.concatenate([td["y"] for td in train_data])
    y_train = y_train.astype("int32")

    test_data = unpickle_cifar_batch(
        os.path.join(data_dir, "cifar-10-batches-py/test_batch")
    )
    x_test = test_data["x"]
    y_test = test_data["y"].astype("int32")

    n_y = int(y_test.max() + 1)
    y_transform = (lambda x: to_one_hot(x, n_y)) if one_hot else (lambda x: x)

    if isTf is True:
        x_train = x_train.transpose([0, 2, 3, 1])
        x_test = x_test.transpose([0, 2, 3, 1])
    return x_train, y_transform(y_train), x_test, y_transform(y_test)


def load_cifar100(data_dir="/home/Data/cifar/", one_hot=False, isTf=True, **kwargs):
    """Load CIFAR-100 (fine labels) from the extracted python-version files.

    Same return convention as :func:`load_cifar10`.
    """

    def unpickle_cifar_batch(file_, num):
        # Normalized to Python-3-only unpickling, consistent with
        # load_cifar10 (the old py2 branch gated on a broken version check).
        with open(file_, "rb") as fo:
            tmp_data = pickle.load(fo, encoding="bytes")
        x_ = tmp_data[b"data"].astype(np.float32)
        x_ = x_.reshape((num, 3, 32, 32)) / 255.0
        y_ = np.array(tmp_data[b"fine_labels"]).astype(np.float32)
        return {"x": x_, "y": y_}

    train_data = unpickle_cifar_batch(
        os.path.join(data_dir, "cifar-100-python/train"), num=50000
    )
    x_train = train_data["x"]
    y_train = train_data["y"].astype("int32")

    test_data = unpickle_cifar_batch(
        os.path.join(data_dir, "cifar-100-python/test"), num=10000
    )
    x_test = test_data["x"]
    y_test = test_data["y"].astype("int32")

    n_y = int(y_test.max() + 1)
    y_transform = (lambda x: to_one_hot(x, n_y)) if one_hot else (lambda x: x)

    if isTf is True:
        x_train = x_train.transpose([0, 2, 3, 1])
        x_test = x_test.transpose([0, 2, 3, 1])
    return x_train, y_transform(y_train), x_test, y_transform(y_test)


def load_svhn(data_dir="/home/Data", one_hot=False, isTf=True, **kwargs):
    """Load SVHN from the cropped-digit ``.mat`` files under ``data_dir/svhn``.

    SVHN encodes digit '0' as class 10; it is remapped to 0 here.  Pixels are
    scaled to [0, 1]; ``isTf=True`` yields NHWC, otherwise NCHW.
    """
    data_dir = os.path.join(data_dir, "svhn")
    train_dat = sio.loadmat(os.path.join(data_dir, "train_32x32.mat"))
    train_x = train_dat["X"].astype("float32")
    train_y = train_dat["y"].flatten()
    train_y[train_y == 10] = 0  # remap label '10' (digit zero) to class 0
    train_x = train_x.transpose([3, 0, 1, 2])  # HWCN -> NHWC

    test_dat = sio.loadmat(os.path.join(data_dir, "test_32x32.mat"))
    test_x = test_dat["X"].astype("float32")
    test_y = test_dat["y"].flatten()
    test_y[test_y == 10] = 0
    test_x = test_x.transpose([3, 0, 1, 2])

    n_y = int(train_y.max() + 1)
    y_transform = (lambda x: to_one_hot(x, n_y)) if one_hot else (lambda x: x)

    if isTf is False:
        train_x = train_x.transpose([0, 3, 1, 2])
        test_x = test_x.transpose([0, 3, 1, 2])

    train_x, test_x = train_x / 255.0, test_x / 255.0
    return train_x, y_transform(train_y), test_x, y_transform(test_y)


def load_caltech101_img(
    path="./Data/", require_subset=("img", "seg"), num_train_test=(30, 20), seed=1234
):
    """Load Caltech101 data from an HDF5 file and split it per class.

    For each class the first ``num_train_test[1]`` shuffled samples become the
    test set and the next ``num_train_test[0]`` the training set.  Returns
    ``(results_train, label_train, results_test, label_test)`` where the
    results lists are parallel to *require_subset*.

    NOTE(review): the returned entries are live h5py dataset handles and the
    file object is never closed -- callers appear to rely on this; confirm
    before changing.
    """
    # Imported lazily: h5py is a heavy optional dependency only this loader
    # (and no other function in the module) needs.
    import h5py

    with open(os.path.join(path, "Caltech101_label2index.pkl"), "rb") as f:
        label_index_dict = pickle.load(f)

    results_list = [[] for _ in require_subset]
    label_list = []
    hf = h5py.File(os.path.join(path, "Caltech101.h5"), "r")
    for k in hf.keys():
        if k not in label_index_dict:
            continue
        label = label_index_dict[k]
        ind = 0
        # Samples of a class are stored as "<ind>_<subset>" datasets.
        while str(ind) in hf[k].keys():
            for require_ind, require_key in enumerate(require_subset):
                results_list[require_ind].append(hf[k][str(ind) + "_" + require_key])
            label_list.append(label)
            ind = ind + 1

    # Deterministically shuffle samples (and their labels) together.
    rng_data = np.random.RandomState(seed)
    inds = rng_data.permutation(len(label_list))
    new_results_list = [[] for _ in require_subset]
    new_label_list = []
    for ind in inds:
        for data_id, data_item in enumerate(results_list):
            new_results_list[data_id].append(data_item[ind])
        new_label_list.append(label_list[ind])

    results_list = new_results_list
    label_list = np.array(new_label_list)

    results_train, label_train = [[] for _ in require_subset], []
    results_test, label_test = [[] for _ in require_subset], []
    total_num = num_train_test[0] + num_train_test[1]

    # Per class: test split is taken first, then the training split.
    for label_ind in range(np.max(label_list) + 1):
        index_each = np.where(label_list == label_ind)[0]
        for count, ind_each in enumerate(index_each):
            if count < num_train_test[1]:
                for data_id, data_item in enumerate(results_list):
                    results_test[data_id].append(data_item[ind_each])
                label_test.append(label_ind)
            elif count < total_num:
                for data_id, data_item in enumerate(results_list):
                    results_train[data_id].append(data_item[ind_each])
                label_train.append(label_ind)
            else:
                break

    return results_train, label_train, results_test, label_test


def load_caltech101_path(path="./Data/", num_train_test=(30, 20), seed=1234):
    """Load Caltech101 image *paths* (not pixels) and split them per class.

    Same shuffling and per-class test-first/train-second split as
    :func:`load_caltech101_img`.  Returns
    ``(results_train, label_train, results_test, label_test)``.
    """
    with open(os.path.join(path, "Caltech101_imgpath.pkl"), "rb") as f:
        path_label_dict = pickle.load(f)

    imgpath_list = path_label_dict["img_path"]
    label_list = path_label_dict["label"]

    # Deterministically shuffle paths and labels together.
    rng_data = np.random.RandomState(seed)
    inds = rng_data.permutation(len(label_list))
    new_imgpath_list = []
    new_label_list = []
    for ind in inds:
        new_imgpath_list.append(imgpath_list[ind])
        new_label_list.append(label_list[ind])

    imgpath_list = new_imgpath_list
    label_list = np.array(new_label_list)

    results_train, label_train = [], []
    results_test, label_test = [], []
    total_num = num_train_test[0] + num_train_test[1]

    for label_ind in range(np.max(label_list) + 1):
        index_each = np.where(label_list == label_ind)[0]
        for count, ind_each in enumerate(index_each):
            if count < num_train_test[1]:
                results_test.append(imgpath_list[ind_each])
                label_test.append(label_ind)
            elif count < total_num:
                results_train.append(imgpath_list[ind_each])
                label_train.append(label_ind)
            else:
                break

    return results_train, label_train, results_test, label_test


def load_imagenet_path(path="./Data/", number_of_classes=1000):
    """Load ImageNet train/validation image paths for the first
    *number_of_classes* classes.

    Returns ``(train_img, train_label, test_img, test_label)`` where the
    "test" arrays come from the validation split.
    """
    with open(os.path.join(path, "ImageNet_imgpath.pkl"), "rb") as f:
        path_label_dict = pickle.load(f)

    train_img_path_label = path_label_dict["train"]
    valid_img_path_label = path_label_dict["validation"]

    def get_subset(img_path_label):
        # Collect every path whose label is below the class cut-off, grouped
        # by class in ascending label order.
        imgpath_list = img_path_label["img_path"]
        label_list = np.array(img_path_label["label"])

        img_list, lab_list = [], []
        for label_ind in range(number_of_classes):
            index_each = np.where(label_list == label_ind)[0]
            for ind_each in index_each:
                img_list.append(imgpath_list[ind_each])
                lab_list.append(label_ind)

        return img_list, lab_list

    train_img, train_label = get_subset(train_img_path_label)
    test_img, test_label = get_subset(valid_img_path_label)
    return train_img, train_label, test_img, test_label
# ElectriPy configuration constants.

# Window / render-loop settings (sizes presumably in pixels -- TODO confirm).
WINDOW_TITLE = "ElectriPy"
HEIGHT = 750
WIDTH = 750
RESIZABLE = True
FPS = 40  # frame-rate cap for the render loop

# Scale factors used when drawing force / electric-field vectors, and the
# minimum norms below which a vector is not worth drawing.
DEFAULT_FORCE_VECTOR_SCALE_FACTOR = 22e32
DEFAULT_EF_VECTOR_SCALE_FACTOR = 2e14
DEFAULT_EF_BRIGHTNESS = 105  # NOTE(review): appears to be a 0-255 channel value -- confirm
DEFAULT_SPACE_BETWEEN_EF_VECTORS = 20
MINIMUM_FORCE_VECTOR_NORM = 10
MINIMUM_ELECTRIC_FIELD_VECTOR_NORM = 15

# Keyboard bindings: action name -> key label.
KEYS = {
    "clear_screen": "r",
    "show_vector_components": "space",
    "show_electric_forces_vectors": "f",
    "show_electric_field_at_mouse_position": "m",
    "show_electric_field": "e",
    "increment_electric_field_brightness": "+",
    "decrement_electric_field_brightness": "-",
    "remove_last_charge_added": "z",
    "add_last_charge_removed": "y",
}

# Text settings:
CHARGES_SIGN_FONT = "Arial"
PROTON_SIGN_FONT_SIZE = 23
ELECTRON_SIGN_FONT_SIZE = 35
VECTOR_COMPONENTS_FONT = "Arial"
VECTOR_COMPONENTS_FONT_SIZE = 13
# coding: utf-8NEWLINENEWLINEimport copyNEWLINEimport jsonNEWLINEimport loggingNEWLINEimport osNEWLINEimport uuidNEWLINEfrom io import BytesIONEWLINEfrom unittest.mock import patchNEWLINENEWLINEfrom django.contrib.auth import get_user_modelNEWLINEfrom django.contrib.auth.models import PermissionNEWLINEfrom django.contrib.contenttypes.models import ContentTypeNEWLINEfrom django.test import override_settingsNEWLINEfrom django.urls import reverseNEWLINEfrom requests import codesNEWLINEfrom rest_framework.test import APITestCaseNEWLINENEWLINEfrom edd.rest.tests import EddApiTestCaseMixinNEWLINEfrom edd.utilities import JSONDecoderNEWLINEfrom main import models as edd_modelsNEWLINEfrom main.tests import factory as main_factoryNEWLINENEWLINEfrom . import factoryNEWLINEfrom .test_utils import CONTEXT_PATHNEWLINENEWLINElogger = logging.getLogger(__name__)NEWLINENEWLINE_TEST_FILES_DIR = os.path.join(NEWLINE os.path.dirname(os.path.abspath(__file__)), "files", "generic_import"NEWLINE)NEWLINENEWLINE_FBA_UPLOAD_PAYLOAD = {NEWLINE "category": 4, # OD600NEWLINE "file_format": 5, # genericNEWLINE "protocol": 3, # OD600NEWLINE "x_units": 2, # hoursNEWLINE "y_units": 1, # n/aNEWLINE "compartment": edd_models.Measurement.Compartment.UNKNOWN,NEWLINE "mime_type": "",NEWLINE "uuid": "69827386-d5f9-41b9-81d2-8469d735ed56",NEWLINE}NEWLINENEWLINENEWLINEdef load_permissions(model, *codenames):NEWLINE ct = ContentType.objects.get_for_model(model)NEWLINE return list(Permission.objects.filter(content_type=ct, codename__in=codenames))NEWLINENEWLINENEWLINE# use example files as the basis for DB records created by the fixtureNEWLINE@override_settings(MEDIA_ROOT=_TEST_FILES_DIR)NEWLINEclass ImportPatchTests(EddApiTestCaseMixin, APITestCase):NEWLINE fixtures = ["edd_file_importer/import_models"]NEWLINENEWLINE @classmethodNEWLINE def setUpTestData(cls):NEWLINE super().setUpTestData()NEWLINENEWLINE # create a test user and give it permission to write to the studyNEWLINE User = 
get_user_model()
        cls.write_user = User.objects.create(username="study.writer.user")
        cls.unprivileged_user = User.objects.create(username="unprivileged_user")

        cls.user_write_study = main_factory.StudyFactory(name="User-writeable study")
        permissions = cls.user_write_study.userpermission_set
        permissions.update_or_create(
            permission_type=edd_models.UserPermission.WRITE, user=cls.write_user
        )
        # detail URL for one import; pk=15 must match the import row in the fixture
        cls.url = reverse(
            "edd.rest:study-imports-detail",
            kwargs={"study_pk": cls.user_write_study.pk, "pk": 15},
        )

    def test_modify_privileges(self):
        # TODO: eventually add more detail to permissions checks here. Requires a lot more
        # complexity in the fixture, and we should be covered by more rigorous checks on
        # uploads

        # send the submit request to actually perform the import
        # (unprivileged users get 404, not 403, so study existence isn't leaked)
        self.client.force_login(ImportPatchTests.unprivileged_user)
        response = self.client.patch(
            ImportPatchTests.url, data={"status": "Submitted"}, format="json"
        )
        self.assertEqual(response.status_code, codes.not_found)

    def test_final_submit(self):
        """
        Does a simple test that submits a "Ready" import defined in the fixture
        """
        # load expected Redis context data from file
        with factory.load_test_file(CONTEXT_PATH) as file:
            context_str = file.read()

        # mock the notification broker
        with patch("edd_file_importer.tasks.RedisBroker") as MockNotify:
            notify = MockNotify.return_value

            # mock the import broker
            with patch("edd_file_importer.rest.views.ImportBroker") as MockBroker:
                broker = MockBroker.return_value
                broker.load_context.return_value = context_str

                # mock the method that determines whether Celery code is called synchronously or
                # asynchronously.  TODO: this is a stopgap for replacing the legacy import task,
                # after which we can just mock the backend task...ATM we're chaining together other
                # tasks that complicate the mocking

                # mock the method that executes the final celery chain to performs the import
                with patch("celery.chain.delay") as submit_import:

                    # send the request to actually submit the import
                    self.client.force_login(self.write_user)
                    response = self.client.patch(
                        ImportPatchTests.url,
                        data={"status": "Submitted"},
                        format="json",
                    )

                    # 202 Accepted: the submit is asynchronous
                    self.assertEqual(response.status_code, codes.accepted)

                    # test that the task was called
                    # UUID below is the import's UUID from the fixture
                    import_uuid = uuid.UUID("f464cca6-7370-4526-9718-be3ea55fea42")
                    submit_import.assert_called_once()

                    notify_msg = (
                        'Your import for file "FBA-OD-generic.xlsx" is submitted'
                    )
                    notify.notify.assert_called_once_with(
                        notify_msg,
                        tags=["import-status-update"],
                        payload={"status": "Submitted", "pk": 15, "uuid": import_uuid},
                    )
                    # submitting an already-cached import must not re-write pages
                    broker.add_page.assert_not_called()


class ImportUploadTests(EddApiTestCaseMixin, APITestCase):
    """
    Sets of tests to exercise the import upload step
    """

    fixtures = ["edd/rest/study_permissions"]

    @classmethod
    def setUpTestData(cls):
        super().setUpTestData()

        # get models from the fixture for studies with varying permission levels
        User = get_user_model()
        cls.superuser = User.objects.get(username="superuser")
        cls.staffuser = User.objects.get(username="staff.user")
        # not doing this in fixture because it requires knowing the IDs, which can vary per deploy
        cls.staffuser.user_permissions.add(
            *load_permissions(
                edd_models.Study, "add_study", "change_study", "delete_study"
            )
        )
        cls.unprivileged_user = User.objects.get(username="unprivileged_user")
        cls.readonly_user = User.objects.get(username="study.reader.user")
        cls.write_user = User.objects.get(username="study.writer.user")
        cls.write_group_user = User.objects.get(username="study.writer.group.user")

        # create another study with write permissions by only a single user
        cls.user_write_study = main_factory.StudyFactory(name="User-writeable study")
        permissions = cls.user_write_study.userpermission_set
        permissions.update_or_create(
            permission_type=edd_models.UserPermission.WRITE, user=cls.write_user
        )

    def setUp(self):
        super().setUp()

    def _upload_import_file(
        self,
        study_pk,
        file_path,
        form_data,
        user,
        exp_status=codes.accepted,
        initial_upload=True,
    ):
        """
        Helper that POSTs a file to the study-imports-list endpoint as *user*
        (or anonymously when user is None), asserts the response status, and
        returns the parsed JSON response body.
        """
        upload = self._build_file_upload(file_path)

        if user:
            self.client.force_login(user)
        else:
            self.client.logout()

        # mock the celery task so we're testing just the view
        with patch("edd_file_importer.tasks.process_import_file.delay") as mock_task:

            # mock the cache so we can test writes to it
            with patch("edd_file_importer.tasks.RedisBroker") as MockNotify:
                notify = MockNotify.return_value
                url = reverse(
                    "edd.rest:study-imports-list", kwargs={"study_pk": study_pk}
                )
                response = self.client.post(
                    url, data={"file": upload, **form_data}, format="multipart"
                )

                # test the results of the synchronous upload request
                self.assertEqual(response.status_code, exp_status)
                response_json = json.loads(response.content, cls=JSONDecoder)

                # if upload was accepted, test that the file processing task was called as
                # expected
                if response.status_code == codes.accepted:
                    self.assertEqual(response_json["uuid"], form_data["uuid"])
                    import_pk = response_json["pk"]
                    requested_status = form_data.get("status", None)
                    mock_task.assert_called_with(
                        import_pk,
                        user.pk,
                        requested_status,
                        initial_upload=initial_upload,
                    )
                else:
                    # rejected uploads must neither schedule work nor notify
                    mock_task.assert_not_called()
                    notify.notify.assert_not_called()
                return response_json

    def _build_file_upload(self, file_path):
        """Read *file_path* into an in-memory upload object with xlsx metadata."""
        with open(file_path, "rb") as fp:
            upload = BytesIO(fp.read())
            upload.name = os.path.basename(file_path)  # get file name from path
            upload.content_type = (
                "application/vnd.openxmlformats-officedocument.spreadsheetml.sheet"
            )
            return upload

    def test_upload_failure(self):
        """
        Tests that disallowed users aren't able to create an import on others' studies
        """
        file_path = factory.test_file_path("generic_import", "FBA-OD-generic.xlsx")
        study_pk = self.user_write_study.pk

        # use an unprivileged account to upload a file (should fail)
        disallowed_users = {
            None: study_pk,
            ImportUploadTests.unprivileged_user: study_pk,
            ImportUploadTests.readonly_user: study_pk,
            ImportUploadTests.staffuser: study_pk,
        }
        for user, study_pk in disallowed_users.items():
            # anonymous requests get 403; authenticated-but-unauthorized get 404
            exp_status = codes.not_found if user else codes.forbidden
            self._upload_import_file(
                study_pk, file_path, _FBA_UPLOAD_PAYLOAD, user, exp_status
            )

    def test_upload_success(self):
        """
        Tests that allowed users are able to create an import on studies they have access to
        """
        file_path = factory.test_file_path("generic_import", "FBA-OD-generic.xlsx")
        allowed_users = {
            ImportUploadTests.write_group_user: 22,  # group write study
            ImportUploadTests.write_user: ImportUploadTests.user_write_study.pk,
            ImportUploadTests.superuser: ImportUploadTests.user_write_study.pk,
            ImportUploadTests.unprivileged_user: 21,  # everyone write study
        }

        for user, study_pk in allowed_users.items():
            # create a new UUID for each import so they don't conflict
            payload = copy.copy(_FBA_UPLOAD_PAYLOAD)
            payload["uuid"] = str(uuid.uuid4())

            self._upload_import_file(study_pk, file_path, payload, user, codes.accepted)

    def test_categories(self):
        """
        Tests the categories returned by the rest back end
        """
        url = reverse("edd.rest:import_categories-list")
        self.client.force_login(ImportUploadTests.unprivileged_user)
        response = self.client.get(url, data={"ordering": "display_order"})
        self.assertEqual(response.status_code, codes.ok)
        actual = json.loads(response.content)
        with factory.load_test_file("import_categories.json") as file:
            expected = json.loads(file.read())
        self.assertEqual(expected, actual)
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

import cv2
from IPython import display
from PIL import Image


def cv2_imshow(a):
    """A replacement for cv2.imshow() for use in Jupyter notebooks.

    Args:
      a : np.ndarray. shape (N, M) or (N, M, 1) is an NxM grayscale image. shape
        (N, M, 3) is an NxM BGR color image. shape (N, M, 4) is an NxM BGRA color
        image.
    """
    # PIL expects uint8 pixel data; clamp before the lossy cast
    a = a.clip(0, 255).astype('uint8')
    # cv2 stores colors as BGR; convert to RGB
    if a.ndim == 3:
        if a.shape[2] == 4:
            a = cv2.cvtColor(a, cv2.COLOR_BGRA2RGBA)
        elif a.shape[2] == 1:
            # (N, M, 1) grayscale: drop the channel axis instead of converting;
            # cv2.COLOR_BGR2RGB rejects single-channel input, and
            # Image.fromarray needs (N, M) for a grayscale image.
            a = a[:, :, 0]
        else:
            a = cv2.cvtColor(a, cv2.COLOR_BGR2RGB)
    display.display(Image.fromarray(a))
# BGP SP Topology APIs
# Author: Naveena Suvarna (naveen.suvarna@broadcom.com)

import copy

from spytest import st, utils, putils
from spytest.dicts import SpyTestDict
import apis.routing.ip as ipapi
import apis.routing.bgp as bgpapi
import apis.system.interface as ifapi
from spytest.tgen.tg import tgen_obj_dict
import BGP.bgplib as bgplib

# Module-global topology state shared by every BGPSP helper:
# sp_topo holds the physical/IP topology, bgp_topo the BGP session setup.
sp_topo = SpyTestDict()
bgp_topo = SpyTestDict()


class BGPSP:


    @staticmethod
    def bgp_sp_topology_data_present():
        """Return True when the topology has at least one DUT recorded."""
        return bool(sp_topo['dut_list'])


    @staticmethod
    def bgp_sp_dut_present(dut):
        """Return True when *dut* is a known DUT or TG node in the topology."""
        if dut in sp_topo['dut_list']:
            return True
        return dut in sp_topo['tg_list']


    @staticmethod
    def bgp_sp_dut_list_present(dut_name_list=None):
        """Return True when every name in *dut_name_list* is a topology DUT.

        An empty (or None) list yields False.
        """
        # default changed from a shared mutable [] to None; behavior identical
        if not dut_name_list:
            return False
        return all(dut_name in sp_topo['dut_list'] for dut_name in dut_name_list)


    @staticmethod
    def bgp_sp_get_dut_count():
        """Return the number of DUT nodes (TG nodes excluded)."""
        return len(sp_topo['dut_list'])


    @staticmethod
    def bgp_sp_get_dut_list():
        """Return a deep copy of the DUT name list."""
        return copy.deepcopy(sp_topo['dut_list'])


    @staticmethod
    def bgp_sp_get_dut_from_device(device_name):
        """Map a physical device name to its topology DUT/TG name ('' if unknown)."""
        for dut in sp_topo['dut_list']:
            if device_name == sp_topo[dut]['device']:
                st.log("BGP SP - DUT device {} is dut {}".format(device_name, dut))
                return dut
        for dut in sp_topo['tg_list']:
            if device_name == sp_topo[dut]['device']:
                st.log("BGP SP - TG device {} is dut {}".format(device_name, dut))
                return dut
        st.log("BGP SP - device {} not in dut list".format(device_name))
        return ""
@staticmethodNEWLINE def bgp_sp_get_dut_device(dut):NEWLINENEWLINE if dut in sp_topo['dut_list'] :NEWLINE return sp_topo[dut]['device']NEWLINE return ''NEWLINENEWLINENEWLINE @staticmethodNEWLINE def bgp_sp_get_tg_list():NEWLINE return copy.deepcopy(sp_topo['tg_list'])NEWLINENEWLINENEWLINE @staticmethodNEWLINE def bgp_sp_dut_is_tg(dut):NEWLINENEWLINE if dut in sp_topo['tg_list'] :NEWLINE if dut in sp_topo.keys() :NEWLINE if sp_topo[dut]['type'] == 'TG' :NEWLINE return TrueNEWLINE return FalseNEWLINENEWLINENEWLINE @staticmethodNEWLINE def bgp_sp_valid_link_type(link_type):NEWLINENEWLINE if link_type == "ETH" :NEWLINE return TrueNEWLINE if link_type == "LBK" :NEWLINE return TrueNEWLINE return FalseNEWLINENEWLINENEWLINE @staticmethodNEWLINE def bgp_sp_dut_link_present(dut, link_name):NEWLINENEWLINE if not BGPSP.bgp_sp_dut_present(dut) :NEWLINE st.log("BGP SP - Link dut {} not in dut list".format(dut))NEWLINE return FalseNEWLINENEWLINE if link_name not in sp_topo[dut]['intf'].keys():NEWLINE return FalseNEWLINENEWLINE return TrueNEWLINENEWLINENEWLINE @staticmethodNEWLINE def bgp_sp_link_present(link_name):NEWLINENEWLINE if not link_name or link_name == '' :NEWLINE return FalseNEWLINE for dut in BGPSP.bgp_sp_get_dut_list():NEWLINE if link_name in sp_topo[dut]['intf'].keys():NEWLINE return TrueNEWLINE for dut in BGPSP.bgp_sp_get_tg_list():NEWLINE if link_name in sp_topo[dut]['intf'].keys():NEWLINE return TrueNEWLINE return FalseNEWLINENEWLINENEWLINE """ UNUSED AND USES UNDEFINED VARIABLENEWLINE @staticmethodNEWLINE def bgp_sp_link_list_present(link_name_list = []):NEWLINENEWLINE if not link_name_list or len(link_name_list) == 0 :NEWLINE return FalseNEWLINENEWLINE topo_links = sp_topo[dut]['intf'].keys()NEWLINENEWLINE for link_name in link_name_list:NEWLINE if link_name not in topo_links :NEWLINE return FalseNEWLINE return TrueNEWLINE """NEWLINENEWLINENEWLINE @staticmethodNEWLINE def bgp_sp_dut_get_all_links(dut):NEWLINENEWLINE if not BGPSP.bgp_sp_dut_present(dut):NEWLINE 
return []NEWLINENEWLINE link_name_list = []NEWLINE for link_name, link_data in sp_topo[dut]['intf'].items():NEWLINE if link_data['type'] == 'LBK' :NEWLINE continueNEWLINE link_name_list.append(link_name)NEWLINENEWLINE return copy.deepcopy(link_name_list)NEWLINENEWLINENEWLINE @staticmethodNEWLINE def bgp_sp_get_link_dut_interface(dut, link_name):NEWLINENEWLINE if BGPSP.bgp_sp_dut_link_present(dut, link_name):NEWLINE if_data = sp_topo[dut]['intf'][link_name]NEWLINE if 'if' in if_data.keys():NEWLINE return if_data['if']NEWLINENEWLINE return ''NEWLINENEWLINENEWLINE @staticmethodNEWLINE def bgp_sp_dut_link_connected(dut, link_name):NEWLINENEWLINE if BGPSP.bgp_sp_dut_link_present(dut, link_name):NEWLINE if_data = sp_topo[dut]['intf'][link_name]NEWLINE if 'rmt_dut' in if_data.keys():NEWLINE if 'rmt_link' in if_data.keys():NEWLINE return TrueNEWLINENEWLINE return FalseNEWLINENEWLINENEWLINE @staticmethodNEWLINE def bgp_sp_dut_get_remote_dut(dut, link_name):NEWLINENEWLINE if BGPSP.bgp_sp_dut_link_present(dut, link_name):NEWLINE if_data = sp_topo[dut]['intf'][link_name]NEWLINE if 'rmt_dut' in if_data.keys():NEWLINE return sp_topo[dut]['intf'][link_name]['rmt_dut']NEWLINENEWLINE return ''NEWLINENEWLINENEWLINE @staticmethodNEWLINE def bgp_sp_dut_get_remote_link(dut, link_name):NEWLINENEWLINE if BGPSP.bgp_sp_dut_link_present(dut, link_name):NEWLINE if_data = sp_topo[dut]['intf'][link_name]NEWLINE if 'rmt_dut' in if_data.keys():NEWLINE if 'rmt_link' in if_data.keys():NEWLINE return sp_topo[dut]['intf'][link_name]['rmt_link']NEWLINENEWLINE return ''NEWLINENEWLINE @staticmethodNEWLINE def bgp_sp_is_tg_connected_link(dut, link_name):NEWLINENEWLINE rmt_dut = BGPSP.bgp_sp_dut_get_remote_dut(dut, link_name)NEWLINE if rmt_dut == '' :NEWLINE return FalseNEWLINENEWLINE if BGPSP.bgp_sp_dut_is_tg(rmt_dut):NEWLINE return TrueNEWLINENEWLINE return FalseNEWLINENEWLINENEWLINE @staticmethodNEWLINE def bgp_sp_dut_get_tg_connected_links(dut):NEWLINENEWLINE if not 
BGPSP.bgp_sp_dut_present(dut):NEWLINE return []NEWLINENEWLINE link_name_list = []NEWLINE for link_name, link_data in sp_topo[dut]['intf'].items():NEWLINE if 'rmt_dut' in link_data.keys():NEWLINE rmt_dut = link_data['rmt_dut']NEWLINE if BGPSP.bgp_sp_dut_is_tg(rmt_dut):NEWLINE link_name_list.append(link_name)NEWLINENEWLINE return link_name_listNEWLINENEWLINENEWLINE @staticmethodNEWLINE def bgp_sp_dut_get_tg_connected_link_data(dut, link_name):NEWLINENEWLINE if not BGPSP.bgp_sp_is_tg_connected_link(dut, link_name):NEWLINE return {}NEWLINENEWLINE link_data = sp_topo[dut]['intf'][link_name]NEWLINE rmt_dut = link_data['rmt_dut']NEWLINE if BGPSP.bgp_sp_dut_is_tg(rmt_dut):NEWLINE return copy.deepcopy(link_data)NEWLINENEWLINE return {}NEWLINENEWLINENEWLINE @staticmethodNEWLINE def bgp_sp_dut_get_connected_first_link(from_dut, to_dut):NEWLINENEWLINE if not BGPSP.bgp_sp_dut_present(from_dut):NEWLINE return ''NEWLINENEWLINE if not BGPSP.bgp_sp_dut_present(to_dut):NEWLINE return ''NEWLINENEWLINE for link_name, link_data in sp_topo[from_dut]['intf'].items():NEWLINE if 'rmt_dut' in link_data.keys():NEWLINE if link_data['rmt_dut'] == to_dut :NEWLINE if 'rmt_link' in link_data.keys():NEWLINE return link_nameNEWLINENEWLINE return ''NEWLINENEWLINENEWLINE @staticmethodNEWLINE def bgp_sp_dut_get_connected_links(from_dut, to_dut):NEWLINENEWLINE if not BGPSP.bgp_sp_dut_present(from_dut):NEWLINE return []NEWLINENEWLINE if not BGPSP.bgp_sp_dut_present(to_dut):NEWLINE return []NEWLINENEWLINE link_name_list = []NEWLINE for link_name, link_data in sp_topo[from_dut]['intf'].items():NEWLINE if 'rmt_dut' in link_data.keys():NEWLINE if link_data['rmt_dut'] == to_dut :NEWLINE if 'rmt_link' in link_data.keys():NEWLINE link_name_list.append(link_name)NEWLINENEWLINE return link_name_listNEWLINENEWLINENEWLINE @staticmethodNEWLINE def bgp_sp_dut_link_connected_to_each_other(from_dut, from_link, to_dut, to_link):NEWLINENEWLINE if not BGPSP.bgp_sp_dut_link_connected(from_dut, from_link):NEWLINE return 
FalseNEWLINENEWLINE if not BGPSP.bgp_sp_dut_link_connected(to_dut, to_link):NEWLINE return FalseNEWLINENEWLINE from_if_info = sp_topo[from_dut]['intf'][from_link]NEWLINE to_if_info = sp_topo[to_dut]['intf'][to_link]NEWLINENEWLINE if from_if_info['rmt_dut'] != to_dut:NEWLINE return FalseNEWLINE if to_if_info['rmt_dut'] != from_dut:NEWLINE return FalseNEWLINE if from_if_info['rmt_link'] != to_link:NEWLINE return FalseNEWLINE if to_if_info['rmt_link'] == from_link :NEWLINE return FalseNEWLINENEWLINE return TrueNEWLINENEWLINENEWLINE @staticmethodNEWLINE def bgp_sp_get_unused_dut_interface(dut):NEWLINENEWLINE if not BGPSP.bgp_sp_dut_present(dut):NEWLINE st.log("BGP SP - not present in {}".format(dut))NEWLINE return ''NEWLINENEWLINE dut_if_list = []NEWLINE for _, link_data in sp_topo[dut]['intf'].items():NEWLINE if 'if' in link_data.keys():NEWLINE dut_if_list.append(link_data['if'])NEWLINENEWLINE if_idx = 80NEWLINE while if_idx < 100:NEWLINE if_name = "Ethernet{}".format(if_idx)NEWLINE if if_name not in dut_if_list :NEWLINE st.log("BGP SP - Found unused interface {} in dut {}".format(if_name, dut))NEWLINE return copy.deepcopy(if_name)NEWLINE if_idx += 4NEWLINENEWLINE st.log("BGP SP - No unused interfaces in {}".format(dut))NEWLINE return ''NEWLINENEWLINENEWLINE @staticmethodNEWLINE def bgp_sp_addr_family_valid(addr_family):NEWLINENEWLINE if addr_family != 'ipv4' and addr_family != 'ipv6' :NEWLINE st.log("BGP SP - Invalid address family {}".format(addr_family))NEWLINE return FalseNEWLINE return TrueNEWLINENEWLINENEWLINE @staticmethodNEWLINE def bgp_sp_get_address_family_list(addr_family):NEWLINENEWLINE addr_family_list = []NEWLINE if addr_family == 'ipv4' or addr_family == 'all':NEWLINE addr_family_list.append('ipv4')NEWLINE if addr_family == 'ipv6' or addr_family == 'all':NEWLINE addr_family_list.append('ipv6')NEWLINE return addr_family_listNEWLINENEWLINENEWLINE @staticmethodNEWLINE def bgp_sp_ip_prefix_to_route_prefix(prefix, addr_family):NEWLINENEWLINE route_prefix = 
prefixNEWLINENEWLINE if not BGPSP.bgp_sp_addr_family_valid(addr_family) :NEWLINE st.log("BGP SP - Invalid address family {}".format(addr_family))NEWLINE return route_prefixNEWLINENEWLINE if addr_family == 'ipv6' :NEWLINE temp_prefix = prefix.partition(":0/")NEWLINE if temp_prefix and len(temp_prefix) == 3 and temp_prefix[1] == ":0/" :NEWLINE route_prefix = "{}:/{}".format(temp_prefix[0], temp_prefix[2])NEWLINENEWLINE return route_prefixNEWLINENEWLINENEWLINE @staticmethodNEWLINE def bgp_sp_ip_prefix_list_to_route_prefix_list(prefix_list, addr_family):NEWLINENEWLINE route_prefix_list = []NEWLINENEWLINE if not BGPSP.bgp_sp_addr_family_valid(addr_family) :NEWLINE st.log("BGP SP - Invalid address family {}".format(addr_family))NEWLINE return route_prefix_listNEWLINENEWLINE for prefix in prefix_list :NEWLINE route_prefix = BGPSP.bgp_sp_ip_prefix_to_route_prefix(prefix, addr_family)NEWLINE if route_prefix != '':NEWLINE route_prefix_list.append(route_prefix)NEWLINENEWLINE #st.log("BGP SP - route_prefix list {}".format(route_prefix_list))NEWLINE return copy.deepcopy(route_prefix_list)NEWLINENEWLINENEWLINE @staticmethodNEWLINE def bgp_sp_dut_ip_link_present(dut, link_name, addr_family):NEWLINENEWLINE if not BGPSP.bgp_sp_addr_family_valid(addr_family) :NEWLINE st.log("BGP SP - Invalid address family {}".format(addr_family))NEWLINE return FalseNEWLINENEWLINE if BGPSP.bgp_sp_dut_link_present(dut, link_name):NEWLINE if link_name in sp_topo[dut][addr_family]['link'].keys():NEWLINE return TrueNEWLINENEWLINE return FalseNEWLINENEWLINENEWLINE @staticmethodNEWLINE def bgp_sp_dut_get_ip_link(dut, link_name, addr_family):NEWLINENEWLINE if not BGPSP.bgp_sp_addr_family_valid(addr_family) :NEWLINE st.log("BGP SP - Invalid address family {}".format(addr_family))NEWLINE return {}NEWLINENEWLINE if BGPSP.bgp_sp_dut_link_present(dut, link_name):NEWLINE if link_name in sp_topo[dut][addr_family]['link'].keys():NEWLINE ip_data = sp_topo[dut][addr_family]['link'][link_name]NEWLINE return 
copy.deepcopy(ip_data)NEWLINENEWLINE return {}NEWLINENEWLINENEWLINE @staticmethodNEWLINE def bgp_sp_dut_link_has_ip(dut, link_name, addr_family):NEWLINENEWLINE if not BGPSP.bgp_sp_addr_family_valid(addr_family) :NEWLINE st.log("BGP SP - Invalid address family {}".format(addr_family))NEWLINE return FalseNEWLINENEWLINE if BGPSP.bgp_sp_dut_link_present(dut, link_name):NEWLINE if link_name in sp_topo[dut][addr_family]['link'].keys():NEWLINE ip_data = sp_topo[dut][addr_family]['link'][link_name]NEWLINE if 'ip' in ip_data.keys():NEWLINE return TrueNEWLINENEWLINE st.log("BGP SP - {} {} doesnot have ip address".format(dut, link_name))NEWLINE return FalseNEWLINENEWLINENEWLINE @staticmethodNEWLINE def bgp_sp_dut_get_link_local_ip(dut, link_name, addr_family):NEWLINENEWLINE if not BGPSP.bgp_sp_addr_family_valid(addr_family) :NEWLINE st.log("BGP SP - Invalid address family {}".format(addr_family))NEWLINE return FalseNEWLINENEWLINE st.log("BGP SP - Find local ip {} {} {}".format(dut, link_name, addr_family))NEWLINE if BGPSP.bgp_sp_dut_link_present(dut, link_name):NEWLINE if link_name in sp_topo[dut][addr_family]['link'].keys():NEWLINE ip_data = sp_topo[dut][addr_family]['link'][link_name]NEWLINE if 'ip' in ip_data.keys():NEWLINE return ip_data['ip']NEWLINENEWLINE st.log("BGP SP - {} {} doesnot have local ip address".format(dut, link_name))NEWLINE return ""NEWLINENEWLINENEWLINE @staticmethodNEWLINE def bgp_sp_dut_get_link_remote_ip(dut, link_name, addr_family):NEWLINENEWLINE if not BGPSP.bgp_sp_addr_family_valid(addr_family) :NEWLINE st.log("BGP SP - Invalid address family {}".format(addr_family))NEWLINE return FalseNEWLINENEWLINE if BGPSP.bgp_sp_dut_link_present(dut, link_name):NEWLINE if link_name in sp_topo[dut][addr_family]['link'].keys():NEWLINE ip_data = sp_topo[dut][addr_family]['link'][link_name]NEWLINE if 'rmt_ip' in ip_data.keys():NEWLINE return ip_data['rmt_ip']NEWLINENEWLINE st.log("BGP SP - {} {} doesnot have local remote address".format(dut, link_name))NEWLINE 
return ""NEWLINENEWLINENEWLINE @staticmethodNEWLINE def bgp_sp_get_dut_loopback_ip(dut, lpbk_num, addr_family):NEWLINENEWLINE if not BGPSP.bgp_sp_addr_family_valid(addr_family) :NEWLINE st.log("BGP SP - Invalid address family {}".format(addr_family))NEWLINE return ""NEWLINENEWLINE link_name = "{}L{}".format(dut, lpbk_num)NEWLINENEWLINE if not BGPSP.bgp_sp_dut_link_present(dut, link_name):NEWLINE st.log("BGP SP - Link {} not in intf list".format(link_name))NEWLINE return ''NEWLINENEWLINE if link_name in sp_topo[dut][addr_family]['link'].keys():NEWLINE ip_data = sp_topo[dut][addr_family]['link'][link_name]NEWLINE if 'ip' not in ip_data.keys():NEWLINE st.log("BGP SP - {} doesnt have ip address".format(link_name))NEWLINE return ''NEWLINENEWLINE return ip_data['ip']NEWLINENEWLINE return ''NEWLINENEWLINENEWLINE @staticmethodNEWLINE def bgp_sp_get_dut_loopback_ip_list(dut, addr_family):NEWLINENEWLINE if not BGPSP.bgp_sp_addr_family_valid(addr_family) :NEWLINE return []NEWLINENEWLINE if not BGPSP.bgp_sp_dut_present(dut):NEWLINE st.log("BGP SP - Dut {} not present".format(dut))NEWLINE return []NEWLINENEWLINE lpbk_ip_list = []NEWLINE for _, ip_data in sp_topo[dut][addr_family]['link'].items():NEWLINE if ip_data['type'] == 'LBK' :NEWLINE if 'ip' in ip_data.keys():NEWLINE lpbk_ip_list.append(ip_data['ip'])NEWLINENEWLINE return copy.deepcopy(lpbk_ip_list)NEWLINENEWLINENEWLINE @staticmethodNEWLINE def bgp_sp_get_loopback_ip_in_dut_list(dut_list=[], addr_family='ipv4'):NEWLINENEWLINE if not BGPSP.bgp_sp_addr_family_valid(addr_family) :NEWLINE return []NEWLINENEWLINE lpbk_ip_list = []NEWLINENEWLINE for dut in dut_list:NEWLINE if not BGPSP.bgp_sp_dut_present(dut):NEWLINE continueNEWLINENEWLINE for _, ip_data in sp_topo[dut][addr_family]['link'].items():NEWLINE if ip_data['type'] == 'LBK' :NEWLINE if 'ip' in ip_data.keys():NEWLINE if ip_data['ip'] not in lpbk_ip_list:NEWLINE lpbk_ip_list.append(ip_data['ip'])NEWLINENEWLINE return copy.deepcopy(lpbk_ip_list)NEWLINENEWLINENEWLINE 
    @staticmethod
    def bgp_sp_get_dut_ip_address_list(dut, addr_family, vrf='default'):
        """Return every interface IP on *dut* for *addr_family*.

        NOTE(review): *vrf* is accepted but unused here — confirm intent.
        """
        if not BGPSP.bgp_sp_addr_family_valid(addr_family) :
            return []

        if not BGPSP.bgp_sp_dut_present(dut):
            st.log("BGP SP - Dut {} not present".format(dut))
            return []

        ip_addr_list = []
        for _, ip_data in sp_topo[dut][addr_family]['link'].items():
            if 'ip' in ip_data.keys():
                ip_addr_list.append(ip_data['ip'])

        st.log("BGP SP - Dut {} has host ip {}".format(dut, ip_addr_list))
        return copy.deepcopy(ip_addr_list)


    @staticmethod
    def bgp_sp_get_dut_static_network_prefixes(dut, addr_family):
        """Return the static network prefixes of *dut* as 'prefix/subnet' strings."""
        if not BGPSP.bgp_sp_dut_present(dut):
            st.log("BGP SP - Dut {} not present".format(dut))
            return []

        if not BGPSP.bgp_sp_addr_family_valid(addr_family):
            return []

        snw_list = []
        for prefix, snw_data in sp_topo[dut][addr_family]['static_nw'].items() :
            prefix_subnet = "{}/{}".format(prefix, snw_data['subnet'])
            snw_list.append(prefix_subnet)

        return copy.deepcopy(snw_list)


    @staticmethod
    def bgp_sp_get_dut_static_route_prefixes(dut, addr_family):
        """Return the static route prefixes of *dut* as 'prefix/subnet' strings."""
        if not BGPSP.bgp_sp_dut_present(dut):
            st.log("BGP SP - Dut {} not present".format(dut))
            return []

        if not BGPSP.bgp_sp_addr_family_valid(addr_family):
            return []

        srtp_list = []
        for prefix, rt_data in sp_topo[dut][addr_family]['static_rt'].items() :
            prefix_subnet = "{}/{}".format(prefix, rt_data['subnet'])
            srtp_list.append(prefix_subnet)

        return copy.deepcopy(srtp_list)


    @staticmethod
    def bgp_sp_get_dut_null_nhop_static_route_prefixes(dut, addr_family):
        """Return only static route prefixes whose nexthop is 'Null0'."""
        if not BGPSP.bgp_sp_dut_present(dut):
            st.log("BGP SP - Dut {} not present".format(dut))
            return []

        if not BGPSP.bgp_sp_addr_family_valid(addr_family):
            return []

        srtp_list = []
        for prefix, rt_data in sp_topo[dut][addr_family]['static_rt'].items() :
            if rt_data['nexthop'] == 'Null0' :
                prefix_subnet = "{}/{}".format(prefix, rt_data['subnet'])
                srtp_list.append(prefix_subnet)

        return copy.deepcopy(srtp_list)


    @staticmethod
    def bgp_sp_get_dut_static_route_prefix_data_list(dut, addr_family):
        """Return {prefix: route-data} for every static route of *dut*."""
        if not BGPSP.bgp_sp_dut_present(dut):
            st.log("BGP SP - Dut {} not present".format(dut))
            return {}

        if not BGPSP.bgp_sp_addr_family_valid(addr_family):
            return {}

        srtp_data_list = {}
        for prefix, rt_data in sp_topo[dut][addr_family]['static_rt'].items() :
            srtp_data_list.update({prefix: rt_data})

        return copy.deepcopy(srtp_data_list)


    @staticmethod
    def bgp_sp_find_linear_topo_in_dut_list(dut_list=[], start_dut='', node_limit=0, save_path='yes'):
        """DFS for the longest linear (chain) sub-topology within *dut_list*.

        Returns a dict with 'found', 'dut_list', per-hop 'segment' link maps,
        'segment_count' and 'type' == 'Linear'; optionally saved under
        sp_topo['subtopo']['linear'] when save_path == 'yes'.
        """
        st.log("BGP SP - Find Linear Topo in Dut list {} length {}".format(dut_list, node_limit))
        sp_topo_dut_list = BGPSP.bgp_sp_get_dut_list()

        found_path = {}
        found_path['found'] = False

        if not dut_list or len(dut_list) == 0 :
            dut_list = sp_topo_dut_list
        else :
            for dut in dut_list :
                if dut not in sp_topo_dut_list :
                    st.log("Dut {} not in Topo dut lidt {}".format(dut, sp_topo_dut_list))
                    return found_path

        if start_dut and start_dut != '' :
            if start_dut not in dut_list :
                st.log("Start dut {} not in dut list {}".format(start_dut, dut_list))
                return found_path

        # node_limit <= 0 means "no cap": allow chains up to the full dut count
        if node_limit <= 0 :
            length_limit = len (dut_list)
        else :
            length_limit = node_limit

        st.log("Modified Dut list {} length_limit {}".format(dut_list, length_limit))

        longest_path = []

        for dut in dut_list :

            if start_dut and start_dut != '' and start_dut != dut :
                continue

            if BGPSP.bgp_sp_dut_is_tg(dut) :
                continue

            st.log(" Starting dut {} ".format(dut))

            # iterative DFS: stack of duts to visit, path = current chain
            sp_topo_stack = []
            sp_topo_path = []
            sp_topo_stack.append(dut)

            while sp_topo_stack and len(sp_topo_stack) :

                st.log(" sp stack {}".format(sp_topo_stack))
                st.log(" sp path {}".format(sp_topo_path))

                curr_dut = sp_topo_stack.pop()
                sp_topo_path.append(curr_dut)

                leaf_dut = True
                for _, link_data in sp_topo[curr_dut]['intf'].items():
                    if 'rmt_dut' in link_data.keys():
                        next_dut = link_data['rmt_dut']

                        if BGPSP.bgp_sp_dut_is_tg(next_dut):
                            continue

                        if next_dut in sp_topo_path :
                            continue

                        if next_dut not in dut_list :
                            continue

                        if next_dut not in sp_topo_stack :
                            sp_topo_stack.append(next_dut)

                        leaf_dut = False

                # stop extending the chain once the requested length is reached
                if len(sp_topo_path) == length_limit :
                    leaf_dut = True

                if leaf_dut is True :
                    st.log(" Linear found Dut {} ".format(curr_dut))
                    st.log(" Linear found sp path {} ".format(sp_topo_path))
                    st.log(" Linear found longest path {} ".format(longest_path))

                    if len(longest_path) < len(sp_topo_path) :
                        if node_limit > 0 :
                            if len(sp_topo_path) <= length_limit :
                                longest_path = copy.deepcopy(sp_topo_path)
                                st.log(" New longest path set as curr new linear path")
                        else :
                            longest_path = copy.deepcopy(sp_topo_path)
                            st.log(" New longest path set as curr new linear path")

                    if len(longest_path) >= length_limit :
                        st.log(" Path length limit provided {} and reached".format(length_limit))
                        break

                    # backtrack one node and continue the search
                    sp_topo_path.pop()


            if len(longest_path) == length_limit :
                break

        st.log("BGP SP - Longest path len {} with path {}".format(len(longest_path), longest_path))

        path_length = len(longest_path)
        found_path['found'] = True if path_length else False

        # NOTE(review): the two lines below duplicate the two above — redundant
        # but harmless.
        path_length = len(longest_path)
        found_path['found'] = True if path_length else False
        found_path['dut_list'] = []
        found_path['segment'] = {}
        found_path['segment_count'] = 0
        found_path['type'] = 'Linear'

        if found_path['found'] :
            for dut in longest_path :
                found_path['dut_list'].append(dut)

            # build per-hop segment records: every parallel link between
            # consecutive duts is stored under the same segment index
            from_dut = longest_path[0]
            found_path['start_dut'] = from_dut
            dut_idx = 1
            while dut_idx < path_length :
                to_dut = longest_path[dut_idx]
                segt_link_idx = 0
                for link_name, link_data in sp_topo[from_dut]['intf'].items():
                    if 'rmt_dut' in link_data.keys():
                        if link_data['rmt_dut'] == to_dut :

                            rmt_link = link_data['rmt_link']
                            segt_link = { 'lcl_dut' : from_dut, 'lcl_link': link_name,
                                          'rmt_dut' : to_dut, 'rmt_link' : rmt_link }

                            if segt_link_idx == 0 : found_path['segment'][dut_idx - 1] = {}
                            found_path['segment'][dut_idx - 1].update({ segt_link_idx: segt_link})

                            if segt_link_idx == 0:
                                found_path['segment_count'] += 1
                            segt_link_idx += 1
                            #st.log(" Path node {} is {}".format(dut_idx - 1, segt_link))
                from_dut = to_dut
                dut_idx += 1

        if save_path == 'yes' :
            sp_topo['subtopo']['linear'] = copy.deepcopy(found_path)

        BGPSP.bgp_sp_show_topo_path(found_path)
        return found_path


    @staticmethod
    def bgp_sp_dut_get_saved_linear_topo():
        """Return a copy of the last linear sub-topology saved by the finder."""
        return copy.deepcopy(sp_topo['subtopo']['linear'])


    @staticmethod
    def bgp_sp_find_ring_topo_in_dut_list(dut_list=[], start_dut='', node_limit=0, save_path='yes'):
        """DFS for the largest ring (cycle) sub-topology within *dut_list*.

        Same result structure as the linear finder, with 'type' == 'Ring';
        the closing dut is appended during the search and removed from
        'dut_list' in the result.
        """
        st.log("BGP SP - Find Linear Topo in Dut list {} length {}".format(dut_list, node_limit))
        sp_topo_dut_list = BGPSP.bgp_sp_get_dut_list()

        found_path = {}
        found_path['found'] = False

        if not dut_list or len(dut_list) == 0 :
            dut_list = sp_topo_dut_list
        else :
            for dut in dut_list :
                if dut not in sp_topo_dut_list :
                    st.log("Dut {} not in Topo dut lidt {}".format(dut, sp_topo_dut_list))
                    return found_path

        if start_dut and start_dut != '' :
            if start_dut not in dut_list :
                st.log("Start dut {} not in dut list {}".format(start_dut, dut_list))
                return found_path

        # +1 because a ring path repeats the starting dut at the end
        if node_limit <= 0 :
            length_limit = len(dut_list) + 1
        else :
            length_limit = node_limit + 1

        st.log("Modified Dut list {} length_limit {}".format(dut_list, length_limit))

        longest_path = []
        loop_count = 0

        for dut in dut_list :

            # a ring needs at least 3 distinct duts (limit includes the repeat)
            if length_limit <= 3 :
                break

            if start_dut and start_dut != '' and start_dut != dut :
                continue

            if BGPSP.bgp_sp_dut_is_tg(dut) :
                continue

            st.log(" Starting at dut {} with longest path {}.".format(dut, longest_path))

            sp_topo_stack = []
            sp_topo_path = []
            sp_topo_stack.append(dut)

            while sp_topo_stack and len(sp_topo_stack) :

                # safety valve against pathological topologies
                loop_count += 1
                if loop_count > 100 :
                    break

                st.log(" sp stack {}".format(sp_topo_stack))
                st.log(" sp path {}".format(sp_topo_path))

                curr_dut = sp_topo_stack.pop()
                sp_topo_path.append(curr_dut)

                st.log(" modified sp path {}".format(sp_topo_path))

                leaf_dut = True
                ring_found = False

                for link_name, link_data in sp_topo[curr_dut]['intf'].items():
                    if 'rmt_dut' in link_data.keys():
                        next_dut = link_data['rmt_dut']

                        # a link back to the starting dut closes the ring
                        if next_dut == dut :
                            ring_found = True

                        if BGPSP.bgp_sp_dut_is_tg(next_dut):
                            continue

                        if next_dut not in dut_list :
                            continue

                        if next_dut in sp_topo_path :
                            continue

                        if next_dut not in sp_topo_stack :
                            sp_topo_stack.append(next_dut)

                        leaf_dut = False

                if ring_found :
                    st.log(" Ring found Dut {} ".format(curr_dut))
                    st.log(" Ring found sp path {} ".format(sp_topo_path))
                    st.log(" Ring found longest path {} ".format(longest_path))

                    if len(sp_topo_path) > 2 :

                        sp_topo_path.append(dut)

                        st.log(" new ring sp path {} ".format(sp_topo_path))
                        st.log(" ring longest path {} ".format(longest_path))

                        if len(longest_path) < len(sp_topo_path) :
                            if node_limit > 0 :
                                if len(sp_topo_path) <= length_limit :
                                    longest_path = copy.deepcopy(sp_topo_path)
                                    st.log(" New longest path set as curr new ring sp path")
                            else :
                                longest_path = copy.deepcopy(sp_topo_path)
                                st.log(" New longest path set as curr new ring sp path")

                        if len(longest_path) >= length_limit :
                            st.log(" Path length limit provided {} and reached".format(length_limit))
                            break

                        # undo the ring-closing append before continuing
                        sp_topo_path.pop()

                if leaf_dut is True :
                    # dead end: backtrack one node
                    sp_topo_path.pop()

            if len(longest_path) == length_limit :
                break

        st.log("BGP SP - Longest path len {} with path {}".format(len(longest_path), longest_path))

        path_length = len(longest_path)
        found_path['found'] = True if path_length else False
        found_path['dut_list'] = []
        found_path['segment'] = {}
        found_path['segment_count'] = 0
        found_path['type'] = 'Ring'

        if found_path['found'] :
            for dut in longest_path :
                found_path['dut_list'].append(dut)

            # build per-hop segment records, same layout as the linear finder
            from_dut = longest_path[0]
            found_path['start_dut'] = from_dut
            dut_idx = 1
            while dut_idx < path_length :
                to_dut = longest_path[dut_idx]
                segt_link_idx = 0
                for link_name, link_data in sp_topo[from_dut]['intf'].items():
                    if 'rmt_dut' in link_data.keys():
                        if link_data['rmt_dut'] == to_dut :

                            rmt_link = link_data['rmt_link']
                            segt_link = { 'lcl_dut' : from_dut, 'lcl_link': link_name,
                                          'rmt_dut' : to_dut, 'rmt_link' : rmt_link }

                            if segt_link_idx == 0 : found_path['segment'][dut_idx - 1] = {}
                            found_path['segment'][dut_idx - 1].update({ segt_link_idx: segt_link})

                            if segt_link_idx == 0:
                                found_path['segment_count'] += 1
                            segt_link_idx += 1
                            #st.log(" Path node {} is {}".format(dut_idx - 1, segt_link))

                from_dut = to_dut
                dut_idx += 1
            # drop the repeated starting dut from the reported dut list
            found_path['dut_list'].pop()

        if save_path == 'yes' :
            sp_topo['subtopo']['ring'] = copy.deepcopy(found_path)

        BGPSP.bgp_sp_show_topo_path(found_path)
        return found_path


    @staticmethod
    def bgp_sp_dut_get_saved_ring_topo():
        """Return a copy of the last ring sub-topology saved by the finder."""
        return copy.deepcopy(sp_topo['subtopo']['ring'])


    @staticmethod
    def bgp_sp_find_star_topo_in_dut_list(dut_list=[], core_dut = "", path_spoke_limit=0, save_path='yes'):
        """Find the largest star sub-topology (one core, many spokes).

        Same result structure as the other finders, 'type' == 'Star'.
        """
        st.log("BGP SP - Find Star Topo in Dut list {} length {}".format(dut_list, path_spoke_limit))
        sp_topo_dut_list = BGPSP.bgp_sp_get_dut_list()

        found_path = {}
        found_path['found'] = False

        if not dut_list or len(dut_list) == 0 :
            dut_list = sp_topo_dut_list
        else :
            for dut in dut_list :
                if dut not in sp_topo_dut_list :
                    st.log("Dut {} not in Topo dut list {}".format(dut, sp_topo_dut_list))
                    return found_path

        if core_dut and core_dut != '' :
            if core_dut not in dut_list :
                st.log("Core dute {} not in dut list {}".format(core_dut, dut_list))
                return found_path

        if path_spoke_limit <= 0 :
            spoke_limit = len (dut_list)
        else :
            spoke_limit = path_spoke_limit

        st.log("Modified Dut list {} length_limit {}".format(dut_list, spoke_limit))

        largest_star = []

        for dut in dut_list :

            if core_dut and core_dut != '' and core_dut != dut :
                continue

            if BGPSP.bgp_sp_dut_is_tg(dut) :
                continue

            st.log(" Starting dut {} ".format(dut))

            # candidate star: first entry is the core, the rest are spokes
            sp_topo_path = []
            sp_topo_path.append(dut)

            excl_list = list(dut_list)
            excl_list.remove(dut)

            for next_dut in excl_list :

                st.log(" sp path {}".format(sp_topo_path))

                #leaf_dut = True
                for link_name, link_data in sp_topo[dut]['intf'].items():
                    if 'rmt_dut' in link_data.keys():
                        rmt_dut = link_data['rmt_dut']

                        if rmt_dut != next_dut :
                            continue

                        sp_topo_path.append(next_dut)
                        break

            if len(largest_star) < len(sp_topo_path) :
                largest_star = sp_topo_path

            path_spoke_count = len(largest_star) - 1
            if path_spoke_limit > 0 :
                if path_spoke_count == path_spoke_limit :
                    st.log(" Path spoke limit provided {} and reached".format(path_spoke_limit))
                    break
            else :
                if path_spoke_count == spoke_limit :
                    st.log(" Path max possible spoke {} reached".format(spoke_limit))
                    break

        st.log("BGP SP - {} Star with nodes {}".format(len(largest_star), largest_star))

        path_length = len(largest_star)

        found_path['found'] = True if path_length else False
        found_path['dut_list'] = []
        found_path['segment'] = {}
        found_path['segment_count'] = 0
        found_path['type'] = 'Star'

        if found_path['found'] :

            for dut in largest_star :
                found_path['dut_list'].append(dut)

            from_dut = largest_star[0]
            found_path['start_dut'] = from_dut

            dut_idx = 1
while dut_idx < path_length :NEWLINE to_dut = largest_star[dut_idx]NEWLINE segt_link_idx = 0NEWLINE for link_name, link_data in sp_topo[from_dut]['intf'].items():NEWLINE if 'rmt_dut' in link_data.keys():NEWLINE if link_data['rmt_dut'] == to_dut :NEWLINE rmt_link = link_data['rmt_link']NEWLINE segt_link = { 'lcl_dut' : from_dut, 'lcl_link': link_name,NEWLINE 'rmt_dut' : to_dut, 'rmt_link' : rmt_link }NEWLINENEWLINE if segt_link_idx == 0 : found_path['segment'][dut_idx - 1] = {}NEWLINE found_path['segment'][dut_idx - 1].update({ segt_link_idx: segt_link})NEWLINENEWLINE if segt_link_idx == 0:NEWLINE found_path['segment_count'] += 1NEWLINE segt_link_idx += 1NEWLINE #st.log(" Path node {} is {}".format(dut_idx - 1, segt_link))NEWLINENEWLINE dut_idx += 1NEWLINENEWLINE if save_path == 'yes' :NEWLINE sp_topo['subtopo']['star'] = copy.deepcopy(found_path)NEWLINENEWLINE BGPSP.bgp_sp_show_topo_path(found_path)NEWLINE return found_pathNEWLINENEWLINENEWLINE @staticmethodNEWLINE def bgp_sp_dut_get_saved_star_topo():NEWLINE return copy.deepcopy(sp_topo['subtopo']['star'])NEWLINENEWLINENEWLINE @staticmethodNEWLINE def bgp_sp_find_spine_leaf_topo_in_dut_list(spine_list=[], leaf_list=[], save_path='yes'):NEWLINENEWLINE st.log("BGP SP - Find Spine Leaf paths in {} and {}.".format(spine_list, leaf_list))NEWLINE sp_topo_dut_list = BGPSP.bgp_sp_get_dut_list()NEWLINENEWLINE found_path = {}NEWLINE found_path['found'] = FalseNEWLINENEWLINE for dut in spine_list:NEWLINE if dut not in sp_topo_dut_list:NEWLINE st.log("Spine dut {} not in topo dut list {}".format(dut, sp_topo_dut_list))NEWLINE return found_pathNEWLINENEWLINE for dut in leaf_list:NEWLINE if dut not in sp_topo_dut_list:NEWLINE st.log("Leaf dut {} not in topo dut list {}".format(dut, sp_topo_dut_list))NEWLINE return found_pathNEWLINENEWLINE for dut in spine_list:NEWLINE if dut in leaf_list:NEWLINE st.log("Dut {} in both spine and leaf list {}".format(dut, spine_list))NEWLINE return found_pathNEWLINENEWLINE 
found_path['spine_list'] = spine_listNEWLINE found_path['leaf_list'] = leaf_listNEWLINE found_path['dut_list'] = []NEWLINE found_path['spine_path'] = {}NEWLINE found_path['type'] = 'SpineLeaf'NEWLINENEWLINE for spine_dut in spine_list :NEWLINENEWLINE dut_list = copy.deepcopy(leaf_list)NEWLINE dut_list.append(spine_dut)NEWLINENEWLINE spine_path = BGPSP.bgp_sp_find_star_topo_in_dut_list(dut_list, spine_dut, save_path='no')NEWLINENEWLINE st.log("Spine Leaf paths from {} is {}.\n".format(spine_dut, spine_path))NEWLINENEWLINE if spine_path['found'] :NEWLINE found_path['found'] = TrueNEWLINENEWLINE if spine_dut not in found_path['dut_list']:NEWLINE found_path['dut_list'].append(spine_dut)NEWLINENEWLINE for leaf_dut in spine_path['dut_list']:NEWLINE if leaf_dut not in found_path['dut_list']:NEWLINE found_path['dut_list'].append(leaf_dut)NEWLINENEWLINE spine_path = copy.deepcopy(spine_path)NEWLINE found_path['spine_path'].update({ spine_dut : spine_path })NEWLINENEWLINE if save_path == 'yes' :NEWLINE sp_topo['subtopo']['spine_leaf'] = copy.deepcopy(found_path)NEWLINENEWLINE st.log("BGP SP - Spine Leaf paths {}\n".format(found_path))NEWLINE return found_pathNEWLINENEWLINENEWLINE @staticmethodNEWLINE def bgp_sp_dut_get_saved_spine_leaf_topo():NEWLINE return copy.deepcopy(sp_topo['subtopo']['spine_leaf'])NEWLINENEWLINENEWLINE """ UNUSED AND CALLS UNDEFINED FUNCTIONNEWLINE @staticmethodNEWLINE def bgp_sp_dut_get_connected_ip_links(from_dut, to_dut, addr_family):NEWLINENEWLINE ip_link_list = []NEWLINE if not BGPSP.bgp_sp_addr_family_valid(addr_family):NEWLINE return ip_link_listNEWLINENEWLINE link_name_list = bgp_sp_dut_get_connected_link_names(from_dut, to_dut)NEWLINE if not link_name_list or len(link_name_list) == 0 :NEWLINE return ip_link_listNEWLINENEWLINE ip_link_list = []NEWLINE for link_name in link_name_list:NEWLINE if link_name in sp_topo[dut][addr_family]['link'].keys():NEWLINE ip_data = sp_topo[dut][addr_family]['link'][link_name]NEWLINE if 'rmt_dut' in 
ip_data.keys():NEWLINE if 'rmt_link' in ip_data.keys():NEWLINE if ip_data['rmt_dut'] == to_dut :NEWLINE ip_link_list.append(link_name)NEWLINENEWLINE return ip_link_listNEWLINE """NEWLINENEWLINENEWLINE @staticmethodNEWLINE def bgp_sp_add_del_dut(dut, device_name, device_type='DUT', add='yes'):NEWLINENEWLINE action_str = "Add" if add == 'yes' else 'Delete'NEWLINENEWLINE if add == 'yes' :NEWLINENEWLINE if BGPSP.bgp_sp_dut_present(dut):NEWLINE st.log("BGP SP - device {} exists as dut {}".format(device_name, dut))NEWLINE return FalseNEWLINENEWLINE dut2 = BGPSP.bgp_sp_get_dut_from_device(device_name)NEWLINE if dut2 != "" and dut != dut2 :NEWLINE st.log("BGP SP - device {} exists as dut {}".format(device_name, dut2))NEWLINE return FalseNEWLINENEWLINE st.log("BGP SP - {} {} {} {}".format(action_str, device_type, dut, device_name))NEWLINE if device_type == 'DUT' :NEWLINE sp_topo['dut_list'].append(dut)NEWLINE sp_topo['dut_list'].sort()NEWLINE else :NEWLINE sp_topo['tg_list'].append(dut)NEWLINE sp_topo['tg_list'].sort()NEWLINENEWLINE sp_topo[dut] = {}NEWLINE sp_topo[dut]['type'] = device_typeNEWLINE sp_topo[dut]['device'] = device_nameNEWLINE sp_topo[dut]['intf'] = {}NEWLINE sp_topo[dut]['nwoctet'] = 0NEWLINE sp_topo[dut]['vrf'] = {}NEWLINENEWLINE sp_topo[dut]['ipv4'] = {}NEWLINE sp_topo[dut]['ipv4']['static_nw'] = {}NEWLINE sp_topo[dut]['ipv4']['static_rt'] = {}NEWLINE sp_topo[dut]['ipv4']['link'] = {}NEWLINE sp_topo[dut]['ipv4']['nwoctet'] = 0NEWLINENEWLINE sp_topo[dut]['ipv6'] = {}NEWLINE sp_topo[dut]['ipv6']['static_nw'] = {}NEWLINE sp_topo[dut]['ipv6']['static_rt'] = {}NEWLINE sp_topo[dut]['ipv6']['link'] = {}NEWLINE sp_topo[dut]['ipv6']['nwoctet'] = 0NEWLINENEWLINE return TrueNEWLINENEWLINE else :NEWLINENEWLINE if not BGPSP.bgp_sp_dut_present(dut):NEWLINE st.log("BGP SP - dut doesnt exists {}".format(dut))NEWLINE return FalseNEWLINENEWLINE if device_name != '' and device_name != sp_topo[dut]['device']:NEWLINE st.log("BGP SP - device {} isnot dut {}".format(device_name, 
dut))NEWLINE return FalseNEWLINENEWLINE device_name = sp_topo[dut]['device']NEWLINENEWLINE if len(sp_topo[dut]['intf']) != 0 :NEWLINE st.log("BGP SP - device {} {} interface exists".format(device_name, dut))NEWLINE return FalseNEWLINENEWLINE st.log("BGP SP - Deleting device {} {} ".format(device_name, dut))NEWLINE del sp_topo[dut]NEWLINE if device_type == 'DUT' :NEWLINE del sp_topo['dut_list'][dut]NEWLINE sp_topo['dut_list'].sort()NEWLINE else :NEWLINE del sp_topo['tg_list'][dut]NEWLINE sp_topo['tg_list'].sort()NEWLINENEWLINE return TrueNEWLINENEWLINE #st.log("BGP SP - Dut {} FAILED".format(action_str))NEWLINE #return FalseNEWLINENEWLINENEWLINE @staticmethodNEWLINE def bgp_sp_add_del_link(dut, link_type, link_name, intf_name, add='yes'):NEWLINENEWLINE action_str = "Add" if add == 'yes' else 'Delete'NEWLINE st.log("BGP SP - Link {} for {} {}".format(action_str, dut, link_name))NEWLINENEWLINE if not BGPSP.bgp_sp_dut_present(dut):NEWLINE st.log("BGP SP - Dut {} doesnt exist".format(dut))NEWLINE return FalseNEWLINENEWLINE if not BGPSP.bgp_sp_valid_link_type(link_type):NEWLINE st.log("BGP SP - Invalid intface type {}".format(link_type))NEWLINE return FalseNEWLINENEWLINE if dut == "" or link_name=="" or intf_name == "" :NEWLINE st.log("BGP SP - Invalid dut {} or link {} or intf {}".format(dut, link_name, intf_name))NEWLINE return FalseNEWLINENEWLINE if add == 'yes' :NEWLINE if BGPSP.bgp_sp_dut_link_present(dut, link_name):NEWLINE st.log("BGP SP - dut {} link {} already present".format(dut, link_name))NEWLINE return FalseNEWLINENEWLINE if_data = { 'if': intf_name, 'type': link_type }NEWLINE sp_topo[dut]['intf'].update({link_name : if_data })NEWLINENEWLINE return TrueNEWLINENEWLINE else:NEWLINE if not BGPSP.bgp_sp_dut_link_present(dut, link_name):NEWLINE st.log("BGP SP - dut {} doesnt have intf {}".format(dut, link_name))NEWLINE return FalseNEWLINENEWLINE if BGPSP.bgp_sp_dut_link_connected(dut, link_name):NEWLINE st.log("BGP SP - dut {} link {} connected".format(dut, 
link_name))NEWLINE return FalseNEWLINENEWLINE if BGPSP.bgp_sp_dut_link_has_ip(dut, link_name, 'ipv4'):NEWLINE st.log("BGP SP - dut {} link {} has ipv4 addr".format(dut, link_name))NEWLINE return FalseNEWLINENEWLINE if BGPSP.bgp_sp_dut_link_has_ip(dut, link_name, 'ipv6'):NEWLINE st.log("BGP SP - dut {} link {} has ipv6 addr".format(dut, link_name))NEWLINE return FalseNEWLINENEWLINE st.log("BGP SP - dut {} deleting link {}".format(dut, link_name))NEWLINE del sp_topo[dut]['intf'][link_name]NEWLINE return TrueNEWLINENEWLINE #st.log("BGP SP - Link {} FAILED".format(action_str))NEWLINE #return FalseNEWLINENEWLINENEWLINE @staticmethodNEWLINE def bgp_sp_connect_links(from_dut, from_link, to_dut, to_link, add='yes'):NEWLINENEWLINE action_str = "Add" if add == 'yes' else 'Delete'NEWLINE st.log("BGP SP - Link connect {} for {} {}".format(action_str, from_link, to_link))NEWLINENEWLINE if not BGPSP.bgp_sp_dut_link_present(from_dut, from_link):NEWLINE st.log("BGP SP - dut {} link {} not present".format(from_dut, from_link))NEWLINE return FalseNEWLINENEWLINE if not BGPSP.bgp_sp_dut_link_present(to_dut, to_link):NEWLINE st.log("BGP SP - dut {} link {} not present".format(to_dut, to_link))NEWLINE return FalseNEWLINENEWLINE if add == 'yes' :NEWLINENEWLINE if BGPSP.bgp_sp_dut_link_connected(from_dut, from_link):NEWLINE st.log("BGP SP - dut {} link {} already connected".format(from_dut, from_link))NEWLINE return FalseNEWLINENEWLINE if BGPSP.bgp_sp_dut_link_connected(to_dut, to_link):NEWLINE st.log("BGP SP - dut {} link {} already connected".format(to_dut, to_link))NEWLINE return FalseNEWLINENEWLINE sp_topo[from_dut]['intf'][from_link].update({'rmt_dut': to_dut})NEWLINE sp_topo[from_dut]['intf'][from_link].update({'rmt_link': to_link})NEWLINENEWLINE sp_topo[to_dut]['intf'][to_link].update({'rmt_dut': from_dut})NEWLINE sp_topo[to_dut]['intf'][to_link].update({'rmt_link': from_link})NEWLINENEWLINE if BGPSP.bgp_sp_dut_link_connected(from_dut, from_link):NEWLINE st.log("BGP SP - {} {} {} 
{} connected".format(from_dut, from_link, to_dut, to_link))NEWLINE return TrueNEWLINENEWLINE else:NEWLINENEWLINE if not BGPSP.bgp_sp_dut_link_connected_to_each_other(from_dut, from_link, to_dut, to_link):NEWLINE st.log("BGP SP - {} {} {} {} not connected".format(from_dut, from_link, to_dut, to_link))NEWLINE return FalseNEWLINENEWLINE del sp_topo[from_dut]['intf'][from_link]['rmt_dut']NEWLINE del sp_topo[from_dut]['intf'][from_link]['rmt_link']NEWLINE del sp_topo[to_dut]['intf'][to_link]['rmt_dut']NEWLINE del sp_topo[to_dut]['intf'][to_link]['rmt_link']NEWLINENEWLINE st.log("BGP SP - {} {} {} {} disconnected".format(from_dut, from_link, to_dut, to_link))NEWLINE return TrueNEWLINENEWLINE return FalseNEWLINENEWLINENEWLINE @staticmethodNEWLINE def bgp_sp_add_del_link_ip(dut, link_name, ip_addr, subnet, rmt_ip, addr_family, add='yes'):NEWLINENEWLINE action_str = "Add" if add == 'yes' else 'Delete'NEWLINE st.log("BGP SP - Link ip {} for {} {} {}".format(action_str, dut, link_name, ip_addr))NEWLINENEWLINE if not BGPSP.bgp_sp_dut_link_connected(dut, link_name):NEWLINE st.log("BGP SP - {} link not in connected state".format(link_name))NEWLINENEWLINE if add == 'yes' :NEWLINENEWLINE if BGPSP.bgp_sp_dut_ip_link_present(dut, link_name, addr_family) :NEWLINE st.log("BGP SP - {} {} already has {} address".format(dut, link_name, addr_family))NEWLINE return FalseNEWLINENEWLINE if_data = sp_topo[dut]['intf'][link_name]NEWLINE ip_data = { "ip": ip_addr, "subnet": subnet, "if": if_data['if'], 'type': if_data['type']}NEWLINENEWLINE if 'rmt_dut' in if_data.keys():NEWLINE ip_data.update({'rmt_dut': if_data['rmt_dut']})NEWLINE ip_data.update({'rmt_link': if_data['rmt_link']})NEWLINENEWLINE if rmt_ip and rmt_ip != "":NEWLINE ip_data.update({'rmt_ip': rmt_ip})NEWLINENEWLINE sp_topo[dut][addr_family]['link'].update({link_name: ip_data})NEWLINENEWLINE #st.log("BGP SP - Added IP link {} {}".format(link_name, ip_data))NEWLINE return TrueNEWLINENEWLINE else:NEWLINENEWLINE if not 
BGPSP.bgp_sp_dut_ip_link_present(dut, link_name, addr_family) :NEWLINE st.log("BGP SP - {} {} does not exist".format(dut, link_name))NEWLINE return TrueNEWLINENEWLINE #if_data = sp_topo[dut]['intf'][link_name]NEWLINE #ip_data = sp_topo[dut][addr_family]['link'][link_name]NEWLINENEWLINE del sp_topo[dut][addr_family]['link'][link_name]NEWLINENEWLINE #st.log("BGP SP - Deleted IP link {} {}".format(link_name, ip_data))NEWLINE return TrueNEWLINENEWLINE #st.log("BGP SP - Link ip {} FAILED".format(action_str))NEWLINE #return FalseNEWLINENEWLINENEWLINE @staticmethodNEWLINE def bgp_sp_connect_all_ip_links():NEWLINENEWLINE st.log("BGP SP - IP link connect all")NEWLINENEWLINE nbr_visited = {}NEWLINE for dut in sp_topo['dut_list']:NEWLINE nbr_visited[dut] = FalseNEWLINENEWLINE addr_family_list = BGPSP.bgp_sp_get_address_family_list("all")NEWLINENEWLINE dut_list = BGPSP.bgp_sp_get_dut_list()NEWLINE dut_list += BGPSP.bgp_sp_get_tg_list()NEWLINENEWLINE for lcl_dut in dut_list:NEWLINE for lcl_link, link_data in sp_topo[lcl_dut]['intf'].items():NEWLINE if 'rmt_dut' in link_data.keys():NEWLINE rmt_dut = link_data['rmt_dut']NEWLINE rmt_link = link_data['rmt_link']NEWLINENEWLINE for afmly in addr_family_list:NEWLINE if lcl_link in sp_topo[lcl_dut][afmly]['link'].keys():NEWLINE if rmt_link in sp_topo[rmt_dut][afmly]['link'].keys():NEWLINENEWLINE lcl_ip = sp_topo[lcl_dut][afmly]['link'][lcl_link]['ip']NEWLINE rmt_ip = sp_topo[rmt_dut][afmly]['link'][rmt_link]['ip']NEWLINENEWLINE sp_topo[lcl_dut][afmly]['link'][lcl_link].update({'rmt_link': rmt_link})NEWLINE sp_topo[lcl_dut][afmly]['link'][lcl_link].update({'rmt_dut': rmt_dut})NEWLINE sp_topo[lcl_dut][afmly]['link'][lcl_link].update({'rmt_ip': rmt_ip})NEWLINENEWLINE sp_topo[rmt_dut][afmly]['link'][rmt_link].update({'rmt_link': lcl_link})NEWLINE sp_topo[rmt_dut][afmly]['link'][rmt_link].update({'rmt_dut': lcl_dut})NEWLINE sp_topo[rmt_dut][afmly]['link'][rmt_link].update({'rmt_ip': lcl_ip})NEWLINENEWLINE nbr_visited[lcl_dut] = 
TrueNEWLINENEWLINE return TrueNEWLINENEWLINENEWLINE @staticmethodNEWLINE def bgp_sp_add_del_dut_static_network_prefix(dut, prefix, subnet, addr_family, add='yes'):NEWLINENEWLINE action_str = "Add" if add == 'yes' else 'Delete'NEWLINE st.log("BGP SP - Static nw {} for {} {}".format(action_str, dut, prefix))NEWLINENEWLINE if not BGPSP.bgp_sp_dut_present(dut):NEWLINE st.log("BGP SP - Dut {} not present".format(dut))NEWLINE return FalseNEWLINENEWLINE if not BGPSP.bgp_sp_addr_family_valid(addr_family):NEWLINE return FalseNEWLINENEWLINE if add == 'yes' :NEWLINE snw_data = {'subnet': subnet}NEWLINE sp_topo[dut][addr_family]['static_nw'].update({prefix: snw_data})NEWLINE else :NEWLINE if prefix in sp_topo[dut][addr_family]['static_nw']:NEWLINE del sp_topo[dut][addr_family]['static_nw'][prefix]NEWLINENEWLINE return TrueNEWLINENEWLINENEWLINE @staticmethodNEWLINE def bgp_sp_add_del_dut_static_route_prefix(dut, prefix, subnet, next_hop, addr_family, add='yes'):NEWLINENEWLINE action_str = "Add" if add == 'yes' else 'Delete'NEWLINE st.log("BGP SP - {} Static route {} pfx {} nhop {}.".format(action_str, dut, prefix, next_hop))NEWLINENEWLINE if not BGPSP.bgp_sp_dut_present(dut):NEWLINE st.log("BGP SP - Dut {} not present".format(dut))NEWLINE return FalseNEWLINENEWLINE if not BGPSP.bgp_sp_addr_family_valid(addr_family):NEWLINE return FalseNEWLINENEWLINE if add == 'yes' :NEWLINE strt_data = {'nexthop' : next_hop , 'subnet': subnet}NEWLINE sp_topo[dut][addr_family]['static_rt'].update({prefix: strt_data})NEWLINE else :NEWLINE if prefix in sp_topo[dut][addr_family]['static_rt'].keys():NEWLINE del sp_topo[dut][addr_family]['static_rt'][prefix]NEWLINENEWLINE return TrueNEWLINENEWLINENEWLINE @staticmethodNEWLINE def bgp_sp_add_del_dut_network_num(dut, nw_num, addr_family, add='yes'):NEWLINENEWLINE action_str = "Add" if add == 'yes' else 'Delete'NEWLINE st.log("BGP SP - Nw num {} for {} {}".format(action_str, dut, nw_num))NEWLINENEWLINE if not BGPSP.bgp_sp_dut_present(dut):NEWLINE 
st.log("BGP SP - Dut {} not present".format(dut))NEWLINE return FalseNEWLINENEWLINE if not BGPSP.bgp_sp_addr_family_valid(addr_family):NEWLINE return FalseNEWLINENEWLINE if add == 'yes' :NEWLINE sp_topo[dut][addr_family]['nwoctet'] = nw_numNEWLINE else :NEWLINE sp_topo[dut][addr_family]['nwoctet'] = 0NEWLINENEWLINE return TrueNEWLINENEWLINENEWLINE @staticmethodNEWLINE def bgp_sp_add_del_link_address_octate(link_name, addr_oct_list=[], add='yes'):NEWLINENEWLINE action_str = "Add" if add == 'yes' else 'Delete'NEWLINE st.log("BGP SP - Addr octate {} for {} {}".format(action_str, link_name, addr_oct_list))NEWLINENEWLINE if add == 'yes' :NEWLINE sp_topo['network'].update({link_name: addr_oct_list})NEWLINE else :NEWLINE if link_name in sp_topo['network'].keys():NEWLINE del sp_topo['network'][link_name]NEWLINENEWLINE return TrueNEWLINENEWLINENEWLINE @staticmethodNEWLINE def bgp_sp_bgp_verify_routes_in_dut_list(dut_list=[], route_list=[], addr_family='ipv4', present='yes'):NEWLINENEWLINE st.log("BGP SP - verify route list routes in list of duts")NEWLINENEWLINE if not BGPSP.bgp_sp_addr_family_valid(addr_family):NEWLINE st.log("BGP SP - Invalid address family {}".format(addr_family))NEWLINE return FalseNEWLINENEWLINE if len(dut_list) == 0 :NEWLINE st.log("BGP SP - Dut list List empty")NEWLINE return FalseNEWLINENEWLINE if len(route_list) == 0 :NEWLINE st.log("BGP SP - Route List empty")NEWLINE if present == 'yes' :NEWLINE return TrueNEWLINE else :NEWLINE return FalseNEWLINENEWLINE for dut in dut_list:NEWLINE tb_dut = BGPSP.bgp_sp_get_dut_device(dut)NEWLINENEWLINE result = bgpapi.verify_ip_bgp_route_network_list(tb_dut, addr_family, route_list)NEWLINE if present == 'yes' :NEWLINE if not result :NEWLINE st.log("BGP SP - {} doesnot have routes {} - failed result".format(dut, route_list))NEWLINE return FalseNEWLINE else :NEWLINE st.log("BGP SP - {} has routes {}".format(dut, route_list))NEWLINE else :NEWLINE if result :NEWLINE st.log("BGP SP - {} has routes {} - failed 
result".format(dut, route_list))NEWLINE return FalseNEWLINE else :NEWLINE st.log("BGP SP - {} doesnot have routes {}".format(dut, route_list))NEWLINENEWLINE if present == 'yes' :NEWLINE st.log("BGP SP - {} has routes {} - Success".format(dut_list, route_list))NEWLINE else:NEWLINE st.log("BGP SP - {} doesnot have routes {} - Success".format(dut_list, route_list))NEWLINENEWLINE return TrueNEWLINENEWLINENEWLINE @staticmethodNEWLINE def bgp_sp_bgp_verify_static_route(dut_list=[], afmly_list=[], present='yes'):NEWLINENEWLINE st.log("BGP SP - verify every has other network due to root reflection")NEWLINE for dut in dut_list:NEWLINE other_dut_list = copy.deepcopy(dut_list)NEWLINE other_dut_list.remove(dut)NEWLINENEWLINE for afmly in afmly_list:NEWLINE #strt_prefix_list = BGPSP.bgp_sp_get_dut_static_route_prefixes(dut, afmly)NEWLINE strt_prefix_list = BGPSP.bgp_sp_get_dut_null_nhop_static_route_prefixes(dut, afmly)NEWLINE strt_prefix_list = BGPSP.bgp_sp_ip_prefix_list_to_route_prefix_list(strt_prefix_list, afmly)NEWLINENEWLINE st.log("BGP SP - {} static route prefixes {}".format(dut, strt_prefix_list))NEWLINENEWLINE result = BGPSP.bgp_sp_bgp_verify_routes_in_dut_list(other_dut_list, strt_prefix_list, afmly, present=present)NEWLINE if not result :NEWLINE st.log("BGP SP - Static route check FAILED")NEWLINE return FalseNEWLINENEWLINE st.log("BGP SP - Static route check Passed")NEWLINE return TrueNEWLINENEWLINENEWLINE @staticmethodNEWLINE def bgp_sp_get_matching_entries(entries=[], match=None):NEWLINE matched_entries = utils.filter_and_select(entries, None, match)NEWLINE if not matched_entries:NEWLINE st.log("\nBGP SP no match {} in\n {}\n".format(match, entries))NEWLINE else :NEWLINE st.log("\nBGP SP Matched {} entries\n {}\n".format(match, matched_entries))NEWLINE return matched_entriesNEWLINENEWLINENEWLINE @staticmethodNEWLINE def bgp_sp_entries_are_matching(entries=[], match=None):NEWLINE matched_entries = BGPSP.bgp_sp_get_matching_entries(entries, match)NEWLINE if not 
matched_entries:NEWLINE return FalseNEWLINE return TrueNEWLINENEWLINENEWLINE @staticmethodNEWLINE def bgp_sp_get_matching_bgp_ip_routes(dut, route_prefix_list=[], addr_family='ipv4'):NEWLINENEWLINE matched_entries = []NEWLINE tb_dut = BGPSP.bgp_sp_get_dut_device(dut)NEWLINE show_output = bgpapi.show_ip_bgp_route(tb_dut, family=addr_family)NEWLINE #st.log("\nBGP SP ip bgp route \n {}\n".format(show_output))NEWLINENEWLINE if not route_prefix_list :NEWLINE return show_outputNEWLINENEWLINE for route_prefix in route_prefix_list:NEWLINE match = {'network': route_prefix}NEWLINE entries = utils.filter_and_select(show_output, None, match)NEWLINE #st.log("\nBGP SP filtered entries \n {}\n".format(entries))NEWLINE if entries:NEWLINE matched_entries += entriesNEWLINE else :NEWLINE if len(matched_entries) :NEWLINE st.log("BGP SP - Few entries dont match")NEWLINE return []NEWLINENEWLINE #st.log("\nBGP SP route_prefixes Matched entries {}\n".format(matched_entries))NEWLINE return matched_entriesNEWLINENEWLINENEWLINE @staticmethodNEWLINE def bgp_sp_bgp_ip_route_is_matching(dut, route_prefix_list=[], addr_family='ipv4', match=None):NEWLINENEWLINE matched_entries = BGPSP.bgp_sp_get_matching_bgp_ip_routes(dut, route_prefix_list, addr_family)NEWLINE if not matched_entries :NEWLINE return FalseNEWLINENEWLINE if not match:NEWLINE return TrueNEWLINENEWLINE result = BGPSP.bgp_sp_entries_are_matching(matched_entries, match)NEWLINE return resultNEWLINENEWLINENEWLINE @staticmethodNEWLINE def bgp_sp_bgp_ip_route_is_selected(dut, route_prefix_list=[], addr_family='ipv4', match=None):NEWLINENEWLINE matched_entries = BGPSP.bgp_sp_get_matching_bgp_ip_routes(dut, route_prefix_list, addr_family)NEWLINE if not matched_entries :NEWLINE return FalseNEWLINENEWLINE match_selected ={'status_code': '*>'}NEWLINE selected_entries = BGPSP.bgp_sp_get_matching_entries(matched_entries, match_selected)NEWLINE #if not matched_entries:NEWLINE #return FalseNEWLINENEWLINE if not match:NEWLINE return 
TrueNEWLINENEWLINE result = BGPSP.bgp_sp_entries_are_matching(selected_entries, match)NEWLINE return resultNEWLINENEWLINENEWLINE @staticmethodNEWLINE def bgp_sp_bgp_ip_routes_matching(dut_list=[], route_prefix_list=[], addr_family='ipv4', match=None):NEWLINENEWLINE fail_result_list = []NEWLINE for dut in dut_list :NEWLINE matched_entries = BGPSP.bgp_sp_get_matching_bgp_ip_routes(dut, route_prefix_list, addr_family)NEWLINE if not matched_entries :NEWLINE st.log("BGP SP - {} doesnt have all routes to {}".format(dut, route_prefix_list))NEWLINE fail_result = "BGP SP - {} doesnt have all matching routes ".format(dut)NEWLINE fail_result_list.append(fail_result)NEWLINE continueNEWLINENEWLINE if not match:NEWLINE continueNEWLINENEWLINE result = BGPSP.bgp_sp_entries_are_matching(matched_entries, match)NEWLINE if not result :NEWLINE st.log("BGP SP - {} routes do not match condition {}".format(dut, match))NEWLINE fail_result = "BGP SP - {} routes dont match route condition".format(dut)NEWLINE fail_result_list.append(fail_result)NEWLINE continueNEWLINENEWLINE if len(fail_result_list):NEWLINE st.log("BGP SP - Dut List {}".format(dut_list))NEWLINE st.log("BGP SP - Route Prefix {}".format(route_prefix_list))NEWLINE st.log("BGP SP - Match condition {}".format(match))NEWLINE for fail_result in fail_result_list:NEWLINE st.log("{}".format(fail_result))NEWLINE st.log("BGP SP - IP routes not matching")NEWLINE return FalseNEWLINENEWLINE st.log("BGP SP - IP routes matching")NEWLINE return TrueNEWLINENEWLINENEWLINE @staticmethodNEWLINE def bgp_sp_bgp_ip_routes_not_matching(dut_list=[], route_prefix_list=[], addr_family='ipv4', match=None):NEWLINENEWLINE result = BGPSP.bgp_sp_bgp_ip_routes_matching(dut_list, route_prefix_list, addr_family, match)NEWLINE if result :NEWLINE return FalseNEWLINE else :NEWLINE return TrueNEWLINENEWLINENEWLINE @staticmethodNEWLINE def bgp_sp_dut_verify_bgp_ip_routes(dut, route_prefix_list=[], addr_family='ipv4', match=None):NEWLINENEWLINE if not 
BGPSP.bgp_sp_dut_present(dut):NEWLINE st.log("Dut {} not present".format(dut))NEWLINE return FalseNEWLINENEWLINE matched_entries = BGPSP.bgp_sp_get_matching_bgp_ip_routes(dut, route_prefix_list, addr_family)NEWLINE if not matched_entries :NEWLINE st.log("BGP SP - {} doesnt have all routes {}".format(dut, route_prefix_list))NEWLINE return FalseNEWLINENEWLINE if match:NEWLINE result = BGPSP.bgp_sp_entries_are_matching(matched_entries, match)NEWLINE if not result :NEWLINE st.log("BGP SP - {} routes do not match condition {}".format(dut, match))NEWLINE return FalseNEWLINENEWLINE st.log("BGP SP - {} IP routes matching".format(dut))NEWLINE return TrueNEWLINENEWLINENEWLINE @staticmethodNEWLINE def bgp_sp_verify_bgp_ip_routes(dut_list, route_prefix_list=[], addr_family='ipv4', match=None, threaded_run=True):NEWLINENEWLINE st.log("BGP SP - Verify that {} has BGP routes {}".format(dut_list,route_prefix_list))NEWLINENEWLINE result = TrueNEWLINENEWLINE dut_list = list(dut_list) if isinstance(dut_list, list) else [dut_list]NEWLINE if not dut_list or len(dut_list) < 2: threaded_run = FalseNEWLINENEWLINE dut_thread = []NEWLINE fail_result_list = []NEWLINENEWLINE for dut in dut_list :NEWLINE dut_result = TrueNEWLINE if threaded_run:NEWLINE dut_thread.append([BGPSP.bgp_sp_dut_verify_bgp_ip_routes, dut, route_prefix_list, addr_family, match])NEWLINE else :NEWLINE dut_result = BGPSP.bgp_sp_dut_verify_bgp_ip_routes(dut, route_prefix_list, addr_family, match)NEWLINENEWLINE if not dut_result:NEWLINE result = FalseNEWLINE st.log("BGP SP - {} routes do not match condition {}".format(dut, match))NEWLINE fail_result = "BGP SP - {} routes dont match route condition".format(dut)NEWLINE fail_result_list.append(fail_result)NEWLINE breakNEWLINENEWLINE if threaded_run:NEWLINE [out, exceptions] = putils.exec_all(bgplib.fast_start, dut_thread)NEWLINE st.log("BGP SP - BGP Route match Threaded Run result {}".format([out, exceptions]))NEWLINE if False in out : result = FalseNEWLINENEWLINE if not 
result or len(fail_result_list):NEWLINE st.log("BGP SP - Dut List {}".format(dut_list))NEWLINE st.log("BGP SP - Route Prefix {}".format(route_prefix_list))NEWLINE st.log("BGP SP - Match condition {}".format(match))NEWLINE for fail_result in fail_result_list:NEWLINE st.log("{}".format(fail_result))NEWLINE st.log("BGP SP - IP routes not matching")NEWLINE return FalseNEWLINENEWLINE st.log("BGP SP - IP routes matching")NEWLINE return TrueNEWLINENEWLINENEWLINE @staticmethodNEWLINE def bgp_sp_verify_no_bgp_ip_routes(dut_list, route_prefix_list=[], addr_family='ipv4', match=None, threaded_run=True):NEWLINENEWLINE result = BGPSP.bgp_sp_verify_bgp_ip_routes(dut_list, route_prefix_list, addr_family, match, threaded_run)NEWLINE if not result :NEWLINE result = TrueNEWLINE else :NEWLINE result = FalseNEWLINE return resultNEWLINENEWLINENEWLINE @staticmethodNEWLINE def bgp_sp_find_tb_connected_link(lcl_dut, lcl_if, rmt_tb, rmt_if):NEWLINENEWLINE connected_link = { 'connected': False,NEWLINE 'lcl_dut' : lcl_dut,NEWLINE 'lcl_tb' : '',NEWLINE 'lcl_link': '',NEWLINE 'lcl_if' : lcl_if,NEWLINE 'rmt_dut' : '',NEWLINE 'rmt_tb' : rmt_tb,NEWLINE 'rmt_link': '',NEWLINE 'rmt_if' : rmt_if }NEWLINENEWLINE connected_link['lcl_tb'] = BGPSP.bgp_sp_get_dut_device(lcl_dut)NEWLINE if connected_link['lcl_tb'] == '' :NEWLINE st.log("BGP SP - No lcl_tb, Link NOT connected {}".format(connected_link))NEWLINE return connected_linkNEWLINENEWLINE connected_link['rmt_dut'] = BGPSP.bgp_sp_get_dut_from_device(rmt_tb)NEWLINE if connected_link['rmt_dut'] == '' :NEWLINE st.log("BGP SP - No rmt dut, Link NOT connected {}".format(connected_link))NEWLINE return connected_linkNEWLINENEWLINE tb_vars = st.get_testbed_vars()NEWLINE tb_vars_keys = tb_vars.keys()NEWLINENEWLINE for port_idx in range(1,20) :NEWLINE link_name = "{}{}P{}".format(connected_link['lcl_dut'],NEWLINE connected_link['rmt_dut'], port_idx)NEWLINE if link_name in tb_vars_keys :NEWLINE temp_lcl_if = tb_vars[link_name]NEWLINE if temp_lcl_if == lcl_if 
:NEWLINE connected_link['lcl_link'] = link_nameNEWLINE breakNEWLINENEWLINE for port_idx in range(1,20) :NEWLINE link_name = "{}{}P{}".format(connected_link['rmt_dut'],NEWLINE connected_link['lcl_dut'], port_idx)NEWLINE if link_name in tb_vars_keys :NEWLINE temp_rmt_if = tb_vars[link_name]NEWLINE if temp_rmt_if == rmt_if :NEWLINE connected_link['rmt_link'] = link_nameNEWLINE breakNEWLINENEWLINE if connected_link['lcl_link'] != '' and connected_link['rmt_link'] != '' :NEWLINE connected_link['connected'] = TrueNEWLINE st.log("BGP SP - Link connected {}".format(connected_link))NEWLINE return copy.deepcopy(connected_link)NEWLINENEWLINE st.log("BGP SP - Link NOT connected {}".format(connected_link))NEWLINE return {'connected': False }NEWLINENEWLINENEWLINE @staticmethodNEWLINE def bgp_sp_setup_testbed_topology(per_node_nw='no', nw_ip_octet='10'):NEWLINE st.banner("BGP SP - BUILD TOPOLOGY - START")NEWLINE tb_vars = st.get_testbed_vars()NEWLINE tb_var_keys = tb_vars.keys()NEWLINE st.log("TestBed Vars => {}\n".format(tb_vars))NEWLINENEWLINE sub_nw_idx = 32NEWLINE sp_topo['dut_list'] = []NEWLINE sp_topo['tg_list'] = []NEWLINE sp_topo['dut_map'] = {}NEWLINE sp_topo['tg_map'] = {}NEWLINE sp_topo['network'] = {}NEWLINE sp_topo['subtopo'] = {}NEWLINE sp_topo['subtopo']['linear'] = { 'found': False }NEWLINE sp_topo['subtopo']['ring'] = { 'found': False }NEWLINE sp_topo['subtopo']['star'] = {'found': False}NEWLINE sp_topo['subtopo']['spine_leaf'] = {'found': False}NEWLINENEWLINENEWLINE tb_dut_count = len(tb_vars.dut_list)NEWLINE for dut_idx in range(1, tb_dut_count+1) :NEWLINE dut = "D{}".format(dut_idx)NEWLINE if dut in tb_var_keys :NEWLINE sp_topo['dut_map'][dut] = tb_vars[dut]NEWLINENEWLINE tb_tg_count = len(tb_vars.tgen_list)NEWLINE for tg_idx in range(1, tb_tg_count+1) :NEWLINE tgen = "T{}".format(tg_idx)NEWLINE if tgen in tb_var_keys :NEWLINE sp_topo['tg_map'][tgen] = tb_vars[tgen]NEWLINENEWLINE st.log("BGP SP - Testbed Dut List {}".format(sp_topo['dut_map']))NEWLINE 
st.log("BGP SP - Testbed Tgen List {}".format(sp_topo['tg_map']))NEWLINENEWLINE dut_idx = 0NEWLINE for dut, tb_dut_name in sp_topo['dut_map'].items():NEWLINENEWLINE dut_idx += 1NEWLINENEWLINE result = BGPSP.bgp_sp_add_del_dut(dut, tb_dut_name, add='yes')NEWLINE if not result:NEWLINE st.log("BGP SP - Dut {} add {} FAILED".format(dut, tb_dut_name))NEWLINENEWLINE if per_node_nw == 'no' :NEWLINE nw_ipv4_octet = nw_ip_octetNEWLINE else :NEWLINE nw_ipv4_octet = int(nw_ip_octet) + dut_idxNEWLINENEWLINE BGPSP.bgp_sp_add_del_dut_network_num(dut, nw_ipv4_octet, 'ipv4', 'yes')NEWLINE nw_ipv6_octet = "97{}".format(nw_ipv4_octet)NEWLINE BGPSP.bgp_sp_add_del_dut_network_num(dut, nw_ipv6_octet, 'ipv6', 'yes')NEWLINENEWLINENEWLINE for dut, tb_dut_name in sp_topo['tg_map'].items():NEWLINENEWLINE dut_idx += 1NEWLINENEWLINE result = BGPSP.bgp_sp_add_del_dut(dut, tb_dut_name, device_type='TG', add='yes')NEWLINE if not result:NEWLINE st.log("BGP SP - TG Dut {} add {} FAILED".format(dut, tb_dut_name))NEWLINENEWLINE if per_node_nw == 'no' :NEWLINE nw_ipv4_octet = nw_ip_octetNEWLINE else :NEWLINE nw_ipv4_octet = int(nw_ip_octet) + dut_idxNEWLINENEWLINE BGPSP.bgp_sp_add_del_dut_network_num(dut, nw_ipv4_octet, 'ipv4', 'yes')NEWLINE nw_ipv6_octet = "97{}".format(nw_ipv4_octet)NEWLINE BGPSP.bgp_sp_add_del_dut_network_num(dut, nw_ipv6_octet, 'ipv6', 'yes')NEWLINENEWLINENEWLINE sp_topo['dut_list'].sort()NEWLINE sp_topo['tg_list'].sort()NEWLINE #st.log("SP topo after dut add:\n{}\n".format(sp_topo))NEWLINENEWLINE for from_dut_idx, from_dut in enumerate(sp_topo['dut_list'], start = 1):NEWLINENEWLINE for count in range(0,2):NEWLINE intf_name = "Loopback{}".format(count)NEWLINE link_name = "{}L{}".format(from_dut, count)NEWLINENEWLINE result = BGPSP.bgp_sp_add_del_link(from_dut, 'LBK', link_name, intf_name, add='yes')NEWLINE if not result:NEWLINE st.log("Loopback interface {} add FAILED".format(link_name))NEWLINENEWLINE nwoct4 = "{}".format(sp_topo[from_dut]['ipv4']['nwoctet'])NEWLINE nwoct3 = 
8NEWLINE nwoct2 = count + 1NEWLINENEWLINE lo_ip = "{}.{}.{}.{}".format(nwoct4, nwoct3, nwoct2, from_dut_idx)NEWLINE result = BGPSP.bgp_sp_add_del_link_ip(from_dut, link_name, lo_ip, 32, "", 'ipv4', add='yes')NEWLINE if not result:NEWLINE st.log("Loopback interface IPv4 {} add FAILED".format(link_name))NEWLINENEWLINE lo_ip = "{}:{}{}:{}{}::{}".format(nwoct4, from_dut_idx, nwoct3, nwoct2, count+1, from_dut_idx)NEWLINE result = BGPSP.bgp_sp_add_del_link_ip(from_dut, link_name, lo_ip, 128, "", 'ipv6', add='yes')NEWLINE if not result:NEWLINE st.log("Loopback interface IPv6 {} add FAILED".format(link_name))NEWLINENEWLINE addr_oct_list = [nwoct4, nwoct3, nwoct2, from_dut_idx]NEWLINE BGPSP.bgp_sp_add_del_link_address_octate(link_name, addr_oct_list, add='yes')NEWLINENEWLINE #st.log("SP topo after dut loopback add :\n{}\n".format(sp_topo))NEWLINENEWLINE lcl_dut = from_dutNEWLINE lcl_tb = BGPSP.bgp_sp_get_dut_device(lcl_dut)NEWLINENEWLINE dut_links = st.get_dut_links(lcl_tb)NEWLINE tg_links = st.get_tg_links(lcl_tb)NEWLINENEWLINE dut_all_links = dut_links + tg_linksNEWLINE st.log("BGP SP - Dut {} links {}".format(lcl_dut, dut_all_links))NEWLINENEWLINE for link_idx, link in enumerate(dut_all_links , start = 1):NEWLINENEWLINE link_data = BGPSP.bgp_sp_find_tb_connected_link(lcl_dut, link[0], link[1], link[2])NEWLINE if not link_data['connected'] :NEWLINE continueNEWLINENEWLINE rmt_dut = link_data['rmt_dut']NEWLINE #rmt_tb = link_data['rmt_tb']NEWLINENEWLINE lcl_if = link_data['lcl_if']NEWLINE rmt_if = link_data['rmt_if']NEWLINENEWLINE lcl_link = link_data['lcl_link']NEWLINE rmt_link = link_data['rmt_link']NEWLINENEWLINE BGPSP.bgp_sp_add_del_link(lcl_dut, 'ETH', lcl_link, lcl_if, add='yes')NEWLINENEWLINE if BGPSP.bgp_sp_dut_is_tg(rmt_dut) :NEWLINE BGPSP.bgp_sp_add_del_link(rmt_dut, 'ETH', rmt_link, rmt_if, add='yes')NEWLINENEWLINE if BGPSP.bgp_sp_dut_link_present(rmt_dut, rmt_link):NEWLINE BGPSP.bgp_sp_connect_links(lcl_dut, lcl_link, rmt_dut, rmt_link)NEWLINENEWLINE if lcl_link 
in sp_topo['network'].keys() :NEWLINE nwoct4 = sp_topo['network'][lcl_link][0]NEWLINE nwoct3 = sp_topo['network'][lcl_link][1]NEWLINE nwoct2 = sp_topo['network'][lcl_link][2]NEWLINE elif rmt_link in sp_topo['network'].keys():NEWLINE nwoct4 = sp_topo['network'][rmt_link][0]NEWLINE nwoct3 = sp_topo['network'][rmt_link][1]NEWLINE nwoct2 = sp_topo['network'][rmt_link][2]NEWLINE else :NEWLINE nwoct4 = "{}".format(sp_topo[lcl_dut]['ipv4']['nwoctet'])NEWLINE nwoct3 = sub_nw_idxNEWLINE sub_nw_idx += 2NEWLINE nwoct2 = link_idx #from_dut_idxNEWLINENEWLINE if link_data['lcl_dut'] < link_data['rmt_dut'] :NEWLINE lcl_host_num = 1NEWLINE rmt_host_num = 2NEWLINE else:NEWLINE lcl_host_num = 2NEWLINE rmt_host_num = 1NEWLINENEWLINE lcl_ip = "{}.{}.{}.{}".format(nwoct4, nwoct3, nwoct2, lcl_host_num)NEWLINE rmt_ip = "{}.{}.{}.{}".format(nwoct4, nwoct3, nwoct2, rmt_host_num)NEWLINENEWLINE BGPSP.bgp_sp_add_del_link_ip(lcl_dut, lcl_link, lcl_ip, 24, rmt_ip, 'ipv4', add='yes')NEWLINENEWLINE if BGPSP.bgp_sp_dut_is_tg(rmt_dut) :NEWLINE BGPSP.bgp_sp_add_del_link_ip(rmt_dut, rmt_link, rmt_ip, 24, lcl_ip, 'ipv4', add='yes')NEWLINENEWLINE lcl_ip = "{}:{}:{}::{}".format(nwoct4, nwoct3, nwoct2, lcl_host_num)NEWLINE rmt_ip = "{}:{}:{}::{}".format(nwoct4, nwoct3, nwoct2, rmt_host_num)NEWLINENEWLINE BGPSP.bgp_sp_add_del_link_ip(lcl_dut, lcl_link, lcl_ip, 64, rmt_ip, 'ipv6', add='yes')NEWLINENEWLINE if BGPSP.bgp_sp_dut_is_tg(rmt_dut) :NEWLINE BGPSP.bgp_sp_add_del_link_ip(rmt_dut, rmt_link, rmt_ip, 64, lcl_ip, 'ipv6', add='yes')NEWLINENEWLINE addr_oct_list = [nwoct4, nwoct3, nwoct2, lcl_host_num]NEWLINE BGPSP.bgp_sp_add_del_link_address_octate(lcl_link, addr_oct_list, add='yes')NEWLINENEWLINE if BGPSP.bgp_sp_dut_is_tg(rmt_dut) :NEWLINE BGPSP.bgp_sp_add_del_link_address_octate(rmt_link, addr_oct_list, add='yes')NEWLINENEWLINENEWLINE #st.log("SP topo after {} interface add :\n{}\n".format(from_dut, sp_topo))NEWLINENEWLINE for count in range(1,3):NEWLINE link_name = "{}N{}".format(from_dut, count)NEWLINE 
nwoct4 = 216NEWLINE nwoct3 = 50 + countNEWLINE nwoct2 = from_dut_idxNEWLINENEWLINE st_nw = "{}.{}.{}.{}".format(nwoct4, nwoct3, nwoct2, 0)NEWLINE BGPSP.bgp_sp_add_del_dut_static_network_prefix(from_dut, st_nw, 24, 'ipv4', add='yes')NEWLINENEWLINE st_nw = "{}:{}:{}::{}".format(nwoct4, nwoct3, nwoct2, 0)NEWLINE BGPSP.bgp_sp_add_del_dut_static_network_prefix(from_dut, st_nw, 86, 'ipv6', add='yes')NEWLINENEWLINE addr_oct_list = [nwoct4, nwoct3, nwoct2, 0]NEWLINE BGPSP.bgp_sp_add_del_link_address_octate(link_name, addr_oct_list, add='yes')NEWLINENEWLINE for count in range(1,2):NEWLINE link_name = "{}RN{}".format(from_dut, count)NEWLINE nwoct4 = 209NEWLINE nwoct3 = 90 + countNEWLINE nwoct2 = from_dut_idxNEWLINE next_hop = "Null0"NEWLINENEWLINE st_rt = "{}.{}.{}.{}".format(nwoct4, nwoct3, nwoct2, 0)NEWLINE BGPSP.bgp_sp_add_del_dut_static_route_prefix(from_dut, st_rt, 24, next_hop, 'ipv4', add='yes')NEWLINENEWLINE st_rt = "{}:{}:{}::{}".format(nwoct4, nwoct3, nwoct2, 0)NEWLINE BGPSP.bgp_sp_add_del_dut_static_route_prefix(from_dut, st_rt, 64, next_hop, 'ipv6', add='yes')NEWLINENEWLINE addr_oct_list = [nwoct4, nwoct3, nwoct2, 0]NEWLINE BGPSP.bgp_sp_add_del_link_address_octate(link_name, addr_oct_list, add='yes')NEWLINENEWLINE for count in range(1,2):NEWLINE link_name = "{}RS{}".format(from_dut, count)NEWLINE nwoct4 = 208NEWLINE nwoct3 = 80 + countNEWLINE nwoct2 = from_dut_idxNEWLINENEWLINE st_rt = "{}.{}.{}.{}".format(nwoct4, nwoct3, nwoct2, 0)NEWLINE #next_hop = BGPSP.bgp_sp_get_dut_loopback_ip(from_dut, 0, 'ipv4')NEWLINE next_hop = BGPSP.bgp_sp_get_unused_dut_interface(from_dut)NEWLINE BGPSP.bgp_sp_add_del_dut_static_route_prefix(from_dut, st_rt, 24, next_hop, 'ipv4', add='yes')NEWLINENEWLINE st_rt = "{}:{}:{}::{}".format(nwoct4, nwoct3, nwoct2, 0)NEWLINE #next_hop = BGPSP.bgp_sp_get_dut_loopback_ip(from_dut, 0, 'ipv6')NEWLINE next_hop = BGPSP.bgp_sp_get_unused_dut_interface(from_dut)NEWLINE BGPSP.bgp_sp_add_del_dut_static_route_prefix(from_dut, st_rt, 64, next_hop, 
'ipv6', add='yes')NEWLINENEWLINE addr_oct_list = [nwoct4, nwoct3, nwoct2, 0]NEWLINE BGPSP.bgp_sp_add_del_link_address_octate(link_name, addr_oct_list, add='yes')NEWLINENEWLINE #st.log("SP topo for {} :\n{}\n".format(from_dut, sp_topo))NEWLINENEWLINE #st.log("SP topo at testbed topobuild complete:\n{}\n".format(sp_topo))NEWLINENEWLINE BGPSP.bgp_sp_connect_all_ip_links()NEWLINENEWLINE BGPSP.bgp_sp_show_dut_topo_data()NEWLINENEWLINE st.banner("BGP SP - BUILD TOPOLOGY - END")NEWLINENEWLINE return TrueNEWLINENEWLINENEWLINE @staticmethodNEWLINE def bgp_sp_clear_testbed_topology(per_node_nw='no', nw_ip_octet='10'):NEWLINE sp_topo.clear()NEWLINE bgp_topo.clear()NEWLINENEWLINENEWLINE @staticmethodNEWLINE def bgp_sp_test_topo_present(topo_path=None, dut_count=None, segment_count=None):NEWLINENEWLINE if dut_count :NEWLINE if BGPSP.bgp_sp_get_dut_count() < dut_count :NEWLINE st.log("BGP SP - Test case needs minimum {} duts in testbed".format(dut_count))NEWLINE return FalseNEWLINENEWLINE if not topo_path :NEWLINE st.log("BGP SP - Testbed Topology path is Null")NEWLINE return FalseNEWLINENEWLINE if 'found' not in topo_path.keys() :NEWLINE st.log("BGP SP - Invalid Path")NEWLINE return FalseNEWLINENEWLINE if not topo_path['found'] :NEWLINE st.log("BGP SP - Required Topology path not found")NEWLINE return FalseNEWLINENEWLINE if segment_count :NEWLINE if topo_path['segment_count'] < segment_count :NEWLINE st.log("BGP SP - Test case needs minimum {} segments in Topology path".format(segment_count))NEWLINE return FalseNEWLINENEWLINE return TrueNEWLINENEWLINENEWLINE @staticmethodNEWLINE def bgp_sp_show_dut_topo_data(dut_list = []):NEWLINENEWLINE if not dut_list :NEWLINE dut_list = BGPSP.bgp_sp_get_dut_list()NEWLINE dut_list += BGPSP.bgp_sp_get_tg_list()NEWLINENEWLINE st.log("\n")NEWLINE st.log("BGP SP - Dut List: {}".format(sp_topo['dut_list']))NEWLINE st.log("BGP SP - Dut Dev Map: {}".format(sp_topo['dut_map']))NEWLINE st.log("BGP SP - TG List: {}".format(sp_topo['tg_list']))NEWLINE 
st.log("BGP SP - TG Dev Map: {}".format(sp_topo['tg_map']))NEWLINENEWLINE for dut in dut_list:NEWLINE if not BGPSP.bgp_sp_dut_present(dut) :NEWLINE continueNEWLINENEWLINE st.log("\n")NEWLINE st.log("BGP SP - Dut {} {} {}".format(dut, sp_topo[dut]['type'], sp_topo[dut]['device']))NEWLINENEWLINE for intf, intf_data in sp_topo[dut]['intf'].items():NEWLINE st.log(" Intf {} {}".format(intf, intf_data))NEWLINENEWLINE for link, link_data in sp_topo[dut]['ipv4']['link'].items():NEWLINE st.log(" Ipv4 Link {} {}".format(link, link_data))NEWLINE for link, link_data in sp_topo[dut]['ipv6']['link'].items():NEWLINE st.log(" Ipv6 Link {} {}".format(link, link_data))NEWLINENEWLINE for stnw, stnw_data in sp_topo[dut]['ipv4']['static_nw'].items():NEWLINE st.log(" Static Ipv4 Nw {} {}".format(stnw, stnw_data))NEWLINE for stnw, stnw_data in sp_topo[dut]['ipv6']['static_nw'].items():NEWLINE st.log(" Static IPv6 Nw {} {}".format(stnw, stnw_data))NEWLINENEWLINE for strt, strt_data in sp_topo[dut]['ipv4']['static_rt'].items():NEWLINE st.log(" Static Ipv4 Route {} {}".format(strt, strt_data))NEWLINE for strt, strt_data in sp_topo[dut]['ipv6']['static_rt'].items():NEWLINE st.log(" Static IPv6 Route {} {}".format(strt, strt_data))NEWLINENEWLINE st.log(" Ipv4 Network Octates {}".format(sp_topo[dut]['ipv4']['nwoctet']))NEWLINE st.log(" IPv6 Network Octates {}".format(sp_topo[dut]['ipv6']['nwoctet']))NEWLINENEWLINE st.log("\n")NEWLINENEWLINENEWLINE @staticmethodNEWLINE def bgp_sp_show_topo_path(path):NEWLINENEWLINE if not path :NEWLINE st.log("BGP SP - Path Null")NEWLINE returnNEWLINENEWLINE if 'type' not in path.keys():NEWLINE st.log("BGP SP - Path Type Not found")NEWLINE returnNEWLINENEWLINE if 'found' not in path.keys():NEWLINE st.log("BGP SP - Path Invalid")NEWLINE returnNEWLINENEWLINE path_found = "Found" if path['found'] else "Not Found"NEWLINENEWLINE st.log("BGP SP - {} Topo Path {}".format(path['type'], path_found))NEWLINE if not path['found'] : returnNEWLINENEWLINE st.log(" Dut List: 
{}".format(path['dut_list']))NEWLINE st.log(" Segt Count: {}".format(path['segment_count']))NEWLINE for segt_idx, segt_data in path['segment'].items():NEWLINE st.log(" Segment-{}: ".format(segt_idx))NEWLINE for link_idx, link_data in segt_data.items():NEWLINE st.log(" Link-{}: {}".format(link_idx, link_data))NEWLINE st.log("\n")NEWLINENEWLINENEWLINE @staticmethodNEWLINE def bgp_sp_show_dut_if_cmd_logs(dut):NEWLINE tb_dut = BGPSP.bgp_sp_get_dut_device(dut)NEWLINE st.show(tb_dut, "show ip interface")NEWLINE st.show(tb_dut, "show ipv6 interface")NEWLINENEWLINENEWLINE @staticmethodNEWLINE def bgp_sp_show_dut_route_cmd_logs(dut):NEWLINE tb_dut = BGPSP.bgp_sp_get_dut_device(dut)NEWLINE st.vtysh_show(tb_dut, "show ip route")NEWLINE st.vtysh_show(tb_dut, "show ipv6 route")NEWLINENEWLINENEWLINE @staticmethodNEWLINE def bgp_sp_show_dut_bgp_cmd_logs(dut):NEWLINE tb_dut = BGPSP.bgp_sp_get_dut_device(dut)NEWLINE st.vtysh_config(tb_dut, "do show running-config bgp")NEWLINE st.vtysh_show(tb_dut, "show ip bgp summary")NEWLINE st.vtysh_show(tb_dut, "show bgp ipv4")NEWLINE st.vtysh_show(tb_dut, "show bgp ipv6")NEWLINENEWLINENEWLINE @staticmethodNEWLINE def bgp_sp_show_dut_cmd_logs(dut):NEWLINE BGPSP.bgp_sp_show_dut_if_cmd_logs(dut)NEWLINE BGPSP.bgp_sp_show_dut_route_cmd_logs(dut)NEWLINENEWLINE @staticmethodNEWLINE def bgp_sp_show_dut_bgp_running_config(dut_list=[]):NEWLINE for dut in dut_list :NEWLINE tb_dut = BGPSP.bgp_sp_get_dut_device(dut)NEWLINE st.vtysh_config(tb_dut, "do show running-config bgp")NEWLINENEWLINE @staticmethodNEWLINE def bgp_sp_loopback_interface_config_unconfig(config='yes', vrf='default', threaded_run=True):NEWLINE """NEWLINENEWLINE :param config:NEWLINE :param vrf:NEWLINE :return:NEWLINE """NEWLINE action_str = 'Config' if config == 'yes' else 'Unconfig'NEWLINE st.banner("{}uring LOOPBACK Interface on all nodes.".format(action_str))NEWLINENEWLINE result = TrueNEWLINE #threaded_run = TrueNEWLINENEWLINE dut_list = BGPSP.bgp_sp_get_dut_list() #+ 
BGPSP.bgp_sp_get_tg_list()NEWLINE dut_thread = []NEWLINENEWLINE for dut in dut_list :NEWLINE tb_dut = sp_topo[dut]['device']NEWLINE lpbk_if_data = {}NEWLINENEWLINE if BGPSP.bgp_sp_dut_is_tg(dut) :NEWLINE st.log("BGP SP - TG {} Loopback config not done for now".format(dut))NEWLINE continueNEWLINENEWLINE for _, link_data in sp_topo[dut]['intf'].items():NEWLINE if link_data['type'] != 'LBK':NEWLINE continueNEWLINENEWLINE if_name = link_data['if']NEWLINE lpbk_if_data[if_name] = 'default'NEWLINENEWLINENEWLINE loopback_names = list(lpbk_if_data.keys())NEWLINE if threaded_run:NEWLINE dut_thread.append(putils.ExecAllFunc(ipapi.config_loopback_interfaces, tb_dut, loopback_name=loopback_names, config=config))NEWLINE else :NEWLINE result = ipapi.config_loopback_interfaces(tb_dut, loopback_name=loopback_names, config=config)NEWLINE if not result :NEWLINE st.log("{}uring {} loopback interfaces FAILED".format(action_str, dut))NEWLINE return FalseNEWLINENEWLINE if threaded_run:NEWLINE [out, exceptions] = putils.exec_all(bgplib.fast_start, dut_thread)NEWLINE st.log("BGP SP - Threaded Run result {}".format([out, exceptions]))NEWLINE if False in out : result = FalseNEWLINENEWLINE return resultNEWLINENEWLINENEWLINE @staticmethodNEWLINE def bgp_sp_loopback_address_config_unconfig(config='yes', vrf='default', addr_family='all', threaded_run=True, debug_run=False):NEWLINE """NEWLINENEWLINE :param config:NEWLINE :param vrf:NEWLINE :param addr_family:NEWLINE :return:NEWLINE """NEWLINE action_str = 'Config' if config == 'yes' else 'Unconfig'NEWLINE st.banner("{}uring LOOPBACK Addresses on all nodes.".format(action_str))NEWLINENEWLINE if not BGPSP.bgp_sp_topology_data_present() :NEWLINE st.log("BGP SP Topology data not available")NEWLINE st.log("SP topo:\n{}\n".format(sp_topo))NEWLINE return FalseNEWLINENEWLINE #threaded_run = TrueNEWLINE #debug_run = FalseNEWLINE result = TrueNEWLINE config = 'add' if config == 'yes' else 'remove'NEWLINENEWLINE addr_family_list = 
BGPSP.bgp_sp_get_address_family_list(addr_family)NEWLINE dut_thread = []NEWLINENEWLINE dut_list = BGPSP.bgp_sp_get_dut_list() #+ BGPSP.bgp_sp_get_tg_list()NEWLINENEWLINE for dut in dut_list :NEWLINE tb_dut = sp_topo[dut]['device']NEWLINE if_data_list = []NEWLINENEWLINE if BGPSP.bgp_sp_dut_is_tg(dut) :NEWLINE st.log("BGP SP - TG {} Loopback IP config not done for now".format(dut))NEWLINE continueNEWLINENEWLINE for afmly in addr_family_list:NEWLINE for _, link_data in sp_topo[dut][afmly]['link'].items():NEWLINE if link_data['type'] != 'LBK':NEWLINE continueNEWLINENEWLINE lpbk_if = link_data['if']NEWLINE lpbk_ip = link_data['ip']NEWLINE subnet = link_data['subnet']NEWLINENEWLINE if_data_list.append({'name': lpbk_if, 'ip': lpbk_ip, 'subnet': subnet, 'family': afmly })NEWLINE st.log("{}uring {} Loopback {}:{} {} {} ".format(action_str, afmly, dut, tb_dut, lpbk_if, lpbk_ip))NEWLINENEWLINE if threaded_run:NEWLINE dut_thread.append([ipapi.config_unconfig_interface_ip_addresses, tb_dut, if_data_list, config])NEWLINE else :NEWLINE result = ipapi.config_unconfig_interface_ip_addresses(tb_dut, if_data_list, config=config)NEWLINE if not result:NEWLINE BGPSP.bgp_sp_show_dut_cmd_logs(dut)NEWLINE st.log("{}uring {} loopback address FAILED".format(action_str, dut))NEWLINE return FalseNEWLINENEWLINE if debug_run:NEWLINE BGPSP.bgp_sp_show_dut_if_cmd_logs(dut)NEWLINENEWLINE if threaded_run:NEWLINE [out, exceptions] = putils.exec_all(bgplib.fast_start, dut_thread)NEWLINE st.log("BGP SP - Threaded Run result {}".format([out, exceptions]))NEWLINE if False in out : result = FalseNEWLINENEWLINE return resultNEWLINENEWLINENEWLINE @staticmethodNEWLINE def bgp_sp_interface_address_all_config_unconfig(config='yes', vrf='default', addr_family='all', threaded_run=True, debug_run=False):NEWLINE """NEWLINENEWLINE :param config:NEWLINE :param vrf:NEWLINE :param addr_family:NEWLINE :return:NEWLINE """NEWLINENEWLINE action_str = 'Config' if config == 'yes' else 'Unconfig'NEWLINE st.banner("{}uring 
Interface Addresses on all nodes.".format(action_str))NEWLINENEWLINE if not BGPSP.bgp_sp_topology_data_present() :NEWLINE st.log("BGP SP Topology data not available")NEWLINE st.log("SP topo:\n{}\n".format(sp_topo))NEWLINE return FalseNEWLINENEWLINE #threaded_run = TrueNEWLINE #debug_run = FalseNEWLINE result = TrueNEWLINENEWLINE config = 'add' if config == 'yes' else 'remove'NEWLINENEWLINE addr_family_list = BGPSP.bgp_sp_get_address_family_list(addr_family)NEWLINE dut_thread = []NEWLINENEWLINE dut_list = BGPSP.bgp_sp_get_dut_list()NEWLINENEWLINE for dut in dut_list :NEWLINE tb_dut = sp_topo[dut]['device']NEWLINENEWLINE if_data_list = []NEWLINENEWLINE for afmly in addr_family_list:NEWLINE for link_name, link_data in sp_topo[dut][afmly]['link'].items():NEWLINE if link_data['type'] == 'LBK':NEWLINE continueNEWLINENEWLINE link_ip = link_data['ip']NEWLINE link_if = link_data['if']NEWLINE subnet = link_data['subnet']NEWLINENEWLINE if_data_list.append({'name': link_if, 'ip': link_ip, 'subnet': subnet, 'family':afmly })NEWLINENEWLINE st.log("{}uring {} Interface {}:{} {}:{} {} ".format(action_str, afmly, dut,NEWLINE tb_dut, link_name, link_if, link_ip))NEWLINENEWLINE if threaded_run:NEWLINE dut_thread.append([ipapi.config_unconfig_interface_ip_addresses, tb_dut, if_data_list, config])NEWLINE else :NEWLINE result = ipapi.config_unconfig_interface_ip_addresses(tb_dut, if_data_list, config=config)NEWLINE if not result:NEWLINE BGPSP.bgp_sp_show_dut_cmd_logs(dut)NEWLINE st.log("{}uring {} Interface address FAILED".format(action_str, dut))NEWLINE return FalseNEWLINENEWLINE if debug_run:NEWLINE BGPSP.bgp_sp_show_dut_if_cmd_logs(dut)NEWLINENEWLINE if threaded_run:NEWLINE [out, exceptions] = putils.exec_all(bgplib.fast_start, dut_thread)NEWLINE st.log("BGP SP - Threaded Run result {}".format([out, exceptions]))NEWLINE if False in out : result = FalseNEWLINENEWLINE return resultNEWLINENEWLINENEWLINE @staticmethodNEWLINE def bgp_sp_tg_interface_ip_all_config_unconfig(config='yes', 
vrf='default', addr_family='all', threaded_run=True):NEWLINE """NEWLINENEWLINE :param config:NEWLINE :param vrf:NEWLINE :param addr_family:NEWLINE :return:NEWLINE """NEWLINENEWLINE action_str = 'Config' if config == 'yes' else 'Unconfig'NEWLINE st.banner("{}uring Interface Addresses on all TGENs.".format(action_str))NEWLINENEWLINE if not BGPSP.bgp_sp_topology_data_present() :NEWLINE st.log("BGP SP Topology data not available")NEWLINE st.log("SP topo:\n{}\n".format(sp_topo))NEWLINE return FalseNEWLINENEWLINE result = TrueNEWLINE #threaded_run = TrueNEWLINE dut_thread = []NEWLINENEWLINE dut_list = BGPSP.bgp_sp_get_tg_list()NEWLINENEWLINE for dut in dut_list :NEWLINE tb_dut = sp_topo[dut]['device']NEWLINE tg = tgen_obj_dict[tb_dut]NEWLINENEWLINE for link_name, link_data in sp_topo[dut]['intf'].items():NEWLINE if link_data['type'] == 'LBK':NEWLINE continueNEWLINENEWLINE tb_if = link_data['if']NEWLINE tg_port_handle = tg.get_port_handle(tb_if)NEWLINENEWLINE if config == 'yes' :NEWLINE st.log("\n")NEWLINE st.log("BGP SP - Resetting TG port {} {}".format(tb_dut, tb_if))NEWLINE tg.tg_traffic_control(action="reset", port_handle=tg_port_handle)NEWLINE st.log("\n")NEWLINENEWLINE if threaded_run:NEWLINE dut_thread.append([BGPSP.bgp_sp_tg_link_ip_config_unconfig, dut, link_name, addr_family, vrf, config])NEWLINE else :NEWLINE result = BGPSP.bgp_sp_tg_link_ip_config_unconfig(dut, link_name, addr_family, vrf, config=config)NEWLINE if not result:NEWLINE BGPSP.bgp_sp_show_dut_cmd_logs(dut)NEWLINE st.log("{}uring TG {} Interface address FAILED".format(action_str, dut))NEWLINENEWLINE if threaded_run:NEWLINE [out, exceptions] = putils.exec_all(bgplib.fast_start, dut_thread)NEWLINE st.log("BGP SP - Threaded Run result {}".format([out, exceptions]))NEWLINE if False in out : result = FalseNEWLINENEWLINE return resultNEWLINENEWLINENEWLINE @staticmethodNEWLINE def bgp_sp_dut_link_ip_config_unconfig(dut, link_name, addr_family='all', vrf='default', config='yes'):NEWLINENEWLINE action_str = 
'Config' if config == 'yes' else 'Unconfig'NEWLINE st.log("{}uring Interface Addresses on TG link.".format(action_str))NEWLINENEWLINE result = TrueNEWLINE addr_family_list = BGPSP.bgp_sp_get_address_family_list(addr_family)NEWLINENEWLINE if not BGPSP.bgp_sp_dut_link_present(dut, link_name) :NEWLINE st.log("BGP SP - Dut {} link {} not present".format(dut, link_name))NEWLINE return FalseNEWLINENEWLINE tb_dut = sp_topo[dut]['device']NEWLINE #link_data = sp_topo[dut]['intf'][link_name]NEWLINE #tb_if = link_data['if']NEWLINENEWLINE for afmly in addr_family_list:NEWLINENEWLINE if link_name not in sp_topo[dut][afmly]['link'].keys():NEWLINE st.log("BGP SP - {} {} {} address not assigned".format(dut, link_name, afmly))NEWLINE continueNEWLINENEWLINE ip_data = sp_topo[dut][afmly]['link'][link_name]NEWLINENEWLINE link_ip = ip_data['ip']NEWLINE link_if = ip_data['if']NEWLINE #rmt_ip = ip_data['rmt_ip']NEWLINE subnet = ip_data['subnet']NEWLINENEWLINE st.log("{}uring {} Interface {} {}:{} {} ".format(action_str, afmly,NEWLINE tb_dut, link_name, link_if, link_ip))NEWLINENEWLINE result = ipapi.config_ip_addr_interface(tb_dut, link_if, link_ip, subnet, afmly, config)NEWLINENEWLINE if not result:NEWLINE BGPSP.bgp_sp_show_dut_cmd_logs(dut)NEWLINE st.log("{}uring {} Interface address FAILED".format(action_str, dut))NEWLINE breakNEWLINENEWLINE return resultNEWLINENEWLINENEWLINE @staticmethodNEWLINE def bgp_sp_tg_link_ip_config_unconfig(dut, link_name, addr_family='all', vrf='default', config='yes'):NEWLINENEWLINE action_str = 'Config' if config == 'yes' else 'Unconfig'NEWLINE st.log("{}uring Interface Addresses on TG link.".format(action_str))NEWLINENEWLINE result = TrueNEWLINENEWLINE addr_family_list = BGPSP.bgp_sp_get_address_family_list(addr_family)NEWLINENEWLINE if not BGPSP.bgp_sp_dut_link_present(dut, link_name) :NEWLINE st.log("BGP SP - Dut {} link {} not present".format(dut, link_name))NEWLINE return FalseNEWLINENEWLINE tb_dut = sp_topo[dut]['device']NEWLINE link_data = 
sp_topo[dut]['intf'][link_name]NEWLINE tb_if = link_data['if']NEWLINENEWLINE tg = tgen_obj_dict[tb_dut]NEWLINE tg_port_handle = tg.get_port_handle(tb_if)NEWLINENEWLINE for afmly in addr_family_list:NEWLINENEWLINE if link_name not in sp_topo[dut][afmly]['link'].keys():NEWLINE st.log("BGP SP - {} {} {} address not assigned".format(dut, link_name, afmly))NEWLINE continueNEWLINENEWLINE ip_data = sp_topo[dut][afmly]['link'][link_name]NEWLINENEWLINE link_ip = ip_data['ip']NEWLINE link_if = ip_data['if']NEWLINE rmt_ip = ip_data['rmt_ip']NEWLINE subnet = ip_data['subnet']NEWLINENEWLINE st.log("{}uring {} Interface {} {}:{} {} ".format(action_str, afmly,NEWLINE tb_dut, link_name, link_if, link_ip))NEWLINENEWLINE if config =='yes' :NEWLINE if afmly == 'ipv4':NEWLINE tg_result = tg.tg_interface_config(port_handle=tg_port_handle, mode='config',NEWLINE intf_ip_addr=link_ip,NEWLINE gateway=rmt_ip, arp_send_req='1')NEWLINE else:NEWLINE tg_result = tg.tg_interface_config(port_handle=tg_port_handle, mode='config',NEWLINE ipv6_intf_addr=link_ip,NEWLINE ipv6_prefix_length=subnet,NEWLINE ipv6_gateway=rmt_ip, arp_send_req='1')NEWLINENEWLINE st.log("BGP SP - Port ip config tg api result = {}".format(tg_result))NEWLINENEWLINE if 'handle' in tg_result.keys():NEWLINE sp_topo[dut][afmly]['link'][link_name]['tg_handle'] = tg_result['handle']NEWLINE else :NEWLINE result = FalseNEWLINE breakNEWLINENEWLINE else :NEWLINE handle = ''NEWLINE if 'tg_handle' in ip_data.keys():NEWLINE handle = ip_data['tg_handle']NEWLINENEWLINE if handle == '' :NEWLINE st.log("BGP SP - {} {} {} tg handle invalid".format(dut, link_name, afmly))NEWLINE continueNEWLINENEWLINE if afmly == 'ipv4':NEWLINE tg_result = tg.tg_interface_config(port_handle=tg_port_handle, handle=handle, mode='destroy')NEWLINE else:NEWLINE tg_result = tg.tg_interface_config(port_handle=tg_port_handle, handle=handle, mode='destroy')NEWLINENEWLINE st.log("BGP SP - Port ip Unconfig tg api result = {}".format(tg_result))NEWLINENEWLINE 
sp_topo[dut][afmly]['link'][link_name]['tg_handle'] = ''NEWLINENEWLINE if not result:NEWLINE BGPSP.bgp_sp_show_dut_cmd_logs(dut)NEWLINE st.log("{}uring TG {} Interface address FAILED".format(action_str, dut))NEWLINENEWLINE return resultNEWLINENEWLINENEWLINE @staticmethodNEWLINE def bgp_sp_static_route_config_unconfig(config='yes', vrf='default', addr_family='all', threaded_run=True, debug_run=False):NEWLINE """NEWLINENEWLINE :param config:NEWLINE :param vrf:NEWLINE :param addr_family:NEWLINE :return:NEWLINE """NEWLINE action_str = 'Config' if config == 'yes' else 'Unconfig'NEWLINE st.banner("{}uring Static Route on all nodes.".format(action_str))NEWLINENEWLINE if not BGPSP.bgp_sp_topology_data_present() :NEWLINE st.log("BGP SP Topology data not available")NEWLINE st.log("SP topo:\n{}\n".format(sp_topo))NEWLINE return FalseNEWLINENEWLINE #threaded_run = TrueNEWLINE #debug_run = FalseNEWLINE result = TrueNEWLINE config = 'add' if config == 'yes' else 'remove'NEWLINENEWLINE addr_family_list = BGPSP.bgp_sp_get_address_family_list(addr_family)NEWLINE #thread_info = {'ipv4': [], 'ipv6': []}NEWLINE dut_thread = []NEWLINENEWLINE for dut in sp_topo['dut_list'] :NEWLINE tb_dut = sp_topo[dut]['device']NEWLINE rt_data_list = []NEWLINENEWLINE for afmly in addr_family_list:NEWLINENEWLINE for prefix, strt_data in sp_topo[dut][afmly]['static_rt'].items():NEWLINENEWLINE nexthop = strt_data['nexthop']NEWLINE subnet = strt_data['subnet']NEWLINE rt_data_list.append({ 'ip': prefix, 'subnet': subnet, 'nexthop': nexthop, 'family': afmly })NEWLINENEWLINE st.log("{}uring {} Static route {}:{} pfx {} nh {} .".format(action_str, afmly, dut, tb_dut, prefix, nexthop))NEWLINENEWLINE '''NEWLINE prefix_sn = "{}/{}".format(prefix, subnet)NEWLINE if config == 'add':NEWLINE if threaded_run:NEWLINE thread_info[afmly].append([ipapi.create_static_route, tb_dut, nexthop, prefix_sn, 'vtysh', afmly])NEWLINE else:NEWLINE result = ipapi.create_static_route(tb_dut, nexthop, prefix_sn, 'vtysh', afmly)NEWLINE 
else:NEWLINE if threaded_run:NEWLINE thread_info[afmly].append([ipapi.delete_static_route, tb_dut, nexthop, prefix_sn, afmly, 'vtysh'])NEWLINE else:NEWLINE result = ipapi.delete_static_route(tb_dut, nexthop, prefix_sn, afmly, 'vtysh')NEWLINE result = TrueNEWLINE '''NEWLINENEWLINE if threaded_run:NEWLINE dut_thread.append([ipapi.config_unconfig_static_routes, tb_dut, rt_data_list, "vtysh", config])NEWLINE else :NEWLINE result = ipapi.config_unconfig_static_routes(tb_dut, rt_data_list, shell="vtysh", config=config)NEWLINE if not result:NEWLINE BGPSP.bgp_sp_show_dut_cmd_logs(dut)NEWLINE st.log("{}uring {} Static route FAILED".format(action_str, dut))NEWLINE return FalseNEWLINENEWLINE if debug_run:NEWLINE BGPSP.bgp_sp_show_dut_route_cmd_logs(dut)NEWLINENEWLINE if threaded_run:NEWLINE [out, exceptions] = putils.exec_all(bgplib.fast_start, dut_thread)NEWLINE st.log("BGP SP - Threaded Run result {}".format([out, exceptions]))NEWLINE if False in out : result = FalseNEWLINENEWLINE return resultNEWLINENEWLINENEWLINE @staticmethodNEWLINE def bgp_sp_dut_interface_address_ping_test(dut, vrf='default', addr_family='all', ping_count=3):NEWLINENEWLINE st.log("BGP SP - {} interface IP address Ping test".format(dut))NEWLINENEWLINE #debug_run = FalseNEWLINE result = TrueNEWLINENEWLINE if not BGPSP.bgp_sp_dut_present(dut):NEWLINE st.log("BGP SP - Dut {} not present".format(dut))NEWLINE return FalseNEWLINENEWLINE tb_dut = BGPSP.bgp_sp_get_dut_device(dut)NEWLINE BGPSP.bgp_sp_show_dut_route_cmd_logs(dut)NEWLINENEWLINE addr_family_list = BGPSP.bgp_sp_get_address_family_list(addr_family)NEWLINENEWLINE for afmly in addr_family_list:NEWLINE for link_name, link_data in sp_topo[dut][afmly]['link'].items():NEWLINE if link_data['type'] == 'LBK' :NEWLINE continueNEWLINE if 'rmt_ip' not in link_data.keys():NEWLINE continueNEWLINENEWLINE if BGPSP.bgp_sp_is_tg_connected_link(dut, link_name):NEWLINE st.log("Not Trying Pinf test for TG connected link {}".format(link_name))NEWLINE continue #only for 
nowNEWLINENEWLINE lcl_ip = link_data['ip']NEWLINE rmt_ip = link_data['rmt_ip']NEWLINE st.log("Pingtest for {} {} {} --{}-> {} ".format(afmly, tb_dut, lcl_ip, link_name, rmt_ip))NEWLINENEWLINE if not ipapi.ping(tb_dut, rmt_ip, family=afmly, count=ping_count):NEWLINE st.log("Ping FAILED for {} {} {} --{}-> {} ".format(afmly, tb_dut, lcl_ip, link_name, rmt_ip))NEWLINE st.log("ERROR Dut {} Ping to {} FAILED ".format(tb_dut, rmt_ip))NEWLINE result = FalseNEWLINE breakNEWLINENEWLINE if not result:NEWLINE st.log("{} Ping Test FAILED".format(dut))NEWLINE BGPSP.bgp_sp_show_dut_cmd_logs(dut)NEWLINE return FalseNEWLINENEWLINE return resultNEWLINENEWLINENEWLINE @staticmethodNEWLINE def bgp_sp_interface_address_ping_test(vrf='default', addr_family='all', ping_count=3):NEWLINE """NEWLINENEWLINE :param config:NEWLINE :param vrf:NEWLINE :param addr_family:NEWLINE :return:NEWLINE """NEWLINENEWLINE st.log("BGP SP network Ping test for interface IP addressess")NEWLINENEWLINE if not BGPSP.bgp_sp_topology_data_present() :NEWLINE st.log("BGP SP Topology data not available")NEWLINE st.log("SP topo:\n{}\n".format(sp_topo))NEWLINE return FalseNEWLINENEWLINE threaded_run = TrueNEWLINE result = TrueNEWLINE dut_thread = []NEWLINENEWLINE dut_list = BGPSP.bgp_sp_get_dut_list()NEWLINENEWLINE if not dut_list or len(dut_list) < 2: threaded_run = FalseNEWLINENEWLINE for dut in dut_list :NEWLINE if threaded_run:NEWLINE dut_thread.append([BGPSP.bgp_sp_dut_interface_address_ping_test, dut, vrf, addr_family, ping_count])NEWLINE else :NEWLINE result = BGPSP.bgp_sp_dut_interface_address_ping_test(dut, vrf, addr_family, ping_count)NEWLINENEWLINE if not result:NEWLINE BGPSP.bgp_sp_show_dut_if_cmd_logs(dut)NEWLINE st.log("BGP SP - Ping Test Failed for {}".format(dut))NEWLINE breakNEWLINENEWLINE if threaded_run:NEWLINE [out, exceptions] = putils.exec_all(bgplib.fast_start, dut_thread)NEWLINE st.log("BGP SP - Ping Test Threaded Run result {}".format([out, exceptions]))NEWLINE if False in out : result = 
False

        if not result:
            st.log("BGP SP - Interface Ping Test FAILED")

        return result


    @staticmethod
    def bgp_sp_dut_interface_shut_noshut(dut, link_name, shut='yes'):
        """Shut down or bring up the dut interface behind the given topology link.

        :param dut: logical dut name
        :param link_name: topology link whose dut-side interface is toggled
        :param shut: 'yes' to shut down the interface, anything else to start it up
        :return: True on success, False if the dut/interface lookup or the
                 shutdown/startup operation fails
        """
        action_str = "Shut down" if shut == 'yes' else 'Startup'

        tb_dut = BGPSP.bgp_sp_get_dut_device(dut)
        tb_intf = BGPSP.bgp_sp_get_link_dut_interface(dut, link_name)

        # Empty strings mean the dut or link is not present in the topology.
        if tb_dut == '' or tb_intf == '' :
            st.log("BGP SP - tb dut {} or if {} empty".format(tb_dut, tb_intf))
            return False

        st.log("BGP SP - {} {} {}".format(action_str, dut, link_name))

        if shut == 'yes':
            result = ifapi.interface_shutdown(tb_dut, tb_intf)
        else :
            result = ifapi.interface_noshutdown(tb_dut, tb_intf)

        if not result :
            st.log("BGP SP - {} {} {} Failed".format(action_str, dut, link_name))

        return result


    @staticmethod
    def bgp_sp_config_ip_topology_on_testbed():
        """Apply the base IP topology: loopbacks, loopback/interface addresses, static routes."""
        st.banner("BGP SP Base Class Pre CONFIG - START")
        BGPSP.bgp_sp_loopback_interface_config_unconfig(config='yes', vrf='default')
        BGPSP.bgp_sp_loopback_address_config_unconfig(config='yes', addr_family='all')
        BGPSP.bgp_sp_interface_address_all_config_unconfig(config='yes', addr_family='all')
        BGPSP.bgp_sp_static_route_config_unconfig(config='yes', addr_family='all')
        st.banner("BGP SP Base Class Pre CONFIG - END")


    @staticmethod
    def bgp_sp_unconfig_ip_topology_on_testbed():
        """Tear down the base IP topology in the reverse order of configuration."""
        st.banner("BGP SP Base Class Pre CONFIG CLEANUP - START")
        BGPSP.bgp_sp_static_route_config_unconfig('no')
        BGPSP.bgp_sp_interface_address_all_config_unconfig(config='no')
        BGPSP.bgp_sp_loopback_address_config_unconfig(config='no')
        BGPSP.bgp_sp_loopback_interface_config_unconfig(config='no')
        st.banner("BGP SP Base Class Pre CONFIG CLEANUP - END")


    @staticmethod
    
def bgp_sp_bgp_configured(dut, vrf='default'):NEWLINENEWLINE if dut not in bgp_topo.keys():NEWLINE return FalseNEWLINENEWLINE if vrf not in bgp_topo[dut].keys():NEWLINE return FalseNEWLINENEWLINE if bgp_topo[dut][vrf]['asn'] == 0 :NEWLINE return FalseNEWLINENEWLINE if bgp_topo[dut][vrf]['asn'] == '0' :NEWLINE return FalseNEWLINENEWLINE if bgp_topo[dut][vrf]['asn'] == '' :NEWLINE return FalseNEWLINENEWLINE return TrueNEWLINENEWLINENEWLINE @staticmethodNEWLINE def bgp_sp_get_bgp_asn(dut, vrf='default'):NEWLINENEWLINE if not BGPSP.bgp_sp_bgp_configured(dut, vrf):NEWLINE return 0NEWLINENEWLINE return int(bgp_topo[dut][vrf]['asn'])NEWLINENEWLINENEWLINE @staticmethodNEWLINE def bgp_sp_bgp_asn_match(dut, asn = 0, vrf='default'):NEWLINENEWLINE if not BGPSP.bgp_sp_bgp_configured(dut, vrf):NEWLINE return 0NEWLINENEWLINE bgp_asn = BGPSP.bgp_sp_get_bgp_asn(dut, vrf)NEWLINENEWLINE if bgp_asn == asn :NEWLINE return TrueNEWLINENEWLINE return FalseNEWLINENEWLINENEWLINE @staticmethodNEWLINE def bgp_sp_is_ip_bgp_neigbour(dut, nbr_ip, addr_family, vrf='default'):NEWLINENEWLINE if not BGPSP.bgp_sp_addr_family_valid(addr_family):NEWLINE return FalseNEWLINENEWLINE if not BGPSP.bgp_sp_bgp_configured(dut, vrf):NEWLINE return FalseNEWLINENEWLINE if nbr_ip in bgp_topo[dut][vrf][addr_family]['nbr'].keys():NEWLINE return TrueNEWLINENEWLINE return FalseNEWLINENEWLINENEWLINE @staticmethodNEWLINE def bgp_sp_get_bgp_neigbour_ip_list(dut, addr_family, vrf='default'):NEWLINENEWLINE if not BGPSP.bgp_sp_addr_family_valid(addr_family):NEWLINE return []NEWLINENEWLINE if not BGPSP.bgp_sp_bgp_configured(dut, vrf):NEWLINE return []NEWLINENEWLINE nbr_ip_list = []NEWLINE for nbr_ip in bgp_topo[dut][vrf][addr_family]['nbr'].keys():NEWLINE nbr_ip_list.append(nbr_ip)NEWLINENEWLINE return copy.deepcopy(nbr_ip_list)NEWLINENEWLINENEWLINE @staticmethodNEWLINE def bgp_sp_get_bgp_neigbour_list(dut, addr_family, vrf='default'):NEWLINENEWLINE if not BGPSP.bgp_sp_addr_family_valid(addr_family):NEWLINE return 
{}

        if not BGPSP.bgp_sp_bgp_configured(dut, vrf):
            return {}

        nbr_ip_data_list = {}
        for nbr_ip, nbr_data in bgp_topo[dut][vrf][addr_family]['nbr'].items():
            nbr_ip_data_list.update( { nbr_ip: nbr_data} )

        return copy.deepcopy(nbr_ip_data_list)


    @staticmethod
    def bgp_sp_get_bgp_neigbour_ip_between_duts(from_dut, to_dut, addr_family, from_vrf='default', to_vrf='default'):
        """Return the neighbour IPs on from_dut that point at to_dut (matched by remote ASN and to_dut's IPs)."""

        if not BGPSP.bgp_sp_addr_family_valid(addr_family):
            return []

        from_asn = BGPSP.bgp_sp_get_bgp_asn(from_dut, from_vrf)
        to_asn = BGPSP.bgp_sp_get_bgp_asn(to_dut, to_vrf)
        to_dut_ip_list = BGPSP.bgp_sp_get_dut_ip_address_list(to_dut, addr_family, vrf=to_vrf)

        # Either side without a configured ASN means no sessions can exist.
        if from_asn == 0 or to_asn == 0 :
            return []

        nbr_ip_list = []
        for nbr_ip, nbr_data in bgp_topo[from_dut][from_vrf][addr_family]['nbr'].items():
            if nbr_data['rmt_asn'] == to_asn :
                if nbr_ip in to_dut_ip_list :
                    nbr_ip_list.append(nbr_ip)

        return copy.deepcopy(nbr_ip_list)


    @staticmethod
    def bgp_sp_get_bgp_network_prefix_list(dut, addr_family, vrf='default'):
        """Return a deep-copied dict of advertised network prefix -> subnet for dut/vrf/addr_family."""

        if not BGPSP.bgp_sp_addr_family_valid(addr_family):
            return {}

        if not BGPSP.bgp_sp_bgp_configured(dut, vrf):
            return {}

        nbr_nwip_list = {}
        for prefix, subnet in bgp_topo[dut][vrf][addr_family]['network'].items():
            nbr_nwip_list.update( {prefix: subnet} )

        return copy.deepcopy(nbr_nwip_list)


    @staticmethod
    def bgp_sp_bgp_config_unconfig(dut, local_asn, router_id='', vrf='default', config='yes', cli_type=""):
        """Configure (config='yes') or remove (config='no') the BGP router on dut/vrf
        and mirror the change in the bgp_topo cache.

        :param dut
        :param local_asn:
        :param vrf:
        :param config
        :return: True on success, False on validation or device failure.
        """
        action_str = 'Config' if config == 'yes' else 'Unconfig'
        st.log("{}uring BGP router.".format(action_str))

        if not BGPSP.bgp_sp_topology_data_present() :
            st.log("BGP SP Topology data not available")
            return False

        if not BGPSP.bgp_sp_dut_present(dut):
            st.log("BGP SP - Dut {} doesnt exist".format(dut))
            return False

        if not local_asn :
            st.log("BGP SP - local asn not provided ")
            return False

        result = True

        if dut not in bgp_topo.keys():
            if config != 'yes' :
                st.log("BGP SP - {} BGP dut doesnt exist".format(dut))
                return False
            bgp_topo[dut] = {}

        if vrf not in bgp_topo[dut].keys():
            if config != 'yes' :
                # Unconfig of a never-configured vrf is treated as success.
                st.log("BGP SP - {} vrf {} BGP router doesnt exist".format(dut, vrf))
                return True

            # Seed the per-vrf cache skeleton for both address families.
            bgp_topo[dut][vrf] = {}
            bgp_topo[dut][vrf]['asn'] = int(local_asn)
            bgp_topo[dut][vrf]['rtrid'] = '0'
            bgp_topo[dut][vrf]['ipv4']={}
            bgp_topo[dut][vrf]['ipv4']['nbr']={}
            bgp_topo[dut][vrf]['ipv4']['unicast']={}
            bgp_topo[dut][vrf]['ipv4']['network'] = {}
            bgp_topo[dut][vrf]['ipv6']={}
            bgp_topo[dut][vrf]['ipv6']['nbr']={}
            bgp_topo[dut][vrf]['ipv6']['unicast']={}
            bgp_topo[dut][vrf]['ipv6']['network'] = {}

        # Refuse to re-configure the router with a different ASN.
        if bgp_topo[dut][vrf]['asn'] != 0 :
            if bgp_topo[dut][vrf]['asn'] != local_asn:
                st.log("BGP SP - bgp asns {} {} dont match".format(bgp_topo[dut][vrf]['asn'], local_asn))
                return False

        tb_dut = BGPSP.bgp_sp_get_dut_device(dut)

        if config == 'yes' :

            # Default the router-id to the dut's first ipv4 loopback address.
            if not router_id or router_id == '' :
                router_id = BGPSP.bgp_sp_get_dut_loopback_ip(dut, 0, 'ipv4')

            st.log("BGP SP - {} vrf {} Configuring BGP with as {}".format(dut, vrf, local_asn))

            result = bgpapi.config_bgp_router(tb_dut, local_asn, router_id=router_id, keep_alive=30, hold=60, config='yes')
            if not result :
                st.log("BGP SP - {} vrf {} Configuring BGP with as {} FAILED".format(dut, vrf, local_asn))
                return False

            bgp_topo[dut][vrf]['asn'] = int(local_asn)
            bgp_topo[dut][vrf]['ipv4']['rtr_id'] = router_id

            # Disable the implicit ipv4-unicast default so families are enabled explicitly.
            bgpapi.config_bgp_default(tb_dut, local_asn, 'ipv4-unicast', config='no', cli_type=cli_type)

        else :

            st.log("BGP SP - {} vrf {} Unconfiguring BGP with as {}".format(dut, vrf, local_asn))

            result = bgpapi.config_bgp_router(tb_dut, local_asn, config='no')
            if not result :
                st.log("BGP SP - {} vrf {} UnConfiguring BGP with as {} FAILED".format(dut, vrf, local_asn))
                return False

            del bgp_topo[dut][vrf]

        #st.log("BGP SP - Bgp topo after {} router bgpn: {}".format(action_str, bgp_topo))
        return result


    @staticmethod
    def bgp_sp_dut_bgp_redistribute_connected_config_unconfig(dut, addr_family='all', tr_type='unicast', vrf='default', config='yes'):
        """Enable/disable redistribution of connected routes into BGP on one dut."""

        action_str = 'Config' if config == 'yes' else 'Unconfig'
        st.log("{}uring BGP redistribute connected route on {}".format(action_str, dut))

        if not BGPSP.bgp_sp_dut_present(dut):
            st.log("Dut {} not present".format(dut))
            return False

        result = True
        afmly_list = BGPSP.bgp_sp_get_address_family_list(addr_family)

        tb_dut = BGPSP.bgp_sp_get_dut_device(dut)
        dut_asn = BGPSP.bgp_sp_get_bgp_asn(dut, vrf)

        if dut_asn == 0 :
            # NOTE(review): "bot" in this log message is a typo for "not".
            st.log("BGP SP - BGP bot configured in dut {}".format(dut))
            return False

        for afmly in afmly_list:
            bgpapi.config_address_family_redistribute(tb_dut, dut_asn, afmly, tr_type, "connected", config=config)

        return result


    @staticmethod
    def bgp_sp_bgp_redistribute_connected_config_unconfig(dut_list, addr_family='all', tr_type='unicast', vrf='default', config='yes'):
        """Apply redistribute-connected on every dut in dut_list (threaded when more than one)."""

        action_str = 'Config' if config == 'yes' else 'Unconfig'
        st.log("{}uring BGP redistribute connected route ".format(action_str))

        result = True
        threaded_run = True
        dut_thread = []

        dut_list = list(dut_list) if isinstance(dut_list, list) else [dut_list]
        if not dut_list or len(dut_list) < 2: threaded_run = False

        for dut in dut_list :
            if threaded_run:
                dut_thread.append([BGPSP.bgp_sp_dut_bgp_redistribute_connected_config_unconfig,
                                   dut, addr_family, tr_type, vrf, config])
            else :
                result = BGPSP.bgp_sp_dut_bgp_redistribute_connected_config_unconfig(
                                   dut, addr_family, tr_type, vrf, config)

            if not result:
                st.log("BGP SP - Redistribute connected at {} failed".format(dut))
                break

        if threaded_run:
            [out, exceptions] = putils.exec_all(bgplib.fast_start, dut_thread)
            st.log("BGP SP - Redistribute connected Threaded Run result {}".format([out, exceptions]))
            if False in out : result = False

        if not result:
            st.log("BGP SP - {}uring Redistribute connected Static FAILED".format(action_str))

        return result


    @staticmethod
    def bgp_sp_dut_bgp_redistribute_static_config_unconfig(dut, addr_family='all', tr_type='unicast', vrf='default', config='yes'):
        """Enable/disable redistribution of static routes into BGP on one dut."""

        action_str = 'Config' if config == 'yes' else 'Unconfig'
        st.log("{}uring BGP redistribute static route on {}".format(action_str, dut))

        if not BGPSP.bgp_sp_dut_present(dut):
            st.log("Dut {} not present".format(dut))
            return False

        result = True
        afmly_list = BGPSP.bgp_sp_get_address_family_list(addr_family)
        tb_dut = BGPSP.bgp_sp_get_dut_device(dut)

        dut_asn = BGPSP.bgp_sp_get_bgp_asn(dut, vrf)
        if dut_asn == 0 :
            st.log("BGP SP - BGP bot configured in dut {}".format(dut))
            return False

        for afmly in afmly_list:
            bgpapi.config_address_family_redistribute(tb_dut, dut_asn, afmly, tr_type, "static", config=config)

        return result


    @staticmethod
    def bgp_sp_bgp_redistribute_static_config_unconfig(dut_list, addr_family='all', tr_type='unicast', vrf='default', config='yes'):
        """Apply redistribute-static on every dut in dut_list (threaded when more than one)."""

        action_str = 'Config' if config == 'yes' else 'Unconfig'
        st.log("{}uring BGP redistribute static route ".format(action_str))

        result = True
        threaded_run = True
        dut_thread = []

        dut_list = list(dut_list) if isinstance(dut_list, list) else [dut_list]
        if not dut_list or len(dut_list) < 2: threaded_run = False

        for dut in dut_list :
            if threaded_run:
                dut_thread.append([BGPSP.bgp_sp_dut_bgp_redistribute_static_config_unconfig,
                                   dut, addr_family, tr_type, vrf, config])
            else :
                result = BGPSP.bgp_sp_dut_bgp_redistribute_static_config_unconfig(
                                   dut, addr_family, tr_type, vrf, config)

            if not result:
                st.log("BGP SP - Redistribute static at {} failed".format(dut))
                break

        if threaded_run:
            [out, exceptions] = putils.exec_all(bgplib.fast_start, dut_thread)
            st.log("BGP SP - Redistribute Static Threaded Run result {}".format([out, exceptions]))
            if False in out : result = False

        if not result:
            st.log("BGP SP - {}uring Redistribute Static FAILED".format(action_str))

        return result


    @staticmethod
    def bgp_sp_dut_bgp_network_advertise_config_unconfig(dut, network_list=[], addr_family='ipv4', vrf='default', config='yes', cli_type=""):
        """Advertise/withdraw the given networks under the BGP address family on one dut."""

        action_str = 'Config' if config == 'yes' else 'Unconfig'
        st.log("{}uring BGP network advertise on {}".format(action_str, dut))

        if not BGPSP.bgp_sp_dut_present(dut):
            st.log("Dut {} not present".format(dut))
            return False

        if not BGPSP.bgp_sp_addr_family_valid(addr_family) :
            st.log("BGP SP - Invalid address family {}".format(addr_family))
            return False

        result = True
        tb_dut = BGPSP.bgp_sp_get_dut_device(dut)

        dut_asn = BGPSP.bgp_sp_get_bgp_asn(dut, vrf)
        if dut_asn == 0 :
            st.log("BGP SP - BGP bot configured in dut {}".format(dut))
            return False
        # Only verify network import when configuring, not when removing.
        check_flag = True if config == "yes" else False
        for network_ip in network_list:
            result = bgpapi.config_bgp_network_advertise(tb_dut, dut_asn, network_ip, route_map='',
                                     addr_family=addr_family, config=config, cli_type=cli_type, network_import_check=check_flag)

        return result


    @staticmethod
    def bgp_sp_bgp_network_advertise_config_unconfig(dut_list, network_list=[], addr_family='ipv4', vrf='default', config='yes'):
        """Advertise/withdraw network_list on every dut in dut_list (threaded when more than one)."""

        action_str = 'Config' if config == 'yes' else 'Unconfig'
        st.log("{}uring BGP network advertise ".format(action_str))

        result = True
        threaded_run = True
        dut_thread = []

        dut_list = list(dut_list) if isinstance(dut_list, list) else [dut_list]
        if not dut_list or len(dut_list) < 2: threaded_run = False

        for dut in dut_list :
            if threaded_run:
                dut_thread.append([BGPSP.bgp_sp_dut_bgp_network_advertise_config_unconfig,
                                   dut, network_list, addr_family, vrf, config])
            else :
                result = BGPSP.bgp_sp_dut_bgp_network_advertise_config_unconfig(
                                   dut, network_list, addr_family, vrf, config)

            if not result:
                st.log("BGP SP - Network advertise at {} failed".format(dut))
                break

        if threaded_run:
            [out, exceptions] = putils.exec_all(bgplib.fast_start, dut_thread)
            st.log("BGP SP - Network advertise Threaded Run result {}".format([out, exceptions]))
            if False in out : result = False

        if not result:
            st.log("BGP SP - {}uring Network advertise FAILED".format(action_str))

        return result


    
@staticmethod
    def bgp_sp_bgp_deterministic_med_config_unconfig(dut_list, vrf='default', config='yes'):
        """Enable/disable 'bgp deterministic-med' on every dut in dut_list (threaded when more than one)."""

        action_str = 'Config' if config == 'yes' else 'Unconfig'
        st.log("{}uring BGP deterministic Med".format(action_str))

        result = True
        threaded_run = True
        dut_thread = []

        dut_list = list(dut_list) if isinstance(dut_list, list) else [dut_list]
        if not dut_list or len(dut_list) < 2: threaded_run = False

        for dut in dut_list :
            tb_dut = BGPSP.bgp_sp_get_dut_device(dut)
            dut_asn = BGPSP.bgp_sp_get_bgp_asn(dut, vrf)

            if dut_asn == 0 :
                st.log("BGP SP - BGP not configured in dut {}".format(dut))
                return False

            if threaded_run:
                dut_thread.append([bgpapi.config_bgp_deterministic_med, tb_dut, dut_asn, config])
            else :
                result = bgpapi.config_bgp_deterministic_med(tb_dut, dut_asn, config=config)

            if not result:
                st.log("BGP SP - deterministic med at {} failed".format(dut))
                break

        if threaded_run:
            [out, exceptions] = putils.exec_all(bgplib.fast_start, dut_thread)
            st.log("BGP SP - Deterministic med Threaded Run result {}".format([out, exceptions]))
            if False in out : result = False

        if not result:
            st.log("BGP SP - {}uring Deterministic med FAILED".format(action_str))

        return result


    @staticmethod
    def bgp_sp_bgp_compare_med_config_unconfig(dut_list, vrf='default', config='yes'):
        """Enable/disable 'bgp always-compare-med' on every dut in dut_list (threaded when more than one)."""

        action_str = 'Config' if config == 'yes' else 'Unconfig'
        st.log("{}uring BGP Always compare Med".format(action_str))

        result = True
        threaded_run = True
        dut_thread = []

        dut_list = list(dut_list) if isinstance(dut_list, list) else [dut_list]
        if not dut_list or len(dut_list) < 2: threaded_run = False

        for dut in dut_list :
            tb_dut = BGPSP.bgp_sp_get_dut_device(dut)
            dut_asn = BGPSP.bgp_sp_get_bgp_asn(dut, vrf)

            if dut_asn == 0 :
                st.log("BGP SP - BGP not configured in dut {}".format(dut))
                return False

            if threaded_run:
                dut_thread.append([bgpapi.config_bgp_always_compare_med, tb_dut, dut_asn, config])
            else :
                result = bgpapi.config_bgp_always_compare_med(tb_dut, dut_asn, config=config)

            if not result:
                st.log("BGP SP - compare med at {} failed".format(dut))
                break

        if threaded_run:
            [out, exceptions] = putils.exec_all(bgplib.fast_start, dut_thread)
            st.log("BGP SP - compare med Threaded Run result {}".format([out, exceptions]))
            if False in out : result = False

        if not result:
            st.log("BGP SP - {}uring Always compare med FAILED".format(action_str))

        return result


    @staticmethod
    def bgp_sp_bgp_ctoc_reflection_config_unconfig(dut_list, vrf='default', config='yes', cli_type=""):
        """Enable/disable client-to-client route reflection on every dut in dut_list."""

        action_str = 'Config' if config == 'yes' else 'Unconfig'
        st.log("{}uring BGP Client to Client Route Reflection".format(action_str))

        result = True
        threaded_run = True
        dut_thread = []

        dut_list = list(dut_list) if isinstance(dut_list, list) else [dut_list]
        if not dut_list or len(dut_list) < 2: threaded_run = False

        for dut in dut_list :
            tb_dut = BGPSP.bgp_sp_get_dut_device(dut)
            dut_asn = BGPSP.bgp_sp_get_bgp_asn(dut, vrf)

            if dut_asn == 0 :
                st.log("BGP SP - BGP not configured in dut {}".format(dut))
                return False

            if threaded_run:
                dut_thread.append([bgpapi.create_bgp_client_to_client_reflection, tb_dut, dut_asn, config, cli_type])
            else :
                result = bgpapi.create_bgp_client_to_client_reflection(tb_dut, dut_asn, config=config, cli_type= cli_type)

            if not result:
                st.log("BGP SP - Client to Client RR at {} failed".format(dut))
                break

        if threaded_run:
            [out, exceptions] = putils.exec_all(bgplib.fast_start, dut_thread)
            st.log("BGP SP - Client to Client RR Threaded Run result {}".format([out, exceptions]))
            if False in out : result = False

        if not result:
            st.log("BGP SP - {}uring Client to Client RR FAILED".format(action_str))

        return result


    @staticmethod
    def bgp_sp_bgp_neighbor_route_reflector_config_unconfig(dut, nbr_list=[], addr_family='ipv4', vrf='default', config='yes', cli_type="vtysh"):
        """Mark the given neighbours (or, when nbr_list is empty, every known
        neighbour) of dut as route-reflector clients, or remove the marking."""

        action_str = 'Config' if config == 'yes' else 'Unconfig'
        st.log("{}uring BGP Neighbor Route Reflector clients ".format(action_str))

        result = True

        tb_dut = BGPSP.bgp_sp_get_dut_device(dut)
        dut_asn = BGPSP.bgp_sp_get_bgp_asn(dut, vrf)

        if dut_asn == 0 :
            st.log("BGP SP - dut {} doesnt have bgp configured".format(dut))
            return False

        # Address family only needs validating when an explicit nbr_list is given;
        # otherwise every family from the expansion below is used.
        if len(nbr_list) != 0:
            if not BGPSP.bgp_sp_addr_family_valid(addr_family) :
                st.log("BGP SP - Invalid address family {}".format(addr_family))
                return False

        afmly_list = BGPSP.bgp_sp_get_address_family_list(addr_family)

        for afmly in afmly_list :
            dut_nbr_list = BGPSP.bgp_sp_get_bgp_neigbour_ip_list(dut, afmly, vrf=vrf)
            if len(nbr_list) == 0 :
                rr_nbr_list = dut_nbr_list
            else :
                rr_nbr_list = nbr_list

            for nbr_ip in rr_nbr_list :
                # Skip (do not fail) neighbours that are not actually configured on dut.
                if nbr_ip not in dut_nbr_list :
                    st.log("BGP SP - nbr {} not in ngr list {} Failed".format(nbr_ip, dut_nbr_list))
                    continue

                st.log("BGP SP - {}uring {} route-reflector-client {}.".format(action_str, dut, nbr_ip))
                result = bgpapi.create_bgp_route_reflector_client(tb_dut, dut_asn, afmly, nbr_ip, config=config)
                if not result :
                    st.log("BGP SP - Configuring client reflection 
on {} {} bgp {} Failed".format(dut, afmly, dut_asn))NEWLINE breakNEWLINENEWLINE return resultNEWLINENEWLINENEWLINE @staticmethodNEWLINE def bgp_sp_route_map_config_unconfig(dut, rmap_name, condition='permit', sequence='', config='yes', **kwargs):NEWLINENEWLINE cli_type = st.get_ui_type(dut, cli_type="")NEWLINE # cli_type = "vtysh" if cli_type in ['click', "vtysh"] else cli_typeNEWLINE # cli_type = "vtysh" if cli_type in ["rest-patch", "rest-put"] else cli_typeNEWLINE cli_type = "vtysh" if cli_type in ['click', "vtysh"] else ("klish" if cli_type in ["rest-patch", "rest-put"] else cli_type)NEWLINE action_str = 'Config' if config == 'yes' else 'Unconfig'NEWLINE st.log("{}uring route map".format(action_str))NEWLINENEWLINE result = TrueNEWLINE tb_dut = BGPSP.bgp_sp_get_dut_device(dut)NEWLINENEWLINE no_params = True if not kwargs else FalseNEWLINE cfg_action = "no" if config == 'no' else ""NEWLINE cmd_str = ''NEWLINENEWLINE if rmap_name == '' :NEWLINE st.log("BGP SP - Routemap name must")NEWLINE return FalseNEWLINENEWLINE if no_params :NEWLINE if config == 'yes' :NEWLINE if sequence == '' :NEWLINE st.log("BGP SP - Sequence value for rmap must")NEWLINE return FalseNEWLINE else :NEWLINE if condition == '':NEWLINE st.log("BGP SP - routemap condition permit/deny is must")NEWLINE return FalseNEWLINE else :NEWLINE cmd_str = "route-map {} {} {}".format(rmap_name, condition, sequence)NEWLINENEWLINE elif config == 'no' :NEWLINE if sequence == '' :NEWLINE cmd_str = "no route-map {}".format(rmap_name)NEWLINE else :NEWLINE if condition == '':NEWLINE st.log("BGP SP - routemap condition permit/deny is must")NEWLINE return FalseNEWLINE else :NEWLINE cmd_str = "no route-map {} {} {}".format(rmap_name, condition, sequence)NEWLINENEWLINE if no_params :NEWLINE #st.log("BGP SP - Route Map cmd without params is\n{}\n".format(cmd_str))NEWLINE st.config(tb_dut, cmd_str, type= cli_type)NEWLINE result = TrueNEWLINE return resultNEWLINENEWLINE if condition == '':NEWLINE st.log("BGP SP - routemap 
condition permit/deny is must")NEWLINE return FalseNEWLINENEWLINE cmd_str = "route-map {} {} {}".format(rmap_name, condition, sequence)NEWLINENEWLINE if 'metric' in kwargs :NEWLINE metric = kwargs['metric']NEWLINE cmd_str += "\n {} set metric {} ".format(cfg_action, metric)NEWLINENEWLINE if 'community' in kwargs :NEWLINE community = kwargs['metric']NEWLINE cmd_str += "\n {} set community {} ".format(cfg_action, community)NEWLINENEWLINE #st.log("BGP SP - Route Map cmd is \n{}\n".format(cmd_str))NEWLINE st.config(tb_dut, cmd_str, type= cli_type)NEWLINE return resultNEWLINENEWLINENEWLINENEWLINE @staticmethodNEWLINE def bgp_sp_bgp_nexthop_self_config_unconfig(dut_list=[], addr_family='all', vrf='default', force='no', config='yes'):NEWLINENEWLINE action_str = 'Config' if config == 'yes' else 'Unconfig'NEWLINE st.log("{}uring BGP nexthop self ".format(action_str))NEWLINENEWLINE result = TrueNEWLINE afmly_list = BGPSP.bgp_sp_get_address_family_list(addr_family)NEWLINENEWLINE for dut in dut_list :NEWLINE tb_dut = BGPSP.bgp_sp_get_dut_device(dut)NEWLINE dut_asn = BGPSP.bgp_sp_get_bgp_asn(dut, vrf)NEWLINENEWLINE if dut_asn == 0 :NEWLINE st.log("BGP SP - BGP not configured on dut {}".format(dut))NEWLINE continueNEWLINENEWLINE for afmly in afmly_list :NEWLINE dut_nbr_list = BGPSP.bgp_sp_get_bgp_neigbour_ip_list(dut, afmly, vrf=vrf)NEWLINE for bgp_nbr in dut_nbr_list :NEWLINE st.log("BGP SP - {}uring {} nexthop self {}.".format(action_str, dut, bgp_nbr))NEWLINE result = bgpapi.create_bgp_next_hop_self(tb_dut, dut_asn, afmly, bgp_nbr, force, config=config)NEWLINE if not result :NEWLINE st.log("BGP SP - Configuring nexthop self on {} {} bgp {} Failed".format(dut, afmly, dut_asn))NEWLINE breakNEWLINE else :NEWLINE if config == 'yes' :NEWLINE bgp_topo[dut][vrf][afmly]['nbr'][bgp_nbr].update({'nh_self': True})NEWLINE else :NEWLINE del bgp_topo[dut][vrf][afmly]['nbr'][bgp_nbr]['nh_self']NEWLINENEWLINE return resultNEWLINENEWLINENEWLINE @staticmethodNEWLINE def 
bgp_sp_bgp_neighbor_route_map_config_unconfig(dut, nbr_list, route_map, direction, addr_family, vrf='default', config='yes'):
        """Attach/detach a route-map (direction 'in' or 'out') on the given BGP
        neighbours of dut, and mirror the binding in the bgp_topo cache."""

        action_str = 'Config' if config == 'yes' else 'Unconfig'
        st.log("{}uring BGP neighbor route map".format(action_str))

        result = True
        if not BGPSP.bgp_sp_addr_family_valid(addr_family) :
            st.log("BGP SP - Invalid address family {}".format(addr_family))
            return False

        if direction != 'in' and direction != 'out' :
            st.log("BGP SP - Invalid rmap direction {}".format(direction))
            return False

        tb_dut = BGPSP.bgp_sp_get_dut_device(dut)
        dut_asn = BGPSP.bgp_sp_get_bgp_asn(dut, vrf)

        if dut_asn == 0 :
            st.log("BGP SP - dut {} doesnt have bgp configured".format(dut))
            return False

        dut_nbr_list = BGPSP.bgp_sp_get_bgp_neigbour_ip_list(dut, addr_family, vrf=vrf)

        for nbr_ip in nbr_list :
            if nbr_ip in dut_nbr_list :
                bgpapi.config_bgp(dut=tb_dut, local_as=dut_asn, neighbor= nbr_ip,
                                  addr_family=addr_family, config_type_list =["routeMap"],
                                  routeMap=route_map, diRection= direction, config = config)

                # NOTE(review): the return value of config_bgp is discarded and
                # result is forced True, so per-neighbour failures are not reported.
                result = True
                if result :
                    if config == 'yes':
                        if direction == 'out' :
                            bgp_topo[dut][vrf][addr_family]['nbr'][nbr_ip].update({'route_map_out': route_map})
                        if direction == 'in' :
                            bgp_topo[dut][vrf][addr_family]['nbr'][nbr_ip].update({'route_map_in': route_map})
                    else :
                        if direction == 'out' :
                            if "route_map_out" in bgp_topo[dut][vrf][addr_family]['nbr'][nbr_ip]:
                                del bgp_topo[dut][vrf][addr_family]['nbr'][nbr_ip]['route_map_out']
                        if direction == 'in' :
                            if "route_map_in" in bgp_topo[dut][vrf][addr_family]['nbr'][nbr_ip]:
                                del bgp_topo[dut][vrf][addr_family]['nbr'][nbr_ip]['route_map_in']

        return result



    @staticmethod
    def bgp_sp_bgp_neighbor_config_unconfig(dut, nbr_ip, nbr_asn, addr_family, vrf='default', config='yes', cli_type=""):
        """Configure or remove a single BGP neighbour on dut/vrf and keep the
        bgp_topo cache in sync.

        :param dut
        :param nbr_ip:
        :param nbr_asn:
        :param addr_family
        :param vrf
        :param config
        :return: True on success, False on validation or device failure.
        """

        action_str = 'Config' if config == 'yes' else 'Unconfig'
        st.log("{}uring BGP neighbor ".format(action_str))

        if not BGPSP.bgp_sp_topology_data_present() :
            st.log("BGP SP Topology data not available")
            st.log("SP topo:\n{}\n".format(sp_topo))
            return False

        if not nbr_ip or not nbr_asn :
            st.log("BGP SP - nbr_ip or asn not provided ")
            return False

        if not BGPSP.bgp_sp_addr_family_valid(addr_family):
            return False

        result = True

        if dut not in bgp_topo.keys():
            st.log("BGP SP - {} BGP dut not configured".format(dut))
            return False

        if vrf not in bgp_topo[dut].keys():
            st.log("BGP SP - {} BGP on vrf {} not configured".format(dut, vrf))
            return False

        tb_dut = BGPSP.bgp_sp_get_dut_device(dut)

        lcl_asn = bgp_topo[dut][vrf]['asn']
        if lcl_asn == 0 :
            st.log("BGP SP - {} {} BGP lcl asn not set".format(dut, vrf))
            return False

        if config == 'yes' :

            if nbr_ip in bgp_topo[dut][vrf][addr_family]['nbr'].keys():

                # Neighbour already cached - only sanity-check the remote ASN.
                st.log("BGP SP - {} vrf {} BGP nbr {} exists".format(dut, vrf, nbr_ip))

                nbr_data = bgp_topo[dut][vrf][addr_family]['nbr'][nbr_ip]
                if nbr_data['rmt_asn'] != nbr_asn :
                    st.log("BGP SP - {} vrf {} BGP nbr {} rmt asns {} wont match".format(dut, vrf, nbr_ip, nbr_asn))
                    return False

                result = True
                bgp_topo[dut][vrf][addr_family]['nbr'][nbr_ip].update({'nbr_ip' : nbr_ip})

            else :
                st.log("BGP SP - {} vrf {} Configuring BGP nbr {} asn {}".format(dut, vrf, nbr_ip, nbr_asn))

                # keep-alive 3s / hold 9s / connect-retry 1s for fast test convergence.
                result = bgpapi.config_bgp_neighbor(tb_dut, lcl_asn, nbr_ip, nbr_asn, addr_family, 3, 9, config='yes', cli_type=cli_type, connect_retry=1)
                if not result:
                    st.log("BGP SP - {} vrf {} Configuring BGP nbr {} asin {} FAILED".format(dut, vrf, nbr_ip, nbr_asn))
                    return False

                nbr_data = {'lcl_asn': lcl_asn, 'rmt_asn': nbr_asn, 'rmt_ip': nbr_ip }
                bgp_topo[dut][vrf][addr_family]['nbr'].update({nbr_ip : nbr_data})
        else :

            if nbr_ip not in bgp_topo[dut][vrf][addr_family]['nbr'].keys():
                st.log("BGP SP - {} vrf {} BGP nbr {} doesnt exists".format(dut, vrf, nbr_ip))
                return False

            st.log("BGP SP - {} vrf {} UnConfiguring BGP nbr {} asn {} ".format(dut, vrf, nbr_ip, nbr_asn))

            result = bgpapi.config_bgp_neighbor(tb_dut, lcl_asn, nbr_ip, nbr_asn, addr_family, config='no', cli_type=cli_type)
            if not result:
                st.log("BGP SP - {} vrf {} UnConfiguring BGP nbr {} asn {} FAILED".format(dut, vrf, nbr_ip, nbr_asn))

            # Cache entry is removed even when the device-side unconfig failed.
            del bgp_topo[dut][vrf][addr_family]['nbr'][nbr_ip]

        #st.log("BGP SP - Bgp topo after {} router bgp nbr: {}".format(action_str, bgp_topo))
        return result


    @staticmethod
    def bgp_sp_bgp_neighbor_segment_config_unconfig(segment_data={}, addr_family='all', config='yes'):
        """Configure/unconfigure a BGP session 'segment' between two duts described
        by segment_data (lcl_dut/lcl_asn/rmt_dut/rmt_asn plus optional vrfs and
        lcl_link), creating the routers on both ends when needed."""

        action_str = 'Config' if config == 'yes' else 'Unconfig'
        st.log("{}uring Bgp segment".format(action_str))
        st.log("Input BGP Segment data : {}".format(segment_data))

        result = True
        threaded_run = False
        if config != 'yes' : threaded_run = True

        lcl_dut = segment_data['lcl_dut']
        lcl_asn = segment_data['lcl_asn']
        rmt_dut = segment_data['rmt_dut']
        rmt_asn = segment_data['rmt_asn']

        if 'lcl_vrf' in segment_data.keys():
            lcl_vrf = segment_data['lcl_vrf']
        else:
            lcl_vrf ='default'

        if 'rmt_vrf' in segment_data.keys():
            rmt_vrf = segment_data['rmt_vrf']
        else:
            rmt_vrf ='default'

        # link selection: 'none' -> loopback peering, 'any' -> first available link,
        # otherwise an explicit link name.
        if 'lcl_link' in segment_data.keys():
            link = segment_data['lcl_link']
        else:
            link ='none'

        st.log("BGP SP - {}uring bgp nbr {}:{}--{}--{}:{}".format(
                       action_str, lcl_dut, lcl_asn,
                       link, rmt_asn, rmt_dut))


        # Single-pass loop used only so 'break' can bail out to the summary log.
        for _ in range(0,1) :

            #ibgp_session = True if lcl_asn == rmt_asn else False

            if not BGPSP.bgp_sp_dut_present(lcl_dut) :
                st.log("BGP SP - Dut {} not in topology list ".format(lcl_dut))
                result = False
                break

            if not BGPSP.bgp_sp_dut_present(rmt_dut) :
                st.log("BGP SP - Dut {} not in topology list ".format(rmt_dut))
                result = False
                break

            addr_family_list = BGPSP.bgp_sp_get_address_family_list(addr_family)
            link_list = BGPSP.bgp_sp_dut_get_connected_links(lcl_dut, rmt_dut)

            if not link_list or len(link_list) == 0 :
                st.log("BGP SP - no links available between {} {}".format(lcl_dut, rmt_dut))

            bgp_configured = False

            for afmly in addr_family_list:
                lcl_ip = ''
                rmt_ip = ''
                link_name = ''

                if link == 'none' :
                    lcl_ip = BGPSP.bgp_sp_get_dut_loopback_ip(lcl_dut, 0, afmly)
                    rmt_ip = BGPSP.bgp_sp_get_dut_loopback_ip(rmt_dut, 0, afmly)
                elif link == 'any' :
                    if len(link_list) == 0 :
                        st.log("BGP SP - No link present between {} {}".format(lcl_dut, rmt_dut))
                        lcl_ip = BGPSP.bgp_sp_get_dut_loopback_ip(lcl_dut, 0, afmly)
                        rmt_ip = BGPSP.bgp_sp_get_dut_loopback_ip(rmt_dut, 0, afmly)
                    else :
                        link_name = link_list[0]
                else :
                    if link not in link_list :
                        st.log("BGP SP - Link {} not present between {} {}".format(link, lcl_dut, rmt_dut))
                        result = False
                        break

                    link_name = link

                # NOTE(review): indentation reconstructed - these lookups appear to
                # run with link_name possibly '' (loopback cases); confirm against
                # the original source that they belong at this nesting level.
                lcl_ip = BGPSP.bgp_sp_dut_get_link_local_ip(lcl_dut, link_name, afmly)
                rmt_ip = BGPSP.bgp_sp_dut_get_link_remote_ip(lcl_dut, link_name, afmly)

                if lcl_ip == '' or rmt_ip == '' :
                    st.log("BGP SP - {} Link {} no have lcl/rmt {} {} ip assigned".format(afmly, link, lcl_ip, rmt_ip))
                    continue
                    #return False

                # Create the BGP routers on both ends once, before the first family.
                if not bgp_configured :

                    bgp_configured = True

                    dut_thread = []

                    if config == 'yes' :
                        if not BGPSP.bgp_sp_bgp_configured(lcl_dut, lcl_vrf):
                            st.log("BGP SP - {} BGP on vrf {} not configured".format(lcl_dut, lcl_vrf))

                            if threaded_run:
                                dut_thread.append([BGPSP.bgp_sp_bgp_config_unconfig,
                                                   lcl_dut, lcl_asn, '', lcl_vrf, config])
                            else :
                                result = BGPSP.bgp_sp_bgp_config_unconfig(
                                                   lcl_dut, lcl_asn, router_id='', vrf=lcl_vrf, config=config)

                            if not result :
                                st.log("BGP SP - bgp config for {} {} FAILED".format(lcl_dut, lcl_asn))
                                result = False
                                break

                        if not BGPSP.bgp_sp_bgp_configured(rmt_dut, rmt_vrf) :
                            st.log("BGP SP - {} BGP on vrf {} not configured".format(rmt_dut, rmt_vrf))


                            if threaded_run:
                                dut_thread.append([BGPSP.bgp_sp_bgp_config_unconfig,
                                                   rmt_dut, rmt_asn, '', rmt_vrf, config])
                            else :
                                result = BGPSP.bgp_sp_bgp_config_unconfig(
                                                   rmt_dut, rmt_asn, router_id='', vrf=rmt_vrf, config=config)

                            if not result :
                                st.log("BGP SP - bgp config for {} {} FAILED".format(rmt_dut, rmt_asn))
                                result = False
                                break

                        if threaded_run:
                            [out, exceptions] = putils.exec_all(bgplib.fast_start, dut_thread)
                            st.log("BGP SP - Bgp config Threaded Run result {}".format([out, exceptions]))
                            if False in out : result = False

                        if not result :
                            st.log("BGP SP - Neighbor BGP config FAILED")
                            return False


                dut_thread = []

                if threaded_run:
                    dut_thread.append([BGPSP.bgp_sp_bgp_neighbor_config_unconfig,
                                       lcl_dut, rmt_ip, rmt_asn, afmly, lcl_vrf, config])
                else :
                    result = BGPSP.bgp_sp_bgp_neighbor_config_unconfig(
                                       lcl_dut, rmt_ip, rmt_asn, afmly, vrf=lcl_vrf, config=config)

                if not result :
                    st.log("BGP SP - bgp nbr config for {} {} {} {} FAILED".format(lcl_dut, rmt_ip, rmt_asn, afmly))
                    result = False
                    break

                if threaded_run:
                    dut_thread.append([BGPSP.bgp_sp_bgp_neighbor_config_unconfig,
                                       rmt_dut, lcl_ip, lcl_asn, afmly, rmt_vrf, config])
                else :
                    result = BGPSP.bgp_sp_bgp_neighbor_config_unconfig(
                                       rmt_dut, lcl_ip, lcl_asn, afmly, vrf=rmt_vrf, config=config)

                if not result :
                    st.log("BGP SP - bgp nbr config for {} {} {} {} FAILED".format(rmt_dut, lcl_ip, lcl_asn, afmly))
                    result = False
                    break

                if threaded_run:
                    [out, exceptions] = putils.exec_all(bgplib.fast_start, dut_thread)
                    st.log("BGP SP - Bgp Neighbor config Threaded Run result {}".format([out, exceptions]))
                    if False in out : result = False

                if not result :
                    break

            if not bgp_configured :
                result = False
                break

        result_str = "Success" if result else "FAILED"

        st.log("BGP SP - {}uring bgp nbr {}:{}--{}--{}:{} {}".format(
                       action_str, lcl_dut, lcl_asn,
                       link, rmt_asn, rmt_dut, result_str))
        return result



    @staticmethod
    def bgp_sp_bgp_asn_map_config_unconfig(dut_asn_map={}, config='yes', vrf='default', addr_family='all', max_adjacency='all', cli_type="vtysh", debug_run=False):
        """Configure/unconfigure BGP routers and full-mesh adjacencies for the
        dut->ASN mapping given in dut_asn_map.

        :param dut_asn_map
        :param config:
        :param vrf:
        :param addr_family
        :param max_adjacency
        :return:
        """
        action_str = 'Config' if config == 'yes' else 'Unconfig'
        st.log("{}uring list of bgp AS nodes.".format(action_str))

        if not BGPSP.bgp_sp_topology_data_present() :
            st.log("BGP SP Topology data not available")
            st.log("SP topo:\n{}\n".format(sp_topo))
            return False

        if not dut_asn_map :
            st.log("BGP SP DUT to AS Map not provided ")
            st.log("dut_asn_map:\n{}\n".format(dut_asn_map))
            return False

        #threaded_run = False
        #debug_run = False
        result = True

        addr_family_list = BGPSP.bgp_sp_get_address_family_list(addr_family)
        # Process duts in a deterministic (sorted) order.
        dut_asn_map = {k: dut_asn_map[k] for k in sorted(dut_asn_map)}
        adj_limit = 10 if max_adjacency == 'all' else int(max_adjacency)

        st.log("BGP Dut Asn map: {}".format(dut_asn_map))

        for dut, as_num in dut_asn_map.items():
            if dut not in sp_topo['dut_list']:
                st.log("BGP SP - Dut {} not in BGP SP topology {}".format(dut, sp_topo['dut_list']))
                return False

        nbr_count = {}
        nbr_visited = {}
        for dut, as_num in dut_asn_map.items():
            nbr_visited[dut] = False

            result = BGPSP.bgp_sp_bgp_config_unconfig(dut, as_num, router_id='', vrf=vrf, config=config, cli_type=cli_type)
            if not result :
                st.log("BGP SP - bgp config for {} {} FAILED".format(dut, as_num))
                return False

        for dut, lcl_asn in dut_asn_map.items():
            #tb_dut = sp_topo[dut]['device']

            for link_name, link_data in sp_topo[dut]['intf'].items():

                if link_data['type'] == 'LBK':
                    continue

                rmt_dut = link_data['rmt_dut']
                if rmt_dut not in dut_asn_map.keys():
                    continue

                if nbr_visited[rmt_dut] :
                    continue

                rmt_asn = dut_asn_map[rmt_dut]

                from_node_adj = "{}{}".format(dut, rmt_dut)
                if from_node_adj not in nbr_count.keys():
                    nbr_count[from_node_adj] = 0

                to_node_adj = "{}{}".format(dut, rmt_dut)
                if to_node_adj not in 
nbr_count.keys():NEWLINE nbr_count[to_node_adj] = 0NEWLINENEWLINE if nbr_count[from_node_adj] >= adj_limit :NEWLINE continueNEWLINENEWLINE if nbr_count[to_node_adj] >= adj_limit :NEWLINE continueNEWLINENEWLINE nbr_added = FalseNEWLINE for afmly in addr_family_list:NEWLINE if link_name in sp_topo[dut][afmly]['link'].keys():NEWLINENEWLINE ip_data = sp_topo[dut][afmly]['link'][link_name]NEWLINENEWLINE if 'rmt_ip' in ip_data.keys() :NEWLINENEWLINE lcl_ip = ip_data['ip']NEWLINE rmt_ip = ip_data['rmt_ip']NEWLINENEWLINE result = BGPSP.bgp_sp_bgp_neighbor_config_unconfig(dut, rmt_ip, rmt_asn, afmly, vrf=vrf, config=config, cli_type=cli_type)NEWLINE if not result :NEWLINE st.log("BGP SP - bgp nbr config for {} {} {} {} FAILED".format(dut, rmt_ip, rmt_asn, afmly))NEWLINE return FalseNEWLINENEWLINE result = BGPSP.bgp_sp_bgp_neighbor_config_unconfig(rmt_dut, lcl_ip, lcl_asn, afmly, vrf=vrf, config=config, cli_type=cli_type)NEWLINE if not result :NEWLINE st.log("BGP SP - bgp nbr config for {} {} {} {} FAILED".format(rmt_dut, lcl_ip, lcl_asn, afmly))NEWLINE return FalseNEWLINENEWLINE nbr_added = TrueNEWLINENEWLINE if nbr_added :NEWLINE nbr_count[to_node_adj] += 1NEWLINE nbr_count[from_node_adj] += 1NEWLINENEWLINE nbr_visited[dut] = TrueNEWLINENEWLINE if debug_run:NEWLINE BGPSP.bgp_sp_show_dut_bgp_cmd_logs(dut)NEWLINENEWLINE return resultNEWLINENEWLINENEWLINENEWLINE @staticmethodNEWLINE def bgp_sp_clear_bgp(dut_list, addr_family='all'):NEWLINENEWLINE if len(dut_list) == 0:NEWLINE dut_list = sp_topo['dut_list']NEWLINENEWLINE st.log("BGP SP - Clearing BGP sessions {}".format(dut_list))NEWLINENEWLINE result = TrueNEWLINE threaded_run = TrueNEWLINE dut_thread = []NEWLINENEWLINE dut_list = list(dut_list) if isinstance(dut_list, list) else [dut_list]NEWLINE if not dut_list or len(dut_list) < 2: threaded_run = FalseNEWLINENEWLINE addr_family_list = BGPSP.bgp_sp_get_address_family_list(addr_family)NEWLINENEWLINE for afmly in addr_family_list:NEWLINE dut_thread = []NEWLINE for dut in 
dut_list :NEWLINE if dut not in bgp_topo.keys():NEWLINE continueNEWLINENEWLINE if BGPSP.bgp_sp_dut_is_tg(dut) :NEWLINE continueNEWLINENEWLINE tb_dut = sp_topo[dut]['device']NEWLINENEWLINE st.log("BGP SP - clearing {} bgp on {}".format(afmly , dut))NEWLINE if threaded_run:NEWLINE if afmly == 'ipv4' :NEWLINE dut_thread.append([bgpapi.clear_ip_bgp_vtysh, tb_dut])NEWLINE else :NEWLINE dut_thread.append([bgpapi.clear_ipv6_bgp_vtysh, tb_dut])NEWLINE else :NEWLINE if afmly == 'ipv4' :NEWLINE bgpapi.clear_ip_bgp_vtysh(tb_dut)NEWLINE else :NEWLINE bgpapi.clear_ipv6_bgp_vtysh(tb_dut)NEWLINENEWLINE if threaded_run :NEWLINE [out, exceptions] = putils.exec_all(bgplib.fast_start, dut_thread)NEWLINE st.log("BGP SP - Clear BGP Threaded Run result {}".format([out, exceptions]))NEWLINE if False in out : result = FalseNEWLINENEWLINE return resultNEWLINENEWLINENEWLINE @staticmethodNEWLINE def bgp_sp_cleanup_bgp_routers(dut_list = [], threaded_run=True):NEWLINENEWLINE if len(dut_list) == 0:NEWLINE dut_list = sp_topo['dut_list']NEWLINENEWLINE st.log("BGP SP - Unconfiguring BGP routers {}".format(dut_list))NEWLINENEWLINE result = TrueNEWLINE #threaded_run = TrueNEWLINE device_list = []NEWLINE dut_thread = []NEWLINENEWLINE for dut in dut_list :NEWLINE if dut not in bgp_topo.keys():NEWLINE st.log("BGP SP - BGP not in topo..force deleting bgp router on {}".format(dut))NEWLINENEWLINE tb_dut = sp_topo[dut]['device']NEWLINE if not BGPSP.bgp_sp_dut_is_tg(dut) :NEWLINE device_list.append(tb_dut)NEWLINE dut_thread.append([bgpapi.unconfig_router_bgp, tb_dut])NEWLINENEWLINE if dut in bgp_topo.keys():NEWLINE del bgp_topo[dut]NEWLINENEWLINE if not device_list : return TrueNEWLINENEWLINE st.log("BGP SP - clearing bgp on {}".format(device_list))NEWLINENEWLINE if threaded_run :NEWLINE [out, exceptions] = putils.exec_all(bgplib.fast_start, dut_thread)NEWLINE st.log("BGP SP - Threaded Run result {}".format([out, exceptions]))NEWLINE if False in out : result = FalseNEWLINE else :NEWLINE result = 
bgpapi.cleanup_router_bgp(device_list)NEWLINENEWLINE return resultNEWLINENEWLINENEWLINE @staticmethodNEWLINE def bgp_sp_dut_verify_all_bgp_sessions(dut, addr_family='all', state='up'):NEWLINENEWLINE st.log("BGP SP - Verify Bgp session {} on {}.".format(state, dut))NEWLINENEWLINE if not BGPSP.bgp_sp_dut_present(dut):NEWLINE st.log("Dut {} not present".format(dut))NEWLINE return FalseNEWLINENEWLINE if not bgp_topo[dut] :NEWLINE st.log("BGP SP - BGP not configured in {}".format(dut))NEWLINE return FalseNEWLINENEWLINE result = TrueNEWLINE tb_dut = BGPSP.bgp_sp_get_dut_device(dut)NEWLINENEWLINE addr_family_list = BGPSP.bgp_sp_get_address_family_list(addr_family)NEWLINENEWLINE vrf_list = list(bgp_topo[dut].keys())NEWLINENEWLINE for vrf in vrf_list :NEWLINE for afmly in addr_family_list:NEWLINE nbr_list = bgp_topo[dut][vrf][afmly]['nbr'].keys()NEWLINENEWLINE loop_flag = 0NEWLINE for iter in range(6):NEWLINE result_flag = 0NEWLINE result = bgpapi.verify_bgp_summary(tb_dut, family=afmly, neighbor=nbr_list, state='Established')NEWLINE if result :NEWLINE if state == 'down' :NEWLINE st.log("BGP SP - BGP session not down for nghbor {}".format(nbr_list))NEWLINE BGPSP.bgp_sp_show_dut_route_cmd_logs(dut)NEWLINE #breakNEWLINE result_flag = 1NEWLINENEWLINE if not result :NEWLINE if state == 'up' :NEWLINE st.log("BGP SP - BGP session not up for nghbor {}".format(nbr_list))NEWLINE BGPSP.bgp_sp_show_dut_route_cmd_logs(dut)NEWLINE #breakNEWLINE result_flag = 1NEWLINENEWLINE if result_flag == 0:NEWLINE loop_flag = 0NEWLINE breakNEWLINE else:NEWLINE loop_flag = 1NEWLINE st.wait(10, "Waiting or the connectios establishement")NEWLINENEWLINE if loop_flag == 1:NEWLINE breakNEWLINENEWLINE if not result :NEWLINE breakNEWLINENEWLINE result_str = "Success" if result else "Failed"NEWLINE st.log("BGP SP - BGP Session {} check {}".format(state, result_str))NEWLINENEWLINE return resultNEWLINENEWLINENEWLINE @staticmethodNEWLINE def bgp_sp_verify_all_bgp_sessions(dut_list=[], addr_family='all', 
state='up', threaded_run=True):NEWLINE """NEWLINENEWLINE :param config:NEWLINE :param vrf:NEWLINE :param addr_family:NEWLINE :return:NEWLINE """NEWLINENEWLINE if len(dut_list) == 0:NEWLINE dut_list = BGPSP.bgp_sp_get_dut_list()NEWLINENEWLINE st.log("BGP SP - Verify {} Bgp Session {} on {}.".format(addr_family, state, dut_list))NEWLINENEWLINE result = TrueNEWLINE dut_thread = []NEWLINENEWLINE dut_list = list(dut_list) if isinstance(dut_list, list) else [dut_list]NEWLINE if not dut_list or len(dut_list) < 2: threaded_run = FalseNEWLINENEWLINE for dut in dut_list :NEWLINENEWLINE dut_result = TrueNEWLINE if threaded_run:NEWLINE dut_thread.append([BGPSP.bgp_sp_dut_verify_all_bgp_sessions, dut, addr_family, state])NEWLINE else :NEWLINE dut_result = BGPSP.bgp_sp_dut_verify_all_bgp_sessions(dut, addr_family, state)NEWLINENEWLINE if not dut_result:NEWLINE result = FalseNEWLINE st.log("BGP SP - BGP session test at {} failed".format(dut))NEWLINENEWLINE if threaded_run:NEWLINE [out, exceptions] = putils.exec_all(bgplib.fast_start, dut_thread)NEWLINE st.log("BGP SP - BGP session test Threaded Run result {}".format([out, exceptions]))NEWLINE if False in out : result = FalseNEWLINENEWLINE result_str = "Success" if result else "Failed"NEWLINE st.log("BGP SP - BGP Session {} check {}".format(state, result_str))NEWLINENEWLINE return resultNEWLINENEWLINENEWLINE @staticmethodNEWLINE def bgp_sp_linear_topo_bgp_config_unconfig(sess_type='eBGP', addr_family='all', ring='no', config='yes'):NEWLINENEWLINE action_str = 'Config' if config == 'yes' else 'Unconfig'NEWLINE topology = "Linear" if ring == 'no' else "Ring"NEWLINE st.banner("{}uring {} topo {} session".format(action_str, topology, sess_type))NEWLINENEWLINE if BGPSP.bgp_sp_get_dut_count() < 2 :NEWLINE st.log("BGP SP - Testbed doesnt have two duts")NEWLINE st.report_fail("test_case_passed")NEWLINE return FalseNEWLINENEWLINE if config == 'yes' :NEWLINE if ring == 'no' :NEWLINE dut_linear_path = 
BGPSP.bgp_sp_find_linear_topo_in_dut_list()NEWLINE else :NEWLINE dut_linear_path = BGPSP.bgp_sp_find_ring_topo_in_dut_list()NEWLINE else :NEWLINE if ring == 'no' :NEWLINE dut_linear_path = BGPSP.bgp_sp_dut_get_saved_linear_topo()NEWLINE else :NEWLINE dut_linear_path = BGPSP.bgp_sp_dut_get_saved_ring_topo()NEWLINENEWLINE BGPSP.bgp_sp_show_topo_path(dut_linear_path)NEWLINENEWLINE if not dut_linear_path['found'] :NEWLINE st.log("BGP SP - Get linear path Failed")NEWLINE st.log("BGP SP - {} topo {} session test FAILED".format(sess_type, topology))NEWLINE st.report_fail("test_case_failed")NEWLINE return FalseNEWLINENEWLINE dut_list = dut_linear_path['dut_list']NEWLINE path_segts = dut_linear_path['segment']NEWLINENEWLINE result = TrueNEWLINE base_asn = 65001NEWLINE asn_index = 0NEWLINENEWLINE segment_count = len(path_segts)NEWLINENEWLINE form_ring_session = FalseNEWLINE if segment_count >= 2 and ring == 'yes' :NEWLINE form_ring_session = TrueNEWLINENEWLINE for segt_idx, segt_data_links in path_segts.items():NEWLINENEWLINE segt_data = segt_data_links[0]NEWLINENEWLINE if form_ring_session and segt_idx == (segment_count - 1):NEWLINE # last node and first node segmentNEWLINE lcl_asn = base_asn + asn_indexNEWLINE rmt_asn = path_segts[0]['lcl_asn']NEWLINENEWLINE elif sess_type == 'iBGP' :NEWLINE # all node i bgpNEWLINE lcl_asn = base_asnNEWLINE rmt_asn = base_asnNEWLINENEWLINE elif sess_type == 'eBGP' :NEWLINE # all node ebgpNEWLINE lcl_asn = base_asn + asn_indexNEWLINE asn_index += 1NEWLINE rmt_asn = base_asn + asn_indexNEWLINENEWLINE elif sess_type == 'eBGPiBGPeBGP' :NEWLINE # N--e--N--i--N--e--N...NEWLINE lcl_asn = base_asn + asn_indexNEWLINE curr_sess = segt_idx % 3NEWLINE if curr_sess == 0 or curr_sess == 2: #0-e 1=i 2=eNEWLINE asn_index += 1NEWLINE rmt_asn = base_asn + asn_indexNEWLINENEWLINE elif sess_type == 'eBGPiBGPiBGP' :NEWLINE # N--e--N--i--N--i--N--i--N ...all iNEWLINE lcl_asn = base_asn + asn_indexNEWLINE if segt_idx == 0:NEWLINE asn_index += 1NEWLINE rmt_asn = 
base_asn + asn_indexNEWLINENEWLINE elif sess_type == 'eBGPeBGPiBGP' :NEWLINE # N--e--N--e--N--i--N--i--N ...all iNEWLINE lcl_asn = base_asn + asn_indexNEWLINE if segt_idx <= 1:NEWLINE asn_index += 1NEWLINE rmt_asn = base_asn + asn_indexNEWLINENEWLINE elif sess_type == 'iBGPeBGPiBGP' :NEWLINE # N--i--N--e--N--i--N--i--N ...all iNEWLINE lcl_asn = base_asn + asn_indexNEWLINE if segt_idx == 1:NEWLINE asn_index += 1NEWLINE rmt_asn = base_asn + asn_indexNEWLINENEWLINE else :NEWLINE st.log("BGP SP - Invalid BGP session Type passed {}".format(sess_type))NEWLINE return FalseNEWLINENEWLINE segt_data.update({'lcl_asn': lcl_asn})NEWLINE segt_data.update({'rmt_asn': rmt_asn})NEWLINENEWLINE result = BGPSP.bgp_sp_bgp_neighbor_segment_config_unconfig(segt_data, addr_family, config=config)NEWLINE if not result :NEWLINE breakNEWLINENEWLINE if result and config == 'yes':NEWLINE st.wait(3)NEWLINE result = BGPSP.bgp_sp_verify_all_bgp_sessions(dut_list, addr_family='all')NEWLINE if not result :NEWLINE st.log("BGP SP - Linear topo session {} check Failed".format(sess_type))NEWLINENEWLINE result_str = "Success" if result else "Failed"NEWLINE st.banner("BGP SP - {}uring {} topo {} session {}".format(action_str, topology, sess_type, result_str))NEWLINENEWLINE return resultNEWLINENEWLINENEWLINE @staticmethodNEWLINE def bgp_sp_star_topo_bgp_config_unconfig(bgp_asn=65008, sess_type='eBGP', addr_family='all', config='yes'):NEWLINENEWLINE action_str = 'Config' if config == 'yes' else 'Unconfig'NEWLINE st.banner("{}uring Star topology {} session".format(action_str, sess_type))NEWLINENEWLINE if BGPSP.bgp_sp_get_dut_count() < 3 :NEWLINE st.log("BGP SP - Testbed doesnt have minimum 3 duts")NEWLINE st.report_fail("test_case_passed")NEWLINE return FalseNEWLINENEWLINE if config == 'yes' :NEWLINE dut_star_path = BGPSP.bgp_sp_find_star_topo_in_dut_list([],'', 0)NEWLINE else :NEWLINE dut_star_path = BGPSP.bgp_sp_dut_get_saved_star_topo()NEWLINE BGPSP.bgp_sp_show_topo_path(dut_star_path)NEWLINENEWLINE if 
not dut_star_path['found'] :NEWLINE st.log("BGP SP - Get Star path Failed")NEWLINE st.report_fail("test_case_failed")NEWLINE return FalseNEWLINENEWLINE dut_list = dut_star_path['dut_list']NEWLINE path_segts = dut_star_path['segment']NEWLINENEWLINE result = TrueNEWLINE if len(path_segts) < 2 :NEWLINE st.log("BGP SP - Testbed doesnt have 3 connected nodes")NEWLINE st.report_fail("test_case_failed")NEWLINE return FalseNEWLINENEWLINE core_asn = bgp_asnNEWLINE spoke_end_as = bgp_asnNEWLINE for _, segt_data_links in path_segts.items():NEWLINENEWLINE segt_data = segt_data_links[0]NEWLINENEWLINE if sess_type == 'eBGP' :NEWLINE spoke_end_as += 1NEWLINENEWLINE segt_data.update({'lcl_asn': core_asn})NEWLINE segt_data.update({'rmt_asn': spoke_end_as})NEWLINENEWLINE result = BGPSP.bgp_sp_bgp_neighbor_segment_config_unconfig(segt_data, addr_family, config=config)NEWLINE if not result :NEWLINE breakNEWLINENEWLINE if result and config == 'yes':NEWLINE st.wait(3)NEWLINE result = BGPSP.bgp_sp_verify_all_bgp_sessions(dut_list, addr_family='all')NEWLINE if not result :NEWLINE st.log("BGP SP - Star topology {} session check Failed".format(sess_type))NEWLINENEWLINE result_str = "Success" if result else "Failed"NEWLINE st.banner("BGP SP - {}uring Star topology {} session {}".format(action_str, sess_type, result_str))NEWLINENEWLINE return resultNEWLINENEWLINENEWLINE @staticmethodNEWLINE def bgp_sp_spine_leaf_bgp_config_unconfig(spine_asn=65001, leaf_asn=65003, addr_family='all', config='yes', cli_type="vtysh"):NEWLINENEWLINE action_str = 'Config' if config == 'yes' else 'Unconfig'NEWLINE st.banner("{}uring Spine Leaf BGP session".format(action_str))NEWLINENEWLINE topo_dut_count = BGPSP.bgp_sp_get_dut_count()NEWLINE if topo_dut_count < 2 :NEWLINE st.log("BGP SP - Testbed doesnt have minimum 2 duts")NEWLINE st.report_fail("test_case_passed")NEWLINE return FalseNEWLINENEWLINE topo_dut_list = BGPSP.bgp_sp_get_dut_list()NEWLINENEWLINE st.log("BGP SP - dut count {} list 
{}".format(topo_dut_count, topo_dut_list))NEWLINENEWLINE spine_list = []NEWLINE leaf_list = []NEWLINE dut_mid_index = topo_dut_count / 2NEWLINE dut_idx = 1NEWLINENEWLINE for dut in topo_dut_list:NEWLINE if dut_idx <= dut_mid_index :NEWLINE spine_list.append(dut)NEWLINE else :NEWLINE leaf_list.append(dut)NEWLINE dut_idx += 1NEWLINENEWLINE st.log("BGP SP - Spine List {} Leaf list {}".format(spine_list, leaf_list))NEWLINENEWLINE if config == 'yes' :NEWLINE spine_leaf_path = BGPSP.bgp_sp_find_spine_leaf_topo_in_dut_list(spine_list, leaf_list, save_path='yes')NEWLINE else :NEWLINE spine_leaf_path = BGPSP.bgp_sp_dut_get_saved_spine_leaf_topo()NEWLINENEWLINE st.log("BGP SP - Leaf Spine Path {}".format(spine_leaf_path))NEWLINENEWLINE spine_leaf_session_count = 0NEWLINENEWLINE for spine_dut, spine_path in spine_leaf_path['spine_path'].items():NEWLINENEWLINE st.log("BGP SP - Spine Path \n")NEWLINE BGPSP.bgp_sp_show_topo_path(spine_path)NEWLINENEWLINE if not spine_path['found'] :NEWLINE st.log("BGP SP - Spine {} doesnot have any leafs connected".format(spine_dut))NEWLINE continueNEWLINENEWLINE dut_list = spine_path['dut_list']NEWLINE path_segts = spine_path['segment']NEWLINENEWLINE result = TrueNEWLINE if len(path_segts) < 1 :NEWLINE st.log("BGP SP - Spine {} doesnt have connected leafs".format(spine_dut))NEWLINE continueNEWLINENEWLINE for _, segt_data_links in path_segts.items():NEWLINE segt_data = segt_data_links[0]NEWLINE segt_data.update({'lcl_asn': spine_asn})NEWLINE segt_data.update({'rmt_asn': leaf_asn})NEWLINENEWLINE result = BGPSP.bgp_sp_bgp_neighbor_segment_config_unconfig(segt_data, addr_family, config=config)NEWLINE if not result :NEWLINE breakNEWLINENEWLINE spine_leaf_session_count += 1NEWLINENEWLINE if result and spine_leaf_session_count < 1 :NEWLINE #result = FalseNEWLINE st.log("BGP SP - Zero spine leaf sessions")NEWLINE return FalseNEWLINENEWLINE if result and config == 'yes' :NEWLINE st.wait(3)NEWLINE result = BGPSP.bgp_sp_verify_all_bgp_sessions(dut_list, 
addr_family='all')NEWLINE if not result :NEWLINE st.log("BGP SP - Spine Leaf BGP session check Failed")NEWLINENEWLINE result_str = "Success" if result else "Failed"NEWLINE st.banner("BGP SP - {}uring Spine Leaf BGP session {}".format(action_str, result_str))NEWLINENEWLINE return resultNEWLINENEWLINENEWLINE
#-
# Copyright (c) 2013 Michael Roe
# All rights reserved.
#
# This software was developed by SRI International and the University of
# Cambridge Computer Laboratory under DARPA/AFRL contract FA8750-10-C-0237
# ("CTSRD"), as part of the DARPA CRASH research programme.
#
# @BERI_LICENSE_HEADER_START@
#
# Licensed to BERI Open Systems C.I.C. (BERI) under one or more contributor
# license agreements.  See the NOTICE file distributed with this work for
# additional information regarding copyright ownership.  BERI licenses this
# file to you under the BERI Hardware-Software License, Version 1.0 (the
# "License"); you may not use this file except in compliance with the
# License.  You may obtain a copy of the License at:
#
#   http://www.beri-open-systems.org/legal/license-1-0.txt
#
# Unless required by applicable law or agreed to in writing, Work distributed
# under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
# CONDITIONS OF ANY KIND, either express or implied.  See the License for the
# specific language governing permissions and limitations under the License.
#
# @BERI_LICENSE_HEADER_END@
#

#
# Test single-precision floor to long word when the FPU is in 64 bit mode
#

from beritest_tools import BaseBERITestCase
from nose.plugins.attrib import attr


class test_raw_fpu_floor_l_s_d64(BaseBERITestCase):
    """Check floor.l.s (single-precision floor to 64-bit integer) results.

    Each test inspects one register (a0..a4) left behind by the matching
    assembly test program; negative results are the 64-bit two's-complement
    encoding of -1 (0xffffffffffffffff).
    """

    @attr('float64')
    def test_raw_fpu_floor_l_s_d64_1(self):
        '''Test single precision floor of -0.75'''
        # floor(-0.75) == -1
        self.assertRegisterEqual(self.MIPS.a0, 0xffffffffffffffff, "-0.75 did not round down to -1")

    @attr('float64')
    def test_raw_fpu_floor_l_s_d64_2(self):
        '''Test single precision floor of -0.5'''
        # floor(-0.5) == -1
        self.assertRegisterEqual(self.MIPS.a1, 0xffffffffffffffff, "-0.5 did not round down to -1")

    @attr('float64')
    def test_raw_fpu_floor_l_s_d64_3(self):
        '''Test single precision floor of -0.25'''
        # floor(-0.25) == -1
        self.assertRegisterEqual(self.MIPS.a2, 0xffffffffffffffff, "-0.25 did not round down to -1")

    @attr('float64')
    def test_raw_fpu_floor_l_s_d64_4(self):
        '''Test single precision floor of 0.5'''
        # floor(0.5) == 0
        self.assertRegisterEqual(self.MIPS.a3, 0, "0.5 did not round down to 0")

    @attr('float64')
    def test_raw_fpu_floor_l_s_d64_5(self):
        '''Test single precision floor of 1.5'''
        # floor(1.5) == 1
        self.assertRegisterEqual(self.MIPS.a4, 1, "1.5 did not round down to 1")
import rlp

from bxutils.logging.log_level import LogLevel

from bxgateway.messages.eth.protocol.eth_protocol_message import EthProtocolMessage
from bxgateway.messages.eth.protocol.eth_protocol_message_type import EthProtocolMessageType
from bxgateway.messages.eth.serializers.block_header import BlockHeader
from bxgateway.utils.eth import rlp_utils


class BlockHeadersEthProtocolMessage(EthProtocolMessage):
    """Eth wire protocol BLOCK_HEADERS message: an RLP list of block headers."""

    msg_type = EthProtocolMessageType.BLOCK_HEADERS

    # Single RLP field: a countable list of BlockHeader items.
    fields = [("block_headers", rlp.sedes.CountableList(BlockHeader))]

    def __repr__(self):
        # Keep the repr short: show only the first and last header hashes.
        headers = self.get_block_headers()
        headers_repr = list(headers[:1])
        if len(headers) > 1:
            headers_repr.append(headers[-1])
        return f"BlockHeadersEthProtocolMessage<headers_count: {len(headers)} " \
               f"headers: [{'...'.join([h.hash().hex() for h in headers_repr])}]>"

    def get_block_headers(self):
        """Return the deserialized list of block headers."""
        return self.get_field_value("block_headers")

    def get_block_headers_bytes(self):
        """Return the raw bytes of the header list items.

        Serializes the message first if no memoryview is cached yet.
        """
        if self._memory_view is None:
            self.serialize()

        return rlp_utils.get_first_list_field_items_bytes(self._memory_view)

    @classmethod
    def from_header_bytes(cls, header_bytes: memoryview) -> "BlockHeadersEthProtocolMessage":
        """Build a message from raw concatenated header bytes by prepending
        the RLP list length prefix."""
        headers_list_prefix = rlp_utils.get_length_prefix_list(len(header_bytes))

        msg_bytes = bytearray(len(headers_list_prefix) + len(header_bytes))
        msg_bytes[:len(headers_list_prefix)] = headers_list_prefix
        msg_bytes[len(headers_list_prefix):] = header_bytes

        return cls(msg_bytes)

    def log_level(self):
        # Headers messages are chatty; log them at DEBUG only.
        return LogLevel.DEBUG
from msgflow.service import TwitterSampleStreamService
from msgflow.service.twitter_service import TwitterConfig
from msgflow.bot import Bot


class MockTwitterApi:
    """Fake Twitter API that replays a fixed set of sample-stream tweets."""

    def get_sample_stream(self):
        # Canned tweets covering the filter cases exercised below: length
        # limit, black-listed words, non-matching language, and interval.
        sample_tweets = [
            {"id": "0", "text": "test0", "lang": "ja"},
            {"id": "1", "text": "test1", "lang": "ja"},
            {"id": "2", "text": "test2", "lang": "ja"},
            {"id": "3", "text": "test3_long_sentence", "lang": "ja"},
            {"id": "4", "text": "test4_black_words", "lang": "ja"},
            {"id": "4", "text": "test4_diff_lang", "lang": "en"},
            {"id": "5", "text": "test5", "lang": "ja"},
        ]
        yield from sample_tweets


class MockApp:
    """Records every message the bot hands to it."""

    def __init__(self):
        self._received = []

    def handle(self, messenger):
        self._received.append(messenger.message)


class MockSleepCond:
    """Deterministic stand-in for the service's sleep/throttle condition."""

    def __init__(self):
        self._now = 0

    def reset(self):
        self._now = 0

    def step(self):
        self._now += 1

    def need_sleep(self, interval):
        return self._now <= interval


def test_TwitterSampleStreamService_flow():
    api = MockTwitterApi()
    config = TwitterConfig(
        bearer_token="", max_len=5, lang="ja", black_words=["4"], interval=1
    )
    sleep_cond = MockSleepCond()

    svc = TwitterSampleStreamService(config=config, api=api, sleep_cond=sleep_cond)
    app = MockApp()
    bot = Bot(service=svc, post_service=svc, app=app)

    # Drive the bot over the canned stream.
    bot.start()

    # Only the tweets that pass every filter should reach the app.
    assert [item.text for item in app._received] == ["test1", "test5"]
# Schema editor targeting Microsoft SQL Server (uses sys.* catalog views and
# EXEC sp_rename below).
# NOTE(review): binascii, datetime, logger, TransactionManagementError and
# force_str are not referenced in this portion of the file — presumably used
# further down; confirm before pruning.
import binascii
import datetime

from django.db.backends.base.schema import (
    BaseDatabaseSchemaEditor,
    _is_relevant_relation,
    _related_non_m2m_objects,
    logger,
)
from django.db.backends.ddl_references import (
    Columns,
    IndexName,
    Statement as DjStatement,
    Table,
)
from django.db.models import Index
from django.db.models.fields import AutoField, BigAutoField
from django.db.transaction import TransactionManagementError
from django.utils.encoding import force_str


class Statement(DjStatement):
    """DDL Statement that hashes/compares by (template, name) only.

    NOTE(review): presumably so duplicate deferred statements (e.g. the same
    filtered unique index queued twice) collapse in sets — confirm with callers.
    """

    def __hash__(self):
        return hash((self.template, str(self.parts['name'])))

    def __eq__(self, other):
        # NOTE(review): assumes `other` also exposes .template and .parts['name'].
        return self.template == other.template and str(self.parts['name']) == str(other.parts['name'])


class DatabaseSchemaEditor(BaseDatabaseSchemaEditor):
    """Schema editor for SQL Server: T-SQL templates plus the explicit
    constraint/index drop-and-restore choreography the engine requires."""

    # SQL Server check constraints must be named (see create_model below).
    _sql_check_constraint = " CONSTRAINT %(name)s CHECK (%(check)s)"
    # Looks up the auto-generated name of a column's DEFAULT constraint,
    # which must be known before it can be dropped.
    _sql_select_default_constraint_name = "SELECT" \
                                          " d.name " \
                                          "FROM sys.default_constraints d " \
                                          "INNER JOIN sys.tables t ON" \
                                          " d.parent_object_id = t.object_id " \
                                          "INNER JOIN sys.columns c ON" \
                                          " d.parent_object_id = c.object_id AND" \
                                          " d.parent_column_id = c.column_id " \
                                          "INNER JOIN sys.schemas s ON" \
                                          " t.schema_id = s.schema_id " \
                                          "WHERE" \
                                          " t.name = %(table)s AND" \
                                          " c.name = %(column)s"
    # Lists FK constraints referencing a given table.
    _sql_select_foreign_key_constraints = "SELECT" \
                                          " po.name AS table_name," \
                                          " co.name AS constraint_name " \
                                          "FROM sys.foreign_key_columns fkc " \
                                          "INNER JOIN sys.objects co ON" \
                                          " fkc.constraint_object_id = co.object_id " \
                                          "INNER JOIN sys.tables po ON" \
                                          " fkc.parent_object_id = po.object_id " \
                                          "INNER JOIN sys.tables ro ON" \
                                          " fkc.referenced_object_id = ro.object_id " \
                                          "WHERE ro.name = %(table)s"
    sql_alter_column_default = "ADD DEFAULT %(default)s FOR %(column)s"
    sql_alter_column_no_default = "DROP CONSTRAINT %(column)s"
    sql_alter_column_not_null = "ALTER COLUMN %(column)s %(type)s NOT NULL"
    sql_alter_column_null = "ALTER COLUMN %(column)s %(type)s NULL"
    sql_alter_column_type = "ALTER COLUMN %(column)s %(type)s"
    sql_create_column = "ALTER TABLE %(table)s ADD %(column)s %(definition)s"
    sql_delete_column = "ALTER TABLE %(table)s DROP COLUMN %(column)s"
    sql_delete_index = "DROP INDEX %(name)s ON %(table)s"
    sql_delete_table = "DROP TABLE %(table)s"
    sql_rename_column = "EXEC sp_rename '%(table)s.%(old_column)s', %(new_column)s, 'COLUMN'"
    sql_rename_table = "EXEC sp_rename %(old_table)s, %(new_table)s"
    # Filtered unique index: gives ANSI NULL semantics (multiple NULLs allowed)
    # for nullable unique columns.
    sql_create_unique_null = "CREATE UNIQUE INDEX %(name)s ON %(table)s(%(columns)s) " \
                             "WHERE %(columns)s IS NOT NULL"

    def _alter_column_default_sql(self, model, old_field, new_field, drop=False):
        """
        Hook to specialize column default alteration.

        Return a (sql, params) fragment to add or drop (depending on the drop
        argument) a default to new_field's column.
        """
        new_default = self.effective_default(new_field)
        default = '%s'
        params = [new_default]
        column = self.quote_name(new_field.column)

        if drop:
            params = []
            # SQL Server requires the name of the default constraint
            result = self.execute(
                self._sql_select_default_constraint_name % {
                    "table": self.quote_value(model._meta.db_table),
                    "column": self.quote_value(new_field.column),
                },
                has_result=True
            )
            if result:
                # Reuse `column` to carry the constraint name for the
                # DROP CONSTRAINT template.
                for row in result:
                    column = self.quote_name(next(iter(row)))
        elif self.connection.features.requires_literal_defaults:
            # Some databases (Oracle) can't take defaults as a parameter
            # If this is the case, the SchemaEditor for that database should
            # implement prepare_default().
            default = self.prepare_default(new_default)
            params = []

        new_db_params = new_field.db_parameters(connection=self.connection)
        sql = self.sql_alter_column_no_default if drop else self.sql_alter_column_default
        return (
            sql % {
                'column': column,
                'type': new_db_params['type'],
                'default': default,
            },
            params,
        )

    def _alter_column_null_sql(self, model, old_field, new_field):
        """
        Hook to specialize column null alteration.

        Return a (sql, params) fragment to set a column to null or non-null
        as required by new_field, or None if no changes are required.
        """
        if (self.connection.features.interprets_empty_strings_as_nulls and
                new_field.get_internal_type() in ("CharField", "TextField")):
            # The field is nullable in the database anyway, leave it alone.
            return
        else:
            new_db_params = new_field.db_parameters(connection=self.connection)
            sql = self.sql_alter_column_null if new_field.null else self.sql_alter_column_not_null
            return (
                sql % {
                    'column': self.quote_name(new_field.column),
                    'type': new_db_params['type'],
                },
                [],
            )

    def _alter_column_type_sql(self, model, old_field, new_field, new_type):
        """Append the NULL/NOT NULL clause to the new type before delegating,
        since SQL Server's ALTER COLUMN resets nullability otherwise."""
        new_type = self._set_field_new_type_null_status(old_field, new_type)
        return super()._alter_column_type_sql(model, old_field, new_field, new_type)

    def alter_unique_together(self, model, old_unique_together, new_unique_together):
        """
        Deal with a model changing its unique_together. The input
        unique_togethers must be doubly-nested, not the single-nested
        ["foo", "bar"] format.
        """
        olds = {tuple(fields) for fields in old_unique_together}
        news = {tuple(fields) for fields in new_unique_together}
        # Deleted uniques
        for fields in olds.difference(news):
            self._delete_composed_index(model, fields, {'unique': True}, self.sql_delete_index)
        # Created uniques
        for fields in news.difference(olds):
            columns = [model._meta.get_field(field).column for field in fields]
            # Filtered unique index for ANSI NULL semantics (see sql_create_unique_null).
            condition = ' AND '.join(["[%s] IS NOT NULL" % col for col in columns])
            sql = self._create_unique_sql(model, columns, condition=condition)
            self.execute(sql)

    def _model_indexes_sql(self, model):
        """
        Return a list of all index SQL statements (field indexes,
        index_together, Meta.indexes) for the specified model.
        """
        if not model._meta.managed or model._meta.proxy or model._meta.swapped:
            return []
        output = []
        for field in model._meta.local_fields:
            output.extend(self._field_indexes_sql(model, field))

        for field_names in model._meta.index_together:
            fields = [model._meta.get_field(field) for field in field_names]
            output.append(self._create_index_sql(model, fields, suffix="_idx"))

        # unique_together is emitted as filtered unique indexes here rather
        # than table constraints, to allow NULLs per ANSI semantics.
        for field_names in model._meta.unique_together:
            columns = [model._meta.get_field(field).column for field in field_names]
            condition = ' AND '.join(["[%s] IS NOT NULL" % col for col in columns])
            sql = self._create_unique_sql(model, columns, condition=condition)
            output.append(sql)

        for index in model._meta.indexes:
            output.append(index.create_sql(model, self))
        return output

    def _alter_many_to_many(self, model, old_field, new_field, strict):
        """Alter M2Ms to repoint their to= endpoints."""

        # Drop unique indexes on the through table first; SQL Server will not
        # alter a column that is covered by one.
        for idx in self._constraint_names(old_field.remote_field.through, index=True, unique=True):
            self.execute(self.sql_delete_index % {'name': idx, 'table': old_field.remote_field.through._meta.db_table})

        return super()._alter_many_to_many(model, old_field, new_field, strict)

    def _db_table_constraint_names(self, db_table, column_names=None, unique=None,
                                   primary_key=None, index=None, foreign_key=None,
                                   check=None, type_=None, exclude=None):
        """Return all constraint names matching the columns and conditions."""
        if column_names is not None:
            column_names = [
                self.connection.introspection.identifier_converter(name)
                for name in column_names
            ]
        with self.connection.cursor() as cursor:
            constraints = self.connection.introspection.get_constraints(cursor, db_table)
        result = []
        for name, infodict in constraints.items():
            if column_names is None or column_names == infodict['columns']:
                if unique is not None and infodict['unique'] != unique:
                    continue
                if primary_key is not None and infodict['primary_key'] != primary_key:
                    continue
                if index is not None and infodict['index'] != index:
                    continue
                if check is not None and infodict['check'] != check:
                    continue
                if foreign_key is not None and not infodict['foreign_key']:
                    continue
                if type_ is not None and infodict['type'] != type_:
                    continue
                if not exclude or name not in exclude:
                    result.append(name)
        return result

    def _db_table_delete_constraint_sql(self, template, db_table, name):
        """Build a deduplicatable DROP statement for a raw table name
        (no model required)."""
        return Statement(
            template,
            table=Table(db_table, self.quote_name),
            name=self.quote_name(name),
        )

    def alter_db_table(self, model, old_db_table, new_db_table):
        """Rename the table, dropping indexes on both names first — SQL Server
        requires explicit deletion before sp_rename."""
        index_names = self._db_table_constraint_names(old_db_table, index=True)
        for index_name in index_names:
            self.execute(self._db_table_delete_constraint_sql(self.sql_delete_index, old_db_table, index_name))

        index_names = self._db_table_constraint_names(new_db_table, index=True)
        for index_name in index_names:
            self.execute(self._db_table_delete_constraint_sql(self.sql_delete_index, new_db_table, index_name))

        return super().alter_db_table(model, old_db_table, new_db_table)

    def _alter_field(self, model, old_field, new_field, old_type, new_type,
                     old_db_params, new_db_params, strict=False):
        """Actually perform a "physical" (non-ManyToMany) field update."""

        # the backend doesn't support altering from/to (Big)AutoField
        # because of the limited capability of SQL Server to edit IDENTITY property
        for t in (AutoField, BigAutoField):
            if isinstance(old_field, t) or isinstance(new_field, t):
                raise NotImplementedError("the backend doesn't support altering from/to %s." % t.__name__)
        # Drop any FK constraints, we'll remake them later
        fks_dropped = set()
        if old_field.remote_field and old_field.db_constraint:
            # Drop index, SQL Server requires explicit deletion
            if not hasattr(new_field, 'db_constraint') or not new_field.db_constraint:
                index_names = self._constraint_names(model, [old_field.column], index=True)
                for index_name in index_names:
                    self.execute(self._delete_constraint_sql(self.sql_delete_index, model, index_name))

            fk_names = self._constraint_names(model, [old_field.column], foreign_key=True)
            if strict and len(fk_names) != 1:
                raise ValueError("Found wrong number (%s) of foreign key constraints for %s.%s" % (
                    len(fk_names),
                    model._meta.db_table,
                    old_field.column,
                ))
            for fk_name in fk_names:
                fks_dropped.add((old_field.column,))
                self.execute(self._delete_constraint_sql(self.sql_delete_fk, model, fk_name))
        # Has unique been removed?
        if old_field.unique and (not new_field.unique or self._field_became_primary_key(old_field, new_field)):
            # Find the unique constraint for this field
            constraint_names = self._constraint_names(model, [old_field.column], unique=True, primary_key=False)
            if strict and len(constraint_names) != 1:
                raise ValueError("Found wrong number (%s) of unique constraints for %s.%s" % (
                    len(constraint_names),
                    model._meta.db_table,
                    old_field.column,
                ))
            for constraint_name in constraint_names:
                self.execute(self._delete_constraint_sql(self.sql_delete_unique, model, constraint_name))
        # Drop incoming FK constraints if the field is a primary key or unique,
        # which might be a to_field target, and things are going to change.
        drop_foreign_keys = (
            (
                (old_field.primary_key and new_field.primary_key) or
                (old_field.unique and new_field.unique)
            ) and old_type != new_type
        )
        if drop_foreign_keys:
            # '_meta.related_field' also contains M2M reverse fields, these
            # will be filtered out
            for _old_rel, new_rel in _related_non_m2m_objects(old_field, new_field):
                rel_fk_names = self._constraint_names(
                    new_rel.related_model, [new_rel.field.column], foreign_key=True
                )
                for fk_name in rel_fk_names:
                    self.execute(self._delete_constraint_sql(self.sql_delete_fk, new_rel.related_model, fk_name))
        # Removed an index? (no strict check, as multiple indexes are possible)
        # Remove indexes if db_index switched to False or a unique constraint
        # will now be used in lieu of an index. The following lines from the
        # truth table show all True cases; the rest are False:
        #
        # old_field.db_index | old_field.unique | new_field.db_index | new_field.unique
        # ------------------------------------------------------------------------------
        # True               | False            | False              | False
        # True               | False            | False              | True
        # True               | False            | True               | True
        if (old_field.db_index and not old_field.unique and (not new_field.db_index or new_field.unique)) or (
                # Drop indexes on nvarchar columns that are changing to a different type
                # SQL Server requires explicit deletion
                (old_field.db_index or old_field.unique) and (
                    (old_type.startswith('nvarchar') and not new_type.startswith('nvarchar'))
                )):
            # Find the index for this field
            meta_index_names = {index.name for index in model._meta.indexes}
            # Retrieve only BTREE indexes since this is what's created with
            # db_index=True.
            index_names = self._constraint_names(model, [old_field.column], index=True, type_=Index.suffix)
            for index_name in index_names:
                if index_name not in meta_index_names:
                    # The only way to check if an index was created with
                    # db_index=True or with Index(['field'], name='foo')
                    # is to look at its name (refs #28053).
                    self.execute(self._delete_constraint_sql(self.sql_delete_index, model, index_name))
        # Change check constraints?
        if (old_db_params['check'] != new_db_params['check'] and old_db_params['check']) or (
            # SQL Server requires explicit deletion before altering column type with the same constraint
            old_db_params['check'] == new_db_params['check'] and old_db_params['check'] and
            old_db_params['type'] != new_db_params['type']
        ):
            constraint_names = self._constraint_names(model, [old_field.column], check=True)
            if strict and len(constraint_names) != 1:
                raise ValueError("Found wrong number (%s) of check constraints for %s.%s" % (
                    len(constraint_names),
                    model._meta.db_table,
                    old_field.column,
                ))
            for constraint_name in constraint_names:
                self.execute(self._delete_constraint_sql(self.sql_delete_check, model, constraint_name))
        # Have they renamed the column?
        if old_field.column != new_field.column:
            # remove old indices
            self._delete_indexes(model, old_field, new_field)

            self.execute(self._rename_field_sql(model._meta.db_table, old_field, new_field, new_type))
            # Rename all references to the renamed column.
            for sql in self.deferred_sql:
                if isinstance(sql, DjStatement):
                    sql.rename_column_references(model._meta.db_table, old_field.column, new_field.column)

        # Next, start accumulating actions to do
        actions = []
        null_actions = []
        post_actions = []
        # Type change?
        if old_type != new_type:
            fragment, other_actions = self._alter_column_type_sql(model, old_field, new_field, new_type)
            actions.append(fragment)
            post_actions.extend(other_actions)
            # Drop unique constraint, SQL Server requires explicit deletion
            self._delete_unique_constraints(model, old_field, new_field, strict)
            # Drop indexes, SQL Server requires explicit deletion
            self._delete_indexes(model, old_field, new_field)
        # When changing a column NULL constraint to NOT NULL with a given
        # default value, we need to perform 4 steps:
        #  1. Add a default for new incoming writes
        #  2. Update existing NULL rows with new default
        #  3. Replace NULL constraint with NOT NULL
        #  4. Drop the default again.
        # Default change?
        old_default = self.effective_default(old_field)
        new_default = self.effective_default(new_field)
        needs_database_default = (
            old_field.null and
            not new_field.null and
            old_default != new_default and
            new_default is not None and
            not self.skip_default(new_field)
        )
        if needs_database_default:
            actions.append(self._alter_column_default_sql(model, old_field, new_field))
        # Nullability change?
        if old_field.null != new_field.null:
            fragment = self._alter_column_null_sql(model, old_field, new_field)
            if fragment:
                null_actions.append(fragment)
            if not new_field.null:
                # Drop unique constraint, SQL Server requires explicit deletion
                self._delete_unique_constraints(model, old_field, new_field, strict)
                # Drop indexes, SQL Server requires explicit deletion
                self._delete_indexes(model, old_field, new_field)
        # Only if we have a default and there is a change from NULL to NOT NULL
        four_way_default_alteration = (
            new_field.has_default() and
            (old_field.null and not new_field.null)
        )
        if actions or null_actions:
            if not four_way_default_alteration:
                # If we don't have to do a 4-way default alteration we can
                # directly run a (NOT) NULL alteration
                actions = actions + null_actions
            # Combine actions together if we can (e.g. postgres)
            if self.connection.features.supports_combined_alters and actions:
                sql, params = tuple(zip(*actions))
                actions = [(", ".join(sql), sum(params, []))]
            # Apply those actions
            for sql, params in actions:
                self._delete_indexes(model, old_field, new_field)
                self.execute(
                    self.sql_alter_column % {
                        "table": self.quote_name(model._meta.db_table),
                        "changes": sql,
                    },
                    params,
                )
            if four_way_default_alteration:
                # Update existing rows with default value
                self.execute(
                    self.sql_update_with_default % {
                        "table": self.quote_name(model._meta.db_table),
                        "column": self.quote_name(new_field.column),
                        "default": "%s",
                    },
                    [new_default],
                )
                # Since we didn't run a NOT NULL change before we need to do it
                # now
                for sql, params in null_actions:
                    self.execute(
                        self.sql_alter_column % {
                            "table": self.quote_name(model._meta.db_table),
                            "changes": sql,
                        },
                        params,
                    )
        if post_actions:
            for sql, params in post_actions:
                self.execute(sql, params)
        # If primary_key changed to False, delete the primary key constraint.
        if old_field.primary_key and not new_field.primary_key:
            self._delete_primary_key(model, strict)
        # Added a unique?
        if self._unique_should_be_added(old_field, new_field):
            if (self.connection.features.supports_nullable_unique_constraints and
                    not new_field.many_to_many and new_field.null):

                self.execute(
                    self._create_index_sql(
                        model, [new_field], sql=self.sql_create_unique_null, suffix="_uniq"
                    )
                )
            else:
                self.execute(self._create_unique_sql(model, [new_field.column]))
        # Added an index?
        # constraint will no longer be used in lieu of an index. The following
        # lines from the truth table show all True cases; the rest are False:
        #
        # old_field.db_index | old_field.unique | new_field.db_index | new_field.unique
        # ------------------------------------------------------------------------------
        # False              | False            | True               | False
        # False              | True             | True               | False
        # True               | True             | True               | False
        if (not old_field.db_index or old_field.unique) and new_field.db_index and not new_field.unique:
            self.execute(self._create_index_sql(model, [new_field]))

        # Restore indexes & unique constraints deleted above, SQL Server requires explicit restoration
        if (old_type != new_type or (old_field.null and not new_field.null)) and (
            old_field.column == new_field.column
        ):
            # Restore unique constraints
            # Note: if nullable they are implemented via an explicit filtered UNIQUE INDEX (not CONSTRAINT)
            # in order to get ANSI-compliant NULL behaviour (i.e. NULL != NULL, multiple are allowed)
            if old_field.unique and new_field.unique:
                if new_field.null:
                    self.execute(
                        self._create_index_sql(
                            model, [old_field], sql=self.sql_create_unique_null, suffix="_uniq"
                        )
                    )
                else:
                    self.execute(self._create_unique_sql(model, columns=[old_field.column]))
            else:
                for fields in model._meta.unique_together:
                    columns = [model._meta.get_field(field).column for field in fields]
                    if old_field.column in columns:
                        condition = ' AND '.join(["[%s] IS NOT NULL" % col for col in columns])
                        self.execute(self._create_unique_sql(model, columns, condition=condition))
            # Restore indexes
            index_columns = []
            if old_field.db_index and new_field.db_index:
                index_columns.append([old_field])
            else:
                for fields in model._meta.index_together:
                    columns = [model._meta.get_field(field) for field in fields]
                    if old_field.column in [c.column for c in columns]:
                        index_columns.append(columns)
            if index_columns:
                for columns in index_columns:
                    self.execute(self._create_index_sql(model, columns, suffix='_idx'))
        # Type alteration on primary key? Then we need to alter the column
        # referring to us.
        rels_to_update = []
        if old_field.primary_key and new_field.primary_key and old_type != new_type:
            rels_to_update.extend(_related_non_m2m_objects(old_field, new_field))
        # Changed to become primary key?
        if self._field_became_primary_key(old_field, new_field):
            # Make the new one
            self.execute(
                self.sql_create_pk % {
                    "table": self.quote_name(model._meta.db_table),
                    "name": self.quote_name(
                        self._create_index_name(model._meta.db_table, [new_field.column], suffix="_pk")
                    ),
                    "columns": self.quote_name(new_field.column),
                }
            )
            # Update all referencing columns
            rels_to_update.extend(_related_non_m2m_objects(old_field, new_field))
        # Handle our type alters on the other end of rels from the PK stuff above
        for old_rel, new_rel in rels_to_update:
            rel_db_params = new_rel.field.db_parameters(connection=self.connection)
            rel_type = rel_db_params['type']
            fragment, other_actions = self._alter_column_type_sql(
                new_rel.related_model, old_rel.field, new_rel.field, rel_type
            )
            self.execute(
                self.sql_alter_column % {
                    "table": self.quote_name(new_rel.related_model._meta.db_table),
                    "changes": fragment[0],
                },
                fragment[1],
            )
            for sql, params in other_actions:
                self.execute(sql, params)
        # Does it have a foreign key?
        if (new_field.remote_field and
                (fks_dropped or not old_field.remote_field or not old_field.db_constraint) and
                new_field.db_constraint):
            self.execute(self._create_fk_sql(model, new_field, "_fk_%(to_table)s_%(to_column)s"))
        # Rebuild FKs that pointed to us if we previously had to drop them
        if drop_foreign_keys:
            for rel in new_field.model._meta.related_objects:
                if _is_relevant_relation(rel, new_field) and rel.field.db_constraint:
                    self.execute(self._create_fk_sql(rel.related_model, rel.field, "_fk"))
        # Does it have check constraints we need to add?
        if (old_db_params['check'] != new_db_params['check'] and new_db_params['check']) or (
            # SQL Server requires explicit creation after altering column type with the same constraint
            old_db_params['check'] == new_db_params['check'] and new_db_params['check'] and
            old_db_params['type'] != new_db_params['type']
        ):
            self.execute(
                self.sql_create_check % {
                    "table": self.quote_name(model._meta.db_table),
                    "name": self.quote_name(
                        self._create_index_name(model._meta.db_table, [new_field.column], suffix="_check")
                    ),
                    "column": self.quote_name(new_field.column),
                    "check": new_db_params['check'],
                }
            )
        # Drop the default if we need to
        # (Django usually does not use in-database defaults)
        if needs_database_default:
            changes_sql, params = self._alter_column_default_sql(model, old_field, new_field, drop=True)
            sql = self.sql_alter_column % {
                "table": self.quote_name(model._meta.db_table),
                "changes": changes_sql,
            }
            self.execute(sql, params)

        # Reset connection if required
        if self.connection.features.connection_persists_old_columns:
            self.connection.close()

    def _delete_indexes(self, model, old_field, new_field):
        """Drop every index (single-column, index_together, unique_together)
        covering old_field's column; SQL Server cannot alter a covered column."""
        index_columns = []
        if old_field.db_index and new_field.db_index:
            index_columns.append([old_field.column])
        for fields in model._meta.index_together:
            columns = [model._meta.get_field(field).column for field in fields]
            if old_field.column in columns:
                index_columns.append(columns)

        for fields in model._meta.unique_together:
            columns = [model._meta.get_field(field).column for field in fields]
            if old_field.column in columns:
                index_columns.append(columns)
        if index_columns:
            for columns in index_columns:
                index_names = self._constraint_names(model, columns, index=True)
                for index_name in index_names:
                    self.execute(self._delete_constraint_sql(self.sql_delete_index, model, index_name))

    def _delete_unique_constraints(self, model, old_field, new_field, strict=False):
        """Drop unique constraints AND unique indexes on old_field's column
        (nullable uniques are filtered indexes, so both forms can exist)."""
        unique_columns = []
        if old_field.unique and new_field.unique:
            unique_columns.append([old_field.column])
        if unique_columns:
            for columns in unique_columns:
                constraint_names_normal = self._constraint_names(model, columns, unique=True, index=False)
                constraint_names_index = self._constraint_names(model, columns, unique=True, index=True)
                constraint_names = constraint_names_normal + constraint_names_index
                if strict and len(constraint_names) != 1:
                    raise ValueError("Found wrong number (%s) of unique constraints for %s.%s" % (
                        len(constraint_names),
                        model._meta.db_table,
                        old_field.column,
                    ))
                for constraint_name in constraint_names_normal:
                    self.execute(self._delete_constraint_sql(self.sql_delete_unique, model, constraint_name))
                # Unique indexes which are not table constraints must be deleted using the appropriate SQL.
                # These may exist for example to enforce ANSI-compliant unique constraints on nullable columns.
                for index_name in constraint_names_index:
                    self.execute(self._delete_constraint_sql(self.sql_delete_index, model, index_name))

    def _rename_field_sql(self, table, old_field, new_field, new_type):
        """Carry the NULL/NOT NULL status into the type before the rename."""
        new_type = self._set_field_new_type_null_status(old_field, new_type)
        return super()._rename_field_sql(table, old_field, new_field, new_type)

    def _set_field_new_type_null_status(self, field, new_type):
        """
        Keep the null property of the old field. If it has changed, it will be
        handled separately.
        """
        if field.null:
            new_type += " NULL"
        else:
            new_type += " NOT NULL"
        return new_type

    def add_field(self, model, field):
        """
        Create a field on a model. Usually involves adding a column, but may
        involve adding a table instead (for M2M fields).
        """
        # Special-case implicit M2M tables
        if field.many_to_many and field.remote_field.through._meta.auto_created:
            return self.create_model(field.remote_field.through)
        # Get the column's definition
        definition, params = self.column_sql(model, field, include_default=True)
        # It might not actually have a column behind it
        if definition is None:
            return

        # Nullable unique columns: strip inline UNIQUE and defer a filtered
        # unique index instead (ANSI NULL semantics).
        if (self.connection.features.supports_nullable_unique_constraints and
                not field.many_to_many and field.null and field.unique):

            definition = definition.replace(' UNIQUE', '')
            self.deferred_sql.append(self._create_index_sql(
                model, [field], sql=self.sql_create_unique_null, suffix="_uniq"
            ))

        # Check constraints can go on the column SQL here
        db_params = field.db_parameters(connection=self.connection)
        if db_params['check']:
            definition += " CHECK (%s)" % db_params['check']
        # Build the SQL and run it
        sql = self.sql_create_column % {
            "table": self.quote_name(model._meta.db_table),
            "column": self.quote_name(field.column),
            "definition": definition,
        }
        self.execute(sql, params)
        # Drop the default if we need to
        # (Django usually does not use in-database defaults)
        if not self.skip_default(field) and self.effective_default(field) is not None:
            changes_sql, params = self._alter_column_default_sql(model, None, field, drop=True)
            sql = self.sql_alter_column % {
                "table": self.quote_name(model._meta.db_table),
                "changes": changes_sql,
            }
            self.execute(sql, params)
        # Add an index, if required
        self.deferred_sql.extend(self._field_indexes_sql(model, field))
        # Add any FK constraints later
        if field.remote_field and self.connection.features.supports_foreign_keys and field.db_constraint:
            self.deferred_sql.append(self._create_fk_sql(model, field, "_fk_%(to_table)s_%(to_column)s"))
        # Reset connection if required
        if self.connection.features.connection_persists_old_columns:
            self.connection.close()

    def _create_unique_sql(self, model, columns, name=None, condition=None, deferrable=None):
        """Return a Statement creating a unique constraint, or a filtered
        unique index when `condition` is given; None when unsupported."""
        if (
            deferrable and
            not self.connection.features.supports_deferrable_unique_constraints
        ):
            return None

        def create_unique_name(*args, **kwargs):
            return self.quote_name(self._create_index_name(*args, **kwargs))

        table = Table(model._meta.db_table, self.quote_name)
        if name is None:
            name = IndexName(model._meta.db_table, columns, '_uniq', create_unique_name)
        else:
            name = self.quote_name(name)
        columns = Columns(table, columns, self.quote_name)
        if condition:
            return Statement(
                self.sql_create_unique_index,
                table=table,
                name=name,
                columns=columns,
                condition=' WHERE ' + condition,
                deferrable=self._deferrable_constraint_sql(deferrable)
            ) if self.connection.features.supports_partial_indexes else None
        else:
            return Statement(
                self.sql_create_unique,
                table=table,
                name=name,
                columns=columns,
                deferrable=self._deferrable_constraint_sql(deferrable)
            )

    def _create_index_sql(self, model, fields, *, name=None, suffix='', using='',
                          db_tablespace=None, col_suffixes=(), sql=None, opclasses=(),
                          condition=None):
        """
        Return the SQL statement to create the index for one or several fields.
        `sql` can be specified if the syntax differs from the standard (GIS
        indexes, ...).
        """
        tablespace_sql = self._get_index_tablespace_sql(model, fields, db_tablespace=db_tablespace)
        columns = [field.column for field in fields]
        sql_create_index = sql or self.sql_create_index
        table = model._meta.db_table

        def create_index_name(*args, **kwargs):
            # Cache the generated name so repeated renderings agree.
            nonlocal name
            if name is None:
                name = self._create_index_name(*args, **kwargs)
            return self.quote_name(name)

        return Statement(
            sql_create_index,
            table=Table(table, self.quote_name),
            name=IndexName(table, columns, suffix, create_index_name),
            using=using,
            columns=self._index_columns(table, columns, col_suffixes, opclasses),
            extra=tablespace_sql,
            condition=(' WHERE ' + condition) if condition else '',
        )

    def create_model(self, model):
        """
        Takes a model and creates a table for it in the database.
        Will also create any accompanying indexes or unique constraints.
        """
        # Create column SQL, add FK deferreds if needed
        column_sqls = []
        params = []
        for field in model._meta.local_fields:
            # SQL
            definition, extra_params = self.column_sql(model, field)
            if definition is None:
                continue

            # Nullable unique columns: strip inline UNIQUE, defer filtered
            # unique index instead (ANSI NULL semantics).
            if (self.connection.features.supports_nullable_unique_constraints and
                    not field.many_to_many and field.null and field.unique):

                definition = definition.replace(' UNIQUE', '')
                self.deferred_sql.append(self._create_index_sql(
                    model, [field], sql=self.sql_create_unique_null, suffix="_uniq"
                ))

            # Check constraints can go on the column SQL here
            db_params = field.db_parameters(connection=self.connection)
            if db_params['check']:
                # SQL Server requires a name for the check constraint
                definition += self._sql_check_constraint % {
                    "name": self._create_index_name(model._meta.db_table, [field.column], suffix="_check"),
                    "check": db_params['check']
                }
            # Autoincrement SQL (for backends with inline variant)
            col_type_suffix = field.db_type_suffix(connection=self.connection)
            if col_type_suffix:
                definition += " %s" % col_type_suffix
            params.extend(extra_params)
            # FK
            if field.remote_field and field.db_constraint:
                to_table = field.remote_field.model._meta.db_table
                to_column = field.remote_field.model._meta.get_field(field.remote_field.field_name).column
                if self.sql_create_inline_fk:
                    definition += " " + self.sql_create_inline_fk % {
                        "to_table": self.quote_name(to_table),
                        "to_column": self.quote_name(to_column),
                    }
                elif self.connection.features.supports_foreign_keys:
                    self.deferred_sql.append(self._create_fk_sql(model, field, "_fk_%(to_table)s_%(to_column)s"))
            # Add the SQL to our big list
            column_sqls.append("%s %s" % (
                self.quote_name(field.column),
                definition,
            ))
            # Autoincrement SQL (for backends with post table definition variant)
            if field.get_internal_type() in ("AutoField", "BigAutoField"):
                autoinc_sql = self.connection.ops.autoinc_sql(model._meta.db_table, field.column)
                if autoinc_sql:
                    self.deferred_sql.extend(autoinc_sql)

        # Add any unique_togethers (always deferred, as some fields might be
        # created afterwards, like geometry fields with some backends)
        for fields in model._meta.unique_together:
            columns = [model._meta.get_field(field).column for field in fields]
            condition = ' AND '.join(["[%s] IS NOT NULL" % col for col in columns])
self.deferred_sql.append(self._create_unique_sql(model, columns, condition=condition))NEWLINENEWLINE # Make the tableNEWLINE sql = self.sql_create_table % {NEWLINE "table": self.quote_name(model._meta.db_table),NEWLINE "definition": ", ".join(column_sqls)NEWLINE }NEWLINE if model._meta.db_tablespace:NEWLINE tablespace_sql = self.connection.ops.tablespace_sql(model._meta.db_tablespace)NEWLINE if tablespace_sql:NEWLINE sql += ' ' + tablespace_sqlNEWLINE # Prevent using [] as params, in the case a literal '%' is used in the definitionNEWLINE self.execute(sql, params or None)NEWLINENEWLINE # Add any field index and index_together's (deferred as SQLite3 _remake_table needs it)NEWLINE self.deferred_sql.extend(self._model_indexes_sql(model))NEWLINE self.deferred_sql = list(set(self.deferred_sql))NEWLINENEWLINE # Make M2M tablesNEWLINE for field in model._meta.local_many_to_many:NEWLINE if field.remote_field.through._meta.auto_created:NEWLINE self.create_model(field.remote_field.through)NEWLINENEWLINE def delete_model(self, model):NEWLINE """NEWLINE Deletes a model from the database.NEWLINE """NEWLINE # Delete the foreign key constraintsNEWLINE result = self.execute(NEWLINE self._sql_select_foreign_key_constraints % {NEWLINE "table": self.quote_value(model._meta.db_table),NEWLINE },NEWLINE has_result=TrueNEWLINE )NEWLINE if result:NEWLINE for table, constraint in result:NEWLINE sql = self.sql_alter_column % {NEWLINE "table": self.quote_name(table),NEWLINE "changes": self.sql_alter_column_no_default % {NEWLINE "column": self.quote_name(constraint),NEWLINE }NEWLINE }NEWLINE self.execute(sql)NEWLINENEWLINE # Delete the tableNEWLINE super().delete_model(model)NEWLINE # Remove all deferred statements referencing the deleted table.NEWLINE for sql in list(self.deferred_sql):NEWLINE if isinstance(sql, Statement) and sql.references_table(model._meta.db_table):NEWLINE self.deferred_sql.remove(sql)NEWLINENEWLINE def execute(self, sql, params=(), has_result=False):NEWLINE """NEWLINE 
Executes the given SQL statement, with optional parameters.NEWLINE """NEWLINE result = NoneNEWLINE # Don't perform the transactional DDL check if SQL is being collectedNEWLINE # as it's not going to be executed anyway.NEWLINE if not self.collect_sql and self.connection.in_atomic_block and not self.connection.features.can_rollback_ddl:NEWLINE raise TransactionManagementError(NEWLINE "Executing DDL statements while in a transaction on databases "NEWLINE "that can't perform a rollback is prohibited."NEWLINE )NEWLINE # Account for non-string statement objects.NEWLINE sql = str(sql)NEWLINE # Log the command we're running, then run itNEWLINE logger.debug("%s; (params %r)", sql, params, extra={'params': params, 'sql': sql})NEWLINE if self.collect_sql:NEWLINE ending = "" if sql.endswith(";") else ";"NEWLINE if params is not None:NEWLINE self.collected_sql.append((sql % tuple(map(self.quote_value, params))) + ending)NEWLINE else:NEWLINE self.collected_sql.append(sql + ending)NEWLINE else:NEWLINE cursor = self.connection.cursor()NEWLINE cursor.execute(sql, params)NEWLINE if has_result:NEWLINE result = cursor.fetchall()NEWLINE # the cursor can be closed only when the driver supports openingNEWLINE # multiple cursors on a connection because the migration commandNEWLINE # has already opened a cursor outside this methodNEWLINE if self.connection.supports_mars:NEWLINE cursor.close()NEWLINE return resultNEWLINENEWLINE def prepare_default(self, value):NEWLINE return self.quote_value(value)NEWLINENEWLINE def quote_value(self, value):NEWLINE """NEWLINE Returns a quoted version of the value so it's safe to use in an SQLNEWLINE string. 
This is not safe against injection from user code; it isNEWLINE intended only for use in making SQL scripts or preparing default valuesNEWLINE for particularly tricky backends (defaults are not user-defined, though,NEWLINE so this is safe).NEWLINE """NEWLINE if isinstance(value, (datetime.datetime, datetime.date, datetime.time)):NEWLINE return "'%s'" % valueNEWLINE elif isinstance(value, str):NEWLINE return "'%s'" % value.replace("'", "''")NEWLINE elif isinstance(value, (bytes, bytearray, memoryview)):NEWLINE return "0x%s" % force_str(binascii.hexlify(value))NEWLINE elif isinstance(value, bool):NEWLINE return "1" if value else "0"NEWLINE else:NEWLINE return str(value)NEWLINENEWLINE def remove_field(self, model, field):NEWLINE """NEWLINE Removes a field from a model. Usually involves deleting a column,NEWLINE but for M2Ms may involve deleting a table.NEWLINE """NEWLINE # Special-case implicit M2M tablesNEWLINE if field.many_to_many and field.remote_field.through._meta.auto_created:NEWLINE return self.delete_model(field.remote_field.through)NEWLINE # It might not actually have a column behind itNEWLINE if field.db_parameters(connection=self.connection)['type'] is None:NEWLINE returnNEWLINE # Drop any FK constraints, SQL Server requires explicit deletionNEWLINE with self.connection.cursor() as cursor:NEWLINE constraints = self.connection.introspection.get_constraints(cursor, model._meta.db_table)NEWLINE for name, infodict in constraints.items():NEWLINE if field.column in infodict['columns'] and infodict['foreign_key']:NEWLINE self.execute(self._delete_constraint_sql(self.sql_delete_fk, model, name))NEWLINE # Drop any indexes, SQL Server requires explicit deletionNEWLINE for name, infodict in constraints.items():NEWLINE if field.column in infodict['columns'] and infodict['index']:NEWLINE self.execute(self.sql_delete_index % {NEWLINE "table": self.quote_name(model._meta.db_table),NEWLINE "name": self.quote_name(name),NEWLINE })NEWLINE # Drop primary key constraint, SQL 
Server requires explicit deletionNEWLINE for name, infodict in constraints.items():NEWLINE if field.column in infodict['columns'] and infodict['primary_key']:NEWLINE self.execute(self.sql_delete_pk % {NEWLINE "table": self.quote_name(model._meta.db_table),NEWLINE "name": self.quote_name(name),NEWLINE })NEWLINE # Drop check constraints, SQL Server requires explicit deletionNEWLINE for name, infodict in constraints.items():NEWLINE if field.column in infodict['columns'] and infodict['check']:NEWLINE self.execute(self.sql_delete_check % {NEWLINE "table": self.quote_name(model._meta.db_table),NEWLINE "name": self.quote_name(name),NEWLINE })NEWLINE # Drop unique constraints, SQL Server requires explicit deletionNEWLINE for name, infodict in constraints.items():NEWLINE if (field.column in infodict['columns'] and infodict['unique'] andNEWLINE not infodict['primary_key'] and not infodict['index']):NEWLINE self.execute(self.sql_delete_unique % {NEWLINE "table": self.quote_name(model._meta.db_table),NEWLINE "name": self.quote_name(name),NEWLINE })NEWLINE # Delete the columnNEWLINE sql = self.sql_delete_column % {NEWLINE "table": self.quote_name(model._meta.db_table),NEWLINE "column": self.quote_name(field.column),NEWLINE }NEWLINE self.execute(sql)NEWLINE # Reset connection if requiredNEWLINE if self.connection.features.connection_persists_old_columns:NEWLINE self.connection.close()NEWLINE # Remove all deferred statements referencing the deleted column.NEWLINE for sql in list(self.deferred_sql):NEWLINE if isinstance(sql, Statement) and sql.references_column(model._meta.db_table, field.column):NEWLINE self.deferred_sql.remove(sql)NEWLINE
import pytest
import torch

from pytorch_toolbelt.modules import HFF, GlobalKMaxPool2d, ResidualDeconvolutionUpsample2d

skip_if_no_cuda = pytest.mark.skipif(not torch.cuda.is_available(), reason="Cuda is not available")


def test_hff_dynamic_size():
    """HFF with only an upsample scale must fuse pyramid levels back to the top size."""
    feature_maps = [
        torch.randn((4, 3, 512, 512)),
        torch.randn((4, 3, 256, 256)),
        torch.randn((4, 3, 128, 128)),
        torch.randn((4, 3, 64, 64)),
    ]

    hff = HFF(upsample_scale=2)
    output = hff(feature_maps)
    assert output.size(2) == 512
    assert output.size(3) == 512


def test_hff_static_size():
    """HFF with explicit per-level sizes (including non power-of-two steps)."""
    feature_maps = [
        torch.randn((4, 3, 512, 512)),
        torch.randn((4, 3, 384, 384)),
        torch.randn((4, 3, 256, 256)),
        torch.randn((4, 3, 128, 128)),
        torch.randn((4, 3, 32, 32)),
    ]

    hff = HFF(sizes=[(512, 512), (384, 384), (256, 256), (128, 128), (32, 32)])
    output = hff(feature_maps)
    assert output.size(2) == 512
    assert output.size(3) == 512


# def test_upsample():
#     block = DepthToSpaceUpsample2d(1)
#     original = np.expand_dims(cv2.imread("lena.png", cv2.IMREAD_GRAYSCALE), -1)
#     input = tensor_from_rgb_image(original / 255.0).unsqueeze(0).float()
#     output = block(input)
#
#     output_rgb = rgb_image_from_tensor(output.squeeze(0), mean=0, std=1, max_pixel_value=1, dtype=np.float32)
#
#     cv2.imshow("Original", original)
#     cv2.imshow("Upsampled (cv2)", cv2.resize(original, None, fx=2, fy=2, interpolation=cv2.INTER_LINEAR))
#     cv2.imshow("Upsampled", cv2.normalize(output_rgb, None, 0, 255, cv2.NORM_MINMAX, dtype=cv2.CV_8U))
#     cv2.waitKey(-1)


def test_residualdeconvolutionupsampleblock():
    """Smoke-test ResidualDeconvolutionUpsample2d and log input/output statistics."""
    x = torch.randn((4, 16, 32, 32))
    block = ResidualDeconvolutionUpsample2d(16)
    output = block(x)
    print(x.size(), x.mean(), x.std())
    # BUG FIX: the second line previously printed x.std() again instead of
    # the statistics of the upsampled output.
    print(output.size(), output.mean(), output.std())


def test_kmax_pool():
    """GlobalKMaxPool2d: flatten=True drops spatial dims, flatten=False keeps 1x1."""
    x = torch.randn((8, 512, 16, 16))
    module1 = GlobalKMaxPool2d(k=4, flatten=True)
    module2 = GlobalKMaxPool2d(k=4, flatten=False)

    y1 = module1(x)
    y2 = module2(x)

    assert y1.size() == (8, 512)
    assert y2.size() == (8, 512, 1, 1)
import json
import random
import re
from io import BytesIO

import discord
from redbot.core import checks
from redbot.core import commands
from redbot.core.bot import Red
from redbot.core.utils.chat_formatting import inline, box

from rpadutils import CogSettings, clean_global_mentions

DONATE_MSG = """
To donate to cover bot hosting fees you can use one of:
  Patreon : https://www.patreon.com/tsubaki_bot

Read the Patreon or join the Tsubaki Support Server for more details:
  https://discord.gg/tVPmeG8

You permanently get some special perks for donating even $1.

The following users have donated. Thanks!
{donors}
"""

INSULTS_FILE = "data/donations/insults.json"
DEFAULT_INSULTS = {
    'miru_references': [
        'Are you talking to me you piece of shit?',
    ],
    'insults': [
        'You are garbage.',
        'Kill yourself.',
    ]
}
LOVE_FILE = "data/donations/love.json"
DEFAULT_LOVE = {
    'cute': ['xoxo'],
    'sexy': ['{}====>'],
    'perverted': ['{}===>()'],
}

# Tsubaki support server and role ids, previously repeated inline in every
# check below.
TSUBAKI_GUILD_ID = 746131494875168770
DONOR_ROLE_ID = 749849518467580015
PATRON_ROLE_ID = 749849451769757726


def roll(chance: int):
    """Return True with a `chance` percent probability."""
    return random.randrange(100) < chance


def is_patron(ctx):
    """Command check: the invoking user holds the patron role in the Tsubaki guild."""
    tsubaki_guild = ctx.bot.get_guild(TSUBAKI_GUILD_ID)
    patron_role = tsubaki_guild.get_role(PATRON_ROLE_ID)
    author = tsubaki_guild.get_member(ctx.author.id)
    if author is None or patron_role not in author.roles:
        return False
    return True


def is_donor(ctx):
    """Command check: the invoking user holds the donor or patron role in the Tsubaki guild."""
    tsubaki_guild = ctx.bot.get_guild(TSUBAKI_GUILD_ID)
    donor_role = tsubaki_guild.get_role(DONOR_ROLE_ID)
    patron_role = tsubaki_guild.get_role(PATRON_ROLE_ID)
    author = tsubaki_guild.get_member(ctx.author.id)
    if author is None or (donor_role not in author.roles and patron_role not in author.roles):
        return False
    return True


class Donations(commands.Cog):
    """Manages donations and perks."""

    def __init__(self, bot: Red, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.bot = bot
        self.settings = DonationsSettings("donations")

        # Best-effort load of the canned-response files; fall back to the
        # bundled defaults if a file is missing or malformed. The old code
        # used a bare `except:` and leaked the file handle via
        # `json.load(open(...))`.
        try:
            with open(INSULTS_FILE, "r") as f:
                insults_json = json.load(f)
        except (OSError, ValueError):
            insults_json = {}
        self.insults_miru_reference = insults_json.get(
            'miru_references', DEFAULT_INSULTS['miru_references'])
        self.insults_list = insults_json.get('insults', DEFAULT_INSULTS['insults'])
        try:
            with open(LOVE_FILE, "r") as f:
                love_json = json.load(f)
        except (OSError, ValueError):
            love_json = {}
        self.cute_list = love_json.get('cute', DEFAULT_LOVE['cute'])
        self.sexy_list = love_json.get('sexy', DEFAULT_LOVE['sexy'])
        self.perverted_list = love_json.get('perverted', DEFAULT_LOVE['perverted'])

        # Populated by set_server_attributes() once the bot is ready.
        self.tsubaki_guild = None
        self.donor_role = None
        self.patron_role = None

    async def red_get_data_for_user(self, *, user_id):
        """Get a user's personal data."""
        udata = self.settings.getUserData(user_id)

        data = "Stored data for user with ID {}:\n".format(user_id)
        if udata['command']:
            data += " - You have setup the command '{}'.\n".format(udata['command'])
        if udata['embed']:
            data += " - You have setup the embed '{}'.\n".format(udata['embed'])
        if udata['insult']:
            data += " - You have asked the bot to insult you occasionally.\n"

        if not any(udata.values()):
            data = "No data is stored for user with ID {}.\n".format(user_id)

        # BytesIO was previously used without being imported (NameError);
        # `from io import BytesIO` is now at the top of the module.
        return {"user_data.txt": BytesIO(data.encode())}

    async def red_delete_data_for_user(self, *, requester, user_id):
        """Delete a user's personal data."""
        if requester not in ("discord_deleted_user", "owner"):
            self.settings.clearUserData(user_id)
        else:
            self.settings.clearUserDataFull(user_id)

    async def set_server_attributes(self):
        """Resolve and cache the Tsubaki guild and its donor/patron roles."""
        await self.bot.wait_until_ready()
        self.tsubaki_guild = self.bot.get_guild(TSUBAKI_GUILD_ID)
        self.donor_role = self.tsubaki_guild.get_role(DONOR_ROLE_ID)
        self.patron_role = self.tsubaki_guild.get_role(PATRON_ROLE_ID)

    @commands.command()
    async def donate(self, ctx):
        """Prints information about donations."""
        donor_names = set()
        for user in self.tsubaki_guild.members:
            if self.donor_role in user.roles or self.patron_role in user.roles:
                donor_names.add(user.name)

        msg = DONATE_MSG.format(count=len(donor_names), donors=', '.join(sorted(donor_names)))
        await ctx.send(box(msg))

    @commands.command()
    @commands.check(is_donor)
    async def mycommand(self, ctx, command: str, *, text: str):
        """Sets your custom command."""
        text = clean_global_mentions(text)

        self.settings.addCustomCommand(ctx.author.id, command, text)
        await ctx.send(inline('I set up your command: ' + command))

    @commands.command()
    @commands.check(is_donor)
    async def myembed(self, ctx, command: str, title: str, url: str, footer: str):
        """Sets your custom embed command.

        This lets you create a fancier image message. For example you can set up
        a simple inline image without a link using:
        [p]myembed lewd "" "http://i0.kym-cdn.com/photos/images/original/000/731/885/751.jpg" ""

        Want a title on that image? Fill in the first argument:
        [p]myembed lewd "L-lewd!" "<snip, see above>" ""

        Want a footer? Fill in the last argument:
        [p]myembed lewd "L-lewd!" "<snip, see above>" "source: some managa i read"
        """

        self.settings.addCustomEmbed(ctx.author.id, command, title, url, footer)
        await ctx.send(inline('I set up your embed: ' + command))

    @commands.command()
    @commands.check(is_donor)
    async def spankme(self, ctx):
        """You are trash."""
        await ctx.send(ctx.author.mention + ' ' + random.choice(self.insults_list))

    @commands.command()
    @commands.check(is_donor)
    async def insultme(self, ctx):
        """You are consistently trash."""
        user_id = ctx.author.id

        self.settings.addInsultsEnabled(user_id)
        await ctx.send(ctx.author.mention + ' ' 'Oh, I will.\n' + random.choice(self.insults_list))

    @commands.command()
    @commands.check(is_donor)
    async def plsno(self, ctx):
        """I am merciful."""

        self.settings.rmInsultsEnabled(ctx.author.id)
        await ctx.send('I will let you off easy this time.')

    @commands.command()
    @commands.check(is_donor)
    async def kissme(self, ctx):
        """You are so cute!."""
        await ctx.send(ctx.author.mention + ' ' + random.choice(self.cute_list))

    @commands.command()
    @commands.check(is_donor)
    async def lewdme(self, ctx):
        """So nsfw.."""
        if 'nsfw' in ctx.channel.name.lower():
            await ctx.send(ctx.author.mention + ' ' + random.choice(self.sexy_list))
        else:
            await ctx.send(ctx.author.mention + ' Oooh naughty...')
            await ctx.author.send(random.choice(self.sexy_list))

    @commands.command()
    @commands.check(is_donor)
    async def pervme(self, ctx):
        """Hentai!!!."""
        if 'nsfw' in ctx.channel.name.lower():
            await ctx.send(ctx.author.mention + ' ' + random.choice(self.perverted_list))
        else:
            await ctx.send(ctx.author.mention + ' Filthy hentai!')
            await ctx.author.send(random.choice(self.perverted_list))

    @commands.group()
    @checks.admin_or_permissions(manage_guild=True)
    async def donations(self, ctx):
        """Manage donation options."""

    @donations.command()
    async def togglePerks(self, ctx):
        """Enable or disable donor-specific perks for the server."""
        server_id = ctx.guild.id
        if server_id in self.settings.disabledServers():
            self.settings.rmDisabledServer(server_id)
            await ctx.send(inline('Donor perks enabled on this server'))
        else:
            self.settings.addDisabledServer(server_id)
            await ctx.send(inline('Donor perks disabled on this server'))

    @donations.command()
    @checks.is_owner()
    async def info(self, ctx):
        """Print donation related info."""
        patrons = [user for user in self.tsubaki_guild.members if self.patron_role in user.roles]
        donors = [user for user in self.tsubaki_guild.members if self.donor_role in user.roles
                  and self.patron_role not in user.roles]
        cmds = self.settings.customCommands()
        embeds = self.settings.customEmbeds()
        disabled_servers = self.settings.disabledServers()

        msg = 'Donations Info'

        msg += '\n\nPatrons:'
        for user in patrons:
            msg += '\n\t{} ({})'.format(user.name, user.id)

        msg += '\n\nDonors:'
        for user in donors:
            msg += '\n\t{} ({})'.format(user.name, user.id)

        msg += '\n\nDisabled servers:'
        for server_id in disabled_servers:
            server = self.bot.get_guild(int(server_id))
            msg += '\n\t{} ({})'.format(server.name if server else 'unknown', server_id)

        msg += '\n\n{} personal commands are set'.format(len(cmds))
        # BUG FIX: the embed count previously reported len(cmds).
        msg += '\n{} personal embeds are set'.format(len(embeds))

        await ctx.send(box(msg))

    @commands.Cog.listener("on_message")
    async def checkCC(self, message):
        """Respond to a donor's personal custom command or embed."""
        if len(message.content) < 2:
            return

        prefix = (await self.bot.get_prefix(message))[0]

        user_id = message.author.id
        # Check the author's roles directly instead of building two full
        # member-id lists on every message.
        member = self.tsubaki_guild.get_member(user_id)
        if member is None or (self.donor_role not in member.roles and
                              self.patron_role not in member.roles):
            return

        if message.guild and message.guild.id in self.settings.disabledServers():
            return

        user_cmd = self.settings.customCommands().get(user_id)
        user_embed = self.settings.customEmbeds().get(user_id)

        cmd = message.content[len(prefix):].lower()
        if user_cmd is not None:
            if cmd == user_cmd['command']:
                await message.channel.send(user_cmd['text'])
                return
        if user_embed is not None:
            if cmd == user_embed['command']:
                embed = discord.Embed()
                title = user_embed['title']
                url = user_embed['url']
                footer = user_embed['footer']
                if len(title):
                    embed.title = title
                if len(url):
                    embed.set_image(url=url)
                if len(footer):
                    embed.set_footer(text=footer)
                await message.channel.send(embed=embed)
                return

    @commands.Cog.listener("on_message")
    async def check_insult(self, message):
        """Occasionally insult users who opted in via `insultme`."""
        # Only opted-in people
        if message.author.id not in self.settings.insultsEnabled():
            return

        if message.guild and message.guild.id in self.settings.disabledServers():
            return

        content = message.clean_content
        # Ignore short messages
        if len(content) < 10:
            return

        msg = message.author.mention

        # Pretty frequently respond to direct messages
        mentions_bot = re.search(r'(miru|myr|tsubaki) bot', content, re.IGNORECASE) and roll(40)
        # Semi-frequently respond to miru in msg
        mentions_miru_and_roll = re.search(
            r'\b(miru|myr|tsubaki)\b', content, re.IGNORECASE) and roll(20)

        if mentions_bot or mentions_miru_and_roll:
            msg += ' ' + random.choice(self.insults_miru_reference)
            msg += '\n' + random.choice(self.insults_list)
            await message.channel.send(msg)
            return

        # Semi-frequently respond to long messages
        long_msg_and_roll = len(content) > 200 and roll(10)
        # Occasionally respond to other messages
        short_msg_and_roll = roll(1)

        if long_msg_and_roll or short_msg_and_roll:
            msg += ' ' + random.choice(self.insults_list)
            await message.channel.send(msg)
            return

        # Periodically send private messages
        if roll(7):
            msg += ' ' + random.choice(self.insults_list)
            await message.author.send(msg)
            return


class DonationsSettings(CogSettings):
    """Persistent storage for donor commands, embeds, and opt-in flags."""

    def make_default_settings(self):
        config = {
            'custom_commands': {},
            'custom_embeds': {},
            'disabled_servers': [],
            'insults_enabled': [],
        }
        return config

    def customCommands(self):
        return self.bot_settings['custom_commands']

    def addCustomCommand(self, user_id, command, text):
        cmds = self.customCommands()
        cmds[user_id] = {
            'command': command.lower(),
            'text': text,
        }
        self.save_settings()

    def rmCustomCommand(self, user_id):
        cmds = self.customCommands()
        if user_id in cmds:
            # BUG FIX: `cmds` is a dict; the old `cmds.remove(user_id)`
            # raised AttributeError, so commands could never be removed.
            del cmds[user_id]
            self.save_settings()

    def customEmbeds(self):
        return self.bot_settings['custom_embeds']

    def addCustomEmbed(self, user_id, command, title, url, footer):
        embeds = self.customEmbeds()
        embeds[user_id] = {
            'command': command.lower().strip(),
            'title': title.strip(),
            'url': url.strip(),
            'footer': footer.strip(),
        }
        self.save_settings()

    def rmCustomEmbed(self, user_id):
        embeds = self.customEmbeds()
        if user_id in embeds:
            # BUG FIX: `embeds` is a dict; the old `embeds.remove(user_id)`
            # raised AttributeError, so embeds could never be removed.
            del embeds[user_id]
            self.save_settings()

    def disabledServers(self):
        return self.bot_settings['disabled_servers']

    def addDisabledServer(self, server_id):
        disabled_servers = self.disabledServers()
        if server_id not in disabled_servers:
            disabled_servers.append(server_id)
            self.save_settings()

    def rmDisabledServer(self, server_id):
        disabled_servers = self.disabledServers()
        if server_id in disabled_servers:
            disabled_servers.remove(server_id)
            self.save_settings()

    def insultsEnabled(self):
        return self.bot_settings['insults_enabled']

    def addInsultsEnabled(self, user_id):
        insults_enabled = self.insultsEnabled()
        if user_id not in insults_enabled:
            insults_enabled.append(user_id)
            self.save_settings()

    def rmInsultsEnabled(self, user_id):
        insults_enabled = self.insultsEnabled()
        if user_id in insults_enabled:
            insults_enabled.remove(user_id)
            self.save_settings()

    # GDPR Compliance Functions
    def getUserData(self, user_id):
        o = {
            'command': "",
            'embed': "",
            'insult': False,
        }

        if user_id in self.bot_settings['custom_commands']:
            o['command'] = self.bot_settings['custom_commands'][user_id]["command"]
        if user_id in self.bot_settings['custom_embeds']:
            o['embed'] = self.bot_settings['custom_embeds'][user_id]["command"]
        if user_id in self.bot_settings['insults_enabled']:
            o['insult'] = True

        return o

    def clearUserData(self, user_id):
        if user_id in self.bot_settings['custom_commands']:
            del self.bot_settings['custom_commands'][user_id]
        if user_id in self.bot_settings['custom_embeds']:
            del self.bot_settings['custom_embeds'][user_id]
        if user_id in self.bot_settings['insults_enabled']:
            self.bot_settings['insults_enabled'].remove(user_id)
        self.save_settings()

    def clearUserDataFull(self, user_id):
        # Currently identical to clearUserData: all stored data is removable.
        self.clearUserData(user_id)
from django.core.files.base import ContentFile

from readable.models import Documents

from .utils import TestCase


class TestDocuments(TestCase):
    """Exercise the Documents model's upload naming and availability flag."""

    def setUp(self) -> None:
        super().setUp()
        self.user = self.create_user("staff", self.get_random_string())
        self.staff = self.create_staff(self.user)
        lorem_text = "Lorem ipsum dolor sit amet, consectetur adipiscing elit."
        self.lorem = ContentFile(lorem_text, "lorem.txt")

    def test_upload_directory(self) -> None:
        """Uploaded files keep their original name but are stored under their id."""
        document: Documents = Documents.objects.create(
            filename=self.lorem,
            uploaded_by=self.staff,
        )
        # The original upload name is preserved while the stored name is the
        # document id plus the original suffix.
        self.assertEqual(document.realname, self.lorem.name)
        expected_stored = f"{document.id!s}{document.path.suffix}"
        self.assertEqual(document.filename, expected_stored)

        # A freshly created document is unavailable until processing finishes.
        self.assertTrue(document.unavailable)
        document.status = Documents.Status.FINISHED
        document.save(update_fields=["status"])
        self.assertFalse(document.unavailable)
import operatorNEWLINENEWLINENEWLINEdef parse(data):NEWLINE return [NEWLINE tuple(tuple(int(c) for c in l.split(",")) for l in s.split("\n")[1:])NEWLINE for s in data.split("\n\n")NEWLINE ]NEWLINENEWLINENEWLINEdef rot_x(cords):NEWLINE return tuple((x, z, -y) for x, y, z in cords)NEWLINENEWLINENEWLINEdef rot_y(cords):NEWLINE return tuple((z, y, -x) for x, y, z in cords)NEWLINENEWLINENEWLINEdef rot_z(cords):NEWLINE return tuple((y, -x, z) for x, y, z in cords)NEWLINENEWLINENEWLINEdef rep(f, n, arg):NEWLINE for _ in range(n):NEWLINE arg = f(arg)NEWLINE return argNEWLINENEWLINENEWLINEdef rots(cords):NEWLINE for rx in range(2):NEWLINE for ry in range(4) if not rx else (0, 2):NEWLINE for rz in range(4):NEWLINE yield rep(rot_x, rx, rep(rot_y, ry, rep(rot_z, rz, cords)))NEWLINENEWLINENEWLINEdef sub(a, b):NEWLINE return tuple(map(operator.sub, a, b))NEWLINENEWLINENEWLINEdef add(a, b):NEWLINE return tuple(map(operator.add, a, b))NEWLINENEWLINENEWLINEdef find(beacons, s):NEWLINE for r in rots(s):NEWLINE for b in beacons:NEWLINE for bb in r[:-12]:NEWLINE off = sub(b, bb)NEWLINE match = 0NEWLINE for bbb in r:NEWLINE if add(bbb, off) in beacons:NEWLINE match += 1NEWLINE if match == 12:NEWLINE breakNEWLINE else:NEWLINE continueNEWLINE return off, tuple(add(bbb, off) for bbb in r)NEWLINE return None, NoneNEWLINENEWLINENEWLINEdef slam(data):NEWLINE beacons = set(data.pop(0))NEWLINE sensors = set()NEWLINE while data:NEWLINE for i, s in enumerate(data):NEWLINE sensor, found = find(beacons, s)NEWLINE if sensor:NEWLINE sensors.add(sensor)NEWLINE beacons.update(found)NEWLINE del data[i]NEWLINE print(f"Found {len(beacons)} with {len(sensors)}")NEWLINE return sensors, beaconsNEWLINENEWLINENEWLINEdef aoc(data):NEWLINE return len(slam(parse(data))[1])NEWLINE
#!/usr/bin/env pythonNEWLINE# encoding: utf-8NEWLINENEWLINEname = "Intra_R_Add_Exo_scission/training"NEWLINEshortDesc = "Kinetics used to train group additivity values"NEWLINElongDesc = """NEWLINEPut kinetic parameters for reactions to use as a training set for fittingNEWLINEgroup additivity values in this file.NEWLINE"""NEWLINENEWLINE
# coding=utf-8NEWLINE# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***NEWLINE# *** Do not edit by hand unless you're certain you know what you are doing! ***NEWLINENEWLINEimport warningsNEWLINEimport pulumiNEWLINEimport pulumi.runtimeNEWLINEfrom typing import UnionNEWLINEfrom .. import utilities, tablesNEWLINENEWLINENEWLINEclass PatchBaseline(pulumi.CustomResource):NEWLINE approval_rules: pulumi.Output[list]NEWLINE """NEWLINE A set of rules used to include patches in the baseline. up to 10 approval rules can be specified. Each approval_rule block requires the fields documented below.NEWLINENEWLINE * `approveAfterDays` (`float`) - The number of days after the release date of each patch matched by the rule the patch is marked as approved in the patch baseline. Valid Range: 0 to 100.NEWLINE * `complianceLevel` (`str`) - Defines the compliance level for patches approved by this rule. Valid compliance levels include the following: `CRITICAL`, `HIGH`, `MEDIUM`, `LOW`, `INFORMATIONAL`, `UNSPECIFIED`. The default value is `UNSPECIFIED`.NEWLINE * `enableNonSecurity` (`bool`) - Boolean enabling the application of non-security updates. The default value is 'false'. Valid for Linux instances only.NEWLINE * `patchFilters` (`list`) - The patch filter group that defines the criteria for the rule. Up to 5 patch filters can be specified per approval rule using Key/Value pairs. Valid Keys are `PATCH_SET | PRODUCT | CLASSIFICATION | MSRC_SEVERITY | PATCH_ID`.NEWLINE * `key` (`str`)NEWLINE * `values` (`list`)NEWLINE """NEWLINE approved_patches: pulumi.Output[list]NEWLINE """NEWLINE A list of explicitly approved patches for the baseline.NEWLINE """NEWLINE approved_patches_compliance_level: pulumi.Output[str]NEWLINE """NEWLINE Defines the compliance level for approved patches. This means that if an approved patch is reported as missing, this is the severity of the compliance violation. 
Valid compliance levels include the following: `CRITICAL`, `HIGH`, `MEDIUM`, `LOW`, `INFORMATIONAL`, `UNSPECIFIED`. The default value is `UNSPECIFIED`.NEWLINE """NEWLINE description: pulumi.Output[str]NEWLINE """NEWLINE The description of the patch baseline.NEWLINE """NEWLINE global_filters: pulumi.Output[list]NEWLINE """NEWLINE A set of global filters used to exclude patches from the baseline. Up to 4 global filters can be specified using Key/Value pairs. Valid Keys are `PRODUCT | CLASSIFICATION | MSRC_SEVERITY | PATCH_ID`.NEWLINENEWLINE * `key` (`str`)NEWLINE * `values` (`list`)NEWLINE """NEWLINE name: pulumi.Output[str]NEWLINE """NEWLINE The name of the patch baseline.NEWLINE """NEWLINE operating_system: pulumi.Output[str]NEWLINE """NEWLINE Defines the operating system the patch baseline applies to. Supported operating systems include `WINDOWS`, `AMAZON_LINUX`, `AMAZON_LINUX_2`, `SUSE`, `UBUNTU`, `CENTOS`, and `REDHAT_ENTERPRISE_LINUX`. The Default value is `WINDOWS`.NEWLINE """NEWLINE rejected_patches: pulumi.Output[list]NEWLINE """NEWLINE A list of rejected patches.NEWLINE """NEWLINE tags: pulumi.Output[dict]NEWLINE """NEWLINE A map of tags to assign to the resource.NEWLINE """NEWLINE def __init__(__self__, resource_name, opts=None, approval_rules=None, approved_patches=None, approved_patches_compliance_level=None, description=None, global_filters=None, name=None, operating_system=None, rejected_patches=None, tags=None, __props__=None, __name__=None, __opts__=None):NEWLINE """NEWLINE Provides an SSM Patch Baseline resourceNEWLINENEWLINE > **NOTE on Patch Baselines:** The `approved_patches` and `approval_rule` areNEWLINE both marked as optional fields, but the Patch Baseline requires that at least oneNEWLINE of them is specified.NEWLINENEWLINE ## Example UsageNEWLINENEWLINE Basic usage using `approved_patches` onlyNEWLINENEWLINE ```pythonNEWLINE import pulumiNEWLINE import pulumi_aws as awsNEWLINENEWLINE production = aws.ssm.PatchBaseline("production", 
approved_patches=["KB123456"])NEWLINE ```NEWLINENEWLINE Advanced usage, specifying patch filtersNEWLINENEWLINE ```pythonNEWLINE import pulumiNEWLINE import pulumi_aws as awsNEWLINENEWLINE production = aws.ssm.PatchBaseline("production",NEWLINE approval_rules=[NEWLINE {NEWLINE "approveAfterDays": 7,NEWLINE "complianceLevel": "HIGH",NEWLINE "patchFilters": [NEWLINE {NEWLINE "key": "PRODUCT",NEWLINE "values": ["WindowsServer2016"],NEWLINE },NEWLINE {NEWLINE "key": "CLASSIFICATION",NEWLINE "values": [NEWLINE "CriticalUpdates",NEWLINE "SecurityUpdates",NEWLINE "Updates",NEWLINE ],NEWLINE },NEWLINE {NEWLINE "key": "MSRC_SEVERITY",NEWLINE "values": [NEWLINE "Critical",NEWLINE "Important",NEWLINE "Moderate",NEWLINE ],NEWLINE },NEWLINE ],NEWLINE },NEWLINE {NEWLINE "approveAfterDays": 7,NEWLINE "patchFilters": [{NEWLINE "key": "PRODUCT",NEWLINE "values": ["WindowsServer2012"],NEWLINE }],NEWLINE },NEWLINE ],NEWLINE approved_patches=[NEWLINE "KB123456",NEWLINE "KB456789",NEWLINE ],NEWLINE description="Patch Baseline Description",NEWLINE global_filters=[NEWLINE {NEWLINE "key": "PRODUCT",NEWLINE "values": ["WindowsServer2008"],NEWLINE },NEWLINE {NEWLINE "key": "CLASSIFICATION",NEWLINE "values": ["ServicePacks"],NEWLINE },NEWLINE {NEWLINE "key": "MSRC_SEVERITY",NEWLINE "values": ["Low"],NEWLINE },NEWLINE ],NEWLINE rejected_patches=["KB987654"])NEWLINE ```NEWLINENEWLINE Advanced usage, specifying Microsoft application and Windows patch rulesNEWLINENEWLINE ```pythonNEWLINE import pulumiNEWLINE import pulumi_aws as awsNEWLINENEWLINE windows_os_apps = aws.ssm.PatchBaseline("windowsOsApps",NEWLINE approval_rules=[NEWLINE {NEWLINE "approveAfterDays": 7,NEWLINE "patchFilters": [NEWLINE {NEWLINE "key": "CLASSIFICATION",NEWLINE "values": [NEWLINE "CriticalUpdates",NEWLINE "SecurityUpdates",NEWLINE ],NEWLINE },NEWLINE {NEWLINE "key": "MSRC_SEVERITY",NEWLINE "values": [NEWLINE "Critical",NEWLINE "Important",NEWLINE ],NEWLINE },NEWLINE ],NEWLINE },NEWLINE {NEWLINE "approveAfterDays": 
7,NEWLINE "patchFilters": [NEWLINE {NEWLINE "key": "PATCH_SET",NEWLINE "values": ["APPLICATION"],NEWLINE },NEWLINE {NEWLINE "key": "PRODUCT",NEWLINE "values": [NEWLINE "Office 2013",NEWLINE "Office 2016",NEWLINE ],NEWLINE },NEWLINE ],NEWLINE },NEWLINE ],NEWLINE description="Patch both Windows and Microsoft apps",NEWLINE operating_system="WINDOWS")NEWLINE ```NEWLINENEWLINE :param str resource_name: The name of the resource.NEWLINE :param pulumi.ResourceOptions opts: Options for the resource.NEWLINE :param pulumi.Input[list] approval_rules: A set of rules used to include patches in the baseline. up to 10 approval rules can be specified. Each approval_rule block requires the fields documented below.NEWLINE :param pulumi.Input[list] approved_patches: A list of explicitly approved patches for the baseline.NEWLINE :param pulumi.Input[str] approved_patches_compliance_level: Defines the compliance level for approved patches. This means that if an approved patch is reported as missing, this is the severity of the compliance violation. Valid compliance levels include the following: `CRITICAL`, `HIGH`, `MEDIUM`, `LOW`, `INFORMATIONAL`, `UNSPECIFIED`. The default value is `UNSPECIFIED`.NEWLINE :param pulumi.Input[str] description: The description of the patch baseline.NEWLINE :param pulumi.Input[list] global_filters: A set of global filters used to exclude patches from the baseline. Up to 4 global filters can be specified using Key/Value pairs. Valid Keys are `PRODUCT | CLASSIFICATION | MSRC_SEVERITY | PATCH_ID`.NEWLINE :param pulumi.Input[str] name: The name of the patch baseline.NEWLINE :param pulumi.Input[str] operating_system: Defines the operating system the patch baseline applies to. Supported operating systems include `WINDOWS`, `AMAZON_LINUX`, `AMAZON_LINUX_2`, `SUSE`, `UBUNTU`, `CENTOS`, and `REDHAT_ENTERPRISE_LINUX`. 
The Default value is `WINDOWS`.NEWLINE :param pulumi.Input[list] rejected_patches: A list of rejected patches.NEWLINE :param pulumi.Input[dict] tags: A map of tags to assign to the resource.NEWLINENEWLINE The **approval_rules** object supports the following:NEWLINENEWLINE * `approveAfterDays` (`pulumi.Input[float]`) - The number of days after the release date of each patch matched by the rule the patch is marked as approved in the patch baseline. Valid Range: 0 to 100.NEWLINE * `complianceLevel` (`pulumi.Input[str]`) - Defines the compliance level for patches approved by this rule. Valid compliance levels include the following: `CRITICAL`, `HIGH`, `MEDIUM`, `LOW`, `INFORMATIONAL`, `UNSPECIFIED`. The default value is `UNSPECIFIED`.NEWLINE * `enableNonSecurity` (`pulumi.Input[bool]`) - Boolean enabling the application of non-security updates. The default value is 'false'. Valid for Linux instances only.NEWLINE * `patchFilters` (`pulumi.Input[list]`) - The patch filter group that defines the criteria for the rule. Up to 5 patch filters can be specified per approval rule using Key/Value pairs. 
Valid Keys are `PATCH_SET | PRODUCT | CLASSIFICATION | MSRC_SEVERITY | PATCH_ID`.NEWLINE * `key` (`pulumi.Input[str]`)NEWLINE * `values` (`pulumi.Input[list]`)NEWLINENEWLINE The **global_filters** object supports the following:NEWLINENEWLINE * `key` (`pulumi.Input[str]`)NEWLINE * `values` (`pulumi.Input[list]`)NEWLINE """NEWLINE if __name__ is not None:NEWLINE warnings.warn("explicit use of __name__ is deprecated", DeprecationWarning)NEWLINE resource_name = __name__NEWLINE if __opts__ is not None:NEWLINE warnings.warn("explicit use of __opts__ is deprecated, use 'opts' instead", DeprecationWarning)NEWLINE opts = __opts__NEWLINE if opts is None:NEWLINE opts = pulumi.ResourceOptions()NEWLINE if not isinstance(opts, pulumi.ResourceOptions):NEWLINE raise TypeError('Expected resource options to be a ResourceOptions instance')NEWLINE if opts.version is None:NEWLINE opts.version = utilities.get_version()NEWLINE if opts.id is None:NEWLINE if __props__ is not None:NEWLINE raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')NEWLINE __props__ = dict()NEWLINENEWLINE __props__['approval_rules'] = approval_rulesNEWLINE __props__['approved_patches'] = approved_patchesNEWLINE __props__['approved_patches_compliance_level'] = approved_patches_compliance_levelNEWLINE __props__['description'] = descriptionNEWLINE __props__['global_filters'] = global_filtersNEWLINE __props__['name'] = nameNEWLINE __props__['operating_system'] = operating_systemNEWLINE __props__['rejected_patches'] = rejected_patchesNEWLINE __props__['tags'] = tagsNEWLINE super(PatchBaseline, __self__).__init__(NEWLINE 'aws:ssm/patchBaseline:PatchBaseline',NEWLINE resource_name,NEWLINE __props__,NEWLINE opts)NEWLINENEWLINE @staticmethodNEWLINE def get(resource_name, id, opts=None, approval_rules=None, approved_patches=None, approved_patches_compliance_level=None, description=None, global_filters=None, name=None, operating_system=None, 
rejected_patches=None, tags=None):NEWLINE """NEWLINE Get an existing PatchBaseline resource's state with the given name, id, and optional extraNEWLINE properties used to qualify the lookup.NEWLINENEWLINE :param str resource_name: The unique name of the resulting resource.NEWLINE :param str id: The unique provider ID of the resource to lookup.NEWLINE :param pulumi.ResourceOptions opts: Options for the resource.NEWLINE :param pulumi.Input[list] approval_rules: A set of rules used to include patches in the baseline. up to 10 approval rules can be specified. Each approval_rule block requires the fields documented below.NEWLINE :param pulumi.Input[list] approved_patches: A list of explicitly approved patches for the baseline.NEWLINE :param pulumi.Input[str] approved_patches_compliance_level: Defines the compliance level for approved patches. This means that if an approved patch is reported as missing, this is the severity of the compliance violation. Valid compliance levels include the following: `CRITICAL`, `HIGH`, `MEDIUM`, `LOW`, `INFORMATIONAL`, `UNSPECIFIED`. The default value is `UNSPECIFIED`.NEWLINE :param pulumi.Input[str] description: The description of the patch baseline.NEWLINE :param pulumi.Input[list] global_filters: A set of global filters used to exclude patches from the baseline. Up to 4 global filters can be specified using Key/Value pairs. Valid Keys are `PRODUCT | CLASSIFICATION | MSRC_SEVERITY | PATCH_ID`.NEWLINE :param pulumi.Input[str] name: The name of the patch baseline.NEWLINE :param pulumi.Input[str] operating_system: Defines the operating system the patch baseline applies to. Supported operating systems include `WINDOWS`, `AMAZON_LINUX`, `AMAZON_LINUX_2`, `SUSE`, `UBUNTU`, `CENTOS`, and `REDHAT_ENTERPRISE_LINUX`. 
The Default value is `WINDOWS`.NEWLINE :param pulumi.Input[list] rejected_patches: A list of rejected patches.NEWLINE :param pulumi.Input[dict] tags: A map of tags to assign to the resource.NEWLINENEWLINE The **approval_rules** object supports the following:NEWLINENEWLINE * `approveAfterDays` (`pulumi.Input[float]`) - The number of days after the release date of each patch matched by the rule the patch is marked as approved in the patch baseline. Valid Range: 0 to 100.NEWLINE * `complianceLevel` (`pulumi.Input[str]`) - Defines the compliance level for patches approved by this rule. Valid compliance levels include the following: `CRITICAL`, `HIGH`, `MEDIUM`, `LOW`, `INFORMATIONAL`, `UNSPECIFIED`. The default value is `UNSPECIFIED`.NEWLINE * `enableNonSecurity` (`pulumi.Input[bool]`) - Boolean enabling the application of non-security updates. The default value is 'false'. Valid for Linux instances only.NEWLINE * `patchFilters` (`pulumi.Input[list]`) - The patch filter group that defines the criteria for the rule. Up to 5 patch filters can be specified per approval rule using Key/Value pairs. 
Valid Keys are `PATCH_SET | PRODUCT | CLASSIFICATION | MSRC_SEVERITY | PATCH_ID`.NEWLINE * `key` (`pulumi.Input[str]`)NEWLINE * `values` (`pulumi.Input[list]`)NEWLINENEWLINE The **global_filters** object supports the following:NEWLINENEWLINE * `key` (`pulumi.Input[str]`)NEWLINE * `values` (`pulumi.Input[list]`)NEWLINE """NEWLINE opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))NEWLINENEWLINE __props__ = dict()NEWLINENEWLINE __props__["approval_rules"] = approval_rulesNEWLINE __props__["approved_patches"] = approved_patchesNEWLINE __props__["approved_patches_compliance_level"] = approved_patches_compliance_levelNEWLINE __props__["description"] = descriptionNEWLINE __props__["global_filters"] = global_filtersNEWLINE __props__["name"] = nameNEWLINE __props__["operating_system"] = operating_systemNEWLINE __props__["rejected_patches"] = rejected_patchesNEWLINE __props__["tags"] = tagsNEWLINE return PatchBaseline(resource_name, opts=opts, __props__=__props__)NEWLINENEWLINE def translate_output_property(self, prop):NEWLINE return tables._CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or propNEWLINENEWLINE def translate_input_property(self, prop):NEWLINE return tables._SNAKE_TO_CAMEL_CASE_TABLE.get(prop) or propNEWLINE
"""CL device arrays."""NEWLINENEWLINEfrom __future__ import divisionNEWLINENEWLINE__copyright__ = "Copyright (C) 2009 Andreas Kloeckner"NEWLINENEWLINE__license__ = """NEWLINEPermission is hereby granted, free of charge, to any personNEWLINEobtaining a copy of this software and associated documentationNEWLINEfiles (the "Software"), to deal in the Software withoutNEWLINErestriction, including without limitation the rights to use,NEWLINEcopy, modify, merge, publish, distribute, sublicense, and/or sellNEWLINEcopies of the Software, and to permit persons to whom theNEWLINESoftware is furnished to do so, subject to the followingNEWLINEconditions:NEWLINENEWLINEThe above copyright notice and this permission notice shall beNEWLINEincluded in all copies or substantial portions of the Software.NEWLINENEWLINETHE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,NEWLINEEXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIESNEWLINEOF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE ANDNEWLINENONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHTNEWLINEHOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,NEWLINEWHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISINGNEWLINEFROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE ORNEWLINEOTHER DEALINGS IN THE SOFTWARE.NEWLINE"""NEWLINENEWLINENEWLINEimport numpy as npNEWLINEimport pyopencl.elementwise as elementwiseNEWLINEimport pyopencl as clNEWLINEfrom pytools import memoize_methodNEWLINEfrom pyopencl.compyte.array import (NEWLINE as_strided as _as_strided,NEWLINE f_contiguous_strides as _f_contiguous_strides,NEWLINE c_contiguous_strides as _c_contiguous_strides,NEWLINE ArrayFlags as _ArrayFlags,NEWLINE get_common_dtype as _get_common_dtype_base)NEWLINEfrom pyopencl.characterize import has_double_supportNEWLINENEWLINENEWLINEdef _get_common_dtype(obj1, obj2, queue):NEWLINE return _get_common_dtype_base(obj1, obj2,NEWLINE has_double_support(queue.device))NEWLINENEWLINE# Work around PyPy not currently supporting the object dtype.NEWLINE# (Yes, it doesn't even support checking!)NEWLINE# (as of May 27, 2014 on PyPy 2.3)NEWLINEtry:NEWLINE np.dtype(object)NEWLINENEWLINE def _dtype_is_object(t):NEWLINE return t == objectNEWLINEexcept:NEWLINE def _dtype_is_object(t):NEWLINE return FalseNEWLINENEWLINENEWLINE# {{{ vector typesNEWLINENEWLINEclass vec:NEWLINE passNEWLINENEWLINENEWLINEdef _create_vector_types():NEWLINE field_names = ["x", "y", "z", "w"]NEWLINENEWLINE from pyopencl.tools import get_or_register_dtypeNEWLINENEWLINE vec.types = {}NEWLINE vec.type_to_scalar_and_count = {}NEWLINENEWLINE counts = [2, 3, 4, 8, 16]NEWLINENEWLINE for base_name, base_type in [NEWLINE ('char', np.int8),NEWLINE ('uchar', np.uint8),NEWLINE ('short', np.int16),NEWLINE ('ushort', np.uint16),NEWLINE ('int', np.int32),NEWLINE ('uint', np.uint32),NEWLINE ('long', np.int64),NEWLINE ('ulong', np.uint64),NEWLINE ('float', np.float32),NEWLINE ('double', np.float64),NEWLINE ]:NEWLINE for count in counts:NEWLINE name = "%s%d" % (base_name, 
count)NEWLINENEWLINE titles = field_names[:count]NEWLINENEWLINE padded_count = countNEWLINE if count == 3:NEWLINE padded_count = 4NEWLINENEWLINE names = ["s%d" % i for i in range(count)]NEWLINE while len(names) < padded_count:NEWLINE names.append("padding%d" % (len(names)-count))NEWLINENEWLINE if len(titles) < len(names):NEWLINE titles.extend((len(names)-len(titles))*[None])NEWLINENEWLINE try:NEWLINE dtype = np.dtype(dict(NEWLINE names=names,NEWLINE formats=[base_type]*padded_count,NEWLINE titles=titles))NEWLINE except NotImplementedError:NEWLINE try:NEWLINE dtype = np.dtype([((n, title), base_type)NEWLINE for (n, title) in zip(names, titles)])NEWLINE except TypeError:NEWLINE dtype = np.dtype([(n, base_type) for (n, title)NEWLINE in zip(names, titles)])NEWLINENEWLINE get_or_register_dtype(name, dtype)NEWLINENEWLINE setattr(vec, name, dtype)NEWLINENEWLINE def create_array(dtype, count, padded_count, *args, **kwargs):NEWLINE if len(args) < count:NEWLINE from warnings import warnNEWLINE warn("default values for make_xxx are deprecated;"NEWLINE " instead specify all parameters or use"NEWLINE " array.vec.zeros_xxx", DeprecationWarning)NEWLINE padded_args = tuple(list(args)+[0]*(padded_count-len(args)))NEWLINE array = eval("array(padded_args, dtype=dtype)",NEWLINE dict(array=np.array, padded_args=padded_args,NEWLINE dtype=dtype))NEWLINE for key, val in kwargs.items():NEWLINE array[key] = valNEWLINE return arrayNEWLINENEWLINE setattr(vec, "make_"+name, staticmethod(eval(NEWLINE "lambda *args, **kwargs: create_array(dtype, %i, %i, "NEWLINE "*args, **kwargs)" % (count, padded_count),NEWLINE dict(create_array=create_array, dtype=dtype))))NEWLINE setattr(vec, "filled_"+name, staticmethod(eval(NEWLINE "lambda val: vec.make_%s(*[val]*%i)" % (name, count))))NEWLINE setattr(vec, "zeros_"+name,NEWLINE staticmethod(eval("lambda: vec.filled_%s(0)" % (name))))NEWLINE setattr(vec, "ones_"+name,NEWLINE staticmethod(eval("lambda: vec.filled_%s(1)" % (name))))NEWLINENEWLINE 
vec.types[np.dtype(base_type), count] = dtypeNEWLINE vec.type_to_scalar_and_count[dtype] = np.dtype(base_type), countNEWLINENEWLINE_create_vector_types()NEWLINENEWLINE# }}}NEWLINENEWLINENEWLINE# {{{ helper functionalityNEWLINENEWLINEdef splay(queue, n, kernel_specific_max_wg_size=None):NEWLINE dev = queue.deviceNEWLINE max_work_items = _builtin_min(128, dev.max_work_group_size)NEWLINENEWLINE if kernel_specific_max_wg_size is not None:NEWLINE from __builtin__ import minNEWLINE max_work_items = min(max_work_items, kernel_specific_max_wg_size)NEWLINENEWLINE min_work_items = _builtin_min(32, max_work_items)NEWLINE max_groups = dev.max_compute_units * 4 * 8NEWLINE # 4 to overfill the deviceNEWLINE # 8 is an Nvidia constant--that's how manyNEWLINE # groups fit onto one compute deviceNEWLINENEWLINE if n < min_work_items:NEWLINE group_count = 1NEWLINE work_items_per_group = min_work_itemsNEWLINE elif n < (max_groups * min_work_items):NEWLINE group_count = (n + min_work_items - 1) // min_work_itemsNEWLINE work_items_per_group = min_work_itemsNEWLINE elif n < (max_groups * max_work_items):NEWLINE group_count = max_groupsNEWLINE grp = (n + min_work_items - 1) // min_work_itemsNEWLINE work_items_per_group = (NEWLINE (grp + max_groups - 1) // max_groups) * min_work_itemsNEWLINE else:NEWLINE group_count = max_groupsNEWLINE work_items_per_group = max_work_itemsNEWLINENEWLINE #print "n:%d gc:%d wipg:%d" % (n, group_count, work_items_per_group)NEWLINE return (group_count*work_items_per_group,), (work_items_per_group,)NEWLINENEWLINENEWLINEdef elwise_kernel_runner(kernel_getter):NEWLINE """Take a kernel getter of the same signature as the kernelNEWLINE and return a function that invokes that kernel.NEWLINENEWLINE Assumes that the zeroth entry in *args* is an :class:`Array`.NEWLINE """NEWLINENEWLINE def kernel_runner(*args, **kwargs):NEWLINE repr_ary = args[0]NEWLINE queue = kwargs.pop("queue", None) or repr_ary.queueNEWLINE wait_for = kwargs.pop("wait_for", None)NEWLINENEWLINE # 
wait_for must be a copy, because we modify it in-place belowNEWLINE if wait_for is None:NEWLINE wait_for = []NEWLINE else:NEWLINE wait_for = list(wait_for)NEWLINENEWLINE knl = kernel_getter(*args, **kwargs)NEWLINENEWLINE gs, ls = repr_ary.get_sizes(queue,NEWLINE knl.get_work_group_info(NEWLINE cl.kernel_work_group_info.WORK_GROUP_SIZE,NEWLINE queue.device))NEWLINENEWLINE assert isinstance(repr_ary, Array)NEWLINENEWLINE actual_args = []NEWLINE for arg in args:NEWLINE if isinstance(arg, Array):NEWLINE if not arg.flags.forc:NEWLINE raise RuntimeError("only contiguous arrays may "NEWLINE "be used as arguments to this operation")NEWLINE actual_args.append(arg.base_data)NEWLINE actual_args.append(arg.offset)NEWLINE wait_for.extend(arg.events)NEWLINE else:NEWLINE actual_args.append(arg)NEWLINE actual_args.append(repr_ary.size)NEWLINENEWLINE return knl(queue, gs, ls, *actual_args, **dict(wait_for=wait_for))NEWLINENEWLINE try:NEWLINE from functools import update_wrapperNEWLINE except ImportError:NEWLINE return kernel_runnerNEWLINE else:NEWLINE return update_wrapper(kernel_runner, kernel_getter)NEWLINENEWLINENEWLINEclass DefaultAllocator(cl.tools.DeferredAllocator):NEWLINE def __init__(self, *args, **kwargs):NEWLINE from warnings import warnNEWLINE warn("pyopencl.array.DefaultAllocator is deprecated. "NEWLINE "It will be continue to exist throughout the 2013.x "NEWLINE "versions of PyOpenCL.",NEWLINE DeprecationWarning, 2)NEWLINE cl.tools.DeferredAllocator.__init__(self, *args, **kwargs)NEWLINENEWLINENEWLINEdef _make_strides(itemsize, shape, order):NEWLINE if order in "fF":NEWLINE return _f_contiguous_strides(itemsize, shape)NEWLINE elif order in "cC":NEWLINE return _c_contiguous_strides(itemsize, shape)NEWLINE else:NEWLINE raise ValueError("invalid order: %s" % order)NEWLINENEWLINE# }}}NEWLINENEWLINENEWLINE# {{{ array classNEWLINENEWLINEclass ArrayHasOffsetError(ValueError):NEWLINE """NEWLINE .. 
versionadded:: 2013.1NEWLINE """NEWLINENEWLINE def __init__(self, val="The operation you are attempting does not yet "NEWLINE "support arrays that start at an offset from the beginning "NEWLINE "of their buffer."):NEWLINE ValueError.__init__(self, val)NEWLINENEWLINENEWLINEclass _copy_queue:NEWLINE passNEWLINENEWLINENEWLINEclass Array(object):NEWLINE """A :class:`numpy.ndarray` work-alike that stores its data and performsNEWLINE its computations on the compute device. *shape* and *dtype* work exactlyNEWLINE as in :mod:`numpy`. Arithmetic methods in :class:`Array` support theNEWLINE broadcasting of scalars. (e.g. `array+5`)NEWLINENEWLINE *cqa* must be a :class:`pyopencl.CommandQueue` or a :class:`pyopencl.Context`.NEWLINENEWLINE If it is a queue, *cqa* specifies the queue in which the array carries outNEWLINE its computations by default. If a default queue (and thereby overloadedNEWLINE operators and many other niceties) are not desired, pass aNEWLINE :class:`Context`.NEWLINENEWLINE *cqa* will at some point be renamed *cq*, so it should be consideredNEWLINE 'positional-only'. Arguments starting from 'order' should be consideredNEWLINE keyword-only.NEWLINENEWLINE *allocator* may be `None` or a callable that, upon being called with anNEWLINE argument of the number of bytes to be allocated, returns anNEWLINE :class:`pyopencl.Buffer` object. (A :class:`pyopencl.tools.MemoryPool`NEWLINE instance is one useful example of an object to pass here.)NEWLINENEWLINE .. versionchanged:: 2011.1NEWLINE Renamed *context* to *cqa*, made it general-purpose.NEWLINENEWLINE All arguments beyond *order* should be considered keyword-only.NEWLINENEWLINE .. attribute :: dataNEWLINENEWLINE The :class:`pyopencl.MemoryObject` instance created for the memory thatNEWLINE backs this :class:`Array`.NEWLINENEWLINE .. versionchanged:: 2013.1NEWLINENEWLINE If a non-zero :attr:`offset` has been specified for this array,NEWLINE this will fail with :exc:`ArrayHasOffsetError`.NEWLINENEWLINE .. 
attribute :: base_dataNEWLINENEWLINE The :class:`pyopencl.MemoryObject` instance created for the memory thatNEWLINE backs this :class:`Array`. Unlike :attr:`data`, the base address ofNEWLINE *base_data* is allowed to be different from the beginning of the array.NEWLINE The actual beginning is the base address of *base_data* plusNEWLINE :attr:`offset` in units of :attr:`dtype`.NEWLINENEWLINE Unlike :attr:`data`, retrieving :attr:`base_data` always succeeds.NEWLINENEWLINE .. versionadded:: 2013.1NEWLINENEWLINE .. attribute :: offsetNEWLINENEWLINE See :attr:`base_data`.NEWLINENEWLINE .. versionadded:: 2013.1NEWLINENEWLINE .. attribute :: shapeNEWLINENEWLINE The tuple of lengths of each dimension in the array.NEWLINENEWLINE .. attribute :: dtypeNEWLINENEWLINE The :class:`numpy.dtype` of the items in the GPU array.NEWLINENEWLINE .. attribute :: sizeNEWLINENEWLINE The number of meaningful entries in the array. Can also be computed byNEWLINE multiplying up the numbers in :attr:`shape`.NEWLINENEWLINE .. attribute :: nbytesNEWLINENEWLINE The size of the entire array in bytes. Computed as :attr:`size` timesNEWLINE ``dtype.itemsize``.NEWLINENEWLINE .. attribute :: stridesNEWLINENEWLINE Tuple of bytes to step in each dimension when traversing an array.NEWLINENEWLINE .. attribute :: flagsNEWLINENEWLINE Return an object with attributes `c_contiguous`, `f_contiguous` andNEWLINE `forc`, which may be used to query contiguity properties in analogy toNEWLINE :attr:`numpy.ndarray.flags`.NEWLINENEWLINE .. rubric:: MethodsNEWLINENEWLINE .. automethod :: with_queueNEWLINENEWLINE .. automethod :: __len__NEWLINE .. automethod :: reshapeNEWLINE .. automethod :: ravelNEWLINE .. automethod :: viewNEWLINE .. automethod :: setNEWLINE .. automethod :: getNEWLINE .. automethod :: copyNEWLINENEWLINE .. automethod :: __str__NEWLINE .. automethod :: __repr__NEWLINENEWLINE .. automethod :: mul_addNEWLINE .. automethod :: __add__NEWLINE .. automethod :: __sub__NEWLINE .. 
automethod :: __iadd__NEWLINE .. automethod :: __isub__NEWLINE .. automethod :: __neg__NEWLINE .. automethod :: __mul__NEWLINE .. automethod :: __div__NEWLINE .. automethod :: __rdiv__NEWLINE .. automethod :: __pow__NEWLINENEWLINE .. automethod :: __abs__NEWLINENEWLINE .. UNDOC reverse()NEWLINENEWLINE .. automethod :: fillNEWLINENEWLINE .. automethod :: astypeNEWLINENEWLINE .. autoattribute :: realNEWLINE .. autoattribute :: imagNEWLINE .. automethod :: conjNEWLINENEWLINE .. automethod :: __getitem__NEWLINE .. automethod :: __setitem__NEWLINENEWLINE .. automethod :: setitemNEWLINENEWLINE .. automethod :: map_to_hostNEWLINENEWLINE .. rubric:: Comparisons, conditionals, any, allNEWLINENEWLINE .. versionadded:: 2013.2NEWLINENEWLINE Boolean arrays are stored as :class:`numpy.int8` because ``bool``NEWLINE has an unspecified size in the OpenCL spec.NEWLINENEWLINE .. automethod :: __nonzero__NEWLINENEWLINE Only works for device scalars. (i.e. "arrays" with ``shape == ()``.)NEWLINENEWLINE .. automethod :: anyNEWLINE .. automethod :: allNEWLINENEWLINE .. automethod :: __eq__NEWLINE .. automethod :: __ne__NEWLINE .. automethod :: __lt__NEWLINE .. automethod :: __le__NEWLINE .. automethod :: __gt__NEWLINE .. automethod :: __ge__NEWLINENEWLINE .. rubric:: Event managementNEWLINENEWLINE If an array is used from within an out-of-order queue, it needs to takeNEWLINE care of its own operation ordering. The facilities in this section makeNEWLINE this possible.NEWLINENEWLINE .. versionadded:: 2014.1.1NEWLINENEWLINE .. attribute:: eventsNEWLINENEWLINE A list of :class:`pyopencl.Event` instances that the current content ofNEWLINE this array depends on. User code may read, but should never modify thisNEWLINE list directly. To update this list, instead use the following methods.NEWLINENEWLINE .. automethod:: add_eventNEWLINE .. 
automethod:: finishNEWLINE """NEWLINENEWLINE __array_priority__ = 100NEWLINENEWLINE def __init__(self, cqa, shape, dtype, order="C", allocator=None,NEWLINE data=None, offset=0, queue=None, strides=None, events=None):NEWLINE # {{{ backward compatibilityNEWLINENEWLINE from warnings import warnNEWLINE if queue is not None:NEWLINE warn("Passing the queue to the array through anything but the "NEWLINE "first argument of the Array constructor is deprecated. "NEWLINE "This will be continue to be accepted throughout the "NEWLINE "2013.[0-6] versions of PyOpenCL.",NEWLINE DeprecationWarning, 2)NEWLINENEWLINE if isinstance(cqa, cl.CommandQueue):NEWLINE if queue is not None:NEWLINE raise TypeError("can't specify queue in 'cqa' and "NEWLINE "'queue' arguments")NEWLINE queue = cqaNEWLINENEWLINE elif isinstance(cqa, cl.Context):NEWLINE context = cqaNEWLINENEWLINE if queue is not None:NEWLINE raise TypeError("may not pass a context and a queue "NEWLINE "(just pass the queue)")NEWLINE if allocator is not None:NEWLINE # "is" would be wrong because two Python objects are allowedNEWLINE # to hold handles to the same context.NEWLINENEWLINE # FIXME It would be nice to check this. But it would requireNEWLINE # changing the allocator interface. Trust the user for now.NEWLINENEWLINE #assert allocator.context == contextNEWLINE passNEWLINENEWLINE else:NEWLINE # cqa is assumed to be an allocatorNEWLINE warn("Passing an allocator for the 'cqa' parameter is deprecated. 
"NEWLINE "This usage will be continue to be accepted throughout "NEWLINE "the 2013.[0-6] versions of PyOpenCL.",NEWLINE DeprecationWarning, stacklevel=2)NEWLINE if allocator is not None:NEWLINE raise TypeError("can't specify allocator in 'cqa' and "NEWLINE "'allocator' arguments")NEWLINENEWLINE allocator = cqaNEWLINENEWLINE # Queue-less arrays do have a purpose in life.NEWLINE # They don't do very much, but at least they don't run kernelsNEWLINE # in random queues.NEWLINE #NEWLINE # See also :meth:`with_queue`.NEWLINENEWLINE # }}}NEWLINENEWLINE # invariant here: allocator, queue setNEWLINENEWLINE # {{{ determine shape and stridesNEWLINE dtype = np.dtype(dtype)NEWLINENEWLINE try:NEWLINE s = 1NEWLINE for dim in shape:NEWLINE s *= dimNEWLINE except TypeError:NEWLINE import sysNEWLINE if sys.version_info >= (3,):NEWLINE admissible_types = (int, np.integer)NEWLINE else:NEWLINE admissible_types = (int, long, np.integer)NEWLINENEWLINE if not isinstance(shape, admissible_types):NEWLINE raise TypeError("shape must either be iterable or "NEWLINE "castable to an integer")NEWLINE s = shapeNEWLINE shape = (shape,)NEWLINENEWLINE if isinstance(s, np.integer):NEWLINE # bombs if s is a Python integerNEWLINE s = np.asscalar(s)NEWLINENEWLINE if strides is None:NEWLINE strides = _make_strides(dtype.itemsize, shape, order)NEWLINENEWLINE else:NEWLINE # FIXME: We should possibly perform some plausibilityNEWLINE # checking on 'strides' here.NEWLINENEWLINE strides = tuple(strides)NEWLINENEWLINE # }}}NEWLINENEWLINE if _dtype_is_object(dtype):NEWLINE raise TypeError("object arrays on the compute device are not allowed")NEWLINENEWLINE self.queue = queueNEWLINE self.shape = shapeNEWLINE self.dtype = dtypeNEWLINE self.strides = stridesNEWLINE if events is None:NEWLINE self.events = []NEWLINE else:NEWLINE self.events = eventsNEWLINENEWLINE self.size = sNEWLINE alloc_nbytes = self.nbytes = self.dtype.itemsize * self.sizeNEWLINENEWLINE self.allocator = allocatorNEWLINENEWLINE if data is 
None:NEWLINE if not alloc_nbytes:NEWLINE # Work around CL not allowing zero-sized buffers.NEWLINE alloc_nbytes = 1NEWLINENEWLINE if allocator is None:NEWLINE # FIXME remove me when queues become requiredNEWLINE if queue is not None:NEWLINE context = queue.contextNEWLINENEWLINE self.base_data = cl.Buffer(NEWLINE context, cl.mem_flags.READ_WRITE, alloc_nbytes)NEWLINE else:NEWLINE self.base_data = self.allocator(alloc_nbytes)NEWLINE else:NEWLINE self.base_data = dataNEWLINENEWLINE self.offset = offsetNEWLINENEWLINE @propertyNEWLINE def context(self):NEWLINE return self.base_data.contextNEWLINENEWLINE @propertyNEWLINE def data(self):NEWLINE if self.offset:NEWLINE raise ArrayHasOffsetError()NEWLINE else:NEWLINE return self.base_dataNEWLINENEWLINE @propertyNEWLINE @memoize_methodNEWLINE def flags(self):NEWLINE return _ArrayFlags(self)NEWLINENEWLINE def _new_with_changes(self, data, offset, shape=None, dtype=None,NEWLINE strides=None, queue=_copy_queue):NEWLINE """NEWLINE :arg data: *None* means allocate a new array.NEWLINE """NEWLINE if shape is None:NEWLINE shape = self.shapeNEWLINE if dtype is None:NEWLINE dtype = self.dtypeNEWLINE if strides is None:NEWLINE strides = self.stridesNEWLINE if queue is _copy_queue:NEWLINE queue = self.queueNEWLINENEWLINE # If we're allocating new data, then there's not likely to beNEWLINE # a data dependency. 
Otherwise, the two arrays should probablyNEWLINE # share the same events list.NEWLINENEWLINE if data is None:NEWLINE events = NoneNEWLINE else:NEWLINE events = self.eventsNEWLINENEWLINE if queue is not None:NEWLINE return Array(queue, shape, dtype, allocator=self.allocator,NEWLINE strides=strides, data=data, offset=offset,NEWLINE events=events)NEWLINE else:NEWLINE return Array(self.context, shape, dtype, queue=queue,NEWLINE strides=strides, data=data, offset=offset,NEWLINE events=events, allocator=self.allocator)NEWLINENEWLINE def with_queue(self, queue):NEWLINE """Return a copy of *self* with the default queue set to *queue*.NEWLINENEWLINE *None* is allowed as a value for *queue*.NEWLINENEWLINE .. versionadded:: 2013.1NEWLINE """NEWLINENEWLINE if queue is not None:NEWLINE assert queue.context == self.contextNEWLINENEWLINE return self._new_with_changes(self.base_data, self.offset,NEWLINE queue=queue)NEWLINENEWLINE #@memoize_method FIXME: reenableNEWLINE def get_sizes(self, queue, kernel_specific_max_wg_size=None):NEWLINE if not self.flags.forc:NEWLINE raise NotImplementedError("cannot operate on non-contiguous array")NEWLINE return splay(queue, self.size,NEWLINE kernel_specific_max_wg_size=kernel_specific_max_wg_size)NEWLINENEWLINE def set(self, ary, queue=None, async=False):NEWLINE """Transfer the contents the :class:`numpy.ndarray` object *ary*NEWLINE onto the device.NEWLINENEWLINE *ary* must have the same dtype and size (not necessarily shape) asNEWLINE *self*.NEWLINE """NEWLINENEWLINE assert ary.size == self.sizeNEWLINE assert ary.dtype == self.dtypeNEWLINENEWLINE if not ary.flags.forc:NEWLINE raise RuntimeError("cannot set from non-contiguous array")NEWLINENEWLINE ary = ary.copy()NEWLINENEWLINE if ary.strides != self.strides:NEWLINE from warnings import warnNEWLINE warn("Setting array from one with different "NEWLINE "strides/storage order. 
This will cease to work "NEWLINE "in 2013.x.",NEWLINE stacklevel=2)NEWLINENEWLINE if self.size:NEWLINE cl.enqueue_copy(queue or self.queue, self.base_data, ary,NEWLINE device_offset=self.offset,NEWLINE is_blocking=not async)NEWLINENEWLINE def get(self, queue=None, ary=None, async=False):NEWLINE """Transfer the contents of *self* into *ary* or a newly allocatedNEWLINE :mod:`numpy.ndarray`. If *ary* is given, it must have the rightNEWLINE size (not necessarily shape) and dtype.NEWLINE """NEWLINENEWLINE if ary is None:NEWLINE ary = np.empty(self.shape, self.dtype)NEWLINENEWLINE ary = _as_strided(ary, strides=self.strides)NEWLINE else:NEWLINE if ary.size != self.size:NEWLINE raise TypeError("'ary' has non-matching size")NEWLINE if ary.dtype != self.dtype:NEWLINE raise TypeError("'ary' has non-matching type")NEWLINENEWLINE assert self.flags.forc, "Array in get() must be contiguous"NEWLINENEWLINE if self.size:NEWLINE cl.enqueue_copy(queue or self.queue, ary, self.base_data,NEWLINE device_offset=self.offset,NEWLINE is_blocking=not async)NEWLINENEWLINE return aryNEWLINENEWLINE def copy(self, queue=None):NEWLINE """.. 
versionadded:: 2013.1"""NEWLINENEWLINE queue = queue or self.queueNEWLINE result = self._new_like_me()NEWLINENEWLINE if self.nbytes:NEWLINE cl.enqueue_copy(queue, result.base_data, self.base_data,NEWLINE src_offset=self.offset, byte_count=self.nbytes)NEWLINENEWLINE return resultNEWLINENEWLINE def __str__(self):NEWLINE return str(self.get())NEWLINENEWLINE def __repr__(self):NEWLINE return repr(self.get())NEWLINENEWLINE def __hash__(self):NEWLINE raise TypeError("pyopencl arrays are not hashable.")NEWLINENEWLINE # {{{ kernel invocation wrappersNEWLINENEWLINE @staticmethodNEWLINE @elwise_kernel_runnerNEWLINE def _axpbyz(out, afac, a, bfac, b, queue=None):NEWLINE """Compute ``out = selffac * self + otherfac*other``,NEWLINE where *other* is an array."""NEWLINE assert out.shape == a.shapeNEWLINE assert out.shape == b.shapeNEWLINENEWLINE return elementwise.get_axpbyz_kernel(NEWLINE out.context, a.dtype, b.dtype, out.dtype)NEWLINENEWLINE @staticmethodNEWLINE @elwise_kernel_runnerNEWLINE def _axpbz(out, a, x, b, queue=None):NEWLINE """Compute ``z = a * x + b``, where *b* is a scalar."""NEWLINE a = np.array(a)NEWLINE b = np.array(b)NEWLINE assert out.shape == x.shapeNEWLINE return elementwise.get_axpbz_kernel(out.context,NEWLINE a.dtype, x.dtype, b.dtype, out.dtype)NEWLINENEWLINE @staticmethodNEWLINE @elwise_kernel_runnerNEWLINE def _elwise_multiply(out, a, b, queue=None):NEWLINE assert out.shape == a.shapeNEWLINE assert out.shape == b.shapeNEWLINE return elementwise.get_multiply_kernel(NEWLINE a.context, a.dtype, b.dtype, out.dtype)NEWLINENEWLINE @staticmethodNEWLINE @elwise_kernel_runnerNEWLINE def _rdiv_scalar(out, ary, other, queue=None):NEWLINE other = np.array(other)NEWLINE assert out.shape == ary.shapeNEWLINE return elementwise.get_rdivide_elwise_kernel(NEWLINE out.context, ary.dtype, other.dtype, out.dtype)NEWLINENEWLINE @staticmethodNEWLINE @elwise_kernel_runnerNEWLINE def _div(out, self, other, queue=None):NEWLINE """Divides an array by another 
array."""NEWLINENEWLINE assert self.shape == other.shapeNEWLINENEWLINE return elementwise.get_divide_kernel(self.context,NEWLINE self.dtype, other.dtype, out.dtype)NEWLINENEWLINE @staticmethodNEWLINE @elwise_kernel_runnerNEWLINE def _fill(result, scalar):NEWLINE return elementwise.get_fill_kernel(result.context, result.dtype)NEWLINENEWLINE @staticmethodNEWLINE @elwise_kernel_runnerNEWLINE def _abs(result, arg):NEWLINE if arg.dtype.kind == "c":NEWLINE from pyopencl.elementwise import complex_dtype_to_nameNEWLINE fname = "%s_abs" % complex_dtype_to_name(arg.dtype)NEWLINE elif arg.dtype.kind == "f":NEWLINE fname = "fabs"NEWLINE elif arg.dtype.kind in ["u", "i"]:NEWLINE fname = "abs"NEWLINE else:NEWLINE raise TypeError("unsupported dtype in _abs()")NEWLINENEWLINE return elementwise.get_unary_func_kernel(NEWLINE arg.context, fname, arg.dtype, out_dtype=result.dtype)NEWLINENEWLINE @staticmethodNEWLINE @elwise_kernel_runnerNEWLINE def _real(result, arg):NEWLINE from pyopencl.elementwise import complex_dtype_to_nameNEWLINE fname = "%s_real" % complex_dtype_to_name(arg.dtype)NEWLINE return elementwise.get_unary_func_kernel(NEWLINE arg.context, fname, arg.dtype, out_dtype=result.dtype)NEWLINENEWLINE @staticmethodNEWLINE @elwise_kernel_runnerNEWLINE def _imag(result, arg):NEWLINE from pyopencl.elementwise import complex_dtype_to_nameNEWLINE fname = "%s_imag" % complex_dtype_to_name(arg.dtype)NEWLINE return elementwise.get_unary_func_kernel(NEWLINE arg.context, fname, arg.dtype, out_dtype=result.dtype)NEWLINENEWLINE @staticmethodNEWLINE @elwise_kernel_runnerNEWLINE def _conj(result, arg):NEWLINE from pyopencl.elementwise import complex_dtype_to_nameNEWLINE fname = "%s_conj" % complex_dtype_to_name(arg.dtype)NEWLINE return elementwise.get_unary_func_kernel(NEWLINE arg.context, fname, arg.dtype, out_dtype=result.dtype)NEWLINENEWLINE @staticmethodNEWLINE @elwise_kernel_runnerNEWLINE def _pow_scalar(result, ary, exponent):NEWLINE exponent = np.array(exponent)NEWLINE return 
elementwise.get_pow_kernel(result.context,NEWLINE ary.dtype, exponent.dtype, result.dtype,NEWLINE is_base_array=True, is_exp_array=False)NEWLINENEWLINE @staticmethodNEWLINE @elwise_kernel_runnerNEWLINE def _rpow_scalar(result, base, exponent):NEWLINE base = np.array(base)NEWLINE return elementwise.get_pow_kernel(result.context,NEWLINE base.dtype, exponent.dtype, result.dtype,NEWLINE is_base_array=False, is_exp_array=True)NEWLINENEWLINE @staticmethodNEWLINE @elwise_kernel_runnerNEWLINE def _pow_array(result, base, exponent):NEWLINE return elementwise.get_pow_kernel(NEWLINE result.context, base.dtype, exponent.dtype, result.dtype,NEWLINE is_base_array=True, is_exp_array=True)NEWLINENEWLINE @staticmethodNEWLINE @elwise_kernel_runnerNEWLINE def _reverse(result, ary):NEWLINE return elementwise.get_reverse_kernel(result.context, ary.dtype)NEWLINENEWLINE @staticmethodNEWLINE @elwise_kernel_runnerNEWLINE def _copy(dest, src):NEWLINE return elementwise.get_copy_kernel(NEWLINE dest.context, dest.dtype, src.dtype)NEWLINENEWLINE def _new_like_me(self, dtype=None, queue=None):NEWLINE strides = NoneNEWLINE if dtype is None:NEWLINE dtype = self.dtypeNEWLINENEWLINE if dtype == self.dtype:NEWLINE strides = self.stridesNEWLINENEWLINE queue = queue or self.queueNEWLINE if queue is not None:NEWLINE return self.__class__(queue, self.shape, dtype,NEWLINE allocator=self.allocator, strides=strides)NEWLINE elif self.allocator is not None:NEWLINE return self.__class__(self.allocator, self.shape, dtype,NEWLINE strides=strides)NEWLINE else:NEWLINE return self.__class__(self.context, self.shape, dtype,NEWLINE strides=strides)NEWLINENEWLINE # }}}NEWLINENEWLINE # {{{ operatorsNEWLINENEWLINE def mul_add(self, selffac, other, otherfac, queue=None):NEWLINE """Return `selffac * self + otherfac*other`.NEWLINE """NEWLINE result = self._new_like_me(NEWLINE _get_common_dtype(self, other, queue or self.queue))NEWLINE result.add_event(NEWLINE self._axpbyz(result, selffac, self, otherfac, other))NEWLINE 
return resultNEWLINENEWLINE def __add__(self, other):NEWLINE """Add an array with an array or an array with a scalar."""NEWLINENEWLINE if isinstance(other, Array):NEWLINE # add another vectorNEWLINE result = self._new_like_me(NEWLINE _get_common_dtype(self, other, self.queue))NEWLINENEWLINE result.add_event(NEWLINE self._axpbyz(result,NEWLINE self.dtype.type(1), self,NEWLINE other.dtype.type(1), other))NEWLINENEWLINE return resultNEWLINE else:NEWLINE # add a scalarNEWLINE if other == 0:NEWLINE return self.copy()NEWLINE else:NEWLINE common_dtype = _get_common_dtype(self, other, self.queue)NEWLINE result = self._new_like_me(common_dtype)NEWLINE result.add_event(NEWLINE self._axpbz(result, self.dtype.type(1),NEWLINE self, common_dtype.type(other)))NEWLINE return resultNEWLINENEWLINE __radd__ = __add__NEWLINENEWLINE def __sub__(self, other):NEWLINE """Substract an array from an array or a scalar from an array."""NEWLINENEWLINE if isinstance(other, Array):NEWLINE result = self._new_like_me(NEWLINE _get_common_dtype(self, other, self.queue))NEWLINE result.add_event(NEWLINE self._axpbyz(result,NEWLINE self.dtype.type(1), self,NEWLINE other.dtype.type(-1), other))NEWLINENEWLINE return resultNEWLINE else:NEWLINE # subtract a scalarNEWLINE if other == 0:NEWLINE return self.copy()NEWLINE else:NEWLINE result = self._new_like_me(NEWLINE _get_common_dtype(self, other, self.queue))NEWLINE result.add_event(NEWLINE self._axpbz(result, self.dtype.type(1), self, -other))NEWLINE return resultNEWLINENEWLINE def __rsub__(self, other):NEWLINE """Substracts an array by a scalar or an array::NEWLINENEWLINE x = n - selfNEWLINE """NEWLINE common_dtype = _get_common_dtype(self, other, self.queue)NEWLINE # other must be a scalarNEWLINE result = self._new_like_me(common_dtype)NEWLINE result.add_event(NEWLINE self._axpbz(result, self.dtype.type(-1), self,NEWLINE common_dtype.type(other)))NEWLINE return resultNEWLINENEWLINE def __iadd__(self, other):NEWLINE if isinstance(other, Array):NEWLINE 
self.add_event(NEWLINE self._axpbyz(self,NEWLINE self.dtype.type(1), self,NEWLINE other.dtype.type(1), other))NEWLINE return selfNEWLINE else:NEWLINE self.add_event(NEWLINE self._axpbz(self, self.dtype.type(1), self, other))NEWLINE return selfNEWLINENEWLINE def __isub__(self, other):NEWLINE if isinstance(other, Array):NEWLINE self.add_event(NEWLINE self._axpbyz(self, self.dtype.type(1), self,NEWLINE other.dtype.type(-1), other))NEWLINE return selfNEWLINE else:NEWLINE self._axpbz(self, self.dtype.type(1), self, -other)NEWLINE return selfNEWLINENEWLINE def __neg__(self):NEWLINE result = self._new_like_me()NEWLINE result.add_event(self._axpbz(result, -1, self, 0))NEWLINE return resultNEWLINENEWLINE def __mul__(self, other):NEWLINE if isinstance(other, Array):NEWLINE result = self._new_like_me(NEWLINE _get_common_dtype(self, other, self.queue))NEWLINE result.add_event(NEWLINE self._elwise_multiply(result, self, other))NEWLINE return resultNEWLINE else:NEWLINE common_dtype = _get_common_dtype(self, other, self.queue)NEWLINE result = self._new_like_me(common_dtype)NEWLINE result.add_event(NEWLINE self._axpbz(result,NEWLINE common_dtype.type(other), self, self.dtype.type(0)))NEWLINE return resultNEWLINENEWLINE def __rmul__(self, scalar):NEWLINE common_dtype = _get_common_dtype(self, scalar, self.queue)NEWLINE result = self._new_like_me(common_dtype)NEWLINE result.add_event(NEWLINE self._axpbz(result,NEWLINE common_dtype.type(scalar), self, self.dtype.type(0)))NEWLINE return resultNEWLINENEWLINE def __imul__(self, other):NEWLINE if isinstance(other, Array):NEWLINE self.add_event(NEWLINE self._elwise_multiply(self, self, other))NEWLINE else:NEWLINE # scalarNEWLINE self.add_event(NEWLINE self._axpbz(self, other, self, self.dtype.type(0)))NEWLINENEWLINE return selfNEWLINENEWLINE def __div__(self, other):NEWLINE """Divides an array by an array or a scalar, i.e. 
``self / other``.NEWLINE """NEWLINE if isinstance(other, Array):NEWLINE result = self._new_like_me(NEWLINE _get_common_dtype(self, other, self.queue))NEWLINE result.add_event(self._div(result, self, other))NEWLINE else:NEWLINE if other == 1:NEWLINE return self.copy()NEWLINE else:NEWLINE # create a new array for the resultNEWLINE common_dtype = _get_common_dtype(self, other, self.queue)NEWLINE result = self._new_like_me(common_dtype)NEWLINE result.add_event(NEWLINE self._axpbz(result,NEWLINE common_dtype.type(1/other), self, self.dtype.type(0)))NEWLINENEWLINE return resultNEWLINENEWLINE __truediv__ = __div__NEWLINENEWLINE def __rdiv__(self, other):NEWLINE """Divides an array by a scalar or an array, i.e. ``other / self``.NEWLINE """NEWLINENEWLINE if isinstance(other, Array):NEWLINE result = self._new_like_me(NEWLINE _get_common_dtype(self, other, self.queue))NEWLINE result.add_event(other._div(result, self))NEWLINE else:NEWLINE # create a new array for the resultNEWLINE common_dtype = _get_common_dtype(self, other, self.queue)NEWLINE result = self._new_like_me(common_dtype)NEWLINE result.add_event(NEWLINE self._rdiv_scalar(result, self, common_dtype.type(other)))NEWLINENEWLINE return resultNEWLINENEWLINE __rtruediv__ = __rdiv__NEWLINENEWLINE def fill(self, value, queue=None, wait_for=None):NEWLINE """Fill the array with *scalar*.NEWLINENEWLINE :returns: *self*.NEWLINE """NEWLINE self.add_event(NEWLINE self._fill(self, value, queue=queue, wait_for=wait_for))NEWLINENEWLINE return selfNEWLINENEWLINE def __len__(self):NEWLINE """Returns the size of the leading dimension of *self*."""NEWLINE if len(self.shape):NEWLINE return self.shape[0]NEWLINE else:NEWLINE return TypeError("scalar has no len()")NEWLINENEWLINE def __abs__(self):NEWLINE """Return a `Array` of the absolute values of the elementsNEWLINE of *self*.NEWLINE """NEWLINENEWLINE result = self._new_like_me(self.dtype.type(0).real.dtype)NEWLINE result.add_event(self._abs(result, self))NEWLINE return 
resultNEWLINENEWLINE def __pow__(self, other):NEWLINE """Exponentiation by a scalar or elementwise by anotherNEWLINE :class:`Array`.NEWLINE """NEWLINENEWLINE if isinstance(other, Array):NEWLINE assert self.shape == other.shapeNEWLINENEWLINE result = self._new_like_me(NEWLINE _get_common_dtype(self, other, self.queue))NEWLINE result.add_event(NEWLINE self._pow_array(result, self, other))NEWLINE else:NEWLINE result = self._new_like_me(NEWLINE _get_common_dtype(self, other, self.queue))NEWLINE result.add_event(self._pow_scalar(result, self, other))NEWLINENEWLINE return resultNEWLINENEWLINE def __rpow__(self, other):NEWLINE # other must be a scalarNEWLINE common_dtype = _get_common_dtype(self, other, self.queue)NEWLINE result = self._new_like_me(common_dtype)NEWLINE result.add_event(NEWLINE self._rpow_scalar(result, common_dtype.type(other), self))NEWLINE return resultNEWLINENEWLINE # }}}NEWLINENEWLINE def reverse(self, queue=None):NEWLINE """Return this array in reversed order. The array is treatedNEWLINE as one-dimensional.NEWLINE """NEWLINENEWLINE result = self._new_like_me()NEWLINE result.add_event(NEWLINE self._reverse(result, self))NEWLINE return resultNEWLINENEWLINE def astype(self, dtype, queue=None):NEWLINE """Return a copy of *self*, cast to *dtype*."""NEWLINE if dtype == self.dtype:NEWLINE return self.copy()NEWLINENEWLINE result = self._new_like_me(dtype=dtype)NEWLINE result.add_event(self._copy(result, self, queue=queue))NEWLINE return resultNEWLINENEWLINE # {{{ rich comparisons, any, allNEWLINENEWLINE def __nonzero__(self):NEWLINE if self.shape == ():NEWLINE return bool(self.get())NEWLINE else:NEWLINE raise ValueError("The truth value of an array with "NEWLINE "more than one element is ambiguous. 
Use a.any() or a.all()")NEWLINENEWLINE def any(self, queue=None, wait_for=None):NEWLINE from pyopencl.reduction import get_any_kernelNEWLINE krnl = get_any_kernel(self.context, self.dtype)NEWLINE return krnl(self, queue=queue, wait_for=wait_for)NEWLINENEWLINE def all(self, queue=None, wait_for=None):NEWLINE from pyopencl.reduction import get_all_kernelNEWLINE krnl = get_all_kernel(self.context, self.dtype)NEWLINE return krnl(self, queue=queue, wait_for=wait_for)NEWLINENEWLINE @staticmethodNEWLINE @elwise_kernel_runnerNEWLINE def _scalar_comparison(out, a, b, queue=None, op=None):NEWLINE return elementwise.get_array_scalar_comparison_kernel(NEWLINE out.context, op, a.dtype)NEWLINENEWLINE @staticmethodNEWLINE @elwise_kernel_runnerNEWLINE def _array_comparison(out, a, b, queue=None, op=None):NEWLINE if a.shape != b.shape:NEWLINE raise ValueError("shapes of comparison arguments do not match")NEWLINE return elementwise.get_array_comparison_kernel(NEWLINE out.context, op, a.dtype, b.dtype)NEWLINENEWLINE def __eq__(self, other):NEWLINE if isinstance(other, Array):NEWLINE result = self._new_like_me(np.int8)NEWLINE result.add_event(NEWLINE self._array_comparison(result, self, other, op="=="))NEWLINE return resultNEWLINE else:NEWLINE result = self._new_like_me(np.int8)NEWLINE result.add_event(NEWLINE self._scalar_comparison(result, self, other, op="=="))NEWLINE return resultNEWLINENEWLINE def __ne__(self, other):NEWLINE if isinstance(other, Array):NEWLINE result = self._new_like_me(np.int8)NEWLINE result.add_event(NEWLINE self._array_comparison(result, self, other, op="!="))NEWLINE return resultNEWLINE else:NEWLINE result = self._new_like_me(np.int8)NEWLINE result.add_event(NEWLINE self._scalar_comparison(result, self, other, op="!="))NEWLINE return resultNEWLINENEWLINE def __le__(self, other):NEWLINE if isinstance(other, Array):NEWLINE result = self._new_like_me(np.int8)NEWLINE result.add_event(NEWLINE self._array_comparison(result, self, other, op="<="))NEWLINE return 
resultNEWLINE else:NEWLINE result = self._new_like_me(np.int8)NEWLINE self._scalar_comparison(result, self, other, op="<=")NEWLINE return resultNEWLINENEWLINE def __ge__(self, other):NEWLINE if isinstance(other, Array):NEWLINE result = self._new_like_me(np.int8)NEWLINE result.add_event(NEWLINE self._array_comparison(result, self, other, op=">="))NEWLINE return resultNEWLINE else:NEWLINE result = self._new_like_me(np.int8)NEWLINE result.add_event(NEWLINE self._scalar_comparison(result, self, other, op=">="))NEWLINE return resultNEWLINENEWLINE def __lt__(self, other):NEWLINE if isinstance(other, Array):NEWLINE result = self._new_like_me(np.int8)NEWLINE result.add_event(NEWLINE self._array_comparison(result, self, other, op="<"))NEWLINE return resultNEWLINE else:NEWLINE result = self._new_like_me(np.int8)NEWLINE result.add_event(NEWLINE self._scalar_comparison(result, self, other, op="<"))NEWLINE return resultNEWLINENEWLINE def __gt__(self, other):NEWLINE if isinstance(other, Array):NEWLINE result = self._new_like_me(np.int8)NEWLINE result.add_event(NEWLINE self._array_comparison(result, self, other, op=">"))NEWLINE return resultNEWLINE else:NEWLINE result = self._new_like_me(np.int8)NEWLINE result.add_event(NEWLINE self._scalar_comparison(result, self, other, op=">"))NEWLINE return resultNEWLINENEWLINE # }}}NEWLINENEWLINE # {{{ complex-valued businessNEWLINENEWLINE def real(self):NEWLINE if self.dtype.kind == "c":NEWLINE result = self._new_like_me(self.dtype.type(0).real.dtype)NEWLINE result.add_event(NEWLINE self._real(result, self))NEWLINE return resultNEWLINE else:NEWLINE return selfNEWLINE real = property(real, doc=".. versionadded:: 2012.1")NEWLINENEWLINE def imag(self):NEWLINE if self.dtype.kind == "c":NEWLINE result = self._new_like_me(self.dtype.type(0).real.dtype)NEWLINE result.add_event(NEWLINE self._imag(result, self))NEWLINE return resultNEWLINE else:NEWLINE return zeros_like(self)NEWLINE imag = property(imag, doc=".. 
versionadded:: 2012.1")

    def conj(self):
        """.. versionadded:: 2012.1"""
        # complex conjugate; non-complex arrays are returned unchanged (no copy)
        if self.dtype.kind == "c":
            result = self._new_like_me()
            result.add_event(self._conj(result, self))
            return result
        else:
            return self

    # }}}

    # {{{ event management

    def add_event(self, evt):
        """Add *evt* to :attr:`events`. If :attr:`events` is too long, this method
        may implicitly wait for a subset of :attr:`events` and clear them from the
        list.
        """
        # Throttle: allow up to 3*n_wait pending events; past that, block on
        # the oldest n_wait so the list (and the resources the events keep
        # alive) stays bounded.
        n_wait = 4

        self.events.append(evt)

        if len(self.events) > 3*n_wait:
            wait_events = self.events[:n_wait]
            cl.wait_for_events(wait_events)
            del self.events[:n_wait]

    def finish(self):
        """Wait for the entire contents of :attr:`events`, clear it."""

        if self.events:
            cl.wait_for_events(self.events)
            del self.events[:]

    # }}}

    # {{{ views

    def reshape(self, *shape, **kwargs):
        """Returns an array containing the same data with a new shape."""

        order = kwargs.pop("order", "C")
        if kwargs:
            raise TypeError("unexpected keyword arguments: %s"
                % kwargs.keys())

        # TODO: add more error-checking, perhaps
        # accept both reshape((a, b)) and reshape(a, b)
        if isinstance(shape[0], tuple) or isinstance(shape[0], list):
            shape = tuple(shape[0])

        if shape == self.shape:
            return self

        # NOTE(review): 'reduce' is a builtin only on Py2; on Py3 this needs
        # 'from functools import reduce' at the top of the file -- confirm
        size = reduce(lambda x, y: x * y, shape, 1)
        if size != self.size:
            raise ValueError("total size of new array must be unchanged")

        return self._new_with_changes(
            data=self.base_data, offset=self.offset, shape=shape,
            strides=_make_strides(self.dtype.itemsize, shape, order))

    def ravel(self):
        """Returns flattened array containing the same data."""
        return self.reshape(self.size)

    def view(self, dtype=None):
"""Returns view of array with the same data. If *dtype* is differentNEWLINE from current dtype, the actual bytes of memory will be reinterpreted.NEWLINE """NEWLINENEWLINE if dtype is None:NEWLINE dtype = self.dtypeNEWLINENEWLINE old_itemsize = self.dtype.itemsizeNEWLINE itemsize = np.dtype(dtype).itemsizeNEWLINENEWLINE from pytools import argmin2NEWLINE min_stride_axis = argmin2(NEWLINE (axis, abs(stride))NEWLINE for axis, stride in enumerate(self.strides))NEWLINENEWLINE if self.shape[min_stride_axis] * old_itemsize % itemsize != 0:NEWLINE raise ValueError("new type not compatible with array")NEWLINENEWLINE new_shape = (NEWLINE self.shape[:min_stride_axis]NEWLINE + (self.shape[min_stride_axis] * old_itemsize // itemsize,)NEWLINE + self.shape[min_stride_axis+1:])NEWLINE new_strides = (NEWLINE self.strides[:min_stride_axis]NEWLINE + (self.strides[min_stride_axis] * itemsize // old_itemsize,)NEWLINE + self.strides[min_stride_axis+1:])NEWLINENEWLINE return self._new_with_changes(NEWLINE self.base_data, self.offset,NEWLINE shape=new_shape, dtype=dtype,NEWLINE strides=new_strides)NEWLINENEWLINE # }}}NEWLINENEWLINE def map_to_host(self, queue=None, flags=None, is_blocking=True, wait_for=None):NEWLINE """If *is_blocking*, return a :class:`numpy.ndarray` corresponding to theNEWLINE same memory as *self*.NEWLINENEWLINE If *is_blocking* is not true, return a tuple ``(ary, evt)``, whereNEWLINE *ary* is the above-mentioned array.NEWLINENEWLINE The host array is obtained using :func:`pyopencl.enqueue_map_buffer`.NEWLINE See there for further details.NEWLINENEWLINE :arg flags: A combination of :class:`pyopencl.map_flags`.NEWLINE Defaults to read-write.NEWLINENEWLINE .. 
versionadded :: 2013.2NEWLINE """NEWLINENEWLINE if flags is None:NEWLINE flags = cl.map_flags.READ | cl.map_flags.WRITENEWLINENEWLINE ary, evt = cl.enqueue_map_buffer(NEWLINE queue or self.queue, self.base_data, flags, self.offset,NEWLINE self.shape, self.dtype, strides=self.strides, wait_for=wait_for,NEWLINE is_blocking=is_blocking)NEWLINENEWLINE if is_blocking:NEWLINE return aryNEWLINE else:NEWLINE return ary, evtNEWLINENEWLINE # {{{ getitem/setitemNEWLINENEWLINE def __getitem__(self, index):NEWLINE """NEWLINE .. versionadded:: 2013.1NEWLINE """NEWLINENEWLINE if isinstance(index, Array):NEWLINE if index.dtype.kind != "i":NEWLINE raise TypeError(NEWLINE "fancy indexing is only allowed with integers")NEWLINE if len(index.shape) != 1:NEWLINE raise NotImplementedError(NEWLINE "multidimensional fancy indexing is not supported")NEWLINE if len(self.shape) != 1:NEWLINE raise NotImplementedError(NEWLINE "fancy indexing into a multi-d array is not supported")NEWLINENEWLINE return take(self, index)NEWLINENEWLINE if not isinstance(index, tuple):NEWLINE index = (index,)NEWLINENEWLINE new_shape = []NEWLINE new_offset = self.offsetNEWLINE new_strides = []NEWLINENEWLINE seen_ellipsis = FalseNEWLINENEWLINE index_axis = 0NEWLINE array_axis = 0NEWLINE while index_axis < len(index):NEWLINE index_entry = index[index_axis]NEWLINENEWLINE if array_axis > len(self.shape):NEWLINE raise IndexError("too many axes in index")NEWLINENEWLINE if isinstance(index_entry, slice):NEWLINE start, stop, idx_stride = index_entry.indices(NEWLINE self.shape[array_axis])NEWLINENEWLINE array_stride = self.strides[array_axis]NEWLINENEWLINE new_shape.append((stop-start)//idx_stride)NEWLINE new_strides.append(idx_stride*array_stride)NEWLINE new_offset += array_stride*startNEWLINENEWLINE index_axis += 1NEWLINE array_axis += 1NEWLINENEWLINE elif isinstance(index_entry, (int, np.integer)):NEWLINE array_shape = self.shape[array_axis]NEWLINE if index_entry < 0:NEWLINE index_entry += array_shapeNEWLINENEWLINE if not 
(0 <= index_entry < array_shape):NEWLINE raise IndexError(NEWLINE "subindex in axis %d out of range" % index_axis)NEWLINENEWLINE new_offset += self.strides[array_axis]*index_entryNEWLINENEWLINE index_axis += 1NEWLINE array_axis += 1NEWLINENEWLINE elif index_entry is Ellipsis:NEWLINE index_axis += 1NEWLINENEWLINE remaining_index_count = len(index) - index_axisNEWLINE new_array_axis = len(self.shape) - remaining_index_countNEWLINE if new_array_axis < array_axis:NEWLINE raise IndexError("invalid use of ellipsis in index")NEWLINE while array_axis < new_array_axis:NEWLINE new_shape.append(self.shape[array_axis])NEWLINE new_strides.append(self.strides[array_axis])NEWLINE array_axis += 1NEWLINENEWLINE if seen_ellipsis:NEWLINE raise IndexError(NEWLINE "more than one ellipsis not allowed in index")NEWLINE seen_ellipsis = TrueNEWLINENEWLINE else:NEWLINE raise IndexError("invalid subindex in axis %d" % index_axis)NEWLINENEWLINE while array_axis < len(self.shape):NEWLINE new_shape.append(self.shape[array_axis])NEWLINE new_strides.append(self.strides[array_axis])NEWLINENEWLINE array_axis += 1NEWLINENEWLINE return self._new_with_changes(NEWLINE self.base_data, offset=new_offset,NEWLINE shape=tuple(new_shape),NEWLINE strides=tuple(new_strides))NEWLINENEWLINE def setitem(self, subscript, value, queue=None, wait_for=None):NEWLINE """Like :meth:`__setitem__`, but with the ability to specifyNEWLINE a *queue* and *wait_for*.NEWLINENEWLINE .. versionadded:: 2013.1NEWLINENEWLINE .. 
versionchanged:: 2013.2NEWLINENEWLINE Added *wait_for*.NEWLINE """NEWLINENEWLINE if isinstance(subscript, Array):NEWLINE if subscript.dtype.kind != "i":NEWLINE raise TypeError(NEWLINE "fancy indexing is only allowed with integers")NEWLINE if len(subscript.shape) != 1:NEWLINE raise NotImplementedError(NEWLINE "multidimensional fancy indexing is not supported")NEWLINE if len(self.shape) != 1:NEWLINE raise NotImplementedError(NEWLINE "fancy indexing into a multi-d array is supported")NEWLINENEWLINE multi_put([value], subscript, out=[self], queue=self.queue,NEWLINE wait_for=wait_for)NEWLINE returnNEWLINENEWLINE queue = queue or self.queue or value.queueNEWLINENEWLINE subarray = self[subscript]NEWLINENEWLINE if isinstance(value, np.ndarray):NEWLINE if subarray.shape == value.shape and subarray.strides == value.strides:NEWLINE self.add_event(NEWLINE cl.enqueue_copy(queue, subarray.base_data,NEWLINE value, device_offset=subarray.offset, wait_for=wait_for))NEWLINE returnNEWLINE else:NEWLINE value = to_device(queue, value, self.allocator)NEWLINENEWLINE if isinstance(value, Array):NEWLINE if len(subarray.shape) != len(value.shape):NEWLINE raise NotImplementedError("broadcasting is not "NEWLINE "supported in __setitem__")NEWLINE if subarray.shape != value.shape:NEWLINE raise ValueError("cannot assign between arrays of "NEWLINE "differing shapes")NEWLINE if subarray.strides != value.strides:NEWLINE raise ValueError("cannot assign between arrays of "NEWLINE "differing strides")NEWLINENEWLINE self.add_event(NEWLINE self._copy(subarray, value, queue=queue, wait_for=wait_for))NEWLINENEWLINE else:NEWLINE # Let's assume it's a scalarNEWLINE subarray.fill(value, queue=queue, wait_for=wait_for)NEWLINENEWLINE def __setitem__(self, subscript, value):NEWLINE """Set the slice of *self* identified *subscript* to *value*.NEWLINENEWLINE *value* is allowed to be:NEWLINENEWLINE * A :class:`Array` of the same :attr:`shape` and (for now) :attr:`strides`,NEWLINE but with potentially different 
:attr:`dtype`.NEWLINE * A :class:`numpy.ndarray` of the same :attr:`shape` and (for now)NEWLINE :attr:`strides`, but with potentially different :attr:`dtype`.NEWLINE * A scalar.NEWLINENEWLINE Non-scalar broadcasting is not currently supported.NEWLINENEWLINE .. versionadded:: 2013.1NEWLINE """NEWLINE self.setitem(subscript, value)NEWLINENEWLINE # }}}NEWLINENEWLINE# }}}NEWLINENEWLINENEWLINEdef as_strided(ary, shape=None, strides=None):NEWLINE """Make an :class:`Array` from the given array with the givenNEWLINE shape and strides.NEWLINE """NEWLINENEWLINE # undocumented for the momentNEWLINENEWLINE shape = shape or ary.shapeNEWLINE strides = strides or ary.stridesNEWLINENEWLINE return Array(ary.queue, shape, ary.dtype, allocator=ary.allocator,NEWLINE data=ary.data, strides=strides)NEWLINENEWLINE# }}}NEWLINENEWLINENEWLINE# {{{ creation helpersNEWLINENEWLINEdef to_device(queue, ary, allocator=None, async=False):NEWLINE """Return a :class:`Array` that is an exact copy of theNEWLINE :class:`numpy.ndarray` instance *ary*.NEWLINENEWLINE See :class:`Array` for the meaning of *allocator*.NEWLINENEWLINE .. versionchanged:: 2011.1NEWLINE *context* argument was deprecated.NEWLINE """NEWLINENEWLINE if _dtype_is_object(ary.dtype):NEWLINE raise RuntimeError("to_device does not work on object arrays.")NEWLINENEWLINE result = Array(queue, ary.shape, ary.dtype,NEWLINE allocator=allocator, strides=ary.strides)NEWLINE result.set(ary, async=async)NEWLINE return resultNEWLINENEWLINENEWLINEempty = ArrayNEWLINENEWLINENEWLINEdef zeros(queue, shape, dtype, order="C", allocator=None):NEWLINE """Same as :func:`empty`, but the :class:`Array` is zero-initialized beforeNEWLINE being returned.NEWLINENEWLINE .. 
versionchanged:: 2011.1NEWLINE *context* argument was deprecated.NEWLINE """NEWLINENEWLINE result = Array(queue, shape, dtype,NEWLINE order=order, allocator=allocator)NEWLINE zero = np.zeros((), dtype)NEWLINE result.fill(zero)NEWLINE return resultNEWLINENEWLINENEWLINEdef empty_like(ary):NEWLINE """Make a new, uninitialized :class:`Array` having the same propertiesNEWLINE as *other_ary*.NEWLINE """NEWLINENEWLINE return ary._new_with_changes(data=None, offset=0)NEWLINENEWLINENEWLINEdef zeros_like(ary):NEWLINE """Make a new, zero-initialized :class:`Array` having the same propertiesNEWLINE as *other_ary*.NEWLINE """NEWLINENEWLINE result = empty_like(ary)NEWLINE zero = np.zeros((), ary.dtype)NEWLINE result.fill(zero)NEWLINE return resultNEWLINENEWLINENEWLINE@elwise_kernel_runnerNEWLINEdef _arange_knl(result, start, step):NEWLINE return elementwise.get_arange_kernel(NEWLINE result.context, result.dtype)NEWLINENEWLINENEWLINEdef arange(queue, *args, **kwargs):NEWLINE """Create a :class:`Array` filled with numbers spaced `step` apart,NEWLINE starting from `start` and ending at `stop`.NEWLINENEWLINE For floating point arguments, the length of the result isNEWLINE `ceil((stop - start)/step)`. This rule may result in the lastNEWLINE element of the result being greater than `stop`.NEWLINENEWLINE *dtype*, if not specified, is taken as the largest common typeNEWLINE of *start*, *stop* and *step*.NEWLINENEWLINE .. versionchanged:: 2011.1NEWLINE *context* argument was deprecated.NEWLINENEWLINE .. versionchanged:: 2011.2NEWLINE *allocator* keyword argument was added.NEWLINE """NEWLINENEWLINE # argument processing -----------------------------------------------------NEWLINENEWLINE # Yuck. Thanks, numpy developers. 
;)NEWLINE from pytools import RecordNEWLINENEWLINE class Info(Record):NEWLINE passNEWLINENEWLINE explicit_dtype = FalseNEWLINENEWLINE inf = Info()NEWLINE inf.start = NoneNEWLINE inf.stop = NoneNEWLINE inf.step = NoneNEWLINE inf.dtype = NoneNEWLINE inf.allocator = NoneNEWLINE inf.wait_for = []NEWLINENEWLINE if isinstance(args[-1], np.dtype):NEWLINE inf.dtype = args[-1]NEWLINE args = args[:-1]NEWLINE explicit_dtype = TrueNEWLINENEWLINE argc = len(args)NEWLINE if argc == 0:NEWLINE raise ValueError("stop argument required")NEWLINE elif argc == 1:NEWLINE inf.stop = args[0]NEWLINE elif argc == 2:NEWLINE inf.start = args[0]NEWLINE inf.stop = args[1]NEWLINE elif argc == 3:NEWLINE inf.start = args[0]NEWLINE inf.stop = args[1]NEWLINE inf.step = args[2]NEWLINE else:NEWLINE raise ValueError("too many arguments")NEWLINENEWLINE admissible_names = ["start", "stop", "step", "dtype", "allocator"]NEWLINE for k, v in kwargs.iteritems():NEWLINE if k in admissible_names:NEWLINE if getattr(inf, k) is None:NEWLINE setattr(inf, k, v)NEWLINE if k == "dtype":NEWLINE explicit_dtype = TrueNEWLINE else:NEWLINE raise ValueError(NEWLINE "may not specify '%s' by position and keyword" % k)NEWLINE else:NEWLINE raise ValueError("unexpected keyword argument '%s'" % k)NEWLINENEWLINE if inf.start is None:NEWLINE inf.start = 0NEWLINE if inf.step is None:NEWLINE inf.step = 1NEWLINE if inf.dtype is None:NEWLINE inf.dtype = np.array([inf.start, inf.stop, inf.step]).dtypeNEWLINENEWLINE # actual functionality ----------------------------------------------------NEWLINE dtype = np.dtype(inf.dtype)NEWLINE start = dtype.type(inf.start)NEWLINE step = dtype.type(inf.step)NEWLINE stop = dtype.type(inf.stop)NEWLINE wait_for = inf.wait_forNEWLINENEWLINE if not explicit_dtype:NEWLINE raise TypeError("arange requires a dtype argument")NEWLINENEWLINE from math import ceilNEWLINE size = int(ceil((stop-start)/step))NEWLINENEWLINE result = Array(queue, (size,), dtype, allocator=inf.allocator)NEWLINE 
result.add_event(NEWLINE _arange_knl(result, start, step, queue=queue, wait_for=wait_for))NEWLINE return resultNEWLINENEWLINE# }}}NEWLINENEWLINENEWLINE# {{{ take/put/concatenate/diffNEWLINENEWLINE@elwise_kernel_runnerNEWLINEdef _take(result, ary, indices):NEWLINE return elementwise.get_take_kernel(NEWLINE result.context, result.dtype, indices.dtype)NEWLINENEWLINENEWLINEdef take(a, indices, out=None, queue=None, wait_for=None):NEWLINE """Return the :class:`Array` ``[a[indices[0]], ..., a[indices[n]]]``.NEWLINE For the moment, *a* must be a type that can be bound to a texture.NEWLINE """NEWLINENEWLINE queue = queue or a.queueNEWLINE if out is None:NEWLINE out = Array(queue, indices.shape, a.dtype, allocator=a.allocator)NEWLINENEWLINE assert len(indices.shape) == 1NEWLINE out.add_event(NEWLINE _take(out, a, indices, queue=queue, wait_for=wait_for))NEWLINE return outNEWLINENEWLINENEWLINEdef multi_take(arrays, indices, out=None, queue=None):NEWLINE if not len(arrays):NEWLINE return []NEWLINENEWLINE assert len(indices.shape) == 1NEWLINENEWLINE from pytools import single_valuedNEWLINE a_dtype = single_valued(a.dtype for a in arrays)NEWLINE a_allocator = arrays[0].dtypeNEWLINE context = indices.contextNEWLINE queue = queue or indices.queueNEWLINENEWLINE vec_count = len(arrays)NEWLINENEWLINE if out is None:NEWLINE out = [Array(context, queue, indices.shape, a_dtype,NEWLINE allocator=a_allocator)NEWLINE for i in range(vec_count)]NEWLINE else:NEWLINE if len(out) != len(arrays):NEWLINE raise ValueError("out and arrays must have the same length")NEWLINENEWLINE chunk_size = _builtin_min(vec_count, 10)NEWLINENEWLINE def make_func_for_chunk_size(chunk_size):NEWLINE knl = elementwise.get_take_kernel(NEWLINE indices.context, a_dtype, indices.dtype,NEWLINE vec_count=chunk_size)NEWLINE knl.set_block_shape(*indices._block)NEWLINE return knlNEWLINENEWLINE knl = make_func_for_chunk_size(chunk_size)NEWLINENEWLINE for start_i in range(0, len(arrays), chunk_size):NEWLINE chunk_slice = 
slice(start_i, start_i+chunk_size)NEWLINENEWLINE if start_i + chunk_size > vec_count:NEWLINE knl = make_func_for_chunk_size(vec_count-start_i)NEWLINENEWLINE gs, ls = indices.get_sizes(queue,NEWLINE knl.get_work_group_info(NEWLINE cl.kernel_work_group_info.WORK_GROUP_SIZE,NEWLINE queue.device))NEWLINENEWLINE knl(queue, gs, ls,NEWLINE indices.data,NEWLINE *([o.data for o in out[chunk_slice]]NEWLINE + [i.data for i in arrays[chunk_slice]]NEWLINE + [indices.size]))NEWLINENEWLINE return outNEWLINENEWLINENEWLINEdef multi_take_put(arrays, dest_indices, src_indices, dest_shape=None,NEWLINE out=None, queue=None, src_offsets=None):NEWLINE if not len(arrays):NEWLINE return []NEWLINENEWLINE from pytools import single_valuedNEWLINE a_dtype = single_valued(a.dtype for a in arrays)NEWLINE a_allocator = arrays[0].allocatorNEWLINE context = src_indices.contextNEWLINE queue = queue or src_indices.queueNEWLINENEWLINE vec_count = len(arrays)NEWLINENEWLINE if out is None:NEWLINE out = [Array(queue, dest_shape, a_dtype, allocator=a_allocator)NEWLINE for i in range(vec_count)]NEWLINE else:NEWLINE if a_dtype != single_valued(o.dtype for o in out):NEWLINE raise TypeError("arrays and out must have the same dtype")NEWLINE if len(out) != vec_count:NEWLINE raise ValueError("out and arrays must have the same length")NEWLINENEWLINE if src_indices.dtype != dest_indices.dtype:NEWLINE raise TypeError(NEWLINE "src_indices and dest_indices must have the same dtype")NEWLINENEWLINE if len(src_indices.shape) != 1:NEWLINE raise ValueError("src_indices must be 1D")NEWLINENEWLINE if src_indices.shape != dest_indices.shape:NEWLINE raise ValueError(NEWLINE "src_indices and dest_indices must have the same shape")NEWLINENEWLINE if src_offsets is None:NEWLINE src_offsets_list = []NEWLINE else:NEWLINE src_offsets_list = src_offsetsNEWLINE if len(src_offsets) != vec_count:NEWLINE raise ValueError(NEWLINE "src_indices and src_offsets must have the same length")NEWLINENEWLINE max_chunk_size = 10NEWLINENEWLINE 
chunk_size = _builtin_min(vec_count, max_chunk_size)NEWLINENEWLINE def make_func_for_chunk_size(chunk_size):NEWLINE return elementwise.get_take_put_kernel(context,NEWLINE a_dtype, src_indices.dtype,NEWLINE with_offsets=src_offsets is not None,NEWLINE vec_count=chunk_size)NEWLINENEWLINE knl = make_func_for_chunk_size(chunk_size)NEWLINENEWLINE for start_i in range(0, len(arrays), chunk_size):NEWLINE chunk_slice = slice(start_i, start_i+chunk_size)NEWLINENEWLINE if start_i + chunk_size > vec_count:NEWLINE knl = make_func_for_chunk_size(vec_count-start_i)NEWLINENEWLINE gs, ls = src_indices.get_sizes(queue,NEWLINE knl.get_work_group_info(NEWLINE cl.kernel_work_group_info.WORK_GROUP_SIZE,NEWLINE queue.device))NEWLINENEWLINE from pytools import flattenNEWLINE knl(queue, gs, ls,NEWLINE *([o.data for o in out[chunk_slice]]NEWLINE + [dest_indices.base_data,NEWLINE dest_indices.offset,NEWLINE src_indices.base_data,NEWLINE src_indices.offset]NEWLINE + list(flatten(NEWLINE (i.base_data, i.offset)NEWLINE for i in arrays[chunk_slice]))NEWLINE + src_offsets_list[chunk_slice]NEWLINE + [src_indices.size]))NEWLINENEWLINE return outNEWLINENEWLINENEWLINEdef multi_put(arrays, dest_indices, dest_shape=None, out=None, queue=None,NEWLINE wait_for=None):NEWLINE if not len(arrays):NEWLINE return []NEWLINENEWLINE from pytools import single_valuedNEWLINE a_dtype = single_valued(a.dtype for a in arrays)NEWLINE a_allocator = arrays[0].allocatorNEWLINE context = dest_indices.contextNEWLINE queue = queue or dest_indices.queueNEWLINENEWLINE vec_count = len(arrays)NEWLINENEWLINE if out is None:NEWLINE out = [Array(queue, dest_shape, a_dtype,NEWLINE allocator=a_allocator, queue=queue)NEWLINE for i in range(vec_count)]NEWLINE else:NEWLINE if a_dtype != single_valued(o.dtype for o in out):NEWLINE raise TypeError("arrays and out must have the same dtype")NEWLINE if len(out) != vec_count:NEWLINE raise ValueError("out and arrays must have the same length")NEWLINENEWLINE if len(dest_indices.shape) != 
1:NEWLINE raise ValueError("dest_indices must be 1D")NEWLINENEWLINE chunk_size = _builtin_min(vec_count, 10)NEWLINENEWLINE def make_func_for_chunk_size(chunk_size):NEWLINE knl = elementwise.get_put_kernel(NEWLINE context,NEWLINE a_dtype, dest_indices.dtype, vec_count=chunk_size)NEWLINE return knlNEWLINENEWLINE knl = make_func_for_chunk_size(chunk_size)NEWLINENEWLINE for start_i in range(0, len(arrays), chunk_size):NEWLINE chunk_slice = slice(start_i, start_i+chunk_size)NEWLINENEWLINE if start_i + chunk_size > vec_count:NEWLINE knl = make_func_for_chunk_size(vec_count-start_i)NEWLINENEWLINE gs, ls = dest_indices.get_sizes(queue,NEWLINE knl.get_work_group_info(NEWLINE cl.kernel_work_group_info.WORK_GROUP_SIZE,NEWLINE queue.device))NEWLINENEWLINE from pytools import flattenNEWLINE evt = knl(queue, gs, ls,NEWLINE *(NEWLINE list(flatten(NEWLINE (o.base_data, o.offset)NEWLINE for o in out[chunk_slice]))NEWLINE + [dest_indices.base_data, dest_indices.offset]NEWLINE + list(flatten(NEWLINE (i.base_data, i.offset)NEWLINE for i in arrays[chunk_slice]))NEWLINE + [dest_indices.size]),NEWLINE **dict(wait_for=wait_for))NEWLINENEWLINE # FIXME should wait on incoming eventsNEWLINENEWLINE for o in out[chunk_slice]:NEWLINE o.add_event(evt)NEWLINENEWLINE return outNEWLINENEWLINENEWLINEdef concatenate(arrays, axis=0, queue=None, allocator=None):NEWLINE """NEWLINE .. 
versionadded:: 2013.1NEWLINE """NEWLINE # {{{ find properties of result arrayNEWLINENEWLINE shape = NoneNEWLINENEWLINE for i_ary, ary in enumerate(arrays):NEWLINE queue = queue or ary.queueNEWLINE allocator = allocator or ary.allocatorNEWLINENEWLINE if shape is None:NEWLINE # first arrayNEWLINE shape = list(ary.shape)NEWLINE else:NEWLINE if len(ary.shape) != len(shape):NEWLINE raise ValueError("%d'th array has different number of axes "NEWLINE "(shold have %d, has %d)"NEWLINE % (i_ary, len(ary.shape), len(shape)))NEWLINENEWLINE ary_shape_list = list(ary.shape)NEWLINE if (ary_shape_list[:axis] != shape[:axis]NEWLINE or ary_shape_list[axis+1:] != shape[axis+1:]):NEWLINE raise ValueError("%d'th array has residual not matching "NEWLINE "other arrays" % i_ary)NEWLINENEWLINE shape[axis] += ary.shape[axis]NEWLINENEWLINE # }}}NEWLINENEWLINE shape = tuple(shape)NEWLINE dtype = np.find_common_type([ary.dtype for ary in arrays], [])NEWLINE result = empty(queue, shape, dtype, allocator=allocator)NEWLINENEWLINE full_slice = (slice(None),) * len(shape)NEWLINENEWLINE base_idx = 0NEWLINE for ary in arrays:NEWLINE my_len = ary.shape[axis]NEWLINE result.setitem(NEWLINE full_slice[:axis]NEWLINE + (slice(base_idx, base_idx+my_len),)NEWLINE + full_slice[axis+1:],NEWLINE ary)NEWLINENEWLINE base_idx += my_lenNEWLINENEWLINE return resultNEWLINENEWLINENEWLINE@elwise_kernel_runnerNEWLINEdef _diff(result, array):NEWLINE return elementwise.get_diff_kernel(array.context, array.dtype)NEWLINENEWLINENEWLINEdef diff(array, queue=None, allocator=None):NEWLINE """NEWLINE .. 
versionadded:: 2013.2NEWLINE """NEWLINENEWLINE if len(array.shape) != 1:NEWLINE raise ValueError("multi-D arrays are not supported")NEWLINENEWLINE n, = array.shapeNEWLINENEWLINE queue = queue or array.queueNEWLINE allocator = allocator or array.allocatorNEWLINENEWLINE result = empty(queue, (n-1,), array.dtype, allocator=allocator)NEWLINE _diff(result, array, queue=queue)NEWLINE return resultNEWLINENEWLINENEWLINEdef hstack(arrays, queue=None):NEWLINE from pyopencl.array import emptyNEWLINENEWLINE if len(arrays) == 0:NEWLINE return empty(queue, (), dtype=np.float64)NEWLINENEWLINE if queue is None:NEWLINE for ary in arrays:NEWLINE if ary.queue is not None:NEWLINE queue = ary.queueNEWLINE breakNEWLINENEWLINE from pytools import all_equal, single_valuedNEWLINE if not all_equal(len(ary.shape) for ary in arrays):NEWLINE raise ValueError("arguments must all have the same number of axes")NEWLINENEWLINE lead_shape = single_valued(ary.shape[:-1] for ary in arrays)NEWLINENEWLINE w = _builtin_sum([ary.shape[-1] for ary in arrays])NEWLINE result = empty(queue, lead_shape+(w,), arrays[0].dtype)NEWLINE index = 0NEWLINE for ary in arrays:NEWLINE result[..., index:index+ary.shape[-1]] = aryNEWLINE index += ary.shape[-1]NEWLINENEWLINE return resultNEWLINENEWLINE# }}}NEWLINENEWLINENEWLINE# {{{ conditionalsNEWLINENEWLINE@elwise_kernel_runnerNEWLINEdef _if_positive(result, criterion, then_, else_):NEWLINE return elementwise.get_if_positive_kernel(NEWLINE result.context, criterion.dtype, then_.dtype)NEWLINENEWLINENEWLINEdef if_positive(criterion, then_, else_, out=None, queue=None):NEWLINE """Return an array like *then_*, which, for the element at index *i*,NEWLINE contains *then_[i]* if *criterion[i]>0*, else *else_[i]*.NEWLINE """NEWLINENEWLINE if not (criterion.shape == then_.shape == else_.shape):NEWLINE raise ValueError("shapes do not match")NEWLINENEWLINE if not (then_.dtype == else_.dtype):NEWLINE raise ValueError("dtypes do not match")NEWLINENEWLINE if out is None:NEWLINE out = 
empty_like(then_)NEWLINE _if_positive(out, criterion, then_, else_, queue=queue)NEWLINE return outNEWLINENEWLINENEWLINEdef maximum(a, b, out=None, queue=None):NEWLINE """Return the elementwise maximum of *a* and *b*."""NEWLINENEWLINE # silly, but functionalNEWLINE return if_positive(a.mul_add(1, b, -1, queue=queue), a, b,NEWLINE queue=queue, out=out)NEWLINENEWLINENEWLINEdef minimum(a, b, out=None, queue=None):NEWLINE """Return the elementwise minimum of *a* and *b*."""NEWLINE # silly, but functionalNEWLINE return if_positive(a.mul_add(1, b, -1, queue=queue), b, a,NEWLINE queue=queue, out=out)NEWLINENEWLINE# }}}NEWLINENEWLINENEWLINE# {{{ reductionsNEWLINE_builtin_sum = sumNEWLINE_builtin_min = minNEWLINE_builtin_max = maxNEWLINENEWLINENEWLINEdef sum(a, dtype=None, queue=None):NEWLINE """NEWLINE .. versionadded:: 2011.1NEWLINE """NEWLINE from pyopencl.reduction import get_sum_kernelNEWLINE krnl = get_sum_kernel(a.context, dtype, a.dtype)NEWLINE return krnl(a, queue=queue)NEWLINENEWLINENEWLINEdef dot(a, b, dtype=None, queue=None):NEWLINE """NEWLINE .. versionadded:: 2011.1NEWLINE """NEWLINE from pyopencl.reduction import get_dot_kernelNEWLINE krnl = get_dot_kernel(a.context, dtype, a.dtype, b.dtype)NEWLINE return krnl(a, b, queue=queue)NEWLINENEWLINENEWLINEdef vdot(a, b, dtype=None, queue=None):NEWLINE """Like :func:`numpy.vdot`.NEWLINENEWLINE .. versionadded:: 2013.1NEWLINE """NEWLINE from pyopencl.reduction import get_dot_kernelNEWLINE krnl = get_dot_kernel(a.context, dtype, a.dtype, b.dtype,NEWLINE conjugate_first=True)NEWLINE return krnl(a, b, queue=queue)NEWLINENEWLINENEWLINEdef subset_dot(subset, a, b, dtype=None, queue=None):NEWLINE """NEWLINE .. 
versionadded:: 2011.1NEWLINE """NEWLINE from pyopencl.reduction import get_subset_dot_kernelNEWLINE krnl = get_subset_dot_kernel(NEWLINE a.context, dtype, subset.dtype, a.dtype, b.dtype)NEWLINE return krnl(subset, a, b, queue=queue)NEWLINENEWLINENEWLINEdef _make_minmax_kernel(what):NEWLINE def f(a, queue=None):NEWLINE from pyopencl.reduction import get_minmax_kernelNEWLINE krnl = get_minmax_kernel(a.context, what, a.dtype)NEWLINE return krnl(a, queue=queue)NEWLINENEWLINE return fNEWLINENEWLINEmin = _make_minmax_kernel("min")NEWLINEmin.__doc__ = """NEWLINE .. versionadded:: 2011.1NEWLINE """NEWLINENEWLINEmax = _make_minmax_kernel("max")NEWLINEmax.__doc__ = """NEWLINE .. versionadded:: 2011.1NEWLINE """NEWLINENEWLINENEWLINEdef _make_subset_minmax_kernel(what):NEWLINE def f(subset, a, queue=None):NEWLINE from pyopencl.reduction import get_subset_minmax_kernelNEWLINE krnl = get_subset_minmax_kernel(a.context, what, a.dtype, subset.dtype)NEWLINE return krnl(subset, a, queue=queue)NEWLINENEWLINE return fNEWLINENEWLINEsubset_min = _make_subset_minmax_kernel("min")NEWLINEsubset_min.__doc__ = """.. versionadded:: 2011.1"""NEWLINEsubset_max = _make_subset_minmax_kernel("max")NEWLINEsubset_max.__doc__ = """.. versionadded:: 2011.1"""NEWLINENEWLINE# }}}NEWLINENEWLINENEWLINE# {{{ scansNEWLINENEWLINEdef cumsum(a, output_dtype=None, queue=None,NEWLINE wait_for=None, return_event=False):NEWLINE # undocumented for nowNEWLINENEWLINE """NEWLINE .. versionadded:: 2013.1NEWLINE """NEWLINENEWLINE if output_dtype is None:NEWLINE output_dtype = a.dtypeNEWLINENEWLINE result = a._new_like_me(output_dtype)NEWLINENEWLINE from pyopencl.scan import get_cumsum_kernelNEWLINE krnl = get_cumsum_kernel(a.context, a.dtype, output_dtype)NEWLINE evt = krnl(a, result, queue=queue, wait_for=wait_for)NEWLINENEWLINE if return_event:NEWLINE return evt, resultNEWLINE else:NEWLINE return resultNEWLINENEWLINE# }}}NEWLINENEWLINE# vim: foldmethod=markerNEWLINE
from __future__ import print_function

import os
import ast
import json
import glob
import re
import sys
import demisto_client
from threading import Thread, Lock
from demisto_client.demisto_api.rest import ApiException
from demisto_sdk.commands.common.tools import run_threads_list

from google.cloud.storage import Bucket
from packaging.version import Version
from typing import List

from Tests.Marketplace.marketplace_services import init_storage_client, Pack, load_json
from Tests.Marketplace.upload_packs import download_and_extract_index
from Tests.Marketplace.marketplace_constants import GCPConfig, PACKS_FULL_PATH, IGNORED_FILES, PACKS_FOLDER, Metadata
from Tests.scripts.utils.content_packs_util import is_pack_deprecated
from Tests.scripts.utils import logging_wrapper as logging

PACK_METADATA_FILE = 'pack_metadata.json'
# Matches a pack zip path in the production bucket and captures its semantic version.
PACK_PATH_VERSION_REGEX = re.compile(fr'^{GCPConfig.PRODUCTION_STORAGE_BASE_PATH}/[A-Za-z0-9-_.]+/(\d+\.\d+\.\d+)/[A-Za-z0-9-_.]'
                                     r'+\.zip$')
# Module-wide success indicator; set to False (under a lock in threaded
# paths) by any failing request below.
SUCCESS_FLAG = True


def get_pack_id_from_error_with_gcp_path(error: str) -> str:
    """
    Gets the id of the pack from the pack's path in GCP that is mentioned in the error msg.
    Args:
        error: path of pack in GCP.

    Returns:
        The id of given pack.
    """
    return error.split('/packs/')[1].split('.zip')[0].split('/')[0]


def get_pack_display_name(pack_id: str) -> str:
    """
    Gets the display name of the pack from the pack ID.

    :param pack_id: ID of the pack.
    :return: Name found in the pack metadata, otherwise an empty string.
    """
    metadata_path = os.path.join(PACKS_FULL_PATH, pack_id, PACK_METADATA_FILE)
    if pack_id and os.path.isfile(metadata_path):
        with open(metadata_path, 'r') as json_file:
            pack_metadata = json.load(json_file)
        return pack_metadata.get('name')
    return ''


def is_pack_hidden(pack_id: str) -> bool:
    """
    Check whether the given pack is hidden.

    :param pack_id: ID of the pack.
    :return: True if the pack is hidden, i.e. has a 'hidden: true' field in its metadata, False otherwise.
    """
    metadata_path = os.path.join(PACKS_FULL_PATH, pack_id, PACK_METADATA_FILE)
    if pack_id and os.path.isfile(metadata_path):
        with open(metadata_path, 'r') as json_file:
            pack_metadata = json.load(json_file)
            return pack_metadata.get('hidden', False)
    else:
        logging.warning(f'Could not open metadata file of pack {pack_id}')
    return False


def create_dependencies_data_structure(response_data: dict, dependants_ids: list, dependencies_data: list,
                                       checked_packs: list):
    """ Recursively creates the packs' dependencies data structure for the installation requests
    (only required and uninstalled).

    Args:
        response_data (dict): The GET /search/dependencies response data.
        dependants_ids (list): A list of the dependant packs IDs.
        dependencies_data (list): The dependencies data structure to be created.
        checked_packs (list): Required dependants that were already found.
    """

    next_call_dependants_ids = []

    for dependency in response_data:
        dependants = dependency.get('dependants', {})
        for dependant in dependants.keys():
            is_required = dependants[dependant].get('level', '') == 'required'
            # Only collect each required dependency once.
            if dependant in dependants_ids and is_required and dependency.get('id') not in checked_packs:
                dependencies_data.append({
                    'id': dependency.get('id'),
                    'version': dependency.get('extras', {}).get('pack', {}).get('currentVersion')
                })
                next_call_dependants_ids.append(dependency.get('id'))
                checked_packs.append(dependency.get('id'))

    # Recurse to pick up transitive dependencies of the packs found above.
    if next_call_dependants_ids:
        create_dependencies_data_structure(response_data, next_call_dependants_ids, dependencies_data, checked_packs)


def get_pack_dependencies(client: demisto_client, pack_data: dict, lock: Lock):
    """ Get the pack's required dependencies.

    Args:
        client (demisto_client): The configured client to use.
        pack_data (dict): Contains the pack ID and version.
        lock (Lock): A lock object.
    Returns:
        (list) The pack's dependencies, or None (implicitly) on failure.
    """
    pack_id = pack_data['id']
    logging.debug(f'Getting dependencies for pack {pack_id}')
    try:
        response_data, status_code, _ = demisto_client.generic_request_func(
            client,
            path='/contentpacks/marketplace/search/dependencies',
            method='POST',
            body=[pack_data],
            accept='application/json',
            _request_timeout=None
        )

        if 200 <= status_code < 300:
            dependencies_data: list = []
            dependants_ids = [pack_id]
            reseponse_data = ast.literal_eval(response_data).get('dependencies', [])
            create_dependencies_data_structure(reseponse_data, dependants_ids, dependencies_data, dependants_ids)
            dependencies_str = ', '.join([dep['id'] for dep in dependencies_data])
            if dependencies_data:
                logging.debug(f'Found the following dependencies for pack {pack_id}: {dependencies_str}')
            return dependencies_data
        if status_code == 400:
            logging.error(f'Unable to find dependencies for {pack_id}.')
            return []
        else:
            result_object = ast.literal_eval(response_data)
            msg = result_object.get('message', '')
            raise Exception(f'Failed to get pack {pack_id} dependencies - with status code {status_code}\n{msg}\n')
    except Exception:
        logging.exception(f'The request to get pack {pack_id} dependencies has failed.')

        # Mark the whole run as failed; lock protects the shared flag.
        lock.acquire()
        global SUCCESS_FLAG
        SUCCESS_FLAG = False
        lock.release()


def search_pack(client: demisto_client,
                pack_display_name: str,
                pack_id: str,
                lock: Lock) -> dict:
    """ Make a pack search request.

    Args:
        client (demisto_client): The configured client to use.
        pack_display_name (string): The pack display name.
        pack_id (string): The pack ID.
        lock (Lock): A lock object.
    Returns:
        (dict): Returns the pack data if found, or empty dict otherwise.
    """

    try:
        # make the search request
        response_data, status_code, _ = demisto_client.generic_request_func(client,
                                                                            path=f'/contentpacks/marketplace/{pack_id}',
                                                                            method='GET',
                                                                            accept='application/json',
                                                                            _request_timeout=None)

        if 200 <= status_code < 300:
            result_object = ast.literal_eval(response_data)

            if result_object and result_object.get('currentVersion'):
                logging.debug(f'Found pack "{pack_display_name}" by its ID "{pack_id}" in bucket!')

                pack_data = {
                    'id': result_object.get('id'),
                    'version': result_object.get('currentVersion')
                }
                return pack_data

            else:
                raise Exception(f'Did not find pack "{pack_display_name}" by its ID "{pack_id}" in bucket.')
        else:
            result_object = ast.literal_eval(response_data)
            msg = result_object.get('message', '')
            err_msg = f'Search request for pack "{pack_display_name}" with ID "{pack_id}", failed with status code ' \
                      f'{status_code}\n{msg}'
            raise Exception(err_msg)
    except Exception:
        logging.exception(f'Search request for pack "{pack_display_name}" with ID "{pack_id}", failed.')

        lock.acquire()
        global SUCCESS_FLAG
        SUCCESS_FLAG = False
        lock.release()
        # NOTE(review): failure returns {} (falsy), while the caller in
        # search_pack_and_its_dependencies checks "if pack_data is None" --
        # that fallback branch can never trigger; confirm intent.
        return {}


def find_malformed_pack_id(body: str) -> List:
    """
    Find the pack ID from the installation error message in the case the error is that the pack is not found or
    in case that the error is that the pack's version is invalid.
    Args:
        body (str): The response message of the failed installation pack.

    Returns: list of malformed ids (list)

    """
    malformed_ids = []
    if body:
        response_info = json.loads(body)
        if error_info := response_info.get('error'):
            errors_info = [error_info]
        else:
            # the error is returned as a list of error
            errors_info = response_info.get('errors', [])
        for error in errors_info:
            if 'pack id: ' in error:
                malformed_ids.extend(error.split('pack id: ')[1].replace(']', '').replace('[', '').replace(
                    ' ', '').split(','))
            else:
                malformed_pack_pattern = re.compile(r'invalid version [0-9.]+ for pack with ID ([\w_-]+)')
                malformed_pack_id = malformed_pack_pattern.findall(str(error))
                if malformed_pack_id and error:
                    malformed_ids.extend(malformed_pack_id)
    return malformed_ids


def handle_malformed_pack_ids(malformed_pack_ids, packs_to_install):
    """
    Handles the case where the malformed id failed the installation but it was not a part of the initial installation.
    This is in order to prevent an infinite loop for such an edge case.
    Args:
        malformed_pack_ids: the ids found from the error msg
        packs_to_install: list of packs that was already installed that caused the failure.

    Returns:
        raises an error.
    """
    for malformed_pack_id in malformed_pack_ids:
        if malformed_pack_id not in {pack['id'] for pack in packs_to_install}:
            raise Exception(f'The pack {malformed_pack_id} has failed to install even '
                            f'though it was not in the installation list')


def install_packs_from_artifacts(client: demisto_client, host: str, test_pack_path: str, pack_ids_to_install: List):
    """
    Installs all the packs located in the artifacts folder of the GitHub Actions build.  Please note:
    The server always returns a 200 status even if the pack was not installed.

    :param client: Demisto-py client to connect to the server.
    :param host: FQDN of the server.
    :param test_pack_path: Path to the test pack directory.
    :param pack_ids_to_install: List of pack IDs to install.
    :return: None. Call to server waits until a successful response.
    """
    logging.info(f"Test pack path is: {test_pack_path}")
    logging.info(f"Pack IDs to install are: {pack_ids_to_install}")

    local_packs = glob.glob(f"{test_pack_path}/*.zip")

    for local_pack in local_packs:
        if any(pack_id in local_pack for pack_id in pack_ids_to_install):
            logging.info(f'Installing the following pack: {local_pack}')
            upload_zipped_packs(client=client, host=host, pack_path=local_pack)


def install_packs_private(client: demisto_client,
                          host: str,
                          pack_ids_to_install: List,
                          test_pack_path: str):
    """ Make a packs installation request.

    Args:
        client (demisto_client): The configured client to use.
        host (str): The server URL.
        pack_ids_to_install (list): List of Pack IDs to install.
        test_pack_path (str): Path where test packs are located.
    """
    install_packs_from_artifacts(client,
                                 host,
                                 pack_ids_to_install=pack_ids_to_install,
                                 test_pack_path=test_pack_path)


def install_packs(client: demisto_client,
                  host: str,
                  packs_to_install: list,
                  request_timeout: int = 999999,
                  ):
    """ Make a packs installation request.
    If a pack fails to install due to malformed pack, this function catches the corrupted pack and call another
    request to install packs again, this time without the corrupted pack.
    If a pack fails to install due to timeout when sending a request to GCP,
    request to install all packs again once more.

    Args:
        client (demisto_client): The configured client to use.
        host (str): The server URL.
        packs_to_install (list): A list of the packs to install.
        request_timeout (int): Timeout settings for the installation request.
    """

    class GCPTimeOutException(ApiException):
        # Raised when the server reports a GCP timeout; carries the pack id
        # parsed from the GCP path in the error body, when present.
        def __init__(self, error):
            if '/packs/' in error:
                self.pack_id = get_pack_id_from_error_with_gcp_path(error)
            super().__init__()

    class MalformedPackException(ApiException):
        # Raised when the error body names malformed/invalid-version packs.
        def __init__(self, pack_ids):
            self.malformed_ids = pack_ids
            super().__init__()

    class GeneralItemNotFoundError(ApiException):
        # Raised when the error body contains an 'Item not found' message.
        def __init__(self, error_msg):
            self.error_msg = error_msg
            super().__init__()

    def call_install_packs_request(packs):
        try:
            logging.debug(f'Installing the following packs on server {host}:\n{[pack["id"] for pack in packs]}')
            response_data, status_code, _ = demisto_client.generic_request_func(client,
                                                                                path='/contentpacks/marketplace/install',
                                                                                method='POST',
                                                                                body={'packs': packs,
                                                                                      'ignoreWarnings': True},
                                                                                accept='application/json',
                                                                                _request_timeout=request_timeout)

            if status_code in range(200, 300) and status_code != 204:
                packs_data = [{'ID': pack.get('id'), 'CurrentVersion': pack.get('currentVersion')} for pack in
                              ast.literal_eval(response_data)]
                logging.success(f'Packs were successfully installed on server {host}')
                logging.debug(f'The packs that were successfully installed on server {host}:\n{packs_data}')

        except ApiException as ex:
            # Translate known server error bodies into the typed exceptions
            # handled below.
            if 'timeout awaiting response' in ex.body:
                raise GCPTimeOutException(ex.body)
            if malformed_ids := find_malformed_pack_id(ex.body):
                raise MalformedPackException(malformed_ids)
            if 'Item not found' in ex.body:
                raise GeneralItemNotFoundError(ex.body)
            raise ex

    try:
        logging.info(f'Installing packs on server {host}')
        try:
            call_install_packs_request(packs_to_install)

        except MalformedPackException as e:
            # if this is malformed pack error, remove malformed packs and retry until success
            handle_malformed_pack_ids(e.malformed_ids, packs_to_install)
            logging.warning(f'The request to install packs on server {host} has failed, retrying without packs '
                            f'{e.malformed_ids}')
            return install_packs(client, host, [pack for pack in packs_to_install if pack['id'] not in e.malformed_ids],
                                 request_timeout)

        except GCPTimeOutException as e:
            # if this is a gcp timeout, try only once more
            logging.warning(f'The request to install packs on server {host} has failed due to timeout awaiting response'
                            f' headers while trying to install pack {e.pack_id}, trying again for one more time')
            call_install_packs_request(packs_to_install)

        except GeneralItemNotFoundError as e:
            logging.warning(f'The request to install all packs on server {host} has failed due to an item not found '
                            f'error, with the message: {e.error_msg}.\n trying again for one more time')
            call_install_packs_request(packs_to_install)

    except Exception as e:
        logging.exception(f'The request to install packs has failed. Additional info: {str(e)}')
        global SUCCESS_FLAG
        SUCCESS_FLAG = False

    finally:
        # NOTE(review): "return" inside "finally" suppresses any in-flight
        # exception -- here it guarantees the flag is always reported, but
        # confirm this is intentional.
        return SUCCESS_FLAG


def search_pack_and_its_dependencies(client: demisto_client,
                                     pack_id: str,
                                     packs_to_install: list,
                                     installation_request_body: list,
                                     lock: Lock):
    """ Searches for the pack of the specified file path, as well as its dependencies,
    and updates the list of packs to be installed accordingly.

    Args:
        client (demisto_client): The configured client to use.
        pack_id (str): The id of the pack to be installed.
        packs_to_install (list) A list of the packs to be installed in this iteration.
        installation_request_body (list): A list of packs to be installed, in the request format.
        lock (Lock): A lock object.
    """
    pack_data = {}
    if pack_id not in packs_to_install:
        pack_display_name = get_pack_display_name(pack_id)
        if pack_display_name:
            pack_data = search_pack(client, pack_display_name, pack_id, lock)
        if pack_data is None:
            pack_data = {
                'id': pack_id,
                'version': '1.0.0'
            }

    if pack_data:
        dependencies = get_pack_dependencies(client, pack_data, lock)

        current_packs_to_install = [pack_data]
        if dependencies:
            # Check that the dependencies don't include a deprecated pack:
            for dependency in dependencies:
                pack_path = os.path.join(PACKS_FOLDER, dependency.get('id'))
                if is_pack_deprecated(pack_path):
                    logging.critical(f'Pack {pack_id} depends on pack {dependency.get("id")} which is a deprecated '
                                     f'pack.')
                    global SUCCESS_FLAG
                    SUCCESS_FLAG = False
                else:
                    current_packs_to_install.extend(dependencies)

        lock.acquire()
        for pack in current_packs_to_install:
            if pack['id'] not in packs_to_install:
                packs_to_install.append(pack['id'])
                installation_request_body.append(pack)
        lock.release()


def get_latest_version_from_bucket(pack_id: str, production_bucket: Bucket) -> str:
    """ Retrieves the latest version of pack in the bucket

    Args:
        pack_id (str): The pack id to retrieve the latest version
        production_bucket (Bucket): The GCS production bucket

    Returns: The latest version of the pack as it is in the production bucket,
        or '' when no versioned zip exists for the pack.

    """
    pack_bucket_path = os.path.join(GCPConfig.PRODUCTION_STORAGE_BASE_PATH, pack_id)
    logging.debug(f'Trying to get latest version for pack {pack_id} from bucket path {pack_bucket_path}')
    # Adding the '/' in the end of the prefix to search for the exact pack id
    pack_versions_paths = [f.name for f in production_bucket.list_blobs(prefix=f'{pack_bucket_path}/') if
                           f.name.endswith('.zip')]

    # Collect every parseable version number found in the blob paths.
    pack_versions = []
    for path in pack_versions_paths:
        versions = PACK_PATH_VERSION_REGEX.findall(path)
        if not versions:
            continue
        pack_versions.append(Version(versions[0]))

    logging.debug(f'Found the following zips for {pack_id} pack: {pack_versions}')
    if pack_versions:
        # max() on Version objects gives proper semantic-version ordering.
        pack_latest_version = str(max(pack_versions))
        return pack_latest_version
    else:
        logging.error(f'Could not find any versions for pack {pack_id} in bucket path {pack_bucket_path}')
        return ''


def get_pack_installation_request_data(pack_id: str, pack_version: str):
    """
    Returns the installation request data of a given pack and its version. The request must have the ID and Version.

    :param pack_id: Id of the pack to add.
    :param pack_version: Version of the pack to add.
    :return: The request data part of the pack
    """
    return {
        'id': pack_id,
        'version': pack_version
    }


def install_all_content_packs_for_nightly(client: demisto_client, host: str, service_account: str):
    """ Iterates over the packs currently located in the Packs directory. Wrapper for install_packs.
    Retrieving the latest version of each pack from the production bucket.

    :param client: Demisto-py client to connect to the server.
    :param host: FQDN of the server.
    :param service_account: The full path to the service account json.
    :return: None. Prints the response from the server in the build.
    """
    all_packs = []

    # Initiate the GCS client and get the production bucket
    storage_client = init_storage_client(service_account)
    production_bucket = storage_client.bucket(GCPConfig.PRODUCTION_BUCKET)
    logging.debug(f"Installing all content packs for nightly flow in server {host}")

    # Add deprecated packs to IGNORED_FILES list:
    # (done in a separate pass so the ignore list is complete before filtering)
    for pack_id in os.listdir(PACKS_FULL_PATH):
        if is_pack_hidden(pack_id):
            logging.debug(f'Skipping installation of hidden pack "{pack_id}"')
            IGNORED_FILES.append(pack_id)

    for pack_id in os.listdir(PACKS_FULL_PATH):
        if pack_id not in IGNORED_FILES:
            pack_version = get_latest_version_from_bucket(pack_id, production_bucket)
            if pack_version:
                all_packs.append(get_pack_installation_request_data(pack_id, pack_version))
    install_packs(client, host, all_packs)


def install_all_content_packs_from_build_bucket(client: demisto_client, host: str, server_version: str,
                                                bucket_packs_root_path: str, service_account: str,
                                                extract_destination_path: str):
    """ Iterates over the packs currently located in the Build bucket. Wrapper for install_packs.
    Retrieving the metadata of the latest version of each pack from the index.zip of the build bucket.

    :param client: Demisto-py client to connect to the server.
    :param host: FQDN of the server.
    :param server_version: The version of the server the packs are installed on.
    :param bucket_packs_root_path: The prefix to the root of packs in the bucket
    :param service_account: Google Service Account
    :param extract_destination_path: the full path of extract folder for the index.
    :return: None. Prints the response from the server in the build.
    """
    all_packs = []
    logging.debug(f"Installing all content packs in server {host} from packs path {bucket_packs_root_path}")

    storage_client = init_storage_client(service_account)
    build_bucket = storage_client.bucket(GCPConfig.CI_BUILD_BUCKET)
    # The index.zip holds one metadata.json per pack; extract it once up front.
    index_folder_path, _, _ = download_and_extract_index(build_bucket, extract_destination_path, bucket_packs_root_path)

    for pack_id in os.listdir(index_folder_path):
        if os.path.isdir(os.path.join(index_folder_path, pack_id)):
            metadata_path = os.path.join(index_folder_path, pack_id, Pack.METADATA)
            pack_metadata = load_json(metadata_path)
            if 'partnerId' in pack_metadata:  # not installing private packs
                continue
            pack_version = pack_metadata.get(Metadata.CURRENT_VERSION, Metadata.SERVER_DEFAULT_MIN_VERSION)
            server_min_version = pack_metadata.get(Metadata.SERVER_MIN_VERSION, Metadata.SERVER_DEFAULT_MIN_VERSION)
            hidden = pack_metadata.get(Metadata.HIDDEN, False)
            # Check if the server version is greater than the minimum server version required for this pack or if the
            # pack is hidden (deprecated):
            if ('Master' in server_version or Version(server_version) >= Version(server_min_version)) and \
                    not hidden:
                logging.debug(f"Appending pack id {pack_id}")
                all_packs.append(get_pack_installation_request_data(pack_id, pack_version))
            else:
                reason = 'Is hidden' if hidden else f'min server version is {server_min_version}'
                logging.debug(f'Pack: {pack_id} with version: {pack_version} will not be installed on {host}. '
                              f'Pack {reason}.')
    return install_packs(client, host, all_packs)


def upload_zipped_packs(client: demisto_client,
                        host: str,
                        pack_path: str):
    """ Install packs from zip file.

    Args:
        client (demisto_client): The configured client to use.
        host (str): The server URL.
        pack_path (str): path to pack zip.

    Exits the process with status 1 on any failure.
    """
    header_params = {
        'Content-Type': 'multipart/form-data'
    }
    auth_settings = ['api_key', 'csrf_token', 'x-xdr-auth-id']
    file_path = os.path.abspath(pack_path)
    files = {'file': file_path}

    logging.info(f'Making "POST" request to server {host} - to install all packs from file {pack_path}')

    # make the pack installation request
    try:
        response_data, status_code, _ = client.api_client.call_api(resource_path='/contentpacks/installed/upload',
                                                                   method='POST',
                                                                   auth_settings=auth_settings,
                                                                   header_params=header_params, files=files)

        if 200 <= status_code < 300:
            logging.info(f'All packs from file {pack_path} were successfully installed on server {host}')
        else:
            # Error responses carry a python-literal body with a 'message' field.
            result_object = ast.literal_eval(response_data)
            message = result_object.get('message', '')
            raise Exception(f'Failed to install packs - with status code {status_code}\n{message}')
    except Exception:
        logging.exception('The request to install packs has failed.')
        sys.exit(1)


def search_and_install_packs_and_their_dependencies_private(test_pack_path: str,
                                                            pack_ids: list,
                                                            client: demisto_client):
    """ Searches for the packs from the specified list, searches their dependencies, and then installs them.
    Args:
        test_pack_path (str): Path of where the test packs are located.
        pack_ids (list): A list of the pack ids to search and install.
        client (demisto_client): The client to connect to.

    Returns:
        bool: A flag that indicates if the operation succeeded or not.
    """
    host = client.api_client.configuration.host

    logging.info(f'Starting to search and install packs in server: {host}')

    install_packs_private(client, host, pack_ids, test_pack_path)

    return SUCCESS_FLAG


def search_and_install_packs_and_their_dependencies(pack_ids: list,
                                                    client: demisto_client, hostname: str = ''):
    """ Searches for the packs from the specified list, searches their dependencies, and then
    installs them.
    Args:
        pack_ids (list): A list of the pack ids to search and install.
        client (demisto_client): The client to connect to.
        hostname (str): Hostname of instance. Using for logs.

    Returns (list, bool):
        A list of the installed packs' ids.
        A flag that indicates if the operation succeeded or not.
    """
    host = hostname if hostname else client.api_client.configuration.host

    logging.info(f'Starting to search and install packs in server: {host}')

    packs_to_install: list = []  # we save all the packs we want to install, to avoid duplications
    installation_request_body: list = []  # the packs to install, in the request format

    # One worker thread per pack id; all threads append to the two shared
    # lists above under the shared lock.
    threads_list = []
    lock = Lock()

    for pack_id in pack_ids:
        thread = Thread(target=search_pack_and_its_dependencies,
                        kwargs={'client': client,
                                'pack_id': pack_id,
                                'packs_to_install': packs_to_install,
                                'installation_request_body': installation_request_body,
                                'lock': lock})
        threads_list.append(thread)
    run_threads_list(threads_list)

    install_packs(client, host, installation_request_body)

    return packs_to_install, SUCCESS_FLAG
# coding=utf-8
# Copyright 2018 The TF-Agents Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""Keras LSTM Encoding Network.

Implements a network that will generate the following layers:

  [optional]: preprocessing_layers    # preprocessing_layers
  [optional]: (Add | Concat(axis=-1) | ...)    # preprocessing_combiner
  [optional]: Conv2D    # input_conv_layer_params
  Flatten
  [optional]: Dense    # input_fc_layer_params
  [optional]: LSTM cell
  [optional]: Dense    # output_fc_layer_params
"""

from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

import gin
import tensorflow as tf

from tf_agents.networks import dynamic_unroll_layer
from tf_agents.networks import encoding_network
from tf_agents.networks import network
from tf_agents.specs import tensor_spec
from tf_agents.trajectories import time_step
from tf_agents.utils import nest_utils


# Keras LSTMCell 'implementation' mode 2 batches the cell's internal
# computations into fewer, larger matrix products (see the Keras docs).
KERAS_LSTM_FUSED_IMPLEMENTATION = 2


@gin.configurable
class LSTMEncodingNetwork(network.Network):
  """Recurrent network: input encoder -> LSTM unroll -> output encoder."""

  def __init__(
      self,
      input_tensor_spec,
      preprocessing_layers=None,
      preprocessing_combiner=None,
      conv_layer_params=None,
      input_fc_layer_params=(75, 40),
      lstm_size=(40,),
      output_fc_layer_params=(75, 40),
      activation_fn=tf.keras.activations.relu,
      dtype=tf.float32,
      name='LSTMEncodingNetwork',
  ):
    """Creates an instance of `LSTMEncodingNetwork`.

    Input preprocessing is possible via `preprocessing_layers` and
    `preprocessing_combiner` Layers.  If the `preprocessing_layers` nest is
    shallower than `input_tensor_spec`, then the layers will get the subnests.
    For example, if:

    ```python
    input_tensor_spec = ([TensorSpec(3)] * 2, [TensorSpec(3)] * 5)
    preprocessing_layers = (Layer1(), Layer2())
    ```

    then preprocessing will call:

    ```python
    preprocessed = [preprocessing_layers[0](observations[0]),
                    preprocessing_layers[1](observations[1])]
    ```

    However if

    ```python
    preprocessing_layers = ([Layer1() for _ in range(2)],
                            [Layer2() for _ in range(5)])
    ```

    then preprocessing will call:
    ```python
    preprocessed = [
        layer(obs) for layer, obs in zip(flatten(preprocessing_layers),
                                         flatten(observations))
    ]
    ```

    Args:
      input_tensor_spec: A nest of `tensor_spec.TensorSpec` representing the
        observations.
      preprocessing_layers: (Optional.) A nest of `tf.keras.layers.Layer`
        representing preprocessing for the different observations.
        All of these layers must not be already built.
      preprocessing_combiner: (Optional.) A keras layer that takes a flat list
        of tensors and combines them.  Good options include
        `tf.keras.layers.Add` and `tf.keras.layers.Concatenate(axis=-1)`.
        This layer must not be already built.
      conv_layer_params: Optional list of convolution layers parameters, where
        each item is a length-three tuple indicating (filters, kernel_size,
        stride).
      input_fc_layer_params: Optional list of fully connected parameters, where
        each item is the number of units in the layer. These feed into the
        recurrent layer.
      lstm_size: An iterable of ints specifying the LSTM cell sizes to use.
      output_fc_layer_params: Optional list of fully connected parameters, where
        each item is the number of units in the layer. These are applied on top
        of the recurrent layer.
      activation_fn: Activation function, e.g. tf.keras.activations.relu,.
      dtype: The dtype to use by the convolution, LSTM, and fully connected
        layers.
      name: A string representing name of the network.

    Raises:
      ValueError: If any of `preprocessing_layers` is already built.
      ValueError: If `preprocessing_combiner` is already built.
    """
    # He-style initialization, suited to the default relu activation.
    kernel_initializer = tf.compat.v1.variance_scaling_initializer(
        scale=2.0, mode='fan_in', distribution='truncated_normal')

    # Encoder applied to observations before the recurrent core.
    input_encoder = encoding_network.EncodingNetwork(
        input_tensor_spec,
        preprocessing_layers=preprocessing_layers,
        preprocessing_combiner=preprocessing_combiner,
        conv_layer_params=conv_layer_params,
        fc_layer_params=input_fc_layer_params,
        activation_fn=activation_fn,
        kernel_initializer=kernel_initializer,
        dtype=dtype)

    # Create RNN cell
    if len(lstm_size) == 1:
      cell = tf.keras.layers.LSTMCell(
          lstm_size[0],
          dtype=dtype,
          implementation=KERAS_LSTM_FUSED_IMPLEMENTATION)
    else:
      # Multiple sizes -> stack the cells into a single multi-layer cell.
      cell = tf.keras.layers.StackedRNNCells([
          tf.keras.layers.LSTMCell(  # pylint: disable=g-complex-comprehension
              size,
              dtype=dtype,
              implementation=KERAS_LSTM_FUSED_IMPLEMENTATION)
          for size in lstm_size
      ])

    # Optional dense layers applied to the LSTM output.
    output_encoder = []
    if output_fc_layer_params:
      output_encoder = [
          tf.keras.layers.Dense(
              num_units,
              activation=activation_fn,
              kernel_initializer=kernel_initializer,
              dtype=dtype,
              name='/'.join([name, 'dense']))
          for num_units in output_fc_layer_params
      ]

    # Mutable one-element list lets the closure number the state specs
    # sequentially while mapping over the (possibly nested) cell state.
    counter = [-1]
    def create_spec(size):
      counter[0] += 1
      return tensor_spec.TensorSpec(
          size, dtype=dtype, name='network_state_%d' % counter[0])
    state_spec = tf.nest.map_structure(create_spec, cell.state_size)

    super(LSTMEncodingNetwork, self).__init__(
        input_tensor_spec=input_tensor_spec,
        state_spec=state_spec,
        name=name)

    self._conv_layer_params = conv_layer_params
    self._input_encoder = input_encoder
    self._dynamic_unroll = dynamic_unroll_layer.DynamicUnroll(cell)
    self._output_encoder = output_encoder

  def call(self, observation, step_type, network_state=None):
    """Apply the network.

    Args:
      observation: A tuple of tensors matching `input_tensor_spec`.
      step_type: A tensor of `StepType` values.
      network_state: (optional.) The network state.

    Returns:
      `(outputs, network_state)` - the network output and next network state.

    Raises:
      ValueError: If observation tensors lack outer `(batch,)` or
        `(batch, time)` axes.
    """
    num_outer_dims = nest_utils.get_outer_rank(observation,
                                               self.input_tensor_spec)
    if num_outer_dims not in (1, 2):
      raise ValueError(
          'Input observation must have a batch or batch x time outer shape.')

    has_time_dim = num_outer_dims == 2
    if not has_time_dim:
      # Add a time dimension to the inputs.
      observation = tf.nest.map_structure(lambda t: tf.expand_dims(t, 1),
                                          observation)
      step_type = tf.nest.map_structure(lambda t: tf.expand_dims(t, 1),
                                        step_type)

    state, network_state = self._input_encoder(
        observation, step_type, network_state)

    with tf.name_scope('reset_mask'):
      # The LSTM state is reset wherever a new episode starts.
      reset_mask = tf.equal(step_type, time_step.StepType.FIRST)

    # Unroll over the time sequence.
    state, network_state = self._dynamic_unroll(
        state,
        reset_mask,
        initial_state=network_state)

    for layer in self._output_encoder:
      state = layer(state)

    if not has_time_dim:
      # Remove time dimension from the state.
      state = tf.squeeze(state, [1])

    return state, network_state
from kivy.uix.boxlayout import BoxLayout
from kivy.app import App
from kivy.uix.button import Button
from kivy.uix.gridlayout import GridLayout
from kivy.uix.label import Label
from kivy.uix.textinput import TextInput
from kivy.uix.carousel import Carousel


class MainLayout(BoxLayout):
    """Four-function calculator page.

    State:
      num1        - the value currently being typed (int, float, or str while typing)
      num2        - the stored first operand
      funct       - pending operation code: 1 '+', 2 '-', 3 '*', 4 '/'
      deciPressed - 1 while a decimal point is active in num1, else 0
    """
    def __init__(self, **kwargs):
        super(MainLayout, self).__init__(**kwargs)
        self.orientation = "vertical"
        self.spacing = 10
        self.padding = 10
        self.num1 = 0
        self.num2 = 0
        self.funct = 0
        self.deciPressed = 0
        # Display label at the top of the page.
        self.tracker = Label(text=str(self.num1), font_size=50, halign='center')
        self.tracker.size_hint = (1, .1)
        self.add_widget(self.tracker)
        clear = Button(text="Clear", font_size=24, background_color=(3,2,3,2))
        clear.size_hint = (1, .1)
        clear.bind(on_press = self.press)
        buttons = GridLayout(cols=4, spacing=10)
        self.add_widget(buttons)
        self.add_widget(clear)
        # Digits 0-9 followed by the operator keys, all routed through press().
        butts = []
        for i in range(10):
            butts.append(str(i))
        butts.append("+")
        butts.append("-")
        butts.append("*")
        butts.append("/")
        butts.append("=")
        butts.append(".")
        for butt in butts:
            b = Button(text=str(butt))
            b.bind(on_press=self.press)
            b.background_color = (3,1,1,1)
            b.font_size = (24)
            buttons.add_widget(b)

        # Dispatch table: button caption -> handler.
        self.symbolFunctions = {
            "+":self.adder,
            "-":self.minuser,
            "=":self.equals,
            "*":self.multiplyer,
            "/":self.divider,
            ".":self.decimal,
            "Clear":self.numclear
        }
    def divider(self,obj):
        # Store the current entry as the first operand and await the second.
        self.num1 = float(self.num1)
        self.num2 = self.num1
        self.num1 = 0
        self.funct = 4
        self.deciPressed = 0
    def multiplyer(self,obj):
        self.num1 = float(self.num1)
        self.num2 = self.num1
        self.num1 = 0
        self.funct = 3
        self.deciPressed = 0
    def equals(self,obj):

        try:
            num1 = float(self.num1)
            num2 = float(self.num2)
            # NOTE(review): setting deciPressed here blocks a '.' right after
            # '=' even when the result is an integer — confirm intended.
            self.deciPressed = 1
            if self.funct == 1:
                self.num1 = num1 + num2
            elif self.funct == 2:
                # Operand order: stored value minus current entry.
                self.num1 = num2 - num1
            elif self.funct == 3:
                self.num1 = num2 * num1
            elif self.funct == 4:
                self.num1 = num2 / num1
            self.num1 = float(self.num1)
        except (ValueError, ZeroDivisionError) as e:
            # Bad input (e.g. trailing '.') or division by zero resets the entry.
            print(e)
            self.num1 = 0


    def adder(self, obj):
        self.num1 = float(self.num1)
        self.num2 = self.num1
        self.num1 = 0
        self.funct = 1
        self.deciPressed = 0
    def minuser(self,obj):
        self.num1 = float(self.num1)
        self.num2 = self.num1
        self.num1 = 0
        self.funct = 2
        self.deciPressed = 0
    def numclear(self, obj):
        # Clears only the current entry; num2/funct keep the pending operation.
        self.num1 = 0
        self.deciPressed = 0
    def press(self, obj):
        # Digits are appended to the current entry; anything non-numeric is
        # dispatched through symbolFunctions (int() raising ValueError is the test).
        try:
            int(obj.text)
            self.num1 = str(self.num1) + str(obj.text)
            self.tracker.text = str(float(self.num1))
        except ValueError:
            self.symbolFunctions[obj.text](obj)
            self.tracker.text = str(float(self.num1))
    def decimal(self, obj):
        # Append at most one decimal point per entry.
        if self.deciPressed == 0:
            self.num1 = str(self.num1) + (".")
            self.deciPressed = 1
        else:
            self.num1 = str(self.num1)
class Notes(BoxLayout):
    """Second carousel page: a title label above a free-form text area."""
    def __init__(self, **kwargs):
        super(Notes, self).__init__(**kwargs)
        self.orientation = "vertical"
        self.spacing = 10
        self.padding = 10
        self.title = Label(text="Notes", font_size=50, halign='center')
        self.title.size_hint = (1, .1)
        self.textField = TextInput(font_size=24)
        self.add_widget(self.title)
        self.add_widget(self.textField)
class CalcApp(App):
    """App root: a carousel holding the calculator and the notes page."""
    def build(self):
        root = Carousel(direction='right')
        root.add_widget(MainLayout())
        root.add_widget(Notes())
        return root
if __name__ == "__main__":
    CalcApp().run()
#!/usr/bin/env python3

"""
botshot.py 0.2 - Mass Web Screenshot Command Line Script
Copyright (c) 2017-2020 Marco Ivaldi <raptor@0xdeadbeef.info>

"The Other Way to Pen-Test" --HD Moore & Valsmith

Botshot is a Python script that captures screenshots of
websites from the command line. It is useful to automate
mapping of the web attack surface of large networks.

Based on previous work by @federicodotta and @0-duke.

Requirements:
Python 3 (https://pythonclock.org/ is ticking...)
Selenium (https://pypi.python.org/pypi/selenium)
ChromeDriver (https://chromedriver.chromium.org/)

Example usage:
$ ./botshot.py -f urls

TODO:
Implement import from Nmap's XML output files
Add the ability to save output in HTML format
Add the ability to perform nikto/dirb scans
Migrate to Electron (https://electron.atom.io/)?

Get the latest version at:
https://github.com/0xdea/tactical-exploitation/
"""

# Keep in sync with the version in the module docstring (was "0.1" while
# the docstring already said 0.2).
VERSION = "0.2"
BANNER = """
botshot.py {0} - Mass Web Screenshot Command Line Script
Copyright (c) 2017-2020 Marco Ivaldi <raptor@0xdeadbeef.info>
""".format(VERSION)

import sys
import argparse
import time
import os
import re
from selenium import webdriver

def webshot(args):
    """
    Mass web screenshot function.

    Grabs a PNG screenshot of every URL listed in args.f (one URL per
    line) and saves it in a timestamped output directory. Exits the
    process with status 1 on unrecoverable setup errors.
    """

    targets = [url.rstrip() for url in args.f]
    timeout = args.t

    # chrome webdriver options
    options = webdriver.ChromeOptions()
    options.add_argument("--headless")
    options.add_argument("--ignore-certificate-errors")
    options.add_argument("--no-sandbox")
    #options.add_argument("--disable-dev-shm-usage")

    # set up headless browser
    # BUGFIX: if webdriver.Chrome() itself fails there is no browser to
    # quit; the original called browser.quit() here and raised NameError.
    try:
        browser = webdriver.Chrome(options=options)
    except Exception as err:
        print("// error: {0}".format(err))
        sys.exit(1)

    # configure the browser; from here on cleanup via quit() is possible
    try:
        browser.set_page_load_timeout(timeout)
        browser.set_window_size(1920, 1080)
    except Exception as err:
        print("// error: {0}".format(err))
        browser.quit()
        sys.exit(1)

    # create output directory
    outdir = "webshots-" + time.strftime("%Y%m%d-%H%M%S", time.localtime())
    try:
        os.mkdir(outdir, mode=0o755)
    except Exception as err:
        print("// error: {0}".format(err))
        browser.quit()
        sys.exit(1)

    for url in targets:
        print("*** Grabbing screenshot of {0} ***\n".format(url))

        # derive a filesystem-safe file name from the URL
        p = re.compile("[:/]+")
        outfile = outdir + "/" + p.sub("_", url) + ".png"

        try:
            browser.get("about:blank")
            browser.get(url)
            time.sleep(1) # workaround for some targets
            browser.save_screenshot(outfile)
        except (KeyboardInterrupt, SystemExit):
            browser.quit()
            sys.exit(1)
        except Exception as err:
            # per-URL failures are reported but do not abort the run
            print("// error: {0}".format(err))

    browser.quit()
    return

def get_args():
    """
    Get command line arguments.

    Returns the parsed argparse.Namespace; prints usage and exits
    with status 0 when invoked without arguments.
    """

    parser = argparse.ArgumentParser()
    parser.set_defaults(func=webshot)

    parser.add_argument(
        "-f",
        metavar="FILE",
        type=argparse.FileType("r"),
        required=True,
        help="specify file containing a list of URLs")
    parser.add_argument(
        "-t",
        metavar="TIMEOUT",
        type=int,
        default=30,
        help="specify timeout in seconds (default: 30)")

    if len(sys.argv) == 1:
        parser.print_help()
        sys.exit(0)

    return parser.parse_args()

def main():
    """
    Main function: print the banner, sanity-check the interpreter
    version, and dispatch to the selected sub-function.
    """

    print(BANNER)

    if sys.version_info[0] != 3:
        print("// error: this script requires python 3")
        sys.exit(1)

    args = get_args()
    args.func(args)

if __name__ == "__main__":
    main()
import json

from django.template.loader import render_to_string
from django.urls import reverse
from django.utils.translation import ugettext_lazy as _
from wagtail.admin.widgets import AdminChooser


class AdminModelChooser(AdminChooser):
    """Wagtail admin form widget for choosing an instance of ``model``.

    Wraps the base :class:`AdminChooser`, localising the chooser labels with
    the model's verbose name and wiring the JS chooser modal to the
    ``model_chooser`` view.
    """
    show_edit_link = False

    def __init__(self, model, filter_name=None, **kwargs):
        """
        Args:
            model: The Django model class instances are chosen from.
            filter_name: Optional name of a registered chooser filter used to
                narrow the queryset served by the chooser view.
            **kwargs: Forwarded to :class:`AdminChooser`.
        """
        self.target_model = model
        name = self.target_model._meta.verbose_name
        self.choose_one_text = _('Choose %s') % name
        self.choose_another_text = _('Choose another %s') % name
        self.link_to_chosen_text = _('Edit this %s') % name

        self.filter_name = filter_name

        super(AdminModelChooser, self).__init__(**kwargs)

    def render_html(self, name, value, attrs):
        """Render the widget's HTML, wrapping the base widget output in the
        model-chooser template."""
        instance, value = self.get_instance_and_id(self.target_model, value)

        original_field_html = super(AdminModelChooser, self).render_html(
            name, value, attrs)

        return render_to_string("wagtailmodelchooser/model_chooser.html", {
            'widget': self,
            'model_opts': self.target_model._meta,
            'original_field_html': original_field_html,
            'attrs': attrs,
            'value': value,
            'item': instance,
        })

    def render_js_init(self, id_, name, value):
        """Return the JS snippet that initialises the chooser widget."""
        opts = self.target_model._meta
        kwargs = {'app_label': opts.app_label, 'model_name': opts.model_name}
        if self.filter_name:
            kwargs['filter_name'] = self.filter_name

        # NOTE(review): the previous code also passed
        # ``filter_name=json.dumps(self.filter_name)`` to .format(), but the
        # template has no {filter_name} placeholder, so it was dead code —
        # the filter name already reaches the view via the reversed URL.
        return "createModelChooser({id}, {url});".format(
            id=json.dumps(id_),
            url=json.dumps(reverse('model_chooser', kwargs=kwargs)))
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Mon Mar 18 14:28:47 2019

@author: smrak
"""
import os
import numpy as np
import madrigalWeb.madrigalWeb
import subprocess
from datetime import datetime

# Madrigal instrument codes for the supported geophysical indices:
# geophysical indices 210, AE 211, DST 212.
IX = {'dst': 212, 'aei': 211, 'geo': 210}


def dlIndex(savedir: str = None,
            index: str = 'dst',
            user_fullname: str = None,
            user_email: str = None,
            user_affiliation: str = None,
            fixpath: bool = False):
    """Download geophysical-index HDF5 files from the CEDAR Madrigal database.

    Args:
        savedir: Root directory for downloaded files (required).
        index: One of the keys of ``IX`` ('dst', 'aei', 'geo').
        user_fullname / user_email / user_affiliation: Madrigal download
            credentials; defaults are filled in when omitted.
        fixpath: When True, save files directly under ``savedir`` instead of
            reproducing the remote directory layout.
    """
    assert savedir is not None

    if user_fullname is None:
        user_fullname = 'Sebastijan Mrak'
    if user_email is None:
        user_email = 'smrak@bu.edu'
    if user_affiliation is None:
        user_affiliation = 'BU'

    # Open the Madrigal database.
    madrigalUrl = 'http://cedar.openmadrigal.org/'
    MD = madrigalWeb.madrigalWeb.MadrigalData(madrigalUrl)

    # Query every experiment for the chosen instrument from 1900 until today.
    T = datetime.now()
    expList = MD.getExperiments(IX[index],
                                1900, 1, 1, 0, 0, 1,
                                T.year, T.month, T.day, 0, 0, 0)

    # ==================== Get links-filenames and output paths ============ #
    ids = [n.id for n in expList]
    file_list = np.array([MD.getExperimentFiles(n) for n in ids])
    fnlist = []
    savefnlist = []
    for subarr in file_list:
        for this_file in subarr:
            if this_file.category == 1:  # category 1 == default files
                path = os.path.expanduser(this_file.name)
                parts = path.split(os.sep)
                path_fn = os.path.split(path)[1]
                print(path_fn)
                # Keep only files whose name starts with the index name.
                if path_fn[:3] == index:
                    if not fixpath:
                        # Mirror the remote layout below savedir.
                        p = savedir + '/'.join(parts[4:])
                        savefn = os.path.join(p)
                        savefnlist.append(os.path.expanduser(savefn))
                    else:
                        # Flat layout: file name only, directly under savedir.
                        savefn = savedir + os.path.split(path)[1]
                        print(savefn)
                        savefnlist.append(os.path.expanduser(savefn))
                    fnlist.append(this_file.name)

    # Check for directories. os.makedirs replaces the previous
    # `subprocess.call("mkdir -p ...", shell=True)`: portable (works on
    # Windows), no shell-injection risk, and no 10 s timeout to tune.
    for ofn in savefnlist:
        head = os.path.split(ofn)[0]
        if head and not os.path.exists(head):
            os.makedirs(head, exist_ok=True)

    # Download anything we do not already have.
    for i in range(len(savefnlist)):
        if not os.path.exists(savefnlist[i]):
            print('Downloading {}'.format(os.path.split(savefnlist[i])[1]))
            MD.downloadFile(fnlist[i], savefnlist[i],
                            user_fullname, user_email, user_affiliation,
                            format='hdf5')
        else:
            print("{} already exists".format(savefnlist[i]))


if __name__ == '__main__':
    from argparse import ArgumentParser
    p = ArgumentParser()
    p.add_argument('odir', type=str, help='Output directory root')
    p.add_argument('-i', '--index', help='Which index? dst, ae, geo', default='dst')
    p.add_argument('--fixpath', help='Save to exact directory', action='store_true')
    p.add_argument('--name', type=str, help='"Full name"')
    p.add_argument('--email', type=str, help='"email"')
    p.add_argument('--affiliation', type=str, help='"affiliation"')

    P = p.parse_args()

    dlIndex(savedir=P.odir,
            index=P.index,
            user_fullname=P.name,
            user_email=P.email,
            user_affiliation=P.affiliation,
            fixpath=P.fixpath)
from struct import unpack


class FloatLookup:
    """Immutable table of floats addressed by a bounds-checked index."""

    def __init__(self, data):
        # data: sequence of floats; range_ caches the exclusive index bound.
        self.data = data
        self.range_ = len(data)

    @staticmethod
    def get_lookup_from_double(file) -> 'FloatLookup':
        """Build a lookup from a binary stream.

        The stream layout is a big-endian int32 count followed by that many
        big-endian float64 values.
        """
        (count,) = unpack('>i', file.read(4))
        payload = file.read(count * 8)
        return FloatLookup(tuple(unpack('>%dd' % count, payload)))

    def get(self, n: int) -> float:
        """Return entry *n*; raise ValueError when *n* is out of bounds."""
        if n < 0 or n >= self.range_:
            raise ValueError("Value is out of range")
        return self.data[n]
# Copyright 2021 NVIDIA Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

import legate.numpy as lg


def test():
    """Smoke-test data type conversion: max() must agree between an integer
    array and its float counterpart."""
    int_arr = lg.array([1, 2, 3])
    float_arr = lg.array([1.0, 2.0, 3.0])

    assert lg.max(int_arr) == lg.max(float_arr)


if __name__ == "__main__":
    test()
import logging
from deep_lyric_visualizer.helpers import setup_logger, _extract_name_from_path
from deep_lyric_visualizer.generator.generation_environment import (GenerationEnvironment,
                                                                    WikipediaBigGANGenerationEnviornment)
from deep_lyric_visualizer.generator.generatorio import PickleGeneratorIO, YAMLGeneratorIO

from deep_lyric_visualizer.nlp.vectorizer import Vectorizer
import numpy as np
setup_logger()
logger = logging.getLogger(__name__)


class ImageCategoryVectorizer(Vectorizer):
    def __init__(self, gen_env=None):
        """A vectorizer specific to image categories. Inherits from the
        Vectorizer class in the nlp section of this package.

        Args:
            gen_env (generator.GenerationEnvironment, optional): a
                GenerationEnvironment instance. If None, uses the default.
                Defaults to None.
        """
        super().__init__(gen_env)

        self.name = __name__ if __name__ != '__main__' else _extract_name_from_path(
            __file__)
        # Mapping of category id -> embedding vector, filled by
        # vectorize_categories (and persisted via self.attrs).
        self.vectorized_dict = None

        self.attrs = ['vectorized_dict']

    def _mean_strategy(self, category_tokens):
        """Defines the so-called 'mean' strategy for vectorizing a list of
        lists of tokens for a category. Each sub-category or topic is treated
        first, averaging the embeddings for the tokens in that category.
        Then, the results from each sub-category/topic are averaged together.

        Args:
            category_tokens (list): A list of lists, one list of tokens
                for each topic in the category.

        Returns:
            np.array: A so-called category vector, an embedding for the
                category. The zero vector when no token could be vectorized.
        """
        wordvec_sum = np.zeros(self.env.wordvec_dim)
        n_phrases = 0

        for tokens in category_tokens:
            if not tokens:
                continue

            vec = np.zeros(self.env.wordvec_dim)
            n_vectorizable_phrases = 0
            for token in tokens:
                try:
                    vectorized = self.vectorize_word(token)
                except KeyError:
                    # Token is absent from the embedding vocabulary; skip it.
                    pass
                else:
                    n_vectorizable_phrases += 1
                    vec += vectorized
            if n_vectorizable_phrases == 0:
                # Topic contributed nothing vectorizable; do not count it.
                continue
            n_phrases += 1
            wordvec_sum += vec / n_vectorizable_phrases

        return (wordvec_sum / n_phrases) if n_phrases != 0 else wordvec_sum

    def vectorize_category(self, category_tokens, strategy='mean'):
        """Handles the vectorization of a category by a particular strategy.
        At the moment, the only supported strategy is the mean strategy.

        Args:
            category_tokens (list [list [str]]): A list of lists,
                one list of tokens for each topic in the category.
            strategy (str, optional): One of {"mean"}. Defaults to 'mean'.

        Returns:
            np.array: An array with the vector representing the category.

        Raises:
            ValueError: If an unsupported strategy is requested (previously
                this fell through and silently returned None).
        """
        if strategy == 'mean':
            return self._mean_strategy(category_tokens)
        raise ValueError(f"Unsupported vectorization strategy: {strategy!r}")

    def vectorize_categories(self, categories_tokens, strategy='mean'):
        """Vectorize a set of categories given their lists of lists of tokens.

        Args:
            categories_tokens (dict): A dictionary mapping the id number
                of a category to its lists of lists of tokens.
            strategy (str, optional): One of {"mean"}. Defaults to 'mean'.

        Returns:
            dict: Dictionary with embeddings for each category_id.
        """
        # BUG FIX: `strategy` was previously accepted but never forwarded,
        # so every call silently used the default strategy.
        self.vectorized_dict = {
            id_: self.vectorize_category(category, strategy=strategy)
            for id_, category in categories_tokens.items()}
        return self.vectorized_dict


if __name__ == '__main__':
    im_vec = ImageCategoryVectorizer()
    im_vec.load()
    print(im_vec.vectorized_dict)
# coding=utf-8NEWLINE# --------------------------------------------------------------------------------------------NEWLINE# Copyright (c) Microsoft Corporation. All rights reserved.NEWLINE# Licensed under the MIT License. See License.txt in the project root for license information.NEWLINE# --------------------------------------------------------------------------------------------NEWLINENEWLINEfrom knack.help_files import helps # pylint: disable=unused-importNEWLINE# pylint: disable=line-too-long, too-many-linesNEWLINENEWLINEhelps['account lock'] = """NEWLINEtype: groupNEWLINEshort-summary: Manage Azure subscription level locks.NEWLINE"""NEWLINENEWLINEhelps['account lock create'] = """NEWLINEtype: commandNEWLINEshort-summary: Create a subscription lock.NEWLINEexamples:NEWLINE - name: Create a read-only subscription level lock.NEWLINE text: >NEWLINE az account lock create --lock-type ReadOnly -n lockNameNEWLINE"""NEWLINENEWLINEhelps['account lock delete'] = """NEWLINEtype: commandNEWLINEshort-summary: Delete a subscription lock.NEWLINEexamples:NEWLINE - name: Delete a subscription lockNEWLINE text: >NEWLINE az account lock delete --name lockNameNEWLINE"""NEWLINENEWLINEhelps['account lock list'] = """NEWLINEtype: commandNEWLINEshort-summary: List lock information in the subscription.NEWLINEexamples:NEWLINE - name: List out all locks on the subscription levelNEWLINE text: >NEWLINE az account lock listNEWLINE"""NEWLINENEWLINEhelps['account lock show'] = """NEWLINEtype: commandNEWLINEshort-summary: Show the details of a subscription lockNEWLINEexamples:NEWLINE - name: Show a subscription level lockNEWLINE text: >NEWLINE az account lock show -n locknameNEWLINE"""NEWLINENEWLINEhelps['account lock update'] = """NEWLINEtype: commandNEWLINEshort-summary: Update a subscription lock.NEWLINEexamples:NEWLINE - name: Update a subscription lock with new notes and typeNEWLINE text: >NEWLINE az account lock update --name lockName --notes newNotesHere --lock-type 
CanNotDeleteNEWLINE"""NEWLINENEWLINEhelps['account management-group'] = """NEWLINEtype: groupNEWLINEshort-summary: Manage Azure Management Groups.NEWLINE"""NEWLINENEWLINEhelps['account management-group create'] = """NEWLINEtype: commandNEWLINEshort-summary: Create a new management group.NEWLINElong-summary: Create a new management group.NEWLINEparameters:NEWLINE - name: --name -nNEWLINE type: stringNEWLINE short-summary: Name of the management group.NEWLINE - name: --display-name -dNEWLINE type: stringNEWLINE short-summary: Sets the display name of the management group. If null, the group name is set as the display name.NEWLINE - name: --parent -pNEWLINE type: stringNEWLINE short-summary: Sets the parent of the management group. Can be the fully qualified id or the name of the management group. If null, the root tenant group is set as the parent.NEWLINEexamples:NEWLINE - name: Create a new management group.NEWLINE text: >NEWLINE az account management-group create --name GroupNameNEWLINE - name: Create a new management group with a specific display name.NEWLINE text: >NEWLINE az account management-group create --name GroupName --display-name DisplayNameNEWLINE - name: Create a new management group with a specific parent.NEWLINE text: >NEWLINE az account management-group create --name GroupName --parent ParentId/ParentNameNEWLINE - name: Create a new management group with a specific display name and parent.NEWLINE text: >NEWLINE az account management-group create --name GroupName --display-name DisplayName --parent ParentId/ParentNameNEWLINE"""NEWLINENEWLINEhelps['account management-group delete'] = """NEWLINEtype: commandNEWLINEshort-summary: Delete an existing management group.NEWLINElong-summary: Delete an existing management group.NEWLINEparameters:NEWLINE - name: --name -nNEWLINE type: stringNEWLINE short-summary: Name of the management group.NEWLINEexamples:NEWLINE - name: Delete an existing management groupNEWLINE text: >NEWLINE az account management-group 
delete --name GroupNameNEWLINE"""NEWLINENEWLINEhelps['account management-group list'] = """NEWLINEtype: commandNEWLINEshort-summary: List all management groups.NEWLINElong-summary: List of all management groups in the current tenant.NEWLINEexamples:NEWLINE - name: List all management groupsNEWLINE text: >NEWLINE az account management-group listNEWLINE"""NEWLINENEWLINEhelps['account management-group show'] = """NEWLINEtype: commandNEWLINEshort-summary: Get a specific management group.NEWLINElong-summary: Get the details of the management group.NEWLINEparameters:NEWLINE - name: --name -nNEWLINE type: stringNEWLINE short-summary: Name of the management group (the last segment of the resource ID). Do not use display name.NEWLINE - name: --expand -eNEWLINE type: boolNEWLINE short-summary: If given, lists the children in the first level of hierarchy.NEWLINE - name: --recurse -rNEWLINE type: boolNEWLINE short-summary: If given, lists the children in all levels of hierarchy.NEWLINEexamples:NEWLINE - name: Get a management group.NEWLINE text: >NEWLINE az account management-group show --name GroupNameNEWLINE - name: Get a management group with children in the first level of hierarchy.NEWLINE text: >NEWLINE az account management-group show --name GroupName -eNEWLINE - name: Get a management group with children in all levels of hierarchy.NEWLINE text: >NEWLINE az account management-group show --name GroupName -e -rNEWLINE"""NEWLINENEWLINEhelps['account management-group subscription'] = """NEWLINEtype: groupNEWLINEshort-summary: Subscription operations for Management Groups.NEWLINE"""NEWLINENEWLINEhelps['account management-group subscription add'] = """NEWLINEtype: commandNEWLINEshort-summary: Add a subscription to a management group.NEWLINElong-summary: Add a subscription to a management group.NEWLINEparameters:NEWLINE - name: --name -nNEWLINE type: stringNEWLINE short-summary: Name of the management group.NEWLINE - name: --subscription -sNEWLINE type: stringNEWLINE 
short-summary: Subscription Id or NameNEWLINEexamples:NEWLINE - name: Add a subscription to a management group.NEWLINE text: >NEWLINE az account management-group subscription add --name GroupName --subscription SubscriptionNEWLINE"""NEWLINENEWLINEhelps['account management-group subscription remove'] = """NEWLINEtype: commandNEWLINEshort-summary: Remove an existing subscription from a management group.NEWLINElong-summary: Remove an existing subscription from a management group.NEWLINEparameters:NEWLINE - name: --name -nNEWLINE type: stringNEWLINE short-summary: Name of the management group.NEWLINE - name: --subscription -sNEWLINE type: stringNEWLINE short-summary: Subscription Id or NameNEWLINEexamples:NEWLINE - name: Remove an existing subscription from a management group.NEWLINE text: >NEWLINE az account management-group subscription remove --name GroupName --subscription SubscriptionNEWLINE"""NEWLINENEWLINEhelps['account management-group update'] = """NEWLINEtype: commandNEWLINEshort-summary: Update an existing management group.NEWLINElong-summary: Update an existing management group.NEWLINEparameters:NEWLINE - name: --name -nNEWLINE type: stringNEWLINE short-summary: Name of the management group.NEWLINE - name: --display-name -dNEWLINE type: stringNEWLINE short-summary: Updates the display name of the management group. If null, no change is made.NEWLINE - name: --parent -pNEWLINE type: stringNEWLINE short-summary: Update the parent of the management group. Can be the fully qualified id or the name of the management group. 
If null, no change is made.NEWLINEexamples:NEWLINE - name: Update an existing management group with a specific display name.NEWLINE text: >NEWLINE az account management-group update --name GroupName --display-name DisplayNameNEWLINE - name: Update an existing management group with a specific parent.NEWLINE text: >NEWLINE az account management-group update --name GroupName --parent ParentId/ParentNameNEWLINE - name: Update an existing management group with a specific display name and parent.NEWLINE text: >NEWLINE az account management-group update --name GroupName --display-name DisplayName --parent ParentId/ParentNameNEWLINE"""NEWLINENEWLINEhelps['deployment'] = """NEWLINEtype: groupNEWLINEshort-summary: Manage Azure Resource Manager template deployment at subscription scope.NEWLINE"""NEWLINENEWLINEhelps['deployment list'] = """NEWLINEtype: commandNEWLINEshort-summary: List deployments at subscription scope.NEWLINEexamples:NEWLINE - name: List deployments at subscription scope.NEWLINE text: az deployment listNEWLINE"""NEWLINENEWLINEhelps['deployment show'] = """NEWLINEtype: commandNEWLINEshort-summary: Show a deployment at subscription scope.NEWLINEexamples:NEWLINE - name: Show a deployment at subscription scope.NEWLINE text: az deployment show -n deployment01NEWLINE"""NEWLINENEWLINEhelps['deployment delete'] = """NEWLINEtype: commandNEWLINEshort-summary: Delete a deployment at subscription scope.NEWLINEexamples:NEWLINE - name: Delete a deployment at subscription scope.NEWLINE text: az deployment delete -n deployment01NEWLINE"""NEWLINENEWLINEhelps['deployment cancel'] = """NEWLINEtype: commandNEWLINEshort-summary: Cancel a deployment at subscription scope.NEWLINEexamples:NEWLINE - name: Cancel a deployment at subscription scope.NEWLINE text: az deployment cancel -n deployment01NEWLINE"""NEWLINENEWLINEhelps['deployment validate'] = """NEWLINEtype: commandNEWLINEshort-summary: Validate whether a template is valid at subscription scope.NEWLINEparameters:NEWLINE - 
name: --parameters -pNEWLINE short-summary: Supply deployment parameter values.NEWLINE long-summary: >NEWLINE Parameters may be supplied from a file using the `@{path}` syntax, a JSON string, or as <KEY=VALUE> pairs. Parameters are evaluated in order, so when a value is assigned twice, the latter value will be used.NEWLINE It is recommended that you supply your parameters file first, and then override selectively using KEY=VALUE syntax.NEWLINE - name: --template-file -fNEWLINE short-summary: The path to the template file or Bicep file.NEWLINE - name: --template-uri -uNEWLINE short-summary: The URI to the template file.NEWLINE - name: --template-spec -sNEWLINE short-summary: The template spec resource id.NEWLINE - name: --location -lNEWLINE short-summary: The location to store the deployment metadata.NEWLINE - name: --name -nNEWLINE short-summary: The deployment name.NEWLINEexamples:NEWLINE - name: Validate whether a template is valid at subscription scope.NEWLINE text: |NEWLINE az deployment validate --location westus2 --parameters MyValue=This MyArray=@array.json --template-file azuredeploy.jsonNEWLINE"""NEWLINENEWLINEhelps['deployment create'] = """NEWLINEtype: commandNEWLINEshort-summary: Start a deployment at subscription scope.NEWLINEparameters:NEWLINE - name: --parameters -pNEWLINE short-summary: Supply deployment parameter values.NEWLINE long-summary: >NEWLINE Parameters may be supplied from a file using the `@{path}` syntax, a JSON string, or as <KEY=VALUE> pairs. 
Parameters are evaluated in order, so when a value is assigned twice, the latter value will be used.NEWLINE It is recommended that you supply your parameters file first, and then override selectively using KEY=VALUE syntax.NEWLINE - name: --template-file -fNEWLINE short-summary: The path to the template file or Bicep file.NEWLINE - name: --template-uri -uNEWLINE short-summary: The URI to the template file.NEWLINE - name: --template-spec -sNEWLINE short-summary: The template spec resource id.NEWLINE - name: --location -lNEWLINE short-summary: The location to store the deployment metadata.NEWLINE - name: --name -nNEWLINE short-summary: The deployment name.NEWLINE - name: --what-if-result-format -rNEWLINE short-summary: The format of What-If results. Applicable when `--confirm-with-what-if` is set.NEWLINEexamples:NEWLINE - name: Create a deployment at subscription scope from a remote template file, using parameters from a local JSON file.NEWLINE text: >NEWLINE az deployment create --location WestUS --template-uri https://myresource/azuredeploy.json --parameters @myparameters.jsonNEWLINE - name: Create a deployment at subscription scope from a local template file, using parameters from a JSON string.NEWLINE text: |NEWLINE az deployment create --location WestUS --template-file azuredeploy.json \\NEWLINE --parameters "{ \\"policyName\\": { \\"value\\": \\"policy2\\" }}"NEWLINE - name: Create a deployment at subscription scope from a local template, using a parameter file, a remote parameter file, and selectively overriding key/value pairs.NEWLINE text: >NEWLINE az deployment create --location WestUS --template-file azuredeploy.json \\NEWLINE --parameters @params.json --parameters https://mysite/params.json --parameters MyValue=This MyArray=@array.jsonNEWLINE - name: Create a deployment at subscription scope from a template-specNEWLINE text: >NEWLINE az deployment create --location WestUS --template-spec 
"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/myRG/providers/Microsoft.Resources/templateSpecs/myTemplateSpec/versions/1.0"NEWLINE"""NEWLINENEWLINEhelps['deployment export'] = """NEWLINEtype: commandNEWLINEshort-summary: Export the template used for a deployment.NEWLINEexamples:NEWLINE - name: Export the template used for a deployment at subscription scope.NEWLINE text: |NEWLINE az deployment export --name MyDeploymentNEWLINE"""NEWLINENEWLINEhelps['deployment wait'] = """NEWLINEtype: commandNEWLINEshort-summary: Place the CLI in a waiting state until a deployment condition is met.NEWLINEexamples:NEWLINE - name: Place the CLI in a waiting state until a deployment condition is met. (autogenerated)NEWLINE text: |NEWLINE az deployment wait --deleted --name MyDeployment --subscription MySubscriptionNEWLINE crafted: trueNEWLINE"""NEWLINENEWLINEhelps['deployment operation'] = """NEWLINEtype: groupNEWLINEshort-summary: Manage deployment operations at subscription scope.NEWLINE"""NEWLINENEWLINEhelps['deployment operation list'] = """NEWLINEtype: commandNEWLINEshort-summary: List deployment operations at subscription scope.NEWLINEexamples:NEWLINE - name: List deployment operations at subscription scope. 
(autogenerated)NEWLINE text: |NEWLINE az deployment operation list --name MyDeploymentNEWLINE crafted: trueNEWLINE"""NEWLINENEWLINEhelps['deployment operation show'] = """NEWLINEtype: commandNEWLINEshort-summary: Show a deployment operation at subscription scope.NEWLINE"""NEWLINENEWLINEhelps['deployment sub'] = """NEWLINEtype: groupNEWLINEshort-summary: Manage Azure Resource Manager template deployment at subscription scope.NEWLINE"""NEWLINENEWLINEhelps['deployment sub list'] = """NEWLINEtype: commandNEWLINEshort-summary: List deployments at subscription scope.NEWLINEexamples:NEWLINE - name: List deployments at subscription scope.NEWLINE text: az deployment sub listNEWLINE"""NEWLINENEWLINEhelps['deployment sub show'] = """NEWLINEtype: commandNEWLINEshort-summary: Show a deployment at subscription scope.NEWLINEexamples:NEWLINE - name: Show a deployment at subscription scope.NEWLINE text: az deployment sub show -n deployment01NEWLINE"""NEWLINENEWLINEhelps['deployment sub delete'] = """NEWLINEtype: commandNEWLINEshort-summary: Delete a deployment at subscription scope.NEWLINEexamples:NEWLINE - name: Delete a deployment at subscription scope.NEWLINE text: az deployment sub delete -n deployment01NEWLINE"""NEWLINENEWLINEhelps['deployment sub cancel'] = """NEWLINEtype: commandNEWLINEshort-summary: Cancel a deployment at subscription scope.NEWLINEexamples:NEWLINE - name: Cancel a deployment at subscription scope.NEWLINE text: az deployment sub cancel -n deployment01NEWLINE"""NEWLINENEWLINEhelps['deployment sub validate'] = """NEWLINEtype: commandNEWLINEshort-summary: Validate whether a template is valid at subscription scope.NEWLINEparameters:NEWLINE - name: --parameters -pNEWLINE short-summary: Supply deployment parameter values.NEWLINE long-summary: >NEWLINE Parameters may be supplied from a file using the `@{path}` syntax, a JSON string, or as <KEY=VALUE> pairs. 
Parameters are evaluated in order, so when a value is assigned twice, the latter value will be used.NEWLINE It is recommended that you supply your parameters file first, and then override selectively using KEY=VALUE syntax.NEWLINE - name: --template-file -fNEWLINE short-summary: The path to the template file or Bicep file.NEWLINE - name: --template-uri -uNEWLINE short-summary: The URI to the template file.NEWLINE - name: --template-spec -sNEWLINE short-summary: The template spec resource id.NEWLINE - name: --location -lNEWLINE short-summary: The location to store the deployment metadata.NEWLINE - name: --name -nNEWLINE short-summary: The deployment name.NEWLINEexamples:NEWLINE - name: Validate whether a template is valid at subscription scope.NEWLINE text: az deployment sub validate --location westus2 --template-file {template-file}NEWLINE - name: Validate whether a template is valid at subscription scope. (autogenerated)NEWLINE text: |NEWLINE az deployment sub validate --location westus2 --parameters MyValue=This MyArray=@array.json --template-file azuredeploy.jsonNEWLINE crafted: trueNEWLINE"""NEWLINENEWLINEhelps['deployment sub create'] = """NEWLINEtype: commandNEWLINEshort-summary: Start a deployment at subscription scope.NEWLINEparameters:NEWLINE - name: --parameters -pNEWLINE short-summary: Supply deployment parameter values.NEWLINE long-summary: >NEWLINE Parameters may be supplied from a file using the `@{path}` syntax, a JSON string, or as <KEY=VALUE> pairs. 
Parameters are evaluated in order, so when a value is assigned twice, the latter value will be used.NEWLINE It is recommended that you supply your parameters file first, and then override selectively using KEY=VALUE syntax.NEWLINE - name: --template-file -fNEWLINE short-summary: The path to the template file or Bicep file.NEWLINE - name: --template-uri -uNEWLINE short-summary: The URI to the template file.NEWLINE - name: --template-spec -sNEWLINE short-summary: The template spec resource id.NEWLINE - name: --location -lNEWLINE short-summary: The location to store the deployment metadata.NEWLINE - name: --name -nNEWLINE short-summary: The deployment name.NEWLINE - name: --what-if-result-format -rNEWLINE short-summary: The format of What-If results. Applicable when `--confirm-with-what-if` is set.NEWLINEexamples:NEWLINE - name: Create a deployment at subscription scope from a remote template file, using parameters from a local JSON file.NEWLINE text: >NEWLINE az deployment sub create --location WestUS --template-uri https://myresource/azuredeploy.json --parameters @myparameters.jsonNEWLINE - name: Create a deployment at subscription scope from a local template file, using parameters from a JSON string.NEWLINE text: |NEWLINE az deployment sub create --location WestUS --template-file azuredeploy.json \\NEWLINE --parameters '{ \\"policyName\\": { \\"value\\": \\"policy2\\" } }'NEWLINE - name: Create a deployment at subscription scope from a local template, using a parameter file, a remote parameter file, and selectively overriding key/value pairs.NEWLINE text: >NEWLINE az deployment sub create --location WestUS --template-file azuredeploy.json \\NEWLINE --parameters @params.json --parameters https://mysite/params.json --parameters MyValue=This MyArray=@array.jsonNEWLINE"""NEWLINENEWLINENEWLINEhelps['deployment sub what-if'] = """NEWLINEtype: commandNEWLINEshort-summary: Execute a deployment What-If operation at subscription scope.NEWLINEparameters:NEWLINE - name: 
--parameters -pNEWLINE short-summary: Supply deployment parameter values.NEWLINE long-summary: >NEWLINE Parameters may be supplied from a file using the `@{path}` syntax, a JSON string, or as <KEY=VALUE> pairs. Parameters are evaluated in order, so when a value is assigned twice, the latter value will be used.NEWLINE It is recommended that you supply your parameters file first, and then override selectively using KEY=VALUE syntax.NEWLINE - name: --template-file -fNEWLINE short-summary: The path to the template file or Bicep file.NEWLINE - name: --template-uri -uNEWLINE short-summary: The URI to the template file.NEWLINE - name: --template-spec -sNEWLINE short-summary: The template spec resource id.NEWLINE - name: --location -lNEWLINE short-summary: The location to store the deployment What-If operation metadata.NEWLINE - name: --name -nNEWLINE short-summary: The deployment name.NEWLINE - name: --result-format -rNEWLINE short-summary: The format of What-If results.NEWLINEexamples:NEWLINE - name: Execute a deployment What-If operation at a subscription.NEWLINE text: >NEWLINE az deployment sub what-if --location WestUS --template-uri https://myresource/azuredeploy.json --parameters @myparameters.jsonNEWLINE - name: Execute a deployment What-If operation at a subscription with ResourceIdOnly format.NEWLINE text: >NEWLINE az deployment sub what-if --location WestUS --template-uri https://myresource/azuredeploy.json --parameters @myparameters.json --result-format ResourceIdOnlyNEWLINE - name: Execute a deployment What-If operation at a subscription without pretty-printing the result.NEWLINE text: >NEWLINE az deployment sub what-if --location WestUS --template-uri https://myresource/azuredeploy.json --parameters @myparameters.json --no-pretty-printNEWLINE"""NEWLINENEWLINEhelps['deployment sub export'] = """NEWLINEtype: commandNEWLINEshort-summary: Export the template used for a deployment.NEWLINEexamples:NEWLINE - name: Export the template used for a deployment at 
subscription scope.NEWLINE text: az deployment sub export --name MyDeploymentNEWLINE"""NEWLINENEWLINEhelps['deployment sub wait'] = """NEWLINEtype: commandNEWLINEshort-summary: Place the CLI in a waiting state until a deployment condition is met.NEWLINEexamples:NEWLINE - name: Place the CLI in a waiting state until a deployment condition is met. (autogenerated)NEWLINE text: |NEWLINE az deployment sub wait --created --name MyDeploymentNEWLINE crafted: trueNEWLINE"""NEWLINENEWLINEhelps['deployment operation sub'] = """NEWLINEtype: groupNEWLINEshort-summary: Manage deployment operations at subscription scope.NEWLINE"""NEWLINENEWLINEhelps['deployment operation sub list'] = """NEWLINEtype: commandNEWLINEshort-summary: List deployment operations at subscription scope.NEWLINEexamples:NEWLINE - name: List deployment operations at subscription scope. (autogenerated)NEWLINE text: |NEWLINE az deployment operation sub list --name mydeploymentNEWLINE crafted: trueNEWLINE"""NEWLINENEWLINEhelps['deployment operation sub show'] = """NEWLINEtype: commandNEWLINEshort-summary: Show a deployment operation at subscription scope.NEWLINE"""NEWLINENEWLINEhelps['deployment group'] = """NEWLINEtype: groupNEWLINEshort-summary: Manage Azure Resource Manager template deployment at resource group.NEWLINE"""NEWLINENEWLINEhelps['deployment group list'] = """NEWLINEtype: commandNEWLINEshort-summary: List deployments at resource group.NEWLINEexamples:NEWLINE - name: List deployments at resource group.NEWLINE text: az deployment group list -g testrgNEWLINE"""NEWLINENEWLINEhelps['deployment group show'] = """NEWLINEtype: commandNEWLINEshort-summary: Show a deployment at resource group.NEWLINEexamples:NEWLINE - name: Show a deployment at resource group.NEWLINE text: az deployment group show -g testrg -n deployment01NEWLINE"""NEWLINENEWLINEhelps['deployment group delete'] = """NEWLINEtype: commandNEWLINEshort-summary: Delete a deployment at resource group.NEWLINEexamples:NEWLINE - name: Delete a 
deployment at resource group.NEWLINE text: az deployment group delete -g testrg -n deployment01NEWLINE"""NEWLINENEWLINEhelps['deployment group cancel'] = """NEWLINEtype: commandNEWLINEshort-summary: Cancel a deployment at resource group.NEWLINEexamples:NEWLINE - name: Cancel a deployment at resource group.NEWLINE text: az deployment group cancel -g testrg -n deployment01NEWLINE"""NEWLINENEWLINEhelps['deployment group validate'] = """NEWLINEtype: commandNEWLINEshort-summary: Validate whether a template is valid at resource group.NEWLINEparameters:NEWLINE - name: --parameters -pNEWLINE short-summary: Supply deployment parameter values.NEWLINE long-summary: >NEWLINE Parameters may be supplied from a file using the `@{path}` syntax, a JSON string, or as <KEY=VALUE> pairs. Parameters are evaluated in order, so when a value is assigned twice, the latter value will be used.NEWLINE It is recommended that you supply your parameters file first, and then override selectively using KEY=VALUE syntax.NEWLINE - name: --template-file -fNEWLINE short-summary: The path to the template file or Bicep file.NEWLINE - name: --template-uri -uNEWLINE short-summary: The URI to the template file.NEWLINE - name: --template-spec -sNEWLINE short-summary: The template spec resource id.NEWLINE - name: --resource-group -gNEWLINE short-summary: The resource group to create deployment at.NEWLINE - name: --name -nNEWLINE short-summary: The deployment name.NEWLINE - name: --modeNEWLINE short-summary: The deployment mode.NEWLINEexamples:NEWLINE - name: Validate whether a template is valid at resource group.NEWLINE text: az deployment group validate --resource-group testrg --template-file {template-file}NEWLINE - name: Validate whether a template is valid at resource group. 
(autogenerated)NEWLINE text: |NEWLINE az deployment group validate --parameters MyValue=This MyArray=@array.json --resource-group testrg --template-file azuredeploy.jsonNEWLINE crafted: trueNEWLINE"""NEWLINENEWLINEhelps['deployment group create'] = """NEWLINEtype: commandNEWLINEshort-summary: Start a deployment at resource group.NEWLINEparameters:NEWLINE - name: --parameters -pNEWLINE short-summary: Supply deployment parameter values.NEWLINE long-summary: >NEWLINE Parameters may be supplied from a file using the `@{path}` syntax, a JSON string, or as <KEY=VALUE> pairs. Parameters are evaluated in order, so when a value is assigned twice, the latter value will be used.NEWLINE It is recommended that you supply your parameters file first, and then override selectively using KEY=VALUE syntax.NEWLINE - name: --template-file -fNEWLINE short-summary: The path to the template file or Bicep file.NEWLINE - name: --template-uri -uNEWLINE short-summary: The URI to the template file.NEWLINE - name: --template-spec -sNEWLINE short-summary: The template spec resource id.NEWLINE - name: --resource-group -gNEWLINE short-summary: The resource group to create deployment at.NEWLINE - name: --name -nNEWLINE short-summary: The deployment name.NEWLINE - name: --modeNEWLINE short-summary: The deployment mode.NEWLINE - name: --what-if-result-format -rNEWLINE short-summary: The format of What-If results. 
Applicable when `--confirm-with-what-if` is set.NEWLINEexamples:NEWLINE - name: Create a deployment at resource group from a remote template file, using parameters from a local JSON file.NEWLINE text: >NEWLINE az deployment group create --resource-group testrg --name rollout01 \\NEWLINE --template-uri https://myresource/azuredeploy.json --parameters @myparameters.jsonNEWLINE - name: Create a deployment at resource group from a local template file, using parameters from a JSON string.NEWLINE text: |NEWLINE az deployment group create --resource-group testrg --name rollout01 \\NEWLINE --template-file azuredeploy.json \\NEWLINE --parameters '{ \\"policyName\\": { \\"value\\": \\"policy2\\" } }'NEWLINE - name: Create a deployment at resource group from a local template file, using parameters from an array string.NEWLINE text: |NEWLINE az deployment group create --resource-group testgroup --template-file demotemplate.json --parameters exampleString='inline string' exampleArray='("value1", "value2")'NEWLINE - name: Create a deployment at resource group from a local template, using a parameter file, a remote parameter file, and selectively overriding key/value pairs.NEWLINE text: >NEWLINE az deployment group create --resource-group testrg --name rollout01 \\NEWLINE --template-file azuredeploy.json --parameters @params.json \\NEWLINE --parameters https://mysite/params.json --parameters MyValue=This MyArray=@array.jsonNEWLINE - name: Create a deployment at resource group from a template-specNEWLINE text: >NEWLINE az deployment group create --resource-group testrg --template-spec "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/testrg/providers/Microsoft.Resources/templateSpecs/myTemplateSpec/versions/1.0"NEWLINE"""NEWLINENEWLINEhelps['deployment group what-if'] = """NEWLINEtype: commandNEWLINEshort-summary: Execute a deployment What-If operation at resource group scope.NEWLINEparameters:NEWLINE - name: --parameters -pNEWLINE short-summary: Supply 
deployment parameter values.NEWLINE long-summary: >NEWLINE Parameters may be supplied from a file using the `@{path}` syntax, a JSON string, or as <KEY=VALUE> pairs. Parameters are evaluated in order, so when a value is assigned twice, the latter value will be used.NEWLINE It is recommended that you supply your parameters file first, and then override selectively using KEY=VALUE syntax.NEWLINE - name: --template-file -fNEWLINE short-summary: The path to the template file or Bicep file.NEWLINE - name: --template-uri -uNEWLINE short-summary: The URI to the template file.NEWLINE - name: --template-spec -sNEWLINE short-summary: The template spec resource id.NEWLINE - name: --resource-group -gNEWLINE short-summary: The resource group to execute deployment What-If operation at.NEWLINE - name: --name -nNEWLINE short-summary: The deployment name.NEWLINE - name: --modeNEWLINE short-summary: The deployment mode.NEWLINE - name: --result-format -rNEWLINE short-summary: The format of What-If results.NEWLINEexamples:NEWLINE - name: Execute a deployment What-If operation at a resource group.NEWLINE text: >NEWLINE az deployment group what-if --resource-group testrg --name rollout01 --template-uri https://myresource/azuredeploy.json --parameters @myparameters.jsonNEWLINE - name: Execute a deployment What-If operation at a resource group with ResourceIdOnly format.NEWLINE text: >NEWLINE az deployment group what-if --resource-group testrg --name rollout01 --template-uri https://myresource/azuredeploy.json --parameters @myparameters.json --result-format ResourceIdOnlyNEWLINE - name: Execute a deployment What-If operation at a resource group without pretty-printing the result.NEWLINE text: >NEWLINE az deployment group what-if --resource-group testrg --name rollout01 --template-uri https://myresource/azuredeploy.json --parameters @myparameters.json --no-pretty-printNEWLINE"""NEWLINENEWLINEhelps['deployment group export'] = """NEWLINEtype: commandNEWLINEshort-summary: Export the template 
used for a deployment.NEWLINEexamples:NEWLINE - name: Export the template used for a deployment at resource group.NEWLINE text: az deployment group export --resource-group testrg --name MyDeploymentNEWLINE"""NEWLINENEWLINEhelps['deployment group wait'] = """NEWLINEtype: commandNEWLINEshort-summary: Place the CLI in a waiting state until a deployment condition is met.NEWLINEexamples:NEWLINE - name: Place the CLI in a waiting state until a deployment condition is met. (autogenerated)NEWLINE text: |NEWLINE az deployment group wait --created --name MyDeployment --resource-group MyResourceGroupNEWLINE crafted: trueNEWLINE"""NEWLINENEWLINEhelps['deployment operation group'] = """NEWLINEtype: groupNEWLINEshort-summary: Manage deployment operations at resource group.NEWLINE"""NEWLINENEWLINEhelps['deployment operation group list'] = """NEWLINEtype: commandNEWLINEshort-summary: List deployment operations at resource group.NEWLINEexamples:NEWLINE - name: List deployment operations at resource group (autogenerated)NEWLINE text: |NEWLINE az deployment operation group list --name MyDeployment --resource-group MyResourceGroupNEWLINE crafted: trueNEWLINE"""NEWLINENEWLINEhelps['deployment operation group show'] = """NEWLINEtype: commandNEWLINEshort-summary: Show a deployment operation at resource group.NEWLINE"""NEWLINENEWLINEhelps['deployment mg'] = """NEWLINEtype: groupNEWLINEshort-summary: Manage Azure Resource Manager template deployment at management group.NEWLINE"""NEWLINENEWLINEhelps['deployment mg list'] = """NEWLINEtype: commandNEWLINEshort-summary: List deployments at management group.NEWLINEexamples:NEWLINE - name: List deployments at management group.NEWLINE text: az deployment mg list -m testmgNEWLINE"""NEWLINENEWLINEhelps['deployment mg show'] = """NEWLINEtype: commandNEWLINEshort-summary: Show a deployment at management group.NEWLINEexamples:NEWLINE - name: Show a deployment at management group.NEWLINE text: az deployment mg show -m testmg -n 
deployment01NEWLINE"""NEWLINENEWLINEhelps['deployment mg delete'] = """NEWLINEtype: commandNEWLINEshort-summary: Delete a deployment at management group.NEWLINEexamples:NEWLINE - name: Delete a deployment at management group.NEWLINE text: az deployment mg delete -m testmg -n deployment01NEWLINE"""NEWLINENEWLINEhelps['deployment mg cancel'] = """NEWLINEtype: commandNEWLINEshort-summary: Cancel a deployment at management group.NEWLINEexamples:NEWLINE - name: Cancel a deployment at management group.NEWLINE text: az deployment mg cancel -m testmg -n deployment01NEWLINE"""NEWLINENEWLINEhelps['deployment mg validate'] = """NEWLINEtype: commandNEWLINEshort-summary: Validate whether a template is valid at management group.NEWLINEparameters:NEWLINE - name: --parameters -pNEWLINE short-summary: Supply deployment parameter values.NEWLINE long-summary: >NEWLINE Parameters may be supplied from a file using the `@{path}` syntax, a JSON string, or as <KEY=VALUE> pairs. Parameters are evaluated in order, so when a value is assigned twice, the latter value will be used.NEWLINE It is recommended that you supply your parameters file first, and then override selectively using KEY=VALUE syntax.NEWLINE - name: --template-file -fNEWLINE short-summary: The path to the template file or Bicep file.NEWLINE - name: --template-uri -uNEWLINE short-summary: The URI to the template file.NEWLINE - name: --template-spec -sNEWLINE short-summary: The template spec resource id.NEWLINE - name: --management-group-id -mNEWLINE short-summary: The management group id to create deployment at.NEWLINE - name: --name -nNEWLINE short-summary: The deployment name.NEWLINE - name: --location -lNEWLINE short-summary: The location to store the deployment metadata.NEWLINEexamples:NEWLINE - name: Validate whether a template is valid at management group.NEWLINE text: az deployment mg validate --management-group-id testmg --location WestUS --template-file {template-file}NEWLINE - name: Validate whether a template is 
valid at management group. (autogenerated)NEWLINE text: |NEWLINE az deployment mg validate --location WestUS --management-group-id testmg --name mydeployment --parameters @myparameters.json --template-file azuredeploy.jsonNEWLINE crafted: trueNEWLINE"""NEWLINENEWLINEhelps['deployment mg what-if'] = """NEWLINEtype: commandNEWLINEshort-summary: Execute a deployment What-If operation at management group scope.NEWLINEparameters:NEWLINE - name: --parameters -pNEWLINE short-summary: Supply deployment parameter values.NEWLINE long-summary: >NEWLINE Parameters may be supplied from a file using the `@{path}` syntax, a JSON string, or as <KEY=VALUE> pairs. Parameters are evaluated in order, so when a value is assigned twice, the latter value will be used.NEWLINE It is recommended that you supply your parameters file first, and then override selectively using KEY=VALUE syntax.NEWLINE - name: --template-file -fNEWLINE short-summary: The path to the template file or Bicep file.NEWLINE - name: --template-uri -uNEWLINE short-summary: The URI to the template file.NEWLINE - name: --template-spec -sNEWLINE short-summary: The template spec resource id.NEWLINE - name: --management-group-id -mNEWLINE short-summary: The management group id to create deployment at.NEWLINE - name: --name -nNEWLINE short-summary: The deployment name.NEWLINE - name: --location -lNEWLINE short-summary: The location to store the deployment metadata.NEWLINE - name: --result-format -rNEWLINE short-summary: The format of What-If results.NEWLINEexamples:NEWLINE - name: Execute a deployment What-If operation at a management group.NEWLINE text: >NEWLINE az deployment mg what-if --management-group-id testmg --location westus --name rollout01 --template-uri https://myresource/azuredeploy.json --parameters @myparameters.jsonNEWLINE - name: Execute a deployment What-If operation at a management group with ResourceIdOnly format.NEWLINE text: >NEWLINE az deployment mg what-if --management-group-id testmg --location 
westus --name rollout01 --template-uri https://myresource/azuredeploy.json --parameters @myparameters.json --result-format ResourceIdOnlyNEWLINE - name: Execute a deployment What-If operation at a management group without pretty-printing the result.NEWLINE text: >NEWLINE az deployment mg what-if --management-group-id testmg --location westus --name rollout01 --template-uri https://myresource/azuredeploy.json --parameters @myparameters.json --no-pretty-printNEWLINE"""NEWLINENEWLINEhelps['deployment mg create'] = """NEWLINEtype: commandNEWLINEshort-summary: Start a deployment at management group.NEWLINEparameters:NEWLINE - name: --parameters -pNEWLINE short-summary: Supply deployment parameter values.NEWLINE long-summary: >NEWLINE Parameters may be supplied from a file using the `@{path}` syntax, a JSON string, or as <KEY=VALUE> pairs. Parameters are evaluated in order, so when a value is assigned twice, the latter value will be used.NEWLINE It is recommended that you supply your parameters file first, and then override selectively using KEY=VALUE syntax.NEWLINE - name: --template-file -fNEWLINE short-summary: The path to the template file or Bicep file.NEWLINE - name: --template-uri -uNEWLINE short-summary: The URI to the template file.NEWLINE - name: --template-spec -sNEWLINE short-summary: The template spec resource id.NEWLINE - name: --management-group-id -mNEWLINE short-summary: The management group id to create deployment at.NEWLINE - name: --name -nNEWLINE short-summary: The deployment name.NEWLINE - name: --location -lNEWLINE short-summary: The location to store the deployment metadata.NEWLINE - name: --what-if-result-format -rNEWLINE short-summary: The format of What-If results. 
Applicable when `--confirm-with-what-if` is set.NEWLINEexamples:NEWLINE - name: Create a deployment at management group from a remote template file, using parameters from a local JSON file.NEWLINE text: >NEWLINE az deployment mg create --management-group-id testmg --name rollout01 --location WestUS \\NEWLINE --template-uri https://myresource/azuredeploy.json --parameters @myparameters.jsonNEWLINE - name: Create a deployment at management group from a local template file, using parameters from a JSON string.NEWLINE text: |NEWLINE az deployment mg create --management-group-id testmg --name rollout01 --location WestUS \\NEWLINE --template-file azuredeploy.json \\NEWLINE --parameters '{ \\"policyName\\": { \\"value\\": \\"policy2\\" } }'NEWLINE - name: Create a deployment at management group from a local template, using a parameter file, a remote parameter file, and selectively overriding key/value pairs.NEWLINE text: >NEWLINE az deployment mg create --management-group-id testmg --name rollout01 --location WestUS \\NEWLINE --template-file azuredeploy.json --parameters @params.json \\NEWLINE --parameters https://mysite/params.json --parameters MyValue=This MyArray=@array.jsonNEWLINE"""NEWLINENEWLINEhelps['deployment mg export'] = """NEWLINEtype: commandNEWLINEshort-summary: Export the template used for a deployment.NEWLINEexamples:NEWLINE - name: Export the template used for a deployment at management group.NEWLINE text: az deployment mg export --management-group-id testmg --name MyDeploymentNEWLINE"""NEWLINENEWLINEhelps['deployment mg wait'] = """NEWLINEtype: commandNEWLINEshort-summary: Place the CLI in a waiting state until a deployment condition is met.NEWLINE"""NEWLINENEWLINEhelps['deployment operation mg'] = """NEWLINEtype: groupNEWLINEshort-summary: Manage deployment operations at management group.NEWLINE"""NEWLINENEWLINEhelps['deployment operation mg list'] = """NEWLINEtype: commandNEWLINEshort-summary: List deployment operations at management 
group.NEWLINE"""NEWLINENEWLINEhelps['deployment operation mg show'] = """NEWLINEtype: commandNEWLINEshort-summary: Show a deployment operation at management group.NEWLINE"""NEWLINENEWLINEhelps['deployment tenant'] = """NEWLINEtype: groupNEWLINEshort-summary: Manage Azure Resource Manager template deployment at tenant scope.NEWLINE"""NEWLINENEWLINEhelps['deployment tenant list'] = """NEWLINEtype: commandNEWLINEshort-summary: List deployments at tenant scope.NEWLINEexamples:NEWLINE - name: List deployments at tenant scope.NEWLINE text: az deployment tenant listNEWLINE"""NEWLINENEWLINEhelps['deployment tenant show'] = """NEWLINEtype: commandNEWLINEshort-summary: Show a deployment at tenant scope.NEWLINEexamples:NEWLINE - name: Show a deployment at tenant scope.NEWLINE text: az deployment tenant show -n deployment01NEWLINE"""NEWLINENEWLINEhelps['deployment tenant delete'] = """NEWLINEtype: commandNEWLINEshort-summary: Delete a deployment at tenant scope.NEWLINEexamples:NEWLINE - name: Delete a deployment at tenant scope.NEWLINE text: az deployment tenant delete -n deployment01NEWLINE"""NEWLINENEWLINEhelps['deployment tenant cancel'] = """NEWLINEtype: commandNEWLINEshort-summary: Cancel a deployment at tenant scope.NEWLINEexamples:NEWLINE - name: Cancel a deployment at tenant scope.NEWLINE text: az deployment tenant cancel -n deployment01NEWLINE"""NEWLINENEWLINEhelps['deployment tenant validate'] = """NEWLINEtype: commandNEWLINEshort-summary: Validate whether a template is valid at tenant scope.NEWLINEparameters:NEWLINE - name: --parameters -pNEWLINE short-summary: Supply deployment parameter values.NEWLINE long-summary: >NEWLINE Parameters may be supplied from a file using the `@{path}` syntax, a JSON string, or as <KEY=VALUE> pairs. 
Parameters are evaluated in order, so when a value is assigned twice, the latter value will be used.NEWLINE It is recommended that you supply your parameters file first, and then override selectively using KEY=VALUE syntax.NEWLINE - name: --template-file -fNEWLINE short-summary: The path to the template file or Bicep file.NEWLINE - name: --template-uri -uNEWLINE short-summary: The URI to the template file.NEWLINE - name: --template-spec -sNEWLINE short-summary: The template spec resource id.NEWLINE - name: --name -nNEWLINE short-summary: The deployment name.NEWLINE - name: --location -lNEWLINE short-summary: The location to store the deployment metadata.NEWLINEexamples:NEWLINE - name: Validate whether a template is valid at tenant scope.NEWLINE text: az deployment tenant validate --location WestUS --template-file {template-file}NEWLINE - name: Validate whether a template is valid at tenant scope. (autogenerated)NEWLINE text: |NEWLINE az deployment tenant validate --location WestUS --name mydeployment --parameters @myparameters.json --template-file azuredeploy.jsonNEWLINE crafted: trueNEWLINE"""NEWLINENEWLINEhelps['deployment tenant what-if'] = """NEWLINEtype: commandNEWLINEshort-summary: Execute a deployment What-If operation at tenant scope.NEWLINEparameters:NEWLINE - name: --parameters -pNEWLINE short-summary: Supply deployment parameter values.NEWLINE long-summary: >NEWLINE Parameters may be supplied from a file using the `@{path}` syntax, a JSON string, or as <KEY=VALUE> pairs. 
Parameters are evaluated in order, so when a value is assigned twice, the latter value will be used.NEWLINE It is recommended that you supply your parameters file first, and then override selectively using KEY=VALUE syntax.NEWLINE - name: --template-file -fNEWLINE short-summary: The path to the template file or Bicep file.NEWLINE - name: --template-uri -uNEWLINE short-summary: The URI to the template file.NEWLINE - name: --template-spec -sNEWLINE short-summary: The template spec resource id.NEWLINE - name: --location -lNEWLINE short-summary: The location to store the deployment What-If operation metadata.NEWLINE - name: --name -nNEWLINE short-summary: The deployment name.NEWLINE - name: --result-format -rNEWLINE short-summary: The format of What-If results.NEWLINEexamples:NEWLINE - name: Execute a deployment What-If operation at tenant scope.NEWLINE text: >NEWLINE az deployment tenant what-if --location WestUS --template-uri https://myresource/azuredeploy.json --parameters @myparameters.jsonNEWLINE - name: Execute a deployment What-If operation at tenant scope with ResourceIdOnly format.NEWLINE text: >NEWLINE az deployment tenant what-if --location WestUS --template-uri https://myresource/azuredeploy.json --parameters @myparameters.json --result-format ResourceIdOnlyNEWLINE - name: Execute a deployment What-If operation at tenant scope without pretty-printing the result.NEWLINE text: >NEWLINE az deployment tenant what-if --location WestUS --template-uri https://myresource/azuredeploy.json --parameters @myparameters.json --no-pretty-printNEWLINE"""NEWLINENEWLINEhelps['deployment tenant create'] = """NEWLINEtype: commandNEWLINEshort-summary: Start a deployment at tenant scope.NEWLINEparameters:NEWLINE - name: --parameters -pNEWLINE short-summary: Supply deployment parameter values.NEWLINE long-summary: >NEWLINE Parameters may be supplied from a file using the `@{path}` syntax, a JSON string, or as <KEY=VALUE> pairs. 
Parameters are evaluated in order, so when a value is assigned twice, the latter value will be used.NEWLINE It is recommended that you supply your parameters file first, and then override selectively using KEY=VALUE syntax.NEWLINE - name: --template-file -fNEWLINE short-summary: The path to the template file or Bicep file.NEWLINE - name: --template-uri -uNEWLINE short-summary: The URI to the template file.NEWLINE - name: --template-spec -sNEWLINE short-summary: The template spec resource id.NEWLINE - name: --name -nNEWLINE short-summary: The deployment name.NEWLINE - name: --location -lNEWLINE short-summary: The location to store the deployment metadata.NEWLINE - name: --what-if-result-format -rNEWLINE short-summary: The format of What-If results. Applicable when `--confirm-with-what-if` is set.NEWLINEexamples:NEWLINE - name: Create a deployment at tenant scope from a remote template file, using parameters from a local JSON file.NEWLINE text: >NEWLINE az deployment tenant create --name rollout01 --location WestUS \\NEWLINE --template-uri https://myresource/azuredeploy.json --parameters @myparameters.jsonNEWLINE - name: Create a deployment at tenant scope from a local template file, using parameters from a JSON string.NEWLINE text: |NEWLINE az deployment tenant create --name rollout01 --location WestUS \\NEWLINE --template-file azuredeploy.json \\NEWLINE --parameters '{ \\"policyName\\": { \\"value\\": \\"policy2\\" } }'NEWLINE - name: Create a deployment at tenant scope from a local template, using a parameter file, a remote parameter file, and selectively overriding key/value pairs.NEWLINE text: >NEWLINE az deployment tenant create --name rollout01 --location WestUS \\NEWLINE --template-file azuredeploy.json --parameters @params.json \\NEWLINE --parameters https://mysite/params.json --parameters MyValue=This MyArray=@array.jsonNEWLINE"""NEWLINENEWLINEhelps['deployment tenant export'] = """NEWLINEtype: commandNEWLINEshort-summary: Export the template used for a 
deployment.NEWLINEexamples:NEWLINE - name: Export the template used for a deployment at tenant scope.NEWLINE text: az deployment tenant export --name MyDeploymentNEWLINE"""NEWLINENEWLINEhelps['deployment tenant wait'] = """NEWLINEtype: commandNEWLINEshort-summary: Place the CLI in a waiting state until a deployment condition is met.NEWLINEexamples:NEWLINE - name: Place the CLI in a waiting state until a deployment condition is met. (autogenerated)NEWLINE text: |NEWLINE az deployment tenant wait --deleted --name MyDeploymentNEWLINE crafted: trueNEWLINE"""NEWLINENEWLINEhelps['deployment operation tenant'] = """NEWLINEtype: groupNEWLINEshort-summary: Manage deployment operations at tenant scope.NEWLINE"""NEWLINENEWLINEhelps['deployment operation tenant list'] = """NEWLINEtype: commandNEWLINEshort-summary: List deployment operations at tenant scope.NEWLINE"""NEWLINENEWLINEhelps['deployment operation tenant show'] = """NEWLINEtype: commandNEWLINEshort-summary: Show a deployment operation at tenant scope.NEWLINE"""NEWLINENEWLINEhelps['deployment-scripts'] = """NEWLINEtype: groupNEWLINEshort-summary: Manage deployment scripts at subscription or resource group scope.NEWLINE"""NEWLINENEWLINEhelps['deployment-scripts list'] = """NEWLINEtype: commandNEWLINEshort-summary: List all deployment scripts.NEWLINEexamples:NEWLINE - name: Retrieve all deployment scripts found in the user's logged-in default subscription.NEWLINE text: >NEWLINE az deployment-scripts listNEWLINE - name: Retrieve all deployment scripts found in a resource groupNEWLINE text: |NEWLINE az deployment-scripts list --resource-group contoso-rgNEWLINE"""NEWLINENEWLINEhelps['deployment-scripts show'] = """NEWLINEtype: commandNEWLINEshort-summary: Retrieve a deployment script.NEWLINEparameters:NEWLINE - name: --nameNEWLINE short-summary: Deployment script resource name.NEWLINEexamples:NEWLINE - name: Retrieve a deployment script found in the user's logged-in default subscription.NEWLINE text: >NEWLINE az 
deployment-scripts show --resource-group contoso-rg --name contosoBashScriptNEWLINE"""NEWLINENEWLINEhelps['deployment-scripts show-log'] = """NEWLINEtype: commandNEWLINEshort-summary: Show deployment script logs.NEWLINEparameters:NEWLINE - name: --nameNEWLINE short-summary: Deployment script resource name.NEWLINEexamples:NEWLINE - name: Retrieve deployment script logs found in the user's logged-in default subscription, max limit is 4MB.NEWLINE text: >NEWLINE az deployment-scripts show-log --resource-group contoso-rg --name contosoBashScriptNEWLINE"""NEWLINENEWLINEhelps['deployment-scripts delete'] = """NEWLINEtype: commandNEWLINEshort-summary: Delete a deployment script.NEWLINEparameters:NEWLINE - name: --nameNEWLINE short-summary: Deployment script resource name.NEWLINEexamples:NEWLINE - name: Delete a deployment script found in the user's logged-in default subscription.NEWLINE text: >NEWLINE az deployment-scripts delete --resource-group contoso-rg --name contosoBashScriptNEWLINE"""NEWLINENEWLINEhelps['feature'] = """NEWLINEtype: groupNEWLINEshort-summary: Manage resource provider features.NEWLINE"""NEWLINENEWLINEhelps['feature list'] = """NEWLINEtype: commandNEWLINEshort-summary: List preview features.NEWLINEexamples:NEWLINE - name: List preview featuresNEWLINE text: az feature listNEWLINE"""NEWLINENEWLINEhelps['feature register'] = """NEWLINEtype: commandNEWLINEshort-summary: register a preview feature.NEWLINEexamples:NEWLINE - name: register the "Shared Image Gallery" featureNEWLINE text: az feature register --namespace Microsoft.Compute --name GalleryPreviewNEWLINE"""NEWLINENEWLINEhelps['feature unregister'] = """NEWLINEtype: commandNEWLINEshort-summary: unregister a preview feature.NEWLINEexamples:NEWLINE - name: unregister the "Shared Image Gallery" featureNEWLINE text: az feature unregister --namespace Microsoft.Compute --name GalleryPreviewNEWLINE"""NEWLINENEWLINEhelps['group'] = """NEWLINEtype: groupNEWLINEshort-summary: Manage resource groups and 
template deployments.NEWLINE"""NEWLINENEWLINEhelps['group create'] = """NEWLINEtype: commandNEWLINEshort-summary: Create a new resource group.NEWLINEexamples:NEWLINE - name: Create a new resource group in the West US region.NEWLINE text: >NEWLINE az group create -l westus -n MyResourceGroupNEWLINE"""NEWLINENEWLINEhelps['group delete'] = """NEWLINEtype: commandNEWLINEshort-summary: Delete a resource group.NEWLINEexamples:NEWLINE - name: Delete a resource group.NEWLINE text: >NEWLINE az group delete -n MyResourceGroupNEWLINE"""NEWLINENEWLINEhelps['group deployment'] = """NEWLINEtype: groupNEWLINEshort-summary: Manage Azure Resource Manager deployments.NEWLINE"""NEWLINENEWLINEhelps['group deployment create'] = """NEWLINEtype: commandNEWLINEshort-summary: Start a deployment.NEWLINEparameters:NEWLINE - name: --parameters -pNEWLINE short-summary: Supply deployment parameter values.NEWLINE long-summary: >NEWLINE Parameters may be supplied from a file using the `@{path}` syntax, a JSON string, or as <KEY=VALUE> pairs. 
Parameters are evaluated in order, so when a value is assigned twice, the latter value will be used.NEWLINE It is recommended that you supply your parameters file first, and then override selectively using KEY=VALUE syntax.NEWLINEexamples:NEWLINE - name: Create a deployment from a remote template file, using parameters from a local JSON file.NEWLINE text: >NEWLINE az group deployment create -g MyResourceGroup --template-uri https://myresource/azuredeploy.json --parameters @myparameters.jsonNEWLINE - name: Create a deployment from a local template file, using parameters from a JSON string.NEWLINE text: |NEWLINE az group deployment create -g MyResourceGroup --template-file azuredeploy.json \\NEWLINE --parameters "{ \\"location\\": { \\"value\\": \\"westus\\" } }"NEWLINE - name: Create a deployment from a local template, using a local parameter file, a remote parameter file, and selectively overriding key/value pairs.NEWLINE text: >NEWLINE az group deployment create -g MyResourceGroup --template-file azuredeploy.json \\NEWLINE --parameters @params.json --parameters https://mysite/params.json --parameters MyValue=This MyArray=@array.jsonNEWLINE"""NEWLINENEWLINEhelps['group deployment export'] = """NEWLINEtype: commandNEWLINEshort-summary: Export the template used for a deployment.NEWLINEexamples:NEWLINE - name: Export the template used for a deployment. 
(autogenerated)NEWLINE text: |NEWLINE az group deployment export --name MyDeployment --resource-group MyResourceGroupNEWLINE crafted: trueNEWLINE"""NEWLINENEWLINEhelps['group deployment operation'] = """NEWLINEtype: groupNEWLINEshort-summary: Manage deployment operations.NEWLINE"""NEWLINENEWLINEhelps['group deployment validate'] = """NEWLINEtype: commandNEWLINEshort-summary: Validate whether a template is syntactically correct.NEWLINEparameters:NEWLINE - name: --parameters -pNEWLINE short-summary: Supply deployment parameter values.NEWLINE long-summary: >NEWLINE Parameters may be supplied from a file using the `@{path}` syntax, a JSON string, or as <KEY=VALUE> pairs. Parameters are evaluated in order, so when a value is assigned twice, the latter value will be used.NEWLINE It is recommended that you supply your parameters file first, and then override selectively using KEY=VALUE syntax.NEWLINEexamples:NEWLINE - name: Validate whether a template is syntactically correct. (autogenerated)NEWLINE text: |NEWLINE az group deployment validate --parameters "{ \\"location\\": { \\"value\\": \\"westus\\" } }" \\NEWLINE --resource-group MyResourceGroup --template-file storage.jsonNEWLINE crafted: trueNEWLINE"""NEWLINENEWLINEhelps['group deployment wait'] = """NEWLINEtype: commandNEWLINEshort-summary: Place the CLI in a waiting state until a deployment condition is met.NEWLINEexamples:NEWLINE - name: Place the CLI in a waiting state until a deployment condition is met. (autogenerated)NEWLINE text: |NEWLINE az group deployment wait --name MyDeployment --resource-group MyResourceGroup --updatedNEWLINE crafted: trueNEWLINE - name: Place the CLI in a waiting state until a deployment condition is met. 
(autogenerated)NEWLINE text: |NEWLINE az group deployment wait --created --name MyDeployment --resource-group MyResourceGroupNEWLINE crafted: trueNEWLINE"""NEWLINENEWLINEhelps['group exists'] = """NEWLINEtype: commandNEWLINEshort-summary: Check if a resource group exists.NEWLINEexamples:NEWLINE - name: Check if 'MyResourceGroup' exists.NEWLINE text: >NEWLINE az group exists -n MyResourceGroupNEWLINE"""NEWLINENEWLINEhelps['group list'] = """NEWLINEtype: commandNEWLINEshort-summary: List resource groups.NEWLINEexamples:NEWLINE - name: List all resource groups located in the West US region.NEWLINE text: >NEWLINE az group list --query "[?location=='westus']"NEWLINE"""NEWLINENEWLINEhelps['group lock'] = """NEWLINEtype: groupNEWLINEshort-summary: Manage Azure resource group locks.NEWLINE"""NEWLINENEWLINEhelps['group lock create'] = """NEWLINEtype: commandNEWLINEshort-summary: Create a resource group lock.NEWLINEexamples:NEWLINE - name: Create a read-only resource group level lock.NEWLINE text: >NEWLINE az group lock create --lock-type ReadOnly -n lockName -g MyResourceGroupNEWLINE"""NEWLINENEWLINEhelps['group lock delete'] = """NEWLINEtype: commandNEWLINEshort-summary: Delete a resource group lock.NEWLINEexamples:NEWLINE - name: Delete a resource group lockNEWLINE text: >NEWLINE az group lock delete --name lockName -g MyResourceGroupNEWLINE"""NEWLINENEWLINEhelps['group lock list'] = """NEWLINEtype: commandNEWLINEshort-summary: List lock information in the resource-group.NEWLINEexamples:NEWLINE - name: List out all locks on the resource group levelNEWLINE text: >NEWLINE az group lock list -g MyResourceGroupNEWLINE"""NEWLINENEWLINEhelps['group lock show'] = """NEWLINEtype: commandNEWLINEshort-summary: Show the details of a resource group lockNEWLINEexamples:NEWLINE - name: Show a resource group level lockNEWLINE text: >NEWLINE az group lock show -n lockname -g MyResourceGroupNEWLINE"""NEWLINENEWLINEhelps['group lock update'] = """NEWLINEtype: commandNEWLINEshort-summary: 
Update a resource group lock.NEWLINEexamples:NEWLINE - name: Update a resource group lock with new notes and typeNEWLINE text: >NEWLINE az group lock update --name lockName -g MyResourceGroup --notes newNotesHere --lock-type CanNotDeleteNEWLINE"""NEWLINENEWLINEhelps['group update'] = """NEWLINEtype: commandNEWLINEshort-summary: Update a resource group.NEWLINEexamples:NEWLINE - name: Update a resource group. (autogenerated)NEWLINE text: |NEWLINE az group update --resource-group MyResourceGroup --set tags.CostCenter='{"Dept":"IT","Environment":"Test"}'NEWLINE crafted: trueNEWLINE"""NEWLINENEWLINEhelps['group wait'] = """NEWLINEtype: commandNEWLINEshort-summary: Place the CLI in a waiting state until a condition of the resource group is met.NEWLINEexamples:NEWLINE - name: Place the CLI in a waiting state until a condition of the resource group is met. (autogenerated)NEWLINE text: |NEWLINE az group wait --created --resource-group MyResourceGroupNEWLINE crafted: trueNEWLINE - name: Place the CLI in a waiting state until a condition of the resource group is met. 
(autogenerated)NEWLINE text: |NEWLINE az group wait --deleted --resource-group MyResourceGroupNEWLINE crafted: trueNEWLINE"""NEWLINENEWLINEhelps['lock'] = """NEWLINEtype: groupNEWLINEshort-summary: Manage Azure locks.NEWLINE"""NEWLINENEWLINEhelps['lock create'] = """NEWLINEtype: commandNEWLINEshort-summary: Create a lock.NEWLINElong-summary: 'Locks can exist at three different scopes: subscription, resource group and resource.'NEWLINEexamples:NEWLINE - name: Create a read-only subscription level lock.NEWLINE text: >NEWLINE az lock create --name lockName --resource-group group --lock-type ReadOnlyNEWLINE - name: Create a read-only resource level lock on a vnet resource.NEWLINE text: >NEWLINE az lock create --name lockName --resource-group group --lock-type ReadOnly --resource-type \\NEWLINE Microsoft.Network/virtualNetworks --resource myVnetNEWLINE - name: Create a read-only resource level lock on a subnet resource with a specific parent.NEWLINE text: >NEWLINE az lock create --name lockName --resource-group group --lock-type ReadOnly --resource-type \\NEWLINE Microsoft.Network/subnets --parent virtualNetworks/myVnet --resource mySubnetNEWLINE"""NEWLINENEWLINEhelps['lock delete'] = """NEWLINEtype: commandNEWLINEshort-summary: Delete a lock.NEWLINEexamples:NEWLINE - name: Delete a resource group-level lockNEWLINE text: >NEWLINE az lock delete --name lockName --resource-group groupNEWLINE"""NEWLINENEWLINEhelps['lock list'] = """NEWLINEtype: commandNEWLINEshort-summary: List lock information.NEWLINEexamples:NEWLINE - name: List out the locks on a vnet resource. 
Includes locks in the associated group and subscription.NEWLINE text: >NEWLINE az lock list --resource myvnet --resource-type Microsoft.Network/virtualNetworks -g groupNEWLINE - name: List out all locks on the subscription levelNEWLINE text: >NEWLINE az lock listNEWLINE"""NEWLINENEWLINEhelps['lock show'] = """NEWLINEtype: commandNEWLINEshort-summary: Show the properties of a lockNEWLINEexamples:NEWLINE - name: Show a subscription level lockNEWLINE text: >NEWLINE az lock show -n locknameNEWLINE - name: Show the properties of a lock (autogenerated)NEWLINE text: |NEWLINE az lock show --name lockname --resource-group MyResourceGroup --resource-name MyResource --resource-type Microsoft.Network/virtualNetworksNEWLINE crafted: trueNEWLINE"""NEWLINENEWLINEhelps['lock update'] = """NEWLINEtype: commandNEWLINEshort-summary: Update a lock.NEWLINEexamples:NEWLINE - name: Update a resource group level lock with new notes and typeNEWLINE text: >NEWLINE az lock update --name lockName --resource-group group --notes newNotesHere --lock-type CanNotDeleteNEWLINE"""NEWLINENEWLINEhelps['managedapp'] = """NEWLINEtype: groupNEWLINEshort-summary: Manage template solutions provided and maintained by Independent Software Vendors (ISVs).NEWLINE"""NEWLINENEWLINEhelps['managedapp create'] = """NEWLINEtype: commandNEWLINEshort-summary: Create a managed application.NEWLINEexamples:NEWLINE - name: Create a managed application of kind 'ServiceCatalog'. This requires a valid managed application definition ID.NEWLINE text: |NEWLINE az managedapp create -g MyResourceGroup -n MyManagedApp -l westcentralus --kind ServiceCatalog \\NEWLINE -m "/subscriptions/{SubID}/resourceGroups/{ManagedResourceGroup}" \\NEWLINE -d "/subscriptions/{SubID}/resourceGroups/{ResourceGroup}/providers/Microsoft.Solutions/applianceDefinitions/{ApplianceDefinition}"NEWLINE - name: Create a managed application of kind 'MarketPlace'. 
This requires a valid plan, containing details about existing marketplace package like plan name, version, publisher and product.NEWLINE text: |NEWLINE az managedapp create -g MyResourceGroup -n MyManagedApp -l westcentralus --kind MarketPlace \\NEWLINE -m "/subscriptions/{SubID}/resourceGroups/{ManagedResourceGroup}" \\NEWLINE --plan-name ContosoAppliance --plan-version "1.0" --plan-product "contoso-appliance" --plan-publisher ContosoNEWLINE"""NEWLINENEWLINEhelps['managedapp definition'] = """NEWLINEtype: groupNEWLINEshort-summary: Manage Azure Managed Applications.NEWLINE"""NEWLINENEWLINEhelps['managedapp definition create'] = """NEWLINEtype: commandNEWLINEshort-summary: Create a managed application definition.NEWLINEexamples:NEWLINE - name: Create a managed application defintion.NEWLINE text: >NEWLINE az managedapp definition create -g MyResourceGroup -n MyManagedAppDef -l eastus --display-name "MyManagedAppDef" \\NEWLINE --description "My Managed App Def description" -a "myPrincipalId:myRoleId" --lock-level None \\NEWLINE --package-file-uri "https://path/to/myPackage.zip"NEWLINE - name: Create a managed application defintion with inline values for createUiDefinition and mainTemplate.NEWLINE text: >NEWLINE az managedapp definition create -g MyResourceGroup -n MyManagedAppDef -l eastus --display-name "MyManagedAppDef" \\NEWLINE --description "My Managed App Def description" -a "myPrincipalId:myRoleId" --lock-level None \\NEWLINE --create-ui-definition @myCreateUiDef.json --main-template @myMainTemplate.jsonNEWLINE"""NEWLINENEWLINEhelps['managedapp definition update'] = """NEWLINEtype: commandNEWLINEshort-summary: Update a managed application definition.NEWLINEexamples:NEWLINE - name: Update a managed application defintion.NEWLINE text: >NEWLINE az managedapp definition update -g MyResourceGroup -n MyManagedAppDef -l eastus --display-name "MyManagedAppDef" \\NEWLINE --description "My Managed App Def description" -a "myPrincipalId:myRoleId" --lock-level None 
\\NEWLINE --package-file-uri "https://path/to/myPackage.zip"NEWLINE - name: Update a managed application defintion with inline values for createUiDefinition and mainTemplate.NEWLINE text: >NEWLINE az managedapp definition update -g MyResourceGroup -n MyManagedAppDef -l eastus --display-name "MyManagedAppDef" \\NEWLINE --description "My Managed App Def description" -a "myPrincipalId:myRoleId" --lock-level None \\NEWLINE --create-ui-definition @myCreateUiDef.json --main-template @myMainTemplate.jsonNEWLINE"""NEWLINENEWLINEhelps['managedapp definition delete'] = """NEWLINEtype: commandNEWLINEshort-summary: Delete a managed application definition.NEWLINEexamples:NEWLINE - name: Delete a managed application definition. (autogenerated)NEWLINE text: |NEWLINE az managedapp definition delete --name MyManagedApplicationDefinition --resource-group MyResourceGroupNEWLINE crafted: trueNEWLINE"""NEWLINENEWLINEhelps['managedapp definition list'] = """NEWLINEtype: commandNEWLINEshort-summary: List managed application definitions.NEWLINEexamples:NEWLINE - name: List managed application definitions. (autogenerated)NEWLINE text: |NEWLINE az managedapp definition list --resource-group MyResourceGroupNEWLINE crafted: trueNEWLINE"""NEWLINENEWLINEhelps['managedapp delete'] = """NEWLINEtype: commandNEWLINEshort-summary: Delete a managed application.NEWLINEexamples:NEWLINE - name: Delete a managed application. (autogenerated)NEWLINE text: |NEWLINE az managedapp delete --name MyManagedApplication --resource-group MyResourceGroup --subscription MySubscriptionNEWLINE crafted: trueNEWLINE"""NEWLINENEWLINEhelps['managedapp list'] = """NEWLINEtype: commandNEWLINEshort-summary: List managed applications.NEWLINEexamples:NEWLINE - name: List managed applications. 
(autogenerated)NEWLINE text: |NEWLINE az managedapp list --resource-group MyResourceGroupNEWLINE crafted: trueNEWLINE"""NEWLINENEWLINEhelps['policy'] = """NEWLINEtype: groupNEWLINEshort-summary: Manage resource policies.NEWLINE"""NEWLINENEWLINEhelps['policy assignment'] = """NEWLINEtype: groupNEWLINEshort-summary: Manage resource policy assignments.NEWLINE"""NEWLINENEWLINEhelps['policy assignment create'] = """NEWLINEtype: commandNEWLINEshort-summary: Create a resource policy assignment.NEWLINEparameters:NEWLINE - name: --scopeNEWLINE type: stringNEWLINE short-summary: Scope to which this policy assignment applies.NEWLINEexamples:NEWLINE - name: Create a resource policy assignment at scopeNEWLINE text: |NEWLINE Valid scopes are management group, subscription, resource group, and resource, for exampleNEWLINE management group: /providers/Microsoft.Management/managementGroups/MyManagementGroupNEWLINE subscription: /subscriptions/0b1f6471-1bf0-4dda-aec3-111122223333NEWLINE resource group: /subscriptions/0b1f6471-1bf0-4dda-aec3-111122223333/resourceGroups/myGroupNEWLINE resource: /subscriptions/0b1f6471-1bf0-4dda-aec3-111122223333/resourceGroups/myGroup/providers/Microsoft.Compute/virtualMachines/myVMNEWLINE az policy assignment create --scope \\NEWLINE "/providers/Microsoft.Management/managementGroups/MyManagementGroup" \\NEWLINE --policy {PolicyName} -p "{ \\"allowedLocations\\": \\NEWLINE { \\"value\\": [ \\"australiaeast\\", \\"eastus\\", \\"japaneast\\" ] } }"NEWLINE - name: Create a resource policy assignment and provide rule parameter values.NEWLINE text: |NEWLINE az policy assignment create --policy {PolicyName} -p "{ \\"allowedLocations\\": \\NEWLINE { \\"value\\": [ \\"australiaeast\\", \\"eastus\\", \\"japaneast\\" ] } }"NEWLINE - name: Create a resource policy assignment with a system assigned identity.NEWLINE text: >NEWLINE az policy assignment create --name myPolicy --policy {PolicyName} --assign-identityNEWLINE - name: Create a resource policy assignment 
with a system assigned identity. The identity will have 'Contributor' role access to the subscription.NEWLINE text: >NEWLINE az policy assignment create --name myPolicy --policy {PolicyName} --assign-identity --identity-scope /subscriptions/xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx --role ContributorNEWLINE - name: Create a resource policy assignment with an enforcement mode. It indicates whether a policy effect will be enforced or not during assignment creation and update. Please visit https://aka.ms/azure-policyAssignment-enforcement-mode for more information.NEWLINE text: >NEWLINE az policy assignment create --name myPolicy --policy {PolicyName} --enforcement-mode 'DoNotEnforce'NEWLINE"""NEWLINENEWLINEhelps['policy assignment update'] = """NEWLINEtype: commandNEWLINEshort-summary: Update a resource policy assignment.NEWLINEexamples:NEWLINE - name: Update a resource policy assignment's description.NEWLINE text: |NEWLINE az policy assignment update --name myPolicy --description 'My policy description'NEWLINE"""NEWLINENEWLINEhelps['policy assignment delete'] = """NEWLINEtype: commandNEWLINEshort-summary: Delete a resource policy assignment.NEWLINEexamples:NEWLINE - name: Delete a resource policy assignment. 
(autogenerated)NEWLINE text: |NEWLINE az policy assignment delete --name MyPolicyAssignmentNEWLINE crafted: trueNEWLINE"""NEWLINENEWLINEhelps['policy assignment identity'] = """NEWLINEtype: groupNEWLINEshort-summary: Manage a policy assignment's managed identity.NEWLINE"""NEWLINENEWLINEhelps['policy assignment identity assign'] = """NEWLINEtype: commandNEWLINEshort-summary: Add a system assigned identity to a policy assignment.NEWLINEexamples:NEWLINE - name: Add a system assigned managed identity to a policy assignment.NEWLINE text: >NEWLINE az policy assignment identity assign -g MyResourceGroup -n MyPolicyAssignmentNEWLINE - name: Add a system assigned managed identity to a policy assignment and grant it the 'Contributor' role for the current resource group.NEWLINE text: >NEWLINE az policy assignment identity assign -g MyResourceGroup -n MyPolicyAssignment --role Contributor --identity-scope /subscriptions/xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx/resourceGroups/MyResourceGroupNEWLINE"""NEWLINENEWLINEhelps['policy assignment identity remove'] = """NEWLINEtype: commandNEWLINEshort-summary: Remove a managed identity from a policy assignment.NEWLINE"""NEWLINENEWLINEhelps['policy assignment identity show'] = """NEWLINEtype: commandNEWLINEshort-summary: Show a policy assignment's managed identity.NEWLINEexamples:NEWLINE - name: Show a policy assignment's managed identity. 
(autogenerated)NEWLINE text: |NEWLINE az policy assignment identity show --name MyPolicyAssignment --scope '/providers/Microsoft.Management/managementGroups/MyManagementGroup'NEWLINE crafted: trueNEWLINE"""NEWLINENEWLINEhelps['policy assignment non-compliance-message'] = """NEWLINEtype: groupNEWLINEshort-summary: Manage a policy assignment's non-compliance messages.NEWLINE"""NEWLINENEWLINEhelps['policy assignment non-compliance-message create'] = """NEWLINEtype: commandNEWLINEshort-summary: Add a non-compliance message to a policy assignment.NEWLINEexamples:NEWLINE - name: Add a non-compliance message to a policy assignment.NEWLINE text: >NEWLINE az policy assignment non-compliance-message create -g MyResourceGroup -n MyPolicyAssignment -m 'Resources must follow naming standards'NEWLINE - name: Add a non-compliance message for a specific policy in an assigned policy set definition.NEWLINE text: >NEWLINE az policy assignment non-compliance-message create -g MyResourceGroup -n MyPolicySetAssignment -m 'Resources must use allowed SKUs' --policy-definition-reference-id SkuPolicyRefIdNEWLINE"""NEWLINENEWLINEhelps['policy assignment non-compliance-message list'] = """NEWLINEtype: commandNEWLINEshort-summary: List the non-compliance messages for a policy assignment.NEWLINEexamples:NEWLINE - name: List the non-compliance messages for a policy assignment.NEWLINE text: >NEWLINE az policy assignment non-compliance-message list -g MyResourceGroup -n MyPolicyAssignmentNEWLINE"""NEWLINENEWLINEhelps['policy assignment non-compliance-message delete'] = """NEWLINEtype: commandNEWLINEshort-summary: Remove one or more non-compliance messages from a policy assignment.NEWLINEexamples:NEWLINE - name: Remove non-compliance messages from a policy assignment that contain a specific message and no policy definition reference ID.NEWLINE text: >NEWLINE az policy assignment non-compliance-message delete -g MyResourceGroup -n MyPolicyAssignment -m 'Resources must follow naming standards'NEWLINE 
- name: Remove non-compliance messages from a policy assignment that contain a specific message and a specific policy definition reference ID.NEWLINE text: >NEWLINE az policy assignment non-compliance-message delete -g MyResourceGroup -n MyPolicySetAssignment -m 'Resources must use allowed SKUs' --policy-definition-reference-id SkuPolicyRefIdNEWLINE"""NEWLINENEWLINEhelps['policy assignment list'] = """NEWLINEtype: commandNEWLINEshort-summary: List resource policy assignments.NEWLINE"""NEWLINENEWLINEhelps['policy assignment show'] = """NEWLINEtype: commandNEWLINEshort-summary: Show a resource policy assignment.NEWLINEexamples:NEWLINE - name: Show a resource policy assignment. (autogenerated)NEWLINE text: |NEWLINE az policy assignment show --name MyPolicyAssignmentNEWLINE crafted: trueNEWLINE"""NEWLINENEWLINEhelps['policy definition'] = """NEWLINEtype: groupNEWLINEshort-summary: Manage resource policy definitions.NEWLINE"""NEWLINENEWLINEhelps['policy definition create'] = """NEWLINEtype: commandNEWLINEshort-summary: Create a policy definition.NEWLINEparameters:NEWLINE - name: --rulesNEWLINE type: stringNEWLINE short-summary: Policy rules in JSON format, or a path to a file containing JSON rules.NEWLINE - name: --management-groupNEWLINE type: stringNEWLINE short-summary: Name of the management group the new policy definition can be assigned in.NEWLINE - name: --subscriptionNEWLINE type: stringNEWLINE short-summary: Name or id of the subscription the new policy definition can be assigned in.NEWLINEexamples:NEWLINE - name: Create a read-only policy.NEWLINE text: |NEWLINE az policy definition create --name readOnlyStorage --rules "{ \\"if\\": \\NEWLINE { \\"field\\": \\"type\\", \\"equals\\": \\"Microsoft.Storage/storageAccounts/write\\" }, \\NEWLINE \\"then\\": { \\"effect\\": \\"deny\\" } }"NEWLINE - name: Create a policy parameter definition.NEWLINE text: |NEWLINE az policy definition create --name allowedLocations \\NEWLINE --rules "{ \\"if\\": { \\"allOf\\": [ 
\\NEWLINE { \\"field\\": \\"location\\",\\"notIn\\": \\"[parameters('listOfAllowedLocations')]\\" }, \\NEWLINE { \\"field\\": \\"location\\", \\"notEquals\\": \\"global\\" }, \\NEWLINE { \\"field\\": \\"type\\", \\"notEquals\\": \\NEWLINE \\"Microsoft.AzureActiveDirectory/b2cDirectories\\"} \\NEWLINE ] }, \\"then\\": { \\"effect\\": \\"deny\\" } }" \\NEWLINE --params "{ \\"allowedLocations\\": { \\NEWLINE \\"type\\": \\"array\\", \\"metadata\\": { \\"description\\": \\NEWLINE \\"The list of locations that can be specified when deploying resources\\", \\NEWLINE \\"strongType\\": \\"location\\", \\"displayName\\": \\"Allowed locations\\" } } }"NEWLINE - name: Create a read-only policy that can be applied within a management group.NEWLINE text: |NEWLINE az policy definition create -n readOnlyStorage --management-group "MyManagementGroup" \\NEWLINE --rules "{ \\"if\\": { \\"field\\": \\"type\\", \\NEWLINE \\"equals\\": \\"Microsoft.Storage/storageAccounts/write\\" }, \\NEWLINE \\"then\\": { \\"effect\\": \\"deny\\" } }"NEWLINE - name: Create a policy definition with mode. The mode 'Indexed' indicates the policy should be evaluated only for resource types that support tags and location.NEWLINE text: |NEWLINE az policy definition create --name TagsPolicyDefinition --subscription "MySubscription" \\NEWLINE --mode Indexed --rules "{ \\"if\\": { \\"field\\": \\"tags\\", \\"exists\\": \\"false\\" }, \\NEWLINE \\"then\\": { \\"effect\\": \\"deny\\" } }"NEWLINE"""NEWLINENEWLINEhelps['policy definition delete'] = """NEWLINEtype: commandNEWLINEshort-summary: Delete a policy definition.NEWLINEexamples:NEWLINE - name: Delete a policy definition. 
(autogenerated)NEWLINE text: |NEWLINE az policy definition delete --name MyPolicyDefinitionNEWLINE crafted: trueNEWLINE"""NEWLINENEWLINEhelps['policy definition list'] = """NEWLINEtype: commandNEWLINEshort-summary: List policy definitions.NEWLINE"""NEWLINENEWLINEhelps['policy definition show'] = """NEWLINEtype: commandNEWLINEshort-summary: Show a policy definition.NEWLINEexamples:NEWLINE - name: Show a policy definition. (autogenerated)NEWLINE text: |NEWLINE az policy definition show --name MyPolicyDefinitionNEWLINE crafted: trueNEWLINE"""NEWLINENEWLINEhelps['policy definition update'] = """NEWLINEtype: commandNEWLINEshort-summary: Update a policy definition.NEWLINEexamples:NEWLINE - name: Update a policy definition. (autogenerated)NEWLINE text: |NEWLINE az policy definition update --name MyPolicyDefinitionNEWLINE crafted: trueNEWLINE"""NEWLINENEWLINEhelps['policy set-definition'] = """NEWLINEtype: groupNEWLINEshort-summary: Manage resource policy set definitions.NEWLINE"""NEWLINENEWLINEhelps['policy set-definition create'] = """NEWLINEtype: commandNEWLINEshort-summary: Create a policy set definition.NEWLINEparameters:NEWLINE - name: --definitionsNEWLINE type: stringNEWLINE short-summary: Policy definitions in JSON format, or a path to a file or URI containing JSON rules.NEWLINE - name: --management-groupNEWLINE type: stringNEWLINE short-summary: Name of management group the new policy set definition can be assigned in.NEWLINE - name: --subscriptionNEWLINE type: stringNEWLINE short-summary: Name or id of the subscription the new policy set definition can be assigned in.NEWLINEexamples:NEWLINE - name: Create a policy set definition.NEWLINE text: |NEWLINE az policy set-definition create -n readOnlyStorage \\NEWLINE --definitions "[ { \\"policyDefinitionId\\": \\"/subscriptions/mySubId/providers/ \\NEWLINE Microsoft.Authorization/policyDefinitions/storagePolicy\\", \\"parameters\\": \\NEWLINE { \\"storageSku\\": { \\"value\\": \\"[parameters(\\\\"requiredSku\\\\")]\\" 
} } }]" \\NEWLINE --params "{ \\"requiredSku\\": { \\"type\\": \\"String\\" } }"NEWLINE - name: Create a policy set definition with parameters.NEWLINE text: |NEWLINE az policy set-definition create -n readOnlyStorage \\NEWLINE --definitions '[ { \\"policyDefinitionId\\": \\"/subscriptions/mySubId/providers/ \\NEWLINE Microsoft.Authorization/policyDefinitions/storagePolicy\\" } ]'NEWLINE - name: Create a policy set definition in a subscription.NEWLINE text: |NEWLINE az policy set-definition create -n readOnlyStorage \\NEWLINE --subscription '0b1f6471-1bf0-4dda-aec3-111122223333' \\NEWLINE --definitions '[ { \\"policyDefinitionId\\": \\"/subscriptions/ \\NEWLINE 0b1f6471-1bf0-4dda-aec3-111122223333/providers/Microsoft.Authorization/ \\NEWLINE policyDefinitions/storagePolicy\\" } ]'NEWLINE - name: Create a policy set definition with policy definition groups.NEWLINE text: |NEWLINE az policy set-definition create -n computeRequirements \\NEWLINE --definitions "[ { \\"policyDefinitionId \\": \\"/subscriptions/mySubId/providers/ \\NEWLINE Microsoft.Authorization/policyDefinitions/storagePolicy\\", \\"groupNames\\": \\NEWLINE [ \\"CostSaving\\", \\"Organizational\\" ] }, { \\"policyDefinitionId\\": \\NEWLINE \\"/subscriptions/mySubId/providers/Microsoft.Authorization/ \\NEWLINE policyDefinitions/tagPolicy\\", \\"groupNames\\": [ \\NEWLINE \\"Organizational\\" ] } ]" \\NEWLINE --definition-groups "[{ \\"name\\": \\"CostSaving\\" }, { \\"name\\": \\"Organizational\\" } ]"NEWLINE"""NEWLINENEWLINEhelps['policy set-definition delete'] = """NEWLINEtype: commandNEWLINEshort-summary: Delete a policy set definition.NEWLINEexamples:NEWLINE - name: Delete a policy set definition. 
(autogenerated)NEWLINE text: |NEWLINE az policy set-definition delete --management-group myMg --name MyPolicySetDefinitionNEWLINE crafted: trueNEWLINE"""NEWLINENEWLINEhelps['policy set-definition list'] = """NEWLINEtype: commandNEWLINEshort-summary: List policy set definitions.NEWLINE"""NEWLINENEWLINEhelps['policy set-definition show'] = """NEWLINEtype: commandNEWLINEshort-summary: Show a policy set definition.NEWLINEexamples:NEWLINE - name: Show a policy set definition. (autogenerated)NEWLINE text: |NEWLINE az policy set-definition show --name MyPolicySetDefinitionNEWLINE crafted: trueNEWLINE"""NEWLINENEWLINEhelps['policy set-definition update'] = """NEWLINEtype: commandNEWLINEshort-summary: Update a policy set definition.NEWLINEexamples:NEWLINE - name: Update a policy set definition.NEWLINE text: |-NEWLINE az policy set-definition update \\NEWLINE --definitions '[ { \\"policyDefinitionId\\": \\"/subscriptions/mySubId/providers/ \\NEWLINE Microsoft.Authorization/policyDefinitions/storagePolicy\\" } ]' \\NEWLINE --name MyPolicySetDefinitionNEWLINE - name: Update the groups and definitions within a policy set definition.NEWLINE text: |NEWLINE az policy set-definition update -n computeRequirements \\NEWLINE --definitions "[ { \\"policyDefinitionId\\": \\"/subscriptions/mySubId/providers/ \\NEWLINE Microsoft.Authorization/policyDefinitions/storagePolicy\\", \\"groupNames\\": [ \\NEWLINE \\"CostSaving\\", \\"Organizational\\" ] }, { \\"policyDefinitionId\\": \\NEWLINE \\"/subscriptions/mySubId/providers/Microsoft.Authorization/ \\NEWLINE policyDefinitions/tagPolicy\\", \\NEWLINE \\"groupNames\\": [ \\"Organizational\\" ] } ]" \\NEWLINE --definition-groups "[{ \\"name\\": \\"CostSaving\\" }, { \\"name\\": \\"Organizational\\" } ]"NEWLINE"""NEWLINENEWLINEhelps['policy exemption'] = """NEWLINEtype: groupNEWLINEshort-summary: Manage resource policy exemptions.NEWLINE"""NEWLINENEWLINEhelps['policy exemption create'] = """NEWLINEtype: commandNEWLINEshort-summary: Create a 
policy exemption.NEWLINEexamples:NEWLINE - name: Create a policy exemption in default subscription.NEWLINE text: |NEWLINE az policy exemption create -n exemptTestVM \\NEWLINE --policy-assignment "/subscriptions/mySubId/providers/Microsoft.Authorization/policyAssignments/limitVMSku" \\NEWLINE --exemption-category "Waiver"NEWLINE - name: Create a policy exemption in the resource group.NEWLINE text: |NEWLINE az policy exemption create -n exemptTestVM \\NEWLINE --policy-assignment "/subscriptions/mySubId/providers/Microsoft.Authorization/policyAssignments/limitVMSku" \\NEWLINE --exemption-category "Waiver" \\NEWLINE --resource-group "myResourceGroup"NEWLINE - name: Create a policy exemption in a management group.NEWLINE text: |NEWLINE az policy exemption create -n exemptTestVM \\NEWLINE --policy-assignment "/providers/Microsoft.Management/managementGroups/myMG/providers/Microsoft.Authorization/policyAssignments/limitVMSku" \\NEWLINE --exemption-category "Waiver" \\NEWLINE --scope "/providers/Microsoft.Management/managementGroups/myMG"NEWLINE"""NEWLINENEWLINEhelps['policy exemption delete'] = """NEWLINEtype: commandNEWLINEshort-summary: Delete a policy exemption.NEWLINEexamples:NEWLINE - name: Delete a policy exemption.NEWLINE text: |NEWLINE az policy exemption delete --name MyPolicyExemption --resource-group "myResourceGroup"NEWLINE crafted: trueNEWLINE"""NEWLINENEWLINEhelps['policy exemption list'] = """NEWLINEtype: commandNEWLINEshort-summary: List policy exemptions.NEWLINE"""NEWLINENEWLINEhelps['policy exemption show'] = """NEWLINEtype: commandNEWLINEshort-summary: Show a policy exemption.NEWLINEexamples:NEWLINE - name: Show a policy exemption.NEWLINE text: |NEWLINE az policy exemption show --name MyPolicyExemption --resource-group "myResourceGroup"NEWLINE crafted: trueNEWLINE"""NEWLINENEWLINEhelps['policy exemption update'] = """NEWLINEtype: commandNEWLINEshort-summary: Update a policy exemption.NEWLINEexamples:NEWLINE - name: Update a policy exemption.NEWLINE 
text: |NEWLINE az policy exemption update -n exemptTestVM \\NEWLINE --exemption-category "Mitigated"NEWLINE - name: Update a policy exemption in the resource group.NEWLINE text: |NEWLINE az policy exemption update -n exemptTestVM \\NEWLINE --exemption-category "Mitigated" \\NEWLINE --resource-group "myResourceGroup"NEWLINE - name: Update a policy exemption in a management group.NEWLINE text: |NEWLINE az policy exemption update -n exemptTestVM \\NEWLINE --exemption-category "Mitigated" \\NEWLINE --scope "/providers/Microsoft.Management/managementGroups/myMG"NEWLINE"""NEWLINENEWLINEhelps['provider'] = """NEWLINEtype: groupNEWLINEshort-summary: Manage resource providers.NEWLINE"""NEWLINENEWLINEhelps['provider list'] = """NEWLINEtype: commandNEWLINEexamples:NEWLINE - name: Display all resource types for the network resource provider.NEWLINE text: >NEWLINE az provider list --query [?namespace=='Microsoft.Network'].resourceTypes[].resourceTypeNEWLINE"""NEWLINENEWLINEhelps['provider permission'] = """NEWLINEtype: groupNEWLINEshort-summary: Manage permissions for a provider.NEWLINE"""NEWLINENEWLINEhelps['provider permission list'] = """NEWLINEtype: commandNEWLINEshort-summary: List permissions from a provider.NEWLINE"""NEWLINENEWLINEhelps['provider operation'] = """NEWLINEtype: groupNEWLINEshort-summary: Get provider operations metadatas.NEWLINE"""NEWLINENEWLINEhelps['provider operation list'] = """NEWLINEtype: commandNEWLINEshort-summary: Get operations from all providers.NEWLINE"""NEWLINENEWLINEhelps['provider operation show'] = """NEWLINEtype: commandNEWLINEshort-summary: Get an individual provider's operations.NEWLINEexamples:NEWLINE - name: Get an individual provider's operations. (autogenerated)NEWLINE text: |NEWLINE az provider operation show --namespace Microsoft.StorageNEWLINE crafted: trueNEWLINE"""NEWLINENEWLINEhelps['provider register'] = """NEWLINEtype: commandNEWLINEshort-summary: Register a provider.NEWLINEexamples:NEWLINE - name: Register a provider. 
(autogenerated)NEWLINE text: |NEWLINE az provider register --namespace 'Microsoft.PolicyInsights'NEWLINE crafted: trueNEWLINE - name: Register a provider from RPaaS.NEWLINE text: |NEWLINE az provider register -n 'Microsoft.Confluent' --accept-termsNEWLINE - name: Register a management group.NEWLINE text: |NEWLINE az provider register --namespace Microsoft.Automation -m mgIDNEWLINE"""NEWLINENEWLINEhelps['provider unregister'] = """NEWLINEtype: commandNEWLINEshort-summary: Unregister a provider.NEWLINEexamples:NEWLINE - name: Unregister a provider. (autogenerated)NEWLINE text: |NEWLINE az provider unregister --namespace Microsoft.AutomationNEWLINE crafted: trueNEWLINE"""NEWLINENEWLINEhelps['resource'] = """NEWLINEtype: groupNEWLINEshort-summary: Manage Azure resources.NEWLINE"""NEWLINENEWLINEhelps['resource create'] = """NEWLINEtype: commandNEWLINEshort-summary: create a resource.NEWLINEexamples:NEWLINE - name: Create an API app by providing a full JSON configuration.NEWLINE text: |NEWLINE az resource create -g myRG -n myApiApp --resource-type Microsoft.web/sites \\NEWLINE --is-full-object --properties "{ \\"kind\\": \\"api\\", \\"location\\": \\NEWLINE \\"West US\\", \\"properties\\": { \\"serverFarmId\\": \\NEWLINE \\"/subscriptions/{SubID}/resourcegroups/{ResourceGroup} \\NEWLINE /providers/Microsoft.Web/serverfarms/{ServicePlan}\\" } }"NEWLINE - name: Create a resource by loading JSON configuration from a file.NEWLINE text: >NEWLINE az resource create -g myRG -n myApiApp --resource-type Microsoft.web/sites --is-full-object --properties @jsonConfigFileNEWLINE - name: Create a web app with the minimum required configuration information.NEWLINE text: |NEWLINE az resource create -g myRG -n myWeb --resource-type Microsoft.web/sites \\NEWLINE --properties "{ \\"serverFarmId\\":\\"/subscriptions/{SubID}/resourcegroups/ \\NEWLINE {ResourceGroup}/providers/Microsoft.Web/serverfarms/{ServicePlan}\\" }"NEWLINE - name: Create a resource by using the latest api-version 
whether this version is a preview version.NEWLINE text: >NEWLINE az resource create -g myRG -n myApiApp --resource-type Microsoft.web/sites --is-full-object --properties @jsonConfigFile --latest-include-previewNEWLINE"""NEWLINENEWLINEhelps['resource delete'] = """NEWLINEtype: commandNEWLINEshort-summary: Delete a resource.NEWLINEexamples:NEWLINE - name: Delete a virtual machine named 'MyVm'.NEWLINE text: >NEWLINE az resource delete -g MyResourceGroup -n MyVm --resource-type "Microsoft.Compute/virtualMachines"NEWLINE - name: Delete a web app using a resource identifier.NEWLINE text: >NEWLINE az resource delete --ids /subscriptions/0b1f6471-1bf0-4dda-aec3-111111111111/resourceGroups/MyResourceGroup/providers/Microsoft.Web/sites/MyWebappNEWLINE - name: Delete a subnet using a resource identifier.NEWLINE text: >NEWLINE az resource delete --ids /subscriptions/0b1f6471-1bf0-4dda-aec3-111111111111/resourceGroups/MyResourceGroup/providers/Microsoft.Network/virtualNetworks/MyVnet/subnets/MySubnetNEWLINE - name: Delete a virtual machine named 'MyVm' by using the latest api-version whether this version is a preview version.NEWLINE text: >NEWLINE az resource delete -g MyResourceGroup -n MyVm --resource-type "Microsoft.Compute/virtualMachines" --latest-include-previewNEWLINE"""NEWLINENEWLINEhelps['resource invoke-action'] = """NEWLINEtype: commandNEWLINEshort-summary: Invoke an action on the resource.NEWLINElong-summary: >NEWLINE A list of possible actions corresponding to a resource can be found at https://docs.microsoft.com/rest/api/. All POST requests are actions that can be invoked and are specified at the end of the URI path. For instance, to stop a VM, theNEWLINE request URI is https://management.azure.com/subscriptions/{SubscriptionId}/resourceGroups/{ResourceGroup}/providers/Microsoft.Compute/virtualMachines/{VM}/powerOff?api-version={APIVersion} and the corresponding action is `powerOff`. 
This canNEWLINE be found at https://docs.microsoft.com/rest/api/compute/virtualmachines/virtualmachines-stop.NEWLINEexamples:NEWLINE - name: Power-off a vm, specified by Id.NEWLINE text: >NEWLINE az resource invoke-action --action powerOff \\NEWLINE --ids /subscriptions/{SubID}/resourceGroups/{ResourceGroup}/providers/Microsoft.Compute/virtualMachines/{VMName}NEWLINE - name: Capture information for a stopped vm.NEWLINE text: >NEWLINE az resource invoke-action --action capture \\NEWLINE --ids /subscriptions/{SubID}/resourceGroups/{ResourceGroup}/providers/ \\NEWLINE Microsoft.Compute/virtualMachines/{VMName} \\NEWLINE --request-body "{ \\"vhdPrefix\\": \\"myPrefix\\", \\"destinationContainerName\\": \\NEWLINE \\"myContainer\\", \\"overwriteVhds\\": true }"NEWLINE - name: Invoke an action on the resource. (autogenerated)NEWLINE text: |NEWLINE az resource invoke-action --action capture --name MyResource --resource-group MyResourceGroup --resource-type Microsoft.web/sitesNEWLINE crafted: trueNEWLINE"""NEWLINENEWLINEhelps['resource link'] = """NEWLINEtype: groupNEWLINEshort-summary: Manage links between resources.NEWLINElong-summary: >NEWLINE Linking is a feature of the Resource Manager. It enables declaring relationships between resources even if they do not reside in the same resource group.NEWLINE Linking has no impact on resource usage, billing, or role-based access. 
It allows for managing multiple resources across groups as a single unit.NEWLINE"""NEWLINENEWLINEhelps['resource link create'] = """NEWLINEtype: commandNEWLINEshort-summary: Create a new link between resources.NEWLINEparameters:NEWLINE - name: --linkNEWLINE long-summary: >NEWLINE Format: /subscriptions/{SubID}/resourceGroups/{ResourceGroupID}/providers/{ProviderNamespace}/{ResourceType}/{ResourceName}/providers/Microsoft.Resources/links/{LinkName}NEWLINEexamples:NEWLINE - name: Create a link from {SourceID} to {ResourceID} with notesNEWLINE text: >NEWLINE az resource link create --link {SourceID} --target {ResourceID} --notes "SourceID depends on ResourceID"NEWLINE"""NEWLINENEWLINEhelps['resource link delete'] = """NEWLINEtype: commandNEWLINEshort-summary: Delete a link between resources.NEWLINEparameters:NEWLINE - name: --linkNEWLINE long-summary: >NEWLINE Format: /subscriptions/{SubID}/resourceGroups/{ResourceGroupID}/providers/{ProviderNamespace}/{ResourceType}/{ResourceName}/providers/Microsoft.Resources/links/{LinkName}NEWLINEexamples:NEWLINE - name: Delete link {LinkID}NEWLINE text: >NEWLINE az resource link delete --link {LinkID}NEWLINE"""NEWLINENEWLINEhelps['resource link list'] = """NEWLINEtype: commandNEWLINEshort-summary: List resource links.NEWLINEexamples:NEWLINE - name: List links, filtering with {filter-string}NEWLINE text: >NEWLINE az resource link list --filter {filter-string}NEWLINE - name: List all links for resource group {ResourceGroup} in subscription {SubID}NEWLINE text: >NEWLINE az resource link list --scope /subscriptions/{SubID}/resourceGroups/{ResourceGroup}NEWLINE"""NEWLINENEWLINEhelps['resource link update'] = """NEWLINEtype: commandNEWLINEshort-summary: Update link between resources.NEWLINEparameters:NEWLINE - name: --linkNEWLINE long-summary: >NEWLINE Format: 
/subscriptions/{SubID}/resourceGroups/{ResourceGroupID}/providers/{ProviderNamespace}/{ResourceType}/{ResourceName}/providers/Microsoft.Resources/links/{LinkName}NEWLINEexamples:NEWLINE - name: Update the notes for {LinkID} notes "some notes to explain this link"NEWLINE text: >NEWLINE az resource link update --link {LinkID} --notes "some notes to explain this link"NEWLINE"""NEWLINENEWLINEhelps['resource list'] = """NEWLINEtype: commandNEWLINEshort-summary: List resources.NEWLINEexamples:NEWLINE - name: List all resources in the West US region.NEWLINE text: >NEWLINE az resource list --location westusNEWLINE - name: List all resources with the name 'resourceName'.NEWLINE text: >NEWLINE az resource list --name 'resourceName'NEWLINE - name: List all resources with the tag 'test'.NEWLINE text: >NEWLINE az resource list --tag testNEWLINE - name: List all resources with a tag that starts with 'test'.NEWLINE text: >NEWLINE az resource list --tag 'test*'NEWLINE - name: List all resources with the tag 'test' that have the value 'example'.NEWLINE text: >NEWLINE az resource list --tag test=exampleNEWLINE"""NEWLINENEWLINEhelps['resource lock'] = """NEWLINEtype: groupNEWLINEshort-summary: Manage Azure resource level locks.NEWLINE"""NEWLINENEWLINEhelps['resource lock create'] = """NEWLINEtype: commandNEWLINEshort-summary: Create a resource-level lock.NEWLINEexamples:NEWLINE - name: Create a read-only resource level lock on a vnet.NEWLINE text: >NEWLINE az resource lock create --lock-type ReadOnly -n lockName -g MyResourceGroup --resource myvnet --resource-type Microsoft.Network/virtualNetworksNEWLINE - name: Create a read-only resource level lock on a vnet using a vnet id.NEWLINE text: >NEWLINE az resource lock create --lock-type ReadOnly -n lockName --resource /subscriptions/{SubID}/resourceGroups/{ResourceGroup}/providers/Microsoft.Network/virtualNetworks/{VNETName}NEWLINE"""NEWLINENEWLINEhelps['resource lock delete'] = """NEWLINEtype: commandNEWLINEshort-summary: Delete a 
resource-level lock.NEWLINEexamples:NEWLINE - name: Delete a resource level lockNEWLINE text: >NEWLINE az resource lock delete --name lockName -g MyResourceGroup --resource myvnet --resource-type Microsoft.Network/virtualNetworksNEWLINE - name: Delete a resource level lock on a vnet using a vnet id.NEWLINE text: >NEWLINE az resource lock delete -n lockName --resource /subscriptions/{SubID}/resourceGroups/{ResourceGroup}/providers/Microsoft.Network/virtualNetworks/{VMName}NEWLINE - name: Delete a resource-level lock. (autogenerated)NEWLINE text: |NEWLINE az resource lock delete --ids /subscriptions/{SubID}/resourceGroups/{ResourceGroup}/providers/Microsoft.Web/sites/{WebApp}NEWLINE crafted: trueNEWLINE"""NEWLINENEWLINEhelps['resource lock list'] = """NEWLINEtype: commandNEWLINEshort-summary: List lock information in the resource-level.NEWLINEexamples:NEWLINE - name: List out all locks on a vnetNEWLINE text: >NEWLINE az resource lock list -g MyResourceGroup --resource myvnet --resource-type Microsoft.Network/virtualNetworksNEWLINE"""NEWLINENEWLINEhelps['resource lock show'] = """NEWLINEtype: commandNEWLINEshort-summary: Show the details of a resource-level lockNEWLINEexamples:NEWLINE - name: Show a resource level lockNEWLINE text: >NEWLINE az resource lock show -n lockname -g MyResourceGroup --resource myvnet --resource-type Microsoft.Network/virtualNetworksNEWLINE"""NEWLINENEWLINEhelps['resource lock update'] = """NEWLINEtype: commandNEWLINEshort-summary: Update a resource-level lock.NEWLINEexamples:NEWLINE - name: Update a resource level lock with new notes and typeNEWLINE text: >NEWLINE az resource lock update --name lockName -g MyResourceGroup --resource myvnet --resource-type Microsoft.Network/virtualNetworks --notes newNotesHere --lock-type CanNotDeleteNEWLINE - name: Update a resource-level lock. 
(autogenerated)NEWLINE text: |NEWLINE az resource lock update --lock-type CanNotDelete --name lockName --namespace Microsoft.Network --resource-group MyResourceGroup --resource-name myvnet --resource-type Microsoft.Network/virtualNetworksNEWLINE crafted: trueNEWLINE"""NEWLINENEWLINEhelps['resource show'] = """NEWLINEtype: commandNEWLINEshort-summary: Get the details of a resource.NEWLINEexamples:NEWLINE - name: Show a virtual machine resource named 'MyVm'.NEWLINE text: >NEWLINE az resource show -g MyResourceGroup -n MyVm --resource-type "Microsoft.Compute/virtualMachines"NEWLINE - name: Show a web app using a resource identifier.NEWLINE text: >NEWLINE az resource show --ids /subscriptions/0b1f6471-1bf0-4dda-aec3-111111111111/resourceGroups/MyResourceGroup/providers/Microsoft.Web/sites/MyWebappNEWLINE - name: Show a subnet.NEWLINE text: >NEWLINE az resource show -g MyResourceGroup -n MySubnet --namespace Microsoft.Network --parent virtualnetworks/MyVnet --resource-type subnetsNEWLINE - name: Show a subnet using a resource identifier.NEWLINE text: >NEWLINE az resource show --ids /subscriptions/0b1f6471-1bf0-4dda-aec3-111111111111/resourceGroups/MyResourceGroup/providers/Microsoft.Network/virtualNetworks/MyVnet/subnets/MySubnetNEWLINE - name: Show an application gateway path rule.NEWLINE text: >NEWLINE az resource show -g MyResourceGroup --namespace Microsoft.Network --parent applicationGateways/ag1/urlPathMaps/map1 --resource-type pathRules -n rule1NEWLINE - name: Show a virtual machine resource named 'MyVm' by using the latest api-version whether this version is a preview version.NEWLINE text: >NEWLINE az resource show -g MyResourceGroup -n MyVm --resource-type "Microsoft.Compute/virtualMachines" --latest-include-previewNEWLINE"""NEWLINENEWLINEhelps['resource tag'] = """NEWLINEtype: commandNEWLINEshort-summary: Tag a resource.NEWLINEexamples:NEWLINE - name: Tag the virtual machine 'MyVm' with the key 'vmlist' and value 'vm1'.NEWLINE text: >NEWLINE az resource tag 
--tags vmlist=vm1 -g MyResourceGroup -n MyVm --resource-type "Microsoft.Compute/virtualMachines"NEWLINE - name: Tag a web app with the key 'vmlist' and value 'vm1', using a resource identifier.NEWLINE text: >NEWLINE az resource tag --tags vmlist=vm1 --ids /subscriptions/{SubID}/resourceGroups/{ResourceGroup}/providers/Microsoft.Web/sites/{WebApp}NEWLINE - name: Tag the virtual machine 'MyVm' with the key 'vmlist' and value 'vm1' incrementally. It doesn't empty the existing tags.NEWLINE text: >NEWLINE az resource tag --tags vmlist=vm1 -g MyResourceGroup -n MyVm --resource-type "Microsoft.Compute/virtualMachines" -iNEWLINE - name: Tag the virtual machine 'MyVm' with the key 'vmlist' and value 'vm1' by using the latest api-version whether this version is a preview version.NEWLINE text: >NEWLINE az resource tag --tags vmlist=vm1 -g MyResourceGroup -n MyVm --resource-type "Microsoft.Compute/virtualMachines" --latest-include-previewNEWLINE"""NEWLINENEWLINEhelps['resource update'] = """NEWLINEtype: commandNEWLINEshort-summary: Update a resource.NEWLINEexamples:NEWLINE - name: Update a webapp by using the latest api-version whether this version is a preview version.NEWLINE text: >NEWLINE az resource update --ids /subscriptions/{SubID}/resourceGroups/{ResourceGroup}/providers/Microsoft.Web/sites/{WebApp} --set tags.key=value --latest-include-previewNEWLINE - name: Update a resource. (autogenerated)NEWLINE text: |NEWLINE az resource update --ids $id --set properties.connectionType=ProxyNEWLINE crafted: trueNEWLINE - name: Update a resource. 
(autogenerated)NEWLINE text: |NEWLINE az resource update --name myresource --resource-group myresourcegroup --resource-type subnets --set tags.key=valueNEWLINE crafted: trueNEWLINE"""NEWLINENEWLINEhelps['resource wait'] = """NEWLINEtype: commandNEWLINEshort-summary: Place the CLI in a waiting state until a condition of a resources is met.NEWLINEexamples:NEWLINE - name: Place the CLI in a waiting state until a condition of a resources is met. (autogenerated)NEWLINE text: |NEWLINE az resource wait --exists --ids /subscriptions/{SubID}/resourceGroups/{ResourceGroup}/providers/Microsoft.Web/sites/{WebApp}NEWLINE crafted: trueNEWLINE - name: Place the CLI in a waiting state until a condition of a resources is met. (autogenerated)NEWLINE text: |NEWLINE az resource wait --exists --ids /subscriptions/{SubID}/resourceGroups/{ResourceGroup}/providers/Microsoft.Web/sites/{WebApp} --include-response-body trueNEWLINE crafted: trueNEWLINE - name: Place the CLI in a waiting state until a condition of a resources is met. 
(autogenerated)NEWLINE text: |NEWLINE az resource wait --exists --name MyResource --resource-group MyResourceGroup --resource-type subnetsNEWLINE crafted: trueNEWLINE"""NEWLINENEWLINEhelps['tag'] = """NEWLINEtype: groupNEWLINEshort-summary: Tag Management on a resource.NEWLINE"""NEWLINENEWLINEhelps['tag add-value'] = """NEWLINEtype: commandNEWLINEshort-summary: Create a tag value.NEWLINEexamples:NEWLINE - name: Create a tag value.NEWLINE text: >NEWLINE az tag add-value --name MyTag --value MyValueNEWLINE"""NEWLINENEWLINEhelps['tag create'] = """NEWLINEtype: commandNEWLINEshort-summary: Create tags on a specific resource.NEWLINElong-summary: >NEWLINE The az tag create command with an id creates or updates the entire set of tags on a resource, resource group or subscription.NEWLINE This operation allows adding or replacing the entire set of tags on the specified resource, resource group or subscription.NEWLINE The specified entity can have a maximum of 50 tags.NEWLINEparameters:NEWLINE - name: --name -nNEWLINE short-summary: The name of the tag to create.NEWLINE - name: --subscriptionNEWLINE short-summary: Name or ID of subscription. You can configure the default subscription using az account set -s NAME_OR_ID.NEWLINE - name: --resource-idNEWLINE short-summary: The resource identifier for the entity being tagged. 
A resource, a resource group or a subscription may be tagged.NEWLINE - name: --tagsNEWLINE short-summary: The tags to be applied on the resource.NEWLINEexamples:NEWLINE - name: Create a tag in the subscription.NEWLINE text: >NEWLINE az tag create --name MyTagNEWLINE - name: Create or update the entire set of tags on a subscription.NEWLINE text: >NEWLINE az tag create --resource-id /subscriptions/{subId} --tags Dept=Finance Status=NormalNEWLINE - name: Create or update the entire set of tags on a resource group.NEWLINE text: >NEWLINE az tag create --resource-id /subscriptions/{sub-id}/resourcegroups/{rg} --tags Dept=Finance Status=NormalNEWLINE - name: Create or update the entire set of tags on a resource.NEWLINE text: >NEWLINE az tag create --resource-id /subscriptions/{sub-id}/resourcegroups/{rg}/providers/Microsoft.Compute/virtualMachines/{vmName} --tags Dept=Finance Status=NormalNEWLINE"""NEWLINENEWLINEhelps['tag delete'] = """NEWLINEtype: commandNEWLINEshort-summary: Delete tags on a specific resource.NEWLINElong-summary:NEWLINE The az tag delete command with an id deletes the entire set of tags on a resource, resource group or subscription.NEWLINEparameters:NEWLINE - name: --name -nNEWLINE short-summary: The name of the tag to be deleted.NEWLINE - name: --resource-idNEWLINE short-summary: The resource identifier for the entity being tagged. 
A resource, a resource group or a subscription may be tagged.NEWLINEexamples:NEWLINE - name: Delete a tag from the subscription.NEWLINE text: >NEWLINE az tag delete --name MyTagNEWLINE - name: Delete the entire set of tags on a subscription.NEWLINE text: >NEWLINE az tag delete --resource-id /subscriptions/{sub-id}NEWLINE - name: Delete the entire set of tags on a resource group.NEWLINE text: >NEWLINE az tag delete --resource-id /subscriptions/{sub-id}/resourcegroups/{rg}NEWLINE - name: Delete the entire set of tags on a resource.NEWLINE text: >NEWLINE az tag delete --resource-id /subscriptions/{sub-id}/resourcegroups/{rg}/providers/Microsoft.Compute/virtualMachines/{vmName}NEWLINE"""NEWLINENEWLINEhelps['tag list'] = """NEWLINEtype: commandNEWLINEshort-summary: List the entire set of tags on a specific resource.NEWLINElong-summary: The az tag list command with an id lists the entire set of tags on a resource, resource group or subscription.NEWLINEparameters:NEWLINE - name: --resource-idNEWLINE short-summary: The resource identifier for the entity being tagged. 
A resource, a resource group or a subscription may be tagged.NEWLINEexamples:NEWLINE - name: List the entire set of tags on a subscription.NEWLINE text: >NEWLINE az tag list --resource-id /subscriptions/{sub-id}NEWLINE - name: List the entire set of tags on a resource group.NEWLINE text: >NEWLINE az tag list --resource-id /subscriptions/{sub-id}/resourcegroups/{rg}NEWLINE - name: List the entire set of tags on a resource.NEWLINE text: >NEWLINE az tag list --resource-id /subscriptions/{sub-id}/resourcegroups/{rg}/providers/Microsoft.Compute/virtualMachines/{vmName}NEWLINE"""NEWLINENEWLINEhelps['tag update'] = """NEWLINEtype: commandNEWLINEshort-summary: Selectively update the set of tags on a specific resource.NEWLINElong-summary: >NEWLINE The az tag update command with an id selectively updates the set of tags on a resource, resource group or subscription.NEWLINE This operation allows replacing, merging or selectively deleting tags on the specified resource, resource group or subscription.NEWLINE The specified entity can have a maximum of 50 tags at the end of the operation.NEWLINE The 'replace' option replaces the entire set of existing tags with a new set.NEWLINE The 'merge' option allows adding tags with new names and updating the values of tags with existing names.NEWLINE The 'delete' option allows selectively deleting tags based on given names or name/value pairs.NEWLINEparameters:NEWLINE - name: --resource-idNEWLINE short-summary: The resource identifier for the entity being tagged. A resource, a resource group or a subscription may be tagged.NEWLINE - name: --operationNEWLINE short-summary: The update operation. 
Options are Merge, Replace and Delete.NEWLINE - name: --tagsNEWLINE short-summary: The tags to be updated on the resource.NEWLINEexamples:NEWLINE - name: Selectively update the set of tags on a subscription with "merge" Operation.NEWLINE text: >NEWLINE az tag update --resource-id /subscriptions/{sub-id} --operation merge --tags key1=value1 key3=value3NEWLINE - name: Selectively update the set of tags on a resource group with "replace" Operation.NEWLINE text: >NEWLINE az tag update --resource-id /subscriptions/{sub-id}/resourcegroups/{rg} --operation replace --tags key1=value1 key3=value3NEWLINE - name: Selectively update the set of tags on a resource with "delete" Operation.NEWLINE text: >NEWLINE az tag update --resource-id /subscriptions/{sub-id}/resourcegroups/{rg}/providers/Microsoft.Compute/virtualMachines/{vmName} --operation delete --tags key1=value1NEWLINE"""NEWLINENEWLINEhelps['ts'] = """NEWLINEtype: groupNEWLINEshort-summary: Manage template specs at subscription or resource group scope.NEWLINE"""NEWLINENEWLINEhelps['ts create'] = """NEWLINEtype: commandNEWLINEshort-summary: Create a template spec and or template spec version.NEWLINEexamples:NEWLINE - name: Create a template spec.NEWLINE text: az ts create -g testRG --name TemplateSpecName -l WestUS --display-name "MyDisplayName" --description "Simple template spec" --tags key1=value1NEWLINE - name: Create a template spec version.NEWLINE text: az ts create -g testRG --name TemplateSpecName -v 2.0 -l WestUS --template-file templateSpec.json --version-description "Less simple template spec" --tags key1=value1 key3=value3NEWLINE - name: Create a template spec and a version of the template spec.NEWLINE text: az ts create -g testRG --name TemplateSpecName -v 1.0 -l WestUS --template-file templateSpec.json --display-name "MyDisplayName" --description "Simple template spec" --version-description "Version of simple template spec" --tags key1=value1 key2=value2NEWLINE"""NEWLINENEWLINEhelps['ts update'] = 
"""NEWLINEtype: commandNEWLINEshort-summary: Update a template spec version.NEWLINEexamples:NEWLINE - name: Update the template content of a template spec or template spec version based on the resource ID.NEWLINE text: az ts update --template-spec resourceID -f updatedFile.jsonNEWLINE - name: Update the display name and tag(s) of a template spec based on the resource ID.NEWLINE text: az ts update --template-spec resourceID --display-name "NewParentDisplayName" --tags key1=value1NEWLINE - name: Update the description of a template spec version with no prompt.NEWLINE text: az ts update -g ExistingRG --name ExistingName -v 3.0 --version-description "New description" --yesNEWLINE - name: Update all the properties of a template spec version.NEWLINE text: az ts update -g ExistingRG --name ExistingName -v 3.0 -f updatedTemplate.json --display-name "New parent display name" --description "New parent description" --version-description "New child description" --ui-form-definition formDefinition.jsonNEWLINE - name: Remove tag(s) from template spec version with no prompt.NEWLINE text: az ts update -g ExistingRG --name ExistingName -v 3.0 -f updatedTemplate.json --tags --yesNEWLINENEWLINE"""NEWLINENEWLINEhelps['ts show'] = """NEWLINEtype: commandNEWLINEshort-summary: Get the specified template spec or template spec version.NEWLINEexamples:NEWLINE - name: Show the specified template spec.NEWLINE text: az ts show -g testrg --name TemplateSpecNameNEWLINE - name: Show the specified template spec version.NEWLINE text: az ts show -g testrg --name TemplateSpecName --version VersionNameNEWLINE - name: Show the specified template spec or template spec version based on the resource ID.NEWLINE text: az ts show --template-spec resourceIDNEWLINE"""NEWLINENEWLINEhelps['ts export'] = """NEWLINEtype: commandNEWLINEshort-summary: Export the specified template spec version and artifacts (if any) to the specified output folder.NEWLINEexamples:NEWLINE - name: Export the specified template spec 
version based on resource ID.NEWLINE text: az ts export -s resourceID --output-folder C:/path/NEWLINE - name: Export the specified template spec version.NEWLINE text: az ts export -g testrg --name TemplateSpecName --version VersionName --output-folder C:/path/NEWLINE"""NEWLINENEWLINEhelps['ts delete'] = """NEWLINEtype: commandNEWLINEshort-summary: Delete a specified template spec or template spec version by name or resource ID..NEWLINEexamples:NEWLINE - name: Delete the specified template spec and all versions.NEWLINE text: az ts delete -g MyResourceGroup --name TemplateSpecNameNEWLINE - name: Delete the specified version from the template spec.NEWLINE text: az ts delete -g MyResourceGroup --name TemplateSpecName --version VersionNameNEWLINE - name: Delete the template spec or version based on resource ID.NEWLINE text: az ts delete --template-spec resourceIDNEWLINE"""NEWLINENEWLINEhelps['ts list'] = """NEWLINEtype: commandNEWLINEshort-summary: List template specs or template spec versions.NEWLINEexamples:NEWLINE - name: List all template specs in current default subscription.NEWLINE text: az ts listNEWLINE - name: List all template specs in specified subscription.NEWLINE text: az ts list --subscription SubscriptionNEWLINE - name: List all template specs in resource group.NEWLINE text: az ts list -g MyResourceGroupNEWLINE - name: List all versions of parent template spec.NEWLINE text: az ts list -g MyResourceGroup -n TemplateSpecNameNEWLINE"""NEWLINENEWLINEhelps['bicep'] = """NEWLINEtype: groupNEWLINEshort-summary: Bicep CLI command group.NEWLINE"""NEWLINENEWLINEhelps['bicep install'] = """NEWLINEtype: commandNEWLINEshort-summary: Install Bicep CLI.NEWLINEexamples:NEWLINE - name: Install Bicep CLI.NEWLINE text: az bicep installNEWLINE - name: Install a specific version of Bicep CLI.NEWLINE text: az bicep install --version v0.2.212NEWLINE"""NEWLINENEWLINEhelps['bicep upgrade'] = """NEWLINEtype: commandNEWLINEshort-summary: Upgrade Bicep CLI to the latest 
version.NEWLINE"""NEWLINENEWLINEhelps['bicep build'] = """NEWLINEtype: commandNEWLINEshort-summary: Build a Bicep file.NEWLINEexamples:NEWLINE - name: Build a Bicep file.NEWLINE text: az bicep build --file {bicep_file}NEWLINE - name: Build a Bicep file and print all output to stdout.NEWLINE text: az bicep build --file {bicep_file} --stdoutNEWLINE - name: Build a Bicep file and save the result to the specified directory.NEWLINE text: az bicep build --file {bicep_file} --outdir {out_dir}NEWLINE - name: Build a Bicep file and save the result to the specified file.NEWLINE text: az bicep build --file {bicep_file} --outfile {out_file}NEWLINE"""NEWLINENEWLINEhelps['bicep decompile'] = """NEWLINEtype: commandNEWLINEshort-summary: Attempt to decompile an ARM template file to a Bicep file.NEWLINEexamples:NEWLINE - name: Decompile an ARM template file.NEWLINE text: az bicep decompile --file {json_template_file}NEWLINE"""NEWLINENEWLINEhelps['bicep version'] = """NEWLINEtype: commandNEWLINEshort-summary: Show the installed version of Bicep CLI.NEWLINE"""NEWLINENEWLINEhelps['bicep list-versions'] = """NEWLINEtype: commandNEWLINEshort-summary: List out all available versions of Bicep CLI.NEWLINE"""NEWLINE
# This file helps to compute a version number in source trees obtained from
# git-archive tarball (such as those provided by githubs download-from-tag
# feature). Distribution tarballs (built by setup.py sdist) and build
# directories (produced by setup.py build) will contain a much shorter file
# that just contains the computed version number.

# This file is released into the public domain. Generated by
# versioneer-0.15 (https://github.com/warner/python-versioneer)
# NOTE: generated code -- prefer fixing issues in versioneer itself rather
# than hand-editing this file.

import errno
import os
import re
import subprocess
import sys

from pandas.compat import PY3


def get_keywords():
    """Return the git-archive substitution keywords for this file."""
    # these strings will be replaced by git during git-archive.
    # setup.py/versioneer.py will grep for the variable names, so they must
    # each be defined on a line of their own. _version.py will just call
    # get_keywords().
    git_refnames = "$Format:%d$"
    git_full = "$Format:%H$"
    keywords = {"refnames": git_refnames, "full": git_full}
    return keywords


class VersioneerConfig(object):
    """Plain attribute container for versioneer configuration."""
    pass


def get_config():
    """Create and return the (hard-coded) versioneer configuration."""
    # these strings are filled in when 'setup.py versioneer' creates
    # _version.py
    cfg = VersioneerConfig()
    cfg.VCS = "git"
    cfg.style = "pep440"
    cfg.tag_prefix = "v"
    cfg.parentdir_prefix = "pandas-"
    cfg.versionfile_source = "pandas/_version.py"
    cfg.verbose = False
    return cfg


class NotThisMethod(Exception):
    """Raised when a particular version-discovery strategy does not apply."""
    pass


LONG_VERSION_PY = {}
HANDLERS = {}  # maps VCS name -> {method name -> handler function}


def register_vcs_handler(vcs, method):  # decorator
    """Decorator registering *f* as the handler for (vcs, method)."""
    def decorate(f):
        if vcs not in HANDLERS:
            HANDLERS[vcs] = {}
        HANDLERS[vcs][method] = f
        return f
    return decorate


def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False):
    """Try each executable name in *commands* with *args*; return its stdout.

    Returns None when no executable could be run or the command exited
    non-zero.  ``commands`` is a list of alternative program names (e.g.
    ["git.cmd", "git.exe"] on Windows).
    """
    assert isinstance(commands, list)
    p = None
    for c in commands:
        try:
            dispcmd = str([c] + args)
            # remember shell=False, so use git.cmd on windows, not just git
            p = subprocess.Popen([c] + args, cwd=cwd, stdout=subprocess.PIPE,
                                 stderr=(subprocess.PIPE if hide_stderr
                                         else None))
            break
        except EnvironmentError:
            e = sys.exc_info()[1]
            if e.errno == errno.ENOENT:
                # this candidate doesn't exist; try the next name
                continue
            if verbose:
                print("unable to run {dispcmd}".format(dispcmd=dispcmd))
                print(e)
            return None
    else:
        # loop fell through: none of the candidate executables was found
        if verbose:
            print("unable to find command, tried %s" % (commands,))
        return None
    stdout = p.communicate()[0].strip()
    if PY3:
        stdout = stdout.decode()
    if p.returncode != 0:
        if verbose:
            print("unable to run {dispcmd} (error)".format(dispcmd=dispcmd))
        return None
    return stdout


def versions_from_parentdir(parentdir_prefix, root, verbose):
    """Derive the version from the name of the directory containing *root*."""
    # Source tarballs conventionally unpack into a directory that includes
    # both the project name and a version string.
    dirname = os.path.basename(root)
    if not dirname.startswith(parentdir_prefix):
        if verbose:
            print("guessing rootdir is '{root}', but '{dirname}' "
                  "doesn't start with prefix '{parentdir_prefix}'".format(
                      root=root, dirname=dirname,
                      parentdir_prefix=parentdir_prefix))
        raise NotThisMethod("rootdir doesn't start with parentdir_prefix")
    return {"version": dirname[len(parentdir_prefix):],
            "full-revisionid": None,
            "dirty": False, "error": None}


@register_vcs_handler("git", "get_keywords")
def git_get_keywords(versionfile_abs):
    """Extract the git keyword values from *versionfile_abs* via regexp."""
    # the code embedded in _version.py can just fetch the value of these
    # keywords. When used from setup.py, we don't want to import _version.py,
    # so we do it with a regexp instead. This function is not used from
    # _version.py.
    keywords = {}
    try:
        f = open(versionfile_abs, "r")
        for line in f.readlines():
            if line.strip().startswith("git_refnames ="):
                mo = re.search(r'=\s*"(.*)"', line)
                if mo:
                    keywords["refnames"] = mo.group(1)
            if line.strip().startswith("git_full ="):
                mo = re.search(r'=\s*"(.*)"', line)
                if mo:
                    keywords["full"] = mo.group(1)
        f.close()
    except EnvironmentError:
        # missing/unreadable file just means this strategy yields nothing
        pass
    return keywords


@register_vcs_handler("git", "keywords")
def git_versions_from_keywords(keywords, tag_prefix, verbose):
    """Compute a version dict from expanded git-archive keywords."""
    if not keywords:
        raise NotThisMethod("no keywords at all, weird")
    refnames = keywords["refnames"].strip()
    if refnames.startswith("$Format"):
        if verbose:
            print("keywords are unexpanded, not using")
        raise NotThisMethod("unexpanded keywords, not a git-archive tarball")
    refs = {r.strip() for r in refnames.strip("()").split(",")}
    # starting in git-1.8.3, tags are listed as "tag: foo-1.0" instead of
    # just "foo-1.0". If we see a "tag: " prefix, prefer those.
    TAG = "tag: "
    tags = {r[len(TAG):] for r in refs if r.startswith(TAG)}
    if not tags:
        # Either we're using git < 1.8.3, or there really are no tags. We use
        # a heuristic: assume all version tags have a digit. The old git %d
        # expansion behaves like git log --decorate=short and strips out the
        # refs/heads/ and refs/tags/ prefixes that would let us distinguish
        # between branches and tags. By ignoring refnames without digits, we
        # filter out many common branch names like "release" and
        # "stabilization", as well as "HEAD" and "master".
        tags = {r for r in refs if re.search(r'\d', r)}
        if verbose:
            print("discarding '{}', no digits".format(",".join(refs - tags)))
    if verbose:
        print("likely tags: {}".format(",".join(sorted(tags))))
    for ref in sorted(tags):
        # sorting will prefer e.g. "2.0" over "2.0rc1"
        if ref.startswith(tag_prefix):
            r = ref[len(tag_prefix):]
            if verbose:
                print("picking {r}".format(r=r))
            return {"version": r,
                    "full-revisionid": keywords["full"].strip(),
                    "dirty": False, "error": None
                    }
    # no suitable tags, so version is "0+unknown", but full hex is still there
    if verbose:
        print("no suitable tags, using unknown + full revision id")
    return {"version": "0+unknown",
            "full-revisionid": keywords["full"].strip(),
            "dirty": False, "error": "no suitable tags"}


@register_vcs_handler("git", "pieces_from_vcs")
def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command):
    """Interrogate git directly and return the raw version "pieces" dict."""
    # this runs 'git' from the root of the source tree. This only gets called
    # if the git-archive 'subst' keywords were *not* expanded, and
    # _version.py hasn't already been rewritten with a short version string,
    # meaning we're inside a checked out source tree.

    if not os.path.exists(os.path.join(root, ".git")):
        if verbose:
            print("no .git in {root}".format(root=root))
        raise NotThisMethod("no .git directory")

    GITS = ["git"]
    if sys.platform == "win32":
        GITS = ["git.cmd", "git.exe"]
    # if there is a tag, this yields TAG-NUM-gHEX[-dirty]
    # if there are no tags, this yields HEX[-dirty] (no NUM)
    describe_out = run_command(GITS, ["describe", "--tags", "--dirty",
                                      "--always", "--long"],
                               cwd=root)
    # --long was added in git-1.5.5
    if describe_out is None:
        raise NotThisMethod("'git describe' failed")
    describe_out = describe_out.strip()
    full_out = run_command(GITS, ["rev-parse", "HEAD"], cwd=root)
    if full_out is None:
        raise NotThisMethod("'git rev-parse' failed")
    full_out = full_out.strip()

    pieces = {}
    pieces["long"] = full_out
    pieces["short"] = full_out[:7]  # maybe improved later
    pieces["error"] = None

    # parse describe_out. It will be like TAG-NUM-gHEX[-dirty] or HEX[-dirty]
    # TAG might have hyphens.
    git_describe = describe_out

    # look for -dirty suffix
    dirty = git_describe.endswith("-dirty")
    pieces["dirty"] = dirty
    if dirty:
        git_describe = git_describe[:git_describe.rindex("-dirty")]

    # now we have TAG-NUM-gHEX or HEX

    if "-" in git_describe:
        # TAG-NUM-gHEX
        mo = re.search(r'^(.+)-(\d+)-g([0-9a-f]+)$', git_describe)
        if not mo:
            # unparseable. Maybe git-describe is misbehaving?
            pieces["error"] = ("unable to parse git-describe output: "
                               "'{describe_out}'".format(
                                   describe_out=describe_out))
            return pieces

        # tag
        full_tag = mo.group(1)
        if not full_tag.startswith(tag_prefix):
            fmt = ("tag '{full_tag}' doesn't start with prefix "
                   "'{tag_prefix}'")
            msg = fmt.format(full_tag=full_tag, tag_prefix=tag_prefix)
            if verbose:
                print(msg)
            pieces["error"] = msg
            return pieces
        pieces["closest-tag"] = full_tag[len(tag_prefix):]

        # distance: number of commits since tag
        pieces["distance"] = int(mo.group(2))

        # commit: short hex revision ID
        pieces["short"] = mo.group(3)

    else:
        # HEX: no tags
        pieces["closest-tag"] = None
        count_out = run_command(GITS, ["rev-list", "HEAD", "--count"],
                                cwd=root)
        pieces["distance"] = int(count_out)  # total number of commits

    return pieces


def plus_or_dot(pieces):
    """Return "." if the closest tag already contains a "+", else "+"."""
    # NOTE(review): callers only invoke this when pieces["closest-tag"] is
    # truthy; with closest-tag=None the `in` test would raise TypeError.
    if "+" in pieces.get("closest-tag", ""):
        return "."
    return "+"


def render_pep440(pieces):
    """Render pieces as TAG[+DISTANCE.gHEX[.dirty]] (PEP 440 local version)."""
    # now build up version string, with post-release "local version
    # identifier". Our goal: TAG[+DISTANCE.gHEX[.dirty]] . Note that if you
    # get a tagged build and then dirty it, you'll get TAG+0.gHEX.dirty

    # exceptions:
    # 1: no tags. git_describe was just HEX. 0+untagged.DISTANCE.gHEX[.dirty]

    if pieces["closest-tag"]:
        rendered = pieces["closest-tag"]
        if pieces["distance"] or pieces["dirty"]:
            rendered += plus_or_dot(pieces)
            rendered += "{:d}.g{}".format(pieces["distance"], pieces["short"])
            if pieces["dirty"]:
                rendered += ".dirty"
    else:
        # exception #1
        rendered = "0+untagged.{:d}.g{}".format(pieces["distance"],
                                                pieces["short"])
        if pieces["dirty"]:
            rendered += ".dirty"
    return rendered


def render_pep440_pre(pieces):
    """Render pieces as TAG[.post.devDISTANCE] (no -dirty marker)."""
    # TAG[.post.devDISTANCE] . No -dirty

    # exceptions:
    # 1: no tags. 0.post.devDISTANCE

    if pieces["closest-tag"]:
        rendered = pieces["closest-tag"]
        if pieces["distance"]:
            rendered += ".post.dev%d" % pieces["distance"]
    else:
        # exception #1
        rendered = "0.post.dev%d" % pieces["distance"]
    return rendered


def render_pep440_post(pieces):
    """Render pieces as TAG[.postDISTANCE[.dev0]+gHEX]."""
    # TAG[.postDISTANCE[.dev0]+gHEX] . The ".dev0" means dirty. Note that
    # .dev0 sorts backwards (a dirty tree will appear "older" than the
    # corresponding clean one), but you shouldn't be releasing software with
    # -dirty anyways.

    # exceptions:
    # 1: no tags. 0.postDISTANCE[.dev0]

    if pieces["closest-tag"]:
        rendered = pieces["closest-tag"]
        if pieces["distance"] or pieces["dirty"]:
            rendered += ".post{:d}".format(pieces["distance"])
            if pieces["dirty"]:
                rendered += ".dev0"
            rendered += plus_or_dot(pieces)
            rendered += "g{}".format(pieces["short"])
    else:
        # exception #1
        rendered = "0.post%d" % pieces["distance"]
        if pieces["dirty"]:
            rendered += ".dev0"
        rendered += "+g{}".format(pieces["short"])
    return rendered


def render_pep440_old(pieces):
    """Render pieces as TAG[.postDISTANCE[.dev0]] (no commit hash)."""
    # TAG[.postDISTANCE[.dev0]] . The ".dev0" means dirty.

    # exceptions:
    # 1: no tags. 0.postDISTANCE[.dev0]

    if pieces["closest-tag"]:
        rendered = pieces["closest-tag"]
        if pieces["distance"] or pieces["dirty"]:
            rendered += ".post%d" % pieces["distance"]
            if pieces["dirty"]:
                rendered += ".dev0"
    else:
        # exception #1
        rendered = "0.post%d" % pieces["distance"]
        if pieces["dirty"]:
            rendered += ".dev0"
    return rendered


def render_git_describe(pieces):
    """Render pieces like 'git describe --tags --dirty --always'."""
    # TAG[-DISTANCE-gHEX][-dirty], like 'git describe --tags --dirty
    # --always'

    # exceptions:
    # 1: no tags. HEX[-dirty] (note: no 'g' prefix)

    if pieces["closest-tag"]:
        rendered = pieces["closest-tag"]
        if pieces["distance"]:
            rendered += "-{:d}-g{}".format(pieces["distance"], pieces["short"])
    else:
        # exception #1
        rendered = pieces["short"]
    if pieces["dirty"]:
        rendered += "-dirty"
    return rendered


def render_git_describe_long(pieces):
    """Render pieces like 'git describe --tags --dirty --always --long'."""
    # TAG-DISTANCE-gHEX[-dirty], like 'git describe --tags --dirty
    # --always -long'. The distance/hash is unconditional.

    # exceptions:
    # 1: no tags. HEX[-dirty] (note: no 'g' prefix)

    if pieces["closest-tag"]:
        rendered = pieces["closest-tag"]
        rendered += "-{:d}-g{}".format(pieces["distance"], pieces["short"])
    else:
        # exception #1
        rendered = pieces["short"]
    if pieces["dirty"]:
        rendered += "-dirty"
    return rendered


def render(pieces, style):
    """Render *pieces* into the final version dict using *style*."""
    if pieces["error"]:
        return {"version": "unknown",
                "full-revisionid": pieces.get("long"),
                "dirty": None,
                "error": pieces["error"]}

    if not style or style == "default":
        style = "pep440"  # the default

    if style == "pep440":
        rendered = render_pep440(pieces)
    elif style == "pep440-pre":
        rendered = render_pep440_pre(pieces)
    elif style == "pep440-post":
        rendered = render_pep440_post(pieces)
    elif style == "pep440-old":
        rendered = render_pep440_old(pieces)
    elif style == "git-describe":
        rendered = render_git_describe(pieces)
    elif style == "git-describe-long":
        rendered = render_git_describe_long(pieces)
    else:
        raise ValueError("unknown style '{style}'".format(style=style))

    return {"version": rendered, "full-revisionid": pieces["long"],
            "dirty": pieces["dirty"], "error": None}


def get_versions():
    """Compute the version dict, trying each discovery strategy in turn.

    Order: expanded git-archive keywords, then a live git checkout, then
    the parent-directory name; falls back to "0+unknown".
    """
    # I am in _version.py, which lives at ROOT/VERSIONFILE_SOURCE. If we have
    # __file__, we can work backwards from there to the root. Some
    # py2exe/bbfreeze/non-CPython implementations don't do __file__, in which
    # case we can only use expanded keywords.

    cfg = get_config()
    verbose = cfg.verbose

    try:
        return git_versions_from_keywords(get_keywords(), cfg.tag_prefix,
                                          verbose)
    except NotThisMethod:
        pass

    try:
        root = os.path.realpath(__file__)
        # versionfile_source is the relative path from the top of the source
        # tree (where the .git directory might live) to this file. Invert
        # this to find the root from __file__.
        for i in cfg.versionfile_source.split('/'):
            root = os.path.dirname(root)
    except NameError:
        return {"version": "0+unknown", "full-revisionid": None,
                "dirty": None,
                "error": "unable to find root of source tree"}

    try:
        pieces = git_pieces_from_vcs(cfg.tag_prefix, root, verbose)
        return render(pieces, cfg.style)
    except NotThisMethod:
        pass

    try:
        if cfg.parentdir_prefix:
            return versions_from_parentdir(cfg.parentdir_prefix, root, verbose)
    except NotThisMethod:
        pass

    return {"version": "0+unknown", "full-revisionid": None,
            "dirty": None,
            "error": "unable to compute version"}
# ElectriPy visualizer settings.
# Window / rendering configuration.
WINDOW_TITLE = "ElectriPy"
HEIGHT = 750
WIDTH = 750
RESIZABLE = True
FPS = 40
# Scale factors mapping physical magnitudes to on-screen vector lengths
# ("EF" = electric field).  NOTE(review): the exact units these factors
# assume are not visible here — confirm against the drawing code.
DEFAULT_FORCE_VECTOR_SCALE_FACTOR = 22e32
DEFAULT_EF_VECTOR_SCALE_FACTOR = 2e14
DEFAULT_EF_BRIGHTNESS = 105
DEFAULT_SPACE_BETWEEN_EF_VECTORS = 20
# Vectors shorter than these norms are presumably not drawn — verify in caller.
MINIMUM_FORCE_VECTOR_NORM = 10
MINIMUM_ELECTRIC_FIELD_VECTOR_NORM = 15

# Keyboard bindings: action name -> key label.
KEYS = {
    "clear_screen": "r",
    "show_vector_components": "space",
    "show_electric_forces_vectors": "f",
    "show_electric_field_at_mouse_position": "m",
    "show_electric_field": "e",
    "increment_electric_field_brightness": "+",
    "decrement_electric_field_brightness": "-",
    "remove_last_charge_added": "z",
    "add_last_charge_removed": "y",
}

# Text settings:
CHARGES_SIGN_FONT = "Arial"
PROTON_SIGN_FONT_SIZE = 23
ELECTRON_SIGN_FONT_SIZE = 35
VECTOR_COMPONENTS_FONT = "Arial"
VECTOR_COMPONENTS_FONT_SIZE = 13
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

import tensorflow as tf

from ray.rllib.models.model import Model
from ray.rllib.models.fcnet import FullyConnectedNetwork
from ray.rllib.models.action_dist import Reshaper


class MultiAgentFullyConnectedNetwork(Model):
    """Multiagent fully connected network."""

    def _build_layers(self, inputs, num_outputs, options):
        """Build one FC sub-network per agent and concatenate their outputs.

        The flat observation tensor is split back into per-agent tensors,
        each agent gets its own (optionally variable-shared) fully connected
        network, and the per-agent action outputs are concatenated again.
        """
        custom_options = options["custom_options"]

        # Recover per-agent observation tensors / action counts from the
        # flattened multiagent input and output sizes.
        obs_reshaper = Reshaper(custom_options["multiagent_obs_shapes"])
        act_reshaper = Reshaper(custom_options["multiagent_act_shapes"])
        agent_inputs = obs_reshaper.split_tensor(inputs)
        agent_num_actions = act_reshaper.split_number(num_outputs)

        hiddens = custom_options.get("multiagent_fcnet_hiddens",
                                     [[256, 256]] * 1)

        # With a shared model, every agent reuses the same variable scope.
        shared_model = custom_options.get("multiagent_shared_model", 0)
        reuse = tf.AUTO_REUSE if shared_model else False

        outputs = []
        for idx, hidden_sizes in enumerate(hiddens):
            scope = "multi" if shared_model else "multi{}".format(idx)
            with tf.variable_scope(scope, reuse=reuse):
                sub_options = options.copy()
                sub_options.update({"fcnet_hiddens": hidden_sizes})
                # TODO(ev) make this support arbitrary networks
                fcnet = FullyConnectedNetwork(
                    agent_inputs[idx], int(agent_num_actions[idx]),
                    sub_options)
                outputs.append(fcnet.outputs)
        overall_output = tf.concat(outputs, axis=1)
        return overall_output, outputs
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""Check whether two points are symmetric about the origin or an axis.

Reads the integer coordinates of two points from stdin and reports
whether they are symmetric with respect to the origin, the Y axis, or
the X axis.  Exits with status 1 (message on stderr) otherwise.
"""
import sys

if __name__ == '__main__':
    X1 = int(input('Введите x1 '))
    Y1 = int(input('Введите y1 '))
    X2 = int(input('Введите x2 '))
    Y2 = int(input('Введите y2 '))

    if X1 == -X2 and Y1 == -Y2:
        # both coordinates negated -> point reflection through the origin
        print('Точки симметричны относительно начала координат')
    elif X1 == -X2 and Y1 == Y2:
        print('Точки симметричны относительно оси Y')
    elif X1 == X2 and Y1 == -Y2:
        print('Точки симметричны относительно оси X')
    else:
        print('Точки не симметричны', file=sys.stderr)
        # Fix: use sys.exit() instead of the site-module helper exit(),
        # which is not guaranteed to exist (e.g. under `python -S`).
        sys.exit(1)
"""
Copyright (c) 2018-2020 Intel Corporation

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

      http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""

from .postprocessing_executor import PostprocessingExecutor, Postprocessor

from .filter import (
    FilterPostprocessor,

    FilterByHeightRange,
    FilterByLabels,
    FilterByMinConfidence,
    FilterEmpty,
    FilterByVisibility,
    FilterByAspectRatio
)

from .cast_to_int import CastToInt
from .clip_boxes import ClipBoxes
from .nms import NMS, SoftNMS
from .resize_prediction_boxes import ResizePredictionBoxes
from .faster_rcnn_postprocessing_resize import FRCNNPostprocessingBboxResize
from .correct_yolo_v2_boxes import CorrectYoloV2Boxes
from .resize_segmentation_mask import ResizeSegmentationMask
from .encode_segmentation_mask import EncodeSegMask
from .shift import Shift, ShiftLabels
from .normalize_landmarks_points import NormalizeLandmarksPoints
from .clip_points import ClipPoints
from .extend_segmentation_mask import ExtendSegmentationMask
from .zoom_segmentation_mask import ZoomSegMask
from .crop_segmentation_mask import CropSegmentationMask, CropOrPadSegmentationMask
from .clip_segmentation_mask import ClipSegmentationMask
from .normalize_boxes import NormalizeBoxes
from .brats_postprocessing import SegmentationPredictionResample, TransformBratsPrediction
from .extract_answers_tokens import ExtractSQUADPrediction, ExtractSQUADPredictionBiDAF
from .translate_3d_poses import Translate3dPoses
from .normalize_recomendation import MinMaxNormalizeRecommendation, SigmoidNormalizeRecommendation
from .align_prediction_depth_map import AlignDepth
from .resize_prediction_depth_map import ResizeDepthMap
from .resize_super_resolution import ResizeSuperResolution
from .resize_style_transfer import ResizeStyleTransfer
from .crop_ground_truth_image import CropGTImage, CornerCropGTImage
from .resize import Resize
from .to_gray_scale_ref_image import RGB2GRAYAnnotation, BGR2GRAYAnnotation
from .remove_repeats import RemoveRepeatTokens
from .tokens_to_lower_case import TokensToLowerCase
from .super_resolution_image_recovery import SRImageRecovery, ColorizationLABRecovery
from .argmax_segmentation_mask import ArgMaxSegmentationMask
from .normalize_salient_map import SalientMapNormalizer


# Public API of the postprocessing package.
# Fixes: removed the duplicated 'MinMaxNormalizeRecommendation' entry and
# added the imported-but-unexported 'ClipPoints' and
# 'CropOrPadSegmentationMask' names.
__all__ = [
    'Postprocessor',
    'PostprocessingExecutor',

    'FilterPostprocessor',
    'FilterByHeightRange',
    'FilterByLabels',
    'FilterByMinConfidence',
    'FilterEmpty',
    'FilterByVisibility',
    'FilterByAspectRatio',

    'CastToInt',
    'ClipBoxes',
    'NMS',
    'SoftNMS',
    'ResizePredictionBoxes',
    'FRCNNPostprocessingBboxResize',
    'CorrectYoloV2Boxes',
    'NormalizeBoxes',

    'ResizeSegmentationMask',
    'EncodeSegMask',
    'Shift',
    'ShiftLabels',
    'ExtendSegmentationMask',
    'ZoomSegMask',
    'CropSegmentationMask',
    'CropOrPadSegmentationMask',
    'ClipSegmentationMask',
    'ArgMaxSegmentationMask',

    'SegmentationPredictionResample',
    'TransformBratsPrediction',

    'NormalizeLandmarksPoints',
    'ClipPoints',

    'ExtractSQUADPrediction',
    'ExtractSQUADPredictionBiDAF',

    'Translate3dPoses',

    'SigmoidNormalizeRecommendation',
    'MinMaxNormalizeRecommendation',

    'AlignDepth',
    'ResizeDepthMap',

    'ResizeSuperResolution',
    'ResizeStyleTransfer',
    'RGB2GRAYAnnotation',
    'BGR2GRAYAnnotation',

    'CropGTImage',
    'CornerCropGTImage',

    'Resize',

    'RemoveRepeatTokens',
    'TokensToLowerCase',

    'SRImageRecovery',
    'ColorizationLABRecovery',

    'SalientMapNormalizer'
]
#!/usr/bin/python3
# TTN (The Things Network) MQTT listener that forwards LoRa uplinks to the
# LoRa Cloud DMS API and dispatches the decoded packet to Wi-Fi /
# accelerometer handlers.  Several decoding steps are left as TODO stubs.

import os, sys, logging, time
import requests, struct, json
from base64 import b64decode,b64encode
from datetime import datetime,timezone
import paho.mqtt.client as mqtt
from datetime import datetime
from yawigle import client

## Import local config
# (expects TTN_URL, MQTT_TOPIC, User, Password, DMS_HOST, DMS_APITOKEN, VER,
# WIFI_PACKET_TYPE, ACC_PACKET_TYPE -- presumably defined in config.py;
# verify against that file)
from config import *

# --- WIGLE VARIABLES --------------------------
## TODO: Replace with your credentials
WIGLE_USER = '' # REPLACE THIS
WIGLE_KEY = '' # REPLACE THIS

# Functions
# iso2ts: ISO-8601 timestamp string (microsecond precision) -> UTC epoch seconds
iso2ts = lambda iso: datetime.strptime(iso, '%Y-%m-%dT%H:%M:%S.%f').replace(tzinfo=timezone.utc).timestamp()
# hwid2eui: 16-hex-char hardware id -> dash-separated EUI ("AA-BB-...-HH")
hwid2eui = lambda hwid: '-'.join(hwid[2*i:2*i+2] for i in range(8))

# --- LORA CLOUD FUNCTIONS --------------------------

def decodeWifi(encoded):
    """Decode a hex-encoded Wi-Fi scan payload into ([mac], [rssi]).

    Currently a stub: only the leading length byte is parsed; the MAC/RSSI
    lists are returned empty until the TODO below is implemented.
    """
    # NOTE(review): `len` shadows the builtin of the same name inside this
    # function -- rename when implementing the TODO.
    len = int(encoded[0:2],16)
    mac = []
    rssi = []

    ### TODO: decode Wifi data

    ## TIPS: use the bytes.fromhex to convert hex strings to bytes
    ## use the struct.unpack function to interpret bytes as binary data

    return [mac,rssi]

def decodeAcc(encoded):
    """Decode a hex-encoded accelerometer payload; stub returning [[]]."""
    acc = []

    ### TODO: decode Accelerometer data

    ## TIPS: use the bytes.fromhex to convert hex strings to bytes
    ## use the struct.unpack function to interpret bytes as binary data

    return [acc]

def extractPacket(raw_data):
    """Split the first stream record into (type, data) hex substrings.

    raw_data is expected to be a list of [number, hex_payload] pairs; the
    first two hex characters are the packet type, the rest is the body.
    Returns ([], []) when raw_data is empty or malformed.
    """
    packet_type = []
    packet_data = []

    try:
        packet_num = raw_data[0][0]
        packet_full = raw_data[0][1]
        #Extract packet data
        packet_type = packet_full[0:2]
        packet_data = packet_full[2:]
    except:
        # Empty Packet
        # NOTE(review): bare except also hides programming errors; narrow to
        # (IndexError, TypeError) when touching this code.
        print("Received empty payload")

    return [packet_type, packet_data]


# --- TTN FUNCTIONS --------------------------

def on_connect(mqttc, obj, flags, rc):
    # MQTT connect callback -- purely informational.
    print("\nConnected to " + str(TTN_URL))

def on_message(mqttc, obj, msg):
    """MQTT message callback: forward the uplink to LoRa Cloud and decode.

    Any failure is caught and logged so one bad message cannot kill the
    MQTT loop.
    """
    try: 
        print("\nMessage: " + msg.topic + " " + str(msg.qos)) # + " " + str(msg.payload))
        parsedJSON = json.loads(msg.payload)
        # Uncomment this to fill your terminal screen with JSON
        # print(json.dumps(parsedJSON, indent=4)) 
        if(parsedJSON["uplink_message"] is not None):
            # Raw LoRa payload arrives base64-encoded; DMS wants hex.
            payload = b64decode(parsedJSON["uplink_message"]["frm_payload"]).hex() 
            deveui = hwid2eui(parsedJSON["end_device_ids"]["dev_eui"])
            print("Payload: {}".format(payload))
            # Build the DMS uplink message keyed by device EUI.
            # [:26] trims the timestamp to microsecond precision for iso2ts.
            dmsmsg = json.dumps({
                deveui: {
                    "fcnt": parsedJSON["uplink_message"]["f_cnt"],
                    "port": parsedJSON["uplink_message"]["f_port"],
                    "payload": payload,
                    "dr": 0,
                    "freq": int(parsedJSON["uplink_message"]["settings"]["frequency"]),
                    "timestamp": iso2ts(parsedJSON["uplink_message"]["settings"]["time"][:26])
                }
            })

            ## Decode ROSE packet with Lora Cloud
            headers = {'Authorization': DMS_APITOKEN}
            resp = requests.post(f"{DMS_HOST}/api/v1/uplink/send",data = dmsmsg, headers=headers)
            # print(resp)
            if(resp is not None):
                rjs = json.loads(resp.text)
                # print(rjs)

                if(rjs is not None):
                    raw_data = rjs['result'].get(deveui)['result']
                    if(raw_data is not None):
                        print("R:{}".format(raw_data['stream_records']))
                        if(raw_data['stream_records']):
                            [packet_type, packet_data] = extractPacket(raw_data['stream_records'])

                            if(packet_type==WIFI_PACKET_TYPE):
                                print("Received Wifi Data")
                                [wifi_mac,rssi] = decodeWifi(packet_data)

                                ## TODO: Query WiGLE database using received wifi mac's

                            if(packet_type==ACC_PACKET_TYPE):
                                print("Received Accelerometer Data")
                                acc_values = decodeAcc(packet_data)
    except Exception as e: 
        print("Error processing message: {}".format(e))

def on_subscribe(mqttc, obj, mid, granted_qos):
    # MQTT subscribe callback -- purely informational.
    print("\nSubscribed to " + str(MQTT_TOPIC))

def on_log(mqttc, obj, level, string):
    # Optional verbose MQTT logging (wired up only when debugging).
    print("\nLog: "+ string)
    logging_level = mqtt.LOGGING_LEVEL[level]
    logging.log(logging_level, string)


# --- MAIN --------------------------
print(os.path.basename(__file__) + " " + VER)

# Init mqtt client
mqttc = mqtt.Client()
mqttc.on_connect = on_connect
mqttc.on_subscribe = on_subscribe
mqttc.on_message = on_message
#mqttc.on_log = on_log # Logging for debugging OK, waste

# Connecting to TTN
# Setup authentication from settings above
mqttc.username_pw_set(User, Password)
# IMPORTANT - this enables the encryption of messages
mqttc.tls_set() # default certification authority of the system
#mqttc.tls_set(ca_certs="mqtt-ca.pem") # Use this if you get security errors
# It loads the TTI security certificate. Download it from their website from this page: 
# https://www.thethingsnetwork.org/docs/applications/mqtt/api/index.html
# This is normally required if you are running the script on Windows
mqttc.connect(TTN_URL, 8883, 60)
mqttc.subscribe(MQTT_TOPIC, 0) # all device uplinks

print("Waiting for Data")
try: 
    run = True
    while run:
        mqttc.loop(10) # seconds timeout / blocking time
        print(".", end="", flush=True) # feedback to the user that something is actually happening

except KeyboardInterrupt:
    print("Exit")
    sys.exit(0)
# -*- coding: utf-8 -*-NEWLINE# Copyright (c) Facebook, Inc. and its affiliates. All Rights ReservedNEWLINEimport loggingNEWLINEimport copyNEWLINEimport torchNEWLINEimport torchvisionNEWLINEimport numpy as npNEWLINEimport cv2NEWLINEfrom PIL import ImageNEWLINEfrom fvcore.common.file_io import PathManagerNEWLINEfrom fvcore.transforms.transform import NoOpTransform, TransformNEWLINENEWLINEfrom detectron2.data import MetadataCatalogNEWLINEfrom detectron2.data import detection_utils as utilsNEWLINEfrom detectron2.data import transforms as TNEWLINENEWLINEfrom .dataset import BB8_KEYPOINT_CONNECTION_RULES, FPS8_KEYPOINT_CONNECTION_RULESNEWLINE# from .structures import DensePoseDataRelative, DensePoseList, DensePoseTransformDataNEWLINENEWLINEclass RandomBlurTransform(Transform):NEWLINE def __init__(self, blur_sigma=1):NEWLINE super().__init__()NEWLINE self._set_attributes(locals())NEWLINENEWLINE def apply_image(self, img: np.ndarray, interp: str = None) -> np.ndarray:NEWLINE """NEWLINE Apply blur transform on the image(s).NEWLINENEWLINE Args:NEWLINE img (ndarray): of shape NxHxWxC, or HxWxC or HxW. 
The array can beNEWLINE of type uint8 in range [0, 255], or floating point in rangeNEWLINE [0, 1] or [0, 255].NEWLINE interp (str): keep this option for consistency, perform blur would notNEWLINE require interpolation.NEWLINE Returns:NEWLINE ndarray: blured image(s).NEWLINE """NEWLINE if img.dtype == np.uint8:NEWLINE img = img.astype(np.float32)NEWLINE img = cv2.GaussianBlur(img, (self.blur_sigma, self.blur_sigma), 0)NEWLINE return np.clip(img, 0, 255).astype(np.uint8)NEWLINE else:NEWLINE return cv2.GaussianBlur(img, (self.blur_sigma, self.blur_sigma), 0)NEWLINENEWLINE def apply_coords(self, coords: np.ndarray) -> np.ndarray:NEWLINE """NEWLINE Apply no transform on the coordinates.NEWLINE """NEWLINE return coordsNEWLINENEWLINE def apply_segmentation(self, segmentation: np.ndarray) -> np.ndarray:NEWLINE """NEWLINE Apply no transform on the full-image segmentation.NEWLINE """NEWLINE return segmentationNEWLINENEWLINEclass ColorJitterTransform(Transform):NEWLINE def __init__(self, brightness=None,NEWLINE contrast=None,NEWLINE saturation=None,NEWLINE hue=None):NEWLINE super().__init__()NEWLINE self._set_attributes(locals())NEWLINENEWLINE def apply_image(self, img: np.ndarray, interp: str = None) -> np.ndarray:NEWLINE """NEWLINE Apply color jitter transform on the image(s).NEWLINENEWLINE Args:NEWLINE img (ndarray): of shape NxHxWxC, or HxWxC or HxW. 
The array can beNEWLINE of type uint8 in range [0, 255], or floating point in rangeNEWLINE [0, 1] or [0, 255].NEWLINE interp (str): keep this option for consistency, perform color jitter would notNEWLINE require interpolation.NEWLINE Returns:NEWLINE ndarray: color jittered image(s).NEWLINE """NEWLINE self.color_jitter = torchvision.transforms.ColorJitter(NEWLINE brightness=self.brightness,NEWLINE contrast=self.contrast,NEWLINE saturation=self.saturation,NEWLINE hue=self.hue)NEWLINE img = np.asarray(self.color_jitter(Image.fromarray(np.ascontiguousarray(img, np.uint8))))NEWLINE return imgNEWLINE NEWLINE def apply_coords(self, coords: np.ndarray) -> np.ndarray:NEWLINE """NEWLINE Apply no transform on the coordinates.NEWLINE """NEWLINE return coordsNEWLINENEWLINE def apply_segmentation(self, segmentation: np.ndarray) -> np.ndarray:NEWLINE """NEWLINE Apply no transform on the full-image segmentation.NEWLINE """NEWLINE return segmentationNEWLINENEWLINEclass RandomBlur(T.TransformGen):NEWLINE """NEWLINE Randomly gussian blur an image.NEWLINE """NEWLINE def __init__(self, blur_prob=0.5, blur_sigma=None):NEWLINE super().__init__()NEWLINE self._init(locals())NEWLINENEWLINE def get_transform(self, img):NEWLINE do = self._rand_range() < self.blur_probNEWLINE if do:NEWLINE if self.blur_sigma is None:NEWLINE self.blur_sigma = np.random.choice([3, 5, 7, 9])NEWLINE return RandomBlurTransform(self.blur_sigma)NEWLINE else:NEWLINE return NoOpTransform()NEWLINENEWLINEclass ColorJitter(T.TransformGen):NEWLINE """NEWLINE Color jitter an image.NEWLINE """NEWLINE def __init__(self, brightness=None, contrast=None, saturation=None, hue=None):NEWLINE super().__init__()NEWLINE self._init(locals())NEWLINENEWLINE def get_transform(self, img):NEWLINE return ColorJitterTransform(self.brightness, self.contrast, self.saturation, self.hue)NEWLINENEWLINEdef create_sixdpose_keypoint_hflip_indices(dataset_names, keypoint_format):NEWLINE """NEWLINE Args:NEWLINE dataset_names (list[str]): list of 
dataset names
        keypoint_format(str): bb8, fps8, or bb8+fps8
    Returns:
        ndarray[int]: a vector of size=#keypoints, storing the
        horizontally-flipped keypoint indices.
    """
    # Side effect: registers the keypoint metadata (names, flip map,
    # connection rules) on the first dataset's MetadataCatalog entry.
    meta = MetadataCatalog.get(dataset_names[0])
    keypoint_flip_map = ()  # 6D-pose keypoints have no horizontal-flip pairs

    if keypoint_format == 'bb8':
        # center + the 8 projected 3D bounding-box corners
        names = (
            "center",
            # bb8
            "bb8_0", "bb8_1",
            "bb8_2", "bb8_3",
            "bb8_4", "bb8_5",
            "bb8_6", "bb8_7",
        )
        connection_rules = BB8_KEYPOINT_CONNECTION_RULES
        meta.set(keypoint_names=names, keypoint_flip_map=keypoint_flip_map, keypoint_connection_rules=connection_rules)
    elif keypoint_format == 'fps8':
        # center + 8 "fps" keypoints (presumably farthest-point-sampled,
        # PVNet-style — confirm against the dataset generation code)
        names = (
            "center",
            # fps8
            "fps8_0", "fps8_1",
            "fps8_2", "fps8_3",
            "fps8_4", "fps8_5",
            "fps8_6", "fps8_7",
        )
        connection_rules = FPS8_KEYPOINT_CONNECTION_RULES
        meta.set(keypoint_names=names, keypoint_flip_map=keypoint_flip_map, keypoint_connection_rules=connection_rules)
    else:
        # Combined layout: bb8 corners first, then fps8 keypoints.
        assert keypoint_format == 'bb8+fps8', keypoint_format
        names = (
            "center",
            # bb8
            "bb8_0", "bb8_1",
            "bb8_2", "bb8_3",
            "bb8_4", "bb8_5",
            "bb8_6", "bb8_7",
            # fps8
            "fps8_0", "fps8_1",
            "fps8_2", "fps8_3",
            "fps8_4", "fps8_5",
            "fps8_6", "fps8_7",
        )
        connection_rules = BB8_KEYPOINT_CONNECTION_RULES + FPS8_KEYPOINT_CONNECTION_RULES
        meta.set(keypoint_names=names, keypoint_flip_map=keypoint_flip_map, keypoint_connection_rules=connection_rules)

    # TODO flip -> hflip
    # With the empty flip map above this reduces to the identity permutation;
    # the general form is kept so flip pairs can be added later.
    flip_map = dict(keypoint_flip_map)
    flip_map.update({v: k for k, v in flip_map.items()})
    flipped_names = [i if i not in flip_map else flip_map[i] for i in names]
    flip_indices = [names.index(i) for i in flipped_names]
    return np.asarray(flip_indices)


class DatasetMapper:
    """
    A callable which takes a dataset dict in Detectron2 Dataset format,
    and map it into a format used by the model.

    This is the default callable to be used to map your dataset dict into training data.
    You may need to follow it to implement your own one for customized logic.

    The callable currently does the following:

    1. Read the image from "file_name"
    2. Applies cropping/geometric transforms to the image and annotations
    3. Prepare data and annotations to Tensor and :class:`Instances`
    """

    def __init__(self, cfg, is_train=True):
        # Optional crop augmentation (training only).
        if cfg.INPUT.CROP.ENABLED and is_train:
            self.crop_gen = T.RandomCrop(cfg.INPUT.CROP.TYPE, cfg.INPUT.CROP.SIZE)
            logging.getLogger(__name__).info("CropGen used in training: " + str(self.crop_gen))
        else:
            self.crop_gen = None

        # Optional random Gaussian blur (training only).
        if cfg.INPUT.RANDOMBLUR.ENABLED and is_train:
            self.blur_gen = RandomBlur(cfg.INPUT.RANDOMBLUR.PROB)
            logging.getLogger(__name__).info("BlurGen used in training: " + str(self.blur_gen))
        else:
            self.blur_gen = None

        # Optional color jitter (training only).
        if cfg.INPUT.COLORJITTER.ENABLED and is_train:
            self.colorjitter_gen = ColorJitter(cfg.INPUT.COLORJITTER.BRIGHTNESS, cfg.INPUT.COLORJITTER.CONTRAST,
                                               cfg.INPUT.COLORJITTER.SATURATION, cfg.INPUT.COLORJITTER.HUE)
            logging.getLogger(__name__).info("ColorJitterGen used in training: " + str(self.colorjitter_gen))
        else:
            self.colorjitter_gen = None

        # Geometric transform generators (resize/flip/... from the config).
        self.tfm_gens = utils.build_transform_gen(cfg, is_train)

        # fmt: off
        self.img_format     = cfg.INPUT.FORMAT
        self.mask_on        = cfg.MODEL.MASK_ON or cfg.MODEL.PVNET_ON
        self.mask_format    = cfg.INPUT.MASK_FORMAT
        self.keypoint_on    = cfg.MODEL.KEYPOINT_ON or cfg.MODEL.PVNET_ON or cfg.MODEL.CRPNET_ON or cfg.MODEL.HCR_ON
        self.keypoint_format= cfg.INPUT.KEYPOINT_FORMAT
        self.load_proposals = cfg.MODEL.LOAD_PROPOSALS
        # fmt: on
        if self.keypoint_on and is_train:
            # Flip only makes sense in training
            self.keypoint_hflip_indices = create_sixdpose_keypoint_hflip_indices(cfg.DATASETS.TRAIN, self.keypoint_format)
        else:
            self.keypoint_hflip_indices = None

        if self.load_proposals:
            self.min_box_side_len = cfg.MODEL.PROPOSAL_GENERATOR.MIN_SIZE
            self.proposal_topk = (
                cfg.DATASETS.PRECOMPUTED_PROPOSAL_TOPK_TRAIN
                if is_train
                else cfg.DATASETS.PRECOMPUTED_PROPOSAL_TOPK_TEST
            )
        self.is_train = is_train

    def __call__(self, dataset_dict):
        """
        Args:
            dataset_dict (dict): Metadata of one image, in Detectron2 Dataset format.

        Returns:
            dict: a format that builtin models in detectron2 accept
        """
        dataset_dict = copy.deepcopy(dataset_dict)  # it will be modified by code below
        # USER: Write your own image loading if it's not from a file
        image = utils.read_image(dataset_dict["file_name"], format=self.img_format)
        utils.check_image_size(dataset_dict, image)

        if "annotations" not in dataset_dict:
            # No instances: all generators can be chained and applied in one go.
            image, transforms = T.apply_transform_gens(
                ([self.crop_gen] if self.crop_gen else []) +
                ([self.blur_gen] if self.blur_gen else []) +
                ([self.colorjitter_gen] if self.colorjitter_gen else []) + self.tfm_gens, image
            )
        else:
            # Crop around an instance if there are instances in the image.
            # USER: Remove if you don't use cropping
            if self.crop_gen:
                crop_tfm = utils.gen_crop_transform_with_instance(
                    self.crop_gen.get_crop_size(image.shape[:2]),
                    image.shape[:2],
                    np.random.choice(dataset_dict["annotations"]),
                )
                image = crop_tfm.apply_image(image)
            if self.blur_gen:
                blur_tfm = self.blur_gen.get_transform(image)
                image = blur_tfm.apply_image(image)
            if self.colorjitter_gen:
                colorjitter_tfm = self.colorjitter_gen.get_transform(image)
                image = colorjitter_tfm.apply_image(image)

            image, transforms = T.apply_transform_gens(self.tfm_gens, image)
            # Rebuild the composite transform in application order:
            # crop -> blur -> colorjitter -> geometric transforms.
            if self.colorjitter_gen:
                transforms = colorjitter_tfm + transforms
            if self.blur_gen:
                transforms = blur_tfm + transforms
            if self.crop_gen:
                transforms = crop_tfm + transforms

        image_shape = image.shape[:2]  # h, w

        # Pytorch's dataloader is efficient on torch.Tensor due to shared-memory,
        # but not efficient on large generic data structures due to the use of pickle & mp.Queue.
        # Therefore it's important to use torch.Tensor.
        dataset_dict["image"] = torch.as_tensor(
            image.transpose(2, 0, 1).astype("float32")
        ).contiguous()
        # Can use uint8 if it turns out to be slow some day

        # USER: Remove if you don't use pre-computed proposals.
        if self.load_proposals:
            utils.transform_proposals(
                dataset_dict, image_shape, transforms, self.min_box_side_len, self.proposal_topk
            )

        if not self.is_train:
            # Inference: ground truth is not needed.
            dataset_dict.pop("annotations", None)
            dataset_dict.pop("sem_seg_file_name", None)
            return dataset_dict

        if "annotations" in dataset_dict:
            # USER: Modify this if you want to keep them for some reason.
            for anno in dataset_dict["annotations"]:
                if not self.mask_on:
                    anno.pop("segmentation", None)
                if not self.keypoint_on:
                    anno.pop("keypoints", None)
                # USER: load keypoints according to keypoint_format
                else:
                    # Append projected box corners and/or fps keypoints after
                    # the base keypoints, as flattened [x, y, v] triplets.
                    keypts = anno["keypoints"]
                    if 'bb8' in self.keypoint_format:
                        corner_2d = np.array(anno["corner_2d"])
                        # np.insert adds visibility flag 2 ("visible") as every
                        # third value of each (x, y) pair.
                        corner_2d = np.insert(corner_2d, 2, 2, axis=1).flatten().tolist()
                        keypts += corner_2d
                    if 'fps8' in self.keypoint_format:
                        fps_2d = np.array(anno["fps_2d"])
                        fps_2d = np.insert(fps_2d, 2, 2, axis=1).flatten().tolist()
                        keypts += fps_2d
                    anno["keypoints"] = keypts

            # USER: Implement additional transformations if you have other types of data
            annos = [
                utils.transform_instance_annotations(
                    obj, transforms, image_shape, keypoint_hflip_indices=self.keypoint_hflip_indices
                )
                for obj in dataset_dict.pop("annotations")
                if obj.get("iscrowd", 0) == 0
            ]
            instances = utils.annotations_to_instances(
                annos, image_shape, mask_format=self.mask_format
            )
            # Create a tight bounding box from masks, useful when image is cropped
            if self.crop_gen and instances.has("gt_masks"):
                instances.gt_boxes = instances.gt_masks.get_bounding_boxes()
            dataset_dict["instances"] = utils.filter_empty_instances(instances)

        # USER: Remove if you don't do semantic/panoptic segmentation.
        # if "sem_seg_file_name" in dataset_dict:
        #     with PathManager.open(dataset_dict.pop("sem_seg_file_name"), "rb") as f:
        #         sem_seg_gt = Image.open(f)
        #         sem_seg_gt = np.asarray(sem_seg_gt, dtype="uint8")
        #     sem_seg_gt = transforms.apply_segmentation(sem_seg_gt)
        #     sem_seg_gt = torch.as_tensor(sem_seg_gt.astype("long"))
        #     dataset_dict["sem_seg"] = sem_seg_gt
        return dataset_dict

class COCODatasetMapper:
    """
    A callable which takes a dataset dict in Detectron2 Dataset format,
    and map it into a format used by the model.

    This is the default callable to be used to map your dataset dict into training data.
    You may need to follow it to implement your own one for customized logic,
    such as a different way to read or transform images.
    See :doc:`/tutorials/data_loading` for details.

    The callable currently does the following:

    1. Read the image from "file_name"
    2. 
       Applies cropping/geometric transforms to the image and annotations
    3. Prepare data and annotations to Tensor and :class:`Instances`
    """

    def __init__(self, cfg, is_train=True):
        # Optional crop augmentation (training only).
        if cfg.INPUT.CROP.ENABLED and is_train:
            self.crop_gen = T.RandomCrop(cfg.INPUT.CROP.TYPE, cfg.INPUT.CROP.SIZE)
            logging.getLogger(__name__).info("CropGen used in training: " + str(self.crop_gen))
        else:
            self.crop_gen = None

        # Geometric transform generators (resize/flip/... from the config).
        self.tfm_gens = utils.build_transform_gen(cfg, is_train)

        # fmt: off
        self.img_format     = cfg.INPUT.FORMAT
        self.mask_on        = cfg.MODEL.MASK_ON or cfg.MODEL.PVNET_ON
        self.mask_format    = cfg.INPUT.MASK_FORMAT
        self.keypoint_on    = cfg.MODEL.KEYPOINT_ON or cfg.MODEL.PVNET_ON or cfg.MODEL.CRPNET_ON or cfg.MODEL.HCR_ON
        self.load_proposals = cfg.MODEL.LOAD_PROPOSALS
        # fmt: on
        if self.keypoint_on and is_train:
            # Flip only makes sense in training
            self.keypoint_hflip_indices = utils.create_keypoint_hflip_indices(cfg.DATASETS.TRAIN)
        else:
            self.keypoint_hflip_indices = None

        if self.load_proposals:
            self.min_box_side_len = cfg.MODEL.PROPOSAL_GENERATOR.MIN_SIZE
            self.proposal_topk = (
                cfg.DATASETS.PRECOMPUTED_PROPOSAL_TOPK_TRAIN
                if is_train
                else cfg.DATASETS.PRECOMPUTED_PROPOSAL_TOPK_TEST
            )
        self.is_train = is_train

    def __call__(self, dataset_dict):
        """
        Args:
            dataset_dict (dict): Metadata of one image, in Detectron2 Dataset format.

        Returns:
            dict: a format that builtin models in detectron2 accept
        """
        dataset_dict = copy.deepcopy(dataset_dict)  # it will be modified by code below
        # USER: Write your own image loading if it's not from a file
        image = utils.read_image(dataset_dict["file_name"], format=self.img_format)
        utils.check_image_size(dataset_dict, image)

        if "annotations" not in dataset_dict:
            image, transforms = T.apply_transform_gens(
                ([self.crop_gen] if self.crop_gen else []) + self.tfm_gens, image
            )
        else:
            # Crop around an instance if there are instances in the image.
            # USER: Remove if you don't use cropping
            if self.crop_gen:
                crop_tfm = utils.gen_crop_transform_with_instance(
                    self.crop_gen.get_crop_size(image.shape[:2]),
                    image.shape[:2],
                    np.random.choice(dataset_dict["annotations"]),
                )
                image = crop_tfm.apply_image(image)
            image, transforms = T.apply_transform_gens(self.tfm_gens, image)
            if self.crop_gen:
                # Prepend the crop so `transforms` reflects application order.
                transforms = crop_tfm + transforms

        image_shape = image.shape[:2]  # h, w

        # Pytorch's dataloader is efficient on torch.Tensor due to shared-memory,
        # but not efficient on large generic data structures due to the use of pickle & mp.Queue.
        # Therefore it's important to use torch.Tensor.
        dataset_dict["image"] = torch.as_tensor(np.ascontiguousarray(image.transpose(2, 0, 1)))

        # USER: Remove if you don't use pre-computed proposals.
        if self.load_proposals:
            utils.transform_proposals(
                dataset_dict, image_shape, transforms, self.min_box_side_len, self.proposal_topk
            )

        if not self.is_train:
            # USER: Modify this if you want to keep them for some reason.
            dataset_dict.pop("annotations", None)
            dataset_dict.pop("sem_seg_file_name", None)
            return dataset_dict

        if "annotations" in dataset_dict:
            # USER: Modify this if you want to keep them for some reason.
            for anno in dataset_dict["annotations"]:
                if not self.mask_on:
                    anno.pop("segmentation", None)
                if not self.keypoint_on:
                    anno.pop("keypoints", None)

            # USER: Implement additional transformations if you have other types of data
            annos = [
                utils.transform_instance_annotations(
                    obj, transforms, image_shape, keypoint_hflip_indices=self.keypoint_hflip_indices
                )
                for obj in dataset_dict.pop("annotations")
                if obj.get("iscrowd", 0) == 0
            ]
            instances = utils.annotations_to_instances(
                annos, image_shape, mask_format=self.mask_format
            )
            # Create a tight bounding box from masks, useful when image is cropped
            if self.crop_gen and instances.has("gt_masks"):
                instances.gt_boxes = instances.gt_masks.get_bounding_boxes()
            dataset_dict["instances"] = utils.filter_empty_instances(instances)

        # USER: Remove if you don't do semantic/panoptic segmentation.
        if "sem_seg_file_name" in dataset_dict:
            with PathManager.open(dataset_dict.pop("sem_seg_file_name"), "rb") as f:
                sem_seg_gt = Image.open(f)
                sem_seg_gt = np.asarray(sem_seg_gt, dtype="uint8")
            sem_seg_gt = transforms.apply_segmentation(sem_seg_gt)
            sem_seg_gt = torch.as_tensor(sem_seg_gt.astype("long"))
            dataset_dict["sem_seg"] = sem_seg_gt
        return dataset_dict
# drawFace.pyw
# A program which draws faces
"""Write and test a function to meet this specification.

drawFace(center, size, win) center is a Point, size is an int, and win is a
GraphWin. Draws a simple face of the given size in win.

Your function can draw a simple smiley (or grim) face. Demonstrate the function
by writing a program that draws several faces of varying size in a single
window."""

from graphics import *

def drawFace(center, size, win):
    """Draw a simple face centered at Point `center` with radius `size` in `win`."""
    x = center.getX()
    y = center.getY()

    # Head: filled circle of the given radius.
    head = Circle(Point(x, y), size)
    head.setOutline("peachpuff")
    head.setFill("peachpuff")
    head.draw(win)

    # Eyes: small filled circles placed symmetrically about the center.
    leftEye = Circle(Point((x - (1/2) * size), (y + (1/2) * size))\
              , (size/10))
    leftEye.setOutline("black")
    leftEye.setFill("black")
    leftEye.draw(win)

    rightEye = leftEye.clone()
    rightEye.move(size, 0)
    rightEye.draw(win)

    # Nose: two line segments forming a "<" shape left of center.
    upperNose = Line(Point(x - 1/10 * size, y + size * (1/3)),\
                Point(x - (1/2) * size, y))
    upperNose.draw(win)

    lowerNose = Line(Point(x - (1/2) * size, y)\
                , Point(x - 1/10 * size, y - size * (1/3)))
    lowerNose.draw(win)

    # Mouth: a small dot below the center (clone of an eye circle).
    mouth = leftEye.clone()
    mouth.move(1/2 * size, -size)
    mouth.draw(win)

def properties(win):
    """Ask the user for a face's center (mouse click in `win`) and its size.

    Returns:
        (Point, float): the clicked center and the entered size.
    """
    # Get where the user wants to center the face
    print("\nPlease click where the center of the face should be.")
    center = win.getMouse()

    # Get the size of the face you want to draw
    sizeFace = float(input("\nPlease enter the size of the face you want \
to draw. "))

    return center, sizeFace

def main():
    # Get the dimensions of the window
    winHeight = float(input("Please enter the height of the window you want: "))
    winWidth = float(input("Please enter the width of the window you want: "))
    # BUGFIX: GraphWin's signature is (title, width, height); the original
    # passed (winHeight, winWidth), silently swapping the window dimensions.
    win = GraphWin("Draw a Face", winWidth, winHeight)

    # Draw two faces, each positioned by a click and sized by typed input
    # (the original repeated this sequence inline, twice).
    for _ in range(2):
        center, size = properties(win)
        drawFace(center, size, win)

    # winHeight - (winHeight - winHeight/20) == winHeight/20: near the top edge.
    message = Text(Point(winWidth/2, winHeight - (winHeight - winHeight/20))\
              , "Click anywhere to quit.")
    message.draw(win)
    win.getMouse()
    win.close()

main()
# nt2 unit-test specification for the fdlibm `atan` functor: one entry
# describing the functor's signature/types and the generated unit test
# (input range, reference implementation, and ULP tolerance).
[ ## this file was manually modified by jt
    {
        'functor' : {
            'arity' : '1',
            'call_types' : [],
            'ret_arity' : '0',
            # NOTE(review): 'rturn' spelling kept as-is — presumably the exact
            # key the test generator looks up; renaming could break it.
            'rturn' : {
                'default' : 'T',
            },
            'simd_types' : [],
            'special' : ['fdlibm'],
            'type_defs' : [],
            'types' : ['real_'],
        },
        'info' : 'manually modified',
        'unit' : {
            'global_header' : {
                'first_stamp' : 'created by jt the 03/03/2011',
                'included' : ['#include <nt2/include/functions/atan.hpp>'],
                'notes' : [],
                'stamp' : 'modified by jt the 03/03/2011',
            },
            'ranges' : {
                # test inputs drawn from [-1, 1]
                'default' : [['T(-1)', 'T(1)']],
            },
            'specific_values' : {
            },
            'verif_test' : {
                # compare nt2::fdlibm::atan against nt2::atan within 1 ULP
                'property_call' : {
                    'default' : ['nt2::fdlibm::atan(a0)'],
                },
                'property_value' : {
                    'default' : ['nt2::atan(a0)'],
                },
                'simd' : {
                },
                'ulp_thresh' : {
                    'default' : ['1'],
                },
            },
        },
    },
]