"""Time Recurrent Neural Network (RNN) (minus the final output layer)."""

from sktime.networks.base import BaseDeepNetwork
from sktime.utils.warnings import warn


# TODO (release 0.41.0)
# change the default value of 'activation' to "tanh"
# update the docstring for activation from "linear" to "tanh"
# and remove the note about the change from the docstring.
# Remove the usage of self._activation throughout the class
# and replace it with self.activation
class RNNNetwork(BaseDeepNetwork):
    """Establish the network structure for an RNN.

    Adapted from the implementation used in [1]

    Parameters
    ----------
    units : int, default = 6
        the number of recurring units
    random_state : int, default = 0
        seed to any needed random actions
    activation : str, default = "linear"
        activation function to use in the RNN layer;
        List of available keras activation functions:
        https://keras.io/api/layers/activations/
        Default value of activation will change to "tanh"
        in version '0.41.0'.
    """

    _tags = {
        "authors": [
            "James-Large",
            "Withington",
            "TonyBagnall",
            "achieveordie",
            "noxthot",
        ],
        "python_dependencies": ["tensorflow"],
        "capability:random_state": True,
        "property:randomness": "stochastic",
    }

    # TODO (release 0.41.0)
    # Change the default value of 'activation' to "tanh"
    def __init__(
        self,
        units=6,
        random_state=0,
        activation="changing_from_linear_to_tanh_in_0.41.0",
    ):
        self.activation = activation
        self.units = units
        self.random_state = random_state
        # TODO (release 0.41.0)
        # Once the default of 'activation' in the signature above is "tanh",
        # delete this deprecation shim entirely and use self.activation
        # (instead of self._activation) everywhere in the class.
        if activation != "changing_from_linear_to_tanh_in_0.41.0":
            # user set an explicit activation; use it as-is
            self._activation = activation
        else:
            # default was left untouched: warn about the upcoming change
            # and fall back to the current default, "linear"
            warn(
                "in `RNNNetwork`, the default value of parameter "
                "'activation' will change to 'tanh' in version '0.41.0'. "
                "To keep current behaviour and to silence this warning, "
                "set 'activation' to 'linear' explicitly.",
                category=DeprecationWarning,
                obj=self,
            )
            self._activation = "linear"
        super().__init__()

    def build_network(self, input_shape, **kwargs):
        """Construct a compiled, un-trained, keras model that is ready for training.

        Parameters
        ----------
        input_shape : int or tuple
            The shape of the data fed into the input layer. It should either
            have dimensions of (m, d) or m. In case an int is passed,
            1 is appended for d.

        Returns
        -------
        output : a compiled Keras Model
        """
        from tensorflow import keras

        # Normalize `input_shape` to a (m, d) tuple; an int or a 1-tuple is
        # treated as m series points with a single dimension appended.
        if isinstance(input_shape, int):
            layer_shape = (input_shape, 1)
        elif isinstance(input_shape, tuple):
            n_dims = len(input_shape)
            if n_dims == 2:
                layer_shape = input_shape
            elif n_dims == 1:
                layer_shape = (*input_shape, 1)
            else:
                raise ValueError(
                    "If `input_shape` is a tuple, it must either be "
                    f"of length 1 or 2. Found length of {len(input_shape)}"
                )
        else:
            raise TypeError(
                "`input_shape` should either be of type int or tuple. "
                f"But found the type to be: {type(input_shape)}"
            )

        input_layer = keras.layers.Input(layer_shape)

        # TODO (release 0.41.0)
        # After changing the default value of 'activation' to "tanh"
        # in the __init__ method signature,
        # replace self._activation below with self.activation.
        rnn_layer = keras.layers.SimpleRNN(
            units=self.units,
            input_shape=input_layer.shape,
            activation=self._activation,
            use_bias=False,
            kernel_initializer="glorot_uniform",
            recurrent_initializer="orthogonal",
            bias_initializer="zeros",
            dropout=0.0,
            recurrent_dropout=0.0,
        )
        output_layer = rnn_layer(input_layer)

        return input_layer, output_layer

    @classmethod
    def get_test_params(cls, parameter_set="default"):
        """Return testing parameter settings for the estimator.

        Parameters
        ----------
        parameter_set : str, default="default"
            Name of the set of test parameters to return, for use in tests. If no
            special parameters are defined for a value, will return ``"default"`` set.
            Reserved values for classifiers:
                "results_comparison" - used for identity testing in some classifiers
                    should contain parameter settings comparable to "TSC bakeoff"

        Returns
        -------
        params : dict or list of dict, default = {}
            Parameters to create testing instances of the class
            Each dict are parameters to construct an "interesting" test instance, i.e.,
            ``MyClass(**params)`` or ``MyClass(**params[i])`` creates a valid test
            instance.
            ``create_test_instance`` uses the first (or only) dictionary in ``params``
        """
        # default-parameter instance plus one with a non-default unit count
        return [{}, {"units": 5}]
