#!/usr/bin/env python3
from typing import List

import pandas
from abc import abstractmethod

from gai.v2.base import HookEnabled, LoggingEnabled
from gai.v2.unify.dtype import Dataset, DatasetType, PandasDataFrame


class TypeAdaptationSupport(object):
    """Mixin that defines the type-adaptation contract of an actuator.

    Subclasses declare which dataset type they accept and produce; the mixin
    stores a boolean flag that, when on, lets ``_adapt_if_incompat()`` convert
    a mismatched input dataset via a looked-up type adapter.
    """

    @abstractmethod
    def get_input_type(self) -> DatasetType:
        """
        Returns:
            the enum value corresponding to the expected type of input dataset
            when type-adaptation is off
        """
        raise NotImplementedError()

    @abstractmethod
    def get_output_type(self) -> DatasetType:
        """
        Returns:
            the type of output dataset
        """
        raise NotImplementedError()

    def is_type_adaptive(self) -> bool:
        """
        Returns:
            whether type-adaptation is currently on

        Notes:
            When type-adaptation is on and a mismatched input dataset is passed
            in, the actuator will automatically find a type adapter and convert
            the input dataset to an acceptable format.
        """
        return self._type_adaptive

    def set_type_adaptive(self, b: bool):
        """Sets the type-adaptivity flag.

        Args:
            b: the new flag value
        Returns:
            ``self`` (fluent style)
        """
        self._type_adaptive = b
        return self

    def _adapt_if_incompat(self, dataset):
        # Fast path: the dataset already has the expected input type.
        if dataset.get_type() == self.get_input_type():
            return dataset
        # Mismatch and adaptation is off: refuse to proceed.
        if not self.is_type_adaptive():
            raise RuntimeError("either dataset should be compatible with the transformer/estimator, "
                               "or the transformer/estimator should be type-adaptive")
        # Mismatch and adaptation is on: convert via a module-level lookup.
        adapter = find_dataset_type_adapter(dataset.get_type(), self.get_input_type())
        return adapter.transform(dataset)


class Actuator(HookEnabled, TypeAdaptationSupport, LoggingEnabled):
    """Common ancestor of ``Transformer`` and ``Estimator``.

    Its sole purpose is to assemble the ``HookEnabled``,
    ``TypeAdaptationSupport``, and ``LoggingEnabled`` interfaces into one base.

    Args:
        instance_name: the name of this particular instance. It is recommended
            to give a unique name.
        pre_hooks: the list of hook functions to be called at the entry
            of method ``transform()`` or ``fit()``
        post_hooks: the list of hook functions to be called at the exit
            of method ``transform()`` or ``fit()``
        type_adaptive: indicator of whether incompatible dataset is
            automatically adapted
    """

    def __init__(self, instance_name=None, pre_hooks=(), post_hooks=(), type_adaptive=False):
        # Initialize each base explicitly, in the same order as before.
        HookEnabled.__init__(self, pre_hooks, post_hooks)
        TypeAdaptationSupport.__init__(self)
        LoggingEnabled.__init__(self)
        # set_type_adaptive() returns self, so the two setters can be chained.
        self.set_type_adaptive(type_adaptive).setInstanceName(instance_name)


class Transformer(Actuator):
    """A transformational entity: given a dataset, ``transform()`` produces
    another dataset. A ``Transformer`` may simply wrap or adapt an entity of
    similar functionality from another platform (Spark, Pandas, etc.); in that
    case the underlying entity can be retrieved with ``unwrap()``.


    Args:
        instance_name: the name of this particular instance of ``Transformer``. It is recommended
            to give a unique name.
        pre_hooks: the list of hook functions to be called at the entry
            of method ``transform()``
        post_hooks: the list of hook functions to be called at the exit
            of method ``transform()``
        type_adaptive: indicator of whether incompatible dataset is
            automatically adapted


    >>> from gai.v2.unify.transformer.variant import PandasFunctionWrapper
    >>> pre = lambda x, y: print("pre-hook is called")
    >>> post = lambda x, y: print("post-hook is called")
    >>> identity = lambda x: x
    >>> tfm = PandasFunctionWrapper(identity, post_hooks=[post]).addPreHook(pre)
    >>> df = pandas.DataFrame({"name":["Alice", "Bob"], "age":[12, 13]})
    >>> dataset = PandasDataFrame(df)
    >>> res = tfm.transform(dataset)
    pre-hook is called
    post-hook is called
    >>>
    """

    def __init__(self, instance_name=None, pre_hooks=(), post_hooks=(), type_adaptive=False):
        super().__init__(instance_name=instance_name, pre_hooks=pre_hooks,
                         post_hooks=post_hooks, type_adaptive=type_adaptive)

    @abstractmethod
    def unwrap(self):
        """
        Returns:
            the underlying entity that embodies the actual functionality of
            the transformer. It can be a unary function, a
            ``pyspark.ml.Transformer``, etc., depending on the subtype of this
            ``Transformer``.
        """
        raise NotImplementedError()

    def _transform(self, dataset):
        # Identity by default; subclasses override with the real work.
        return dataset

    def transform(self, dataset: Dataset) -> Dataset:
        """Transforms the given ``dataset`` into another dataset.

        If this ``Transformer`` is type-adaptive, an incompatible dataset is
        automatically adapted into a compatible format before transformation;
        otherwise an exception is raised on incompatibility.

        Side effects:
            - Pre-hooks are applied on ``dataset`` before transformation.
            - Post-hooks are applied on the result dataset after transformation.

        Args:
            dataset: the dataset to be transformed
        Returns:
            the result of transformation
        """
        assert isinstance(dataset, Dataset)

        self._exec_pre_hooks(dataset)
        adapted = self._adapt_if_incompat(dataset)
        result = self._transform(adapted)
        self._exec_post_hooks(result)
        return result


def find_dataset_type_adapter(src_type: DatasetType, dest_type: DatasetType):
    """Find an adapter that transforms a dataset from source data type to
    destination data type.

    Args:
        src_type: the dataset type to transform from
        dest_type: the dataset type to transform to
    Returns:
        the desired ``Transformer``
    Raises:
        RuntimeError: when no adapter from ``src_type`` to ``dest_type``
            has been implemented
    """
    # Imported lazily to avoid a circular import with gai.v2.unify.transformer.
    from gai.v2.unify.transformer import (IdentityTransformer,
                                          Pandas2SparkTransformer,
                                          Spark2PandasTransformer)

    if src_type == dest_type:
        return IdentityTransformer(src_type)

    # Map to classes (not instances) so adapters are only constructed on demand.
    lookup_table = {
        (DatasetType.PANDAS_DATA_FRAME, DatasetType.SPARK_DATA_FRAME): Pandas2SparkTransformer,
        (DatasetType.SPARK_DATA_FRAME, DatasetType.PANDAS_DATA_FRAME): Spark2PandasTransformer,
    }

    if (src_type, dest_type) in lookup_table:
        return lookup_table[(src_type, dest_type)]()

    # BUG FIX: the message was previously passed with the arguments as extra
    # RuntimeError args instead of being formatted into the string.
    raise RuntimeError("the transformer from {} to {} has "
                       "not been implemented".format(src_type, dest_type))


class Estimator(Actuator):
    """An ``Estimator`` fits a model to a dataset; the fitted model is itself
    a ``Transformer`` that can be applied to further datasets.

    Args:
        instance_name: the name of this particular instance of ``Estimator``. It is recommended
            to give a unique name.
        pre_hooks: the list of hook functions to be called at the entry
            of method ``fit()``
        post_hooks: the list of hook functions to be called at the exit
            of method ``fit()``
        type_adaptive: indicator of whether the estimator is type-adaptive
    """

    def __init__(self, instance_name=None, pre_hooks=(), post_hooks=(), type_adaptive=False):
        super().__init__(instance_name=instance_name, pre_hooks=pre_hooks, post_hooks=post_hooks,
                         type_adaptive=type_adaptive)

    def _fit(self, dataset):
        # Default: learn nothing and return an identity model that preserves
        # the dataset's type. Subclasses override this hook with real fitting.
        from gai.v2.unify.transformer import IdentityTransformer
        return IdentityTransformer(dtype=dataset.get_type())

    def fit(self, dataset: Dataset) -> Transformer:
        """Fits a model to the input dataset. At the entry of and the exit of
        this method, hook functions are called.

        Args:
            dataset: input dataset, which is an instance of ``v2.dtype.Dataset``

        Returns:
            fitted model, which is an instance of ``Transformer``
        """
        self._exec_pre_hooks(dataset)
        dataset = self._adapt_if_incompat(dataset)
        model = self._fit(dataset)
        # NOTE(review): post-hooks receive the (possibly adapted) input
        # dataset, not the fitted model — presumably intentional since hooks
        # are dataset-oriented, but confirm (Transformer.transform applies
        # post-hooks to its result instead).
        self._exec_post_hooks(dataset)
        return model

    @abstractmethod
    def unwrap(self):
        """
        Returns:
            the underlying entity that embodies the actual functionality of
            the estimator.
        """
        # Marked @abstractmethod for consistency with Transformer.unwrap().
        raise NotImplementedError()


class Pipeline(Estimator):
    """A ``Pipeline`` chains a list of transformers and estimators into a
    single estimator; fitting it runs every stage in order on the dataset.

    A pipeline is the counterpart of ``pyspark.ml.Pipeline`` in our system.

    Args:
        stages: the list of stages
        pre_hooks: the list of hook functions which will be called at the entry of ``fit()`` method
        post_hooks: the list of hook functions which will be called at the exit of ``fit()`` method
        type_adaptive: boolean indicator of whether type adaptation is on
        instance_name: optional name for this pipeline instance

    Notes:
        It is a class invariant that the stored list of stages is nonempty.
    """

    def __init__(self, stages: List[Actuator], pre_hooks=(), post_hooks=(), type_adaptive=False,
                 instance_name=None):
        # BUG FIX: arguments were previously forwarded positionally, so
        # pre_hooks was bound to Estimator's ``instance_name``, post_hooks to
        # ``pre_hooks``, and type_adaptive to ``post_hooks`` — hooks never ran
        # and the adaptivity flag was silently dropped.
        super().__init__(instance_name=instance_name, pre_hooks=pre_hooks,
                         post_hooks=post_hooks, type_adaptive=type_adaptive)
        self.set_stages(stages)

    def get_stages(self) -> List[Actuator]:
        """
        Returns:
            the stored list of stages
        """
        return self._stages

    def set_stages(self, stages: List[Actuator]):
        """
        Args:
            stages: input list of stages

        Returns:
            the current instance

        Raises:
            ``AssertionError`` when ``stages`` is empty
        """
        assert len(stages) > 0
        self._stages = stages
        return self

    def get_input_type(self) -> DatasetType:
        """
        Returns:
            the input type of the first stage when type-adaptation is off
        """
        return self.get_stages()[0].get_input_type()

    def get_output_type(self) -> DatasetType:
        """
        Returns:
            the output type of the last stage
        """
        return self.get_stages()[-1].get_output_type()

    def _fit(self, dataset):
        # Run each stage in order: transformers pass the dataset through,
        # estimators are fitted and their models applied; the resulting chain
        # of transformers/models forms the fitted pipeline.
        output_stages = []
        for stage in self.get_stages():
            if isinstance(stage, Transformer):
                dataset = stage.transform(dataset)
                output_stages.append(stage)
            elif isinstance(stage, Estimator):
                model = stage.fit(dataset)
                dataset = model.transform(dataset)
                output_stages.append(model)
            else:
                raise RuntimeError("unknown stage entity. Transformer/Estimator expected")

        return PipelineModel(stages=output_stages)


class PipelineModel(Transformer):
    """``PipelineModel`` is a pipeline of transformers and fitted models. It
    is the counterpart of ``pyspark.ml.PipelineModel`` in our system.

    Args:
        stages: the list of stages
        pre_hooks: the list of hook functions which will be called at the entry of ``transform()`` method
        post_hooks: the list of hook functions which will be called at the exit of ``transform()`` method
        type_adaptive: boolean indicator of whether type adaptation is on
    """

    def __init__(self, stages, pre_hooks=(), post_hooks=(), type_adaptive=False):
        super(PipelineModel, self).__init__(pre_hooks=pre_hooks, post_hooks=post_hooks,
                                            type_adaptive=type_adaptive)
        self.set_stages(stages)

    def set_stages(self, stages):
        """
        Args:
            stages: the list of stages

        Returns:
            ``self``

        Raises:
            ``AssertionError`` when ``stages`` is empty
        """
        assert len(stages) > 0
        self._stages = stages
        # BUG FIX: the docstring promised ``self`` (and Pipeline.set_stages
        # returns it), but this method previously returned None.
        return self

    def get_stages(self):
        """
        Returns:
            the list of stages
        """
        return self._stages

    def _transform(self, dataset):
        if self.is_type_adaptive():
            return self._adaptive_transform(dataset)
        else:
            return self._plain_transform(dataset)

    def _plain_transform(self, dataset) -> Dataset:
        # Apply each stage in order without touching its adaptivity flag.
        for stage in self.get_stages():
            dataset = stage.transform(dataset)
        return dataset

    def _adaptive_transform(self, dataset) -> Dataset:
        # Temporarily force each stage to be type-adaptive for its transform.
        for stage in self.get_stages():
            saved = stage.is_type_adaptive()
            stage.set_type_adaptive(True)
            # BUG FIX: restore the stage's original flag even when transform()
            # raises; previously an exception left the stage adaptive forever.
            try:
                dataset = stage.transform(dataset)
            finally:
                stage.set_type_adaptive(saved)
        return dataset

    def get_input_type(self) -> DatasetType:
        """
        Returns:
            the input type of the first stage when type-adaptation is off
        """
        assert len(self.get_stages()) > 0
        return self.get_stages()[0].get_input_type()

    def get_output_type(self) -> DatasetType:
        """
        Returns:
            the output type of the last stage
        """
        assert len(self.get_stages()) > 0
        return self.get_stages()[-1].get_output_type()