#! /usr/bin/env python3
import pyspark.ml

from gai.v2.unify.base import Estimator, Transformer
from gai.v2.unify.dtype import Dataset, DatasetType
from gai.v2.unify.transformer.variant import SparkTransformerWrapper


class SparkEstimatorWrapper(Estimator):
    """A wrapper of ``pyspark.ml.Estimator``.

    Args:
        estimator: input estimator, which is an instance of ``pyspark.ml.Estimator``
        instance_name: optional name of this wrapper instance, forwarded to the base class
        pre_hooks: a list of hook functions to be called at the entry of ``fit()`` method
        post_hooks: a list of hook functions to be called at the exit of ``fit()`` method
        type_adaptive: boolean indicator of whether type adaptation is on
    """

    def __init__(self, estimator: pyspark.ml.Estimator, instance_name=None, pre_hooks=(), post_hooks=(),
                 type_adaptive=False):
        super().__init__(instance_name=instance_name,
                         pre_hooks=pre_hooks,
                         post_hooks=post_hooks,
                         type_adaptive=type_adaptive)

        self._kernel = estimator

    def unwrap(self):
        """Return the wrapped kernel object.

        Returns:
            the wrapped kernel object, which is an instance of ``pyspark.ml.Estimator``
        """
        return self._kernel

    def _fit(self, dataset: Dataset) -> Transformer:
        """Fit the wrapped estimator on ``dataset`` and wrap the resulting model.

        Args:
            dataset: input dataset; its type must equal ``get_input_type()``

        Returns:
            a ``SparkTransformerWrapper`` around the model produced by the
            kernel estimator's ``fit()``

        Raises:
            RuntimeError: if the dataset's type does not match this
                estimator's expected input type
        """
        # Guard clause: fail fast with a message naming both the expected
        # and the actual dataset type, so mismatches are easy to diagnose.
        if dataset.get_type() != self.get_input_type():
            raise RuntimeError(
                "estimator and dataset should match in type: expected {}, got {}".format(
                    self.get_input_type(), dataset.get_type()))
        model = self.unwrap().fit(dataset.unwrap())
        return SparkTransformerWrapper(model)

    def get_input_type(self):
        """
        Returns:
            the dataset type this estimator accepts, i.e.
            ``DatasetType.SPARK_DATA_FRAME``
        """
        return DatasetType.SPARK_DATA_FRAME

    def getKernelModule(self):
        """
        Returns:
            the module name of the class of the kernel object
        """
        return self._kernel.__class__.__module__

    def getKernelName(self):
        """
        Returns:
            the class name of the kernel object
        """
        return self._kernel.__class__.__name__
