#! /usr/bin/env python3
from pyspark.ml.param import Params, Param
from pyspark.ml.param.shared import HasInputCols, HasOutputCols
from pyspark.sql import DataFrame, SparkSession
from pyspark.sql.functions import udf
from pyspark.sql.types import FloatType, IntegerType, StringType
from typing import List

from gai.v2.spark.base import SparkTransformer
from gai.v2.utils import generate_unique_str_n


@udf(returnType=FloatType())
def _to_float(x):
    """Null-safe cast of a column value to ``float``.

    Spark passes ``None`` for SQL NULLs; ``float(None)`` raises
    ``TypeError``, so NULLs are propagated unchanged instead.
    """
    return float(x) if x is not None else None


@udf(returnType=IntegerType())
def _to_integer(x):
    """Null-safe cast of a column value to ``int``.

    Spark passes ``None`` for SQL NULLs; ``int(None)`` raises
    ``TypeError``, so NULLs are propagated unchanged instead.
    """
    return int(x) if x is not None else None


@udf(returnType=StringType())
def _to_string(x):
    """Null-safe cast of a column value to ``str``.

    Spark passes ``None`` for SQL NULLs; ``str(None)`` would turn a
    NULL into the literal string ``'None'``, so NULLs are propagated
    unchanged instead.
    """
    return str(x) if x is not None else None


class TypeCaster(SparkTransformer, HasInputCols, HasOutputCols):
    """Casts the input columns to specified output types.
    Currently type casting is available for ``int``, ``float`` and ``str``.
    More complicated type casting can be done with ``ColumnMapper`` with
    user-defined functions.

    Args:
        inplace: boolean indicator of whether the casting is inplace,
            ie., without creating new columns
        inputCols: the names of input columns
        outputTypes: the desired types, in the same order as ``inputCols``
        outputCols: the names of output columns, effective only when ``inplace``
            is ``False``.

    Raises:
        ValueError: if ``outputCols`` is inconsistent with ``inplace``, if
            ``outputTypes`` and ``inputCols`` differ in length, or if an
            unsupported output type is requested.

    >>> from gai.v2.utils import get_or_create_spark_session
    >>> spark = get_or_create_spark_session()
    >>> df = spark.createDataFrame([(2000.0, '11'), (1000.0, '12'),
    ...                             (3000.0, '20')], schema=("foo", "bar"))
    >>> df.printSchema()
    root
     |-- foo: double (nullable = true)
     |-- bar: string (nullable = true)
    <BLANKLINE>
    >>> type_caster = TypeCaster(inplace=True, inputCols=['foo', 'bar'],
    ...                          outputTypes=[str, int])
    >>> result = type_caster.transform(df)
    >>> result.printSchema()
    root
     |-- foo: string (nullable = true)
     |-- bar: integer (nullable = true)
    <BLANKLINE>
    >>> result.show()
    +------+---+
    |   foo|bar|
    +------+---+
    |2000.0| 11|
    |1000.0| 12|
    |3000.0| 20|
    +------+---+
    <BLANKLINE>
    >>> type_caster_a = TypeCaster(inplace=False, inputCols=['bar'],
    ...                            outputTypes=[int], outputCols=['bar_i'])
    >>> result = type_caster_a.transform(df)
    >>> result.printSchema()
    root
     |-- foo: double (nullable = true)
     |-- bar: string (nullable = true)
     |-- bar_i: integer (nullable = true)
    <BLANKLINE>
    >>> result.show()
    +------+---+-----+
    |   foo|bar|bar_i|
    +------+---+-----+
    |2000.0| 11|   11|
    |1000.0| 12|   12|
    |3000.0| 20|   20|
    +------+---+-----+
    <BLANKLINE>

    """

    inplace = Param(Params._dummy(), "inplace",
                    "boolean indicator of whether the casting is inplace, i.e., without creating new columns")

    outputTypes = Param(Params._dummy(), "outputTypes",
                        "the desired types, in the same order as ``inputCols``")

    # Mapping from the supported Python types to the casting UDFs.
    _CASTERS = {float: _to_float, int: _to_integer, str: _to_string}

    def __init__(self, inplace: bool, inputCols: List[str], outputTypes: list,
                 outputCols=None):
        # Validate eagerly with real exceptions: ``assert`` statements are
        # stripped under ``python -O``, which would silently disable the
        # checks and let inconsistent configurations fail much later.
        if inplace and outputCols is not None:
            raise ValueError("outputCols must be None when inplace is True")
        if not inplace and outputCols is None:
            raise ValueError("outputCols is required when inplace is False")
        if len(outputTypes) != len(inputCols):
            raise ValueError("outputTypes must have the same length as inputCols")
        if not inplace and len(outputCols) != len(inputCols):
            raise ValueError("outputCols must have the same length as inputCols")

        super(TypeCaster, self).__init__()
        self.setInputCols(inputCols)
        self.setOutputCols(outputCols)
        self.setInplace(inplace) \
            .setOutputTypes(outputTypes)

    def setInplace(self, inplace):
        """Sets the indicator of inplace casting.

        Args:
            inplace: indicator of whether casting is inplace

        Returns:
            ``self``
        """
        # Use the supported Params API rather than poking _paramMap directly.
        self._set(inplace=inplace)
        return self

    def getInplace(self):
        """

        Returns:
            the indicator of whether casting is inplace
        """
        return self.getOrDefault(self.inplace)

    def setOutputTypes(self, outputTypes):
        """Sets the list of output types

        Args:
            outputTypes: the list of output types

        Returns:
            ``self``
        """
        self._set(outputTypes=outputTypes)
        return self

    def getOutputTypes(self):
        """

        Returns:
            the list of output types
        """
        return self.getOrDefault(self.outputTypes)

    def _transform_inplace(self, dataset: DataFrame):
        """Casts ``inputCols`` in place, preserving their names.

        Implemented by appending temporary columns via
        ``_transform_append``, then dropping the originals and renaming
        the temporaries back.  NOTE: ``outputCols`` on ``self`` is
        mutated temporarily, so this method is not safe for concurrent
        use of the same transformer instance.
        """
        if len(self.getOutputTypes()) != len(self.getInputCols()):
            raise ValueError("outputTypes must have the same length as inputCols")

        # Temporary column names guaranteed not to clash with existing ones.
        names_tmp = generate_unique_str_n(dataset.columns, len(self.getInputCols()))

        self.setOutputCols(names_tmp)
        dataset = self._transform_append(dataset)
        self.setOutputCols(None)

        for name in self.getInputCols():
            dataset = dataset.drop(name)
        for name_tmp, name in zip(names_tmp, self.getInputCols()):
            dataset = dataset.withColumnRenamed(name_tmp, name)
        return dataset

    def _transform_append(self, dataset: DataFrame):
        """Appends one cast column per input column, named by ``outputCols``."""
        if len(self.getOutputCols()) != len(self.getInputCols()) or \
                len(self.getOutputTypes()) != len(self.getInputCols()):
            raise ValueError("outputCols and outputTypes must have the same "
                             "length as inputCols")

        try:
            fs = [self._CASTERS[t] for t in self.getOutputTypes()]
        except KeyError as err:
            # Surface a clear error instead of a bare KeyError from the
            # internal lookup table.
            raise ValueError(
                "unsupported output type: {!r}; supported types are "
                "int, float and str".format(err.args[0])) from err
        for f, in_, out in zip(fs, self.getInputCols(), self.getOutputCols()):
            dataset = dataset.withColumn(out, f(dataset[in_]))
        return dataset

    def _transform(self, dataset):
        """Dispatches to in-place or append transformation per ``inplace``."""
        if self.getInplace():
            return self._transform_inplace(dataset)
        else:
            return self._transform_append(dataset)
