#
#  Copyright 2022 The Open Islands Authors. All Rights Reserved.
#
#  Licensed under the Apache License, Version 2.0 (the "License");
#  you may not use this file except in compliance with the License.
#  You may obtain a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
#  Unless required by applicable law or agreed to in writing, software
#  distributed under the License is distributed on an "AS IS" BASIS,
#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
#  See the License for the specific language governing permissions and
#  limitations under the License.
#
import os.path

import pickle
import typing

from fate_arch import session
from fate_arch.common.base_utils import fate_uuid
from fate_arch.computing.spark._table import Table as SparkCTable
from fate_arch.computing.spark._table import from_rdd
from federatedml.components.components import Components
from federatedml.model_base import ComponentOutput
from oi.checkpoint import CheckpointManager
from oi.component_input import ComponentInput
from oi.tracker_client import TrackerClient
from oi.transform_utils import get_data_line, get_sid_data_line, get_auto_increasing_sid_data_line
from pyoi.base_operator_driver import BaseOperatorDriver
from pyoi.session import Session as OI_Session
from pyoi.constant import ComputingEngine, DatasetFormat, ModelFormat, DataStructure
from pyoi.runtime.types import RunArgs
from pyoi.runtime_env import RuntimeENV
from pyoi.types import DataIO, ModelIO, DataSchema
from pyoi.util.log_utils import getLogger

# Module-level logger shared by everything in this driver module.
LOGGER = getLogger()


class OperatorDriver(BaseOperatorDriver):
    """Adapter between the OI operator runtime and FATE components.

    Converts OI structures (``RunArgs``, ``DataIO``, ``ModelIO``) into the
    nested dict shapes FATE components consume, executes the component, and
    converts the component's outputs back into OI types. Data is exchanged
    through Spark RDD-backed FATE tables; models through FATE-format pickles.
    """

    def __init__(self):
        super().__init__()

    def action(self, args: RunArgs) -> typing.Tuple[typing.List[DataIO], typing.List[ModelIO]]:
        """Resolve and run the FATE component named by ``args.entry_point``.

        Args:
            args: OI run description — entry point, role, node ids, params,
                and the declared input/output data and models.

        Returns:
            A ``(output_datas, output_models)`` tuple in OI form.

        Raises:
            RuntimeError: when the component run object cannot be resolved.
        """
        # Attach a FATE session to the current OI session so FATE computing
        # primitives are usable inside this task.
        fate_session = session.Session(session_id=OI_Session.get().session_id)
        fate_session.as_global()
        fate_session.init_computing()
        # Component providers live in the sibling "federatedml" directory.
        Components.provider_path = os.path.join(
            os.path.dirname(os.path.dirname(os.path.realpath(__file__))), "federatedml")
        all_modules = Components.get_names()
        try:
            run_object = Components.get(args.entry_point, all_modules).get_run_obj(args.role.lower())
        except Exception as e:
            # Chain the cause so the original resolution failure stays in the
            # traceback; the args tuple is kept for backward compatibility.
            raise RuntimeError("get operator object error", e) from e
        feeded_parameters = {
            "output_data_name": [dataio.name for dataio in args.output.data],
            "table_info": [],
        }
        LOGGER.info(f"run parameters: {args.params}")
        cpn_input = ComponentInput(
            tracker=TrackerClient(),
            checkpoint_manager=CheckpointManager(),
            task_version_id=OI_Session.get().task_id,
            parameters=args.params,
            datasets=self.adaptive_input_datas(args.input.data),
            caches=None,
            models=self.adaptive_input_models(args.input.model),
            job_parameters={},
            roles=dict(
                # FATE expects lowercase role names and integer party ids.
                role=dict((k.lower(), [int(j) for j in v]) for k, v in args.nodes.items()),
                local={"role": args.role.lower(), "party_id": int(args.node_id)},
            ),
            flow_feeded_parameters=feeded_parameters,
        )
        cpn_output: ComponentOutput = run_object.run(cpn_input)
        return self.adaptive_output_datas(cpn_output.data), self.adaptive_output_models([cpn_output.model])

    def adaptive_input_datas(self, input_datas: typing.Dict[str, typing.List[DataIO]]):
        """Regroup OI input data into FATE's nested dict layout.

        fate structure:
        {'component_name(operator_name)': {'input_type(data/train_data/validate_data)': [<Table object>]}}
        """
        data_for_fate = {}
        for input_type, dataios in input_datas.items():
            for dataio in dataios:
                by_type = data_for_fate.setdefault(dataio.task_name, {})
                by_type.setdefault(input_type, []).append(dataio.data)
        return data_for_fate

    def adaptive_input_models(self, input_models: typing.Dict[str, typing.List[ModelIO]]):
        """Regroup OI input models into FATE's nested dict layout.

        fate structure:
        {"input_type(model/isometric_model)":{"component_name(operator_name)": {"model_name": ("buffer_name": <bytes>)}}}
        """
        return {
            input_type: {modelio.task_name: modelio.model for modelio in modelios}
            for input_type, modelios in input_models.items()
        }

    def adaptive_output_datas(self, datas: typing.List[SparkCTable]) -> typing.List[DataIO]:
        """Wrap FATE output tables as OI ``DataIO`` objects, preserving ``None`` slots."""
        return [
            None if data is None else DataIO(format=DatasetFormat.FATE, data=data)
            for data in datas
        ]

    def adaptive_output_models(self, models: typing.List[typing.Dict[str, typing.Tuple[str, bytes, dict]]]) -> \
            typing.List[ModelIO]:
        """Wrap FATE output model dicts as OI ``ModelIO`` objects."""
        return [ModelIO(model=model, format=ModelFormat.FATE) for model in models]

    def load_data(self, dataio: DataIO):
        """Load ``dataio`` into a FATE Spark table, converting CSV if needed.

        Raises:
            RuntimeError: for non-Spark computing engines or unsupported
                dataset formats.
        """
        if RuntimeENV.COMPUTING_ENGINE == ComputingEngine.SPARK:
            rdd, schema = OI_Session.get().create_context(DataStructure.SPARK_RDD).load_text(dataio)
            table = from_rdd(rdd)
        else:
            raise RuntimeError(f"{RuntimeENV.COMPUTING_ENGINE} is not support")
        if dataio.format == DatasetFormat.FATE:
            # FATE-format data already carries its schema in the metadata.
            table.schema = schema.metadata
        elif dataio.format == DatasetFormat.CSV:
            table = self.data_from_standard_format(table, schema)
            LOGGER.info(f"get header {table.schema}")
        else:
            raise RuntimeError(f"transform from {dataio.format} is not supported")
        return table

    def save_data(self, src_dataio: DataIO, dest_dataio: DataIO):
        """Persist a FATE Spark table (``src_dataio.data``) to ``dest_dataio``."""
        table: SparkCTable = src_dataio.data
        context = OI_Session.get().create_context(DataStructure.SPARK_RDD)
        schema = DataSchema()
        schema.update_metadata(table.schema)
        # NOTE(review): reaches into the private ``_rdd`` of the FATE table —
        # no public accessor appears to exist.
        context.save_text(schema=schema, rdd=table._rdd, data_output=dest_dataio)

    def load_model(self, modelio: ModelIO):
        """Deserialize a FATE-format model.

        Raises:
            RuntimeError: if ``modelio`` is not in FATE format.
        """
        if modelio.format != ModelFormat.FATE:
            raise RuntimeError("not support")
        # NOTE(review): pickle.loads on stored bytes is only safe if the model
        # store is fully trusted — untrusted pickles can execute arbitrary code.
        return pickle.loads(self.read_model(modelio))

    def save_model(self, src_modelio: ModelIO, dest_modelio: ModelIO):
        """Serialize ``src_modelio.model`` with pickle and write it to ``dest_modelio``."""
        self.write_model(pickle.dumps(src_modelio.model), dest_modelio)

    def data_from_standard_format(self, data: SparkCTable, dataschema: DataSchema):
        """Convert a raw text table into FATE's keyed (sid, values) layout.

        The line transformer is chosen from the schema metadata: plain data
        lines when no sequence id is requested, otherwise random-uuid or
        auto-increasing sequence ids.
        """
        uuid = fate_uuid()
        LOGGER.info(f"dataschema metadata: {dataschema.metadata}")
        if not dataschema.metadata.get("add_sequence_id", False):
            get_line = get_data_line
        elif dataschema.metadata.get("random_sequence_id", False):
            get_line = get_sid_data_line
        else:
            get_line = get_auto_increasing_sid_data_line
        # todo: use other function
        rdd = data._rdd.zipWithIndex()

        def _line_func(lines):
            # Runs inside Spark executors; closes over get_line/uuid/schema.
            new_lines = []
            for values, index in lines:
                k, v = get_line(
                    values=values,
                    id_delimiter=dataschema.delimiter,
                    line_index=index,
                    fate_uuid=uuid,
                )
                new_lines.append((k, v))
            return new_lines

        table = from_rdd(rdd=rdd.mapPartitions(_line_func))
        table.schema = self.data_schema_from_standard_format(dataschema)
        return table

    def data_schema_from_standard_format(self, dataschema: DataSchema):
        """Build the FATE ``{'header', 'sid'}`` schema dict from a DataSchema.

        When a sequence id is generated, every column is payload and the sid
        is the synthetic "sid"; otherwise the first column is taken as the sid
        and the remaining columns form the header.
        """
        if dataschema.metadata.get("add_sequence_id"):
            header = dataschema.delimiter.join(dataschema.columns).strip()
            sid = "sid"
        else:
            header = dataschema.delimiter.join(dataschema.columns[1:]).strip()
            sid = dataschema.columns[0].strip()
        return {'header': header, 'sid': sid}

    def data_to_standard_format(self, data):
        raise NotImplementedError

    def model_to_standard_format(self, model):
        raise NotImplementedError

    def model_from_standard_format(self, model):
        raise NotImplementedError


if __name__ == "__main__":
    # Script entry point: construct the driver and hand control to its loop.
    driver = OperatorDriver()
    driver.start()
