#
#  Copyright 2022 The Open Islands Authors. All Rights Reserved.
#
#  Licensed under the Apache License, Version 2.0 (the "License");
#  you may not use this file except in compliance with the License.
#  You may obtain a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
#  Unless required by applicable law or agreed to in writing, software
#  distributed under the License is distributed on an "AS IS" BASIS,
#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
#  See the License for the specific language governing permissions and
#  limitations under the License.
#
import os.path
import sys
import threading
import time
import typing

from pyoi.session import Session
from pyoi.constant import StandardDatasetFormat
from pyoi.rpc.rpc_client import RPCClient
from pyoi.runtime.base_runtime import BaseRuntime
from pyoi.runtime.federated_sender import FederatedSender
from pyoi.runtime.types import OperatorOutput
from pyoi.runtime.types import RuntimeHeartbeat, TaskState, RunArgs
from pyoi.runtime_env import RuntimeENV
from pyoi.types import profile, DataIO, ModelIO
from pyoi.util import UriUtils
from pyoi.util.base_utils import current_timestamp
from pyoi.util.log_utils import getLogger
from pyoi.util.process_utils import run_subprocess, check_process, kill_process

# Module-level logger shared by the driver and its timer callbacks.
LOGGER = getLogger()


class BaseOperatorDriver(BaseRuntime):
    """Driver for a single operator task.

    Lifecycle: start heartbeat + federated sender sidecars, report PENDING
    then RUNNING, load declared inputs, execute the operator ``action``,
    persist outputs, and report SUCCESS.  Storage/format specifics are left
    to subclasses via the ``NotImplementedError`` hooks.
    """

    # Restart attempts granted to each dead federated sender process.
    _SENDER_RETRIES = 3
    # Retry budget is replenished once this many milliseconds have elapsed
    # since a sender's last (re)start.
    _RETRY_RESET_MS = 1000 * 60 * 10

    def run(self):
        """Execute the full task lifecycle: PENDING -> RUNNING -> SUCCESS."""
        self.report_heartbeat()
        self.start_sender()
        self.pending()
        self.runtime_state.state = TaskState.RUNNING
        self.report_state()

        Session.get_or_create(task_id=RuntimeENV.TASK_ID, runtime_config=self.runtime_config)
        LOGGER.info(f"operator input: {self.runtime_config.input}")
        LOGGER.info(f"operator output: {self.runtime_config.output}")

        # The load/action phase is identical with or without profiling, so
        # run it once instead of duplicating the calls in both branches;
        # try/finally guarantees the profiler is closed even on failure.
        profiling = RuntimeENV.OI_PROFILE_LOG_ENABLED == "1"
        if profiling:
            profile.profile_start()
        try:
            self.load_input()
            output_dataios, output_modelios = self.action(RunArgs.from_runtime_config(self.runtime_config))
        finally:
            if profiling:
                profile.profile_ends()

        self.save_output(dataios=output_dataios, modelios=output_modelios)
        self.runtime_state.state = TaskState.SUCCESS
        # todo: report profile metrics
        self.report_state()

    def load_input(self):
        """Materialize every declared input in place.

        Fills ``dataio.data`` / ``modelio.model`` for each entry of the
        runtime config's input sections via the subclass loading hooks.
        """
        for data_list in self.runtime_config.input.data.values():
            for dataio in data_list:
                dataio.data = self.load_data(dataio)
        for model_list in self.runtime_config.input.model.values():
            for modelio in model_list:
                modelio.model = self.load_model(modelio)

    def load_data(self, dataio: DataIO):
        """Load the dataset referenced by *dataio*; subclass hook."""
        raise NotImplementedError

    def load_model(self, modelio: ModelIO):
        """Load the model referenced by *modelio*; subclass hook."""
        raise NotImplementedError

    def read_model(self, modelio: ModelIO) -> bytes:
        """Read raw model bytes from *modelio*'s file URI, caching them on
        ``modelio.model``, and return them."""
        # todo: move to pyoi.context and support more storage engine
        with open(UriUtils.trim_file_uri(modelio.uri), "rb") as fr:
            modelio.model = fr.read()
        return modelio.model

    def write_model(self, model: bytes, modelio: ModelIO):
        """Write *model* bytes to *modelio*'s file URI, creating parent
        directories as needed."""
        # todo: move to pyoi.context and support more storage engine
        path = UriUtils.trim_file_uri(modelio.uri)
        os.makedirs(os.path.dirname(path), exist_ok=True)
        with open(path, "wb") as fw:
            fw.write(model)

    def action(self, args: RunArgs) -> typing.Tuple[typing.List[DataIO], typing.List[ModelIO]]:
        """Run the operator and return (output data, output models); subclass hook."""
        raise NotImplementedError

    def data_to_standard_format(self, data):
        """Convert *data* to the standard dataset format; subclass hook."""
        raise NotImplementedError

    def model_to_standard_format(self, model: bytes) -> bytes:
        """Convert *model* bytes to the standard model format; subclass hook."""
        raise NotImplementedError

    def save_output(self, dataios: typing.List[DataIO], modelios: typing.List[ModelIO]):
        """Persist operator outputs to the destinations declared in the
        runtime config, converting to the standard format when the produced
        format differs from the declared one.

        Assumes *dataios* / *modelios* are positionally aligned with
        ``output.data`` / ``output.model``.
        """
        output: OperatorOutput = self.runtime_config.output
        for i, dest in enumerate(output.data):
            dataio = dataios[i]
            if dataio.format.name not in StandardDatasetFormat.names() and dataio.format != dest.format:
                dataio.data = self.data_to_standard_format(dataio.data)
            LOGGER.info(f"output uri: {dest.uri}")
            self.save_data(dataio, dest)
        for i, dest in enumerate(output.model):
            modelio = modelios[i]
            if modelio.format != dest.format:
                modelio.model = self.model_to_standard_format(modelio.model)
            self.save_model(modelio, dest)

    def save_data(self, src_dataio: DataIO, dest_dataio: DataIO):
        """Persist *src_dataio* to *dest_dataio*'s destination; subclass hook."""
        raise NotImplementedError

    def save_model(self, src_modelio: ModelIO, dest_modelio: ModelIO):
        """Persist *src_modelio* to *dest_modelio*'s destination; subclass hook."""
        raise NotImplementedError

    def export_output(self):
        """Optional post-save export step; no-op in the base driver."""
        pass

    def pending(self):
        """Report the PENDING state with the current timestamp."""
        self.runtime_state.state = TaskState.PENDING
        self.runtime_state.pendingTime = current_timestamp()
        self.report_state()

    def start_sender(self):
        """Launch the configured number of federated sender subprocesses and
        schedule the periodic liveness check."""
        module_file_path = sys.modules[FederatedSender.__module__].__file__
        # f-prefix on the plain "--session-id" literal was a no-op; dropped.
        cmd = [RuntimeENV.PYTHONINTERPRETER, module_file_path, "--session-id", RuntimeENV.SESSION_ID]
        processes = []
        for _ in range(self.runtime_config.numFederatedSenders):
            proc = run_subprocess(cmd, config_dir=RuntimeENV.CONFIG_DIR, log_dir=RuntimeENV.LOG_DIR,
                                  process_name="federated_packet_sender")
            # Mutable triple: [process handle, remaining retries, last start ms].
            processes.append([proc, self._SENDER_RETRIES, current_timestamp()])
        time.sleep(self.heartbeat_interval)
        self.check_sender(processes, cmd)

    def check_sender(self, processes, cmd: typing.List[str]):
        """Periodic liveness check for federated sender subprocesses.

        Each *processes* entry is a mutable ``[process, retries, started_at]``
        triple.  Dead senders are restarted while retries remain; the retry
        budget refills once ``_RETRY_RESET_MS`` has elapsed since the last
        start.  If any sender cannot be revived, all senders are killed, the
        task is reported RUN_FAILED and the driver exits.
        """
        LOGGER.info("check federated packet sender")
        all_is_alive = True
        for entry in processes:
            process, retries, started_at = entry
            if check_process(process):
                continue
            all_is_alive = False
            # BUGFIX: the original computed `started_at - now`, which is always
            # negative, so the retry budget never refilled; elapsed time is
            # now - started_at.
            if current_timestamp() - started_at > self._RETRY_RESET_MS:
                retries = self._SENDER_RETRIES
            if retries > 0:
                LOGGER.error(f"packet sender: {process.pid} is not alived, try restart")
            else:
                LOGGER.error(f"packet sender: {process.pid} is not alived, no retries")
            entry[2] = current_timestamp()
            while retries > 0:
                retries -= 1
                entry[1] = retries
                try:
                    entry[0] = run_subprocess(cmd, config_dir=RuntimeENV.CONFIG_DIR,
                                              log_dir=RuntimeENV.LOG_DIR,
                                              process_name="federated_packet_sender")
                    all_is_alive = True
                    LOGGER.info("packet sender restart success")
                    break
                except Exception:
                    if retries > 0:
                        LOGGER.error("restart sender failed, try again")
                    else:
                        LOGGER.error("restart sender failed, no retries")
        if all_is_alive:
            timer = threading.Timer(self.heartbeat_interval, self.check_sender, args=(processes, cmd))
            # Timer.setDaemon() is deprecated (removed in 3.13); set the attribute.
            timer.daemon = True
            timer.start()
        else:
            for process, _retries, _started_at in processes:
                try:
                    kill_process(process, cmd)
                except Exception as e:
                    LOGGER.warning(e)
            # todo: report failed msg
            LOGGER.error("packet sender process exits unexpectedly")
            self.runtime_state.state = TaskState.RUN_FAILED
            self.report_state()
            # todo: not exit, must kill
            sys.exit(1)

    def handle_exception(self):
        """Mark the task RUN_FAILED and report it."""
        self.runtime_state.state = TaskState.RUN_FAILED
        self.report_state()

    def report_heartbeat(self):
        """Send one heartbeat and reschedule itself on a daemon timer."""
        RPCClient.report_heartbeat(RuntimeHeartbeat())
        timer = threading.Timer(self.heartbeat_interval, self.report_heartbeat)
        timer.daemon = True
        timer.start()

    def report_state(self):
        """Push the current runtime state to the RPC server."""
        RPCClient.report_state(self.runtime_state)

    def finally_do(self):
        """Final bookkeeping: record end time / elapsed and report state."""
        self.runtime_state.endTime = self.end_time
        self.runtime_state.elapsed = self.elapsed
        self.report_state()
