/*
 * Copyright 2022 The Open Islands Authors. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.openislands.oi.worker.scheduler;

import com.fasterxml.jackson.core.type.TypeReference;
import org.openislands.oi.config.MyNodeInfo;
import org.openislands.oi.constant.*;
import org.openislands.oi.operator.*;
import org.openislands.oi.worker.manager.OfNodeTaskManager;
import org.openislands.oi.error.LogicException;
import org.openislands.oi.pojo.base.OfNodeTaskIdentityPOJO;
import org.openislands.oi.pojo.dto.OfNodeTaskDTO;
import org.openislands.oi.scheduling.DAGOperator;
import org.openislands.oi.util.*;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.ApplicationContext;
import org.springframework.context.ApplicationListener;
import org.springframework.context.event.ContextRefreshedEvent;
import org.springframework.stereotype.Service;

import javax.annotation.Resource;
import java.nio.file.Path;
import java.util.*;

@Service
public class OperatorRuntimeManager implements ApplicationListener<ContextRefreshedEvent> {
    private static final CustomerLogger schedulingLogger = CustomerLoggerFactory.getLogger(LogTraceType.JOB, LogType.SCHEDULING);

    /**
     * Runtime containers keyed by container type. Populated once from the Spring
     * context in {@link #onApplicationEvent(ContextRefreshedEvent)} and treated as
     * read-only afterwards.
     */
    private final Map<OperatorContainerType, OperatorRuntimeContainer> operatorRuntimeContainers = new HashMap<>();

    @Resource
    private OfNodeTaskManager ofNodeTaskManager;

    @Resource
    private MyNodeInfo myNodeInfo;

    // Addresses of this worker and of the federated proxy, exported to the
    // operator runtime via environment variables / service addresses.
    @Value(Dict.WORKER_HOST)
    private String workerHost;
    @Value(Dict.WORKER_HTTP_PORT)
    private String workerHttpPort;
    @Value(Dict.WORKER_GRPC_PORT)
    private String workerGrpcPort;
    @Value(Dict.FEDERATEDPROXY_HOST)
    private String federatedproxyHost;
    @Value(Dict.FEDERATEDPROXY_HTTP_PORT)
    private String federatedproxyHttpPort;
    @Value(Dict.FEDERATEDPROXY_GRPC_PORT)
    private String federatedproxyGrpcPort;
    @Value(Dict.FEDERATEDPROXY_PROTOCOL)
    private String federatedproxyProtocol;

    @Value(Dict.OPERATOR_RUNTIME_LOG_LEVEL)
    private String operatorRuntimeLogLevel;
    @Value(Dict.OPERATOR_RUNTIME_PROFILE_LOG_ENABLED)
    private Boolean operatorRuntimeProfileLogEnabled;

    // Engine defaults applied when a task does not specify its own engines.
    @Value(Dict.OPERATOR_RUNTIME_DEFAULT_COMPUTING_ENGINE)
    private String defaultComputingEngine;
    @Value(Dict.OPERATOR_RUNTIME_DEFAULT_STORAGE_ENGINE)
    private String defaultStorageEngine;
    @Value(Dict.OPERATOR_RUNTIME_DEFAULT_QUEUE_ENGINE)
    private String defaultQueueEngine;

    @Value(Dict.KAFKA_SERVERS)
    private String kafkaServers;

    /**
     * Returns the runtime container registered for the given container type.
     *
     * @param operatorContainerType the container type requested by a task
     * @return the matching container (never {@code null})
     * @throws LogicException if no container of that type was discovered at startup
     */
    public OperatorRuntimeContainer runtime(OperatorContainerType operatorContainerType) throws LogicException {
        OperatorRuntimeContainer operatorRuntimeContainer = operatorRuntimeContainers.get(operatorContainerType);
        LogicException.nonNull(operatorRuntimeContainer,
                MessageCodeEnum.SYSTEM_ERROR, String.format("the %s container type is not supported", operatorContainerType));
        return operatorRuntimeContainer;
    }

    /**
     * Launches the runtime for the given task and persists the launch result.
     *
     * @param ofNodeTaskDTO the task to launch
     * @return the container instance identifier reported by the container
     * @throws LogicException if the container returns an empty instance id
     */
    public String launchRuntime(OfNodeTaskDTO ofNodeTaskDTO) {
        schedulingLogger.info(ofNodeTaskDTO.getJobId(), "try to launch {} runtime", ofNodeTaskDTO.getSessionId());
        String containerInstance = this.runtime(ofNodeTaskDTO.getContainerType())
                .launchRuntime(ofNodeTaskDTO, this.getRunEnvVar(ofNodeTaskDTO), this.getEngineParams(ofNodeTaskDTO));
        if (StringUtils.isEmpty(containerInstance)) {
            schedulingLogger.info(ofNodeTaskDTO.getJobId(), "launch {} runtime failed", ofNodeTaskDTO.getSessionId());
            throw new LogicException(MessageCodeEnum.EXECUTION_ERROR, "launch operator runtime failed");
        } else {
            schedulingLogger.info(ofNodeTaskDTO.getJobId(), "launch {} runtime success, update info", ofNodeTaskDTO.getSessionId());
            // Seed the heartbeat so the freshly-launched task is not immediately
            // considered stale by heartbeat monitoring.
            ofNodeTaskDTO.setLatestHeartbeat(new Date());
            ofNodeTaskManager.updateOfNodeTask(ofNodeTaskDTO);
            return containerInstance;
        }
    }

    /**
     * Terminates the runtime of the given task.
     *
     * <p>A task that never received a container instance is treated as already
     * terminated and reported as success.
     *
     * @param ofNodeTaskDTO the task whose runtime should be stopped
     * @return {@code true} if termination succeeded (or was unnecessary)
     */
    public Boolean terminateRuntime(OfNodeTaskDTO ofNodeTaskDTO) {
        schedulingLogger.info(ofNodeTaskDTO.getJobId(), "try to terminate task {} runtime", ofNodeTaskDTO.getSessionId());
        if (StringUtils.isEmpty(ofNodeTaskDTO.getContainerInstance())) {
            schedulingLogger.warn(ofNodeTaskDTO.getJobId(), "skip terminate empty instance task {}", ofNodeTaskDTO.getSessionId());
            return true;
        }

        boolean st = this.runtime(ofNodeTaskDTO.getContainerType()).terminateRuntime(ofNodeTaskDTO);
        if (st) {
            schedulingLogger.info(ofNodeTaskDTO.getJobId(), "terminate task {} runtime success", ofNodeTaskDTO.getSessionId());
        } else {
            schedulingLogger.warn(ofNodeTaskDTO.getJobId(), "terminate task {} runtime failed", ofNodeTaskDTO.getSessionId());
        }
        return st;
    }

    /**
     * Activates the runtime identified by the given task coordinates.
     *
     * @return {@code true} if the container reports successful activation
     */
    public Boolean activateRuntime(String jobId, String taskName, Integer taskVersion, RoleType role, String nodeId) {
        OfNodeTaskDTO ofNodeTaskDTO = ValueCopyUtils.methodParamsMapping(new OfNodeTaskDTO(), jobId, taskName, taskVersion, role, nodeId);
        return this.runtime(ofNodeTaskDTO.getContainerType()).activateRuntime(ofNodeTaskDTO);
    }

    /**
     * @return {@code true} if the task's runtime is still alive according to its container
     */
    public Boolean aliveRuntime(OfNodeTaskDTO ofNodeTaskDTO) {
        return this.runtime(ofNodeTaskDTO.getContainerType()).aliveRuntime(ofNodeTaskDTO);
    }

    /**
     * Builds the full runtime configuration (IO, resources, service addresses)
     * for the task identified by the given coordinates.
     *
     * @throws LogicException if the task is not registered at this node or its
     *                        stored DAG config cannot be parsed
     */
    public RuntimeConfig getRuntimeConfig(String jobId, String taskName, Integer taskVersion, RoleType role, String nodeId) {
        List<OfNodeTaskDTO> result = ofNodeTaskManager.queryOfNodeTask(jobId, taskName, taskVersion, role, nodeId);
        if (result.isEmpty()) {
            throw new LogicException(MessageCodeEnum.EXECUTION_ERROR, "can not found task at node");
        } else {
            OfNodeTaskDTO ofNodeTask = result.get(0);
            DAGOperator dagOperator = ObjectUtils.json2Object(ofNodeTask.getTaskConfig(), new TypeReference<DAGOperator>() {
            });
            RuntimeConfig runtimeConfig = new RuntimeConfig();
            //todo: get from job
            runtimeConfig.setJobType(JobType.MODELING);
            runtimeConfig.setName(Objects.requireNonNull(dagOperator, "parse config error").getName());
            runtimeConfig.setModule(dagOperator.getModule());
            runtimeConfig.setEntryPoint(ofNodeTask.getOperator().getEntrypoint());
            runtimeConfig.setInput(this.getOperatorInput(ofNodeTask, dagOperator));
            runtimeConfig.setOutput(this.getOperatorOutput(ofNodeTask, dagOperator));
            runtimeConfig.setRole(role);
            runtimeConfig.setNodeId(nodeId);
            runtimeConfig.setNodes(ofNodeTask.getNodes());
            // Resource settings; partitions may be overridden per-operator in the DAG.
            runtimeConfig.setNumPartitions((Integer) dagOperator.getResource().getOrDefault("partitions", 4));
            runtimeConfig.setNumFederatedSenders(4);
            runtimeConfig.setFederatedMode("MULTIPLE");

            runtimeConfig.setOperatorParams(dagOperator.getParam());
            Map<String, String> serviceParam = new HashMap<>();
            runtimeConfig.setServiceParams(serviceParam);

            // Service addresses the runtime needs: the message queue and the
            // federated proxy (HTTP endpoint).
            Map<String, Map<String, String>> serviceAddress = new HashMap<>();
            Map<String, String> mqAddress = new HashMap<>();
            mqAddress.put("type", QueueEngine.valueOf(defaultQueueEngine.toUpperCase()).name());
            mqAddress.put("servers", kafkaServers);
            serviceAddress.put("mq", mqAddress);

            Map<String, String> proxyAddress = new HashMap<>();
            proxyAddress.put("host", federatedproxyHost);
            proxyAddress.put("port", federatedproxyHttpPort);
            proxyAddress.put("protocol", federatedproxyProtocol);
            serviceAddress.put("proxy", proxyAddress);

            runtimeConfig.setServiceAddresses(serviceAddress);
            return runtimeConfig;
        }
    }

    /**
     * Splits an input descriptor of the form {@code "<taskName>.<ioName>"}.
     *
     * <p>Replaces the previous {@code assert} check: assertions are disabled by
     * default at runtime, which let malformed descriptors slip through.
     *
     * @throws LogicException if the descriptor does not have exactly two parts
     */
    private String[] splitIoDescriptor(String desc) {
        String[] items = desc.split("\\.");
        if (items.length != 2) {
            throw new LogicException(MessageCodeEnum.EXECUTION_ERROR, String.format("invalid io descriptor: %s", desc));
        }
        return items;
    }

    /**
     * Finds the upstream task that produces an input for {@code ofNodeTask}.
     *
     * @throws LogicException if no matching upstream task is registered
     */
    private OfNodeTaskDTO findUpstreamTask(OfNodeTaskDTO ofNodeTask, String taskName) {
        //todo: query latest version instead of hard-coded 0
        List<OfNodeTaskDTO> upstreamTasks = ofNodeTaskManager.queryOfNodeTask(ofNodeTask.getJobId(), taskName, 0, ofNodeTask.getRole(), ofNodeTask.getNodeId());
        if (upstreamTasks.isEmpty()) {
            throw new LogicException(MessageCodeEnum.EXECUTION_ERROR, "can not found upstream task");
        }
        //todo: get the latest
        return upstreamTasks.get(0);
    }

    /**
     * Resolves the operator's data and model inputs by locating each upstream
     * task and building the corresponding IO URIs.
     */
    private OperatorInput getOperatorInput(OfNodeTaskDTO ofNodeTask, DAGOperator dagOperator) {
        //todo: fix dagOperator input dsl
        //{"data": "train": ["reader_0.data1"], "validation": ["reader_1.data1"]}
        //{"model": "general": ["reader_0.model1"]}
        OperatorInput operatorInput = new OperatorInput();
        if (dagOperator.getInput() == null) {
            return operatorInput;
        }
        if (dagOperator.getInput().getData() != null) {
            for (String inputType : dagOperator.getInput().getData().keySet()) {
                operatorInput.addNewDataInputType(inputType);
                for (String desc : dagOperator.getInput().getData().get(inputType)) {
                    String[] items = this.splitIoDescriptor(desc);
                    String taskName = items[0];
                    String dataName = items[1];
                    OfNodeTaskDTO upstreamTask = this.findUpstreamTask(ofNodeTask, taskName);
                    DataIO dataIO = new DataIO();
                    dataIO.setName(dataName);
                    dataIO.setTaskName(taskName);
                    dataIO.setFormat(upstreamTask.getOutputDataFormat());
                    dataIO.setUri(IOUtils.genDataIOUri(upstreamTask, dataName, dataIO.getFormat(), StorageEngine.LOCALFS));
                    operatorInput.getData().get(inputType).add(dataIO);
                }
            }
        }

        if (dagOperator.getInput().getModel() != null) {
            for (String inputType : dagOperator.getInput().getModel().keySet()) {
                operatorInput.addNewModelInputType(inputType);
                for (String desc : dagOperator.getInput().getModel().get(inputType)) {
                    String[] items = this.splitIoDescriptor(desc);
                    String taskName = items[0];
                    String modelName = items[1];
                    OfNodeTaskDTO upstreamTask = this.findUpstreamTask(ofNodeTask, taskName);
                    ModelIO modelIO = new ModelIO();
                    modelIO.setName(modelName);
                    modelIO.setTaskName(taskName);
                    modelIO.setFormat(upstreamTask.getOutputModelFormat());
                    modelIO.setUri(IOUtils.genModelIOUri(upstreamTask, modelName, modelIO.getFormat(), StorageEngine.LOCALFS));
                    operatorInput.getModel().get(inputType).add(modelIO);
                }
            }
        }
        return operatorInput;
    }

    /**
     * Builds the operator's declared data and model outputs, with URIs rooted
     * at this task.
     */
    private OperatorOutput getOperatorOutput(OfNodeTaskDTO ofNodeTask, DAGOperator dagOperator) {
        //{"data": ["data1"]}
        //{"model": ["model1"]}
        OperatorOutput operatorOutput = new OperatorOutput();
        if (dagOperator.getOutput() == null) {
            return operatorOutput;
        }
        if (dagOperator.getOutput().getData() != null) {
            for (String outputName : dagOperator.getOutput().getData()) {
                DataIO dataIO = new DataIO();
                dataIO.setName(outputName);
                dataIO.setTaskName(ofNodeTask.getTaskName());
                dataIO.setFormat(ofNodeTask.getOutputDataFormat());
                dataIO.setUri(IOUtils.genDataIOUri(ofNodeTask, outputName, dataIO.getFormat(), StorageEngine.LOCALFS));
                operatorOutput.getData().add(dataIO);
            }
        }

        if (dagOperator.getOutput().getModel() != null) {
            for (String outputName : dagOperator.getOutput().getModel()) {
                ModelIO modelIO = new ModelIO();
                modelIO.setName(outputName);
                modelIO.setTaskName(ofNodeTask.getTaskName());
                modelIO.setFormat(ofNodeTask.getOutputModelFormat());
                modelIO.setUri(IOUtils.genModelIOUri(ofNodeTask, outputName, modelIO.getFormat(), StorageEngine.LOCALFS));
                operatorOutput.getModel().add(modelIO);
            }
        }
        return operatorOutput;
    }

    /**
     * Updates the stored state and other fields of the identified task.
     *
     * @return {@code true} if the task record was updated
     */
    public Boolean updateRuntimeState(String jobId, String taskName, Integer taskVersion, RoleType role, String nodeId, OfNodeTaskDTO ofNodeTaskDTO) {
        if (ofNodeTaskDTO.getState() != null) {
            ofNodeTaskManager.updateState(jobId, taskName, taskVersion, role, nodeId, ofNodeTaskDTO.getState());
        }
        //todo: if state is pending,check activating or not
        return ofNodeTaskManager.updateOfNodeTask(jobId, taskName, taskVersion, role, nodeId, ofNodeTaskDTO);
    }

    /**
     * Placeholder for state-transition validation; currently always permits
     * the transition.
     */
    public Boolean checkNextState(String jobId, String taskName, Integer taskVersion, RoleType role, String nodeId) {
        return true;
    }

    /**
     * Persists a heartbeat timestamp for the identified task. Only identity
     * fields plus the heartbeat are copied, so no other columns are clobbered.
     */
    public Boolean updateHeartbeat(String jobId, String taskName, Integer taskVersion, RoleType role, String nodeId, OfNodeTaskDTO ofNodeTaskDTO) {
        final OfNodeTaskDTO updateDTO = this.copyIdentifyFields(ofNodeTaskDTO);
        updateDTO.setLatestHeartbeat(ofNodeTaskDTO.getLatestHeartbeat());
        return ofNodeTaskManager.updateOfNodeTask(jobId, taskName, taskVersion, role, nodeId, updateDTO);
    }

    /**
     * Assembles the environment variables handed to a launching runtime:
     * logging, service endpoints, engine selection, and task identity/paths.
     */
    private Map<String, String> getRunEnvVar(OfNodeTaskDTO ofNodeTaskDTO) {
        Map<String, String> runEnvVar = new HashMap<>();
        runEnvVar.put("OI_PROJECT_ROOT", DirectoryUtils.getProjectRootPath().toString());
        runEnvVar.put("OI_LOG_LEVEL", operatorRuntimeLogLevel);
        // Null-safe: the injected Boolean may be absent; treat null as disabled
        // instead of throwing an NPE on unboxing.
        runEnvVar.put("OI_PROFILE_LOG_ENABLED", Boolean.TRUE.equals(operatorRuntimeProfileLogEnabled) ? "1" : "0");

        runEnvVar.put("WORKER_SERVICE_HOST", workerHost);
        runEnvVar.put("WORKER_SERVICE_HTTP_PORT", workerHttpPort);
        runEnvVar.put("WORKER_SERVICE_GRPC_PORT", workerGrpcPort);
        //todo: support get from conf or using default
        runEnvVar.put("COMPUTING_ENGINE", ComputingEngine.valueOf(defaultComputingEngine.toUpperCase()).name());
        runEnvVar.put("STORAGE_ENGINE", StorageEngine.valueOf(defaultStorageEngine.toUpperCase()).name());
        runEnvVar.put("QUEUE_ENGINE", QueueEngine.valueOf(defaultQueueEngine.toUpperCase()).name());

        Path logDir = DirectoryUtils.genTaskLogPath(ofNodeTaskDTO);
        Path configDir = DirectoryUtils.genTaskConfigPath(ofNodeTaskDTO);
        runEnvVar.put("JOB_ID", ofNodeTaskDTO.getJobId());
        runEnvVar.put("TASK_NAME", ofNodeTaskDTO.getTaskName());
        runEnvVar.put("TASK_VERSION", ofNodeTaskDTO.getTaskVersion().toString());
        runEnvVar.put("TASK_ID", IdUtils.getTaskId(ofNodeTaskDTO));
        runEnvVar.put("ROLE", ofNodeTaskDTO.getRole().name());
        runEnvVar.put("NODE_ID", ofNodeTaskDTO.getNodeId());
        runEnvVar.put("SESSION_ID", ofNodeTaskDTO.getSessionId());
        runEnvVar.put("LOG_DIR", logDir.toString());
        runEnvVar.put("CONFIG_DIR", configDir.toString());
        return runEnvVar;
    }

    /**
     * Engine tuning parameters for the launched runtime.
     *
     * <p>Currently fixed defaults; the task argument is kept so per-task
     * overrides can be added without changing the call sites.
     */
    private Map<String, String> getEngineParams(OfNodeTaskDTO ofNodeTaskDTO) {
        Map<String, String> params = new HashMap<>();
        params.put(EngineParams.DRIVER_MEMORY, "2048m");
        params.put(EngineParams.NUM_EXECUTORS, "2");
        params.put(EngineParams.EXECUTOR_CORES, "2");
        params.put(EngineParams.EXECUTOR_MEMORY, "2048");
        params.put(EngineParams.NETWORK_TIMEOUT, "36000s");
        params.put(EngineParams.EXECUTOR_HEARTBEAT_INTERVAL, "3600s");
        return params;
    }

    /**
     * Copies only the identity fields of a task DTO (round-trips through the
     * identity POJO so every non-identity field is dropped).
     */
    private OfNodeTaskDTO copyIdentifyFields(OfNodeTaskDTO src) {
        return ValueCopyUtils.copyProperties(ValueCopyUtils.copyProperties(src, new OfNodeTaskIdentityPOJO()), new OfNodeTaskDTO());
    }

    /**
     * Discovers every {@link OperatorRuntimeContainer} bean once the Spring
     * context is ready and indexes them by container type.
     */
    @Override
    public void onApplicationEvent(ContextRefreshedEvent event) {
        ApplicationContext applicationContext = event.getApplicationContext();
        Map<String, OperatorRuntimeContainer> beansOfType = applicationContext.getBeansOfType(OperatorRuntimeContainer.class);
        for (OperatorRuntimeContainer operatorRuntimeContainer : beansOfType.values()) {
            operatorRuntimeContainers.put(operatorRuntimeContainer.operatorContainerType(), operatorRuntimeContainer);
        }
    }
}
