/*
 * Copyright 2022 The Open Islands Authors. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.openislands.oi.worker.scheduler;

import org.openislands.oi.constant.*;
import org.openislands.oi.pojo.base.OfNodeTaskIdentityPOJO;
import org.openislands.oi.pojo.dto.OfNodeTaskDTO;
import org.openislands.oi.pojo.dto.OperatorDTO;
import org.openislands.oi.util.*;
import org.springframework.stereotype.Component;

import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Objects;
import java.util.stream.Collectors;

@Component
public class WorkerSubprocessContainer implements OperatorRuntimeContainer {
    private static final CustomerLogger schedulingLogger = CustomerLoggerFactory.getLogger(LogTraceType.JOB, LogType.SCHEDULING);

    /**
     * Launches the operator as a local worker subprocess via {@code spark-submit}.
     *
     * <p>The command line is assembled from the operator's configured environment
     * ({@code spark_home}, {@code pythoninterpreter}), the engine parameters, and the
     * driver script resolved from the operator's source URI. The subprocess environment
     * is the operator env with keys upper-cased, overlaid by {@code runEnvVar}.
     *
     * @param ofNodeTaskDTO task descriptor (job/session identity, operator definition)
     * @param runEnvVar     per-run environment variables; override operator env on key clash
     * @param engineParams  spark resource/service parameters (see {@link EngineParams})
     * @return the container-instance handle returned by {@link CommandUtils#startAsyncSubprocess}
     * @throws NullPointerException if the operator env does not define {@code spark_home}
     */
    @Override
    public String launchRuntime(OfNodeTaskDTO ofNodeTaskDTO, Map<String, String> runEnvVar, Map<String, String> engineParams) {
        OperatorDTO operator = ofNodeTaskDTO.getOperator();
        Map<String, String> operatorEnv = operator.getEnv();
        // Fail fast with a clear message instead of an opaque NPE inside Paths.get.
        String sparkHome = Objects.requireNonNull(operatorEnv.get("spark_home"),
                "operator env is missing required key 'spark_home'");
        List<String> cmd = new ArrayList<>();
        // todo: support other engine but not only standalone spark
        cmd.add(this.getSparkSubmit(sparkHome));
        cmd.addAll(this.getSparkArgs(ofNodeTaskDTO, engineParams));
        if (operator.getPackagedType() == OperatorPackagedType.PYTHON) {
            cmd.addAll(this.getPysparkArgs(operatorEnv.get("pythoninterpreter")));
        }
        cmd.addAll(this.getEnvArgs(operatorEnv));
        cmd.addAll(this.getEnvArgs(runEnvVar));
        // Application entry point: driver script inside the operator's source directory.
        cmd.add(Paths.get(UriUtils.trimFileUri(operator.getSourceUri()), operator.getDriver()).toString());
        cmd.add("--session-id");
        cmd.add(ofNodeTaskDTO.getSessionId());
        // Upper-case the operator env keys with a locale-independent mapping (Locale.ROOT
        // avoids e.g. the Turkish dotless-i problem). If two keys collide after upper-casing,
        // the later entry wins rather than throwing IllegalStateException.
        Map<String, String> subprocessEnv = operatorEnv.entrySet().stream()
                .collect(Collectors.toMap(
                        entry -> entry.getKey().toUpperCase(Locale.ROOT),
                        Map.Entry::getValue,
                        (first, second) -> second,
                        HashMap::new));
        // Run-specific variables take precedence over the operator defaults.
        subprocessEnv.putAll(runEnvVar);
        schedulingLogger.info(ofNodeTaskDTO.getJobId(), "start subprocess by command: {}", String.join(" ", cmd));
        return CommandUtils.startAsyncSubprocess(cmd.toArray(new String[0]), subprocessEnv, null, new SubprocessContainerCallback(ofNodeTaskDTO));
    }

    /**
     * Terminates the subprocess identified by the task's container instance.
     *
     * @return the result of the stop command, gated by the task's session id filter
     */
    @Override
    public Boolean terminateRuntime(OfNodeTaskDTO ofNodeTaskDTO) {
        String containerInstance = ofNodeTaskDTO.getContainerInstance();
        // kill by sessionId
        return CommandUtils.fromFilterCondition(ofNodeTaskDTO.getSessionId(), () -> CommandUtils.stop(containerInstance));
    }

    /** @return whether the task's subprocess is still alive. */
    @Override
    public Boolean aliveRuntime(OfNodeTaskDTO ofNodeTaskDTO) {
        return CommandUtils.isAlive(ofNodeTaskDTO.getContainerInstance());
    }

    /**
     * Re-activation is not supported for worker subprocesses.
     *
     * @return always {@code false}
     */
    @Override
    public Boolean activateRuntime(OfNodeTaskDTO ofNodeTaskDTO) {
        return false;
    }

    /** @return the container type this implementation handles. */
    @Override
    public OperatorContainerType operatorContainerType() {
        return OperatorContainerType.WORKER_SUBPROCESS;
    }

    /** Resolves the {@code spark-submit} launcher under the given Spark home. */
    private String getSparkSubmit(String sparkHome) {
        // Pass path components separately instead of embedding a '/' in one element.
        return Paths.get(sparkHome, "bin", "spark-submit").toString();
    }

    /**
     * Builds the spark-submit arguments: application name, local-cluster master spec,
     * driver memory, and service tuning conf entries, all taken from {@code engineParams}.
     */
    private List<String> getSparkArgs(OfNodeTaskDTO ofNodeTaskDTO, Map<String, String> engineParams) {
        //todo:
        List<String> cmd = new ArrayList<>();
        cmd.add(String.format("--name=oi-%s.%s.%d.%s-%s", ofNodeTaskDTO.getJobId(), ofNodeTaskDTO.getTaskName(), ofNodeTaskDTO.getTaskVersion(), ofNodeTaskDTO.getRole(), ofNodeTaskDTO.getNodeId()));
        //resource
        cmd.add("--master");
        // Spark's local-cluster regex tolerates whitespace around the commas.
        cmd.add(String.format("local-cluster[%s, %s, %s]", engineParams.get(EngineParams.NUM_EXECUTORS), engineParams.get(EngineParams.EXECUTOR_CORES), engineParams.get(EngineParams.EXECUTOR_MEMORY)));
        cmd.add("--driver-memory=" + engineParams.get(EngineParams.DRIVER_MEMORY));
        //service param
        cmd.add("--conf");
        cmd.add("spark.network.timeout=" + engineParams.get(EngineParams.NETWORK_TIMEOUT));
        cmd.add("--conf");
        cmd.add("spark.executor.heartbeatInterval=" + engineParams.get(EngineParams.EXECUTOR_HEARTBEAT_INTERVAL));
        return cmd;
    }

    /** PySpark conf entries pointing both driver and executors at the given interpreter. */
    private List<String> getPysparkArgs(String pythonInterpreter) {
        List<String> cmd = new ArrayList<>();
        cmd.add("--conf");
        cmd.add("spark.pyspark.python=" + pythonInterpreter);
        cmd.add("--conf");
        cmd.add("spark.pyspark.driver.python=" + pythonInterpreter);
        return cmd;
    }

    /**
     * Turns every env entry into a {@code spark.executorEnv.KEY=value} conf pair,
     * upper-casing the key with a locale-independent mapping.
     */
    private List<String> getEnvArgs(Map<String, String> runEnv) {
        List<String> cmd = new ArrayList<>();
        runEnv.forEach((key, value) -> {
            cmd.add("--conf");
            cmd.add(String.format("spark.executorEnv.%s=%s", key.toUpperCase(Locale.ROOT), value));
        });
        return cmd;
    }

    // NOTE(review): unused in this file; launchRuntime builds the (relative) path inline,
    // while this returns an absolute path — confirm whether a caller still needs it.
    private String getRuntimeApplicationPath(OperatorDTO operator) {
        return Paths.get(UriUtils.trimFileUri(operator.getSourceUri()), operator.getDriver()).toAbsolutePath().toString();
    }


    // NOTE(review): unused in this file — round-trips src through the identity POJO to
    // keep only identity fields. Verify callers before removing.
    private OfNodeTaskDTO copyIdentifyFields(OfNodeTaskDTO src) {
        return ValueCopyUtils.copyProperties(ValueCopyUtils.copyProperties(src, new OfNodeTaskIdentityPOJO()), new OfNodeTaskDTO());
    }

    /** Routes subprocess stdout/stderr lines into the task's STD log. */
    private static class SubprocessContainerCallback implements CommandUtils.OnCommandExecOutputCallback {
        private final CustomerLogger customerLogger;

        public SubprocessContainerCallback(OfNodeTaskDTO ofNodeTaskDTO) {
            this.customerLogger = CustomerLoggerFactory.getLogger(ServiceName.OperatorRuntime, LogTraceType.JOB, LogType.STD, DirectoryUtils.genTaskLogPathItem(ofNodeTaskDTO));
        }

        /** Stdout line from the subprocess. */
        @Override
        public void onSubprocessSuccess(String line) {
            this.customerLogger.info(line);
        }

        /** Stderr line from the subprocess. */
        @Override
        public void onSubprocessError(String line) {
            this.customerLogger.error(line);
        }
    }
}
