/*
 *  Copyright 2020-2025 the original author or authors.
 *  You cannot use this file unless authorized by the author.
 */

package org.ipig.computing.spark.executer;

import lombok.Data;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.lang3.StringUtils;
import org.ipig.commons.context.HadoopContext;
import org.ipig.commons.context.JavaContext;
import org.ipig.commons.helper.AssertHelper;
import org.ipig.commons.helper.CommandHelper;
import org.ipig.commons.helper.DateTimeHelper;
import org.ipig.commons.helper.StringHelper;
import org.ipig.commons.service.cmd.CommandService;
import org.ipig.computing.constant.context.SparkContext;
import org.ipig.computing.spark.conf.ApplicationConf;
import org.ipig.constants.JavaCnst;
import org.ipig.constants.ResultStatus;
import org.ipig.constants.SymbolCnst;

import java.io.File;
import java.text.MessageFormat;

/**
 * SparkLinuxCommandLineExecuter
 *
 * @author <a href="mailto:comchnts@163.com">chinats</a>
 * @since 1.0
 */
@Slf4j
@Data
public class SparkLinuxCommandLineExecuter implements CommandService<ApplicationConf, ResultStatus> {

    /** Default timeout for command execution: 45 seconds (milliseconds). */
    private long timeout = 45000;
    /** Timeout used for client deploy mode, where the call blocks until the driver exits: 30 minutes. */
    private long maxTimeout = 1800000;
    /** Directory for the per-run spark log file; defaults to {@code java.io.tmpdir} when blank. */
    private String logDir = "";

    /**
     * Builds the spark-submit command line for {@code conf} and executes it,
     * capturing process output into a log file under {@link #logDir}.
     *
     * @param conf the Spark application configuration to launch
     * @return the execution result reported by {@code CommandHelper.execCommand}
     */
    @Override
    public ResultStatus execute(ApplicationConf conf) {
        String command = toCommand(conf);
        log.info(command);
        // CLIENT deploy mode blocks until the driver finishes, so allow the much longer timeout.
        // Renamed from "timeout" to avoid shadowing the field of the same name.
        long effectiveTimeout = this.timeout;
        if (StringUtils.equals(conf.getAppDeployMode(), SparkContext.DeployMode.CLIENT.code)) {
            effectiveTimeout = maxTimeout;
        }
        // Lazily default the log directory to the JVM temp dir (cached on the field for later calls).
        if (StringUtils.isBlank(logDir)) {
            logDir = System.getProperty(JavaCnst.IO_TMP_DIR);
        }
        // Log file path: <logDir>/<appName>_<createTime>_<now>.spark
        StringBuilder sb = new StringBuilder();
        sb.append(logDir).append(File.separator).append(conf.getAppName()).append(SymbolCnst.UNDERLINE)
                .append(conf.getCreateTime()).append(SymbolCnst.UNDERLINE)
                .append(DateTimeHelper.getDateTime()).append(".spark");
        String absolutePathFileName = sb.toString();
        return CommandHelper.execCommand(command, effectiveTimeout, absolutePathFileName);
    }

    /**
     * Assembles the full {@code java -cp ... SparkSubmit ...} command line from {@code conf}.
     * <p>
     * The classpath is built from the Spark conf dir, the Hadoop conf dir, an optional
     * application conf dir, the Spark lib wildcard, and the application lib path. The
     * application lib path handling depends on how {@code appLibDir} is written:
     * <ul>
     *   <li>ends with the file separator  → append {@code *} for the classpath, resource is {@code appLibDir + appResource};</li>
     *   <li>last segment has no {@code *} → append {@code /&#42;}, resource is {@code appLibDir + / + appResource};</li>
     *   <li>last segment contains {@code *} → use as-is, resource lives in the parent directory.</li>
     * </ul>
     *
     * @param conf the Spark application configuration
     * @return the complete command line string
     * @throws RuntimeException (via {@code AssertHelper}) when appLibDir, mainClass or
     *         appResource is missing, or when appLibDir contains no file separator
     */
    @Override
    public String toCommand(ApplicationConf conf) {
        String sparkLibs = StringHelper.join(File.separator, SparkContext.getSparkLibDir(), SymbolCnst.ASTERISK);
        String sparkConfDir = SparkContext.getSparkConfDir();
        String hadoopConfDir = HadoopContext.getHadoopConfDir();
        String appLibDir = conf.getAppLibDir();
        String mainClass = conf.getMainClass();
        String mainArgs = conf.getMainArg();
        String appName = conf.getAppName();
        String appResource = conf.getAppResource();
        String deployMode = SparkContext.DeployMode.parse(conf.getAppDeployMode()).code;
        String workMode = SparkContext.WorkingMode.YARN.defaultValue;
        AssertHelper.hasText(appLibDir, MessageFormat.format("【{0}】 please enter the local path of the application dependency package【appLibDir】", conf.getAppName()));
        AssertHelper.hasText(mainClass, MessageFormat.format("【{0}】 enter the fully qualified path of the main function class【mainClass】", conf.getAppName()));
        AssertHelper.hasText(appResource, MessageFormat.format("【{0}】 enter the fully qualified path of the package where the main function class is located【appResource】", conf.getAppName()));
        // Resolve classpath entry and resource path based on the shape of appLibDir (see javadoc).
        int index = StringUtils.lastIndexOf(appLibDir, File.separator);
        String fullAppResourcePath = "";
        String fullAppLibPath = "";
        if (index >= 0) {
            String endStr = StringUtils.substring(appLibDir, index + 1);
            if (StringUtils.isBlank(endStr)) {
                // appLibDir ends with the separator: wildcard directly, resource appended directly.
                fullAppLibPath = appLibDir + SymbolCnst.ASTERISK;
                fullAppResourcePath = appLibDir + appResource;
            } else {
                int locIndex = StringUtils.indexOfAny(endStr, SymbolCnst.ASTERISK);
                if (locIndex < 0) {
                    // Plain directory: add separator + wildcard for the classpath.
                    fullAppLibPath = appLibDir + File.separator + SymbolCnst.ASTERISK;
                    fullAppResourcePath = appLibDir + File.separator + appResource;
                } else {
                    // Last segment already contains a wildcard: resource lives in the parent dir.
                    fullAppLibPath = appLibDir;
                    fullAppResourcePath = StringUtils.substring(appLibDir, 0, index) + File.separator + appResource;
                }
            }
        } else {
            // No separator at all: fullAppLibPath is still empty here, so this assertion
            // always fails — deliberately rejecting an appLibDir with no path component.
            AssertHelper.hasText(fullAppLibPath, MessageFormat.format("【{0}】 please enter an effective local path of the application dependency package【appLibDir】", conf.getAppName()));
        }
        StringBuilder command = new StringBuilder();
        command.append(JavaContext.getJavaCmd()).append(JavaCnst.Cmd.CP.code).append(sparkConfDir)
                .append(SymbolCnst.COLON).append(hadoopConfDir);
        if (StringUtils.isNotBlank(conf.getAppConfDir())) {
            command.append(SymbolCnst.COLON).append(conf.getAppConfDir());
        }
        command.append(SymbolCnst.COLON).append(sparkLibs)
                .append(SymbolCnst.COLON).append(fullAppLibPath)
                .append(SymbolCnst.SPACE).append(SparkContext.SPARK_SUBMIT_CLASS).append(SymbolCnst.SPACE)
                .append(SparkContext.APP_CLASS).append(mainClass).append(SymbolCnst.SPACE);
        if (StringUtils.isNotBlank(appName)) {
            command.append(SparkContext.APP_NAME).append(appName);
        }
        command.append(SparkContext.APP_MASTER).append(workMode)
                .append(SparkContext.APP_DEPLOY_MODE).append(deployMode)
                .append(SymbolCnst.SPACE).append(fullAppResourcePath);
        if (StringUtils.isNotBlank(mainArgs)) {
            command.append(SymbolCnst.SPACE).append(mainArgs);
        }
        return command.toString();
    }
}

