/*
 *  Copyright 2020-2025 the original author or authors.
 *  You cannot use this file unless authorized by the author.
 */

package org.ipig.computing.spark.executer;

import lombok.Data;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.lang3.StringUtils;
import org.ipig.commons.context.HadoopContext;
import org.ipig.commons.helper.AssertHelper;
import org.ipig.commons.helper.CommandHelper;
import org.ipig.commons.helper.DateTimeHelper;
import org.ipig.commons.helper.StringHelper;
import org.ipig.commons.service.cmd.CommandService;
import org.ipig.computing.constant.context.SparkContext;
import org.ipig.computing.spark.conf.ApplicationConf;
import org.ipig.constants.HadoopCnst;
import org.ipig.constants.JavaCnst;
import org.ipig.constants.ResultStatus;
import org.ipig.constants.SymbolCnst;

import java.io.File;

/**
 * HadoopLinuxCommandLineExecuter
 *
 * @author <a href="mailto:comchnts@163.com">chinats</a>
 * @since 1.0
 */
@Slf4j
@Data
public class HadoopLinuxCommandLineExecuter implements CommandService<ApplicationConf, ResultStatus> {
    private long timeout = 60000;//1分种
    private String logDir = "";

    @Override
    public ResultStatus execute(ApplicationConf conf) {
        String command = toCommand(conf);
        log.info(command);
        if (StringUtils.isBlank(logDir)) {
            logDir = System.getProperty(JavaCnst.IO_TMP_DIR);
        }
        StringBuilder sb = new StringBuilder();
        sb.append(logDir).append(File.separator).append(DateTimeHelper.getDateTime()).append(SymbolCnst.UNDERLINE)
                .append(conf.getCreateTime()).append(SymbolCnst.UNDERLINE).append(conf.getMainClass())
                .append(SymbolCnst.UNDERLINE).append(conf.getAppName()).append(".hadoop");
        String absolutePathFileName = sb.toString();
        return CommandHelper.execCommand(command, timeout,absolutePathFileName);
    }


    @Override
    public String toCommand(ApplicationConf conf) {
        String hadoopHome = HadoopContext.getHadoopHome();
        String hadoopPath= StringHelper.join(File.separator,hadoopHome,"bin","hadoop");
        String uploadAppLibDir= SparkContext.getSparkOnYarnUploadDir();
        String appLibDir = conf.getAppLibDir();
        AssertHelper.hasText(appLibDir, "please enter the local path of the application dependency package【appLibDir】");
        int index = StringUtils.lastIndexOf(appLibDir, File.separator);
        String fullAppLibDir = appLibDir;
        if (index > 0) {
            String endStr = StringUtils.substring(appLibDir, index + 1);
            if (StringUtils.isBlank(endStr)) {
                fullAppLibDir = appLibDir + SymbolCnst.ASTERISK;
            } else {
                int locIndex = StringUtils.indexOfAny(endStr, SymbolCnst.ASTERISK);
                if (locIndex < 0) {
                    fullAppLibDir = appLibDir + File.separator + SymbolCnst.ASTERISK;
                }
            }
        }
        StringBuilder command=new StringBuilder();
        command.append(hadoopPath).append(SymbolCnst.SPACE).append(HadoopCnst.OpType.FS.code).append(SymbolCnst.SPACE).append(HadoopCnst.FsParms.PUT.code)
                .append(" -f ").append(fullAppLibDir).append(SymbolCnst.SPACE).append(uploadAppLibDir);
        return command.toString();
    }
}

