package com.central.scheduler.plugin.task.flink.sql.utils;

import com.central.common.entity.FlinkRunOptions;
import com.central.scheduler.plugin.task.flink.sql.FlinkSqlConstants;
import com.central.scheduler.plugin.task.util.ArgsUtils;
import org.apache.commons.lang3.StringUtils;

import java.util.ArrayList;
import java.util.List;

/**
 * @author Tindy
 * @date 2022/5/6
 * @describe
 * flink args utils
 */
public class FlinkArgsUtils {

    private FlinkArgsUtils() {
        throw new IllegalStateException("Utility class");
    }

    // NOTE(review): currently unreferenced in this file; kept for compatibility — confirm before removing.
    private static final String LOCAL_DEPLOY_MODE = "local";

    /**
     * Builds the Flink CLI argument list from the given run options.
     *
     * <p>Only the {@code yarn-cluster} deploy mode is supported by the platform, so the
     * {@code -m yarn-cluster} pair is always emitted. All other options are appended only
     * when present (non-null / non-empty) on {@code runOptions}.
     *
     * @param runOptions flink run parameters; individual fields may be null or empty
     * @return the ordered argument list to pass to the flink CLI
     */
    public static List<String> buildArgs(FlinkRunOptions runOptions) {
        List<String> args = new ArrayList<>();

        // The platform only supports the yarn-cluster deploy mode.
        args.add(FlinkSqlConstants.FLINK_RUN_MODE);      // -m
        args.add(FlinkSqlConstants.FLINK_YARN_CLUSTER);  // yarn-cluster

        // Job parallelism: -p
        Integer parallelism = runOptions.getParallelism();
        if (parallelism != null) {
            args.add(FlinkSqlConstants.FLINK_PARALLELISM);
            args.add(String.valueOf(parallelism));
        }

        // Number of slots per TaskManager: -ys
        Integer slot = runOptions.getSlot();
        if (slot != null) {
            args.add(FlinkSqlConstants.FLINK_YARN_SLOT);
            args.add(String.valueOf(slot));
        }

        // Application name on YARN: -ynm (escaped to survive shell interpolation)
        String appName = runOptions.getAppName();
        if (StringUtils.isNotEmpty(appName)) {
            args.add(FlinkSqlConstants.FLINK_APP_NAME);
            args.add(ArgsUtils.escape(appName));
        }

        // JobManager memory: -yjm
        String jobManagerMemory = runOptions.getJobManagerMemory();
        if (StringUtils.isNotEmpty(jobManagerMemory)) {
            args.add(FlinkSqlConstants.FLINK_JOB_MANAGE_MEM);
            args.add(jobManagerMemory);
        }

        // TaskManager memory: -ytm
        String taskManagerMemory = runOptions.getTaskManagerMemory();
        if (StringUtils.isNotEmpty(taskManagerMemory)) {
            args.add(FlinkSqlConstants.FLINK_TASK_MANAGE_MEM);
            args.add(taskManagerMemory);
        }

        // Target YARN queue: -yqu
        String queue = runOptions.getQueue();
        if (StringUtils.isNotEmpty(queue)) {
            args.add(FlinkSqlConstants.FLINK_QUEUE);
            args.add(queue);
        }

        // Savepoint to restore from: -s
        String savepointPath = runOptions.getSavepointPath();
        if (StringUtils.isNotEmpty(savepointPath)) {
            args.add(FlinkSqlConstants.FLINK_SAVEPOINT_PATH);
            args.add(savepointPath);
        }

        // Extra classpath jars: -C, one pair per jar.
        // Guard against a null list — every other option above is null-checked.
        List<String> extJarPaths = runOptions.getExtJarPaths();
        if (extJarPaths != null) {
            for (String extJarPath : extJarPaths) {
                args.add(FlinkSqlConstants.FLINK_CLASS_PATH);
                args.add(extJarPath);
            }
        }

        // -sae: if the job is submitted in attached mode, perform a best-effort cluster
        // shutdown when the CLI is terminated abruptly, so the task status stays in sync
        // with the cluster job status.
        args.add(FlinkSqlConstants.FLINK_SHUTDOWN_ON_ATTACHED_EXIT);

        // Entry point class: -c
        String mainClass = runOptions.getMainClass();
        if (StringUtils.isNotEmpty(mainClass)) {
            args.add(FlinkSqlConstants.FLINK_MAIN_CLASS);
            args.add(mainClass);
        }

        // Main jar path (positional argument, no flag)
        String mainJar = runOptions.getMainJar();
        if (StringUtils.isNotEmpty(mainJar)) {
            args.add(mainJar);
        }

        // Program arguments (positional, passed through as-is)
        String mainArgs = runOptions.getMainArgs();
        if (StringUtils.isNotEmpty(mainArgs)) {
            args.add(mainArgs);
        }
        return args;
    }

}

