/*
 *  Copyright 2020-2025 the original author or authors.
 *  You cannot use this file unless authorized by the author.
 */

package org.ipig.computing.spark;

import org.apache.commons.lang.exception.ExceptionUtils;
import org.apache.commons.lang3.BooleanUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.spark.launcher.SparkAppHandle;
import org.apache.spark.launcher.SparkLauncher;
import org.ipig.commons.conf.GenericConfService;
import org.ipig.computing.ComputingService;
import org.ipig.computing.LauncherService;
import org.ipig.computing.constant.context.SparkContext;
import org.ipig.computing.spark.conf.SparkLauncherConf;
import org.ipig.constants.ResultStatus;

import java.lang.reflect.Method;
import java.util.Map;
import java.util.concurrent.CountDownLatch;

/**
 * SparkLauncherService
 *
 * @author <a href="mailto:comchnts@163.com">chinats</a>
 * @since 1.0
 */
public interface SparkLauncherService<Conf extends SparkLauncherConf> extends GenericConfService<Conf>, LauncherService, ComputingService {

    /**
     * Returns the application name, which must be globally unique.
     * <p>Implementations are encouraged to override this with a business-meaningful
     * name; the default falls back to the implementing class's fully qualified name.
     *
     * @return String globally unique application name
     */
    public default String getAppName() {
        return this.getClass().getName();
    }

    /**
     * Returns the fully qualified class name of the driver main class.
     *
     * @return String driver main class name
     */
    public String getMainClass();

    /**
     * Returns the runtime argument passed to the driver main class.
     *
     * @return String driver main argument
     */
    public String getMainArg();

    /**
     * Initialization hook invoked at the start of {@link #launch()}; the default does nothing.
     */
    public default void init() {
    }

    /**
     * Launches the Spark application described by {@link #getConf()}.
     * <p>When the configured master starts with the LOCAL working-mode code, the driver
     * main class is invoked in-process via reflection. Otherwise a {@link SparkLauncher}
     * is populated from the conf and the application is submitted, blocking until the
     * Spark application reaches a final state.
     *
     * @return ResultStatus SUCCESS when the launch completed without error, FAILURE otherwise
     */
    @Override
    public default ResultStatus launch() {
        init();
        String mainArg = getMainArg();
        String mainClass = getMainClass();
        if (StringUtils.isBlank(mainClass)) {
            System.out.printf("请设置Driver的 mainClass\n");
            return ResultStatus.FAILURE;
        }
        if (StringUtils.isBlank(mainArg)) {
            System.out.printf("请设置Driver启动%s的mainArg\n", mainClass);
            return ResultStatus.FAILURE;
        }
        ResultStatus result = ResultStatus.SUCCESS;
        Conf conf = getConf();
        String master = conf.getMaster();
        String workingMode = SparkContext.WorkingMode.YARN.code;
        String[] appArgs = new String[]{mainArg, workingMode};
        String threadId = Thread.currentThread().getName() + Thread.currentThread().getId();

        try {
            beforeComputing();
        } catch (Exception e) {
            e.printStackTrace();
            System.out.println(ExceptionUtils.getFullStackTrace(e));
        }

        if (StringUtils.startsWithIgnoreCase(master, SparkContext.WorkingMode.LOCAL.code)) {
            // Local mode: run the driver main class in the current JVM via reflection.
            workingMode = SparkContext.WorkingMode.LOCAL.code;
            appArgs[1] = workingMode;
            try {
                System.out.printf("启动%s,workingMode=【%s】%s\n", threadId, workingMode, SparkContext.WorkingMode.parse(workingMode).title);
                Method mainMethod = Class.forName(mainClass).getMethod(SparkContext.MAIN_FUNCTION, String[].class);
                // main(String[]) is static, so the receiver is null; the cast keeps
                // the array from being expanded into varargs.
                mainMethod.invoke(null, (Object) appArgs);
            } catch (Exception e) {
                result = ResultStatus.FAILURE;
                e.printStackTrace();
                System.out.println(ExceptionUtils.getFullStackTrace(e));
            }
        } else {
            // Cluster mode: populate a SparkLauncher from the conf and submit the app.
            SparkLauncher launcher = new SparkLauncher();
            String appResource = conf.getAppResource();
            if (StringUtils.isNotBlank(conf.getSparkHome())) {
                launcher.setSparkHome(conf.getSparkHome());
            }
            if (StringUtils.isNotBlank(conf.getJavaHome())) {
                launcher.setJavaHome(conf.getJavaHome());
            }
            if (StringUtils.isNotBlank(conf.getAppName())) {
                launcher.setAppName(conf.getAppName());
            }

            launcher.setMaster(master);
            launcher.addAppArgs(appArgs);
            launcher.setMainClass(mainClass);
            launcher.setAppResource(appResource);
            launcher.setDeployMode(conf.getAppDeployMode());
            launcher.setVerbose(BooleanUtils.toBoolean(conf.getVerbose()));

            // Compression and serialization
            launcher.setConf(SparkContext.CompressionAndSerialization.SERIALIZER.key, conf.getSerializer());
            // Spark SQL
            launcher.setConf(SparkContext.Sql.SQL_AUTO_BROADCAST_JOIN_THRESHOLD.key, conf.getSqlAutoBroadcastJoinThreshold());
            // Shuffle
            launcher.setConf(SparkContext.Shuffle.SHUFFLE_SERVICE_ENABLED.key, conf.getShuffleServiceEnabled());
            launcher.setConf(SparkContext.Shuffle.SHUFFLE_SERVICE_PORT.key, conf.getShuffleServicePort());
            launcher.setConf(SparkContext.Shuffle.SHUFFLE_FILE_BUFFER.key, conf.getShuffleFileBuffer());
            // Executor
            if (StringUtils.isNotBlank(conf.getExecutorExtraClasspath())) {
                launcher.setConf(SparkContext.Executor.EXECUTOR_EXTRA_CLASSPATH.key, conf.getExecutorExtraClasspath());
            }
            if (StringUtils.isNotBlank(conf.getExecutorCores())) {
                launcher.setConf(SparkContext.Executor.EXECUTOR_CORES.key, conf.getExecutorCores());
            }
            if (StringUtils.isNotBlank(conf.getExecutorExtraJavaOptions())) {
                launcher.setConf(SparkContext.Executor.EXECUTOR_EXTRA_JAVA_OPTIONS.key, conf.getExecutorExtraJavaOptions());
            }
            if (StringUtils.isNotBlank(conf.getExecutorExtraLibraryPath())) {
                launcher.setConf(SparkContext.Executor.EXECUTOR_EXTRA_LIBRARY_PATH.key, conf.getExecutorExtraLibraryPath());
            }
            if (StringUtils.isNotBlank(conf.getExecutorMemory())) {
                launcher.setConf(SparkContext.Executor.EXECUTOR_MEMORY.key, conf.getExecutorMemory());
            }
            // Driver
            if (StringUtils.isNotBlank(conf.getDriverExtraClasspath())) {
                launcher.setConf(SparkContext.Driver.DRIVER_EXTRA_CLASSPATH.key, conf.getDriverExtraClasspath());
            }
            if (StringUtils.isNotBlank(conf.getDriverExtraJavaOptions())) {
                launcher.setConf(SparkContext.Driver.DRIVER_EXTRA_JAVA_OPTIONS.key, conf.getDriverExtraJavaOptions());
            }
            if (StringUtils.isNotBlank(conf.getDriverExtraLibraryPath())) {
                launcher.setConf(SparkContext.Driver.DRIVER_EXTRA_LIBRARY_PATH.key, conf.getDriverExtraLibraryPath());
            }
            if (StringUtils.isNotBlank(conf.getDriverMemory())) {
                launcher.setConf(SparkContext.Driver.DRIVER_MEMORY.key, conf.getDriverMemory());
            }
            launcher.setConf(SparkContext.Driver.DRIVER_ALLOW_MULTIPLE_CONTEXTS.key, conf.getDriverAllowMultipleContexts());

            // Memory management
            launcher.setConf(SparkContext.MemoryManagement.MEMORY_FRACTION.key, conf.getMemoryFraction());
            launcher.setConf(SparkContext.MemoryManagement.MEMORY_STORAGE_FRACTION.key, conf.getMemorystorageFraction());
            // Dynamic resource allocation
            launcher.setConf(SparkContext.DynamicAllocation.DYNAMICALLOCATION_ENABLED.key, conf.getDynamicAllocationEnabled());
            launcher.setConf(SparkContext.DynamicAllocation.DYNAMICALLOCATION_EXECUTOR_IDLE_TIMEOUT.key,
                    conf.getDynamicExecutorIdleTimeout());
            launcher.setConf(SparkContext.DynamicAllocation.DYNAMICALLOCATION_INITIAL_EXECUTORS.key,
                    conf.getDynamicAllocationInitialExecutors());
            launcher.setConf(SparkContext.DynamicAllocation.DYNAMICALLOCATION_MIN_EXECUTOR.key,
                    conf.getDynamicAllocationMinExecutor());
            // Set unconditionally once; a second guarded set of the same key/value was redundant.
            launcher.setConf(SparkContext.DynamicAllocation.DYNAMICALLOCATION_MAX_EXECUTOR.key,
                    conf.getDynamicAllocationMaxExecutor());
            launcher.setConf(SparkContext.DynamicAllocation.DYNAMICALLOCATION_SCHEDULER_BACKLOG_TIMEOUT.key,
                    conf.getDynamicAllocationSchedulerBacklogTimeout());
            launcher.setConf(SparkContext.DynamicAllocation.DYNAMICALLOCATION_SUSTAINED_SCHEDULER_BACKLOG_TIMEOUT.key,
                    conf.getDynamicAllocationSustainedSchedulerBacklogTimeout());
            if (StringUtils.isNotBlank(conf.getDynamicAllocationCachedExecutorIdleTimeout())) {
                launcher.setConf(SparkContext.DynamicAllocation.DYNAMICALLOCATION_CACHED_EXECUTOR_IDLE_TIMEOUT.key,
                        conf.getDynamicAllocationCachedExecutorIdleTimeout());
            }
            // Execution behavior
            launcher.setConf(SparkContext.ExecutionBehavior.BROADCAST_BLOCKSIZE.key, conf.getBehaviorBroadcastBlocksize());
            if (StringUtils.isNotBlank(conf.getBehaviorDefaultParallelism())) {
                launcher.setConf(SparkContext.ExecutionBehavior.DEFAULT_PARALLELISM.key,
                        conf.getBehaviorDefaultParallelism());
            }
            // Free-form conf map entries override/extend the settings above.
            if (conf.getConfMap() != null && !conf.getConfMap().isEmpty()) {
                for (Map.Entry<String, String> kv : conf.getConfMap().entrySet()) {
                    launcher.setConf(kv.getKey(), kv.getValue());
                }
            }
            // YARN
            if (StringUtils.isNotBlank(conf.getYarnAmWaitTime())) {
                // Bug fix: the configured value was previously ignored in favor of a
                // hard-coded "300000".
                launcher.setConf(SparkContext.Yarn.YARN_AM_WAIT_TIME.key, conf.getYarnAmWaitTime());
            }
            if (StringUtils.isNotBlank(conf.getYarnJars())) {
                launcher.setConf(SparkContext.Yarn.YARN_JARS.key, conf.getYarnJars());
            }

            // Streaming
            launcher.setConf(SparkContext.Streaming.STREAMING_RECEIVER_MAX_RATE.key, conf.getStreamingReceiverMaxRate());
            launcher.setConf(SparkContext.Streaming.STREAMING_STOP_GRACE_FULLY_ON_SHUTDOWN.key, conf.getStreamingStopGracefullyOnShutdown());
            launcher.setConf(SparkContext.Streaming.STREAMING_BACKPRESSURE_ENABLED.key, conf.getStreamingBackpressureEnabled());
            launcher.setConf(SparkContext.Streaming.STREAMING_BACKPRESSURE_INITIALRATE.key, conf.getStreamingBackpressureInitialRate());
            launcher.setConf(SparkContext.Streaming.STREAMING_KAFKA_MAX_RATE_PER_PARTITION.key, conf.getStreamingKafkaMaxRatePerPartition());

            try {
                // Block the calling thread until the Spark application reaches a final state.
                CountDownLatch countDownLatch = new CountDownLatch(1);
                SparkAppHandle handle = launcher.startApplication(new SparkAppHandle.Listener() {
                    @Override
                    public void stateChanged(SparkAppHandle handle) {
                        System.out.printf("AppId=%s,State=%s,isFinal=%s\n", handle.getAppId(), handle.getState().toString(), handle.getState().isFinal());
                        if (handle.getState().isFinal()) {
                            countDownLatch.countDown();
                        }
                    }

                    @Override
                    public void infoChanged(SparkAppHandle handle) {
                        System.out.printf("AppId=%s,State=%s,isFinal=%s\n", handle.getAppId(), handle.getState().toString(), handle.getState().isFinal());
                    }
                });
                countDownLatch.await();
            } catch (Exception e) {
                result = ResultStatus.FAILURE;
                if (e instanceof InterruptedException) {
                    // Restore the interrupt flag so callers can observe the interruption.
                    Thread.currentThread().interrupt();
                }
                e.printStackTrace();
                System.out.println(ExceptionUtils.getFullStackTrace(e));
            }
        }

        try {
            afterComputing();
        } catch (Exception e) {
            e.printStackTrace();
            // Bug fix: println, not printf — a '%' inside a stack trace would make
            // printf throw UnknownFormatConversionException.
            System.out.println(ExceptionUtils.getFullStackTrace(e));
        }
        System.out.printf("结束%s,workingMode=【%s】%s\n", threadId, workingMode, SparkContext.WorkingMode.parse(workingMode).title);
        return result;
    }


}
