package com.ruijie.bigdata.executor.jobhandler;

import com.xxl.job.core.biz.model.ReturnT;
import com.xxl.job.core.handler.IJobHandler;
import com.xxl.job.core.handler.annotation.JobHandler;
import com.xxl.job.core.log.XxlJobLogger;

import java.io.*;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.text.SimpleDateFormat;
import java.time.LocalDateTime;
import java.time.format.DateTimeFormatter;
import java.util.Date;
import java.util.Properties;

import org.apache.spark.launcher.SparkAppHandle;
import org.apache.spark.launcher.SparkLauncher;
import org.springframework.stereotype.Component;

/**
 * Created by LinQing on 2018/7/4.
 */

@JobHandler(value = "sparkJobHandler")
@Component
public class SparkJobHandler extends IJobHandler {

    @Override
    public ReturnT<String> execute(String param) throws Exception {

        String submit_data = new SimpleDateFormat("YYYY-MM-dd_HH:mm:ss").format(new Date());
        XxlJobLogger.log(String.format("Run SparkJobHandler at %s", submit_data));
        XxlJobLogger.log(String.format("Get param is %s", param));

        // TODO: 检查配置文件是否存在,格式是否合法
        Properties app_config = parseAppConfigure(param);
        XxlJobLogger.log(app_config.getProperty(Constants.RJ_RES_PATH));

        SparkLauncher sparkLauncher = new SparkLauncher()
                .setMaster(app_config.getProperty(Constants.SPARK_MASTER, Constants.SPARK_MASTER_DEFAULT))
                .setDeployMode(app_config.getProperty(Constants.SPARK_SUBMIT_DEPLOYMODE, Constants.SPARK_SUBMIT_DEPLOYMODE_DEFAULT))
                .setSparkHome(app_config.getProperty(Constants.RJ_SPARK_HOME, Constants.RJ_SPARK_HOME_DEFAULT))
                .setAppName(String.format("%s-%s", app_config.getProperty(Constants.RJ_APP_NAME), submit_data))
                .setAppResource(app_config.getProperty(Constants.RJ_RES_PATH))
                .setMainClass(app_config.getProperty(Constants.RJ_MAIN_CLASS));

        for (Object key : app_config.keySet()) {
            String key_str = (String) key;
            if (key_str.startsWith(Constants.RJ_APP_ARR_PRE))
                sparkLauncher.addAppArgs(app_config.getProperty(key_str));
            if (key_str.startsWith(Constants.RJ_SPARK_ARR_PRE))
                sparkLauncher.setConf(key_str, app_config.getProperty(key_str));
        }

        SparkAppHandle sparkAppHandle = sparkLauncher.startApplication(new SparkAppHandle.Listener() {
            @Override
            public void stateChanged(SparkAppHandle sparkAppHandle) {
                XxlJobLogger.log(String.format("%s status is changed to  %s.!", sparkAppHandle.getAppId(), sparkAppHandle.getState().toString()));
            }

            @Override
            public void infoChanged(SparkAppHandle sparkAppHandle) {
                /***
                 * Do nothing when info Changed!
                 */
            }
        });

        long timeout = 60 * 1000 * Long.parseLong(app_config.getProperty(Constants.RJ_APP_TIMEOUT_IN_MIN, Constants.RJ_APP_TIMEOUT_IN_MIN_DEFAULT));
        long time_elapsed = 0;
        int try_stop = 1;

        while (true) {
            try {
                SparkAppHandle.State appstatus = sparkAppHandle.getState();
                if (appstatus != null && appstatus.toString() != "null" && appstatus.isFinal()) {
                    ReturnT<String> isSuccess = (appstatus.equals(SparkAppHandle.State.FINISHED)) ? SUCCESS : FAIL;
                    sparkAppHandle.disconnect();
                    return isSuccess;
                } else {
                    /***
                     * TODO:使用定时器进行超时关闭
                     */
                    XxlJobLogger.log(String.format("%s status is : %s. ", sparkAppHandle.getAppId(), sparkAppHandle.getState()));
                    Thread.sleep(Constants.RJ_STATUS_CHECK);
                    time_elapsed += Constants.RJ_STATUS_CHECK;
                    if (timeout > 0 && time_elapsed >= timeout) {
                        if (!tryStopApp(sparkAppHandle))
                            killApp(sparkAppHandle);
                        return FAIL;
                    }

                }
            } catch (InterruptedException e) {
                XxlJobLogger.log(String.format("Get InterruptedException for user. Try to stop %s!", sparkAppHandle.getAppId()));
                e.printStackTrace();
            }
        }
    }

    /**
     * 尝试停止App，最多尝试五次
     *
     * @param sparkAppHandle
     * @return
     * @throws InterruptedException
     */
    public static boolean tryStopApp(SparkAppHandle sparkAppHandle) throws InterruptedException {
        for (int try_stop_time = 1; try_stop_time <= Constants.RJ_TRY_STOP_TIME; try_stop_time++) {
            XxlJobLogger.log(String.format("%s status is timeout, try to stop it! Try %s .", sparkAppHandle.getAppId(), try_stop_time));
            sparkAppHandle.stop();
            Thread.sleep(Constants.RJ_TRY_STOP_INTERVAL);
            if (sparkAppHandle.getState().isFinal())
                return true;
        }
        return false;
    }

    /**
     * Kill App,并关闭SparkAppHandler连接
     *
     * @param sparkAppHandle
     */
    public static void killApp(SparkAppHandle sparkAppHandle) {
        XxlJobLogger.log(String.format("can't stop %s, try to Kill it! And close SparkAppHandle.", sparkAppHandle.getAppId()));
        sparkAppHandle.kill();
        sparkAppHandle.disconnect();
    }

    public static Properties parseAppConfigure(String app_name) {
        Properties config = new Properties();
        InputStream in;
        try {
            Path app_configfile_name = Paths.get(Constants.RJ_APP_CONF_FILE_BASE, String.format("%s.properties", app_name));
            in = new BufferedInputStream(new FileInputStream(new File(app_configfile_name.toString())));
            config.load(in);
            in.close();
        } catch (FileNotFoundException e) {
            XxlJobLogger.log(e.toString());
            e.printStackTrace();
        } catch (IOException e) {
            XxlJobLogger.log(e.toString());
            e.printStackTrace();
        }
        return config;
    }
}
