package com.calabar.phm.etl.driver.spark;

import com.calabar.phm.etl.driver.spark.exception.DriverException;
import com.calabar.phm.etl.driver.spark.exception.ExceptionCodeEnum;
import com.calabar.phm.etl.driver.spark.exception.OperatorParamsValidateException;
import com.calabar.phm.etl.driver.spark.exception.OperatorRunningException;
import com.google.gson.Gson;
import com.google.gson.GsonBuilder;
import com.google.gson.JsonSyntaxException;
import org.apache.commons.codec.binary.Base64;
import org.apache.commons.configuration.Configuration;
import org.apache.commons.configuration.PropertiesConfiguration;
import org.apache.commons.lang.exception.ExceptionUtils;
import org.apache.log4j.Logger;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SparkSession;
import org.apache.spark.storage.StorageLevel;

import java.nio.charset.StandardCharsets;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

/**
 * @Author zmc <mingcheng.zhang@cdcalabar.com>
 * @Date 17-5-24 下午1:17
 * @Version v1.0
 * @Des spark驱动
 */
public class Main {
    private static final Logger LOG = Logger.getLogger(Main.class);

    /** Job id (second CLI argument). */
    private static String jobId;
    /** Sub-job id (third CLI argument). */
    private static String subJobId;
    /** Job parameters: Base64-encoded JSON (fourth CLI argument), decoded in checkParams. */
    private static String jobParams;
    /** Unique id of the operator that caused a failure, if any. */
    private static String operatorUnique;

    /** Error message recorded when the driver fails. */
    private static String errMsg;
    /** Driver run state (codes from DriverStateEnum). */
    private static int driverState;
    /** Error code recorded on failure (codes from ExceptionCodeEnum). */
    private static int code;
    /** Resolved classpath location of driver.properties. */
    private static String DRIVER_PROPERTIES_PATH;
    /** Persists job state and error logs to the database; created in checkParams. */
    private static Recorder recorder;
    /** True when this job/sub-job pair was already submitted (duplicate-submit guard). */
    private static boolean submitted;
    /** True once driver.properties was loaded successfully. */
    private static boolean inited;
    /** True once the CLI arguments passed validation (implies recorder != null). */
    private static boolean checked;
    /** JSON (de)serializer; Gson instances are immutable and reusable. */
    private static final Gson gson = new GsonBuilder().setPrettyPrinting().create();
    /** Driver runtime configuration loaded from driver.properties. */
    private static Configuration driverConf;

    /**
     * Driver entry point. Expected arguments:
     * {@code <dbParams(Base64 JSON)> <jobId> <subJobId> <jobParams(Base64 JSON)> [local]}.
     *
     * <p>Flow: load driver config, validate args, reject duplicate submissions,
     * rebuild the operator DAG, start a Spark session, run the DAG, and record
     * the final state. On failure the state/error is persisted (when possible)
     * and the process exits with status 1.
     *
     * @param args CLI arguments as described above
     * @throws DriverException declared for callers; in practice failures are
     *                         caught, recorded, and converted to System.exit(1)
     */
    public static void main(String[] args) throws DriverException {

        try {
            // Load driver.properties from the classpath.
            initConfiguration();

            // Validate/decode the CLI arguments and create the Recorder.
            checkParams(args);

            // Reject duplicate submissions of the same job/sub-job pair.
            submitted = recorder.isSubmitted(jobId, subJobId);
            if (submitted) {
                LOG.warn("请勿重复提交任务，jobId:" + jobId + " subJobId:" + subJobId);
                return;
            }

            // Parse the job JSON into a Params object.
            Params params = getParams();

            // Rebuild the operator DAG from the declared connections.
            DAG dag = conn2DAG(params);

            // Record the Accepted state before the Spark session is created.
            recorder.insertJobState(jobId, subJobId, DriverStateEnum.ACCEPTED.get_code());
            LOG.info("记录任务状态为：Accepted");

            // Build the Spark session.
            SparkSession.Builder builder = SparkSession.builder();
            builder.config("spark.serializer", "org.apache.spark.serializer.KryoSerializer");
            builder.config("spark.scheduler.mode", "FAIR");

            // Apply per-job Spark settings; may be absent in the job JSON,
            // so guard against null to avoid a misclassified NPE.
            Map<String, String> sparkEnv = params.getSparkEnv();
            if (sparkEnv != null) {
                for (Map.Entry<String, String> kv : sparkEnv.entrySet()) {
                    builder.config(kv.getKey(), kv.getValue());
                }
            }

            // Enable Hive support.
            builder.config("spark.sql.warehouse.dir",
                    driverConf.getString(Constants.SPARK_SQL_WAREHOUSE_DIR))
// Parquet was tried and rejected because it cannot guarantee row order
// (disabling convertMetastoreParquet did not help); re-test after a Spark upgrade.
//                        .config("spark.sql.hive.convertMetastoreParquet",false)
                    .enableHiveSupport();

            // Local test mode: literal "local" as the fourth argument.
            if (args.length > 3 && "local".equals(args[3])) {
                builder.master("local[4]");
                LOG.info("本地运行模式！");
            }

            SparkSession session = builder.getOrCreate();

            String appId = session.sparkContext().applicationId();
            recorder.updateAppID(jobId, subJobId, appId);
            LOG.info("app id : " + appId);

            // Record the Submitted state.
            recorder.updateJobState(jobId, subJobId, DriverStateEnum.SUBMITTED.get_code());
            LOG.info("更新任务状态：Submitted");

            // Translate the DAG into Spark transformations and execute it.
            transform(dag, session);

        } catch (OperatorRunningException e) {
            LOG.error(ExceptionUtils.getFullStackTrace(e));
            operatorUnique = e.getOperatorUnique();
            errMsg = e.getMessage();
            code = e.getCode();
            driverState = DriverStateEnum.FAILED.get_code();
        } catch (OperatorParamsValidateException e) {
            LOG.error(ExceptionUtils.getFullStackTrace(e));
            operatorUnique = e.getOperatorUnique();
            errMsg = e.getMessage();
            code = e.getCode();
            driverState = DriverStateEnum.FAILED.get_code();
        } catch (DriverException e) {
            LOG.error(ExceptionUtils.getFullStackTrace(e));
            code = e.getCode();
            errMsg = e.getMessage();
            driverState = DriverStateEnum.FAILED.get_code();
        } catch (Exception ex) {
            // Boundary catch-all: anything unexpected is recorded as OTHER_EXCEPTION.
            LOG.error(ExceptionUtils.getFullStackTrace(ex));
            errMsg = ex.getMessage();
            code = ExceptionCodeEnum.OTHER_EXCEPTION.get_code();
            driverState = DriverStateEnum.FAILED.get_code();
        } finally {
            if (driverState == DriverStateEnum.FAILED.get_code()) {
                // Only persist the failure when init and validation both completed
                // (recorder exists) and this is not a duplicate submission.
                // Fixed: the original used bitwise '&' instead of logical '&&'.
                if (inited && checked && !submitted) {
                    recorder.recordJobStateAndErrorLog(jobId, subJobId, driverState, code, errMsg, operatorUnique);
                    LOG.info("更新任务状态：Failed");
                }

                LOG.info("记录任务运行异常状况如下：\njobid = " + jobId
                        + "\nsubjobid = " + subJobId
                        + "\ndirverstate = " + driverState
                        + "\ncode = " + code
                        + "\nerrmsg = " + errMsg
                        + "\noperatorUnique = " + operatorUnique);
                LOG.error("任务异常退出！");
                System.exit(1);
            } else {
                if (!submitted) {
                    // Record the Finished state.
                    recorder.updateJobState(jobId, subJobId, DriverStateEnum.FINISHED.get_code());
                    LOG.info("更新任务状态：Finished");
                }
                LOG.info("任务运行结束！");
            }
        }
    }

    /**
     * Validates and decodes the CLI arguments, then creates the {@link Recorder}
     * from the decoded database parameters. Sets {@code checked} on success.
     *
     * @param args CLI arguments: dbParams (Base64 JSON), jobId, subJobId, jobParams (Base64 JSON)
     * @throws DriverException when arguments are missing, cannot be Base64-decoded,
     *                         or the database JSON is malformed
     */
    private static void checkParams(String[] args) throws DriverException {
        LOG.info("开始检查前端传递的参数！");
        if (args.length < 4) {
            throw new DriverException(ExceptionCodeEnum.DRIVER_CHECKPARAMS_EXCEPTION.get_code(),
                    "传递的参数数量缺少，请检查参数！ Usage<dbParams,jobid,subjobid,jobParams>");
        }

        String dbParams = args[0];
        jobId = args[1];
        subJobId = args[2];
        jobParams = args[3];

        // Decode the Base64 payloads. Charset is pinned to UTF-8: the JSON contains
        // non-ASCII text and the platform default charset is not reliable.
        try {
            dbParams = new String(Base64.decodeBase64(dbParams), StandardCharsets.UTF_8);
            jobParams = new String(Base64.decodeBase64(jobParams), StandardCharsets.UTF_8);
        } catch (Exception ex) {
            throw new DriverException(ExceptionCodeEnum.DRIVER_CHECKPARAMS_EXCEPTION.get_code(),
                    "解码Base64编码参数出现错误！", ex);
        }

        LOG.info("传递的参数为：\n"
                + "dbParams = " + dbParams
                + "\njobid = " + jobId
                + "\nsubjobid = " + subJobId
                + "\njobParams = " + jobParams);

        // Parse the database connection parameters.
        DBParams dbParam = null;
        try {
            dbParam = gson.fromJson(dbParams, DBParams.class);
            // NOTE(review): this logs the raw DB params, which presumably include
            // credentials — consider masking the password before logging.
            LOG.info("数据库参数:\n" + dbParams);
        } catch (JsonSyntaxException e) {
            throw new DriverException(ExceptionCodeEnum.DRIVER_CHECKPARAMS_EXCEPTION.get_code(),
                    "前端传递的数据库配置json字符串格式不正确！ \n" + dbParams, e);
        }
        recorder = new Recorder(dbParam.getUrl(), dbParam.getUserName(), dbParam.getUserPasswd());
        checked = true;
        LOG.info("前端传递参数检查完成！");
    }

    /**
     * Parses the decoded job JSON ({@code jobParams}) into a {@link Params} object.
     *
     * @return the parsed, non-null job parameters
     * @throws DriverException when the JSON is malformed or parses to null
     */
    private static Params getParams() throws DriverException {
        LOG.info("开始将任务json字符串转换为参数对象！");
        Params params = null;
        try {
            params = gson.fromJson(jobParams, Params.class);
            // Gson returns null for empty/"null" input; fail explicitly here rather
            // than letting params.toString() raise an NPE classified as OTHER_EXCEPTION.
            if (params == null) {
                throw new DriverException(ExceptionCodeEnum.DRIVER_CHECKPARAMS_EXCEPTION.get_code(),
                        "前端传递的job配置json字符串格式不正确！ \n" + jobParams);
            }
            LOG.info("任务参数:\n" + params.toString());
        } catch (JsonSyntaxException e) {
            throw new DriverException(ExceptionCodeEnum.DRIVER_CHECKPARAMS_EXCEPTION.get_code(),
                    "前端传递的job配置json字符串格式不正确！ \n" + jobParams, e);
        }
        LOG.info("将任务json字符串转换为参数对象完成！");
        return params;
    }

    /**
     * Loads driver.properties from the classpath into {@code driverConf}.
     * Sets {@code inited} on success.
     *
     * @throws DriverException when the file cannot be located or parsed
     */
    private static void initConfiguration() throws DriverException {
        LOG.info("开始加载驱动（driver.properties）参数！");
        try {
            DRIVER_PROPERTIES_PATH = ClassLoader.getSystemClassLoader()
                    .getResource("driver.properties").getFile();
            driverConf = new PropertiesConfiguration(DRIVER_PROPERTIES_PATH);
        } catch (Exception ex) {
            throw new DriverException(ExceptionCodeEnum.DRIVER_INIT_EXCEPTION.get_code(),
                    "加载驱动参数异常!", ex);
        }
        LOG.info("DRIVER_PROPERTIES_PATH = " + DRIVER_PROPERTIES_PATH);
        inited = true;
        LOG.info("加载驱动参数完成！");
    }

    /**
     * Rebuilds the operator DAG from the connection map in {@code params}.
     * Each entry maps a child operator id to the list of its parent ids;
     * empty/null parent ids mark input operators and are skipped.
     *
     * @param params parsed job parameters (operator map + connections)
     * @return the reconstructed DAG
     * @throws DriverException when a referenced operator id has no operator object
     */
    private static DAG conn2DAG(Params params) throws DriverException {
        LOG.info("开始将算子关系转化为DAG！");
        Map<String, Operator> operatorMap = params.getOperatorMap();
        Map<String, List<String>> connections = params.getConnection();

        DAG dag = new DAG();
        for (Map.Entry<String, List<String>> conn : connections.entrySet()) {
            String childrenUnique = conn.getKey();
            Operator childrenOperator = operatorMap.get(childrenUnique);
            if (childrenOperator == null) {
                throw new DriverException(ExceptionCodeEnum.DRIVER_CHECKPARAMS_EXCEPTION.get_code(),
                        "算子关系不正确！根据算子标识：" + childrenUnique + "无法找到算子对象！");
            }
            List<String> parentsUnique = conn.getValue();
            if (parentsUnique != null && parentsUnique.size() != 0) {
                for (String parentUnique : parentsUnique) {
                    if (parentUnique == null || parentUnique.isEmpty()) {
                        // Input operators have no parent; skip the placeholder entry.
                        continue;
                    }
                    Operator parentOperator = operatorMap.get(parentUnique);
                    if (parentOperator == null) {
                        throw new DriverException(ExceptionCodeEnum.DRIVER_CHECKPARAMS_EXCEPTION.get_code(),
                                "算子关系不正确！根据算子标识：" + parentUnique + "无法找到算子对象！");
                    }
                    dag.add(parentOperator);
                    dag.add(childrenOperator);
                    // NOTE: "connnct" is the (misspelled) name of the DAG API; kept as-is.
                    dag.connnct(parentOperator, childrenOperator);
                }
            }
        }
        LOG.info("将算子关系转化为DAG完成！");
        LOG.info("DAG:\n" + dag.toString());
        return dag;
    }


    /**
     * Walks the DAG in topological order (depth-first from the leaves) and turns
     * each operator into a Spark transformation, dispatching on its position:
     * roots are inputs, leaves are outputs, everything else is a computation.
     *
     * @param dag     operator graph to execute
     * @param session initialized Spark session
     * @throws OperatorParamsValidateException when an operator's parameters are invalid
     * @throws OperatorRunningException        when an operator fails at runtime
     */
    public static void transform(DAG dag, SparkSession session) throws OperatorParamsValidateException,
            OperatorRunningException {
        LOG.info("开始将DAG转化为Spark任务，提交到YARN！");
        // Results of already-visited operators, keyed by operator.
        Map<Operator, Dataset<Row>> transformed = new HashMap<>();
        for (Operator operator : dag.getAllWork()) {
            Dataset<Row> dataFrame = null;
            List<Operator> parents = dag.getParents(operator);
            int parentSize = parents.size();

            if (dag.isRoot(operator)) {
                // Root node: input operator, produces the initial dataset.
                dataFrame = operator.transform(session);
            } else if (dag.isLeaf(operator)) {
                // Leaf node: output operator; consumes its (first) parent's result.
                // NOTE(review): extra parents, if any, are ignored here — confirm
                // output operators are guaranteed a single parent.
                operator.transform(session, transformed.get(parents.get(0)));
            } else {
                // Intermediate node: computation over all parent results.
                Dataset<Row>[] dataFrames = new Dataset[parentSize];
                for (int i = 0; i < parentSize; i++) {
                    dataFrames[i] = transformed.get(parents.get(i));
                }
                dataFrame = operator.transform(session, dataFrames);
            }

            // Cache results that feed more than one child to avoid recomputation.
            int childSize = dag.getChildren(operator).size();
            if (childSize > 1) {
                dataFrame.persist(StorageLevel.MEMORY_AND_DISK_SER());
                LOG.info("算子节点（" + operator.getOperatorUnique() + ")开启缓存！");
            }

            // Remember this operator's result for its children.
            transformed.put(operator, dataFrame);
        }
    }

}
