package com.dukk.espark.core;


import cn.hutool.core.io.FileUtil;
import com.alibaba.fastjson.JSONObject;
import com.dukk.espark.utils.ConfigKit;
import org.reflections.Reflections;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.Serializable;
import java.util.HashMap;
import java.util.Map;
import java.util.Set;

/**
 * spark引擎核心入口层，通过此入口加载引用业务spark计算任务
 *
 * @version 2020-08-06
 * @author shengshi_feiyang@yeah.net
 */
public class EngineCore implements Serializable {

    private static final Logger logger = LoggerFactory.getLogger(EngineCore.class);
    private static final long serialVersionUID = -6902670029504862581L;

    public EngineCore(){}

    /**
     * Parses the launch arguments, locates the {@link Engine}-annotated task class whose
     * simple name matches {@code appName} (case-insensitive), wires it up with the Spark
     * context/config, and runs it. The Spark context is always closed in {@code finally}.
     *
     * @param args 脚本输入的配置参数  {\"appName\":\"GeoJsonApp\",\"commonPropertiesPath\":\"/home/easy-spark/cluster/config/config.properties\"}
     *             (cluster mode when non-empty; args[0] is the JSON string above)
     * @param debugAppName 开发调试应用名称 — app name used in local debug mode (no args)
     */
    public void start(String[] args, String debugAppName){
        String appName;
        String confPath;
        boolean isDebug = false;
        JSONObject jsonObject = new JSONObject();
        if(null != args && args.length > 0){ // cluster mode
            jsonObject = JSONObject.parseObject(args[0]);
            logger.info("外部参数:{}", JSONObject.toJSONString(jsonObject));
            confPath = jsonObject.getString("commonPropertiesPath");
            appName = jsonObject.getString("appName");

            if(!FileUtil.exist(confPath)){
                // Reported here for easier diagnosis; ConfigKit below will most likely
                // fail on the missing file. (Original behavior kept: log and continue.)
                logger.error("配置文件不存在,请检查路径配置,路径[{}]", confPath);
            }

        }else{ // local debug mode
            confPath = "config.properties";
            appName = debugAppName;
            isDebug = true;
        }

        // Guard: appName may be absent from the JSON (or debugAppName may be null);
        // without this, appName.toLowerCase() below throws NPE.
        if(null == appName || appName.isEmpty()){
            logger.error("---------------未指定appName，启动脚本请输入正确的appName--------------");
            return;
        }

        // Scan the project packages for task classes annotated with @Engine and
        // index them by lower-cased simple class name for case-insensitive lookup.
        Reflections reflections = new Reflections("com.dukk.espark.*");
        Set<Class<?>> engineClasses = reflections.getTypesAnnotatedWith(Engine.class);
        Map<String, Class<?>> engineClassMap = new HashMap<>();
        for(Class<?> engine : engineClasses){
            engineClassMap.put(engine.getSimpleName().toLowerCase(), engine);
        }

        Class<?> engineClass = engineClassMap.get(appName.toLowerCase());
        if(null == engineClass){
            // Fix: the original only logged here and fell through, then dereferenced
            // the null class reference at newInstance() -> guaranteed NPE.
            logger.error("---------------启动任务不存在，启动脚本请输入正确的appName--------------");
            return;
        }

        ConfigKit configKit = new ConfigKit(confPath);
        SparkCtx sparkCtx = new SparkCtx(configKit, appName, isDebug);

        DbFactory dbFactory = new DbFactory(sparkCtx.getSparkSession().sqlContext(), configKit);

        try {
            logger.info("-----------启动任务开始，统计任务名称[{}],配置文件路径[{}]-----------------", appName, confPath);

            // getDeclaredConstructor().newInstance() replaces the deprecated
            // Class.newInstance(), which propagated the constructor's checked
            // exceptions unchecked; both paths are covered by catch(Exception).
            SparkBase sparkBase = (SparkBase) engineClass.getDeclaredConstructor().newInstance();
            sparkBase.setConfigKit(configKit);
            sparkBase.setSparkSession(sparkCtx.getSparkSession());
            sparkBase.setJavaSparkContext(sparkCtx.getJavaSparkContext());
            sparkBase.setSqlContext(sparkCtx.getSparkSession().sqlContext());
            sparkBase.setDbFactory(dbFactory);
            sparkBase.doAi(jsonObject);

        } catch (Exception e) {
            // SLF4J: pass the Throwable as the LAST argument with no {} placeholder
            // so the full stack trace is logged (the original used a placeholder).
            logger.error("----------------任务名称[{}], 启动实例化任务失败------------------", appName, e);
        }finally {
            try{
                sparkCtx.close();
            }catch (Exception e){
                // Log the full Throwable, not just getMessage(), to keep the stack trace.
                logger.error("关闭spark对象失败", e);
            }
        }

    }


}
