package com.navinfo.platform.common.dto;

import com.navinfo.platform.common.service.ICommonService;
import com.navinfo.platform.common.service.IDataHandleService;
import com.navinfo.platform.common.service.ILoadDataService;
import com.navinfo.platform.common.service.impl.CommonService;
import org.apache.commons.lang.StringUtils;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.sql.SparkSession;

import java.util.List;
import java.util.Map;
import java.util.Set;

/**
 * Template base class for Spark batch applications. Subclasses provide the
 * configuration file name, a data-loading service and a data-handling service;
 * {@link #start(String[])} wires them together around a managed SparkSession.
 *
 * @author gx
 */
public abstract class BaseApplication {

    /**
     * Template method driving the whole job: initializes configuration,
     * builds a SparkSession, loads the input data via the subclass-provided
     * loader, and passes it to the subclass-provided handler.
     *
     * <p>Exceptions are caught and printed rather than propagated, so the
     * caller's contract is "never throws"; the session is always stopped.
     *
     * @param args command-line arguments, forwarded to {@link #initConfig(String[])}
     */
    @SuppressWarnings("unchecked")
    protected void start(String[] args) {
        SparkSession spark = null;
        try {
            initConfig(args);
            spark = getSpark().getOrCreate();
            Map<String, String> config = getConfig();
            ILoadDataService loadDataService = getLoadDataService();
            List<Object> datas = loadDataService.loadData(spark, config);
            if (datas == null) {
                // No input to process; fall through to finally so the session is stopped.
                return;
            }
            IDataHandleService dataHandleService = getDataHandleService();
            dataHandleService.handle(datas, config, spark);
        } catch (Exception ex) {
            // NOTE(review): failures are swallowed here (job exits "successfully"
            // after printing the trace). Consider a real logger and/or a non-zero
            // exit code if callers need to detect failure.
            ex.printStackTrace();
        } finally {
            // BUGFIX: 'spark' is still null when initConfig() or getOrCreate()
            // throws; the previous unconditional spark.stop() raised a
            // NullPointerException from finally that masked the original error.
            if (spark != null) {
                spark.stop();
            }
        }
    }

    /**
     * Loads job configuration from the subclass-named config file plus any
     * command-line overrides, via the shared common service.
     *
     * @param args command-line arguments passed through to the common service
     */
    protected void initConfig(String[] args) {
        getCommonService().initConfig(getFileName(), args);
    }

    /** @return the name of the configuration file this application reads */
    protected abstract String getFileName();

    /** @return the service that processes the loaded data */
    protected abstract IDataHandleService getDataHandleService();

    /**
     * @return the shared common service singleton; override to substitute
     *         another implementation (e.g. for testing)
     */
    protected ICommonService getCommonService() {
        return CommonService.INSTANCE;
    }

    /** @return the service that loads this application's input data */
    protected abstract ILoadDataService getLoadDataService();

    /** @return the current configuration key/value map from the common service */
    protected Map<String, String> getConfig(){
        return getCommonService().getConfig();
    }

    /**
     * Builds a SparkSession.Builder pre-populated from configuration:
     * every config key starting with "spark" is applied verbatim, local
     * master is forced when {@code run.env=local}, _SUCCESS marker files
     * are disabled, and Hive support is enabled.
     *
     * @return a configured builder; callers invoke {@code getOrCreate()} on it
     */
    protected SparkSession.Builder getSpark() {
        SparkSession.Builder sparkSessionBuilder = SparkSession.builder();
        Set<Map.Entry<String, String>> entrySet = getConfig().entrySet();
        for (Map.Entry<String, String> entry : entrySet) {
            String key = entry.getKey();
            String value = entry.getValue();
            // Only keys in the "spark*" namespace are forwarded to Spark.
            if (!key.startsWith("spark")) {
                continue;
            }
            sparkSessionBuilder.config(key, value);
        }
        // Local development runs single-JVM; null-safe comparison since the
        // "run.env" key may be absent.
        String env = getConfig().get("run.env");
        if (StringUtils.equals(env, "local")) {
            sparkSessionBuilder.config("spark.master", "local");
        }
        // Suppress _SUCCESS marker files in job output directories.
        sparkSessionBuilder.config("mapreduce.fileoutputcommitter.marksuccessfuljobs", "false");
        sparkSessionBuilder.enableHiveSupport();
        return sparkSessionBuilder;
    }


}
