package com.feidee.data.report.dao.impl;

import com.feidee.data.report.constant.Constant;
import com.feidee.data.report.dao.ISparkDao;
import com.feidee.data.report.util.ToolUtils;
import com.feidee.fdcommon.configuration.CustomConfiguration;
import org.apache.log4j.Logger;

import java.util.HashMap;
import java.util.Map;

/**
 * DAO for running report queries against a Spark engine.
 *
 * <p>Delegates lifecycle handling to {@link BaseReportQueryDao} and supplies
 * the Spark-specific JDBC connection settings in {@link #setConfiguration()}.
 * The fields written here ({@code engineFlag}, {@code driverStr},
 * {@code sparkConf}) are inherited from the base class — presumably consumed
 * when connections are opened; confirm against {@code BaseReportQueryDao}.
 */
public class SparkDaoImpl extends BaseReportQueryDao implements ISparkDao {

    // final: the logger is assigned once and never rebound.
    private static final Logger logger = Logger.getLogger(SparkDaoImpl.class);

    @Override
    public void setup() {
        // No Spark-specific initialization; delegate to the base class.
        super.setup();
    }

    @Override
    public void cleanup() {
        // No Spark-specific teardown; delegate to the base class.
        super.cleanup();
    }

    /**
     * Populates the inherited connection configuration for the Spark engine.
     *
     * <p>Reads the JDBC driver class from {@code hive.jdbc.driverClassName}
     * (falling back to {@code Constant.HIVE_DRIVER}), then loads one
     * url/username/password triple per Spark endpoint. Endpoint property keys
     * are suffixed {@code _1 .. _n} where {@code n} comes from
     * {@code spark.url.num}; each triple is stored in {@code sparkConf}
     * keyed by its 1-based index.
     */
    public void setConfiguration() {
        engineFlag = Constant.SPARK_ENG;

        // Double negative is intentional: ToolUtils.isNotNull is the
        // project-wide "has a usable value" check; when the property is
        // missing we fall back to the default Hive driver class.
        driverStr = CustomConfiguration.getString("hive.jdbc.driverClassName");
        if (!ToolUtils.isNotNull(driverStr)) {
            driverStr = Constant.HIVE_DRIVER;
        }

        // Number of configured Spark JDBC endpoints to load.
        int sparkNum = CustomConfiguration.getInt("spark.url.num");

        for (int i = 1; i <= sparkNum; i++) {
            Map<String, String> conf = new HashMap<>();
            conf.put("url", CustomConfiguration.getString("spark.jdbc.url_" + i, Constant.SPARK_CONNECTION));
            conf.put("username", CustomConfiguration.getString("spark.jdbc.username_" + i, Constant.SPARK_USER));
            conf.put("password", CustomConfiguration.getString("spark.jdbc.password_" + i, Constant.SPARK_PWD));
            sparkConf.put(i, conf);
        }
    }

}
