package com.feidee.fdhadoop.spark;

import com.feidee.fdcommon.configuration.CustomConfiguration;
import com.feidee.fdcommon.constant.CommonConstant;
import com.feidee.fdcommon.util.ToolUtils;
import com.feidee.fdhadoop.hdfs.HdfsUtils;
import com.feidee.fdhadoop.kerberos.KerberosAuthentication;
import org.apache.log4j.Logger;
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaSparkContext;

import java.util.Map;
import java.util.Properties;

/**
 * Factory helpers for building {@link SparkConf} instances, including the
 * Kerberos/JAAS wiring required for non-local masters in the test/dev
 * environments.
 */
public class SparkUtils {
	private static final Logger logger = Logger.getLogger(SparkUtils.class);

	/** Master values containing this substring skip the Kerberos/YARN security setup. */
	public static final String SPARK_MASTER_LOCAL = "local";

	private SparkUtils() {
		// Utility class — not meant to be instantiated.
	}

	/**
	 * Builds a SparkConf with neither master nor app name set
	 * (both left to spark-submit / defaults).
	 *
	 * @return the configured SparkConf
	 */
	public static SparkConf getConf() {
		return getConf(null, null);
	}

	/**
	 * Builds a SparkConf with the given application name.
	 *
	 * @param appName Spark application name; ignored when null/empty
	 * @return the configured SparkConf
	 */
	public static SparkConf getConf(String appName) {
		return getConf(null, appName);
	}

	/**
	 * Builds a SparkConf, optionally setting master and app name. For non-local
	 * masters running in the test or dev environment, also performs Kerberos
	 * authentication and configures the YARN principal/keytab plus the JAAS
	 * config and keytab files shipped to executors (used for Kafka SASL login).
	 *
	 * @param master  Spark master URL; ignored when null/empty
	 * @param appName Spark application name; ignored when null/empty
	 * @return the configured SparkConf
	 */
	public static SparkConf getConf(String master, String appName) {
		SparkConf sparkConf = new SparkConf();
		String tmpMaster = "";
		if (ToolUtils.isNotNull(master)) {
			sparkConf.setMaster(master);
			tmpMaster = master;
		}
		if (ToolUtils.isNotNull(appName)) {
			sparkConf.setAppName(appName);
		}
		// Read the environment once (original queried CustomConfiguration twice).
		String env = CustomConfiguration.getString(CommonConstant.ENV_KEY);
		boolean securedEnv = CommonConstant.ENV_TEST.equals(env) || CommonConstant.ENV_DEV.equals(env);
		if (!tmpMaster.contains(SPARK_MASTER_LOCAL) && securedEnv) {
			KerberosAuthentication.authenticate();
			// Driver-side JAAS config for the Kerberos/SASL login module.
			System.setProperty("java.security.auth.login.config", KerberosAuthentication.path + "kafka_spark_jaas.conf");
			sparkConf.set("spark.yarn.principal", "data_team")
					.set("spark.yarn.keytab", KerberosAuthentication.path + "new_data_team.keytab")
					// Ship the JAAS conf and keytab into each executor's working directory,
					// where the relative ./kafka_spark_jaas.conf paths below resolve them.
					.set("spark.files", KerberosAuthentication.path + "kafka_spark_jaas.conf," + KerberosAuthentication.path + "new_kafka_spark.keytab")
					.set("spark.executor.extraJavaOptions", "-Djava.security.auth.login.config=./kafka_spark_jaas.conf")
					.set("spark.driver.extraJavaOptions", "-Djava.security.auth.login.config=./kafka_spark_jaas.conf");
		}
		return sparkConf;
	}
}