package cn.com.cennavi.config;

import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.Properties;

import org.apache.hadoop.conf.Configuration;
import org.apache.spark.SparkConf;

import cn.com.cennavi.util.sparkUtils;

/**
 * Global configuration loader.
 *
 * @ClassName: GlobalConfig
 * @Description: Reads ./configfiles/sparkconfig.properties and exposes the
 *               settings as a ready-to-use SparkConf and Hadoop Configuration.
 * @author buf
 * @date 2017年2月6日 下午3:17:44
 *
 */
public class GlobalConfig implements Serializable {

	private static final long serialVersionUID = 1L;

	/** Spark configuration built from sparkconfig.properties; remains null if loading fails. */
	public SparkConf sparkconf = null;
	/** Hadoop/HDFS configuration built from sparkconfig.properties; remains null if loading fails. */
	public Configuration hdfsconf = null;

	/**
	 * Property keys copied verbatim from the properties file into the SparkConf.
	 * Keeping them in one table removes ~17 duplicated set(...) lines and lets a
	 * missing key be skipped instead of triggering an uncaught NullPointerException
	 * from SparkConf.set(key, null).
	 */
	private static final String[] SPARK_KEYS = {
			// standalone mode: spark.cores.max caps the cores per application
			"spark.executor.instances",
			"spark.executor.cores",
			"spark.executor.memory",
			"spark.driver.memory",
			"spark.driver.maxResultSize",
			"spark.memory.fraction",
			"spark.memory.storageFraction",
			"spark.storage.blockManagerSlaveTimeoutMs",
			"spark.executor.extraJavaOptions",
			// speculative execution tuning
			"spark.speculation.interval",
			"spark.speculation.quantile",
			"spark.speculation.multiplier",
			"spark.speculation",
			"spark.serializer",
			"spark.kryo.registrator",
			// on linux ext4 file systems, keeping this true improves shuffle performance
			"spark.shuffle.consolidateFiles",
			"spark.streaming.unpersist",
			"spark.network.timeout",
			"spark.executor.heartbeatInterval",
	};

	/** Keys copied verbatim from the properties file into the Hadoop Configuration. */
	private static final String[] HDFS_KEYS = {
			"mapred.output.compress",
			"mapred.output.compression.codec",
	};

	/**
	 * Loads ./configfiles/sparkconfig.properties and populates {@link #sparkconf}
	 * and {@link #hdfsconf}. On an I/O failure the stack trace is printed and both
	 * fields stay null (matching the original best-effort behavior).
	 */
	public GlobalConfig() {
		Properties props = new Properties();
		// try-with-resources guarantees the stream is closed (the original leaked it)
		try (FileInputStream in = new FileInputStream("./configfiles/sparkconfig.properties")) {
			props.load(in);
			sparkconf = new SparkConf();

			// NOTE(review): sparkUtils.quietly presumably expands the jar folder into
			// a jar list, falling back to the supplied empty list — confirm in sparkUtils.
			ArrayList<String> list = sparkUtils.quietly(props.getProperty("jarFileFolder"), new ArrayList<String>());
			sparkconf.setJars(list.toArray(new String[list.size()]));
			System.out.println(list.toString());
			sparkconf.setMaster(props.getProperty("spark.master"));
			sparkconf.setAppName(props.getProperty("jobName"));

			for (String key : SPARK_KEYS) {
				setSparkIfPresent(props, key);
			}

			hdfsconf = new Configuration();
			for (String key : HDFS_KEYS) {
				setHdfsIfPresent(props, key);
			}
		} catch (IOException e) {
			// FileNotFoundException is an IOException, so one narrow catch covers both
			// original catch blocks. Intentionally best-effort: log and leave fields null.
			e.printStackTrace();
		}
	}

	/** Copies {@code key} into the SparkConf only when the properties file defines it. */
	private void setSparkIfPresent(Properties props, String key) {
		String value = props.getProperty(key);
		if (value != null) {
			sparkconf.set(key, value);
		}
	}

	/** Copies {@code key} into the Hadoop Configuration only when the properties file defines it. */
	private void setHdfsIfPresent(Properties props, String key) {
		String value = props.getProperty(key);
		if (value != null) {
			hdfsconf.set(key, value);
		}
	}

	/** Smoke test: load the configuration and print the configured master URL. */
	public static void main(String[] args) {
		System.out.println(new GlobalConfig().sparkconf.get("spark.master"));
	}

}
