package cn.com.cennavi.config;

import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.Properties;

import org.apache.hadoop.conf.Configuration;
import org.apache.spark.SparkConf;

import cn.com.cennavi.util.sparkUtils;

/**
 * Global Spark configuration, loaded from ./configfiles/sparkconfig.properties.
 * 
 * @ClassName: SparkConfigFile
 * @Description: Builds the SparkConf and Hadoop Configuration objects used by the job.
 * @author buf
 * @date 2017-02-06 15:17:44
 *
 */
public class SparkConfigFile implements Serializable {
	/**
	 * Serialization version. Must be {@code static final} to be honored by Java
	 * serialization; the original declared it as a non-static instance field,
	 * which the serialization machinery silently ignores.
	 */
	public static final long serialVersionUID = 1L;

	/** Spark configuration; {@code null} until {@link #init()} has run. */
	public SparkConf sparkconf = null;
	/** Hadoop/HDFS configuration; {@code null} until {@link #init()} has run. */
	public Configuration hdfsconf = null;
	/** Spark Streaming batch interval in seconds; -1 until loaded from the properties file. */
	public Long SparkStreamDurationsSeconds = -1L;
	/** Spark Streaming checkpoint directory; {@code null} until loaded from the properties file. */
	public String SparkStreamCheckPointPath = null;
	/** Partition count used when running jobs (default 90, overridden by spark.runPartation). */
	public int runPartation = 90;
	/**
	 * Window length and slide interval (seconds) for reduceByKeyAndWindow.
	 */
	public int windowDuration, slideDuration;

	/**
	 * Spark settings copied verbatim from the properties file into {@link #sparkconf}
	 * by {@link #init()}, and echoed by {@link #getSparkConf()}. Order matters only
	 * for the debug printout, which mirrors the original code's print order.
	 */
	private static final String[] PASS_THROUGH_SPARK_KEYS = {
			"spark.executor.instances",
			"spark.executor.cores",
			"spark.executor.memory",
			"spark.driver.memory",
			"spark.driver.maxResultSize",
			"spark.memory.fraction",
			"spark.memory.storageFraction",
			"spark.storage.blockManagerSlaveTimeoutMs",
			"spark.executor.extraJavaOptions",
			// speculative execution
			"spark.speculation.interval",
			"spark.speculation.quantile",
			"spark.speculation.multiplier",
			"spark.speculation",
			"spark.serializer",
			"spark.kryo.registrator",
			// on Linux ext4 file systems, leaving this true is recommended for performance
			"spark.shuffle.consolidateFiles",
			"spark.streaming.unpersist",
			"spark.network.timeout",
			"spark.executor.heartbeatInterval",
	};

	/** @return the configured partition count ({@link #runPartation}). */
	public int getrunPartation() {
		return this.runPartation;
	}

	// NOTE(review): the throws clauses on the getters/setters below are unused
	// (nothing in their bodies can throw); they are kept so existing callers
	// that catch these exceptions still compile.

	/** @return the Spark Streaming checkpoint path, or {@code null} before init(). */
	public String getSparkStreamCheckPointPath() throws FileNotFoundException, IOException {
		return SparkStreamCheckPointPath;
	}

	/** @param SparkStreamCheckPointPath1 new checkpoint path. */
	public void setSparkStreamCheckPointPath(String SparkStreamCheckPointPath1) throws FileNotFoundException, IOException {
		SparkStreamCheckPointPath = SparkStreamCheckPointPath1;
	}

	/** @return the streaming batch interval in seconds, or -1 before init(). */
	public Long getSparkStreamDurationsSeconds() throws FileNotFoundException, IOException {
		return SparkStreamDurationsSeconds;
	}

	/** @param SparkStreamDurationsSeconds1 new batch interval in seconds. */
	public void setSparkStreamDurationsSeconds(Long SparkStreamDurationsSeconds1) throws FileNotFoundException, IOException {
		SparkStreamDurationsSeconds = SparkStreamDurationsSeconds1;
	}

	/**
	 * Loads ./configfiles/sparkconfig.properties and populates {@link #sparkconf},
	 * {@link #hdfsconf} and the streaming/window fields. Must be called before
	 * {@link #getSparkConf()} or any use of the public configuration fields.
	 *
	 * @throws FileNotFoundException if the properties file does not exist
	 * @throws IOException if the properties file cannot be read
	 */
	public void init() throws FileNotFoundException, IOException {
		Properties props = new Properties();
		// try-with-resources closes the stream even on load failure
		// (the original leaked the FileInputStream)
		try (FileInputStream in = new FileInputStream("./configfiles/sparkconfig.properties")) {
			props.load(in);
		}
		windowDuration = Integer.parseInt(props.getProperty("reduceByKeyAndWindow.windowDuration"));
		slideDuration = Integer.parseInt(props.getProperty("reduceByKeyAndWindow.slideDuration"));
		runPartation = Integer.parseInt(props.getProperty("spark.runPartation"));

		sparkconf = new SparkConf();
		ArrayList<String> list = sparkUtils.quietly(props.getProperty("jarFileFolder"), new ArrayList<String>());
		sparkconf.setJars(list.toArray(new String[list.size()]));
		System.out.println(list.toString());
		sparkconf.setMaster(props.getProperty("spark.master"));
		sparkconf.setAppName(props.getProperty("jobName"));
		// in standalone mode, spark.cores.max caps the cores each application may use
		SparkStreamDurationsSeconds = Long.parseLong(props.getProperty("SparkStream.Durations.Seconds"));
		SparkStreamCheckPointPath = props.getProperty("SparkStream.CheckPointPath");
		// copy every pass-through Spark setting straight from the properties file
		for (String key : PASS_THROUGH_SPARK_KEYS) {
			sparkconf.set(key, props.getProperty(key));
		}

		hdfsconf = new Configuration();
		hdfsconf.set("mapred.output.compress", props.getProperty("mapred.output.compress"));
		hdfsconf.set("mapred.output.compression.codec", props.getProperty("mapred.output.compression.codec"));
	}

	/**
	 * Returns the SparkConf built by {@link #init()}, printing each pass-through
	 * setting to stdout for debugging (same keys, same order as the original).
	 *
	 * @return the populated SparkConf
	 * @throws IllegalStateException never thrown here, but callers must call init() first
	 *         or the field access below raises NullPointerException
	 */
	public SparkConf getSparkConf() throws FileNotFoundException, IOException {
		for (String key : PASS_THROUGH_SPARK_KEYS) {
			System.out.println(sparkconf.get(key));
		}
		return sparkconf;
	}

	/**
	 * Smoke test: loads the configuration and prints spark.master.
	 * The original dereferenced {@code sparkconf} without calling init(),
	 * which always threw NullPointerException.
	 */
	public static void main(String[] args) throws IOException {
		SparkConfigFile config = new SparkConfigFile();
		config.init();
		System.out.println(config.sparkconf.get("spark.master"));
	}

}
