package com.rainsoft.center.isec.common.utils;

import com.rainsoft.center.isec.common.entity.Constants;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.mapred.TableOutputFormat;
import org.apache.hadoop.mapreduce.Job;
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.streaming.Durations;
import org.apache.spark.streaming.api.java.JavaStreamingContext;

import java.io.IOException;

/**
 * @Name com.rainsoft.center.isec.common.utils.ISecSparkUtils
 * @Description
 * @Author Elwyn
 * @Version 2017/11/21
 * @Copyright 上海云辰信息科技有限公司
 **/
public final class ISecSparkUtils {

	private ISecSparkUtils() {
		// Utility class — static methods only, not instantiable.
	}

	/**
	 * Builds a {@link SparkConf} with the given application name and the Kryo serializer.
	 * <p>
	 * The master is set to {@code local[2]} only when {@code Constants.LOCAL} is true;
	 * otherwise it is left unset so the value supplied by {@code spark-submit} applies.
	 *
	 * @param appName Spark application name
	 * @return the configured SparkConf
	 */
	public static SparkConf getSparkConf(String appName) {
		// BUG FIX: previously .setMaster("local[2]") was hard-coded in the builder chain,
		// which made the Constants.LOCAL check below dead code and forced local mode
		// even for cluster deployments. The master is now only forced for local runs.
		SparkConf sparkConf = new SparkConf()
				.setAppName(appName)
				.set("spark.serializer", "org.apache.spark.serializer.KryoSerializer");

		if (Constants.LOCAL) {
			sparkConf.setMaster("local[2]");
		}
		return sparkConf;
	}

	/**
	 * Creates a {@link JavaSparkContext} from the given configuration.
	 *
	 * @param sparkConf Spark configuration
	 * @return a new JavaSparkContext
	 */
	public static JavaSparkContext getJavaSparkContext(SparkConf sparkConf) {
		return new JavaSparkContext(sparkConf);
	}

	/**
	 * Creates a {@link JavaStreamingContext} with the given batch interval.
	 *
	 * @param sparkConf Spark configuration
	 * @param duration  batch interval in seconds
	 * @return a new JavaStreamingContext
	 */
	public static JavaStreamingContext getJavaStreamingContext(SparkConf sparkConf, long duration) {
		return new JavaStreamingContext(sparkConf, Durations.seconds(duration));
	}

	/**
	 * Creates a Hadoop {@link Job} configured to write to the given HBase table via the
	 * new-API {@code org.apache.hadoop.hbase.mapreduce.TableOutputFormat}.
	 *
	 * @param zkHost    ZooKeeper quorum address(es); multiple comma-separated hosts for HA
	 * @param tableName target HBase table name
	 * @return a Job whose configuration targets {@code tableName}
	 * @throws IllegalStateException if the Job instance cannot be created
	 */
	public static Job setOutputTable(String zkHost, String tableName) {
		// Must NOT reuse javaSparkContext.hadoopConfiguration(); a fresh HBase conf is required.
		Configuration conf = HBaseConfiguration.create();
		// ZooKeeper quorum; if unset HBase falls back to localhost:2181.
		conf.set("hbase.zookeeper.quorum", zkHost);
		// BUG FIX: the old code set the OLD-API key (mapred.TableOutputFormat.OUTPUT_TABLE =
		// "hbase.mapred.outputtable") while the job below uses the NEW-API
		// mapreduce.TableOutputFormat, which reads "hbase.mapreduce.outputtable" —
		// so the table name was silently ignored. Use the matching new-API constant.
		conf.set(org.apache.hadoop.hbase.mapreduce.TableOutputFormat.OUTPUT_TABLE, tableName);
		try {
			// The Job must be created from this conf and have an output format set, otherwise:
			// org.apache.hadoop.mapred.InvalidJobConfException: Output directory not set.
			Job job = Job.getInstance(conf);
			job.setOutputFormatClass(org.apache.hadoop.hbase.mapreduce.TableOutputFormat.class);
			return job;
		} catch (IOException e) {
			// Fail fast with the cause preserved instead of printStackTrace() + returning null,
			// which merely deferred the failure to an NPE at the call site.
			throw new IllegalStateException(
					"Failed to create HBase output job for table " + tableName, e);
		}
	}

}
