package com.sheep.spark.util;

import com.sheep.spark.init.InitWordCountJob;
import org.apache.log4j.Logger;
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.streaming.Durations;
import org.apache.spark.streaming.api.java.JavaStreamingContext;

import java.io.Serializable;

/**
 * Created with IntelliJ IDEA.
 * User: Administrator
 * Date: 16-5-30
 * Time: 下午2:48
 * To change this template use File | Settings | File Templates.
 */
/**
 * Utility holder for lazily-created, process-wide Spark context objects
 * ({@link SparkConf}, {@link JavaSparkContext}, {@link JavaStreamingContext}),
 * configured from {@link ConfSupport}.
 *
 * <p>All getters are {@code synchronized} so concurrent callers cannot race and
 * create duplicate contexts (Spark forbids a second active SparkContext by default).
 * Implements {@link Serializable} because instances may be captured in Spark closures.
 */
public class SparkSupport implements Serializable {
    private static final long serialVersionUID = 1L;

    private static final Logger logger = Logger.getLogger(SparkSupport.class);

    // Lazily-initialized singletons; access is guarded by class-level synchronization.
    private static SparkConf sparkConf = null;
    private static JavaSparkContext javaSparkContext = null;
    private static JavaStreamingContext javaStreamingContext = null;

    /**
     * Builds a brand-new {@link JavaStreamingContext} (independent of the cached
     * singletons) with a fresh {@link SparkConf} for the given application name.
     *
     * @param appName Spark application name to set on the new configuration
     * @return a new streaming context with the batch interval from {@link ConfSupport#getStreamInterval()}
     */
    public static JavaStreamingContext getNewJavaStreamingContext(String appName) {
        // Local variable deliberately named "conf" so it does not shadow the static field.
        SparkConf conf = new SparkConf().setJars(ConfSupport.getSparkJarAddress()).setAppName(appName)
                .set("spark.driver.allowMultipleContexts", "true");
        if (ConfSupport.needLoadLocalHadoop()) {
            // Only pin the master explicitly when running against a local Hadoop setup.
            conf.setMaster(ConfSupport.getSparkAddress());
        }
        return new JavaStreamingContext(conf, Durations.seconds(ConfSupport.getStreamInterval()));
    }

    /**
     * Returns the shared {@link JavaStreamingContext}, creating it on first use
     * from the shared {@link JavaSparkContext}.
     *
     * @return the lazily-initialized singleton streaming context
     */
    public static synchronized JavaStreamingContext getJavaStreamingContext() {
        if (javaStreamingContext == null) {
            // JavaStreamingContext accepts a JavaSparkContext directly; no need for
            // the toSparkContext/fromSparkContext round-trip.
            javaStreamingContext = new JavaStreamingContext(getJavaSparkContext(),
                    Durations.seconds(ConfSupport.getStreamInterval()));
        }
        return javaStreamingContext;
    }

    /**
     * Returns the shared {@link SparkConf}, creating it on first use from
     * {@link ConfSupport} settings.
     *
     * @return the lazily-initialized singleton configuration
     */
    public static synchronized SparkConf getSparkConf() {
        if (sparkConf == null) {
            sparkConf = new SparkConf().setJars(ConfSupport.getSparkJarAddress()).setAppName(ConfSupport.getSparkName())
                    .set("spark.driver.allowMultipleContexts", "true");
            if (ConfSupport.needLoadLocalHadoop()) {
                sparkConf.setMaster(ConfSupport.getSparkAddress());
            }
        }
        return sparkConf;
    }

    /**
     * Returns the shared {@link JavaSparkContext}, creating it on first use
     * from {@link #getSparkConf()}.
     *
     * @return the lazily-initialized singleton Spark context
     */
    public static synchronized JavaSparkContext getJavaSparkContext() {
        if (javaSparkContext == null) {
            javaSparkContext = new JavaSparkContext(getSparkConf());
        }
        return javaSparkContext;
    }

    /**
     * Initialization entry point: sets local-Hadoop system properties when
     * configured, then kicks off the word-count job. Exceptions are logged,
     * not rethrown (best-effort startup).
     */
    public static void init() {
        try {
            if (ConfSupport.needLoadLocalHadoop()) {
                System.setProperty("hadoop.home.dir", ConfSupport.getLocalHadoopDir());
                System.setProperty("HADOOP_USER_NAME", ConfSupport.getLoginUser());
            }
            // Side-effecting constructor: creating the job object starts it.
            new InitWordCountJob();
        } catch (Exception e) {
            // Two-arg overload preserves the stack trace in the log output.
            logger.error("SparkSupport.init failed", e);
        }
    }
}
