package spark.util;

import org.apache.spark.SparkContext;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.sql.SQLContext;
import org.apache.spark.sql.SparkSession;

/**
 * Utility class providing shared access to the SparkSession, SparkContext
 * and SQLContext for the application.
 *
 * @author jiantao7
 * @create 2018-05-17 15:54
 */
public class SparkUtil {
    // Shared, lazily-initialized Spark handles. All three are volatile so the
    // double-checked locking in initSparkSession() publishes them safely.
    private static volatile SparkSession spark;
    private static volatile SparkContext sc;
    private static volatile SQLContext sqlContext;

    private void setSc(SparkContext sc) {
        SparkUtil.sc = sc;
    }

    /**
     * Returns the shared {@link SQLContext}, or {@code null} if no
     * {@code SparkUtil(appName, isLocal)} instance has been created yet.
     */
    public SQLContext getSqlContext() {
        return sqlContext;
    }

    private static void setSqlContext(SQLContext sqlContext) {
        SparkUtil.sqlContext = sqlContext;
    }

    /** Hidden no-arg constructor: an instance is only useful once initialized. */
    private SparkUtil() {
    }

    /**
     * Creates (or reuses) the shared SparkSession.
     *
     * @param appName the Spark application name
     * @param isLocal when {@code true}, runs with master {@code local[*]};
     *                otherwise the master is taken from the deploy environment
     */
    public SparkUtil(String appName, boolean isLocal) {
        initSparkSession(appName, isLocal);
    }

    /**
     * Returns the shared {@link SparkSession}, or {@code null} if no
     * {@code SparkUtil(appName, isLocal)} instance has been created yet.
     */
    public SparkSession getSparkSession() {
        return spark;
    }

    /**
     * Lazily builds the shared SparkSession and caches its SparkContext and
     * SQLContext.
     *
     * <p>Uses double-checked locking: the unsynchronized first check avoids
     * lock contention on the hot path, while the synchronized second check
     * guarantees only one thread ever builds the session. The {@code volatile}
     * field makes the publication safe.
     *
     * @param appName the Spark application name
     * @param isLocal when {@code true}, forces master {@code local[*]}
     */
    private void initSparkSession(String appName, boolean isLocal) {
        if (spark == null) {
            synchronized (SparkUtil.class) {
                if (spark == null) {
                    if (isLocal) {
                        spark = SparkSession.builder()
                                .appName(appName)
                                .master("local[*]")
                                .getOrCreate();
                    } else {
                        spark = SparkSession.builder()
                                .appName(appName)
                                .getOrCreate();
                    }
                }
            }
        }
        // Cache the derived contexts; the original code never populated sc.
        setSc(spark.sparkContext());
        setSqlContext(spark.sqlContext());
    }

    /**
     * Returns a {@link JavaSparkContext} wrapping the shared SparkContext.
     *
     * @throws IllegalStateException if the SparkSession has not been
     *         initialized via {@code new SparkUtil(appName, isLocal)}
     */
    public JavaSparkContext getJavaSparkContext() {
        SparkSession session = spark;
        if (session == null) {
            throw new IllegalStateException(
                    "SparkSession not initialized; construct SparkUtil(appName, isLocal) first");
        }
        return JavaSparkContext.fromSparkContext(session.sparkContext());
    }
}