package com.sxzjrj.commen

import org.apache.spark.sql.SparkSession
import org.apache.spark.streaming.{Seconds, StreamingContext}
import org.apache.spark.{SparkConf, SparkContext}

/**
  * Common Spark bootstrap helpers (SparkConf / SparkContext / SparkSession / StreamingContext).
  * Created by ljj on 2019/3/28.
  */
object MBYLBCCommen {

  /**
    * Builds a [[SparkConf]] preconfigured for local execution: Kryo
    * serialization, graceful streaming shutdown, a Kafka per-partition
    * rate cap and snappy parquet compression.
    *
    * @param appName    application name shown in the Spark UI
    * @param moreConfig extra key/value settings; applied last, so they
    *                   override the defaults above
    * @return the assembled SparkConf
    */
  def setSparkConf(
                    appName: String,
                    moreConfig: Map[String, String] = Map.empty[String, String]
                  ): SparkConf = {
    val conf = new SparkConf()
      .setMaster("local[*]")
      // Fix: appName was previously discarded — setAppName("") was hard-coded.
      .setAppName(appName)
      .set("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
      .set("spark.streaming.stopGracefullyOnShutdown", "true")
      .set("spark.streaming.kafka.maxRatePerPartition", "10000")
      .set("spark.sql.parquet.compression.codec", "snappy")

    // Caller-supplied settings win over the defaults above.
    moreConfig.foreach { case (key, value) => conf.set(key, value) }

    conf
  }

  /**
    * Creates a [[SparkContext]] backed by the shared [[setSparkConf]] defaults.
    *
    * @param appName    application name shown in the Spark UI
    * @param moreConfig extra settings forwarded to [[setSparkConf]]
    */
  def createSparkContext(appName: String,
                         moreConfig: Map[String, String] = Map.empty[String, String]
                        ): SparkContext =
    new SparkContext(setSparkConf(appName, moreConfig))

  /**
    * Creates a [[SparkSession]] (the Spark SQL entry point) from the shared
    * [[setSparkConf]] defaults.
    *
    * @param appName    application name shown in the Spark UI
    * @param moreConfig extra settings forwarded to [[setSparkConf]]
    * @return the session together with its underlying SparkContext
    */
  def createSparkSQL(appName: String,
                     moreConfig: Map[String, String] = Map.empty[String, String]
                    ): (SparkSession, SparkContext) = {
    val session = SparkSession
      .builder()
      .config(setSparkConf(appName, moreConfig))
      .getOrCreate()

    (session, session.sparkContext)
  }

  /**
    * Creates a [[StreamingContext]] with the given batch interval.
    *
    * @param time       batch interval in seconds
    * @param appName    application name shown in the Spark UI
    * @param moreConfig extra settings forwarded to [[setSparkConf]]
    * @return the streaming context together with its underlying SparkContext
    */
  def createSparkStreaming(
                            time: Int,
                            appName: String,
                            moreConfig: Map[String, String] = Map.empty[String, String]
                          ): (StreamingContext, SparkContext) = {
    val ssc = new StreamingContext(createSparkContext(appName, moreConfig), Seconds(time))

    (ssc, ssc.sparkContext)
  }

}
