package com.zhao.util

import org.apache.kafka.clients.consumer.ConsumerRecord
import org.apache.kafka.common.serialization.StringDeserializer
import org.apache.spark.SparkConf
import org.apache.spark.sql.SparkSession
import org.apache.spark.streaming.dstream.DStream
import org.apache.spark.streaming.kafka010.ConsumerStrategies.Subscribe
import org.apache.spark.streaming.kafka010.{KafkaUtils, LocationStrategies}
import org.apache.spark.streaming.{Seconds, StreamingContext}

/**
 * Description: Spark utility helpers for building SparkSession instances
 * and Kafka direct streams.<br/>
 * Copyright (c) 2021, 赵<br/>
 * This program is protected by copyright laws.<br/>
 * Date: 2021/1/5 15:44
 *
 * @author 柒柒
 * @version 1.0
 */

object SparkUtil {

  /**
   * Debug-mode flag: when true, createKafkaStream forces a local[*] master.
   */
  var debugModel = false

  /**
   * Creates (or reuses) a SparkSession built from the given application name
   * and master URL. Either argument may be null, in which case that setting
   * is left to the environment (e.g. spark-submit).
   *
   * @param appName application name, or null to leave unset
   * @param master  master URL (e.g. "local[*]"), or null to leave unset
   * @return the SparkSession obtained from SparkSession.builder.getOrCreate
   */
  def createSparkSession(appName: String, master: String): SparkSession = {
    val conf: SparkConf = new SparkConf()
    // BUG FIX: the original guarded both setters behind
    // (appName != null && master != null), which made the inner per-argument
    // null checks dead code — passing only one of the two silently ignored it.
    // Apply each setting independently instead.
    if (appName != null) {
      conf.setAppName(appName)
    }
    if (master != null) {
      conf.setMaster(master)
    }
    if (appName != null || master != null) {
      // Pin the driver host for local development runs.
      conf.set("spark.driver.host", "localhost")
    }
    SparkSession.builder.config(conf).getOrCreate
  }

  /**
   * Builds a Kafka direct stream together with the StreamingContext that
   * drives it.
   *
   * Required options in args (parsed by ParamParserUtil.argsToOptionMap):
   *  - appName:           unique Spark application name
   *  - interval:          batch interval, in seconds
   *  - topics:            comma-separated list of topics to subscribe to
   *  - bootstrap.servers: Kafka broker list
   *  - group.id:          consumer group id
   *  - auto.offset.reset: offset reset policy (e.g. "latest")
   * Optional:
   *  - enable.auto.commit: defaults to true when absent
   *
   * @param args command-line arguments to parse into the option map
   * @return the Kafka DStream paired with its StreamingContext
   */
  def createKafkaStream(args: Array[String]): (DStream[ConsumerRecord[String, String]], StreamingContext) = {
    // Parse the command-line arguments into an option map.
    val paramMap: Map[String, String] = ParamParserUtil.argsToOptionMap(args)

    // appName must be unique per application.
    val conf: SparkConf = new SparkConf().setAppName(paramMap("appName"))
    // In debug mode, run against an embedded local master.
    if (debugModel) {
      conf.setMaster("local[*]")
    }
    // Batch interval is expressed in seconds.
    val ssc: StreamingContext = new StreamingContext(conf, Seconds(paramMap("interval").toLong))

    // Subscribed topics, comma-separated.
    val topics: Set[String] = paramMap("topics").split(",").toSet

    // BUG FIX: Map.apply throws NoSuchElementException for a missing key, so
    // the original `paramMap("enable.auto.commit") == null` check could never
    // be true and the call crashed when the option was omitted. Use Map.get
    // and default to true instead.
    val autoCommit: Boolean =
      paramMap.get("enable.auto.commit").map(_.toBoolean).getOrElse(true)

    val kafkaParams = Map[String, Object](
      "bootstrap.servers" -> paramMap("bootstrap.servers"),
      "key.deserializer" -> classOf[StringDeserializer],
      "value.deserializer" -> classOf[StringDeserializer],
      "group.id" -> paramMap("group.id"),
      "auto.offset.reset" -> paramMap("auto.offset.reset"),
      "enable.auto.commit" -> (autoCommit: java.lang.Boolean)
    )
    (KafkaUtils.createDirectStream[String, String](
      ssc,
      LocationStrategies.PreferConsistent,
      Subscribe[String, String](topics, kafkaParams)), ssc
    )
  }

  /**
   * Switches debug mode on or off (see debugModel).
   *
   * @param model true to force local[*] in createKafkaStream
   */
  def setDebugModel(model: Boolean): Unit = {
    debugModel = model
  }
}














