package com.zh.util

import com.zh.constants.Constants
import org.apache.kafka.clients.consumer.ConsumerRecord
import org.apache.kafka.common.serialization.StringDeserializer
import org.apache.spark.SparkConf
import org.apache.spark.sql.{DataFrame, SaveMode, SparkSession}
import org.apache.spark.streaming.dstream.InputDStream
import org.apache.spark.streaming.kafka010.ConsumerStrategies.Subscribe
import org.apache.spark.streaming.kafka010.KafkaUtils
import org.apache.spark.streaming.kafka010.LocationStrategies.PreferConsistent
import org.apache.spark.streaming.{Seconds, StreamingContext}

object SparkUtils {

  // Lazily-initialized, process-wide SparkSession (created by initSpark).
  // @transient so it is never captured into serialized closures.
  @transient private var instance: SparkSession = _

  /**
   * Applies streaming-friendly defaults to the given SparkConf and returns the
   * shared SparkSession, creating it on first call.
   *
   * When not running in production (per PropertiesUtils.isPro) the master is
   * forced to local[*].
   *
   * @param appName   Spark application name (only used on first creation)
   * @param sparkConf configuration to augment; mutated in place
   * @return the shared SparkSession
   */
  def initSpark(appName: String, sparkConf: SparkConf): SparkSession = {
    // Enable backpressure so the ingest rate adapts to processing speed.
    sparkConf.set("spark.streaming.backpressure.enabled", "true")
    // Lowest rate the PID rate estimator may fall back to.
    sparkConf.set("spark.streaming.backpressure.pid.minRate", "1")
    // Upper bound on records read per Kafka partition per second.
    sparkConf.set("spark.streaming.kafka.maxRatePerPartition", "2000")
    // Initial rate before the backpressure loop has any feedback.
    sparkConf.set("spark.streaming.backpressure.initialRate", "100")
    // Kafka consumer poll timeout in milliseconds.
    sparkConf.set("spark.streaming.kafka.consumer.poll.ms", "3000")
    // Task retry count (Spark default is 4).
    sparkConf.set("spark.task.maxFailures", "10")
    // Let in-flight batches finish before shutting down.
    sparkConf.set("spark.streaming.stopGracefullyOnShutdown", "true")
    // Use Kryo serialization.
    sparkConf.set("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
    if (!PropertiesUtils.isPro()) {
      sparkConf.setMaster("local[*]")
    }
    // synchronized: the original unguarded check-then-set was racy when two
    // threads called initSpark concurrently.
    synchronized {
      if (instance == null) {
        instance = SparkSession
          .builder
          .appName(appName)
          .config(sparkConf)
          //.config("spark.sql.warehouse.dir", "./spark-warehouse")
          //.enableHiveSupport()
          .getOrCreate()
        // The original println("msg", x) printed a tuple by accident;
        // use string interpolation for a readable log line.
        println(s"spark defaultParallelism>>> ${instance.sparkContext.defaultParallelism}")
      }
    }
    instance
  }

  /**
   * Creates a StreamingContext on top of an existing SparkSession.
   *
   * @param spark    session whose SparkContext backs the stream
   * @param interval batch interval in seconds
   * @return a new StreamingContext
   */
  def initSparkStreaming(spark: SparkSession, interval: Long): StreamingContext =
    new StreamingContext(spark.sparkContext, Seconds(interval))

  /**
   * Builds a direct Kafka DStream of String key/value records.
   *
   * Offsets are NOT auto-committed (enable.auto.commit = false); the caller is
   * responsible for committing offsets after processing each batch.
   *
   * @param ssc             the streaming context to attach the stream to
   * @param kafkaBrokers    bootstrap servers, "host:port[,host:port...]"
   * @param groupId         consumer group id
   * @param autooffsetreset auto.offset.reset policy ("earliest"/"latest")
   * @param kafkaTopics     topic name; a comma-separated list is also accepted
   *                        (commas are illegal in Kafka topic names, so the
   *                        single-topic case behaves exactly as before)
   * @return the direct input stream
   */
  def getKafkaDirectStream(ssc: StreamingContext, kafkaBrokers: String, groupId: String, autooffsetreset: String, kafkaTopics: String): InputDStream[ConsumerRecord[String, String]] = {
    val kafkaParams = Map[String, Object](
      "bootstrap.servers" -> kafkaBrokers,
      "key.deserializer" -> classOf[StringDeserializer],
      "value.deserializer" -> classOf[StringDeserializer],
      "group.id" -> groupId,
      "auto.offset.reset" -> autooffsetreset,
      // Manual offset management: commit only after a batch is processed.
      "enable.auto.commit" -> (false: java.lang.Boolean),
      "max.poll.interval.ms" -> Constants.KAFKA_MAX_POLL_INTERVAL_MS,
      "max.poll.records" -> Constants.KAFKA_MAX_POLL_RECORDS
    )
    // Generalized from Array(kafkaTopics): allow a comma-separated topic list.
    val topics = kafkaTopics.split(",").map(_.trim).filter(_.nonEmpty)
    KafkaUtils.createDirectStream[String, String](
      ssc,
      PreferConsistent,
      Subscribe[String, String](topics, kafkaParams)
    )
  }

  /**
   * Loads an Impala table over JDBC into a DataFrame.
   *
   * SECURITY NOTE: the credentials were hard-coded ("141951"). They are now
   * overridable parameters whose defaults preserve the old behavior; callers
   * should supply them from configuration rather than relying on the defaults.
   *
   * @param spark        the SparkSession to read with
   * @param impalaMaster JDBC URL of the Impala daemon
   * @param impalaTable  table (or subquery alias) to read
   * @param user         JDBC user; defaults to the legacy hard-coded value
   * @param password     JDBC password; defaults to the legacy hard-coded value
   * @return the loaded DataFrame
   */
  def getImpala(spark: SparkSession, impalaMaster: String, impalaTable: String,
                user: String = "141951", password: String = "141951"): DataFrame = {
    spark.read
      .format("jdbc")
      .option("driver", "com.cloudera.impala.jdbc41.Driver")
      .option("url", impalaMaster)
      .option("dbtable", impalaTable)
      .option("user", user)
      .option("password", password)
      .load()
  }

  /** Lazily instantiated singleton instance of SparkSession. */
  object SparkSessionSingleton {

    @transient private var instance: SparkSession = _

    /**
     * Returns the shared SparkSession, creating it on first call.
     * synchronized: the original unguarded check-then-set could double-create
     * under concurrent first callers.
     */
    def getInstance(sparkConf: SparkConf): SparkSession = synchronized {
      if (instance == null) {
        instance = SparkSession
          .builder
          .config(sparkConf)
          .getOrCreate()
      }
      instance
    }
  }

  /**
   * Parses command-line arguments into (configPath, appName).
   *
   * Exits the JVM with status 1 when no argument is supplied.
   *
   * @param args [0] = config properties path, [1] (optional) = app name
   * @return (config path, app name); the app name falls back to the legacy
   *         default when absent
   */
  def argsHandle(args: Array[String]): (String, String) = {
    if (args.length < 1) {
      System.err.println("Usage: args参数不正确 [1]-config properties")
      System.exit(1)
    }
    // Expression-oriented val instead of the original var + conditional
    // reassignment; the default app name string is preserved byte-for-byte.
    val appName = if (args.length >= 2) args(1) else "消费kafka写入hudi"
    (args(0), appName)
  }

}

