package org.yonggan.shop.etl

import com.google.gson.Gson
import org.apache.kafka.clients.consumer.ConsumerRecord
import org.apache.kafka.common.serialization.StringDeserializer
import org.apache.spark.streaming._
import org.apache.spark.streaming.dstream.InputDStream
import org.apache.spark.streaming.kafka010.ConsumerStrategies.Subscribe
import org.apache.spark.streaming.kafka010.{CanCommitOffsets, HasOffsetRanges, KafkaUtils, LocationStrategies}
import org.slf4j.LoggerFactory
import org.yonggan.shop.constant.ConfigurationManager
import org.yonggan.shop.domain.LogJson
import org.yonggan.shop.utils.{SQLUtils, SparkUtils}

import scala.util.control.NonFatal

/**
  * Real-time log ETL: consumes raw log JSON from Kafka with Spark Streaming,
  * parses each record into [[LogJson]], and runs the batch through Spark SQL.
  */
object RealTimeEtlKfk2Hdfs {

  // Job-level logger.
  private val LOGGER = LoggerFactory.getLogger(RealTimeEtlKfk2Hdfs.getClass)

  /**
    * Entry point. Builds a 30-second-batch StreamingContext, reads a Kafka
    * direct stream, parses every record's value as a [[LogJson]], registers
    * the parsed batch as temp view `v_etl_json`, runs the ETL SQL via
    * [[SQLUtils.executor]] and shows the result.
    *
    * Offset handling: `enable.auto.commit` is false, so offsets MUST be
    * committed explicitly after each batch — otherwise the consumer group
    * never advances and every restart re-reads from "earliest". The commit
    * is done at the end of `foreachRDD` via [[CanCommitOffsets]].
    *
    * @param args unused command-line arguments
    */
  def main(args: Array[String]): Unit = {

    val conf = SparkUtils.getSparkConf("实时日志的etl")

    val ssc = new StreamingContext(conf, Seconds(30))
    ssc.sparkContext.setLogLevel("WARN")

    // Manual offset management: auto-commit disabled, committed explicitly below.
    val kafkaParams = Map[String, Object](
      "bootstrap.servers" -> ConfigurationManager.KFK_SERVERS,
      "key.deserializer" -> classOf[StringDeserializer],
      "value.deserializer" -> classOf[StringDeserializer],
      "group.id" -> ConfigurationManager.KFK_GROUPID,
      "auto.offset.reset" -> "earliest",
      "enable.auto.commit" -> (false: java.lang.Boolean)
    )

    val topics = ConfigurationManager.KFK_TOPIC

    // Kafka direct stream (one RDD partition per Kafka partition).
    val kfkDS: InputDStream[ConsumerRecord[String, String]] = KafkaUtils.createDirectStream[String, String](
      ssc,
      LocationStrategies.PreferConsistent,
      Subscribe[String, String](topics, kafkaParams)
    )

    kfkDS.foreachRDD { rdd =>

      // Capture offset ranges from the raw Kafka RDD before any transformation
      // changes its type/partitioning (HasOffsetRanges only exists on the source RDD).
      val offsetRanges = rdd.asInstanceOf[HasOffsetRanges].offsetRanges

      val session = SparkUtils.getSparkSession(rdd.sparkContext.getConf)
      import session.implicits._

      // Parse per partition: one Gson per partition (instead of one per record),
      // and drop records that fail to parse instead of emitting null rows.
      val jsonDF = rdd.mapPartitions { records =>
        val gson = new Gson()
        records.flatMap { record =>
          try {
            // Gson may return null for blank/"null" input; Option() filters that too.
            Option(gson.fromJson(record.value(), classOf[LogJson]))
          } catch {
            case NonFatal(e) =>
              if (LOGGER.isErrorEnabled()) LOGGER.error(s"日常日志记录 ：${record.value()}", e)
              None
          }
        }
      }.toDF()

      jsonDF.createOrReplaceTempView("v_etl_json")

      val sql =
        """
          | SELECT page FROM v_etl_json
        """.stripMargin
      val resDF = SQLUtils.executor(sql, session)
      resDF.show()

      // BUG FIX: commit the processed offsets back to Kafka. Without this the
      // group's offsets never move and the job reprocesses from "earliest"
      // after every restart (auto-commit is disabled above).
      kfkDS.asInstanceOf[CanCommitOffsets].commitAsync(offsetRanges)
    }

    ssc.start()
    ssc.awaitTermination()
  }

}
