package com.atguigu.gmall.realtime.app

import com.alibaba.fastjson.{JSON, JSONObject}
import com.atguigu.gmall.realtime.utils.{KafkaUtil, OffsetUtil, RedisUtil}
import org.apache.kafka.clients.consumer.ConsumerRecord
import org.apache.kafka.common.TopicPartition
import org.apache.spark.SparkConf
import org.apache.spark.broadcast.Broadcast
import org.apache.spark.rdd.RDD
import org.apache.spark.streaming.dstream.DStream
import org.apache.spark.streaming.kafka010.{HasOffsetRanges, OffsetRange}
import org.apache.spark.streaming.{Seconds, StreamingContext}
import redis.clients.jedis.Jedis

import java.util


/**
 * @author caodan
 * @date 2022-09-05 10:04
 * @version 1.0
 */
object BaseDbApp {

  /**
   * Streaming splitter for the ODS business-database topic.
   *
   * Reads Maxwell-style change records from Kafka topic `ODS_BASE_DB`,
   * then per record:
   *  - fact ("affairs") tables  -> forwarded to a per-table DWD Kafka topic,
   *  - dimension tables         -> written to Redis as `DIM:<table>:<id>`.
   *
   * The table whitelists are re-read from Redis on every batch and
   * rebroadcast, so they can be changed without restarting the job.
   * Kafka offsets are saved manually (via OffsetUtil) only after a batch
   * has been processed, giving at-least-once semantics.
   */
  def main(args: Array[String]): Unit = {

    val conf = new SparkConf()
    conf.setMaster("local[1]").setAppName("base-db-app")
    val dbTopic: String = "ODS_BASE_DB"
    val groupId: String = "ODS_BASE_DB"
    val context = new StreamingContext(conf, Seconds(5))

    // Resume from offsets previously saved in Redis, if any.
    val offsets: Map[TopicPartition, Long] = OffsetUtil.getOffset(dbTopic, groupId)

    // Start from the saved offsets when present, otherwise let the
    // consumer group's default position apply.
    val sourceDStream: DStream[ConsumerRecord[String, String]] =
      if (offsets != null && offsets.nonEmpty)
        KafkaUtil.getKafkaDstream(dbTopic, context, groupId, offsets)
      else
        KafkaUtil.getKafkaDstream(dbTopic, context, groupId)

    // Captured on the driver once per batch (transform runs driver-side);
    // used at the end of the batch to persist the consumed offset ranges.
    var offsetRanges: Array[OffsetRange] = null
    val kafkaDStream: DStream[ConsumerRecord[String, String]] = sourceDStream.transform(
      (rdd: RDD[ConsumerRecord[String, String]]) => {
        offsetRanges = rdd.asInstanceOf[HasOffsetRanges].offsetRanges
        rdd
      }
    )

    // Parse each Kafka record value into a fastjson JSONObject.
    val jsonObjDstream: DStream[JSONObject] = kafkaDStream.map(
      (consumerRecord: ConsumerRecord[String, String]) => {
        val value: String = consumerRecord.value()
        val jsonObj: JSONObject = JSON.parseObject(value)
        jsonObj
      }
    )

    jsonObjDstream.foreachRDD(
      (rdd: RDD[JSONObject]) => {
        // Refresh the table whitelists from Redis each batch and broadcast
        // them to the executors. try/finally guarantees the driver-side
        // Jedis connection is returned even if Redis access fails.
        val jedis: Jedis = RedisUtil.getJedisConn
        val (affairsDataBC: Broadcast[util.Set[String]], dimensionDataBC: Broadcast[util.Set[String]]) =
          try {
            val affairsData: util.Set[String] = jedis.smembers("affairsData")
            val dimensionData: util.Set[String] = jedis.smembers("dimensionData")
            (context.sparkContext.broadcast(affairsData),
              context.sparkContext.broadcast(dimensionData))
          } finally {
            RedisUtil.closeJedisConn(jedis)
          }
        println(affairsDataBC.value)
        println(dimensionDataBC.value)

        rdd.foreachPartition(
          (jsonObjIte: Iterator[JSONObject]) => {
            // One Redis connection per partition; released in finally so it
            // is not leaked when a record fails mid-partition.
            val conn: Jedis = RedisUtil.getJedisConn
            try {
              for (jsonObj <- jsonObjIte) {
                // Map the Maxwell operation type to a single-letter code;
                // anything else (DDL, heartbeat, ...) is skipped.
                val op: String = jsonObj.getString("type")
                val opStr: String = op match {
                  case "bootstrap-insert" | "insert" => "I"
                  case "update" => "U"
                  case "delete" => "D"
                  case _ => null
                }
                if (opStr != null) {
                  val tableName: String = jsonObj.getString("table")
                  val jsonData: JSONObject = jsonObj.getJSONObject("data")
                  // "data" may be absent on non-row events; guard against NPE.
                  if (jsonData != null) {
                    // Fact table: forward the row to its DWD topic.
                    if (affairsDataBC.value.contains(tableName)) {
                      KafkaUtil.send(s"DWD_${tableName.toUpperCase}_$opStr", jsonData.toJSONString)
                    }
                    // Dimension table: cache the row in Redis keyed by id.
                    if (dimensionDataBC.value.contains(tableName)) {
                      val id: String = jsonData.getString("id")
                      conn.set(s"DIM:$tableName:$id", jsonData.toJSONString)
                    }
                  }
                }
              }
              // Make sure buffered producer records are on the broker before
              // the batch is allowed to commit its offsets.
              KafkaUtil.flush()
            } finally {
              RedisUtil.closeJedisConn(conn)
            }
          }
        )

        // Persist offsets only after the whole batch succeeded
        // (at-least-once delivery).
        OffsetUtil.saveOffset(dbTopic, groupId, offsetRanges)
      }
    )

    context.start()
    context.awaitTermination()
  }
}
