package com.zxq.mall.realtime.app

import com.alibaba.fastjson.{JSON, JSONObject}
import com.zxq.mall.realtime.util.{MyKafkaUtils, MyRedisUtils, OffsetManagerUtil}
import org.apache.kafka.clients.consumer.ConsumerRecord
import org.apache.kafka.common.TopicPartition
import org.apache.spark.broadcast.Broadcast
import org.apache.spark.streaming.dstream.DStream
import org.apache.spark.streaming.kafka010.{HasOffsetRanges, OffsetRange}
import org.apache.spark.streaming.{Seconds, StreamingContext}
import org.apache.spark.{SparkConf}
import redis.clients.jedis.Jedis
import java.util

object BaseDBApp_maxwell {

  /**
   * Streaming splitter for Maxwell CDC data.
   *
   * Reads Maxwell change records from the ODS_BASE_DB_M Kafka topic, classifies
   * each record by its table name against dimension/fact table whitelists kept
   * in Redis sets (DIM:TABLES / FACT:TABLES), writes dimension rows into Redis
   * and fact rows into per-table DWD Kafka topics, and finally commits Kafka
   * offsets manually after each batch (at-least-once semantics).
   */
  def main(args: Array[String]): Unit = {
    val sparkConf: SparkConf =
      new SparkConf().setAppName("base_db_app").setMaster("local[4]")
    val ssc = new StreamingContext(sparkConf, Seconds(5))
    val topic = "ODS_BASE_DB_M"
    val groupId = "base_db_group"

    // Resume from manually-saved offsets when present; otherwise fall back to
    // the consumer group's default starting position.
    val offsets: Map[TopicPartition, Long] = OffsetManagerUtil.getOffset(topic, groupId)
    val kafkaDStream: DStream[ConsumerRecord[String, String]] =
      if (offsets != null && offsets.nonEmpty) {
        MyKafkaUtils.getKafkaDStream(topic, ssc, offsets, groupId)
      } else {
        MyKafkaUtils.getKafkaDStream(ssc, topic, groupId)
      }

    // Capture each batch's offset ranges on the driver so they can be saved
    // only after the batch has been fully processed.
    var offsetRanges: Array[OffsetRange] = null
    val offsetTrackedDStream: DStream[ConsumerRecord[String, String]] =
      kafkaDStream.transform { rdd =>
        offsetRanges = rdd.asInstanceOf[HasOffsetRanges].offsetRanges
        rdd
      }

    val jsonObjDstream: DStream[JSONObject] =
      offsetTrackedDStream.map(record => JSON.parseObject(record.value()))

    jsonObjDstream.foreachRDD { rdd =>
      // Re-read the table whitelists from Redis every batch so configuration
      // changes take effect without restarting the job.
      val dimTableKey: String = "DIM:TABLES"
      val factTableKey: String = "FACT:TABLES"
      val driverJedis: Jedis = MyRedisUtils.getJedisClient
      val (dimTables, factTables) =
        try {
          (driverJedis.smembers(dimTableKey), driverJedis.smembers(factTableKey))
        } finally {
          // Always release the driver-side connection, even if smembers fails.
          driverJedis.close()
        }
      println("检查维度表: " + dimTables)
      println("检查事实表: " + factTables)

      val dimTablesBC: Broadcast[util.Set[String]] = ssc.sparkContext.broadcast(dimTables)
      val factTablesBC: Broadcast[util.Set[String]] = ssc.sparkContext.broadcast(factTables)

      rdd.foreachPartition { jsonObjIter =>
        // One Redis connection per partition, reliably closed below.
        val jedis: Jedis = MyRedisUtils.getJedisClient
        try {
          for (jsonObj <- jsonObjIter) {
            val tableName: String = jsonObj.getString("table")

            // Map Maxwell operation types to single-letter codes; anything
            // else (bootstrap-start/complete, ddl, ...) is skipped.
            val opt: String = jsonObj.getString("type") match {
              case "bootstrap-insert" | "insert" => "I"
              case "update"                      => "U"
              case "delete"                      => "D"
              case _                             => null
            }

            if (opt != null) {
              val dataJson: JSONObject = jsonObj.getJSONObject("data")
              // Guard: some Maxwell events carry no data payload; skip them
              // instead of throwing an NPE on getString("id").
              if (dataJson != null) {
                if (factTablesBC.value.contains(tableName)) {
                  // Fact rows fan out to per-table, per-operation DWD topics.
                  val topicName = s"DWD_${tableName.toUpperCase()}_$opt"
                  val key: String = dataJson.getString("id")
                  MyKafkaUtils.send(topicName, key, dataJson.toJSONString)
                }

                if (dimTablesBC.value.contains(tableName)) {
                  // Dimension rows are cached in Redis keyed by table + primary key.
                  val id: String = dataJson.getString("id")
                  val redisKey: String = s"DIM:${tableName.toUpperCase()}:$id"
                  jedis.set(redisKey, dataJson.toJSONString)
                }
              }
            }
          }
        } finally {
          // Release the connection and flush buffered producer records even on
          // failure, so nothing is silently dropped before offsets commit.
          jedis.close()
          MyKafkaUtils.flush()
        }
      }

      // Commit offsets only after the batch has been processed (at-least-once).
      OffsetManagerUtil.saveOffset(topic, groupId, offsetRanges)
    }

    ssc.start()
    ssc.awaitTermination()
  }

}
