package com.zhang.gmall.app

import com.alibaba.fastjson.{JSON, JSONObject}
import com.zhang.gmall.util.{JedisUtil, MyKafkaUtils, MyOffsetUtil}
import org.apache.kafka.clients.consumer.ConsumerRecord
import org.apache.kafka.common.TopicPartition
import org.apache.spark.SparkConf
import org.apache.spark.broadcast.Broadcast
import org.apache.spark.streaming.dstream.{DStream, InputDStream}
import org.apache.spark.streaming.kafka010.{HasOffsetRanges, OffsetRange}
import org.apache.spark.streaming.{Seconds, StreamingContext}
import redis.clients.jedis.Jedis

import java.util

/**
 * Business-data (CDC) splitting job.
 *
 * Author: zhang, 2022/3/22 12:14
 *
 * Processing steps:
 *   1. Set up the streaming environment.
 *   2. Read previously committed offsets from Redis.
 *   3. Consume data from Kafka (resuming from those offsets when present).
 *   4. Capture the end offsets of each consumed batch.
 *   5. Convert records to JSON and split them:
 *      - fact-table data  -> Kafka (one DWD topic per table/operation)
 *      - dimension data   -> Redis
 *      then flush the Kafka producer buffer and commit the offsets.
 */
object ODSBaseDbApp {

  def main(args: Array[String]): Unit = {

    // TODO 1. Set up the streaming environment (5-second micro-batches).
    val sparkConf: SparkConf = new SparkConf().setAppName("ODSBaseDbApp").setMaster("local[4]")
    val ssc = new StreamingContext(sparkConf, Seconds(5))

    // TODO 2. Read previously committed offsets from Redis. If any exist,
    // resume consumption from them; otherwise let Kafka pick the start point.
    val topic = "ODS_BASE_DB_SPARK"
    val groupId = "ODS_BASE_DB_SPARK_GROUP"
    val offsets: Map[TopicPartition, Long] = MyOffsetUtil.getOffset(topic, groupId)
    // `if` is an expression in Scala — no need for a mutable var + reassignment.
    val kafkaDS: InputDStream[ConsumerRecord[String, String]] =
      if (offsets != null && offsets.nonEmpty) {
        MyKafkaUtils.getKafkaDStream(ssc, topic, groupId, offsets)
      } else {
        MyKafkaUtils.getKafkaDStream(ssc, topic, groupId)
      }

    // TODO 3. Capture the end offsets of each consumed batch so they can be
    // committed after the batch has been fully processed.
    var offsetRanges: Array[OffsetRange] = null
    val offsetRangesDS: DStream[ConsumerRecord[String, String]] = kafkaDS.transform(
      rdd => {
        // transform runs on the driver once per batch; the cast is the
        // documented way to access Kafka offset metadata on the source RDD.
        offsetRanges = rdd.asInstanceOf[HasOffsetRanges].offsetRanges
        rdd
      }
    )

    // TODO 4. Convert each record value to a JSON object.
    // NOTE(review): JSON.parseObject throws on malformed input, which fails the
    // batch — confirm upstream guarantees well-formed JSON, or add a guard.
    val jsonObjDS: DStream[JSONObject] = offsetRangesDS.map(
      data => {
        JSON.parseObject(data.value())
      }
    )

    // TODO 5. Consume the data: route fact tables to Kafka and dimension
    // tables to Redis, driven by table lists maintained in Redis.
    jsonObjDS.foreachRDD(
      rdd => {
        // Code outside RDD operators runs on the driver, once per micro-batch,
        // so the table configuration is re-read from Redis every batch.
        // Redis layout: set type; keys FACT:TABLES / DIM:TABLES; members are table names.
        val redisFactKeys = "FACT:TABLES"
        val redisDimKeys = "DIM:TABLES"
        val jedis: Jedis = JedisUtil.getJedis()
        // Fact-table whitelist.
        val factTables: util.Set[String] = jedis.smembers(redisFactKeys)
        // Dimension-table whitelist.
        val dimTables: util.Set[String] = jedis.smembers(redisDimKeys)
        println(s"factTables:$factTables")
        println(s"dimTables:$dimTables")
        // Broadcast the lists so every task on every executor shares one copy.
        val factTablesBC: Broadcast[util.Set[String]] = ssc.sparkContext.broadcast(factTables)
        val dimTablesBC: Broadcast[util.Set[String]] = ssc.sparkContext.broadcast(dimTables)
        // Release the driver-side Redis connection.
        jedis.close()

        rdd.foreachPartition(
          jsonObjIter => {
            // Executor-side Redis connection, one per partition, used to write
            // dimension rows.
            val jedis: Jedis = JedisUtil.getJedis()
            try {
              for (jsonObj <- jsonObjIter) {
                // Map the CDC operation type to a single-letter code; null
                // filters out operations we do not track (e.g. ddl, bootstrap-start).
                val op: String = jsonObj.getString("type")
                val opValue: String = op match {
                  case "bootstrap-insert" => "I"
                  case "insert" => "I"
                  case "update" => "U"
                  case "delete" => "D"
                  case _ => null
                }

                // Skip operation types that are not part of the statistics.
                if (opValue != null) {
                  val tableName: String = jsonObj.getString("table")
                  if (factTablesBC.value.contains(tableName)) {
                    // Fact data -> Kafka, one topic per table and operation type.
                    val data: String = jsonObj.getString("data")
                    MyKafkaUtils.send(s"DWD_${tableName.toUpperCase}_${opValue}_SPARK", data)
                  }
                  if (dimTablesBC.value.contains(tableName)) {
                    // Dimension data -> Redis string.
                    // key: DIM:<TABLE>:<id>, value: the row's JSON string.
                    val dataObj: JSONObject = jsonObj.getJSONObject("data")
                    val id: String = dataObj.getString("id")
                    val redisKey: String = s"DIM:${tableName.toUpperCase}:$id"
                    jedis.set(redisKey, dataObj.toJSONString)
                  }
                }
              }
            } finally {
              // Always release the connection and flush the Kafka producer
              // buffer, even if a record fails mid-partition; the original
              // leaked the Jedis connection on any exception in the loop.
              jedis.close()
              MyKafkaUtils.flush()
            }
          }
        )
        // Commit offsets to Redis only after the batch has been processed and
        // flushed (at-least-once semantics).
        MyOffsetUtil.saveOffset(topic, groupId, offsetRanges)
      }
    )

    // TODO 6. Start the job and block until termination.
    ssc.start()
    ssc.awaitTermination()
  }
}
