package com.atguigu.gmall.realtime.app

import java.util

import com.alibaba.fastjson.{JSON, JSONArray, JSONObject}
import com.atguigu.gmall.realtime.util.{MyKafkaSender, MyKafkaUtil, OffsetManager, RedisUtil}
import org.apache.kafka.clients.consumer.ConsumerRecord
import org.apache.kafka.common.TopicPartition
import org.apache.spark.SparkConf
import org.apache.spark.broadcast.Broadcast
import org.apache.spark.streaming.dstream.{DStream, InputDStream}
import org.apache.spark.streaming.kafka010.{HasOffsetRanges, OffsetRange}
import org.apache.spark.streaming.{Seconds, StreamingContext}
import redis.clients.jedis.Jedis

object OdsBaseDbApp {

  /**
   * Entry point: consumes CDC records from the ODS_BASE_DB_C Kafka topic,
   * routes fact-table rows to per-table/per-operation DWD Kafka topics and
   * writes dimension-table rows into Redis, then commits Kafka offsets
   * (at-least-once semantics: offsets are saved only after the batch is done).
   */
  def main(args: Array[String]): Unit = {
    // 0. Initialize the streaming environment (5-second micro-batches).
    val sparkConf: SparkConf = new SparkConf().setAppName("ods_base_db_app").setMaster("local[3]")
    val ssc = new StreamingContext(sparkConf, Seconds(5))

    val topic = "ODS_BASE_DB_C"
    val groupId = "ods_base_db_app"

    // 1. Resume from the last saved offsets and build the Kafka input stream.
    val offsetMap: Map[TopicPartition, Long] = OffsetManager.getOffset(topic, groupId)
    val inputDstream: InputDStream[ConsumerRecord[String, String]] =
      MyKafkaUtil.getKafkaStream(topic, ssc, offsetMap, groupId)

    // Capture each batch's per-partition offset ranges on the driver so they
    // can be committed after the batch has been fully processed.
    var offsetRanges: Array[OffsetRange] = null
    val inputWithOffsetDstream: DStream[ConsumerRecord[String, String]] = inputDstream.transform { rdd =>
      offsetRanges = rdd.asInstanceOf[HasOffsetRanges].offsetRanges
      rdd
    }

    // 2. Parse each record value into a generic JSON object.
    val jsonObjDStream: DStream[JSONObject] = inputWithOffsetDstream.map { record =>
      JSON.parseObject(record.value())
    }
    jsonObjDStream.print(1000)

    // 3. Split dimension data from fact data:
    //    - fact rows      -> routed to topic DWD_<TABLE>_<I|U|D> in Kafka
    //    - dimension rows -> stored in Redis as String values
    // The two table lists are maintained dynamically as Redis sets so they can
    // be changed without redeploying the job. Historical dimension data must be
    // bootstrapped separately.
    jsonObjDStream.foreachRDD { rdd =>
      // A. Driver side: re-read the table lists every batch (periodic refresh)
      //    and broadcast them so executors don't query Redis per record.
      val driverJedis: Jedis = RedisUtil.getJedisClient
      val (dimTables, factTables) =
        try {
          (driverJedis.smembers("DIM_TABLES"), driverJedis.smembers("FACT_TABLES"))
        } finally {
          driverJedis.close() // always release the connection, even on error
        }
      val dimTablesBC: Broadcast[util.Set[String]] = ssc.sparkContext.broadcast(dimTables)
      val factTablesBC: Broadcast[util.Set[String]] = ssc.sparkContext.broadcast(factTables)
      println(s"维度表：$dimTables")
      println(s"事实表：$factTables")

      rdd.foreachPartition { jsonObjItr =>
        // B. Executor side: one Redis connection per partition.
        val jedis: Jedis = RedisUtil.getJedisClient
        try {
          // An Iterator can only be traversed once; materialize it so we can
          // both count and iterate the records.
          val records: List[JSONObject] = jsonObjItr.toList
          println(s"本批次数据量:${records.size}")
          for (jsonObj <- records) {
            val tableName: String = jsonObj.getString("table")
            val dataArr: JSONArray = jsonObj.getJSONArray("data")
            // Guard: control records (e.g. Maxwell bootstrap markers) may lack
            // a table name or a data array — skip them instead of throwing NPE.
            if (tableName != null && dataArr != null) {
              // Fact data: forward each row to Kafka.
              // Target topic name: DWD_<TABLE_UPPERCASE>_<first letter of op: I/U/D>.
              if (factTablesBC.value.contains(tableName)) {
                val optType: String = jsonObj.getString("type")
                // Guard: a missing/empty "type" would make substring(0,1) throw.
                if (optType != null && optType.nonEmpty) {
                  val opt: String = optType.substring(0, 1).toUpperCase()
                  val targetTopic = s"DWD_${tableName.toUpperCase}_$opt"
                  for (i <- 0 until dataArr.size()) {
                    MyKafkaSender.send(targetTopic, dataArr.getJSONObject(i).toJSONString)
                  }
                }
              }
              // Dimension data: store each row as a Redis String.
              // key = DIM:<TABLE_UPPERCASE>:<id>, value = row JSON, no TTL
              // (dimension data is long-lived, so no expiry is set).
              if (dimTablesBC.value.contains(tableName)) {
                for (i <- 0 until dataArr.size()) {
                  val dataJsonObj: JSONObject = dataArr.getJSONObject(i)
                  val id: String = dataJsonObj.getString("id") // primary key
                  val dimKey = s"DIM:${tableName.toUpperCase()}:$id"
                  jedis.set(dimKey, dataJsonObj.toJSONString)
                }
              }
            }
          }
        } finally {
          // Release the connection and flush the producer even if a record
          // failed, so no connection leaks and no message is left buffered.
          jedis.close()
          MyKafkaSender.flush()
        }
      }

      // Commit offsets only after the whole batch has been processed
      // (at-least-once delivery: a crash before this line replays the batch).
      OffsetManager.saveOffset(topic, groupId, offsetRanges)
    }

    ssc.start()
    ssc.awaitTermination()
  }

}
