package com.atguigu.gmall.realtime.app

import java.util
import com.alibaba.fastjson.{JSON, JSONArray, JSONObject}
import com.atguigu.gmall.realtime.util.{MyKafkaUtil, MykafkaSender, OffsetManager, RedisUtil}
import org.apache.kafka.clients.consumer.ConsumerRecord
import org.apache.kafka.common.TopicPartition
import org.apache.spark.SparkConf
import org.apache.spark.broadcast.Broadcast
import org.apache.spark.streaming.dstream.{DStream, InputDStream}
import org.apache.spark.streaming.kafka010.{CanCommitOffsets, HasOffsetRanges, OffsetRange}
import org.apache.spark.streaming.{Seconds, StreamingContext}
import redis.clients.jedis.Jedis


object OdsBaseDbApp {

  // Pipeline overview:
  //  1. Consume change-log records (JSON strings) from the ODS Kafka topic.
  //  2. Parse each record value into a JSONObject.
  //  3. Route by table name:
  //       dimension tables -> Redis    (key DIM:<table>:<id>, value = row JSON, no TTL)
  //       fact tables      -> Kafka    (topic DWD_<TABLE>_<I|U>)
  //     The dimension/fact table-name sets live in Redis (sets DIM_TABLES / FACT_TABLES)
  //     so they can be changed at runtime without redeploying; they are re-read and
  //     re-broadcast at the start of every micro-batch.
  //  Offsets are committed back to Kafka only after a batch is fully processed
  //  (at-least-once delivery semantics).

  def main(args: Array[String]): Unit = {
    // 0. Environment setup
    val topic = "ODS_BASE_DB_C"
    val groupId = "ods_base_db_c_app"

    val sparkConf: SparkConf = new SparkConf().setAppName("ods_base_db_c_app").setMaster("local[4]")
    val ssc = new StreamingContext(sparkConf, Seconds(5))

    // 1. Load previously saved offsets; if none exist, start from the consumer
    //    group's default position.
    val offsetMap: Map[TopicPartition, Long] = OffsetManager.getOffset(topic, groupId)
    val inputDstream: InputDStream[ConsumerRecord[String, String]] =
      if (offsetMap == null || offsetMap.isEmpty) {
        MyKafkaUtil.getKafkaStream(topic, ssc, groupId)
      } else {
        MyKafkaUtil.getKafkaStream(topic, ssc, offsetMap, groupId)
      }

    // Capture each batch's offset ranges on the driver so they can be committed
    // after the batch has been written out (see commitAsync below).
    var offsetRanges: Array[OffsetRange] = null // driver-side only
    val inputDstreamWithOffset: DStream[ConsumerRecord[String, String]] = inputDstream.transform { rdd =>
      // transform's closure body runs on the driver once per batch, so this
      // assignment is safe (it is NOT executor code).
      offsetRanges = rdd.asInstanceOf[HasOffsetRanges].offsetRanges
      rdd
    }

    // 2. Parse each Kafka record value into a structured JSONObject (executor side).
    val jsonObjDstream: DStream[JSONObject] = inputDstreamWithOffset.map { record =>
      JSON.parseObject(record.value())
    }

    // 3. Per-batch routing.
    jsonObjDstream.foreachRDD { rdd =>
      // Driver side: refresh the dimension/fact table-name sets from Redis and
      // broadcast them so executors see the latest routing configuration.
      val jedis: Jedis = RedisUtil.getJedisClient
      val dimTablesSet: util.Set[String] = jedis.smembers("DIM_TABLES")
      println("current dimension tables: " + dimTablesSet)
      val dimTableSetBC: Broadcast[util.Set[String]] = ssc.sparkContext.broadcast(dimTablesSet)
      val factTablesSet: util.Set[String] = jedis.smembers("FACT_TABLES")
      println("current fact tables: " + factTablesSet)
      val factTableSetBC: Broadcast[util.Set[String]] = ssc.sparkContext.broadcast(factTablesSet)
      jedis.close()

      rdd.foreachPartition { jsonObjItr =>
        // Executor side: one Redis connection per partition, closed when done.
        val jedisClient: Jedis = RedisUtil.getJedisClient
        for (jsonObj <- jsonObjItr) {
          val tableName: String = jsonObj.getString("table")
          if (dimTableSetBC.value.contains(tableName)) {
            // Dimension row -> Redis string: key DIM:<table>:<id>, no expiry.
            val dataJSONArray: JSONArray = jsonObj.getJSONArray("data")
            for (i <- 0 until dataJSONArray.size()) {
              val dataJsonObj: JSONObject = dataJSONArray.getJSONObject(i)
              val id: String = dataJsonObj.getString("id")
              jedisClient.set(s"DIM:$tableName:$id", dataJsonObj.toJSONString)
            }
          } else if (factTableSetBC.value.contains(tableName)) {
            // Fact row -> DWD Kafka topic suffixed with the operation type.
            val dataJSONArray: JSONArray = jsonObj.getJSONArray("data")
            val optType: String = jsonObj.getString("type") match {
              case "INSERT" => "I"
              case "UPDATE" => "U"
              case _        => null // DELETE and other types are not forwarded
            }
            // FIX: the original interpolated a null optType into the topic name
            // (producing e.g. "DWD_ORDER_INFO_null") and still sent the records.
            // Skip records whose operation type is not INSERT/UPDATE instead.
            if (optType != null) {
              val targetTopic = s"DWD_${tableName.toUpperCase}_$optType"
              for (i <- 0 until dataJSONArray.size()) {
                MykafkaSender.send(targetTopic, dataJSONArray.getJSONObject(i).toJSONString)
              }
            }
          }
        }
        MykafkaSender.flush()
        jedisClient.close()
      }

      // Commit offsets only after the whole batch has been written to Redis/Kafka,
      // so a crash mid-batch replays the batch (at-least-once).
      inputDstream.asInstanceOf[CanCommitOffsets].commitAsync(offsetRanges)
    }

    ssc.start()
    ssc.awaitTermination()
  }
}
