package com.qiangsheng.gmall.realtime.app

import com.alibaba.fastjson.{JSON, JSONObject}
import com.qiangsheng.gmall.realtime.util.{MyKafkaUtils, MyOffsetUtils, MyRedisUtils}
import org.apache.kafka.clients.consumer.ConsumerRecord
import org.apache.kafka.common.TopicPartition
import org.apache.spark.SparkConf
import org.apache.spark.streaming.dstream.{DStream, InputDStream}
import org.apache.spark.streaming.kafka010.{HasOffsetRanges, OffsetRange}
import org.apache.spark.streaming.{Seconds, StreamingContext}
import redis.clients.jedis.Jedis

/**
 * Business-data consumption and stream splitting (ODS -> DWD / DIM).
 *
 * Pipeline:
 * 1. Prepare the real-time (streaming) environment
 * 2. Read stored offsets from Redis
 * 3. Consume data from Kafka (resuming from stored offsets when present)
 * 4. Capture the offset end points of each batch
 * 5. Process the data
 *    5.1 Convert the record value into a JSONObject
 *    5.2 Split the stream:
 *        fact data      => Kafka (one DWD topic per table + operation)
 *        dimension data => Redis (string keys, no TTL)
 * 6. Flush the Kafka producer buffer
 * 7. Commit offsets (only after the batch's data has been handed off)
 */
object OdsBaseDbApp {
  def main(args: Array[String]): Unit = {
    // 1. Prepare the real-time environment: 5-second micro-batches.
    val sparkConf: SparkConf = new SparkConf().setAppName("ods_base_db_app").setMaster("local[4]")
    val ssc = new StreamingContext(sparkConf, Seconds(5))

    // 2. Read previously committed offsets from Redis.
    val topicName = "ODS_BASE_DB"
    val groupId = "ODS_BASE_DB_GROUP"
    val offsets: Map[TopicPartition, Long] = MyOffsetUtils.readOffset(topicName, groupId)

    // 3. Consume from Kafka. Use the stored offsets when we have them;
    //    otherwise fall back to the consumer group's default position.
    //    (if-expression assigned to a val instead of a mutable var.)
    val kafkaDStream: InputDStream[ConsumerRecord[String, String]] =
      if (offsets != null && offsets.nonEmpty) {
        MyKafkaUtils.getKafkaDStream(ssc, topicName, groupId, offsets)
      } else {
        MyKafkaUtils.getKafkaDStream(ssc, topicName, groupId)
      }

    // 4. Capture offset ranges on the driver. transform's function body runs
    //    on the driver once per batch, before any downstream operation breaks
    //    the RDD's association with its Kafka offsets.
    var offsetRanges: Array[OffsetRange] = null
    val offsetRangesDStream: DStream[ConsumerRecord[String, String]] = kafkaDStream.transform(
      rdd => {
        offsetRanges = rdd.asInstanceOf[HasOffsetRanges].offsetRanges
        rdd
      }
    )

    // 5.1 Convert each record's value (a JSON string) into a JSONObject.
    val jsonObjDStream: DStream[JSONObject] = offsetRangesDStream.map(
      record => JSON.parseObject(record.value())
    )

    // 5.2 Split the stream.
    // Fact tables (add more as needed).
    val factTables: Array[String] = Array[String]("order_info", "order_detail" /* add more as needed */)
    // Dimension tables (add more as needed).
    val dimTables: Array[String] = Array[String]("user_info", "base_province" /* add more as needed */)
    jsonObjDStream.foreachRDD(
      rdd => {
        rdd.foreachPartition(
          jsonObjIter => {
            // Borrow ONE Jedis connection per partition instead of one per
            // record, and return it to the pool even if processing throws.
            val jedis: Jedis = MyRedisUtils.getJedisFromPool()
            try {
              for (jsonObj <- jsonObjIter) {
                // Extract the CDC operation type and normalize it to I/U/D.
                val operType: String = jsonObj.getString("type")
                val opValue: String = operType match {
                  case "bootstrap-insert" => "I" // historical (bootstrap) rows count as inserts
                  case "insert" => "I"
                  case "update" => "U"
                  case "delete" => "D"
                  case _ => null // anything else (DDL, bootstrap-start/complete, ...) is ignored
                }

                // opValue != null doubles as the "interesting operation" filter.
                if (opValue != null) {
                  val tableName: String = jsonObj.getString("table")

                  if (factTables.contains(tableName)) {
                    // Fact data => Kafka, one topic per table + operation,
                    // e.g. DWD_ORDER_INFO_I / DWD_ORDER_INFO_U / DWD_ORDER_INFO_D
                    val data: String = jsonObj.getString("data")
                    val dwdTopicName = s"DWD_${tableName.toUpperCase}_${opValue}"
                    MyKafkaUtils.send(dwdTopicName, data)
                  }

                  if (dimTables.contains(tableName)) {
                    // Dimension data => Redis
                    //   type : string  (write: SET, read: GET, no expiry)
                    //   key  : DIM:<TABLE>:<id>
                    //   value: the row's "data" object as a JSON string
                    // NOTE(review): deletes are also written with SET, which
                    // leaves the deleted row cached — consider jedis.del(redisKey)
                    // when opValue == "D".
                    val dataObj: JSONObject = jsonObj.getJSONObject("data")
                    val id: String = dataObj.getString("id")
                    val redisKey = s"DIM:${tableName.toUpperCase}:${id}"
                    jedis.set(redisKey, dataObj.toJSONString)
                  }
                }
              }
            } finally {
              jedis.close() // returns the connection to the pool
            }
            // 6. Flush the Kafka producer buffer so the batch's records are
            //    durable before we commit offsets (at-least-once semantics).
            MyKafkaUtils.flush()
          }
        )
        // 7. Commit offsets on the driver, after all partitions have run.
        MyOffsetUtils.saveOffset(topicName, groupId, offsetRanges)
      }
    )

    ssc.start()
    ssc.awaitTermination()
  }
}
