package com.atguigu.gmall.realtime.app

import java.lang

import com.alibaba.fastjson.serializer.SerializeConfig
import com.alibaba.fastjson.{JSON, JSONObject}
import com.atguigu.gmall.realtime.bean.PageLog
import com.atguigu.gmall.realtime.util.{MyKafkaSender, MyKafkaUtil, OffsetManager}
import org.apache.kafka.clients.consumer.ConsumerRecord
import org.apache.kafka.common.TopicPartition
import org.apache.spark.SparkConf
import org.apache.spark.streaming.dstream.{DStream, InputDStream}
import org.apache.spark.streaming.kafka010.{HasOffsetRanges, OffsetRange}
import org.apache.spark.streaming.{Seconds, StreamingContext}

object OdsBaseLogApp {

  /**
   * ODS-layer log splitting job.
   *
   * Consumes raw log records from the `ODS_BASE_LOG` Kafka topic, resumes from
   * manually managed offsets, extracts page-view events and forwards them to
   * the `DWD_PAGE_LOG` topic, then commits the batch's offsets — giving
   * at-least-once delivery (flush before commit).
   */
  def main(args: Array[String]): Unit = {
    // 0. Initialize the streaming environment (5-second micro-batches).
    val sparkConf: SparkConf = new SparkConf().setAppName("ods_base_log_app").setMaster("local[4]")
    val ssc = new StreamingContext(sparkConf, Seconds(5))

    val topic = "ODS_BASE_LOG"
    val groupId = "ods_base_log_app"
    val pageLogTopic = "DWD_PAGE_LOG"

    // 1. Resume from the last committed offsets (empty map => default position)
    //    and attach a Kafka direct stream.
    val offsetMap: Map[TopicPartition, Long] = OffsetManager.getOffset(topic, groupId)
    val inputDstream: InputDStream[ConsumerRecord[String, String]] =
      MyKafkaUtil.getKafkaStream(topic, ssc, offsetMap, groupId)

    // Capture each batch's per-partition offset ranges on the driver so they
    // can be committed only after the batch has been fully processed.
    var offsetRanges: Array[OffsetRange] = null
    val inputWithOffsetDstream: DStream[ConsumerRecord[String, String]] = inputDstream.transform { rdd =>
      offsetRanges = rdd.asInstanceOf[HasOffsetRanges].offsetRanges // runs on the driver, once per batch
      rdd
    }

    // 2. Parse every record value into a generic JSON object.
    val jsonObjDStream: DStream[JSONObject] = inputWithOffsetDstream.map { record =>
      JSON.parseObject(record.value())
    }

    // 3. Split per record type and route to the matching DWD topic.
    jsonObjDStream.foreachRDD { rdd =>
      rdd.foreachPartition { jsonObjItr =>
        for (jsonObj <- jsonObjItr) {
          val commonJsonObj: JSONObject = jsonObj.getJSONObject("common")
          // Guard against malformed records with no "common" envelope instead
          // of crashing the whole streaming job with a NullPointerException.
          if (commonJsonObj != null) {
            val mid: String = commonJsonObj.getString("mid")
            val uid: String = commonJsonObj.getString("uid")
            val provinceId: String = commonJsonObj.getString("ar")
            val channel: String = commonJsonObj.getString("ch")
            val isNew: String = commonJsonObj.getString("is_new")
            val model: String = commonJsonObj.getString("md")
            val operateSystem: String = commonJsonObj.getString("os")
            val versionCode: String = commonJsonObj.getString("vc")

            val pageJsonObj: JSONObject = jsonObj.getJSONObject("page")
            if (pageJsonObj != null && pageJsonObj.size() > 0) {
              // Page-view event => DWD_PAGE_LOG.
              val duringTime: lang.Long = pageJsonObj.getLong("during_time")
              val pageItem: String = pageJsonObj.getString("item")
              val pageItemType: String = pageJsonObj.getString("item_type")
              val pageId: String = pageJsonObj.getString("page_id")
              val lastPageId: String = pageJsonObj.getString("last_page_id")
              val ts: lang.Long = jsonObj.getLong("ts")

              val pageLog = PageLog(mid, uid, provinceId, channel, isNew, model,
                operateSystem, versionCode, pageId, lastPageId, pageItem,
                pageItemType, duringTime, ts)
              // Scala case classes have no getters/setters, so fastjson must
              // serialize via field reflection (SerializeConfig(true)).
              val pageJson: String = JSON.toJSONString(pageLog, new SerializeConfig(true))
              MyKafkaSender.send(pageLogTopic, pageJson)
            }
            // TODO: route start/display/action/error log types to their own
            // DWD topics (the original placeholder branch was dead code).
          }
        }
        // Flush once per partition per batch so every buffered record reaches
        // Kafka before this batch's offsets are committed below.
        MyKafkaSender.flush()
      }
      // Commit offsets on the driver, once per batch, after processing —
      // at-least-once semantics: a crash before this line replays the batch.
      OffsetManager.saveOffset(topic, groupId, offsetRanges)
    }

    ssc.start()
    ssc.awaitTermination()
  }
}
