package com.atguigu.gmall.realtime.app

import java.lang

import com.alibaba.fastjson.serializer.SerializeConfig
import com.alibaba.fastjson.{JSON, JSONArray, JSONObject}
import com.atguigu.gmall.realtime.bean.{PageActionLog, PageDisplayLog, PageLog, StartLog}
import com.atguigu.gmall.realtime.utils.{MyKafkaUtils, MyOffsetUtils}
import org.apache.kafka.clients.consumer.ConsumerRecord
import org.apache.kafka.common.TopicPartition
import org.apache.spark.SparkConf
import org.apache.spark.streaming.dstream.{DStream, InputDStream}
import org.apache.spark.streaming.kafka010.{HasOffsetRanges, OffsetRange}
import org.apache.spark.streaming.{Seconds, StreamingContext}

object OdsBaseLogApp {
  /**
   * ODS-layer log splitter.
   *
   * Consumes raw log JSON from the ODS Kafka topic and fans every record out to
   * the matching DWD topic:
   *   - error records (any record containing an "err" field)  => DWD_ERROR_TOPIC_0212
   *   - page view   (common + page + ts)                      => DWD_PAGE_TOPIC_0212
   *   - display     (displays + page + common + ts)           => DWD_DISPLAY_TOPIC_0212
   *   - action      (actions + page + common + ts)            => DWD_ACTION_TOPIC_0212
   *   - app start   (common + start + ts)                     => DWD_START_TOPIC_0212
   *
   * Offsets are managed manually: read from Redis on startup, saved back to
   * Redis at the end of each batch, so the job resumes exactly where it stopped.
   */
  def main(args: Array[String]): Unit = {
    // Rule of thumb: run as many local threads as there are Kafka partitions.
    val conf: SparkConf = new SparkConf().setAppName("ods_base_log_app").setMaster("local[4]")
    val context: StreamingContext = new StreamingContext(conf, Seconds(5))
    val topicId: String = "ODS_BASE_LOG_0212"
    val groupId: String = "ODS_BASE_LOG_GROUP"

    /*
     * Read previously saved offsets from Redis. If present, resume consumption
     * from them; otherwise fall back to Kafka's own offset-reset policy. This
     * read happens only once at startup — afterwards Kafka tracks offsets in
     * memory and we persist them back to Redis once per batch.
     */
    val storedOffsets: Map[TopicPartition, Long] = MyOffsetUtils.ReadOffset(topicId, groupId)
    val kafkaDStream: InputDStream[ConsumerRecord[String, String]] =
      if (storedOffsets.nonEmpty) {
        MyKafkaUtils.GetConsumerDStream(context, topicId, groupId, storedOffsets)
      } else {
        MyKafkaUtils.GetConsumerDStream(context, topicId, groupId)
      }

    /*
     * Capture each batch's offset ranges on the driver. Every batch yields a
     * KafkaRDD, which exposes its offsets through HasOffsetRanges. transform()
     * runs its body on the driver once per batch, so the var is refreshed
     * before the batch's data is processed and read again in foreachRDD below
     * when the offsets are saved. The var is the standard pattern here.
     */
    var offsetRanges: Array[OffsetRange] = null
    val currentKafkaDStream: DStream[ConsumerRecord[String, String]] = kafkaDStream.transform(
      rdd => {
        offsetRanges = rdd.asInstanceOf[HasOffsetRanges].offsetRanges
        rdd
      }
    )

    // ConsumerRecord itself is not serializable; pull the JSON payload out of
    // the record value and parse it once up front.
    val jsonDStream: DStream[JSONObject] = currentKafkaDStream.map(
      record => JSON.parseObject(record.value())
    )

    // Target DWD topics for the split streams.
    val dwdErrorTopic: String = "DWD_ERROR_TOPIC_0212"
    val dwdPageTopic: String = "DWD_PAGE_TOPIC_0212"
    val dwdDisplayTopic: String = "DWD_DISPLAY_TOPIC_0212"
    val dwdActionTopic: String = "DWD_ACTION_TOPIC_0212"
    val dwdStartTopic: String = "DWD_START_TOPIC_0212"

    jsonDStream.foreachRDD(
      rdd => {
        // foreachPartition runs on the executors, once per partition per batch,
        // so the producer buffer is flushed per partition rather than per record.
        rdd.foreachPartition(
          jsonObjIter => {
            // fastjson's JSON.toJSONString normally requires getters/setters,
            // which Scala case classes do not generate. SerializeConfig(true)
            // switches it to field-based access. One reusable instance per
            // partition instead of a fresh allocation per record.
            val serializeConfig = new SerializeConfig(true)
            for (jsonObj <- jsonObjIter) {
              // 1) Error data: anything carrying an "err" field goes straight
              //    to the error topic, unmodified.
              val errObj: JSONObject = jsonObj.getJSONObject("err")
              if (errObj != null) {
                MyKafkaUtils.send(dwdErrorTopic, jsonObj.toString())
              } else {
                // Common fields shared by all non-error log types.
                val commonObj: JSONObject = jsonObj.getJSONObject("common")
                val ar: String = commonObj.getString("ar")
                val uid: String = commonObj.getString("uid")
                val os: String = commonObj.getString("os")
                val ch: String = commonObj.getString("ch")
                val isNew: String = commonObj.getString("is_new")
                val md: String = commonObj.getString("md")
                val mid: String = commonObj.getString("mid")
                val vc: String = commonObj.getString("vc")
                val ba: String = commonObj.getString("ba")
                // Event timestamp, shared by every split record.
                val ts: java.lang.Long = jsonObj.getLong("ts")

                // 2) Page log data (page view, plus nested displays/actions).
                val pageObj: JSONObject = jsonObj.getJSONObject("page")
                if (pageObj != null) {
                  val pageId: String = pageObj.getString("page_id")
                  val lastPageId: String = pageObj.getString("last_page_id")
                  val pageItem: String = pageObj.getString("item")
                  val pageItemType: String = pageObj.getString("item_type")
                  val duringTime: java.lang.Long = pageObj.getLong("during_time")
                  val sourceType: String = pageObj.getString("source_type")

                  // Page view record => page topic.
                  val pageLog: PageLog =
                    PageLog(mid, uid, ar, ch, isNew, md, os, vc, ba, pageId, lastPageId, pageItem, pageItemType, duringTime, sourceType, ts)
                  MyKafkaUtils.send(dwdPageTopic, JSON.toJSONString(pageLog, serializeConfig))

                  // Display (exposure) records => display topic, one per entry.
                  val displayArr: JSONArray = jsonObj.getJSONArray("displays")
                  if (displayArr != null && displayArr.size() > 0) {
                    for (i <- 0 until displayArr.size()) {
                      val displayObj: JSONObject = displayArr.getJSONObject(i)
                      val displayType: String = displayObj.getString("display_type")
                      val displayItem: String = displayObj.getString("item")
                      val displayItemType: String = displayObj.getString("item_type")
                      val posId: String = displayObj.getString("pos_id")
                      val order: String = displayObj.getString("order")

                      val pageDisplayLog: PageDisplayLog =
                        PageDisplayLog(mid, uid, ar, ch, isNew, md, os, vc, ba, pageId, lastPageId, pageItem, pageItemType, duringTime, sourceType, displayType, displayItem, displayItemType, order, posId, ts)
                      MyKafkaUtils.send(dwdDisplayTopic, JSON.toJSONString(pageDisplayLog, serializeConfig))
                    }
                  }

                  // Action (event) records => action topic, one per entry.
                  val actionArr: JSONArray = jsonObj.getJSONArray("actions")
                  if (actionArr != null && actionArr.size() > 0) {
                    for (i <- 0 until actionArr.size()) {
                      val actionObj: JSONObject = actionArr.getJSONObject(i)
                      val actionId: String = actionObj.getString("action_id")
                      val actionItem: String = actionObj.getString("item")
                      val actionItemType: String = actionObj.getString("item_type")
                      val actionTs: java.lang.Long = actionObj.getLong("ts")

                      val pageActionLog: PageActionLog =
                        PageActionLog(mid, uid, ar, ch, isNew, md, os, vc, ba, pageId, lastPageId, pageItem, pageItemType, duringTime, sourceType, actionId, actionItem, actionItemType, actionTs, ts)
                      MyKafkaUtils.send(dwdActionTopic, JSON.toJSONString(pageActionLog, serializeConfig))
                    }
                  }
                }

                // 3) App start log data => start topic.
                val startObj: JSONObject = jsonObj.getJSONObject("start")
                if (startObj != null) {
                  val entry: String = startObj.getString("entry")
                  val openAdId: String = startObj.getString("open_ad_id")
                  val openAdMs: java.lang.Long = startObj.getLong("open_ad_ms")
                  val openAdSkipMs: java.lang.Long = startObj.getLong("open_ad_skip_ms")
                  val loadingTime: java.lang.Long = startObj.getLong("loading_time")

                  val startLog: StartLog =
                    StartLog(mid, uid, ar, ch, isNew, md, os, vc, ba, entry, openAdId, loadingTime, openAdMs, openAdSkipMs, ts)
                  MyKafkaUtils.send(dwdStartTopic, JSON.toJSONString(startLog, serializeConfig))
                }
              }
            }
            // Sends above are async; flush so this partition's data is durably
            // in Kafka before the driver saves the batch's offsets below.
            MyKafkaUtils.flush()
          }
        )
        /*
         * Runs on the driver, once per batch, after the partitions finish:
         * only now — with the data flushed to Kafka — commit this batch's
         * offsets to Redis (at-least-once semantics).
         */
        MyOffsetUtils.SaveOffset(topicId, groupId, offsetRanges)
      }
    )

    context.start()
    context.awaitTermination()
  }
}
