package com.zhang.gmall.app

import com.alibaba.fastjson.serializer.SerializeConfig
import com.alibaba.fastjson.{JSON, JSONArray, JSONObject}
import com.zhang.gmall.bean.{PageActionLog, PageDisplayLog, PageLog, StartLog}
import com.zhang.gmall.util.{MyKafkaUtils, MyOffsetUtil}
import org.apache.kafka.clients.consumer.ConsumerRecord
import org.apache.kafka.common.TopicPartition
import org.apache.spark.SparkConf
import org.apache.spark.streaming.dstream.{DStream, InputDStream}
import org.apache.spark.streaming.kafka010.{HasOffsetRanges, OffsetRange}
import org.apache.spark.streaming.{Seconds, StreamingContext}


/**
 * @title: 日志数据分流
 * @author: zhang
 * @date: 2022/3/20 19:41 
 */
object ODSBaseLogApp {

  def main(args: Array[String]): Unit = {

    // TODO 1. Set up the realtime environment.
    // Note: the parallelism should match the partition count of the Kafka topic.
    val sparkConf: SparkConf = new SparkConf().setAppName("ods_base_log_app").setMaster("local[4]")
    val ssc: StreamingContext = new StreamingContext(sparkConf, Seconds(5))

    // TODO 2. Consume data from Kafka.
    val topicName: String = "ODS_BASE_LOG_SPARK"
    val groupId: String = "ODS_BASE_LOG_GROUP"

    // TODO Optimization: offsets are committed manually and stored in Redis (via MyOffsetUtil).
    val offsets: Map[TopicPartition, Long] = MyOffsetUtil.getOffset(topicName, groupId)
    // Resume from the saved offsets when present, otherwise start from the
    // consumer's default position. (val + if-expression instead of a null var.)
    val kafkaDS: InputDStream[ConsumerRecord[String, String]] =
      if (offsets != null && offsets.nonEmpty) {
        MyKafkaUtils.getKafkaDStream(ssc, topicName, groupId, offsets)
      } else {
        MyKafkaUtils.getKafkaDStream(ssc, topicName, groupId)
      }

    // Capture each batch's offset ranges on the driver without transforming the
    // records. transform() runs periodically on the driver, so assigning to
    // this driver-side var here is safe; it is read again in foreachRDD below
    // (also driver-side) when the batch's offsets are persisted.
    var offsetRanges: Array[OffsetRange] = null
    val offsetRangesDS: DStream[ConsumerRecord[String, String]] = kafkaDS.transform(
      rdd => {
        offsetRanges = rdd.asInstanceOf[HasOffsetRanges].offsetRanges
        rdd
      }
    )

    // TODO 3. Convert each record value into a JSON object.
    val kafkaJsonObj: DStream[JSONObject] = offsetRangesDS.map(record => JSON.parseObject(record.value()))
    //kafkaJsonObj.print(100)

    // Target DWD topics for the split streams.
    val DWD_PAGE_LOG_TOPIC: String = "DWD_PAGE_LOG_TOPIC_SPARK" // page visits
    val DWD_PAGE_DISPLAY_TOPIC: String = "DWD_PAGE_DISPLAY_TOPIC_SPARK" // page exposures (displays)
    val DWD_PAGE_ACTION_TOPIC: String = "DWD_PAGE_ACTION_TOPIC_SPARK" // page actions (events)
    val DWD_START_LOG_TOPIC: String = "DWD_START_LOG_TOPIC_SPARK" // app-start data
    val DWD_ERROR_LOG_TOPIC: String = "DWD_ERROR_LOG_TOPIC_SPARK" // error data

    kafkaJsonObj.foreachRDD(
      rdd => {
        rdd.foreachPartition(
          jsonObjIter => {
            // One fastjson config per partition instead of one per message —
            // `true` enables field-based serialization so Scala case classes
            // serialize without JavaBean getters.
            val serializeConfig = new SerializeConfig(true)

            for (jsonObj <- jsonObjIter) {
              // TODO 4. Error data: forward the whole raw record to the error
              // topic. Note there is deliberately no `else` — a record carrying
              // an "err" field still flows through the page/start splitting below.
              val errJsonObj: JSONObject = jsonObj.getJSONObject("err")
              if (errJsonObj != null) {
                MyKafkaUtils.send(DWD_ERROR_LOG_TOPIC, jsonObj.toJSONString)
              }

              // Extract the shared "common" fields used by every output bean.
              // NOTE(review): if an error-only record has no "common" object this
              // NPEs and fails the batch — confirm the upstream schema guarantees it.
              val commonObj: JSONObject = jsonObj.getJSONObject("common")
              val ar: String = commonObj.getString("ar")
              val uid: String = commonObj.getString("uid")
              val os: String = commonObj.getString("os")
              val ch: String = commonObj.getString("ch")
              val isNew: String = commonObj.getString("is_new")
              val md: String = commonObj.getString("md")
              val mid: String = commonObj.getString("mid")
              val vc: String = commonObj.getString("vc")
              val ba: String = commonObj.getString("ba")
              // Record timestamp shared by every output bean.
              val ts: Long = jsonObj.getLong("ts")

              // TODO 5. Page data: extract and wrap into a PageLog bean.
              val pageObj: JSONObject = jsonObj.getJSONObject("page")
              if (pageObj != null) {
                // Page-level fields.
                val pageId: String = pageObj.getString("page_id")
                val pageItem: String = pageObj.getString("item")
                val pageItemType: String = pageObj.getString("item_type")
                val duringTime: Long = pageObj.getLong("during_time")
                val lastPageId: String = pageObj.getString("last_page_id")
                val sourceType: String = pageObj.getString("source_type")

                val pageLog =
                  PageLog(mid, uid, ar, ch, isNew, md, os, vc, ba, pageId, lastPageId, pageItem, pageItemType, duringTime, sourceType, ts)
                MyKafkaUtils.send(DWD_PAGE_LOG_TOPIC, JSON.toJSONString(pageLog, serializeConfig))

                // TODO 6. Exposure data (optional "displays" array on the record).
                val displayArray: JSONArray = jsonObj.getJSONArray("displays")
                if (displayArray != null && displayArray.size() > 0) {
                  for (i <- 0 until displayArray.size()) {
                    val displayObj: JSONObject = displayArray.getJSONObject(i)
                    // Exposure-level fields; page/common fields are denormalized in.
                    val displayType: String = displayObj.getString("display_type")
                    val displayItem: String = displayObj.getString("item")
                    val displayItemType: String = displayObj.getString("item_type")
                    val posId: String = displayObj.getString("pos_id")
                    val order: String = displayObj.getString("order")

                    val pageDisplayLog =
                      PageDisplayLog(mid, uid, ar, ch, isNew, md, os, vc, ba, pageId, lastPageId, pageItem, pageItemType, duringTime, sourceType, displayType, displayItem, displayItemType, order, posId, ts)
                    MyKafkaUtils.send(DWD_PAGE_DISPLAY_TOPIC, JSON.toJSONString(pageDisplayLog, serializeConfig))
                  }
                }

                // TODO 7. Action data (optional "actions" array on the record).
                val actionArr: JSONArray = jsonObj.getJSONArray("actions")
                if (actionArr != null && actionArr.size() > 0) {
                  for (i <- 0 until actionArr.size()) {
                    val actionObj: JSONObject = actionArr.getJSONObject(i)
                    val actionItem: String = actionObj.getString("item")
                    val actionId: String = actionObj.getString("action_id")
                    val actionType: String = actionObj.getString("item_type")
                    // Actions carry their own timestamp in addition to the record's ts.
                    val actionTs: Long = actionObj.getLong("ts")

                    val pageActionLog: PageActionLog = PageActionLog(mid, uid, ar, ch, isNew, md, os, vc, ba, pageId,
                      lastPageId, pageItem, pageItemType, duringTime,
                      actionId, actionItem, actionType, sourceType, actionTs, ts)
                    MyKafkaUtils.send(DWD_PAGE_ACTION_TOPIC, JSON.toJSONString(pageActionLog, serializeConfig))
                  }
                }
              }

              // TODO 8. App-start data (optional "start" object on the record).
              val startJsonObj: JSONObject = jsonObj.getJSONObject("start")
              if (startJsonObj != null) {
                val entry: String = startJsonObj.getString("entry")
                val loadingTim = startJsonObj.getLong("loading_time")
                val adId = startJsonObj.getString("open_ad_id")
                val adMs = startJsonObj.getLong("open_ad_ms")
                val adSkipMs = startJsonObj.getLong("open_ad_skip_ms")

                val startLog: StartLog = StartLog(mid, uid, ar, ch, isNew, md, os, vc, ba,
                  entry, adId, loadingTim, adMs, adSkipMs, ts
                )
                MyKafkaUtils.send(DWD_START_LOG_TOPIC, JSON.toJSONString(startLog, serializeConfig))
              }
            }

            // Flush the Kafka producer here, inside foreachPartition: the sends
            // happen on the executors, so each executor must flush its OWN
            // producer buffer (once per partition per batch). Flushing on the
            // driver would flush a different producer instance and lose data.
            MyKafkaUtils.flush()
          }
        )
        // Persist offsets once per batch, on the driver, AFTER the partition
        // work has been scheduled — at-least-once delivery.
        MyOffsetUtil.saveOffset(topicName, groupId, offsetRanges)
      }
    )

    // TODO 9. Start the job and block until termination.
    ssc.start()
    ssc.awaitTermination()
  }
}
