package com.gt.app

import com.alibaba.fastjson.serializer.SerializeConfig
import com.alibaba.fastjson.{JSON, JSONArray, JSONObject}
import com.gt.bean.{PageDisplayLog, PageLog, StartLog}
import com.gt.util.{MyKafkaUtils, MyOffsetsUtils}
import org.apache.kafka.clients.consumer.ConsumerRecord
import org.apache.kafka.common.TopicPartition
import org.apache.spark.SparkConf
import org.apache.spark.streaming.dstream.{DStream, InputDStream}
import org.apache.spark.streaming.kafka010.{HasOffsetRanges, OffsetRange}
import org.apache.spark.streaming.{Seconds, StreamingContext}

/**
 * ODS layer log-splitting job.
 *
 * Consumes raw behavior logs from the ODS Kafka topic and fans them out to
 * DWD topics according to the split rules:
 *   - error data:  any record containing an "err" field is forwarded whole;
 *   - page data:   split into page-view, display (exposure), and action records;
 *   - start data:  forwarded as app-launch records.
 *
 * Offsets are managed manually (read before the stream is created, saved after
 * each batch is processed) via MyOffsetsUtils to get at-least-once semantics.
 */
object OdsBaseLogApp2 {

  val topic = "ODS_BASE_LOG"
  val groupId = "OdsBaseLogApp2_001"

  val DWD_PAGE_LOG_TOPIC: String = "DWD_PAGE_LOG_TOPIC_1018" // page views
  val DWD_PAGE_DISPLAY_TOPIC: String = "DWD_PAGE_DISPLAY_TOPIC_1018" // page displays (exposures)
  val DWD_PAGE_ACTION_TOPIC: String = "DWD_PAGE_ACTION_TOPIC_1018" // page actions
  val DWD_START_LOG_TOPIC: String = "DWD_START_LOG_TOPIC_1018" // app-start data
  val DWD_ERROR_LOG_TOPIC: String = "DWD_ERROR_LOG_TOPIC_1018" // error data

  def main(args: Array[String]): Unit = {
    //1. Create the StreamingContext
    //1.1 Spark configuration
    val conf: SparkConf = new SparkConf().setAppName("OdsBaseLogApp2").setMaster("local[1]")
    //1.2 5-second micro-batches
    val ssc = new StreamingContext(conf, Seconds(5))

    //x1. Read previously committed offsets (may be null/empty on first run)
    val offsets: Map[TopicPartition, Long] = MyOffsetsUtils.readOffset(topic, groupId)

    //2. Create the Kafka direct stream, resuming from stored offsets when available
    val kafkaDStream: InputDStream[ConsumerRecord[String, String]] =
      if (offsets != null && offsets.nonEmpty) {
        MyKafkaUtils.getKafkaDStream(ssc, topic, groupId, offsets)
      } else {
        MyKafkaUtils.getKafkaDStream(ssc, topic, groupId)
      }

    //x2. Capture the offset ranges of each batch without altering the data.
    // transform() runs on the driver per batch and keeps the capture on the
    // same lineage that the processing below consumes (canonical pattern from
    // the Spark + Kafka integration guide).
    var offsetRanges: Array[OffsetRange] = null
    val offsetDStream: DStream[ConsumerRecord[String, String]] = kafkaDStream.transform(rdd => {
      offsetRanges = rdd.asInstanceOf[HasOffsetRanges].offsetRanges // driver-side
      rdd
    })

    //3. Parse each record value into a JSONObject
    val jsonObjDStream: DStream[JSONObject] = offsetDStream.map(_.value()).map(JSON.parseObject(_))

    //4. Route records to the proper DWD topic.
    // Split rules:
    //  - error data: no splitting; if the "err" field is present, send the whole record
    //  - page data:  split into page-view / display / action records, each to its topic
    //  - start data: send to the start topic
    // NOTE(review): DWD_PAGE_ACTION_TOPIC is declared but action records are never
    // extracted below — the "actions" array is silently dropped. Confirm whether
    // action splitting is still pending or intentionally omitted.
    jsonObjDStream.foreachRDD(rdd => {
      rdd.foreachPartition(it => {
        it.foreach(jsonObj => {
          val errObj: JSONObject = jsonObj.getJSONObject("err")
          if (errObj != null) {
            // Error data: forward the entire record unmodified
            MyKafkaUtils.send(DWD_ERROR_LOG_TOPIC, jsonObj.toJSONString)
          } else {
            // Common fields shared by page and start records
            val commonObj: JSONObject = jsonObj.getJSONObject("common")
            val ar: String = commonObj.getString("ar")
            val uid: String = commonObj.getString("uid")
            val os: String = commonObj.getString("os")
            val ch: String = commonObj.getString("ch")
            val isNew: String = commonObj.getString("is_new")
            val md: String = commonObj.getString("md")
            val mid: String = commonObj.getString("mid")
            val vc: String = commonObj.getString("vc")
            val ba: String = commonObj.getString("ba")
            // Event timestamp
            val ts: Long = jsonObj.getLong("ts")

            // Page data
            val pageObj: JSONObject = jsonObj.getJSONObject("page")
            if (pageObj != null) {
              // Page-level fields
              val pageId: String = pageObj.getString("page_id")
              val pageItem: String = pageObj.getString("item")
              val pageItemType: String = pageObj.getString("item_type")
              val duringTime: Long = pageObj.getLong("during_time")
              val lastPageId: String = pageObj.getString("last_page_id")
              val sourceType: String = pageObj.getString("source_type")
              // Build a PageLog and send to the page-view topic.
              // SerializeConfig(true) makes fastjson serialize via fields
              // (Scala case classes expose no JavaBean getters).
              val pageLog =
                PageLog(mid, uid, ar, ch, isNew, md, os, vc, ba, pageId, lastPageId, pageItem, pageItemType, duringTime, sourceType, ts)
              MyKafkaUtils.send(DWD_PAGE_LOG_TOPIC, JSON.toJSONString(pageLog, new SerializeConfig(true)))

              // Display (exposure) data: one record per entry in "displays"
              val displaysJsonArr: JSONArray = jsonObj.getJSONArray("displays")
              if (displaysJsonArr != null && displaysJsonArr.size() > 0) {
                for (i <- 0 until displaysJsonArr.size()) {
                  val displayObj: JSONObject = displaysJsonArr.getJSONObject(i)
                  // Display-level fields
                  val displayType: String = displayObj.getString("display_type")
                  val displayItem: String = displayObj.getString("item")
                  val displayItemType: String = displayObj.getString("item_type")
                  val posId: String = displayObj.getString("pos_id")
                  val order: String = displayObj.getString("order")

                  // Build a PageDisplayLog (page context + display fields) and send
                  val pageDisplayLog =
                    PageDisplayLog(mid, uid, ar, ch, isNew, md, os, vc, ba, pageId, lastPageId, pageItem, pageItemType, duringTime, sourceType, displayType, displayItem, displayItemType, order, posId, ts)
                  MyKafkaUtils.send(DWD_PAGE_DISPLAY_TOPIC, JSON.toJSONString(pageDisplayLog, new SerializeConfig(true)))
                }
              }

            }

            // Start (app-launch) data
            val startJsonObj: JSONObject = jsonObj.getJSONObject("start")
            if (startJsonObj != null) {
              // Launch-level fields
              val entry: String = startJsonObj.getString("entry")
              val loadingTime: Long = startJsonObj.getLong("loading_time")
              val openAdId: String = startJsonObj.getString("open_ad_id")
              val openAdMs: Long = startJsonObj.getLong("open_ad_ms")
              val openAdSkipMs: Long = startJsonObj.getLong("open_ad_skip_ms")

              // Build a StartLog and send to the start topic
              val startLog =
                StartLog(mid, uid, ar, ch, isNew, md, os, vc, ba, entry, openAdId, loadingTime, openAdMs, openAdSkipMs, ts)
              MyKafkaUtils.send(DWD_START_LOG_TOPIC, JSON.toJSONString(startLog, new SerializeConfig(true)))
            }
          }
        })
      })
      //5. Persist offsets only after the batch is fully processed (at-least-once)
      MyOffsetsUtils.saveOffset(topic, groupId, offsetRanges)
    })

    //6. Start the streaming job and block until termination
    ssc.start()
    ssc.awaitTermination()
  }

}
