package com.xuejiujiu.gmall.realtine.app

import com.alibaba.fastjson.JSON
import com.alibaba.fastjson.serializer.SerializeConfig
import com.xuejiujiu.gmall.realtine.bean.{PageActionLog, PageDisplayLog, PageLog, StartLog}
import com.xuejiujiu.gmall.realtine.util.MyKafkaUtiles
import org.apache.kafka.clients.consumer.ConsumerRecord
import org.apache.spark.SparkConf
import org.apache.spark.streaming.dstream.DStream
import org.apache.spark.streaming.kafka010.{HasOffsetRanges, OffsetRange}
import org.apache.spark.streaming.{Seconds, StreamingContext}

/**
 * 日志数据的消费分流
 * 分析:
 * 1. 准备实时处理环境 StreamingContext
 *
 * 2. 从Kafka中消费数据
 *
 * 3. 处理数据
 * 3.1 转换数据结构
 *
 * 3.2 数据分流
 *
 * 4. 写出到Dwd层
 */
object OdsBaseLogApp {
  def main(args: Array[String]): Unit = {
    // 1. Set up the streaming environment.
    // local[n]: n should ideally match the Kafka topic's partition count so that
    // each partition gets a dedicated consumer thread.
    val sparkConf = new SparkConf().setAppName("ods_base_log_app").setMaster("local[3]")
    val ssc = new StreamingContext(sparkConf, Seconds(5))

    // 2. Consume raw log data from Kafka.
    val topicName = "ODS_BASE_LOG_1018" // must match the topic configured in the log generator
    val groupId = "ODS_BASE_LOG_GROUP_1018"
    val kafkaDStream = MyKafkaUtiles.getKafkaDStream(ssc, topicName, groupId)
    //        kafkaDStream.print(100)

    // Capture the offset ranges of every consumed batch without touching the data.
    // The function passed to transform() runs on the DRIVER once per batch, so
    // assigning to the driver-side `offsetRanges` variable here is safe.
    var offsetRanges: Array[OffsetRange] = null
    val offsetRangesDStream: DStream[ConsumerRecord[String, String]] = kafkaDStream.transform(
      rdd => {
        // Runs on the Driver (transform body), NOT on executors.
        offsetRanges = rdd.asInstanceOf[HasOffsetRanges].offsetRanges
        rdd
      }
    )

    // 3. Process the data.
    // 3.1 Convert each record's value (one JSON log line) into a JSON object.
    // BUG FIX: the pipeline must be derived from offsetRangesDStream (not from
    // kafkaDStream directly) — otherwise the offset-capturing transform above is
    // never part of the executed lineage and `offsetRanges` stays null.
    val jsonObjDStream = offsetRangesDStream.map(
      consumerRecord => {
        // The record value carries the raw log line.
        val log = consumerRecord.value()
        // NOTE(review): parseObject throws on malformed JSON, which would fail the
        // whole batch — confirm whether bad records should be skipped instead.
        JSON.parseObject(log)
      }
    )
    // Printing would consume the stream — keep disabled.
    //    jsonObjDStream.print(1000)

    // 3.2 Split the stream into DWD-layer topics.
    //   Log data:
    //     page-visit data -> page log / display (exposure) / action (event) topics
    //     start data      -> start topic
    //     error data      -> error topic (sent whole, no further splitting)
    val DWD_PAGE_LOG_TOPIC = "DWD_PAGE_LOG_TOPIC_1018"         // page visits (typo fixed: lOG -> LOG)
    val DWD_PAGE_DISPLAY_TOPIC = "DWD_PAGE_DISPLAY_TOPIC_1018" // page displays (exposure)
    val DWD_PAGE_ACTION_TOPIC = "DWD_PAGE_ACTION_TOPIC_1018"   // page actions (events)
    val DWD_START_LOG_TOPIC = "DWD_START_LOG_TOPIC_1018"       // app-start data
    val DWD_ERROR_LOG_TOPIC = "DWD_ERROR_LOG_TOPIC_1018"       // error data

    /**
     * Splitting rules:
     *  - Error data: not split — any record containing an "err" field is sent
     *    whole to the error topic.
     *  - Page data: split into page-visit, display and action records, each sent
     *    to its own topic.
     *  - Start data: sent to the start topic.
     */
    jsonObjDStream.foreachRDD(
      rdd => {
        // This lambda runs on EXECUTORS, once per record.
        rdd.foreach(
          jsonObj => {
            // fastjson config shared by all sends for this record:
            // fieldBased = true serializes fields directly (no JavaBean getters),
            // which the Scala case-class-style beans require.
            val serializeConfig = new SerializeConfig(true)

            // --- error data: forward unmodified ---
            val errObj = jsonObj.getJSONObject("err")
            if (errObj != null) {
              MyKafkaUtiles.send(DWD_ERROR_LOG_TOPIC, jsonObj.toJSONString)
            } else {
              // Common fields shared by page and start logs.
              val commonObj = jsonObj.getJSONObject("common")
              val ar = commonObj.getString("ar")
              val uid = commonObj.getString("uid")
              val os = commonObj.getString("os")
              val ch = commonObj.getString("ch")
              val isNew = commonObj.getString("is_new")
              val md = commonObj.getString("md")
              val mid = commonObj.getString("mid")
              val vc = commonObj.getString("vc")
              val ba = commonObj.getString("ba")
              // Event timestamp of the whole log record.
              val ts = jsonObj.getLong("ts")

              // --- page-visit data ---
              val pageObj = jsonObj.getJSONObject("page")
              if (pageObj != null) {
                val pageId = pageObj.getString("page_id")
                // NOTE(review): keys "page_item" / "page_type" look inconsistent with
                // the usual generator schema ("item" / "item_type") — verify against
                // the log producer before relying on these fields being non-null.
                val pageItem = pageObj.getString("page_item")
                val pageItemType = pageObj.getString("page_type")
                val duringTime = pageObj.getLong("during_time")
                val lastPageId = pageObj.getString("last_page_id")
                val sourceType = pageObj.getString("source_type")

                // Wrap into a PageLog bean and publish.
                val pageLog =
                  PageLog(mid, uid, ar, ch, isNew, md, os, vc, ba, pageId, lastPageId, pageItem, pageItemType, duringTime, sourceType, ts)
                MyKafkaUtiles.send(DWD_PAGE_LOG_TOPIC, JSON.toJSONString(pageLog, serializeConfig))

                // --- display (exposure) data: one record per array element ---
                val displaysJsonArr = jsonObj.getJSONArray("displays")
                if (displaysJsonArr != null && displaysJsonArr.size() > 0) {
                  for (i <- 0 until displaysJsonArr.size()) {
                    val displayObj = displaysJsonArr.getJSONObject(i)
                    val displayType = displayObj.getString("display_type")
                    val displayItem = displayObj.getString("item")
                    val displayItemType = displayObj.getString("item_type")
                    val posId = displayObj.getString("pos_id")
                    val order = displayObj.getString("order")
                    // Each display record also carries the page/common context.
                    val pageDisplayLog =
                      PageDisplayLog(mid, uid, ar, ch, isNew, md, os, vc, ba, pageId, lastPageId, pageItem, pageItemType, duringTime, sourceType, displayType
                        , displayItem, displayItemType, posId, order, ts)
                    MyKafkaUtiles.send(DWD_PAGE_DISPLAY_TOPIC, JSON.toJSONString(pageDisplayLog, serializeConfig))
                  }
                }

                // --- action (event) data: one record per array element ---
                val pageActionArr = jsonObj.getJSONArray("actions")
                if (pageActionArr != null && pageActionArr.size() > 0) {
                  for (i <- 0 until pageActionArr.size()) {
                    val pageActionObj = pageActionArr.getJSONObject(i)
                    val actionId = pageActionObj.getString("action_id")
                    val actionItem = pageActionObj.getString("action_item")
                    val actionItemType = pageActionObj.getString("action_item_type")
                    val actionTs = pageActionObj.getLong("action_ts")
                    // Each action record also carries the page/common context.
                    val pageActionLog = PageActionLog(mid, uid, ar, ch, isNew, md, os, vc, ba, pageId, lastPageId, pageItem, pageItemType, duringTime, sourceType, actionId, actionItem, actionItemType, actionTs, ts)
                    MyKafkaUtiles.send(DWD_PAGE_ACTION_TOPIC, JSON.toJSONString(pageActionLog, serializeConfig))
                  }
                }
              }

              // --- app-start data ---
              val startJsonObj = jsonObj.getJSONObject("start")
              if (startJsonObj != null) {
                val entry = startJsonObj.getString("entry")
                val openAdSkipMs = startJsonObj.getLong("open_ad_skip_ms")
                val openAdMs = startJsonObj.getLong("open_ad_ms")
                val loadingTimeMs = startJsonObj.getLong("loading_time_ms")
                val openAdId = startJsonObj.getString("open_ad_id")

                // Wrap into a StartLog bean and publish.
                val startLog = StartLog(mid, uid, ar, ch, isNew, md, os, vc, ba, entry, openAdId, loadingTimeMs, openAdMs, openAdSkipMs, ts)
                MyKafkaUtiles.send(DWD_START_LOG_TOPIC, JSON.toJSONString(startLog, serializeConfig))
              }
            }
          }
        )
        // TODO(review): `offsetRanges` is captured above but never committed back to
        // Kafka — after this batch's sends succeed, offsets should be persisted
        // (e.g. commitAsync or an external offset store), otherwise a restart
        // reprocesses data from the last auto-committed position.
      }
    )

    ssc.start()
    ssc.awaitTermination()
  }
}
