package com.qiangsheng.gmall.realtime.app

import com.alibaba.fastjson._
import com.alibaba.fastjson.serializer.SerializeConfig
import com.qiangsheng.gmall.realtime.bean.{PageActionLog, PageDisplayLog, PageLog, StartLog}
import com.qiangsheng.gmall.realtime.util.{MyKafkaUtils, MyOffsetUtils}
import org.apache.kafka.clients.consumer.ConsumerRecord
import org.apache.kafka.common.TopicPartition
import org.apache.spark.SparkConf
import org.apache.spark.streaming.dstream.{DStream, InputDStream}
import org.apache.spark.streaming.kafka010.{HasOffsetRanges, OffsetRange}
import org.apache.spark.streaming.{Seconds, StreamingContext}

/**
 * ODS layer: consume raw behavior logs from Kafka topic ODS_BASE_LOG and split
 * them into DWD topics (page / display / action / start / error).
 *
 * Offset management is manual: offsets are read from Redis before the stream is
 * created, captured per batch via HasOffsetRanges on the driver, and written
 * back to Redis at the end of each batch (after the partition data has been
 * flushed to Kafka on the executors).
 */
object OdsBaseLogApp {
  def main(args: Array[String]): Unit = {
    // NOTE: the parallelism ("local[4]") should match the partition count of the Kafka topic.
    val sparkConf: SparkConf = new SparkConf().setMaster("local[4]").setAppName("ods_base_log_app")
    val ssc = new StreamingContext(sparkConf, Seconds(5))

    // Source topic / consumer group (must match the log generator's configuration).
    val topicName = "ODS_BASE_LOG"
    val groupId = "ODS_BASE_LOG_GROUP"

    // Read previously committed offsets from Redis; if present, resume from them,
    // otherwise fall back to the consumer's default offset policy.
    val offsets: Map[TopicPartition, Long] = MyOffsetUtils.readOffset(topicName, groupId)
    val kafkaDStream: InputDStream[ConsumerRecord[String, String]] =
      if (offsets != null && offsets.nonEmpty) {
        MyKafkaUtils.getKafkaDStream(ssc, topicName, groupId, offsets)
      } else {
        MyKafkaUtils.getKafkaDStream(ssc, topicName, groupId)
      }

    // Capture the offset ranges of each batch without transforming the data.
    // transform's body runs on the driver once per batch, so assigning the var here is safe.
    var offsetRanges: Array[OffsetRange] = null
    val offsetRangesDstream: DStream[ConsumerRecord[String, String]] = kafkaDStream.transform(
      rdd => {
        offsetRanges = rdd.asInstanceOf[HasOffsetRanges].offsetRanges
        rdd
      }
    )

    // Parse each record's value (a JSON log line) into a fastjson JSONObject.
    val jsonObjectDStream: DStream[JSONObject] = offsetRangesDstream.map(
      consumerRecord => {
        val log: String = consumerRecord.value()
        JSON.parseObject(log)
      }
    )

    /*
    Log layout:
      page-view log:
          common fields
          page data
          display (exposure) data
          action (event) data
          error data
      start-up log:
          common fields
          start data
          error data
     */

    val DWD_PAGE_LOG_TOPIC = "DWD_PAGE_LOG_TOPIC" // page views
    val DWD_PAGE_DISPLAY_TOPIC = "DWD_PAGE_DISPLAY_TOPIC" // page exposures
    val DWD_PAGE_ACTION_TOPIC = "DWD_PAGE_ACTION_TOPIC" // page actions
    val DWD_START_LOG_TOPIC = "DWD_START_LOG_TOPIC" // app start-ups
    val DWD_ERROR_LOG_TOPIC = "DWD_ERROR_LOG_TOPIC" // errors

    /*
    Split rules:
    - error data: no splitting; if the "err" field is present, forward the whole record as-is.
    - page data: split into page view / display / action records, each sent to its own topic.
    - start data: sent to its own topic.
     */
    jsonObjectDStream.foreachRDD(
      rdd => {
        rdd.foreachPartition(
          jsonObjIter => {
            // One SerializeConfig per partition instead of one per send() call.
            // (true => serialize by field reflection, no getters required on the beans.)
            val serializeConfig = new SerializeConfig(true)
            for (jsonObj <- jsonObjIter) {
              // Error records are forwarded untouched.
              val errObj: JSONObject = jsonObj.getJSONObject("err")
              if (errObj != null) {
                MyKafkaUtils.send(DWD_ERROR_LOG_TOPIC, jsonObj.toJSONString)
              } else {
                // Common fields shared by all split records.
                val commonObj: JSONObject = jsonObj.getJSONObject("common")
                val ar: String = commonObj.getString("ar")
                val uid: String = commonObj.getString("uid")
                val os: String = commonObj.getString("os")
                val ch: String = commonObj.getString("ch")
                val isNew: String = commonObj.getString("is_new")
                val md: String = commonObj.getString("md")
                val mid: String = commonObj.getString("mid")
                val vc: String = commonObj.getString("vc")
                val ba: String = commonObj.getString("ba")
                // Top-level event timestamp.
                val ts: Long = jsonObj.getLong("ts")

                // Page-view branch.
                val pageObj: JSONObject = jsonObj.getJSONObject("page")
                if (pageObj != null) {
                  val pageId: String = pageObj.getString("page_id")
                  val pageItem: String = pageObj.getString("item")
                  val duringTime: Long = pageObj.getLong("during_time")
                  val pageItemType: String = pageObj.getString("item_type")
                  val lastPageId: String = pageObj.getString("last_page_id")
                  val sourceType: String = pageObj.getString("source_type")

                  val pageLog: PageLog = PageLog(mid, uid, ar, ch, isNew, md, os, vc, ba, pageId, lastPageId, pageItem, pageItemType, duringTime, sourceType, ts)
                  MyKafkaUtils.send(DWD_PAGE_LOG_TOPIC, JSON.toJSONString(pageLog, serializeConfig))

                  // Exposure (display) records: one output message per array element.
                  val displaysJsonArr: JSONArray = jsonObj.getJSONArray("displays")
                  if (displaysJsonArr != null && displaysJsonArr.size() > 0) {
                    for (i <- 0 until displaysJsonArr.size()) {
                      val displayObj: JSONObject = displaysJsonArr.getJSONObject(i)
                      val displayType: String = displayObj.getString("display_type")
                      val displayItem: String = displayObj.getString("item")
                      val displayItemType: String = displayObj.getString("item_type")
                      val posId: String = displayObj.getString("pos_id")
                      val order: String = displayObj.getString("order")

                      val pageDisplayLog: PageDisplayLog = PageDisplayLog(mid, uid, ar, ch, isNew, md, os, vc, ba, pageId, lastPageId, pageItem, pageItemType, duringTime, sourceType, displayType, displayItem, displayItemType, order, posId, ts)
                      MyKafkaUtils.send(DWD_PAGE_DISPLAY_TOPIC, JSON.toJSONString(pageDisplayLog, serializeConfig))
                    }
                  }

                  // Action (event) records: one output message per array element.
                  val actionsJsonArr: JSONArray = jsonObj.getJSONArray("actions")
                  if (actionsJsonArr != null && actionsJsonArr.size() > 0) {
                    for (i <- 0 until actionsJsonArr.size()) {
                      val actionObj: JSONObject = actionsJsonArr.getJSONObject(i)
                      val actionItem: String = actionObj.getString("item")
                      // BUGFIX: key was misspelled "aciton_id", so actionId was always null.
                      val actionId: String = actionObj.getString("action_id")
                      val actionItemType: String = actionObj.getString("item_type")
                      val actionTs: Long = actionObj.getLong("ts")

                      // BUGFIX: use the per-action timestamp (actionTs was extracted but the
                      // page-level ts was passed). NOTE(review): assumes the last PageActionLog
                      // field is the action timestamp — confirm against the bean definition.
                      val pageActionLog: PageActionLog = PageActionLog(mid, uid, ar, ch, isNew, md, os, vc, ba, pageId, lastPageId, pageItem, pageItemType, duringTime, sourceType, actionId, actionItem, actionItemType, actionTs)
                      MyKafkaUtils.send(DWD_PAGE_ACTION_TOPIC, JSON.toJSONString(pageActionLog, serializeConfig))
                    }
                  }
                }

                // Start-up branch.
                val startJsonObj: JSONObject = jsonObj.getJSONObject("start")
                if (startJsonObj != null) {
                  val entry: String = startJsonObj.getString("entry")
                  val openAdSkipMs: Long = startJsonObj.getLong("open_ad_skip_ms")
                  val openAdMs: Long = startJsonObj.getLong("open_ad_ms")
                  val loadingTime: Long = startJsonObj.getLong("loading_time")
                  val openAdId: String = startJsonObj.getString("open_ad_id")

                  val startLog: StartLog = StartLog(mid, uid, ar, ch, isNew, md, os, vc, ba, entry, openAdId, loadingTime, openAdMs, openAdSkipMs, ts)
                  MyKafkaUtils.send(DWD_START_LOG_TOPIC, JSON.toJSONString(startLog, serializeConfig))
                }
              }
            }
            // Runs on the executor once per batch per partition: flush this
            // executor's producer buffer before the driver commits offsets.
            MyKafkaUtils.flush()
          }
        )
        // Runs on the driver once per batch (after the jobs above are scheduled):
        // persist the offsets captured in transform(). The split/flush work happens
        // on executors; the driver only commits offsets, so it is not flushing the
        // same producer object as the executors.
        MyOffsetUtils.saveOffset(topicName, groupId, offsetRanges)
      }
    )

    ssc.start()
    ssc.awaitTermination()
  }
}