package com.atguigu.gmall.realtime.app

import java.lang

import com.alibaba.fastjson.serializer.SerializeConfig
import com.alibaba.fastjson.{JSON, JSONArray, JSONObject}
import com.atguigu.gmall.realtime.bean.{PageActionLog, PageDisplayLog, PageLog, StartLog}
import com.atguigu.gmall.realtime.utils.{MyKafkaUtils, MyOffsetUtils}
import org.apache.kafka.clients.consumer.ConsumerRecord
import org.apache.kafka.common.TopicPartition
import org.apache.spark.SparkConf
import org.apache.spark.streaming.dstream.{DStream, InputDStream}
import org.apache.spark.streaming.kafka010.{HasOffsetRanges, OffsetRange}
import org.apache.spark.streaming.{Seconds, StreamingContext}

/**
 * Consumes the raw log topic and splits ("fans out") the records into
 * dedicated downstream Kafka topics.
 *
 * 1. Prepare the SparkStreaming environment object.
 * 2. Consume data from Kafka; the consumer yields ConsumerRecord objects.
 * 3. Process the data:
 *    3.1 Convert the record value into a workable structure —
 *        generic structures: Map / JSON
 *        purpose-built structures: our own bean objects
 * 4. Write the split streams back to Kafka.
 */
object OdsBaseLogApp {

  def main(args: Array[String]): Unit = {
    // 1. Prepare the streaming environment.
    //    The parallelism (local[4]) should correspond to the Kafka partition
    //    count. If the topic is not created manually it is auto-created with a
    //    single partition by default (configurable in Kafka's server.properties).
    val conf: SparkConf = new SparkConf().setAppName("OdsBaseLog").setMaster("local[4]")
    val ssc: StreamingContext = new StreamingContext(conf, Seconds(5))

    val topicName: String = "ODS_BASE_LOG_1118"
    val groupName: String = "ODS_BASE_LOG_Group"

    // Resume consumption from offsets previously saved in Redis, if any.
    val offsets: Map[TopicPartition, Long] = MyOffsetUtils.readOffset(topicName, groupName)

    // 2. Consume from Kafka: start from the saved offsets when available,
    //    otherwise fall back to the consumer group's default position.
    val kafkaDStream: InputDStream[ConsumerRecord[String, String]] =
      if (offsets != null && offsets.nonEmpty) {
        MyKafkaUtils.getKafkaDstream(ssc, topicName, groupName, offsets)
      } else {
        MyKafkaUtils.getKafkaDstream(ssc, topicName, groupName)
      }

    // Capture each batch's offset ranges BEFORE any other transformation.
    // transform's body runs on the driver once per batch, so the driver-side
    // var is safe to read later in foreachRDD (also driver-side).
    var offsetRanges: Array[OffsetRange] = null
    val offsetDstream: DStream[ConsumerRecord[String, String]] = kafkaDStream.transform(
      rdd => {
        offsetRanges = rdd.asInstanceOf[HasOffsetRanges].offsetRanges
        rdd
      }
    )

    // 3.1 Convert each ConsumerRecord value (a JSON string) into a JSONObject.
    val jsonObjDstream: DStream[JSONObject] = offsetDstream.map(
      consumerRecord => JSON.parseObject(consumerRecord.value())
    )

    // 3.2 Split the stream. Routing rules:
    //   1. Malformed/error records -> DWD_ERROR_TOPIC_1118 (forwarded verbatim)
    //   2. Page logs:
    //        2.1 page data     -> DWD_PAGE_TOPIC_1118
    //        2.2 display data  -> DWD_DISPLAY_TOPIC_1118
    //        2.3 action data   -> DWD_ACTION_TOPIC_1118
    //   3. Start-up logs      -> DWD_START_TOPIC_1118
    val error_topic: String = "DWD_ERROR_TOPIC_1118"
    val start_topic: String = "DWD_START_TOPIC_1118"
    val page_topic: String = "DWD_PAGE_TOPIC_1118"
    val display_topic: String = "DWD_DISPLAY_TOPIC_1118"
    val action_topic: String = "DWD_ACTION_TOPIC_1118"

    // Where to commit offsets:
    //   1. outside foreachRDD          : driver, once at job start (too early)
    //   2. inside foreachRDD, outside
    //      foreachPartition            : driver, once per batch  <- chosen
    //   3. inside foreachPartition     : executor, per record (too costly)
    jsonObjDstream.foreachRDD(
      rdd => {
        rdd.foreachPartition(
          jsonObjIter => {
            // One fastjson config per partition instead of one per record;
            // SerializeConfig(true) enables field-based (no-getter) serialization
            // for the Scala case-class beans.
            val serializeConfig = new SerializeConfig(true)
            for (jsonObj <- jsonObjIter) {
              // --- split a single record ---
              // Error records: forward unchanged to the error topic.
              val errObj: JSONObject = jsonObj.getJSONObject("err")
              if (errObj != null) {
                MyKafkaUtils.send(error_topic, jsonObj.toJSONString)
              } else {
                // Fields shared by all log types live under "common".
                val commonObj: JSONObject = jsonObj.getJSONObject("common")
                val ar: String = commonObj.getString("ar")
                val uid: String = commonObj.getString("uid")
                val os: String = commonObj.getString("os")
                val ch: String = commonObj.getString("ch")
                val isNew: String = commonObj.getString("is_new")
                val md: String = commonObj.getString("md")
                val mid: String = commonObj.getString("mid")
                val vc: String = commonObj.getString("vc")
                val ba: String = commonObj.getString("ba")

                // Event timestamp of the whole log record.
                val ts: lang.Long = jsonObj.getLong("ts")

                val pageObj: JSONObject = jsonObj.getJSONObject("page")
                if (pageObj != null) {
                  // 2.1 Page data.
                  val pageId: String = pageObj.getString("page_id")
                  val pageItem: String = pageObj.getString("item")
                  val pageItemType: String = pageObj.getString("item_type")
                  // NOTE(review): getLong may return null for a missing key;
                  // unboxing to Long would NPE — assumes "during_time" is
                  // always present in page logs.
                  val duringTime: Long = pageObj.getLong("during_time")
                  val lastPageId: String = pageObj.getString("last_page_id")
                  val sourceType: String = pageObj.getString("source_type")

                  val pageLog =
                    PageLog(mid, uid, ar, ch, isNew, md, os, vc, ba, pageId, lastPageId, pageItem, pageItemType, duringTime, sourceType, ts)
                  MyKafkaUtils.send(page_topic, JSON.toJSONString(pageLog, serializeConfig))

                  // 2.2 Display (exposure) data: one message per display entry,
                  // denormalized with the page fields.
                  val displayJsonArr: JSONArray = jsonObj.getJSONArray("displays")
                  if (displayJsonArr != null && displayJsonArr.size() > 0) {
                    for (i <- 0 until displayJsonArr.size()) {
                      val displayObj: JSONObject = displayJsonArr.getJSONObject(i)
                      val displayType: String = displayObj.getString("display_type")
                      val displayItem: String = displayObj.getString("item")
                      val displayItemType: String = displayObj.getString("item_type")
                      val order: String = displayObj.getString("order")
                      val posId: String = displayObj.getString("pos_id")

                      val pageDisplayLog =
                        PageDisplayLog(mid, uid, ar, ch, isNew, md, os, vc, ba, pageId, lastPageId, pageItem, pageItemType, duringTime, sourceType, displayType, displayItem, displayItemType, order, posId, ts)
                      MyKafkaUtils.send(display_topic, JSON.toJSONString(pageDisplayLog, serializeConfig))
                    }
                  }

                  // 2.3 Action data: one message per action entry, carrying the
                  // action's own timestamp plus the record timestamp.
                  val actionJsonArr: JSONArray = jsonObj.getJSONArray("actions")
                  if (actionJsonArr != null && actionJsonArr.size() > 0) {
                    for (i <- 0 until actionJsonArr.size()) {
                      val actionObj: JSONObject = actionJsonArr.getJSONObject(i)
                      val actionId: String = actionObj.getString("action_id")
                      val actionItem: String = actionObj.getString("item")
                      val actionItemType: String = actionObj.getString("item_type")
                      val actionTs: lang.Long = actionObj.getLong("ts")

                      val pageActionLog =
                        PageActionLog(mid, uid, ar, ch, isNew, md, os, vc, ba, pageId, lastPageId, pageItem, pageItemType, duringTime, sourceType, actionId, actionItem, actionItemType, actionTs, ts)
                      MyKafkaUtils.send(action_topic, JSON.toJSONString(pageActionLog, serializeConfig))
                    }
                  }
                }

                // 3. Start-up data.
                val startObj: JSONObject = jsonObj.getJSONObject("start")
                if (startObj != null) {
                  val entry: String = startObj.getString("entry")
                  val loadingTime: lang.Long = startObj.getLong("loading_time")
                  val openAdId: String = startObj.getString("open_ad_id")
                  // NOTE(review): same null-unboxing caveat as during_time.
                  val openAdMs: Long = startObj.getLong("open_ad_ms")
                  val openAdSkipMs: Long = startObj.getLong("open_ad_skip_ms")

                  val startLog =
                    StartLog(mid, uid, ar, ch, isNew, md, os, vc, ba, entry, openAdId, loadingTime, openAdMs, openAdSkipMs, ts)
                  MyKafkaUtils.send(start_topic, JSON.toJSONString(startLog, serializeConfig))
                }
              }
            }
            // Flush the producer once per partition so buffered messages are
            // durably sent before offsets are committed below.
            MyKafkaUtils.flush()
          }
        )
        // Commit offsets on the driver, once per batch, AFTER the data of the
        // batch has been produced (at-least-once semantics).
        MyOffsetUtils.saveOffset(topicName, groupName, offsetRanges)
      }
    )

    ssc.start()
    ssc.awaitTermination()
  }
}
