package com.atguigu.gmall.realtime.handler

import com.alibaba.fastjson.JSON
import com.alibaba.fastjson.serializer.SerializeConfig
import com.atguigu.gmall.realtime.handler.EsHandler.{esHost, esPort}
import com.atguigu.gmall.realtime.util.{Json4sUtil, MyKafkaUtil, MyPropertiesUtil, OffsetManager}
import org.apache.kafka.clients.consumer.ConsumerRecord
import org.apache.kafka.common.TopicPartition
import org.apache.spark.SparkConf
import org.apache.spark.streaming.{Seconds, StreamingContext}
import org.apache.spark.streaming.dstream.{DStream, InputDStream}
import org.apache.spark.streaming.kafka010.{CanCommitOffsets, HasOffsetRanges, OffsetRange}

import scala.collection.mutable
import scala.collection.mutable.ListBuffer
import scala.reflect.{ClassTag, classTag}

class CommonApp {

  /**
   * Offset ranges captured from each micro-batch, pending commit back to Kafka.
   *
   * Appended on the driver inside `transform` (once per stream per batch) and
   * drained by [[commitAllOffsetKafka]]. It MUST be cleared after committing:
   * previously it never was, so every commit re-sent all historical ranges and
   * the buffer grew without bound.
   */
  val offsetInfoList: ListBuffer[OffsetInfo] = new ListBuffer[OffsetInfo]()

  /**
   * Creates a StreamingContext for this application.
   *
   * @param groupId used as the Spark application name
   * @param master  optional master URL; when null, the master configured by
   *                spark-submit is used unchanged
   * @param sec     micro-batch interval in seconds
   * @return a new StreamingContext (not yet started)
   */
  def initSsc(groupId: String, master: String, sec: Int): StreamingContext = {
    val sparkConf: SparkConf = new SparkConf().setAppName(groupId)
    // Only override the master when explicitly supplied (e.g. local testing);
    // otherwise defer to whatever spark-submit configured.
    if (master != null) {
      sparkConf.setMaster(master)
    }
    new StreamingContext(sparkConf, Seconds(sec))
  }

  /**
   * Builds a typed DStream from a Kafka topic.
   *
   * Resumes from externally stored offsets when present, records each batch's
   * offset ranges into [[offsetInfoList]] for a later manual commit, and
   * deserializes every record's JSON value into `T`.
   *
   * @param ssc     the streaming context to attach the stream to
   * @param topic   Kafka topic to consume
   * @param groupId Kafka consumer group (also the offset-store key)
   * @tparam T target type for fastjson deserialization (Manifest needed to
   *           recover the runtime class under erasure)
   * @return a DStream of deserialized `T` instances
   */
  def getDstream[T: Manifest](ssc: StreamingContext,
                              topic: String,
                              groupId: String,
                             ): DStream[T] = {

    // 1. Load previously saved offsets; fall back to the consumer group's
    //    default starting position when none exist. Expression-valued `if`
    //    replaces the former `var ... = null` pattern.
    val offsetMap: Map[TopicPartition, Long] = OffsetManager.getOffset(topic, groupId)
    val inputDstream: InputDStream[ConsumerRecord[String, String]] =
      if (offsetMap == null || offsetMap.isEmpty) {
        MyKafkaUtil.getKafkaStream(topic, ssc, groupId)
      } else {
        MyKafkaUtil.getKafkaStream(topic, ssc, offsetMap, groupId)
      }

    // 2. Capture this batch's offset ranges. `transform`'s body runs on the
    //    driver once per batch, so appending to the driver-side buffer is safe.
    val inputDstreamWithOffset: DStream[ConsumerRecord[String, String]] = inputDstream.transform { rdd =>
      val offsetRanges: Array[OffsetRange] = rdd.asInstanceOf[HasOffsetRanges].offsetRanges
      offsetInfoList.append(OffsetInfo(topic, groupId, inputDstream, offsetRanges))
      rdd
    }

    // 3. Deserialize the JSON payload. The cast is sound: Manifest[T]'s
    //    runtimeClass is exactly T's class (fixes the Class[_] vs Class[T]
    //    mismatch in the original call).
    inputDstreamWithOffset.map { record =>
      JSON.parseObject(record.value(), classTag[T].runtimeClass.asInstanceOf[Class[T]])
    }
  }

  /**
   * Commits every offset range collected since the previous call back to
   * Kafka, then clears the accumulator.
   *
   * Bug fix: the buffer is now drained after committing; previously each call
   * re-committed all historical ranges and the list leaked memory.
   */
  def commitAllOffsetKafka(): Unit = {
    for (offsetInfo <- this.offsetInfoList) {
      // The stored stream is the original direct input stream, which
      // implements CanCommitOffsets; commitAsync is the supported manual
      // commit path for the direct Kafka API.
      offsetInfo.ds.asInstanceOf[CanCommitOffsets].commitAsync(offsetInfo.offsetRanges)
    }
    // Prevent duplicate commits and unbounded growth across batches.
    offsetInfoList.clear()
  }

  /**
   * Snapshot of one batch's Kafka offsets for a (topic, groupId) pair,
   * together with the source stream used to commit them.
   */
  case class OffsetInfo(topic: String,
                        groupId: String,
                        ds: DStream[ConsumerRecord[String, String]],
                        offsetRanges: Array[OffsetRange])

}
