package read.niit.handler

import com.google.gson.Gson
import org.apache.kafka.clients.consumer.ConsumerRecord
import org.apache.spark.streaming.dstream.{DStream, InputDStream}
import read.niit.bean.Reader
import read.niit.util.{MyKafkaUtil, SparkUtil}


object DataHandler {

  /** Shared StreamingContext obtained from the project's Spark utility. */
  val ssc = SparkUtil.takeSSC()

  /**
   * Consumes a Kafka topic and deserializes each JSON payload into a [[Reader]].
   *
   * @param groupId Kafka consumer group id
   * @param topic   Kafka topic to subscribe to
   * @return a DStream of deserialized Reader records
   */
  def kafkaReaderDataHandler(groupId: String, topic: String): DStream[Reader] = {

    // Obtain the raw record stream from Kafka.
    val kafkaStream: InputDStream[ConsumerRecord[String, String]] =
      MyKafkaUtil.getKafkaStream(groupId, topic, ssc)

    // JSON string -> Reader bean.
    // mapPartitions builds ONE Gson per partition instead of one per record
    // (the original allocated a Gson for every message); Gson is also not
    // serializable, so it must be constructed on the executor side like this.
    kafkaStream.mapPartitions { records =>
      val gson = new Gson()
      records.map(record => gson.fromJson(record.value(), classOf[Reader]))
    }
  }

  /** Starts the streaming context and blocks the caller until termination. */
  def startAndAwait(): Unit = {
    ssc.start()
    ssc.awaitTermination()
  }

}