package org.cancer.handler

import org.apache.kafka.clients.consumer.ConsumerRecord
import org.apache.spark.streaming.dstream.{DStream, InputDStream}
import org.cancer.bean.CancerTypeDeathData_Faye
import org.cancer.util.{MyKafkaUtil, SparkUtil}

object DataHandler_Faye {

  // Single shared StreamingContext for every handler built from this object.
  private val ssc = SparkUtil.takeSSC()

  /**
   * Builds a `DStream[CancerTypeDeathData_Faye]` from a Kafka topic.
   *
   * Each Kafka record value is expected to contain three space-separated
   * fields, which are passed positionally to [[CancerTypeDeathData_Faye]].
   * (Field semantics are defined by the bean — NOTE(review): presumably
   * cancer type / deaths / period; confirm against the bean definition.)
   *
   * Records whose value is null (e.g. Kafka tombstones) or has fewer than
   * three fields are dropped. Previously such records threw
   * `ArrayIndexOutOfBoundsException` / `NullPointerException` inside the
   * `map`, which terminates the entire streaming application.
   *
   * @param groupId Kafka consumer group id to subscribe with
   * @param topic   Kafka topic to read from
   * @return a stream of parsed records, malformed inputs silently skipped
   */
  def KafkaDataHandler(groupId: String, topic: String): DStream[CancerTypeDeathData_Faye] = {
    val kfDataDS: InputDStream[ConsumerRecord[String, String]] =
      MyKafkaUtil.getKafkaStream(groupId, topic, ssc)

    kfDataDS.flatMap { record =>
      // Option(...) guards against a null record value; the Array pattern
      // guards against short lines. Either case yields None -> record dropped
      // instead of crashing the job.
      Option(record.value()).map(_.split(" ")) match {
        case Some(Array(first, second, third, _*)) =>
          Some(CancerTypeDeathData_Faye(first, second, third))
        case _ =>
          None
      }
    }
  }

  /** Starts the shared StreamingContext and blocks until it terminates. */
  def startAndAwait(): Unit = {
    ssc.start()
    ssc.awaitTermination()
  }
}
