package com.atguigu.realtime.util

import com.atguigu.cm.constan.KafkaCons
import com.atguigu.realtime.bean.AdsInfo
import com.atguigu.realtime.util.MyKafkaUtil.getDStream
import org.apache.kafka.clients.consumer.ConsumerRecord
import org.apache.spark.SparkConf
import org.apache.spark.streaming.dstream.{DStream, InputDStream}
import org.apache.spark.streaming.{Seconds, StreamingContext}

/**
 * Description : application bootstrap trait for the real-time streaming jobs
 * Author      : 剧情再美终是戏
 * Mail        : 13286520398@163.com
 * Date        : created 2020/1/17 9:55
 * Modified by :
 * Version     : 1.0
 */
trait MyApp {

  /**
   * Entry point shared by all concrete streaming jobs: builds the
   * StreamingContext, consumes ad-click events from Kafka, parses each record
   * into an [[AdsInfo]] and delegates the business logic to [[doSomething]].
   *
   * @param args command-line arguments (currently unused)
   */
  def main(args: Array[String]): Unit = {

    // Build the StreamingContext with a 1-second batch interval.
    // NOTE(review): local[*] master and a relative checkpoint path are
    // hard-coded — fine for local development, confirm before deploying.
    val conf = new SparkConf().setAppName("StreamingContext").setMaster("local[*]")
    val ssc: StreamingContext = new StreamingContext(conf, Seconds(1))
    ssc.checkpoint("./spark-stream-project-ck11")

    // Raw Kafka record value, e.g. -> 1576655451922,华北,北京,105,2
    val result: InputDStream[ConsumerRecord[String, String]] = getDStream(ssc, Array(KafkaCons.TOPIC_SPARK_STREAM_PJ))

    // Parse each Kafka record into an AdsInfo. Malformed records (wrong field
    // count or non-numeric timestamp) are dropped via flatMap instead of
    // throwing, so a single bad message cannot kill the whole streaming job.
    val adsInfo: DStream[AdsInfo] = result.flatMap { record =>
      val fields: Array[String] = record.value().split(",")
      if (fields.length >= 5 && fields(0).nonEmpty && fields(0).forall(_.isDigit))
        Some(AdsInfo(
          fields(0).toLong, // ts: Long
          fields(1),        // area: String
          fields(2),        // city: String
          fields(3),        // userId: String
          fields(4)         // adsId: String
        ))
      else
        None
    }

    // Hand the parsed stream to the concrete job's business logic.
    doSomething(adsInfo)

    // Start the streaming job and block until it is terminated.
    ssc.start()
    ssc.awaitTermination()
  }

  /**
   * Business-logic hook implemented by each concrete streaming job.
   *
   * @param adsInfo stream of parsed ad-click events
   */
  def doSomething(adsInfo: DStream[AdsInfo]): Unit

}
