package spark.stream

import com.alibaba.fastjson.JSON
import com.alibaba.fastjson.serializer.SerializerFeature
import kafka.serializer.StringDecoder
import org.apache.spark.SparkConf
import org.apache.spark.streaming.dstream.{DStream, InputDStream}
import org.apache.spark.streaming.kafka.KafkaUtils
import org.apache.spark.streaming.{Seconds, StreamingContext}

/**
 * @className KafkaStream
 * @author sjw
 * @date 2020/11/30 16:38
 * @description: Spark Streaming job that reads LogEvent JSON records from Kafka
 *               and prints per-batch counts, including a count of "buy" events.
 */
object KafkaStream {

  /**
   * Entry point: consumes JSON-encoded [[LogEvent]] records from Kafka using
   * the direct (receiver-less) 0.8 API, prints the per-batch record count as
   * pretty-printed JSON, and separately counts records whose behavior is "buy".
   */
  def main(args: Array[String]): Unit = {
    // local[2]: need at least two threads so the driver and the processing
    // tasks do not starve each other in local mode.
    val sparkConf = new SparkConf().setAppName("kafka-stream").setMaster("local[2]")
    val streamingContext = new StreamingContext(sparkConf, Seconds(10))

    // Kafka connection settings, defined once and reused below instead of
    // repeating the literals inside kafkaParams.
    val brokers = "192.168.10.200:9092"
    val group = "sjw"
    val topics = Set("logEvent")
    val kafkaParams = Map[String, String](
      "bootstrap.servers" -> brokers,
      "group.id" -> group,
      // "largest" is the 0.8 consumer's equivalent of "latest": start from the
      // newest offsets when the group has no committed offset.
      "auto.offset.reset" -> "largest"
    )

    // Direct stream: one RDD partition per Kafka partition, no receivers.
    val logEvents: InputDStream[(String, String)] = KafkaUtils
      .createDirectStream[String, String, StringDecoder, StringDecoder](streamingContext, kafkaParams, topics)

    // Parse each message value into a LogEvent. Cached because the stream is
    // consumed by two downstream actions (count + filtered count).
    val dStream: DStream[LogEvent] = logEvents
      .map { case (_, value) =>
        println(value) // debug: raw JSON payload before parsing
        JSON.parseObject(value, classOf[LogEvent])
      }
      .cache()

    dStream.map(e => JSON.toJSONString(e, SerializerFeature.PrettyFormat)).count().print()
    dStream.filter(event => "buy".equals(event.behavior)).count().print()

    streamingContext.start()
    streamingContext.awaitTermination()
  }

}
