package com.pro.streaming

import com.pro.beans.UserLog
import org.apache.kafka.common.serialization.StringDeserializer
import org.apache.spark.SparkConf
import org.apache.spark.streaming.kafka010.{ConsumerStrategies, KafkaUtils, LocationStrategies}
import org.apache.spark.streaming.{Seconds, StreamingContext}

/**
 * Minimal Spark Streaming demo that consumes the Kafka topic "streaming"
 * and prints each record's value every batch interval.
 *
 * Run locally; assumes a Kafka broker is reachable at 127.0.0.1:9092.
 */
object KafkaDemo {

  def main(args: Array[String]): Unit = {
    // StreamingContext(SparkConf, batch interval): one micro-batch every 10 seconds.
    val ssc = new StreamingContext(
      new SparkConf()
        .setAppName("streaming")
        .setMaster("local[2]"), // at least 2 cores: one for the receiver, one for processing
      Seconds(10)
    )
    ssc.sparkContext.setLogLevel("ERROR")

    // Checkpoint directory — streaming jobs run 24*7 and need recoverable state.
    // NOTE(review): "check_port" looks like a typo for "check_point"; kept as-is
    // because changing the path would change where state is persisted.
    ssc.checkpoint("./check_port")

    // Kafka consumer configuration.
    // Note: only *deserializers* belong here — key.serializer/value.serializer are
    // producer-side configs and were removed (consumers ignore them and log
    // "supplied but isn't a known config" warnings).
    val kafkaParams = Map[String, Object](
      "bootstrap.servers" -> "127.0.0.1:9092", // Kafka broker address
      "key.deserializer" -> classOf[StringDeserializer],
      "value.deserializer" -> classOf[StringDeserializer],
      "group.id" -> "spark", // consumer group id
      "auto.offset.reset" -> "latest", // start from the newest offsets when no committed offset exists
      "enable.auto.commit" -> (false: java.lang.Boolean) // offsets managed by Spark, not auto-committed
    )

    // Direct stream: Spark reads partitions itself (no receiver).
    // PreferConsistent spreads partitions evenly across available executors.
    val data = KafkaUtils.createDirectStream(
      ssc,
      LocationStrategies.PreferConsistent,
      ConsumerStrategies.Subscribe[String, String](List("streaming"), kafkaParams) // (topics, kafka params)
    )

    // Print each record's value for every batch.
    data.map(_.value()).print()

    ssc.start()
    ssc.awaitTermination()
  }

}
