package com.bd03.streaminglearn.day0402

import com.typesafe.config.{Config, ConfigFactory}
import org.apache.kafka.clients.consumer.ConsumerRecord
import org.apache.kafka.common.serialization.StringDeserializer
import org.apache.log4j.{Level, Logger}
import org.apache.spark.SparkConf
import org.apache.spark.streaming.{Seconds, StreamingContext}
import org.apache.spark.streaming.dstream.InputDStream
import org.apache.spark.streaming.kafka010.{ConsumerStrategies, KafkaUtils, LocationStrategies}

object Kafka2Streaming {

  /**
   * Entry point: consumes pipe-delimited IP-location records from the Kafka
   * topic "kafka" using the direct (receiver-less) stream API and prints
   * (startIpNum, endIpNum, province) triples for each 2-second micro-batch.
   */
  def main(args: Array[String]): Unit = {
    // Silence Spark's INFO logging so the batch output stays readable.
    Logger.getLogger("org").setLevel(Level.WARN)

    val conf = new SparkConf()
      .setMaster("local[*]")
      .setAppName(this.getClass.getSimpleName)
      // Use Kryo instead of the default Java serialization (faster, more compact).
      .set("spark.serializer", "org.apache.spark.serializer.KryoSerializer")

    // 2-second micro-batch interval.
    val ssc = new StreamingContext(conf, Seconds(2))

    val kafkaParams = Map[String, Object](
      "bootstrap.servers" -> "hdp01:9092,hdp02:9092,hdp03:9092,hdp04:9092",
      "key.deserializer" -> classOf[StringDeserializer],
      "value.deserializer" -> classOf[StringDeserializer],
      "group.id" -> "streaming_consumer",
      // With no committed offset for this group, start from the earliest available record.
      "auto.offset.reset" -> "earliest",
      // FIX: was `true`, which contradicted the original inline comment stating
      // offsets should NOT be auto-committed ("re-consume each run"). Disabling
      // auto-commit matches that stated intent; offsets can then be managed
      // manually (e.g. commitAsync) or externally (ZooKeeper/DB) if needed.
      "enable.auto.commit" -> (false: java.lang.Boolean)
    )

    // Direct stream (the receiver-based API is deprecated/unused here).
    // Location strategies:
    //   PreferBrokers    - use when Kafka brokers and Spark executors share nodes
    //   PreferConsistent - spread topic partitions evenly across executors (chosen here)
    //   PreferFixed      - pin partitions to specific hosts to counter data skew
    val stream: InputDStream[ConsumerRecord[String, String]] = KafkaUtils.createDirectStream(
      ssc,
      LocationStrategies.PreferConsistent,
      ConsumerStrategies.Subscribe[String, String](List("kafka"), kafkaParams)
    )

    // Record format (pipe-separated), e.g.:
    // 1.0.32.0|1.0.63.255|16785408|16793599|亚洲|中国|广东|广州||电信|440100|China|CN|113.280637|23.125178
    // Keep fields 2, 3, 6 (start IP num, end IP num, province). Malformed records
    // (fewer than 7 fields) are dropped instead of throwing
    // ArrayIndexOutOfBoundsException and failing the whole batch.
    val value = stream.flatMap { record =>
      val fields = record.value().split("\\|")
      if (fields.length > 6) Some((fields(2), fields(3), fields(6))) else None
    }

    value.print()

    ssc.start()
    ssc.awaitTermination()
  }
}
