package com.shujia

import org.apache.kafka.clients.consumer.{ConsumerRecord, ConsumerRecords, KafkaConsumer}

import java.{lang, util}
import java.util.Properties

/**
 * Demo: consume String key/value records from a Kafka topic and print each
 * record's key, value, partition, timestamp and topic, one line per record.
 * Runs until the process is killed or the poll loop throws.
 */
object Demo03KafkaConsumer {
  def main(args: Array[String]): Unit = {
    // Local import keeps the top-of-file import block untouched.
    import java.time.Duration

    // Single source of truth for the topic name — used by both subscribe()
    // and the per-topic read below, so they cannot drift apart.
    val topic = "students"

    val properties = new Properties()

    // 1. Kafka broker list (bootstrap servers)
    properties.setProperty("bootstrap.servers", "master:9092,node1:9092,node2:9092")

    // 2. Consumer group id
    properties.setProperty("group.id", "adfasdfa")

    // Deserializer classes for the record key and value
    properties.setProperty("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer")
    properties.setProperty("value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer")

    /**
     * auto.offset.reset semantics:
     * earliest - if the group has committed offsets, resume from them;
     *            otherwise start from the beginning of each partition
     * latest   - if the group has committed offsets, resume from them;
     *            otherwise consume only newly produced data
     * none     - resume from committed offsets when every partition has one;
     *            throw if any partition lacks a committed offset
     */
    // Read from the earliest available data on first run.
    properties.put("auto.offset.reset", "earliest")

    val kafkaConsumer: KafkaConsumer[String, String] = new KafkaConsumer[String, String](properties)

    val topics = new util.ArrayList[String]()
    topics.add(topic)

    // Subscribe to the topic(s); multiple topics may go in the same collection.
    kafkaConsumer.subscribe(topics)

    try {
      while (true) {
        // poll(Duration) is the non-deprecated overload (the long-millis
        // variant is deprecated since Kafka 2.0); blocks up to 1s per batch.
        val records: ConsumerRecords[String, String] = kafkaConsumer.poll(Duration.ofMillis(1000))

        val iter: util.Iterator[ConsumerRecord[String, String]] = records.records(topic).iterator()

        while (iter.hasNext) {
          val consumerRecord: ConsumerRecord[String, String] = iter.next()

          val key: String = consumerRecord.key()
          val value: String = consumerRecord.value()
          val partition: Int = consumerRecord.partition()
          val timestamp: Long = consumerRecord.timestamp()
          val topicName: String = consumerRecord.topic()

          println(s"$key,$value,$partition,$timestamp,$topicName")
        }
      }
    } finally {
      // The original close() sat after `while (true)` and was unreachable.
      // Closing in finally ensures the consumer leaves its group cleanly if
      // the poll loop ever throws, instead of waiting for the session timeout.
      kafkaConsumer.close()
    }
  }
}
