package com.shujia.kafka

import java.{lang, util}
import java.time.Duration
import java.util.Properties

import org.apache.kafka.clients.consumer.{ConsumerRecord, ConsumerRecords, KafkaConsumer}

object Demo3Consumer {
  def main(args: Array[String]): Unit = {

    /**
      * Build the consumer configuration.
      */
    val properties = new Properties
    properties.setProperty("bootstrap.servers", "master:9092,node1:9092,node2:9092")
    // Deserializer classes for the record key and value
    properties.setProperty("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer")
    properties.setProperty("value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer")

    // Consumer group id
    properties.setProperty("group.id", "asdasdasd")

    /**
      * auto.offset.reset semantics:
      *   earliest - if a committed offset exists for a partition, resume from it;
      *              otherwise start consuming from the beginning of the partition
      *   latest (default) - if a committed offset exists, resume from it;
      *              otherwise consume only records produced after the consumer starts
      *   none     - if every subscribed partition has a committed offset, resume from
      *              them; if any partition lacks one, throw an exception
      */

    // Start from the earliest available records when no offset is committed
    properties.put("auto.offset.reset", "earliest")

    val kafkaConsumer = new KafkaConsumer[String, String](properties)

    // Topics to subscribe to (renamed from `topic` to avoid shadowing the
    // per-record `topic` variable inside the poll loop)
    val topics = new util.ArrayList[String]()
    topics.add("student2")

    // Subscribe to the topic list
    kafkaConsumer.subscribe(topics)

    // NOTE(review): this loop never exits, so the consumer is never closed; a
    // shutdown hook calling kafkaConsumer.wakeup() + close() would be cleaner.
    while (true) {

      // Poll one batch of records; poll(Duration) replaces the deprecated poll(long)
      val consumerRecords: ConsumerRecords[String, String] = kafkaConsumer.poll(Duration.ofMillis(1000))

      val records: lang.Iterable[ConsumerRecord[String, String]] = consumerRecords.records("student2")

      val recordsIter: util.Iterator[ConsumerRecord[String, String]] = records.iterator()

      while (recordsIter.hasNext) {
        // One record per iteration
        val record: ConsumerRecord[String, String] = recordsIter.next()

        val topic: String = record.topic()       // topic name
        val key: String = record.key()           // key (null unless the producer set one)
        val partition: Int = record.partition()  // partition number
        val ts: Long = record.timestamp()        // record timestamp
        val offset: Long = record.offset()       // offset within the partition
        val value: String = record.value()       // record payload

        println(s"$topic\t$key\t$partition\t$ts\t$offset\t$value")

      }

    }

  }

}
