package com.shujia.flink.kafka

import java.time.Duration
import java.util
import java.util.Properties

import org.apache.kafka.clients.consumer.{ConsumerRecord, ConsumerRecords, KafkaConsumer}

object Demo2Consumer {

  /**
    * Demo: create a Kafka consumer, subscribe to a topic, and continuously
    * poll and print the records it receives.
    */
  def main(args: Array[String]): Unit = {

    val properties = new Properties
    // Kafka broker address list
    properties.setProperty("bootstrap.servers", "master:9092,node1:9092,node2:9092")

    // Deserializer classes for the record key and value
    properties.setProperty("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer")
    properties.setProperty("value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer")

    // Consumer group id.
    // Within the same consumer group, each record is processed only once.
    properties.setProperty("group.id", "asdasdasda")

    // When no committed offset exists, start reading from the earliest offset
    properties.put("auto.offset.reset", "earliest")

    // Interval (ms) between automatic offset commits
    // (auto commit is enabled by default: enable.auto.commit=true)
    properties.put("auto.commit.interval.ms", "1000")

    val consumer = new KafkaConsumer[String, String](properties)

    // Collection of topics; more than one topic may be subscribed
    val topics = new util.ArrayList[String]()
    topics.add("student1")

    // Subscribe to the topic(s)
    consumer.subscribe(topics)

    // FIX: the original code called consumer.close() AFTER `while (true)`,
    // so it could never run and the consumer leaked its resources if the
    // loop exited via an exception. Wrap the poll loop in try/finally so
    // close() is invoked on any exit path.
    try {
      while (true) {

        println("正在消费数据")

        // Poll for up to 1 second for a new batch of records
        val records: ConsumerRecords[String, String] = consumer.poll(Duration.ofSeconds(1))

        // Iterate over the records in this batch
        val lines: util.Iterator[ConsumerRecord[String, String]] = records.iterator()

        while (lines.hasNext) {
          // Read one record
          val record: ConsumerRecord[String, String] = lines.next()

          // Extract the record's fields
          val key: String = record.key()
          val value: String = record.value()
          val offset: Long = record.offset()
          val partition: Int = record.partition()
          val ts: Long = record.timestamp()
          val topic: String = record.topic()

          println(s"$key\t$value\t$offset\t$partition\t$ts\t$topic")
        }
      }
    } finally {
      // Release sockets, buffers and commit final offsets
      consumer.close()
    }

  }

}
