package com.csw.flink.kafka

import java.time.Duration
import java.util
import java.util.Properties

import org.apache.kafka.clients.consumer.{ConsumerRecord, ConsumerRecords, KafkaConsumer}

object Demo02Consumer {

  /**
    * Demo: consume string records from Kafka with a plain `KafkaConsumer`.
    *
    * Connects to the broker list, subscribes to the `student1` topic, and
    * polls forever, printing each record's key, value, offset, partition,
    * timestamp and topic. Runs until externally interrupted.
    *
    * @param args unused command-line arguments
    */
  def main(args: Array[String]): Unit = {

    val properties: Properties = new Properties

    // Kafka broker address list
    properties.setProperty("bootstrap.servers", "master:9092,node1:9092,node2:9092")

    // Deserializer classes for record keys and values
    properties.setProperty("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer")
    properties.setProperty("value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer")

    // Consumer group: within the same group each record is processed only once,
    // provided its offset has been committed. Switching to a new group id
    // re-reads the topic from the configured starting offset.
    properties.setProperty("group.id", "csw1")

    // When the group has no committed offset, start from the earliest record
    properties.setProperty("auto.offset.reset", "earliest")

    // Auto-commit interval for offsets; the broker default is 5 seconds.
    // Use setProperty consistently with the other settings above.
    properties.setProperty("auto.commit.interval.ms", "1000")

    val consumer: KafkaConsumer[String, String] = new KafkaConsumer[String, String](properties)

    // try/finally guarantees consumer.close() actually runs: in the original
    // code close() was placed after `while (true)` and therefore unreachable,
    // leaking sockets and skipping the final offset commit on shutdown.
    try {
      // Topics to subscribe to; multiple topics may be added to this list
      val topics: util.ArrayList[String] = new util.ArrayList[String]()
      topics.add("student1")

      consumer.subscribe(topics)

      while (true) {

        println("正在消费数据")

        // Fetch the next batch of records, waiting up to 1 second
        val records: ConsumerRecords[String, String] = consumer.poll(Duration.ofSeconds(1))

        // The batch is exposed as a Java iterator over individual records
        val lines: util.Iterator[ConsumerRecord[String, String]] = records.iterator()

        while (lines.hasNext) {
          // Next single record from the batch
          val record: ConsumerRecord[String, String] = lines.next()

          // Unpack the record's fields
          val key: String = record.key()             // record key (may be null)
          val value: String = record.value()         // record payload
          val offset: Long = record.offset()         // position within the partition
          val partition: Int = record.partition()    // partition number
          val timestamp: Long = record.timestamp()   // record timestamp (ms)
          val topic: String = record.topic()         // source topic

          println(s"$key\t$value\t$offset\t$partition\t$timestamp\t$topic")
        }

      }
    } finally {
      // Reached on exception or interrupt: releases network resources and,
      // with auto-commit enabled, performs a final offset commit.
      consumer.close()
    }
  }

}
