package com.example.bigdata.spark.Kafka

import java.time.Duration
import java.util
import java.util.Properties

import org.apache.kafka.clients.consumer.{ConsumerConfig, ConsumerRecord, ConsumerRecords, KafkaConsumer}
import org.apache.kafka.common.TopicPartition
import org.apache.kafka.common.serialization.StringDeserializer

object ConsumerSampleDemo {

    val TOPIC_NAME = "Demo_topic"

    def main(args: Array[String]): Unit = {
//        consumerSample(consumerConfig(), 1)
        consumerDemo()
    }

    /**
     * Builds the shared consumer configuration.
     *
     * Auto-commit is disabled, so callers are responsible for committing
     * offsets themselves (see `consumerDemo`).
     *
     * @return a [[Properties]] instance ready to construct a [[KafkaConsumer]]
     */
    def consumerConfig(): Properties = {
        val props = new Properties
        props.setProperty(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "172.23.6.159:9092,172.23.6.160:9092,172.23.6.161:9092")
        props.setProperty(ConsumerConfig.GROUP_ID_CONFIG, "Demo_topic")
        // Manual offset management: commits must be issued explicitly.
        props.setProperty(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, "false")
        // Only relevant if auto-commit is re-enabled.
        props.setProperty(ConsumerConfig.AUTO_COMMIT_INTERVAL_MS_CONFIG, "1000")
        // Standard String key/value deserialization.
        props.setProperty(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, classOf[StringDeserializer].getName)
        props.setProperty(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, classOf[StringDeserializer].getName)
        props
    }

    /**
     * Seeks partition 1 of [[TOPIC_NAME]] to `offset`, polls once, and prints
     * the record found at exactly that offset (if any).
     *
     * @param props  consumer configuration (e.g. from [[consumerConfig]])
     * @param offset the offset to seek to and match
     */
    def consumerSample(props: Properties, offset: Long): Unit = {
        // Bug fix: the incoming `props` parameter was previously shadowed by a
        // local `val props = consumerConfig()`, silently ignoring the caller's
        // configuration. Now the parameter is actually used.
        val consumer = new KafkaConsumer[String, String](props)
        try {
//            println(consumer.partitionsFor(TOPIC_NAME))
            val tp1 = new TopicPartition(TOPIC_NAME, 1)
            consumer.assign(util.Arrays.asList(tp1))
            consumer.seek(tp1, offset)
            val records: ConsumerRecords[String, String] = consumer.poll(Duration.ofMillis(1000))
            // Explicit JavaConverters instead of the deprecated implicit JavaConversions.
            import scala.collection.JavaConverters._
            for (record <- records.asScala if record.offset == offset) {
                println(s"offset = ${record.offset}, partition = ${record.partition}, key = ${record.key}, value = ${record.value}")
            }
        } finally {
            // Always release network resources, even if poll/seek throws.
            consumer.close()
        }
    }

    /**
     * Subscribes to [[TOPIC_NAME]] and prints every record in an endless poll
     * loop, committing offsets synchronously after each non-empty batch
     * (auto-commit is disabled in [[consumerConfig]]).
     */
    def consumerDemo(): Unit = {
        val props: Properties = consumerConfig()
        val consumer = new KafkaConsumer[String, String](props)
        consumer.subscribe(util.Arrays.asList(TOPIC_NAME))
        import scala.collection.JavaConverters._
        try {
            while (true) {
                // Bug fix: was Duration.ofMinutes(1000) — a poll timeout of
                // nearly 17 hours; 1000 ms was clearly intended.
                val records: ConsumerRecords[String, String] = consumer.poll(Duration.ofMillis(1000))
                for (record <- records.asScala) {
                    println(s"topic = ${record.topic()} , partition = ${record.partition} , offset = ${record.offset}, key = ${record.key}, value = ${record.value} , timestamp = ${record.timestamp()}")
                }
                // With enable.auto.commit=false, offsets must be committed
                // explicitly or every restart re-consumes the same records.
                if (!records.isEmpty) consumer.commitSync()
            }
        } finally {
            consumer.close()
        }
    }
}
