package org.example.kafka

import org.apache.kafka.clients.consumer.ConsumerConfig
import org.apache.kafka.clients.consumer.KafkaConsumer
import org.apache.kafka.common.TopicPartition
import org.apache.kafka.common.serialization.StringDeserializer
import java.time.Duration
import java.util.*


// Kafka broker endpoint used by createConsumer(); SASL_PLAINTEXT listener.
const val bootstrapServers = "192.168.180.146:9094"
// SASL/PLAIN credentials interpolated into sasl.jaas.config in createConsumer().
// NOTE(review): hard-coded credentials in source — move to env vars / config before sharing this file.
const val username = "root"
const val password = "123456"


/**
 * Debug entry point: consumes one partition of [topic] starting at a fixed
 * offset and prints each record, stopping after roughly [max] records or when
 * a poll returns empty.
 *
 * Fixes vs. the previous version:
 *  - removed the bare `return` that made the whole poll loop (and the
 *    `finally { consumer.close() }`) unreachable, leaking the consumer;
 *  - removed the unused committed-offset lookup whose `!!` would NPE when the
 *    group had never committed;
 *  - removed the consumeAll() call that silently overrode the explicit seek
 *    to startOffset with its own seek to the committed offset;
 *  - the "starting from offset" log now prints before consumption begins;
 *  - the loop bound is checked against the updated count, so it no longer
 *    overshoots `max` by an extra poll.
 */
fun main() {
    val topic = "mes_059"
    val groupId = "mes_059-log-group"
    val partition = 0            // partition ID to consume
    val startOffset = 6075707L   // offset to start reading from
    val max = 100                // stop once at least this many records were consumed

    val consumer = createConsumer(groupId, 1000)
    val topicPartition = TopicPartition(topic, partition)
    // Manual assignment (no group rebalancing) so seek() is allowed immediately.
    consumer.assign(listOf(topicPartition))
    consumer.seek(topicPartition, startOffset)

    println("开始消费 $topic 分区 $partition 从 offset $startOffset ...")
    try {
        var count = 0
        while (count < max) {
            val records = consumer.poll(Duration.ofSeconds(1))
            if (records.isEmpty) {
                println("没有获取到任何消息")
                break
            }
            for (record in records) {
                println("${records.count()} 收到消息：offset=${record.offset()}, key=${record.key()}, value=${record.value()}")
            }
            count += records.count()
            // Offsets are intentionally NOT committed: this is a read-only debug replay.
        }
        println("一共消费 $count")
    } finally {
        consumer.close() // always release network resources, even on exception
    }
}

/**
 * Drains [topicPartition] up to its current end offset (high watermark) by
 * delegating to [consumeToIndex]. Does nothing when the broker returns no end
 * offset for the partition.
 */
fun consumeAll(consumer: KafkaConsumer<String, String>, topicPartition: TopicPartition) {
    consumer.endOffsets(listOf(topicPartition))[topicPartition]?.let { highWatermark ->
        consumeToIndex(consumer, topicPartition, highWatermark)
    }
}

/**
 * Consumes [topicPartition] from the group's last committed offset up to
 * (approximately) [index], committing synchronously after each poll.
 *
 * Returns without doing anything when the group has never committed an offset
 * for this partition (previously this dereferenced the null committed offset
 * with `!!` and threw an NPE), or when the committed offset is already past
 * [index].
 *
 * Note: the target is approximate — a poll may overshoot [index] by up to one
 * batch, and the overshoot is still committed.
 */
fun consumeToIndex(consumer: KafkaConsumer<String, String>, topicPartition: TopicPartition, index: Long) {
    val committed = consumer.committed(setOf(topicPartition))
    // committed() maps a partition to null when the group has no committed offset for it.
    val commitOffset = committed[topicPartition]?.offset() ?: return
    if (commitOffset > index) return

    var remaining = index - commitOffset
    consumer.seek(topicPartition, commitOffset)
    println("开始消费 $topicPartition 分区 从 offset $commitOffset $remaining")
    while (remaining > 0) {
        val records = consumer.poll(Duration.ofSeconds(2))
        if (records.isEmpty) {
            break // nothing more available right now; stop rather than spin
        }
        remaining -= records.count()
        consumer.commitSync() // persist progress after each successfully polled batch
    }
}

/**
 * Builds a String/String [KafkaConsumer] for the given [groupId] with manual
 * offset commits, at most [maxPoll] records per poll(), and SASL/PLAIN
 * authentication against [bootstrapServers].
 */
fun createConsumer(groupId: String, maxPoll: Int): KafkaConsumer<String, String> {
    val config = Properties()
    config[ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG] = bootstrapServers
    config[ConsumerConfig.GROUP_ID_CONFIG] = groupId
    config[ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG] = StringDeserializer::class.java.name
    config[ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG] = StringDeserializer::class.java.name
    config[ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG] = "false"          // offsets are committed manually
    config[ConsumerConfig.MAX_POLL_RECORDS_CONFIG] = maxPoll.toString() // cap on records per poll()
    // SASL/PLAIN authentication (switch to SASL_SSL when TLS is in use)
    config["security.protocol"] = "SASL_PLAINTEXT"
    config["sasl.mechanism"] = "PLAIN"
    config["sasl.jaas.config"] =
        "org.apache.kafka.common.security.plain.PlainLoginModule required " +
                "username=\"$username\" " +
                "password=\"$password\";"
    return KafkaConsumer(config)
}

