package com.sali.avrodecorator;

import com.sali.common.KafkaUtils;
import com.sali.pojo.KafkaEntity;
import org.apache.avro.Schema;
import org.apache.avro.generic.GenericDatumReader;
import org.apache.avro.generic.GenericRecord;
import org.apache.avro.io.BinaryDecoder;
import org.apache.avro.io.DecoderFactory;
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.common.serialization.ByteArrayDeserializer;
import org.apache.kafka.common.serialization.StringDeserializer;

import java.io.File;
import java.time.Duration;
import java.util.Collections;
import java.util.Properties;

public class AvroConsumer extends KafkaUtils {

    /**
     * Polls one batch (up to 1 second) from the topic configured in {@code kafkaEntity}
     * and decodes each record value as Avro binary data using the schema file at
     * {@code kafkaEntity.getAvroFilePath()}, printing every decoded record to stdout.
     *
     * <p>Builds a fresh consumer per call; the consumer is closed before returning.
     * Any failure (schema parse, broker, decode) is reported to stdout — the method
     * is deliberately best-effort and never throws.
     *
     * @param kafkaEntity connection parameters (broker, topic, group id, offset reset,
     *                    Avro schema path); validated by {@code checkParams}
     */
    public static void consumeData(KafkaEntity kafkaEntity) {
        checkParams(kafkaEntity);

        Properties kafkaAvroProperties = getKafkaProperties(kafkaEntity.getBrokerIp());
        kafkaAvroProperties.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, ByteArrayDeserializer.class.getName());
        kafkaAvroProperties.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName());
        // NOTE(review): Properties is a Hashtable and throws NPE on null values —
        // confirm KafkaEntity guarantees non-null consumeOffset/groupId (main() below
        // never sets them).
        kafkaAvroProperties.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, kafkaEntity.getConsumeOffset());
        kafkaAvroProperties.put(ConsumerConfig.GROUP_ID_CONFIG, kafkaEntity.getGroupId());

        // try-with-resources: the original never closed the consumer, leaking sockets
        // and its heartbeat thread on every call — fatal when main() loops forever.
        try (KafkaConsumer<String, byte[]> consumer = new KafkaConsumer<>(kafkaAvroProperties)) {
            consumer.subscribe(Collections.singleton(kafkaEntity.getKafkaTopic()));

            Schema avroSchema = new Schema.Parser().parse(new File(kafkaEntity.getAvroFilePath()));
            // The reader is schema-bound and reusable — create it once, not per record.
            GenericDatumReader<GenericRecord> datumReader = new GenericDatumReader<>(avroSchema);

            ConsumerRecords<String, byte[]> records = consumer.poll(Duration.ofMillis(1000));
            BinaryDecoder binaryDecoder = null;
            for (ConsumerRecord<String, byte[]> record : records) {
                // Pass the previous decoder back in so Avro reuses its buffers.
                binaryDecoder = DecoderFactory.get().binaryDecoder(record.value(), binaryDecoder);
                GenericRecord genericRecord = datumReader.read(null, binaryDecoder);
                System.out.println("获取到的结果为：" + genericRecord);
            }
        } catch (Exception e) {
            // Keep the original best-effort contract, but don't discard the stack
            // trace — getMessage() alone made failures undiagnosable.
            System.out.println("消费出错：" + e.getMessage());
            e.printStackTrace();
        }
    }

    /**
     * Manual smoke test: polls the hard-coded topic every 2 seconds until killed.
     *
     * @throws InterruptedException if the sleep between polls is interrupted
     */
    public static void main(String[] args) throws InterruptedException {
        final String brokerIp = "172.16.80.25:6667";
        final String kafkaTopic = "zh_avro_topic002";
        final String avroFilePath = "C:\\Users\\zh\\Desktop\\中台项目\\sali-tools\\kafka-tools\\src\\main\\resources\\test01.avro";
        KafkaEntity kafkaEntity = new KafkaEntity();
        kafkaEntity.setBrokerIp(brokerIp);
        kafkaEntity.setKafkaTopic(kafkaTopic);
        kafkaEntity.setAvroFilePath(avroFilePath);
        // NOTE(review): group id and offset reset are never set here — verify
        // KafkaEntity supplies defaults, otherwise consumeData fails on every call.

        while (true) {
            consumeData(kafkaEntity);
            Thread.sleep(2 * 1000);
        }
    }

}
