package com.sali.avrodecorator;

import com.alibaba.fastjson.JSONObject;
import com.sali.common.KafkaUtils;
import com.sali.pojo.KafkaEntity;
import org.apache.avro.Schema;
import org.apache.avro.generic.GenericData;
import org.apache.avro.generic.GenericDatumWriter;
import org.apache.avro.io.BinaryEncoder;
import org.apache.avro.io.DatumWriter;
import org.apache.avro.io.EncoderFactory;
import org.apache.avro.specific.SpecificDatumWriter;
import org.apache.commons.lang3.StringUtils;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.common.serialization.ByteArraySerializer;
import org.apache.kafka.common.serialization.StringSerializer;

import java.io.ByteArrayOutputStream;
import java.io.File;
import java.util.Properties;

/**
 * Kafka producer that Avro-binary-encodes a fastjson {@link JSONObject} against a schema
 * file and publishes the resulting bytes to the topic named in the {@link KafkaEntity}.
 */
public class AvroProducer extends KafkaUtils {

    /**
     * Serializes {@code dataJson} with the Avro schema referenced by
     * {@code kafkaEntity.getAvroFilePath()} and sends it to {@code kafkaEntity.getKafkaTopic()}.
     *
     * @param kafkaEntity connection info: broker address, topic, and Avro schema file path
     * @param dataJson    flat JSON object whose keys must match the schema's field names
     * @throws IllegalArgumentException if the Avro schema file path is blank
     * @throws Exception                on schema-parse, serialization, or Kafka failures
     */
    public static void sendData(KafkaEntity kafkaEntity, JSONObject dataJson) throws Exception {
        checkParams(kafkaEntity);
        if (StringUtils.isBlank(kafkaEntity.getAvroFilePath())) {
            // IllegalArgumentException is a RuntimeException subtype, so existing callers
            // catching RuntimeException still work.
            throw new IllegalArgumentException("avro文件路径为空...");
        }

        Properties properties = getKafkaProperties(kafkaEntity.getBrokerIp());
        properties.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());
        properties.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, ByteArraySerializer.class.getName());

        // Encode before opening the producer so a bad schema/record never leaks a producer.
        byte[] avroData = encodeRecord(kafkaEntity.getAvroFilePath(), dataJson);

        // try-with-resources guarantees the producer is flushed and closed even if send() throws;
        // close() waits for in-flight sends, so the async callback still fires.
        try (KafkaProducer<String, byte[]> producer = new KafkaProducer<>(properties)) {
            // NOTE(review): the constant "" key hashes every record onto one partition —
            // confirm this is intended; a null key would round-robin across partitions.
            ProducerRecord<String, byte[]> record =
                    new ProducerRecord<>(kafkaEntity.getKafkaTopic(), "", avroData);

            producer.send(record, (metadata, exception) -> {
                if (exception == null) {
                    System.out.println("消息发送成功，Topic: " + metadata.topic() +
                            ", Partition: " + metadata.partition() +
                            ", Offset: " + metadata.offset());
                } else {
                    System.err.println("消息发送失败: " + exception.getMessage());
                }
            });
        }
    }

    /**
     * Parses the Avro schema file and binary-encodes {@code dataJson} into a byte array.
     *
     * @param avroFilePath path to the Avro schema (.avsc/.avro) file
     * @param dataJson     field name → value pairs copied verbatim into the record
     * @return Avro binary encoding of the record
     * @throws Exception on schema-parse or encoding failure
     */
    private static byte[] encodeRecord(String avroFilePath, JSONObject dataJson) throws Exception {
        Schema avroSchema = new Schema.Parser().parse(new File(avroFilePath));

        GenericData.Record avroRecord = new GenericData.Record(avroSchema);
        for (String key : dataJson.keySet()) {
            avroRecord.put(key, dataJson.get(key));
        }

        // GenericDatumWriter is the correct writer for GenericData.Record;
        // SpecificDatumWriter is meant for Avro-generated SpecificRecord classes.
        DatumWriter<GenericData.Record> datumWriter = new GenericDatumWriter<>(avroSchema);
        ByteArrayOutputStream baos = new ByteArrayOutputStream();
        BinaryEncoder binaryEncoder = EncoderFactory.get().binaryEncoder(baos, null);
        datumWriter.write(avroRecord, binaryEncoder);
        binaryEncoder.flush();
        return baos.toByteArray();
    }

    /** Manual smoke test: sends three generated records two seconds apart. */
    public static void main(String[] args) {
        final String brokerIp = "192.168.80.103:6667";
        final String kafkaTopic = "zh_avro_topic002";
        final String avroFilePath = "C:\\Users\\zh\\Desktop\\中台项目\\sali-tools\\kafka-tools\\src\\main\\resources\\test01.avro";
        KafkaEntity kafkaEntity = KafkaEntity.builder().kafkaTopic(kafkaTopic).brokerIp(brokerIp).avroFilePath(avroFilePath).build();

        try {
            for (int i = 0; i < 3; i++) {
                JSONObject dataJson = generateDataJson(i);
                sendData(kafkaEntity, dataJson);

                System.out.println("发送成功条数: " + (i + 1));
                Thread.sleep(2 * 1000);
            }
        } catch (InterruptedException e) {
            // Restore the interrupt flag instead of swallowing it.
            Thread.currentThread().interrupt();
            System.out.println("数据发送异常" + e.getMessage());
        } catch (Exception e) {
            System.out.println("数据发送异常" + e.getMessage());
        }
    }

    /**
     * Builds a synthetic test record; field names must match the schema in {@code test01.avro}.
     *
     * @param i sequence number mixed into each generated field value
     * @return populated test payload
     */
    private static JSONObject generateDataJson(int i) {
        JSONObject dataJson = new JSONObject();

        dataJson.put("data_source", "datasource_" + i);
        dataJson.put("account_id", "accountId_" + i);
        dataJson.put("mobile_phone", 1000 + i);
        dataJson.put("brand", "brand_" + i);
        dataJson.put("model", "model_" + i);
        dataJson.put("price", 10.1f + i);
        dataJson.put("insert_day", 20240201);
        dataJson.put("insert_hour", 10);
        dataJson.put("insert_min", 56);

        return dataJson;
    }

}
