package org.xhy.sniffer.service;


import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.Producer;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.common.serialization.StringSerializer;
import org.xhy.sniffer.boot.BootService;
import org.xhy.sniffer.datacarrier.DataCarrier;
import org.xhy.sniffer.datacarrier.consumer.IConsumer;
import org.xhy.sniffer.trace.TraceContext;
import org.xhy.sniffer.trace.TraceSegment;
import org.xhy.sniffer.trace.TracingContextListener;

import java.util.List;
import java.util.Properties;

/**
 * Ships finished {@link TraceSegment}s to Kafka.
 *
 * <p>Segments are handed off via {@link #afterFinished(TraceSegment)} into a
 * {@link DataCarrier} buffer; a consumer thread drains the buffer in
 * {@link #consume(List)} and publishes each segment as a JSON record, keyed by
 * trace id, to the {@code trace_kafka} topic.
 *
 * <p>Thread-safety note: the consumer thread is started in the constructor,
 * but the Kafka producer is only created later in {@link #boot(Properties)}.
 * {@code producer} is therefore {@code volatile}, and {@link #consume(List)}
 * skips batches that arrive before the producer exists.
 */
public class KafkaSegmentClient implements BootService, IConsumer<TraceSegment>, TracingContextListener {

    /** Shared, thread-safe JSON serializer (ObjectMapper is safe for concurrent use). */
    private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper();

    /** Destination topic for serialized trace segments. */
    private static final String TOPIC = "trace_kafka";

    /** Buffer between the tracing threads (producers) and the Kafka-sending consumer thread. */
    private final DataCarrier dataCarrier;

    /**
     * Created in {@link #boot(Properties)}; read from the DataCarrier consumer
     * thread. Declared volatile so the consumer thread observes the assignment
     * made by the boot thread.
     */
    private volatile Producer<String, String> producer;

    public KafkaSegmentClient() {
        // 1 channel x 20 slots; a single consumer thread drains it via consume().
        dataCarrier = new DataCarrier(1, 20);
        dataCarrier.consume(this, 1);
    }

    /**
     * Registers this client as a tracing-context listener and creates the
     * Kafka producer from the supplied configuration.
     *
     * @param properties agent configuration; must contain a {@code kafkaHost}
     *                   entry with the bootstrap servers list
     */
    @Override
    public void boot(Properties properties) {
        TraceContext.ListenerManager.add(this);

        // Build the Kafka producer configuration.
        Properties props = new Properties();
        props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, properties.getProperty("kafkaHost"));
        props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());
        props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());

        this.producer = new KafkaProducer<>(props);
    }

    @Override
    public void init(Properties properties) {
        // No pre-boot initialization required.
    }

    /**
     * Drains a batch of segments from the DataCarrier and publishes each one
     * to Kafka as a JSON record keyed by trace id.
     *
     * <p>A segment that fails JSON serialization is reported and skipped so
     * that one bad segment does not abort the rest of the batch (the previous
     * behavior of throwing a RuntimeException dropped all remaining segments).
     *
     * @param data batch of finished trace segments to publish
     */
    @Override
    public void consume(List<TraceSegment> data) {
        Producer<String, String> localProducer = this.producer;
        if (localProducer == null) {
            // boot() has not run yet; the producer does not exist. Segments
            // arriving this early cannot be sent — report and drop the batch
            // rather than throwing NullPointerException on the consumer thread.
            System.err.println("KafkaSegmentClient not booted yet; dropping " + data.size() + " segment(s)");
            return;
        }

        for (TraceSegment traceSegment : data) {
            String json;
            try {
                json = OBJECT_MAPPER.writeValueAsString(traceSegment);
            } catch (JsonProcessingException e) {
                // Skip only the unserializable segment; keep processing the batch.
                System.err.println("Failed to serialize trace segment "
                        + traceSegment.getTraceId() + ": " + e.getMessage());
                continue;
            }

            ProducerRecord<String, String> record = new ProducerRecord<>(TOPIC, traceSegment.getTraceId(), json);

            // Asynchronous send; the callback reports per-record success/failure.
            localProducer.send(record, (metadata, exception) -> {
                if (exception != null) {
                    System.err.println("Error while sending message: " + exception.getMessage());
                } else {
                    System.out.println("Message sent successfully to topic: " + TOPIC +
                            " | Partition: " + metadata.partition() +
                            " | Offset: " + metadata.offset());
                }
            });
        }
    }

    /**
     * Reports a batch-level consumption failure instead of silently
     * swallowing it (the segments in {@code data} are lost either way, but
     * the operator should at least see the cause).
     */
    @Override
    public void onError(List<TraceSegment> data, Throwable t) {
        System.err.println("Failed to consume " + data.size() + " trace segment(s): " + t.getMessage());
    }

    /**
     * Best-effort flush of buffered records when the consumer thread exits,
     * so in-flight segments are not lost on shutdown.
     */
    @Override
    public void onExit() {
        Producer<String, String> localProducer = this.producer;
        if (localProducer != null) {
            localProducer.flush();
        }
    }

    /**
     * TracingContextListener callback: hands a finished segment to the
     * DataCarrier buffer; the consumer thread sends it to Kafka asynchronously.
     */
    @Override
    public void afterFinished(TraceSegment traceSegment) {
        dataCarrier.produce(traceSegment);
    }

}
