package tester.component;

import org.apache.kafka.clients.producer.*;
import org.apache.kafka.common.serialization.StringSerializer;
import tester.Utils.PubUtil;
import tester.common.Constants;
import tester.entitys.KafkaInfo;

import java.util.Properties;

public class Sender {
    // Routing tag identifying messages handled by this sender.
    public static final String prefix = "Sender";

    /**
     * Routes a message to the backend encoded in the tokenized routing header.
     *
     * <p>Expected token layout (assumed from usage — confirm against callers):
     * {@code prefixs[0]}=sender tag, {@code prefixs[1]}=source type,
     * {@code prefixs[2]}=config signature, {@code prefixs[3]}=Kafka topic.
     *
     * @param prefixs   tokenized routing header; must contain at least 3 tokens
     *                  (4 for the Kafka route)
     * @param suffixStr payload in "key:value" form, split by {@link #sendKafka}
     */
    public static void send(String[] prefixs, String suffixStr) {
        // Guard against malformed headers instead of throwing ArrayIndexOutOfBoundsException.
        if (prefixs == null || prefixs.length < 3) {
            System.err.println("Sender.send: malformed routing header, need at least 3 tokens");
            return;
        }
        String sourceType = prefixs[1];
        String signature = prefixs[2];
        switch (sourceType) {
            case Constants.KafkaSource:
                if (prefixs.length < 4) {
                    System.err.println("Sender.send: Kafka route requires a topic token");
                    return;
                }
                String topic = prefixs[3];
                KafkaInfo info = Configer.KafkaMap.get(signature);
                if (info == null) {
                    // Configer.KafkaMap.get may miss; original would NPE inside sendKafka.
                    System.err.println("Sender.send: no Kafka config registered for signature " + signature);
                    return;
                }
                sendKafka(info, topic, suffixStr);
                break;
            default:
                // Previously an unknown source type was silently ignored.
                System.err.println("Sender.send: unsupported source type " + sourceType);
        }
    }

    /**
     * Sends a single "key:value" message to the given Kafka topic and blocks
     * until the broker acknowledges it (acks=all), printing the resulting
     * partition/offset.
     *
     * @param info      broker connection info ({@code info.ips} is the bootstrap server list)
     * @param topic     destination topic
     * @param suffixStr payload; the part before the first ':' becomes the record key,
     *                  the part after it the record value
     */
    private static void sendKafka(KafkaInfo info, String topic, String suffixStr) {
        // 1. Configure producer properties.
        Properties props = new Properties();
        props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, info.ips);
        props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());
        props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());

        // Optional tuning (adjust as needed).
        props.put(ProducerConfig.ACKS_CONFIG, "all"); // wait for the full ISR commit
        props.put(ProducerConfig.RETRIES_CONFIG, 3); // retry count on send failure

        // 2. Create the producer in try-with-resources: the original leaked it
        //    (KafkaProducer holds an I/O thread and sockets and was never closed).
        try (Producer<String, String> producer = new KafkaProducer<>(props)) {
            String key = PubUtil.getPrefix(suffixStr, ':');
            String value = PubUtil.getSuffix(suffixStr, ':');
            ProducerRecord<String, String> record = new ProducerRecord<>(topic, key, value);
            // .get() blocks for the broker ack so failures surface synchronously.
            RecordMetadata metadata = producer.send(record).get();
            System.out.printf("Sent message(key=%s value=%s) to partition=%d offset=%d%n",
                    record.key(), record.value(), metadata.partition(), metadata.offset());
        } catch (InterruptedException e) {
            // Restore interrupt status instead of swallowing it.
            Thread.currentThread().interrupt();
            System.err.println("Sender.sendKafka: interrupted while awaiting broker ack");
        } catch (Exception e) {
            e.printStackTrace();
        }
    }
}
