package me.seawenc.datastash.outputs.kafka;

import me.seawenc.datastash.outputs.AbstractConsumer;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.clients.producer.RecordMetadata;

import java.util.Properties;
import java.util.concurrent.ExecutionException;

/**
 * Pipeline output that forwards each consumed message to a Kafka topic.
 *
 * <p>NOTE(review): despite the class name (kept for compatibility with existing
 * wiring), this "consumer" of pipeline messages is a Kafka <em>producer</em>.
 * Configuration is read from properties prefixed with {@code output.kafka.};
 * the target topic comes from {@code output.kafka.topic}.
 */
public class KafkaConsumer extends AbstractConsumer {

    private final org.apache.kafka.clients.producer.KafkaProducer<String, String> producer;

    /** Target topic, taken from the "output.kafka.topic" property. */
    private final String topic;

    /**
     * Builds the underlying Kafka producer from the file-level properties.
     *
     * @param props raw configuration loaded from file; keys prefixed with
     *              "output.kafka." are forwarded to the producer
     */
    public KafkaConsumer(Properties props) {
        super(props);
        Properties properties = getKafkaConfig(props);
        topic = properties.getProperty("topic");
        // Dump the effective producer configuration for troubleshooting.
        System.out.println("kafka完整参数：");
        properties.forEach((k, v) -> System.out.println(k + "=" + v));
        producer = new org.apache.kafka.clients.producer.KafkaProducer<>(properties);
    }

    /**
     * Extracts producer settings from {@code fileProps}: keys starting with
     * "output.kafka." have that prefix stripped, then sensible defaults are
     * applied for any value left unset.
     */
    private Properties getKafkaConfig(Properties fileProps) {
        final String prefix = "output.kafka.";
        Properties properties = new Properties();
        fileProps.stringPropertyNames().stream()
                .filter(key -> key.startsWith(prefix))
                // substring strips only the leading prefix; replace() would also
                // remove any later occurrence of the substring inside the key.
                .forEach(key -> properties.put(key.substring(prefix.length()), fileProps.getProperty(key)));

        setDefValue(properties, "acks", "all");
        setDefValue(properties, "key.serializer", "org.apache.kafka.common.serialization.StringSerializer");
        setDefValue(properties, "value.serializer", "org.apache.kafka.common.serialization.StringSerializer");
        // NOTE(review): deserializer settings are ignored by a producer (the client
        // only logs an "unknown config" warning); kept so the printed config dump
        // stays unchanged — confirm whether they can be dropped.
        setDefValue(properties, "key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
        setDefValue(properties, "value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
        setDefValue(properties, "max.request.size", "10485760");
        return properties;
    }

    /** Sets {@code key} to {@code def} when it is currently absent or empty. */
    private void setDefValue(Properties target, String key, String def) {
        String val = target.getProperty(key);
        if (val == null || "".equals(val)) {
            target.setProperty(key, def);
        }
    }

    /**
     * Sends one message synchronously to the configured topic, blocking until
     * the broker acknowledges it, and logs the partition it landed on.
     * Failures are logged but not propagated (best-effort delivery).
     *
     * @param msg payload to publish
     */
    @Override
    public void consumption(String msg) {
        try {
            ProducerRecord<String, String> record = new ProducerRecord<>(topic, msg);
            // Blocking send: waits for broker acknowledgement before returning.
            RecordMetadata metadata = producer.send(record).get();
            System.out.println("topic:" + topic + ",partition = " + metadata.partition());
        } catch (InterruptedException e) {
            // Restore the interrupt flag so callers can still observe cancellation.
            Thread.currentThread().interrupt();
            System.out.println("kafka推送异常，msg:" + e.getMessage());
        } catch (ExecutionException e) {
            System.out.println("kafka推送异常，msg:" + e.getMessage());
        }
    }

}
