package com.tt.vtg.scenario.kafka;

import com.tt.vtg.scenario.util.ValueUtils;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.common.serialization.StringSerializer;

import java.util.ArrayList;
import java.util.List;
import java.util.Properties;
import java.util.concurrent.Executors;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;

/**
 * Enum-singleton wrapper around a {@link KafkaProducer} that buffers inbound
 * messages in a bounded queue and flushes them to the {@code inbound} topic on a
 * fixed 100 ms schedule. Callers enqueue via {@link #put(KafkaInboundMsg)}.
 *
 * <p>Thread-safety: {@link LinkedBlockingQueue} makes {@code put} safe from any
 * thread; the drain/send loop runs on a single scheduler thread.
 */
public enum KafkaProducerTemplate {
    INSTANCE;

    private static final String TOPIC = "inbound";
    /** Bounded hand-off buffer; put() blocks when 100 messages are pending. */
    private final LinkedBlockingQueue<KafkaInboundMsg> bufferList = new LinkedBlockingQueue<>(100);
    private final KafkaProducer<String, String> kafkaProducer;

    KafkaProducerTemplate() {
        Properties prop = new Properties();
        // Use ProducerConfig constants throughout for consistency (original mixed
        // raw strings with the BOOTSTRAP_SERVERS_CONFIG constant).
        prop.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, ValueUtils.get("kafka.bootstrap.server"));
        prop.put(ProducerConfig.ACKS_CONFIG, "1");
        prop.put(ProducerConfig.RETRIES_CONFIG, 1);
        prop.put(ProducerConfig.BATCH_SIZE_CONFIG, 16384);
        // NOTE(review): 60 s linger is unusually high — the explicit flush() in
        // exec() is what actually forces records out every cycle; confirm intent.
        prop.put(ProducerConfig.LINGER_MS_CONFIG, 60000);
        prop.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());
        prop.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());

        kafkaProducer = new KafkaProducer<>(prop);

        run();
    }

    /** Starts the fixed-rate drain loop and registers clean shutdown. */
    private void run() {
        ScheduledExecutorService scheduler = Executors.newSingleThreadScheduledExecutor();
        scheduler.scheduleAtFixedRate(this::exec, 0, 100, TimeUnit.MILLISECONDS);
        // Fix: the producer was never closed, so records buffered at JVM exit
        // could be silently lost and the scheduler thread leaked.
        Runtime.getRuntime().addShutdownHook(new Thread(() -> {
            scheduler.shutdown();
            exec(); // drain anything still queued
            kafkaProducer.close();
        }));
    }

    /**
     * Enqueues a message for asynchronous delivery to the {@code inbound} topic.
     * Blocks if the internal buffer (capacity 100) is full.
     */
    public static void put(KafkaInboundMsg record){
        INSTANCE._put(record);
    }

    private void _put(KafkaInboundMsg record) {
        try {
            bufferList.put(record);
        } catch (InterruptedException e) {
            // Fix: restore the interrupt status instead of swallowing it, so the
            // calling thread can observe the interruption.
            Thread.currentThread().interrupt();
        }
    }

    /** Drains the buffer and sends each message, keyed by order id, then flushes. */
    private void exec(){
        List<KafkaInboundMsg> lst = new ArrayList<>();
        bufferList.drainTo(lst);
        if (lst.isEmpty()) {
            return; // nothing to send; skip the flush round-trip
        }

        try{
            lst.forEach(r ->{
                ProducerRecord<String, String> record = new ProducerRecord<>(TOPIC, r.getOrderId(), r.getPayload());
                kafkaProducer.send(record, (metadata, exception) -> {
                    // Fix: the Callback contract passes NON-null metadata even on
                    // failure (empty metadata, offset -1), so the original
                    // `metadata != null` test logged failures as successes and
                    // never surfaced the exception. Branch on the exception.
                    if(exception == null){
                        System.out.printf("Sent record:\nkey = %s \nvalue = %s\n" +
                                        "partition = %d\noffset = %d\n",
                                record.key(), record.value(), metadata.partition(),
                                metadata.offset());
                        System.out.println("======================================");
                    }else{
                        exception.printStackTrace();
                    }
                });
            });
        }finally {
            kafkaProducer.flush();
        }
    }
}
