package com.rock.code.common.util;

import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.Producer;
import org.apache.kafka.clients.producer.ProducerRecord;

import java.util.Properties;

/**
 * ${DESCRIPTION} kafka的工具类
 *
 * @author zhangli
 * @create 2019/9/10 16:02
 **/
public class KafkaUtil {

    /** Configuration bundle loaded from {@code kafka.properties} on the classpath. */
    private static final java.util.ResourceBundle KAFKA_INFO = java.util.ResourceBundle.getBundle("kafka");

    /** Minimum interval (ms) between producer re-initialisations, from key "realtimerinterval". */
    private static final int RELOAD_INTERVAL_MS = Integer.parseInt(KAFKA_INFO.getString("realtimerinterval"));

    /** Timestamp of the last (re-)initialisation; 0 forces the first init. */
    private static long lastInitMillis = 0;

    /** Topic messages are published to, read from key "kafka_topic". */
    private static String kafkaTopic = null;

    // Value of "group.id", used below as the record KEY on send.
    // NOTE(review): group.id is a consumer-side setting; using it as a
    // producer message key looks suspicious, but it is preserved here for
    // behavioural compatibility — confirm with the topic's consumers.
    private static String groupId = null;

    /** Shared producer instance; rebuilt at most every RELOAD_INTERVAL_MS. */
    private static Producer<String, Object> producer = null;

    /**
     * (Re-)initialises the shared Kafka producer when the reload interval has
     * elapsed, or when no producer exists yet (e.g. after {@link #colseProducer()}).
     *
     * <p>Fixes over the previous revision: the old producer is closed before
     * being replaced (it previously leaked on every reload), "buffer.memory"
     * is parsed as a {@code long} (it may exceed {@code Integer.MAX_VALUE}),
     * and the method is {@code synchronized} so concurrent senders cannot
     * race on the shared state.
     */
    public static synchronized void initkafka() {
        boolean stale = System.currentTimeMillis() - lastInitMillis > RELOAD_INTERVAL_MS;
        if (producer != null && !stale) {
            return;
        }

        kafkaTopic = KAFKA_INFO.getString("kafka_topic");
        groupId = KAFKA_INFO.getString("group.id");

        Properties props = new Properties();
        props.put("bootstrap.servers", KAFKA_INFO.getString("bootstrap.servers"));
        props.put("acks", KAFKA_INFO.getString("acks"));
        props.put("retries", Integer.parseInt(KAFKA_INFO.getString("retries")));
        props.put("batch.size", Integer.parseInt(KAFKA_INFO.getString("batch.size")));
        props.put("linger.ms", Integer.parseInt(KAFKA_INFO.getString("linger.ms")));
        // buffer.memory is a long-valued Kafka config; parsing it as int
        // overflows for values above ~2 GiB.
        props.put("buffer.memory", Long.parseLong(KAFKA_INFO.getString("buffer.memory")));
        props.put("key.serializer", KAFKA_INFO.getString("key.serializer"));
        props.put("value.serializer", KAFKA_INFO.getString("value.serializer"));

        // Close the previous instance before replacing it; otherwise its
        // network thread and record buffers leak on every reload.
        if (producer != null) {
            try {
                producer.close();
            } catch (Exception e) {
                e.printStackTrace();
            }
        }
        producer = new KafkaProducer<>(props);
        lastInitMillis = System.currentTimeMillis();
    }

    /**
     * Publishes {@code msg} to the configured topic, initialising (or
     * refreshing) the producer first if necessary. The record key is the
     * configured group.id value, preserved from the original implementation.
     *
     * @param msg the message payload to publish
     */
    public static synchronized void sendMsgToKafka(Object msg) {
        initkafka();
        producer.send(new ProducerRecord<String, Object>(kafkaTopic, groupId, msg));
    }

    /**
     * Closes the shared producer (flushing any buffered records) and clears
     * the reference so the next send re-initialises it — previously a send
     * after close could NPE until the reload interval elapsed. The method
     * name's "colse" typo is kept for source compatibility with callers.
     */
    public static synchronized void colseProducer() {
        if (producer != null) {
            try {
                producer.close();
            } catch (Exception e) {
                e.printStackTrace();
            } finally {
                // Clear even if close() threw, so initkafka() rebuilds cleanly.
                producer = null;
            }
        }
    }

}
