package com.yanqu.data.analytics.consumer;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.serializer.SerializerFeature;
import com.yanqu.road.utils.ConfigHelper;
import com.yanqu.road.utils.kafka.KafKaHelper;
import org.apache.kafka.clients.producer.RecordMetadata;
import org.apache.logging.log4j.Logger;

import java.util.List;
import java.util.Map;
import java.util.concurrent.Future;

/**
 * Consumer that drains collected analytics records and republishes each one as a
 * JSON message on a Kafka topic, keyed by the configured server id.
 */
public class KafKaProductConsumer extends CollectConsumer {

    /** Kafka topic that collected analytics records are published to. */
    public static String topic = "dzg_ta_log";

    /** Identifier of the game server this consumer forwards records for. */
    private long serverId;

    /**
     * Creates a consumer that forwards collected records to Kafka.
     *
     * @param logger    logger handed to the base collector
     * @param serverUrl endpoint URL passed through to the base collector
     * @param serverId  identifier of the originating server
     */
    public KafKaProductConsumer(Logger logger, String serverUrl, long serverId) {
        super(logger);
        setServerUrl(serverUrl);
        init();
        this.serverId = serverId;
    }

    /** No-op initialization hook; kept so subclasses can add producer setup. */
    public void init(){

    }

    /**
     * Drains one poll batch (3s timeout) of collected records, publishes each as a
     * date-formatted JSON string to {@link #topic}, then flushes the producer and
     * logs how many records were sent.
     */
    @Override
    public void flush() {

        List<Map<String, Object>> tempList = poll(3000);
        int num = 0;
        if (!tempList.isEmpty()) {
            String key = getMessageKey();
            for (Map<String, Object> objectMap : tempList) {
                String data = JSON.toJSONStringWithDateFormat(objectMap, "yyyy-MM-dd HH:mm:ss.SSS", new SerializerFeature[0]);
                // Fire-and-forget send; the batch is confirmed by the flush() below.
                KafKaHelper.getProducer().send(new org.apache.kafka.clients.producer.ProducerRecord<>(topic, key, data));
                // FIX: num was never incremented, so the log line always reported 0.
                num++;
            }
            KafKaHelper.getProducer().flush();
            logger.info("kafka 消费记录[{}]", num);
        }

    }

    /**
     * Message key for published records: the configured server id, so all records
     * from one server land on the same partition.
     * NOTE(review): reads the "serverId" config key rather than the {@code serverId}
     * field passed to the constructor — presumably equivalent; verify against config.
     */
    public String getMessageKey(){
        return ConfigHelper.getValue("serverId");
    }

    /** Closes the shared Kafka producer if one exists, releasing its resources. */
    @Override
    public void close() {
        if(KafKaHelper.getProducer() != null) {
            KafKaHelper.getProducer().close();
        }
    }
}
