package com.sbp.message.send_message.impl;

import java.util.*;
import java.util.concurrent.ConcurrentHashMap;

import com.alibaba.fastjson.JSON;
import com.dap.utils.AsyncQueue;
import com.sbp.message.api.entity.Message;
import com.sbp.message.api.service.MessageSysInfoService;
import com.sbp.message.send_message.MessageSender;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Service;
import org.springframework.util.StringUtils;

/**
 * {@link MessageSender} implementation that publishes messages to Kafka.
 *
 * <p>Messages whose {@code allowConsumeTime} lies in the future are parked in a
 * per-destination {@link AsyncQueue} and handed back through
 * {@link #consumeAsyncQueueContent(Object)} when due; messages without a delay
 * (or whose delay has already elapsed) are sent to Kafka immediately.
 *
 * <p>Created by wangmin on 2019/4/18.
 */
@Service
public class KafkaMessageSender implements MessageSender, AsyncQueue.AsyncQueueConsumer {
    private static final Logger logger = LoggerFactory.getLogger(KafkaMessageSender.class);
    private final KafkaProducer<String, String> producer;
    // Delay queues keyed by message destination. ConcurrentHashMap replaces the
    // previous HashMap-plus-synchronized-block scheme, which left the iteration in
    // close() unsynchronized (a data race against concurrent sendMessage calls).
    private final Map<String, AsyncQueue> asyncQueueMap = new ConcurrentHashMap<>();

    /**
     * Builds the underlying {@link KafkaProducer}.
     *
     * @param bootstrapServers Kafka bootstrap server list (host:port,...)
     * @param keySerializer    fully-qualified key serializer class name
     * @param valueSerializer  fully-qualified value serializer class name
     */
    @Autowired
    public KafkaMessageSender(
            @Value("${kafka.bootstrapServers}") String bootstrapServers,
            @Value("${kafka.keySerializer}") String keySerializer,
            @Value("${kafka.valueSerializer}") String valueSerializer) {
        Properties props = new Properties();
        props.put("bootstrap.servers", bootstrapServers);
        props.put("key.serializer", keySerializer);
        props.put("value.serializer", valueSerializer);

        // NOTE(review): the former "producer.type=sync" entry was removed — it
        // belonged to the legacy Scala producer and is ignored (with a startup
        // warning) by the new-client KafkaProducer.

        // Leader-only acknowledgement.
        props.put("acks", "1");

        // NOTE(review): 200 ms is far below the broker-client default (30 s) and,
        // combined with retries=0 below, makes sends fragile under broker load —
        // confirm this aggressive timeout is intentional.
        props.put("request.timeout.ms", 200);

        props.put("retries", 0);

        props.put("batch.size", 16384);
        // NOTE(review): linger.ms=1000 holds every record for up to 1 s to improve
        // batching, which is at odds with the (removed) "sync" intent — verify.
        props.put("linger.ms", 1000);

        producer = new KafkaProducer<>(props);
    }

    /**
     * Queues or sends a message.
     *
     * @param message the message to send; must be non-null with a non-empty destination
     * @return {@code true} if the message was sent or enqueued, {@code false} if invalid
     */
    @Override
    public boolean sendMessage(final Message message) {
        if (!validateMessage(message)) {
            logger.warn("invalid message={}", JSON.toJSONString(message));
            return false;
        }

        // 队列按照消息类型分组 — one delay queue per destination; computeIfAbsent
        // creates it atomically without an explicit lock.
        AsyncQueue asyncQueue =
                asyncQueueMap.computeIfAbsent(message.getDestination(), k -> new AsyncQueue());

        Long earliestConsumeTime =
                message.getAllowConsumeTime() != null ? message.getAllowConsumeTime().getTime() : null;
        if (null == earliestConsumeTime || earliestConsumeTime <= System.currentTimeMillis()) {
            // No delay requested, or the delay has already elapsed — send now.
            consumeAsyncQueueContent(message);
        } else {
            logger.info("messageId={}, earliestConsumeTime={}, 消息加入到发送队列(经过队列之后才会发送到kafka)",
                    message.getId(), earliestConsumeTime);
            asyncQueue.enqueueMessage(this, message, earliestConsumeTime);
        }
        return true;
    }

    /** A message is sendable only if it is non-null and carries a destination. */
    private static boolean validateMessage(Message message) {
        return message != null && !StringUtils.isEmpty(message.getDestination());
    }

    @Override
    public void close() {
        // Close the delay queues BEFORE the producer. The previous order closed the
        // producer first, so anything a queue flushed during shutdown would hit an
        // already-closed producer.
        for (AsyncQueue asyncQueue : asyncQueueMap.values()) {
            asyncQueue.close();
        }
        producer.close();
    }

    /**
     * Serializes the message to JSON and publishes it to the destination's real
     * Kafka topic. Invoked directly for immediate sends and by {@link AsyncQueue}
     * for delayed ones.
     *
     * @param content the {@link Message} to publish (AsyncQueue callback contract)
     */
    @Override
    public void consumeAsyncQueueContent(Object content) {
        final Message message = (Message) content;
        logger.info("messageId={}, 消息被发送到kafka", message.getId());

        ProducerRecord<String, String> data;
        String kafkaRealTopicName = MessageSysInfoService.generateKafkaRealTopicName(message.destination);
        String messageJson = JSON.toJSONString(message);
        if (message.isOrderly) {
            // Orderly messages are pinned to partition 0 so their order is preserved.
            data = new ProducerRecord<>(kafkaRealTopicName, 0, message.id, messageJson);
        } else {
            data = new ProducerRecord<>(kafkaRealTopicName, message.id, messageJson);
        }
        // Report delivery failures — the previous fire-and-forget send discarded the
        // Future, so broker-side errors (timeouts, unknown topic, ...) were silent.
        producer.send(data, (metadata, exception) -> {
            if (exception != null) {
                logger.error("messageId={}, failed to send message to kafka", message.getId(), exception);
            }
        });
    }
}

