package com.lzq.mq.broker.core;

import com.lzq.mq.broker.cache.CommonCache;
import com.lzq.mq.broker.model.*;
import lombok.extern.slf4j.Slf4j;

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import static com.lzq.mq.common.constants.BrokerConstants.CONSUME_QUEUE_ELEMENT_BYTE_SIZE;

/**
 * Consume-queue message handler: pulls batches of messages for a consumer group
 * and advances the group's consume offset on acknowledgement.
 *
 * <p>Offset bookkeeping lives in {@code CommonCache.getConsumeQueueOffsetModel()};
 * each queue's progress is stored as a string of the form {@code "fileName#offset"}.
 */
@Slf4j
public class ConsumeQueueConsumeHandler {

    /** Initial "fileName#offset" progress string for a queue that has never been consumed. */
    private static final String INITIAL_OFFSET_INFO = "00000000#0";

    /**
     * Pulls a batch of messages from one consume queue on behalf of a consumer group.
     *
     * <p>Each consume-queue record is a small index element pointing into the
     * CommitLog ({@code msgOffset}, {@code msgLength}); the actual message bodies
     * are read from the CommitLog file of the topic.
     *
     * @param req pull request carrying topic, queueId, consumeGroup and batchSize
     * @return the CommitLog message bodies, or {@code null} when the group has
     *         already consumed up to the queue's latest offset (kept as null,
     *         not an empty list, for caller compatibility)
     * @throws IllegalArgumentException if the topic or the consume queue does not exist
     */
    public List<byte[]> consume(ConsumeQueueConsumeReqModel req) {

        // Validate that the topic and the target consume queue exist.
        MqTopicModel mqTopicModel = CommonCache.getMqTopicModelMap().get(req.getTopic());
        if (mqTopicModel == null) {
            throw new IllegalArgumentException("主题不存在");
        }
        ConsumeQueueModel consumeQueueModel = mqTopicModel.getQueueList().get(req.getQueueId());
        if (consumeQueueModel == null) {
            throw new IllegalArgumentException("消费队列不存在");
        }

        // Resolve (lazily initializing) this group's per-queue consume progress.
        Map<String, String> consumerGroupConsumeMap =
                getOrInitConsumeProgress(req.getTopic(), req.getConsumeGroup(), mqTopicModel);

        // Progress entries have the form "fileName#offset"; only the offset is needed here.
        String offsetStrInfo = consumerGroupConsumeMap.get(String.valueOf(req.getQueueId()));
        int consumeQueueOffset = Integer.parseInt(offsetStrInfo.split("#")[1]);

        // The group has caught up with the queue head: nothing new to consume.
        if (consumeQueueModel.getLatestOffset().get() <= consumeQueueOffset) {
            return null;
        }

        ConsumeQueueMMapFileModel consumeQueueMMapFileModel =
                CommonCache.getConsumeQueueMMapFileModelManager().getConsumeQueueMMapFileModel(req.getTopic(), req.getQueueId());
        // Clamp the batch so the read never runs past the queue's available elements.
        int maxBatchSize = CommonCache.getConsumeQueueOffsetModel().getConsumeAvailableElementNum(req.getTopic(), req.getConsumeGroup(), req.getQueueId());
        int batchSize = Math.min(maxBatchSize, req.getBatchSize());
        log.info("主题{}的{}号队列, 批量拉取{}条数据", req.getTopic(), req.getQueueId(), batchSize);

        // Read the index elements, then dereference each into the CommitLog.
        List<byte[]> records = consumeQueueMMapFileModel.readContent(consumeQueueOffset, batchSize);
        List<byte[]> dataList = new ArrayList<>(batchSize); // presize with the clamped size, not the requested one
        CommitLogMMapFileModel commitLogMMapFileModel = CommonCache.getCommitLogMMapFileModelManager().get(req.getTopic());
        for (byte[] record : records) {
            ConsumeQueueElementModel cq = ConsumeQueueElementModel.coverFromBytes(record);
            dataList.add(commitLogMMapFileModel.readContent(cq.getMsgOffset(), cq.getMsgLength()));
        }
        return dataList;
        // TODO how to handle reads when the message is not in the latest CommitLog file segment?
    }

    /**
     * Looks up the per-queue progress map ({@code "queueId" -> "fileName#offset"})
     * for the given consumer group, creating and registering every missing level of
     * the nested offset table on first access. New groups start each queue of the
     * topic at {@link #INITIAL_OFFSET_INFO}.
     *
     * @param topic        topic name
     * @param consumeGroup consumer group name
     * @param mqTopicModel topic model used to enumerate the topic's queues
     * @return the (possibly freshly initialized) progress map for the group
     */
    private Map<String, String> getOrInitConsumeProgress(String topic, String consumeGroup, MqTopicModel mqTopicModel) {
        ConsumeQueueOffsetModel.OffsetTable offsetTable = CommonCache.getConsumeQueueOffsetModel().getOffsetTable();
        Map<String, ConsumeQueueOffsetModel.ConsumerGroupDetail> topicConsumerGroupDetailMap = offsetTable.getTopicConsumerGroupDetailMap();

        // Topic level: create the detail holder on first consumption of this topic.
        ConsumeQueueOffsetModel.ConsumerGroupDetail consumerGroupDetail =
                topicConsumerGroupDetailMap.computeIfAbsent(topic, t -> new ConsumeQueueOffsetModel.ConsumerGroupDetail());

        // Group-map level: the inner map is a plain field, so null-init it explicitly.
        Map<String, Map<String, String>> consumerGroupDetailMap = consumerGroupDetail.getConsumerGroupDetailMap();
        if (consumerGroupDetailMap == null) {
            consumerGroupDetailMap = new HashMap<>();
            consumerGroupDetail.setConsumerGroupDetailMap(consumerGroupDetailMap);
        }

        // Group level: seed every queue of the topic with the initial progress string.
        Map<String, String> consumerGroupConsumeMap = consumerGroupDetailMap.get(consumeGroup);
        if (consumerGroupConsumeMap == null) {
            consumerGroupConsumeMap = new HashMap<>();
            for (ConsumeQueueModel queueModel : mqTopicModel.getQueueList()) {
                consumerGroupConsumeMap.put(String.valueOf(queueModel.getId()), INITIAL_OFFSET_INFO);
            }
            consumerGroupDetailMap.put(consumeGroup, consumerGroupConsumeMap);
        }
        return consumerGroupConsumeMap;
    }

    /**
     * Acknowledges one consumed element: advances the group's offset for the given
     * queue by exactly one consume-queue element.
     *
     * @param topicName    topic name
     * @param consumeGroup consumer group name
     * @param queueId      queue ID within the topic
     * @return {@code true} when the offset was advanced
     * @throws IllegalArgumentException if no consume progress is recorded for the
     *                                  topic or consumer group (i.e. ack without a prior consume)
     */
    public boolean ack(String topicName, String consumeGroup, Integer queueId) {

        ConsumeQueueOffsetModel.OffsetTable offsetTable = CommonCache.getConsumeQueueOffsetModel().getOffsetTable();
        Map<String, ConsumeQueueOffsetModel.ConsumerGroupDetail> topicConsumerGroupDetailMap = offsetTable.getTopicConsumerGroupDetailMap();
        ConsumeQueueOffsetModel.ConsumerGroupDetail consumerGroupDetail = topicConsumerGroupDetailMap.get(topicName);
        // Explicit validation instead of an opaque NPE when acking before any consume.
        if (consumerGroupDetail == null || consumerGroupDetail.getConsumerGroupDetailMap() == null) {
            throw new IllegalArgumentException("主题不存在");
        }
        Map<String, String> consumerGroupConsumeMap = consumerGroupDetail.getConsumerGroupDetailMap().get(consumeGroup);
        if (consumerGroupConsumeMap == null) {
            throw new IllegalArgumentException("消费队列不存在");
        }

        // Progress entries have the form "fileName#offset".
        String offsetStrInfo = consumerGroupConsumeMap.get(String.valueOf(queueId));
        String[] offsetStrArr = offsetStrInfo.split("#");
        String fileName = offsetStrArr[0];
        int consumeQueueOffset = Integer.parseInt(offsetStrArr[1]);
        // Advance by one element using the shared size constant (was a magic 12,
        // duplicating CONSUME_QUEUE_ELEMENT_BYTE_SIZE which was imported but unused).
        consumeQueueOffset += CONSUME_QUEUE_ELEMENT_BYTE_SIZE;
        // TODO compute the next element's file name: if the offset passes a single CQ
        //  file's size, fileName is not rolled over automatically.
        consumerGroupConsumeMap.put(String.valueOf(queueId), fileName + "#" + consumeQueueOffset);
        return true;
    }
}
