package org.mq.mymq.broker.core;

import org.mq.mymq.broker.cache.CommonCache;
import org.mq.mymq.broker.model.*;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

/**
 * Consume-queue consume handler: reads consume-queue index entries for a
 * (topic, consumeGroup, queueId) and resolves them into the raw commitLog
 * message bodies; {@link #ack} advances the group's stored consume offset.
 *
 * @Author jiarong_ye
 * @Date 2025/6/16 22:43
 * @Version 1.0
 */
public class ConsumeQueueConsumeHandler {
    private static final Logger log = LoggerFactory.getLogger(ConsumeQueueConsumeHandler.class);

    /**
     * Size in bytes of one consume-queue index entry. Each acked message
     * advances the stored offset by this amount (previously a magic 12 inline).
     */
    private static final int CONSUME_QUEUE_UNIT_SIZE = 12;

    /**
     * Read up to {@code batchSize} consume-queue index entries starting at the
     * consume group's current offset for the given queue, and return the
     * corresponding raw commitLog message bodies.
     *
     * @param consumeQueueConsumeReqModel request carrying topic, consumeGroup, queueId and batchSize
     * @return the raw commitLog message bytes, or {@code null} when there is
     *         nothing new to consume (null kept for caller compatibility)
     * @throws RuntimeException if the topic does not exist or the queueId has
     *         no offset entry for this topic
     */
    public List<byte[]> consume(ConsumeQueueConsumeReqModel consumeQueueConsumeReqModel) {
        String topic = consumeQueueConsumeReqModel.getTopic();
        String consumeGroup = consumeQueueConsumeReqModel.getConsumeGroup();
        Integer queueId = consumeQueueConsumeReqModel.getQueueId();
        Integer batchSize = consumeQueueConsumeReqModel.getBatchSize();
        MyMqTopicModel myMqTopicModel = CommonCache.getMyMqTopicModelsMap().get(topic);
        if (myMqTopicModel == null) {
            throw new RuntimeException("The topic " + topic + " does not exist");
        }
        ConsumeQueueOffsetModel.OffsetTable offsetTable = CommonCache.getConsumeQueueOffsetModel().getOffsetTable();
        Map<String, ConsumeQueueOffsetModel.ConsumerGroupDetail> topicConsumerGroupDetail = offsetTable.getTopicConsumerGroupDetail();
        // First consumption of this topic: lazily create the per-topic detail entry.
        ConsumeQueueOffsetModel.ConsumerGroupDetail consumerGroupDetail =
                topicConsumerGroupDetail.computeIfAbsent(topic, key -> new ConsumeQueueOffsetModel.ConsumerGroupDetail());
        List<QueueModel> queueList = myMqTopicModel.getQueueList();
        // First consumption by this group: initialise every queue to "file 00000000, offset 0".
        Map<String, String> queueOffsetDetailMap = consumerGroupDetail.getConsumerGroupDetailMap()
                .computeIfAbsent(consumeGroup, key -> {
                    Map<String, String> initialOffsets = new HashMap<>();
                    for (QueueModel queue : queueList) {
                        initialOffsets.put(String.valueOf(queue.getId()), "00000000#0");
                    }
                    return initialOffsets;
                });
        String offsetStrInfo = queueOffsetDetailMap.get(String.valueOf(queueId));
        if (offsetStrInfo == null) {
            // Previously this fell through to a NullPointerException on split().
            throw new RuntimeException("The queueId " + queueId + " does not exist in topic " + topic);
        }
        // Stored offset format: "<consumeQueueFileName>#<offsetInFile>".
        String[] offsetStrArr = offsetStrInfo.split("#");
        int consumeQueueOffset = Integer.parseInt(offsetStrArr[1]);
        QueueModel queueModel = queueList.get(queueId);
        // Nothing has been written beyond what this group already consumed.
        if (queueModel.getLatestOffset().get() <= consumeQueueOffset) {
            log.info("没有消息可消费");
            return null;
        }
        List<ConsumeQueueMMapFileModel> consumeQueueMMapFileModels = CommonCache.getConsumerQueueMMapFileModelManager().get(topic);
        ConsumeQueueMMapFileModel consumeQueueMMapFileModel = consumeQueueMMapFileModels.get(queueId);
        // Read a batch of consume-queue index entries in one mmap access.
        List<byte[]> contentList = consumeQueueMMapFileModel.readContent(consumeQueueOffset, batchSize);
        // The commitLog handle is per-topic — loop-invariant, so look it up once
        // (previously fetched on every iteration inside the loop).
        CommitLogMMapFileModel commitLogMMapFileModel = CommonCache.getCommitLogMMapFileModelManager().get(topic);
        List<byte[]> commitLogContentList = new ArrayList<>(contentList.size());
        for (byte[] content : contentList) {
            ConsumeQueueDetailModel consumeQueueDetailModel = new ConsumeQueueDetailModel();
            consumeQueueDetailModel.converFromBytes(content);
            // Each index entry points at (msgIndex, msgLength) inside the commitLog file.
            byte[] commitLogContent = commitLogMMapFileModel.readContent(consumeQueueDetailModel.getMsgIndex(), consumeQueueDetailModel.getMsgLength());
            commitLogContentList.add(commitLogContent);
        }
        return commitLogContentList;
    }

    /**
     * Acknowledge {@code ackCount} messages for the given consume group and
     * queue by advancing the stored consume-queue offset by
     * {@code ackCount * CONSUME_QUEUE_UNIT_SIZE} bytes.
     *
     * @param topic        topic whose offset is being advanced
     * @param consumeGroup consume group acknowledging the messages
     * @param queueId      queue within the topic
     * @param ackCount     number of messages being acknowledged
     * @return always {@code true} on success
     * @throws RuntimeException if no offset bookkeeping exists for the
     *         topic/group (i.e. ack before any consume)
     */
    public boolean ack(String topic, String consumeGroup, Integer queueId, Integer ackCount) {
        ConsumeQueueOffsetModel.OffsetTable offsetTable = CommonCache.getConsumeQueueOffsetModel().getOffsetTable();
        Map<String, ConsumeQueueOffsetModel.ConsumerGroupDetail> topicConsumerGroupDetail = offsetTable.getTopicConsumerGroupDetail();
        ConsumeQueueOffsetModel.ConsumerGroupDetail consumerGroupDetail = topicConsumerGroupDetail.get(topic);
        if (consumerGroupDetail == null) {
            // Previously an undiagnosed NullPointerException.
            throw new RuntimeException("No consume offset recorded for topic " + topic);
        }
        Map<String, String> consumeQueueOffsetDetailMap = consumerGroupDetail.getConsumerGroupDetailMap().get(consumeGroup);
        if (consumeQueueOffsetDetailMap == null) {
            throw new RuntimeException("No consume offset recorded for group " + consumeGroup + " on topic " + topic);
        }
        String offsetStrInfo = consumeQueueOffsetDetailMap.get(String.valueOf(queueId));
        if (offsetStrInfo == null) {
            throw new RuntimeException("No consume offset recorded for queueId " + queueId + " on topic " + topic);
        }
        // Stored offset format: "<consumeQueueFileName>#<offsetInFile>".
        String[] offsetStrArr = offsetStrInfo.split("#");
        String fileName = offsetStrArr[0];
        int currentOffset = Integer.parseInt(offsetStrArr[1]);
        currentOffset += (CONSUME_QUEUE_UNIT_SIZE * ackCount);
        consumeQueueOffsetDetailMap.put(String.valueOf(queueId), fileName + "#" + currentOffset);
        return true;
    }
}
