package com.hudson.store.commitlog;

import com.google.protobuf.ByteString;
import com.google.protobuf.InvalidProtocolBufferException;
import com.hudson.codec.domain.MessageExt;
import com.hudson.codec.domain.MessageExtBrokerInner;
import com.hudson.codec.proto.domain.MessageExtBrokerInnerProto;
import com.hudson.codec.proto.domain.MessageExtProto;
import com.hudson.common.domain.MessageMeta;
import com.hudson.core.json.JSONUtil;
import com.hudson.core.util.MsgIdUtil;
import com.hudson.core.util.PropertiesUtil;
import com.hudson.core.util.StringUtil;
import com.hudson.store.dispatch.CommitLogDispatchService;
import com.hudson.store.dispatch.DispatchDto;
import com.hudson.store.queue.TopicQueue;
import io.netty.buffer.ByteBuf;
import io.netty.buffer.ByteBufAllocator;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.File;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Properties;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.locks.ReentrantLock;

/**
 * Commit-log storage manager (sic: "Manger" — kept for compatibility with callers).
 * <p>
 * Append paths write serialized messages into fixed-size memory-mapped files under
 * {@code store/commit-log/}; read paths locate a file by global offset via a binary
 * search over {@link #COMMIT_LOG_FILE_LIST} and read through cached {@link MappedFile}s.
 * All static state is guarded by a single coarse {@link ReentrantLock} on the append
 * paths (see TODOs); read paths are best-effort.
 */
public class CommitLogManger {
    // 10 MB per file. (The previous comment claimed 1 MB, which did not match the value.)
    public static final long FILE_SIZE = 1024 * 1024 * 10;
    // Ordered index of all commit-log files; entry i covers [FILE_SIZE*i, FILE_SIZE*(i+1)).
    public static final List<CommitLogFile> COMMIT_LOG_FILE_LIST = new ArrayList<>();
    private static final Logger logger = LoggerFactory.getLogger(CommitLogManger.class);
    private static final String COMMIT_LOG_PATH = "store/commit-log/";
    private static final String FILE_EXTENSION = ".log";
    // Reader cache, keyed by the file's BASE name (e.g. "00000000.log"). The old code
    // put entries under the full path but looked them up by base name, so the cache
    // never hit and duplicate MappedFile instances were created.
    private static final ConcurrentHashMap<String, MappedFile> MAPPER_FILE_MAP = new ConcurrentHashMap<>();
    private static final ReentrantLock lock = new ReentrantLock();
    private static final BlockingQueue<MessageExtBrokerInner> writeQueue = new LinkedBlockingQueue<>();
    private static final int BATCH_SIZE = 1024; // max messages merged per async batch
    private static final long FLUSH_INTERVAL = 10; // poll timeout of the async batch thread, ms
    // Commit-log directory.
    private static File commitLogDir;
    private static MappedFile currentFile;
    private static volatile Long asyncFlushTime;

    private static String address;
    private static int port;

    static {
        // Assign defaults first so the catch-branch log (and msgId encoding) never sees
        // an unassigned address/port — the old code logged them before assignment.
        address = "127.0.0.1";
        port = 8080;
        try {
            Properties prop = PropertiesUtil.getProperties("Config.properties");
            address = String.valueOf(prop.getProperty("BrokerAddress", "127.0.0.1"));
            port = Integer.parseInt(prop.getProperty("BrokerPort", "8080"));
        } catch (Exception e) {
            logger.warn("无法读取Broker配置，使用默认地址: {}:{}", address, port);
        }

        commitLogDir = new File(COMMIT_LOG_PATH);
        if (!commitLogDir.exists()) {
            commitLogDir.mkdirs();
        }

        // Recover existing commit-log files. Names are zero-padded sequence numbers,
        // so a lexicographic sort restores creation order.
        File[] files = commitLogDir.listFiles();
        if (files != null && files.length > 0) {
            Arrays.sort(files);
            for (int i = 0; i < files.length; i++) {
                // Third ctor argument is the file LENGTH — findFileByOffset adds it to the
                // start offset. The old code passed FILE_SIZE * (i + 1) here, which made
                // the ranges overlap and could make the binary search pick the wrong file.
                COMMIT_LOG_FILE_LIST.add(new CommitLogFile(files[i], FILE_SIZE * i, FILE_SIZE));
            }
            currentFile = new MappedFile(files[files.length - 1].getAbsolutePath());
        }

        boolean asyncAppend;
        try {
            Properties prop = PropertiesUtil.getProperties("Config.properties");
            asyncAppend = Boolean.parseBoolean(prop.getProperty("async", "false"));
            asyncFlushTime = Long.valueOf(prop.getProperty("async-flush-time", "1000"));
        } catch (Exception e) {
            asyncAppend = false; // default to synchronous flush
            asyncFlushTime = 1000L;
        }
        Thread flushThread = new Thread(CommitLogManger::flushDate, "commitlog-periodic-flush");
        // Daemon: the periodic flush loop must not keep the JVM alive on its own
        // (the old one-shot thread exited immediately anyway).
        flushThread.setDaemon(true);
        flushThread.start();
        if (asyncAppend) {
            new Thread(CommitLogManger::flushBatch).start();
            logger.info("异步刷盘任务开启");
        }
    }

    /**
     * Periodic flush loop: forces the current mapped file to disk every
     * {@code asyncFlushTime} ms. The previous version flushed exactly once and then
     * let the thread die (NPE-ing when no commit-log file existed yet), while the
     * "async-flush-time" config value was read but never used — a periodic loop is
     * what was intended.
     */
    private static void flushDate() {
        while (!Thread.currentThread().isInterrupted()) {
            try {
                Thread.sleep(asyncFlushTime);
            } catch (InterruptedException e) {
                Thread.currentThread().interrupt();
                return;
            }
            MappedFile file = currentFile;
            if (file != null) { // no file exists until the first append or recovery
                file.flush();
            }
        }
    }


    /**
     * Returns the mapped file the next {@code size} bytes should be appended to,
     * rolling to a fresh file when none exists yet or the current one is full.
     * <p>NOTE(review): append paths call this under {@code lock}, but the async
     * batch thread calls it unlocked — pre-existing race, confirm intent.
     *
     * @param size number of bytes about to be written (used for the roll decision)
     */
    public static MappedFile getCurrentFile(int size) {
        if (currentFile == null) {
            rollToNextFile();
        } else if (currentFile.isFull(size)) {
            try {
                currentFile.flush();
                currentFile.close();
            } catch (IOException e) {
                throw new RuntimeException(e);
            }
            rollToNextFile();
        }
        return currentFile;
    }

    /** Creates the next commit-log file and registers it in the index and reader cache. */
    private static void rollToNextFile() {
        String nextFileName = getNextFileName();
        currentFile = new MappedFile(nextFileName);
        // The sequence index is encoded in the BASE name; strip any directory prefix and
        // the extension before parsing. The old substring(0, lastIndexOf('.')) kept the
        // directory prefix and threw NumberFormatException for path-qualified names.
        long fileIndex = extractFileIndex(currentFile.getFileName());
        COMMIT_LOG_FILE_LIST.add(new CommitLogFile(new File(currentFile.getFileName()), FILE_SIZE * fileIndex, FILE_SIZE));
        // Key by base name to match the lookups in getMessage (see MAPPER_FILE_MAP note).
        MAPPER_FILE_MAP.put(baseName(currentFile.getFileName()), currentFile);
    }

    /** @return the base file name (no directory component) of {@code fileName}. */
    private static String baseName(String fileName) {
        return new File(fileName).getName();
    }

    /** Parses the numeric sequence index out of a commit-log file name. */
    private static long extractFileIndex(String fileName) {
        String base = baseName(fileName);
        int dot = base.lastIndexOf('.');
        return Long.parseLong(dot >= 0 ? base.substring(0, dot) : base);
    }

    /** Builds the next file name from the count of existing files, zero-padded to 8 digits. */
    private static String getNextFileName() {
        // listFiles() may return null on I/O error; treat that as an empty directory.
        File[] existing = commitLogDir.listFiles();
        int fileCount = existing == null ? 0 : existing.length;
        return COMMIT_LOG_PATH + String.format("%08d", fileCount) + FILE_EXTENSION;
    }

    /**
     * Appends a protobuf-encoded message to the commit log (synchronous flush) and
     * indexes it in the topic queue.
     * <p>TODO: a single ReentrantLock currently guards the whole sync-flush path;
     * shrink the lock granularity later.
     *
     * @param topic   destination topic
     * @param queueId destination queue within the topic
     * @param message raw message body
     * @return the encoded message id (broker address/port + commit-log offset + size)
     */
    public static String appendMessage(String topic, int queueId, ByteString message) {
        lock.lock();
        try {
            // First serialization only sizes the record so getCurrentFile can decide
            // whether to roll to a new file.
            byte[] byteArray = MessageExtBrokerInnerProto.MessageExtBrokerInner.newBuilder()
                    .setCommitLogOffset(0L)
                    .setBody(message)
                    .build()
                    .toByteArray();
            MappedFile mappedFile = getCurrentFile(byteArray.length);

            // Re-serialize with the real offset, then derive the msgId from the FINAL
            // encoding — varint offsets change the length, so the old code could encode
            // a stale size into the msgId.
            byteArray = MessageExtBrokerInnerProto.MessageExtBrokerInner.newBuilder()
                    .setCommitLogOffset(currentFile.getCurrentOffset())
                    .setBody(message)
                    .build()
                    .toByteArray();
            String msgId = MsgIdUtil.encode(address, port, currentFile.getCurrentOffset(), byteArray.length);
            long queueOffset = mappedFile.syncWriteByFlush(byteArray);
            TopicQueue.write(topic, queueId, queueOffset, byteArray.length, "");
            return msgId;
        } finally {
            lock.unlock();
        }
    }

    /**
     * Appends a message using the new codec (synchronous flush) and indexes it in
     * the topic queue.
     *
     * @param topic    destination topic
     * @param queueId  destination queue within the topic
     * @param retryNum current retry count carried on the stored record
     * @param message  raw message body
     * @return the encoded message id
     */
    public static String appendMessage(String topic, int queueId, int retryNum, byte[] message) {
        lock.lock();
        try {
            MessageExtBrokerInner inner = MessageExtBrokerInner.builder()
                    .commitLogOffset(0L)
                    .retryNum(retryNum)
                    .body(message)
                    .build();
            // Sizing pass for the roll decision, then re-serialize with the real offset.
            byte[] byteArray = MessageExtBrokerInner.serialize(inner);
            MappedFile mappedFile = getCurrentFile(byteArray.length);

            inner.setCommitLogOffset(currentFile.getCurrentOffset());
            byteArray = MessageExtBrokerInner.serialize(inner);
            // msgId derived from the final serialized form (see the proto overload).
            String msgId = MsgIdUtil.encode(address, port, currentFile.getCurrentOffset(), byteArray.length);
            long queueOffset = mappedFile.syncWriteByFlush(byteArray);
            TopicQueue.write(topic, queueId, queueOffset, byteArray.length, "");
            return msgId;
        } finally {
            lock.unlock();
        }
    }

    /**
     * Appends a fully-populated message (new codec, asynchronous flush), hands a
     * dispatch record to {@link CommitLogDispatchService} and indexes the message
     * in its topic queue.
     *
     * @param messageExtBrokerInner message to store; its commitLogOffset is set here
     * @return the encoded message id
     */
    public static String appendMessage(MessageExtBrokerInner messageExtBrokerInner) {
        lock.lock();
        try {
            // Sizing pass for the roll decision, then re-serialize with the real offset.
            byte[] byteArray = MessageExtBrokerInner.serialize(messageExtBrokerInner);
            MappedFile mappedFile = getCurrentFile(byteArray.length);

            messageExtBrokerInner.setCommitLogOffset(currentFile.getCurrentOffset());
            byteArray = MessageExtBrokerInner.serialize(messageExtBrokerInner);
            // msgId derived from the final serialized form (see the proto overload).
            String msgId = MsgIdUtil.encode(address, port, currentFile.getCurrentOffset(), byteArray.length);
            long queueOffset = mappedFile.asyncWriteByFlush(byteArray);
            // Missing a per-request unique id for now; hard-coded to 0.
            CommitLogDispatchService.putDispatchDto(new DispatchDto(messageExtBrokerInner.getTopic(),
                    messageExtBrokerInner.getQueueId(),
                    messageExtBrokerInner.getCommitLogOffset(),
                    byteArray.length,
                    messageExtBrokerInner.getTags(),
                    0,
                    messageExtBrokerInner.getStoreTimestamp()));
            // NOTE(review): this writes the queue entry directly AND dispatches it —
            // confirm the dispatch service does not index the same entry a second time.
            TopicQueue.write(messageExtBrokerInner.getTopic(), messageExtBrokerInner.getQueueId(), queueOffset, byteArray.length, "");
            return msgId;
        } finally {
            lock.unlock();
        }
    }


    /**
     * Reads {@code size} bytes starting at global commit-log {@code offset}.
     *
     * @return the raw bytes, or {@code null} when no file covers the offset
     */
    public static byte[] getMessage(long offset, int size) {
        File file = findFileByOffset(offset);
        if (file == null) {
            return null;
        }

        if (StringUtil.isEmpty(file.getName())) {
            return null;
        }

        long localOffset = offset % FILE_SIZE; // offset within the owning file
        // Compare BASE names: MappedFile#getFileName may be path-qualified while
        // File#getName never is, so a raw equals() could never match the current file.
        MappedFile current = currentFile;
        if (current != null && file.getName().equals(baseName(current.getFileName()))) {
            return current.read(localOffset, size);
        }
        // Reuse a cached reader or atomically create one (the old check-then-act
        // sequence could race and build duplicate MappedFile instances).
        MappedFile mappedFile = MAPPER_FILE_MAP.computeIfAbsent(file.getName(), k -> new MappedFile(file.getPath()));
        return mappedFile.read(localOffset, size);
    }

    /**
     * Binary-searches the commit-log index for the file containing {@code offset}.
     *
     * @return the matching file, or {@code null} if no file covers the offset
     */
    private static File findFileByOffset(long offset) {
        int low = 0, high = COMMIT_LOG_FILE_LIST.size() - 1;

        while (low <= high) {
            int mid = (low + high) >>> 1; // overflow-safe midpoint
            CommitLogFile candidate = COMMIT_LOG_FILE_LIST.get(mid);
            long start = candidate.getStartOffset();

            if (start <= offset && offset < start + candidate.getLength()) {
                return candidate.getFile();
            } else if (offset < start) {
                high = mid - 1;
            } else {
                low = mid + 1;
            }
        }

        return null; // not found
    }

    /**
     * Resolves a (topic, queueId, logical offset) triple to the stored bytes via the
     * topic-queue index.
     *
     * @return the raw stored bytes, or {@code null} when the index has no such entry
     */
    private static byte[] getMessageByTopicByte(String topic, int queueId, int offset) {
        TopicQueue.FileOffset fileOffset = TopicQueue.read(topic, queueId, offset);
        if (fileOffset == null) {
            return null;
        }
        return getMessage(fileOffset.getOffset(), fileOffset.getSize());
    }

    /**
     * Reads and deserializes one message by its physical commit-log position.
     *
     * @return the decoded message, or {@code null} when the offset resolves to no file
     */
    public static MessageExtBrokerInner getMessageByIndex(long commitLogOffset, int messageSize) {
        byte[] message = getMessage(commitLogOffset, messageSize);
        if (message == null) { // old code passed null straight into deserialize → NPE
            return null;
        }
        return MessageExtBrokerInner.deserialize(message);
    }

    /**
     * Reads one message by topic/queue/offset and decodes it from its JSON envelope.
     *
     * @return the decoded meta, or {@code null} when the entry does not exist
     */
    public static MessageMeta getMessageByTopic(String topic, int queueId, int offset) {
        byte[] messageByTopicByte = getMessageByTopicByte(topic, queueId, offset);
        if (messageByTopicByte == null) {
            return null;
        }
        return JSONUtil.fromJson(new String(messageByTopicByte, StandardCharsets.UTF_8), MessageMeta.class);
    }

    /**
     * Reads up to {@code size} consecutive protobuf messages starting at the given
     * logical queue offset; stops early at the first missing or undecodable entry.
     */
    public static List<MessageExtProto.MessageExt> getListMessageByTopic(String topic, int queueId, int offset, int size) {
        List<MessageExtProto.MessageExt> result = new ArrayList<>();
        for (int i = 0; i < size; i++) {
            byte[] bytes = getMessageByTopicByte(topic, queueId, offset + i);
            // The old code dereferenced .length without a null check and NPE'd at the
            // end of the queue (getMessageByTopicByte documents a null return).
            if (bytes == null || bytes.length == 0) {
                return result;
            }
            MessageExtBrokerInnerProto.MessageExtBrokerInner inner;
            try {
                inner = MessageExtBrokerInnerProto.MessageExtBrokerInner.parseFrom(bytes);
            } catch (InvalidProtocolBufferException e) {
                logger.error("消息转换失败", e);
                // The old code only `assert`-ed non-null (disabled in production) and
                // then NPE'd; return what we have instead.
                return result;
            }
            result.add(MessageExtProto.MessageExt.newBuilder()
                    .setTopic(topic)
                    .setQueueId(queueId)
                    .setCommitLogOffset(inner.getCommitLogOffset())
                    .setStoreTimestamp(inner.getStoreTimestamp())
                    .setBody(inner.getBody())
                    .build());
        }

        return result;
    }

    /**
     * Reads up to {@code size} consecutive new-codec messages starting at the given
     * logical queue offset; stops at the first missing entry (queue offsets are
     * dense, matching the proto variant above).
     */
    public static List<MessageExt> getListMessageExtByTopic(String topic, int queueId, int offset, int size) {
        List<MessageExt> result = new ArrayList<>();
        for (int i = 0; i < size; i++) {
            byte[] bytes = getMessageByTopicByte(topic, queueId, offset + i);
            if (bytes == null || bytes.length == 0) {
                return result; // end of queue — consistent with getListMessageByTopic
            }
            MessageExtBrokerInner inner = MessageExtBrokerInner.deserialize(bytes);
            result.add(MessageExt.builder()
                    .topic(topic)
                    .queueId(queueId)
                    .tag(inner.getTags())
                    .commitLogOffset(inner.getCommitLogOffset())
                    .storeTimestamp(inner.getStoreTimestamp())
                    .reconsumeTimes(inner.getRetryNum())
                    .body(inner.getBody())
                    .build());
        }

        return result;
    }

    /** Enqueues a message for the asynchronous batch-append path ({@link #flushBatch}). */
    public static void asyncAppendMessage(MessageExtBrokerInner message) {
        writeQueue.offer(message);
    }

    /**
     * Batch-drain loop of the async append path: blocks briefly for the first
     * message, drains up to {@link #BATCH_SIZE} in one go and flushes them together.
     */
    private static void flushBatch() {
        List<MessageExtBrokerInner> batch = new ArrayList<>(BATCH_SIZE);
        while (true) {
            try {
                // Wait briefly for work (the old code used a bare literal 10 instead of
                // the FLUSH_INTERVAL constant declared for exactly this).
                MessageExtBrokerInner first = writeQueue.poll(FLUSH_INTERVAL, TimeUnit.MILLISECONDS);
                if (first != null) {
                    batch.add(first);

                    // Drain everything pending in one call instead of polling repeatedly.
                    writeQueue.drainTo(batch, BATCH_SIZE - batch.size());

                    doFlush(batch);
                    batch.clear();
                }

            } catch (InterruptedException e) {
                logger.error("异步刷盘任务被打断");
                Thread.currentThread().interrupt();
                break;
            }
        }
    }

    /**
     * Merges a batch of messages into one contiguous buffer, writes it with a single
     * mapped-file call, and indexes each message in its topic queue.
     * <p>TODO: the async flush path still needs a rework — file rolling is not
     * handled mid-batch and getCurrentFile(0) cannot account for the batch size.
     *
     * @param batch messages to persist; bodies are rewritten to their JSON envelopes
     */
    private static void doFlush(List<MessageExtBrokerInner> batch) {
        MappedFile file = CommitLogManger.getCurrentFile(0);

        // Per-message START offsets for the queue index (the old code recorded the
        // END offset of each message, pointing every index entry one message too far).
        List<Long> startOffsets = new ArrayList<>(batch.size());
        long offset = file.getCurrentOffset();

        // Explicit heap buffer: the old code asked the default allocator for a buffer
        // (possibly direct, whose array() throws), wrote the whole backing array
        // including the unwritten capacity tail, and never released it (netty leak).
        ByteBuf byteBuf = ByteBufAllocator.DEFAULT.heapBuffer();
        byte[] merged;
        try {
            for (MessageExtBrokerInner inner : batch) {
                // Probe serialization with an empty msgId just to size the record for
                // MsgIdUtil.encode; the real envelope is serialized below.
                MessageMeta probe = new MessageMeta("", inner.getBody());
                byte[] probeBytes = JSONUtil.toJson(probe).getBytes(StandardCharsets.UTF_8);
                int size = probeBytes.length + MsgIdUtil.MSG_LENGTH;
                String msgId = MsgIdUtil.encode(address, port, offset, size);

                MessageMeta meta = new MessageMeta(msgId, inner.getBody());
                byte[] messageBytes = JSONUtil.toJson(meta).getBytes(StandardCharsets.UTF_8);

                inner.setBody(messageBytes);
                byteBuf.writeBytes(messageBytes);

                startOffsets.add(offset); // start of THIS message
                offset += messageBytes.length;
            }
            // Copy exactly the written bytes out of the buffer.
            merged = new byte[byteBuf.readableBytes()];
            byteBuf.readBytes(merged);
        } finally {
            byteBuf.release();
        }

        file.asyncWrite(merged);

        // Index every message of the batch in its topic queue.
        for (int i = 0; i < batch.size(); i++) {
            MessageExtBrokerInner inner = batch.get(i);
            TopicQueue.write(inner.getTopic(), inner.getQueueId(), startOffsets.get(i), inner.getBody().length, "");
        }
    }

    /**
     * Reads one length-prefixed record starting at the global {@code offset}.
     * NOTE(review): this is the only non-static member of an otherwise all-static
     * class — confirm whether it should be static like its (long, int) overload.
     *
     * @return the record bytes, or {@code null} when no file covers the offset
     */
    public byte[] getMessage(long offset) {
        File file = findFileByOffset(offset);
        if (file == null) {
            return null;
        }

        if (StringUtil.isEmpty(file.getName())) {
            return null;
        }

        long localOffset = offset % FILE_SIZE; // offset within the owning file
        // Same base-name comparison and atomic cache population as getMessage(long, int).
        MappedFile current = currentFile;
        if (current != null && file.getName().equals(baseName(current.getFileName()))) {
            return current.readLine(localOffset);
        }
        MappedFile mappedFile = MAPPER_FILE_MAP.computeIfAbsent(file.getName(), k -> new MappedFile(file.getPath()));
        return mappedFile.readLine(localOffset);
    }
}