package com.nosql.db.core.storage;

import com.nosql.db.core.modul.Document;
import com.nosql.db.core.wal.WAL;
import com.nosql.db.core.wal.WALEntry;

import java.io.BufferedReader;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.*;
import java.util.concurrent.ConcurrentLinkedQueue;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.locks.ReentrantLock;
import java.util.stream.Collectors;
import java.util.stream.Stream;

/**
 * LSM-tree style storage engine: writes go to a WAL and an in-memory table,
 * which is flushed to per-collection SSTable files once it exceeds a size
 * threshold; a background daemon compacts SSTables across size-tiered levels.
 */
public class LSMStore {
    // In-memory write buffer; swapped for a fresh instance on each flush (guarded by flushLock).
    private MemTable memTable = new MemTable();
    // All live on-disk tables. Concurrent queue: readers iterate while flush/compaction add/remove.
    private final Queue<SSTable> ssTables = new ConcurrentLinkedQueue<>();
    // Single global write-ahead log shared by every collection.
    private final WAL wal;
    // Root data directory: one subdirectory per collection plus the global WAL file.
    private final Path dataDir;
    // MemTable flush threshold, in bytes.
    private final int maxMemTableSize;
    // Serializes flushes and compactions so they never run concurrently with each other.
    private final ReentrantLock flushLock = new ReentrantLock();

    // Compaction configuration.
    private static final int LEVELS = 5;
    // Per-level size budgets in bytes. The 'L' suffixes are essential: without them the
    // products are evaluated in int arithmetic, and 5_000 * 1024 * 1024 (≈5.2e9) silently
    // overflows Integer.MAX_VALUE — wrapping to ~947MB — before widening to long.
    private static final long[] LEVEL_SIZE_LIMITS = {
            10L * 1024 * 1024,     // L0:   10 MB
            100L * 1024 * 1024,    // L1:  100 MB
            500L * 1024 * 1024,    // L2:  500 MB
            1_000L * 1024 * 1024,  // L3: 1000 MB
            5_000L * 1024 * 1024   // L4: 5000 MB
    };
    // Background compaction worker; a single thread suffices since compactions are serialized.
    private final ExecutorService compactionExecutor = Executors.newSingleThreadExecutor();

    /**
     * Opens (or creates) the data directory, replays the global WAL into the
     * memtable, and starts the background compaction daemon.
     *
     * @param dataDir         root data directory
     * @param maxMemTableSize memtable flush threshold, in bytes
     * @throws IOException            if the directory or WAL cannot be created
     * @throws ClassNotFoundException declared for caller compatibility (not thrown here)
     */
    public LSMStore(Path dataDir, int maxMemTableSize) throws IOException, ClassNotFoundException {
        System.out.printf("[Storage] 初始化存储引擎 目录=%s 刷盘阈值=%d字节\n",
                dataDir.toAbsolutePath(), maxMemTableSize);
        this.dataDir = dataDir;
        this.maxMemTableSize = maxMemTableSize;

        if (!Files.exists(dataDir)) {
            Files.createDirectories(dataDir);
        }
        // Initialize the single global WAL.
        this.wal = new WAL(dataDir.resolve("global_wal.log"));
        recover();
        startCompactionDaemon();
    }

    /** Starts the background loop that checks compaction pressure once per minute. */
    private void startCompactionDaemon() {
        compactionExecutor.execute(() -> {
            while (!Thread.interrupted()) {
                try {
                    Thread.sleep(60_000); // check once per minute
                    checkCompaction();
                } catch (InterruptedException e) {
                    // Restore the interrupt flag so the while-condition terminates the loop.
                    Thread.currentThread().interrupt();
                }
            }
        });
    }

    /**
     * Scans every level and triggers a compaction for any level whose total
     * SSTable size exceeds its budget. Errors are logged, never propagated,
     * so the daemon thread survives.
     */
    private void checkCompaction() {
        try {
            for (int level = 0; level < LEVELS; level++) {
                List<SSTable> levelTables = getSSTablesByLevel(level);
                long totalSize = calculateTotalSize(levelTables);

                if (totalSize > LEVEL_SIZE_LIMITS[level]) {
                    compactLevel(level);
                }
            }
        } catch (Exception e) {
            System.err.println("合并过程中发生错误: " + e.getMessage());
            e.printStackTrace();
        }
    }

    /** Returns the registered SSTables belonging to the given level. */
    private List<SSTable> getSSTablesByLevel(int level) {
        return ssTables.stream()
                .filter(ss -> ss.getLevel() == level)
                .collect(Collectors.toList());
    }

    /** Sums the on-disk sizes of the given SSTables, in bytes. */
    private long calculateTotalSize(List<SSTable> tables) {
        return tables.stream()
                .mapToLong(SSTable::getSize)
                .sum();
    }

    /**
     * Merges every SSTable in {@code level} into one table at {@code level + 1}
     * (capped at the last level), keeping only the newest version of each
     * document id, then deletes the source files.
     *
     * NOTE(review): the compacted file is written into the dataDir root while
     * flushMemTable() writes per-collection subdirectories; get()'s
     * parent-directory filter will therefore not see compacted tables — confirm
     * this is intended.
     *
     * @throws IOException if writing the merged table or deleting sources fails
     */
    private void compactLevel(int level) throws IOException {
        flushLock.lock();
        try {
            List<SSTable> tablesToCompact = getSSTablesByLevel(level);
            if (tablesToCompact.isEmpty()) return;

            System.out.printf("[Compaction] 开始合并层级 %d, %d 个文件\n", level, tablesToCompact.size());

            // Read all documents and merge, last-writer-wins by timestamp.
            Map<String, Document> mergedDocs = new HashMap<>();
            for (SSTable table : tablesToCompact) {
                Map<String, Document> docs = readAllDocuments(table);
                docs.forEach((id, doc) -> {
                    Document existing = mergedDocs.get(id);
                    if (existing == null || doc.getTimestamp() > existing.getTimestamp()) {
                        mergedDocs.put(id, doc);
                    }
                });
            }

            // Stage the merged documents in a throwaway MemTable for writing.
            MemTable tempTable = new MemTable();
            mergedDocs.values().forEach(tempTable::put);

            int newLevel = Math.min(level + 1, LEVELS - 1);
            Path newFilePath = dataDir.resolve(String.format("sstable_L%d_%d.txt", newLevel, System.currentTimeMillis()));
            SSTable newTable = new SSTable(newFilePath, newLevel);
            newTable.writeWithIndex(tempTable);

            // Swap the merged table in before removing the old files from disk.
            ssTables.removeAll(tablesToCompact);
            ssTables.add(newTable);

            for (SSTable table : tablesToCompact) {
                Files.deleteIfExists(table.getFilepath());
            }

            System.out.printf("[Compaction] 层级 %d 合并完成，新文件: %s\n", level, newFilePath);
        } finally {
            flushLock.unlock();
        }
    }

    /**
     * Reads every document line in the table's #DATA_START..#INDEX_START section.
     *
     * @return document id -> parsed document
     * @throws IOException if the file cannot be read
     */
    private Map<String, Document> readAllDocuments(SSTable table) throws IOException {
        Map<String, Document> docs = new HashMap<>();
        try (BufferedReader reader = Files.newBufferedReader(table.getFilepath())) {
            String line;
            boolean inDataSection = false;
            while ((line = reader.readLine()) != null) {
                if (line.equals("#DATA_START")) {
                    inDataSection = true;
                    continue;
                }
                // The index section carries no document payloads; stop here.
                if (line.equals("#INDEX_START")) break;

                if (inDataSection) {
                    Document doc = parseTxtLine(line);
                    docs.put(doc.getId(), doc);
                }
            }
        }
        return docs;
    }

    /**
     * Replays the global WAL into the memtable. On any failure the memtable is
     * cleared so the store starts empty rather than half-recovered.
     *
     * @return true when replay succeeded, false otherwise
     */
    private boolean recover() {
        try {
            System.out.println("[恢复] 开始从WAL恢复数据");
            wal.replay(memTable);
            System.out.printf("[恢复] 成功恢复 %d 个文档\n", memTable.values().size());
            return true;
        } catch (Exception e) {
            System.err.println("[恢复] 部分记录恢复失败: " + e.getMessage());
            e.printStackTrace();
            memTable.clear();
            return false;
        }
    }

    /**
     * Extracts the numeric component after the first '_' of a filename like
     * "sstable_1699999999999.txt"; returns 0 when absent or non-numeric.
     *
     * NOTE(review): compacted files are named "sstable_L1_<ts>.txt", whose
     * parts[1] is "L1" and therefore parses to 0 — confirm whether level files
     * should sort by timestamp too.
     */
    private long extractTimestamp(Path path) {
        String fileName = path.getFileName().toString();
        String[] parts = fileName.split("[_.]");
        if (parts.length >= 2) {
            try {
                return Long.parseLong(parts[1]);
            } catch (NumberFormatException e) {
                return 0L;
            }
        }
        return 0L;
    }

    /**
     * Parses one SSTable data line of the form
     * {@code KEY=VALUE|KEY=VALUE|...} (with '\|' and '\=' escapes) into a
     * Document. System fields ID / COLLECTION / TIMESTAMP are pulled out of the
     * metadata; the DATA field holds comma-separated key=value pairs with
     * '\,' and '\=' escapes.
     *
     * NOTE(review): unescaping is asymmetric (keys only unescape '\=', values
     * only '\|') — confirm against the SSTable writer's escaping rules.
     */
    private Document parseTxtLine(String line) {
        // Split on unescaped '|' only.
        String[] parts = line.split("(?<!\\\\)\\|");
        Map<String, String> metadata = new HashMap<>();

        // Parse top-level KEY=VALUE metadata pairs.
        for (String part : parts) {
            String[] kv = part.split("(?<!\\\\)=", 2);
            if (kv.length == 2) {
                String key = kv[0].replace("\\=", "=");
                String value = kv[1].replace("\\|", "|");
                metadata.put(key, value);
            }
        }

        // Extract system fields once; reused below instead of re-parsing the map.
        String id = metadata.get("ID");
        String collection = metadata.get("COLLECTION");
        long timestamp = Long.parseLong(metadata.get("TIMESTAMP"));

        // Parse the DATA payload: escaped separators are shielded with
        // private-use sentinels so the plain split stays simple.
        Map<String, Object> data = new HashMap<>();
        String dataContent = metadata.get("DATA");
        if (dataContent != null) {
            dataContent = dataContent
                    .replace("\\,", "\uE000")
                    .replace("\\=", "\uE001");

            String[] entries = dataContent.split(",");
            for (String entry : entries) {
                String[] kvPair = entry.split("=", 2);
                if (kvPair.length == 2) {
                    String key = kvPair[0]
                            .replace("\uE000", ",")
                            .replace("\uE001", "=");
                    String value = kvPair[1]
                            .replace("\uE000", ",")
                            .replace("\uE001", "=");
                    data.put(key, value);
                }
            }
        }

        // Drop system/reserved fields that leaked into the payload.
        data.entrySet().removeIf(e ->
                e.getKey().startsWith("_") ||
                        e.getKey().equalsIgnoreCase("id") ||
                        e.getKey().equalsIgnoreCase("collection") ||
                        e.getKey().equalsIgnoreCase("timestamp")
        );

        Document doc = new Document(collection, data, timestamp);
        // The constructor carries no id; set the system fields explicitly from
        // the values already extracted above (no second parse of the map).
        doc.setId(id);
        doc.setCollection(collection);
        doc.setTimestamp(timestamp);
        return doc;
    }

    /**
     * Inserts (or overwrites) a document: WAL first, then memtable, then a
     * size-triggered flush.
     *
     * @param originalData caller's field map; defensively copied
     * @param id           document id assigned by the caller
     * @throws IOException if the WAL write or flush fails
     */
    public void put(String collection, Map<String, Object> originalData, String id) throws IOException {
        Map<String, Object> data = new HashMap<>(originalData);
        ensureCollectionDir(collection);

        String docId = id;
        Document doc = new Document(collection, data);
        doc.setId(docId);

        // 1. Durability first: append to the global WAL.
        wal.writeEntry(new WALEntry(WALEntry.Operation.PUT, collection, id, data));

        // 2. Make the write visible in the memtable.
        memTable.put(doc);

        // 3. Flush when the memtable exceeds its byte budget.
        if (memTable.estimateSize() > maxMemTableSize) {
            flushMemTable();
        }
    }

    /** Creates the collection's subdirectory if missing and returns its path. */
    private Path ensureCollectionDir(String collection) throws IOException {
        Path collectionDir = dataDir.resolve(collection);
        if (!Files.exists(collectionDir)) {
            Files.createDirectories(collectionDir);
        }
        return collectionDir;
    }

    /**
     * Deletes a document by writing a tombstone into the memtable. The WAL
     * entry is written unconditionally; the tombstone only when the document
     * is actually found in memory or on disk.
     *
     * @throws IOException if the WAL write or flush fails
     */
    public void delete(String collection, String id) throws IOException {
        // 1. Durability first: append the delete to the global WAL.
        wal.writeEntry(new WALEntry(WALEntry.Operation.DELETE, collection, id));

        // 2. Check whether the document exists (memory or disk).
        boolean documentExists = false;

        if (memTable.get(id) != null) {
            documentExists = true;
        } else {
            // Only consult SSTables that live in this collection's directory.
            Path collectionDir = dataDir.resolve(collection);
            for (SSTable ssTable : ssTables) {
                if (ssTable.getFilepath().getParent().equals(collectionDir)) {
                    if (ssTable.get(id) != null) {
                        documentExists = true;
                        break;
                    }
                }
            }
        }

        // 3. Only create a tombstone for documents that exist; the tombstone
        //    shadows any older on-disk version until compaction drops it.
        if (documentExists) {
            Document tombstone = Document.createTombstone(id, collection);
            tombstone.setTimestamp(System.currentTimeMillis());
            memTable.put(tombstone);
        }

        // 4. Flush when the memtable exceeds its byte budget.
        if (memTable.estimateSize() > maxMemTableSize && !memTable.isEmpty()) {
            flushMemTable();
        }
    }

    /**
     * Creates the collection directory and records the operation in the WAL.
     *
     * @throws RuntimeException wrapping the IOException on directory failure
     */
    public void createCollection(String collection) {
        try {
            ensureCollectionDir(collection);
            wal.writeEntry(new WALEntry(WALEntry.Operation.CREATE_COLLECTION, collection, collection));
        } catch (IOException e) {
            throw new RuntimeException("创建集合目录失败", e);
        }
    }

    /**
     * Merges {@code updateData} into an existing document, bumps its timestamp,
     * and writes the new version through the WAL and memtable.
     *
     * @throws IOException when the document does not exist or the write fails
     */
    public void update(String collection, String id, Map<String, Object> updateData) throws IOException {
        Document doc = memTable.get(id);
        if (doc == null) doc = get(collection, id);

        if (doc == null) {
            throw new IOException("文档不存在 " + id);
        }

        // Merge the updated fields into the existing document.
        updateDocument(doc, updateData);
        // Force a fresh timestamp so last-writer-wins resolution picks this version.
        doc.setTimestamp(System.currentTimeMillis());
        // Durability first: append to the global WAL.
        wal.writeEntry(new WALEntry(WALEntry.Operation.UPDATE, collection, id, doc.getData()));

        // Make the new version visible in the memtable.
        memTable.put(doc);

        // Flush when the memtable reaches its byte budget.
        if (memTable.estimateSize() >= maxMemTableSize && !memTable.isEmpty()) {
            flushMemTable();
        }
    }

    /** Copies non-system fields from updateData onto doc; timestamps are handled by the caller. */
    private void updateDocument(Document doc, Map<String, Object> updateData) {
        updateData.forEach((key, value) -> {
            if (!key.startsWith("_") && !key.equalsIgnoreCase("id")) {
                doc.put(key, value);
            }
        });
    }

    /**
     * Flushes the memtable to disk, grouped by collection: one new level-0
     * SSTable per collection, written into that collection's subdirectory.
     * Afterwards the memtable is replaced and the WAL cleared.
     *
     * NOTE(review): a crash between writeWithIndex and wal.clear() would replay
     * already-flushed entries on restart — confirm replay is idempotent.
     *
     * @throws IOException if any SSTable write fails
     */
    public void flushMemTable() throws IOException {
        flushLock.lock();
        try {
            System.out.println("[Storage] 开始全局刷盘");

            // Group resident documents by their collection.
            Map<String, List<Document>> collectionGroups = memTable.values().stream()
                    .collect(Collectors.groupingBy(Document::getCollection));

            // Write one fresh level-0 SSTable per collection.
            for (Map.Entry<String, List<Document>> entry : collectionGroups.entrySet()) {
                String collection = entry.getKey();
                List<Document> documents = entry.getValue();

                System.out.printf("[Storage] 刷盘集合: %s, 文档数: %d\n", collection, documents.size());

                Path collectionDir = ensureCollectionDir(collection);
                String fileName = "sstable_" + System.currentTimeMillis() + ".txt";
                Path filePath = collectionDir.resolve(fileName);

                // Stage this collection's documents in a throwaway MemTable.
                MemTable tempTable = new MemTable();
                documents.forEach(tempTable::put);

                SSTable ssTable = new SSTable(filePath, 0);
                ssTable.writeWithIndex(tempTable);
                ssTables.add(ssTable);
            }

            // Reset the memtable and truncate the WAL now that data is durable.
            memTable = new MemTable();
            wal.clear();

        } finally {
            flushLock.unlock();
        }
    }

    /**
     * Looks up a document: memtable first, then registered SSTables for the
     * collection, and finally a directory scan of on-disk .txt files. Among
     * all matches the newest timestamp wins; tombstones shadow older versions.
     *
     * @return the latest live version, or null when absent or deleted
     * @throws IOException if the directory walk fails
     */
    public Document get(String collection, String id) throws IOException {
        // 1. Memtable is always the most recent view.
        Document doc = memTable.get(id);
        if (doc != null) {
            if (doc.isTombstone()) {
                return null; // deleted in memory: shadows any on-disk version
            }
            if (doc.getCollection().equals(collection)) {
                return doc;
            }
        }

        // 2. Registered SSTables belonging to this collection's directory.
        Path collectionDir = dataDir.resolve(collection);
        List<Document> matchingDocs = new ArrayList<>();
        for (SSTable ssTable : ssTables) {
            if (ssTable.getFilepath().getParent().equals(collectionDir)) {
                doc = ssTable.get(id);
                if (doc != null) {
                    // A memtable tombstone shadows any on-disk version.
                    Document tombstone = memTable.get(id);
                    if (tombstone != null && tombstone.isTombstone()) {
                        continue;
                    }
                    if (doc.getCollection().equals(collection)) {
                        matchingDocs.add(doc);
                    }
                }
            }
        }

        // 3. Fallback: scan on-disk SSTable files not registered in memory.
        if (matchingDocs.isEmpty()) {
            // Files.walk returns a live stream of directory handles; it must be
            // closed, so it is held in a try-with-resources block.
            try (Stream<Path> paths = Files.walk(dataDir)) {
                paths
                        .filter(path -> {
                            // Only .txt SSTable files in this collection's directory, skipping WAL files.
                            return path.toString().endsWith(".txt")
                                    && !path.getFileName().toString().startsWith("wal")
                                    && path.getParent().endsWith(collection);
                        })
                        .sorted((p1, p2) -> {
                            // Newest files first, by the timestamp embedded in the filename.
                            long t1 = extractTimestamp(p1);
                            long t2 = extractTimestamp(p2);
                            return Long.compare(t2, t1);
                        })
                        .forEach(path -> {
                            try (BufferedReader reader = Files.newBufferedReader(path)) {
                                String line;
                                boolean inDataSection = false;
                                while ((line = reader.readLine()) != null) {
                                    if (line.equals("#DATA_START")) {
                                        inDataSection = true;
                                        continue;
                                    }
                                    if (line.equals("#INDEX_START")) {
                                        break; // index section holds no documents
                                    }
                                    if (inDataSection) {
                                        Document diskDoc = parseTxtLine(line);
                                        if (diskDoc.getId().equals(id) && diskDoc.getCollection().equals(collection)) {
                                            // A memtable tombstone shadows the disk version.
                                            Document memTombstone = memTable.get(id);
                                            if (memTombstone == null || !memTombstone.isTombstone()) {
                                                matchingDocs.add(diskDoc);
                                            }
                                        }
                                    }
                                }
                            } catch (Exception e) {
                                System.err.println("从磁盘文件查找数据失败: " + path);
                                e.printStackTrace();
                            }
                        });
            }
        }

        if (matchingDocs.isEmpty()) {
            return null;
        }

        // Newest version wins.
        matchingDocs.sort(Comparator.comparingLong(Document::getTimestamp).reversed());

        Document latestDoc = matchingDocs.get(0);
        // NOTE(review): debug print left in place to preserve observable output — consider removing.
        System.out.println(latestDoc);
        return latestDoc;
    }

    /**
     * Stops the compaction daemon, waiting briefly for it to terminate.
     * NOTE(review): neither the memtable nor the WAL is flushed/closed here —
     * unflushed writes rely on WAL replay at next startup; confirm intended.
     */
    public void close() {
        compactionExecutor.shutdownNow();
        try {
            if (!compactionExecutor.awaitTermination(5, TimeUnit.SECONDS)) {
                System.err.println("合并线程未正常终止");
            }
        } catch (InterruptedException e) {
            Thread.currentThread().interrupt();
        }
    }

    /** Exposes the current memtable (used by tests/diagnostics). */
    public MemTable getMemTable() {
        return memTable;
    }
}

