package com.dbh.alg.algorithm.bigdata;

import java.io.*;
import java.util.*;

/**
 * Problem: a 1 GB file contains one word per line (each word at most 16 bytes);
 * memory is limited to 1 MB. Return the 100 most frequent words.
 *
 * Strategy: hash-partition the big file into many small chunk files so that
 * every occurrence of a given word lands in the same chunk, count each chunk
 * entirely in memory, keep a per-chunk top-k with a min-heap, then merge the
 * per-chunk winners into the global top-k.
 */
public class TopKFrequentWords {

    // Number of chunk files; tune so that each chunk's word-count map fits in memory.
    private static final int NUM_CHUNKS = 1024;

    public static void main(String[] args) throws IOException {
        String inputFile = "big_file.txt";
        String chunkPrefix = "chunk_";
        int k = 100;

        // Step 1: hash-partition the big file into NUM_CHUNKS small files.
        splitFileIntoChunks(inputFile, chunkPrefix, NUM_CHUNKS);

        // Step 2: compute the top-k words of each chunk independently.
        List<Map.Entry<String, Integer>> topWords = processChunks(chunkPrefix, NUM_CHUNKS, k);

        // Step 3: merge the per-chunk candidates into the global top-k.
        List<Map.Entry<String, Integer>> topK = mergeTopWords(topWords, k);

        System.out.println("Top " + k + " frequent words:");
        for (Map.Entry<String, Integer> entry : topK) {
            System.out.println(entry.getKey() + ":" + entry.getValue());
        }

        // Remove the temporary chunk files so repeated runs start clean.
        deleteChunks(chunkPrefix, NUM_CHUNKS);
    }

    /**
     * Hash-partitions {@code inputFile} line by line into {@code numChunks} small
     * files named {@code chunkPrefix + i}. All occurrences of the same word hash
     * to the same chunk, so per-chunk counts are globally exact.
     *
     * @throws IOException if the input cannot be read or a chunk cannot be written
     */
    private static void splitFileIntoChunks(String inputFile, String chunkPrefix, int numChunks) throws IOException {
        BufferedWriter[] writers = new BufferedWriter[numChunks];
        try {
            for (int i = 0; i < numChunks; i++) {
                writers[i] = new BufferedWriter(new FileWriter(chunkPrefix + i));
            }
            try (BufferedReader reader = new BufferedReader(new FileReader(inputFile))) {
                String line;
                while ((line = reader.readLine()) != null) {
                    String word = line.trim();
                    if (!word.isEmpty()) {
                        // floorMod is always non-negative; Math.abs(hashCode()) % n
                        // produces a negative index when hashCode() == Integer.MIN_VALUE.
                        int chunkIdx = Math.floorMod(word.hashCode(), numChunks);
                        writers[chunkIdx].write(word);
                        writers[chunkIdx].newLine();
                    }
                }
            }
        } finally {
            // Close every writer even if opening/reading/writing failed part-way.
            for (BufferedWriter writer : writers) {
                if (writer != null) {
                    writer.close();
                }
            }
        }
    }

    /**
     * Counts the words of each chunk file in memory and collects every chunk's
     * top-{@code k} entries into one candidate list.
     *
     * @return up to {@code numChunks * k} candidate (word, count) entries
     * @throws IOException if a chunk file cannot be read
     */
    private static List<Map.Entry<String, Integer>> processChunks(String chunkPrefix, int numChunks, int k) throws IOException {
        List<Map.Entry<String, Integer>> topWords = new ArrayList<>();

        for (int i = 0; i < numChunks; i++) {
            // Count word frequencies within this single chunk.
            Map<String, Integer> wordCounts = new HashMap<>();
            try (BufferedReader reader = new BufferedReader(new FileReader(chunkPrefix + i))) {
                String line;
                while ((line = reader.readLine()) != null) {
                    String word = line.trim();
                    if (!word.isEmpty()) {
                        wordCounts.merge(word, 1, Integer::sum);
                    }
                }
            }
            // Keep only this chunk's top-k as global candidates.
            topWords.addAll(topKEntries(wordCounts, k));
        }

        return topWords;
    }

    /**
     * Merges the per-chunk candidates into the global top-{@code k}, sorted by
     * frequency in descending order.
     */
    private static List<Map.Entry<String, Integer>> mergeTopWords(List<Map.Entry<String, Integer>> topWords, int k) {
        // Re-aggregate counts. With hash partitioning each word lives in exactly
        // one chunk, so this normally just copies; summing keeps it correct regardless.
        Map<String, Integer> globalCounts = new HashMap<>();
        for (Map.Entry<String, Integer> entry : topWords) {
            globalCounts.merge(entry.getKey(), entry.getValue(), Integer::sum);
        }

        List<Map.Entry<String, Integer>> topK = topKEntries(globalCounts, k);
        // The heap yields no particular order; callers expect most-frequent first.
        topK.sort((e1, e2) -> e2.getValue().compareTo(e1.getValue()));
        return topK;
    }

    /**
     * Returns up to {@code k} entries of {@code counts} with the highest values,
     * in no particular order, using an O(n log k) min-heap selection.
     */
    private static List<Map.Entry<String, Integer>> topKEntries(Map<String, Integer> counts, int k) {
        PriorityQueue<Map.Entry<String, Integer>> minHeap = new PriorityQueue<>(
                Comparator.comparingInt(Map.Entry::getValue)
        );
        for (Map.Entry<String, Integer> entry : counts.entrySet()) {
            if (minHeap.size() < k) {
                minHeap.offer(entry);
            } else if (entry.getValue() > minHeap.peek().getValue()) {
                // Current entry beats the weakest kept entry: swap it in.
                minHeap.poll();
                minHeap.offer(entry);
            }
        }
        return new ArrayList<>(minHeap);
    }

    /** Deletes the temporary chunk files created by {@link #splitFileIntoChunks}. */
    private static void deleteChunks(String chunkPrefix, int numChunks) {
        for (int i = 0; i < numChunks; i++) {
            // Best-effort cleanup; a leftover temp file is not an error.
            new File(chunkPrefix + i).delete();
        }
    }

}
