package com.hxz.graduationdesign.util;

import com.hankcs.hanlp.HanLP;
import com.hankcs.hanlp.seg.common.Term;

import java.io.*;
import java.nio.charset.StandardCharsets;
import java.util.*;
import java.util.stream.Collectors;

/**
 * Chinese word segmenter built on HanLP.
 *
 * <p>Cleans raw text (bracketed emoticon tags like {@code [xxx]} and a leading
 * {@code 回复...:} reply prefix), segments it with {@link HanLP#segment(String)},
 * lower-cases the tokens, and drops stop words and single-character tokens.
 *
 * <p>Not thread-safe to construct concurrently, but safe to share after
 * construction: {@code stopWords} is populated once and never mutated.
 */
public class ChineseSegmenter {

    /** Stop words loaded once from the classpath; never modified afterwards. */
    private final Set<String> stopWords;

    /**
     * Loads the stop-word list from {@code text/stop-word.txt} on the classpath.
     *
     * @throws IOException if the resource is missing or cannot be read
     */
    public ChineseSegmenter() throws IOException {
        this.stopWords = loadStopWords("text/stop-word.txt");
    }

    /**
     * Reads one stop word per line from a classpath resource.
     *
     * @param path classpath-relative resource path
     * @return the set of non-blank, trimmed lines
     * @throws FileNotFoundException if the resource does not exist
     * @throws IOException           on read failure
     */
    private Set<String> loadStopWords(String path) throws IOException {
        // Check for a missing resource BEFORE wrapping it in a reader; putting
        // the null check inside try-with-resources after the reader was built
        // would throw an NPE instead of this meaningful exception.
        InputStream is = getClass().getClassLoader().getResourceAsStream(path);
        if (is == null) {
            throw new FileNotFoundException("停用词文件未找到: " + path);
        }
        Set<String> words = new HashSet<>();
        // Explicit UTF-8: the stop-word file is Chinese text, and the platform
        // default charset is not guaranteed to be UTF-8 (pre-Java 18).
        try (BufferedReader reader = new BufferedReader(
                new InputStreamReader(is, StandardCharsets.UTF_8))) {
            String line;
            while ((line = reader.readLine()) != null) {
                line = line.trim();
                if (!line.isEmpty()) {
                    words.add(line);
                }
            }
        }
        return words;
    }

    /**
     * Strips bracketed tags such as {@code [微笑]} (emoticons/markup).
     *
     * @param text input text; may be {@code null}
     * @return text with {@code [...]} spans removed and trimmed, or {@code null}
     */
    private String removeBracketedContent(String text) {
        if (text == null) {
            return null;
        }
        return text.replaceAll("\\[.*?\\]", "").trim();
    }

    /**
     * Strips the first reply prefix of the form {@code 回复<user>:}.
     *
     * @param text input text; may be {@code null}
     * @return text with the leading reply prefix removed and trimmed, or {@code null}
     */
    private String removeReplyContent(String text) {
        if (text == null) {
            return null;
        }
        return text.replaceFirst("回复.*?:", "").trim();
    }

    /**
     * Segments text into lower-cased words, excluding stop words and
     * single-character tokens.
     *
     * @param text raw text; {@code null} or empty yields an empty list
     * @return mutable list of filtered words, in segmentation order
     */
    public List<String> segment(String text) {
        text = removeBracketedContent(text);
        text = removeReplyContent(text);
        // Guard: HanLP.segment throws on null, and the cleaners pass null through.
        if (text == null || text.isEmpty()) {
            return new ArrayList<>();
        }
        List<Term> termList = HanLP.segment(text);
        return termList.stream()
                // Locale.ROOT keeps lower-casing locale-independent
                // (e.g. avoids the Turkish dotless-i surprise).
                .map(term -> term.word.toLowerCase(Locale.ROOT))
                .filter(word -> !stopWords.contains(word))
                .filter(word -> word.length() > 1) // drop single characters
                .collect(Collectors.toList());
    }

    /**
     * Segments text and counts word occurrences.
     *
     * @param text raw text; {@code null} or empty yields an empty map
     * @return map of word → count, ordered by descending frequency
     */
    public Map<String, Integer> segmentWithFrequency(String text) {
        // Do NOT pre-clean here: segment() already does it, and running
        // removeReplyContent (replaceFirst) twice could strip a second,
        // legitimate "回复...:" occurrence from the text body.
        List<String> words = segment(text);
        Map<String, Integer> frequencyMap = new HashMap<>();
        for (String word : words) {
            frequencyMap.merge(word, 1, Integer::sum);
        }
        return sortByFrequency(frequencyMap);
    }

    /**
     * Returns a copy of the map ordered by descending count.
     *
     * @param map word → count
     * @return insertion-ordered map, highest count first
     */
    private Map<String, Integer> sortByFrequency(Map<String, Integer> map) {
        return map.entrySet().stream()
                .sorted(Map.Entry.<String, Integer>comparingByValue().reversed())
                .collect(Collectors.toMap(
                        Map.Entry::getKey,
                        Map.Entry::getValue,
                        (e1, e2) -> e1, // counts for a key are unique; keep first
                        LinkedHashMap::new));
    }

    /**
     * Segments a batch of texts and merges the word counts.
     *
     * @param texts texts to process; {@code null} yields an empty map,
     *              {@code null} elements are skipped by segment()
     * @return merged map of word → total count, ordered by descending frequency
     */
    public Map<String, Integer> batchSegment(List<String> texts) {
        Map<String, Integer> mergedResult = new HashMap<>();
        if (texts == null) {
            return mergedResult;
        }
        for (String text : texts) {
            // segmentWithFrequency handles all cleaning; an extra
            // removeBracketedContent pass here would be redundant.
            Map<String, Integer> singleResult = segmentWithFrequency(text);
            singleResult.forEach((word, count) ->
                    mergedResult.merge(word, count, Integer::sum));
        }
        return sortByFrequency(mergedResult);
    }
}
