package com.qf.index;

import lombok.Data;

import java.io.Serializable;
import java.util.*;
import java.util.stream.Collectors;

public class Test6 {
    /**
     * One paper in the corpus: term lists extracted from its title (ti),
     * abstract (ab) and keyword field (kw), a per-term score map, and an
     * article id.
     *
     * <p>Lombok {@code @Data} generates getters/setters, {@code equals},
     * {@code hashCode} and {@code toString} for all fields.
     */
    @Data
    public static class Document implements Serializable {
        // Explicit serialVersionUID so the serialized form stays stable
        // across recompiles (Serializable classes should always declare one).
        private static final long serialVersionUID = 1L;

        private List<String> tiList = new ArrayList<>();      // terms from the title
        private List<String> abList = new ArrayList<>();      // terms from the abstract
        private List<String> kwList = new ArrayList<>();      // terms from the keywords
        private Map<String, Double> score = new HashMap<>();  // per-term score (e.g. TF-IDF)
        private int aid = 0;                                  // article id
    }

    /** Entry point: builds the sample corpus and prints per-document TF-IDF rankings. */
    public static void main(String[] args) {
        processTFIDF(prepareDocuments());
    }

    /**
     * Computes and prints a TF-IDF ranking of the terms of each document.
     *
     * <p>Pass 1 computes document frequency (DF): a term counts at most once per
     * document, no matter how often it occurs there. Pass 2 computes, per
     * document, TF = (occurrences of the term in the document) / (total terms in
     * the document) and IDF = log(totalDocs / DF) — a tiny epsilon in the
     * denominator guards against a zero DF — then prints the terms sorted by
     * descending TF-IDF score.
     *
     * @param documents corpus to score; a document with no terms at all is
     *                  skipped (its TF would otherwise be a 0/0 NaN)
     */
    private static void processTFIDF(List<Document> documents) {
        // Pass 1: document frequency — in how many documents each term appears.
        Map<String, Integer> termAndSumMap = new HashMap<>();
        for (Document doc : documents) {
            // Deduplicate within the document so each term counts once per doc.
            for (String term : new HashSet<>(collectTerms(doc))) {
                termAndSumMap.merge(term, 1, Integer::sum);
            }
        }

        // Pass 2: per-document TF-IDF scores.
        for (Document doc : documents) {
            List<String> allTermList = collectTerms(doc);
            int totalTerms = allTermList.size();
            if (totalTerms == 0) {
                // Nothing to score; skipping avoids a division by zero below.
                continue;
            }

            // Raw term frequency inside this document — repeated occurrences all count.
            Map<String, Integer> termFreq = new HashMap<>();
            for (String term : allTermList) {
                termFreq.merge(term, 1, Integer::sum);
            }

            Map<String, Double> termAndScoreMap = new LinkedHashMap<>();
            for (String term : new HashSet<>(allTermList)) {
                // TF: occurrences of the term divided by the document's term total.
                double tf = (double) termFreq.get(term) / totalTerms;
                // IDF: log(totalDocs / DF); the epsilon keeps a zero DF finite.
                double idf = Math.log((double) documents.size() / (termAndSumMap.getOrDefault(term, 0) + 1e-12));
                termAndScoreMap.put(term, tf * idf);
            }

            // Sort terms by descending score.
            List<Map.Entry<String, Double>> sortedKeywords = new ArrayList<>(termAndScoreMap.entrySet());
            sortedKeywords.sort(Map.Entry.comparingByValue(Comparator.reverseOrder()));

            System.out.println("\n文档 " + doc.getAid() + " 关键词排序结果：");
            for (Map.Entry<String, Double> entry : sortedKeywords) {
                System.out.printf("%-20s %.6f%n", entry.getKey(), entry.getValue());
            }
        }
    }

    /** Concatenates a document's title, abstract and keyword terms into one list. */
    private static List<String> collectTerms(Document doc) {
        List<String> terms = new ArrayList<>(doc.getTiList());
        terms.addAll(doc.getAbList());
        terms.addAll(doc.getKwList());
        return terms;
    }

    /**
     * Builds the fixed three-document sample corpus used by the demo.
     *
     * <p>Note: {@link Arrays#asList} returns a fixed-size list; callers must not
     * add or remove elements from the returned list or the term lists.
     *
     * @return the three pre-populated sample documents
     */
    private static List<Document> prepareDocuments() {
        // Document 1
        Document doc1 = new Document();
        doc1.setTiList(Arrays.asList("青年", "数字化阅读", "指尖阅读", "通过"));
        doc1.setKwList(Arrays.asList("指尖", "作为", "开启", "自我", "指尖阅读", "提升"));
        doc1.setAbList(Arrays.asList("指尖", "青年", "指尖阅读", "透视", "现象"));
        doc1.setAid(1);

        // Document 2
        Document doc2 = new Document();
        doc2.setTiList(Arrays.asList("索网结构", "光伏支架", "马鞍形", "预张力优化"));
        doc2.setKwList(Arrays.asList("索网结构", "关键刚度", "预张力优化", "刚度替换"));
        doc2.setAbList(Arrays.asList("索网结构", "调整", "拉索", "变化量", "提出", "马鞍形", "建立", "构件", "截面面积", "通过", "定量评价"));
        doc2.setAid(2);

        // Document 3
        Document doc3 = new Document();
        doc3.setTiList(Arrays.asList("对口支援", "院前急救", "民营医院管理", "医院营销", "院前急救"));
        doc3.setKwList(Arrays.asList("民营医院", "hospitals, public", "对口支援", "院前急救", "pre-hospital first aid", "private hospitals", "建设", "公立医院"));
        doc3.setAbList(Arrays.asList("民营医院", "流程", "作为", "选派", "美誉度", "结果", "结论", "探讨", "院前急救", "专家", "建设", "技术支持", "方法", "对口支援", "目的", "公立医院", "通过"));
        doc3.setAid(3);

        return Arrays.asList(doc1, doc2, doc3);
    }
}
