package com.stockprediction.analysis;

import java.io.*;
import java.util.*;

/**
 * TF-IDF vectorizer over whitespace-delimited tokens produced by
 * {@code ChineseTokenizer}. {@link #fit(List)} learns a vocabulary capped at
 * {@code maxFeatures} words (highest document frequency first) together with
 * smoothed IDF values; {@link #transform(String)} then maps a text to a
 * fixed-length TF-IDF feature vector.
 *
 * <p>Instances are {@link Serializable} so a fitted vectorizer can be
 * persisted via {@link #save(String)} and restored via {@link #load(String)}.
 */
public class TFIDFVectorizer implements Serializable {
    private static final long serialVersionUID = 1L; // serialization ID

    /** Word -> column index in the feature vector. */
    private Map<String, Integer> wordIndex = new HashMap<>();
    /** Word -> smoothed inverse document frequency. */
    private Map<String, Double> idfValues = new HashMap<>();
    /** Maximum vocabulary size; also the length of every feature vector. */
    private int maxFeatures = 10000;
    /** Number of documents seen by the last call to {@link #fit(List)}. */
    private int totalDocs = 0;

    /**
     * Builds the vocabulary and IDF table from a corpus.
     *
     * <p>Document frequency counts each word at most once per document. The
     * {@code maxFeatures} most frequent words are kept; each receives the
     * smoothed IDF {@code log((N + 1) / (df + 1)) + 1}, which is always
     * positive even for words present in every document.
     *
     * @param texts corpus of raw texts, one document per element
     */
    public void fit(List<String> texts) {
        totalDocs = texts.size();
        Map<String, Integer> docFreq = new HashMap<>(); // word -> number of documents containing it

        for (String text : texts) {
            // Tracks words already counted for this document.
            Set<String> seenWords = new HashSet<>();
            for (String word : ChineseTokenizer.tokenize(text).split(" ")) {
                // split(" ") yields "" for empty/blank tokenizer output; skip it
                // so the empty string never occupies a vocabulary slot.
                if (word.isEmpty()) {
                    continue;
                }
                // Set.add returns false for duplicates, so each word is
                // counted at most once per document.
                if (seenWords.add(word)) {
                    docFreq.merge(word, 1, Integer::sum);
                }
            }
        }

        // Keep the maxFeatures words with the highest document frequency.
        // comparingByValue + reverseOrder avoids the overflow-prone
        // subtraction-based comparator.
        List<Map.Entry<String, Integer>> sortedWords = new ArrayList<>(docFreq.entrySet());
        sortedWords.sort(Map.Entry.<String, Integer>comparingByValue(Comparator.reverseOrder()));

        int index = 0;
        for (Map.Entry<String, Integer> entry : sortedWords) {
            if (index >= maxFeatures) break;
            String word = entry.getKey();
            wordIndex.put(word, index++);

            // Smoothed IDF: log((N + 1) / (df + 1)) + 1.
            int df = entry.getValue();
            double idf = Math.log((double) (totalDocs + 1) / (df + 1)) + 1.0;
            idfValues.put(word, idf);
        }
    }

    /**
     * Computes the TF-IDF feature vector for a single text.
     *
     * <p>Out-of-vocabulary words are ignored. The returned array always has
     * length {@code maxFeatures}; positions for absent words stay 0.0.
     *
     * @param text raw input text
     * @return dense TF-IDF vector of length {@code maxFeatures}
     */
    public double[] transform(String text) {
        Map<String, Integer> tf = new HashMap<>(); // word -> raw term frequency
        for (String word : ChineseTokenizer.tokenize(text).split(" ")) {
            // Only count in-vocabulary words ("" is never in the vocabulary).
            if (wordIndex.containsKey(word)) {
                tf.merge(word, 1, Integer::sum);
            }
        }

        double[] vector = new double[maxFeatures];
        for (Map.Entry<String, Integer> entry : tf.entrySet()) {
            String word = entry.getKey();
            int termFreq = entry.getValue();
            // Every in-vocabulary word received an IDF in fit(); the 0.0
            // default is only a safety net against inconsistent state.
            double idf = idfValues.getOrDefault(word, 0.0);
            vector[wordIndex.get(word)] = termFreq * idf; // TF-IDF weight
        }
        return vector;
    }

    /**
     * Serializes this fitted vectorizer to the given file.
     *
     * @param filePath destination file path
     * @throws IOException if the file cannot be written
     */
    public void save(String filePath) throws IOException {
        try (ObjectOutputStream oos = new ObjectOutputStream(new FileOutputStream(filePath))) {
            oos.writeObject(this);
            System.out.println("✅ TF-IDF 向量化器已保存: " + filePath);
        }
    }

    /**
     * Deserializes a vectorizer previously written by {@link #save(String)}.
     *
     * <p>NOTE(review): Java-native deserialization is unsafe on untrusted
     * input; only load files this application produced itself.
     *
     * @param filePath source file path
     * @return the restored vectorizer
     * @throws IOException if the file cannot be read
     * @throws ClassNotFoundException if the serialized class is unavailable
     */
    public static TFIDFVectorizer load(String filePath) throws IOException, ClassNotFoundException {
        try (ObjectInputStream ois = new ObjectInputStream(new FileInputStream(filePath))) {
            TFIDFVectorizer vectorizer = (TFIDFVectorizer) ois.readObject();
            System.out.println("✅ TF-IDF 向量化器已加载: " + filePath);
            return vectorizer;
        }
    }
}
