package com.liuqi.openai.retrieve.bm25;

import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.stream.Collectors;

/**
 * @author liuqi
 * @date 2025/8/7
 **/
public class BM25Retrieve<T> {

    private static final float DEF_K1 = 1.5f;
    private static final float DEF_B = 0.75f;

    /**
     * BM25 tuning parameters: {@code k1} controls term-frequency saturation,
     * {@code b} controls document-length normalization.
     */
    private final float k1;
    private final float b;

    /**
     * Original document collection (tokens plus attached payload data).
     */
    private final List<BM25Document> bm25Documents;

    /**
     * Internal per-document term statistics, parallel to {@link #bm25Documents}.
     */
    private final List<Document> documents;

    /**
     * Total number of documents in the corpus.
     */
    private final int documentSize;

    /**
     * Average document length (in tokens) across the corpus.
     */
    private double avgDocLength;

    /**
     * Document frequency: term -> number of DISTINCT documents containing the term.
     *
     * BM25's IDF requires document frequency, not the total occurrence count.
     * E.g. for documents ["ML", "DL", "NN", "ML"] and ["ML", "NN", "NLP", "W2V"]:
     * {
     *     ML: 2,   // appears in 2 documents (not 3 occurrences)
     *     DL: 1,
     *     NN: 2,
     *     NLP: 1,
     *     W2V: 1
     * }
     */
    private final Map<String, Integer> docFreq;

    /**
     * Inverted index: term -> ids of documents containing the term (one entry
     * per document, no duplicates).
     */
    private final Map<String, List<Integer>> invertedIndex;

    public BM25Retrieve(List<BM25Document> bm25Documents) {
        this(bm25Documents, DEF_K1, DEF_B);
    }

    /**
     * @param bm25Documents non-empty corpus to index
     * @param k1 term-frequency saturation factor (typical range 1.2 ~ 2.0)
     * @param b  length-normalization factor in [0, 1]
     * @throws IllegalArgumentException if {@code bm25Documents} is null or empty
     */
    public BM25Retrieve(List<BM25Document> bm25Documents, float k1, float b) {
        if (bm25Documents == null || bm25Documents.isEmpty()) {
            throw new IllegalArgumentException("bm25Documents cannot be empty");
        }
        this.bm25Documents = bm25Documents;
        this.k1 = k1;
        this.b = b;
        this.documents = new ArrayList<>(bm25Documents.size());
        this.documentSize = bm25Documents.size();
        this.docFreq = new HashMap<>();
        this.invertedIndex = new HashMap<>();

        this.init();
    }

    /**
     * Builds per-document term frequencies, the document-frequency table,
     * the inverted index, and the average document length.
     */
    private void init() {
        // Total token count across all documents
        int totalDocSize = 0;

        for (int index = 0; index < documentSize; index++) {
            List<String> doc = bm25Documents.get(index).getDocument();
            totalDocSize += doc.size();

            // Term -> number of occurrences within this single document (TF)
            Map<String, Integer> freq = new HashMap<>();
            for (String word : doc) {
                freq.merge(word, 1, Integer::sum);
            }

            // Each DISTINCT term contributes exactly once per document to docFreq
            // and the inverted index. (Counting every occurrence would inflate the
            // document frequency past documentSize — making the IDF numerator
            // negative and the score NaN — and would duplicate postings.)
            for (String word : freq.keySet()) {
                docFreq.merge(word, 1, Integer::sum);
                invertedIndex.computeIfAbsent(word, k -> new ArrayList<>()).add(index);
            }

            documents.add(new Document(doc, freq));
        }

        // Floating-point division: integer division would truncate the average.
        this.avgDocLength = (double) totalDocSize / documentSize;
    }

    /**
     * Computes the BM25 score of the stored document at {@code index} against
     * the given query terms.
     *
     * @param query tokenized query
     * @param index index of the stored document to score
     * @return BM25 relevance score; 0.0 when no query term occurs in the corpus
     */
    public double score(List<String> query, int index) {
        double score = 0.0;

        // Term frequencies and length of the candidate document
        Map<String, Integer> freq = documents.get(index).freq;
        double docLength = documents.get(index).documents.size();

        for (String word : query) {
            Integer df = docFreq.get(word);
            if (df == null) {
                continue;
            }

            // Robertson–Sparck Jones IDF; df is a true document frequency
            // (<= documentSize), so the log argument is always positive.
            double idf = Math.log((documentSize - df + 0.5) / (df + 0.5));

            // Term frequency (TF) within the candidate document
            int tf = freq.getOrDefault(word, 0);

            // BM25 term weight: TF saturated by k1, normalized by document length
            double numerator = tf * (k1 + 1);
            double denominator = tf + k1 * (1 - b + b * docLength / avgDocLength);
            score += idf * (numerator / denominator);
        }

        return score;
    }

    public List<BM25SearchResult<T>> search(List<String> query) {
        return search(query, 4);
    }

    public List<BM25SearchResult<T>> search(List<String> query, int topN) {
        return search(query, topN, 0.0F);
    }

    /**
     * Retrieves the top-{@code topN} documents ranked by BM25 score.
     *
     * @param query     tokenized query
     * @param topN      maximum number of results to return (>= 1)
     * @param threshold sigmoid-normalized score threshold in [0.0, 1.0];
     *                  0.0 disables threshold filtering
     * @return results sorted by descending score; empty list for a null/empty query
     * @throws IllegalArgumentException if {@code topN < 1} or threshold out of range
     */
    public List<BM25SearchResult<T>> search(List<String> query, int topN, float threshold) {
        if (query == null || query.isEmpty()) {
            // Empty result instead of null so callers can iterate unconditionally.
            return new ArrayList<>();
        }
        if (topN < 1) {
            throw new IllegalArgumentException("topN cannot be less than 1");
        }
        if (threshold < 0.0F || threshold > 1.0F) {
            throw new IllegalArgumentException("threshold value range [0.0 ~ 1.0]");
        }

        // Use the inverted index to collect only documents that contain at
        // least one query term; everything else would score 0 anyway.
        Set<Integer> candidateDocIds = new HashSet<>();
        for (String word : query) {
            List<Integer> postings = invertedIndex.get(word);
            if (postings != null) {
                candidateDocIds.addAll(postings);
            }
        }

        // Hoist the threshold decision out of the per-element lambda.
        boolean applyThreshold = threshold > 0.0F;

        return candidateDocIds.stream()
                .map(index -> this.buildBM25SearchResult(query, index))
                .filter(result -> !applyThreshold || sigmoid(result.getScore()) > threshold)
                .sorted((r1, r2) -> Double.compare(r2.getScore(), r1.getScore()))
                .limit(topN)
                .collect(Collectors.toList());
    }

    private BM25SearchResult<T> buildBM25SearchResult(List<String> query, int index) {
        // Diamond operator keeps the result type-safe (the original used a raw type).
        return new BM25SearchResult<>(index,
                score(query, index),
                bm25Documents.get(index).getData());
    }

    /**
     * Squashes an unbounded BM25 score into (0, 1) so it can be compared
     * against the user-supplied threshold. The 0.1 factor flattens the curve
     * so realistic score magnitudes spread across the range.
     */
    private float sigmoid(double score) {
        return (float) (1.0 / (1.0 + Math.exp(-0.1 * score)));
    }

    /**
     * Internal per-document data. Static nested class: it never touches the
     * enclosing instance, so holding a hidden outer reference would only leak.
     */
    private static class Document {
        /**
         * Tokenized document.
         */
        final List<String> documents;

        /**
         * Term -> number of occurrences within this document.
         * E.g. for ["ML", "DL", "NN", "ML"]: { ML: 2, DL: 1, NN: 1 }
         */
        final Map<String, Integer> freq;

        Document(List<String> documents, Map<String, Integer> freq) {
            this.documents = documents;
            this.freq = freq;
        }
    }

}
