#include "search/page_search.hpp"

#include <algorithm>
#include <cmath>
#include <fstream>
#include <iostream>
#include <map>
#include <mutex>
#include <set>
#include <sstream>
#include <string>
#include <unordered_map>
#include <vector>

// Construct the engine with no documents loaded and a jieba tokenizer
// built from the bundled dictionary files.
// NOTE(review): paths are relative to the working directory — presumably
// the binary is launched from the project root; confirm against deployment.
PageSearch::PageSearch() 
    : m_totalDocs(0)
    , m_tokenizer("resources/dictionaries/jieba.dict.utf8",
                  "resources/dictionaries/hmm_model.utf8",
                  "resources/dictionaries/user.dict.utf8",
                  "resources/dictionaries/idf.utf8",
                  "resources/dictionaries/stop_words.utf8") {}

/**
 * @brief Load everything needed to serve queries: stop words, the page
 *        offset table, and the inverted index.
 * @param invertedIndexPath file with lines: keyword docCount (docId weight)*
 * @param pagesPath         page library file (read lazily per lookup)
 * @param offsetsPath       docId -> (offset, size) table for pagesPath
 * @return true on success, false if any input file cannot be opened.
 */
bool PageSearch::init(const std::string& invertedIndexPath, 
                     const std::string& pagesPath, 
                     const std::string& offsetsPath) {
    // Load stop words used to filter query tokens.
    if (!loadStopWords()) return false;
    
    // Parse the page offset library (also sets m_totalDocs).
    m_pagesPath = pagesPath;
    if (!parseOffsets(offsetsPath)) return false;
    
    // Load the inverted index.
    std::ifstream file(invertedIndexPath);
    if (!file.is_open()) {
        std::cerr << "无法打开倒排索引文件: " << invertedIndexPath << std::endl;
        return false;
    }
    
    std::string line;
    while (std::getline(file, line)) {
        std::stringstream ss(line);
        std::string keyword;
        int docCount = 0;
        
        // Check extraction explicitly: the original tested docCount after an
        // unchecked read, so a malformed line examined an uninitialized int.
        if (!(ss >> keyword >> docCount)) continue;
        if (keyword.empty() || docCount <= 0) continue;
        
        std::map<int, double> docWeights;
        for (int i = 0; i < docCount; ++i) {
            int docId = 0;
            double weight = 0.0;
            // Truncated line: stop reading pairs, keep what was parsed so far.
            if (!(ss >> docId >> weight)) break;
            if (docId <= 0 || weight <= 0) continue;
            
            docWeights[docId] = weight;
        }
        
        if (!docWeights.empty()) {
            m_invertedIndex[keyword] = docWeights;
        }
    }
    
    std::cout << "加载倒排索引完成，共 " << m_invertedIndex.size() << " 个关键词" << std::endl;
    std::cout << "总文档数: " << m_totalDocs << std::endl;
    
    return true;
}

/**
 * @brief Run a query and return the top @p count pages ranked by cosine
 *        similarity between the query vector and each document vector.
 *        Only documents containing ALL query keywords are candidates.
 * @param query raw user query, tokenized internally
 * @param count maximum number of results
 * @return ranked results; empty when the query yields no keywords, any
 *         keyword is missing from the index, or @p count <= 0.
 *
 * The whole search is serialized on m_mutex.
 */
std::vector<PageResult> PageSearch::search(const std::string& query, int count) {
    std::lock_guard<std::mutex> lock(m_mutex);
    std::vector<PageResult> results;
    
    // Reject non-positive counts up front: the original compared
    // docScores.size() > count (signed vs unsigned), so a negative count
    // wrapped and effectively requested unlimited results.
    if (count <= 0 || query.empty() || m_invertedIndex.empty() || m_offsets.empty()) {
        return results;
    }
    
    // Tokenize the query into a normalized keyword-weight vector.
    std::map<std::string, double> queryKeywords = processQuery(query);
    if (queryKeywords.empty()) {
        return results;
    }
    
    // Flatten the keyword list (non-empty here since queryKeywords is).
    std::vector<std::string> keywords;
    keywords.reserve(queryKeywords.size());
    for (const auto& [word, _] : queryKeywords) {
        keywords.push_back(word);
    }
    
    // Intersect posting lists: docId -> {keyword -> weight} for documents
    // containing every keyword.
    std::map<int, std::map<std::string, double>> docKeywords;
    
    auto firstIt = m_invertedIndex.find(keywords[0]);
    if (firstIt == m_invertedIndex.end()) return results;
    
    // Seed the candidate set with the first keyword's posting list.
    for (const auto& [docId, weight] : firstIt->second) {
        docKeywords[docId][keywords[0]] = weight;
    }
    
    // Intersect with each remaining keyword's posting list.
    for (size_t i = 1; i < keywords.size(); ++i) {
        const std::string& keyword = keywords[i];
        auto it = m_invertedIndex.find(keyword);
        
        if (it == m_invertedIndex.end()) {
            // A keyword absent from the index means no document can match.
            return results;
        }
        
        // Drop candidates that do not contain the current keyword.
        std::vector<int> toRemove;
        for (auto& [docId, kwWeights] : docKeywords) {
            auto docIt = it->second.find(docId);
            if (docIt == it->second.end()) {
                toRemove.push_back(docId);
            } else {
                kwWeights[keyword] = docIt->second;
            }
        }
        
        for (int docId : toRemove) {
            docKeywords.erase(docId);
        }
        
        if (docKeywords.empty()) break;
    }
    
    if (docKeywords.empty()) return results;
    
    // |q| — magnitude of the query vector.
    double queryNorm = 0.0;
    for (const auto& [_, weight] : queryKeywords) {
        queryNorm += weight * weight;
    }
    queryNorm = std::sqrt(queryNorm);
    
    // Score every surviving candidate by cosine similarity over the
    // query terms (the document norm is restricted to those terms).
    std::vector<std::pair<int, double>> docScores;  // docId -> score
    docScores.reserve(docKeywords.size());
    
    for (const auto& [docId, kwWeights] : docKeywords) {
        double dotProduct = 0.0;
        double docNorm = 0.0;
        
        for (const auto& [kw, qWeight] : queryKeywords) {
            auto wIt = kwWeights.find(kw);
            if (wIt == kwWeights.end()) continue;
            
            double dWeight = wIt->second;
            dotProduct += qWeight * dWeight;
            docNorm += dWeight * dWeight;
        }
        
        if (docNorm == 0 || queryNorm == 0) continue;
        
        double cosine = dotProduct / (queryNorm * std::sqrt(docNorm));
        docScores.emplace_back(docId, cosine);
    }
    
    // Highest similarity first.
    std::sort(docScores.begin(), docScores.end(),
              [](const std::pair<int, double>& a, const std::pair<int, double>& b) {
                  return a.second > b.second;
              });
    
    // Truncate to the requested count (count > 0 guaranteed above).
    if (docScores.size() > static_cast<size_t>(count)) {
        docScores.resize(count);
    }
    
    // Fetch each page and build a keyword-centred abstract.
    std::set<std::string> keywordSet(keywords.begin(), keywords.end());
    for (const auto& [docId, score] : docScores) {
        PageResult result;
        result.docId = docId;
        result.score = score;
        
        std::string title, content, link;
        if (getDocument(docId, title, content, link)) {
            result.title = title;
            result.link = link;
            result.abstract = generateAbstract(content, keywordSet);
            results.push_back(result);
        }
    }
    
    return results;
}

/**
 * @brief Parse the page offset library: one line per document,
 *        "docId offset size", describing a record in m_pagesPath.
 *        Sets m_totalDocs to the number of valid entries.
 * @return false only if the file cannot be opened; malformed lines
 *         are skipped.
 */
bool PageSearch::parseOffsets(const std::string& path) {
    std::ifstream file(path);
    if (!file.is_open()) {
        std::cerr << "无法打开网页偏移库文件: " << path << std::endl;
        return false;
    }
    
    std::string line;
    while (std::getline(file, line)) {
        std::stringstream ss(line);
        int docId = 0;
        size_t offset = 0, size = 0;
        
        // Check extraction: the original tested values after an unchecked
        // read (uninitialized on failure) and compared the unsigned offset
        // against 0 — a check that can never fire.
        if (!(ss >> docId >> offset >> size)) continue;
        if (docId <= 0 || size == 0) continue;
        
        m_offsets[docId] = {offset, size};
    }
    
    m_totalDocs = m_offsets.size();
    return true;
}

/**
 * @brief Read one raw page record from the page library and split it
 *        into title, content and link using its <title>/<content>/<link>
 *        markers.
 * @param docId   document to fetch (looked up in m_offsets)
 * @param title   out: text between <title> and </title>
 * @param content out: text between <content> and </content>
 * @param link    out: text between <link> and </link>
 * @return false if the id is unknown, the file cannot be opened or read
 *         in full, or any marker is missing.
 */
bool PageSearch::getDocument(int docId, std::string& title, std::string& content, std::string& link) {
    auto it = m_offsets.find(docId);
    if (it == m_offsets.end()) return false;
    
    auto [offset, size] = it->second;
    
    // The page library stays closed between calls; reopen per lookup.
    std::ifstream file(m_pagesPath, std::ios::binary);
    if (!file.is_open()) {
        std::cerr << "无法打开网页库文件: " << m_pagesPath << std::endl;
        return false;
    }
    
    file.seekg(offset);
    std::string docContent(size, '\0');
    file.read(&docContent[0], size);
    // The original never checked the read: a bad offset or truncated file
    // silently parsed a NUL-filled buffer. Require the full record.
    if (static_cast<size_t>(file.gcount()) != size) {
        return false;
    }
    
    // Tag lengths derived from the literals instead of magic 7 / 9 / 6.
    static const std::string titleOpen = "<title>";
    static const std::string contentOpen = "<content>";
    static const std::string linkOpen = "<link>";
    
    size_t titleStart = docContent.find(titleOpen);
    size_t titleEnd = docContent.find("</title>", titleStart);
    size_t contentStart = docContent.find(contentOpen);
    size_t contentEnd = docContent.find("</content>", contentStart);
    size_t linkStart = docContent.find(linkOpen);
    size_t linkEnd = docContent.find("</link>", linkStart);
    
    if (titleStart == std::string::npos || titleEnd == std::string::npos ||
        contentStart == std::string::npos || contentEnd == std::string::npos ||
        linkStart == std::string::npos || linkEnd == std::string::npos) {
        return false;
    }

    title = docContent.substr(titleStart + titleOpen.size(),
                              titleEnd - (titleStart + titleOpen.size()));
    link = docContent.substr(linkStart + linkOpen.size(),
                             linkEnd - (linkStart + linkOpen.size()));
    content = docContent.substr(contentStart + contentOpen.size(),
                                contentEnd - (contentStart + contentOpen.size()));
    return true;
}

/**
 * @brief Turn a raw query into an L2-normalized keyword-weight vector.
 *
 * Tokens are produced by the jieba tokenizer; stop words and empty
 * tokens are discarded. Weights use a simplified TF-IDF: TF is the term
 * frequency over the total token count and IDF is assumed to be 1 (a
 * full implementation would derive it from the document collection as
 * log2(N / (DF + 1))). The final vector is normalized to unit length.
 *
 * @return keyword -> weight map; empty if no token survives filtering.
 */
std::map<std::string, double> PageSearch::processQuery(const std::string& query) {
    // Tokenize the raw query.
    std::vector<std::string> tokens;
    m_tokenizer.Cut(query, tokens);
    
    // Term-frequency counting with stop-word filtering.
    std::map<std::string, int> freq;
    for (const std::string& tok : tokens) {
        if (m_stopWords.count(tok) == 0 && !tok.empty()) {
            ++freq[tok];
        }
    }
    
    if (freq.empty()) return {};
    
    // TF = count / total tokens (denominator includes filtered tokens,
    // matching the original; the scale cancels under normalization).
    const int totalTokens = static_cast<int>(tokens.size());
    std::map<std::string, double> weights;
    for (const auto& [term, n] : freq) {
        weights[term] = static_cast<double>(n) / totalTokens;
    }
    
    // Compute the vector magnitude.
    double sumSquares = 0.0;
    for (const auto& [term, w] : weights) {
        sumSquares += w * w;
    }
    const double magnitude = std::sqrt(sumSquares);
    
    // Scale to unit length (skip if degenerate).
    if (magnitude > 0) {
        for (auto& [term, w] : weights) {
            w /= magnitude;
        }
    }
    
    return weights;
}

/**
 * @brief Build a short abstract for a result page, centred on the first
 *        query keyword (in set order) that occurs in the content; falls
 *        back to the opening text when no keyword is present.
 * @param content  full page text
 * @param keywords query keywords to highlight around
 * @return abstract of at most ~150 bytes plus ellipses.
 *
 * NOTE(review): truncation is byte-based and may split a multi-byte
 * UTF-8 character at the cut point — same as the original behavior.
 */
std::string PageSearch::generateAbstract(const std::string& content, const std::set<std::string>& keywords) {
    if (content.empty()) return "";
    
    const size_t abstractLen = 150;  // target abstract length
    const size_t windowSize = 50;    // context bytes around the keyword
    
    // Single pass over the keywords. The original looped over every byte
    // offset and re-searched the whole string each time (O(n^2 * k)); it
    // always returned on the first iteration when any keyword exists, so
    // one search per keyword yields the identical result in O(k * n).
    size_t keywordPos = std::string::npos;
    std::string hitKeyword;
    for (const std::string& kw : keywords) {
        size_t pos = content.find(kw);
        if (pos != std::string::npos) {
            keywordPos = pos;
            hitKeyword = kw;
            break;
        }
    }
    
    if (keywordPos != std::string::npos) {
        // Extract a window of text around the keyword.
        size_t start = (keywordPos > windowSize) ? keywordPos - windowSize : 0;
        size_t end = std::min(content.size(), keywordPos + hitKeyword.size() + windowSize);
        std::string abstract = content.substr(start, end - start);
        
        // Pad a short snippet with text from the start of the page.
        if (abstract.size() < abstractLen && start > 0) {
            abstract = content.substr(0, abstractLen - abstract.size()) + "..." + abstract;
        }
        
        // Truncate an over-long snippet.
        if (abstract.size() > abstractLen) {
            abstract = abstract.substr(0, abstractLen) + "...";
        }
        
        return abstract;
    }
    
    // No keyword present: return the opening text.
    if (content.size() <= abstractLen) {
        return content;
    } else {
        return content.substr(0, abstractLen) + "...";
    }
}

/**
 * @brief Load the stop-word set, one word per line.
 * @param path stop-word file (default argument presumably supplied by
 *        the header declaration — called as loadStopWords() in init()).
 * @return false only if the file cannot be opened.
 */
bool PageSearch::loadStopWords(const std::string& path) {
    std::ifstream file(path);
    if (!file.is_open()) {
        std::cerr << "无法打开停用词文件: " << path << std::endl;
        return false;
    }
    
    std::string word;
    while (std::getline(file, word)) {
        // Strip a trailing '\r' so CRLF-edited dictionaries still match,
        // and skip blank lines so "" never becomes a stop word (the
        // original inserted both, silently breaking stop-word filtering).
        if (!word.empty() && word.back() == '\r') {
            word.pop_back();
        }
        if (!word.empty()) {
            m_stopWords.insert(word);
        }
    }
    
    return true;
}

