#include "pageLibPrecessor.h"
#include "inicpp.hpp"
#include "../bin/simhash/Simhasher.hpp"
#include "tinyxml2.h"
#include <fstream>
#include <map>
#include <filesystem>
#include <cmath>
using std::ifstream;
using std::ofstream;
using std::cerr;
using std::stringstream;
using std::ios;
using std::cout;
using std::map;
using namespace simhash;
using namespace tinyxml2;
namespace fs = std::filesystem;

void updateProgressBar(int processedDocs, int totalDocs);

// Construct the preprocessor with its collaborators.
//   cuttor : word-segmentation tool (non-owning pointer)
//   ini    : ini configuration manager (non-owning pointer)
PageLibPrecessor::PageLibPrecessor(SplitTool* cuttor, inicpp::IniManager* ini)
    : _cuttor(cuttor), _ini(ini)
{
}


// Build the de-duplicated web page library.
// Reads <doc>...</doc> records from the ripe page library, computes a
// simhash fingerprint per record, and appends only records whose
// fingerprint has not been seen before to the dedup output file.
void PageLibPrecessor::cutRedundantPage()
{
    Simhasher simhasher("../bin/dict/jieba.dict.utf8", "../bin/dict/hmm_model.utf8", "../bin/dict/idf.utf8", "../bin/dict/stop_words.utf8");
    ifstream webLibIfs((*_ini)["rtsp"]["ripepage"]);
    if(!webLibIfs){
        cerr << "file open fail" ;
        return; // nothing to read — do not loop on a bad stream
    }
    ofstream dupRemoveWebPageLib((*_ini)["rtsp"]["dupRemoveWebPageLib"], ios::app);
    if(!dupRemoveWebPageLib){
        cerr << "file open fail" ;
        return; // writes to a failed stream would be silently dropped
    }

    string line;
    string docContent;     // accumulates one record, "<doc>" line included, "</doc>" excluded
    bool inDoc = false;
    while (getline(webLibIfs, line)) {
        if (line == "<doc>") {
            inDoc = true;
            docContent.clear();
        } else if (line == "</doc>") {
            inDoc = false;
            size_t topN = 5;   // number of top-weighted keywords fed into simhash
            uint64_t u64 = 0;  // fingerprint of this document
            simhasher.make(docContent, topN, u64);
            // Keep the page only the first time its fingerprint appears.
            if(_webSimhash.find(u64) == _webSimhash.end())
            {
                _webSimhash.insert(u64);
                dupRemoveWebPageLib << docContent << "</doc>" << "\n";
            }
        }

        if (inDoc) {
            docContent += line + "\n";
        }
    }

    // Streams are also closed by RAII; explicit close kept for clarity.
    webLibIfs.close();
}

// Build the inverted index library and write it to disk.
// Output format, one line per word:
//   word docid1,weight1 docid2,weight2 ...
void PageLibPrecessor::invertedIndex()
{
    // Count pages (fills _pageCount).
    getLineCount();
    // Load document frequencies (fills _DFs).
    getDFs();
    // Walk every page: computes TF and weights, fills _invertIndexLib.
    readPage((*_ini)["rtsp"]["dupRemoveWebPageLib"]);

    ofstream invertedIndexLibOfs((*_ini)["rtsp"]["invertedIndexLib"]);
    if(!invertedIndexLibOfs){
        cerr << "file open fail" ;
        return; // do not try to write through a failed stream
    }

    for (const auto& [word, postings] : _invertIndexLib)
    {
        invertedIndexLibOfs << word << " ";
        for (const auto& [docid, weight] : postings)
        {
            invertedIndexLibOfs << docid << "," << weight << " ";
        }
        invertedIndexLibOfs << "\n";
    }
    invertedIndexLibOfs.close();
}


void PageLibPrecessor::readPage(string filename)
{
    ifstream file(filename); 
    if (!file.is_open()) {
        cerr << "无法打开文件" << "\n";
        return;
    }

    string line;
    string docid;
    string description;
    bool inDoc = false;

    int totalDocs = _pageCount;
    int processedDocs = 0;

    while (getline(file, line)) {
        if (line == "<doc>") {
            inDoc = true;
        } else if (line == "</doc>") {
            inDoc = false;

            //拿到某一个docid和description
            //获得一个page中所有的TF
            //计算这个page中所有word的IDF
        
            map<string,int> TFs = calculateTF(description);
            calculateWeight(TFs, std::stoi(docid));//将字符串docid转换为整数   
            //cout << std::stoi(docid) << "\n";
            //要算这个page里所有的词在这个page的权重
            //先创建好一个只有所有词的数据结构，等着每次算好都往里边添加

            docid.clear();
            description.clear();

            processedDocs++;
            updateProgressBar(processedDocs, totalDocs);
        }

        if (inDoc) {
            if (line.find("<docid>")!= string::npos) {
                docid = line.substr(line.find("<docid>") + 7, line.find("</docid>") - line.find("<docid>") - 7);
            } else if (line.find("<description>")!= std::string::npos) {
                description = line.substr(line.find("<description>") + 13, line.find("</description>") - line.find("<description>") - 13);
                if (description.empty()) {
                    description = ""; // 如果 description 为空，赋空值
                }
            }
        }
    }

    file.close();
    cout << "\n";

    return;
}


//计算权重
void PageLibPrecessor::calculateWeight(map<string,int> &tfs, int docid)
{
  
    map<string,double> ws;//存放某个词和对应的w
    double sum = 0.0;//分母
    for(auto &word : tfs)
    {
        int DF = _DFs[word.first];
        double IDF = log2(((double)_pageCount / (double)(DF + 1)) + 1);
        double w = word.second * IDF;//得到这个page中这个词的权重
        ws.insert({word.first, w});
        sum += pow(w,2);
    }

    _wp.insert({docid,sum});

    for(auto &tmp : ws)
    {   
        _invertIndexLib[tmp.first].insert({docid, tmp.second / sqrt(sum)});
    }
}

// Count the number of pages by counting non-empty lines of the offset file
// and store the result in _pageCount.
void PageLibPrecessor::getLineCount() {
    ifstream file((*_ini)["rtsp"]["offset"]);
    if (!file.is_open()) {
        cerr << "无法打开文件" << std::endl;
        return;
    }

    int lineCount = 0;
    string line;
    while (getline(file, line)) {
        if (!line.empty()) {
            ++lineCount;
        }
    }
    // (removed a pointless file.clear() before close; RAII closes anyway)
    file.close();
    _pageCount = lineCount;
}

// Compute term frequencies (TF) for one page.
//   webContent : the plain text (description) of the page
// Returns word -> number of occurrences within this page.
map<string,int> PageLibPrecessor::calculateTF(string webContent)
{
    vector<string> words;           // tokens of this single page
    map<string,int> wordFreq;       // per-page term frequency
    // Tokenise the page (third argument is a placeholder flag of the cutter).
    _cuttor->cutChineseWord(webContent, words, 1);

    for(auto &word : words)
    {
        // operator[] value-initialises the count to 0 on first sight,
        // replacing the manual find/insert/increment dance.
        ++wordFreq[word];
    }

    return wordFreq;
}

// Load document frequencies (DF); needs to run only once per build.
// Each line of the chinese index file is "word docid docid ..."; the DF of
// a word is the number of docids that follow it.
void PageLibPrecessor::getDFs()
{
    std::ifstream file((*_ini)["rtsp"]["chineseIndexFile"]);
    if (!file.is_open()) {
        std::cerr << "无法打开文件" << std::endl;
        return;
    }

    string line;
    while (getline(file, line)) {
        // Fully qualified: this file only has a using-decl for stringstream,
        // not istringstream.
        std::istringstream iss(line);
        string word;
        if (iss >> word) {
            int count = 0;
            int docidNum = 0;
            // Every number read after the word is one document containing it.
            while (iss >> docidNum) {
                ++count;
            }
            _DFs[word] = count;
        }
    }

    file.close();
}

// Compute the weight vector of a single string (the search-box content).
// NOTE(review): unimplemented stub — `searchKeyWords` is currently unused
// and `pageCount` is set but never read. TODO: finish the TF-IDF
// calculation for the query, mirroring calculateWeight().
void PageLibPrecessor::calculateSearchContentWeight(vector<string> &searchKeyWords)
{
    int pageCount = 1;// the search content is treated as one page
    

}

// Forwarding wrapper exposing the member cutter's method: segments `str`
// into Chinese words appended to `words`. `placeholder` is passed through
// unchanged to the cutter.
void PageLibPrecessor::_cuttor_cutCnString(string str, vector<string> &words, int placeholder)
{
    _cuttor->cutChineseWord(str, words, placeholder);
}
// Forwarding wrapper exposing the member ini manager: returns the value of
// key `conf` from the [rtsp] section of the configuration file.
string PageLibPrecessor::_ini_config(string conf)
{
    return (*_ini)["rtsp"][conf];
}


// Render a single-line console progress bar, overwriting the previous one
// with '\r'.
//   processedDocs : number of documents handled so far
//   totalDocs     : total number of documents; guarded against <= 0 so an
//                   empty page library no longer divides by zero
void updateProgressBar(int processedDocs, int totalDocs) {
    std::cout << "\rProgress: [";
    const int barWidth = 50;
    // Avoid division by zero / nonsense ratios: treat an empty total as 0%.
    float progress = (totalDocs > 0)
                         ? static_cast<float>(processedDocs) / totalDocs
                         : 0.0f;
    if (progress > 1.0f) progress = 1.0f;   // clamp in case of over-count
    int pos = static_cast<int>(barWidth * progress);
    for (int i = 0; i < barWidth; ++i) {
        if (i < pos) std::cout << "=";
        else if (i == pos) std::cout << ">";
        else std::cout << " ";
    }
    // std::fixed fully qualified: this file has no `using std::fixed`.
    std::cout << "] " << std::fixed << std::setprecision(2) << progress * 100.0 << "%";
    std::cout.flush();
}





