#include "../../../include/online/WebPageQuery.hpp"
#include <iterator>
#include <algorithm>
#include <sstream>
#include "../../../include/cppjieba/Jieba.hpp"
// #include "../../../include/nlohmann/json.hpp"

// const char* const DICT_PATH = "../../include/cppjieba/dict/jieba.dict.utf8";
// const char* const HMM_PATH = "../../include/cppjieba/dict/hmm_model.utf8";
// const char* const USER_DICT_PATH = "../../../include/cppjieba/dict/user.dict.utf8";
// const char* const IDF_PATH = "../../../include/cppjieba/dict/idf.utf8";
// const char* const STOP_WORD_PATH = "../../../include/cppjieba/dict/stop_words_zh.txt";

// Use absolute paths to the jieba dictionary files (the relative paths
// above broke depending on the process working directory).
const char* const DICT_PATH = "/home/suika/SearchEngine/include/cppjieba/dict/jieba.dict.utf8";
const char* const HMM_PATH = "/home/suika/SearchEngine/include/cppjieba/dict/hmm_model.utf8";
const char* const USER_DICT_PATH = "/home/suika/SearchEngine/include/cppjieba/dict/user.dict.utf8";
const char* const IDF_PATH = "/home/suika/SearchEngine/include/cppjieba/dict/idf.utf8";
const char* const STOP_WORD_PATH = "/home/suika/SearchEngine/include/cppjieba/dict/stop_words_zh.txt";

// Constructor: loads the offset library, the processed-page library and the
// inverted index from disk via loadLibrary().
WebPageQuery::WebPageQuery()
// : _jieba(DICT_PATH, HMM_PATH, USER_DICT_PATH, IDF_PATH, STOP_WORD_PATH)
{
    loadLibrary();
}

// 执行查询，返回结果
std::vector<std::string> WebPageQuery::doQuery(const std::string& str)
{
    using std::vector;
    using std::string;
    using std::unordered_map;
    using std::pair;
    using std::cout;
    using std::endl;

    cout << "WPQ: doQuery -> " << str << endl;

    vector<string> results;     // 保存文档的分词结果
    cppjieba::Jieba jieba(DICT_PATH,
        HMM_PATH,
        USER_DICT_PATH,
        IDF_PATH,
        STOP_WORD_PATH);
    jieba.Cut(str, results, true);

    cout << "- split result: " << endl;
    for(auto& iter : results){
        cout << iter << endl;
    }

    size_t notInTable = 0;
    // 判断查询词是否在倒排索引表中
    for(auto & word : results) {
        auto it = _invertIndexTable.find(word);
        if(it == _invertIndexTable.end()) {
            ++notInTable;
        }
    }
    if(notInTable == results.size()) {
        // 如果所有查询词都不在倒排索引表中，则返回空容器
        cout << "- no matches in invertIndexTabel." << endl;
        return vector<string>();
    }

    // 获取查询词权重向量
    vector<double> queryWeightVec = getQuerywordsWeightVector(results);

    unordered_map<int, vector<double>> resultVec;
    // 执行查询，获取结果，存放在resultVec中
    bool exeResult = executeQuery(results, resultVec);
    if(exeResult) {
        // 计算文档与查询词的余弦相似度并排序
        //resultVec 1 (y1 y2 y3 ) 2(y1 y2 y3) ..
        double X = 0.0;
        for(double x : queryWeightVec) {
            X += x * x;
        }
        X = sqrt(X);
        vector<std::pair<int, double>> web_sort;
        for(auto it = resultVec.begin(); it != resultVec.end(); ++it) {
            int docid = it->first;
            double XY = 0.0, Y = 0.0;
            for(size_t i = 0; i < it->second.size(); ++i) {
                XY += 1.0 * it->second[i] * queryWeightVec[i];
                Y += 1.0 * it->second[i] * it->second[i];
            }
            Y = sqrt(Y);
            double COS = 1.0 * XY /(X * Y);
            web_sort.push_back(std::make_pair(docid, COS));
        }
        sort(web_sort.begin(), web_sort.end(), Compair);    // 对余弦相似度排序
       
        vector<int> web_in_order; //存放排序之后的网页

        for(auto it : web_sort) {
            web_in_order.push_back(it.first);
        }
        
        size_t sz = web_in_order.size();    // 网页数量
        std::cout << "Nums of webs founded: " << sz << std::endl;
        #if 1
        // 根据网页数量选择返回的结果数量
        if(sz < 5){
            
        } else if(sz >= 5 && sz < 10 ){
            web_in_order.resize(5);
        } else if(sz >= 10 && sz < 15) {
            web_in_order.resize(10);
        } else if (sz >= 15 && sz < 20) {
            web_in_order.resize(15);
        } else {
            web_in_order.resize(20);
        }
        #endif
        vector<string> rs = createJson(web_in_order, results);
        
        return rs;
    }
    else{ 
        return vector<string>();
    }
}

// Load the three on-disk libraries produced by the offline pipeline:
//   1. offset library      docid -> (begin, length) of the page bytes
//   2. processed pages     raw page text, parsed into WebPage objects
//   3. inverted index      word -> { docid -> normalized weight }
void WebPageQuery::loadLibrary()
{
    using std::ifstream;
    using std::cout;
    using std::endl;
    using std::string;

    cout << "**Offset lib loading...**" << endl;
    ifstream ifs_offset("/home/suika/SearchEngine/data/dat/testOffset.dat");
    int id, begin, end;
    // Each record is "<docid> <begin> <end>".  Reading all three ints in the
    // loop condition (instead of token + atoi with unchecked extractions)
    // stops cleanly on a malformed or truncated file.
    while (ifs_offset >> id >> begin >> end)
    {
        _offsetLib[id] = std::make_pair(begin, end);
    }
    ifs_offset.close();

    ifstream ifs_ripePage;
    cout << "**RipePage loading...**" << endl;
    ifs_ripePage.open("/home/suika/SearchEngine/data/dat/testRipePage.dat");
    cppjieba::Jieba _jieba(DICT_PATH,
        HMM_PATH,
        USER_DICT_PATH,
        IDF_PATH,
        STOP_WORD_PATH);
    for(auto it = _offsetLib.begin(); it != _offsetLib.end(); ++it) {
        const int begin = it->second.first;
        const size_t size = it->second.second;
        // Size the buffer to the document.  The previous fixed 64 KiB stack
        // buffer was a stack overflow for any page larger than 64 KiB and
        // lost the terminating NUL for a page of exactly 64 KiB.
        string doc(size, '\0');
        ifs_ripePage.seekg(begin);
        ifs_ripePage.read(&doc[0], static_cast<std::streamsize>(size));
        // Trim to what was actually read (short read at end of file).
        doc.resize(static_cast<size_t>(ifs_ripePage.gcount()));
        _pageLib.insert(std::make_pair(it->first, WebPage(doc, _jieba)));
    }
    ifs_ripePage.close();

    ifstream ifs_invertIndex;
    cout << "**InvertIndexTable loading...**" << endl;
    ifs_invertIndex.open("/home/suika/SearchEngine/data/dat/testInvertIndex.dat");
    // Each line: "<word> (<docid> <weight>)*"
    string line;
    while(getline(ifs_invertIndex, line)) {
        std::istringstream iss(line);
        string word;
        if(!(iss >> word)) {
            continue;   // skip blank lines
        }
        int docid;
        double weight;
        // Extract docid and weight together so a dangling docid without a
        // weight is rejected instead of inserting a bogus entry.
        while (iss >> docid >> weight)
        {
            _invertIndexTable[word].insert(std::make_pair(docid, weight));
        }
    }
    ifs_invertIndex.close();

    cout << "**All libraries loaded.**" << endl;
}

// Compute the normalized TF-IDF weight for each query word.
// The returned vector is index-aligned with `queryWords`.
std::vector<double> WebPageQuery::getQuerywordsWeightVector(std::vector<std::string>& queryWords)
{
    std::cout << "**Getting query words weight vector...**" << std::endl;
    using std::vector;
    using std::unordered_map;
    using std::string;

    vector<double> queryWeight;     // result: one normalized weight per word
    unordered_map<string, int> tf;  // term frequency within the query
    const double N = _pageLib.size() + 1;   // page count (+1 avoids log of 0)

    // Count each word's frequency.  operator[] value-initializes to 0 on
    // first access, so a plain increment suffices.
    for(const string& word : queryWords) {
        ++tf[word];
    }

    vector<double> tmp;     // raw (un-normalized) TF-IDF weights
    for(const string& word : queryWords) {
        // Use find() instead of operator[]: the latter inserted an empty
        // posting list for every unknown word (silently growing the index)
        // and copied the entire inner map on each lookup.
        auto it = _invertIndexTable.find(word);
        const double DF = (it != _invertIndexTable.end()) ? it->second.size() : 0.0;
        const double IDF = log(N / (DF + 1)) / log(10);     // log base 10
        const double TF = tf[word];
        tmp.push_back(TF * IDF);
    }

    // Normalize by the vector magnitude so documents are compared on
    // direction only (cosine similarity).
    double AllWeight = 0.0;
    for(double weight : tmp) {
        AllWeight += weight * weight;
    }
    AllWeight = sqrt(AllWeight);
    for(double weight : tmp) {
        // Guard: a zero-magnitude vector would make this 0/0 == NaN.
        queryWeight.push_back(AllWeight > 0.0 ? weight / AllWeight : 0.0);
    }
    std::cout << "**Getting query words weight vector succeed.**" << std::endl;
    return queryWeight;
}

// Find every page containing ALL query words and collect, for each such
// page, the per-word weights (in query-word order) into `resultVec`.
// Returns false when no page matches all words.
bool WebPageQuery::executeQuery(const std::vector<std::string>& queryWords, std::unordered_map<int, std::vector<double>>& resultVec)
{
    std::cout << "**Executing Query...**" << std::endl;
    using std::vector;
    using std::set;
    using std::string;
    using std::cout;
    using std::endl;

    std::cout << "EQ: Step 1." << std::endl;
    // Step 1: collect, per query word, the set of docids containing it.
    vector<set<int>> exist_web_id;
    bool flag = false;  // did ANY query word appear in the index?
    for(const string& word : queryWords) {
        set<int> tmp;
        // find() instead of operator[]: the latter inserted an empty posting
        // list for unknown words and copied the whole inner map each time.
        auto it = _invertIndexTable.find(word);
        if(it != _invertIndexTable.end()) {
            for(const auto& posting : it->second) {
                const int docid = posting.first;
                cout << "Found in web " << docid << endl;
                tmp.insert(docid);
                flag = true;
            }
        }
        exist_web_id.push_back(tmp);
    }
    if(flag == false) {
        return false;
    }
    else{
        cout << "web matches partly: " << exist_web_id << endl;
    }

    std::cout << "EQ: Step 2." << std::endl;
    // Step 2: intersect the per-word docid sets -> pages with ALL words.
    set<int> web_match_all = exist_web_id[0];   // seed with the first word
    for(size_t i = 1; i < exist_web_id.size(); ++i) {   // i=0 already seeded
        set<int> tmp;
        set_intersection(web_match_all.begin(), web_match_all.end(),
                         exist_web_id[i].begin(), exist_web_id[i].end(),
                         inserter(tmp, tmp.end()));
        web_match_all.swap(tmp);
    }
    if(web_match_all.empty()) {
        std::cout<<"no web matches all query words." << std::endl;
        return false;
    }
    else{
        cout << "webs matches all: " << web_match_all << endl;
    }

    std::cout << "EQ: Step 3." << std::endl;
    // Step 3: for each matching page, record every query word's weight in
    // query-word order, so the vector aligns with the query weight vector.
    for(int docid : web_match_all) {
        vector<double>& weights = resultVec[docid];
        for(const string& word : queryWords) {
            auto wordIt = _invertIndexTable.find(word);
            if(wordIt == _invertIndexTable.end()) {
                continue;   // cannot happen after Step 2, but stay safe
            }
            // Direct keyed lookup replaces the previous linear scan over
            // the whole posting list.
            auto postIt = wordIt->second.find(docid);
            if(postIt != wordIt->second.end()) {
                weights.push_back(postIt->second);
            }
        }
    }
    std::cout << "**Executing Query succeed.**" << std::endl;
    return true;
}


// Format each result page as one printable record (despite the name, the
// output is a plain-text block, not JSON): page number, title, URL and a
// query-aware summary.
std::vector<std::string> WebPageQuery::createJson(std::vector<int>& docIdVec, const std::vector<std::string>& queryWords)
{
    std::cout << "**Creating Json...**" << std::endl;
    using std::vector;
    using std::string;

    vector<string> result;  // one formatted record per page
    const string Line = "----------------------------------------------------------------\n";
    size_t id = 0;
    for(int docid : docIdVec) {
        // find() instead of operator[]: avoid default-inserting a WebPage
        // for a docid that somehow has no page record.
        auto pageIt = _pageLib.find(docid);
        if(pageIt == _pageLib.end()) {
            continue;
        }
        const string Summary = pageIt->second.summary(queryWords);
        const string Title = pageIt->second.getTitle();
        const string Url = pageIt->second.getUrl();
        // ostringstream replaces the previous sprintf into a fixed 64 KiB
        // stack buffer, which could overflow on a long summary.
        std::ostringstream oss;
        oss << Line
            << "Page No:\t" << ++id << '\n'
            << "Title:\t\t" << Title << '\n'
            << "Url:\t\t" << Url << '\n'
            << "Summary:\t" << Summary << "。\n";
        result.push_back(oss.str());
    }
    std::cout << "**Creating Json succeeded: **" << std::endl;
    // Debug dump of every generated record.
    #if 1
    for(const auto& record : result){
        std::cout << record << std::endl;
    }
    #endif
    return result;
}

// Fallback message used when a query produces no results.
// The original body was empty: flowing off the end of a non-void function
// is undefined behavior, so return an explicit notice instead.
std::string WebPageQuery::returnNoAnswer()
{
    return "No answer found for your query.\n";
}

