#include "../include/webPageQuery.h"
#include "../include/ConfigFile.h"
#include <math.h>
#include <nlohmann/json.hpp>
#include <iterator>
#include <algorithm>
#include <sstream>
#include<nlohmann/json.hpp>

using json = nlohmann::json;
using std::istringstream;
using std::ifstream;
using std::inserter;
using std::set_intersection;
using std::set_union;

// Singleton instance pointer; presumably created lazily by a
// getInstance() declared in the header — TODO confirm.
web_page_query * web_page_query::pQuery = nullptr;

// Constructor: initializes the jieba tokenizer, then loads the page
// library (+ offsets) and the inverted index table into memory.
web_page_query::web_page_query()
: _jieba()
{
    loadWebAndOffset();
    loadInvertIndexTable();
}

web_page_query::~web_page_query()
{}

string web_page_query::doQuery(const string & str) // Execute a query and return the JSON result
{
    // Cache helper: store the serialized result in Redis db 1, keyed by
    // the raw query string. Previously this block was duplicated three
    // times in this function.
    auto cacheResult = [&str](const string & msg) {
        MyRedis *redis = MyRedis::getInstance();
        redis->select(1);
        redis->multi();
        redis->set(str, msg);
        redis->exec();
    };
    // Serialized ["null"] marker returned when there is no hit.
    auto nullResult = []() {
        vector<string> rs;
        rs.push_back("null");
        json j = rs;
        return j.dump();
    };

    // Tokenize the query for search (search-engine cut mode).
    vector<string> words = _jieba.cutforSearch(str);
    for(const auto & w : words) {
        cout << w << " ";
    }
    cout << endl;

    // If no token appears in the inverted index, cache and return "null".
    bool anyHit = std::any_of(words.begin(), words.end(),
            [this](const string & w) {
                return _inverIndexTable.find(w) != _inverIndexTable.end();
            });
    if(!anyHit) {
        string msgs = nullResult();
        cacheResult(msgs);
        return msgs;
    }

    // Weight vector of the query itself (the "X" vector below).
    vector<double> Base = getQueryWordsWeightVector(words);
    unordered_map<int, vector<double>> resultVec;
    string msgs;
    if(executeQuery(words, resultVec)) {
        // |X|: L2 norm of the query weight vector.
        double X = 0.0;
        for(double x : Base) {
            X += x * x;
        }
        X = sqrt(X);

        // Cosine similarity between the query vector and each candidate
        // document's weight vector; higher means more relevant.
        vector<std::pair<int, double>> web_sort;
        for(const auto & entry : resultVec) {
            int docid = entry.first;
            double XY = 0.0, Y = 0.0;
            for(size_t i = 0; i < entry.second.size(); ++i) {
                XY += entry.second[i] * Base[i];
                Y  += entry.second[i] * entry.second[i];
            }
            Y = sqrt(Y);
            // Guard the denominator: an all-zero weight vector would
            // otherwise make XY/(X*Y) a NaN and poison the sort.
            double denom = X * Y;
            double COS = (denom > 0.0) ? (XY / denom) : 0.0;
            web_sort.push_back(std::make_pair(docid, COS));
        }
        sort(web_sort.begin(), web_sort.end(), ComPair);

        vector<int> web_fin; // doc ids, best match first
        for(const auto & p : web_sort) {
            web_fin.push_back(p.first);
        }
        // Keep at most the top 30 pages.
        if(web_fin.size() > 30) {
            web_fin.resize(30);
        }
        vector<string> rs = create_Json(web_fin, words);
        json j = rs;
        msgs = j.dump();
    } else {
        msgs = nullResult();
    }

    cacheResult(msgs);
    return msgs;
}

// Build one formatted text entry per ranked document id; the caller
// serializes the returned vector to JSON.
vector<string> web_page_query::create_Json(vector<int> & docidVec, const vector<string> & queryWords)
{
    vector<string> result;
    const string Line = "<-------------------------------------------------------------->\n";
    size_t id = 0; // 1-based display index of the result
    for(int docid : docidVec) {
        // Summary is generated around the query words for this page.
        string Summary = _pageLib[docid].summary(queryWords);
        string Title = _pageLib[docid].getTile();
        string Url = _pageLib[docid].getUrl();

        // (Removed: unused local `res` and a dead commented-out sprintf
        // block that duplicated this formatting.)
        string entry;
        entry += Line + "所查询网页:" + std::to_string(++id)
               + "\n\t标题:" + Title
               + "\n\t链接:" + Url
               + "\n\t摘要:" + Summary + "...\n";
        result.push_back(entry);
    }

    return result;
}


bool web_page_query::executeQuery(const vector<string> & queryWords, unordered_map<int, vector<double>> & resultVec) //执行查询
{
    vector<set<int>> exist_web;
    bool flag = false;
    for(size_t i = 0; i < queryWords.size(); ++i) {
        string word = queryWords[i];
        auto it = _inverIndexTable[word]; //set<pair<int, double>>
        set<int> tmp;
        for(auto it2 = it.begin(); it2 != it.end(); ++it2 ) {
            int docid = it2->first;
            tmp.insert(docid);
            flag = true;
        }
        exist_web.push_back(tmp);
    }
    if(flag == false) { 
        return false;
    }
    set<int> SameWeb = exist_web[0];
    for(size_t i = 0; i < exist_web.size(); ++i) {
        set<int> tmp;
        /*set_union(SameWeb.begin(), SameWeb.end(), 
                         exist_web[i].begin(), exist_web[i].end(),
                         inserter(tmp, tmp.end()));*/
        set_intersection(SameWeb.begin(), SameWeb.end(), 
                          exist_web[i].begin(), exist_web[i].end(),
                          inserter(tmp, tmp.end()));
        SameWeb.swap(tmp);
    }
    if(!SameWeb.size()){ 
        return false;
    }
    for(auto it = SameWeb.begin(); it != SameWeb.end(); ++it) {
        int docid = *it;
        auto & it2 = resultVec[docid];
        for(string  word : queryWords) {
            auto it3 = _inverIndexTable[word];
            for(auto it4 = it3.begin(); it4 != it3.end(); ++it4) {
                if(it4->first == docid) {
                    it2.push_back(it4->second);
                    break;
                }
            }
        }
    }
    return true;
}

// Compute the L2-normalized TF-IDF weight vector of the query words,
// one weight per word in query order.
vector<double> web_page_query::getQueryWordsWeightVector(vector<string> & queryWords)
{
    // Term frequency of each word inside the query itself.
    unordered_map<string, int> tf;
    for(const string & word : queryWords) {
        ++tf[word]; // map value-initializes to 0 on first access
    }

    const double N = _pageLib.size() + 1; // corpus size (+1 smoothing)
    vector<double> raw;
    raw.reserve(queryWords.size());
    for(const string & word : queryWords) {
        // find() avoids operator[]'s side effect of inserting an empty
        // posting set for unknown words, and avoids copying the set
        // just to read its size.
        auto it = _inverIndexTable.find(word);
        double DF = (it != _inverIndexTable.end()) ? it->second.size() : 0.0;
        double IDF = log(N / (DF + 1)) / log(2); // log2(N / (DF+1))
        raw.push_back(tf[word] * IDF);
    }

    // L2-normalize; guard against an all-zero vector, which previously
    // divided by zero and produced NaNs.
    double norm = 0.0;
    for(double w : raw) {
        norm += w * w;
    }
    norm = sqrt(norm);

    vector<double> results;
    results.reserve(raw.size());
    for(double w : raw) {
        results.push_back(norm > 0.0 ? w / norm : 0.0);
    }
    return results;
}




void web_page_query::loadWebAndOffset()
{
    cout << "[loadWebAndOffset]" << endl;
    ConfigFile *confPtr = ConfigFile::getInstance();
    string offsetPath = confPtr->getConf()["offsetPath"];
    string pageLibPath = confPtr->getConf()["ripePagePath"];
    ifstream offsetIfs;
    ifstream pageLibIfs;

    offsetIfs.open(offsetPath, std::ios::in);
    if(!offsetIfs.good()) {
        perror("offsetIfs open file fail!");
        return;
    }
    pageLibIfs.open(pageLibPath, std::ios::in);
    if(!pageLibIfs.good()) {
        perror("pageLibIfs open file fail!");
        return;
    }

    string line;
    int docId, offset, docLength;
    char buff[100000] = {0};
    //string buff;
    while (getline(offsetIfs, line)) {
        memset(buff, 0, sizeof(buff));
        istringstream iss(line);
        iss >> docId >> offset >> docLength;
        _offsetLib[docId] = std::make_pair(offset, docLength);
        pageLibIfs.read(buff, docLength);
        _pageLib[docId] = webPage(string(buff), &_jieba);
    }

    offsetIfs.close();
    pageLibIfs.close();
    cout << "[loadWebAndOffset ok]" << endl;
    
}

// Load the inverted index from disk.
// File format per line: <word> (<docId> <weight>)*
void web_page_query::loadInvertIndexTable()
{
    cout << "[loadInvertIndexTable]" << endl;
    ConfigFile *confPtr = ConfigFile::getInstance();
    string invertIndexTablePath = confPtr->getConf()["invertIndexPath"];
    ifstream invertIfs(invertIndexTablePath, std::ios::in);
    if(!invertIfs.good()) {
        perror("invertIfs open file fail!");
        return;
    }

    string line;
    while (getline(invertIfs, line)) {
        istringstream iss(line);
        string word;
        if(!(iss >> word)) {
            continue; // blank line
        }
        int docId;
        double weight;
        // Extraction-driven loop: the old `while(!iss.eof())` test could
        // insert a stale (docId, weight) pair when the final extraction
        // failed (trailing whitespace or a truncated pair).
        while(iss >> docId >> weight) {
            _inverIndexTable[word].insert(std::make_pair(docId, weight));
        }
    }
    invertIfs.close();
    cout << "[loadInvertIndexTable ok]" << endl;
}