#include"PageLibPreprocessor.h"
#include"nlohmann/json.hpp"
#include"tinyxml2.h"
#include<math.h>
#include<iostream>
#include<fstream>
#include<set>
#include<sstream>
#include<algorithm>
#include<sstream>

using std::cout;
using std::cerr;
using std::ifstream;
using std::set;
using std::ostringstream;
using std::for_each;
using std::istringstream;
using namespace tinyxml2;

// Return the byte length of the UTF-8 sequence whose lead byte is `byte`,
// i.e. the number of leading 1-bits of the byte (0 leading ones => ASCII
// => length 1). A continuation byte (10xxxxxx) yields 1; callers are
// expected to pass lead bytes only.
//
// Fixes vs. original: the test is done on an explicit unsigned char instead
// of relying on sign-extension of a (possibly signed) char, and the
// redundant assignment inside the return statement is removed. Observable
// behavior is unchanged for every input.
int getbytenum_utf8(char byte)
{
    const unsigned char b = static_cast<unsigned char>(byte);
    int bytenum = 0;
    // Count leading one bits; 6 is enough for any legal UTF-8 lead byte.
    for (int i = 0; i < 6; ++i)
    {
        if (b & (1u << (7 - i)))
        {
            ++bytenum;
        }
        else
            break;
    }
    return bytenum == 0 ? 1 : bytenum;
}


// Construct the preprocessor with the word-segmentation tool that the later
// phases use (cutRedundantPage obtains its simhash engine from it;
// buildinvertindexMap uses its cut()). The pointer is stored as-is.
PageLibPreprocessor::PageLibPreprocessor(SplitTool* pt) : _wordCutter(pt) {}

// Deduplicate the raw page library with simhash fingerprints.
// Flow: load offset.dat (JSON array of [docid,[begin,end]]) -> read
// ripage.dat in chunks of up to 1000 docs -> parse each chunk as XML <doc>
// elements -> fingerprint each doc's content -> drop docs whose fingerprint
// matches an already-kept one -> renumber the survivors, append them to
// newripe.data in batches of 2000, record their new byte ranges in
// _offsetlib, and persist the new offset table via storeOnDisk().
void PageLibPreprocessor::cutRedundantPage()
{
    // Fingerprinting engine supplied by the word cutter (used via make()).
    auto it=  _wordCutter->hasher();
    ifstream offifs("/home/sakura/all_file/search_engine/data/offset.dat");
    if(!offifs)
    {
        cerr<<" open offset.dat failed\n";
        return;
    }

    // Slurp the whole offset file into one buffer for the JSON parser.
    offifs.seekg(0,std::ios::end);
    size_t length=offifs.tellg();
    offifs.seekg(0,std::ios::beg);
    char * msg = new char[length+1]{0};

    offifs.read(msg,length);
    nlohmann::json json=nlohmann::json::parse(msg);
    delete []msg;
    // docid -> (begin byte, end byte) inside ripage.dat.
    map<int,pair<int,int>> off;
    for(auto& item: json)
    {
        off[item[0]]={item[1][0],item[1][1]};
    }

    ifstream ifs("/home/sakura/all_file/search_engine/data/ripage.dat");
    if(!ifs)
    {
        std::cerr << "loadFile fail    5    544\n" ;
        return;
    }

    vector<uint64_t> hashvec;   // fingerprints of every page kept so far
    vector<string> art;         // rebuilt <doc> blocks awaiting a flush
    XMLDocument doc;
            string essay;       // one rebuilt <doc> block
    int times=0;                // new docid counter for surviving pages
    size_t topN=5;              // top-keyword count fed to the fingerprinter
    uint64_t u64=0;             // fingerprint of the current page
    int begin ,end =-1;         // byte range of `essay` inside newripe.data
    // Walk the offset table in chunks; docids are assumed contiguous
    // 1..off.size() (off[i]/off[cursor] are keyed lookups, not indices).
    for(size_t i=1; i<=off.size();++i)
    {
        size_t cursor;
        if(i+999>=off.size())
        {
            cursor=off.size();
        }
        else
        {
            cursor=999+i;
        }

        // Bytes spanned by docs i..cursor. Relies on consecutive docs being
        // stored back-to-back in ripage.dat: the read below proceeds
        // sequentially, no seek is performed.
        size_t len =off[cursor].second-off[i].first+1;

        i = cursor;


        char* sentence = new char[len+1]{0};

        ifs.read(sentence,len);


    // Parse the chunk; tinyxml2 returns XML_SUCCESS (0) on success, so a
    // non-zero ret means failure.
    XMLError ret = doc.Parse(sentence);


    if(ret)
    {
        cerr<<" parse failed\n";
        delete []sentence;
        return;
    }
    XMLElement* itemNode=doc.FirstChildElement("doc");
    while(itemNode)
    {
        string docid = itemNode->FirstChildElement("docid")->GetText();
        string title = itemNode->FirstChildElement("title")->GetText();
        string link = itemNode->FirstChildElement("link")->GetText();
        string content;
    
    XMLElement* pt=itemNode->FirstChildElement("content"); 
    if(pt)
    {
        content = pt->GetText();
    }
    else
    {
        cerr<<" get fail xml 113 \n";
        // NOTE(review): this early return leaks `sentence` (delete[] above
        // is skipped on this path) -- confirm and free before returning.
        return;
    }
    

        // Compute the content fingerprint into u64 (presumably simhash over
        // the topN highest-weight keywords -- TODO confirm against the
        // hasher's API).
        it->make(content,topN,u64);
        // Linear scan against every kept fingerprint: O(kept) per doc,
        // O(n^2) overall. `exit` becomes true when a near-duplicate exists.
        bool exit=false;
        for(auto& i: hashvec)
        {
            if(Simhasher::isEqual(u64,i))
            {
                exit=true;
                break;
            }
        }
        if(!exit)
        {
            // Keep the page: renumber it and rebuild its XML block, wrapping
            // the content in CDATA so markup inside it cannot break parsing.
            ++times;
            ostringstream oss ;
            oss<<"<doc>\n\t<docid>"<<times<<
                "</docid>\n\t<title>"<<title<<
                "</title>\n\t<link>"<<link<<
                "</link>\n\t<content><![CDATA["<<content<<"]]></content>\n</doc>\n";
            essay=oss.str();
            art.emplace_back(essay);
            hashvec.emplace_back(u64);

            // Inclusive byte range this block will occupy in newripe.data
            // (end starts at -1, so the first range is [0, size-1]).
            begin =end+1;
            end += essay.size();
            _offsetlib[times]={begin,end};

            // Flush every 2000 kept docs so `art` never grows unbounded.
            if(times%2000==0)
            {
                std::ofstream ofs("/home/sakura/all_file/search_engine/data/newripe.data",std::ios::app); 
                for(auto&i: art)
                {
                    ofs<<i;
                }
                ofs.close();
                art.clear();
            }
        }
             itemNode=itemNode->NextSiblingElement("doc");
    }
            delete []sentence;
    }
            // Flush whatever remains after the last full batch of 2000.
            if(times%2000!=0)
            {
                std::ofstream ofs("/home/sakura/all_file/search_engine/data/newripe.data",std::ios::app); 
                for(auto&i: art)
                {
                    ofs<<i;
                }
                ofs.close();
            }
            cout<<"store on disk\n";
            storeOnDisk();
}

// Build the inverted index: word -> [(docid, normalized TF-IDF weight)].
// Flow: load newoffset.dat -> read newripe.data one doc at a time -> segment
// each doc's content into words -> drop stop words and tokens not starting
// with a 3-byte UTF-8 (CJK) character -> count term frequencies -> weight
// with TF * log2(N / (df + 1)) -> L2-normalize per document -> dump
// _invertindexlib as JSON into invertIndex.dat.
void PageLibPreprocessor::buildinvertindexMap()
{

    ifstream offifs("/home/sakura/all_file/search_engine/data/newoffset.dat");
    if(!offifs)
    {
        cerr<<" open offset.dat failed\n";
        return;
    }

    // Slurp the offset file and parse it as JSON ([[docid,[begin,end]],...]).
    offifs.seekg(0,std::ios::end);
    size_t length=offifs.tellg();
    offifs.seekg(0,std::ios::beg);
    char * msg = new char[length+1]{0};
    offifs.read(msg,length);
    nlohmann::json json=nlohmann::json::parse(msg);
    delete []msg;
    map<int,pair<int,int>> off;      // docid -> (begin byte, end byte)
    vector<vector<string>> words;    // words[d] = tokens of doc d+1
    for(auto& item: json)
    {
        off[item[0]]={item[1][0],item[1][1]};
        // off.emplace(item.key(),item.value());
    }
    ifstream ifs("/home/sakura/all_file/search_engine/data/newripe.data");
    if(!ifs)
    {
        std::cerr << "loadFile fail  186\n" ;
        return;
    }
    vector<string> art;
    XMLDocument doc;
    // One doc per iteration: the cursor arithmetic mirrors
    // cutRedundantPage's chunked loop but with chunk size 1. The read is
    // sequential (no seek), so docs must be back-to-back in newripe.data.
    for(size_t i=1; i<=off.size();++i)
    {
        size_t cursor;
        if(i+0>=off.size())
        {
            cursor=off.size();
        }
        else
        {
            cursor=0+i;
        }
        size_t len =off[cursor].second-off[i].first+1;
        i =cursor;
        char* sentence = new char[len+1]{0};
        ifs.read(sentence,len);
    // tinyxml2: non-zero return means parse failure.
    XMLError ret = doc.Parse(sentence);
    if(ret)
    {
        cerr<<" parse failed\n";
        delete []sentence;
        return;
    }

    XMLElement* itemNode=doc.FirstChildElement("doc");
    while(itemNode)
    {
        // Segment the content; one token vector per document.
        string content = itemNode->FirstChildElement("content")->GetText();
        words.emplace_back(_wordCutter->cut(content));
        itemNode=itemNode->NextSiblingElement("doc");
    }
        delete []sentence;
    }
    // Load the Chinese stop-word list, one word per line.
    set<string> stop;
    ifstream stopcnfis("/home/sakura/all_file/search_engine/static/stop/stop_words_zh.txt");
    string tmp;
    while(std::getline(stopcnfis,tmp))
    {
        // NOTE(review): pop_back() presumably strips a trailing '\r' from a
        // CRLF file; on an LF-only file it chops the last character of each
        // stop word, and it is UB on an empty line -- confirm the file format.
        tmp.pop_back();
        stop.emplace(tmp);
    }
    // map<word, map<docid, term frequency>>
    map<string,map<int,int>> TFD;
    // map<docid, vector<pair<word, weight before normalization>>>
    map<int,vector<pair<string,double>>> weight;
    for(size_t vec =0 ;vec<words.size();++vec)
    {
        for(size_t i= 0 ;i<words[vec].size();++i)
        {
            if(stop.find(words[vec][i])==stop.end())
            { 
                string word= words[vec][i];
                // Keep only tokens whose first character is a 3-byte UTF-8
                // sequence (i.e. CJK); everything else is skipped.
                int byte= getbytenum_utf8(word[0]);
                if(byte==3)
                {
                    // Validate continuation bytes (must match 10xxxxxx).
                    // NOTE(review): word[j+byte] indexes bytes 4..5, i.e.
                    // bytes of the NEXT character; this looks like it was
                    // meant to be word[j] (bytes 1..2 of the current
                    // character) -- confirm before changing.
                    bool ex=false;
                    for(int j=1;j<byte;++j)
                    {
                        if(byte+j>=word.size()||(word[j+byte]&0xC0)!=0x80)
                        {
                            ex=true;
                            break;
                        }
                    }
                    if(ex)
                    {
                        continue;
                    }
                ++TFD[words[vec][i]][vec+1];   // docids are 1-based
                }
                // cout<< TFD[words[vec][i]][vec]<<" "<<vec<<" "<<words[vec][i];
            }
        }
    }
    int all = words.size();   // total number of documents (N)

    for(auto& i: TFD)
    {
        // Number of documents this word appears in (document frequency).
        size_t frequency = i.second.size();
        // cout<<frequency<<"freq\n";
        for(auto& j: i.second)
        {
            // TF-IDF-style weight: tf * log2(N / (df + 1)).
            // NOTE(review): all/(frequency+1) is integer division, so the
            // IDF factor is quantized (0 whenever df+1 > N/2, making wi==0
            // and the posting dropped below) -- confirm whether
            // log2(double(all)/(frequency+1)) was intended.
            double wi = j.second*log2(all/(frequency+1));
            // if(log2(all/(frequency+1))==0)
            // {
            //     cout<< all<<" "<<frequency<<"\n";
            // }
            if(wi!=0)
            {
                // cout<<" i ==0"<<i.first<<"\n";
            // cout<<"\n"<<i.first<<" "<<wi<<"  "<<frequency<<" "<<j.second<<" "<<j.first<<" weight\n";
            weight[j.first].push_back({i.first,wi});
            // if(j.first>3640)
            // {
            //     cout<<i.first<<" "<<wi<<" "<<j.first<<"\n";
            // }
            }
        }
    }
    // L2 normalization: first the squared norm of each document's weight
    // vector. `weight` is an ordered map, so avg[k] corresponds to the
    // (k+1)-th smallest docid present; avg[pa.first-1] below additionally
    // assumes docids with weights are contiguous 1..N -- a gap would
    // misalign the norms. NOTE(review): the lambda also takes its pair by
    // value, copying each document's vector -- a const& would avoid that.
    vector<double> avg;

    for_each(weight.begin(),weight.end(),[&avg](pair<int,vector<pair<string,double>>> rhs){
             double temp=0;
             for(auto & i: rhs.second)
             {
                // cout<<i.second<<"\n";
             temp += i.second*i.second;
             }
            //  cout<<" temp: "<<temp<<"\n";
             avg.emplace_back(temp);
             });
    for(auto & pa:weight)
    {
        double sqr = sqrt(avg[pa.first-1]);
        if(sqr!=0)
        {
        for(auto & i:pa.second)
        {
            double w=i.second/sqr;
            _invertindexlib[i.first].emplace_back(pa.first,w);
        }
        }
    }
    // Persist the inverted index as JSON.
    std::ofstream invertofs("/home/sakura/all_file/search_engine/data/invertIndex.dat");
    nlohmann::json jsoninvert = _invertindexlib;
    invertofs<<jsoninvert.dump();
//    for(auto& i:_invertindexlib)
//    {
//        for(auto& j:i.second)
//        {
//            invertofs<<i.first<<" "<<j.first<<" "<<j.second<<"\n";
//        }
//    }
    invertofs.close();
    cout<<" store 倒排\n";
}

void PageLibPreprocessor::storeOnDisk()
{
    std::ofstream ofs("/home/sakura/all_file/search_engine/data/newoffset.dat");
    nlohmann::json json = _offsetlib;
    string tmp =json.dump();
    ofs<<tmp;
    ofs.close();
    cout<<" store 去重\n";
}
