#include "../../include/PageLib/SimHash.h"

#include <cstdlib>
#include <iostream>
#include <utility>

// Build the internal simhash engine from the four resource paths stored in
// the configuration (jieba dictionary, HMM model, IDF weights, stop words).
// NOTE(review): if getConfMap() returns a mutable map, operator[] will
// default-insert an empty string for a missing key and the engine would be
// fed an empty path — assumes all four keys are present; verify config load.
SimHash::SimHash(Configuration* conf)
:_simHasher(conf->getConfMap()["dictPath"],
    conf->getConfMap()["hmmPath"],
    conf->getConfMap()["idfPath"],
    conf->getConfMap()["stopWordPath"])
{
    // Construction trace (debug aid).
    cout << "SimHash" << endl;
}
// Destruction trace (debug aid); no resources to release — both maps and
// the hasher clean themselves up.
SimHash::~SimHash()
{
    cout << "~SimHash" << '\n' << flush;
}


void SimHash::addPage(int id,WebPage page)
{
    uint64_t hashvalue;
    if(!_simHasher.make(page._docContent,5,hashvalue))
    {
        //日志
        perror("SimHash::addPage : addpage error!");
        exit(1);
    }

    _pageMap[id] = page;
    _simhashMap[id] = hashvalue;
}
 
/**
 * @brief Remove near-duplicate pages from _pageMap.
 *
 * Two pages are near-duplicates when _simHasher.isEqual(hash1, hash2, 3)
 * holds on their simhash fingerprints (Hamming-distance threshold 3 —
 * whether the bound is strict depends on SimHasher; see its docs).
 * For each duplicate cluster the page with the smallest id is kept and
 * the rest are erased.
 *
 * @return reference to the deduplicated _pageMap (pages that survive).
 */
unordered_map<int,WebPage>& SimHash::RemoveDulPage()
{
    // ids of pages flagged as duplicates of a kept page
    set<int> duplicatePageId;

    // isEqual() is symmetric, so compare every unordered pair exactly once
    // (id2 > id1). This halves the comparisons of the naive double loop and
    // makes the survivor of each cluster (the smallest id) deterministic —
    // previously it depended on unspecified unordered_map iteration order.
    for (const auto &outer : _simhashMap)
    {
        const int id1 = outer.first;
        // A page already flagged as a duplicate cannot flag others.
        if (duplicatePageId.count(id1) > 0)
        {
            continue;
        }
        const uint64_t hash1 = outer.second;

        for (const auto &inner : _simhashMap)
        {
            const int id2 = inner.first;
            // Skip self, the mirrored ordering, and already-flagged pages.
            if (id2 <= id1 || duplicatePageId.count(id2) > 0)
            {
                continue;
            }
            // Within the Hamming-distance threshold => near-duplicate.
            if (_simHasher.isEqual(hash1, inner.second, 3))
            {
                duplicatePageId.insert(id2);
            }
        }
    }

    cout << "总的网页数量：" << _pageMap.size() << endl;
    cout << "重复的网页数量：" << duplicatePageId.size() << endl;

    // Erase the flagged pages; each cluster's keeper stays in _pageMap.
    for (const int pageid : duplicatePageId)
    {
        _pageMap.erase(pageid);
    }

    return _pageMap;
}