// PageProcessor.h
#pragma once
#include <cstdint>   // uint64_t (hamming_distance, m_documents) — previously relied on transitive includes
#include <map>
#include <memory>
#include <set>
#include <string>
#include <vector>

#include "cppjieba/Jieba.hpp"
#include "simhash/Simhasher.hpp"

// Processes raw crawled web pages into the search-engine data files:
// a de-duplicated page library, a byte-offset library, and an inverted index.
class PageProcessor
{
public:
    PageProcessor();

    /// Run the full pipeline on the web pages under `dir`:
    /// extract -> deduplicate -> write page/offset libraries -> build inverted index.
    /// @param dir directory containing the raw crawled pages
    ///            (relative default assumes a specific working directory — see data layout)
    void process(const std::string& dir="../../../data/webpages");

private:
    // Parse the raw page files under `dir` into Document records.
    void extract_documents(const std::string& dir);

    // Remove near-duplicate documents (simhash fingerprints compared by
    // Hamming distance — see m_hasher / hamming_distance).
    void deduplicate_documents();

    // Write the page library and the per-page offset library.
    void build_pages_and_offsets(const std::string& pages="../../../data/pages.dat", 
        const std::string& offsets="../../../data/offsets.dat");

    // Build the inverted index (term -> {doc id -> weight}) and write it out.
    // NOTE(review): parameter renamed from the copy-pasted `pages`; this is a
    // declaration-only rename, so the out-of-line definition is unaffected.
    void build_inverted_index(const std::string& index_file="../../../data/inverted_index.dat");

    // Load the Chinese stop-word list used to filter tokens.
    void load_ch_stop_words(const std::string& filename="../../../data/stopwords/cn_stopwords.txt");

    // Number of differing bits between two simhash fingerprints.
    int hamming_distance(uint64_t hash1, uint64_t hash2);

protected:
    // One crawled page after extraction.
    struct Document 
    {
        int id;               // document id
        std::string link;     // source URL
        std::string title;
        std::string content;
    };

private:
    cppjieba::Jieba m_tokenizer;     // Chinese word segmenter
    simhash::Simhasher m_hasher;     // simhash fingerprint generator
    std::unique_ptr<std::vector<std::pair<uint64_t,Document>>> m_documents;    // (simhash, document) pairs
    std::unique_ptr<std::set<std::string>> m_stopWords;    // set (not vector) for efficient lookup
    std::unique_ptr<std::map<std::string, std::map<int, double>>> m_invertedIndex;  // term -> {doc id -> weight (presumably TF-IDF — confirm in .cpp)}
};