#include "Url.hpp"
#include "Page.hpp"
#include "Host.hpp"
#include "Http.hpp"
#include "UrlFrontier.hpp"
#include "Crawl.hpp"
#include "TianwangFile.hpp"
#include "util/CheckEncoding.hpp"

#include <fstream>
#include <iostream>
#include <queue>
#include <string>
#include <vector>

using namespace std;

void
UrlTest()
{
    Url url;
    string url_key;
    while (cin >> url_key) {
        url.ParseUrl(url_key);
        cout << "Orgin Url: " << url.mOrginUrl << endl
             << "Host :" << url.mHost << endl
             << "Path :" << url.mPath << endl
             << "Port :" << url.mPort << endl
             << "IP :" << url.mIP << endl
             << endl;
    }
}

void
PageTest()
{
    Page page;
    string page_content, tmp;
    while (getline(cin, tmp)) {
        page_content += tmp;
    }
    page.SetOrginUrl("http://hahaha.com/");
    page.SetContent(page_content);
    page.ParseHyperLinks();
    vector<string> links = page.GetUrlFromPage();
    vector<string>::iterator it;
    for (it=links.begin(); it!=links.end(); ++it) {
        cout << *it << endl;
    }
    //     cout << page_content << endl;
}

void
HostTest()
{
    Host host;
    string tmp;
    cin >> tmp;
    Host_Elem elem = host.GetElem(tmp);
    cout << elem.IP << endl;
    for (unsigned i=0; i<elem.RobotRules.size(); ++i) {
        cout << elem.RobotRules[i] << endl;
    }
}

void
HttpTest()
{
    Url url;
    string tmp;
    cin >> tmp;
    url.ParseUrl(tmp);

    Host host;
    Host_Elem elem = host.GetElem(url.mHost);
    url.mIP = elem.IP;

    Http http;
    //     http.SetTimeOut(100);
    Page page;
    cout << "FetchUrl: " << http.FetchUrl(url, page) << endl;

    cout << page.GetHeader() << endl;
    cout << page.GetContent() << endl;
}

void
UrlFrontierTest()
{
    Host host;
    UrlFrontier url_frontier("./result/url.list", 10);
    string tmp;
    while (getline(cin, tmp)) {
        url_frontier.AddUrl(tmp);
    }

    vector<queue<Url> > task_queue;

    cout << url_frontier.GetTaskQueue(task_queue, host) << endl;

    for (unsigned i=0; i<task_queue.size(); ++i) {
        cout << "Queue " << i << endl;
        while (!task_queue[i].empty()) {
            Url url = task_queue[i].front();
            task_queue[i].pop();
            cout << "\t" << url.mOrginUrl << endl;
        }
    }
}

void
CrawlTest()
{
    Host host;
    UrlFrontier url_frontier("./result/url.list", 10);
    string tmp;
    while (getline(cin, tmp)) {
        url_frontier.AddUrl(tmp);
    }

    vector<queue<Url> > task_queue;

    cout << url_frontier.GetTaskQueue(task_queue, host) << endl;

//     for (unsigned i=0; i<task_queue.size(); ++i) {
//         cout << "Queue " << i << endl;
//         while (!task_queue[i].empty()) {
//             Url url = task_queue[i].front();
//             task_queue[i].pop();
//             cout << "\t" << url.mOrginUrl << endl;
//         }
//     }


    Crawl crawl("./Path/");
    cout << crawl.ImportTaskQueue(task_queue) << endl;
    cout << crawl.StartCrawling() << endl;
}

void CompressTest()
{
    TianwangFile aFile;
    string str1="I Love China";
    aFile.Test(str1);
}

void TianwangFileTest()
{
    TianwangFile file;
    string tmp;
    cin >> tmp;
    ifstream fin(tmp.c_str());
    Page page;
    if (file.Read(fin, page) != -1) {
        cout << page.GetContent() << endl;
    }
}

void TestCheckEncoding()
{
    string page_content, tmp;
    while (getline(cin, tmp)) {
        page_content += tmp;
    }
    CheckEncoding check_encoding;
    tmp = check_encoding.GetEncoding(page_content);
    cout << tmp << endl;
}

int
main(int argc, char** argv)
{
    // Manual test harness: uncomment exactly one driver below and
    // rebuild to exercise that component.  Each driver reads its
    // input from stdin (see the individual functions above).
//     UrlTest();
//     PageTest();
//     HostTest();
//     HttpTest();
//     UrlFrontierTest();
//     CrawlTest();
//     CompressTest();
//     TianwangFileTest();
//     TestCheckEncoding();
}
