#include "fetcher.h"

#include <boost/regex.hpp>
#include <iostream>

namespace crawler { namespace client {
using namespace server;
namespace {
// libcurl CURLOPT_WRITEFUNCTION callback: append the received body
// bytes to the Page being filled and report them all as consumed.
size_t WritePage(const char *in, size_t size, size_t nmemb, Page *out) {
  const size_t total = size * nmemb;
  out->content.append(in, total);
  return total;
}
// Header-matching patterns for WriteHeader below.  HTTP header field
// names are case-insensitive (RFC 7230), and HTTP/2 servers emit them
// lowercase ("location:"), so both patterns must use boost::regex::icase.
// "Location: <url>\r\n" — captures an absolute redirect target.
boost::regex location_header_regex("Location:[[:space:]]*?(http.*?)(\\r|\\n)+",
                                   boost::regex::icase);
// Status line, e.g. "HTTP/1.1 200 OK\r\n" — captures the 3-digit code.
boost::regex status_code_header_regex("HTTP.+?[[:space:]]+?([[:digit:]]{3})[[:space:]]+?.+?(\\r|\\n)+",
                                      boost::regex::icase);
size_t WriteHeader(const char *in, size_t size, size_t nmemb, Page *out) {
  size_t r;
  r = size * nmemb;
  boost::match_results<const char *>what;
  if (regex_match(in, in + r, what, location_header_regex)) {
    out->__set_to_url(std::string(what[1].first, what[1].second));
  } else if (regex_match(in, in + r, what, status_code_header_regex)) {
    std::string http_code = std::string(what[1].first, what[1].second);
    out->__set_http_code(atoi(http_code.c_str()));
  }
  return(r);
}
}

void Fetcher::Work() {
  CURL *curl;
  char curl_errbuf[CURL_ERROR_SIZE];
  int err;

  curl = curl_easy_init();
  curl_easy_setopt(curl, CURLOPT_WRITEFUNCTION, WritePage);
  curl_easy_setopt(curl, CURLOPT_HEADERFUNCTION, WriteHeader);

  curl_easy_setopt(curl, CURLOPT_TIMEOUT, 60);
  //curl_easy_setopt(curl, CURLOPT_FORBID_REUSE, 1);
  curl_easy_setopt(curl, CURLOPT_NOPROGRESS, 0L);
  curl_easy_setopt(curl, CURLOPT_VERBOSE, 1L);
  curl_easy_setopt(curl, CURLOPT_ERRORBUFFER, curl_errbuf);

  curl_easy_setopt(curl, CURLOPT_ACCEPT_ENCODING, "gzip, deflate");
  curl_easy_setopt(curl, CURLOPT_AUTOREFERER, 1);
  curl_easy_setopt(curl, CURLOPT_FOLLOWLOCATION, 5);
  curl_easy_setopt(curl, CURLOPT_COOKIEFILE, "cookie");
  curl_easy_setopt(curl, CURLOPT_COOKIEJAR, "cookie");
  struct curl_slist *slist=NULL;
  slist = curl_slist_append(slist, "User-Agent: Mozilla/5.0 (Macintosh; Intel "
                                   "Mac OS X 10.7; rv:8.0.1) Gecko/20100101 Firefox/8.0.1");
  slist = curl_slist_append(slist, "Accept: text/html,application/xhtml+xml,"
                                   "application/xml;q=0.9,*/*;q=0.8");
  slist = curl_slist_append(slist, "Accept-Language: en-us,en;q=0.5");
  //slist = curl_slist_append(slist, "Accept-Encoding: gzip, deflate");
  slist = curl_slist_append(slist, "Accept-Charset: ISO-8859-1,utf-8;q=0.7,*;q=0.7");
  slist = curl_slist_append(slist, "Connection: keep-alive");
  curl_easy_setopt(curl, CURLOPT_HTTPHEADER, slist);

  for (size_t i=0; i< urls_.size(); ++i) {
    Page page;
    page.__set_url(urls_[i]);
    page.__set_download_time(time(0));

    curl_easy_setopt(curl, CURLOPT_URL, page.url.c_str());
    curl_easy_setopt(curl, CURLOPT_WRITEDATA, &page);
    curl_easy_setopt(curl, CURLOPT_WRITEHEADER, &page);
    err = curl_easy_perform(curl);

    if (err != 0) {
      std::cerr << "Failed to fetch, url=" << page.url
                << " err=" << curl_errbuf << std::endl;
      page.__set_status(PageStatus::kFailedToDownload);
    } else {
      page.__set_status(PageStatus::kSuccessDownload);
    }
    //std::cout << page.content << std::endl;
    pages_.push_back(page);
    sleep(interval_);
  }
  curl_slist_free_all(slist);
  curl_easy_cleanup(curl);
}
}}

#ifdef TEST
#include <iostream>
using namespace std;
// Fetcher lives in crawler::client and Page in server (see the
// namespaces above); main is outside them, so pull the names in.
using namespace crawler::client;
using namespace server;

// Manual smoke test: fetch the URL given on the command line and dump
// each fetched page's url, redirect target, and body to stdout.
int main(int argc, char **argv)
{
  if (argc < 2) {
    cerr << "usage: " << argv[0] << " <url>" << endl;
    return 1;
  }
  Fetcher fetcher;
  fetcher.AddUrl(argv[1]);
  fetcher.Work();
  std::vector<Page> &pages = fetcher.GetFetchedPages();
  for (size_t i = 0; i < pages.size(); ++i) {
    cout << pages[i].url << endl;
    cout << pages[i].to_url << endl;
    cout << pages[i].content << endl;
    cout << endl;
  }
  return 0;
}

#endif

