#ifndef DOWNLOADPARSER_H_
#define DOWNLOADPARSER_H_

#include <iostream>
#include <stdio.h>
#include <string>
#include <list>

#include <curl/curl.h>
#include <curl/types.h>   // NOTE: removed in libcurl >= 7.21.7; drop this line when building against a modern libcurl
#include <curl/easy.h>

#include <boost/tr1/memory.hpp>
#include <boost/thread/mutex.hpp>

#include "URL.h"
#include "../classes_dados/URLNormalizer.h"
#include "../classes_dados/Repository.h"

using namespace std; //temporario

// CURL API reference:  http://curl.haxx.se/libcurl/c/

class DownloadParser {
public:
	DownloadParser(){  
//		curl = curl_easy_init();   // dentro das funcoes
		s_href = "href";
		pagina_at = site_url;
		disco = Repository::getInstance();
	}
	~DownloadParser(){ 
//		curl_easy_cleanup(curl);  // dentro das funcoes
	}
	void setURL(string _url){ site_url = _url; }
	string getHeader(void);
	void cleanBuffer(){ data_site = ""; header_site = ""; }
	string getRobot(void);
	list<URL> getNewURLs(void);
	string achaDominio (string str);
	list<URL> eliminaManjados(list<URL> linkList);
	list<URL> parser(string buffer_page);
	
private:
	static size_t WriteMemoryCallback(void *ptr, size_t size, size_t nmemb, void *data);
	static size_t headerCallback(void *ptr, size_t size, size_t nmemb, void *userp);
	int curl_config(void);
	void rejeita_links(std::list <std::string> & lista_in);
	string getPage(void);	
	struct data { char trace_ascii; /* 1 or 0 */ };
	string data_site;
	string header_site;
	string site_url;
	CURL *curl;
	int res_code;
	URLNormalizer normalisa;
	std::list<URL> buffer_urls;
	std::string buffer_page;
	std::string url_rejeita;
	std::string pagina_at;
	std::string s_href;
	std::list<std::string> buffer_links;	
//	Repository disco;
	std::tr1::shared_ptr<Repository> disco;
	static int numSitesUrl;
};




#endif /*DOWNLOADPARSER_H_*/
