#include "pch.h"
#include "crawler.h"

#include <algorithm>




// Constructs a crawler that pulls URLs from urlProvider, fetches them through
// an HTTP manager, and hands accepted replies to dataProvider.
// NOTE(review): initializer order must match member declaration order in the
// header (not visible here) — do not reorder without checking crawler.h.
Crawler::Crawler(services::provider::const_ptr provider,
				 DataWriter::Ptr dataProvider,
				 UrlReader::Ptr urlProvider,
				 QString userAgent,
				 int parallelRequests)
	: m_userAgent(userAgent)
	, m_numParallelRequests(parallelRequests)

	// Scheduler service and the task that drives run() on the network loop.
	, m_scheduler(provider->service<services::scheduler>())
	, m_networkTask(new services::task(boost::bind(&Crawler::run, this)))

	, m_manager(ProxySettings())
	, m_dataProvider(dataProvider)
	, m_urlProvider(urlProvider)

	// Lifetime statistics start at zero.
	, m_completedRequests(0)
	, m_filteredReplies(0)

	// Current-rate samples start at zero.
	, m_currentRequests(0)
	, m_currentDownload(0)
	, m_currentUpload(0)
{
	// Route every finished transfer into requestFinished() for bookkeeping.
	m_manager.finished().connect(boost::bind(&Crawler::requestFinished, this, _1));
}
///////////////////////////////////////////////////////////////////////////////////////////////////

// Out-of-line destructor; members clean themselves up (RAII/smart pointers).
Crawler::~Crawler()
{}
///////////////////////////////////////////////////////////////////////////////////////////////////




// Registers a reply filter; duplicate insertions are absorbed by the set.
void Crawler::add(DataFilter::const_Ptr filter)
{
	m_filters.insert(filter);
}
///////////////////////////////////////////////////////////////////////////////////////////////////

// Unregisters a previously added reply filter; a no-op if it is not present.
void Crawler::remove(DataFilter::const_Ptr filter)
{
	Filters::iterator pos = m_filters.find(filter);
	if(pos == m_filters.end())
		return;

	m_filters.erase(pos);
}
///////////////////////////////////////////////////////////////////////////////////////////////////




// Returns the URLs of all requests currently in flight.
QList<QUrl> Crawler::activeRequests()
{
	QList<QUrl> result;

	Replies::iterator it  = m_activeRequests.begin();
	Replies::iterator end = m_activeRequests.end();
	for(; it != end; ++it)
		result.append((*it)->request().url());

	return result;
}
///////////////////////////////////////////////////////////////////////////////////////////////////

// Total number of requests completed over the crawler's lifetime.
quint64 Crawler::numCompletedRequests() const
{
	return m_completedRequests;
}
///////////////////////////////////////////////////////////////////////////////////////////////////

// Total number of replies rejected by the filter chain.
quint64 Crawler::numFilteredReplies() const
{
	return m_filteredReplies;
}
///////////////////////////////////////////////////////////////////////////////////////////////////

// Number of requests currently in flight.
int Crawler::numActiveRequests() const
{
	return m_activeRequests.size();
}
///////////////////////////////////////////////////////////////////////////////////////////////////




// Current request rate sample (requests/sec; updated by the disabled
// update() bookkeeping — see update() below).
float Crawler::requestRate() const
{
	return m_currentRequests;
}
///////////////////////////////////////////////////////////////////////////////////////////////////

// Current download rate sample (presumably KiB/s — TODO confirm; the
// disabled update() code divides bytes by 1024).
float Crawler::downloadRate() const
{
	return m_currentDownload;
}
///////////////////////////////////////////////////////////////////////////////////////////////////

// Current upload rate sample; no visible code ever updates m_currentUpload,
// so this currently always returns its initial value of 0.
float Crawler::uploadRate() const
{
	return m_currentUpload;
}
///////////////////////////////////////////////////////////////////////////////////////////////////




// Network-task entry point: tops up the manager's request pipeline from the
// URL provider, then lets the manager pump its I/O.
void Crawler::run()
{
	// Keep a small backlog of pending requests queued in the manager.
	// NOTE(review): the backlog cap of 10 is hard-coded; m_numParallelRequests
	// may have been intended here — confirm before changing.
	while(m_manager.pendingRequests() <= 10)
	{
		boost::optional<QUrl> url = m_urlProvider->next();

		// Fix: previously the loop kept spinning forever when the provider
		// returned no URL, since pendingRequests() could never grow.
		if(!url)
			break;

		get(*url);
	}

	m_manager.run();
}
///////////////////////////////////////////////////////////////////////////////////////////////////

void Crawler::get(const QUrl &url)
{
	if(!url.isValid())
		return;

	HttpRequest request(url);
	HTTPReply::Ptr reply = m_manager.get(request);
	m_activeRequests << reply;
}
///////////////////////////////////////////////////////////////////////////////////////////////////

//
// Policy: a reply passes only when every registered filter accepts it;
//         a single rejection fails the whole reply.
//
Filter::Result Crawler::filter(const Reply& reply) const
{
	Filters::const_iterator it  = m_filters.begin();
	Filters::const_iterator end = m_filters.end();

	for(; it != end; ++it)
	{
		if(!(*it)->filterReply(reply))
			return Filter::Fail;
	}

	return Filter::Pass;
}
///////////////////////////////////////////////////////////////////////////////////////////////////




// Slot invoked by the manager for every finished transfer: removes the reply
// from the active set, updates the statistics counters, runs the filter chain
// and stores accepted replies via the data provider.
void Crawler::requestFinished(TCPSocket::Ptr networkReply)
{
	// Only HTTP replies are of interest; other socket types are ignored.
	HTTPReply::Ptr httpReply = boost::shared_dynamic_cast<HTTPReply>(networkReply);
	if(!httpReply)
		return;

	// Unlike Qt, the manager owns the reply — no manual delete required.
	Replies::iterator i =
		std::find(m_activeRequests.begin(), m_activeRequests.end(), httpReply);

	// A reply we never tracked is ignored.
	if(i == m_activeRequests.end())
		return;

	m_activeRequests.erase(i);

	// Fix: m_completedRequests was never incremented anywhere, so
	// numCompletedRequests() always reported 0.
	++m_completedRequests;

	// If the request failed there is nothing to process.
	if(httpReply->error())
		return;

	Reply reply(httpReply->request().url(), httpReply->body());
	if(filter(reply) == Filter::Fail)
	{
		// Fix: likewise, m_filteredReplies was never updated.
		++m_filteredReplies;
		return;
	}

	m_dataProvider->insert(reply);
}
///////////////////////////////////////////////////////////////////////////////////////////////////

// Periodic bookkeeping hook. NOTE(review): the entire body is disabled legacy
// code referencing members not visible in this file (m_paused, m_replyQueue,
// m_urlQueue, m_timer, m_downloads, m_requests); it is kept for reference
// only, so update() is currently a no-op. Because it never runs, the rate
// getters above always return their initial values.
void Crawler::update()
{
	/*
	if(!m_paused)
	{
		if(m_replyQueue.mustFlush())
			m_replyQueue.flush();

		if(m_activeRequests.size() < m_numParallelRequests)
		{
			int count = m_numParallelRequests - m_activeRequests.size();
			boostext::auto_list<QUrl> urls = m_urlQueue.pop(count);

			foreach(const QUrl &url, urls)
			{
				sendRequest(url);
			}
		}
	}


	float elapsed = m_timer.interval();
	int download = 0;
	QTime currentTime = QTime::currentTime();

	for(QList<QPair<int,QTime>>::iterator i = m_downloads.begin(); i != m_downloads.end();)
	{
		if(i->second.msecsTo(currentTime) >= 1000)
		{
			i = m_downloads.erase(i);
		}
		else
		{
			download += i->first;
			++i;
		}
	}

	int requests = 0;
	for(QList<QTime>::iterator i = m_requests.begin(); i != m_requests.end();)
	{
		if(i->msecsTo(currentTime) >= 1000)
		{
			i = m_requests.erase(i);
		}
		else
		{
			++requests;
			++i;
		}
	}

	m_currentDownload = download / 1024;
	m_currentRequests = requests;
	*/
}
///////////////////////////////////////////////////////////////////////////////////////////////////
