//============================================================================
// Name        : WebCrawler.cpp
// Author      : Brendon Beebe
// Version     :
// Copyright   : 
//============================================================================


#include <iostream>
using namespace std;
#include<fstream>
#include "WebCrawler.h"
#include <algorithm>

//! Construct a crawler rooted at startingUrl; the queue is seeded with that url.
WebCrawler::WebCrawler(string startingUrl):stopWordCount(0){
	startUrl = new Url(startingUrl);

	urlqueu = new LinkedList<Url>;
	// BUG FIX: the original new'd a temporary Url here and never deleted it,
	// leaking one Url per construction. Insert takes the Url by reference, so
	// a stack object suffices (assumes LinkedList::Insert stores a copy —
	// consistent with the value-typed LinkedList<Url>; TODO confirm).
	Url seed(startingUrl);
	urlqueu->Insert(seed);

	visitedLinks = new BST();
	excludedLinks = new BST();
	myIndex = new Index();
}
//! Release every heap structure the crawler owns.
//! NOTE(review): stopWords is not deleted here (a commented-out delete[]
//! existed previously) — presumably it is a fixed-size member array declared
//! in WebCrawler.h; confirm it is not heap-allocated.
WebCrawler::~WebCrawler(){
	delete visitedLinks;
	visitedLinks = NULL;

	delete urlqueu;
	urlqueu = NULL;

	delete excludedLinks;
	excludedLinks = NULL;

	delete myIndex;
	myIndex = NULL;

	delete startUrl;
	startUrl = NULL;
}

//!Initiate the crawl starting with the first url
//! Initiate the crawl starting with the first url.
//! Processes the queue until empty: each url is fetched as a Page, recorded,
//! indexed, and mined for further links.
//! @return true when the queue has been fully drained.
bool WebCrawler::beginCrawl(){
	//While there are pages to be indexed
	while(!urlqueu->IsEmpty()){
		//Get the first URL in the queue
		const std::string path = urlqueu->GetLast()->GetValue().ToString();
		//Then remove it from the list
		urlqueu->Remove(urlqueu->GetLast());

		// Stack allocation instead of new/delete: the Page cannot leak even
		// if an indexing call below throws.
		Page temppage(path);

		saveToVisited(&temppage, path);
		extractWords(&temppage, path);
		extractUrls(&temppage);

		cout << "Description:" <<  *temppage.GetDescription() << " ----- Url:"<< *temppage.GetBaseurl()<< endl;
	}

	// BUG FIX: the function is declared bool but originally fell off the end
	// without returning — undefined behavior for any caller reading the result.
	return true;
}

//!Prints to the output file
//! Write the crawl results as XML to the file at loc:
//! start url, visited-page descriptions, then the word index.
void WebCrawler::printXml(string& loc){
	ofstream out(loc.c_str());

	out << "<website><start-url>" << startUrl->ToString() << "</start-url>" << endl;
	out << "<pages>"<< visitedLinks->toXmlDescString(*visitedLinks->GetRoot())<<"</pages>" << endl;
	out << myIndex->toXmlString() << endl;
	out << "</website>" << endl;

	out.close();
}

//!Save the visited links with a description to print out later
//! Record a visited url (with its page description) for the XML report.
//! Urls whose last character is a path separator are skipped.
void WebCrawler::saveToVisited(Page * temppage, const string & path){
	const size_t lastSep = path.find_last_of("/\\");

	// Guard: directory-style urls (trailing slash) are not recorded.
	if(lastSep == (path.length()-1))
		return;

	BSTNode * inserted = visitedLinks->Insert(path);
	if (inserted != NULL)
		inserted->setDescription(*temppage->GetDescription());
}
//! qsort/bsearch-style comparator for std::string elements.
//! @param ap pointer to the first std::string
//! @param bp pointer to the second std::string
//! @return <0, 0, or >0 per std::string::compare ordering
int compare(const void *ap, const void *bp)
{
    // References + static_cast instead of C-style casts: the original
    // copied BOTH strings on every comparison (one allocation pair per
    // bsearch probe) for no reason.
    const string & a = *static_cast<const string *>(ap);
    const string & b = *static_cast<const string *>(bp);
    return a.compare(b);
}
//!Extract the words from a page and add them to the index
//! Extract the words from a page and add the non-stop-words to the index.
//! Precondition (same as the original bsearch): stopWords[0..stopWordCount)
//! is sorted ascending — see loadStopWords.
void WebCrawler::extractWords(Page * temppage, const string & path){
	for(int i=0;i<temppage->getWordCount();i++){
		// Copy once and reuse; the original re-evaluated getAllWords()[i]
		// a second time for the Insert call.
		string key = temppage->getAllWords()[i];

		// std::binary_search instead of C bsearch: type-safe, no void*
		// casts, and no per-probe string copies inside the comparator.
		// operator< ordering matches string::compare, so the result is
		// identical to the old compare()-based search.
		if(!std::binary_search(stopWords, stopWords + stopWordCount, key))
			myIndex->Insert(key, path);
	}
}

//!Go through the pages URL and add them if they follow the prerequisites
//! Go through the page's urls and queue each one that is inside the crawl
//! root and not already seen. Every extracted url (queued or not) is recorded
//! in excludedLinks so it is never queued twice.
void WebCrawler::extractUrls(Page * temppage){
	while(!temppage->getUrlList()->IsEmpty() ){

		LLNode<Url> * topUrl = temppage->getUrlList()->GetFirst();
		//Add URL to the list only if it's
		//!in the base url and
		//!if it's not a repeat
		std::string path = topUrl->GetValue().GetPath();
		std::string fullPath = topUrl->GetValue().ToString();
		if(IsInWebsite(path) && !IsVisited(fullPath)){
			// BUG FIX: the original new'd a Url here and never deleted it,
			// leaking one Url per queued link. Insert takes a reference, so
			// a stack object suffices (assumes LinkedList::Insert stores a
			// copy — TODO confirm against LinkedList).
			Url queued(topUrl->GetValue().ToString(), this->startUrl->ToString());
			urlqueu->Insert(queued);
		}
		// Record the url so IsVisited rejects it next time; the returned
		// node was stored in an unused local in the original.
		excludedLinks->Insert(fullPath);
		//Remove Url from page list (Remove deallocates the node)
		temppage->getUrlList()->Remove(topUrl);

	}
}

//!Is the url in the website we want to crawl?
//! Is the url inside the website we want to crawl?
//! The crawl root is the start url's path truncated at its last '/'.
//! (Assumes Url::GetPath is side-effect free, so calling it once is
//! equivalent to the original's two calls.)
bool WebCrawler::IsInWebsite(string & url){
	const string startPath = startUrl->GetPath();
	const string crawlRoot = startPath.substr(0, startPath.find_last_of('/'));
	return StringUtil::IsPrefix(url, crawlRoot);
}

//!Has this url been indexed already?
//! Has this url been seen already?
//! Checks excludedLinks, where extractUrls records every url it pulls
//! off a page (queued or not).
bool WebCrawler::IsVisited(string & url){
	return excludedLinks->Find(url) != NULL;
}

//!Load the stop words files into memory
//! Load the stop-words file into memory, one word per line.
//! @param loc path to the stop-words file
//! @return true on success, false if the file could not be opened
//! NOTE(review): no bounds check against the stopWords array capacity —
//! confirm the declared size in WebCrawler.h covers the largest input file.
bool WebCrawler::loadStopWords(string loc){
	std::ifstream myFile(loc.c_str());
	// BUG FIX: the original returned true even when the file failed to open,
	// silently leaving the stop-word list empty.
	if(!myFile.is_open())
		return false;

	std::string line;
	int i=0;
	while (std::getline(myFile, line)){
		stopWordCount++;
		StringUtil::Trim(line);
	    stopWords[i++]=(line);
	}

	// BUG FIX: extractWords binary-searches this array (bsearch), which
	// requires sorted input; the original used raw file order, so lookups
	// failed whenever the file was not already sorted. operator< ordering
	// matches the string::compare comparator used by the search.
	std::sort(stopWords, stopWords + stopWordCount);
	return true;
}
