
#include "webcrawler.h"
#include "openhttp.h"
#include "SimpleHTMLParser.h"

#include <stdio.h>
#include <stdlib.h>
#include <string.h>

#define MAXCHAR 128

// Write one record per known URL: "<index> <url>\n<description>\n\n".
// Newlines inside the description are flattened to spaces so each record
// keeps its fixed three-line shape.
void WebCrawler::writeWordFile(const char *wordFileName)
{
	FILE *file = fopen(wordFileName, "w");
	if (file == NULL)
		return;                          // cannot open output — nothing to do
	for (int idx = 0; idx < _tailURL; idx++) {
		// URLs queued by onAnchorFound but not yet crawled may have no
		// description yet — substitute the empty string.
		const char *desc = _urlArray[idx]._description;
		char *temp = strdup(desc != NULL ? desc : "");
		// BUG FIX: the original indexed temp[idx] (the URL index) instead
		// of temp[jdx], so newlines were never actually replaced.
		size_t len = strlen(temp);
		for (size_t jdx = 0; jdx < len; jdx++) {
			if (temp[jdx] == '\n')
				temp[jdx] = ' ';
		}
		fprintf(file, "%d %s\n%s\n\n", idx, _urlArray[idx]._url, temp);
		// BUG FIX: strdup allocates with malloc, so it must be released
		// with free(); the original used delete (undefined behavior).
		free(temp);
	}
	fclose(file);
}



// Write the crawler's URL table to urlFileName.
void WebCrawler::writeURLFile(const char *urlFileName)
{
	FILE *file = fopen(urlFileName, "w");
	// BUG FIX: the original called fclose(NULL) when fopen failed,
	// which is undefined behavior.
	if (file == NULL)
		return;
	// TODO: Implement writeURLFile function
	fclose(file);
}



// Drain the URL queue: fetch each page, index its words, parse it for
// further anchors/content, and store the accumulated description.
void WebCrawler::crawl()
{
	while (_headURL < _tailURL) {
		// BUG FIX: the original passed a NULL int* to fetchHTML and then
		// dereferenced it — use a real local and pass its address.
		int length = 0;
		char *html = fetchHTML(_urlArray[_headURL]._url, &length);
		if (html == NULL) {
			// BUG FIX: the original 'continue'd without advancing
			// _headURL, spinning forever on the first failed fetch.
			_headURL++;
			continue;
		}
		findWords(html, length, _headURL);
		parse(html, length);
		// BUG FIX: terminate at dIdx, not dIdx+1 — the original left one
		// stray uninitialized byte between the content and the NUL.
		tdescription[dIdx] = 0;
		_urlArray[_headURL]._description = strdup(tdescription);
		clean(tdescription, dIdx);
		// clean() receives dIdx by value and cannot reset the member,
		// so the cursor must be rewound here.
		dIdx = 0;
		// NOTE(review): assumes fetchHTML returns a malloc'd buffer the
		// caller owns — confirm against openhttp.h. The original leaked it.
		free(html);
		_headURL++;
	}
}



// Build a crawler seeded with nurlRoots root URLs and room for maxUrls
// entries in total.
WebCrawler::WebCrawler(int maxUrls, int nurlRoots, const char **urlRoots)
{
	dIdx = 0;
	tdescription = new char[MAXCHAR];
	tdescription[0] = 0;
	_maxUrls = maxUrls;
	// calloc (not malloc) so the _url/_description fields of slots that
	// have not been crawled yet read as NULL instead of garbage.
	_urlArray = (URLRecord *)calloc(_maxUrls, sizeof(URLRecord));
	_headURL = 0;
	_tailURL = nurlRoots;
	_urlToUrlRecord = new HashTableTemplate<int>();
	// BUG FIX: _wordToURLRecordList was dereferenced below without ever
	// being allocated. Element type inferred from the insertItem/find
	// usage in findWords — TODO confirm against webcrawler.h.
	_wordToURLRecordList = new HashTableTemplate<URLRecordList *>();
	for (int idx = 0; idx < nurlRoots; idx++) {
		_urlArray[idx]._url = strdup(urlRoots[idx]);
		_urlToUrlRecord->insertItem(urlRoots[idx], idx);
		// BUG FIX: the original chained dummy nodes whose
		// _urlRecordIndex was never set and whose final _next was left
		// uninitialized. Insert one self-contained node per root.
		URLRecordList *node = new URLRecordList;
		node->_urlRecordIndex = idx;
		node->_next = NULL;
		_wordToURLRecordList->insertItem(urlRoots[idx], node);
	}
}

void WebCrawler::onAnchorFound(char *url) {
	_urlArray[_tailURL]._url = strdup(url);
	_urlToUrlRecord->insertItem(url, _tailURL);
	_tailURL++;
}

void WebCrawler::onContentFound(char c) {
	tdescription[dIdx] = c;
	dIdx++;
}

void WebCrawler::clean(char *arry, int index) {
	if (index != -1)
		index = 0;
	for (unsigned int idx = 0; idx < sizeof(arry)/sizeof(char); idx++)
		arry[idx] = 0;
}

void WebCrawler::findWords(char *words, int characters, int url) {
	char *temp = (char*)malloc(sizeof(char)*MAXCHAR);
	int jdx = 0;
	for (int idx = 0; idx < characters; idx++) {
		if (words[idx] == ' ') {
			temp[jdx] = 0;
			URLRecordList *list;
			URLRecordList *tNode;
			if (_wordToURLRecordList->find(temp, &list)) {
				while (tNode->_next != NULL && tNode->_urlRecordIndex != url) {
					tNode = tNode->_next;
				}
				if (tNode->_urlRecordIndex != url) {
					URLRecordList *node = new URLRecordList;
					node->_next = NULL;
					node->_urlRecordIndex = url;
					tNode->_next = node;
				}
			}
			else {
				list = new URLRecordList;
				list->_next = NULL;
				list->_urlRecordIndex = url;
				_wordToURLRecordList->insertItem(temp, list);
			}
		}
		else
			temp[jdx] = words[idx];
	}
}
