var Q = require('q');
var config = require('./config.json');
var request = require('request');
var http = require('http');
var utils = require('./utils.js');

/**
 * Crawler constructor.
 * Crawler()            — start URL and depth both come from config.
 * Crawler(depth)       — start URL from config, explicit max depth.
 * Crawler(url, depth)  — explicit start URL and max depth.
 *
 * BUG FIXES: the original called Array.prototype.slice(arguments) without
 * .call(), which always produced an empty array (and leaked `args` as an
 * implicit global); it referenced the misspelled `arg` in the 1-arg branch;
 * and in the 1-arg case it left the depth number in `initUrl`, pushing a
 * number into the URL queue instead of the configured start URL.
 */
function Crawler(initUrl, depth) {
	var args = Array.prototype.slice.call(arguments);
	if (args.length === 0) {
		initUrl = config.init_url;
		depth = config.max_depth;
	} else if (args.length === 1) {
		// Single argument is the depth; the start URL comes from config.
		depth = initUrl;
		initUrl = config.init_url;
	}

	this.depth = depth;
	// Parallel arrays: list[i] is a URL, depthList[i] is its crawl depth.
	this.list = [];
	this.depthList = [];

	this.list.push(initUrl);
	this.depthList.push(0);
	this.urlIndex = 0; // next index in `list` to fetch

	this.totalDone = 0;  // completed fetches
	this.currDepth = -1; // depth of the most recently started fetch
	this.loadCount = 0;  // fetches currently in flight
	this.finish = false; // set once the queue drains at max depth
}

// Shorthand for the Crawler prototype; all instance methods hang off `cp`.
var cp = Crawler.prototype;

/**
 * Dequeues the next {url, depth} entry, or returns null when the queue is
 * currently empty. When the queue is empty, no fetches are in flight, and
 * the crawl has reached the configured max depth, marks the crawl finished.
 *
 * Cleanups: strict equality instead of `==`, consistent use of `self`
 * (the original mixed `this.finish` with `self`), and the missing
 * semicolon after the object literal.
 */
cp.nextUrlItem = function() {
	var self = this;
	var item = null;
	if (self.list.length > self.urlIndex) {
		item = {
			url: self.list[self.urlIndex],
			depth: self.depthList[self.urlIndex]
		};
		self.urlIndex += 1;
	} else if (self.loadCount === 0 && self.currDepth === config.max_depth) {
		// Nothing queued, nothing running, max depth reached: we're done.
		self.finish = true;
	}
	return item;
};

/**
 * Main crawl pump. Dequeues one URL, kicks off its fetch, then schedules
 * itself again on the next tick until the concurrency cap is reached.
 * When the queue is momentarily empty, polls again after one second;
 * loadingDone() re-enters this loop as fetches complete.
 */
cp.start = function() {
	var self = this;

	// Crawl already complete — nothing left to schedule.
	if (self.finish) {
		console.log("===finish===");
		return;
	}

	// Honor the concurrency cap; completions will resume the pump.
	if (self.loadCount >= config.max_load_count) {
		console.log("loading reach max, waiting...");
		return;
	}

	var next = self.nextUrlItem();
	if (!next) {
		// nextUrlItem() may have just flipped the finish flag.
		if (self.finish) {
			return;
		}
		// Queue empty but fetches are still in flight — poll again soon.
		console.log('list empty, waiting...');
		setTimeout(function() {
			self.start();
		}, 1000);
		return;
	}

	self.loadOne(next);

	// Keep pumping until the concurrency cap is hit.
	if (self.loadCount < config.max_load_count) {
		process.nextTick(function() {
			self.start();
		});
	}
};


/**
 * Fetches one {url, depth} item. URLs matching any config.notParse pattern
 * are skipped (not downloaded/parsed). Otherwise the page body is fetched,
 * its hrefs extracted and blacklist-filtered, and the results handed to
 * loadingDone().
 *
 * BUG FIXES:
 *  - On a fetch error / non-200 response, the original returned without
 *    calling loadingDone(), so loadCount was never decremented and the
 *    crawler stalled permanently once enough fetches failed.
 *  - The not-parse path passed item.url (a string) to loadingDone(), which
 *    expects the item object (it reads preItem.url and preItem.depth).
 */
cp.loadOne = function(item) {
	var self = this;
	console.log('load ' + item.url + ", run count=" + self.loadCount);
	self.currDepth = item.depth;
	this.loadCount++;

	// should we down this and parse ?
	var isParse = true;
	var notParseList = config.notParse;
	var index = 0;
	while (isParse && index < notParseList.length) {
		var arr = item.url.match(notParseList[index]);
		isParse = !(arr && arr.length > 0);
		index++;
	}
	if (!isParse) {
		// TODO down this file and save ?
		console.log('this url not parse, url=' + item.url);
		// FIX: pass the full item, not item.url.
		self.loadingDone(item, null);
		return;
	}

	request(item.url, function(error, response, body) {
		if (error || response.statusCode !== 200) {
			console.log("====fail to get url=" + item.url);
			// FIX: still report completion so loadCount is decremented
			// and the crawl loop keeps running after a failed fetch.
			self.loadingDone(item, null);
			return;
		}

		var inUrls = null;
		if (item.depth > config.max_depth - 1) {
			console.log("======reach max depth, not push to list");
		} else {
			inUrls = utils.getHrefInHtml(body);
			inUrls = utils.urlFilter(inUrls, config.blacklist);
		}

		self.loadingDone(item, inUrls);
	});
};

/**
 * Completion callback for one fetch. Decrements the in-flight counter,
 * enqueues newly discovered URLs (resolved against the parent URL and
 * deduplicated), and re-enters the crawl loop.
 *
 * @param preItem the {url, depth} item that just finished
 * @param list    hrefs discovered on that page, or null/empty
 *
 * FIX: the original resolved each href twice via utils.getFullUrl —
 * once for the dedupe check and again for the push; reuse the first result.
 */
cp.loadingDone = function(preItem, list) {
	var self = this;
	self.totalDone++;
	console.log('done', preItem.url, "totalDone=" + self.totalDone);
	self.loadCount--;
	if (list && list.length > 0) {
		var depth = preItem.depth + 1;
		list.forEach(function(item) {
			// Resolve relative hrefs and skip URLs already queued.
			var url = utils.getFullUrl(preItem.url, item);
			if (self.list.indexOf(url) === -1) {
				self.list.push(url);
				self.depthList.push(depth);
			}
		});
	} else {
		console.log('null or empty list');
	}
	self.start();
};

// Expose the Crawler constructor as the module's sole export.
module.exports = Crawler;