package com.openness.spider;

import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.regex.Pattern;

import org.apache.hadoop.hbase.client.Delete;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.http.HttpStatus;
import org.apache.zookeeper.ZooKeeper;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import com.openness.spider.commons.Statics;
import com.openness.spider.commons.UrlUtil;
import com.openness.spider.fetcher.CustomFetchStatus;
import com.openness.spider.fetcher.PageFetchResult;
import com.openness.spider.fetcher.PageFetcher;
import com.openness.spider.hbase.HBaseUtil;
import com.openness.spider.parser.HtmlParseData;
import com.openness.spider.parser.Parser;
import com.openness.spider.robotstxt.RobotstxtServer;
import com.openness.spider.template.TemplateUtil;
import com.openness.spider.zk.ZKUtil;
import com.openness.spider.zk.lock.DistributedLock;

/**
 * Worker thread that repeatedly claims a batch of URLs from a shared HBase
 * work queue (under a ZooKeeper distributed lock), crawls them, and archives
 * the results.
 *
 * @author Rain
 * @version 2013-6-20 2:24:49 PM
 */

public class SpiderThread extends Thread {

	private static final Logger LOGGER = LoggerFactory
			.getLogger(SpiderThread.class);

	/** URLs ending in one of these media/binary extensions are never crawled. */
	private static final Pattern FILTERS = Pattern
			.compile(".*(\\.(css|js|bmp|gif|jpe?g"
					+ "|png|tiff?|mid|mp2|mp3|mp4"
					+ "|wav|avi|mov|mpeg|ram|m4v|pdf"
					+ "|rm|smil|wmv|swf|wma|zip|rar|gz))$");

	private final SpiderConfig config;

	private final ZooKeeper zk;

	/** Cluster-wide lock guarding the HBase workqueue/processqueue tables. */
	private final DistributedLock lock;

	private final PageFetcher pageFetcher;

	private final RobotstxtServer robotstxtServer;

	private final Parser parser;

	private final HtmlBuffer htmlBuffer;

	private final UrlBuffer urlBuffer;

	/** Consecutive empty polls tolerated before the thread shuts itself down. */
	private final int maxWaitTimes;

	/** Number of consecutive polls that returned no work. */
	private int currentWaitTimes;

	/** Path of the ephemeral-sequential ZK node created in {@link #onStart()}. */
	private String zkNode;

	/**
	 * Creates a crawler worker. Collaborators are shared with the other
	 * workers; only the {@link Parser} and the {@link DistributedLock} are
	 * private to this thread.
	 */
	public SpiderThread(SpiderConfig config, ZooKeeper zk,
			PageFetcher pageFetcher, RobotstxtServer robotstxtServer,
			HtmlBuffer htmlBuffer, UrlBuffer urlBuffer) {
		this.config = config;
		this.zk = zk;
		this.lock = new DistributedLock(this.zk, Statics.ZK_LOCK_NODE);
		this.pageFetcher = pageFetcher;
		this.robotstxtServer = robotstxtServer;
		this.parser = new Parser(config);
		this.htmlBuffer = htmlBuffer;
		this.urlBuffer = urlBuffer;
		this.maxWaitTimes = config.getThreadMaxWaitTimes();
		this.currentWaitTimes = 0;
	}

	/**
	 * Registers this worker in ZooKeeper as an ephemeral-sequential node so
	 * the cluster can see which threads are alive.
	 *
	 * @throws Exception if the ZK node cannot be created
	 */
	public void onStart() throws Exception {
		this.zkNode = ZKUtil.create(zk, Statics.ZK_THREADS_NODE_SUFFIX,
				Statics.EMPTY_BYTEARRAY, Statics.ACL,
				Statics.CREATEMODE_EPHEMERAL_SEQUENTIAL);
	}

	/**
	 * Removes this worker's ZK registration node (any version).
	 *
	 * @throws Exception if the ZK node cannot be deleted
	 */
	public void onStop() throws Exception {
		ZKUtil.delete(zk, zkNode, -1);
	}

	/**
	 * Decides whether the given url should be crawled. Only a whitelist of
	 * Chinese news portals (sina.com.cn, qq.com, sohu.com, 163.com) and a
	 * whitelist of news-like sub-domains are accepted; media/binary resources
	 * matching {@link #FILTERS} are rejected.
	 *
	 * @param url the url which we are interested to know whether it should be
	 *            included in the crawl or not.
	 * @return true if the url should be included in the crawl, false otherwise
	 */
	public boolean shouldVisit(Url url) {
		String domain = UrlUtil.getDomain(url.getUrl());
		if (domain == null
				|| domain.isEmpty()
				|| !(domain.equals("sina.com.cn") || domain.equals("qq.com")
						|| domain.equals("sohu.com") || domain
							.equals("163.com"))) {
			return false;
		}

		String subDomain = UrlUtil.getSubDomain(url.getUrl());
		if (subDomain == null
				|| subDomain.isEmpty()
				|| !(subDomain.equals("news") || subDomain.equals("finance")
						|| subDomain.equals("tech")
						|| subDomain.equals("business")
						|| subDomain.equals("it") || subDomain.equals("money"))) {
			return false;
		}

		return !FILTERS.matcher(url.getUrl()).matches();
	}

	/**
	 * Consumes a successfully fetched and parsed page: extracts the article
	 * via the template engine into {@code htmlBuffer}, and pushes the page's
	 * outgoing links into {@code urlBuffer}. Never throws — all failures are
	 * logged and swallowed so one bad page cannot kill the batch.
	 */
	public void visit(Page page) {
		try {
			Url url = page.getUrl();

			String link = url.getUrl();

			// NOTE(review): assumes getContentCharset() is non-null for parsed
			// pages — a null charset would NPE here and be caught below.
			String htmlSource = new String(page.getContentData(),
					page.getContentCharset());

			Html html = null;

			try {
				html = TemplateUtil.parse(link, htmlSource);
			} catch (Exception e) {
				LOGGER.error("template parse error: " + e.toString());
			}

			if (html != null) {
				htmlBuffer.put(html);

				LOGGER.info(Statics.HOSTNAME + "\t"
						+ Thread.currentThread().getName() + "\t"
						+ "put 1 html to htmlbuffer");
			}

			HtmlParseData htmlParseData = (HtmlParseData) page.getParseData();

			List<Url> urls = htmlParseData.getOutgoingUrls();

			if (urls.size() > 0) {
				urlBuffer.put(urls);

				LOGGER.info(Statics.HOSTNAME + "\t"
						+ Thread.currentThread().getName() + "\t" + "put "
						+ urls.size() + " urls to urlbuffer");
			}
		} catch (Exception e) {
			LOGGER.error("visit page error: " + e.toString());
		}
	}

	/**
	 * Fetches a single url, follows the happy path
	 * (header fetch → content fetch → parse → {@link #visit(Page)}) and logs
	 * any failure. Fetched-but-unconsumed content is always discarded.
	 *
	 * @param url the url to process; a null url is silently ignored
	 */
	private void processPage(Url url) {
		if (url == null) {
			return;
		}

		PageFetchResult fetchResult = null;

		try {
			fetchResult = pageFetcher.fetchHeader(url);

			int statusCode = fetchResult.getStatusCode();

			/*
			 * statusCode is not ok
			 */
			if (statusCode != HttpStatus.SC_OK) {
				if (statusCode == HttpStatus.SC_MOVED_PERMANENTLY
						|| statusCode == HttpStatus.SC_MOVED_TEMPORARILY) {
					if (config.isFollowRedirects()) {
						String movedToUrl = fetchResult.getMovedToUrl();

						if (movedToUrl == null) {
							return;
						}

						Url moveUrl = new Url();

						moveUrl.setUrl(movedToUrl);

						moveUrl.setDepth(url.getDepth());

						if (shouldVisit(moveUrl)
								&& robotstxtServer.allows(moveUrl)) {
							// TODO(review): this branch was left empty in the
							// original ("调试此URL" ≈ handle this URL) — the
							// redirect target is currently dropped; confirm
							// whether it should be re-queued.
						}
					}
				} else if (fetchResult.getStatusCode() == CustomFetchStatus.PageTooBig) {
					LOGGER.info("Skipping a page which was bigger than max allowed size: "
							+ url.getUrl());
				}

				return;
			}

			/*
			 * statusCode is ok
			 */
			Page page = new Page(url);

			if (!fetchResult.fetchContent(page)) {
				LOGGER.error("fetchContent error at " + url);

				return;
			}

			if (!parser.parse(page, url.getUrl())) {
				LOGGER.error("parse error at " + url);

				return;
			}

			try {
				visit(page);
			} catch (Exception e) {
				// Log the throwable itself; the original indexed
				// e.getStackTrace()[0], which throws on an empty stack trace.
				LOGGER.error("Exception while running the visit method at "
						+ url.getUrl(), e);
			}
		} catch (Exception e) {
			LOGGER.error(e.getMessage() + ", while processing: " + url.getUrl(),
					e);
		} finally {
			if (fetchResult != null) {
				fetchResult.discardContentIfNotConsumed();
			}
		}
	}

	/**
	 * Claims the next batch of URLs under the distributed lock, in three
	 * steps: (1) read up to {@code config.getFetchUrls()} URLs from the
	 * workqueue table, (2) copy them into the processqueue table, (3) delete
	 * them from the workqueue table.
	 *
	 * @return the claimed batch, or null if nothing was available or an HBase
	 *         I/O error occurred
	 */
	private List<Url> takeWorkqueue() {
		List<Url> workqueue = null;

		try {
			lock.lock();

			try {
				// (1) read the next batch from the workqueue
				List<Result> results = HBaseUtil.get(
						Statics.HBASE_TABLE_WORKQUEUE, new Scan(),
						config.getFetchUrls());

				if (results != null && !results.isEmpty()) {
					workqueue = new ArrayList<Url>(results.size());

					for (Result result : results) {
						Url url = new Url();
						url.get(result);
						workqueue.add(url);
					}

					// (2) mark the batch as in-flight in the processqueue
					List<Put> puts = new ArrayList<Put>(workqueue.size());

					for (Url url : workqueue) {
						puts.add(url.put());
					}

					HBaseUtil.put(Statics.HBASE_TABLE_PROCESSQUEUE, puts);

					// (3) remove the batch from the workqueue
					List<Delete> deletes = new ArrayList<Delete>(
							workqueue.size());

					for (Url url : workqueue) {
						deletes.add(url.delete());
					}

					HBaseUtil.delete(Statics.HBASE_TABLE_WORKQUEUE, deletes);
				}
			} catch (IOException e) {
				LOGGER.error(Statics.HOSTNAME + "\t"
						+ Thread.currentThread().getName() + "\t"
						+ "get workqueue urls error: " + e.toString());

				// BUGFIX: the original called workqueue.clear()
				// unconditionally here, but workqueue is still null when
				// HBaseUtil.get itself threw — that NPE escaped the catch and
				// was mis-reported as a lock error.
				if (workqueue != null) {
					workqueue.clear();
				}

				workqueue = null;
			} finally {
				lock.unlock();
			}
		} catch (Exception e) {
			throw new RuntimeException("lock error: " + e.toString(), e);
		}

		return workqueue;
	}

	/**
	 * After a batch has been processed: (1) removes its URLs from the
	 * processqueue table and (2) records them in the history table, both under
	 * the distributed lock. HBase I/O errors are logged and swallowed.
	 *
	 * @param workqueue the batch of URLs that was just processed; non-null
	 */
	private void archiveWorkqueue(List<Url> workqueue) {
		try {
			lock.lock();

			try {
				// (1) deletes for the processqueue
				List<Delete> deletes = new ArrayList<Delete>(workqueue.size());

				// (2) puts for the history table
				List<Put> puts = new ArrayList<Put>(workqueue.size());

				for (Url url : workqueue) {
					deletes.add(url.delete());
					puts.add(url.put());
				}

				HBaseUtil.delete(Statics.HBASE_TABLE_PROCESSQUEUE, deletes);

				HBaseUtil.put(Statics.HBASE_TABLE_HISTORY, puts);
			} catch (IOException e) {
				LOGGER.error(Statics.HOSTNAME
						+ "\t"
						+ Thread.currentThread().getName()
						+ "\t"
						+ "delete from processqueue or put to history error: "
						+ e.toString());
			} finally {
				lock.unlock();
			}
		} catch (Exception e) {
			throw new RuntimeException("lock error: " + e.toString(), e);
		}
	}

	/**
	 * Main loop: claim a batch from the shared workqueue, crawl every URL in
	 * it, then archive the batch. Exits after {@code maxWaitTimes} consecutive
	 * empty polls (sleeping 5s between polls).
	 */
	@Override
	public void run() {
		try {
			onStart();
		} catch (Exception e) {
			// Preserve the cause so the original stack trace is not lost.
			throw new RuntimeException("SpiderThread onStart error: "
					+ e.toString(), e);
		}

		while (true) {
			List<Url> workqueue = takeWorkqueue();

			LOGGER.info(Statics.HOSTNAME + "\t"
					+ Thread.currentThread().getName() + "\t"
					+ "get workqueue size is "
					+ (workqueue == null ? 0 : workqueue.size()));

			if (workqueue == null || workqueue.isEmpty()) {
				currentWaitTimes++;

				if (currentWaitTimes > maxWaitTimes) {
					LOGGER.info(Statics.HOSTNAME + "\t"
							+ Thread.currentThread().getName() + "\t"
							+ "wait times more than " + maxWaitTimes
							+ " , finish");

					break;
				}

				try {
					Thread.sleep(5000);
				} catch (InterruptedException e) {
					// Restore the interrupt status instead of swallowing it,
					// so the loop winds down promptly via the wait counter.
					Thread.currentThread().interrupt();
				}

				continue;
			}

			currentWaitTimes = 0;

			// Crawl every URL in the claimed batch.
			for (Url url : workqueue) {
				processPage(url);
			}

			archiveWorkqueue(workqueue);
		}

		try {
			onStop();
		} catch (Exception e) {
			throw new RuntimeException("SpiderThread onStop error: "
					+ e.toString(), e);
		}

		LOGGER.info(Statics.HOSTNAME + "\t" + Thread.currentThread().getName()
				+ "\t" + "stopped");
	}

}
