package com.yx.crawler.runtime;

import java.util.List;

import com.yx.crawler.typedef.CrawlerListener;
import com.yx.crawler.typedef.CrawlerPage;
import com.yx.crawler.typedef.CrawlerUrl;
import com.yx.crawler.typedef.Policy;
import com.yx.frame.log.LogFactory;
import com.yx.frame.log.LogWriter;

public class CrawlerProcessor extends Thread {
	// Shared log writer for every processor instance; never reassigned, so final.
	private static final LogWriter log = LogFactory.getInstance().getLogWriter();

	/** Queue this processor drains; supplied by the crawler container. */
	private CrawlerQueue crawlerQueue;

	/** Pause between two consecutive URL fetches, in milliseconds. */
	public static final long SLEEP_TIME = 5;
	/** Pause between two batch passes over the queue, in milliseconds. */
	public static final long BATCH_SLEEP_TIME = 2000;
	/** Length of the periodic rest used to evade site anti-crawler policies, in milliseconds. */
	public static final long LONG_SLEEP_TIME = 30 * 1000;
	/** Length of a continuous work phase before the crawler rests, in milliseconds. */
	public static final long BUSY_WORK_TIME = 3 * 60 * 1000;

	public CrawlerProcessor(CrawlerQueue crawlerQueue) {
		this.crawlerQueue = crawlerQueue;
	}

	// Work/rest duty-cycle state. Static, so it is shared by ALL processor
	// threads; every access goes through the synchronized canDo() below.
	private static long statusBeginTime = System.currentTimeMillis();
	private static boolean workStatus = true;

	/**
	 * Reports whether the crawler is currently in a work phase.
	 *
	 * <p>Alternates between {@link #BUSY_WORK_TIME} ms of work and
	 * {@link #LONG_SLEEP_TIME} ms of rest to avoid tripping site
	 * anti-crawler protection. Synchronized because the phase state is
	 * static and may be consulted from several processor threads.
	 *
	 * @return {@code true} while in a work phase, {@code false} while resting
	 */
	public static synchronized boolean canDo() {
		long elapsed = System.currentTimeMillis() - statusBeginTime;
		if (workStatus && elapsed >= BUSY_WORK_TIME) {
			// Work phase exhausted: switch to the rest phase.
			statusBeginTime = System.currentTimeMillis();
			workStatus = false;
			System.out.println("为了屏蔽网站安全策略，爬虫休息" + LONG_SLEEP_TIME + "毫秒。");
		} else if (!workStatus && elapsed >= LONG_SLEEP_TIME) {
			// Rest phase over: resume working.
			statusBeginTime = System.currentTimeMillis();
			System.out.println("爬虫休息完毕，开始工作" + BUSY_WORK_TIME + "毫秒。");
			workStatus = true;
		}
		return workStatus;
	}

	/**
	 * Main loop: repeatedly promotes breadth-first URLs into a fresh
	 * depth-first batch, processes that batch, then sleeps between batches.
	 * Exits when the thread is interrupted (daemonSleep preserves the
	 * interrupt flag), so the processor can be shut down cleanly.
	 */
	@Override
	public void run() {
		while (!Thread.currentThread().isInterrupted()) {
			CrawlerQueuePolicy crawlerQueuePolicy = new CrawlerQueuePolicy(crawlerQueue, new BasicCrawlerQueue());
			if (crawlerQueuePolicy.copyBreadthBasedIntoDepthBased()) {
				try {
					doIt(crawlerQueuePolicy);
				} catch (Throwable e) {
					// Previously swallowed silently; log so batch failures are visible.
					error("batch failed:" + e.getMessage());
				}
			}
			daemonSleep(BATCH_SLEEP_TIME);
		}
	}

	/**
	 * Drains the depth-first queue: fetches each URL, hands the page to its
	 * listener, and enqueues any child URLs according to their crawl policy.
	 * A failure on one URL is logged and must not abort the rest of the batch.
	 *
	 * @param crawlerQueuePolicy the batch queue to drain
	 */
	private void doIt(CrawlerQueuePolicy crawlerQueuePolicy) {
		CrawlerUrl crawlerUrl;
		while ((crawlerUrl = crawlerQueuePolicy.popDepthFirst()) != null) {
			CrawlerListener crawlerListener = crawlerUrl.getCrawlerListener();
			// Without a listener nobody can consume the page; skip the fetch entirely.
			if (crawlerListener == null) {
				continue;
			}
			try {
				CrawlerPage crawlerPage = HttpProcessor.process(crawlerUrl);
				if (crawlerPage != null) {
					List<CrawlerUrl> childCrawlerUrls = crawlerListener.crawlered(crawlerPage);
					if (childCrawlerUrls != null) {
						for (CrawlerUrl child : childCrawlerUrls) {
							Policy policy = child.policy();
							if (policy == null || Policy.NONE == policy) {
								continue; // NONE (or unset) means "do not follow".
							}
							if (Policy.BREADTH_FIRST == policy) {
								crawlerQueuePolicy.putBreadthFirst(child);
							} else if (Policy.DEPTH_FIRST == policy) {
								crawlerQueuePolicy.putDepthFirst(child);
							}
						}
					}
				}
			} catch (Throwable e) {
				// Log and continue with the next URL; one bad page must not stop the batch.
				error(crawlerUrl.getUrl() + ":" + e.getMessage());
			}
			daemonSleep(SLEEP_TIME);
		}
	}

	/**
	 * Sleeps for the given number of milliseconds, restoring the thread's
	 * interrupt status on interruption so the run loop can terminate.
	 *
	 * @param time sleep duration in milliseconds
	 */
	private void daemonSleep(long time) {
		try {
			// Thread.sleep is static — was misleadingly invoked as this.sleep(time).
			Thread.sleep(time);
		} catch (InterruptedException e) {
			// Re-assert the interrupt so run()'s loop condition observes it.
			Thread.currentThread().interrupt();
		}
	}

	/** Writes an error-level message under the crawler-container log category. */
	public static void error(String logMsg) {
		log.error(null, "爬虫容器", logMsg);
	}

	/** Writes a debug-level message under the crawler-container log category. */
	public static void debug(String logMsg) {
		log.debug(null, "爬虫容器", logMsg);
	}
}
