package com.yx.crawler.runtime;

import java.util.List;
import java.util.concurrent.LinkedBlockingQueue;

import com.yx.crawler.typedef.CrawlerUrl;
import com.yx.frame.log.LogFactory;
import com.yx.frame.log.LogWriter;

/**
 * A crawler URL queue backed by a {@link LinkedBlockingQueue}, refilled by a
 * background producer thread. Subclasses supply batches of URLs via
 * {@link #buildCrawlerUrl(int)}; the producer loop keeps the queue topped up
 * while respecting a maximum length and an optional restart delay set by
 * {@link #finish(int)}.
 */
public abstract class TimedCrawlerQueue extends Thread implements CrawlerQueue {
	private static LogWriter log = LogFactory.getInstance().getLogWriter();
	/** Pause (ms) between successful batch builds. */
	public static final long SHORT_SLEEP_TIME = 10;
	/** Pause (ms) while the queue is full or a restart delay is pending. */
	public static final long LONG_SLEEP_TIME = 5 * 1000;
	/** Upper bound on buffered URLs before the producer loop backs off. */
	private int maxQueueLength = 2000;
	// Written by finish() from a consumer thread and read by run() in the
	// producer thread: volatile so the updates are visible across threads.
	private volatile long restartTime = System.currentTimeMillis();
	private volatile int batchNumber = 0;
	private LinkedBlockingQueue<CrawlerUrl> queue = new LinkedBlockingQueue<CrawlerUrl>();

	/**
	 * Creates the queue and immediately starts the producer thread.
	 *
	 * @param maxQueueLength maximum number of URLs to keep buffered
	 */
	public TimedCrawlerQueue(int maxQueueLength) {
		this.maxQueueLength = maxQueueLength;
		// NOTE(review): starting the thread from the constructor leaks a
		// partially constructed 'this' to run(); kept as-is for backward
		// compatibility with existing subclasses.
		this.start();
	}

	/**
	 * Enqueues a URL, blocking if necessary (unbounded queue, so in practice
	 * this never blocks).
	 */
	@Override
	public void push(CrawlerUrl crawlerUrl) throws Exception {
		queue.put(crawlerUrl);
	}

	/** Dequeues the next URL, blocking until one is available. */
	@Override
	public CrawlerUrl pop() throws Exception {
		return queue.take();
	}

	/** @return the number of URLs currently buffered */
	@Override
	public int length() {
		return queue.size();
	}

	/**
	 * Producer loop: while the restart delay has elapsed and the queue is
	 * below {@code maxQueueLength}, asks the subclass for the next batch and
	 * enqueues it; otherwise sleeps for {@link #LONG_SLEEP_TIME}.
	 */
	@Override
	public void run() {
		while (true) {
			try {
				long current = System.currentTimeMillis();
				if (current > restartTime && length() < maxQueueLength) {
					List<CrawlerUrl> crawlerUrls = buildCrawlerUrl(batchNumber++);
					// Subclasses may return null to signal "no batch right now".
					if (crawlerUrls != null) {
						for (CrawlerUrl crawlerUrl : crawlerUrls) {
							push(crawlerUrl);
						}
					}
					daemonSleep(SHORT_SLEEP_TIME);
				} else {
					daemonSleep(LONG_SLEEP_TIME);
				}
			} catch (Throwable e) {
				// e.getMessage() can be null (e.g. NullPointerException), which
				// produced empty log lines; toString() always includes the type.
				error(e.toString());
			}
		}
	}

	/** Sleeps up to {@code time} ms; a notify on this object wakes it early. */
	private void daemonSleep(long time) {
		synchronized (this) {
			try {
				wait(time);
			} catch (InterruptedException e) {
				// Restore the interrupt flag instead of swallowing it so the
				// owner can still observe interruption.
				Thread.currentThread().interrupt();
			}
		}
	}

	/**
	 * Resets the batch counter and delays the next production cycle.
	 *
	 * @param restartHourStep hours to wait before producing again
	 */
	public void finish(int restartHourStep) {
		batchNumber = 0;
		// Use long arithmetic: the original int expression overflowed for
		// restartHourStep > 596 (596 * 3_600_000 > Integer.MAX_VALUE).
		long restartAfterTime = restartHourStep * 60L * 60L * 1000L;
		restartTime = System.currentTimeMillis() + restartAfterTime;
	}

	/**
	 * Spins up a {@link CrawlerManager} consuming from this queue.
	 *
	 * @param concurrent number of concurrent crawler workers
	 */
	public void startCrawler(int concurrent) {
		CrawlerManager crawlerManager = new CrawlerManager(this);
		crawlerManager.start(concurrent);
	}

	/**
	 * Builds the next batch of URLs to crawl.
	 *
	 * @param batch monotonically increasing batch index (reset by {@link #finish(int)})
	 * @return the URLs to enqueue, or {@code null} for "nothing this round"
	 */
	public abstract List<CrawlerUrl> buildCrawlerUrl(int batch);

	public static void error(String logMsg) {
		log.error(null, "爬虫队列", logMsg);
	}

	public static void debug(String logMsg) {
		log.debug(null, "爬虫队列", logMsg);
	}
}
