package coc.core;

import java.util.List;
import java.util.concurrent.Future;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import coc.core.url.Url;
import coo.base.exception.UncheckedException;

/**
 * Crawler executor. A basic executor that defines how crawler tasks are run
 * synchronously and asynchronously.<br/>
 * Specialized executors can be built on top of this class, usually by
 * overriding only the factory methods that create the sync/async crawlers.<br/>
 * Building an executor without extending this class is also allowed; that is
 * entirely up to the needs of the application.
 */
public class CrawlerExecutor {
	// getClass() (not a static literal) so subclasses log under their own name.
	protected Logger log = LoggerFactory.getLogger(getClass());
	// Pool that runs fire-and-forget (asynchronous) crawler tasks.
	protected ThreadPoolExecutor asyncExecutor;
	// Pool that runs blocking (synchronous) crawler tasks.
	protected ThreadPoolExecutor syncExecutor;

	/**
	 * Constructor. Uses the same concurrency limit for sync and async tasks.
	 * 
	 * @param maxCrawlerThread
	 *            maximum number of concurrently running crawlers
	 */
	public CrawlerExecutor(Integer maxCrawlerThread) {
		this(maxCrawlerThread, maxCrawlerThread);
	}

	/**
	 * Constructor.
	 * 
	 * @param asyncMaxCrawlerThread
	 *            maximum number of concurrently running asynchronous crawlers
	 * @param syncMaxCrawlerThread
	 *            maximum number of concurrently running synchronous crawlers
	 */
	public CrawlerExecutor(Integer asyncMaxCrawlerThread,
			Integer syncMaxCrawlerThread) {
		asyncExecutor = newFixedPool(asyncMaxCrawlerThread);
		syncExecutor = newFixedPool(syncMaxCrawlerThread);
		log.debug("允许最大并发执行爬虫数[异步：{} 同步：{}]", asyncMaxCrawlerThread,
				syncMaxCrawlerThread);
	}

	/**
	 * Creates a fixed-size thread pool with an unbounded work queue.
	 * 
	 * @param poolSize
	 *            number of worker threads
	 * @return the created pool.
	 */
	private static ThreadPoolExecutor newFixedPool(Integer poolSize) {
		return new ThreadPoolExecutor(poolSize, poolSize, 0L,
				TimeUnit.MILLISECONDS, new LinkedBlockingQueue<Runnable>());
	}

	/**
	 * Submits an asynchronous crawler task; returns immediately.
	 * 
	 * @param crawlerConfig
	 *            crawler configuration
	 */
	public void asyncExecute(CrawlerConfig crawlerConfig) {
		log.info("接收异步爬虫任务[{}][{}]", crawlerConfig.getTaskId(),
				crawlerConfig.getTaskName());
		Crawler crawler = genAsyncCrawler(crawlerConfig);
		asyncExecutor.submit(crawler);
	}

	/**
	 * Runs a crawler task synchronously, blocking until it completes.
	 * 
	 * @param crawlerConfig
	 *            crawler configuration
	 * @return the crawler's execution result.
	 * @throws UncheckedException
	 *             if the task fails or the waiting thread is interrupted
	 */
	public CrawlerResult syncExecute(CrawlerConfig crawlerConfig) {
		log.info("接收同步爬虫任务[{}][{}]", crawlerConfig.getTaskId(),
				crawlerConfig.getTaskName());
		try {
			Crawler crawler = genSyncCrawler(crawlerConfig);
			Future<CrawlerResult> future = syncExecutor.submit(crawler);
			return future.get();
		} catch (InterruptedException e) {
			// Restore the interrupt status so callers up the stack can see it.
			Thread.currentThread().interrupt();
			throw new UncheckedException("执行同步爬虫任务时发生异常。", e);
		} catch (Exception e) {
			throw new UncheckedException("执行同步爬虫任务时发生异常。", e);
		}
	}

	/**
	 * Initiates an orderly shutdown of both pools: previously submitted tasks
	 * still run, but no new tasks are accepted.
	 */
	public void shutdown() {
		asyncExecutor.shutdown();
		syncExecutor.shutdown();
	}

	public ThreadPoolExecutor getAsyncExecutor() {
		return asyncExecutor;
	}

	public ThreadPoolExecutor getSyncExecutor() {
		return syncExecutor;
	}

	/**
	 * Creates the crawler used for synchronous tasks. Override to customize.
	 * 
	 * @param crawlerConfig
	 *            crawler configuration
	 * @return the created synchronous crawler.
	 */
	protected Crawler genSyncCrawler(CrawlerConfig crawlerConfig) {
		return new DefaultCrawler(crawlerConfig);
	}

	/**
	 * Creates the crawler used for asynchronous tasks. Override to customize.
	 * 
	 * @param crawlerConfig
	 *            crawler configuration
	 * @return the created asynchronous crawler.
	 */
	protected Crawler genAsyncCrawler(CrawlerConfig crawlerConfig) {
		return new DefaultCrawler(crawlerConfig);
	}

	/**
	 * Default crawler: no URL filtering, results accumulated in memory.
	 * Declared static — it uses no enclosing-instance state, so it should not
	 * hold a hidden reference to the executor.
	 */
	private static class DefaultCrawler extends Crawler {
		/**
		 * Constructor.
		 * 
		 * @param config
		 *            crawler configuration
		 */
		public DefaultCrawler(CrawlerConfig config) {
			super(config);
		}

		@Override
		protected List<Url> filterUrls(String taskId, List<Url> suspendUrls) {
			// No filtering: crawl every suspended URL as-is.
			return suspendUrls;
		}

		@Override
		protected Integer saveSpiderResult(SpiderResult spiderResult) {
			// Accumulate in the in-memory result inherited from Crawler.
			result.addSpiderResult(spiderResult);
			return spiderResult.getDataSize();
		}

		@Override
		protected void complete() {
			// Nothing to clean up for the default crawler.
		}
	}
}
