package org.heydr.concurrent;

import java.util.List;
import java.util.Map;
import java.util.concurrent.ArrayBlockingQueue;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;

import org.heydr.Crawler;
import org.heydr.common.Config;
import org.heydr.common.TaskEngine;
import org.heydr.data.ResourceEngine;
import org.heydr.handler.HeyDrHandler;
import org.heydr.handler.impl.SubHandler;

public class CrawlerEngine {

	/** Keep-alive time, in seconds, for non-core worker threads in the crawler pool. */
	private static final long KEEP_ALIVE_SECONDS = 600;

	/* pool config object shared by all static entry points */
	private static Config config;
	/* task crawler pool; (re)created by initThreadPool() on each go(...) call */
	private static ThreadPoolExecutor threadPoolExecutor;

	/**
	 * Public no-arg constructor kept for framework (e.g. Spring) instantiation;
	 * all functionality is exposed through static methods.
	 */
	public CrawlerEngine(){
	}

	/**
	 * <strong>Description</strong>: begin running heyDr crawling jobs.
	 * Wires the {@link TaskEngine} (resource engine, alarm frequency, site map,
	 * initial URL queue, handlers, URL filter) and then starts the crawler
	 * worker threads via {@link #go(CrawlerGenerator)}.
	 *
	 * @param config pool/task configuration; also stored in the static field for later calls
	 * @param resourceEngine source of crawlable URLs (database-backed)
	 * @param handlers named sub-handlers registered with the TaskEngine
	 * @param crawlerGenerator factory that supplies one Crawler per worker thread
	 * @author carlos.xie
	 * 2013-5-20
	 */
	public static void go(Config config, ResourceEngine resourceEngine, Map<String,SubHandler> handlers, CrawlerGenerator crawlerGenerator){

		setConfig(config);
		TaskEngine.setResourceEngine(resourceEngine);
		TaskEngine.setFrequency(config.getTaskEmptyAlarmFrequency());
		// refresh sites into the site map from the database
		TaskEngine.refreshSites();
		// seed the task queue with all URLs currently waiting to be crawled
		TaskEngine.getTaskQueue().addAll(resourceEngine.availableUrls(config.getDbLimit()));
		TaskEngine.initHandler(handlers);
		TaskEngine.initFilter(config.getFilter());
		go(crawlerGenerator);
	}

	/**
	 * <strong>Description</strong>: simplified entry point, available for Spring
	 * integration. Requires a Config to have been supplied first, either via
	 * {@link #setConfig(Config)} or via the full
	 * {@link #go(Config, ResourceEngine, Map, CrawlerGenerator)} overload.
	 * Starts one crawler per core pool thread.
	 *
	 * @param crawlerGenerator factory that supplies one Crawler per worker thread
	 * @throws IllegalStateException if no Config has been set yet
	 * @throws NullPointerException if the generator returns a null crawler
	 * @author carlos.xie
	 * 2013-5-21
	 */
	public static void go(CrawlerGenerator crawlerGenerator){

		// fail fast with a clear message instead of an anonymous NPE inside initThreadPool()
		if(null == config){
			throw new IllegalStateException("no Config set: call setConfig(...) or the full go(...) overload before go(CrawlerGenerator).");
		}

		initThreadPool();

		for(int i=0;i<config.getTaskCoreThreadpoolSize();i++){
			Crawler taskCrawler = crawlerGenerator.generateCrawler();
			if(null == taskCrawler){
				throw new NullPointerException("the CrawlerGenerator implementation returns a null crawler.");
			}
			// every crawler drains the shared task queue
			taskCrawler.setQueue(TaskEngine.getTaskQueue());
			threadPoolExecutor.execute(taskCrawler);
		}
	}

	/**
	 * <strong>Description</strong>: (re)initialize the crawler thread pool.
	 * Any previously created pool is shut down first so that repeated go(...)
	 * calls do not leak idle executor threads.
	 * NOTE(review): DiscardPolicy silently drops submitted crawlers once the
	 * bounded queue fills — confirm that losing tasks on saturation is intended.
	 * @author carlos.xie
	 * 2013-5-20
	 */
	private static void initThreadPool(){

		if(null != threadPoolExecutor){
			// graceful: already-running crawlers keep going, old pool accepts no new tasks
			threadPoolExecutor.shutdown();
		}

		threadPoolExecutor = new ThreadPoolExecutor(
				config.getTaskCoreThreadpoolSize(),
				config.getTaskMaxThreadpoolSize(),
				KEEP_ALIVE_SECONDS, TimeUnit.SECONDS,
				new ArrayBlockingQueue<Runnable>(config.getTaskMaxThreadpoolSize()),
				new ThreadPoolExecutor.DiscardPolicy());
	}

	/**
	 * Crawler factory interface: implementations create one Crawler instance
	 * per worker thread started by {@link #go(CrawlerGenerator)}.
	 */
	public interface CrawlerGenerator{
		/**
		 * <strong>Description</strong>: create a crawler instance.
		 * @return a non-null Crawler to run on a pool thread
		 * @author carlos.xie
		 * 2013-5-21
		 */
		public Crawler generateCrawler();

	}

	/** @return the current pool configuration (null until setConfig is called) */
	public static Config getConfig() {
		return config;
	}

	/** Stores the Config used by the static go(...) entry points. */
	public static void setConfig(Config config) {
		CrawlerEngine.config = config;
	}

}