package com.flute.icrawler.framework.framework;

import java.rmi.RemoteException;
import java.rmi.server.UnicastRemoteObject;
import java.util.ArrayList;
import java.util.List;

import org.apache.log4j.Logger;

import com.flute.icrawler.app.listener.AutoAdjustListenser;
import com.flute.icrawler.app.listener.UpdateModuleListenser;
import com.flute.icrawler.config.CrawlConfig;
import com.flute.icrawler.framework.autoadjust.AbleTimer;
import com.flute.icrawler.framework.autoadjust.FetchAdjuster;
import com.flute.icrawler.framework.autoadjust.IntervalCrawlUrlPool;
import com.flute.icrawler.framework.autoadjust.strategy.FetchAdjusterStrategyFactory;
import com.flute.tools.timer.IntervalExecutor;

/**
 * Server-side controller for a single crawl job. Wires together the URL pool,
 * the fetch-rate adjuster and optional update module, starts/stops the child
 * controllers, exports the URL pool over RMI, and runs a periodic watchdog
 * that ends the job once the pool is drained.
 */
public class JobServerController {
	private static final Logger logger = Logger.getLogger(JobServerController.class);

	// Watchdog that periodically checks whether the job has finished.
	private final IntervalExecutor statusExecutor;
	private final List<JobChildController> childList;
	// Listeners notified on every job-status transition (see setStatus).
	private final List<ICrawlListener> listenerList;
	private final IntervalCrawlUrlPool intervalUrlPool;
	private int jobStatus;
	private final CrawlJob job;
	private final FetchAdjuster fetchAdjuster;
	private final FetchAdjusterStrategyFactory factory;
	private final CrawlConfig crawlConfig;
	private final AutoAdjustListenser autoAdjustListenser;
	// Only created when the update module is enabled (startUpdate == 1).
	private UpdateModuleListenser updateModuleListenser = null;
	private final JobSetttings settings = new JobSetttings();

	/**
	 * Builds the full job pipeline from the given configuration.
	 *
	 * @param crawlConfig job configuration (seeds, adjuster/update switches)
	 * @throws ClassNotFoundException if seed/settings initialization fails to
	 *             resolve a configured class
	 */
	public JobServerController(CrawlConfig crawlConfig)
			throws ClassNotFoundException {
		this.crawlConfig = crawlConfig;
		initJobSettings();

		fetchAdjuster = new FetchAdjuster(new AbleTimer(100));
		factory = new FetchAdjusterStrategyFactory(fetchAdjuster);
		fetchAdjuster.addAdjusterStrategy(factory.getDefaultStrategy());
		// Any non-zero value enables the hardware-based fetch strategy.
		if (0 != crawlConfig.getStartHardwareAdjust()) {
			fetchAdjuster.addAdjusterStrategy(factory
					.getHardwareFetchStrategy());
		}

		this.childList = new ArrayList<JobChildController>();
		this.job = new CrawlJob(settings);
		intervalUrlPool = new IntervalCrawlUrlPool(crawlConfig, fetchAdjuster);
		intervalUrlPool.setCrawlJob(job);

		// NOTE(review): the auto-adjust listener is constructed but never
		// registered with the pool (registration is intentionally disabled);
		// confirm whether it should be re-enabled or removed.
		autoAdjustListenser = new AutoAdjustListenser(fetchAdjuster);

		if (1 == crawlConfig.getStartUpdate()) {
			updateModuleListenser = new UpdateModuleListenser(intervalUrlPool);
			intervalUrlPool.registerObserver(updateModuleListenser);
		}

		this.statusExecutor = getStatusExecutor();
		this.statusExecutor.setName("Job Status WatchDog");
		// Non-daemon so the JVM stays alive until the watchdog finishes.
		this.statusExecutor.setDaemMode(false);
		this.statusExecutor.setInterval(10000);

		this.listenerList = new ArrayList<ICrawlListener>();
	}

	/**
	 * Loads the configured seed URLs into the job settings.
	 *
	 * @throws ClassNotFoundException declared for compatibility with the
	 *             constructor's contract
	 */
	private void initJobSettings() throws ClassNotFoundException {
		for (String seed : crawlConfig.getUrl().getSeeds()) {
			String trimmed = seed.trim();
			settings.addSeed(new Seed(trimmed));
			logger.info("seed=" + trimmed);
		}
	}

	/** Registers a child controller to be started along with this job. */
	public void addChildExecutor(JobChildController childController) {
		this.childList.add(childController);
	}

	/** Returns the URL pool backing this job. */
	public ICrawlUrlPool getCrawlUrlPool() {
		return intervalUrlPool;
	}

	/**
	 * Adds a listener that is notified of crawl-job status changes.
	 *
	 * @param listener receiver of {@link JobStatus} transition notifications
	 */
	public void addCrawlListener(ICrawlListener listener) {
		this.listenerList.add(listener);
	}

	/**
	 * Starts the job: watchdog, fetch adjuster, all child controllers, and
	 * exports the URL pool as an RMI remote object.
	 */
	public void start() {
		setStatus(JobStatus.START);

		this.statusExecutor.start();
		this.fetchAdjuster.run();

		for (JobChildController child : childList) {
			child.start();
		}

		try {
			UnicastRemoteObject.exportObject(intervalUrlPool);
		} catch (RemoteException e) {
			// Best-effort: the job keeps running even if RMI export fails.
			logger.error("error when exportObject:", e);
		}
		setStatus(JobStatus.RUNNING);
	}

	/** Placeholder for checkpointing; not implemented. */
	public void checkPoint() {

	}

	/**
	 * Stops the job and unexports the URL pool from RMI.
	 * <p>
	 * NOTE(review): when the update module is enabled (startUpdate == 1) this
	 * is a no-op — presumably such jobs run continuously; confirm intent.
	 */
	public void stop() {
		if (1 == crawlConfig.getStartUpdate()) {
			return;
		}
		setStatus(JobStatus.ENDED);
		statusExecutor.setFinish(true);
		try {
			UnicastRemoteObject.unexportObject(intervalUrlPool, true);
		} catch (RemoteException e) {
			// Best-effort shutdown: log and continue.
			logger.error("error when unexportObject:", e);
		}

	}

	/**
	 * Transitions the job status and notifies listeners; no-op when the
	 * status is unchanged.
	 */
	private void setStatus(int status) {
		if (jobStatus != status) {
			jobStatus = status;
			for (ICrawlListener listener : listenerList) {
				listener.recieveNotification(jobStatus);
			}
		}
	}

	/**
	 * Builds the watchdog that ends the job once the pool is idle and empty.
	 * The finish path only runs when the update module is disabled
	 * (startUpdate == 0), mirroring the early return in {@link #stop()}.
	 */
	private IntervalExecutor getStatusExecutor() {

		return new IntervalExecutor() {
			@Override
			public void execute() {
				boolean finish = !intervalUrlPool.isProcessing()
						&& !intervalUrlPool.hasNext();
				logger.info("check job finish:" + finish);
				if (finish && 0 == crawlConfig.getStartUpdate()) {
					try {
						intervalUrlPool.reportCrawlerResult();
						setStatus(JobStatus.ENDED);
						fetchAdjuster.setFinished(true);
						intervalUrlPool.setFinished(true);

						UnicastRemoteObject.unexportObject(intervalUrlPool,
								true);
					} catch (RemoteException e) {
						logger.error("error when unexportObject:", e);
					}
					statusExecutor.setFinish(true);
				}
			}
		};
	}

	/** Job lifecycle status constants reported to {@link ICrawlListener}s. */
	public interface JobStatus {
		public static final int START = 1;
		public static final int ENDED = 2;
		public static final int RUNNING = 3;
	}
}
