/*
 * Copyright (c) 2010 CCX(China) Co.,Ltd. All Rights Reserved.
 *
 * This software is the confidential and proprietary information of
 * CCX(China) Co.,Ltd. ("Confidential Information").
 * It may not be copied or reproduced in any manner without the express 
 * written permission of CCX(China) Co.,Ltd.
 * 
 *	Created on 2010-12-22 下午03:07:59
 */
package cn.com.ccxe.core.crawl;

import java.util.Timer;
import java.util.TimerTask;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import cn.com.ccxe.core.crawl.article.ArticleDataUpdater;
import cn.com.ccxe.core.crawl.article.ArticleService;
import cn.com.ccxe.core.crawl.listing.DataUpdater;
import cn.com.ccxe.core.crawl.listing.ListPageService;
import cn.com.ccxe.core.db.DatabaseManager;
import cn.com.ccxe.core.db.ListPageDB;
import cn.com.ccxe.core.db.ListPageWorkQueues;
import cn.com.ccxe.core.db.WebDB;
import cn.com.ccxe.core.db.WebWorkQueues;
import cn.com.ccxe.core.index.Indexer;

/**
 * Crawl controller responsible for the overall job scheduling. The workflow is:
 * <ul>
 * <li>Read the database and put every article-list entry into the list-page work queue.</li>
 * <li>Fetch list pages based on the entries in the list-page work queue.</li>
 * <li>Parse each list page, split the extracted links into list-page links and article
 * links, and store them in their respective stores.</li>
 * <li>Fetch the articles.</li>
 * <li>Parse the articles and process the extracted information.</li>
 * </ul>
 *
 * Steps 2 and 3 form a loop that repeats until the list pages yield no more data.
 *
 * <p>Singleton; obtain the instance via {@link #getInstance()}.</p>
 *
 * @author hetao
 */
public class CrawlContorl {

	// SLF4J convention: one static final logger per class.
	private static final Logger logger = LoggerFactory.getLogger(CrawlContorl.class);

	// Eagerly-initialized singleton instance; final guarantees safe publication.
	private static final CrawlContorl INSTANCE = new CrawlContorl();

	// Root directory for crawled content; configured externally via setContentPath().
	private static String CONTENT_PATH;

	private DatabaseManager 	databaseManager = DatabaseManager.getInstance();
	private DataUpdater 		dataUpdater = new DataUpdater();
	private ListPageService 	listPageService = new ListPageService(new ThreadPoolService(5));
	private ArticleService 		articleService = new ArticleService(new ThreadPoolService(10));
	private ArticleDataUpdater 	articleDataUpdater = new ArticleDataUpdater();
	private ListPageWorkQueues 	listPageWorkQueues = ListPageWorkQueues.getInstance();
	private ListPageDB 			listPageDB = ListPageDB.getInstance();
	private WebWorkQueues 		webWorkQueues = WebWorkQueues.getInstance();
	private WebDB 				webDB = WebDB.getInstance();

	// Periodic-update intervals in milliseconds.
	private long 				listPageUpdateTime = 2*60*1000;
	private long 				articleUpdateTime = 5*60*1000;

	// Single-threaded scheduler that runs the index builder.
	private ScheduledExecutorService indexer = Executors.newSingleThreadScheduledExecutor();

	// Kept as fields (instead of locals in start1()) so exit() can cancel them;
	// java.util.Timer threads are non-daemon and would otherwise keep the JVM alive.
	private Timer listPageTimer;
	private Timer articleTimer;

	private CrawlContorl(){}

	/**
	 * Returns the singleton controller instance.
	 *
	 * @return the single {@code CrawlContorl} instance (never {@code null})
	 */
	public synchronized static CrawlContorl getInstance() {
		return INSTANCE;
	}

	/**
	 * Shuts the system down: stops the crawl services, the indexer and the
	 * periodic update timers, then drains the work queues and closes the
	 * database. The queue/database cleanup in the {@code finally} block runs
	 * even if the service shutdown fails.
	 */
	public void exit() {
		try {
			logger.info("准备退出...............");
			articleService.shutDown();
			listPageService.shutDown();
			indexer.shutdown();
			// Cancel the periodic timers; their non-daemon threads would
			// otherwise prevent the JVM from terminating after shutdown.
			if (listPageTimer != null) {
				listPageTimer.cancel();
			}
			if (articleTimer != null) {
				articleTimer.cancel();
			}
			logger.info("队列中所有任务全部完成，准备退出....");
			sleep();
			if (articleService.isInterrupted() && listPageService.isInterrupted() && indexer.isShutdown()) {
				logger.info("活动线程已经关闭.....");
			}
		} catch (Exception e) {
			// Log the cause instead of swallowing it, so shutdown failures are diagnosable.
			logger.error("系统无法正常退出....", e);
		} finally {
			listPageWorkQueues.remove();
			webWorkQueues.remove();
			databaseManager.close();
			logger.info("数据库已经关闭，系统完成退出.....");
		}

	}

	public void start() {}

	/**
	 * Starts the crawl pipeline: primes the list-page queue, launches the
	 * list-page and article services plus the indexer, and schedules the two
	 * periodic queue-update timers (first run after 10 s).
	 */
	public void start1() {
		dataUpdater.init();
		listPageService.start();
		sleep();
		articleDataUpdater.process();
		sleep();
		articleService.start();
		indexer.execute(new Indexer());
		logger.info("系统开始运行.....");

		listPageTimer = new Timer();
		listPageTimer.schedule(new ListPageUpdateTimer(), 10000, listPageUpdateTime);

		articleTimer = new Timer();
		articleTimer.schedule(new ArticleUpdateTimer(), 10000, articleUpdateTime);
	}

	/** Pauses the current thread for 10 seconds to let the previous stage settle. */
	private void sleep() {
		try {
			TimeUnit.SECONDS.sleep(10);
		} catch (InterruptedException e) {
			// Restore the interrupt flag so callers can observe the interruption.
			Thread.currentThread().interrupt();
		}
	}

	/** Periodic task that refills the list-page work queue from the persistent store. */
	class ListPageUpdateTimer extends TimerTask {

		@Override
		public void run() {
			logger.info("列表更新器开始工作.....");
			logger.info("现在列表持久队列中有 {} 条数据，工作队列中有 {} 条数据。",listPageDB.size(),listPageWorkQueues.size());
			dataUpdater.updateWorkQueue();
		}
	}

	/** Periodic task that refills the article work queue from the persistent store. */
	class ArticleUpdateTimer extends TimerTask {
		@Override
		public void run() {
			logger.info("文章更新器开始工作.....");
			logger.info("现在文章持久队列中有 {} 条数据，工作队列中有 {} 条数据。",webDB.size(),webWorkQueues.size());
			articleDataUpdater.process();
		}
	}

	/**
	 * Sets the root directory used for storing crawled content.
	 *
	 * @param path content root path
	 */
	public static void setContentPath(String path) {
		CONTENT_PATH = path;
	}

	/**
	 * Returns the content root path previously set via {@link #setContentPath(String)},
	 * or {@code null} if it has not been configured.
	 */
	public static String getContentPath() {
		return CONTENT_PATH;
	}
}
