/**
 * Looker  新闻整合系统
 * Author  :solosky
 * File    :NewsImporter.java
 * Date    :May 22, 2009
 * License : Apache License 2.0 
 */
package net.looker.util;

import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.logging.FileHandler;
import java.util.logging.Logger;
import java.util.logging.SimpleFormatter;

import net.looker.config.ConfigManager;
import net.looker.config.SiteConfig;
import net.looker.data.Category;
import net.looker.data.DataManager;
import net.looker.data.Lucene2DataDriver;
import net.looker.data.MysqlDataDriver;
import net.looker.data.News;
import net.looker.monitor.analyzer.CategoryAnalyzer;
import net.looker.monitor.crawler.CrawlerException;
import net.looker.monitor.crawler.WebCrawler;
import net.looker.monitor.modifier.AddModifier;
import net.looker.monitor.modifier.DispatcherManager;
import net.looker.work.Work;
import net.looker.work.WorkerPoolFactory;

import org.jdom.JDOMException;

/**
 * @author solosky
 *
 */
public class NewsImporter
{
	
	/**
	 * List of list-crawl works built from the user's selection (one per category).
	 */
	private ArrayList<Work> workList;
	
	/**
	 * Logger for this importer ("looker" logger).
	 */
	private Logger logger;
	
	/**
	 * Count of crawl works that have finished; also serves as the
	 * wait/notify monitor between the works and {@link #waitAllDone()}.
	 */
	private AtomicInteger atomicInt;
	
	/**
	 * Constructor — initializes the work list and the finished-work counter.
	 */
	public NewsImporter()
	{
		workList  = new ArrayList<Work>();
		atomicInt = new AtomicInteger(0);
	}
	
	/**
	 * Submits every prepared crawl work to the "looker" worker pool.
	 * @throws InterruptedException if assigning a work is interrupted
	 */
	public void startImport() throws InterruptedException
	{
		for (Work work : workList) {
			WorkerPoolFactory.get("looker").assignWork(work);
		}
	}

	/**
	 * Loads the global and per-site configuration and synchronizes the
	 * local configuration with the database.
	 * @param lookerConfigFile path of the global configuration file (e.g. looker.xml)
	 * @throws JDOMException if the XML configuration cannot be parsed
	 * @throws IOException   if a configuration file cannot be read
	 */
	public void loadConfig(String lookerConfigFile) throws JDOMException, IOException
	{
		ConfigManager cm = ConfigManager.getInstance();
		// load the global (category) configuration
		cm.loadLookerConfig(lookerConfigFile);
		// load the per-site configurations
		cm.loadSiteConfigs();
		// synchronize local configuration with the database configuration
		cm.SynchConfig();
		Logger.getLogger("looker").info("成功加载配置");
	}
	
	/**
	 * Registers the data drivers (MySQL and Lucene) with the data manager.
	 */
	public void initDataManager()
	{
		DataManager.getInstance().addDataDriver(new MysqlDataDriver());
		DataManager.getInstance().addDataDriver(new Lucene2DataDriver());
	}
	
	/**
	 * Initializes the "looker" logger with a file handler.
	 * FIX: the original dereferenced the handler even when its construction
	 * failed, causing a NullPointerException at fh.setFormatter(...);
	 * now the file-handler setup is skipped on failure (default console
	 * logging still works).
	 * @param savepath path of the log file
	 */
	public void initLogger(String savepath)
	{
		logger = Logger.getLogger("looker");
		FileHandler fh = null;
		try {
			fh = new FileHandler(savepath);
		} catch (SecurityException e) {
			logger.severe("日志安全错误");
		} catch (IOException e) {
			logger.severe("日志文件读取错误");
		}
		if (fh == null) {
			// handler could not be created — keep running without a log file
			return;
		}
		fh.setFormatter(new SimpleFormatter());
		logger.addHandler(fh);
		Logger.getLogger("looker").info("成功初始化日志记录:"+savepath);
	}
	
	/**
	 * Creates the "looker" worker thread pool and waits briefly so all
	 * worker threads can start.
	 * FIX: on interruption the interrupt status is restored instead of
	 * only printing the stack trace, so callers can observe it.
	 * @param maxWorkerCnt maximum number of worker threads
	 */
	public void initWorkerPool(int maxWorkerCnt)
	{
		WorkerPoolFactory.create("looker" ,maxWorkerCnt); //TODO ...
		logger.info("成功建立线程池:looker - " + maxWorkerCnt);
		logger.info("等待所有线程启动完毕，请稍候....(等待5秒)");
		try {
			Thread.sleep(5000);
		} catch (InterruptedException e) {
			// restore the interrupt flag instead of swallowing the interruption
			Thread.currentThread().interrupt();
		}
	}
	
	/**
	 * Prints the current site/category configuration and reads the category
	 * numbers and page counts to import from standard input, building the
	 * work list after the user confirms.
	 * FIX: null readLine() results (closed stdin), malformed numeric tokens
	 * (NumberFormatException), tokens without both parts ("-" alone gave an
	 * ArrayIndexOutOfBoundsException) and an empty confirmation line
	 * (StringIndexOutOfBoundsException) no longer crash the loop.
	 * @throws IOException if reading from standard input fails
	 */
	public void initImport() throws IOException
	{
		int cateNo = 1;
		HashMap<Integer,Category> cateHash = new HashMap<Integer,Category>();
		System.out.println("===============================================");
		Iterator<SiteConfig> it = ConfigManager.getInstance().getSiteConfigList().iterator();
		while (it.hasNext())
		{
			SiteConfig siteConfig = it.next();
			System.out.println("新闻站点:"+siteConfig.get("site.title")+"--"+siteConfig.get("site.url"));
			System.out.println("栏目:");
			Iterator<Category> cateit = siteConfig.getCategoryList().iterator();
			while (cateit.hasNext())
			{
				Category category = cateit.next();
				System.out.println("\t["+cateNo+"]\t"+category.getTitle()+'\t'+category.getIndex());
				cateHash.put(cateNo, category);
				cateNo++;
			}
			System.out.println("------------------");
		}
		System.out.println("===============================================");
		BufferedReader reader = new BufferedReader(new InputStreamReader(System.in));
		while (true)
		{
			System.out.println("请输入要导入的栏目编号和页数(如2-12 3-4):");
			workList.clear();
			String line = reader.readLine();
			if (line == null)
				return;                       // stdin closed — nothing to import
			if (line.length() == 0)
				continue;
			String[] workInfo = line.split(" ");
			for (int i = 0; i < workInfo.length; i++)
			{
				String[] workInfos = workInfo[i].split("-");
				if (workInfos.length < 2)
					continue;                 // token has no "category-pages" shape
				try {
					Category cate = cateHash.get(Integer.parseInt(workInfos[0]));
					if (cate != null)
						workList.add(new CategoryListCrawlWork(cate, Integer.parseInt(workInfos[1])));
				} catch (NumberFormatException e) {
					// silently skip tokens that are not "number-number"
				}
			}
			System.out.println("请确认一下任务，确定请输入y,重新输入请输入n");
			System.out.println("栏目\t页码\tURL");
			// print the planned tasks for user confirmation
			for (Work work : workList)
			{
				CategoryListCrawlWork cw = (CategoryListCrawlWork) work;
				System.out.println(cw.getCategory().getTitle()+'\t'+cw.getPages()+'\t'+cw.getCategory().getIndex());
			}
			System.out.print("确认?(y,n):");
			String confirmStr = reader.readLine();
			if (confirmStr == null)
				return;                       // stdin closed
			if (confirmStr.length() > 0 && confirmStr.charAt(0) == 'y')
				break;
		}
	}
	
	/**
	 * Blocks until every submitted crawl work has reported completion.
	 * The AtomicInteger doubles as the monitor: each finished work
	 * increments it and calls notifyAll() while holding its lock.
	 * @throws InterruptedException if the wait is interrupted
	 */
	public void waitAllDone() throws InterruptedException
	{
		synchronized(atomicInt)
		{
			// keep waiting while fewer works have finished than were submitted
			while(atomicInt.get()!=workList.size())
				atomicInt.wait();
			
			logger.info("所有的列表抓取线程完成。");
		}
	}
	
	/**
	 * Finishes the import: shuts down the worker pool and stops the
	 * change dispatcher.
	 * @throws InterruptedException if the shutdown is interrupted
	 */
	public void finishImport() throws InterruptedException
	{
		logger.info("等待所有工作线程完成...");
		WorkerPoolFactory.get("looker").shutdown();
		logger.info("线程池已经关闭..");
		DispatcherManager.stopDispatch();
		logger.info("关闭更改分发器完成...");
		
		logger.info("所有新闻导入任务已经完成..");
	}

	/**
	 * Work that crawls the list pages of one category and dispatches the
	 * analyzed news items for addition.
	 * (Non-static: it reads the enclosing importer's logger and counter.)
	 * @author solosky
	 */
	public class CategoryListCrawlWork implements Work
	{

		/**
		 * Category to import.
		 */
		private Category category;
		
		/**
		 * Number of list pages to crawl.
		 */
		private int   pages;

		/**
		 * @param category category to import
		 * @param pages    number of list pages to crawl
		 */
		public CategoryListCrawlWork(Category category,int pages)
		{
			this.category = category;
			this.pages    =  pages;
		}

		/* (non-Javadoc)
		 * @see net.looker.work.Work#isFinished()
		 */
		@Override
		public boolean isFinished() {
			return false;
		}

		/* (non-Javadoc)
		 * @see net.looker.work.Work#startWork()
		 */
		@Override
		public void startWork()
		{
			// crawl every requested list page
			for (int i = 1; i <= pages; i++) {
				try {
					crawlList(i);
				} catch (InterruptedException e) {
					// FIX: restore the interrupt flag and stop crawling
					// instead of continuing with the remaining pages
					Thread.currentThread().interrupt();
					logger.info("列表页抓取线程中断.."+e.getMessage());
					break;
				} catch (IOException e) {
					logger.info("列表页抓取线程IO错误.."+e.getMessage());
				} catch (CrawlerException e) {
					logger.info("列表页抓取线程抓取器异常.."+e.getMessage());
				}
			}
			// notify the main thread that this work has finished
			synchronized(atomicInt)
			{
				atomicInt.getAndIncrement();
				atomicInt.notifyAll();
			}
		}
		
		/**
		 * Crawls one list page, analyzes the news links on it and queues
		 * each news item for addition via the dispatcher.
		 * @param page 1-based page number to crawl
		 * @throws InterruptedException if the crawl is interrupted
		 * @throws IOException          on network/IO failure
		 * @throws CrawlerException     on crawler failure
		 */
		public void crawlList(int page) throws InterruptedException, IOException, CrawlerException
		{
			String charset = category.getSiteConfig().get("site.charset");
			
			// crawl the news list page
			WebCrawler crawler = new WebCrawler(category.getURL(page),charset);
			crawler.crawl();
			String html = (String) crawler.getCrawled();
			
			// analyze the list items out of the page
			CategoryAnalyzer categoryAnalyzer = new CategoryAnalyzer(html, category);
			categoryAnalyzer.analyze();
			// the news items found on this page
			ArrayList<News> newsList = (ArrayList<News>)categoryAnalyzer.getAnalyzed();
			// queue each item for addition
			Iterator<News> newsIt = newsList.iterator();
			while(newsIt.hasNext())
				DispatcherManager.add(new AddModifier(newsIt.next()));
			//logger.info("栏目监视器:共分析出新闻 "+newsList.size()+"条-"+category.getIndex());
		}
		
		public Category getCategory()
		{
			return category;
		}
		
		public int  getPages()
		{
			return pages;
		}
	}
		
	/**
	 * Entry point: args[0] = global config file (required),
	 * args[1] = worker count (optional, default 50),
	 * args[2] = log file path (optional, default ./newsImport.txt).
	 * @param args command-line arguments
	 * @throws IOException          on configuration/stdin IO failure
	 * @throws JDOMException        on configuration parse failure
	 * @throws InterruptedException if the import is interrupted
	 */
	public static void main(String[] args) throws JDOMException, IOException, InterruptedException
	{
		if(args.length==0){
			System.out.println("必须给定全局配置文件. 如 looker.xml");
			return ;
		}
		NewsImporter ni = new NewsImporter();
		ni.loadConfig(args[0]);
		ni.initDataManager();
		ni.initLogger(args.length>=3 ? args[2] : "./newsImport.txt");
		ni.initImport();
		ni.initWorkerPool(args.length>1 ? Integer.parseInt(args[1]) : 50);
		ni.startImport();
		ni.waitAllDone();
		ni.finishImport();
	}

}
