/**
 * Project : Looker
 * Author  : solosky
 * File    : UpdaterMonitor.java
 * Date    : 2009-4-11
 * Package : net.looker.monitor.schdule
 * License : Apache License 2.0 
 */
package net.looker.monitor.schdule;

import java.io.IOException;
import java.net.MalformedURLException;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;
import java.util.logging.Logger;

import net.looker.config.SiteConfig;
import net.looker.data.Category;
import net.looker.data.News;
import net.looker.monitor.analyzer.CategoryAnalyzer;
import net.looker.monitor.analyzer.NewsAnalyzer;
import net.looker.monitor.checker.AddChecker;
import net.looker.monitor.checker.Checker;
import net.looker.monitor.checker.DeleteChecker;
import net.looker.monitor.checker.UpdateChecker;
import net.looker.monitor.crawler.Crawler;
import net.looker.monitor.crawler.CrawlerException;
import net.looker.monitor.crawler.CrawlerGroup;
import net.looker.monitor.crawler.WebCrawler;
import net.looker.util.MysqlQuery;

/**
 * 更新监视器
 * 从数据库中读取给定条数的新闻列表，然后根据这些新闻的URL去网站下载新闻正文，判断是否有更新
 * 这里只判断更新，因为栏目监视器可以保证新闻列表的和新闻网站上新闻列表一致，但不能保证新闻正文一致，更新监视器就是保证新闻正文的一致
 * 至于每次更新多少条新闻，这就需要根据新闻栏目更新频率和更新监视器更新时间间隔来确定了，可以在配置文件中指定
 * @author solosky
 *
 */
public class UpdaterMonitor extends Monitor
{
	/**
	 * 站点配置
	 */
	private SiteConfig siteConfig;
	/**
	 * 栏目信息
	 */
	private Category category;
	
	/**
	 * 日志记录对象
	 */
	private  Logger logger;
	
	/**
	 * 栏目新闻管理器
	 */
	private  NewsManager newsManager;
	
	/**
	 * 构造函数
	 * @param cate
	 */
	public UpdaterMonitor(Category cate)
	{
		super(cate.getTitle()+"-更新器",cate.getCategoryHash(),cate.getMonitorInterval());
		category    = cate;
		siteConfig  = cate.getSiteConfig();
		newsManager = new NewsManager();
		logger      = Logger.getLogger("looker");
		
	}
	/* (non-Javadoc)
	 * @see net.looker.monitor.Monitor#startMonitor()
	 */
	@Override
	public void startMonitor()
	{
		if (status==Monitor.STATUS.RUN)
		{
			logger.info("栏目更新器:"+category.getTitle()+"开始检查");
			
			this.action = ACTION.MONITOR;
			
			try {
				//从数据库中获取新闻
				  getFromDB();
				//抓取当前新闻列表
				  crawlDetail();
				  analyzeDetail();
				  checkUpdate();		
				  this.action = ACTION.MONITOR;
			} catch (SQLException e) {
				logger.warning("栏目更新器:SQL错误->"+category.getTitle()+e.getMessage());
			} catch (MalformedURLException e) {
				logger.warning("栏目更新器:URL错误->"+category.getTitle()+e.getMessage());
			} catch (InterruptedException e) {
				logger.warning("栏目更新器:线程中断->"+category.getTitle()+e.getMessage());
			} finally {
				this.action = ACTION.WAITING;
				Logger.getLogger("looker").info("栏目更新器:"+category.getTitle()+"完成检查");
			}
		}
	}
	
	/**
	 * 获取新闻列表
	 * @throws MalformedURLException 
	 * @throws InterruptedException 
	 */
	public  void crawlDetail() throws MalformedURLException, InterruptedException
	{		
		 ArrayList<News> newsList = newsManager.getLastNewsList();
		 
		CrawlerGroup crawlerGroup = new CrawlerGroup();
		Iterator<News> it = newsList.iterator();
		String charset = siteConfig.get("site.charset");
		while (it.hasNext())
		{
			crawlerGroup.addCrawler((new WebCrawler(it.next().getUrl(), charset)));
		}
		crawlerGroup.startAll();
		crawlerGroup.waitAllDone();
		
		//TODO ..这里几段代码写得很不爽。。。。
		
		//现在需要把现在抓取下来的新闻正文放在一个新的新闻列表中，用于和数据库中的新闻比较正文的hash，判断新闻是否更新
		//不过这里的新闻URL放在原来的数组里，而抓取下来的新闻正文源代码放在抓取器里，所以这里需要查找，为了提高效率，这里先建立一个URL=>news的HASHmap
		HashMap<String,News> oldNewsHash = new HashMap<String, News>();
		it = newsList.iterator();
		News tmpNews = null; 
		while (it.hasNext())
		{
			tmpNews = it.next();
			oldNewsHash.put(tmpNews.getUrl(),tmpNews);
		}
		
		//现在把新闻标题，url,下载来的新闻正文放在一个新闻列表里，再分析出新闻正文
		ArrayList<News> curNewsList = new ArrayList<News>();
		Iterator<Crawler> cit = crawlerGroup.getCrawlerList().iterator();
		Crawler tmpCrawler = null;
		News    curNews = null;
		while(cit.hasNext())
		{
			tmpCrawler = cit.next();
			tmpNews = oldNewsHash.get(tmpCrawler.getUrl());
			
			curNews = new News();
			curNews.setAnchor(tmpNews.getAnchor());
			curNews.setUrl(tmpCrawler.getUrl());
			curNews.setSource( (String) tmpCrawler.getCrawled());
			curNews.setCategory(category);
			curNewsList.add(curNews);
		}
		
		//设置抓取下来的新闻为最新新闻
		newsManager.setCurNewsList(curNewsList);
		
	}
	
	/**
	 * 分析新闻列表
	 */
	public void analyzeDetail( )
	{
		ArrayList<News> newsList = newsManager.getCurNewsList();
		Iterator<News> it = newsList.iterator();
		NewsAnalyzer analyzer = null;
		while (it.hasNext())
		{
			analyzer = new NewsAnalyzer(it.next());
			analyzer.analyze();
		}
	}

	/**
	 * 检查新闻变化
	 */
	public void checkUpdate()
	{
		new UpdateChecker(newsManager).check();
	}
	
	/**
	 * 从数据库中获取指定条数的新闻
	 * @throws SQLException 
	 */
	private  void getFromDB() throws SQLException
	{
		String sql="SELECT title,url,contenthash FROM #news WHERE categoryhash ='"+category.getCategoryHash()+"' ORDER BY date DESC LIMIT "+category.getUpdaterNewsCnt();
		ResultSet rs = MysqlQuery.getInstance().query(sql);
		ArrayList<News> newsList = new ArrayList<News>();
		News curNews = null;
		while (rs.next())
		{
			curNews = new News();
			curNews.setAnchor(rs.getString("title"));		//新闻标题
			curNews.setUrl(rs.getString("url"));			//新闻地址
			curNews.setContenthash(rs.getString("contenthash"));	//新闻正文
			curNews.setCategory(category);					//栏目
			
			newsList.add(curNews);
		}
		//新闻管理器设置当前新闻列表
		newsManager.setCurNewsList(newsList);
		//更新新闻管理器，让当前新闻列表交换为上一次新闻
		newsManager.update();
	}
	



}
