package com.kd.crawler.parser.statistic;

import java.util.HashMap;
import java.util.List;

import org.apache.commons.lang3.StringUtils;
import org.apache.log4j.Logger;

import com.kd.crawler.dao.AnalyzerDetailsDao;
import com.kd.crawler.dao.OriginalHtmlDao;
import com.kd.crawler.dao.StatisticDocumentDao;
import com.kd.crawler.dao.StatisticEntryUrlDao;
import com.kd.crawler.entity.CrawlerEntry;
import com.kd.crawler.entity.Document;
import com.kd.crawler.parser.HtmlParser;
import com.kd.crawler.parser.HtmlParserImpl;
import com.kd.crawler.parser.StandardAnalyzer;

public class ParserDetails {

	private static final Logger logger = Logger.getLogger(ParserDetails.class);

	/**
	 * Batch driver: repeatedly fetches un-parsed crawler entries and runs both
	 * the new and the legacy parser over each one, recording timing and result
	 * statistics for comparison. Entries with blank HTML are flagged done
	 * without parsing.
	 */
	public static void main(String[] args) {
		int start = 0;
		int limit = 12;
		while (true) {
			// NOTE(review): 'start' is never advanced; this assumes query() only
			// returns rows not yet flagged "Y" — confirm against OriginalHtmlDao,
			// otherwise this loop re-reads the same page forever.
			List<CrawlerEntry> ces = OriginalHtmlDao.getInstance().query(start, limit);
			if (null == ces || ces.isEmpty()) {
				logger.info("select completed!");
				break;
			}
			logger.info("start parse number: " + ces.size());
			for (CrawlerEntry ce : ces) {
				if (StringUtils.isBlank(ce.getHtml())) {
					// Nothing to parse; flag the row so it is not fetched again.
					OriginalHtmlDao.getInstance().update(ce.getId(), "Y");
					continue;
				}
				//new version
				parseForNew(ce, "N");
				//old version
				parseForOld(ce, "O");
				OriginalHtmlDao.getInstance().update(ce.getId(), "Y");
			}
		}
	}

	// Cache of legacy parser instances keyed by parser name (single-threaded use from main).
	private static final HashMap<String, HtmlParser> parserCache = new HashMap<String, HtmlParser>();

	/**
	 * Parses one entry with the legacy {@link HtmlParserImpl} and records
	 * statistics under the given analyzer flag. A parser failure is logged and
	 * recorded against an empty {@link Document} instead of aborting the batch
	 * (mirrors {@link #parseForNew}).
	 *
	 * @param ce           the crawler entry whose HTML should be parsed
	 * @param analyzerFlag tag written with every statistic row (e.g. "O")
	 */
	public static void parseForOld(CrawlerEntry ce, String analyzerFlag) {
		logger.info("old version parser start for url[" + ce.getUrl() + "] at id: " + ce.getId());
		long startTime = System.currentTimeMillis();
		Document meta = null;
		try {
			HtmlParser oldParser = parserCache.get(ce.getParserName());
			if (oldParser == null) {
				oldParser = new HtmlParserImpl(ce.getParserName());
				parserCache.put(ce.getParserName(), oldParser);
			}
			meta = oldParser.parse(ce);
		} catch (Exception e) {
			// Keep the batch running; previously an exception here crashed main().
			logger.error("old version parser failed for url[" + ce.getUrl() + "] at id: " + ce.getId(), e);
		}
		if (null == meta) {
			meta = new Document();
		}
		int spentTime = (int) (System.currentTimeMillis() - startTime);
		recordStatistics(ce, meta, analyzerFlag, spentTime);
		logger.info("old version parser has completed for url[" + ce.getUrl() + "] at id: " + ce.getId());
	}

	// Shared instance for the new parser path, reused across all entries.
	private static final HtmlParser parser = new StandardAnalyzer();

	/**
	 * Parses one entry with the new {@link StandardAnalyzer} and records
	 * statistics under the given analyzer flag. Parser failures (or a null
	 * parse result) are recorded against an empty {@link Document}.
	 *
	 * @param ce           the crawler entry whose HTML should be parsed
	 * @param analyzerFlag tag written with every statistic row (e.g. "N")
	 */
	public static void parseForNew(CrawlerEntry ce, String analyzerFlag) {
		logger.info("new version parser start for url[" + ce.getUrl() + "] at id: " + ce.getId());
		long startTime = System.currentTimeMillis();
		Document meta = null;
		try {
			meta = parser.parse(ce);
		} catch (Exception e) {
			// Log with full stack trace via log4j instead of printStackTrace().
			logger.error("new version parser failed for url[" + ce.getUrl() + "] at id: " + ce.getId(), e);
		}
		if (null == meta) {
			// Also guards the non-throwing null-return case, which previously
			// passed a null Document to the statistic DAOs.
			meta = new Document();
		}
		int spentTime = (int) (System.currentTimeMillis() - startTime);
		recordStatistics(ce, meta, analyzerFlag, spentTime);
		logger.info("new version parser has completed for url[" + ce.getUrl() + "] at id: " + ce.getId());
	}

	/**
	 * Persists the per-entry statistics shared by both parser versions:
	 * analyzer timing detail, the per-entry-url row (insert on first sight,
	 * update afterwards), and the parsed document content.
	 */
	private static void recordStatistics(CrawlerEntry ce, Document meta, String analyzerFlag, int spentTime) {
		//start to statistic for number to parse title, date, author and content
		AnalyzerDetailsDao.getInstance().insert(meta, ce.getId(), analyzerFlag, spentTime);
		if (null == StatisticEntryUrlDao.getInstance().getLong(ce.getEntryId(), analyzerFlag)) {
			StatisticEntryUrlDao.getInstance().insert(ce, meta, 1, analyzerFlag);
		} else {
			StatisticEntryUrlDao.getInstance().update(ce, meta, analyzerFlag);
		}
		// statistic for document content
		StatisticDocumentDao.getInstance().insert(ce, meta, analyzerFlag);
	}
}
