package edu.hit.pact.pgse.crawler.bbs;

import java.net.MalformedURLException;
import java.net.URL;
import java.util.HashMap;
import java.util.concurrent.ConcurrentLinkedQueue;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;

import edu.hit.pact.pgse.bean.RawWebPage;
import edu.hit.pact.pgse.crawler.download.FetchorFactory;
import edu.hit.pact.pgse.crawler.download.WebPageFetcher;
import edu.hit.pact.pgse.crawler.store.FileSystemWebPageStore;
import edu.hit.pact.pgse.crawler.store.WebPageStore;
import edu.hit.pact.pgse.util.AbstractThread;
import edu.hit.pact.pgse.util.ChineseCharEncode;
import edu.hit.pact.pgse.util.Globals;

/**
 * @author napolean
 * 
 */
/**
 * Background thread that periodically re-crawls BBS article threads.
 * <p>
 * Each round it sleeps for {@link #interval} milliseconds, snapshots the shared
 * {@code updateList}, re-downloads every article whose countdown has reached
 * zero (following "next page" links from the last known page), stores the pages
 * via {@link WebPageStore}, adjusts each article's re-crawl priority, and
 * writes the refreshed entries back. The loop runs at most
 * {@link #UPDATETIMES} rounds, then the thread exits.
 */
public class BBSArticleUpdateFetchor extends Thread {

	// Fetcher used to download pages; rebuilt periodically (see updateOnePage)
	// to avoid reusing a stale connection for too long.
	protected WebPageFetcher fetcher = null;
	protected WebPageStore webPageStore;
	// Pages fetched successfully since the fetcher was last rebuilt.
	protected int successedPageNum = 0;
	// Shared list of articles to update; refreshed entries are written back here.
	protected ConcurrentArrayList<ArticleUpdateInfo> updateList;
	protected Log logger;
	// Sleep interval between update rounds, in milliseconds.
	protected int interval;
	// Anchor text that marks the "next page" link on an article page.
	protected static final String nextPageStr = ChineseCharEncode.encode("下一页");
	// Number of completed update rounds so far.
	private int updateTimes = 0;
	// Maximum number of update rounds before the thread stops.
	private final static int UPDATETIMES = 1;

	/**
	 * Creates the update thread.
	 *
	 * @param logger     supplied logger; NOTE(review): currently ignored in
	 *                   favor of the dedicated "updateArticleList" log —
	 *                   confirm no caller relies on this parameter being used
	 * @param threadName name for this thread
	 * @param updateList shared list of articles to keep updated
	 */
	public BBSArticleUpdateFetchor(Log logger, String threadName,
			ConcurrentArrayList<ArticleUpdateInfo> updateList) {
		super(threadName);
		this.logger = LogFactory.getLog("updateArticleList");
		this.fetcher = FetchorFactory.buildWebPageFetcher();
		this.webPageStore = new FileSystemWebPageStore(true);
		this.updateList = updateList;
		// Configured in hours; converted here to milliseconds for sleep().
		this.interval = Globals.INTERVAL_BBS_UPDATE_FETCHOR * 3600 * 1000;
	}

	/** @return the sleep interval between update rounds, in milliseconds */
	public synchronized int getInterval() {
		return interval;
	}

	/** @param interval new sleep interval between update rounds, in milliseconds */
	public synchronized void setInterval(int interval) {
		this.interval = interval;
	}

	/**
	 * Main loop: sleep, snapshot the shared list, update due articles, write
	 * the refreshed entries back. Exits after {@link #UPDATETIMES} rounds or
	 * when interrupted.
	 */
	@Override
	public void run() {
		logger.info("update thread begin to work.");
		while (this.updateTimes < UPDATETIMES) {

			try {
				sleep(interval);
			} catch (InterruptedException e) {
				// FIX: restore the interrupt status and stop the thread instead
				// of swallowing the interrupt and looping on.
				Thread.currentThread().interrupt();
				logger.info("update thread interrupted; exiting.");
				return;
			}

			logger.info("update thread is wake up.");
			ArticleUpdateInfo[] list = this.updateList.toArray(new ArticleUpdateInfo[0]);
			if (list == null) {
				// Defensive only: toArray(T[]) should never return null per the
				// Collection contract, but ConcurrentArrayList is a project
				// type whose implementation is not visible here.
				continue;
			}

			this.updateTimes++;

			// Log the state of every tracked article before this round.
			for (int i = 0; i < list.length; i++) {
				ArticleUpdateInfo info = list[i];
				logger.info(info.getHomepage() + " " + info.getLastpage() + " "
						+ info.getPageNum() + " " + info.getLelfTime());
			}

			// Start each round with a fresh fetcher.
			this.fetcher = FetchorFactory.buildWebPageFetcher();
			for (int i = 0; i < list.length; i++) {
				ArticleUpdateInfo info = list[i];
				// A countdown of zero means this article is due for re-crawl.
				if (info.getLelfTime() == 0) {
					updateOneArticle(info);
				}
				info.decreaseLeftTime();
				list[i] = info;
			}

			// Publish the refreshed entries back to the shared list.
			for (int i = 0; i < list.length; i++) {
				this.updateList.set(i, list[i]);
			}
		}
	}

	/**
	 * Simple re-crawl priority strategy: raise the article's priority when new
	 * pages appeared since the previous round, lower it otherwise.
	 *
	 * @param info article whose priority is adjusted in place
	 */
	public void updateArticleInfo(ArticleUpdateInfo info) {
		if (info.getPageNum() - info.getLastPageNum() > 0) {
			info.upgratePriority();
		} else {
			info.downgratePriority();
		}
	}

	/**
	 * Re-crawls one article starting from its last known page, following
	 * "next page" links until none remain, then records the new page count and
	 * adjusts the article's priority.
	 *
	 * @param articleInfo article to update; its last-page URL, page counts and
	 *                    priority are mutated as pages are discovered
	 */
	public void updateOneArticle(ArticleUpdateInfo articleInfo) {
		logger.info("begin to update the homepage:" + articleInfo.getHomepage().toString());
		URL lastUrl = articleInfo.getLastpage();

		URL url = lastUrl;
		boolean isFirstPage = true;
		// Page count BEFORE this crawl; stored as the baseline for the next
		// round's priority comparison in updateArticleInfo.
		int pageNum = articleInfo.getPageNum();
		while (null != url) {
			logger.info("the current update page is :" + url.toExternalForm());
			url = updateOnePage(url, articleInfo, isFirstPage);
			if (null != url) {
				articleInfo.setLastpage(url);
				isFirstPage = false;
			}
		}
		articleInfo.setLastPageNum(pageNum);
		updateArticleInfo(articleInfo);

		logger.info("completed the update :" + articleInfo.getHomepage());

	}

	/**
	 * Downloads and stores one article page, then returns the URL of the
	 * following page.
	 *
	 * @param url         page to download
	 * @param articleInfo owning article; its page count is incremented when a
	 *                    next page exists
	 * @param isFirstPage whether this is the first page of the current crawl
	 *                    (currently unused here; kept for interface stability)
	 * @return the "next page" URL, or {@code null} when the download failed
	 *         after retries or no next-page link exists
	 */
	public URL updateOnePage(URL url, ArticleUpdateInfo articleInfo, boolean isFirstPage) {
		// Rebuild the fetcher after every 50 successful pages.
		if (this.successedPageNum > 50) {
			logger.info("rebuild the updateFetchor webPageFetchor.");
			this.successedPageNum = 0;
			this.fetcher = FetchorFactory.buildWebPageFetcher();
		}
		int pageNo = articleInfo.getPageNum();
		// FIX: the original retry loop never re-fetched the page — it spun on
		// an unchanged null until the counter expired, so a failed download was
		// never actually retried. Retry the fetch up to 3 more times.
		RawWebPage articlePage = this.fetcher.fetchURL(url, 3, null, null, null);
		int tryTimes = 0;
		while (articlePage == null && tryTimes < 3) {
			tryTimes++;
			articlePage = this.fetcher.fetchURL(url, 3, null, null, null);
		}
		if (articlePage == null) {
			return null;
		}
		this.successedPageNum++;
		logger.info("successed to update the " + pageNo + "th page:" + url.toExternalForm());
		this.webPageStore.storeWebPage(articlePage, articleInfo.getFolderName(),
				pageNo + "_" + this.updateTimes, true);
		HashMap<String, URL> linksAnchor = articlePage.getLinksAnchor();
		if (!linksAnchor.containsKey(nextPageStr)) {
			// Last page reached: no next-page anchor on this page.
			return null;
		}
		articleInfo.addPageNum();
		return linksAnchor.get(nextPageStr);
	}

	/**
	 * TODO: not yet implemented — intended to load update info from a file.
	 *
	 * @param filename path of the file to read (currently ignored)
	 */
	public void readUpdateInfo(String filename) {

	}

}
