/**
 *
 * Copyright Pact Lab of H.I.T.
 *
 * Designed and Implemented by Grid Researching Group, 
 * Pact Lab, Harbin
 * 
 * This Project is part of the national 973 Project:
 * Internet Based Virtual Computing Environment
 *
 * http://pact518.hit.edu.cn
 * 
 * Author:       Meteor <meteorlxk@gmail.com> 
 * Copyright:    pact518 
 * Version:      1.0
 * Created:      2009-5-5 
 * LastModified: 2009-12-03 Changed lines 170-176: no longer performs a load operation before storing
 */
package edu.hit.pact.pgse.crawler.web;


import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.PrintWriter;
import java.net.MalformedURLException;
import java.net.URL;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.LinkedBlockingDeque;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.PriorityBlockingQueue;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;

import edu.hit.pact.pgse.bean.RawWebPage;
import edu.hit.pact.pgse.bean.TaskPieceProperty;
import edu.hit.pact.pgse.crawler.CrawlerManager;
import edu.hit.pact.pgse.crawler.download.WebPageFetcher;
import edu.hit.pact.pgse.crawler.finish.FinishChecker;
import edu.hit.pact.pgse.crawler.store.WebPageStore;
import edu.hit.pact.pgse.crawler.util.DuplicateUrlFilter;
import edu.hit.pact.pgse.crawler.util.FileFilter;
import edu.hit.pact.pgse.crawler.util.RobotFileManager;
import edu.hit.pact.pgse.crawler.util.RobotFileScanner;
import edu.hit.pact.pgse.util.AbstractThread;
import edu.hit.pact.pgse.util.Globals;
import edu.hit.pact.pgse.util.Md5Sum;
import edu.hit.pact.pgse.util.Utilities;

/**
 * @author meteorlxk
 *
 */
public class PageFetchProcessor extends AbstractThread{

	private Log logger;
	
	private TaskPieceProperty taskPieceProperty;
	private PriorityBlockingQueue<UrlUnit> cleanUrlQueue;//use to keep the order of url, according to the number of backlinks
	private ConcurrentHashMap<String, UrlUnit> urlUnitMap;//use to trace the urlUnit and modify the backlinks
	private LinkedBlockingQueue<RawWebPage> fetchedPageQueue;
	private CrawlerManager crawlerManager;
	private WebPageFetcher fetcher;
	private WebPageStore webPageStore;
	private PagePrepareProcessor pagePrepare;
	
	private PrintWriter urlRecorder= null;
	private SimpleDateFormat formater = new SimpleDateFormat("yyyy-MM-dd");
	

	public PageFetchProcessor(CrawlerManager crawlerManager,TaskPieceProperty pieceProperty,
			WebPageFetcher fetcher, LinkedBlockingQueue<RawWebPage> fetchedPageQueue, 
			WebPageStore webPageStore, PagePrepareProcessor pagePrepare) {
		super(LogFactory.getLog(PageFetchProcessor.class));
		this.crawlerManager = crawlerManager;
		this.logger = LogFactory.getLog(PageFetchProcessor.class);
		this.taskPieceProperty = pieceProperty;
		this.cleanUrlQueue = pieceProperty.getCleanUrlQueue();
		this.urlUnitMap = pieceProperty.getUrlUnitMap();
		
		this.fetcher = fetcher;
		this.fetchedPageQueue = fetchedPageQueue;
		this.webPageStore = webPageStore;
		this.pagePrepare = pagePrepare;
		/*
		 * Construct url recorder
		 */
		try {
			String host = (new URL(pieceProperty.getPiece().getHomePage())).getHost();
			Long host_md5 = Md5Sum.getMd5LongSum(host);
			String dir = formater.format(new Date());
			Utilities.createdDir(dir);
			this.urlRecorder = new PrintWriter(new FileOutputStream(dir+"/"+host_md5.toString()+".txt", true), true);
		} catch (FileNotFoundException e) {
			e.printStackTrace();
		} catch (MalformedURLException e) {
			e.printStackTrace();
		}
		logger.info("I am a page fetch processor!");
	}

	/* (non-Javadoc)
	 * @see edu.hit.pact.pgse.util.AbstractThread#mainRun()
	 */
	@Override
	public void mainRun() {
		//检查是否完成
		if (FinishChecker.isFinish(logger, taskPieceProperty)){
			logger.info("since this piece is canceled or finished, clear the clean url queue");
			cleanUrlQueue.clear();
			logger.info("kill this page fetch processor");
			setDeadLoop(false);
			return;
		}
		try {
			UrlUnit urlUnit  = this.cleanUrlQueue.take();
//			this.urlUnitMap.remove(urlUnit.getLink());
			String urlStr = urlUnit.getLink();
			//very important, when take a UrlUnit from the queue, it increases the semaphore
			//when finish download this UrlUnit, it decreases the semaphore
			taskPieceProperty.incrementSemaphore();

			/*
			 * fetch web page
			 */
			logger.info("begin to fetch web page :" + urlStr);
			
			WebTaskPiece webTaskPiece = (WebTaskPiece)taskPieceProperty.getPiece();
			RawWebPage page = this.fetcher.fetchURL(urlUnit.getLink(), urlUnit.getUrl(), webTaskPiece.getType(), webTaskPiece.getBelongTo(), webTaskPiece.getChannel(), webTaskPiece.getPlace()); 
			urlRecorder.println(urlUnit.getLink() +"\t" + urlUnit.getCurrDepth()+"\t" + urlUnit.getBacklinks());
			
			if (page != null) {
				logger.info("fetch web page successed:" + urlUnit.getLink());
				taskPieceProperty.incrementSucceededPageNum();
				crawlerManager.incrementLatestPageNum();
			
				this.webPageStore.storeWebPage(page);
	//			this.fetchedPageQueue.put(page);
				
				ArrayList<String> interlinks = page.getCommentLinks();//get all sublinks

				if (interlinks != null) {
					pagePrepare.filterLinks(interlinks, urlUnit);
				}//end if
			} else {
				logger.error("fetch web page failed:");
				taskPieceProperty.incrementFailedPageNum();
			}	
		} catch (InterruptedException e) {
			logger.error("put url into todoUrlQueue meets exception : ", e);
		}//end try
		
		//very important, when take a UrlUnit from the queue, it increases the semaphore
		//when finish download this UrlUnit, it decreases the semaphore
		taskPieceProperty.decrementSemaphore();

	}
	
//	public void filterLinks(List<String> internalLinks, UrlUnit currentUrlUnit) {
//		
//		logger.info("PagePrepareProcessor urlFilter");
//		
//		// 检查是否完成
//		if (FinishChecker.isFinish(logger, taskPieceProperty)) {
//			logger.info("since this piece is canceled or finished, clear the to do url queue");
//			cleanUrlQueue.clear();
//			logger.info("kill this page prepare processor");
//			return;
//		}
//
//		/*
//		 * We don't need to fetcher if url's depth larger than maxDepth.
//		 */
//		WebTaskPiece webTaskPiece = (WebTaskPiece)taskPieceProperty.getPiece();
//		if (currentUrlUnit.getCurrDepth() >= webTaskPiece.getMaxDepth()) {
//			logger.debug("the url's depth is larger than "+ webTaskPiece.getMaxDepth()+ ", so drop it.");
//			return;
//		}// end if
//
//		Pattern includePattern = Pattern.compile(((WebTaskPiece)taskPieceProperty.getPiece()).getDocURLRegexInclude());
//		Matcher includeMatcher = null;
//		
////		Pattern excludePattern = Pattern.compile(((WebTaskPiece)taskPieceProperty.getPiece()).getDocURLRegexExclude());
////		Matcher excludeMatcher = null;
////		
////		String dateString = ((WebTaskPiece)taskPieceProperty.getPiece()).getDateString();
//		
//		for (String internalLink : internalLinks){
//			if (urlUnitMap.containsKey(internalLink)){
//				urlUnitMap.get(internalLink).incrementBackLinks();
//			}
//			
//			/*
//			 * filter duplicate url most url is duplicate, so check filter duplicate
//			 * should execute first!
//			 */
//			if (duplicateUrlFilter.contains(internalLink)) {
//				continue;
//			}
//			
//			/*
//			 * only support HTTP protocol
//			 */
//			String protocol = null;
//			try {
//				protocol = (new URL(internalLink)).getProtocol();
//			} catch (MalformedURLException e) {
//				// TODO Auto-generated catch block
//				e.printStackTrace();
//			}
//			if (protocol != null && !protocol.equalsIgnoreCase("HTTP")) {
//				continue;
//			}
//			
//			/*
//			 * if supported.file.type.list contains the prefix of the todoUrl , it
//			 * return true if unsupported.file.type.list contains the prefix of the
//			 * todoUrl , it return false or else return true
//			 */
//			if (!this.fileFilter.iCanProcess(internalLink)) {
//				continue;
//			}
//
//			/*
//			 * filter robots file
//			 */
//			
//			UrlUnit urlUnit = new UrlUnit(internalLink, currentUrlUnit.getCurrDepth()+1);
//			if (this.robot.disallow(urlUnit)) {
//				continue;
//			}
//			
////			includeMatcher = includePattern.matcher(internalLink);
////			if (includeMatcher.find() == false){
////				continue;
////			}
//			
////			/*
////			 * use exclude to extract the date string, compare with dateString, if not same, continue;
////			 */
////			excludeMatcher = excludePattern.matcher(internalLink);
////			if (excludeMatcher.find() == true){
////				if (!excludeMatcher.group(0).equals(dateString))
////					continue;
////			}
////			
////			/*
////			 * use include to increase the url contains dateString
////			 */
////			includeMatcher = includePattern.matcher(internalLink);
////			if (includeMatcher.find() == true){
////				if (urlUnitMap.containsKey(internalLink)){
////					urlUnitMap.get(internalLink).incrementBackLinksByNum(10);
////				} else {
////					urlUnit.incrementBackLinksByNum(10);
////					urlUnitMap.put(internalLink, urlUnit);
////				}
////			} else {
////				if (!urlUnitMap.containsKey(internalLink))
////					urlUnitMap.put(internalLink, urlUnit);
////			}
//			
//			if (!urlUnitMap.containsKey(internalLink))
//				urlUnitMap.put(internalLink, urlUnit);
//			cleanUrlQueue.add(urlUnit);
//		}
//	}
	
	public void setDeadLoop(boolean deadloop){
		this.urlRecorder.close();
		super.setDeadLoop(false);
	}
	
}
