/*
 * Creator: shenwenbo
 * Created at 2009-12-04 to support the project
 * Function: receives all task pieces and starts a crawler to crawl each piece
 */
package edu.hit.pact.pgse.crawler;

import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.PrintWriter;
import java.net.InetSocketAddress;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Date;
import java.util.PriorityQueue;
import java.util.Set;
import java.util.Timer;
import java.util.TimerTask;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.atomic.AtomicInteger;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.mina.core.session.IoSession;

import edu.hit.pact.pgse.bean.RunnableTask;
import edu.hit.pact.pgse.bean.Task;
import edu.hit.pact.pgse.bean.TaskPiece;
import edu.hit.pact.pgse.bean.TaskPieceProperty;
import edu.hit.pact.pgse.bean.TaskPieceStatistics;
import edu.hit.pact.pgse.communication.message.AbstractMessage;
import edu.hit.pact.pgse.communication.message.SpeedReportMessage;
import edu.hit.pact.pgse.communication.message.TaskPieceStatisticsMessage;
import edu.hit.pact.pgse.communication.message.AbstractMessage.COMMUNICATION_TYPE;
import edu.hit.pact.pgse.crawler.bbs.BBSCrawler;
import edu.hit.pact.pgse.crawler.blog.BlogCrawler;
import edu.hit.pact.pgse.crawler.finish.FinishedReportedProcessor;
import edu.hit.pact.pgse.crawler.send.StoreNodeQuerier;
import edu.hit.pact.pgse.crawler.util.SleepThread;
import edu.hit.pact.pgse.crawler.web.WebCrawler;
import edu.hit.pact.pgse.util.Globals;
import edu.hit.pact.pgse.util.Utilities;

/**
 * Manages the life cycle of task pieces on one crawler node: pieces pushed by the
 * master are queued, at most {@code Globals.CRAWLER_NUMBER_ONE_COMPUTER} crawlers
 * run concurrently, and on completion statistics are reported back to the master.
 *
 * Thread-safety: the waiting queue is a plain {@link PriorityQueue}, so every
 * method that touches it is {@code synchronized} on this instance.
 */
public class CrawlerManager {
	private Log logger;
	
	// taskPieceId -> task, for every piece known to this node (waiting or running)
	private ConcurrentMap<Long, RunnableTask> pieceProperties;
	// pieces not yet started, ordered by RunnableTask's natural priority;
	// NOT thread-safe — access only from synchronized methods
	private PriorityQueue<RunnableTask> waittingRunnableTasks;
	// taskPieceId -> crawler, for pieces currently being crawled
	private ConcurrentMap<Long, Crawler> runningPieceProperties;
	
	// session to the master node; set via setMasterSession before any report is sent
	public IoSession masterSession;
	
	// pages crawled since the last speed report was sent
	private AtomicInteger latestPageNum;

	// periodically dumps queue sizes to a debug file
	private Timer queueerrorRecorder;
	
	public CrawlerManager() {
		this(LogFactory.getLog(CrawlerManager.class));
	}
	
	/**
	 * @param logger the log to write progress/diagnostic messages to
	 */
	public CrawlerManager(Log logger) {
		super();
		this.logger = logger;
		
		this.pieceProperties = new ConcurrentHashMap<Long, RunnableTask>();
		this.waittingRunnableTasks = new PriorityQueue<RunnableTask>();
		this.runningPieceProperties = new ConcurrentHashMap<Long, Crawler>();
		this.latestPageNum = new AtomicInteger(0);

		// daemon timer: a diagnostics dump must not keep the JVM alive on shutdown
		this.queueerrorRecorder = new Timer(true);
		queueerrorRecorder.schedule(new DSerrorOutput(), 1*1000, 1*1000);
	}

	/**
	 * Sends the number of pages crawled since the previous report to the master
	 * and resets the counter.
	 */
	public void processCurrentSpeedReport(){
		// getAndSet atomically reads and resets, so increments that arrive
		// concurrently are never lost (replacing the AtomicInteger instance would
		// drop any increment racing between the read and the reassignment)
		int pages = latestPageNum.getAndSet(0);
		AbstractMessage speedReportMessage = new SpeedReportMessage(COMMUNICATION_TYPE.SPEED_REPORT, false, pages);
		masterSession.write(speedReportMessage);
	}
	
	/** Counts one crawled page toward the next speed report. */
	public void incrementLatestPageNum(){
		// AtomicInteger is already thread-safe; no external locking needed
		latestPageNum.incrementAndGet();
	}

	public void setMasterSession(IoSession masterSession) {
		this.masterSession = masterSession;
	}
	
	/**
	 * Called when a crawler finishes its piece: reports the statistics to the
	 * master, releases the crawler's resources, and schedules the next waiting piece.
	 *
	 * @param statistics the finished piece's statistics (must carry its taskPieceId)
	 * @return always {@code true}
	 */
	public synchronized boolean processFinishedNotify(TaskPieceStatistics statistics) {
		// report to master first so the result is not lost if cleanup fails
		AbstractMessage message = new TaskPieceStatisticsMessage(statistics);
		masterSession.write(message);
		
		Long taskPieceId = statistics.getTaskPieceId();
		
		logger.error("remove task piece  from depository by taskPieceId : " + taskPieceId);
		this.pieceProperties.remove(taskPieceId);
		
		logger.error("remove task piece  from runningPieceProperties by taskPieceId : " + taskPieceId);
		Crawler formerCrawler = this.runningPieceProperties.remove(taskPieceId);
		if (formerCrawler != null) {
			logger.info("release resources in former web crawler");
			formerCrawler.deactivate();
		} else {
			// duplicate/unknown finish notification — nothing to release
			logger.error("no running crawler found for taskPieceId : " + taskPieceId);
		}
		
		// a slot just freed up: try to start the next waiting piece
		setupCrawler();
		
		return true;
	}
	
	/**
	 * Accepts a task piece pushed by the master: registers it, enqueues it, and
	 * tries to start a crawler immediately if a slot is free.
	 *
	 * @param piece the piece to crawl
	 * @return always {@code true}
	 */
	public synchronized boolean processTaskPiecePush(TaskPiece piece) {
		logger.error("receive task piece push command");
		
		Long id = piece.getId();
		
		RunnableTask runnableTask = new RunnableTask(new TaskPieceProperty(piece), null);
		logger.info(id + "is putting in the depository and waitting queue");
		
		this.pieceProperties.put(id, runnableTask );
		this.waittingRunnableTasks.add(runnableTask);
		setupCrawler();
		return true;
	}
	
	/**
	 * Drains the waiting queue into running crawlers while there are free slots:
	 * for each piece, picks the crawler implementation by piece type, records it
	 * in the running map, and starts it. Synchronized because it mutates the
	 * non-thread-safe waiting queue.
	 */
	public synchronized void setupCrawler(){
		while ((runningPieceProperties.size() < Globals.CRAWLER_NUMBER_ONE_COMPUTER) && (waittingRunnableTasks.size() != 0)){//must be <
			RunnableTask runnableTask = waittingRunnableTasks.peek();
			logger.info("create a new crawler for the first one in wait queue whose id is " + runnableTask.getTaskPieceProperty().getPiece().getId());
			
			// NOTE(review): busy-waits (holding this monitor) until the piece's store
			// node is assigned — presumably set asynchronously by another component;
			// confirm the assignment path cannot itself need this lock
			while (Globals.CRAWLER_SEND && !runnableTask.hasAddr()){
				logger.error("store node is null, waitting");
				SleepThread.sleep(5000);
			}
			
			waittingRunnableTasks.poll();
			// stamp the start time before handing the piece to a crawler
			runnableTask.getTaskPieceProperty().setStartTime(Utilities.getDateInMillisecond());
			FinishedReportedProcessor finishedReporter = new FinishedReportedProcessor(this, runnableTask.getTaskPieceProperty());
			
			// 1 = web, 2 = blog, 3 = BBS
			int type = runnableTask.getTaskPieceProperty().getPiece().getType();
			Crawler crawler = null;
			if (type == 1){
				crawler = new WebCrawler(this, runnableTask, finishedReporter, Globals.CRAWLER_SEND, Globals.CRAWLER_STORE);
			} else if (type == 2) {
				crawler = new BlogCrawler(runnableTask, finishedReporter, Globals.CRAWLER_SEND, Globals.CRAWLER_STORE);
			} else if (type == 3) {
				crawler = new BBSCrawler(runnableTask, finishedReporter, Globals.CRAWLER_SEND, Globals.CRAWLER_STORE);
			} else {
				// skip this malformed piece but keep scheduling the rest of the queue
				logger.error("the type is wrong!");
				continue;
			}
			try {
				runningPieceProperties.put(runnableTask.getTaskPieceProperty().getPiece().getId(), crawler);
				logger.error("one crawler is started!"+runningPieceProperties.size()+"  "+Globals.CRAWLER_NUMBER_ONE_COMPUTER);
				crawler.start();
			} catch (Exception e) {
				logger.error("failed to start crawler for piece " + runnableTask.getTaskPieceProperty().getPiece().getId(), e);
			}
		}
	}
	
	/**
	 * Periodic diagnostics: appends depository/running/waiting queue sizes to
	 * "WebCrawlermanagererror.txt" once a second.
	 */
	class DSerrorOutput extends TimerTask{
		private PrintWriter queueTracker = null;
		private SimpleDateFormat formater = new SimpleDateFormat("yyyy-MM-dd*hh:mm:ss");

		public DSerrorOutput(){
			try {
				// append mode with autoflush so each tick is persisted immediately
				queueTracker = new PrintWriter(new FileOutputStream("WebCrawlermanagererror.txt", true), true);
				queueTracker.println("\t\t\tdepo\trunning\twaitting\thasStoreNode");
			} catch (FileNotFoundException e) {
				logger.error("cannot open queue-tracking file, diagnostics disabled", e);
			}
		}
	
		public void run() {
			// writer is null when the file could not be opened — skip instead of NPE
			if (queueTracker == null) {
				return;
			}
			queueTracker.println(formater.format(new Date())+ "\t" +pieceProperties.size() + "\t" +runningPieceProperties.size()+ "\t" +waittingRunnableTasks.size());
		}

	}
}
