package com.flute.icrawler.framework.framework;

import java.io.File;
import java.rmi.RemoteException;
import java.text.DecimalFormat;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;

import org.apache.log4j.Logger;

import com.flute.haflute.monitor.GeneralStatusCenter;
import com.flute.haflute.monitor.StatusMessage;
import com.flute.icrawler.app.service.CrawlService;
import com.flute.icrawler.config.CrawlConfig;
import com.flute.icrawler.config.CrawlConfigUtil;
import com.flute.icrawler.framework.framework.container.AdditionalUrlInformation;
import com.flute.icrawler.framework.framework.container.AdditionalUrlInformation.Additionals;
import com.flute.icrawler.framework.framework.container.BerkeleySimpleUrlQueue;
import com.flute.icrawler.framework.framework.container.BerkeleySortedUrlQueue;
import com.flute.icrawler.framework.framework.container.NumberStringValue;
import com.flute.icrawler.framework.framework.listener.ICrawlTaskListener;
import com.flute.icrawler.framework.framework.listener.ICrawlUrlSubject;
import com.flute.tools.data.DataIterator;
import com.flute.tools.timer.IntervalExecutor;

/**
 * A crawl-URL pool backed by Berkeley DB queues. URLs move between the
 * wait / processing / succeeded / failed / retried / filtered queues as the
 * crawl progresses, and a periodic status reporter logs overall progress.
 * All queue-mutating operations are synchronized on this instance.
 */
public class BerkeleyCrawlUrlPool implements ICrawlUrlPool, ICrawlUrlSubject {
	/** URLs waiting to be crawled, ordered by the sorted queue's key. */
	protected BerkeleySortedUrlQueue waitQueue;
	/** Successfully crawled URLs ("sucessed" spelling kept: it is also the BDB database name). */
	protected BerkeleySimpleUrlQueue sucessedQueue;
	/** URLs whose crawl failed. */
	protected BerkeleySimpleUrlQueue failedQueue;
	/** URLs currently handed out to workers. */
	protected BerkeleySimpleUrlQueue processingQueue;
	/** URLs that have been re-queued for a retry. */
	protected BerkeleySimpleUrlQueue retriedQueue;
	/** URLs rejected by a filter. */
	protected BerkeleySimpleUrlQueue filterQueue;
	/** URLs found lingering in the processing queue during a periodic check. */
	protected BerkeleySimpleUrlQueue unCheckQueue;

	protected CrawlJob job;
	/** Periodically invokes {@link #reportCrawlerResult()}. */
	private IntervalExecutor statusLogger;

	private static Logger logger = Logger.getLogger(BerkeleyCrawlUrlPool.class);

	private CrawlConfig crawlConfig = null;

	/** Observers notified for every crawled URL. */
	private List<ICrawlTaskListener> crawlTaskListeners = new ArrayList<ICrawlTaskListener>();

	/**
	 * Creates the pool, opening all Berkeley DB backed queues in the
	 * environment directory {@code bdb/<taskName>}, and starts the periodic
	 * status reporter.
	 * 
	 * @param crawlConfig
	 *            task configuration; its task name selects the BDB environment
	 *            directory that holds the queue databases
	 */
	public BerkeleyCrawlUrlPool(CrawlConfig crawlConfig) {
		this.crawlConfig = crawlConfig;
		String bdbEnvPath = "bdb" + File.separator + crawlConfig.getTaskName();
		this.waitQueue = new BerkeleySortedUrlQueue(bdbEnvPath, "wait");
		this.sucessedQueue = new BerkeleySimpleUrlQueue(bdbEnvPath, "sucessed");
		this.failedQueue = new BerkeleySimpleUrlQueue(bdbEnvPath, "failed");
		this.processingQueue = new BerkeleySimpleUrlQueue(bdbEnvPath,
				"processing");
		// NOTE: "retired" is a historical typo for "retried"; kept as-is so
		// existing BDB environments remain readable.
		this.retriedQueue = new BerkeleySimpleUrlQueue(bdbEnvPath, "retired");
		this.filterQueue = new BerkeleySimpleUrlQueue(bdbEnvPath, "filter");
		// FIX: unCheckQueue was never initialized, so checkCrawlerResult()
		// always threw a NullPointerException.
		this.unCheckQueue = new BerkeleySimpleUrlQueue(bdbEnvPath, "unCheck");
		this.statusLogger = getStatusExecutor();
		this.statusLogger.setName("Crawl Pool Logger");
		statusLogger.setInterval(CrawlConfigUtil.getBaseConfig()
				.getFrameworkPrintInterval() * 1000);
		statusLogger.start();

		// job is always null here (it is injected later via setCrawlJob), so
		// this branch is effectively dead; kept for safety.
		if (job != null) {
			initialize();
		}
	}

	/**
	 * Seeds the wait queue from the job settings and drains any URLs that a
	 * previous (interrupted) run left in the processing queue.
	 */
	private void initialize() {
		for (Seed seed : job.getSettings().getSeeds()) {
			CrawlUrl url = new CrawlUrl(job, seed.getUrl());
			AdditionalUrlInformation info = new AdditionalUrlInformation();
			info.updateValue(Additionals.Deep, new NumberStringValue(1));
			info.updateValue(Additionals.Position, new NumberStringValue(
					getTotalUrlCount() + 1));
			url.setAdditionalUrlInformation(info);
			waitQueue.addCrawlUrl(url);
		}

		// URLs that were mid-crawl when the previous task stopped are removed
		// here; re-adding them to the wait queue is intentionally disabled.
		CrawlUrl url = null;
		while ((url = processingQueue.remove()) != null) {
			logger
					.info("move processingQueue url in the last task to waitQuene"
							+ url);
		}
	}

	/**
	 * Attaches the crawl job and, if non-null, performs the queue
	 * initialization for it.
	 */
	public void setCrawlJob(CrawlJob job) {
		this.job = job;
		if (job != null) {
			initialize();
		}
	}

	/**
	 * Adds every URL that is not already known to the pool to the wait queue;
	 * duplicates are logged at debug level and skipped.
	 */
	@Override
	public synchronized void addAll(List<CrawlUrl> crawlUrls)
			throws RemoteException {
		long time = System.currentTimeMillis();

		for (CrawlUrl newUrl : crawlUrls) {
			// only enqueue URLs the pool has not seen yet
			if (!contains(newUrl)) {
				waitQueue.addCrawlUrl(newUrl);
			} else {
				logger.debug("add echo url=" + newUrl);
			}
		}

		logger.info("bdbpool handle time:"
				+ (System.currentTimeMillis() - time) + "ms");
	}

	/**
	 * Adds a single URL to the wait queue unless the pool already contains it.
	 */
	@Override
	public synchronized void add(CrawlUrl newUrl) throws RemoteException {
		// only enqueue URLs the pool has not seen yet
		if (!contains(newUrl)) {
			waitQueue.addCrawlUrl(newUrl);
		} else {
			logger.info("add echo url=" + newUrl);
		}
	}

	/**
	 * Re-queues a URL for a refresh crawl, incrementing its update counter.
	 * NOTE(review): assumes Additionals.UpdateCount is already present on the
	 * URL's additional information — verify against callers.
	 */
	public synchronized void addUpdateUrl(CrawlUrl updateUrl) {
		// bump the update count by one
		NumberStringValue valueRetryCount = (NumberStringValue) updateUrl
				.getAdditionalUrlInformation()
				.getValue(Additionals.UpdateCount);

		updateUrl.getAdditionalUrlInformation().updateValue(
				Additionals.UpdateCount,
				new NumberStringValue(
						valueRetryCount.getNumber().intValue() + 1));

		if (!this.waitQueue.contains(updateUrl)) {
			this.waitQueue.addCrawlUrl(updateUrl);
		}
	}

	/** @return total URLs tracked: waiting + processing + succeeded + failed */
	public long getTotalUrlCount() {
		return waitQueue.size() + processingQueue.size() + sucessedQueue.size()
				+ failedQueue.size();
	}

	/**
	 * Membership test by raw URL string; failed URLs are deliberately not
	 * checked so they may be re-added later.
	 */
	@Override
	public synchronized boolean contains(String url) {
		// wrap once and delegate (the original allocated three wrappers)
		return contains(new CrawlUrl(null, url));
	}

	/**
	 * Membership test across the wait, succeeded and processing queues.
	 * Failed URLs are deliberately not checked so they may be re-added later.
	 */
	public synchronized boolean contains(CrawlUrl url) {
		if (waitQueue.contains(url)) {
			return true;
		} else if (sucessedQueue.contains(url)) {
			return true;
		} else if (processingQueue.contains(url)) {
			return true;
		}
		return false;
	}

	@Override
	public CrawlJob getCrawlJob() {
		return job;
	}

	/** @return true while URLs are still waiting to be crawled */
	@Override
	public synchronized boolean hasNext() {
		return waitQueue.size() > 0;
	}

	/** @return true while URLs are checked out to workers */
	@Override
	public synchronized boolean isProcessing() {
		return processingQueue.size() > 0;
	}

	/**
	 * Takes the next URL from the wait queue and moves it into the processing
	 * queue.
	 * 
	 * @return the next URL to crawl, or null when the wait queue is empty
	 */
	@Override
	public synchronized CrawlUrl nextUrl() {
		CrawlUrl url = waitQueue.getFirstCrawlUrl();

		if (url != null) {
			url.setJob(job);
			processingQueue.add(url);
		}
		return url;
	}

	/**
	 * Takes up to {@code n} URLs from the wait queue (may return fewer when
	 * the queue drains).
	 */
	@Override
	public List<CrawlUrl> nextUrl(int n) throws RemoteException {
		List<CrawlUrl> list = new ArrayList<CrawlUrl>();
		while (n > 0) {
			CrawlUrl url = nextUrl();
			if (url == null) {
				break;
			}
			list.add(url);
			n--;
		}
		return list;
	}

	/**
	 * Records a retry: the URL is archived in the retried queue, a fresh copy
	 * with an incremented retry count is re-queued for crawling, and the URL
	 * leaves the processing queue.
	 */
	@Override
	public synchronized void retry(CrawlUrl url) {
		retriedQueue.add(url);

		CrawlUrl newUrl = new CrawlUrl(job, url.getUrl());
		AdditionalUrlInformation info = url.getAdditionalUrlInformation()
				.newInstance();

		NumberStringValue value = (NumberStringValue) url
				.getAdditionalUrlInformation().getValue(Additionals.Deep);
		NumberStringValue valueRetryCount = (NumberStringValue) url
				.getAdditionalUrlInformation().getValue(Additionals.RetryCount);

		// carry the depth over, re-position at the end, bump the retry count
		info.updateValue(Additionals.Deep, new NumberStringValue(value
				.getNumber().intValue()));
		info.updateValue(Additionals.Position, new NumberStringValue(
				getTotalUrlCount() + 1));
		info.updateValue(Additionals.RetryCount, new NumberStringValue(
				valueRetryCount.getNumber().intValue() + 1));

		newUrl.setAdditionalUrlInformation(info);

		waitQueue.addCrawlUrl(newUrl);
		processingQueue.remove(url);
	}

	/** Marks a URL as successfully crawled. */
	@Override
	public synchronized void success(CrawlUrl url) {
		sucessedQueue.add(url);
		processingQueue.remove(url);
		retriedQueue.remove(url);
	}

	/**
	 * Records a URL rejected by a filter.
	 * NOTE(review): the URL is not removed from the processing queue here,
	 * unlike success()/fail() — confirm this is intentional.
	 */
	@Override
	public synchronized void filter(CrawlUrl url) throws RemoteException {
		filterQueue.add(url);
	}

	/** Marks a URL as permanently failed. */
	@Override
	public synchronized void fail(CrawlUrl url) {
		failedQueue.add(url);
		processingQueue.remove(url);
	}

	/** Forwards a crawled URL to every registered listener. */
	@Override
	public void notityCrawlUrl(CrawlUrl url) throws RemoteException {
		notityObservers(url);
	}

	/** Stops the status reporter and the wait queue's internal executor. */
	public void setFinished(boolean isFinished) {
		statusLogger.setFinish(isFinished);
		waitQueue.closeIntervalExecutor();
	}

	/**
	 * Periodic check that copies every URL still sitting in the processing
	 * queue into the un-checked queue for later inspection.
	 */
	public void checkCrawlerResult() {
		DataIterator<Object> dataIterator = processingQueue.iterator(null);
		while (dataIterator.hasNext()) {
			unCheckQueue.add((CrawlUrl) dataIterator.next());
		}
	}

	/**
	 * Logs and reports a progress summary: queue sizes, throughput, elapsed
	 * time, estimated remaining time and completion percentage.
	 */
	public void reportCrawlerResult() {

		int waitCount = waitQueue.size();
		int sucesseCount = sucessedQueue.size();
		int failedCount = failedQueue.size();
		int processingCount = processingQueue.size();
		int retiredCount = retriedQueue.size();
		int filterCount = filterQueue.size();
		// keep three decimal places
		DecimalFormat format = new DecimalFormat("#.###");
		// URLs handled per second
		double urlPerSecd = 0;
		// completion percentage
		String finishedRate = "";
		String strUrlPerSecd = "";
		// estimated remaining time, in seconds
		double timeRemain = 0;
		// elapsed time so far, in seconds
		// FIX: divide by 1000.0 — the original integer division truncated
		// sub-second precision before the widening to double.
		double timeDone = (System.currentTimeMillis() - CrawlService
				.getInstance().getStartDateTimeMillis()) / 1000.0;

		// actually handled = succeeded + failed
		long dealCount = sucesseCount + failedCount;
		long totalCount = waitCount + processingCount + dealCount;
		// FIX: guard the divisions below against zero denominators, which
		// previously put "NaN"/"Infinity" into the report.
		finishedRate = totalCount > 0 ? format.format(100.0 * dealCount
				/ totalCount) : "0";

		if (timeDone > 0) {
			urlPerSecd = dealCount / timeDone;
		}
		strUrlPerSecd = format.format(urlPerSecd);

		logger.debug("urlPerSecd=" + urlPerSecd);

		if (urlPerSecd > 0) {
			timeRemain = (waitCount + processingCount) / urlPerSecd;
		}

		// split the remaining time into hours / minutes / seconds
		int timeRemain_hour = (int) (timeRemain / 3600);
		int timeRemain_min = (int) ((timeRemain - 3600 * timeRemain_hour) / 60);
		int timeRemain_sed = (int) ((timeRemain - 60 * timeRemain_min) % 3600);

		// split the elapsed time into hours / minutes / seconds
		int timeDone_hour = (int) (timeDone / 3600);
		int timeDone_min = (int) ((timeDone - 3600 * timeDone_hour) / 60);
		int timeDone_sed = (int) ((timeDone - 60 * timeDone_min) % 3600);

		// one seed URL per line
		StringBuilder seedsBuilder = new StringBuilder();
		for (String seed : crawlConfig.getUrl().getSeeds()) {
			seedsBuilder.append(seed).append("\r\n");
		}
		String seeds = seedsBuilder.toString();

		StringBuilder builderTopic = new StringBuilder();
		builderTopic.append("运行情况").append("\r\n任务名称:").append(
				crawlConfig.getTaskName()).append("; 种子页:").append(seeds)
				.append(";深度:").append(crawlConfig.getUrl().getDepth());

		String resultTopic = builderTopic.toString();

		StringBuilder builderResultMsg = new StringBuilder();

		// FIX: the "; 失败:" label was appended twice, garbling the report.
		builderResultMsg.append("等待中:").append(waitCount).append("; 已成功:")
				.append(sucesseCount).append("; 失败:").append(failedCount)
				.append("; 正在处理:").append(processingCount)
				.append("; 重试:").append(retiredCount).append("; 被过滤:").append(
						filterCount).append("; 平均处理速度(个/秒):").append(
						strUrlPerSecd).append("; 剩余时间:")
				.append(timeRemain_hour).append("小时").append(timeRemain_min)
				.append("分钟").append(timeRemain_sed).append("秒").append(
						"; 任务已用时:").append(timeDone_hour).append("小时").append(
						timeDone_min).append("分钟").append(timeDone_sed).append(
						"秒").append("; 完成比例:").append(finishedRate).append("%");

		String resultMsg = builderResultMsg.toString();

		logger.info(resultTopic + ":" + resultMsg);

		StatusMessage statusMessage = new StatusMessage();
		statusMessage.topic = resultTopic;
		statusMessage.content = resultMsg;

		GeneralStatusCenter.getInstance().report(statusMessage);
	}

	/** @return an executor whose tick runs {@link #reportCrawlerResult()} */
	private IntervalExecutor getStatusExecutor() {
		return new IntervalExecutor() {

			@Override
			public void execute() {
				reportCrawlerResult();
			}
		};
	}

	/** Pushes the crawled URL to every registered listener. */
	@Override
	public void notityObservers(CrawlUrl url) {
		for (int i = 0; i < crawlTaskListeners.size(); i++) {
			crawlTaskListeners.get(i).update(url);
		}
	}

	/**
	 * FIX: was an empty stub — registered listeners could never be removed,
	 * leaking references for the lifetime of the pool.
	 */
	@Override
	public void unRegisterObserver(ICrawlTaskListener taskListener)
			throws RemoteException {
		crawlTaskListeners.remove(taskListener);
	}

	@Override
	public void registerObserver(ICrawlTaskListener taskListener) {
		crawlTaskListeners.add(taskListener);
	}

}
