package com.flute.icrawler.app.result;

import java.util.ArrayList;
import java.util.List;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import com.flute.icrawler.app.entity.CrawlResult;

/**
 * A thread-safe pool that buffers crawl results for batch processing.
 *
 * <p>Producers call {@link #addCrawlResult(CrawlResult)}; a consumer calls
 * {@link #removeCrawlResults()}, which blocks until a batch of at least
 * {@code MIN_DEAL_COUNT} results has accumulated or {@code LOCK_TIMEOUT}
 * milliseconds elapse, then drains and returns everything buffered.
 *
 * @author jiangbo
 */
public class CrawlResultPool {

	// Buffered results; the list object also serves as the monitor for wait/notify.
	private final List<CrawlResult> list = new ArrayList<CrawlResult>();

	// Minimum batch size before waiting consumers are notified.
	private static final int MIN_DEAL_COUNT = 10;

	// Maximum time (ms) a consumer waits before draining whatever is present.
	private static final int LOCK_TIMEOUT = 60000;

	private static final Logger LOGGER = LoggerFactory
			.getLogger(CrawlResultPool.class);

	/**
	 * Returns the current number of buffered results.
	 *
	 * <p>Synchronized so the read is visible across threads (happens-before
	 * with producers/consumers that mutate the list under the same monitor).
	 */
	public int size() {
		synchronized (list) {
			return list.size();
		}
	}

	/**
	 * Adds a result to the pool and wakes any waiting consumer once the
	 * batch threshold has been reached.
	 *
	 * @param crawlResult the result to buffer
	 */
	public void addCrawlResult(CrawlResult crawlResult) {
		synchronized (list) {

			list.add(crawlResult);

			// Parameterized logging: formatting is skipped when INFO is disabled.
			LOGGER.info("CrawlResultPool: add {}", crawlResult.getUrl());
			LOGGER.info("CrawlResultPool: CrawlResult count = {}", list.size());

			// Only notify once a full batch has accumulated.
			if (list.size() >= MIN_DEAL_COUNT) {
				list.notifyAll();
			}
		}
	}

	/**
	 * Removes and returns all buffered results.
	 *
	 * <p>Blocks until at least {@code MIN_DEAL_COUNT} results are available
	 * or {@code LOCK_TIMEOUT} milliseconds have elapsed, then drains the
	 * pool. May return an empty list if the timeout expires (or the thread
	 * is interrupted) while the pool is empty.
	 *
	 * @return all results buffered at the time of draining (possibly empty)
	 */
	public List<CrawlResult> removeCrawlResults() {

		List<CrawlResult> resultList = new ArrayList<CrawlResult>();
		synchronized (list) {

			long deadline = System.currentTimeMillis() + LOCK_TIMEOUT;
			// Wait in a loop: guards against spurious wakeups and against a
			// notification that fired before this consumer started waiting
			// (the original single wait() could block 60 s despite a full batch).
			while (list.size() < MIN_DEAL_COUNT) {
				long remaining = deadline - System.currentTimeMillis();
				if (remaining <= 0) {
					break;
				}
				try {
					list.wait(remaining);
				} catch (InterruptedException e) {
					// Restore interrupt status so callers can observe it,
					// then fall through and return whatever is buffered.
					Thread.currentThread().interrupt();
					break;
				}
			}

			// Drain everything currently buffered (may be empty on timeout).
			resultList.addAll(list);
			list.clear();

			return resultList;
		}
	}

}
