package com.omg.server.crawler;

import java.util.concurrent.BlockingQueue;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;

import com.omg.server.crawler.strategy.CrawlerStrategy;
import com.omg.server.util.CommonUtil;
import com.omg.util.ApplicationContextUtil;

/**
 * Consumer thread that drains crawl requests from a shared {@link BlockingQueue},
 * resolves the matching {@code CrawlerStrategy} bean ("crawler.&lt;name&gt;Strategy")
 * from the application context, and executes it. While the request's startIndex is
 * still above its endIndex, the request is re-queued with the index decremented so
 * pages are crawled one by one. Stops when the thread is interrupted.
 */
public class RequestConsumerThread extends Thread {
	private static final Log log = LogFactory.getLog(RequestConsumerThread.class);

	/** Queue of pending crawl requests, shared with the producer side. */
	private final BlockingQueue<RequestContext> requestQueue;

	/**
	 * @param requestQueue shared queue this consumer takes requests from
	 */
	public RequestConsumerThread(BlockingQueue<RequestContext> requestQueue) {
		this.requestQueue = requestQueue;
	}

	@Override
	public void run() {
		// Loop until interrupted so the thread can be shut down cooperatively.
		while (!Thread.currentThread().isInterrupted()) {
			try {
				RequestContext requestContext = this.requestQueue.take();
				String strategyName = requestContext.getStrategyName();
				if (CommonUtil.isNotEmpty(strategyName)) {
					// Dispatch the crawl request to the strategy bean registered
					// under "crawler.<strategyName>Strategy".
					CrawlerStrategy crawlerStrategy =
							(CrawlerStrategy) ApplicationContextUtil.getBean("crawler." + strategyName + "Strategy");
					crawlerStrategy.handle(requestContext);
					log.info("请求消费者抓取完成, strategyName:" + strategyName);
				}
				int pageIndex = requestContext.getStartIndex();
				int endIndex = requestContext.getEndIndex();
				// Walk the page index down toward endIndex by re-queuing the
				// request with startIndex decremented (repeat crawl, next page).
				if (pageIndex > endIndex) {
					requestContext.setStartIndex(pageIndex - 1);
					this.requestQueue.offer(requestContext);
				}
				// Throttle between requests to avoid hammering the target site.
				Thread.sleep(1000);
			}
			catch (InterruptedException e) {
				// Restore the interrupt flag (it is cleared when the exception is
				// thrown) and exit the loop so the thread terminates cleanly.
				Thread.currentThread().interrupt();
				log.error("获取请求队列失败", e);
				break;
			}
			catch (RuntimeException e) {
				// A failing strategy (or missing bean) must not kill the consumer
				// thread; log and keep draining the queue.
				log.error("strategy handle failed, consumer thread keeps running", e);
			}
		}
	}

}
