package com.suoluo.zenadmin.service.crawler.resolver;

import java.util.Date;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;

import com.suoluo.zenadmin.common.Application;
import com.suoluo.zenadmin.service.crawler.bean.HotelReviewBean;
import com.suoluo.zenadmin.service.hotel.HotelService;
import com.suoluo.zenadmin.vo.hotel.HotelReviewVO;



/**
 * Runnable task that crawls a single hotel review: it marks the review as
 * in-progress, resolves its content via a {@link CrawlerResolver} chosen by
 * {@link CrawlerResolverFactory}, and persists the result. On any failure the
 * review's fail counter is incremented and its status set to CRAWLER_FAIL.
 *
 * NOTE(review): class name keeps the historical "Resovler" spelling because
 * external callers reference it by this name.
 */
public class CrawlerResovlerTask implements Runnable {

	private static final Log logger = LogFactory.getLog("crawler");

	// Resolved eagerly from the application context at construction time.
	// Kept package-visible and non-final to preserve the original contract
	// (e.g. for injection by package-mates) — TODO confirm no caller reassigns it.
	HotelService hotelService = Application.getInstance().getBean("hotelService", HotelService.class);

	// Id of the hotel the review belongs to (used for log context).
	private int hotelId;

	// Primary key of the review row whose crawler state is updated.
	private int reviewId;

	// URL the resolver fetches the review content from.
	private String reviewUrl;

	// Discriminator used by CrawlerResolverFactory to pick a resolver.
	private int reviewType;

	public int getHotelId() {
		return hotelId;
	}

	public void setHotelId(int hotelId) {
		this.hotelId = hotelId;
	}

	public int getReviewId() {
		return reviewId;
	}

	public void setReviewId(int reviewId) {
		this.reviewId = reviewId;
	}

	public int getReviewType() {
		return reviewType;
	}

	public void setReviewType(int reviewType) {
		this.reviewType = reviewType;
	}

	public String getReviewUrl() {
		return reviewUrl;
	}

	public void setReviewUrl(String reviewUrl) {
		this.reviewUrl = reviewUrl;
	}

	/**
	 * Executes one crawl cycle:
	 * <ol>
	 *   <li>marks the review as CRAWLER_DOING;</li>
	 *   <li>resolves the review via the factory-selected resolver;</li>
	 *   <li>on success, stamps the crawl time and persists the data —
	 *       unless the review row has been deleted in the meantime;</li>
	 *   <li>on any failure, increments the fail counter and sets CRAWLER_FAIL.</li>
	 * </ol>
	 */
	@Override
	public void run() {
		logDebug("crawler start");
		// Mark the review as currently being crawled.
		hotelService.updateCrawlerState(HotelReviewBean.CRAWLER_DOING, this.reviewId);

		CrawlerResolver resolver = CrawlerResolverFactory.createCrawlerResolver(this);
		if (resolver == null) {
			markCrawlFailed("Not found resolver");
			return;
		}

		logDebug("resolve start");
		HotelReviewBean review = resolver.doResolver();
		if (review == null) {
			markCrawlFailed("resolve fail, review is null");
			return;
		}
		logDebug("resolve end");

		if (review.getCrawlerStatus() == HotelReviewBean.CRAWLER_FAIL) {
			markCrawlFailed("crawler fail");
			return;
		}

		logDebug("update data start after crawler success");
		// Crawl succeeded: persist the resolved review data.
		try {
			HotelReviewVO dbReview = hotelService.findHotelReviewById(this.reviewId);
			if (dbReview == null) {
				// The row vanished between scheduling and completion; nothing to update.
				logWarn("The review has already been deleted, skip crawler update");
			} else {
				review.setCrawlerTime(new Date());
				hotelService.updateCrawlerReviewInfo(review);
			}
		} catch (Exception e) {
			// Boundary catch: a persistence failure must not kill the worker thread.
			logError("fail to update data to db");
			logger.error("fail to update data", e);
		}
		logDebug("update data end after crawler success");
	}

	/**
	 * Must be called before the task is submitted to the executor:
	 * marks the review as waiting to be crawled (CRAWLER_WAIT).
	 */
	public void submitBefore() {
		logDebug("submit a crawler task");
		hotelService.updateCrawlerState(HotelReviewBean.CRAWLER_WAIT, this.reviewId);
	}

	/**
	 * Shared failure path: logs the reason, then increments the review's fail
	 * counter and sets its crawler status to CRAWLER_FAIL in one update.
	 *
	 * @param reason human-readable failure description for the log
	 */
	private void markCrawlFailed(String reason) {
		logError(reason);
		hotelService.updateIncrementFailCountAndSetFail(HotelReviewBean.CRAWLER_FAIL, reviewId);
	}

	// Log helpers below prefix every message with the task's identifying fields
	// so crawler log lines can be correlated to a specific review.

	private void logDebug(String desc) {
		logger.debug("hotelId=" + hotelId + "|reviewId=" + reviewId + "|reviewType=" + reviewType + "|" + desc);
	}

	private void logWarn(String desc) {
		logger.warn("hotelId=" + hotelId + "|reviewId=" + reviewId + "|reviewType=" + reviewType + "|" + desc);
	}

	private void logError(String desc) {
		logger.error("hotelId=" + hotelId + "|reviewId=" + reviewId + "|reviewType=" + reviewType + "|" + desc);
	}
}
