package com.xiaotu.common.aop;

import org.apache.commons.lang.StringUtils;
import org.aspectj.lang.ProceedingJoinPoint;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import com.xiaotu.common.exception.SpiderException;
import com.xiaotu.common.exception.SpiderExceptionCode;
import com.xiaotu.common.model.JobLogModel;
import com.xiaotu.common.model.SpiderLogModel;
import com.xiaotu.common.util.Constants;
import com.xiaotu.common.util.GsonUtils;
import com.xiaotu.spider.APageProcessor;

import us.codecraft.webmagic.Page;

/**
 * Logging aspect for per-page spider jobs: wraps {@code APageProcessor}'s
 * {@code doProcess} invocation, builds a {@link JobLogModel} describing the
 * page fetch (series number, job type, data type, URL, result content),
 * converts any exception into log-entry status/detail fields, and always
 * persists the entry via {@code saveLog} when the call finishes.
 */
public class PageProcessLogAspect extends ALogAspect
{
	private static final Logger LOGGER = LoggerFactory
			.getLogger(PageProcessLogAspect.class);
	
	/**
	 * Around advice for the PageProcessor's doProcess method. Exceptions
	 * raised before, during or after {@code proceed()} are not rethrown;
	 * they are translated into log-entry fields instead, and the entry is
	 * saved unconditionally in the finally block.
	 *
	 * @param joinPoint the intercepted join point
	 */
	public void aroundMethod(ProceedingJoinPoint joinPoint)
	{
		JobLogModel jobLog = new JobLogModel();
		try
		{
			Object target = joinPoint.getTarget();
			if (!(target instanceof APageProcessor))
				throw new SpiderException(
						SpiderExceptionCode.PageProcessorClassErrorException);
			
			APageProcessor processor = (APageProcessor) target;
			
			// Basic information about the spider task as a whole.
			SpiderLogModel taskInfo = processor.getSpiderLog();
			if (taskInfo == null)
				throw new SpiderException(
						SpiderExceptionCode.SpiderTaskInfoErrorException);
			
			Page page = this.getPage(joinPoint);
			
			jobLog.setSeriesNo(taskInfo.getSeriesNo());
			jobLog.setJobType(taskInfo.getJobType());
			jobLog.setDataType(processor.getPageDataType(page));
			jobLog.setUrl(page.getUrl().toString());
			setLogInfo(page.getRequest().getExtras(), jobLog);
			
			joinPoint.proceed();// process the crawled data
			
			jobLog.setTargetContent(
					GsonUtils.toJson(page.getResultItems().getAll()));
			if (StringUtils.isEmpty(jobLog.getJobStatus()))
				jobLog.setJobStatus(Constants.JobStatus.SUCCESS);
		}
		catch (SpiderException e)
		{
			this.recordSpiderException(jobLog, e);
		}
		catch (Throwable e)
		{
			this.logError(jobLog, e);
			jobLog.setJobStatus(Constants.JobStatus.FAIL);
			jobLog.setException(e.getClass().getSimpleName());
			jobLog.setMessage(e.getMessage());
		}
		finally
		{
			jobLog.setTimestamp(System.currentTimeMillis());
			if (LOGGER.isDebugEnabled())
				LOGGER.debug("[{}][{}] Page Process Log:{}",
						jobLog.getJobType(), jobLog.getDataType(),
						GsonUtils.toJson(jobLog));
			this.saveLog(jobLog);
		}
	}
	
	/**
	 * Fills the log entry from a SpiderException: job status, log type,
	 * exception code and message.
	 */
	private void recordSpiderException(JobLogModel jobLog, SpiderException e)
	{
		this.logError(jobLog, e);
		
		// DataInvalideException means "no usable data", not a failure.
		if (SpiderExceptionCode.DataInvalideException
				.equals(e.getExceptionCode()))
			jobLog.setJobStatus(Constants.JobStatus.INVALIDE);
		else
			jobLog.setJobStatus(Constants.JobStatus.FAIL);
		
		// Network trouble is recorded as a system-level log entry.
		if (SpiderExceptionCode.HttpIOException.equals(e.getExceptionCode()))
			jobLog.setLogType(Constants.LogConstants.TYPE_SYSTEM);
		
		jobLog.setException(e.getExceptionCode().toString());
		jobLog.setMessage(e.getMessage());
	}
	
	/**
	 * Extracts the {@link Page} argument from the join point.
	 *
	 * @throws SpiderException when the first argument is absent or not a Page
	 */
	private Page getPage(ProceedingJoinPoint joinPoint)
	{
		Object[] args = joinPoint.getArgs();
		boolean valid = args != null && args.length >= 1
				&& args[0] instanceof Page;
		if (!valid)
			throw new SpiderException(
					SpiderExceptionCode.PageProcessorParameterErrorException);
		return (Page) args[0];
	}
	
	/**
	 * Logs the error. A search-list page with no matching data is an
	 * expected condition and is only warned about; everything else is an
	 * error.
	 */
	private void logError(JobLogModel jobLog, Throwable e)
	{
		boolean searchListNoMatch = Constants.PageDataType.SEARCH_LIST
				.equals(jobLog.getDataType())
				&& e instanceof SpiderException
				&& SpiderExceptionCode.DataInvalideException
						.equals(((SpiderException) e).getExceptionCode());
		if (searchListNoMatch)
		{
			LOGGER.warn("[{}][{}] search list no match:{}",
					jobLog.getJobType(), jobLog.getDataType(),
					jobLog.getUrl(), e);
			return;
		}
		LOGGER.error("[{}][{}] page processor error:{}", jobLog.getJobType(),
				jobLog.getDataType(), jobLog.getUrl(), e);
	}
}
