package com.xiaotu.spider;

import java.util.HashMap;
import java.util.List;
import java.util.Map;

import org.apache.commons.lang.StringUtils;

import com.xiaotu.common.exception.SpiderException;
import com.xiaotu.common.exception.SpiderExceptionCode;
import com.xiaotu.common.model.SpiderLogModel;
import com.xiaotu.common.util.Constants;
import com.xiaotu.common.util.PropertiesUtil;
import com.xiaotu.common.util.SepratorUtil;
import com.xiaotu.spider.downloader.SpiderHttpClientDownloader;

import net.sf.json.JSONObject;
import us.codecraft.webmagic.Page;
import us.codecraft.webmagic.Request;
import us.codecraft.webmagic.Site;
import us.codecraft.webmagic.processor.PageProcessor;

/**
 * Base class for spider page processing.
 * <p>
 * Configures the shared WebMagic {@link Site} (retry counts, timeout,
 * request headers and cookies), holds the list of crawl targets and the
 * task-level log model, and converts each target into a start
 * {@link Request}. Subclasses provide the concrete start request, the
 * spider-type identifier, and the page-data-type resolution.
 *
 * @author 高海军
 * @since 2017-02-08
 */
public abstract class APageProcessor implements PageProcessor
{
    protected Site site = Site.me();

    // Log model covering the whole spider task.
    protected SpiderLogModel spiderLog;

    // Targets to be crawled; each JSONObject describes one target.
    protected List<JSONObject> targetList;

    protected SpiderHttpClientDownloader downloader;

    public APageProcessor()
    {
        // The configured retry count is applied to both plain retries and
        // cycle retries. The timeout is configured in seconds while the
        // Site API expects milliseconds, hence the * 1000 conversion.
        int retryTimes = Integer.parseInt(PropertiesUtil
                .getProperty(Constants.SpiderConfigKey.RETRY_TIME));
        int timeoutMillis = Integer.parseInt(PropertiesUtil
                .getProperty(Constants.SpiderConfigKey.HTTP_TIMEOUT)) * 1000;
        site.setRetryTimes(retryTimes)
                .setTimeOut(timeoutMillis)
                .setCycleRetryTimes(retryTimes);
    }

    public void setDownloader(SpiderHttpClientDownloader downloader)
    {
        this.downloader = downloader;
    }

    /**
     * Sets the crawl interval.
     *
     * @param sleepTime interval in seconds (converted to milliseconds
     *            before being handed to the Site)
     */
    public void setSleepTime(int sleepTime)
    {
        site.setSleepTime(sleepTime * 1000);
    }

    /**
     * Sets the number of download retries.
     *
     * @param times retry count
     */
    public void setRetryTimes(int times)
    {
        site.setRetryTimes(times);
    }

    /**
     * Sets the number of cycle retries (re-queueing failed requests).
     *
     * @param times cycle retry count
     */
    public void setCycleRetryTimes(int times)
    {
        site.setCycleRetryTimes(times);
    }

    /**
     * Sets the character set used to decode downloaded pages.
     *
     * @param charSet charset name, e.g. "UTF-8"
     */
    public void setCharSet(String charSet)
    {
        site.setCharset(charSet);
    }

    /**
     * Applies the shared (task-independent) request headers.
     * <p>
     * The property {@code REQUEST_HEADER} holds a '|'-separated list of
     * header names; each name is also the property key of its value.
     */
    public void setRequestHeader()
    {
        String headKey = PropertiesUtil
                .getProperty(Constants.SpiderConfigKey.REQUEST_HEADER);
        if (StringUtils.isNotEmpty(headKey))
        {
            for (String key : headKey.split("\\|"))
            {
                this.site.addHeader(key, PropertiesUtil.getProperty(key));
            }
        }
    }

    /**
     * Applies request headers and cookies specific to this spider type.
     * <p>
     * Header/cookie names are read from
     * {@code <spiderType>.REQUEST_HEADER} / {@code <spiderType>.REQUEST_COOKIE}
     * as '|'-separated lists; each value is looked up under the key
     * {@code <spiderType>.site.<name>}.
     */
    public void setSiteRequestHeader()
    {
        String siteName = this.getSpiderType() + SepratorUtil.SEP_POINT;
        String headKey = PropertiesUtil.getProperty(
                siteName + Constants.SpiderConfigKey.REQUEST_HEADER);
        if (StringUtils.isNotEmpty(headKey))
        {
            for (String key : headKey.split("\\|"))
            {
                this.site.addHeader(key,
                        PropertiesUtil.getProperty(siteName + "site." + key));
            }
        }
        String cookieKey = PropertiesUtil.getProperty(
                siteName + Constants.SpiderConfigKey.REQUEST_COOKIE);
        if (StringUtils.isNotEmpty(cookieKey))
        {
            for (String key : cookieKey.split("\\|"))
            {
                this.site.addCookie(key,
                        PropertiesUtil.getProperty(siteName + "site." + key));
            }
        }
    }

    /**
     * Sets the crawl targets and the task log model, then registers one
     * start request per target.
     *
     * @param targetList targets to crawl; must be non-null and non-empty
     * @param spiderLog log model for the whole task; must be non-null
     * @throws SpiderException if {@code targetList} is null/empty or
     *             {@code spiderLog} is null
     */
    public void setTargetList(List<JSONObject> targetList,
            SpiderLogModel spiderLog)
    {
        if (targetList == null || targetList.isEmpty())
        {
            throw new SpiderException(
                    SpiderExceptionCode.TargetInfoEmptyException);
        }
        this.targetList = targetList;

        if (spiderLog == null)
        {
            throw new SpiderException(
                    SpiderExceptionCode.SpiderTaskInfoErrorException);
        }
        this.spiderLog = spiderLog;

        this.setSiteRequest();
    }

    @Override
    public Site getSite()
    {
        return this.site;
    }

    /**
     * Registers a start request for every target. Priorities count down
     * from the list size so earlier targets get higher priority.
     */
    protected void setSiteRequest()
    {
        int priority = targetList.size();
        for (JSONObject target : targetList)
        {
            this.addStartRequest(this.getStartRequest(target), target,
                    priority--);
        }
    }

    /**
     * Attaches the target data as request extras, assigns the priority and
     * registers the request with the Site. A null request (target skipped
     * by the subclass) is silently ignored.
     *
     * @param request start request built by the subclass; may be null
     * @param target target data copied into the request extras
     * @param priority scheduling priority for this request
     */
    @SuppressWarnings({"unchecked", "deprecation"})
    protected void addStartRequest(Request request, JSONObject target,
            int priority)
    {
        if (request == null)
        {
            return;
        }
        request.setExtras(new HashMap<String, Object>(target));
        request.setPriority(priority);
        this.site.addStartRequest(request);
    }

    /**
     * @return a fresh target marked with the OTHER type flag
     */
    protected JSONObject getOtherTarget()
    {
        return this.setOtherTarget(new JSONObject());
    }

    /**
     * Marks the given target with the OTHER type flag.
     *
     * @param target target to mark; may be null (returned unchanged)
     * @return the same target instance, for chaining
     */
    protected JSONObject setOtherTarget(JSONObject target)
    {
        if (target != null)
        {
            target.put(Constants.TargetType.OTHER_TYPE_KEY,
                    Constants.TargetType.OTHER);
        }
        return target;
    }

    /**
     * Initializes runtime parameters for the spider task. Default is a
     * no-op; subclasses override as needed.
     *
     * @param map parameter map
     */
    public void initRunParams(Map<String, Object> map)
    {

    }

    /**
     * Builds the start request for one target.
     *
     * @param target target data
     * @return the start request, or null to skip this target
     */
    protected abstract Request getStartRequest(JSONObject target);

    /**
     * Returns the spider task type identifier (used as the property-key
     * prefix for site-specific configuration).
     *
     * @return task type identifier
     */
    protected abstract String getSpiderType();

    /**
     * Resolves the data type of a crawled page.
     *
     * @param page crawled page
     * @return type identifier
     */
    public abstract String getPageDataType(Page page);

    public SpiderLogModel getSpiderLog()
    {
        return spiderLog;
    }
}
