package com.xiaotu.spider.downloader;

import java.io.IOException;
import java.util.HashMap;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.TimeoutException;

import org.apache.commons.lang.StringUtils;
import org.apache.http.HttpHost;
import org.apache.http.HttpResponse;
import org.apache.http.client.methods.CloseableHttpResponse;
import org.apache.http.client.methods.HttpUriRequest;
import org.apache.http.impl.client.CloseableHttpClient;
import org.apache.http.util.EntityUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.annotation.Scope;
import org.springframework.stereotype.Component;

import com.xiaotu.common.aop.ALogAspect;
import com.xiaotu.common.exception.SpiderExceptionCode;
import com.xiaotu.common.model.JobLogModel;
import com.xiaotu.common.model.SpiderLogModel;
import com.xiaotu.common.util.CacheHandler;
import com.xiaotu.common.util.Constants;
import com.xiaotu.common.util.Constants.DataRedisKey;
import com.xiaotu.common.util.DataRedisKeyUtils;
import com.xiaotu.common.util.GsonUtils;
import com.xiaotu.common.util.MathUtil;
import com.xiaotu.common.util.RegexUtils;
import com.xiaotu.common.util.ThreadPool;
import com.xiaotu.spider.APageProcessor;
import com.xiaotu.spider.SpiderFactory;

import us.codecraft.webmagic.Page;
import us.codecraft.webmagic.Request;
import us.codecraft.webmagic.Site;
import us.codecraft.webmagic.Task;
import us.codecraft.webmagic.downloader.HttpClientDownloader;
import us.codecraft.webmagic.proxy.Proxy;
import us.codecraft.webmagic.selector.PlainText;
import us.codecraft.webmagic.utils.WMCollections;

/**
 * Custom WebMagic downloader adding random sleep, a "correction" cache for
 * failed requests, per-request custom headers and a future-bounded HTTP
 * execute to avoid hung connections locking worker threads.
 *
 * @类名 SpiderHttpClientDownloader
 * @日期 2017年2月20日
 * @作者 高海军
 * @功能 自定义下载器
 */
@Component("SpiderHttpClientDownloader")
@Scope("prototype")
public class SpiderHttpClientDownloader extends HttpClientDownloader
{

    private static final Logger LOGGER = LoggerFactory
            .getLogger(SpiderHttpClientDownloader.class);

    /** Extras key under which a caller-supplied custom header map is stored. */
    public static final String HEADER_MAP = "headerMap";

    /** HTTP request-header field name "Host". */
    public static final String HEADER_HOST = "Host";

    /** HTTP request-header field name "Referer". */
    public static final String HEADER_REFERER = "Referer";

    /** Extras key used to record an IOException message on the request. */
    public static final String IOEXCEPTION_KEY = "_IOException_key";

    /**
     * One HttpClient per site domain. A ConcurrentHashMap makes the lazy
     * creation in getHttpClient() thread-safe; the previous version used
     * double-checked locking over a plain HashMap, which is not a safe
     * publication idiom.
     */
    private final Map<String, CloseableHttpClient> httpClients = new ConcurrentHashMap<String, CloseableHttpClient>();

    private MyHttpClientGenerator httpClientGenerator = new MyHttpClientGenerator();

    @Autowired
    private CacheHandler cacheHandler;

    private APageProcessor processor;

    private SpiderLogModel spiderLog;

    /** Runtime parameters switching optional behavior (random sleep, correction cache, ...). */
    protected Map<String, Object> runParaMap;

    /**
     * Wires this downloader to its page processor, log context and runtime
     * parameters, and registers itself on the processor.
     *
     * @param processor  page processor this downloader serves
     * @param spiderLog  log context (job type, series number, ...)
     * @param runParaMap runtime parameter map; presence of keys such as
     *                   {@link SpiderFactory#RANDOM_SLEEP} toggles features
     */
    public void initDownloader(APageProcessor processor,
            SpiderLogModel spiderLog, Map<String, Object> runParaMap)
    {
        this.processor = processor;
        this.spiderLog = spiderLog;
        this.runParaMap = runParaMap;
        processor.setDownloader(this);
    }

    @Override
    public void setThread(int thread)
    {
        // Size the connection pool to match the spider's thread count.
        this.httpClientGenerator.setPoolSize(thread);
        this.httpClientGenerator.setDefaultMaxPerRoute(thread);
    }

    /**
     * Downloads a page, recording any failure into the job log cache and the
     * "correction" cache so a later job can re-crawl the failed target.
     * Returns {@code null} on failure (WebMagic treats null as a failed
     * download).
     */
    @Override
    public Page download(Request request, Task task)
    {
        JobLogModel logModel = null;
        try
        {
            this.randomSleep();
            Page page = this.superHttpDownload(request, task);

            if (page == null)
            {
                Object codeExtra = request.getExtra(Request.STATUS_CODE);
                int statusCode = codeExtra == null
                        ? 0
                        : Integer.parseInt(codeExtra + "");
                // A null page with an accepted status code is not an error.
                if (processor.getSite().getAcceptStatCode()
                        .contains(statusCode))
                    return null;

                logModel = this.getLogData(request);
                logModel.setMessage(
                        SpiderExceptionCode.HttpIOException.toString()
                                + statusCode);
                LOGGER.error(
                        "[{}][{}] " + SpiderExceptionCode.HttpIOException
                                + "statusCode-{}",
                        spiderLog.getJobType(), logModel.getDataType(),
                        statusCode);

                this.addCorrectCache(request);

                return null;
            }

            // Successful download: drop the target from the correction pool.
            this.refreshCorrectCache(request);
            return page;
        }
        catch (Exception e)
        {
            // Preserve the interrupt status swallowed by this broad catch.
            if (e instanceof InterruptedException)
                Thread.currentThread().interrupt();
            logModel = this.getLogData(request);
            logModel.setMessage(e.getMessage());
            logModel.setException(e.getClass().getSimpleName());
            LOGGER.error("[{}][{}] Downloader error Log:",
                    spiderLog.getJobType(), logModel.getDataType(), e);
            this.addCorrectCache(request);
            return null;
        }
        finally
        {
            // Any failure above leaves a logModel; persist it for reporting.
            if (logModel != null)
            {
                LOGGER.error("[{}][{}] Downloader error Log:{}",
                        spiderLog.getJobType(), logModel.getDataType(),
                        GsonUtils.toJson(logModel));
                cacheHandler.addListCache(
                        DataRedisKeyUtils
                                .getKey(DataRedisKey.PageProcess_Log_YYYYMMDD),
                        logModel);
            }
        }

    }

    /**
     * Puts a rejected/failed request into the correction cache so that a
     * follow-up job can re-crawl it. Only active when the
     * {@link SpiderFactory#CORRECT_FAIL_REQUEST} run parameter is present.
     *
     * @param request the failed request
     */
    public void addCorrectCache(Request request)
    {
        if (!runParaMap.containsKey(SpiderFactory.CORRECT_FAIL_REQUEST))
            return;

        String field = this.getCorrectCacheField(request);
        if (StringUtils.isEmpty(field))
            return;

        String key = DataRedisKeyUtils
                .getCorrectKey(this.spiderLog.getJobType());

        // Strip the retry counter so the correction job starts fresh.
        Map<String, Object> map = request.getExtras();
        map.remove(Request.CYCLE_TRIED_TIMES);
        cacheHandler.hSet(key, field, map);
        cacheHandler.setCacheTimeout(key);

        LOGGER.warn("[{}] Downloader add Correct Log:{}",
                spiderLog.getJobType(), GsonUtils.toJson(request.getExtras()));
    }

    /**
     * Called on a successful download by a correction job: removes the target
     * from the correction pool. Only active when the
     * {@link SpiderFactory#REFRESH_FAIL_REQUEST} run parameter is present.
     *
     * @param request the successful request
     */
    private void refreshCorrectCache(Request request)
    {
        if (!runParaMap.containsKey(SpiderFactory.REFRESH_FAIL_REQUEST))
            return;

        String field = this.getCorrectCacheField(request);
        if (StringUtils.isEmpty(field))
            return;

        String key = DataRedisKeyUtils
                .getCorrectKey(this.spiderLog.getJobType());
        cacheHandler.hDel(key, field);
        LOGGER.warn("[{}] Downloader refresh Correct Log:{}",
                spiderLog.getJobType(), GsonUtils.toJson(request.getExtras()));
    }

    /**
     * Derives the correction-cache hash field from the request extras:
     * the TV id for TV targets, the star id for star targets, else null.
     */
    private String getCorrectCacheField(Request request)
    {
        String targetType = ALogAspect.getTargetType(request.getExtras(),
                "tvid", "starid");
        String field = null;
        if (Constants.TargetType.TV.equals(targetType))
            field = request.getExtra("tvid") + "";
        else if (Constants.TargetType.STAR.equals(targetType))
            field = request.getExtra("starid") + "";
        return field;
    }

    /**
     * Copy of the parent class' download method with three changes:
     * IOExceptions propagate to the caller, errors are logged, and the raw
     * {@code getHttpClient(site, proxy).execute(httpUriRequest)} call is
     * replaced by {@link #executeAndGet} so a hung connection cannot lock the
     * thread forever. Also guards every {@code site} dereference, since
     * {@code site} is legitimately null when {@code task} is null.
     *
     * @param request request to execute
     * @param task    owning task; may be null
     * @return the downloaded page, or null when the status code is rejected
     * @throws IOException when releasing the response entity fails
     */
    private Page superHttpDownload(Request request, Task task)
            throws IOException
    {
        Site site = null;
        if (task != null)
        {
            site = task.getSite();
        }
        Set<Integer> acceptStatCode;
        String charset = null;
        Map<String, String> headers = null;
        if (site != null)
        {
            acceptStatCode = site.getAcceptStatCode();
            charset = site.getCharset();
            headers = site.getHeaders();
        }
        else
        {
            acceptStatCode = WMCollections.newHashSet(200);
        }
        LOGGER.debug("[{}] downloading page {}", spiderLog.getJobType(),
                request.getUrl());
        CloseableHttpResponse httpResponse = null;
        int statusCode = 0;
        try
        {
            HttpHost proxyHost = null;
            Proxy proxy = null;
            if (site != null)
            {
                if (site.getHttpProxyPool() != null
                        && site.getHttpProxyPool().isEnable())
                {
                    proxy = site.getHttpProxyFromPool();
                    if (proxy != null)
                        proxyHost = proxy.getHttpHost();
                }
                else if (site.getHttpProxy() != null)
                {
                    proxyHost = site.getHttpProxy();
                }
            }

            HttpUriRequest httpUriRequest = getHttpUriRequest(request, site,
                    headers, proxyHost);
            // Future-bounded execute instead of the parent's direct call.
            httpResponse = this.executeAndGet(getHttpClient(site, proxy),
                    httpUriRequest);

            statusCode = httpResponse.getStatusLine().getStatusCode();
            request.putExtra(Request.STATUS_CODE, statusCode);
            if (statusAccept(acceptStatCode, statusCode))
            {
                Page page = handleResponse(request, charset, httpResponse,
                        task);
                onSuccess(request);
                return page;
            }
            else
            {
                LOGGER.error("[{}] get page {} error, status code {} ",
                        spiderLog.getJobType(), request.getUrl(), statusCode);
                return null;
            }
        }
        catch (Exception e)
        {
            LOGGER.error("[{}] download page {} error", spiderLog.getJobType(),
                    request.getUrl(), e);
            if (site != null && site.getCycleRetryTimes() > 0)
            {
                LOGGER.warn("[{}] add retry url[{}] by time[{}]",
                        spiderLog.getJobType(), request.getUrl(),
                        request.getExtra(Request.CYCLE_TRIED_TIMES));

                /*
                 * A null page means the retry budget is exhausted and no
                 * further request will be sent; record the exception on the
                 * request so the correction cache can pick it up.
                 */
                Page page = addToCycleRetry(request, site);
                if (page == null)
                    request.putExtra(IOEXCEPTION_KEY, e.getMessage());
                return page;
            }

            onError(request);
            request.putExtra(IOEXCEPTION_KEY, e.getMessage());
            throw new RuntimeException(e);
        }
        finally
        {
            /*
             * Unlike the parent class, failures here are caught so that the
             * response entity below is always consumed, releasing the
             * connection back to the pool for reuse.
             */
            try
            {
                request.putExtra(Request.STATUS_CODE, statusCode);
                if (site != null && site.getHttpProxyPool() != null
                        && site.getHttpProxyPool().isEnable())
                {
                    site.returnHttpProxyToPool(
                            (HttpHost) request.getExtra(Request.PROXY),
                            (Integer) request.getExtra(Request.STATUS_CODE));
                }
            }
            catch (Exception e)
            {
                LOGGER.error("[{}] returnHttpProxyToPool fail",
                        spiderLog.getJobType(), e);
            }
            try
            {
                if (httpResponse != null)
                {
                    // ensure the connection is released back to pool
                    EntityUtils.consume(httpResponse.getEntity());
                }
            }
            catch (IOException e)
            {
                LOGGER.error("[{}] close response fail", spiderLog.getJobType(),
                        e);
                throw e;
            }
        }

    }

    /**
     * Runs the HTTP request inside a pooled Future so control returns to the
     * caller within a bounded time, preventing a hung connection from locking
     * the worker thread.
     *
     * @param httpClient client to execute with
     * @param request    request to execute
     * @return the HTTP response
     * @throws InterruptedException if the waiting thread is interrupted
     * @throws ExecutionException   if the request itself fails
     * @throws TimeoutException     if 3x the site timeout elapses
     */
    private CloseableHttpResponse executeAndGet(
            final CloseableHttpClient httpClient, final HttpUriRequest request)
            throws InterruptedException, ExecutionException, TimeoutException
    {
        return ThreadPool.futureGet(() -> httpClient.execute(request),
                processor.getSite().getTimeOut() * 3);
    }

    /**
     * Sleeps a random number of seconds between 0 and the value of the
     * {@link SpiderFactory#RANDOM_SLEEP} run parameter; no-op when the
     * parameter is absent or not an integer.
     *
     * @throws NumberFormatException if the parameter fails to parse
     * @throws InterruptedException  if the sleep is interrupted
     */
    private void randomSleep()
            throws NumberFormatException, InterruptedException
    {
        if (!runParaMap.containsKey(SpiderFactory.RANDOM_SLEEP)
                || !RegexUtils.regexMatch(RegexUtils.REGEX_INTEGER,
                runParaMap.get(SpiderFactory.RANDOM_SLEEP)
                        + StringUtils.EMPTY))
            return;
        int seconds = MathUtil.getRand(0,
                Integer.parseInt(runParaMap.get(SpiderFactory.RANDOM_SLEEP)
                        + StringUtils.EMPTY));
        LOGGER.warn("[{}] random sleep:{}s", spiderLog.getJobType(), seconds);
        if (seconds > 0)
            Thread.sleep(seconds * 1000L); // long arithmetic avoids int overflow
    }

    /**
     * Builds the outgoing HTTP request and injects the custom headers stored
     * on the request extras (headers are discovered at crawl time, so they
     * cannot live in configuration).
     */
    @Override
    protected HttpUriRequest getHttpUriRequest(Request request, Site site,
            Map<String, String> headers, HttpHost proxy)
    {
        if (proxy != null)
            LOGGER.warn("[{}] proxy:{}:{}", spiderLog.getJobType(),
                    proxy.getHostName(), proxy.getPort());
        HttpUriRequest resq = super.getHttpUriRequest(request, site, headers,
                proxy);
        this.setRequestHeader(resq, request);
        return resq;

    }

    /**
     * Copies custom headers from the crawl request onto the HTTP request:
     * Referer (the request URL if none supplied), Host, and any entries of
     * the optional header map stored under {@link #HEADER_MAP}.
     *
     * @param resq    outgoing HTTP request
     * @param request crawl request carrying the extras
     */
    @SuppressWarnings({"unchecked"})
    private void setRequestHeader(HttpUriRequest resq, Request request)
    {
        if (request.getExtra(HEADER_REFERER) != null)
            resq.setHeader(HEADER_REFERER,
                    request.getExtra(HEADER_REFERER) + "");
        else
            resq.setHeader(HEADER_REFERER, request.getUrl());
        resq.setHeader(HEADER_HOST, resq.getURI().getHost());

        if (!(request.getExtra(HEADER_MAP) instanceof Map))
            return;
        Map<String, String> headerMap = (Map<String, String>) request
                .getExtra(HEADER_MAP);
        for (Map.Entry<String, String> entry : headerMap.entrySet())
            resq.addHeader(entry.getKey(), entry.getValue());
    }

    /**
     * Builds a failure log record for the given request, filled with the
     * current job context and the request's target information.
     */
    private JobLogModel getLogData(Request request)
    {
        Page page = new Page();
        page.setUrl(new PlainText(request.getUrl()));
        page.setRequest(request);

        JobLogModel logModel = new JobLogModel();
        logModel.setSeriesNo(spiderLog.getSeriesNo());
        logModel.setJobType(spiderLog.getJobType());
        logModel.setLogType(Constants.LogConstants.TYPE_SYSTEM);

        logModel.setJobStatus(Constants.JobStatus.FAIL);
        logModel.setException(SpiderExceptionCode.HttpIOException.toString());

        logModel.setUrl(request.getUrl());
        logModel.setTimestamp(System.currentTimeMillis());

        try
        {
            ALogAspect.setLogInfo(request.getExtras(), logModel);
            logModel.setDataType(processor.getPageDataType(page));
        }
        catch (Exception e)
        {
            // Best effort: the log record is still useful without these fields.
            LOGGER.error("[{}][{}] Downloader error Log:{}",
                    spiderLog.getJobType(), logModel.getDataType(), e);
        }

        return logModel;
    }

    /**
     * Copy of the parent's private getHttpClient: returns the per-domain
     * client, creating it lazily. computeIfAbsent on the concurrent map
     * replaces the parent's synchronized double-checked locking.
     *
     * @param site  site whose domain keys the client cache; may be null
     * @param proxy proxy to configure the client with
     * @return a pooled client for the site's domain
     */
    private CloseableHttpClient getHttpClient(final Site site,
            final Proxy proxy)
    {
        if (site == null)
        {
            return httpClientGenerator.getClient(null, proxy);
        }
        return httpClients.computeIfAbsent(site.getDomain(),
                domain -> httpClientGenerator.getClient(site, proxy));
    }

    /**
     * Builds the page as the parent does and, when the
     * {@link SpiderFactory#SET_RESPONSE} run parameter is present, attaches
     * the raw HTTP response to the request for the page processor's use.
     */
    @Override
    protected Page handleResponse(Request request, String charset,
            HttpResponse httpResponse, Task task) throws IOException
    {
        Page page = super.handleResponse(request, charset, httpResponse, task);
        if (runParaMap.containsKey(SpiderFactory.SET_RESPONSE))
            page.getRequest().putExtra(SpiderFactory.SET_RESPONSE,
                    httpResponse);
        return page;
    }
}
