package com.symone.crawler.action;

import cn.hutool.json.JSONUtil;
import com.symone.crawler.data.ProxyData;
import com.symone.crawler.log.Log;
import com.symone.crawler.log.LogFactory;
import org.jsoup.Connection;
import org.jsoup.Connection.Response;
import org.jsoup.HttpStatusException;
import org.jsoup.Jsoup;

import java.io.IOException;
import java.net.SocketTimeoutException;
import java.util.Map;

import com.symone.crawler.config.CrawlerConfig;

/**
 * Fetches web pages via jsoup, optionally routing the request through a proxy.
 * Each request is logged (proxy details, URL, and parameters) before execution.
 */
public class Catcher {

    private static final Log logger = LogFactory.getLog(Catcher.class);

    /**
     * Fetches the page at {@code url} over a direct connection (no proxy).
     *
     * @param url    the URL to fetch
     * @param params request data to send with the request; may be {@code null}
     * @return the raw jsoup {@link Response}
     * @throws IOException on connection failure, timeout, or HTTP error status
     */
    public Response catchDocument(String url, Map<String, String> params) throws IOException {
        return catchDocumentWithProxy(url, params, null);
    }

    /**
     * Fetches the page at {@code url}, optionally through the given proxy.
     * <p>
     * Note: there is no retry here — a timeout or HTTP error propagates to the
     * caller as an {@link IOException}; retrying is the caller's responsibility.
     *
     * @param url    the URL to fetch
     * @param params request data to send with the request; may be {@code null}
     * @param proxy  proxy to route through; {@code null} means a direct connection
     * @return the raw jsoup {@link Response}
     * @throws IOException on connection failure, timeout, or HTTP error status
     */
    public Response catchDocumentWithProxy(String url, Map<String, String> params, ProxyData proxy) throws IOException {
        // Log proxy details (if any), then the URL and serialized parameters.
        String logInfo = proxy != null
                ? ("Proxy:" + proxy.getIp() + ":" + proxy.getPort() + "    -   "
                        + CrawlerConfig.DATETIME_FORMATTER.format(proxy.getExpireTime()) + "  " + proxy.getCity())
                : "";
        logInfo += "    URL:" + url + "    params:" + JSONUtil.toJsonStr(params);
        logger.info(logInfo);

        // NOTE(review): Content-Type is application/json even though params are sent
        // as form data via Connection.data() — presumably the target endpoints
        // tolerate this; confirm before changing.
        Connection connection = Jsoup.connect(url)
                .header("Accept", "text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8")
                .header("Accept-Encoding", "gzip, deflate, br")
                .header("Accept-Language", "zh-CN,zh;q=0.9")
                .header("Content-Type", "application/json; charset=UTF-8")
                .header("User-Agent", CrawlerConfig.getUserAgent())
                .timeout(CrawlerConfig.HTTP_REQUEST_TIMEOUT)
                .ignoreContentType(true); // accept non-HTML responses (e.g. JSON)
        if (params != null) {
            connection = connection.data(params);
        }
        if (proxy != null) {
            connection = connection.proxy(proxy.getIp(), proxy.getPort());
        }
        return connection.execute();
    }
}
