package com.slytherin.sns.spider.work;

import com.slytherin.sns.spider.client.HttpClientRequestContext;
import com.slytherin.sns.spider.client.HttpUriRequestConverter;
import com.slytherin.sns.spider.page.Page;
import com.slytherin.sns.spider.proxy.Proxy;
import com.slytherin.sns.spider.proxy.ProxyProvider;
import com.slytherin.sns.spider.result.Request;
import lombok.Setter;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.io.IOUtils;
import org.apache.http.HttpResponse;
import org.apache.http.client.methods.CloseableHttpResponse;
import org.apache.http.impl.client.CloseableHttpClient;
import us.codecraft.webmagic.Site;
import us.codecraft.webmagic.Task;
import us.codecraft.webmagic.downloader.HttpClientGenerator;
import us.codecraft.webmagic.selector.PlainText;
import us.codecraft.webmagic.utils.CharsetUtils;
import us.codecraft.webmagic.utils.HttpClientUtils;

import java.io.IOException;
import java.nio.charset.Charset;
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;

@Slf4j
public class SlytherinDownloader extends AbstractDownloader {

    /**
     * Converts a {@link Request} into an executable HTTP-client request.
     * A custom converter may be injected, but it must subclass
     * {@link HttpUriRequestConverter} and override its conversion methods.
     */
    @Setter
    private HttpUriRequestConverter httpUriRequestConverter = new HttpUriRequestConverter();

    /**
     * Per-domain cache of HTTP clients so connections are reused across
     * requests to the same site. Cleaned up by {@link #closeAllHttpClients()}.
     */
    private final Map<String, CloseableHttpClient> httpClients = new ConcurrentHashMap<>();

    /**
     * Factory building the {@link CloseableHttpClient} instances used to send requests.
     */
    private final HttpClientGenerator httpClientGenerator = new HttpClientGenerator();

    /**
     * Optional global proxy provider, injected from outside via {@link #setProxyProvider}.
     */
    private ProxyProvider proxyProvider;

    /**
     * Whether response headers should be copied onto the resulting {@link Page}
     * (some login flows need them).
     */
    private boolean responseHeader = true;

    /**
     * Default number of download attempts per request.
     */
    private static final int MAX_RETRIES = 3;

    /**
     * Per-thread retry budget; a ThreadLocal so each worker thread can in
     * principle carry its own limit without interfering with other threads.
     */
    ThreadLocal<Integer> maxRetriesLocal = ThreadLocal.withInitial(() -> MAX_RETRIES);

    /**
     * Downloads the page for {@code request}, retrying up to the per-thread
     * limit with a fixed one-second back-off between attempts.
     *
     * @param request the request to execute
     * @param task    the owning task; must carry a non-null {@link Site}
     * @return the downloaded page, or {@link Page#fail()} when every attempt failed
     * @throws NullPointerException if {@code task} or its site is null
     */
    @Override
    public Page download(Request request, Task task) {
        if (task == null || task.getSite() == null) {
            throw new NullPointerException("task or site can not be null");
        }
        // A plain local counter suffices: the variable is confined to this
        // invocation, so the original method-local ThreadLocal only added
        // overhead and a potential leak (it was never removed).
        int retries = 0;
        // Read the per-thread limit once and use it consistently; the original
        // mixed maxRetriesLocal in the loop condition with MAX_RETRIES in the
        // exception path.
        int maxRetries = maxRetriesLocal.get();

        Page page = Page.fail();
        while (retries < maxRetries) {
            try {
                page = doDownload(request, task);
                if (page != null && page.isDownloadSuccess()) {
                    onSuccess(request);
                    log.info("downloading page success {}", request.getUrl());
                    break;
                }
                onError(request);
                retries++;
                // Fixed back-off before the next attempt; abort on interrupt.
                if (!sleepBetweenRetries()) {
                    onError(request);
                    return Page.fail();
                }
            } catch (Exception e) {
                log.warn("download page {} error", request.getUrl(), e);
                retries++;
                if (retries >= maxRetries) {
                    onError(request);
                    return Page.fail();
                }
                if (!sleepBetweenRetries()) {
                    onError(request);
                    return Page.fail();
                }
            }
        }
        return page;
    }

    /**
     * Sleeps one second between retry attempts.
     *
     * @return {@code true} when the sleep completed normally; {@code false}
     *         when the thread was interrupted (the interrupt flag is restored)
     */
    private boolean sleepBetweenRetries() {
        try {
            Thread.sleep(1000);
            return true;
        } catch (InterruptedException e) {
            Thread.currentThread().interrupt();
            return false;
        }
    }

    /**
     * Executes a single download attempt against the site's cached client.
     *
     * @throws IOException on any transport or decoding failure
     */
    private Page doDownload(Request request, Task task) throws IOException {
        CloseableHttpClient httpClient = getHttpClient(task.getSite());
        Proxy proxy = proxyProvider != null ? proxyProvider.getProxy(task) : null;
        HttpClientRequestContext requestContext = httpUriRequestConverter.convert(request, task.getSite(), proxy);
        // try-with-resources closes the response; the client itself stays
        // cached in httpClients for reuse. The original removed and closed the
        // pooled client after every request, which defeated the cache and
        // raced with other threads still using the same client —
        // closeAllHttpClients() is the intended cleanup path.
        try (CloseableHttpResponse response = httpClient.execute(requestContext.getHttpUriRequest(), requestContext.getHttpClientContext())) {
            return handleResponse(request, request.getCharset() != null ? request.getCharset() : task.getSite().getCharset(), response, task);
        }
    }

    /**
     * Builds a {@link Page} from the raw HTTP response: body bytes, decoded
     * text (unless the request is binary), status code and, optionally, headers.
     *
     * @param request      the originating request
     * @param charset      explicit charset, or null to auto-detect from the response
     * @param httpResponse the raw response
     * @param task         the owning task (unused here, kept for subclass hooks)
     * @throws IOException if the entity cannot be read or decoded
     */
    protected Page handleResponse(Request request, String charset, HttpResponse httpResponse, Task task) throws IOException {
        byte[] bytes = IOUtils.toByteArray(httpResponse.getEntity().getContent());
        String contentType = httpResponse.getEntity().getContentType() == null ? "" : httpResponse.getEntity().getContentType().getValue();
        Page page = new Page();
        page.setBytes(bytes);
        if (!request.isBinaryContent()) {
            if (charset == null) {
                charset = getHtmlCharset(request, contentType, bytes);
            }
            page.setCharset(charset);
            page.setRawText(new String(bytes, charset));
        }
        page.setUrl(new PlainText(request.getUrl()));
        page.setRequest(request);
        page.setStatusCode(httpResponse.getStatusLine().getStatusCode());
        page.setDownloadSuccess(true);
        if (responseHeader) {
            page.setHeaders(HttpClientUtils.convertHeaders(httpResponse.getAllHeaders()));
        }
        return page;
    }

    /**
     * Detects the charset from the Content-Type header and body bytes,
     * falling back to the platform default charset when detection fails.
     */
    private String getHtmlCharset(Request request, String contentType, byte[] contentBytes) throws IOException {
        String charset = CharsetUtils.detectCharset(contentType, contentBytes);
        if (charset == null) {
            charset = Charset.defaultCharset().name();
            log.warn("Charset autodetect failed for URL {}: use {} as charset. Please specify charset in Site.setCharset()", request.getUrl(), Charset.defaultCharset());
        }
        return charset;
    }

    /**
     * Sets the per-client connection-pool size (HTTP request concurrency).
     */
    @Override
    public void setThread(int threadNum) {
        httpClientGenerator.setPoolSize(threadNum);
    }

    /**
     * Returns the client for the site's domain, creating and caching it on
     * first use. {@code computeIfAbsent} on the concurrent map gives the same
     * "create once per domain" guarantee the original hand-rolled
     * double-checked locking provided.
     */
    private CloseableHttpClient getHttpClient(Site site) {
        if (site == null) {
            return httpClientGenerator.getClient(null);
        }
        return httpClients.computeIfAbsent(site.getDomain(), domain -> httpClientGenerator.getClient(site));
    }

    /**
     * Closes every cached client and clears the cache. Call on shutdown.
     */
    public void closeAllHttpClients() {
        for (CloseableHttpClient httpClient : httpClients.values()) {
            try {
                httpClient.close();
            } catch (IOException e) {
                log.error("Error closing httpClient", e);
            }
        }
        httpClients.clear();
    }

    /**
     * Injects the proxy provider consulted for each download.
     *
     * @param proxyProvider the provider; may be null to disable proxying
     */
    public void setProxyProvider(ProxyProvider proxyProvider) {
        this.proxyProvider = proxyProvider;
    }
}
