package ltd.hxya.novel.common.utils;

import lombok.SneakyThrows;
import lombok.extern.slf4j.Slf4j;
import org.apache.http.HttpEntity;
import org.apache.http.client.methods.CloseableHttpResponse;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.client.utils.HttpClientUtils;
import org.apache.http.impl.client.CloseableHttpClient;
import org.apache.http.impl.client.HttpClients;
import org.apache.http.util.EntityUtils;
import org.jsoup.Connection;
import org.jsoup.Jsoup;
import org.jsoup.nodes.Document;
import org.jsoup.nodes.Element;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;
import org.springframework.util.StringUtils;
import org.springframework.web.client.RestTemplate;

import javax.swing.text.html.HTMLDocument;
import java.io.*;
import java.net.*;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

@Component
@Slf4j
public class CrawlUtils {

    /** Fetch strategy switch: "jsoup" routes page fetches through the external python helper. */
    private static String type = "jsoup";

    /**
     * Fetches the page at {@code url} and returns its raw body as a String.
     * When {@link #type} is {@code "jsoup"} the fetch is delegated to the python
     * helper script via {@link #execPython(String, String)}; otherwise Apache
     * HttpClient is used and the response entity is stringified.
     *
     * @param url           the page URL to fetch
     * @param avaliableHost proxy host passed through to the python helper
     * @return the response body as a single String
     * @throws IOException on network or read failure
     */
    @SneakyThrows // also covers the InterruptedException thrown by execPython
    public static String entityToString(String url, String avaliableHost) throws IOException {
        if ("jsoup".equals(type)) {
            return execPython(url, avaliableHost);
        }
        return EntityUtils.toString(httpEntity(url));
    }

    /**
     * Fetches the page body via a direct (non-proxied) Jsoup connection.
     *
     * @param url     the page URL to fetch
     * @param isProxy currently UNUSED — the request is always direct.
     *                NOTE(review): either honor this flag or remove it; confirm with callers.
     * @return the HTML {@code <body>} element serialized as a String
     * @throws IOException on network failure
     */
    public static String entityToString(String url, Boolean isProxy) throws IOException {
        Element body = getJsoupNotProxyConnect(url).get().body();
        return body.toString();
    }

    /**
     * Runs the external python crawler script for {@code url} through the given
     * proxy host and collects its stdout into a single String.
     *
     * @param url           the page URL the script should fetch
     * @param avaliableHost proxy host handed to the script command line
     * @return concatenated stdout lines of the script (line breaks dropped)
     * @throws IOException          on script I/O failure
     * @throws InterruptedException if waiting for the script is interrupted
     */
    public static String execPython(String url, String avaliableHost) throws IOException, InterruptedException {
        String[] cmdArr = BaseUtils.structPythonParam(avaliableHost, url);
        BaseUtils baseUtils = new BaseUtils();
        // StringBuilder avoids O(n^2) string concatenation; try-with-resources
        // guarantees the reader is closed even if readLine() throws.
        StringBuilder rowBody = new StringBuilder();
        try (BufferedReader bufferedReader = baseUtils.execScript(cmdArr)) {
            String line;
            while ((line = bufferedReader.readLine()) != null) {
                rowBody.append(line);
            }
        }
        int flag = baseUtils.waitFor(avaliableHost);
        // Fixed: was log.info("", flag) — empty format string, flag was never logged.
        log.info("python crawler script exit flag: {}", flag);
        return rowBody.toString();
    }

    /**
     * Builds a Jsoup connection that goes through a hard-coded HTTP proxy.
     * TODO(review): proxy host/port are hard-coded; consider making them configurable.
     *
     * @param url the target URL
     * @return a configured (not yet executed) Jsoup {@link Connection}
     * @throws IOException declared for caller compatibility
     */
    public static Connection getJsoupConnect(String url) throws IOException {
        Proxy proxy = new Proxy(Proxy.Type.HTTP, new InetSocketAddress("209.146.105.241", 80));
        return Jsoup.connect(url)
                .timeout(60000)
                .proxy(proxy);
    }

    /**
     * Builds a direct (non-proxied) Jsoup connection with a desktop browser UA.
     *
     * @param url the target URL
     * @return a configured (not yet executed) Jsoup {@link Connection}
     * @throws IOException declared for caller compatibility
     */
    public static Connection getJsoupNotProxyConnect(String url) throws IOException {
        return Jsoup.connect(url)
                .userAgent("Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/107.0.0.0 Safari/537.36 Edg/107.0.1418.42")
                .timeout(60000);
    }

    /**
     * Reads the full response body of {@code url} as UTF-8 text via
     * {@link HttpURLConnection}, with line breaks dropped.
     *
     * @param url the page URL to fetch
     * @return concatenated response lines
     * @throws IOException on network or read failure
     */
    public static String rowBody(String url) throws IOException {
        HttpURLConnection urlConnection = getUrlConnection(url);
        StringBuilder body = new StringBuilder();
        // try-with-resources fixes the reader/stream leak in the original;
        // StandardCharsets.UTF_8 replaces the string charset name "utf-8".
        try (BufferedReader bufferedReader = new BufferedReader(
                new InputStreamReader(urlConnection.getInputStream(), StandardCharsets.UTF_8))) {
            String message;
            while ((message = bufferedReader.readLine()) != null) {
                body.append(message);
            }
        }
        return body.toString();
    }

    /**
     * Opens an {@link HttpURLConnection} to {@code url}.
     * TODO(review): proxy system properties are set globally on every call; scope this.
     *
     * @param url the URL to connect to
     * @return an open (unconfigured) connection to {@code url}
     * @throws IOException on connection failure
     */
    public static HttpURLConnection getUrlConnection(String url) throws IOException {
        System.setProperty("proxyHost", "103.111.120.138");
        System.setProperty("proxyPort", "80");
        // BUG FIX: the original ignored the url parameter and always opened a
        // hard-coded novel-chapter URL ("https://www.wxsy.net/novel/1238/...").
        return (HttpURLConnection) new URL(url).openConnection();
    }

    /**
     * Executes a GET against {@code url} and returns the response entity.
     *
     * @param url the URL to fetch
     * @return the response {@link HttpEntity}
     * @throws IOException on network failure
     */
    public static HttpEntity httpEntity(String url) throws IOException {
        return getHttpClient(url).getEntity();
    }

    /**
     * Executes a GET request against {@code url} with a browser User-Agent.
     * NOTE(review): the CloseableHttpClient is never closed (it must outlive the
     * returned response so callers can consume the entity); consider a shared
     * static client or returning the body directly so resources can be released.
     *
     * @param url the URL to fetch
     * @return the (still open) HTTP response
     * @throws IOException on network failure
     */
    public static CloseableHttpResponse getHttpClient(String url) throws IOException {
        CloseableHttpClient httpClient = HttpClients.createDefault();
        HttpGet httpGet = new HttpGet(url);
        httpGet.setHeader("user-agent", "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/105.0.0.0 Safari/537.36");
        return httpClient.execute(httpGet);
    }

    /**
     * Builds the absolute URL of a book page from a crawl-site base URL and an
     * href template containing a {@code {bookId}} placeholder.
     *
     * @return the resolved URL, or {@code null} if any argument is null
     */
    public static String getBookUrl(String crawlUrl, String bookHref, String crawlBookId) {
        if (crawlUrl == null || bookHref == null || crawlBookId == null) {
            return null;
        }
        return BaseUtils.getUrl(bookHref.replace("{bookId}", crawlBookId), crawlUrl);
    }

    /**
     * Builds the absolute search URL from a crawl-site base URL and a search-url
     * template containing a {@code {keyword}} placeholder.
     *
     * @return the resolved URL, or {@code null} if any argument is null
     */
    public static String getSearchUrl(String crawlUrl, String searchUrl, String keyword) {
        if (crawlUrl == null || searchUrl == null || keyword == null) {
            return null;
        }
        return BaseUtils.getUrl(searchUrl.replace("{keyword}", keyword), crawlUrl);
    }

    /**
     * Builds the absolute URL of a chapter page from a crawl-site base URL and an
     * href template containing {@code {bookId}} and {@code {indexId}} placeholders.
     *
     * @return the resolved URL, or {@code null} if any argument is null
     */
    public static String getChapterUrl(String crawlUrl, String chapterHref, String bookId, String indexId) {
        if (crawlUrl == null || bookId == null || indexId == null || chapterHref == null) {
            return null;
        }
        return BaseUtils.getUrl(chapterHref.replace("{bookId}", bookId).replace("{indexId}", indexId), crawlUrl);
    }

    /**
     * Compiles {@code pattern} and returns a {@link Matcher} over {@code value}.
     * NOTE(review): the pattern is recompiled on every call; hot callers with a
     * fixed pattern should cache a static {@link Pattern} instead.
     */
    public static Matcher patternCheck(String value, String pattern) {
        return Pattern.compile(pattern).matcher(value);
    }

    /**
     * Returns group(1) of the LAST match of {@code pattern} in {@code value}.
     *
     * @return the last captured group, {@code null} if there is no match,
     *         or {@code ""} if {@code pattern} is empty
     */
    public static String simpleCrawlRulePattern(String value, String pattern) {
        if (StringUtils.isEmpty(pattern)) {
            return "";
        }
        Matcher matcher = patternCheck(value, pattern);
        String response = null;
        while (matcher.find()) {
            response = matcher.group(1);
        }
        return response;
    }

    /**
     * Collects group(1) of every match of {@code pattern} in {@code value}.
     *
     * @return a (possibly empty) list of captured groups, in match order
     */
    public static List<String> simpleCollectByPattern(String pattern, String value) {
        Matcher matcher = patternCheck(value, pattern);
        List<String> list = new ArrayList<>();
        while (matcher.find()) {
            list.add(matcher.group(1));
        }
        return list;
    }
}
