package com.quanliang.org.crawlerquanliang.core;


import com.quanliang.org.crawlerquanliang.core.Container;
import com.quanliang.org.crawlerquanliang.pojo.Data;
import com.quanliang.org.crawlerquanliang.utils.ImgUtil;
import com.quanliang.org.crawlerquanliang.utils.JsoupUtil;
import com.quanliang.org.crawlerquanliang.utils.RegularUtil;
import lombok.SneakyThrows;
import org.apache.http.HttpEntity;
import org.apache.http.ParseException;
import org.apache.http.client.config.RequestConfig;
import org.apache.http.client.methods.CloseableHttpResponse;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.impl.client.CloseableHttpClient;
import org.apache.http.impl.client.HttpClients;
import org.apache.http.util.EntityUtils;
import org.jsoup.Jsoup;

import java.awt.*;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.TimeUnit;

/**
 * 爬虫 — multi-threaded crawler entry point.
 *
 * @author FYM
 * @date 2022/8/27 5:50
 */
public class Crawler {
    // Single shared client (pooling): the default HttpClient keeps an internal
    // connection pool, so one instance serves every crawler thread.
    private static final CloseableHttpClient httpclient = HttpClients.createDefault();
    // Thread pool that executes CrawlerTask jobs; must be injected before start().
    private static ExecutorService executor;

    public static void setExecutorService(ExecutorService e) {
        executor = e;
    }


    /**
     * Starts the crawl: drains {@code Container.urlQueue} into the executor,
     * waits for the Excel writer to report completion, prints the total run
     * time and terminates the JVM.
     *
     * @throws IllegalStateException if no ExecutorService was injected via
     *                               {@link #setExecutorService(ExecutorService)}
     */
    @SneakyThrows
    public static void start() {
        if (executor == null) {
            throw new IllegalStateException("ExecutorService not set; call setExecutorService() first");
        }
        Container.beginTime = System.currentTimeMillis();
        // Drain the URL queue. poll() can return null if the last element was
        // taken between the isEmpty() check and the poll(), so guard explicitly
        // instead of handing a null URL to CrawlerTask.
        while (!Container.urlQueue.isEmpty()) {
            String url = Container.urlQueue.poll();
            if (url == null) {
                break;
            }
            executor.execute(new CrawlerTask(url));
            TimeUnit.MILLISECONDS.sleep(100); // throttle submission rate
        }
        Container.isOver = true;

        // Poll (1s interval) until the Excel writer signals it has finished.
        while (!Container.isEnd) {
            TimeUnit.SECONDS.sleep(1);
        }

        System.out.println("程序共用时：" + (System.currentTimeMillis() - Container.beginTime) / 1000 + "秒");
        System.out.println(Thread.currentThread().getName() + "正在关闭程序~");
        System.exit(0);
    }


    /**
     * Crawls a single URL: fetches the page, extracts {@code Data} rows from
     * the response body and enqueues them on {@code Container.dataQueue} for
     * the writer thread. I/O and parse failures are logged, not propagated.
     *
     * @param url the page URL to fetch
     */
    public static void httpWork(String url) {
        HttpGet httpget = new HttpGet(url);

        // Masquerade as a regular browser so the target site does not reject us.
        httpget.setHeader("User-Agent",
                "Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/45.0.2454.101 Safari/537.36");
        // 5s timeouts: TCP connect, pool lease, and socket read.
        RequestConfig request = RequestConfig.custom()
                .setConnectTimeout(5000)
                .setConnectionRequestTimeout(5000)
                .setSocketTimeout(5000).build();
        httpget.setConfig(request);

        // try-with-resources guarantees the response (and its connection) is released.
        try (CloseableHttpResponse response = httpclient.execute(httpget)) {
            HttpEntity entity = response.getEntity();
            if (entity == null) {
                // e.g. a 204 response carries no body; nothing to parse.
                return;
            }
            String res = EntityUtils.toString(entity, StandardCharsets.UTF_8);

            // Derive the page number from the URL, then extract and enqueue rows.
            String page = RegularUtil.getPageByURL(url);
            List<Data> dataList = getData(res, page);
            for (Data d : dataList) {
                System.out.println(Thread.currentThread().getName() + "将Data加入任务中===>" + d);
                Container.dataQueue.offer(d);
            }
        } catch (IOException | ParseException e) {
            // TODO(review): replace with a proper logger; stack trace kept for now.
            e.printStackTrace();
        }
    }


    /**
     * Extracts {@code Data} records from raw HTML, tags each with its page
     * number and downloads each record's image to a local path.
     *
     * @param res  raw HTML of the fetched page
     * @param page page number this HTML came from
     * @return the parsed records, each with page and local image path set
     */
    private static List<Data> getData(String res, String page) {
        List<Data> dataList = JsoupUtil.getDataList(res);
        for (Data d : dataList) {
            d.setPage(page);
            String imgURL = d.getImgURL();
            String localPath = ImgUtil.downLoadURLImg(imgURL);
            d.setImgLocal(localPath);
        }
        return dataList;
    }

}
