package com.aurora.spider.utils;

import com.aurora.entity.Topic;
import com.geccocrawler.gecco.GeccoEngine;
import com.geccocrawler.gecco.request.HttpGetRequest;
import lombok.extern.slf4j.Slf4j;
import org.springframework.stereotype.Component;

import java.io.BufferedReader;
import java.io.FileInputStream;
import java.io.InputStreamReader;
import java.net.URLEncoder;
import java.nio.charset.StandardCharsets;

/**
 * @program: blog-aurora
 * @description: crawler utilities — Gitee repository search, houyuantuan star search, Macy's coupons
 * @author: jonk
 * @create: 2023-07-28 15:38
 **/

@Slf4j
@Component
public class SpiderUtil {

    public  int getGitee(Topic topic) {
        Integer pageNum = 1;
        Integer totalPage = 5;
        try {
            //查询课题表 拉取所有课题 遍历
            String key = topic.getTopicName();
            String encode = URLEncoder.encode(key, "UTF-8");
            for (int i = 0; i < totalPage; i++) {
                HttpGetRequest startUrl = new HttpGetRequest("https://search.gitee.com/?skin=rec&type=repository&q=" + encode + "&lang=java&sort=stars_count&pageno=" + pageNum);
                startUrl.setCharset("GBK");
                log.info("getGitee startUrl.getUrl {} ", startUrl.getUrl());
                GeccoEngine.create()
                        //Gecco搜索的包路径
                        .classpath("com.aurora.spider.gitee")
                        //开始抓取的页面地址
                        .seed(startUrl)
                        //开启几个爬虫线程
                        .thread(1)
                        //单个爬虫每次抓取完一个请求后的间隔时间
                        .interval(10000)
                        .run();
                pageNum++;
            }

        } catch (Exception e) {
            log.error("SpiderUtil.getGitee err {},{}", e.getMessage(), e);
            return 0;
        }
        return (pageNum - 1) * totalPage * 10;
    }


    public  void houyuantuan() {

        //拉取所有明星
        try (FileInputStream fis = new FileInputStream("D:\\dev\\space\\blog-aurora\\src\\main\\java\\com\\aurora\\spider\\houyuantuan\\star.txt");
             InputStreamReader isr = new InputStreamReader(fis);
             BufferedReader br = new BufferedReader(isr)) {
// 使用BufferedReader读取文件内容
            while(br.ready()){
                String key =  br.readLine();
                String encode = URLEncoder.encode(key, "UTF-8");
                    HttpGetRequest startUrl = new HttpGetRequest("https://www.houyuantuan.com/search/mingxing/?search="+encode);
                    startUrl.setCharset("GBK");
                    log.info("getGitee startUrl.getUrl {} "+startUrl.getUrl());
                    GeccoEngine.create()
                            //Gecco搜索的包路径
                            .classpath("com.aurora.spider.houyuantuan")
                            //开始抓取的页面地址
                            .seed(startUrl)
                            //开启几个爬虫线程
                            .thread(5)
                            //单个爬虫每次抓取完一个请求后的间隔时间
                            .interval(500)
                            .run();
            }
        } catch (Exception e) {
            log.info("SpiderUtil.getGitee err {},{}"+e.getMessage());
        }
    }
    public  void macysSearch() {
        HttpGetRequest startUrl = new HttpGetRequest("https://coupons.usatoday.com/macys");
        startUrl.setCharset("GBK");
        log.info("getGitee startUrl.getUrl {} "+startUrl.getUrl());
        GeccoEngine.create()
                //Gecco搜索的包路径
                .classpath("com.aurora.spider.site.macys")
                //开始抓取的页面地址
                .seed(startUrl)
                //开启几个爬虫线程
                .thread(1)
                //单个爬虫每次抓取完一个请求后的间隔时间
                .interval(500)
                .run();
    }

    /**
     * ingxing/2071.html
     * @param url
     * @return
     */
    public static long getUrlId(String url){
        String[] split = url.split("/");
        String str = split[split.length - 1].split("\\.")[0];
        long id = Long.parseLong(str);
        return id ;
    }

}