package designPatterns.com.course_design.scrapy;


import designPatterns.com.course_design.scrapy.http.request.Request;
import designPatterns.com.course_design.scrapy.http.request.ScrapyRequest;
import designPatterns.com.course_design.scrapy.spider.HashSetHandler;
import designPatterns.com.course_design.scrapy.spider.MySpider;

import java.util.ArrayList;
import java.util.Collections;
import java.util.List;

/**
 * Entry point that assembles the crawler via the builder pattern.
 *
 * Roles (mediator-style layout):
 * <ul>
 *   <li>spider     — user-defined page-analysis logic ({@code MySpider})</li>
 *   <li>scheduler  — user-chosen request queue: plain, priority, de-duplicating, etc.</li>
 *   <li>downloader — middleware hook for request headers, download delay, etc.</li>
 *   <li>engine     — the core mediator coordinating every worker node</li>
 * </ul>
 */
public class Client {
    public static void main(String[] args) throws NoSuchFieldException, IllegalAccessException {
        // Seed the crawl with three start URLs.
        List<Request> seeds = new ArrayList<>();
        int seedIndex = 0;
        while (seedIndex < 3) {
            Request seed = new ScrapyRequest();
            seed.setUrl("www." + seedIndex + ".com");
            seeds.add(seed);
            seedIndex++;
        }

        // Builder pattern keeps the multi-step Engine construction readable.
        Engine engine = Engine.Builder
                .newInstance()
                .setSpider(new MySpider())
                .addRequestHandler(new HashSetHandler())
                .build();

        // Hand the start URLs to the engine.
        engine.init(seeds);

        // Visual separator before the run output.
        System.out.println(String.join("", Collections.nCopies(100, "=")));

        // Kick off the crawl.
        engine.start();
    }
}
