package designPatterns.com.course_design.scrapy.spider;


import designPatterns.com.course_design.scrapy.http.request.Request;

import java.util.Collections;
import java.util.HashSet;
import java.util.Set;

/**
 * @projectName DesignPattern
 * @className designPatterns.com.course_design.scrapy.spider.HashSetHandler
 * @description Chain-of-responsibility handler that filters out requests whose
 *              URL has already been processed, using an in-memory HashSet.
 */

public class HashSetHandler implements RequestHandler {

    // Separator framing the log output. Built once instead of on every push()
    // call as the original did. (Collections.nCopies keeps Java 8 compatibility.)
    private static final String SEPARATOR = String.join("", Collections.nCopies(40, "="));

    // URLs already seen by this handler; membership here means "duplicate".
    private final Set<String> urlSet = new HashSet<>();

    // Successor in the chain; null when this handler is the tail.
    private RequestHandler nextHandler;

    /**
     * Performs the duplicate check for this chain link: a URL seen before is
     * dropped with a "fail" log line; a new URL is recorded and the request is
     * forwarded to the next handler, if one is set.
     *
     * @param request the request to check; its URL is the deduplication key
     */
    @Override
    public void push(Request request) {
        // Set.add returns false when the element was already present, so a
        // single lookup replaces the original contains()-then-add() pair.
        if (urlSet.add(request.getUrl())) {
            System.out.println(SEPARATOR + "DuplicateFilter pass" + SEPARATOR);
            if (nextHandler != null) {
                nextHandler.push(request);
            }
        } else {
            System.out.println(SEPARATOR + "DuplicateFilter fail:" + request.getUrl() + SEPARATOR);
        }
    }

    /**
     * Sets the successor that receives requests passing the duplicate check.
     *
     * @param handler the next handler in the chain
     */
    @Override
    public void setNextHandler(RequestHandler handler) {
        this.nextHandler = handler;
    }
}
