package com.example.spider.event.listener;

import com.example.spider.event.DropTaskEvent;
import com.example.spider.event.RetriesLimitEvent;
import com.example.spider.util.FileBasedCollection;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.context.event.EventListener;
import org.springframework.stereotype.Component;

import java.util.Set;
import java.util.concurrent.ConcurrentSkipListSet;

@Slf4j
@Component
public class SpiderUrlsManager {

    /**
     * [Maintained manually] Domains for which an HTTP 503 is treated as rate limiting
     * and therefore worth retrying.
     */
    public final Set<String> limitRateUse503 = Set.of(
            "www.kuaidaili.com", "kuaidaili.com", "cdn-bo1.mangguo-youku.com:5278", "cdn-bo2.mangguo-youku.com:5278", "asp-bo2.mangguozy-videos.com:5278", "asp-bo1.mangguozy-videos.com:5278"
    );

    /**
     * URLs that already failed many retries — more retries are almost certainly futile,
     * so they are parked here and re-driven separately later.
     */
    @Autowired
    @Qualifier("noRetryUrls")
    private FileBasedCollection<ConcurrentSkipListSet<String>, String> noRetryUrls;

    /**
     * URLs most likely rejected by server-side throttling. Review these periodically:
     * if the vast majority share one domain, promote that domain to the
     * 503-retry whitelist ({@link #limitRateUse503}).
     */
    @Autowired
    @Qualifier("dropUrls")
    private FileBasedCollection<ConcurrentSkipListSet<String>, String> dropUrls;

    /**
     * Returns whether a 503 from {@code domain} should be interpreted as rate limiting.
     */
    public boolean isLimitRateUse503(String domain) {
        return limitRateUse503.contains(domain);
    }

    /**
     * Returns whether {@code url} has been parked in the no-retry set.
     */
    public boolean inNoRetryUrls(String url) {
        return noRetryUrls.contains(url);
    }

    /**
     * Records a task that exhausted its retry budget; its failure rate is too high,
     * so it is parked for a separate, later retry pass.
     */
    @EventListener(RetriesLimitEvent.class)
    public void onRetriesLimitEvent(RetriesLimitEvent event) {
        noRetryUrls.add(event.getTask().getUrl());
    }

    /**
     * Records a dropped task so it can potentially be re-driven later
     * (e.g. through a different proxy), depending on operational review.
     */
    @EventListener(DropTaskEvent.class)
    public void onDropTaskEvent(DropTaskEvent event) {
        dropUrls.add(event.getTask().getUrl());
    }

}
