package top.jiangqiang.crawler.core.client;

import lombok.Getter;
import lombok.extern.slf4j.Slf4j;
import org.dromara.hutool.core.collection.CollUtil;
import org.dromara.hutool.core.io.IoUtil;
import top.jiangqiang.crawler.core.config.GlobalCrawlerConfig;
import top.jiangqiang.crawler.core.entities.Crawler;
import top.jiangqiang.crawler.core.entities.StreamCrawler;
import top.jiangqiang.crawler.core.handler.ResultHandler;
import top.jiangqiang.crawler.core.http.HttpService;
import top.jiangqiang.crawler.core.recorder.RecorderService;

import java.util.List;
import java.util.concurrent.ThreadPoolExecutor;

@Slf4j
@Getter
public abstract class AbstractCrawlerService implements CrawlerService {
    private final GlobalCrawlerConfig globalCrawlerConfig = initGlobalCrawlerConfig();
    private final HttpService httpService = initHttpService();
    private final RecorderService recorderService = initRecorderService();
    private final ResultHandler resultHandler = initResultHandler();
    private final ThreadPoolExecutor requestExecutorService = initRequestExecutorService();
    private final ThreadPoolExecutor processingExecutorService = initProcessingExecutorService();

    /**
     * Starts the crawl: registers a shutdown hook that flushes the recorder,
     * merges and submits the configured seeds, then keeps polling the recorder
     * for new tasks. When {@code allowEnd} is enabled, the loop exits once both
     * executors have been observed idle on two consecutive iterations (a single
     * idle snapshot can be a transient gap between request and processing).
     */
    @Override
    public void start() {
        Runtime.getRuntime().addShutdownHook(new Thread(() -> {
            log.info("程序准备结束");
            recorderService.saveBeforeEnd();
            log.info("程序结束了");
        }));
        List<Crawler> seeds = globalCrawlerConfig.getSeeds();
        if (CollUtil.isNotEmpty(seeds)) {
            // De-duplicate once and reuse the same list for both merging and
            // task submission. Iterating the raw list outside this guard (as
            // before) NPE'd when getSeeds() returned null, and submitted
            // duplicate seeds that the merge loop had already collapsed.
            List<Crawler> distinctSeeds = seeds.stream().distinct().toList();
            for (Crawler seed : distinctSeeds) {
                merge(seed);
            }
            for (Crawler seed : distinctSeeds) {
                requestExecutorService.execute(getTask(seed));
                sleep();
            }
        }
        // Null-safe unboxing: a missing config value means "never auto-end".
        boolean allowEnd = Boolean.TRUE.equals(globalCrawlerConfig.getAllowEnd());
        //记录上次循环时是否满足退出状态，连续两次满足则退出
        // true when the previous iteration already looked idle
        boolean idleOnce = false;
        while (true) {
            Crawler crawler = recorderService.poll(globalCrawlerConfig.getGetTaskTimeout());
            if (crawler != null) {
                // Skip no-op submissions when the poll simply timed out.
                requestExecutorService.execute(getTask(crawler));
            }
            if (allowEnd) {
                boolean idle = requestExecutorService.getActiveCount() == 0
                        && requestExecutorService.getQueue().isEmpty()
                        && processingExecutorService.getActiveCount() == 0
                        && processingExecutorService.getQueue().isEmpty();
                if (idle) {
                    if (idleOnce) {
                        // Two consecutive idle checks: no work left anywhere.
                        requestExecutorService.shutdown();
                        processingExecutorService.shutdown();
                        break;
                    }
                    idleOnce = true;
                } else {
                    idleOnce = false;
                }
            }
            sleep();
        }
        // NOTE: the original trailing shutdownNow() block was unreachable —
        // the loop only exits after both pools are already shut down.
    }

    /**
     * Sleeps for the configured inter-request interval.
     * Restores the thread's interrupt flag before rethrowing, so callers and
     * pool workers can still observe the interruption.
     */
    private void sleep() {
        try {
            Thread.sleep(globalCrawlerConfig.getTimeIntervalSupplier().get());
        } catch (InterruptedException e) {
            Thread.currentThread().interrupt();
            throw new RuntimeException(e);
        }
    }

    /**
     * Builds the runnable for one crawl task: performs the HTTP request on the
     * calling (request) pool, then hands the response to the processing pool,
     * which runs the result handler and pushes any newly discovered crawlers
     * to the recorder.
     *
     * @param crawler the task to execute; {@code null} yields a no-op runnable
     * @return a runnable suitable for the request executor
     */
    @Override
    public Runnable getTask(Crawler crawler) {
        return () -> {
            if (crawler == null) {
                return;
            }
            Crawler responseCrawler = httpService.request(crawler);
            processingExecutorService.execute(() -> {
                try {
                    Crawler resultCrawler = resultHandler.process(responseCrawler);
                    if (resultCrawler != null && CollUtil.isNotEmpty(resultCrawler.getCrawlers())) {
                        recorderService.push(resultCrawler.getCrawlers().stream().distinct().toList());
                    }
                } finally {
                    // Close the response stream even when process() throws;
                    // previously an exception in process() leaked the stream.
                    if (responseCrawler instanceof StreamCrawler streamCrawler) {
                        IoUtil.closeQuietly(streamCrawler.getInputStream());
                    }
                }
            });
        };
    }
}
