package com.example.service;

import com.example.config.CrawlerConfig;
import com.example.model.CrawlerState;
import com.example.model.UrlDepth;
import com.example.util.HttpRequestUtil;
import com.example.util.UrlUtil;
import org.jsoup.nodes.Document;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;

import java.io.IOException;
import java.util.Random;
import java.util.concurrent.ThreadLocalRandom;
import java.util.concurrent.atomic.AtomicInteger;

/**
 * Crawler service implementation.
 * Responsibility: implements the core crawling logic and coordinates the components.
 * Features:
 * - Initializes crawler state
 * - Controls the crawl flow (BFS over a URL queue with a depth limit and count cap)
 * - Coordinates the HTTP, extraction, URL-discovery and persistence components
 * - Handles failures and rate limits (hourly/daily request caps, consecutive-failure backoff)
 *
 * <p>Thread-safety note: the rate-limit counters are {@link AtomicInteger}s, but the
 * {@code lastHourReset}/{@code lastDayReset} timestamps are plain longs — this class
 * assumes a single crawl runs at a time. TODO confirm with callers before running
 * {@link #startCrawler()} concurrently.
 */
@Service
public class PcServiceImpl implements PcService {

    private final CrawlerConfig config;
    private final HttpRequestUtil httpRequestUtil;
    private final UrlUtil urlUtil;
    private final ContentExtractor contentExtractor;
    private final DataSaver dataSaver;
    private final CrawlerService crawlerService;

    /** Requests issued in the current hour window (rate-limit bookkeeping). */
    private final AtomicInteger hourlyRequests = new AtomicInteger(0);
    /** Requests issued in the current day window (rate-limit bookkeeping). */
    private final AtomicInteger dailyRequests = new AtomicInteger(0);
    /** Consecutive failed fetches; triggers a backoff once it reaches the configured cap. */
    private final AtomicInteger consecutiveFailures = new AtomicInteger(0);
    private long lastHourReset = System.currentTimeMillis();
    private long lastDayReset = System.currentTimeMillis();

    @Autowired
    public PcServiceImpl(CrawlerConfig config,
                        HttpRequestUtil httpRequestUtil,
                        UrlUtil urlUtil,
                        ContentExtractor contentExtractor,
                        DataSaver dataSaver,
                        CrawlerService crawlerService) {
        this.config = config;
        this.httpRequestUtil = httpRequestUtil;
        this.urlUtil = urlUtil;
        this.contentExtractor = contentExtractor;
        this.dataSaver = dataSaver;
        this.crawlerService = crawlerService;
    }

    /**
     * Runs a complete crawl: prints the configuration banner, initializes state and the
     * output writer, then performs a breadth-first crawl from the seed URL until the queue
     * is empty or {@code maxCount} pages have been saved.
     *
     * @return a human-readable result message (success summary or the failure reason)
     */
    @Override
    public String startCrawler() {
        printConfiguration();

        String seedUrl = config.getSeedUrl();
        int maxDepth = config.getMaxDepth();
        long maxCount = config.getMaxCount();
        String savePath = config.getSavePath();

        CrawlerState state = new CrawlerState();
        state.setMaxCount(maxCount);

        try {
            state.setAllowedDomain(urlUtil.extractDomain(seedUrl));
            dataSaver.initWriter(savePath);
            System.out.println("初始化完成，开始抓取...\n");
        } catch (Exception e) {
            System.err.println("初始化失败: " + e.getMessage());
            return "初始化失败: " + e.getMessage();
        }

        // Seed the BFS frontier; mark the seed visited up front so it is never re-queued.
        state.getUrlQueue().add(new UrlDepth(seedUrl, 0));
        state.getVisitedUrls().add(seedUrl);

        try {
            String error = crawlLoop(state, maxDepth, maxCount);
            if (error != null) {
                return error;
            }
        } catch (Exception e) {
            System.err.println("\n抓取过程中发生异常: " + e.getMessage());
            e.printStackTrace();
            return "抓取过程中发生异常: " + e.getMessage();
        } finally {
            // Always release the output writer, even on failure.
            dataSaver.closeWriter();
        }

        String result = String.format("抓取完成，共抓取%d条数据", state.getCrawledCount().get());
        System.out.println("\n=== " + result + " ===\n");
        return result;
    }

    /** Prints the startup banner describing the effective crawler configuration. */
    private void printConfiguration() {
        System.out.println("\n=== 开始爬虫任务 ===");
        System.out.println("配置信息：");
        System.out.println("- 种子URL: " + config.getSeedUrl());
        System.out.println("- 最大深度: " + config.getMaxDepth());
        System.out.println("- 最大抓取数量: " + config.getMaxCount());
        System.out.println("- 请求延迟: " + (config.getRequest().isDelayEnabled() ? 
            config.getRequest().getDelayMin() + "-" + config.getRequest().getDelayMax() + "ms" : "禁用"));
        System.out.println("- 保存路径: " + config.getSavePath());
        System.out.println("==================\n");
    }

    /**
     * Drains the URL queue, processing each entry via {@link #processUrl}.
     *
     * @return {@code null} on normal completion, or a failure message that the caller
     *         should return to its own caller
     */
    private String crawlLoop(CrawlerState state, int maxDepth, long maxCount) {
        while (!state.getUrlQueue().isEmpty() && state.getCrawledCount().get() < maxCount) {
            UrlDepth current = state.getUrlQueue().poll();
            if (current == null || current.getDepth() > maxDepth) {
                // Entries beyond the depth limit were still queued for bookkeeping; skip them here.
                System.out.println("跳过URL: " + (current == null ? "null" : current.getUrl()) + 
                                 " (深度: " + (current == null ? "N/A" : current.getDepth()) + ")");
                continue;
            }

            System.out.println("\n正在处理URL: " + current.getUrl());
            System.out.println("当前深度: " + current.getDepth());
            System.out.println("已抓取数量: " + state.getCrawledCount().get() + "/" + maxCount);

            try {
                processUrl(current, state, maxDepth);
            } catch (InterruptedException e) {
                // Restore the interrupt flag so callers higher up can observe the interruption.
                Thread.currentThread().interrupt();
                System.err.println("\n爬虫被中断: " + e.getMessage());
                return "爬虫被中断: " + e.getMessage();
            } catch (IOException e) {
                System.err.println("\n文件操作失败: " + e.getMessage());
                return "文件操作失败: " + e.getMessage();
            } catch (RuntimeException e) {
                System.err.println("\n" + e.getMessage());
                return e.getMessage();
            }
        }
        return null;
    }

    /** Rolls the hourly/daily request counters over once their window has elapsed. */
    private void resetCountersIfNeeded() {
        long currentTime = System.currentTimeMillis();

        // Reset the hourly counter once an hour has passed.
        if (currentTime - lastHourReset >= 3600000) {
            hourlyRequests.set(0);
            lastHourReset = currentTime;
        }

        // Reset the daily counter once a day has passed.
        if (currentTime - lastDayReset >= 86400000) {
            dailyRequests.set(0);
            lastDayReset = currentTime;
        }
    }

    /**
     * Blocks until the configured request limits allow another request.
     * Sleeps a full window when the hourly/daily cap is reached, and applies the
     * configured retry delay after too many consecutive failures.
     *
     * @throws InterruptedException if the thread is interrupted while sleeping
     */
    private void checkRequestLimits() throws InterruptedException {
        resetCountersIfNeeded();

        // Enforce the hourly request cap.
        while (hourlyRequests.get() >= config.getRequest().getMaxPerHour()) {
            System.out.println("达到每小时请求限制，等待一小时...");
            Thread.sleep(3600000);
            resetCountersIfNeeded();
        }

        // Enforce the daily request cap.
        while (dailyRequests.get() >= config.getRequest().getMaxPerDay()) {
            System.out.println("达到每天请求限制，等待24小时...");
            Thread.sleep(86400000);
            resetCountersIfNeeded();
        }

        // Back off after too many consecutive failures, then clear the streak.
        if (consecutiveFailures.get() >= config.getRequest().getMaxFailures()) {
            System.out.println("连续失败次数过多，等待" + (config.getRequest().getRetryDelay() / 1000) + "秒后重试...");
            Thread.sleep(config.getRequest().getRetryDelay());
            consecutiveFailures.set(0);
        }
    }

    /**
     * Sleeps for a uniformly random delay in [delayMin, delayMax] milliseconds when
     * delays are enabled, to avoid hammering the target site.
     *
     * @throws InterruptedException if the thread is interrupted while sleeping
     */
    private void applyRandomDelay() throws InterruptedException {
        if (config.getRequest().isDelayEnabled()) {
            // ThreadLocalRandom avoids allocating (and re-seeding) a new Random per call.
            int delay = ThreadLocalRandom.current().nextInt(
                config.getRequest().getDelayMin(),
                config.getRequest().getDelayMax() + 1
            );
            Thread.sleep(delay);
        }
    }

    /**
     * Fetches one URL, saves its content if it is an article page, and enqueues newly
     * discovered same-domain links.
     *
     * @throws InterruptedException if interrupted during rate limiting or the random delay
     * @throws IOException          if persisting the extracted data fails
     */
    private void processUrl(UrlDepth current, CrawlerState state, int maxDepth) throws InterruptedException, IOException {
        try {
            checkRequestLimits();

            System.out.println("正在获取页面内容...");
            Document doc = httpRequestUtil.fetchDocument(current.getUrl(), 3);

            if (doc == null) {
                System.out.println("获取页面失败，跳过");
                consecutiveFailures.incrementAndGet();

                // A null document may indicate a login failure; if so, stop the whole crawler.
                if (httpRequestUtil.isLoginFailure()) {
                    System.err.println("\u001B[31m登录失败，程序停止运行\u001B[0m");
                    crawlerService.shutdown();
                }
                return;
            }

            // Request succeeded: clear the failure streak and count it against the rate limits.
            consecutiveFailures.set(0);
            hourlyRequests.incrementAndGet();
            dailyRequests.incrementAndGet();

            if (contentExtractor.isArticlePage(doc)) {
                String title = contentExtractor.extractTitle(doc, current.getUrl());
                String content = contentExtractor.extractContent(doc, current.getUrl());

                System.out.println("\n======================");
                System.out.println("URL: " + current.getUrl());
                System.out.println("标题: " + title);
                System.out.println("内容预览: " + (content.length() > 100 ? content.substring(0, 100) + "..." : content));
                System.out.println("======================\n");

                try {
                    System.out.println("正在保存数据到文件...");
                    dataSaver.saveData(title, content, current.getUrl());
                    System.out.println("数据保存成功");
                    state.getCrawledCount().incrementAndGet();
                    System.out.printf("[进度 %d/%d] %s%n",
                            state.getCrawledCount().get(),
                            state.getMaxCount(),
                            current.getUrl());
                } catch (IOException e) {
                    System.err.println("保存数据失败: " + e.getMessage());
                    throw e;
                }
            } else {
                System.out.println("当前页面不是文章页面，跳过");
            }

            System.out.println("正在发现新URL...");
            urlUtil.discoverNewUrls(doc, current, 
                                  state.getVisitedUrls(), 
                                  state.getUrlQueue(),
                                  state.getAllowedDomain());
            System.out.println("等待随机延迟后继续...");
            applyRandomDelay();

        } catch (Exception e) {
            System.err.println("处理URL失败: " + current.getUrl() + " - " + e.getMessage());
            consecutiveFailures.incrementAndGet();
            throw e; // rethrow so the crawl loop can decide whether to stop
        }
    }

    @Override
    public void stop() {
        crawlerService.stop();
    }

    @Override
    public void shutdown() {
        crawlerService.shutdown();
    }
}