package com.ningxun.pagentcrawler.controller;

import com.ningxun.pagentcrawler.Crawler;
import com.ningxun.pagentcrawler.dto.CrawlerResult;
import com.ningxun.pagentcrawler.entity.CrawlerConfig;
import com.ningxun.pagentcrawler.entity.policy.PolicyItem;
import com.ningxun.pagentcrawler.service.core.ICrawlerService;

import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;

import java.time.LocalDateTime;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicInteger;

@RestController
@RequestMapping("/crawler")
public class CrawlerController {

    @Autowired
    private Crawler crawler;

    @Autowired
    private ICrawlerService crawlerService;

    /**
     * Guards against concurrent crawler runs. Spring controllers are
     * singletons shared across request threads; the previous plain-boolean
     * check-then-set was racy (two simultaneous requests could both pass the
     * check and start the crawler twice). compareAndSet makes the claim atomic.
     */
    private final AtomicBoolean running = new AtomicBoolean(false);

    /** Total policies successfully saved across all runs (thread-safe counter). */
    private final AtomicInteger totalCrawledCount = new AtomicInteger(0);

    /** Start time of the most recent run; volatile for cross-thread visibility. */
    private volatile String lastRunTime = "";

    /**
     * Starts the default crawler task (original endpoint).
     *
     * @return response map with {@code success}, {@code message}, crawl/save
     *         counts and a {@code timestamp}; a busy response if a run is
     *         already in progress
     */
    @GetMapping("/run")
    public Map<String, Object> runCrawler() {
        // Atomically claim the run slot; reject if another run is active.
        if (!running.compareAndSet(false, true)) {
            return busyResponse();
        }

        Map<String, Object> result = new HashMap<>();
        try {
            lastRunTime = LocalDateTime.now().toString();
            System.out.println("🕷️ 开始执行爬虫任务...");

            // Execute the crawler and persist whatever it returned.
            List<PolicyItem> policies = crawler.runCrawlerAndReturnData();
            fillSaveResult(result, policies, "爬虫任务");
        } catch (Exception e) {
            System.err.println("❌ 爬虫任务执行失败: " + e.getMessage());
            e.printStackTrace();
            result.put("success", false);
            result.put("message", "爬虫启动失败: " + e.getMessage());
        } finally {
            running.set(false);
        }
        result.put("timestamp", System.currentTimeMillis());
        return result;
    }

    /**
     * Runs the crawler with a caller-supplied configuration (newer endpoint).
     *
     * @param config crawler configuration posted as JSON
     * @return response map mirroring {@link #runCrawler()} plus the
     *         {@code configName} on success/empty results
     */
    @PostMapping("/runWithConfig")
    public Map<String, Object> runCrawlerWithConfig(@RequestBody CrawlerConfig config) {
        // Atomically claim the run slot; reject if another run is active.
        if (!running.compareAndSet(false, true)) {
            return busyResponse();
        }

        Map<String, Object> result = new HashMap<>();
        try {
            lastRunTime = LocalDateTime.now().toString();
            System.out.println("🕷️ 开始执行配置化爬虫任务: " + config.getConfigName());

            // Execute the configured crawl and persist whatever it returned.
            List<PolicyItem> policies = crawler.runCrawlerWithConfig(config);
            fillSaveResult(result, policies, "配置化爬虫任务");
            // configName is only reported on non-error paths (matches prior behavior).
            result.put("configName", config.getConfigName());
        } catch (Exception e) {
            System.err.println("❌ 配置化爬虫任务执行失败: " + e.getMessage());
            e.printStackTrace();
            result.put("success", false);
            result.put("message", "配置化爬虫启动失败: " + e.getMessage());
        } finally {
            running.set(false);
        }
        result.put("timestamp", System.currentTimeMillis());
        return result;
    }

    /**
     * Reports the crawler's current run state.
     *
     * @return map with {@code status} ("running"/"idle"), a message,
     *         last run time, cumulative count and timestamp
     */
    @GetMapping("/status")
    public Map<String, Object> getCrawlerStatus() {
        boolean busy = running.get();
        Map<String, Object> status = new HashMap<>();
        status.put("status", busy ? "running" : "idle");
        status.put("message", busy ? "爬虫正在运行中" : "爬虫服务正常运行");
        status.put("lastRunTime", lastRunTime);
        status.put("totalCrawledCount", totalCrawledCount.get());
        status.put("timestamp", System.currentTimeMillis());
        return status;
    }

    /**
     * Returns cumulative crawler statistics.
     *
     * @return map with total count, last run time, running flag and timestamp
     */
    @GetMapping("/stats")
    public Map<String, Object> getCrawlerStats() {
        Map<String, Object> stats = new HashMap<>();
        stats.put("totalCrawledCount", totalCrawledCount.get());
        stats.put("lastRunTime", lastRunTime);
        stats.put("isRunning", running.get());
        stats.put("timestamp", System.currentTimeMillis());
        return stats;
    }

    /**
     * Resets the cumulative statistics (count and last run time).
     *
     * @return confirmation map with {@code success}, message and timestamp
     */
    @GetMapping("/reset")
    public Map<String, Object> resetCrawlerStats() {
        totalCrawledCount.set(0);
        lastRunTime = "";
        Map<String, Object> result = new HashMap<>();
        result.put("success", true);
        result.put("message", "爬虫统计已重置");
        result.put("timestamp", System.currentTimeMillis());
        return result;
    }

    /** Builds the standard "already running" rejection response. */
    private Map<String, Object> busyResponse() {
        Map<String, Object> result = new HashMap<>();
        result.put("success", false);
        result.put("message", "爬虫正在运行中，请稍后再试");
        result.put("timestamp", System.currentTimeMillis());
        return result;
    }

    /**
     * Persists the crawled policies (if any) and fills the response map with
     * the success flag, user-facing message and crawl/save counts.
     *
     * @param result    response map to populate
     * @param policies  crawled items; may be null or empty
     * @param taskLabel message prefix ("爬虫任务" or "配置化爬虫任务"), preserving
     *                  the original endpoint-specific wording
     */
    private void fillSaveResult(Map<String, Object> result, List<PolicyItem> policies, String taskLabel) {
        if (policies != null && !policies.isEmpty()) {
            CrawlerResult saveResult = crawlerService.savePoliciesToDatabase(policies);
            totalCrawledCount.addAndGet(saveResult.getSuccessCount());

            result.put("success", true);
            result.put("message", taskLabel + "执行完成！成功爬取 " + policies.size()
                    + " 条政策，成功保存 " + saveResult.getSuccessCount() + " 条");
            result.put("crawledCount", policies.size());
            result.put("savedCount", saveResult.getSuccessCount());
            result.put("failedCount", saveResult.getFailCount());
        } else {
            result.put("success", true);
            result.put("message", taskLabel + "执行完成，但未爬取到任何数据");
            result.put("crawledCount", 0);
            result.put("savedCount", 0);
            result.put("failedCount", 0);
        }
    }
}