package com.ruoyi.web.controller.related_links;

import java.io.BufferedReader;
import java.io.File;
import java.io.InputStreamReader;
import java.nio.charset.StandardCharsets;
import java.time.LocalDateTime;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicReference;

import io.swagger.v3.oas.annotations.Operation;
import io.swagger.v3.oas.annotations.tags.Tag;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.security.access.prepost.PreAuthorize;
import org.springframework.web.bind.annotation.*;

import com.ruoyi.common.annotation.Log;
import com.ruoyi.common.annotation.RepeatSubmit;
import com.ruoyi.common.core.controller.BaseController;
import com.ruoyi.common.core.domain.AjaxResult;
import com.ruoyi.common.core.page.TableDataInfo;
import com.ruoyi.common.enums.BusinessType;
import com.ruoyi.related_links.domain.CrawlerProgress;
import com.ruoyi.related_links.domain.HsRelatedLinks;
import com.ruoyi.related_links.domain.dto.CrawlerProgressDTO;
import com.ruoyi.related_links.domain.vo.CrawlerProgressVO;
import com.ruoyi.related_links.service.IHsRelatedLinksService;

/**
 * Related-links controller (相关链接).
 *
 * Exposes CRUD endpoints for related links, plus endpoints that start/stop an
 * external Python crawler process and poll its progress. Progress is pushed by
 * the Python script via line-oriented stdout markers (PROGRESS:, BATCH_START:,
 * CRAWLER_STARTED, CRAWLER_FINISHED, CRAWLER_ERROR:) parsed in {@link #parseProgress}.
 *
 * @author lzk
 * @date 2025-06-18
 */
@RestController
@RequestMapping("/related_links/related_links")
@Tag(name = "【相关链接】管理")
public class HsRelatedLinksController extends BaseController {

    private static final Logger log = LoggerFactory.getLogger(HsRelatedLinksController.class);

    /** Batch size the Python script processes links in; used only to derive totalBatches. */
    private static final int BATCH_SIZE = 50;

    /** Seconds to wait for the output-draining tasks after the process exits. */
    private static final int OUTPUT_DRAIN_TIMEOUT_SECONDS = 5;

    @Autowired
    private IHsRelatedLinksService hsRelatedLinksService;

    // Handle to the running crawler process; null whenever no crawler is active.
    // Cleared in executePythonScript's finally block and by stopCrawler.
    private final AtomicReference<Process> crawlerProcess = new AtomicReference<>(null);

    // Shared progress store read by getProgress()/getStatus().
    // Keys: total, totalBatches, currentBatch, processed, success, failure,
    // status (1=running, 2=finished, 3=stopped, 4=error), error (message).
    private final Map<String, Object> progressData = new ConcurrentHashMap<>();

    @Value("${crawler.python.related_links.script.path}")
    private String pythonScriptPath;

    // Path of the flag file the Python script polls to know it should stop.
    @Value("${crawler.python.related_links.stop.flag.path}")
    private String stopFlagPath;

    @Value("${crawler.python.executable}")
    private String pythonExecutable;

    /**
     * List related links (paginated via RuoYi's startPage/getDataTable).
     *
     * @param hsRelatedLinks optional filter fields
     * @return paginated table data
     */
    @Operation(summary = "查询相关链接列表")
    @PreAuthorize("@ss.hasPermi('related_links:related_links:list')")
    @GetMapping("/list")
    public TableDataInfo list(HsRelatedLinks hsRelatedLinks) {
        startPage();
        List<HsRelatedLinks> list = hsRelatedLinksService.selectHsRelatedLinksList(hsRelatedLinks);
        return getDataTable(list);
    }

    /**
     * Get one related link by id.
     *
     * @param id primary key
     * @return the record wrapped in AjaxResult
     */
    @Operation(summary = "获取相关链接详细信息")
    @PreAuthorize("@ss.hasPermi('related_links:related_links:query')")
    @GetMapping(value = "/{id}")
    public AjaxResult getInfo(@PathVariable("id") Long id) {
        return success(hsRelatedLinksService.selectHsRelatedLinksById(id));
    }

    /**
     * Create a related link.
     *
     * @param hsRelatedLinks record to insert
     * @return affected-row result
     */
    @Operation(summary = "新增相关链接")
    @PreAuthorize("@ss.hasPermi('related_links:related_links:add')")
    @Log(title = "相关链接", businessType = BusinessType.INSERT)
    @PostMapping
    public AjaxResult add(@RequestBody HsRelatedLinks hsRelatedLinks) {
        return toAjax(hsRelatedLinksService.insertHsRelatedLinks(hsRelatedLinks));
    }

    /**
     * Update a related link.
     *
     * @param hsRelatedLinks record to update (id required)
     * @return affected-row result
     */
    @Operation(summary = "修改相关链接")
    @PreAuthorize("@ss.hasPermi('related_links:related_links:edit')")
    @Log(title = "相关链接", businessType = BusinessType.UPDATE)
    @PutMapping
    public AjaxResult edit(@RequestBody HsRelatedLinks hsRelatedLinks) {
        return toAjax(hsRelatedLinksService.updateHsRelatedLinks(hsRelatedLinks));
    }

    /**
     * Delete related links by ids.
     *
     * @param ids primary keys to delete
     * @return affected-row result
     */
    @Operation(summary = "删除相关链接")
    @PreAuthorize("@ss.hasPermi('related_links:related_links:remove')")
    @Log(title = "相关链接", businessType = BusinessType.DELETE)
    @DeleteMapping("/{ids}")
    public AjaxResult remove(@PathVariable(name = "ids") Long[] ids) {
        return toAjax(hsRelatedLinksService.deleteHsRelatedLinksByIds(ids));
    }

    /**
     * Start the crawler: clears the stop flag, initialises the progress map and
     * launches the Python script asynchronously.
     *
     * NOTE(review): the isAlive check below is check-then-act, so two truly
     * concurrent calls could both pass it; acceptable for an admin-triggered
     * endpoint but not a hard guarantee.
     *
     * @return the initial progress snapshot, or an error if already running /
     *         nothing to crawl
     */
    @Operation(summary = "启动文章爬虫")
    @PreAuthorize("@ss.hasPermi('related_links:related_links:crawler')")
    @PostMapping("/startCrawler")
    public AjaxResult startCrawler() {
        try {
            // Refuse to start a second crawler while one is alive.
            Process currentProcess = crawlerProcess.get();
            if (currentProcess != null && currentProcess.isAlive()) {
                return AjaxResult.error("爬虫已在运行中");
            }

            // Remove a stale stop flag; if it survives, the script would exit
            // immediately, so surface the failure instead of ignoring it.
            File stopFlag = new File(stopFlagPath);
            if (stopFlag.exists() && !stopFlag.delete()) {
                log.warn("删除停止标志文件失败: {}", stopFlagPath);
                return AjaxResult.error("无法删除停止标志文件，爬虫未启动");
            }

            // Count pending links (crawlStatus == 0).
            // NOTE(review): this loads the full list just for its size; a
            // dedicated count query on the service would be cheaper.
            HsRelatedLinks query = new HsRelatedLinks();
            query.setCrawlStatus(0L);
            int totalCount = hsRelatedLinksService.selectHsRelatedLinksList(query).size();

            if (totalCount == 0) {
                return AjaxResult.error("没有需要爬取的链接");
            }

            // Reset the progress snapshot before launching.
            progressData.clear();
            progressData.put("total", totalCount);
            // Ceiling division: number of BATCH_SIZE-sized batches.
            progressData.put("totalBatches", (totalCount + BATCH_SIZE - 1) / BATCH_SIZE);
            progressData.put("currentBatch", 0);
            progressData.put("processed", 0);
            progressData.put("success", 0);
            progressData.put("failure", 0);
            progressData.put("status", 1); // 1=running, 2=finished, 3=stopped, 4=error

            // Launch the script off the request thread.
            CompletableFuture.runAsync(this::executePythonScript);

            log.info("爬虫启动成功，总计{}个链接", totalCount);
            return AjaxResult.success("爬虫启动成功", progressData);

        } catch (Exception e) {
            log.error("启动爬虫失败: {}", e.getMessage(), e);
            return AjaxResult.error("启动爬虫失败: " + e.getMessage());
        }
    }

    /**
     * Run the Python script to completion, streaming its stdout into
     * {@link #parseProgress} and its stderr into the log, then set the final
     * status from the exit code.
     */
    private void executePythonScript() {
        Process process = null;
        try {
            log.info("开始执行Python脚本: {}", pythonScriptPath);

            ProcessBuilder pb = new ProcessBuilder(pythonExecutable, pythonScriptPath);
            pb.redirectErrorStream(false);

            // Force unbuffered UTF-8 output so progress lines arrive promptly.
            Map<String, String> env = pb.environment();
            env.put("PYTHONUNBUFFERED", "1");
            env.put("PYTHONIOENCODING", "utf-8");

            process = pb.start();
            crawlerProcess.set(process);

            // Drain stdout and stderr concurrently so the child never blocks
            // on a full pipe buffer.
            final Process started = process;
            CompletableFuture<Void> outputTask = CompletableFuture.runAsync(() -> {
                try (BufferedReader reader = new BufferedReader(
                        new InputStreamReader(started.getInputStream(), StandardCharsets.UTF_8))) {
                    String line;
                    while ((line = reader.readLine()) != null) {
                        log.info("Python输出: {}", line);
                        parseProgress(line);
                    }
                } catch (Exception e) {
                    log.error("读取Python输出异常: {}", e.getMessage());
                }
            });

            CompletableFuture<Void> errorTask = CompletableFuture.runAsync(() -> {
                try (BufferedReader errorReader = new BufferedReader(
                        new InputStreamReader(started.getErrorStream(), StandardCharsets.UTF_8))) {
                    String line;
                    while ((line = errorReader.readLine()) != null) {
                        log.warn("Python错误输出: {}", line);
                    }
                } catch (Exception e) {
                    log.error("读取Python错误输出异常: {}", e.getMessage());
                }
            });

            int exitCode = process.waitFor();

            // Give the drain tasks a moment to finish. A timeout here must NOT
            // fall through to the outer catch and overwrite a successful
            // exit-code status with "error".
            try {
                outputTask.get(OUTPUT_DRAIN_TIMEOUT_SECONDS, TimeUnit.SECONDS);
                errorTask.get(OUTPUT_DRAIN_TIMEOUT_SECONDS, TimeUnit.SECONDS);
            } catch (InterruptedException e) {
                Thread.currentThread().interrupt();
                log.warn("等待输出读取被中断: {}", e.getMessage());
            } catch (Exception e) {
                log.warn("等待输出读取完成失败: {}", e.getMessage());
            }

            if (exitCode == 0) {
                progressData.put("status", 2); // finished
                log.info("爬虫执行完成，退出码: {}", exitCode);
            } else {
                progressData.put("status", 4); // error
                log.error("爬虫执行失败，退出码: {}", exitCode);
            }

        } catch (InterruptedException e) {
            // Preserve the interrupt flag for the executor thread.
            Thread.currentThread().interrupt();
            log.error("执行Python脚本异常: {}", e.getMessage(), e);
            progressData.put("status", 4);
        } catch (Exception e) {
            log.error("执行Python脚本异常: {}", e.getMessage(), e);
            progressData.put("status", 4);
        } finally {
            crawlerProcess.set(null);
            if (process != null && process.isAlive()) {
                log.info("强制终止Python进程");
                process.destroyForcibly();
            }
        }
    }

    /**
     * Parse one line of the Python script's stdout and update progressData.
     * Recognised markers:
     * <ul>
     *   <li>{@code PROGRESS:batch:processed:success:failure}</li>
     *   <li>{@code BATCH_START:batchNum:batchSize}</li>
     *   <li>{@code CRAWLER_STARTED} / {@code CRAWLER_FINISHED}</li>
     *   <li>{@code CRAWLER_ERROR:message}</li>
     * </ul>
     * Unrecognised or malformed lines are ignored (logged at debug).
     *
     * @param line one raw output line, never null
     */
    private void parseProgress(String line) {
        try {
            if (line.startsWith("PROGRESS:")) {
                // Format: PROGRESS:batch:processed:success:failure
                String[] parts = line.split(":");
                if (parts.length >= 5) {
                    int currentBatch = Integer.parseInt(parts[1]);
                    int processed = Integer.parseInt(parts[2]);
                    int success = Integer.parseInt(parts[3]);
                    int failure = Integer.parseInt(parts[4]);

                    progressData.put("currentBatch", currentBatch);
                    progressData.put("processed", processed);
                    progressData.put("success", success);
                    progressData.put("failure", failure);

                    log.info("进度更新 - 批次:{}, 已处理:{}, 成功:{}, 失败:{}",
                            currentBatch, processed, success, failure);
                }
            } else if (line.startsWith("BATCH_START:")) {
                // Format: BATCH_START:batchNum:batchSize — log only.
                String[] parts = line.split(":");
                if (parts.length >= 3) {
                    int batchNum = Integer.parseInt(parts[1]);
                    int batchSize = Integer.parseInt(parts[2]);
                    log.info("开始批次 {} ，包含 {} 个链接", batchNum, batchSize);
                }
            } else if (line.equals("CRAWLER_STARTED")) {
                log.info("Python爬虫已启动");
                progressData.put("status", 1);
            } else if (line.equals("CRAWLER_FINISHED")) {
                log.info("Python爬虫已完成");
                progressData.put("status", 2);
            } else if (line.startsWith("CRAWLER_ERROR:")) {
                String error = line.substring("CRAWLER_ERROR:".length());
                log.error("Python爬虫报错: {}", error);
                progressData.put("status", 4);
                progressData.put("error", error);
            }
        } catch (Exception e) {
            // Malformed numbers etc. — progress parsing is best-effort.
            log.debug("解析进度失败: {}, 错误: {}", line, e.getMessage());
        }
    }

    /**
     * Stop the crawler: create the stop-flag file the script polls, then
     * terminate the process (gracefully first, forcibly after 10 s).
     *
     * @return success once the stop has been requested
     */
    @Operation(summary = "停止文章爬虫")
    @PreAuthorize("@ss.hasPermi('related_links:related_links:crawler')")
    @PostMapping("/stopCrawler")
    public AjaxResult stopCrawler() {
        try {
            // Signal the script cooperatively via the flag file.
            File stopFlag = new File(stopFlagPath);
            if (!stopFlag.getParentFile().exists()) {
                stopFlag.getParentFile().mkdirs();
            }
            stopFlag.createNewFile(); // idempotent: false just means it already exists
            log.info("创建停止标志文件: {}", stopFlagPath);

            // Then terminate the process itself.
            Process process = crawlerProcess.getAndSet(null);
            if (process != null && process.isAlive()) {
                log.info("正在停止Python进程...");
                process.destroy();
                if (!process.waitFor(10, TimeUnit.SECONDS)) {
                    log.warn("进程未能正常停止，强制终止");
                    process.destroyForcibly();
                }
                log.info("Python进程已停止");
            }

            progressData.put("status", 3); // stopped
            return AjaxResult.success("爬虫已停止");

        } catch (InterruptedException e) {
            // Preserve the interrupt flag for the request thread.
            Thread.currentThread().interrupt();
            log.error("停止爬虫失败: {}", e.getMessage(), e);
            return AjaxResult.error("停止爬虫失败: " + e.getMessage());
        } catch (Exception e) {
            log.error("停止爬虫失败: {}", e.getMessage(), e);
            return AjaxResult.error("停止爬虫失败: " + e.getMessage());
        }
    }

    /**
     * Return a snapshot of the crawler progress map.
     *
     * @return copy of progressData (copied to decouple from concurrent writers)
     */
    @Operation(summary = "获取爬虫进度")
    @PreAuthorize("@ss.hasPermi('related_links:related_links:crawler')")
    @GetMapping("/getProgress")
    public AjaxResult getProgress() {
        // Defensive: if we still hold a dead process while status says
        // "running", mark it finished. (executePythonScript normally clears
        // the reference itself, so this branch is a rarely-hit safety net.)
        Process process = crawlerProcess.get();
        if (process != null && !process.isAlive() && (Integer) progressData.getOrDefault("status", 0) == 1) {
            progressData.put("status", 2);
            log.info("检测到Python进程已结束，更新状态为完成");
        }

        // Snapshot so the caller never sees a map mutated mid-serialisation.
        Map<String, Object> responseData = new HashMap<>(progressData);
        return AjaxResult.success(responseData);
    }

    /**
     * Return whether the crawler process is alive plus the last known status code.
     *
     * @return {isRunning: boolean, status: int}
     */
    @Operation(summary = "获取爬虫状态")
    @PreAuthorize("@ss.hasPermi('related_links:related_links:crawler')")
    @GetMapping("/getStatus")
    public AjaxResult getStatus() {
        Process process = crawlerProcess.get();
        boolean isRunning = process != null && process.isAlive();

        Map<String, Object> statusData = new HashMap<>();
        statusData.put("isRunning", isRunning);
        statusData.put("status", progressData.getOrDefault("status", 0));

        return AjaxResult.success("获取状态成功", statusData);
    }
}