package com.ruoyi.web.controller.topic;

import java.io.BufferedReader;
import java.io.File;
import java.io.InputStreamReader;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.TimerTask;

import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.security.access.prepost.PreAuthorize;
import org.springframework.web.bind.annotation.*;

import com.ruoyi.common.annotation.Log;
import com.ruoyi.common.core.controller.BaseController;
import com.ruoyi.common.core.domain.AjaxResult;
import com.ruoyi.common.core.page.TableDataInfo;
import com.ruoyi.common.enums.BusinessType;
import com.ruoyi.framework.manager.AsyncManager;
import com.ruoyi.topic.domain.HsTopic;
import com.ruoyi.topic.service.IHsTopicService;

import io.swagger.v3.oas.annotations.Operation;
import io.swagger.v3.oas.annotations.tags.Tag;

/**
 * Hot-search topic controller: CRUD endpoints for {@code HsTopic} plus
 * management of an external Python crawler process (start/stop/status/config).
 *
 * @author lzk
 * @date 2025-06-17
 */
@RestController
@RequestMapping("/topic/topic")
@Tag(name = "【热搜主题】管理")
public class HsTopicController extends BaseController {
    @Autowired
    private IHsTopicService hsTopicService;

    /**
     * Reference to the running crawler process, used to stop it and to query
     * its status. {@code volatile} because this controller is a singleton and
     * the field is read/written concurrently by request threads and the async
     * worker thread.
     */
    private volatile Process crawlerProcess = null;

    /**
     * Set while a start request has been accepted but the process has not yet
     * been launched by the async task; closes the window in which a second
     * concurrent start request would not yet see a live {@link #crawlerProcess}.
     */
    private volatile boolean crawlerStarting = false;

    /** Guards the check-then-start critical section in {@link #startCrawler}. */
    private final Object crawlerLock = new Object();

    // Paths come from the application configuration, not hard-coded values.
    @Value("${crawler.python.topic.script.path}")
    private String pythonScriptPath;

    @Value("${crawler.python.topic.stop.flag.path}")
    private String stopFlagPath;

    @Value("${crawler.python.executable:python}")
    private String pythonExecutable;

    /**
     * Lists hot-search topics matching the given filter, paginated.
     *
     * @param hsTopic filter criteria (any populated field narrows the query)
     * @return one page of matching topics
     */
    @Operation(summary = "查询热搜主题列表")
    @PreAuthorize("@ss.hasPermi('topic:topic:list')")
    @GetMapping("/list")
    public TableDataInfo list(HsTopic hsTopic) {
        startPage();
        List<HsTopic> list = hsTopicService.selectHsTopicList(hsTopic);
        return getDataTable(list);
    }

    /**
     * Returns the detail of a single hot-search topic.
     *
     * @param id primary key of the topic
     * @return the topic wrapped in a success result
     */
    @Operation(summary = "获取热搜主题详细信息")
    @PreAuthorize("@ss.hasPermi('topic:topic:query')")
    @GetMapping(value = "/{id}")
    public AjaxResult getInfo(@PathVariable("id") Long id) {
        return success(hsTopicService.selectHsTopicById(id));
    }

    /**
     * Creates a new hot-search topic.
     *
     * @param hsTopic topic payload from the request body
     * @return success/failure based on the number of rows inserted
     */
    @Operation(summary = "新增热搜主题")
    @PreAuthorize("@ss.hasPermi('topic:topic:add')")
    @Log(title = "热搜主题", businessType = BusinessType.INSERT)
    @PostMapping
    public AjaxResult add(@RequestBody HsTopic hsTopic) {
        return toAjax(hsTopicService.insertHsTopic(hsTopic));
    }

    /**
     * Updates an existing hot-search topic.
     *
     * @param hsTopic topic payload (must carry the primary key)
     * @return success/failure based on the number of rows updated
     */
    @Operation(summary = "修改热搜主题")
    @PreAuthorize("@ss.hasPermi('topic:topic:edit')")
    @Log(title = "热搜主题", businessType = BusinessType.UPDATE)
    @PutMapping
    public AjaxResult edit(@RequestBody HsTopic hsTopic) {
        return toAjax(hsTopicService.updateHsTopic(hsTopic));
    }

    /**
     * Deletes one or more hot-search topics.
     *
     * @param ids primary keys to delete
     * @return success/failure based on the number of rows deleted
     */
    @Operation(summary = "删除热搜主题")
    @PreAuthorize("@ss.hasPermi('topic:topic:remove')")
    @Log(title = "热搜主题", businessType = BusinessType.DELETE)
    @DeleteMapping("/{ids}")
    public AjaxResult remove(@PathVariable(name = "ids") Long[] ids) {
        return toAjax(hsTopicService.deleteHsTopicByIds(ids));
    }

    /**
     * Starts the Baidu hot-search crawler script asynchronously.
     *
     * @param immediate when true the script is passed {@code --immediate} and
     *                  crawls right away instead of waiting for its schedule
     * @return success once the script has been scheduled, or an error when the
     *         crawler is already running or the script file is missing
     */
    @Operation(summary = "启动百度热搜爬虫")
    @PreAuthorize("@ss.hasPermi('topic:topic:crawler')")
    @PostMapping("/startCrawler")
    public AjaxResult startCrawler(@RequestParam(defaultValue = "false") boolean immediate) {
        try {
            logger.info("收到启动爬虫请求，立即执行模式: {}", immediate);

            // Validate the script before claiming the start slot.
            File scriptFile = new File(pythonScriptPath);
            if (!scriptFile.exists()) {
                logger.error("Python脚本文件不存在: {}", pythonScriptPath);
                return AjaxResult.error("Python脚本文件不存在: " + pythonScriptPath);
            }
            logger.info("Python脚本文件存在: {}", pythonScriptPath);

            // Claim the start slot atomically: the plain isAlive() check alone
            // is racy because the process is launched later on another thread,
            // so two concurrent requests could both start a crawler.
            synchronized (crawlerLock) {
                if (crawlerStarting || (crawlerProcess != null && crawlerProcess.isAlive())) {
                    logger.warn("爬虫已经在运行中，拒绝重复启动");
                    return AjaxResult.error("爬虫已经在运行中");
                }
                crawlerStarting = true;
            }

            // Remove a stale stop flag so the new run does not exit immediately.
            File stopFlag = new File(stopFlagPath);
            if (stopFlag.exists()) {
                if (stopFlag.delete()) {
                    logger.info("已删除停止标志文件: {}", stopFlagPath);
                } else {
                    logger.warn("删除停止标志文件失败: {}", stopFlagPath);
                }
            }

            // Run the script off the request thread.
            AsyncManager.me().execute(new TimerTask() {
                @Override
                public void run() {
                    runCrawlerScript(immediate);
                }
            });

            // Respond immediately; the crawler keeps running in the background.
            String message = immediate ? "爬虫已启动，正在立即执行爬取任务" : "爬虫已在后台启动，将按照预定时间执行爬取任务";
            logger.info("返回响应: {}", message);
            return AjaxResult.success(message);

        } catch (Exception e) {
            // Release the start slot if scheduling failed after it was claimed.
            crawlerStarting = false;
            logger.error("启动爬虫失败: {}", e.getMessage(), e);
            return AjaxResult.error("启动爬虫失败: " + e.getMessage());
        }
    }

    /**
     * Launches the Python crawler, streams its output into the application log
     * and waits for it to exit. Runs on the async worker thread.
     *
     * @param immediate whether to append the {@code --immediate} argument
     */
    private void runCrawlerScript(boolean immediate) {
        try {
            List<String> command = new ArrayList<>();
            command.add(pythonExecutable);
            command.add(pythonScriptPath);
            if (immediate) {
                command.add("--immediate");
                logger.info("添加立即执行参数: --immediate");
            }
            logger.info("执行命令: {}", String.join(" ", command));

            ProcessBuilder pb = new ProcessBuilder(command);
            pb.redirectErrorStream(true);

            // Force Python to flush stdout so output can be streamed live.
            Map<String, String> env = pb.environment();
            env.put("PYTHONUNBUFFERED", "1");
            env.put("PYTHONIOENCODING", "utf-8");

            // Keep a local reference: stopCrawler() may null the field while
            // this task is still using the process.
            Process process = pb.start();
            crawlerProcess = process;
            logger.info("百度热搜爬虫已启动，进程ID: {}，模式: {}",
                      process.pid(), immediate ? "立即执行" : "定时任务");

            // Drain the combined output so the child cannot block on a full pipe.
            try (BufferedReader reader = new BufferedReader(
                    new InputStreamReader(process.getInputStream(), StandardCharsets.UTF_8))) {
                String line;
                while ((line = reader.readLine()) != null) {
                    logger.info("Python输出: {}", line);
                }
            }

            int exitCode = process.waitFor();
            if (exitCode == 0) {
                logger.info("爬虫脚本执行完成，退出码: {}", exitCode);
            } else {
                logger.error("爬虫脚本执行失败，退出码: {}", exitCode);
            }
        } catch (InterruptedException e) {
            // Restore the interrupt status for the executor.
            Thread.currentThread().interrupt();
            logger.error("异步执行Python脚本失败: {}", e.getMessage(), e);
        } catch (Exception e) {
            logger.error("异步执行Python脚本失败: {}", e.getMessage(), e);
        } finally {
            crawlerProcess = null;
            crawlerStarting = false;
        }
    }

    /**
     * Stops the crawler: writes the stop-flag file the script polls, then
     * escalates from a graceful wait to destroy() to destroyForcibly() if the
     * process does not exit on its own.
     *
     * @return success in all non-exceptional cases (stopped, or flag set)
     */
    @Operation(summary = "停止百度热搜爬虫")
    @PreAuthorize("@ss.hasPermi('topic:topic:crawler')")
    @PostMapping("/stopCrawler")
    public AjaxResult stopCrawler() {
        try {
            // Create the stop flag so the script can shut down gracefully.
            File stopFlag = new File(stopFlagPath);
            if (!stopFlag.exists() && !stopFlag.createNewFile()) {
                logger.warn("创建停止标志文件失败: {}", stopFlagPath);
            }

            // Work on a local copy; the async task may clear the field concurrently.
            Process process = crawlerProcess;
            if (process != null) {
                try {
                    if (process.isAlive()) {
                        logger.info("正在停止爬虫进程，进程ID: {}", process.pid());

                        // Give the script time to notice the flag and exit cleanly.
                        Thread.sleep(2000);

                        if (process.isAlive()) {
                            process.destroy();
                            Thread.sleep(1000);

                            // Last resort: forcible termination.
                            if (process.isAlive()) {
                                process.destroyForcibly();
                                logger.warn("强制终止爬虫进程");
                            }
                        }

                        logger.info("爬虫进程已停止");
                    }

                    crawlerProcess = null;
                    return AjaxResult.success("爬虫已停止");
                } catch (InterruptedException e) {
                    // Preserve the interrupt status instead of swallowing it.
                    Thread.currentThread().interrupt();
                    logger.error("停止爬虫进程时出错: {}", e.getMessage(), e);
                    crawlerProcess = null;
                } catch (Exception e) {
                    logger.error("停止爬虫进程时出错: {}", e.getMessage(), e);
                    crawlerProcess = null; // reset the reference on failure too
                }
            }

            // No live process: the flag alone will stop a scheduled run.
            return AjaxResult.success("停止标志已设置，爬虫将在下次检查时停止");

        } catch (Exception e) {
            return AjaxResult.error("停止爬虫失败: " + e.getMessage());
        }
    }

    /**
     * Reports whether the crawler is currently considered running.
     * A pending stop flag overrides a live process, because the script will
     * shut itself down at its next check.
     *
     * @return success result whose data is the boolean running state
     */
    @Operation(summary = "获取爬虫状态")
    @PreAuthorize("@ss.hasPermi('topic:topic:crawler')")
    @GetMapping("/getCrawlerStatus")
    public AjaxResult getCrawlerStatus() {
        Process process = crawlerProcess;
        boolean isRunning = process != null && process.isAlive();

        // The stop flag means the crawler is about to exit.
        if (new File(stopFlagPath).exists()) {
            isRunning = false;
        }

        return AjaxResult.success("获取爬虫状态成功", isRunning);
    }

    /**
     * Reads the crawler's schedule out of the Python script itself by scanning
     * for the {@code 'schedule_times'} line and extracting the bracketed list.
     *
     * @return the raw list text (e.g. {@code ['08:00']}), or empty if not found
     */
    @Operation(summary = "获取爬虫配置信息")
    @PreAuthorize("@ss.hasPermi('topic:topic:crawler')")
    @GetMapping("/getCrawlerConfig")
    public AjaxResult getCrawlerConfig() {
        try {
            File scriptFile = new File(pythonScriptPath);
            if (!scriptFile.exists()) {
                return AjaxResult.error("爬虫脚本文件不存在");
            }

            Path path = scriptFile.toPath();
            List<String> lines = Files.readAllLines(path, StandardCharsets.UTF_8);

            // The schedule lives inside the script source, e.g. 'schedule_times': ['08:00']
            String scheduleTimes = "";
            for (String line : lines) {
                if (line.contains("'schedule_times'")) {
                    int startIndex = line.indexOf('[');
                    int endIndex = line.indexOf(']');
                    if (startIndex != -1 && endIndex > startIndex) {
                        scheduleTimes = line.substring(startIndex, endIndex + 1);
                    }
                    break;
                }
            }

            return AjaxResult.success("获取爬虫配置成功", scheduleTimes);
        } catch (Exception e) {
            return AjaxResult.error("获取爬虫配置失败: " + e.getMessage());
        }
    }

    /**
     * Rewrites the {@code 'schedule_times'} line inside the Python script with
     * a new single schedule time.
     *
     * @param configParams request body; must contain "scheduleTime" in HH:mm format
     * @return success when the line was found and rewritten, error otherwise
     */
    @Operation(summary = "更新爬虫配置信息")
    @PreAuthorize("@ss.hasPermi('topic:topic:crawler')")
    @PostMapping("/updateCrawlerConfig")
    public AjaxResult updateCrawlerConfig(@RequestBody Map<String, String> configParams) {
        try {
            String scheduleTime = configParams.get("scheduleTime");
            if (scheduleTime == null || scheduleTime.trim().isEmpty()) {
                return AjaxResult.error("定时时间不能为空");
            }

            // Validate HH:mm before splicing the value into Python source.
            if (!scheduleTime.matches("^([01]?[0-9]|2[0-3]):[0-5][0-9]$")) {
                return AjaxResult.error("时间格式不正确，请使用HH:mm格式");
            }

            File scriptFile = new File(pythonScriptPath);
            if (!scriptFile.exists()) {
                return AjaxResult.error("爬虫脚本文件不存在");
            }

            Path path = scriptFile.toPath();
            List<String> lines = Files.readAllLines(path, StandardCharsets.UTF_8);

            // Replace the bracketed list on the schedule_times line in place.
            boolean updated = false;
            for (int i = 0; i < lines.size(); i++) {
                String line = lines.get(i);
                if (line.contains("'schedule_times'")) {
                    String updatedLine = line.replaceAll("\\['[^']*'\\]", "['" + scheduleTime + "']");
                    lines.set(i, updatedLine);
                    updated = true;
                    break;
                }
            }

            if (!updated) {
                return AjaxResult.error("未找到配置项，无法更新");
            }

            Files.write(path, lines, StandardCharsets.UTF_8);

            return AjaxResult.success("爬虫定时配置已更新");
        } catch (Exception e) {
            return AjaxResult.error("更新爬虫配置失败: " + e.getMessage());
        }
    }

    /**
     * Cleans up and exports related links and articles for the given period.
     *
     * @param period optional period selector forwarded to the service layer
     * @return success when the service call completes without throwing
     */
    @Operation(summary = "清理并导出相关链接和文章")
    @PreAuthorize("@ss.hasPermi('topic:topic:clean-and-export')")
    @PostMapping("/clean-and-export")
    public AjaxResult cleanAndExport(@RequestParam(required = false) Integer period) {
        try {
            hsTopicService.cleanAndExport(period);
            return AjaxResult.success("清理与导出成功！");
        } catch (Exception e) {
            return AjaxResult.error("操作失败：" + e.getMessage());
        }
    }
}