import subprocess
import os
import signal
import psutil
import json
from datetime import datetime
import threading
import time

class SpiderManager:
    """Manage Scrapy spider subprocesses.

    Starts spiders via ``scrapy crawl`` in a child process, tracks their
    state in ``running_spiders``, and collects their stdout/stderr lines
    into ``spider_logs`` from background daemon threads.
    """

    # Upper bound on retained log entries per spider, so long-running
    # crawls cannot grow memory without limit.
    MAX_LOG_ENTRIES = 1000

    def __init__(self):
        self.running_spiders = {}  # {spider_name: process-info dict}
        self.spider_logs = {}      # {spider_name: [log-entry dicts]}
        # Guards spider_logs: the stdout and stderr reader threads append
        # to (and trim) the same list concurrently.
        self._log_lock = threading.Lock()

    def start_spider(self, spider_name, category=None):
        """Start a spider subprocess.

        Args:
            spider_name: name passed to ``scrapy crawl``.
            category: optional value forwarded as ``-a category=...``.

        Returns:
            A dict with ``success`` and ``message``; on success it also
            carries ``pid`` and ``start_time`` (ISO format).
        """
        try:
            existing = self.running_spiders.get(spider_name)
            if existing is not None:
                if existing["process"].poll() is None:
                    # Still alive: refuse a duplicate launch.
                    return {
                        "success": False,
                        "message": f"爬虫 {spider_name} 已经在运行中"
                    }
                # Stale entry from a finished/stopped run; drop it so the
                # spider can be restarted (previously the stale entry
                # blocked every restart).
                del self.running_spiders[spider_name]

            # Build the launch command.
            cmd = ["scrapy", "crawl", spider_name]
            if category:
                cmd.extend(["-a", f"category={category}"])

            # Run from the Scrapy project root (one level above this file).
            spider_dir = os.path.join(os.path.dirname(__file__), "..")

            # Launch the spider; line-buffered text pipes so the log reader
            # threads see output promptly. (text=True already implies
            # universal newlines.)
            process = subprocess.Popen(
                cmd,
                cwd=spider_dir,
                stdout=subprocess.PIPE,
                stderr=subprocess.PIPE,
                text=True,
                bufsize=1,
            )

            # Bookkeeping consulted by status/stop queries.
            self.running_spiders[spider_name] = {
                "process": process,
                "pid": process.pid,
                "start_time": datetime.now(),
                "category": category,
                "status": "running",
            }

            # Collect output on a daemon thread so this call returns at once.
            threading.Thread(
                target=self._monitor_spider_logs,
                args=(spider_name, process),
                daemon=True,
            ).start()

            return {
                "success": True,
                "message": f"爬虫 {spider_name} 启动成功",
                "pid": process.pid,
                "start_time": self.running_spiders[spider_name]["start_time"].isoformat()
            }

        except Exception as e:
            return {
                "success": False,
                "message": f"爬虫启动失败: {str(e)}"
            }

    def stop_spider(self, spider_name):
        """Stop a running spider: SIGTERM first, SIGKILL after 10 s.

        Returns a dict with ``success`` and ``message``.
        """
        try:
            if spider_name not in self.running_spiders:
                return {
                    "success": False,
                    "message": f"爬虫 {spider_name} 未在运行"
                }

            spider_info = self.running_spiders[spider_name]
            process = spider_info["process"]

            # Graceful shutdown first; force-kill if it ignores SIGTERM.
            try:
                process.terminate()
                process.wait(timeout=10)
            except subprocess.TimeoutExpired:
                process.kill()
                process.wait()

            spider_info["status"] = "stopped"
            spider_info["end_time"] = datetime.now()

            return {
                "success": True,
                "message": f"爬虫 {spider_name} 停止成功"
            }

        except Exception as e:
            return {
                "success": False,
                "message": f"爬虫停止失败: {str(e)}"
            }

    def get_spider_status(self, spider_name):
        """Return a status snapshot for one spider (includes last 10 logs)."""
        if spider_name not in self.running_spiders:
            return {
                "spider_name": spider_name,
                "status": "not_running",
                "message": "爬虫未运行"
            }

        spider_info = self.running_spiders[spider_name]
        process = spider_info["process"]

        # poll() is None while the child process is still alive.
        if process.poll() is None:
            status = "running"
        else:
            status = "stopped"
            spider_info["status"] = "stopped"

        return {
            "spider_name": spider_name,
            "status": status,
            "pid": spider_info["pid"],
            "start_time": spider_info["start_time"].isoformat(),
            "category": spider_info.get("category"),
            "logs": self.spider_logs.get(spider_name, [])[-10:]  # last 10 entries
        }

    def get_all_spiders_status(self):
        """Return {spider_name: status snapshot} for every tracked spider."""
        # Snapshot the keys so concurrent start/stop cannot break iteration.
        return {
            spider_name: self.get_spider_status(spider_name)
            for spider_name in list(self.running_spiders)
        }

    def _monitor_spider_logs(self, spider_name, process):
        """Collect the child's stdout (info) and stderr (error) lines.

        Each pipe is drained on its own thread: alternating blocking
        ``readline()`` calls on both pipes (the previous approach)
        deadlocks whenever one stream stays quiet while the other keeps
        producing output.
        """
        self.spider_logs.setdefault(spider_name, [])
        threading.Thread(
            target=self._drain_stream,
            args=(spider_name, process.stderr, "error"),
            daemon=True,
        ).start()
        self._drain_stream(spider_name, process.stdout, "info")

    def _drain_stream(self, spider_name, stream, entry_type):
        """Append every line of *stream* to the spider's log until EOF."""
        if stream is None:
            return
        try:
            for line in stream:
                entry = {
                    "timestamp": datetime.now().isoformat(),
                    "message": line.strip(),
                    "type": entry_type,
                }
                with self._log_lock:
                    logs = self.spider_logs.setdefault(spider_name, [])
                    logs.append(entry)
                    # Trim from the front so memory stays bounded.
                    if len(logs) > self.MAX_LOG_ENTRIES:
                        del logs[: len(logs) - self.MAX_LOG_ENTRIES]
        except Exception as e:
            with self._log_lock:
                self.spider_logs.setdefault(spider_name, []).append({
                    "timestamp": datetime.now().isoformat(),
                    "message": f"日志监控错误: {str(e)}",
                    "type": "error",
                })

# Module-level singleton SpiderManager shared by all importers of this module.
spider_manager = SpiderManager() 