#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
线程管理器
用于统一管理所有数据下载线程，支持优雅停止
"""

import threading
import logging
import sys
import os
from typing import Dict, List, Any
from datetime import datetime

# Module-level logging configuration: timestamped INFO-and-above messages.
logging.basicConfig(
    level=logging.INFO,
    format='%(asctime)s - %(levelname)s - %(message)s'
)
logger = logging.getLogger(__name__)

class ThreadManager:
    """Process-wide singleton that tracks data-download threads and scraper
    instances so they can be monitored and stopped gracefully as a group.

    Thread-safety: all registry access is serialized through the instance
    ``_manager_lock``; singleton construction uses double-checked locking on
    the class-level ``_lock``.
    """

    _instance = None          # the lazily-created singleton instance
    _lock = threading.Lock()  # guards singleton construction only

    def __new__(cls):
        # Double-checked locking: cheap unlocked test first, then re-check
        # under the lock so concurrent first callers create one instance.
        if cls._instance is None:
            with cls._lock:
                if cls._instance is None:
                    cls._instance = super(ThreadManager, cls).__new__(cls)
                    cls._instance._initialized = False
        return cls._instance

    def __init__(self):
        # __init__ runs on every ThreadManager() call against the same
        # shared instance; only the first call may initialize state.
        if self._initialized:
            return

        # name -> {'thread': Thread, 'start_time': datetime, 'status': str}
        self._threads: Dict[str, Dict[str, Any]] = {}
        # name -> scraper instance (duck-typed: may offer stop_scraping/get_status)
        self._scrapers: Dict[str, Any] = {}
        self._shutdown_event = threading.Event()
        self._manager_lock = threading.Lock()
        self._initialized = True

        # NOTE: in a Django app, signal handlers cannot be registered from a
        # non-main thread, so signal handling is left to the framework.

        logger.info("线程管理器初始化完成")

    def register_scraper(self, name: str, scraper_instance: Any) -> bool:
        """Register (or silently replace) a scraper instance under *name*.

        Returns True on success, False on unexpected error.
        """
        try:
            with self._manager_lock:
                if name in self._scrapers:
                    logger.warning(f"爬虫 {name} 已存在，将被替换")

                self._scrapers[name] = scraper_instance
                logger.info(f"爬虫 {name} 注册成功")
                return True
        except Exception as e:
            logger.error(f"注册爬虫 {name} 失败: {e}")
            return False

    def unregister_scraper(self, name: str) -> bool:
        """Remove the scraper registered under *name*.

        Returns True if it was present, False if unknown or on error.
        """
        try:
            with self._manager_lock:
                if name in self._scrapers:
                    del self._scrapers[name]
                    logger.info(f"爬虫 {name} 注销成功")
                    return True
                else:
                    logger.warning(f"爬虫 {name} 不存在")
                    return False
        except Exception as e:
            logger.error(f"注销爬虫 {name} 失败: {e}")
            return False

    def register_thread(self, name: str, thread: threading.Thread) -> bool:
        """Register (or silently replace) a thread under *name*.

        Records the registration time and an initial 'registered' status.
        Returns True on success, False on unexpected error.
        """
        try:
            with self._manager_lock:
                if name in self._threads:
                    logger.warning(f"线程 {name} 已存在，将被替换")

                self._threads[name] = {
                    'thread': thread,
                    'start_time': datetime.now(),
                    'status': 'registered'
                }
                logger.info(f"线程 {name} 注册成功")
                return True
        except Exception as e:
            logger.error(f"注册线程 {name} 失败: {e}")
            return False

    def unregister_thread(self, name: str) -> bool:
        """Remove the thread registered under *name*.

        Returns True if it was present, False if unknown or on error.
        """
        try:
            with self._manager_lock:
                if name in self._threads:
                    del self._threads[name]
                    logger.info(f"线程 {name} 注销成功")
                    return True
                else:
                    logger.warning(f"线程 {name} 不存在")
                    return False
        except Exception as e:
            logger.error(f"注销线程 {name} 失败: {e}")
            return False

    def stop_scraper(self, name: str) -> bool:
        """Ask a single scraper to stop via its ``stop_scraping()`` method.

        Returns the scraper's own result, or False if the scraper is unknown,
        does not support stopping, or raised.
        """
        try:
            # Look up the instance under the lock, but invoke the (arbitrary)
            # scraper code OUTSIDE it: the original called stop_scraping()
            # while holding _manager_lock, which deadlocks if the scraper
            # calls back into this manager (e.g. unregister_thread).
            with self._manager_lock:
                scraper = self._scrapers.get(name)

            if scraper is None:
                logger.warning(f"爬虫 {name} 不存在")
                return False

            if not hasattr(scraper, 'stop_scraping'):
                logger.warning(f"爬虫 {name} 不支持停止操作")
                return False

            result = scraper.stop_scraping()
            logger.info(f"爬虫 {name} 停止请求已发送，结果: {result}")
            return result
        except Exception as e:
            logger.error(f"停止爬虫 {name} 失败: {e}")
            return False

    def stop_all_scrapers(self) -> Dict[str, bool]:
        """Request every registered scraper to stop.

        Returns a mapping of scraper name -> stop result.
        """
        results: Dict[str, bool] = {}
        # Snapshot the names so we don't iterate the registry while other
        # threads may mutate it.
        with self._manager_lock:
            scraper_names = list(self._scrapers.keys())

        for name in scraper_names:
            results[name] = self.stop_scraper(name)

        logger.info(f"停止所有爬虫完成，结果: {results}")
        return results

    def stop_all_threads(self, timeout: float = 30.0) -> bool:
        """Stop all scrapers, then wait up to *timeout* seconds per thread.

        Always returns True; threads that outlive the timeout are logged but
        not killed (Python offers no safe forced thread termination).
        """
        logger.info("开始停止所有线程...")

        # Signal shutdown BEFORE joining: workers that poll
        # is_shutdown_requested() must see the request while we wait for
        # them. The original set the event only after the join loop, so
        # cooperative threads could never observe it and joins timed out.
        self._shutdown_event.set()

        # Ask every scraper to stop first.
        self.stop_all_scrapers()

        # Snapshot the registry so the lock is not held while joining.
        with self._manager_lock:
            thread_items = list(self._threads.items())

        for name, thread_info in thread_items:
            thread = thread_info['thread']
            if thread.is_alive():
                logger.info(f"等待线程 {name} 结束...")
                thread.join(timeout=timeout)

                if thread.is_alive():
                    logger.warning(f"线程 {name} 在 {timeout} 秒后仍未结束")
                else:
                    logger.info(f"线程 {name} 已结束")

        logger.info("所有线程停止操作完成")
        return True

    def get_status(self) -> Dict[str, Any]:
        """Return a snapshot of thread liveness, scraper states, and whether
        shutdown has been requested."""
        with self._manager_lock:
            thread_status: Dict[str, Any] = {}
            for name, thread_info in self._threads.items():
                thread_status[name] = {
                    'is_alive': thread_info['thread'].is_alive(),
                    'start_time': thread_info['start_time'].isoformat(),
                    'status': thread_info['status']
                }

            scraper_status: Dict[str, str] = {}
            for name, scraper in self._scrapers.items():
                # Scrapers are duck-typed: prefer get_status(), then a
                # .status attribute (both presumably enum-like with .value),
                # else report 'unknown'.
                if hasattr(scraper, 'get_status'):
                    scraper_status[name] = scraper.get_status().value
                elif hasattr(scraper, 'status'):
                    scraper_status[name] = scraper.status.value
                else:
                    scraper_status[name] = 'unknown'

        return {
            'threads': thread_status,
            'scrapers': scraper_status,
            'shutdown_requested': self._shutdown_event.is_set()
        }

    def is_shutdown_requested(self) -> bool:
        """True once a shutdown has been requested."""
        return self._shutdown_event.is_set()

    def wait_for_shutdown(self, timeout: float = 30.0) -> bool:
        """Block until shutdown is requested or *timeout* elapses.

        Returns True if the shutdown event is set, False on timeout.
        """
        return self._shutdown_event.wait(timeout)

# Global thread-manager instance. ThreadManager is a singleton, so any later
# ThreadManager() call returns this same object.
thread_manager = ThreadManager()

def get_thread_manager() -> ThreadManager:
    """Accessor for the module-level ThreadManager singleton."""
    return thread_manager

if __name__ == '__main__':
    # Manual smoke test: dump the manager's current status to stdout.
    status_snapshot = get_thread_manager().get_status()
    print("线程管理器状态:", status_snapshot)