"""URL管理器，用于管理爬虫的URL队列和去重。"""
from collections import deque
import hashlib
from typing import Optional

import redis
import platform
from fin_senti_entity_platform.utils.config_loader import config_loader
from fin_senti_entity_platform.utils.logger import Logger

# 初始化日志记录器
logger = Logger.get_logger('url_manager', 'data_collection.log')

class URLManager:
    """
    URL manager responsible for storing, de-duplicating and dispensing crawl URLs.

    Two interchangeable backends are supported:

    * **local** — in-process ``set``s plus a ``deque`` (FIFO order), the default;
    * **Redis** — shared sets/list for distributed crawling across workers.

    Any Redis failure permanently switches the instance to the local backend
    (logged), so the crawl can continue in degraded, single-process mode.
    De-dup convention: crawled URLs are tracked by MD5 hash ('old_urls'),
    pending URLs by their raw string ('new_urls').
    """

    def __init__(self, use_redis=False):
        """
        Initialize the URL manager.

        :param use_redis: when True, attempt distributed management via Redis;
                          on connection failure the manager silently falls
                          back to local in-memory mode.
        """
        self.use_redis = use_redis
        self.redis_client: Optional[redis.Redis] = None

        # Local backend: pending raw URLs, crawled URL hashes, FIFO queue.
        self.new_urls = set()
        self.old_urls = set()
        self.url_queue = deque()

        # Overall URL cap and Redis connection settings from the shared config.
        self.max_urls = config_loader.get('data_collection', {}).get('max_urls', 1000000)
        self.redis_config = config_loader.get('data_collection', {}).get('redis', {})

        # On Windows only hint at possible Redis setup issues; do not disable it.
        if platform.system() == 'Windows' and use_redis:
            logger.info("在Windows平台上使用Redis，请确保已正确安装和配置Redis服务")

        if self.use_redis:
            self._init_redis()

    def _init_redis(self):
        """Open the Redis connection; on failure fall back to local mode."""
        try:
            self.redis_client = redis.Redis(
                host=self.redis_config.get('host', 'localhost'),
                port=self.redis_config.get('port', 6379),
                db=self.redis_config.get('db', 0),
                password=self.redis_config.get('password', None),
                decode_responses=True,
                # Timeouts and retry improve stability (notably on Windows).
                socket_timeout=10,
                socket_connect_timeout=10,
                retry_on_timeout=True,
                max_connections=100
            )
            # Fail fast if the server is unreachable.
            self.redis_client.ping()
            logger.info(f"Redis连接成功: {self.redis_config.get('host', 'localhost')}:{self.redis_config.get('port', 6379)}")
        except Exception as e:
            logger.error(f"Redis连接失败: {str(e)}")
            self.use_redis = False
            self.redis_client = None
            logger.warning("URL管理器回退到本地内存模式")

    def _fallback_to_local(self, error):
        """Log a Redis failure and permanently switch to the local backend."""
        logger.error(f"Redis操作失败: {str(error)}")
        self.use_redis = False
        self.redis_client = None
        logger.warning("URL管理器回退到本地内存模式")

    def add_new_url(self, url):
        """
        Add a single URL if it has not been seen before.

        :param url: URL string (None/empty is rejected).
        :return: True when the URL was accepted into the queue, else False.
        """
        if not url:
            return False

        url_hash = self._get_url_hash(url)

        if self.use_redis and self.redis_client:
            try:
                # De-dup against BOTH the crawled set (hashes) and the pending
                # set (raw URLs); checking only 'old_urls' would let a URL be
                # queued twice while it is still waiting to be crawled.
                if (not self.redis_client.sismember('old_urls', url_hash)
                        and not self.redis_client.sismember('new_urls', url)):
                    # Enforce the cap with the shared Redis counts — the local
                    # sets are unused (empty) in Redis mode.
                    total = (self.redis_client.scard('new_urls')
                             + self.redis_client.scard('old_urls'))
                    if total < self.max_urls:
                        self.redis_client.sadd('new_urls', url)
                        self.redis_client.lpush('url_queue', url)
                        return True
                    logger.warning("已达到最大URL数量限制")
                return False
            except redis.RedisError as e:
                self._fallback_to_local(e)
                # Fall through and handle this URL with the local backend.

        # Local de-dup: hashes for crawled URLs, raw URLs for pending ones.
        if url_hash in self.old_urls or url in self.new_urls:
            return False
        if len(self.new_urls) + len(self.old_urls) >= self.max_urls:
            logger.warning("已达到最大URL数量限制")
            return False
        self.new_urls.add(url)
        self.url_queue.append(url)
        return True

    def add_new_urls(self, urls):
        """
        Add a batch of URLs.

        :param urls: collection of URL strings (may be None or empty).
        :return: number of URLs actually added (duplicates/empties skipped).
        """
        if not urls:
            return 0

        added_count = sum(1 for url in urls if self.add_new_url(url))
        logger.info(f"成功添加 {added_count} 个新URL")
        return added_count

    def has_new_url(self):
        """
        Check whether at least one URL is waiting to be crawled.

        :return: True when the pending queue is non-empty.
        """
        if self.use_redis and self.redis_client:
            try:
                return self.redis_client.llen('url_queue') > 0
            except redis.RedisError as e:
                self._fallback_to_local(e)
        return len(self.url_queue) > 0

    def get_new_url(self):
        """
        Pop the next URL to crawl (FIFO) and mark it as crawled.

        :return: the URL string, or None when the queue is empty.
        """
        if self.use_redis and self.redis_client:
            try:
                url = self.redis_client.rpop('url_queue')
                if url:
                    self.redis_client.sadd('old_urls', self._get_url_hash(url))
                    self.redis_client.srem('new_urls', url)
                return url
            except redis.RedisError as e:
                self._fallback_to_local(e)

        # Local backend.
        if not self.url_queue:
            return None
        url = self.url_queue.popleft()
        self.old_urls.add(self._get_url_hash(url))
        # discard() rather than remove(): after a mid-crawl Redis fallback the
        # queue and pending set may be out of sync and remove() would raise.
        self.new_urls.discard(url)
        return url

    def get_new_url_count(self):
        """
        :return: number of URLs still waiting to be crawled.
        """
        if self.use_redis and self.redis_client:
            try:
                return self.redis_client.scard('new_urls')
            except redis.RedisError as e:
                self._fallback_to_local(e)
        return len(self.new_urls)

    def get_old_url_count(self):
        """
        :return: number of URLs already crawled.
        """
        if self.use_redis and self.redis_client:
            try:
                return self.redis_client.scard('old_urls')
            except redis.RedisError as e:
                self._fallback_to_local(e)
        return len(self.old_urls)

    def _get_url_hash(self, url):
        """
        Hash a URL for de-duplication.

        MD5 is used purely as a fast, compact fingerprint, not for security.
        :param url: URL string.
        :return: 32-character hex digest.
        """
        return hashlib.md5(url.encode('utf-8')).hexdigest()

    def clear(self):
        """Remove every stored URL from whichever backend is active."""
        if self.use_redis and self.redis_client:
            try:
                self.redis_client.delete('new_urls', 'old_urls', 'url_queue')
            except redis.RedisError as e:
                self._fallback_to_local(e)
        # Always reset the local structures too: they are the fallback store
        # and must not retain stale URLs after a Redis failure above.
        self.new_urls.clear()
        self.old_urls.clear()
        self.url_queue.clear()

        logger.info("URL管理器已清空")

    def save_state(self):
        """Persist the local backend's state to disk (local mode only)."""
        if self.use_redis:
            return
        try:
            import pickle
            state = {
                'new_urls': self.new_urls,
                'old_urls': self.old_urls,
                'url_queue': list(self.url_queue)
            }
            with open('url_manager_state.pkl', 'wb') as f:
                pickle.dump(state, f)
            logger.info("URL管理器状态已保存")
        except Exception as e:
            logger.error(f"保存URL管理器状态失败: {str(e)}")

    def load_state(self):
        """Restore a previously saved local state (local mode only)."""
        if self.use_redis:
            return
        try:
            # SECURITY NOTE: pickle.load executes arbitrary code if the state
            # file is tampered with; only load files this process wrote.
            import pickle
            with open('url_manager_state.pkl', 'rb') as f:
                state = pickle.load(f)
            self.new_urls = state['new_urls']
            self.old_urls = state['old_urls']
            self.url_queue = deque(state['url_queue'])
            logger.info("URL管理器状态已加载")
        except FileNotFoundError:
            # No saved state yet (first run) — nothing to restore, not an error.
            pass
        except Exception as e:
            logger.error(f"加载URL管理器状态失败: {str(e)}")