# -*- coding: utf-8 -*-
"""
日志接入类
负责从Elasticsearch拉取日志并推送到Redis队列
"""

import time
import json
from typing import List, Dict, Any, Optional
import redis
from common.logger import get_logger
from common.config import config
from common.utils import safe_json_dumps, generate_hash
from .es_client import ESClient

# Module-level logger for this module; presumably wraps
# logging.getLogger(__name__) — confirm in common.logger.
logger = get_logger(__name__)


class LogAccessor:
    """Log accessor: pulls logs from Elasticsearch and relays them via Redis.

    Producer side: the ``fetch_*`` methods query ES and
    ``push_logs_to_queue`` serializes each record onto a Redis list.
    Consumer side: ``pop_log_from_queue`` / ``pop_logs_from_queue``
    block-pop and deserialize records. ``monitor_and_fetch`` runs the
    producer loop indefinitely.
    """

    def __init__(self) -> None:
        """Initialize the ES client, the Redis client, and queue settings."""
        self.es_client = ESClient()
        self.redis_client = self._create_redis_client()
        self.queue_name = config.get('redis.queue_name', 'log_queue')
        self.batch_size = config.get('elasticsearch.batch_size', 1000)
        # NOTE(review): reserved for poll bookkeeping but never updated
        # anywhere in this class — confirm whether it is still needed.
        self.last_check_time: Optional[float] = None

    def _create_redis_client(self) -> redis.Redis:
        """Build a Redis client from the 'redis' config section and verify it.

        Returns:
            A connected ``redis.Redis`` with ``decode_responses=True``,
            so queue payloads come back as ``str`` rather than ``bytes``.

        Raises:
            Exception: re-raised (after logging) if construction or the
                connectivity PING fails.
        """
        redis_config = config.get('redis', {})

        try:
            client = redis.Redis(
                host=redis_config.get('host', 'localhost'),
                port=redis_config.get('port', 6379),
                db=redis_config.get('db', 0),
                password=redis_config.get('password'),
                decode_responses=True,
                socket_connect_timeout=5,
                socket_timeout=5
            )

            # Fail fast: verify connectivity before handing the client out.
            client.ping()
            logger.info(f"成功连接到Redis: {redis_config.get('host')}:{redis_config.get('port')}")
            return client

        except Exception as e:
            logger.error(f"连接Redis失败: {e}")
            raise

    def fetch_recent_logs(self, minutes: int = 5) -> List[Dict[str, Any]]:
        """Fetch the most recent logs from Elasticsearch.

        Args:
            minutes: lookback window, in minutes.

        Returns:
            Up to ``self.batch_size`` log dicts; an empty list on error
            (errors are logged, never raised to the caller).
        """
        try:
            logs = self.es_client.get_recent_logs(minutes=minutes, size=self.batch_size)
            logger.info(f"从ES获取到 {len(logs)} 条日志")
            return logs

        except Exception as e:
            logger.error(f"获取最近日志失败: {e}")
            return []

    def fetch_logs_by_app(self, app_name: str, minutes: int = 5) -> List[Dict[str, Any]]:
        """Fetch logs for one application from Elasticsearch.

        Args:
            app_name: application name to filter on.
            minutes: lookback window, in minutes.

        Returns:
            Up to ``self.batch_size`` log dicts; an empty list on error.
        """
        try:
            logs = self.es_client.get_logs_by_app(app_name, minutes=minutes, size=self.batch_size)
            logger.info(f"从ES获取到应用 {app_name} 的 {len(logs)} 条日志")
            return logs

        except Exception as e:
            logger.error(f"获取应用日志失败: {e}")
            return []

    def fetch_logs_by_level(self, level: str, minutes: int = 5) -> List[Dict[str, Any]]:
        """Fetch logs of one severity level from Elasticsearch.

        Args:
            level: log level to filter on (format defined by ESClient).
            minutes: lookback window, in minutes.

        Returns:
            Up to ``self.batch_size`` log dicts; an empty list on error.
        """
        try:
            logs = self.es_client.get_logs_by_level(level, minutes=minutes, size=self.batch_size)
            logger.info(f"从ES获取到级别 {level} 的 {len(logs)} 条日志")
            return logs

        except Exception as e:
            logger.error(f"获取级别日志失败: {e}")
            return []

    def push_logs_to_queue(self, logs: List[Dict[str, Any]]) -> int:
        """Serialize logs and LPUSH them onto the Redis queue.

        Each log dict is mutated in place to carry a ``log_id`` — a hash of
        its ``id`` and ``timestamp`` fields — so consumers can deduplicate.

        Args:
            logs: log dicts to enqueue.

        Returns:
            The number of logs actually pushed. If a failure occurs
            mid-batch, the count of logs pushed *before* the failure is
            returned (previously this reported 0 even after a partial
            push, which contradicted the documented contract).
        """
        if not logs:
            return 0

        pushed_count = 0
        try:
            for log in logs:
                # Stable identifier derived from the record itself, used
                # for downstream deduplication.
                log_id = generate_hash(f"{log.get('id', '')}{log.get('timestamp', '')}")
                log['log_id'] = log_id

                log_json = safe_json_dumps(log)

                self.redis_client.lpush(self.queue_name, log_json)
                pushed_count += 1

            logger.info(f"成功推送 {pushed_count} 条日志到Redis队列")
            return pushed_count

        except Exception as e:
            logger.error(f"推送日志到队列失败: {e}")
            # BUGFIX: report how many logs actually made it onto the queue
            # instead of returning 0 after a partial push.
            return pushed_count

    def get_queue_length(self) -> int:
        """Return the current length of the Redis queue (0 on error)."""
        try:
            return self.redis_client.llen(self.queue_name)
        except Exception as e:
            logger.error(f"获取队列长度失败: {e}")
            return 0

    def pop_log_from_queue(self, timeout: int = 1) -> Optional[Dict[str, Any]]:
        """Block-pop one log from the tail of the queue (FIFO with LPUSH).

        Args:
            timeout: BRPOP timeout in seconds.

        Returns:
            The deserialized log dict, or None on timeout or error.
            NOTE(review): if an entry contains invalid JSON it has already
            been removed by BRPOP when json.loads raises, so that record
            is silently lost — confirm this is acceptable.
        """
        try:
            result = self.redis_client.brpop(self.queue_name, timeout=timeout)
            if result:
                # brpop returns (queue_name, payload).
                log_json = result[1]
                return json.loads(log_json)
            return None

        except Exception as e:
            logger.error(f"从队列弹出日志失败: {e}")
            return None

    def pop_logs_from_queue(self, count: int = 100, timeout: int = 1) -> List[Dict[str, Any]]:
        """Block-pop up to ``count`` logs, stopping early on the first miss.

        Args:
            count: maximum number of logs to pop.
            timeout: per-pop BRPOP timeout in seconds.

        Returns:
            The logs collected so far (possibly empty), even on error.
        """
        logs = []

        try:
            for _ in range(count):
                log = self.pop_log_from_queue(timeout=timeout)
                if log:
                    logs.append(log)
                else:
                    # Timeout or error: drain no further this round.
                    break

            return logs

        except Exception as e:
            logger.error(f"从队列弹出多条日志失败: {e}")
            return logs

    def clear_queue(self) -> bool:
        """Delete the queue key, discarding all pending logs.

        Returns:
            True on success, False on error.
        """
        try:
            self.redis_client.delete(self.queue_name)
            logger.info("成功清空Redis队列")
            return True

        except Exception as e:
            logger.error(f"清空队列失败: {e}")
            return False

    def monitor_and_fetch(self, interval: int = 60, minutes: int = 5) -> None:
        """Run the producer loop: fetch recent logs and enqueue them forever.

        Stops on KeyboardInterrupt; any other error is logged and the loop
        continues after sleeping ``interval`` seconds.

        NOTE(review): when ``minutes * 60 > interval`` the lookback windows
        overlap, so the same log can be fetched and enqueued more than once;
        consumers are expected to deduplicate via ``log_id`` — confirm.

        Args:
            interval: seconds between polls.
            minutes: lookback window for each poll, in minutes.
        """
        logger.info(f"开始监控日志，检查间隔: {interval}秒，时间范围: {minutes}分钟")

        while True:
            try:
                logs = self.fetch_recent_logs(minutes=minutes)

                if logs:
                    pushed_count = self.push_logs_to_queue(logs)
                    logger.info(f"监控周期完成，推送了 {pushed_count} 条日志")

                # Wait until the next poll.
                time.sleep(interval)

            except KeyboardInterrupt:
                logger.info("收到中断信号，停止监控")
                break
            except Exception as e:
                logger.error(f"监控过程中发生错误: {e}")
                time.sleep(interval)

    def get_es_stats(self) -> Dict[str, Any]:
        """Summarize ES index info plus the current queue depth.

        Returns:
            Dict with 'indices_count', 'indices', and 'queue_length';
            an empty dict on error.
        """
        try:
            indices = self.es_client.get_indices()
            stats = {
                'indices_count': len(indices),
                'indices': indices,
                'queue_length': self.get_queue_length()
            }
            return stats

        except Exception as e:
            logger.error(f"获取ES统计信息失败: {e}")
            return {}

    def close(self) -> None:
        """Close the ES and Redis connections, logging (not raising) errors."""
        try:
            if self.es_client:
                self.es_client.close()
            if self.redis_client:
                self.redis_client.close()
            logger.info("日志接入器连接已关闭")
        except Exception as e:
            logger.error(f"关闭连接时发生错误: {e}")