"""
Improved historical-data storage.
Supports file persistence and more flexible data management.
"""
import json
import os
from datetime import datetime, timedelta
from typing import List, Dict, Any, Optional
from collections import deque
import asyncio
import aiofiles
from pathlib import Path

from app.utils.logger import get_logger

logger = get_logger(__name__)


class HistoricalDataStorage:
    """Historical performance-data store.

    Keeps a bounded in-memory cache (a deque) of the most recent data
    points for fast access, and persists every point to newline-delimited
    JSON files, one file per calendar day. A background task purges files
    older than the retention window.
    """

    # Supported query windows for get_historical_data(); unknown keys
    # fall back to one hour (same behavior as the original if/elif chain).
    _TIME_RANGES = {
        "1h": timedelta(hours=1),
        "6h": timedelta(hours=6),
        "24h": timedelta(hours=24),
        "7d": timedelta(days=7),
    }

    def __init__(self, storage_dir: str = "data", max_memory_points: int = 200):
        """Create the store.

        Args:
            storage_dir: Directory holding the daily JSON files; created
                if missing.
            max_memory_points: Capacity of the in-memory cache; oldest
                points are evicted first once full.
        """
        self.storage_dir = Path(storage_dir)
        # FIX: parents=True so a nested storage_dir like "var/data" works.
        self.storage_dir.mkdir(parents=True, exist_ok=True)

        # Fast-access in-memory cache of the most recent data points.
        self.memory_cache: deque = deque(maxlen=max_memory_points)

        # File-storage configuration: one JSON-lines file per day.
        self.daily_file_pattern = "performance_{date}.json"
        self.current_date: Optional[str] = None
        self.current_file_handle = None  # aiofiles handle for today's file

        # Retention policy.
        self.max_file_days = 30  # keep 30 days of daily files
        self.cleanup_interval = 3600  # purge expired files every hour

        # Background cleanup task; started by initialize().
        self.cleanup_task: Optional[asyncio.Task] = None

    async def initialize(self):
        """Warm the memory cache from disk and start the cleanup task."""
        try:
            # Load recent data points into the memory cache.
            await self._load_recent_data()

            # Start the periodic file-cleanup task.
            self.cleanup_task = asyncio.create_task(self._periodic_cleanup())

            logger.info(f"历史数据存储系统初始化完成，缓存数据点: {len(self.memory_cache)}")

        except Exception as e:
            logger.error(f"初始化历史数据存储失败: {e}")

    async def store_data_point(self, data_point: Dict[str, Any]):
        """Store a single data point in the cache and on disk."""
        try:
            # Add to the in-memory cache (oldest entry evicted if full).
            self.memory_cache.append(data_point)

            # Persist to today's daily file.
            await self._persist_to_file(data_point)

        except Exception as e:
            logger.error(f"存储数据点失败: {e}")

    async def get_historical_data(
        self,
        time_range: str = "1h",
        max_points: int = 100
    ) -> List[Dict[str, Any]]:
        """Return data points for *time_range*, downsampled to *max_points*.

        Args:
            time_range: One of "1h", "6h", "24h", "7d"; anything else
                defaults to one hour.
            max_points: Upper bound on the number of points returned.

        Returns:
            Chronologically sorted data points; [] on error.
        """
        try:
            end_time = datetime.now()
            start_time = end_time - self._TIME_RANGES.get(time_range, timedelta(hours=1))

            # Serve from the memory cache first.
            memory_data = self._filter_memory_data(start_time, end_time)

            # If the cache cannot cover the window, backfill from files.
            if len(memory_data) < max_points and start_time < self._get_memory_start_time():
                file_data = await self._load_file_data(start_time, end_time)
                # Merge and drop duplicate timestamps.
                all_data = self._deduplicate_data(file_data + memory_data)
            else:
                all_data = memory_data

            return self._downsample(all_data, max_points)

        except Exception as e:
            logger.error(f"获取历史数据失败: {e}")
            return []

    @staticmethod
    def _downsample(data: List[Dict[str, Any]], max_points: int) -> List[Dict[str, Any]]:
        """Evenly sample *data* down to at most *max_points* entries.

        BUG FIX: the original used floor division for the stride, so e.g.
        150 points with max_points=100 produced a stride of 1 and returned
        all 150 points. Ceiling division guarantees the bound holds.
        """
        if max_points <= 0 or len(data) <= max_points:
            return data
        step = -(-len(data) // max_points)  # ceil(len / max_points)
        return data[::step]

    @staticmethod
    def _parse_timestamp(data_point: Dict[str, Any]) -> datetime:
        """Parse the ISO-format 'timestamp' field (trailing 'Z' stripped)."""
        return datetime.fromisoformat(data_point['timestamp'].replace('Z', ''))

    async def _persist_to_file(self, data_point: Dict[str, Any]):
        """Append *data_point* as one JSON line to today's daily file.

        The file handle is kept open between calls and rotated when the
        calendar day changes.
        """
        try:
            current_date = datetime.now().strftime("%Y-%m-%d")

            # Day rolled over: close the old handle so a new one is opened.
            if current_date != self.current_date:
                self.current_date = current_date
                if self.current_file_handle:
                    await self.current_file_handle.close()
                    self.current_file_handle = None

            # Lazily open today's file in append mode.
            if not self.current_file_handle:
                file_path = self.storage_dir / self.daily_file_pattern.format(date=current_date)
                self.current_file_handle = await aiofiles.open(file_path, 'a', encoding='utf-8')

            # One JSON object per line (JSON-lines), flushed immediately.
            await self.current_file_handle.write(json.dumps(data_point, ensure_ascii=False) + '\n')
            await self.current_file_handle.flush()

        except Exception as e:
            logger.error(f"持久化数据点失败: {e}")

    async def _load_recent_data(self):
        """Warm the memory cache with points from the last two daily files.

        BUG FIX: the original scanned today first and yesterday second, so
        cached points were out of chronological order and, once the bounded
        deque filled, the *newest* points were the ones evicted. Scanning
        oldest day first keeps the most recent points in the cache.
        Only points younger than two hours are loaded.
        """
        try:
            for days_back in (1, 0):  # yesterday first, then today
                date = (datetime.now() - timedelta(days=days_back)).strftime("%Y-%m-%d")
                file_path = self.storage_dir / self.daily_file_pattern.format(date=date)

                if not file_path.exists():
                    continue

                async with aiofiles.open(file_path, 'r', encoding='utf-8') as f:
                    async for line in f:
                        try:
                            data_point = json.loads(line.strip())
                            point_time = self._parse_timestamp(data_point)
                            # Keep only points from the last 2 hours.
                            if (datetime.now() - point_time).total_seconds() < 7200:
                                self.memory_cache.append(data_point)
                        except Exception as e:
                            logger.warning(f"解析数据点失败: {e}")
                            continue

            logger.info(f"加载了 {len(self.memory_cache)} 个历史数据点到内存")

        except Exception as e:
            logger.error(f"加载历史数据失败: {e}")

    async def _load_file_data(self, start_time: datetime, end_time: datetime) -> List[Dict[str, Any]]:
        """Load data points within [start_time, end_time] from daily files.

        Returns the matching points sorted by timestamp; [] on error.
        """
        data = []

        try:
            # Walk every calendar day the window touches.
            current_date = start_time.date()
            end_date = end_time.date()

            while current_date <= end_date:
                file_path = self.storage_dir / self.daily_file_pattern.format(
                    date=current_date.strftime("%Y-%m-%d")
                )

                if file_path.exists():
                    async with aiofiles.open(file_path, 'r', encoding='utf-8') as f:
                        async for line in f:
                            try:
                                data_point = json.loads(line.strip())
                                point_time = self._parse_timestamp(data_point)

                                if start_time <= point_time <= end_time:
                                    data.append(data_point)

                            except Exception as e:
                                logger.warning(f"解析文件数据点失败: {e}")
                                continue

                current_date += timedelta(days=1)

            return sorted(data, key=lambda x: x['timestamp'])

        except Exception as e:
            logger.error(f"从文件加载数据失败: {e}")
            return []

    def _filter_memory_data(self, start_time: datetime, end_time: datetime) -> List[Dict[str, Any]]:
        """Return cached points within [start_time, end_time], sorted."""
        filtered_data = []

        for data_point in self.memory_cache:
            try:
                point_time = self._parse_timestamp(data_point)
                if start_time <= point_time <= end_time:
                    filtered_data.append(data_point)
            except Exception as e:
                logger.warning(f"过滤内存数据失败: {e}")
                continue

        return sorted(filtered_data, key=lambda x: x['timestamp'])

    def _get_memory_start_time(self) -> datetime:
        """Return the earliest timestamp in the cache (now() if empty/bad)."""
        if not self.memory_cache:
            return datetime.now()

        try:
            earliest_point = min(self.memory_cache, key=lambda x: x['timestamp'])
            return self._parse_timestamp(earliest_point)
        except Exception:
            return datetime.now()

    def _deduplicate_data(self, data: List[Dict[str, Any]]) -> List[Dict[str, Any]]:
        """Drop points with duplicate timestamps, keeping the first seen."""
        seen_timestamps = set()
        unique_data = []

        for data_point in sorted(data, key=lambda x: x['timestamp']):
            timestamp = data_point['timestamp']
            if timestamp not in seen_timestamps:
                seen_timestamps.add(timestamp)
                unique_data.append(data_point)

        return unique_data

    async def _periodic_cleanup(self):
        """Purge expired files every cleanup_interval seconds until cancelled."""
        while True:
            try:
                await asyncio.sleep(self.cleanup_interval)
                await self._cleanup_old_files()
            except asyncio.CancelledError:
                break
            except Exception as e:
                logger.error(f"定期清理任务出错: {e}")

    async def _cleanup_old_files(self):
        """Delete daily files older than max_file_days."""
        try:
            cutoff_date = datetime.now() - timedelta(days=self.max_file_days)

            for file_path in self.storage_dir.glob("performance_*.json"):
                try:
                    # Recover the date from the file name.
                    date_str = file_path.stem.replace("performance_", "")
                    file_date = datetime.strptime(date_str, "%Y-%m-%d")

                    if file_date < cutoff_date:
                        file_path.unlink()
                        logger.info(f"删除过期数据文件: {file_path}")

                except Exception as e:
                    logger.warning(f"处理文件 {file_path} 时出错: {e}")

        except Exception as e:
            logger.error(f"清理过期文件失败: {e}")

    async def close(self):
        """Stop the cleanup task and close the open file handle."""
        try:
            if self.cleanup_task:
                self.cleanup_task.cancel()
                # FIX: await the cancelled task so shutdown is deterministic
                # and no "task was destroyed" warning is emitted.
                try:
                    await self.cleanup_task
                except asyncio.CancelledError:
                    pass

            if self.current_file_handle:
                await self.current_file_handle.close()
                self.current_file_handle = None

            logger.info("历史数据存储系统已关闭")

        except Exception as e:
            logger.error(f"关闭存储系统失败: {e}")


# Global storage singleton, created lazily by get_storage().
storage_instance: Optional[HistoricalDataStorage] = None

# Guards singleton creation; created lazily so no event loop is needed
# at import time.
_storage_lock: Optional[asyncio.Lock] = None


async def get_storage() -> HistoricalDataStorage:
    """Return the process-wide storage instance, creating it on first use.

    BUG FIX: the original had a check-then-act race — two coroutines
    calling get_storage() concurrently could each create and initialize a
    separate HistoricalDataStorage. A lock plus double-check ensures the
    instance is created and initialized exactly once, and is only
    published after initialize() completes.
    """
    global storage_instance, _storage_lock

    if storage_instance is None:
        # Lazy lock creation is safe: there is no await between the check
        # and the assignment, so the event loop cannot interleave here.
        if _storage_lock is None:
            _storage_lock = asyncio.Lock()

        async with _storage_lock:
            # Re-check: another coroutine may have won the race.
            if storage_instance is None:
                instance = HistoricalDataStorage()
                await instance.initialize()
                storage_instance = instance

    return storage_instance
