"""
数据存储模块

负责K线数据的本地存储，支持Parquet和CSV格式
"""

import os
import json
import pandas as pd
from datetime import datetime, timedelta
from typing import List, Dict, Any, Optional, Tuple
from pathlib import Path
from ..core import unified_logger
from ..core.exceptions import DataMissingError, ValidationError


class DataStorage:
    """Local storage manager for K-line (candlestick) data.

    Files are organized as
    ``<storage_path>/<exchange>/<market_type>/<symbol>/<period>/<YYYYMMDD>.<fmt>``
    with a single ``metadata.json`` at the storage root that tracks the time
    range covered by each series.  Parquet and CSV formats are supported.
    """

    # Fields every incoming K-line record must contain.
    _REQUIRED_COLUMNS = ('timestamp', 'open', 'high', 'low', 'close', 'volume')

    def __init__(self, storage_path: str = "./data"):
        """
        Initialize the storage manager.

        Args:
            storage_path: Root directory for stored data; created if absent.
        """
        self.storage_path = Path(storage_path)
        self.metadata_file = self.storage_path / "metadata.json"
        self.logger = unified_logger.get_logger('data_storage')

        # Ensure the root directory exists before any read/write.
        self.storage_path.mkdir(parents=True, exist_ok=True)

        # Load existing coverage metadata (or start fresh).
        self.metadata = self._load_metadata()

    def _load_metadata(self) -> Dict[str, Any]:
        """Load metadata from disk; fall back to a fresh structure on failure."""
        if self.metadata_file.exists():
            try:
                with open(self.metadata_file, 'r', encoding='utf-8') as f:
                    return json.load(f)
            except (OSError, ValueError) as e:
                # A corrupt or unreadable file is replaced, not fatal.
                # (json.JSONDecodeError is a subclass of ValueError.)
                self.logger.warning(f"加载元数据失败: {e}")

        return {
            "exchanges": {},
            "last_update": datetime.now().isoformat()
        }

    def _save_metadata(self) -> None:
        """Persist the in-memory metadata to ``metadata.json`` (best-effort)."""
        try:
            self.metadata["last_update"] = datetime.now().isoformat()
            with open(self.metadata_file, 'w', encoding='utf-8') as f:
                json.dump(self.metadata, f, indent=2, ensure_ascii=False)
        except (OSError, TypeError, ValueError) as e:
            # OSError: disk problems; TypeError/ValueError: non-serializable content.
            self.logger.error(f"保存元数据失败: {e}")

    def _get_data_path(self, exchange: str, market_type: str, symbol: str, period: str) -> Path:
        """
        Build the directory path holding one series' daily files.

        Args:
            exchange: Exchange name.
            market_type: Market type (spot/futures).
            symbol: Trading pair.
            period: Candle period.

        Returns:
            Directory path for the series.
        """
        return self.storage_path / exchange / market_type / symbol / period

    def _get_file_path(self, exchange: str, market_type: str, symbol: str,
                      period: str, date: str, format: str = 'parquet') -> Path:
        """
        Build the path of one daily data file.

        Args:
            exchange: Exchange name.
            market_type: Market type.
            symbol: Trading pair.
            period: Candle period.
            date: Day stamp (YYYYMMDD).
            format: File format (parquet/csv).

        Returns:
            Full file path.
        """
        data_path = self._get_data_path(exchange, market_type, symbol, period)
        return data_path / f"{date}.{format}"

    def save_kline_data(self, exchange: str, market_type: str, symbol: str,
                       period: str, data: List[Dict[str, Any]],
                       format: str = 'parquet') -> bool:
        """
        Save K-line records, partitioned into one file per calendar day.

        Records for a day that already has a file are merged with it; on
        duplicate timestamps the NEW record wins, so re-downloading corrected
        candles updates what is stored.

        Args:
            exchange: Exchange name.
            market_type: Market type (spot/futures).
            symbol: Trading pair.
            period: Candle period.
            data: K-line records; each must contain
                timestamp/open/high/low/close/volume.
            format: Output format (parquet/csv).

        Returns:
            True on success (including empty input), False on any failure.
        """
        try:
            if not data:
                # Nothing to write is not an error.
                return True

            df = pd.DataFrame(data)

            # Reject records missing mandatory fields early, naming exactly
            # the fields that are absent.
            missing = [col for col in self._REQUIRED_COLUMNS if col not in df.columns]
            if missing:
                raise ValidationError(f"数据缺少必需字段: {missing}")

            # NOTE(review): timestamps are interpreted as epoch *seconds*;
            # confirm upstream fetchers never deliver milliseconds.
            df['datetime'] = pd.to_datetime(df['timestamp'], unit='s')
            df = df.sort_values('timestamp')

            # Partition by calendar day (YYYYMMDD) — one file per day.
            grouped = df.groupby(df['datetime'].dt.strftime('%Y%m%d'))

            data_path = self._get_data_path(exchange, market_type, symbol, period)
            data_path.mkdir(parents=True, exist_ok=True)

            saved_files = []
            for date, group_df in grouped:
                file_path = self._get_file_path(exchange, market_type, symbol, period, date, format)

                # Merge with any existing file for this day.
                if file_path.exists():
                    existing_df = self._load_data_file(file_path, format)
                    if existing_df is not None:
                        combined_df = pd.concat([existing_df, group_df])
                        # keep='last' so freshly fetched candles replace
                        # previously stored ones with the same timestamp.
                        combined_df = combined_df.drop_duplicates(subset=['timestamp'], keep='last')
                        group_df = combined_df.sort_values('timestamp')

                if format == 'parquet':
                    group_df.to_parquet(file_path, index=False)
                else:
                    group_df.to_csv(file_path, index=False)

                saved_files.append(str(file_path))

            # Record the new coverage in metadata.json.
            self._update_metadata(exchange, market_type, symbol, period, df)

            self.logger.data_log(symbol, period, f"保存了 {len(data)} 条数据到 {len(saved_files)} 个文件")
            return True

        except Exception as e:
            self.logger.error(f"保存K线数据失败: {e}")
            return False

    def _load_data_file(self, file_path: Path, format: str) -> Optional[pd.DataFrame]:
        """Read one daily file; return None (with a warning) if unreadable."""
        try:
            if format == 'parquet':
                return pd.read_parquet(file_path)
            else:
                return pd.read_csv(file_path)
        except Exception as e:
            self.logger.warning(f"加载数据文件失败 {file_path}: {e}")
            return None

    def load_kline_data(self, exchange: str, market_type: str, symbol: str,
                       period: str, start_time: Optional[int] = None,
                       end_time: Optional[int] = None,
                       format: str = 'parquet') -> Optional[pd.DataFrame]:
        """
        Load K-line data for a series, optionally clipped to a time window.

        Args:
            exchange: Exchange name.
            market_type: Market type.
            symbol: Trading pair.
            period: Candle period.
            start_time: Inclusive lower bound on ``timestamp``.
            end_time: Inclusive upper bound on ``timestamp``.
            format: File format to read (parquet/csv).

        Returns:
            A DataFrame sorted by timestamp with duplicates dropped, or None
            when the series has no readable data or an error occurs.
        """
        try:
            data_path = self._get_data_path(exchange, market_type, symbol, period)

            if not data_path.exists():
                self.logger.warning(f"数据路径不存在: {data_path}")
                return None

            # Collect every daily file of the requested format.
            pattern = f"*.{format}"
            data_files = list(data_path.glob(pattern))

            if not data_files:
                self.logger.warning(f"未找到数据文件: {data_path}/{pattern}")
                return None

            # Load files in filename (chronological) order, skipping unreadable ones.
            dataframes = [
                frame for frame in (
                    self._load_data_file(file_path, format)
                    for file_path in sorted(data_files)
                )
                if frame is not None
            ]

            if not dataframes:
                return None

            combined_df = pd.concat(dataframes, ignore_index=True)
            combined_df = combined_df.drop_duplicates(subset=['timestamp'])
            combined_df = combined_df.sort_values('timestamp')

            # Apply the inclusive time-window filter, if requested.
            if start_time is not None:
                combined_df = combined_df[combined_df['timestamp'] >= start_time]
            if end_time is not None:
                combined_df = combined_df[combined_df['timestamp'] <= end_time]

            self.logger.data_log(symbol, period, f"加载了 {len(combined_df)} 条数据")
            return combined_df

        except Exception as e:
            self.logger.error(f"加载K线数据失败: {e}")
            return None

    def _update_metadata(self, exchange: str, market_type: str, symbol: str,
                        period: str, df: pd.DataFrame) -> None:
        """Extend the recorded time coverage of a series after a save."""
        try:
            # Walk/create the nested exchange -> market -> symbol -> period dict.
            period_meta = (self.metadata
                           .setdefault("exchanges", {})
                           .setdefault(exchange, {})
                           .setdefault(market_type, {})
                           .setdefault(symbol, {})
                           .setdefault(period, {
                               "first_timestamp": None,
                               "last_timestamp": None,
                               "total_records": 0,
                               "last_update": None
                           }))

            # Widen the covered [first, last] timestamp range.
            min_ts = int(df['timestamp'].min())
            max_ts = int(df['timestamp'].max())

            if period_meta["first_timestamp"] is None or min_ts < period_meta["first_timestamp"]:
                period_meta["first_timestamp"] = min_ts

            if period_meta["last_timestamp"] is None or max_ts > period_meta["last_timestamp"]:
                period_meta["last_timestamp"] = max_ts

            # NOTE(review): this records the size of the LAST saved batch,
            # not a cumulative on-disk record count — confirm whether any
            # consumer relies on it as a true total before changing the schema.
            period_meta["total_records"] = len(df)
            period_meta["last_update"] = datetime.now().isoformat()

            self._save_metadata()

        except Exception as e:
            self.logger.error(f"更新元数据失败: {e}")

    def get_data_info(self, exchange: str, market_type: str, symbol: str,
                     period: str) -> Optional[Dict[str, Any]]:
        """
        Look up the stored metadata for one series.

        Args:
            exchange: Exchange name.
            market_type: Market type.
            symbol: Trading pair.
            period: Candle period.

        Returns:
            The series' metadata dict, or None when unknown.
        """
        try:
            return (self.metadata.get("exchanges", {})
                   .get(exchange, {})
                   .get(market_type, {})
                   .get(symbol, {})
                   .get(period))
        except AttributeError:
            # Metadata loaded from disk may not have the expected nesting.
            return None

    @staticmethod
    def _dates_between(start_date: str, end_date: str) -> List[str]:
        """Return every day from start_date through end_date inclusive (YYYYMMDD)."""
        current = datetime.strptime(start_date, '%Y%m%d')
        end_dt = datetime.strptime(end_date, '%Y%m%d')
        dates = []
        while current <= end_dt:
            dates.append(current.strftime('%Y%m%d'))
            current += timedelta(days=1)
        return dates

    def get_missing_dates(self, exchange: str, market_type: str, symbol: str,
                         period: str, start_date: str, end_date: str) -> List[str]:
        """
        List the days in [start_date, end_date] that have no data file.

        A day counts as present if either a parquet or a csv file exists.

        Args:
            exchange: Exchange name.
            market_type: Market type.
            symbol: Trading pair.
            period: Candle period.
            start_date: First day (YYYYMMDD), inclusive.
            end_date: Last day (YYYYMMDD), inclusive.

        Returns:
            Missing day stamps (YYYYMMDD); empty list on error.
        """
        try:
            data_path = self._get_data_path(exchange, market_type, symbol, period)

            if not data_path.exists():
                # No directory at all -> every requested day is missing.
                return self._dates_between(start_date, end_date)

            # Day stamps that already have a file in either supported format.
            existing = {
                path.stem
                for ext in ('parquet', 'csv')
                for path in data_path.glob(f"*.{ext}")
            }

            return [day for day in self._dates_between(start_date, end_date)
                    if day not in existing]

        except Exception as e:
            self.logger.error(f"获取缺失日期失败: {e}")
            return []

    def delete_data(self, exchange: str, market_type: str, symbol: str,
                   period: str, date: Optional[str] = None) -> bool:
        """
        Delete stored data for a series.

        Args:
            exchange: Exchange name.
            market_type: Market type.
            symbol: Trading pair.
            period: Candle period.
            date: Specific day (YYYYMMDD) to delete; None deletes the whole
                series and drops its metadata entry.

        Returns:
            True on success, False on failure.
        """
        try:
            if date:
                # Remove that day's file in every supported format.
                for format in ('parquet', 'csv'):
                    file_path = self._get_file_path(exchange, market_type, symbol, period, date, format)
                    if file_path.exists():
                        file_path.unlink()
                        self.logger.info(f"删除数据文件: {file_path}")
            else:
                # Remove the whole series directory.
                data_path = self._get_data_path(exchange, market_type, symbol, period)
                if data_path.exists():
                    import shutil
                    shutil.rmtree(data_path)
                    self.logger.info(f"删除数据目录: {data_path}")

                # Drop the now-stale metadata entry so get_data_info no
                # longer reports coverage for deleted data.
                try:
                    del self.metadata["exchanges"][exchange][market_type][symbol][period]
                    self._save_metadata()
                except KeyError:
                    pass

            return True

        except Exception as e:
            self.logger.error(f"删除数据失败: {e}")
            return False

    def get_storage_stats(self) -> Dict[str, Any]:
        """
        Aggregate on-disk size and file counts, grouped by exchange.

        Returns:
            ``{"total_size": bytes, "total_files": n, "exchanges": {...}}``,
            or an empty dict on error.
        """
        try:
            stats = {
                "total_size": 0,
                "total_files": 0,
                "exchanges": {}
            }

            # Only directories are exchanges; metadata.json is a plain file
            # and is therefore already excluded by is_dir().
            for exchange_path in self.storage_path.iterdir():
                if exchange_path.is_dir():
                    exchange_stats = self._get_directory_stats(exchange_path)
                    stats["exchanges"][exchange_path.name] = exchange_stats
                    stats["total_size"] += exchange_stats["size"]
                    stats["total_files"] += exchange_stats["files"]

            return stats

        except Exception as e:
            self.logger.error(f"获取存储统计失败: {e}")
            return {}

    def _get_directory_stats(self, path: Path) -> Dict[str, Any]:
        """Recursively total file count and byte size under *path* (best-effort)."""
        stats = {"size": 0, "files": 0}

        try:
            for item in path.rglob("*"):
                if item.is_file():
                    stats["size"] += item.stat().st_size
                    stats["files"] += 1
        except OSError:
            # Unreadable entries abort the walk; partial totals are acceptable.
            pass

        return stats