"""
数据管理器

负责数据的存储、加载和管理
"""

import json
import os
import pickle
import shutil
import time
from pathlib import Path
from typing import Any, Dict, List, Optional

import h5py
import numpy as np
from loguru import logger


class DataManager:
    """Data manager.

    Persists, loads and manages experiment data under a set of configured
    directories, serializing in one of three formats: hdf5, pickle or json.
    """

    def __init__(self, config: Dict[str, Any]):
        """
        Initialize the data manager.

        Args:
            config: Storage configuration. Must contain a 'paths' mapping
                (data-type name -> directory). Optional keys:
                'format' ('hdf5' | 'pickle' | 'json', default 'hdf5'),
                'compression' (HDF5 datasets only, default 'gzip') and
                'compression_level' (default 6; only used with gzip).
        """
        self.config = config
        self.format = config.get('format', 'hdf5')
        self.compression = config.get('compression', 'gzip')
        self.compression_level = config.get('compression_level', 6)

        # Create all storage directories up front so later saves never
        # fail on a missing parent directory.
        self.paths = config['paths']
        self._create_directories()

        logger.info("数据管理器初始化完成")

    def _create_directories(self):
        """Create every configured storage directory (idempotent)."""
        for path_value in self.paths.values():
            path = Path(path_value)
            path.mkdir(parents=True, exist_ok=True)
            logger.info(f"创建目录: {path}")

    def _resolve_path(self, filename: str, data_type: str) -> Path:
        """Return the full path for *filename* under the *data_type* directory.

        Unknown data types fall back to the 'processed_data' directory,
        which is assumed to exist in the configured paths (historical
        behaviour — a missing key raises KeyError as before).
        """
        if data_type in self.paths:
            return Path(self.paths[data_type]) / filename
        return Path(self.paths['processed_data']) / filename

    def save_data(self,
                  data: Any,
                  filename: str,
                  data_type: str = "processed",
                  metadata: Optional[Dict] = None):
        """
        Save data.

        Args:
            data: Data to save.
            filename: Target file name.
            data_type: Data category (raw, processed, models, ...); selects
                the destination directory.
            metadata: Optional metadata stored alongside the data.

        Raises:
            ValueError: If the configured format is unsupported.
            TypeError: If the data cannot be represented in HDF5.
        """
        try:
            save_path = self._resolve_path(filename, data_type)

            # Dispatch on the configured serialization format.
            if self.format == 'hdf5':
                self._save_hdf5(data, save_path, metadata)
            elif self.format == 'pickle':
                self._save_pickle(data, save_path, metadata)
            elif self.format == 'json':
                self._save_json(data, save_path, metadata)
            else:
                raise ValueError(f"不支持的数据格式: {self.format}")

            logger.info(f"数据已保存: {save_path}")

        except Exception as e:
            logger.error(f"数据保存失败: {e}")
            raise

    def load_data(self,
                  filename: str,
                  data_type: str = "processed") -> Any:
        """
        Load data.

        Args:
            filename: File name to load.
            data_type: Data category; selects the source directory.

        Returns:
            The loaded data.

        Raises:
            ValueError: If the configured format is unsupported.
        """
        try:
            load_path = self._resolve_path(filename, data_type)

            # Dispatch on the configured serialization format.
            if self.format == 'hdf5':
                return self._load_hdf5(load_path)
            elif self.format == 'pickle':
                return self._load_pickle(load_path)
            elif self.format == 'json':
                return self._load_json(load_path)
            else:
                raise ValueError(f"不支持的数据格式: {self.format}")

        except Exception as e:
            logger.error(f"数据加载失败: {e}")
            raise

    def _save_hdf5(self, data: Any, filepath: Path, metadata: Optional[Dict] = None):
        """Save data in HDF5 format.

        Arrays become compressed datasets; non-array dict values become file
        attributes; metadata entries become attributes with a 'metadata_'
        prefix so they can be told apart on load.

        Raises:
            TypeError: If *data* is neither ndarray nor dict (the previous
                code silently wrote an empty file, losing the payload).
        """
        # h5py accepts compression_opts only with gzip; passing a level
        # alongside e.g. 'lzf' raises, so forward it only when it applies.
        opts = self.compression_level if self.compression == 'gzip' else None

        with h5py.File(filepath, 'w') as f:
            if isinstance(data, np.ndarray):
                f.create_dataset('data', data=data, compression=self.compression,
                                 compression_opts=opts)
            elif isinstance(data, dict):
                for key, value in data.items():
                    if isinstance(value, np.ndarray):
                        f.create_dataset(key, data=value, compression=self.compression,
                                         compression_opts=opts)
                    else:
                        f.attrs[key] = value
            else:
                raise TypeError(
                    f"HDF5 format requires np.ndarray or dict, got {type(data).__name__}")

            # Persist metadata as prefixed attributes.
            if metadata:
                for key, value in metadata.items():
                    f.attrs[f'metadata_{key}'] = value

    def _load_hdf5(self, filepath: Path) -> Any:
        """Load HDF5 data written by ``_save_hdf5``.

        A file with a single 'data' dataset yields the bare array; any other
        layout is reassembled into a dict of datasets and attributes.
        """
        prefix = 'metadata_'
        with h5py.File(filepath, 'r') as f:
            if 'data' in f:
                # Single-array layout.
                return f['data'][:]

            # Dict layout: one entry per dataset ...
            data = {key: f[key][:] for key in f.keys()}

            # ... plus the attributes. Metadata attributes get their prefix
            # stripped; plain attributes (non-array dict values written by
            # _save_hdf5) are restored as-is — the old code dropped them.
            for key, value in f.attrs.items():
                if key.startswith(prefix):
                    data[key[len(prefix):]] = value
                else:
                    data[key] = value

            return data

    def _save_pickle(self, data: Any, filepath: Path, metadata: Optional[Dict] = None):
        """Save data in pickle format, wrapped in a {'data', 'metadata'} envelope."""
        save_data = {
            'data': data,
            'metadata': metadata or {}
        }

        with open(filepath, 'wb') as f:
            pickle.dump(save_data, f, protocol=pickle.HIGHEST_PROTOCOL)

    def _load_pickle(self, filepath: Path) -> Any:
        """Load pickle data written by ``_save_pickle``.

        SECURITY NOTE: pickle.load can execute arbitrary code embedded in
        the file — only load files from trusted sources.
        """
        with open(filepath, 'rb') as f:
            loaded = pickle.load(f)
        # Unwrap our envelope; pass foreign pickles through unchanged
        # (non-dict payloads used to crash on .get).
        if isinstance(loaded, dict):
            return loaded.get('data', loaded)
        return loaded

    @staticmethod
    def _to_jsonable(value: Any) -> Any:
        """Recursively convert numpy arrays/scalars to JSON-friendly types."""
        if isinstance(value, np.ndarray):
            return value.tolist()
        if isinstance(value, np.generic):
            # numpy scalar (np.float32, np.int64, ...) -> Python scalar
            return value.item()
        if isinstance(value, dict):
            return {k: DataManager._to_jsonable(v) for k, v in value.items()}
        if isinstance(value, (list, tuple)):
            return [DataManager._to_jsonable(v) for v in value]
        return value

    def _save_json(self, data: Any, filepath: Path, metadata: Optional[Dict] = None):
        """Save data in JSON format, wrapped in a {'data', 'metadata'} envelope.

        Numpy arrays/scalars are converted recursively (the old code handled
        them only at the top level, so nested arrays failed to serialize).
        """
        save_dict = {
            'data': self._to_jsonable(data),
            'metadata': metadata or {}
        }

        with open(filepath, 'w', encoding='utf-8') as f:
            json.dump(save_dict, f, indent=2, ensure_ascii=False)

    def _load_json(self, filepath: Path) -> Any:
        """Load JSON data written by ``_save_json``."""
        with open(filepath, 'r', encoding='utf-8') as f:
            loaded = json.load(f)
        # Unwrap our envelope; pass foreign JSON files through unchanged.
        if isinstance(loaded, dict):
            return loaded.get('data', loaded)
        return loaded

    def list_files(self, data_type: str = "processed") -> List[str]:
        """
        List all files of the given data type.

        Args:
            data_type: Data category.

        Returns:
            Sorted list of file names; empty if the directory is unknown
            or does not exist.
        """
        if data_type not in self.paths:
            return []

        path = Path(self.paths[data_type])
        if not path.exists():
            return []

        # Sort for a deterministic listing (iterdir order is arbitrary).
        return sorted(p.name for p in path.iterdir() if p.is_file())

    def delete_file(self, filename: str, data_type: str = "processed"):
        """
        Delete a file. Best effort: failures are logged, not raised.

        Args:
            filename: File name.
            data_type: Data category.
        """
        try:
            file_path = self._resolve_path(filename, data_type)

            if file_path.exists():
                file_path.unlink()
                logger.info(f"文件已删除: {file_path}")
            else:
                logger.warning(f"文件不存在: {file_path}")

        except Exception as e:
            logger.error(f"文件删除失败: {e}")

    def get_storage_info(self) -> Dict[str, Any]:
        """
        Collect storage statistics for every configured directory.

        Returns:
            Mapping of data type -> {'path', 'total_size_bytes',
            'total_size_mb', 'file_count', 'exists'}.
        """
        info = {}

        for data_type, path_value in self.paths.items():
            path = Path(path_value)
            exists = path.exists()
            total_size = 0
            file_count = 0

            if exists:
                # Account for every file under the directory, recursively.
                for file_path in path.rglob('*'):
                    if file_path.is_file():
                        total_size += file_path.stat().st_size
                        file_count += 1

            info[data_type] = {
                'path': str(path),
                'total_size_bytes': total_size,
                'total_size_mb': total_size / (1024 * 1024),
                'file_count': file_count,
                'exists': exists,
            }

        return info

    def cleanup_old_data(self, max_days: int = 30):
        """
        Delete files older than *max_days* from every configured directory.

        Age is measured from the file's mtime; deletion failures are logged
        and skipped.

        Args:
            max_days: Maximum retention period in days.
        """
        current_time = time.time()
        max_age = max_days * 24 * 3600  # days -> seconds

        for data_type, path_value in self.paths.items():
            path = Path(path_value)
            if not path.exists():
                continue

            deleted_count = 0
            for file_path in path.iterdir():
                if file_path.is_file():
                    file_age = current_time - file_path.stat().st_mtime
                    if file_age > max_age:
                        try:
                            file_path.unlink()
                            deleted_count += 1
                        except Exception as e:
                            logger.warning(f"删除文件失败: {file_path}, {e}")

            if deleted_count > 0:
                logger.info(f"清理了 {data_type} 目录中的 {deleted_count} 个旧文件")

    def backup_data(self, backup_path: str):
        """
        Copy every configured data directory into *backup_path*. Best
        effort: failures are logged, not raised.

        Args:
            backup_path: Destination directory for the backup.
        """
        try:
            backup_dir = Path(backup_path)
            backup_dir.mkdir(parents=True, exist_ok=True)

            for data_type, path_value in self.paths.items():
                source_path = Path(path_value)
                if source_path.exists():
                    # One sub-directory per data type; merge into an
                    # existing backup tree if it is already there.
                    dest_path = backup_dir / data_type
                    shutil.copytree(source_path, dest_path, dirs_exist_ok=True)

            logger.info(f"数据已备份到: {backup_path}")

        except Exception as e:
            logger.error(f"数据备份失败: {e}")

    def __str__(self) -> str:
        return f"DataManager(format={self.format}, compression={self.compression})"

    def __repr__(self) -> str:
        return self.__str__()