import zstandard as zstd
import json
import base64
import gzip
import lz4.frame
from typing import Any, Dict, Optional, Union
import logging
import sys
import os

# Add the project root to the Python path so the local `exception`
# package resolves even when this module is run directly as a script.
current_dir = os.path.dirname(os.path.abspath(__file__))
project_root = os.path.dirname(current_dir)
if project_root not in sys.path:
    sys.path.append(project_root)

from exception.base_exceptions import DataProcessingException
from exception.exception_handler import global_exception_handler

# Configure logging
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)


class DataCompressor:
    """
    Data compression/decompression utility.

    Supports several compression algorithms (zstandard, gzip, lz4) and is
    intended for compact storage of sleep-session data.

    Note: the reported ``compressed_size`` is the size of the base64 text
    that is actually stored, not of the raw compressed bytes (base64 adds
    roughly 33% overhead), so ``compression_ratio`` reflects storage cost.
    """

    def __init__(self, algorithm: str = 'zstd', compression_level: int = 3):
        """
        Initialize the compressor.

        Args:
            algorithm: compression algorithm ('zstd', 'gzip', 'lz4'),
                case-insensitive.
            compression_level: compression level
                (1-22 for zstd, 1-9 for gzip, 1-16 for lz4).

        Raises:
            ValueError: if *algorithm* is not supported.
        """
        self.algorithm = algorithm.lower()
        self.compression_level = compression_level

        if self.algorithm == 'zstd':
            # zstd benefits from reusing (de)compressor objects across calls.
            self.compressor = zstd.ZstdCompressor(level=compression_level)
            self.decompressor = zstd.ZstdDecompressor()
        elif self.algorithm in ('gzip', 'lz4'):
            # gzip and lz4 use one-shot module-level functions; nothing to
            # pre-build, so both slots stay None.
            self.compressor = None
            self.decompressor = None
        else:
            raise ValueError(f"不支持的压缩算法: {algorithm}")

    def compress_data(self, data: Any) -> Dict[str, Any]:
        """
        Compress arbitrary JSON-serializable data.

        Args:
            data: data to compress (dict, list, etc.).

        Returns:
            Dict with 'algorithm', 'compression_level', 'original_size',
            'compressed_size', 'compression_ratio', 'is_compressed' and the
            base64-encoded payload under 'compressed_data'.

        Raises:
            DataProcessingException: if serialization or compression fails
                (the original exception is reported to the global handler).
        """
        try:
            # Compact separators keep the pre-compression payload minimal;
            # encode once instead of the previous double encode.
            json_bytes = json.dumps(
                data, separators=(',', ':'), ensure_ascii=False
            ).encode('utf-8')
            original_size = len(json_bytes)

            if self.algorithm == 'zstd':
                compressed_bytes = self.compressor.compress(json_bytes)
            elif self.algorithm == 'gzip':
                compressed_bytes = gzip.compress(
                    json_bytes, compresslevel=self.compression_level
                )
            else:  # 'lz4' — __init__ guarantees no other value reaches here
                compressed_bytes = lz4.frame.compress(
                    json_bytes, compression_level=self.compression_level
                )

            # base64 so the payload is safe to embed in JSON documents.
            compressed_b64 = base64.b64encode(compressed_bytes).decode('utf-8')
            # base64 output is pure ASCII: 1 char == 1 byte.
            compressed_size = len(compressed_b64)

            # Stored-size / original-size; < 1.0 means the data shrank.
            compression_ratio = (
                round(compressed_size / original_size, 3) if original_size > 0 else 0
            )

            result = {
                'algorithm': self.algorithm,
                'compression_level': self.compression_level,
                'original_size': original_size,
                'compressed_size': compressed_size,
                'compression_ratio': compression_ratio,
                'compressed_data': compressed_b64,
                'is_compressed': True
            }

            logger.info(f"数据压缩完成: {original_size} -> {compressed_size} bytes (压缩比: {compression_ratio})")
            return result

        except Exception as e:
            compression_error = DataProcessingException(
                f"数据压缩失败: {str(e)}",
                operation="data_compression"
            )
            global_exception_handler.handle_exception(compression_error)
            raise compression_error

    def decompress_data(self, compressed_info: Dict[str, Any]) -> Any:
        """
        Decompress a payload previously produced by :meth:`compress_data`.

        Args:
            compressed_info: dict containing at least 'compressed_data'
                (base64 string); 'algorithm' is honored if present, falling
                back to this instance's algorithm.

        Returns:
            The original (JSON-decoded) data.

        Raises:
            DataProcessingException: if decoding, decompression, or JSON
                parsing fails.
        """
        try:
            # Trust the algorithm recorded with the payload; fall back to ours.
            algorithm = compressed_info.get('algorithm', self.algorithm)
            compressed_bytes = base64.b64decode(compressed_info['compressed_data'])

            if algorithm == 'zstd':
                # Bug fix: the payload may be zstd-compressed even if this
                # instance was built for gzip/lz4 (self.decompressor is None
                # then); create a decompressor on demand instead of raising
                # AttributeError on None.
                decompressor = self.decompressor or zstd.ZstdDecompressor()
                decompressed_bytes = decompressor.decompress(compressed_bytes)
            elif algorithm == 'gzip':
                decompressed_bytes = gzip.decompress(compressed_bytes)
            elif algorithm == 'lz4':
                decompressed_bytes = lz4.frame.decompress(compressed_bytes)
            else:
                raise ValueError(f"不支持的解压算法: {algorithm}")

            data = json.loads(decompressed_bytes.decode('utf-8'))

            logger.info(f"数据解压完成: {len(compressed_bytes)} -> {len(decompressed_bytes)} bytes")
            return data

        except Exception as e:
            decompression_error = DataProcessingException(
                f"数据解压失败: {str(e)}",
                operation="data_decompression"
            )
            global_exception_handler.handle_exception(decompression_error)
            raise decompression_error

    def compress_sleep_data(self, sleep_data: Dict[str, Any]) -> Dict[str, Any]:
        """
        Compress a sleep-data record field by field.

        Identifying/metadata fields are copied through as-is; each waveform
        field (brain_wave, delta, theta, alpha, beta, gamma) present in the
        input is compressed individually under 'compressed_fields'.

        Args:
            sleep_data: sleep-data dict possibly containing brain_wave,
                delta, theta, alpha, beta, gamma fields.

        Returns:
            Compressed record with per-field payloads and an aggregate
            'compression_stats' entry.

        Raises:
            Exception: the original exception is re-raised after being
                reported to the global handler (NOTE: unlike compress_data,
                this re-raises the original, not the wrapped error —
                preserved for backward compatibility).
        """
        try:
            compressed_sleep_data = {
                'id': sleep_data.get('id'),
                'session_id': sleep_data.get('session_id'),
                'user_id': sleep_data.get('user_id'),
                'created_at': sleep_data.get('created_at'),
                'duration_seconds': sleep_data.get('duration_seconds'),
                'device_count': sleep_data.get('device_count', 1),
                'metadata': sleep_data.get('metadata', {}),
                'compressed_fields': {}
            }

            # Compress each waveform field that is actually present.
            fields_to_compress = ['brain_wave', 'delta', 'theta', 'alpha', 'beta', 'gamma']
            for field in fields_to_compress:
                if field in sleep_data:
                    compressed_sleep_data['compressed_fields'][field] = \
                        self.compress_data(sleep_data[field])

            # Aggregate compression statistics across all compressed fields.
            field_infos = compressed_sleep_data['compressed_fields'].values()
            total_original = sum(info['original_size'] for info in field_infos)
            total_compressed = sum(info['compressed_size'] for info in field_infos)

            compressed_sleep_data['compression_stats'] = {
                'total_original_size': total_original,
                'total_compressed_size': total_compressed,
                'overall_compression_ratio': (
                    round(total_compressed / total_original, 3) if total_original > 0 else 0
                )
            }

            logger.info(f"睡眠数据压缩完成: 总体压缩比 {compressed_sleep_data['compression_stats']['overall_compression_ratio']}")
            return compressed_sleep_data

        except Exception as e:
            error_msg = f"睡眠数据压缩失败，会话ID: {sleep_data.get('session_id', 'unknown')}，原因: {str(e)}"
            compression_error = DataProcessingException(error_msg, data_type="sleep_data")
            global_exception_handler.handle_exception(compression_error)
            raise

    def decompress_sleep_data(self, compressed_sleep_data: Dict[str, Any]) -> Dict[str, Any]:
        """
        Decompress a record produced by :meth:`compress_sleep_data`.

        Args:
            compressed_sleep_data: compressed sleep-data record.

        Returns:
            The reconstructed sleep-data dict (compression_stats is not
            carried over).

        Raises:
            Exception: the original exception is re-raised after being
                reported to the global handler.
        """
        try:
            # Copy the plain (uncompressed) fields through unchanged.
            decompressed_sleep_data = {
                'id': compressed_sleep_data.get('id'),
                'session_id': compressed_sleep_data.get('session_id'),
                'user_id': compressed_sleep_data.get('user_id'),
                'created_at': compressed_sleep_data.get('created_at'),
                'duration_seconds': compressed_sleep_data.get('duration_seconds'),
                'device_count': compressed_sleep_data.get('device_count', 1),
                'metadata': compressed_sleep_data.get('metadata', {})
            }

            # Restore each compressed waveform field.
            compressed_fields = compressed_sleep_data.get('compressed_fields', {})
            for field_name, compressed_info in compressed_fields.items():
                decompressed_sleep_data[field_name] = self.decompress_data(compressed_info)

            logger.info("睡眠数据解压完成")
            return decompressed_sleep_data

        except Exception as e:
            error_msg = f"睡眠数据解压失败，会话ID: {compressed_sleep_data.get('session_id', 'unknown')}，原因: {str(e)}"
            decompression_error = DataProcessingException(error_msg, data_type="sleep_data")
            global_exception_handler.handle_exception(decompression_error)
            raise

    def get_compression_info(self, data: Any) -> Dict[str, Any]:
        """
        Estimate compression statistics without actually compressing.

        Args:
            data: data to analyze (must be JSON-serializable).

        Returns:
            Dict with the configured algorithm/level, the exact serialized
            size, and a rough (fixed-ratio) estimate of the compressed size.
        """
        try:
            json_str = json.dumps(data, separators=(',', ':'), ensure_ascii=False)
            original_size = len(json_str.encode('utf-8'))

            return {
                'algorithm': self.algorithm,
                'compression_level': self.compression_level,
                'original_size': original_size,
                'estimated_compressed_size': original_size // 3,  # rough heuristic, not measured
                'estimated_compression_ratio': 0.33
            }
        except Exception as e:
            error_msg = f"获取压缩信息失败，原因: {str(e)}"
            info_error = DataProcessingException(error_msg, data_type="compression_info")
            global_exception_handler.handle_exception(info_error)
            raise


# 便捷函数
def compress_data(data: Any, algorithm: str = 'zstd', level: int = 3) -> Dict[str, Any]:
    """
    Convenience wrapper: compress *data* with a one-off DataCompressor.

    Args:
        data: data to compress.
        algorithm: compression algorithm name.
        level: compression level.

    Returns:
        The compression-result dict from DataCompressor.compress_data.
    """
    return DataCompressor(algorithm, level).compress_data(data)


def decompress_data(compressed_info: Dict[str, Any]) -> Any:
    """
    Convenience wrapper: decompress a dict produced by compress_data.

    Args:
        compressed_info: compression-result dict; its 'algorithm' entry
            (defaulting to 'zstd') selects the decompressor.

    Returns:
        The decompressed original data.
    """
    chosen = compressed_info.get('algorithm', 'zstd')
    return DataCompressor(chosen).decompress_data(compressed_info)


# 使用示例
if __name__ == "__main__":
    # Synthetic sample payload: brain-wave and delta frame series.
    sample = {
        "brain_wave": {
            "total_seconds": 7200,
            "frame_rate": 128,
            "wave": {
                "frames": [[idx, idx * 100] for idx in range(1, 1001)]  # 1000 data points
            }
        },
        "delta": {
            "total_seconds": 7200,
            "frame_rate": 1,
            "wave": {
                "frames": [[idx, idx * 10] for idx in range(1, 101)]  # 100 data points
            }
        }
    }

    # Round-trip a single field through compress/decompress.
    logger.info("=== 测试数据压缩 ===")
    dc = DataCompressor('zstd', 3)

    packed = dc.compress_data(sample['brain_wave'])
    logger.info(f"原始大小: {packed['original_size']} bytes")
    logger.info(f"压缩后大小: {packed['compressed_size']} bytes")
    logger.info(f"压缩比: {packed['compression_ratio']}")

    restored = dc.decompress_data(packed)
    logger.info(f"解压成功: {len(restored['wave']['frames'])} 个数据点")

    # Round-trip a full sleep-data record.
    logger.info("\n=== 测试睡眠数据压缩 ===")
    session_record = {
        'id': 'test_001',
        'session_id': 'sleep_session_001',
        'user_id': 'user_001',
        'created_at': '2024-12-01T22:00:00Z',
        'duration_seconds': 7200,
        'brain_wave': sample['brain_wave'],
        'delta': sample['delta']
    }

    packed_record = dc.compress_sleep_data(session_record)
    logger.info(f"睡眠数据总体压缩比: {packed_record['compression_stats']['overall_compression_ratio']}")

    restored_record = dc.decompress_sleep_data(packed_record)
    logger.info(f"睡眠数据解压成功: {len(restored_record['brain_wave']['wave']['frames'])} 个脑电数据点")