#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
数据记录与保存模块
Data Logger and Save Module

功能特性：
- 实时数据记录
- 多种数据格式支持 (CSV, JSON, Binary)
- 文件自动轮转
- 数据压缩存储
- 配置化日志管理
"""

import os
import json
import csv
import struct
import gzip
import threading
from datetime import datetime, timedelta
from typing import Dict, List, Optional, Any, Union
from dataclasses import dataclass, asdict
from enum import Enum
import logging
from pathlib import Path

# Categories of records the data logger can store.  Built with the
# functional Enum API; member order and values match the declaration order.
DataType = Enum(
    "DataType",
    [
        ("LOG_MESSAGE", "log_message"),
        ("COMMAND_DATA", "command_data"),
        ("RESPONSE_DATA", "response_data"),
        ("ADC_DATA", "adc_data"),
        ("CALIBRATION_DATA", "calibration_data"),
        ("SYSTEM_STATUS", "system_status"),
    ],
)
DataType.__doc__ = "Kinds of records handled by the data logger."

@dataclass
class LogEntry:
    """A single timestamped record produced by the data logger."""
    timestamp: datetime                     # when the entry was created
    data_type: DataType                     # category of the record
    level: str                              # severity label ("INFO", "ERROR", ...)
    message: str                            # human-readable description
    data: Optional[Dict[str, Any]] = None   # optional structured payload

    def to_dict(self) -> Dict[str, Any]:
        """Serialize this entry into a JSON-friendly dictionary.

        The ``data`` key is included only when a payload is present.
        """
        serialized: Dict[str, Any] = {
            'timestamp': self.timestamp.isoformat(),
            'data_type': self.data_type.value,
            'level': self.level,
            'message': self.message,
        }
        if self.data:
            serialized['data'] = self.data
        return serialized

    @classmethod
    def from_dict(cls, payload: Dict[str, Any]) -> 'LogEntry':
        """Rebuild a LogEntry from a dictionary produced by :meth:`to_dict`."""
        return cls(
            datetime.fromisoformat(payload['timestamp']),
            DataType(payload['data_type']),
            payload['level'],
            payload['message'],
            payload.get('data'),
        )

class DataLogger:
    """Buffered, multi-format data logger.

    Entries are accumulated in an in-memory buffer and flushed in bulk to
    three parallel on-disk representations:

    - JSON lines (``log_YYYYMMDD.json`` / ``.json.gz``)
    - CSV        (``log_YYYYMMDD.csv``  / ``.csv.gz``)
    - length-prefixed binary (``log_YYYYMMDD.bin``, never compressed)

    Files are rotated once they exceed ``max_file_size`` and at most
    ``max_files`` files per format are retained.
    """

    def __init__(self, log_dir: str = "logs", max_file_size: int = 10*1024*1024,
                 max_files: int = 10, enable_compression: bool = True):
        """
        Initialize the data logger.

        Args:
            log_dir: Directory that receives all log output.
            max_file_size: Size threshold in bytes that triggers rotation.
            max_files: Maximum number of files kept per log format.
            enable_compression: Gzip-compress JSON/CSV logs (the binary
                log is always stored uncompressed).
        """
        self.log_dir = Path(log_dir)
        self.max_file_size = max_file_size
        self.max_files = max_files
        self.enable_compression = enable_compression

        # Create the log directory, including any missing parents.
        self.log_dir.mkdir(parents=True, exist_ok=True)

        # Reserved per-format handles; files are actually opened per flush.
        self.current_log_file = None
        self.current_csv_file = None
        self.current_binary_file = None

        # Protects the in-memory buffer against concurrent writers.
        self.lock = threading.Lock()

        # In-memory buffer, flushed to disk once it reaches buffer_size.
        self.log_buffer: List[LogEntry] = []
        self.buffer_size = 1000

        # Standard library logger for human-readable application logging.
        self.logger = logging.getLogger(__name__)
        self.logger.setLevel(logging.INFO)

        # Attach the file handler only once: creating several DataLogger
        # instances must not duplicate every application.log line.
        if not self.logger.handlers:
            log_file_path = self.log_dir / "application.log"
            file_handler = logging.FileHandler(log_file_path, encoding='utf-8')
            formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
            file_handler.setFormatter(formatter)
            self.logger.addHandler(file_handler)

        # Record platform/system information at startup.
        self.log_system_info()

    def log_system_info(self):
        """Record platform/interpreter information as a SYSTEM_STATUS entry."""
        import platform
        import sys

        system_info = {
            'platform': platform.platform(),
            'python_version': sys.version,
            'architecture': platform.architecture(),
            'processor': platform.processor(),
            'hostname': platform.node(),
            'start_time': datetime.now().isoformat()
        }

        self.log_entry(
            data_type=DataType.SYSTEM_STATUS,
            level="INFO",
            message="系统启动",
            data=system_info
        )

    def log_entry(self, data_type: DataType, level: str, message: str,
                  data: Optional[Dict[str, Any]] = None):
        """Append a structured entry to the buffer, flushing when it is full.

        Args:
            data_type: Category of the record.
            level: Severity label ("INFO", "ERROR", ...).
            message: Human-readable description.
            data: Optional structured payload stored with the entry.
        """
        entry = LogEntry(
            timestamp=datetime.now(),
            data_type=data_type,
            level=level,
            message=message,
            data=data
        )

        with self.lock:
            self.log_buffer.append(entry)
            # Flush in bulk once the buffer reaches its limit.
            if len(self.log_buffer) >= self.buffer_size:
                self._flush_buffer()

    def log_message(self, message: str, level: str = "INFO"):
        """Record a plain message and mirror it to the standard logger."""
        self.log_entry(DataType.LOG_MESSAGE, level, message)

        # Mirror to the stdlib logger at the matching severity; any
        # unrecognized level falls back to INFO.
        if level == "ERROR":
            self.logger.error(message)
        elif level == "WARNING":
            self.logger.warning(message)
        elif level == "DEBUG":
            self.logger.debug(message)
        else:
            self.logger.info(message)

    def log_command(self, command_code: int, parameters: bytes, response_data: bytes,
                   success: bool, error_msg: str = ""):
        """Record one command/response round trip.

        Args:
            command_code: Numeric command identifier.
            parameters: Raw request payload (stored hex-encoded).
            response_data: Raw response payload (stored hex-encoded).
            success: Whether the command succeeded.
            error_msg: Optional error description when it failed.
        """
        command_data = {
            'command_code': command_code,
            'parameters': parameters.hex(),
            'response_data': response_data.hex(),
            'success': success,
            'error_msg': error_msg
        }

        level = "INFO" if success else "ERROR"
        message = f"命令 0x{command_code:02X} {'成功' if success else '失败'}"

        self.log_entry(DataType.COMMAND_DATA, level, message, command_data)

    def log_adc_data(self, channel: int, raw_value: int, calibrated_value: float,
                    temperature: float):
        """Record one ADC sample (raw and calibrated) with its temperature."""
        adc_data = {
            'channel': channel,
            'raw_value': raw_value,
            'calibrated_value': calibrated_value,
            'temperature': temperature
        }

        message = f"ADC通道{channel}数据"
        self.log_entry(DataType.ADC_DATA, "INFO", message, adc_data)

    def log_calibration_data(self, calibration_type: str, parameters: Dict[str, Any]):
        """Record a set of calibration parameters."""
        message = f"校准数据: {calibration_type}"
        self.log_entry(DataType.CALIBRATION_DATA, "INFO", message, parameters)

    def _flush_buffer(self):
        """Write the buffered entries to all formats, then clear the buffer.

        Must be called with ``self.lock`` held.  On failure the buffer is
        kept so the entries are retried on the next flush.
        """
        if not self.log_buffer:
            return

        try:
            self._write_json_log()
            self._write_csv_log()
            self._write_binary_log()
            self.log_buffer.clear()
        except Exception as e:
            self.logger.error(f"刷新日志缓冲区失败: {e}")

    def _write_json_log(self):
        """Append buffered entries as JSON lines (gzip-aware)."""
        log_file = self._get_log_file("json")

        # Bug fix: compressed logs are named *.gz, so they must be written
        # through gzip; plain open() would store uncompressed bytes that
        # _load_all_entries later fails to read with gzip.open().
        opener = gzip.open if log_file.suffix == ".gz" else open
        with opener(log_file, 'at', encoding='utf-8') as f:
            for entry in self.log_buffer:
                json.dump(entry.to_dict(), f, ensure_ascii=False)
                f.write('\n')

    def _write_csv_log(self):
        """Append buffered entries as CSV rows (gzip-aware)."""
        csv_file = self._get_log_file("csv")

        # Write the header only when creating a new file.
        write_header = not csv_file.exists()

        # Same gzip fix as _write_json_log: honor the .gz filename.
        if csv_file.suffix == ".gz":
            f = gzip.open(csv_file, 'at', encoding='utf-8', newline='')
        else:
            f = open(csv_file, 'a', newline='', encoding='utf-8')

        with f:
            writer = csv.writer(f)

            if write_header:
                writer.writerow(['时间戳', '数据类型', '级别', '消息', '数据'])

            for entry in self.log_buffer:
                data_str = json.dumps(entry.data, ensure_ascii=False) if entry.data else ""
                writer.writerow([
                    entry.timestamp.isoformat(),
                    entry.data_type.value,
                    entry.level,
                    entry.message,
                    data_str
                ])

    def _write_binary_log(self):
        """Append buffered entries as length-prefixed binary records."""
        binary_file = self._get_log_file("bin")

        with open(binary_file, 'ab') as f:
            for entry in self.log_buffer:
                serialized = self._serialize_log_entry(entry)
                # Record layout: little-endian uint32 length, then payload.
                f.write(struct.pack('<I', len(serialized)))
                f.write(serialized)

    def _serialize_log_entry(self, entry: LogEntry) -> bytes:
        """Serialize one entry to UTF-8 JSON bytes (binary-log payload)."""
        data = entry.to_dict()
        json_str = json.dumps(data, ensure_ascii=False)
        return json_str.encode('utf-8')

    def _get_log_file(self, file_type: str) -> Path:
        """Return today's log file path for *file_type*, rotating if full.

        Args:
            file_type: "json", "csv" or "bin"; JSON/CSV gain a ".gz"
                suffix when compression is enabled.
        """
        date_str = datetime.now().strftime("%Y%m%d")
        filename = f"log_{date_str}.{file_type}"

        # The binary log is already compact; only compress text formats.
        if self.enable_compression and file_type != "bin":
            filename += ".gz"

        log_file = self.log_dir / filename

        # Rotate the current file away once it exceeds the size limit.
        if log_file.exists() and log_file.stat().st_size > self.max_file_size:
            self._rotate_log_file(log_file)

        return log_file

    def _rotate_log_file(self, log_file: Path):
        """Rename a full log file to the next free numbered name.

        Handles compound extensions correctly: ``log_x.json.gz`` becomes
        ``log_x_001.json.gz`` (the old code produced ``log_x.json_001.gz``).
        """
        # Full extension chain, e.g. ".json.gz" or ".bin".
        ext = "".join(log_file.suffixes)
        base_name = log_file.name[:-len(ext)] if ext else log_file.name

        # Find the next unused sequence number.
        counter = 1
        while True:
            new_file = log_file.parent / f"{base_name}_{counter:03d}{ext}"
            if not new_file.exists():
                log_file.rename(new_file)
                break
            counter += 1

        # Enforce the retention limit for this format.
        self._cleanup_old_files(log_file.parent, ext)

    def _cleanup_old_files(self, log_dir: Path, suffix: str):
        """Delete the oldest rotating log files beyond ``max_files``.

        Only ``log_*`` files are considered so exported files
        (``export_*``) and ``application.log`` are never removed.
        """
        files = list(log_dir.glob(f"log_*{suffix}"))

        # Newest first; everything past max_files is deleted.
        files.sort(key=lambda x: x.stat().st_mtime, reverse=True)

        for file in files[self.max_files:]:
            try:
                file.unlink()
                self.logger.info(f"删除旧日志文件: {file}")
            except Exception as e:
                self.logger.error(f"删除旧日志文件失败: {file}, 错误: {e}")

    def flush(self):
        """Flush the in-memory buffer to disk immediately."""
        with self.lock:
            if self.log_buffer:
                self._flush_buffer()

    def export_data(self, start_time: Optional[datetime] = None,
                   end_time: Optional[datetime] = None,
                   data_types: Optional[List[DataType]] = None,
                   output_format: str = "json") -> str:
        """
        Export filtered log data to a file.

        Args:
            start_time: Inclusive lower bound on entry timestamps.
            end_time: Inclusive upper bound on entry timestamps.
            data_types: Restrict export to these data types.
            output_format: "json", "csv" or "excel".

        Returns:
            Path of the exported file as a string.

        Raises:
            ValueError: If *output_format* is not supported.
        """
        exporters = {
            "json": self._export_to_json,
            "csv": self._export_to_csv,
            "excel": self._export_to_excel,
        }
        # Validate the format before touching the disk.
        if output_format not in exporters:
            raise ValueError(f"不支持的输出格式: {output_format}")

        # Make sure everything buffered is on disk first.
        self.flush()

        all_entries = self._load_all_entries()
        filtered_entries = self._filter_entries(all_entries, start_time, end_time, data_types)

        timestamp = datetime.now().strftime("%Y%m%d_%H%M%S")
        # Excel files carry the .xlsx extension (not ".excel").
        extension = "xlsx" if output_format == "excel" else output_format
        export_file = self.log_dir / f"export_{timestamp}.{extension}"

        exporters[output_format](filtered_entries, export_file)

        return str(export_file)

    def _load_all_entries(self) -> List[LogEntry]:
        """Load every entry from the JSON log files, sorted by timestamp."""
        entries = []

        # Covers both plain (.json) and compressed (.json.gz) files.
        for json_file in self.log_dir.glob("log_*.json*"):
            opener = gzip.open if json_file.name.endswith('.gz') else open
            try:
                with opener(json_file, 'rt', encoding='utf-8') as f:
                    for line in f:
                        line = line.strip()
                        if line:
                            entries.append(LogEntry.from_dict(json.loads(line)))
            except Exception as e:
                self.logger.error(f"读取日志文件失败: {json_file}, 错误: {e}")

        return sorted(entries, key=lambda x: x.timestamp)

    def _filter_entries(self, entries: List[LogEntry], start_time: Optional[datetime],
                       end_time: Optional[datetime], data_types: Optional[List[DataType]]) -> List[LogEntry]:
        """Filter entries by time window and data type (all bounds inclusive)."""
        filtered = entries

        if start_time:
            filtered = [e for e in filtered if e.timestamp >= start_time]

        if end_time:
            filtered = [e for e in filtered if e.timestamp <= end_time]

        if data_types:
            filtered = [e for e in filtered if e.data_type in data_types]

        return filtered

    def _export_to_json(self, entries: List[LogEntry], output_file: Path):
        """Write entries to *output_file* as one pretty-printed JSON array."""
        with open(output_file, 'w', encoding='utf-8') as f:
            data = [entry.to_dict() for entry in entries]
            json.dump(data, f, indent=2, ensure_ascii=False)

    def _export_to_csv(self, entries: List[LogEntry], output_file: Path):
        """Write entries to *output_file* as CSV with a header row."""
        with open(output_file, 'w', newline='', encoding='utf-8') as f:
            writer = csv.writer(f)

            writer.writerow(['时间戳', '数据类型', '级别', '消息', '数据'])

            for entry in entries:
                data_str = json.dumps(entry.data, ensure_ascii=False) if entry.data else ""
                writer.writerow([
                    entry.timestamp.isoformat(),
                    entry.data_type.value,
                    entry.level,
                    entry.message,
                    data_str
                ])

    def _export_to_excel(self, entries: List[LogEntry], output_file: Path):
        """Write entries to an Excel workbook via pandas/openpyxl.

        Raises:
            ImportError: If pandas is unavailable; the data is still
                exported as a CSV file next to *output_file* first.
        """
        try:
            import pandas as pd

            data = []
            for entry in entries:
                data.append({
                    '时间戳': entry.timestamp,
                    '数据类型': entry.data_type.value,
                    '级别': entry.level,
                    '消息': entry.message,
                    '数据': json.dumps(entry.data, ensure_ascii=False) if entry.data else ""
                })

            df = pd.DataFrame(data)

            with pd.ExcelWriter(output_file, engine='openpyxl') as writer:
                df.to_excel(writer, sheet_name='日志数据', index=False)

        except ImportError:
            # Best effort: fall back to CSV, then surface the missing
            # dependency to the caller.
            csv_file = output_file.with_suffix('.csv')
            self._export_to_csv(entries, csv_file)
            raise ImportError("需要安装pandas才能导出Excel格式，已导出为CSV格式")

    def get_statistics(self) -> Dict[str, Any]:
        """Return counts by type/level, the time range, and file totals."""
        # Make sure buffered entries are included in the statistics.
        self.flush()

        all_entries = self._load_all_entries()

        stats = {
            'total_entries': len(all_entries),
            'by_type': {},
            'by_level': {},
            'date_range': {
                'start': None,
                'end': None
            },
            'file_info': {
                'total_files': 0,
                'total_size': 0
            }
        }

        # Tally per data type and per level.
        for entry in all_entries:
            data_type = entry.data_type.value
            stats['by_type'][data_type] = stats['by_type'].get(data_type, 0) + 1

            level = entry.level
            stats['by_level'][level] = stats['by_level'].get(level, 0) + 1

        # Entries are sorted, so first/last give the time range.
        if all_entries:
            stats['date_range']['start'] = all_entries[0].timestamp
            stats['date_range']['end'] = all_entries[-1].timestamp

        log_files = list(self.log_dir.glob("log_*"))
        stats['file_info']['total_files'] = len(log_files)
        stats['file_info']['total_size'] = sum(f.stat().st_size for f in log_files)

        return stats

    def cleanup(self):
        """Flush pending entries and enforce retention on every format.

        Unlike the old code this also cleans compressed (.gz) files,
        which a plain "*.json" glob never matched.
        """
        self.flush()

        for suffix in ['.json', '.csv', '.bin']:
            self._cleanup_old_files(self.log_dir, suffix)
            if suffix != '.bin':
                self._cleanup_old_files(self.log_dir, suffix + '.gz')

class DataExporter:
    """Convenience wrappers around DataLogger.export_data for common exports."""

    def __init__(self, data_logger: DataLogger):
        self.data_logger = data_logger

    def _export(self, output_format: str,
                data_types: Optional[List[DataType]] = None,
                start_time: Optional[datetime] = None,
                end_time: Optional[datetime] = None) -> str:
        """Delegate to the underlying logger's export_data."""
        return self.data_logger.export_data(
            start_time=start_time,
            end_time=end_time,
            data_types=data_types,
            output_format=output_format,
        )

    def export_adc_data(self, start_time: Optional[datetime] = None,
                       end_time: Optional[datetime] = None) -> str:
        """Export ADC samples in the given time window as CSV."""
        return self._export("csv", [DataType.ADC_DATA], start_time, end_time)

    def export_command_history(self, start_time: Optional[datetime] = None,
                              end_time: Optional[datetime] = None) -> str:
        """Export the command/response history in the window as JSON."""
        return self._export("json", [DataType.COMMAND_DATA], start_time, end_time)

    def export_calibration_data(self) -> str:
        """Export all recorded calibration data as JSON."""
        return self._export("json", [DataType.CALIBRATION_DATA])

    def export_all_data(self, format: str = "excel") -> str:
        """Export every recorded entry in the requested format."""
        return self._export(format)