"""
全局归一化管理器
统一训练和预测的归一化逻辑，使用全局最大值统一缩放
"""

import numpy as np
import logging
from datetime import datetime, timedelta
from sqlalchemy.orm import Session
from sqlalchemy import text, func
from typing import Optional, Tuple
import pickle
import os

from backend.config.database import get_db_session
from backend.entities.load_data_new import LoadData

logger = logging.getLogger(__name__)

class GlobalScaler:
    """Global normalizer: scale load values by the recent global maximum.

    Training and prediction share one scale factor (the maximum load observed
    in the database over the last N days), so normalized values from both
    code paths are directly comparable.  Fitted statistics are cached on disk
    and transparently reloaded by fit().
    """

    def __init__(self):
        # Extremes (MW) over the lookback window; None until fit()/load_scaler().
        self.global_max_load: Optional[float] = None
        self.global_min_load: Optional[float] = None
        # On-disk cache for the fitted statistics (pickle format).
        self.scaler_file = "backend/models/saved_models/global_scaler.pkl"

    def get_global_load_stats(self, days: int = 30) -> Tuple[float, float]:
        """Query the database for (max_load, min_load) over the last ``days`` days.

        Falls back to typical Belgian grid values (15000.0 / 6000.0 MW) when
        the query fails, returns no rows, or yields an implausible maximum.

        Args:
            days: lookback window length in days.

        Returns:
            Tuple of (max_load, min_load) in MW; never raises.
        """
        try:
            with get_db_session() as db:
                # Compute the query window.  NOTE(review): `dt <= end_date`
                # compares against a date; if `dt` is a DATETIME column this
                # may exclude today's intra-day rows — confirm against schema.
                end_date = datetime.now().date()
                start_date = end_date - timedelta(days=days)

                # Aggregate load statistics for the window (parameterized SQL).
                stats = db.execute(text("""
                    SELECT 
                        MIN(load_val) as min_load, 
                        MAX(load_val) as max_load,
                        AVG(load_val) as avg_load,
                        COUNT(*) as record_count
                    FROM load_data 
                    WHERE dt >= :start_date AND dt <= :end_date
                    AND load_val IS NOT NULL
                """), {"start_date": start_date, "end_date": end_date}).first()

                if stats and stats.max_load is not None and stats.min_load is not None:
                    max_load = float(stats.max_load)
                    min_load = float(stats.min_load)
                    # `is not None` so a legitimate 0.0 average is not discarded
                    # by a falsy check.
                    avg_load = (float(stats.avg_load) if stats.avg_load is not None
                                else (max_load + min_load) / 2)
                    record_count = int(stats.record_count)

                    logger.info(f"获取最近{days}天负荷统计: 最大值={max_load:.2f}MW, "
                                f"最小值={min_load:.2f}MW, 平均值={avg_load:.2f}MW, "
                                f"记录数={record_count}")

                    # Sanity-check against the plausible Belgian load range;
                    # replace obviously corrupt statistics with defaults.
                    if max_load < 5000 or max_load > 20000:
                        logger.warning(f"检测到异常的负荷最大值: {max_load}MW，使用默认值")
                        max_load = 15000.0  # typical Belgian peak load
                        min_load = 6000.0   # typical Belgian off-peak load

                    return max_load, min_load
                else:
                    logger.warning(f"无法获取最近{days}天的负荷数据，使用默认值")
                    return 15000.0, 6000.0  # typical Belgian load range

        except Exception as e:
            # Best-effort: a DB outage must never break normalization.
            logger.error(f"获取全局负荷统计失败: {e}")
            return 15000.0, 6000.0

    def fit(self, days: int = 30, force_update: bool = False) -> None:
        """Fit the global scaler, reusing the on-disk cache when allowed.

        Args:
            days: lookback window for the statistics query.
            force_update: when True, skip the cache and re-query the database.
        """
        # Reuse the cached scaler unless the caller forces a refresh.
        if not force_update and self.load_scaler():
            logger.info(f"加载已缓存的全局缩放器: 最大值={self.global_max_load:.2f}MW")
            return

        # Pull fresh global statistics from the database.
        self.global_max_load, self.global_min_load = self.get_global_load_stats(days)

        # Persist so other processes reuse the same scale factor.
        self.save_scaler()

        logger.info(f"全局缩放器初始化完成: 最大值={self.global_max_load:.2f}MW, "
                   f"最小值={self.global_min_load:.2f}MW")

    def normalize_load_data(self, data: np.ndarray) -> np.ndarray:
        """Normalize load data to roughly [0, 1] using the global maximum.

        Lazily fits the scaler on first use.  The result is clipped to
        [0, 2]: values above 1 are allowed so loads exceeding the historical
        maximum are not silently flattened.

        Args:
            data: load values in MW.

        Returns:
            Normalized array of the same shape.
        """
        if self.global_max_load is None:
            self.fit()

        # Single global scale factor keeps train/predict scales identical.
        normalized = data / self.global_max_load

        # Clip with headroom for extreme (above-historical-max) values.
        normalized = np.clip(normalized, 0.0, 2.0)

        logger.debug(f"负荷数据归一化: 原始范围[{data.min():.2f}, {data.max():.2f}] -> "
                    f"归一化范围[{normalized.min():.4f}, {normalized.max():.4f}]")

        return normalized

    def denormalize_load_data(self, normalized_data: np.ndarray) -> np.ndarray:
        """Invert normalize_load_data(): map normalized values back to MW.

        Lazily fits the scaler on first use.  Output is clipped to
        [0, 1.5 * global_max_load] to keep downstream consumers within a
        plausible load envelope.

        Args:
            normalized_data: values previously scaled by the global maximum.

        Returns:
            De-normalized array of the same shape, in MW.
        """
        if self.global_max_load is None:
            self.fit()

        denormalized = normalized_data * self.global_max_load

        # Cap at 150% of the historical maximum to bound runaway predictions.
        denormalized = np.clip(denormalized, 0.0, self.global_max_load * 1.5)

        logger.debug(f"负荷数据反归一化: 归一化范围[{normalized_data.min():.4f}, {normalized_data.max():.4f}] -> "
                    f"原始范围[{denormalized.min():.2f}, {denormalized.max():.2f}]")

        return denormalized

    def is_prediction_reasonable(self, predictions: np.ndarray) -> bool:
        """Check whether predictions fall inside a plausible load envelope.

        Returns True when the scaler is unfitted (nothing to validate
        against).  NOTE(review): an empty ``predictions`` array raises on
        .min()/.max() — callers are assumed to pass non-empty arrays.

        Args:
            predictions: predicted load values in MW.

        Returns:
            True if all values lie within [0.5 * min, 1.3 * max] of the
            historical extremes.
        """
        if self.global_max_load is None or self.global_min_load is None:
            return True  # cannot validate without fitted statistics

        pred_min = predictions.min()
        pred_max = predictions.max()

        # Envelope: 50% below the historical minimum, 30% above the maximum.
        reasonable_min = self.global_min_load * 0.5
        reasonable_max = self.global_max_load * 1.3

        is_reasonable = (pred_min >= reasonable_min) and (pred_max <= reasonable_max)

        if not is_reasonable:
            logger.warning(f"预测结果超出合理范围: 预测[{pred_min:.2f}, {pred_max:.2f}]MW, "
                          f"合理范围[{reasonable_min:.2f}, {reasonable_max:.2f}]MW")
        else:
            logger.info(f"预测结果在合理范围内: [{pred_min:.2f}, {pred_max:.2f}]MW")

        return is_reasonable

    def save_scaler(self) -> None:
        """Persist the fitted statistics to self.scaler_file (best-effort)."""
        try:
            os.makedirs(os.path.dirname(self.scaler_file), exist_ok=True)

            scaler_data = {
                'global_max_load': self.global_max_load,
                'global_min_load': self.global_min_load,
                'created_at': datetime.now().isoformat(),
                'version': '1.0'
            }

            with open(self.scaler_file, 'wb') as f:
                pickle.dump(scaler_data, f)

            logger.info(f"全局缩放器已保存: {self.scaler_file}")

        except Exception as e:
            # Non-fatal: the scaler still works in-memory without the cache.
            logger.error(f"保存全局缩放器失败: {e}")

    def load_scaler(self) -> bool:
        """Load cached statistics from self.scaler_file.

        NOTE: pickle is only acceptable here because the file is produced
        locally by save_scaler(); never point scaler_file at untrusted data.

        Returns:
            True if both extremes were restored, False otherwise.
        """
        try:
            if not os.path.exists(self.scaler_file):
                return False

            with open(self.scaler_file, 'rb') as f:
                scaler_data = pickle.load(f)

            self.global_max_load = scaler_data.get('global_max_load')
            self.global_min_load = scaler_data.get('global_min_load')

            # Reject incomplete caches so fit() falls back to the database.
            if self.global_max_load is None or self.global_min_load is None:
                return False

            created_at = scaler_data.get('created_at', 'unknown')
            logger.info(f"全局缩放器加载成功: 最大值={self.global_max_load:.2f}MW, "
                       f"创建时间={created_at}")

            return True

        except Exception as e:
            logger.error(f"加载全局缩放器失败: {e}")
            return False

    def get_stats(self) -> dict:
        """Return the scaler's current statistics as a plain dict.

        ``load_range`` is None unless BOTH extremes are known.  Explicit
        ``is None`` checks fix two defects of the old falsy check: a
        TypeError when only the minimum was set, and a 0.0 minimum being
        treated as "unknown" and collapsing the range to None.
        """
        if self.global_max_load is None or self.global_min_load is None:
            load_range = None
        else:
            load_range = self.global_max_load - self.global_min_load

        return {
            'global_max_load': self.global_max_load,
            'global_min_load': self.global_min_load,
            'scale_factor': self.global_max_load,
            'load_range': load_range
        }

# Module-level singleton shared by training and prediction code so both use
# the same scale factor (state is populated lazily via fit()).
global_scaler = GlobalScaler()