import torch
import numpy as np
from typing import Dict, Any, List
import logging
import time
from pathlib import Path

# Import shared helpers from the top-level utils.py
from ....utils import save_model, load_model, preprocess_data

def calculate_model_difference(
    model1: Dict[str, torch.Tensor], 
    model2: Dict[str, torch.Tensor]
) -> Dict[str, float]:
    """Compute the per-layer mean absolute difference between two models.

    Args:
        model1: State dict of the first model.
        model2: State dict of the second model; must contain every key
            present in ``model1``.

    Returns:
        Mapping from parameter name to the mean absolute elementwise
        difference of the corresponding tensors.
    """
    return {
        name: (model1[name] - model2[name]).abs().mean().item()
        for name in model1
    }

def federated_average(
    client_weights: List[Dict[str, torch.Tensor]],
    sample_sizes: List[int]
) -> Dict[str, torch.Tensor]:
    """Aggregate client models with FedAvg (sample-size-weighted mean).

    Args:
        client_weights: One state dict per client; all dicts must share
            the same keys and tensor shapes.
        sample_sizes: Number of training samples per client, aligned
            with ``client_weights``.

    Returns:
        Global state dict where each tensor is the sample-weighted
        average of the clients' tensors.

    Raises:
        ValueError: If no clients are given, the two lists differ in
            length, or the total sample count is not positive (which
            would otherwise cause a division by zero).
    """
    if not client_weights:
        raise ValueError("client_weights must not be empty")
    if len(client_weights) != len(sample_sizes):
        raise ValueError(
            "client_weights and sample_sizes must have the same length"
        )
    total_samples = sum(sample_sizes)
    if total_samples <= 0:
        raise ValueError("total sample count must be positive")

    averaged_weights = {}
    for key in client_weights[0].keys():
        weighted_sum = torch.zeros_like(client_weights[0][key])
        # Weight each client's tensor by its sample count before averaging
        for weights, n_samples in zip(client_weights, sample_sizes):
            weighted_sum += weights[key] * n_samples
        averaged_weights[key] = weighted_sum / total_samples

    return averaged_weights

def clip_gradients(
    model: torch.nn.Module, 
    max_norm: float = 1.0
) -> torch.Tensor:
    """Clip the model's gradients in place to prevent gradient explosion.

    Args:
        model: Model whose parameter gradients are clipped.
        max_norm: Maximum allowed total gradient norm.

    Returns:
        The total gradient norm *before* clipping (as computed by
        ``clip_grad_norm_``), useful for logging/monitoring. Existing
        callers that ignore the return value are unaffected.
    """
    # clip_grad_norm_ already computes the pre-clip norm; surface it
    # instead of discarding it.
    return torch.nn.utils.clip_grad_norm_(model.parameters(), max_norm)

def add_differential_privacy(
    model: Dict[str, torch.Tensor],
    noise_scale: float = 0.01
) -> Dict[str, torch.Tensor]:
    """Add Gaussian noise to every tensor of a state dict (DP-style).

    Args:
        model: Model state dict; the input tensors are not modified.
        noise_scale: Standard-deviation multiplier for the noise.

    Returns:
        New state dict with elementwise Gaussian noise added.
    """
    return {
        name: tensor + noise_scale * torch.randn_like(tensor)
        for name, tensor in model.items()
    }

def measure_communication_cost(
    model_state: Dict[str, torch.Tensor]
) -> int:
    """Measure the communication cost of a model state, in bytes.

    Args:
        model_state: Model state dict.

    Returns:
        Total payload size in bytes (element count times element size,
        summed over all tensors).
    """
    return sum(
        tensor.numel() * tensor.element_size()
        for tensor in model_state.values()
    )

def setup_federated_logging(log_dir: str = "logs") -> logging.Logger:
    """Configure and return the logger dedicated to federated learning.

    Creates ``log_dir`` if needed and attaches a file handler
    (``federated.log``) plus a console handler. Safe to call more than
    once: handlers are attached only on the first call, so repeated
    calls no longer duplicate every log line.

    Args:
        log_dir: Directory where the log file is written.

    Returns:
        The configured "FederatedLearning" logger.
    """
    log_path = Path(log_dir)
    # exist_ok avoids the check-then-create race of exists()+mkdir()
    log_path.mkdir(parents=True, exist_ok=True)

    logger = logging.getLogger("FederatedLearning")
    logger.setLevel(logging.INFO)

    # getLogger returns the same object on every call; only attach
    # handlers the first time to avoid duplicated output.
    if not logger.handlers:
        # File handler
        file_handler = logging.FileHandler(log_path / "federated.log")
        file_handler.setFormatter(logging.Formatter(
            '%(asctime)s - %(name)s - %(levelname)s - %(message)s'
        ))

        # Console handler
        console_handler = logging.StreamHandler()
        console_handler.setFormatter(logging.Formatter(
            '%(name)s - %(levelname)s: %(message)s'
        ))

        logger.addHandler(file_handler)
        logger.addHandler(console_handler)

    return logger