import torch
import torch.nn.functional as F

def kl_divergence_loss(teacher_outputs, student_outputs, temperature=3.0):
    """Compute the temperature-scaled KL-divergence distillation loss (Sec. 3.2.1).

    Args:
        teacher_outputs: teacher logits; softmax is taken over dim=1
            (assumes the class dimension is 1 — e.g. shape (batch, classes)).
        student_outputs: student logits, same shape as ``teacher_outputs``.
        temperature: softening temperature T. The loss is multiplied by T**2
            so gradient magnitudes remain comparable across temperatures.

    Returns:
        Scalar tensor: KL(teacher || student) averaged over the batch.
    """
    t = temperature
    # Teacher as a probability distribution, student in log-space, as
    # required by F.kl_div(input=log-probs, target=probs).
    teacher_probs = F.softmax(teacher_outputs / t, dim=1)
    student_log_probs = F.log_softmax(student_outputs / t, dim=1)
    loss = F.kl_div(student_log_probs, teacher_probs, reduction='batchmean')
    return loss * t * t

def aggregate_knowledge(clients, config):
    """Aggregate client knowledge via confidence-weighted fusion (Sec. 3.2.1).

    Each client's ``prediction_distribution`` is weighted by
    ``exp(beta * latest_accuracy)`` — clients with a higher most-recent
    accuracy contribute exponentially more — and the weighted sum is
    normalized by the total weight.

    Args:
        clients: non-empty iterable of client objects exposing
            ``accuracy_history`` (sequence; last entry is used — presumably a
            scalar tensor, since it is passed to ``torch.exp``) and
            ``prediction_distribution`` (tensor to be fused).
        config: mapping with key ``'beta'``, the confidence sharpness
            coefficient.

    Returns:
        Tensor: the normalized, confidence-weighted knowledge aggregate.

    Raises:
        ValueError: if ``clients`` is empty (the original code would have
            crashed with an opaque ``TypeError`` on ``None / weight``).
        KeyError: if ``config`` lacks ``'beta'``.
    """
    if not clients:
        raise ValueError("aggregate_knowledge requires at least one client")

    aggregated = None
    total_weight = 0.0
    for client in clients:
        # exp(beta * acc) keeps every weight strictly positive, so
        # total_weight can never be zero and the final division is safe.
        weight = torch.exp(config['beta'] * client.accuracy_history[-1])
        contribution = client.prediction_distribution * weight
        aggregated = contribution if aggregated is None else aggregated + contribution
        total_weight += weight

    return aggregated / total_weight
