#!/usr/bin/env python3
# -*- coding: utf-8 -*-

"""
联邦学习客户端模块

功能：
1. 本地模型训练
2. 差分隐私保护
3. 模型更新压缩
4. 与服务器通信
"""

import torch
import torch.nn as nn
import torch.optim as optim
from typing import Dict
import numpy as np

class FederatedClient:
    """Federated learning client.

    Trains a local model on client-held data, optionally perturbing
    gradients for differential privacy, and packages the resulting model
    parameters (plus sample count) for aggregation by a federated server.
    """

    def __init__(self, client_id: int, model: nn.Module, config: Dict):
        """
        Initialize the client.

        Args:
            client_id: Unique identifier of this client.
            model: Local model architecture.
            config: Configuration dict. Recognized keys:
                learning_rate (float, default 0.01),
                momentum (float, default 0.9),
                dp_enabled (bool, default False),
                dp_sigma (float, default 0.1).
        """
        self.client_id = client_id
        self.local_model = model
        self.config = config
        self.optimizer = optim.SGD(
            self.local_model.parameters(),
            lr=config.get('learning_rate', 0.01),
            momentum=config.get('momentum', 0.9)
        )
        # Number of samples seen in the most recent train() call; reported
        # with the updates so the server can weight its aggregation.
        # (Fixes a NameError: _get_model_updates previously referenced an
        # undefined `train_loader` variable.)
        self._num_samples = 0

    def train(self, train_loader, epochs: int = 1):
        """
        Train the local model on client data.

        Args:
            train_loader: DataLoader over the client's local dataset.
            epochs: Number of local passes over the data.

        Returns:
            Dict with keys 'params' (name -> parameter tensor copies),
            'num_samples' (size of the local dataset), and 'client_id'.
        """
        self.local_model.train()
        # Record the dataset size so _get_model_updates() can report it.
        self._num_samples = len(train_loader.dataset)

        for _ in range(epochs):
            for data, target in train_loader:
                self.optimizer.zero_grad()
                output = self.local_model(data)
                loss = nn.functional.cross_entropy(output, target)
                loss.backward()

                # Perturb gradients BEFORE the optimizer step so the noise
                # actually influences the update. (Previously noise was added
                # after step() and then discarded by the next zero_grad().)
                if self.config.get('dp_enabled', False):
                    self._apply_differential_privacy()

                self.optimizer.step()

        return self._get_model_updates()

    def _apply_differential_privacy(self):
        """Add Gaussian noise to the current gradients.

        NOTE(review): full DP-SGD also clips per-sample gradient norms
        before adding noise; this implementation only adds noise.
        """
        sigma = self.config.get('dp_sigma', 0.1)
        with torch.no_grad():
            for param in self.local_model.parameters():
                # Parameters not touched by backward() have grad == None.
                if param.grad is not None:
                    param.grad += torch.randn_like(param.grad) * sigma

    def _get_model_updates(self):
        """
        Package the current model parameters for the server.

        Returns:
            Dict with 'params' (name -> detached parameter copies),
            'num_samples' (size of the last training dataset; 0 if
            train() has not been called), and 'client_id'.
        """
        updates = {
            name: param.data.clone()
            for name, param in self.local_model.named_parameters()
        }
        return {
            'params': updates,
            'num_samples': self._num_samples,
            'client_id': self.client_id
        }

    def evaluate(self, test_loader):
        """Evaluate the local model on a test set.

        Args:
            test_loader: DataLoader over the evaluation dataset.

        Returns:
            Dict with mean 'loss', 'accuracy' in [0, 1], and 'client_id'.
        """
        self.local_model.eval()
        total_loss = 0.0
        correct = 0
        total = 0

        with torch.no_grad():
            for data, target in test_loader:
                output = self.local_model(data)
                total_loss += nn.functional.cross_entropy(
                    output, target, reduction='sum'
                ).item()
                pred = output.argmax(dim=1, keepdim=True)
                correct += pred.eq(target.view_as(pred)).sum().item()
                total += target.size(0)

        # Guard against an empty loader to avoid ZeroDivisionError.
        if total == 0:
            return {'accuracy': 0.0, 'loss': 0.0, 'client_id': self.client_id}

        return {
            'accuracy': correct / total,
            'loss': total_loss / total,
            'client_id': self.client_id
        }