from typing import List, Optional, Dict
from torch import nn, tensor, float32
import torch.nn.functional as F
import loss


class CombinedLoss(nn.Module):
    """Weighted sum of several loss functions.

    Each loss module is paired with a scalar weight; ``forward`` returns the
    weighted total together with the detached per-loss values for logging.
    """

    def __init__(self, loss_instances, loss_weights, repeat_inputs=False):
        """
        Args:
            loss_instances: either a list of dicts mapping a ``torch.nn``
                loss-class name to its constructor kwargs
                (e.g. ``{'MSELoss': {'reduction': 'sum'}}``), or a list of
                string identifiers understood by ``_create_loss``
                (currently only ``'mse'``).
            loss_weights: per-loss scalar weights; must match
                ``loss_instances`` in length.
            repeat_inputs: if True, every loss receives ``inputs[0]``;
                otherwise loss ``i`` receives ``inputs[i]``.

        Raises:
            ValueError: if either argument is None, the lengths differ,
                or a string identifier is unknown.
        """
        super().__init__()
        # Validate with real exceptions — `assert` is stripped under `python -O`.
        if loss_instances is None or loss_weights is None:
            raise ValueError("loss_instances and loss_weights must not be None")
        if len(loss_instances) != len(loss_weights):
            raise ValueError("loss_instances and loss_weights must have the same length")

        # The length check above already guarantees the weights match, so a
        # plain defensive copy suffices (the old "default to 1.0" branch was dead).
        self.loss_weights = list(loss_weights)

        if loss_instances and isinstance(loss_instances[0], dict):
            # {class_name: kwargs} entries are resolved against torch.nn.
            self.loss_instances = nn.ModuleList([
                getattr(nn, name)(**opt)
                for inst in loss_instances
                for name, opt in inst.items()
            ])
        else:
            # String identifiers (guard above also handles an empty list,
            # which previously raised IndexError on loss_instances[0]).
            self.loss_instances = nn.ModuleList(
                [self._create_loss(loss_type) for loss_type in loss_instances]
            )

        self.loss_names = [inst.__class__.__name__ for inst in self.loss_instances]
        self.repeat_inputs = repeat_inputs

    def _create_loss(self, loss_type):
        """Map a string identifier to a loss module; raise on unknown names."""
        if loss_type == 'mse':
            return nn.MSELoss(reduction='mean')
        # Previously this fell through and returned None, which later failed
        # inside nn.ModuleList with a confusing error message.
        raise ValueError(f"unknown loss type: {loss_type!r}")

    def forward(self, inputs):
        """Compute the weighted total of all configured losses.

        Args:
            inputs: a sequence of argument tuples, one per loss; when
                ``repeat_inputs`` is True only ``inputs[0]`` is used and is
                fed to every loss.

        Returns:
            (total_loss, loss_vals): the weighted scalar total and a list of
            detached per-loss values (for logging/monitoring).
        """
        loss_vals = []
        # Accumulate lazily so the running total inherits the device/dtype of
        # the first loss value — a CPU-initialized tensor(0) accumulator broke
        # CUDA runs (the original .to(device) call was commented out).
        total_loss = None
        for i, loss_fn in enumerate(self.loss_instances):
            loss_input = inputs[0] if self.repeat_inputs else inputs[i]
            loss_val = loss_fn(*loss_input)
            weighted = self.loss_weights[i] * loss_val
            total_loss = weighted if total_loss is None else total_loss + weighted
            loss_vals.append(loss_val.detach())

        if total_loss is None:
            # No losses configured: preserve the original zero-tensor result.
            total_loss = tensor(0.0, dtype=float32)
        return total_loss, loss_vals

    def __len__(self):
        return len(self.loss_instances)
