from abc import abstractmethod
import torch.nn as nn
from torchmetrics.functional import accuracy
from ..builder import build_loss
from utils import get_logger

# Module-level logger. NOTE(review): the logger name "paddlevideo" suggests this
# file was ported from PaddleVideo to PyTorch — confirm the name is intentional.
logger = get_logger("paddlevideo")


class BaseHead(nn.Module):
    """Base class for head part.

    All heads should subclass it.
    All subclasses should overwrite:

    - Methods: ``init_weights``, initializing weights.
    - Methods: ``forward``, forward function.

    Args:
        num_classes (int): The number of classes to be classified.
        in_channels (int): The number of channels in input feature.
        loss_cfg (dict): Config for building loss.
            Default: ``dict(name='CrossEntropyLoss')``.
        ls_eps (float): Label smoothing epsilon. Default: 0.
    """
    def __init__(
        self,
        num_classes,
        in_channels,
        # Use a None sentinel instead of a mutable dict default (shared
        # across calls); the effective default is unchanged.
        loss_cfg=None,  #TODO(shipping): only pass a name or standard build cfg format.
        #multi_class=False, NOTE(shipping): not supported now.
        ls_eps=0.):

        super().__init__()
        self.num_classes = num_classes
        self.in_channels = in_channels

        if loss_cfg is None:
            loss_cfg = dict(name="CrossEntropyLoss")
        self.loss_func = build_loss(loss_cfg)
        self.ls_eps = ls_eps
        #self.multi_class = multi_class NOTE(shipping): not supported now

    @abstractmethod
    def forward(self, x):
        """Define how the head is going to run.

        Raises:
            NotImplementedError: always; subclasses must override this method.
        """
        # ``raise NotImplemented`` (the original code) raises a TypeError,
        # since NotImplemented is a sentinel value, not an exception class.
        raise NotImplementedError

    def loss(self, scores, labels, valid_mode=False, if_top5=True, **kwargs):
        """Compute the loss and accuracy metrics for a batch.

        Args:
            scores: classification logits, shape ``[N, C]``.
            labels: either ``[labels]`` (common case, no mixup) or
                ``[labels_a, labels_b, lam]`` (mixup, train phase only).
            valid_mode (bool): if True, label smoothing is skipped.
            if_top5 (bool): if True, also report top-5 accuracy.
            **kwargs: ignored; accepted for interface compatibility.

        Returns:
            dict: with keys ``'loss'``, ``'Acc@1'`` and optionally ``'Acc@5'``.

        Raises:
            NotImplementedError: if ``labels`` has a length other than 1 or 3.
        """
        outputs = dict()
        if len(labels) == 1:  # common case, without mixup
            labels = labels[0]

            if self.ls_eps != 0. and not valid_mode:  # train phase: label smoothing
                loss = self.loss_func(scores, labels, self.ls_eps)
            else:  # valid
                # pytorch CrossEntropyLoss inputs: scores shape:[N,C],labels shape[N]
                loss = self.loss_func(scores, labels)
            # TorchMetrics accuracy inputs: scores shape:[N,C], labels shape:[N,] or [N,1]
            top1 = accuracy(scores, labels, top_k=1)
            outputs['loss'] = loss
            outputs['Acc@1'] = top1
            if if_top5:
                top5 = accuracy(scores, labels, top_k=5)
                outputs['Acc@5'] = top5
            return outputs

        elif len(labels) == 3:  # mixup, only in train phase
            labels_a, labels_b, lam = labels
            lam = lam[0]  # get lam value
            if self.ls_eps != 0.:
                loss_a = self.loss_func(scores, labels_a, self.ls_eps)
                loss_b = self.loss_func(scores, labels_b, self.ls_eps)
            else:
                loss_a = self.loss_func(scores, labels_a)
                loss_b = self.loss_func(scores, labels_b)
            # Mixup: metrics and loss are the lam-weighted blend of both targets.
            loss = lam * loss_a + (1 - lam) * loss_b

            top1a = accuracy(scores, labels_a, top_k=1)
            top1b = accuracy(scores, labels_b, top_k=1)
            top1 = lam * top1a + (1 - lam) * top1b

            outputs['loss'] = loss
            outputs['Acc@1'] = top1
            if if_top5:
                top5a = accuracy(scores, labels_a, top_k=5)
                top5b = accuracy(scores, labels_b, top_k=5)
                top5 = lam * top5a + (1 - lam) * top5b
                outputs['Acc@5'] = top5
            return outputs
        else:
            # Same fix as forward(): NotImplementedError is the raisable form.
            raise NotImplementedError




