from __future__ import print_function, division

import torch
from torch import nn
from torch.nn import functional as F
from torch.autograd import Variable

from config import config

class focalLoss(nn.Module):
    """
    Focal loss for RetinaNet.

    Combines a smooth-L1 regression loss over positive anchors with the
    "alternative" focal loss over classification scores, normalized by the
    number of positive anchors.
    """
    def __init__(self, num_classes=config.dataset.num_classes):
        super(focalLoss, self).__init__()
        self._num_classes   = num_classes

    def _one_hot_embedding(self, labels, num_classes):
        """
        One-hot encode integer labels.
        Input:
            labels:      LongTensor of class indices, sized [N,]
            num_classes: number of classes D
        Return:
            FloatTensor sized [N, D]
        """
        eye = torch.eye(num_classes)        # [D, D] identity; row i is one-hot for class i
        return eye[labels.data.cpu()]       # index rows -> [N, D]

    def _focal_loss_alt(self, x, y):
        """
        Alternative focal loss formulation:
            FL(x_t) = -w * log(sigmoid(2*x_t + 1)) / 2
        where x_t = x for the target class and -x otherwise, and
        w = alpha for positives, (1 - alpha) for negatives.
        Input:
            x: predicted class scores (logits), sized [N, D]
            y: target labels in {0..D} (0 = background), sized [N,]
        Return:
            scalar classification loss (summed over all entries)
        """
        alpha   = 0.25
        # one-hot with a background column, then drop column 0 so that a
        # background target (y == 0) maps to an all-zero row.
        t       = self._one_hot_embedding(y, 1 + self._num_classes)
        t       = Variable(t[:, 1:])
        xt      = x * (2 * t - 1)           # xt = x if t > 0 else -x
        pt      = (2 * xt + 1).sigmoid()
        w       = alpha * t + (1 - alpha) * (1 - t)
        loss    = -w * pt.log() / 2
        return loss.sum()

    def forward(self, loc_pred, loc_target, cls_pred, cls_target):
        """
        Input:(tensor or variable)
            loc_pred:   predicted locations, sized [batch_size, #anchors, 4]
            loc_target: encoded target locations, sized [batch_size, #anchors, 4]
            cls_pred:   predicted class confidences, sized [batch_size, #anchors, #classes]
            cls_target: encoded target labels, sized [batch_size, #anchors]
                        (0 = background, -1 = ignored, >0 = object class)
        Return:
            scalar loss = (loc_loss + cls_loss) / max(#positives, 1)
        """
        batch_size, num_boxes   = cls_target.size()[0:2]
        pos_index               = cls_target > 0                 # anchors assigned to an object
        num_pos                 = pos_index.data.long().sum()

        # location loss for positive sample patches only
        mask                    = pos_index.unsqueeze(2).expand_as(loc_pred)
        masked_loc_pred         = loc_pred[mask].view(-1, 4)
        masked_loc_target       = loc_target[mask].view(-1, 4)
        _loc_loss               = F.smooth_l1_loss(masked_loc_pred, masked_loc_target, size_average=False)

        # classification loss over positives + negatives; label -1 marks
        # ignored anchors and is excluded from the loss.
        posneg_index            = cls_target > -1
        mask                    = posneg_index.unsqueeze(2).expand_as(cls_pred)
        masked_cls_pred         = cls_pred[mask].view(-1, self._num_classes)
        _cls_loss               = self._focal_loss_alt(masked_cls_pred, cls_target[posneg_index])

        # normalize by the positive-anchor count; guard against images
        # that contain no positive anchors (division by zero).
        _loss   = (_loc_loss + _cls_loss) / max(float(num_pos), 1.0)
        return _loss
