import torch
import math
from torch import nn
from torch.autograd import Function
from torch.autograd.function import once_differentiable

from taa_core._C import pred_rand_gt

# Largest number of rows handed to the C extension in one call; bigger
# inputs are processed in MAX_ROW-sized chunks by AnchorMatcher.forward_spl.
MAX_ROW = 5000

class RandMatchGT(Function):
    """Autograd wrapper around the compiled ``pred_rand_gt`` op.

    The op is treated as non-differentiable: nothing is saved in
    ``forward`` and ``backward`` propagates no gradients.
    """

    @staticmethod
    def forward(ctx, pred_gt_loss, non_label, ratio):
        # Dispatch straight to the compiled kernel; ctx is unused because
        # no tensors are needed for the (gradient-free) backward pass.
        return pred_rand_gt(pred_gt_loss, non_label, ratio)

    @staticmethod
    @once_differentiable
    def backward(ctx, grad_outputs):
        # backward() must return one gradient per forward() input
        # (pred_gt_loss, non_label, ratio).  Returning a single None would
        # make autograd raise "expected 3 gradients, got 1" if this path
        # were ever invoked.
        return None, None, None


rand_match_gt = RandMatchGT.apply


class AnchorMatcher(nn.Module):
    """Runs the ``rand_match_gt`` op over a prediction/ground-truth loss matrix.

    Args:
        mode: ``"sgl"`` runs the op on the whole matrix in one call;
            ``"spl"`` splits the rows into MAX_ROW-sized chunks first
            (for matrices too large for a single kernel call).
    """

    def __init__(self, mode="sgl"):
        super().__init__()
        self.mode = mode

    def forward_spl(self, pred_gt_loss, non_label, ratio=0.0):
        """Chunked matching: process the rows MAX_ROW at a time.

        Returns the concatenated per-chunk results (all but the trailing
        element of each kernel output) and a combined trailing flag that is
        truthy only when every chunk's flag was set.
        """
        assert pred_gt_loss.shape == non_label.shape
        # Match forward_sgl's contiguity guarantee before calling the C
        # kernel; chunks split along dim 0 of a contiguous tensor remain
        # contiguous, so one call per tensor suffices.
        pred_gt_loss = pred_gt_loss.contiguous().split(MAX_ROW, dim=0)
        non_label = non_label.contiguous().split(MAX_ROW, dim=0)
        results = []
        _mins = []
        for loss, mask in zip(pred_gt_loss, non_label):
            assert loss.shape == mask.shape
            out = rand_match_gt(loss, mask, ratio)
            # Each kernel call returns the match result plus a trailing
            # element (presumably a 0/1 status flag — TODO confirm against
            # the kernel implementation).
            results.append(out[:-1])
            _mins.append(out[-1])
        inds = torch.cat(results, dim=0)
        # Truthy iff every chunk's trailing flag contributed 1 to the sum.
        _mins = sum(_mins) == len(_mins)
        return inds, _mins

    def forward_sgl(self, pred_gt_loss, non_label, ratio=0.0):
        """Single-shot matching over the full loss matrix."""
        assert pred_gt_loss.shape == non_label.shape
        pred_gt_loss = pred_gt_loss.contiguous()
        non_label = non_label.contiguous()

        out = rand_match_gt(pred_gt_loss, non_label, ratio)
        return out[:-1], out[-1]

    def forward(self, pred_gt_loss, non_label, ratio=0.0):
        """Dispatch to the strategy selected by ``self.mode``."""
        if self.mode == "sgl":
            return self.forward_sgl(pred_gt_loss, non_label, ratio)
        elif self.mode == "spl":
            return self.forward_spl(pred_gt_loss, non_label, ratio)
        else:
            raise NotImplementedError(f"unknown mode: {self.mode!r}")
