import torch
from torch import nn
from torch.autograd  import Function
from ._C import p2e_logproba_forward, p2e_logproba_backward
  
  
class _p2e_logproba(Function):
    """Autograd bridge to the C-extension point-to-ellipse log-probability kernels.

    Forward/backward delegate the actual computation to
    ``p2e_logproba_forward`` / ``p2e_logproba_backward`` from ``._C``.
    """

    @staticmethod
    def forward(ctx, sxy, oxy, invcov, logdet_invcov, fids=None):
        """Compute per-sample log-probabilities against ellipse kernels.

        Args:
            sxy: (face_num, sample_num, 2) sample xy coordinates.
            oxy: (kernel_num, 2) ellipse center xy coordinates.
            invcov: (kernel_num, 4) flattened 2x2 inverse covariances.
            logdet_invcov: (kernel_num,) log-determinant of each inverse covariance.
            fids: (kernel_num,) int32 face index per kernel; defaults to
                ``0..kernel_num-1`` — presumably one kernel per face (TODO confirm).

        Returns:
            diff_logproba tensor produced by the C kernel.
        """
        if fids is None:
            # Bug fix: the original `arange(...).to(torch.int32, device=...)`
            # mixed the dtype-positional overload of Tensor.to with a `device`
            # keyword, which raises TypeError. Build with the right dtype and
            # device in a single call instead.
            fids = torch.arange(oxy.size(0), dtype=torch.int32, device=oxy.device)
        ctx.save_for_backward(sxy, oxy, invcov, logdet_invcov)
        diff_logproba, max_idx = p2e_logproba_forward(sxy, oxy, invcov, logdet_invcov, fids)
        # fids / max_idx are int index tensors not needing gradients, so they
        # are stashed on ctx directly rather than via save_for_backward.
        ctx.fids = fids
        ctx.max_idx = max_idx
        return diff_logproba

    @staticmethod
    def backward(ctx, grad_output):
        """Propagate grad_output to the four differentiable forward inputs."""
        sxy, oxy, invcov, logdet_invcov = ctx.saved_tensors
        fids = ctx.fids
        max_idx = ctx.max_idx
        grad_sxy, grad_oxy, grad_invcov, grad_logdet_invcov = p2e_logproba_backward(
            grad_output, sxy, oxy, invcov, logdet_invcov, fids, max_idx
        )
        # Bug fix: forward takes 5 inputs (sxy, oxy, invcov, logdet_invcov,
        # fids), so backward must return exactly 5 gradients; the original
        # returned 6 (two trailing Nones), which makes autograd raise
        # "returned an incorrect number of gradients". fids is an int index
        # tensor, hence its gradient slot is None.
        return grad_sxy, grad_oxy, grad_invcov, grad_logdet_invcov, None