﻿using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using TorchSharp;

namespace NgpSharp
{
    /// <summary>
    /// Custom autograd function wrapping a native spherical-harmonics (SH) encoder
    /// (forward/backward implemented by <c>SHEncoderLoader</c> native entry points).
    /// Encodes a batch of direction-like inputs of shape (B, input_dim) into
    /// SH coefficients of shape (B, degree^2).
    /// </summary>
    public class SHEncoderFunction : torch.autograd.SingleTensorFunction<SHEncoderFunction>
    {
        public override string Name => "SHEncoder";

        /// <summary>
        /// Forward pass. <paramref name="vars"/> is expected to be
        /// [0] inputs (torch.Tensor, shape (B, input_dim)),
        /// [1] degree (uint), [2] calc_grad_inputs (bool).
        /// Returns the encoded tensor of shape (B, degree^2).
        /// </summary>
        public override torch.Tensor forward(torch.autograd.AutogradContext ctx, params object[] vars)
        {
            using var c = torch.NewDisposeScope();
            torch.Tensor inputs = (torch.Tensor)vars[0];
            // The native kernel requires a contiguous memory layout.
            inputs = inputs.contiguous();
            uint degree = (uint)vars[1];
            bool calc_grad_inputs = (bool)vars[2];

            uint B = (uint)inputs.shape[0];
            uint input_dim = (uint)inputs.shape[1];
            // An SH basis of the given degree has degree^2 coefficients.
            // Use exact integer multiplication instead of Math.Pow (double pow +
            // lossy double->uint cast) for an integer square.
            uint output_dim = degree * degree;

            torch.Tensor outputs = torch.empty(B, output_dim, dtype: inputs.dtype, device: inputs.device);

            // dy_dx holds per-sample Jacobian entries written by the native forward
            // kernel so that backward can compute gradients w.r.t. the inputs.
            // Only allocated when the caller asked for input gradients.
            torch.Tensor? dy_dx = null;
            if (calc_grad_inputs)
                dy_dx = torch.empty(B, input_dim * output_dim, dtype: inputs.dtype, device: inputs.device);

            // IntPtr.Zero signals the native side to skip Jacobian caching.
            SHEncoderLoader.sh_encode_forward(inputs.Handle, outputs.Handle, B, input_dim, degree, dy_dx is null ? IntPtr.Zero : dy_dx.Handle);

            // Move saved tensors out of this dispose scope so they survive until backward.
            ctx.save_for_backward([inputs.MoveToOuterDisposeScope(), dy_dx?.MoveToOuterDisposeScope()]);
            ctx.save_data("dims", new uint[] { B, input_dim, degree });

            return outputs.MoveToOuterDisposeScope();
        }

        /// <summary>
        /// Backward pass. Returns gradients matching forward's vars:
        /// [grad_inputs, null (degree), null (calc_grad_inputs)].
        /// grad_inputs is null when forward was run without calc_grad_inputs
        /// (no dy_dx was saved).
        /// </summary>
        public override List<torch.Tensor> backward(torch.autograd.AutogradContext ctx, torch.Tensor grad_output)
        {
            using var c = torch.NewDisposeScope();
            var tensors = ctx.get_saved_variables();
            using var inputs = tensors[0]; using var dy_dx = tensors[1];
            var dims = (uint[])ctx.get_data("dims");
            if (dy_dx is not null)
            {
                // Native kernel requires contiguous gradients.
                using var output = grad_output.contiguous();
                uint B = dims[0]; var input_dim = dims[1]; var degree = dims[2];
                torch.Tensor grad_inputs = torch.zeros_like(inputs);
                SHEncoderLoader.sh_encode_backward(output.Handle, inputs.Handle, B, input_dim, degree, dy_dx.Handle, grad_inputs.Handle);
                // NOTE(review): explicitly disposing the incoming grad_output here —
                // presumably safe because autograd no longer needs it, but confirm
                // against TorchSharp's ownership contract for backward arguments.
                grad_output.Dispose();
                return [grad_inputs.MoveToOuterDisposeScope(), null, null];

            }
            else
            {
                // No Jacobian cached: input gradients were not requested in forward.
                grad_output.Dispose();
                return [null, null, null];
            }
        }
    }
}
