﻿using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using TorchSharp;
using static TorchSharp.torch;

namespace NgpSharp
{
    /// <summary>
    /// Custom autograd function bridging TorchSharp to a native multi-resolution
    /// hash-grid encoder (instant-ngp style) exposed through <c>HashEncoderLoader</c>.
    /// The forward pass encodes input positions into per-level hash-table features;
    /// the backward pass produces the gradient w.r.t. the embeddings table only.
    /// </summary>
    public class HashEncoderFunction : torch.autograd.SingleTensorFunction<HashEncoderFunction>
    {
        /// <summary>Display name of this autograd function.</summary>
        public override string Name => "HashEncoder";

        /// <summary>
        /// Forward pass: encodes positions into concatenated per-level hash-grid features.
        /// </summary>
        /// <param name="ctx">Autograd context used to stash tensors and scalars for backward.</param>
        /// <param name="vars">
        /// Positional arguments:
        /// [0] inputs (positions), [1] embeddings (hash table), [2] offset (per-level table offsets),
        /// [3] box_min, [4] box_max (bounding box), [5] grad_embeddings (gradient buffer returned by backward),
        /// [6] base_resolution (double), [7] b (per-level growth factor, double),
        /// [8] num_levels (int), [9] level_dim (int).
        /// </param>
        /// <returns>Feature tensor of shape [inputs.shape[0], num_levels * level_dim] on the input device.</returns>
        public override Tensor forward(torch.autograd.AutogradContext ctx, params object[] vars)
        {
            using var c = torch.NewDisposeScope();

            Tensor inputs = (Tensor)vars[0];
            Tensor embeddings = (Tensor)vars[1];
            Tensor offset = (Tensor)vars[2];
            Tensor box_min = (Tensor)vars[3];
            Tensor box_max = (Tensor)vars[4];
            Tensor grad_embeddings = (Tensor)vars[5];
            double base_resolution = (double)vars[6];
            double b = (double)vars[7];
            int num_levels = (int)vars[8];
            int level_dim = (int)vars[9];

            // Native kernel presumably requires a contiguous memory layout — matches the
            // equivalent call in backward; TODO confirm against the native signature.
            inputs = inputs.contiguous();

            // One feature row per input point, num_levels * level_dim entries wide.
            Tensor outputs = torch.empty(inputs.shape[0], num_levels * level_dim, device: inputs.device, dtype: embeddings.dtype);

            HashEncoderLoader.hash_encoder_forward(inputs.Handle, embeddings.Handle, offset.Handle,
                box_min.Handle, box_max.Handle, base_resolution, b, outputs.Handle, IntPtr.Zero);

            // Everything backward needs must outlive this dispose scope.
            ctx.save_for_backward([inputs.MoveToOuterDisposeScope(),
                embeddings.MoveToOuterDisposeScope(),
                offset.MoveToOuterDisposeScope(),
                box_min.MoveToOuterDisposeScope(),
                box_max.MoveToOuterDisposeScope(),
                grad_embeddings.MoveToOuterDisposeScope()
            ]);
            // Scalars are round-tripped through a List<double>; num_levels/level_dim are
            // stored for completeness but not consumed by backward.
            ctx.save_data("dims", new List<double>() { base_resolution, b, num_levels, level_dim });

            return outputs.MoveToOuterDisposeScope();
        }

        /// <summary>
        /// Backward pass: invokes the native kernel with the saved tensors and the incoming
        /// gradient, producing the gradient w.r.t. the embeddings table in the
        /// <c>grad_embeddings</c> buffer supplied at forward time (assumed to be written
        /// in place by the native kernel — TODO confirm). All other forward arguments
        /// receive a null gradient.
        /// </summary>
        /// <param name="ctx">Autograd context holding the tensors/scalars saved in forward.</param>
        /// <param name="grad">Incoming gradient w.r.t. the forward output.</param>
        /// <returns>One gradient slot per forward argument; only index 1 (embeddings) is non-null.</returns>
        public override List<Tensor> backward(torch.autograd.AutogradContext ctx, Tensor grad)
        {
            using var c = torch.NewDisposeScope();
            List<Tensor> tensors = ctx.get_saved_variables();
            using Tensor inputs = tensors[0];
            using Tensor embeddings = tensors[1];
            using Tensor offsets = tensors[2];
            using Tensor box_min = tensors[3];
            using Tensor box_max = tensors[4];
            // Deliberately not 'using': this tensor is returned to the caller below.
            Tensor grad_embeddings = tensors[5];
            List<double> dims = (List<double>)ctx.get_data("dims");
            double base_resolution = dims[0];
            double b = dims[1];
            // dims[2] (num_levels) and dims[3] (level_dim) are not needed by the backward kernel.

            // Native kernel presumably requires a contiguous memory layout; note this
            // rebinds 'grad', so the Dispose below releases the contiguous tensor.
            grad = grad.contiguous();

            HashEncoderLoader.hash_encoder_backward(grad.Handle, inputs.Handle, embeddings.Handle, offsets.Handle,
                box_min.Handle, box_max.Handle, base_resolution, b, grad_embeddings.Handle);

            // Early release; the enclosing dispose scope would reclaim it at method exit anyway.
            grad.Dispose();
            // Ten slots — one per forward var; only embeddings (index 1) gets a gradient.
            return [null, grad_embeddings.MoveToOuterDisposeScope(), null, null, null, null, null, null, null, null];
        }
    }
}
