﻿using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using TorchSharp;
using static TorchSharp.torch;

namespace NgpSharp.RayMaching
{
    /// <summary>
    /// Custom autograd function that composites per-sample densities (sigmas) and
    /// colors (rgbs) along rays into per-ray accumulated transmittance weights,
    /// depth and RGB image via the native
    /// <c>composite_rays_train_forward</c>/<c>composite_rays_train_backward</c> kernels.
    /// (The enclosing namespace spells "RayMaching"; presumably "RayMarching" was
    /// intended — left as-is to avoid breaking callers.)
    /// </summary>
    public class CompositeRaysTrain : torch.autograd.MultiTensorFunction<CompositeRaysTrain>
    {
        /// <summary>
        /// Human-readable name of this autograd node.
        /// </summary>
        public override string Name => nameof(CompositeRaysTrain);

        /// <summary>
        /// Forward pass: launches the native compositing kernel over all samples.
        /// </summary>
        /// <param name="ctx">Autograd context used to stash tensors and scalars for <see cref="backward"/>.</param>
        /// <param name="vars">
        /// Positional inputs: [0] sigmas, [1] rgbs, [2] deltas, [3] rays, [4] T_thresh (float).
        /// M is taken from <c>sigmas.shape[0]</c> (total sample count) and N from
        /// <c>rays.shape[0]</c> (ray count). NOTE(review): exact tensor layouts are
        /// dictated by the native kernel — confirm against RayMachingLoader's binding.
        /// </param>
        /// <returns>Three tensors: weights_sum (N), depth (N), image (N, 3).</returns>
        public override List<torch.Tensor> forward(torch.autograd.AutogradContext ctx, params object[] vars)
        {
            using var c = torch.NewDisposeScope();
            Tensor sigmas = (Tensor)vars[0];
            Tensor rgbs = (Tensor)vars[1];
            Tensor deltas = (Tensor)vars[2];
            Tensor rays = (Tensor)vars[3];
            float T_thresh = (float)vars[4];

            long M = sigmas.shape[0];
            long N = rays.shape[0];

            // Outputs are fully written by the kernel, so uninitialized memory is fine.
            Tensor weights_sum = torch.empty(N, dtype: sigmas.dtype, device: sigmas.device);
            Tensor depth = torch.empty(N, dtype: sigmas.dtype, device: sigmas.device);
            Tensor image = torch.empty(N, 3, dtype: sigmas.dtype, device: sigmas.device);

            RayMachingLoader.composite_rays_train_forward(sigmas.Handle, rgbs.Handle, deltas.Handle, rays.Handle,
                (uint)M, (uint)N, T_thresh, weights_sum.Handle, depth.Handle, image.Handle);
            // Everything saved for backward must outlive this dispose scope.
            weights_sum = weights_sum.MoveToOuterDisposeScope();
            depth = depth.MoveToOuterDisposeScope();
            image = image.MoveToOuterDisposeScope();
            ctx.save_for_backward([
                sigmas.MoveToOuterDisposeScope(),
                rgbs.MoveToOuterDisposeScope(),
                deltas.MoveToOuterDisposeScope(),
                rays.MoveToOuterDisposeScope(),
                weights_sum,
                depth,
                image,
            ]);
            // Store as double, not float: float only represents integers exactly up
            // to 2^24, and M (total sample count) can exceed that during training,
            // which would silently corrupt the counts handed to the backward kernel.
            // double is exact for integers up to 2^53.
            ctx.save_data("dims", new double[] { M, N, T_thresh });

            return [weights_sum, depth, image];
        }

        /// <summary>
        /// Backward pass: computes gradients w.r.t. sigmas and rgbs via the native kernel.
        /// Gradients for deltas, rays and T_thresh are not propagated (returned as null).
        /// </summary>
        /// <param name="ctx">Autograd context populated by <see cref="forward"/>.</param>
        /// <param name="grad_outputs">
        /// Upstream gradients for [weights_sum, depth, image]. The depth gradient
        /// (index 1) is intentionally ignored — the native backward kernel only
        /// consumes grad_weights_sum and grad_image.
        /// </param>
        /// <returns>[grad_sigmas, grad_rgbs, null, null, null], matching the forward inputs.</returns>
        public override List<torch.Tensor> backward(torch.autograd.AutogradContext ctx, List<torch.Tensor> grad_outputs)
        {
            using var c = torch.NewDisposeScope();
            Tensor grad_weights_sum = grad_outputs[0];
            Tensor grad_image = grad_outputs[2];
            // The native kernel requires contiguous memory.
            grad_weights_sum = grad_weights_sum.contiguous();
            grad_image = grad_image.contiguous();

            // Fetch the saved-variable list once instead of once per tensor.
            var saved = ctx.get_saved_variables();
            using Tensor sigmas = saved[0];
            using Tensor rgbs = saved[1];
            using Tensor deltas = saved[2];
            using Tensor rays = saved[3];
            using Tensor weights_sum = saved[4];
            using Tensor depth = saved[5];
            using Tensor image = saved[6];

            double[] dims = (double[])ctx.get_data("dims");
            long M = (long)dims[0];
            long N = (long)dims[1];
            float T_thresh = (float)dims[2];

            // Gradients are accumulated by the kernel, so they must start zeroed.
            Tensor grad_sigmas = torch.zeros_like(sigmas);
            Tensor grad_rgbs = torch.zeros_like(rgbs);

            RayMachingLoader.composite_rays_train_backward(grad_weights_sum.Handle,
                grad_image.Handle, sigmas.Handle, rgbs.Handle, deltas.Handle,
                rays.Handle, weights_sum.Handle, image.Handle, (uint)M, (uint)N, T_thresh,
                grad_sigmas.Handle, grad_rgbs.Handle);

            foreach (var item in grad_outputs) item.Dispose();

            return [
                grad_sigmas.MoveToOuterDisposeScope(),
                grad_rgbs.MoveToOuterDisposeScope(),
                null, null, null];
        }
    }
}
