﻿using ICSharpCode.SharpZipLib.BZip2;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using TorchSharp;
using TorchSharp.Modules;
using static TorchSharp.torch;

namespace NgpSharp
{
    /// <summary>
    /// Multi-resolution hash-grid encoder (Instant-NGP style). Each of
    /// <c>num_levels</c> levels owns a hash table of 2^<c>log2_hashmap_size</c> entries
    /// holding <c>level_dim</c> feature channels; level resolutions grow geometrically
    /// from <c>base_resolution</c> to <c>finest_resolution</c>.
    /// </summary>
    public class GridEncoder : nn.Module<Tensor, Tensor, Tensor>
    {
        /// <summary>Total encoded channels produced per input point: num_levels * level_dim.</summary>
        public int OutDim { get; private set; }

        private int input_dim;          // coordinate dimensionality, 2 or 3
        private int num_levels;         // number of resolution levels
        private int level_dim;          // feature channels per level
        private int log2_hashmap_size;  // log2 of hash-table entries per level
        private int base_resolution;    // resolution of the coarsest level
        private double b;               // per-level geometric growth factor ("b" in the Instant-NGP paper)

        /// <summary>Learnable hash-table features, shape (total_entries, level_dim).</summary>
        public Parameter embeddings { get; private set; }

        /// <summary>
        /// Builds the encoder: computes the per-level growth factor, the flat offset table
        /// into the shared embedding storage, and initializes the embeddings uniformly
        /// in [-1e-4, 1e-4].
        /// </summary>
        /// <param name="name">Module name passed to the TorchSharp base module.</param>
        /// <param name="input_dim">Coordinate dimensionality (2 or 3).</param>
        /// <param name="num_levels">Number of resolution levels; must be >= 1.</param>
        /// <param name="level_dim">Feature channels stored per level; must be >= 1.</param>
        /// <param name="base_resolution">Grid resolution of the coarsest level; must be >= 1.</param>
        /// <param name="finest_resolution">Grid resolution of the finest level; must be >= 1.</param>
        /// <param name="log2_hashmap_size">log2 of the number of hash entries per level.</param>
        /// <exception cref="ArgumentOutOfRangeException">Thrown when a size/resolution argument is not positive.</exception>
        public GridEncoder(string name, int input_dim = 3,
            int num_levels = 16, int level_dim = 2,
            int base_resolution = 16, int finest_resolution = 512, int log2_hashmap_size = 19)
            : base(name)
        {
            if (num_levels < 1) throw new ArgumentOutOfRangeException(nameof(num_levels), "must be >= 1");
            if (level_dim < 1) throw new ArgumentOutOfRangeException(nameof(level_dim), "must be >= 1");
            if (base_resolution < 1) throw new ArgumentOutOfRangeException(nameof(base_resolution), "must be >= 1");
            if (finest_resolution < 1) throw new ArgumentOutOfRangeException(nameof(finest_resolution), "must be >= 1");

            this.input_dim = input_dim; //coord dims, 2 or 3
            this.num_levels = num_levels; //num levels, each level multiply resolution by b
            this.level_dim = level_dim; //encode channels per level
            this.log2_hashmap_size = log2_hashmap_size;
            this.base_resolution = base_resolution;
            this.OutDim = num_levels * level_dim;

            // Geometric growth factor so level resolutions span [base, finest].
            // With a single level there is no growth; the original expression would
            // divide by zero and poison b with NaN/Infinity.
            b = num_levels > 1
                ? Math.Exp((Math.Log(finest_resolution) - Math.Log(base_resolution)) / (num_levels - 1))
                : 1.0;

            // Every level gets a full-size hash table of 2^log2_hashmap_size entries;
            // offsets[i] is the starting row of level i, plus one trailing end sentinel.
            List<int> offsets = [];
            int offset = 0;
            double max_params = Math.Pow(2, log2_hashmap_size);
            for (int i = 0; i < num_levels; i++)
            {
                offsets.Add(offset);
                offset += (int)max_params;
            }
            offsets.Add(offset);

            torch.Tensor tOffsets = torch.from_array(offsets.ToArray(), dtype: torch.int32);
            this.register_buffer("offsets", tOffsets);

            // Scratch gradient storage for the custom autograd function; zeroed in forward().
            Tensor grad_embeddings = torch.empty(offset, level_dim);
            this.register_buffer("grad_embeddings", grad_embeddings);

            // Small uniform init as in the Instant-NGP reference implementation.
            Tensor embedding = torch.empty(offset, level_dim);
            embedding.uniform_(-1e-4, 1e-4);
            embeddings = torch.nn.Parameter(embedding);
            RegisterComponents();
        }

        /// <summary>
        /// Encodes coordinates <paramref name="x"/> (last dim = input_dim) into multi-level
        /// hash-grid features via the custom <c>HashEncoderFunction</c>.
        /// </summary>
        /// <param name="x">Input coordinates; clamped into the AABB if any lie outside it.</param>
        /// <param name="aabb">Axis-aligned bounding box laid out as [min..., max...] along one dim.</param>
        /// <returns>Encoded features produced by <c>HashEncoderFunction</c>.</returns>
        public override torch.Tensor forward(torch.Tensor x, Tensor aabb)
        {
            using var c = torch.NewDisposeScope();
            uint channel = (uint)x.shape[^1];
            // aabb packs the box minimum in its first `channel` entries and the maximum in the next.
            Tensor box_min = aabb[TensorIndex.Slice(0, channel)];
            Tensor box_max = aabb[TensorIndex.Slice(channel, channel * 2)];
            if (!torch.all(x <= box_max).ToBoolean() || !torch.all(x >= box_min).ToBoolean())
            {
                x = torch.clamp(x, min: box_min, max: box_max);
            }

            var offset = this.get_buffer("offsets").to(x.device);
            // NOTE(review): .to(x.device) returns a copy when the buffer lives on another
            // device, so zero_() then zeroes the copy rather than the registered buffer.
            // That copy is what gets handed to HashEncoderFunction, which appears intended,
            // but it reallocates per call on device mismatch — confirm against HashEncoderFunction.
            var grad_embeddings = get_buffer("grad_embeddings").to(x.device).zero_();
            double base_resolution = this.base_resolution;
            double b = this.b;
            int num_levels = this.num_levels;
            int level_dim = this.level_dim;

            torch.Tensor outputs = HashEncoderFunction.apply(x, embeddings, offset,
                box_min, box_max, grad_embeddings, base_resolution, b, num_levels, level_dim);

            return outputs.MoveToOuterDisposeScope();
        }
    }
}
