﻿using SixLabors.ImageSharp;
using SixLabors.ImageSharp.PixelFormats;
using SixLabors.ImageSharp.Processing;
using System;
using System.Collections;
using System.Collections.Generic;
using System.Linq;
using System.Numerics;
using System.Runtime.InteropServices;
using System.Text;
using System.Text.Json;
using System.Threading.Tasks;
using TorchSharp;
using TorchSharp.Modules;
using static Tensorboard.TensorShapeProto.Types;
using static TorchSharp.torch;

namespace NgpSharp
{
    /// <summary>
    /// A NeRF training dataset loaded from an instant-ngp style <c>transforms.json</c>:
    /// images as a [N, H, W, 4] float tensor, poses as [N, 4, 4] camera-to-world matrices.
    /// Instances are created via <see cref="LoadFromTransformsJson"/> and consumed through
    /// <see cref="ToDataLoader"/>. Requires CUDA.
    /// </summary>
    public class NeRFDataset : torch.utils.data.Dataset
    {
        /// <summary>
        /// Number of samples exposed to the data loader (one sample per image).
        /// </summary>
        public override long Count 
        { 
            get => ImagesLength; 
        }

        /// <summary>
        /// Dataset split this instance represents (train / val / test).
        /// </summary>
        public NeRFDatasetType Type { get; set; }
        /// <summary>
        /// Source layout of the dataset (Blender-style or Colmap-style transforms).
        /// </summary>
        public NeRFDatasetMode Mode { get; set; }
        /// <summary>
        /// Device tensors are intended for; always CUDA:0 (enforced in the constructor).
        /// </summary>
        public Device Device { get; private set; }
        /// <summary>
        /// Mean distance of the (scaled, offset) camera centers from the origin.
        /// </summary>
        public double Radius { get; private set; }
        /// <summary>
        /// Resolution divisor applied to the images and the camera intrinsics.
        /// </summary>
        public double Downscale { get; set; } = 1;
        /// <summary>
        /// Camera position scale factor; defaults to 0.33 as recommended by the paper.
        /// </summary>
        public double CameraScale { get; set; } = 0.33;
        /// <summary>
        /// Camera position offset; defaults to 0.5 as recommended by the paper.
        /// </summary>
        public Vector3 CameraOffset { get; set; } = new Vector3(0.5f);
        /// <summary>
        /// Image height in pixels (after downscale).
        /// </summary>
        public int Height { get; private set; }
        /// <summary>
        /// Image width in pixels (after downscale).
        /// </summary>
        public int Width { get; private set; }
        /// <summary>
        /// Number of images in the dataset.
        /// </summary>
        public int ImagesLength { get; private set; }
        /// <summary>
        /// Number of rays sampled per image per batch (training only).
        /// </summary>
        public int NumRays { get; set; } = 128;
        /// <summary>
        /// When true, <see cref="GetTensor"/> subsamples <see cref="NumRays"/> random rays
        /// and gathers the matching pixels; otherwise full images and all rays are returned.
        /// </summary>
        public bool Training { get; set; }
        /// <summary>
        /// Batch size used by <see cref="ToDataLoader"/>.
        /// </summary>
        public int BatchSize { get; set; } = 1;
        /// <summary>
        /// Minimum corner of the dataset bounding box (computed by CalculateAABB).
        /// </summary>
        public Vector3 BoxMin { get; private set; }
        /// <summary>
        /// Maximum corner of the dataset bounding box (computed by CalculateAABB).
        /// </summary>
        public Vector3 BoxMax { get; private set; }
        /// <summary>
        /// Camera near plane distance.
        /// </summary>
        public float Near { get; set; }
        /// <summary>
        /// Camera far plane distance.
        /// </summary>
        public float Far { get; set; }
        /// <summary>
        /// Bounding box scale factor (overridden by aabb_scale in the json when present).
        /// </summary>
        public float AABBScale { get; set; } = 1;

        // Pinhole intrinsics (after downscale): focal lengths and principal point.
        public float fl_x { get; private set; }
        public float fl_y { get; private set; }
        public float cx { get; private set; }
        public float cy { get; private set; }
        /// <summary>
        /// All images stacked as a [N, H, W, 4] float32 RGBA tensor with values in [0, 1].
        /// </summary>
        public Tensor Images { get; private set; }
        /// <summary>
        /// All camera-to-world poses stacked as a [N, 4, 4] float32 tensor.
        /// </summary>
        public Tensor Poses { get; private set; }
        /// <summary>
        /// Builds the tensors for one sample: rays for pose <paramref name="index"/> plus
        /// the matching (optionally ray-subsampled) image pixels.
        /// </summary>
        /// <param name="index">Sample index; wrapped into [0, ImagesLength).</param>
        /// <returns>Dictionary with "rays_o", "rays_d", "images" and, while training, "inds".</returns>
        public override Dictionary<string, Tensor> GetTensor(long index)
        {
            using var c = torch.NewDisposeScope();
            // Wrap the incoming index into [0, ImagesLength); the floor division keeps
            // negative indices wrapping correctly as well.
            long msb = (long)Math.Floor((double)index / ImagesLength) * ImagesLength;
            index = (index - msb) % ImagesLength;
            var poses = Poses[index];

            var gPose = poses.unsqueeze(0);
            // While training only NumRays random rays are generated; otherwise all H*W rays.
            var n = Training ? NumRays : -1;
            var rays = GetRays(gPose, Width, Height, n);

            var images = Images[index].unsqueeze(0);
            if (Training)
            {
                // Gather the pixels hit by the sampled rays: replicate the flat pixel
                // indices across the C color channels so gather picks whole pixels.
                var C = images.shape[^1];
                var inds = rays["inds"];
                List<Tensor> stacks = new List<Tensor>();
                for (int i = 0; i < C; i++)
                {
                    stacks.Add(inds);
                }
                inds = torch.stack(stacks, -1);
                images = torch.gather(images.view(1, -1, C), 1, inds);
            }
            rays.Add("images", images);
            // BUGFIX: iterate over a snapshot of the keys — writing to the dictionary
            // while enumerating it directly invalidates the enumerator on older runtimes.
            foreach (var key in rays.Keys.ToArray())
            {
                rays[key] = rays[key].MoveToOuterDisposeScope();
            }
            return rays;
        }

        /// <summary>
        /// Generates camera rays for a batch of poses using the pinhole intrinsics.
        /// </summary>
        /// <param name="poses">[B, 4, 4] camera-to-world matrices.</param>
        /// <param name="W">Image width in pixels.</param>
        /// <param name="H">Image height in pixels.</param>
        /// <param name="N">Rays per image; &lt;= 0 means one ray for every pixel.</param>
        /// <param name="patch_size">Reserved for patch sampling; currently unused.</param>
        /// <returns>"rays_o" and "rays_d" ([B, N, 3]); when subsampling, also the flat pixel "inds".</returns>
        public Dictionary<string, Tensor> GetRays(Tensor poses, int W, int H, int N = -1, int patch_size = 1)
        {
            using var c = torch.NewDisposeScope();
            var device = poses.device;
            long B = poses.shape[0];
            float fx = fl_x; float fy = fl_y;
            float cx = this.cx; float cy = this.cy;
            // Pixel-center coordinates (+0.5), flattened to [B, H*W].
            var ij = torch.meshgrid([torch.linspace(0, W - 1, W, device: device), torch.linspace(0, H - 1, H, device: device)]);
            var i = ij[0].t().reshape(1, H * W).expand(B, H * W) + 0.5;
            var j = ij[1].t().reshape(1, H * W).expand(B, H * W) + 0.5;

            Dictionary<string, Tensor> result = new();
            if (N > 0)
            {
                // Randomly subsample N pixels (the same indices for every batch entry).
                N = Math.Min(N, H * W);
                Tensor inds = torch.randint(0, H * W, size: N, device: device);
                inds = inds.expand(B, N);

                i = torch.gather(i, -1, inds);
                j = torch.gather(j, -1, inds);

                result.Add("inds", inds.MoveToOuterDisposeScope());
            }

            // Back-project pixel centers to unit-length view directions in camera space.
            var zs = torch.ones_like(i);
            var xs = (i - cx) / fx * zs;
            var ys = (j - cy) / fy * zs;
            var directions = torch.stack([xs, ys, zs], dim: -1);
            directions = directions / torch.norm(directions, dimension: -1, keepdim: true);
            // Rotate into world space with the transposed 3x3 rotation block of the pose.
            var c2w = poses[TensorIndex.Colon, TensorIndex.Slice(0, 3), TensorIndex.Slice(0, 3)].transpose(-1, -2);
            var rays_d = torch.matmul(directions, c2w);

            // Ray origins are the camera centers (translation column), broadcast per ray.
            var rays_o = poses[TensorIndex.Ellipsis, TensorIndex.Slice(0, 3), TensorIndex.Single(3)];
            rays_o = rays_o[TensorIndex.Ellipsis, TensorIndex.None, TensorIndex.Colon].expand_as(rays_d);

            result.Add("rays_o", rays_o.MoveToOuterDisposeScope());
            result.Add("rays_d", rays_d.MoveToOuterDisposeScope());
            return result;
        }

        /// <summary>
        /// Private: instances are created through <see cref="LoadFromTransformsJson"/>.
        /// CUDA is mandatory for this module.
        /// </summary>
        private NeRFDataset()
        {
            if (!torch.cuda.is_available()) throw new NotSupportedException("This module need to running in cuda");
            else Device = torch.device(DeviceType.CUDA, 0);
        }

        /// <summary>
        /// Loads a dataset from the given transforms.json file.
        /// </summary>
        /// <param name="jsonPath">Path to the transforms.json file.</param>
        /// <param name="option">Loading options; defaults to Near=0, CameraScale=0.33, CameraOffset=(0.5, 0.5, 0.5).</param>
        /// <returns>The fully parsed dataset, with images, poses and AABB computed.</returns>
        /// <exception cref="FileNotFoundException">The json file does not exist.</exception>
        /// <exception cref="InvalidDataException">The json cannot be parsed or lacks camera intrinsics.</exception>
        public static async Task<NeRFDataset> LoadFromTransformsJson(string jsonPath, DatasetOption? option = null)
        {
            if (!File.Exists(jsonPath)) throw new FileNotFoundException(jsonPath);
            if (option == null) option = new DatasetOption(0, 0.33f, new Vector3(0.5f));
            using FileStream stream = File.OpenRead(jsonPath);
            DatasetBody? body = JsonSerializer.Deserialize<DatasetBody>(stream) ?? throw new InvalidDataException("Invalid dataset");
            // Frame file paths are resolved relative to the json's directory.
            FileInfo jsonInfo = new FileInfo(jsonPath);
            body.RootPath = jsonInfo.Directory!.FullName;
            NeRFDataset dataset = new NeRFDataset()
            {
                CameraScale = option.CameraScale,
                CameraOffset = option.CameraOffset,
                Near = option.Near,
                Far = option.Far,
                AABBScale = option.AABBScale,
            };
            await ParseBody(dataset, body, option.ReadNum);
            await CalculateAABB(dataset);
            return dataset;
        }
        /// <summary>
        /// Wraps this dataset in a <see cref="NeRFDataLoader"/>.
        /// </summary>
        /// <param name="shuffle">Shuffle override; defaults to <see cref="Training"/>.</param>
        /// <returns>A single-worker loader using <see cref="BatchSize"/> (at least 1).</returns>
        public NeRFDataLoader ToDataLoader(bool? shuffle = null)
        {
            int b = Math.Max(BatchSize, 1);
            var loader = new NeRFDataLoader(this, b, shuffle: shuffle ?? Training, Device, num_worker: 0);
            return loader;
        }

        /// <summary>
        /// Resolves a frame's image path against the dataset root, appending ".png"
        /// when the file name carries no extension (Blender-style datasets).
        /// </summary>
        private static string ResolveImagePath(string framePath, string rootPath)
        {
            var filePath = Path.GetFullPath(framePath, rootPath);
            var split = filePath.Split(Path.DirectorySeparatorChar);
            if (!split[^1].Contains('.')) filePath += ".png";
            return filePath;
        }

        /// <summary>
        /// Parses the deserialized transforms body: loads images and poses (in parallel),
        /// and derives the camera intrinsics.
        /// </summary>
        /// <param name="dataset">Dataset being populated.</param>
        /// <param name="body">Deserialized transforms.json content.</param>
        /// <param name="dataNum">Upper limit on the number of frames to read; 0 means all.</param>
        /// <exception cref="InvalidDataException">No frames, no readable image, or missing intrinsics.</exception>
        private static async Task ParseBody(NeRFDataset dataset, DatasetBody body, int dataNum = 0)
        {
            if (body.frames.Count <= 0) throw new InvalidDataException("Invalid dataset");
            int imageStart = 0; int imageLength = body.frames.Count;
            if (dataNum > 0) imageLength = Math.Min(dataNum, imageLength);
            List<Task<(torch.Tensor, torch.Tensor)>> tasks = new(imageLength);
            List<torch.Tensor> data = new(imageLength);
            List<torch.Tensor> cameras = new(imageLength);
            float cameraScale = (float)dataset.CameraScale;

            if (body.h.HasValue && body.w.HasValue)
            {
                dataset.Height = (int)Math.Floor((double)(body.h / dataset.Downscale));
                dataset.Width = (int)Math.Floor((double)(body.w / dataset.Downscale));
            }

            if (dataset.Width == 0 || dataset.Height == 0)
            {
                // Resolution unknown: read the first existing image synchronously so that
                // Width/Height are fixed before the parallel loads start.
                var frame = body.frames[0];
                imageStart = 1;
                var filePath = ResolveImagePath(frame.file_path, body.RootPath);
                // BUGFIX: the path must be re-resolved for every candidate frame — the
                // original loop never updated filePath and spun forever when the first
                // image was missing. Also bail out when no frame has an image at all.
                while (!File.Exists(filePath))
                {
                    if (imageStart >= body.frames.Count) throw new InvalidDataException("Invalid dataset");
                    frame = body.frames[imageStart];
                    imageStart++;
                    filePath = ResolveImagePath(frame.file_path, body.RootPath);
                }
                var d = ReadImage(dataset, filePath);
                // BUGFIX: forward the configured CameraScale instead of silently using
                // ReadPose's 0.33 default.
                var c = ReadPose(dataset, frame, dataset.CameraOffset, cameraScale);
                data.Add(d);
                cameras.Add(c);
            }
            for (int i = imageStart; i < imageLength; i++)
            {
                var frame = body.frames[i];
                var filePath = ResolveImagePath(frame.file_path, body.RootPath);
                if (!File.Exists(filePath)) continue;
                Task<(torch.Tensor, torch.Tensor)> t = Task.Factory.StartNew(() =>
                {
                    torch.Tensor tensor = ReadImage(dataset, filePath);
                    torch.Tensor camera = ReadPose(dataset, frame, dataset.CameraOffset, cameraScale);
                    return (tensor, camera);
                });
                tasks.Add(t);
            }
            if (tasks.Count > 0)
            {
                await Task.WhenAll(tasks);
                foreach (var task in tasks)
                {
                    if (task.Status == TaskStatus.RanToCompletion)
                    {
                        data.Add(task.Result.Item1);
                        cameras.Add(task.Result.Item2);
                    }
                }
            }
            imageLength = data.Count;
            // Stack into [N, H, W, 4] images and [N, 4, 4] poses.
            torch.Tensor final = torch.stack(data, 0);
            torch.Tensor camFinal = torch.stack(cameras, 0);
            dataset.Images = final;
            dataset.Poses = camFinal;
            dataset.ImagesLength = imageLength;

            // Mean camera distance from the origin, used as the scene radius.
            float radius = camFinal[TensorIndex.Colon, TensorIndex.Slice(0, 3), TensorIndex.Single(3)].norm(dim: -1).mean([0]).item<float>();
            dataset.Radius = radius;

            if (body.fl_x.HasValue || body.fl_y.HasValue)
            {
                // Use the given focal lengths; fall back to the other axis when one is missing.
                dataset.fl_x = (float)((body.fl_x.HasValue ? body.fl_x : body.fl_y!.Value) / dataset.Downscale);
                dataset.fl_y = (float)((body.fl_y.HasValue ? body.fl_y : body.fl_x!.Value) / dataset.Downscale);
            }
            else if (body.camera_angle_x.HasValue || body.camera_angle_y.HasValue)
            {
                // Derive focal lengths from the field-of-view: f = 0.5 * size / tan(angle / 2).
                double? localX = body.camera_angle_x.HasValue ? 0.5 * dataset.Width / Math.Tan(body.camera_angle_x.Value * 0.5) : null;
                dataset.fl_x = localX.HasValue ? (float)localX.Value : 0;
                double? localY = body.camera_angle_y.HasValue ? 0.5 * dataset.Height / Math.Tan(body.camera_angle_y.Value * 0.5) : null;
                dataset.fl_y = localY.HasValue ? (float)localY.Value : 0;
                if (!localX.HasValue) { dataset.fl_x = (float)localY!.Value; }
                if (!localY.HasValue) { dataset.fl_y = (float)localX!.Value; }
            }
            else throw new InvalidDataException("Invalid dataset, camera intrinsics lost");
            // BUGFIX: use float halves so odd resolutions keep the principal point at
            // the true image center (previously truncating integer division).
            dataset.cx = body.cx.HasValue ? (float)(body.cx / dataset.Downscale) : dataset.Width / 2f;
            dataset.cy = body.cy.HasValue ? (float)(body.cy / dataset.Downscale) : dataset.Height / 2f;

            if (body.aabb_scale.HasValue) dataset.AABBScale = (float)body.aabb_scale.Value;

            // torch.stack copied the data; release the per-image tensors.
            foreach (var d in data) d.Dispose();
            foreach (var d in cameras) d.Dispose();
        }
        /// <summary>
        /// Loads one image as a [H, W, 4] float32 RGBA tensor with values in [0, 1],
        /// resizing to the dataset resolution when necessary. The first image read
        /// fixes Width/Height when they are still unknown (ParseBody guarantees this
        /// happens synchronously, before the parallel loads).
        /// </summary>
        private static Tensor ReadImage(NeRFDataset dataset, string filePath)
        {
            using FileStream stream = File.OpenRead(filePath);
            using Image<Rgba32> image = Image.Load<Rgba32>(stream);
            if (dataset.Width == 0 || dataset.Height == 0)
            {
                dataset.Width = (int)(image.Width / dataset.Downscale);
                dataset.Height = (int)(image.Height / dataset.Downscale);
            }
            else if (image.Width != dataset.Width || image.Height != dataset.Height)
            {
                int width = dataset.Width;
                int height = dataset.Height;
                image.Mutate(x => x.Resize(width, height));
            }
            float[,,] total = new float[image.Height, image.Width, 4];
            image.ProcessPixelRows((accessor) =>
            {
                for (int i = 0; i < accessor.Height; i++)
                {
                    var row = accessor.GetRowSpan(i);
                    for (int j = 0; j < accessor.Width; j++)
                    {
                        // Normalize each 8-bit channel to [0, 1].
                        Rgba32 color = row[j];
                        total[i, j, 0] = color.R / 255f;
                        total[i, j, 1] = color.G / 255f;
                        total[i, j, 2] = color.B / 255f;
                        total[i, j, 3] = color.A / 255f;
                    }
                }
            });
            torch.Tensor result = torch.from_array(total);
            return result;
        }
        /// <summary>
        /// Converts a frame's 4x4 transform into the training convention: rows are
        /// permuted (y, z, x), the y/z columns are negated, and the translation is
        /// scaled and offset into the unit cube.
        /// </summary>
        /// <param name="scale">Translation scale factor (the configured CameraScale).</param>
        private static Tensor ReadPose(NeRFDataset dataset, DatasetFrame frame, Vector3 offset, float scale = 0.33f)
        {
            var pose = frame.transform_matrix;
            float[,] new_pose = new float[4, 4]
            {
                { pose[1][0], -pose[1][1], -pose[1][2], pose[1][3] * scale + offset[0] },
                { pose[2][0], -pose[2][1], -pose[2][2], pose[2][3] * scale + offset[1] },
                { pose[0][0], -pose[0][1], -pose[0][2], pose[0][3] * scale + offset[2] },
                { 0, 0, 0, 1 }
            };
            return torch.from_array(new_pose);
        }
        /// <summary>
        /// Computes the axis-aligned bounding box covering the near/far points of
        /// every camera's rays, then stores a cubic, floor-rounded, AABBScale-scaled
        /// box in <see cref="BoxMin"/>/<see cref="BoxMax"/>.
        /// </summary>
        private async static Task CalculateAABB(NeRFDataset dataset)
        {
            if (dataset.Poses is null || dataset.Height <= 0 || dataset.Width <= 0) return;
            var p = dataset.Poses;
            uint B = (uint)p.shape[0]; int H = dataset.Height; int W = dataset.Width;
            float near = (float)(dataset.Near * dataset.CameraScale); float far = (float)(dataset.Far * dataset.CameraScale);
            List<Task<Tuple<Vector3, Vector3>>> tasks = [];
            // NOTE: seeding min/max at Zero means the box always contains the origin.
            Vector3 min = Vector3.Zero; Vector3 max = Vector3.Zero;
            for (uint i = 0; i < B; i++)
            {
                Tensor onePose = p[i].unsqueeze(0);
                tasks.Add(CalculateAABBOnes(dataset, onePose, H, W, near, far));
            }
            await Task.WhenAll(tasks);
            foreach (var task in tasks)
            {
                if (task.IsCompletedSuccessfully)
                {
                    Tuple<Vector3, Vector3> r = task.Result;
                    min = Vector3.Min(r.Item1, min);
                    max = Vector3.Max(r.Item2, max);
                }
            }
            // Turn the box into a centered cube with the largest half-extent.
            Vector3 center = (min + max) * 0.5f;
            Vector3 ext = center - min;
            float pp = Math.Max(Math.Max(ext.X, ext.Y), ext.Z);
            dataset.BoxMin = new Vector3(-(float)Math.Floor(pp) * dataset.AABBScale);
            dataset.BoxMax = new Vector3((float)Math.Floor(pp) * dataset.AABBScale);
        }

        /// <summary>
        /// Computes, on a worker thread, the min/max corners of the near- and
        /// far-plane points of every ray of a single pose.
        /// </summary>
        private static Task<Tuple<Vector3, Vector3>> CalculateAABBOnes(NeRFDataset dataset, Tensor onePose, int H, int W, float near = 2.0f, float far = 6.0f)
        {
            return Task.Factory.StartNew(() =>
            {
                using var c = torch.NewDisposeScope();
                var rays = dataset.GetRays(onePose, W, H, -1, 1);
                var rays_o = rays["rays_o"];
                var rays_d = rays["rays_d"];
                uint channel = (uint)rays_o.shape[^1];
                rays_o = rays_o.view(-1, channel);
                rays_d = rays_d.view(-1, channel);
                Tensor nearP = rays_o + near * rays_d;
                Tensor farP = rays_o + far * rays_d;
                Vector3 minNP = GetMinVector(nearP);
                Vector3 maxNP = GetMaxVector(nearP);
                Vector3 minFP = GetMinVector(farP);
                Vector3 maxFP = GetMaxVector(farP);
                Vector3 minP = Vector3.Min(minNP, minFP);
                Vector3 maxP = Vector3.Max(maxNP, maxFP);
                return Tuple.Create(minP, maxP);
            });
        }
        /// <summary>
        /// Reinterprets a tensor's raw bytes as Vector3s. Assumes a contiguous,
        /// CPU-resident float32 tensor whose element count is a multiple of 3 —
        /// TODO(review): confirm for CUDA tensors that <c>bytes</c> copies to host.
        /// </summary>
        private static Vector3[] ConvertTensor(Tensor tensor)
        {
            return MemoryMarshal.Cast<byte, Vector3>(tensor.bytes).ToArray();
        }
        /// <summary>
        /// Component-wise minimum over dim 0 of a [N, 3] tensor, as a Vector3.
        /// </summary>
        private static Vector3 GetMinVector(Tensor input)
        {
            using var c = torch.NewDisposeScope();
            (Tensor min, Tensor idx) = torch.min(input, dim: 0, keepdim: true);
            Vector3[] r = ConvertTensor(min);
            return r[0];
        }
        /// <summary>
        /// Component-wise maximum over dim 0 of a [N, 3] tensor, as a Vector3.
        /// </summary>
        private static Vector3 GetMaxVector(Tensor input)
        {
            using var c = torch.NewDisposeScope();
            (Tensor max, Tensor idx) = torch.max(input, dim: 0, keepdim: true);
            Vector3[] r = ConvertTensor(max);
            return r[0];
        }
    }

    /// <summary>
    /// DataLoader for <see cref="NeRFDataset"/>: collates per-sample tensor
    /// dictionaries into a single batched dictionary on the target device.
    /// </summary>
    public class NeRFDataLoader : DataLoader<Dictionary<string, Tensor>, Dictionary<string, Tensor>>
    {
        /// <summary>Creates a loader with an optional shuffle flag.</summary>
        public NeRFDataLoader(utils.data.Dataset dataset,
                int batchSize, bool shuffle = false,
                Device device = null, int? seed = null,
                int num_worker = 1, bool drop_last = false,
                bool disposeBatch = true, bool disposeDataset = true) 
            : base(dataset, batchSize, Collate, shuffle, device, seed, 
                  num_worker, drop_last, disposeBatch, disposeDataset)
        {
        }
        /// <summary>Creates a loader with a caller-provided shuffle order.</summary>
        public NeRFDataLoader(utils.data.Dataset dataset, 
            int batchSize, IEnumerable<long> shuffler, 
            Device device = null, int num_worker = 1, bool drop_last = false, 
            bool disposeBatch = true, bool disposeDataset = true) 
            : base(dataset, batchSize, Collate, shuffler, 
                  device, num_worker, drop_last, disposeBatch, disposeDataset)
        {
        }

        /// <summary>
        /// Merges per-sample dictionaries into one batch: tensors sharing a key
        /// are concatenated along dim 0 and moved to <paramref name="device"/>.
        /// Keys are taken from the first sample; all samples are assumed to share
        /// the same key set.
        /// </summary>
        protected static Dictionary<string, Tensor> Collate(IEnumerable<Dictionary<string, Tensor>> dic, Device device)
        {
            using (torch.NewDisposeScope())
            {
                Dictionary<string, torch.Tensor> batch = new();
                // BUGFIX: an empty batch would make First() throw; return it empty instead.
                if (!dic.Any()) return batch;
                foreach (var x in dic.First().Keys)
                {
                    var t = cat(dic.Select(k => k[x].alias()).ToArray(), 0);
                    if (t.device_type != device.type || t.device_index != device.index)
                        t = t.to(device);
                    batch[x] = t.MoveToOuterDisposeScope();
                }
                return batch;
            }
        }
    }

    /// <summary>
    /// Dataset split a <see cref="NeRFDataset"/> instance represents.
    /// </summary>
    public enum NeRFDatasetType
    {
        /// <summary>Training split.</summary>
        Train,
        /// <summary>Validation split.</summary>
        Val,
        /// <summary>Test split.</summary>
        Test
    }
    /// <summary>
    /// Source layout of the dataset's transforms.
    /// </summary>
    public enum NeRFDatasetMode
    {
        /// <summary>
        /// Blender-style synthetic dataset (NeRF "transforms_*.json" convention).
        /// </summary>
        Blender,
        /// <summary>Colmap-reconstructed real-capture dataset.</summary>
        Colmap
    }
}
