using BepuUtilities;
using BepuUtilities.Collections;
using BepuUtilities.Memory;
using System;
using System.Diagnostics;
using System.Runtime.CompilerServices;
#if MYCODE
using BepuUtilities.Vectors;
#else
using System.Numerics;
#endif
namespace BepuPhysics.Trees
{
    partial struct Tree
    {
        /// <summary>
        /// Bundle of pointers to the scratch buffers used during a sweep build. The buffers are
        /// allocated and owned by the caller (SweepBuild) and sized to at least the leaf count.
        /// </summary>
        internal unsafe struct SweepResources
        {
            // Bounding boxes of the source leaves, indexed by original leaf index.
            public BoundingBox* Bounds;
            // Maps positions in the current traversal ordering back to original leaf indices.
            public int* IndexMap;
            // Per-axis candidate orderings used while evaluating splits along X, Y, and Z.
            public int* IndexMapX;
            public int* IndexMapY;
            public int* IndexMapZ;
            // Per-axis centroid coordinates, indexed by original leaf index.
            public float* CentroidsX;
            public float* CentroidsY;
            public float* CentroidsZ;
            // Scratch buffer for the low-to-high merged-bounds sweep in FindPartitionForAxis.
            public BoundingBox* Merged;
        }

        /// <summary>
        /// Orders leaf indices by their centroid coordinate along a single axis.
        /// </summary>
        unsafe struct IndexMapComparer : IComparerRef<int>
        {
            // Axis-specific centroid coordinates, indexed by original leaf index.
            public float* Centroids;

            [MethodImpl(MethodImplOptions.AggressiveInlining)]
            public int Compare(ref int a, ref int b)
            {
                // Compare the centroid coordinates addressed by the two leaf indices.
                return Centroids[a].CompareTo(Centroids[b]);
            }
        }


        /// <summary>
        /// Finds the lowest cost split along a single axis by sorting the subtree indices by centroid
        /// and sweeping every candidate partition with an SAH-style cost metric.
        /// </summary>
        /// <param name="boundingBoxes">Bounds of the candidate subtrees, indexed by original leaf index.</param>
        /// <param name="aMerged">Scratch buffer; aMerged[i] receives the merged bounds of sorted subtrees [0, i].</param>
        /// <param name="centroids">Centroid coordinates along this axis, indexed by original leaf index.</param>
        /// <param name="indexMap">Subtree indices to partition; sorted in place by centroid along this axis.</param>
        /// <param name="count">Number of subtrees referenced by the index map. Must be greater than 1.</param>
        /// <param name="splitIndex">Position in the sorted map where partition B begins; [0, splitIndex) belongs to A.</param>
        /// <param name="cost">Cost of the best partition found.</param>
        /// <param name="a">Merged bounds of partition A at the best split.</param>
        /// <param name="b">Merged bounds of partition B at the best split.</param>
        /// <param name="leafCountA">Number of subtrees assigned to partition A.</param>
        /// <param name="leafCountB">Number of subtrees assigned to partition B.</param>
        unsafe void FindPartitionForAxis(BoundingBox* boundingBoxes, BoundingBox* aMerged, float* centroids, int* indexMap, int count,
            out int splitIndex, out float cost, out BoundingBox a, out BoundingBox b, out int leafCountA, out int leafCountB)
        {
            Debug.Assert(count > 1);
            // TODO: Note that sorting at every level is not necessary. Like one of the much older spatial split
            // implementations, you could sort just once up front and then perform an O(n) operation at each level
            // to shuffle the leaf data into the relevant position on each side of the partition. That would push
            // all the sorting work into an earlier step where an optimized parallel sort could be applied — or the
            // three axes could simply be sorted independently, perhaps hidden alongside other work.
            // The usual load-balancing problems of parallel sorts would likely be mitigated somewhat by running
            // three of them simultaneously — more opportunities to balance.
            // Note also that, at every step, the partitioning scheme and the sort above produce contiguous blocks
            // of data to process. If gathering like that anyway, wider SIMD could be thrown at the problem; this
            // version is only 3-wide, which is unfortunate for AVX2 and AVX512.
            // With those changes, the sweep builder could probably be made faster than v1's insertion builder — it
            // is almost there already. (It would also need comparing against a similarly SIMD-accelerated binned
            // approach for use in incremental refinement. If it is not much slower, the extra quality pays off by
            // accelerating the self test — a dominant cost — which could make it faster overall.)
            var comparer = new IndexMapComparer { Centroids = centroids };
            QuickSort.Sort(ref indexMap[0], 0, count - 1, ref comparer);

            // Search for the best split.
            // Sweep from low to high, caching the merged bounds and leaf count at each point.
            // Index N includes every subtree from 0 to N inclusive, so index 0 contains subtree 0's information.
            var lastIndex = count - 1;

            aMerged[0] = boundingBoxes[indexMap[0]];
            for (int i = 1; i < lastIndex; ++i)
            {
                var index = indexMap[i];
                BoundingBox.CreateMerged(aMerged[i - 1], boundingBoxes[index], out aMerged[i]);
            }

            // Sweep from high to low, accumulating partition B's bounds incrementally.
            BoundingBox bMerged = new BoundingBox { Min = new Vector3(float.MaxValue), Max = new Vector3(float.MinValue) };
            cost = float.MaxValue;
            splitIndex = 0;
            a = bMerged;
            b = bMerged;
            leafCountA = 0;
            leafCountB = 0;
            for (int i = lastIndex; i >= 1; --i)
            {
                int aIndex = i - 1;
                var subtreeIndex = indexMap[i];
                BoundingBox.CreateMerged(bMerged, boundingBoxes[subtreeIndex], out bMerged);

                // Note the modifications to the cost function relative to the original SAH.
                // First, the counts are scaled by a very mild quadratic term so that skewed distributions are
                // penalized. The penalty is weak and effectively never kicks in except in pathological cases,
                // such as all bounding boxes overlapping.
                // Second, a tiny baseline (the smallest normal float) is added to the evaluated bounds metric.
                // That guarantees that even a set of perfectly overlapping zero-extent bounds will use a midpoint
                // split rather than a degenerately skewed one.
                const float normalEpsilon = 1.1754943508e-38f;
                var aCost = i * (1f + i * 0.001f) * (normalEpsilon + ComputeBoundsMetric(ref aMerged[aIndex]));
                var bCount = count - i;
                var bCost = bCount * (1f + bCount * 0.001f) * (normalEpsilon + ComputeBoundsMetric(ref bMerged));

                var totalCost = aCost + bCost;
                if (totalCost < cost)
                {
                    cost = totalCost;
                    splitIndex = i;
                    a = aMerged[aIndex];
                    b = bMerged;
                    leafCountA = i;
                    leafCountB = count - i;
                }

            }

        }

        /// <summary>
        /// Evaluates candidate splits along all three axes for the subtree range [start, start + count)
        /// and commits the lowest cost axis's ordering back into the shared index map.
        /// </summary>
        unsafe void FindPartition(ref SweepResources leaves, int start, int count,
               out int splitIndex, out BoundingBox a, out BoundingBox b, out int leafCountA, out int leafCountB)
        {
            // There are a variety of potential microoptimizations here.

            // Seed each per-axis candidate map with the current ordering of this subtree range.
            for (int i = 0; i < count; ++i)
            {
                var leafIndex = leaves.IndexMap[i + start];
                leaves.IndexMapX[i] = leafIndex;
                leaves.IndexMapY[i] = leafIndex;
                leaves.IndexMapZ[i] = leafIndex;
            }

            // Score the best split independently along each axis.
            FindPartitionForAxis(leaves.Bounds, leaves.Merged, leaves.CentroidsX, leaves.IndexMapX, count,
                out int xSplitIndex, out float xCost, out BoundingBox xA, out BoundingBox xB, out int xLeafCountA, out int xLeafCountB);
            FindPartitionForAxis(leaves.Bounds, leaves.Merged, leaves.CentroidsY, leaves.IndexMapY, count,
                out int ySplitIndex, out float yCost, out BoundingBox yA, out BoundingBox yB, out int yLeafCountA, out int yLeafCountB);
            FindPartitionForAxis(leaves.Bounds, leaves.Merged, leaves.CentroidsZ, leaves.IndexMapZ, count,
                out int zSplitIndex, out float zCost, out BoundingBox zA, out BoundingBox zB, out int zLeafCountA, out int zLeafCountB);

            // Keep the cheapest axis; ties favor X, then Y.
            int* winningIndexMap;
            if (xCost <= yCost && xCost <= zCost)
            {
                splitIndex = xSplitIndex;
                a = xA;
                b = xB;
                leafCountA = xLeafCountA;
                leafCountB = xLeafCountB;
                winningIndexMap = leaves.IndexMapX;
            }
            else if (yCost <= zCost)
            {
                splitIndex = ySplitIndex;
                a = yA;
                b = yB;
                leafCountA = yLeafCountA;
                leafCountB = yLeafCountB;
                winningIndexMap = leaves.IndexMapY;
            }
            else
            {
                splitIndex = zSplitIndex;
                a = zA;
                b = zB;
                leafCountA = zLeafCountA;
                leafCountB = zLeafCountB;
                winningIndexMap = leaves.IndexMapZ;
            }

            // Commit the winning axis's ordering into the shared index map.
            for (int i = 0; i < count; ++i)
            {
                leaves.IndexMap[i + start] = winningIndexMap[i];
            }

            // Translate the split from range-local coordinates into an absolute index.
            splitIndex += start;
        }

        /// <summary>
        /// Partitions the subtree range [start, start + count) into the two children of the given node,
        /// recursing through CreateSweepBuilderNode for any child containing more than one leaf.
        /// </summary>
        unsafe void SplitLeavesIntoChildren(ref SweepResources leaves, int start, int count, int nodeIndex)
        {
            Debug.Assert(count >= 2);
            FindPartition(ref leaves, start, count, out var splitIndex, out var boundsA, out var boundsB, out var countA, out var countB);

            ref var node = ref Nodes[nodeIndex];
            ref var childA = ref node.A;
            ref var childB = ref node.B;
            childA.Min = boundsA.Min;
            childA.Max = boundsA.Max;
            childA.LeafCount = countA;
            childB.Min = boundsB.Min;
            childB.Max = boundsB.Max;
            childB.LeafCount = countB;

            if (countA > 1)
            {
                childA.Index = CreateSweepBuilderNode(nodeIndex, 0, ref leaves, start, countA);
            }
            else
            {
                Debug.Assert(countA == 1);
                // A single leaf doesn't need its own node; point the child directly at the encoded leaf.
                var leafIndex = leaves.IndexMap[start];
                Leaves[leafIndex] = new Leaf(nodeIndex, 0);
                childA.Index = Encode(leafIndex);
            }
            if (countB > 1)
            {
                childB.Index = CreateSweepBuilderNode(nodeIndex, 1, ref leaves, splitIndex, countB);
            }
            else
            {
                Debug.Assert(countB == 1);
                // A single leaf doesn't need its own node; point the child directly at the encoded leaf.
                var leafIndex = leaves.IndexMap[splitIndex];
                Leaves[leafIndex] = new Leaf(nodeIndex, 1);
                childB.Index = Encode(leafIndex);
            }
        }

        /// <summary>
        /// Allocates a node covering the subtree range [start, start + count) and recursively builds
        /// its children, returning the new node's index.
        /// </summary>
        unsafe int CreateSweepBuilderNode(int parentIndex, int indexInParent,
            ref SweepResources leaves, int start, int count)
        {
            var nodeIndex = AllocateNode();
            ref var metanode = ref Metanodes[nodeIndex];
            metanode.Parent = parentIndex;
            metanode.IndexInParent = indexInParent;
            metanode.RefineFlag = 0;

            if (count <= 2)
            {
                // No splitting work required; this node can directly hold every remaining subtree.
                ref var children = ref Nodes[nodeIndex].A;
                for (int childIndex = 0; childIndex < count; ++childIndex)
                {
                    // The sweep builder preallocated space for the leaves and set the leaf count to match;
                    // the index map tells us which original leaf belongs in this child slot.
                    var leafIndex = leaves.IndexMap[start + childIndex];
                    Leaves[leafIndex] = new Leaf(nodeIndex, childIndex);
                    ref var child = ref Unsafe.Add(ref children, childIndex);
                    child.Min = leaves.Bounds[leafIndex].Min;
                    child.Max = leaves.Bounds[leafIndex].Max;
                    child.Index = Encode(leafIndex);
                    child.LeafCount = 1;
                }
            }
            else
            {
                SplitLeavesIntoChildren(ref leaves, start, count, nodeIndex);
            }
            return nodeIndex;
        }


        /// <summary>
        /// Builds the tree from scratch using a top-down sweep SAH build over the given leaf bounds.
        /// </summary>
        /// <param name="pool">Pool used to allocate temporary buffers for the duration of the build.</param>
        /// <param name="leafBounds">Bounding boxes of the leaves to insert; one leaf is created per entry.</param>
        /// <exception cref="ArgumentException">Thrown when leafBounds is empty.</exception>
        /// <exception cref="InvalidOperationException">Thrown when the tree already contains leaves.</exception>
        public unsafe void SweepBuild(BufferPool pool, Buffer<BoundingBox> leafBounds)
        {
            if (leafBounds.Length <= 0)
                throw new ArgumentException("Length must be positive.", nameof(leafBounds));
            if (LeafCount != 0)
                throw new InvalidOperationException("Cannot build a tree that already contains nodes.");
            // The tree normally keeps an empty node at the root to make insertion easier.
            // As long as that is the case (and this is not the constructor), it must be cleared out here.
            nodeCount = 0;

            // Guarantee that no resizes will occur during the build.
            if (Leaves.Length < leafBounds.Length)
            {
                Resize(pool, leafBounds.Length);
            }
            leafCount = leafBounds.Length;

            // Scratch allocations for the per-axis sorts and the merged-bounds sweep.
            pool.TakeAtLeast<int>(leafBounds.Length, out var indexMap);
            pool.TakeAtLeast<int>(leafBounds.Length, out var indexMapX);
            pool.TakeAtLeast<int>(leafBounds.Length, out var indexMapY);
            pool.TakeAtLeast<int>(leafBounds.Length, out var indexMapZ);
            pool.TakeAtLeast<float>(leafBounds.Length, out var centroidsX);
            pool.TakeAtLeast<float>(leafBounds.Length, out var centroidsY);
            pool.TakeAtLeast<float>(leafBounds.Length, out var centroidsZ);
            pool.TakeAtLeast<BoundingBox>(leafBounds.Length, out var merged);
            SweepResources leaves;
            leaves.Bounds = leafBounds.Memory;
            leaves.IndexMap = indexMap.Memory;
            leaves.IndexMapX = indexMapX.Memory;
            leaves.IndexMapY = indexMapY.Memory;
            leaves.IndexMapZ = indexMapZ.Memory;
            leaves.CentroidsX = centroidsX.Memory;
            leaves.CentroidsY = centroidsY.Memory;
            leaves.CentroidsZ = centroidsZ.Memory;
            leaves.Merged = merged.Memory;

            for (int i = 0; i < leafBounds.Length; ++i)
            {
                var bounds = leaves.Bounds[i];
                // The index map relates indices in traversal back to the original leaf locations.
                leaves.IndexMap[i] = i;
                // The per-axis index maps don't need to be initialized here; they're filled in at the time of use.

                // Min + Max is twice the centroid; the constant factor doesn't affect the sort order,
                // so the halving is skipped.
                var centroid = bounds.Min + bounds.Max;
                centroidsX[i] = centroid.X;
                centroidsY[i] = centroid.Y;
                centroidsZ[i] = centroid.Z;
            }

            // Now perform the top-down sweep build. The root has no parent, hence the -1 indices.
            CreateSweepBuilderNode(-1, -1, ref leaves, 0, leafBounds.Length);

            // Return the scratch resources. All returns consistently use the id-based unsafe return;
            // the buffer locals go out of scope immediately afterward, so clearing them is unnecessary.
            pool.ReturnUnsafely(indexMap.Id);
            pool.ReturnUnsafely(indexMapX.Id);
            pool.ReturnUnsafely(indexMapY.Id);
            pool.ReturnUnsafely(indexMapZ.Id);
            pool.ReturnUnsafely(centroidsX.Id);
            pool.ReturnUnsafely(centroidsY.Id);
            pool.ReturnUnsafely(centroidsZ.Id);
            pool.ReturnUnsafely(merged.Id);
        }
    }
}
