using System;
using System.Collections.Generic;
using System.Text;
using surfInterop;
using Blimpbots.Recognition.Algorithms;


namespace Blimpbots.Recognition
{
    /// <summary>
    /// A rough per-match guess at where an object instance sits in the scene:
    /// an X-Y center position plus a scale ratio. Produced by
    /// <c>Clustering.EstimateCentroids</c> and fed to k-means as a (X, Y, Scale) row.
    /// </summary>
    public struct CentroidEstimate
    {
        /// <summary>Estimated object-center X coordinate in scene space.</summary>
        public double X;

        /// <summary>Estimated object-center Y coordinate in scene space.</summary>
        public double Y;

        /// <summary>Estimated scale ratio (scene keypoint scale / model keypoint scale).</summary>
        public double Scale;
    }

    /// <summary>
    /// Groups keypoint-match centroid estimates into object-detection clusters
    /// using k-means (k = 1, then k = 2 if a single cluster is too loose).
    /// </summary>
    public class Clustering
    {
        private IList<SurfKeypoint> _model;
        private double _modelWidth;
        private double _modelHeight;
        private double _modelRadius;
        private int _maxRepetitions;

        /* Want to give the Scale dimension some weight for clustering
         * but not so much that it splits objects apart.
         *
         * ~10.0 seems good here.
         */
        private const double ScaleMultiplier = 10.0;

        /* How tight a cluster must be, relative to the expected object size at the
         * cluster's estimated scale, to count as "good":
         *
         *   1.00 -- splits only very obvious things
         *   2.50 -- splits quite tightly
         */
        private const double ClusterTightness = 1.00;

        /// <summary>
        /// Creates a clusterer for a given model image.
        /// </summary>
        /// <param name="model">Keypoints extracted from the model image.</param>
        /// <param name="modelWidth">Model image width, in pixels.</param>
        /// <param name="modelHeight">Model image height, in pixels.</param>
        /// <param name="maxKMeansRepetitions">Iteration cap passed to the k-means runs.</param>
        public Clustering(IList<SurfKeypoint> model, int modelWidth, int modelHeight, int maxKMeansRepetitions)
        {
            _model = model;
            _modelWidth = (double)modelWidth;
            _modelHeight = (double)modelHeight;
            _maxRepetitions = maxKMeansRepetitions;

            // Model size metric, used to evaluate clusters.
            // NOTE(review): this is the full corner-to-corner diagonal, not the
            // center-to-corner distance the old comment claimed (that would be
            // half this value). Kept as-is because ClusterTightness was
            // evidently tuned against this metric.
            _modelRadius = Math.Sqrt(Math.Pow(modelWidth, 2)
                                   + Math.Pow(modelHeight, 2));
        }

        /// <summary>
        /// Clusters the per-match centroid estimates with k-means, trying k = 1
        /// and then k = 2, stopping as soon as every cluster is acceptably tight.
        /// The Scale dimension is weighted by <see cref="ScaleMultiplier"/> so it
        /// participates meaningfully in the distance metric.
        /// </summary>
        /// <param name="data">Keypoints extracted from the scene.</param>
        /// <param name="matches">Model-to-scene keypoint matches.</param>
        /// <returns>
        /// Non-empty clusters with their matches attached, or null when there are
        /// too few estimates to run k-means at all (callers must handle null).
        /// </returns>
        public List<Cluster> Cluster(ref SurfKeypoint[] data, List<KeypointMatch> matches)
        {
            // Per-dimension weights for (X, Y, Scale).
            double[] dimWeight = new double[] { 1.0, 1.0, ScaleMultiplier };

            List<CentroidEstimate> est = EstimateCentroids(ref data, matches);

            KMeans kMeans = new KMeans();

            // Pack the estimates into the (X, Y, Scale) rows k-means expects.
            double[][] estimates = new double[est.Count][];
            for (int i = 0; i < est.Count; i++)
                estimates[i] = new double[3] { est[i].X, est[i].Y, est[i].Scale };

            List<Cluster> clusters = null;

            ClusterCollection cc = null;

            for (int k = 1; k < 3; k++)
            {
                // Cannot ask for more clusters than there are points.
                // HACK (preserved): on the first pass this returns null rather
                // than an empty list -- existing callers may rely on that.
                if (estimates.Length <= k) return clusters;

                cc = kMeans.ClusterDataSet(k, estimates, dimWeight, _maxRepetitions);

                clusters = new List<Cluster>(cc.Count);
                int goodClusters = 0;
                foreach (Cluster c in cc)
                {
                    double stD = c.ComputeStandardDeviation(new int[] { 0, 1, 2 });
                    double scaleEst = c.ClusterMean[2];

                    // A cluster is "good" if it seems to be reasonably restricted
                    // in size relative to the expected size given the model
                    // (scaled to the cluster's estimated scene scale).
                    if ((stD * ClusterTightness) < (_modelRadius * scaleEst)) goodClusters++;

                    if (c.Count > 0)
                    {
                        // Attach the original matches that landed in this cluster.
                        List<KeypointMatch> clusterMatches = new List<KeypointMatch>();
                        for (int i = 0; i < c.Count; i++)
                            clusterMatches.Add(matches[c.OriginalIndex(i)]);
                        c.Matches = clusterMatches;
                        clusters.Add(c);
                    }
                }

                // Every cluster was tight enough -- no need to try a larger k.
                if (goodClusters == k) break;

            }

            return clusters;
        }

        /// <summary>
        /// Using point correspondences, come up with a rough estimate for the X-Y coordinate
        /// and scale of a possible object match in the scene. Each match votes for an
        /// object center by rotating and scaling the model keypoint's offset-from-center
        /// into scene space.
        /// </summary>
        /// <param name="data">Keypoints extracted from the data.</param>
        /// <param name="matches">Matches previously found.</param>
        /// <returns>One centroid estimate per match, in match order.</returns>
        public List<CentroidEstimate> EstimateCentroids(ref SurfKeypoint[] data, 
                                       List<KeypointMatch> matches)
        {
            List<CentroidEstimate> estimates = new List<CentroidEstimate>(matches.Count);
            
            for(int i = 0; i < matches.Count; i++)
            {
                SurfKeypoint modelPt = _model[matches[i].modelIdx];
                SurfKeypoint dataPt  = data[matches[i].dataIdx];

                // Angle difference between the matched keypoints' orientations.
                // NOTE(review): assumed to be in radians (fed to Math.Cos/Sin) -- confirm
                // against SurfKeypoint.orientation's units.
                double angle = modelPt.orientation - dataPt.orientation;
                double scale = dataPt.scale / modelPt.scale;

                // Keypoint offset from the model image's center.
                double d = modelPt.x - _modelWidth / 2;
                double h = modelPt.y - _modelHeight / 2;

                // Rotate the offset by the orientation difference to project the
                // model center into scene coordinates.
                double x_off = -Math.Cos(angle)*d + Math.Sin(angle)*h;
                double y_off = -Math.Cos(angle)*h - Math.Sin(angle)*d;

                CentroidEstimate e;
                e.X = dataPt.x + x_off;
                e.Y = dataPt.y + y_off;
                e.Scale = scale;

                estimates.Add(e);
            }

            return estimates;

        }



    }
}
