﻿using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Collections.ObjectModel;

namespace Performance
{
    /**
     * Multimedia Retrieval
     * Heat Kernel Signature project
     * Frank Borgonjen, Joeri van der Lei & Kevin van der Lei
     * 28-10-2011
     * 
     * The Analyzer class is used to compute the performance of our implementation by using
     * several performance measures
     */
    public class Analyzer
    {
        // NOTE(review): all state lives in static fields, so this class is not
        // thread-safe; each measure must run to completion before the next.
        public  const  int      CLUSTER_SIZE        = 57; // models per category (assumed fixed for the whole database)
        public  const  string   PR                  = "PRECISIONRECALL";
        public  const  string   ROC                 = "ROC";
        public  const  string   FIRST_TIER          = "FIRSTTIER";
        public  const  string   SECOND_TIER         = "SECONDTIER";
        private static string[] fileNames;       // model file names; index == row/column in the distance matrix
        private static string   performanceType; // measure currently computed by performance() (PR or ROC)
        public  static Dictionary<double, double> xyValues; // summed y-values per x-location
        public  static Dictionary<double, int>    xCount;   // number of y-values summed per x-location

        /**
         * Calculate the (x,y) pairs that will be used to plot a Precision/Recall
         * graph or a ROC curve based on the given distance matrix. For every query
         * (row) the models are ranked by distance; true/false positives are counted
         * over a growing scope and y-values landing on the same x-location are
         * averaged at the end.
         * 
         * @param distances The distance matrix (square; one row per query)
         * @param names     The filenames of the 3D models (row/column order)
         * @param type      The performance measure we want to use (PR or ROC)
         * @return          (x,y) pairs ordered by x, y averaged per x-location
         */
        public static Dictionary<double, double> performance(double[][] distances, string[] names, string type)
        {
            xyValues        = new Dictionary<double, double>();
            xCount          = new Dictionary<double, int>();
            fileNames       = names;
            performanceType = type;
            int database    = distances.Length; // number of queries (rows)
            int scope       = database;         // rank positions inspected per query
            for (int i = 0; i < database; i++)
            {
                int[] sortedD = sortedDistances(distances[i]); // model indices, nearest first
                int tp        = 0;  // true positives seen so far in this row
                int lastTp    = -1; // last tp for which a PR point was emitted
                for (int s = 0; s < scope; s++)
                {
                    // if we have a match, increase True Positive
                    if (isInCategory(i, sortedD[s]))
                    {
                        tp++;
                    }
                    // NOTE(review): fn assumes every category holds exactly
                    // CLUSTER_SIZE models, including the query itself — confirm.
                    int fp = (s + 1) - tp;                           // False Positive = retrieved but should not be
                    int fn = CLUSTER_SIZE - tp;                      // False Negative = should be retrieved but is not
                    int tn = Math.Max(0, database - (tp + fp + fn)); // True Negative  = neither retrieved nor relevant

                    // Every time a new True Positive is found, add a precision/recall point
                    if (performanceType == PR && lastTp != tp)
                    {
                        double precision = (double)tp / (s + 1);  // tp / scope
                        double recall    = (double)tp / (tp + fn); // tp / (tp + fn)
                        if (precision > 0.05 || recall >= 0.05) // drop the near-origin noise
                        {
                            addResult(recall, precision);
                        }
                        lastTp = tp;
                    }
                    else if (performanceType == ROC)
                    {
                        // ROC curve: specificity on x, sensitivity on y
                        double specificity = (double)tn / (fp + tn); // tn / (fp + tn)
                        double sensitivity = (double)tp / (tp + fn); // tp / (tp + fn)
                        addResult(specificity, sensitivity);
                    }
                }
            }
            // Order the values by x and average all y-values accumulated on one x-location
            xyValues = xyValues.OrderBy(x => x.Key).ToDictionary(x => x.Key, x => x.Value);
            Dictionary<double, double> result = new Dictionary<double, double>();
            foreach (KeyValuePair<double, double> kvp in xyValues)
            {
                result[kvp.Key] = kvp.Value / xCount[kvp.Key];
            }
            return result;
        }

        /**
         * Adds a (x,y) pair to the results. If a y-value already exists on the given
         * x-value the y-value will be added and a counter for the x-value is increased,
         * so the mean per x-location can be computed later.
         * 
         * @param x The x value
         * @param y The y value
         */
        public static void addResult(double x, double y)
        {
            double sum;
            if (xyValues.TryGetValue(x, out sum)) // single lookup instead of ContainsKey + indexer
            {
                xyValues[x] = sum + y;
                xCount[x]   = xCount[x] + 1;
            }
            else
            {
                xyValues.Add(x, y);
                xCount.Add(x, 1);
            }
        }

        /**
         * Calculate the kth tier, which is the percentage of retrieved objects from
         * the cluster within the first k(cluster - 1) hits.
         * kth tier = (v * 100) / k(c-1), with scope s = k(c-1)
         * 
         * @param distances The distance matrix
         * @param names     The fileNames of the models
         * @param k         The tier (1 = first tier, 2 = second tier, ...)
         * @return          Mean kth-tier percentage over all rows
         */
        public static double kthTier(double[][] distances, string[] names, int k)
        {
            fileNames  = names;
            int scope  = k * (CLUSTER_SIZE - 1); // s = k(c-1)
            double pct = 0;
            for (int i = 0; i < distances.Length; i++)
            {
                int[] sortedD = sortedDistances(distances[i]); // sort distances for the ith row
                int tp        = 0; // True positive
                // NOTE(review): starting at s = 2 skips both rank 0 (presumably the
                // query itself) and rank 1 — confirm the extra skip is intended.
                for (int s = 2; s < scope; s++)
                {
                    // if we have a match, increase True Positive
                    if (isInCategory(i, sortedD[s]))
                    {
                        tp++;
                    }
                }
                // Sum up the percentages for each row...
                pct += (double)tp / scope; // v / k(c-1)
            }
            // ...and then divide the percentage by the number of rows
            return (pct / distances.Length) * 100;
        }

        /**
         * Calculate the Bulls Eye Percentage (BEP), which is the percentage of
         * retrieved objects within a scope of two times the cluster size (s = 2*c).
         * BEP = (v * 100) / c
         * 
         * @param distances The distance matrix
         * @param names     The fileNames of the models
         * @return          Mean bulls-eye percentage over all rows
         */
        public static double BEP(double[][] distances, string[] names)
        {
            fileNames  = names;
            int scope  = 2 * CLUSTER_SIZE; // s = 2*c
            double pct = 0;
            for (int i = 0; i < distances.Length; i++)
            {
                int[] sortedD = sortedDistances(distances[i]); // sort distances for the ith row
                int tp        = 0; // True positive
                // NOTE(review): starting at s = 2 skips ranks 0 and 1 — confirm intended.
                for (int s = 2; s < scope; s++)
                {
                    // if we have a match, increase True Positive
                    if (isInCategory(i, sortedD[s]))
                    {
                        tp++;
                    }
                }
                // Sum up the percentages for each row...
                pct += (double)tp / CLUSTER_SIZE; // v/c
            }
            // ...and then divide the percentage by the number of rows
            return (pct / distances.Length) * 100;
        }

        /**
         * Calculate the Average Precision: for every query row, the precision at
         * each rank where a relevant model is found is averaged, and the per-row
         * averages are then averaged over all rows.
         * 
         * Rows without a single match contribute 0 (previously they produced a
         * 0/0 = NaN that poisoned the overall result).
         * 
         * @param distances The distance matrix
         * @param names     The fileNames of the models
         * @return          Mean average precision over all rows, as a percentage
         */
        public static double avgPrec(double[][] distances, string[] names)
        {
            fileNames = names;
            double avgPrec = 0; // total average precision
            double rowAvgPrec; // average precision for 1 row
            int tp; // True Positive
            for (int i = 0; i < distances.Length; i++)
            {
                int[] sortedD = sortedDistances(distances[i]); // sort distances for the ith row
                tp         = 0;
                rowAvgPrec = 0;
                // NOTE(review): starting at s = 2 skips ranks 0 and 1, and the
                // precision uses tp/s rather than tp/(s+1) — confirm intended.
                for (int s = 2; s < distances[i].Length; s++)
                {
                    // if we have a match, increase True Positive and accumulate precision
                    if (isInCategory(i, sortedD[s]))
                    {
                        tp++;
                        rowAvgPrec += (double)tp / s;
                    }
                }
                // add this row's average precision; a row with no matches adds 0
                if (tp > 0)
                {
                    avgPrec += rowAvgPrec / tp;
                }
            }
            // ...and then divide the percentage by the number of rows
            return (avgPrec / distances.Length) * 100;
        }

        /**
         * Returns whether the file, with given index toTest, is in the same category
         * as the file with index original.
         * 
         * @param original The index of the original file
         * @param toTest   The index of the file we want to test
         */
        public static bool isInCategory(int original, int toTest)
        {
            return getCategory(original) == getCategory(toTest);
        }

        /**
         * Returns the category that belongs to the file with the given index.
         * Assumes the first four characters of the file name are a numeric
         * category id (e.g. "0001...").
         * 
         * @param key The key (index) of the file
         */
        public static int getCategory(int key)
        {
            return Convert.ToInt32(fileNames[key].Substring(0, 4));
        }

        /**
         * Returns the keys (indices) of the models ordered by ascending distance.
         * OrderBy is stable, so ties keep their original index order.
         * 
         * @param distances The row of distances that we want to order
         */
        public static int[] sortedDistances(double[] distances)
        {
            return Enumerable.Range(0, distances.Length)
                             .OrderBy(i => distances[i])
                             .ToArray();
        }
    }
}
