﻿using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using snat.algorithms.network.TFIDF;
using snat.model;
using System.IO;

namespace snat.algorithms.node.LinkPrediction {

    /// <summary>
    /// This class controls the link prediction algorithm that trains and tests the interaction prediction
    /// classifers.
    /// Requires a relabel cluster factory that returns LinkPredictorNode.
    /// </summary>
    /// <typeparam name="T"></typeparam>
    /// <summary>
    /// This class controls the link prediction algorithm that trains and tests the interaction prediction
    /// classifiers.
    /// Requires a relabel cluster factory that returns LinkPredictorNode.
    /// </summary>
    /// <typeparam name="T">Document type handled by the network model nodes.</typeparam>
    public class LinkPredictorAlgorithm<T> : IAlgorithm where T : Document {

        /*  USE THIS:

            // NOTE(review): credentials redacted — the original comment embedded a live
            // database password in source control; load credentials from configuration instead.
            NetworkModel.Instance.Connect("mysql5.dcs.warwick.ac.uk", 3389, "snag", "<user>", "<password>");

            NetworkModel.Instance.InitNodes(new RelabelClusterFactory("../../../snat/Resources/emails"));

            EdgeClusterer clusterer = new EdgeClusterer();
            clusterer.Register();

            NetworkModel.Instance.Init();
            NetworkModel.Instance.SkipAllMessages();
            clusterer.DeRegister();

            LinkPredictorAlgorithm<EnronDocument> lpa = new LinkPredictorAlgorithm<EnronDocument>();
            lpa.Register();
            NetworkModel.Instance.Reset();

            NetworkModel.Instance.Step(new DateTime(2001,10,25));
            Console.WriteLine("TRAINING IS OVER");
            lpa.ActivateTesting();

            NetworkModel.Instance.Step(new TimeSpan(7, 0, 0, 0));
        */

        // NOTE(review): not referenced within this class; presumably shared term storage
        // for the TFIDF components — confirm against other users before removing.
        public static Dictionary<String, Term> Terms = new Dictionary<String, Term>();

        Random rand;

        // False while training; true once ActivateTesting has switched the event wiring over.
        bool testing;

        // Decision thresholds swept when turning prediction scores into binary
        // link predictions: 0.000 .. 1.000 in steps of 0.001 (1001 values).
        List<double> thresholds;

        // Predicted interactions with neighbours in the next time frame, keyed
        // threshold -> sender ID -> recipient ID -> predicted?, one store per classifier.
        Dictionary<Double, Dictionary<int, Dictionary<int, bool>>> historySTPredictions;
        Dictionary<Double, Dictionary<int, Dictionary<int, bool>>> historySTClassifierPredictions;
        Dictionary<Double, Dictionary<int, Dictionary<int, bool>>> clusterMRPredictions;
        Dictionary<Double, Dictionary<int, Dictionary<int, bool>>> clusterContentPredictions;
        Dictionary<Double, Dictionary<int, Dictionary<int, bool>>> historyLTPredictions;

        // Actual interactions observed during the testing window:
        // sender ID -> recipient ID -> interacted?
        Dictionary<int, Dictionary<int, bool>> actualLinks;

        // threshold -> classifier ID (1..5, see AnalyseResults) -> confusion matrix.
        Dictionary<double, Dictionary<int, ConfusionMatrix>> results;

        public LinkPredictorAlgorithm() {
            rand = new Random();
            testing = false;
            thresholds = new List<double>();

            // Multiply instead of repeatedly adding 0.001 so floating-point
            // rounding error does not accumulate across the 1001 thresholds.
            for (int i = 0; i <= 1000; i++) {
                thresholds.Add(i * 0.001);
            }
        }

        /// <summary>
        /// Records actual interactions observed during the testing phase so they can
        /// later be compared against the classifier predictions. No-op while training.
        /// </summary>
        /// <param name="m">The message that has just been processed.</param>
        public void OnMessageProcessed(Message m) {
            if (!testing) {
                return;
            }
            // Only senders that existed when testing was activated are tracked;
            // messages from unknown senders are deliberately ignored.
            Dictionary<int, bool> links;
            if (actualLinks.TryGetValue(m.Sender.ID, out links)) {
                foreach (LinkPredictorNode<EnronDocument> n in m.Recipients) {
                    links[n.ID] = true;
                }
            }
        }

        public string Name {
            get { return "Link Prediction Algorithm"; }
        }

        /// <summary>
        /// Switches the algorithm from training to testing: detaches the per-node
        /// training handlers, attaches this class's observation handlers, and
        /// initialises the prediction/actual-link stores for every node and out-edge.
        /// </summary>
        public void ActivateTesting() {
            testing = true;
            foreach (LinkPredictorNode<T> n in NetworkModel.Instance.Nodes.Values) {
                n.MessageSend -= n.OnMessageSend;
                NetworkModel.Instance.MessageProcessed -= n.OnMessageProcessed;
            }
            NetworkModel.Instance.MessageProcessed += OnMessageProcessed;
            NetworkModel.Instance.TapePaused += AnalyseResults;

            // initialise the results sections
            clusterContentPredictions = NewPredictionStore();
            clusterMRPredictions = NewPredictionStore();
            historySTPredictions = NewPredictionStore();
            historyLTPredictions = NewPredictionStore();
            historySTClassifierPredictions = NewPredictionStore();

            var predictionStores = new[] {
                clusterContentPredictions,
                clusterMRPredictions,
                historySTPredictions,
                historyLTPredictions,
                historySTClassifierPredictions
            };

            actualLinks = new Dictionary<int, Dictionary<int, bool>>();

            foreach (LinkPredictorNode<EnronDocument> n in NetworkModel.Instance.Nodes.Values) {

                actualLinks.Add(n.ID, new Dictionary<int, bool>());
                foreach (double threshold in thresholds) {
                    foreach (var store in predictionStores) {
                        store[threshold].Add(n.ID, new Dictionary<int, bool>());
                    }
                    foreach (Edge e in n.OutEdges) {
                        LinkPredictorNode<EnronDocument> n2 = (LinkPredictorNode<EnronDocument>)e.Target;
                        foreach (var store in predictionStores) {
                            store[threshold][n.ID].Add(n2.ID, false);
                        }
                        // Guard against duplicate out-edges to the same target.
                        if (!actualLinks[n.ID].ContainsKey(n2.ID)) {
                            actualLinks[n.ID].Add(n2.ID, false);
                        }
                    }
                }
            }
        }

        // Creates an empty threshold -> sender -> recipient -> predicted? store
        // with one (initially empty) entry per threshold.
        private Dictionary<double, Dictionary<int, Dictionary<int, bool>>> NewPredictionStore() {
            var store = new Dictionary<double, Dictionary<int, Dictionary<int, bool>>>();
            foreach (double threshold in thresholds) {
                store.Add(threshold, new Dictionary<int, Dictionary<int, bool>>());
            }
            return store;
        }

        /// <summary>
        /// Scores every out-edge with each of the five classifiers, thresholds the
        /// scores into binary predictions, tallies them against the observed links
        /// into per-threshold confusion matrices, and writes the ROC data
        /// (one CSV file per classifier).
        /// </summary>
        public void AnalyseResults() {
            foreach (LinkPredictorNode<EnronDocument> n in NetworkModel.Instance.Nodes.Values.OrderBy(e => e.ID)) {

                foreach (Edge e in n.OutEdges) {
                    LinkPredictorNode<EnronDocument> n2 = (LinkPredictorNode<EnronDocument>)e.Target;
                    double contentScore = n.predictWithClusterContent(n2);
                    double mrScore = n.predictWithClusterMR(n2);
                    double historyLTScore = n.predictWithHistoryLT(n2);
                    double historySTScore = n.predictWithHistoryST(n2);
                    double historySTClassifierScore = n.predictWithHistorySTclassifier(n2);

                    // A link is predicted at every threshold the score strictly exceeds.
                    foreach (double t in thresholds) {
                        if (contentScore > t) { clusterContentPredictions[t][n.ID][n2.ID] = true; }
                        if (mrScore > t) { clusterMRPredictions[t][n.ID][n2.ID] = true; }
                        if (historyLTScore > t) { historyLTPredictions[t][n.ID][n2.ID] = true; }
                        if (historySTScore > t) { historySTPredictions[t][n.ID][n2.ID] = true; }
                        if (historySTClassifierScore > t) { historySTClassifierPredictions[t][n.ID][n2.ID] = true; }
                    }
                }
                Console.WriteLine("FINISHED PROCESSING PREDICTIONS FOR NODE : {0}", n.ID);
            }

            // Classifier IDs: 1 = recent (short-term) history, 2 = cluster message
            // rates, 3 = cluster content (TF-IDF), 4 = long-term history,
            // 5 = recent-history classifier.
            results = new Dictionary<double, Dictionary<int, ConfusionMatrix>>();
            foreach (double threshold in thresholds) {
                results.Add(threshold, new Dictionary<int, ConfusionMatrix>());
                for (int classifier = 1; classifier <= 5; classifier++) {
                    results[threshold].Add(classifier, new ConfusionMatrix(0, 0, 0, 0));
                }
            }

            // proceed to calculate confusion matrices
            foreach (int n in actualLinks.Keys) {
                foreach (int n2 in actualLinks[n].Keys) {
                    foreach (double threshold in thresholds) {
                        bool actual = actualLinks[n][n2];
                        Tally(results[threshold][1], actual, historySTPredictions[threshold][n][n2]);
                        Tally(results[threshold][2], actual, clusterMRPredictions[threshold][n][n2]);
                        Tally(results[threshold][3], actual, clusterContentPredictions[threshold][n][n2]);
                        Tally(results[threshold][4], actual, historyLTPredictions[threshold][n][n2]);
                        Tally(results[threshold][5], actual, historySTClassifierPredictions[threshold][n][n2]);
                    }
                }
            }

            WriteConfusionMatrixCsv("STHistoryLinkPredictionCM.csv", "Recent History Only Link Prediction", 1, false);
            WriteConfusionMatrixCsv("ClusterMRLinkPredictionCM.csv", "Cluster Message Rates Link Prediction", 2, false);
            WriteConfusionMatrixCsv("ClusterContentLinkPredictionCM.csv", "Cluster Message Content Link Prediction", 3, false);
            WriteConfusionMatrixCsv("LTHistoryLinkPredictionCM.csv", "Long Term Interaction History Link Prediction", 4, false);
            WriteConfusionMatrixCsv("STHistoryClassifierLinkPredictionCM.csv", "Recent history classifier Link Prediction", 5, true);
        }

        // Folds one (actual, predicted) observation into a confusion matrix.
        private static void Tally(ConfusionMatrix cm, bool actual, bool predicted) {
            if (actual) {
                if (predicted) { cm.TP++; } else { cm.FN++; }
            } else {
                if (predicted) { cm.FP++; } else { cm.TN++; }
            }
        }

        // Writes the per-threshold confusion matrix / ROC data for one classifier
        // to a CSV file. When appendCompletionMarker is set, a blank line followed
        // by "ANALYSIS COMPLETE" is appended (written to the final file only).
        private void WriteConfusionMatrixCsv(string path, string title, int classifier, bool appendCompletionMarker) {
            using (StreamWriter output2 = new StreamWriter(path)) {
                output2.WriteLine(title);
                output2.WriteLine("Threshold,TP,FN,FP,TN,TPRate(Sensitivity),FPRate");
                foreach (double threshold in thresholds) {
                    ConfusionMatrix cm = results[threshold][classifier];
                    // Guard BOTH rates against an empty denominator. The original
                    // code guarded only the TP rate and could emit NaN for FPRate.
                    double FPRate = (cm.FP + cm.TN) == 0 ? 0 : (double)cm.FP / (double)(cm.FP + cm.TN);
                    double TPRate = (cm.TP + cm.FN) == 0 ? 0 : (double)cm.TP / (double)(cm.TP + cm.FN);
                    output2.WriteLine(threshold + "," + cm.TP + "," + cm.FN + "," + cm.FP + "," + cm.TN + "," + TPRate + "," + FPRate);
                }
                if (appendCompletionMarker) {
                    output2.WriteLine();
                    output2.WriteLine("ANALYSIS COMPLETE");
                }
            }
        }

        /// <summary>
        /// Attaches the training handlers to every node in the network model.
        /// </summary>
        public void Register() {
            foreach (LinkPredictorNode<T> n in NetworkModel.Instance.Nodes.Values) {
                n.MessageSend += n.OnMessageSend;
                NetworkModel.Instance.MessageProcessed += n.OnMessageProcessed;
            }
        }

        /// <summary>
        /// Detaches the training handlers attached by <see cref="Register"/>.
        /// (The original body was an empty loop and never unsubscribed, leaking
        /// the handlers for the lifetime of the network model.)
        /// </summary>
        public void DeRegister() {
            foreach (LinkPredictorNode<T> n in NetworkModel.Instance.Nodes.Values) {
                n.MessageSend -= n.OnMessageSend;
                NetworkModel.Instance.MessageProcessed -= n.OnMessageProcessed;
            }
        }
    }

    /// <summary>
    /// A 2x2 confusion matrix holding the raw outcome counts of a binary
    /// link-prediction classifier. Counts are mutable so callers can tally
    /// observations incrementally.
    /// </summary>
    public class ConfusionMatrix {

        // True positives: links predicted that did occur.
        public int TP;
        // True negatives: links not predicted that did not occur.
        public int TN;
        // False positives: links predicted that did not occur.
        public int FP;
        // False negatives: links that occurred but were not predicted.
        public int FN;

        /// <summary>
        /// Creates a matrix seeded with the given counts.
        /// </summary>
        /// <param name="i">Initial true-positive count.</param>
        /// <param name="j">Initial true-negative count.</param>
        /// <param name="k">Initial false-positive count.</param>
        /// <param name="l">Initial false-negative count.</param>
        public ConfusionMatrix(int i, int j, int k, int l) {
            FN = l;
            FP = k;
            TN = j;
            TP = i;
        }
    }
}

