﻿using System;
using System.Collections.Generic;
using System.Configuration;
using System.Data.SqlClient;
using System.Diagnostics;
using System.Globalization;
using System.IO;
using System.Linq;
using System.Runtime.Serialization.Formatters.Binary;
using Elderos.AI;
using Elderos.Highlight;
using Elderos.Highlight.Search;
using Elderos.Ontology;
using Elderos.Utils.Logging;
using Encog.Engine.Network.Activation;
using Encog.ML.Data.Basic;
using Encog.MathUtil.Randomize;
using Encog.Neural.Networks;
using Encog.Neural.Networks.Layers;
using Encog.Neural.Networks.Training.Propagation.Resilient;

namespace AccordTrainer
{
    class Program
    {
        // Connection string to the ontology database, resolved once from App.config.
        private static readonly string OntologyString = ConfigurationManager.ConnectionStrings["Ontology"].ConnectionString;

        // NOTE(review): _rnd is not referenced anywhere in this file — candidate for removal.
        private static Random _rnd = new Random();

        /// <summary>
        /// Entry point. Repeatedly trains ranking networks, each round excluding the
        /// ranking rule that has been excluded the fewest times so far, and persists
        /// every trained network (with its quality metrics) to the database.
        /// Runs until the process is stopped.
        /// </summary>
        static void Main(string[] args)
        {
            var ontology = new OntologyData(OntologyString);
            ontology.Initialize();

            var searcher = new RabinKarpSearcher(ontology);
            searcher.Initialize();

            var ranker = new RuleRanker(ontology);
            var rules = ranker.RankingRules.ToArray();
            var ruleCounts = LoadExcludedRuleCounts(OntologyString);

            while (true)
            {
                RankingRule excludedRule = ChooseExcludedRule(rules, ruleCounts);

                // Guard against a null excludedRule (empty rule set): with no excluded
                // rule, keep every rule. The original lambda would have thrown an NRE.
                Predicate<RankingRule> ruleFilter =
                    x => excludedRule == null || x.Name != excludedRule.Name;
                string comment = "Excluded rule: " + (excludedRule == null ? "none" : excludedRule.Name);

                var trainItems = GetTrainItems(ontology, searcher, ruleFilter);

                // 75% of the article groups go to training, the rest to validation.
                int trainingSetSize = trainItems.Length * 3 / 4;
                var trainingSet = trainItems.Take(trainingSetSize).SelectMany(x => x).ToArray();
                var validationSet = trainItems.Skip(trainingSetSize).SelectMany(x => x).ToArray();

                Logger.Info(comment);

                INormalizer normalizer;
                double learningTime;
                BasicNetwork network = TrainNetwork(trainingSet, validationSet, out normalizer, out learningTime);

                // Step size of the threshold scan when measuring quality.
                double delta = 0.001;

                Logger.Info("Training set quality:");
                var trainingSetQuality = ComputeSetQuality(trainingSet, network, delta, normalizer);
                LogQuality(trainingSetQuality);

                Logger.Info("Validation set quality: ");
                var validationSetQuality = ComputeSetQuality(validationSet, network, delta, normalizer);
                LogQuality(validationSetQuality);

                var net = CreateNetworkDTO(network, normalizer, validationSetQuality, trainingSetQuality, comment, learningTime);

                SaveNetwork(ontology, net);
                Logger.Info("Network saved");

                // Record one more exclusion of this rule so the next iteration picks a
                // different (less frequently excluded) rule.
                if (excludedRule != null)
                {
                    int count;
                    if (ruleCounts.TryGetValue(excludedRule.Name, out count))
                        ruleCounts[excludedRule.Name] = count + 1;
                    else
                        ruleCounts.Add(excludedRule.Name, 1);
                }
            }
        }

        /// <summary>
        /// Picks the rule that has been excluded the fewest times so far (rules that
        /// were never excluded count as zero), or null when <paramref name="rules"/> is empty.
        /// </summary>
        /// <param name="rules">Candidate ranking rules.</param>
        /// <param name="ruleCounts">Map from rule name to number of past exclusions.</param>
        private static RankingRule ChooseExcludedRule(IEnumerable<RankingRule> rules, Dictionary<string, int> ruleCounts)
        {
            return rules
                .OrderBy(x =>
                             {
                                 // Single lookup instead of ContainsKey + indexer.
                                 int count;
                                 return ruleCounts.TryGetValue(x.Name, out count) ? count : 0;
                             })
                .FirstOrDefault();
        }

        /// <summary>
        /// Reads how many times each ranking rule has already been excluded, by counting
        /// previously saved networks whose Comment reads "Excluded rule: &lt;name&gt;".
        /// </summary>
        /// <param name="ontologyString">Connection string of the ontology database.</param>
        /// <returns>Map from rule name to the number of networks trained without that rule.</returns>
        private static Dictionary<string, int> LoadExcludedRuleCounts(string ontologyString)
        {
            // Must match the comment format written by Main.
            const string prefix = "Excluded rule: ";

            string query = @"select Comment, count(*)
from LastNetworks
where Comment like 'Excluded%'
group by Comment";

            var ruleCounts = new Dictionary<string, int>();

            using (var connection = new SqlConnection(ontologyString))
            using (var cmd = connection.CreateCommand())
            {
                cmd.CommandText = query;
                connection.Open();
                using (var reader = cmd.ExecuteReader())
                {
                    while (reader.Read())
                    {
                        string comment = reader.GetString(0);
                        int count = reader.GetInt32(1);

                        // The LIKE filter ('Excluded%') is broader than the exact prefix;
                        // without this guard a shorter comment would make Substring throw.
                        if (!comment.StartsWith(prefix, StringComparison.Ordinal)) continue;

                        string rulename = comment.Substring(prefix.Length);

                        // GROUP BY makes duplicates impossible, but keep first-wins semantics.
                        if (!ruleCounts.ContainsKey(rulename))
                            ruleCounts.Add(rulename, count);
                    }
                }
            }

            return ruleCounts;
        }

        /// <summary>
        /// Packs a trained network, its input normalizer and both quality measurements
        /// into a persistable <see cref="Network"/> DTO.
        /// </summary>
        private static Network CreateNetworkDTO(BasicNetwork network, INormalizer normalizer, Quality validationSetQuality, Quality trainingSetQuality, string comment, double learningTime)
        {
            // Network and normalizer are stored as opaque binary blobs.
            byte[] networkBlob = SerializerToByteArray(network);
            byte[] normalizerBlob = SerializerToByteArray(normalizer);

            return new Network
            {
                NetworkData = networkBlob,
                Precision = validationSetQuality.Precision,
                Recall = validationSetQuality.Recall,
                Threshold = validationSetQuality.Threshold,
                TrainingSetPrecision = trainingSetQuality.Precision,
                TrainingSetRecall = trainingSetQuality.Recall,
                TrainingSetThreshold = trainingSetQuality.Threshold,
                NormalizerData = normalizerBlob,
                Comment = comment,
                LearningTime = learningTime,
                CreationDate = DateTime.Now
            };
        }

        /// <summary>
        /// Writes the threshold, F-measure, precision and recall of a quality result to the log.
        /// </summary>
        private static void LogQuality(Quality quality)
        {
            Logger.Info(string.Format("Threshold: {0}", quality.Threshold));
            Logger.Info(string.Format("F-measure: {0}", quality.FMeasure()));
            Logger.Info(string.Format("Precision: {0}", quality.Precision));
            Logger.Info(string.Format("Recall: {0}", quality.Recall));
        }

        /// <summary>
        /// Builds the training corpus: runs the highlighter with the given rule filter,
        /// groups the resulting train items by article and shuffles the groups.
        /// Also dumps every factor value to a file for offline inspection.
        /// </summary>
        /// <param name="ruleFilter">Optional predicate selecting which ranking rules participate.</param>
        /// <returns>Randomly ordered groups of train items, one group per article.</returns>
        private static IGrouping<int, TrainItem<ItemInfo>>[] GetTrainItems(OntologyData ontology, ISearchStrategy searcher, Predicate<RankingRule> ruleFilter = null)
        {
            var ranker = new RuleRanker(ontology, ruleFilter);

            var highlighter = new Highlighter(
                new[] { searcher },
                new SearchResultAdapter(),
                ranker,
                new SimpleAmbiguityResolver(0),
                ontology);

            var trainingSetMgr = new TrainingSetManager
            {
                Highlighter = highlighter,
                DegreeOfParallelism = Environment.ProcessorCount * 2
            };

            var shuffleRandom = new Random();

            // Grouping by article keeps the later train/validation split from cutting
            // an article in half; shuffling the groups randomizes that split.
            var groupedItems = trainingSetMgr
                .GetTrainingSet()
                .Where(item => item != null)
                .GroupBy(item => item.AdditionalInfo.ArticleID)
                .OrderBy(group => shuffleRandom.Next())
                .ToArray();

            SaveFactorValues(groupedItems.SelectMany(group => group).ToArray(), trainingSetMgr.GetFactorNames());

            return groupedItems;
        }

        /// <summary>
        /// Serializes any [Serializable] object graph to a byte array.
        /// NOTE(review): BinaryFormatter is insecure on untrusted data and removed in
        /// modern .NET; consider migrating persisted blobs to a safer format.
        /// </summary>
        /// <param name="obj">Object to serialize; must be binary-serializable.</param>
        /// <returns>The serialized bytes.</returns>
        private static byte[] SerializerToByteArray(object obj)
        {
            var serializer = new BinaryFormatter();
            using (var ms = new MemoryStream())
            {
                serializer.Serialize(ms, obj);

                // MemoryStream.ToArray copies the whole buffer regardless of Position,
                // so the rewind the original code did here was redundant.
                return ms.ToArray();
            }
        }

        /// <summary>
        /// Dumps every train item as a tab-separated line (IDs, factor inputs, expected
        /// output) under a header row, for offline analysis of factor values.
        /// </summary>
        /// <param name="fullset">Items to dump; enumerated exactly once.</param>
        /// <param name="factorNames">Human-readable names of the input factors, in input order.</param>
        /// <param name="path">Destination file; overwritten if it exists. Defaults to the historical location.</param>
        private static void SaveFactorValues(IEnumerable<TrainItem<ItemInfo>> fullset, IEnumerable<string> factorNames, string path = "C:\\factors.txt")
        {
            var names = new List<string> { "ArticleID", "PositionID", "EntityID" };
            // Prefix each factor name with its input index to ease cross-referencing.
            names.AddRange(factorNames.Select((x, i) => i + "|" + x));
            names.Add("MustWin");

            using (var file = File.CreateText(path))
            {
                file.WriteLine(string.Join("\t", names));
                foreach (var trainItem in fullset)
                {
                    // InvariantCulture keeps the dump machine-readable regardless of locale.
                    var factors = new List<string>();
                    factors.Add(trainItem.AdditionalInfo.ArticleID.ToString(CultureInfo.InvariantCulture));
                    factors.Add(trainItem.AdditionalInfo.PositionID.ToString(CultureInfo.InvariantCulture));
                    factors.Add(trainItem.AdditionalInfo.EntityID.ToString(CultureInfo.InvariantCulture));
                    factors.AddRange(trainItem.Inputs.Select(x => x.ToString(CultureInfo.InvariantCulture)));
                    factors.Add(trainItem.Output.ToString(CultureInfo.InvariantCulture));
                    file.WriteLine(string.Join("\t", factors));
                }
            }
        }

        /// <summary>
        /// Runs the network over every item and scans for the decision threshold that
        /// maximizes quality on this set.
        /// </summary>
        /// <param name="trainItems">Items to evaluate; their RealWeight is overwritten as a side effect.</param>
        /// <param name="delta">Step size of the threshold scan.</param>
        /// <returns>The best quality found (precision/recall/threshold).</returns>
        private static Quality ComputeSetQuality(TrainItem<ItemInfo>[] trainItems, BasicNetwork network, double delta, INormalizer normalizer)
        {
            var qualityComputer = new GroupQualityComputer { LeftThreshold = -0.6, RightThreshold = 0.6 };

            // Side effect: stores each item's raw network output in AdditionalInfo.RealWeight.
            FillSetOutputs(network, trainItems, normalizer);

            // The dead "bestQuality = null" pre-initialization was removed; the result
            // comes straight from the parallel scan.
            var parallel = new ParallelQualityComputer { MaxDegreeOfParallelism = 8 };
            return parallel.GetBestQuality(trainItems, qualityComputer, delta);
        }

        /// <summary>
        /// Evaluates the network for every item and records the raw output (first and
        /// only output neuron) in the item's AdditionalInfo.RealWeight.
        /// </summary>
        private static void FillSetOutputs(BasicNetwork network, TrainItem<ItemInfo>[] trainItems, INormalizer normalizer)
        {
            for (int i = 0; i < trainItems.Length; i++)
            {
                var item = trainItems[i];
                var normalizedInputs = normalizer.Normalize(item.Inputs);
                var output = network.Compute(new BasicMLData(normalizedInputs));
                item.AdditionalInfo.RealWeight = output[0];
            }
        }

        /// <summary>
        /// Restores a previously saved network from the database, together with its
        /// decision threshold and input normalizer.
        /// </summary>
        /// <param name="networkID">Database key of the stored network.</param>
        /// <param name="threshold">Receives the stored decision threshold.</param>
        /// <param name="normalizer">Receives the deserialized input normalizer.</param>
        private static BasicNetwork LoadNetworkFromDB(int networkID, IOntologyData ontology, out double threshold, out INormalizer normalizer)
        {
            Network stored = ontology.GetNetwork(networkID);

            threshold = stored.Threshold;
            normalizer = Deserialize<Normalizer>(stored.NormalizerData);

            return Deserialize<BasicNetwork>(stored.NetworkData);
        }

        /// <summary>
        /// Deserializes a BinaryFormatter blob back into an instance of <typeparamref name="T"/>.
        /// NOTE(review): BinaryFormatter must only be fed trusted data (here: this
        /// application's own database blobs).
        /// </summary>
        /// <param name="data">Bytes previously produced by <see cref="SerializerToByteArray"/>.</param>
        private static T Deserialize<T>(byte[] data)
        {
            var serializer = new BinaryFormatter();
            // Wrap the buffer directly instead of copying it into an empty stream
            // and rewinding, as the original code did.
            using (var ms = new MemoryStream(data))
            {
                return (T) serializer.Deserialize(ms);
            }
        }

        /// <summary>Persists the trained network DTO through the ontology data layer.</summary>
        private static void SaveNetwork(OntologyData ontology, Network net)
        {
            ontology.InsertNetwork(net);
        }

        /// <summary>
        /// Trains a feed-forward network with resilient propagation, using F-measure on
        /// the validation set for early stopping and best-model snapshotting.
        /// </summary>
        /// <param name="trainItems">Items used for gradient updates (shuffled internally).</param>
        /// <param name="validationItems">Items used only to score candidate networks.</param>
        /// <param name="normalizer">Receives the input normalizer fitted on the training inputs.</param>
        /// <param name="learningTime">Receives the wall-clock training time in minutes.</param>
        /// <returns>
        /// The snapshot with the best validation F-measure, or null when training diverged
        /// (NaN error) or never produced a scored candidate.
        /// </returns>
        private static BasicNetwork TrainNetwork(TrainItem<ItemInfo>[] trainItems, TrainItem<ItemInfo>[] validationItems, out INormalizer normalizer, out double learningTime)
        {
            // Shuffle so training order does not follow article order.
            var random = new Random();
            trainItems = trainItems.OrderBy(x => random.Next()).ToArray();

            double[][] inputs = trainItems.Select(x => x.Inputs).ToArray();

            // Fit the normalizer on the raw training inputs, then normalize them.
            // (Copied to a local because out parameters cannot be used inside lambdas.)
            normalizer = new Normalizer(inputs);
            INormalizer normalizer1 = normalizer;
            inputs = inputs.Select(normalizer1.Normalize).ToArray();

            double[][] outputs = trainItems.Select(x => new[] { x.Output }).ToArray();

            int inputLength = inputs[0].Length;

            Logger.Info("Input length: " + inputLength);

            var network = CreateBasicNetwork(inputLength);

            Logger.Info("Training size: " + trainItems.Length);
            Logger.Info("Validation size: " + validationItems.Length);

            var trainingSet = new BasicMLDataSet(inputs.ToArray(), outputs.ToArray());

            var trainer = new ResilientPropagation(network, trainingSet);
            // presumably 0 lets Encog choose the worker thread count — TODO confirm
            trainer.ThreadCount = 0;

            // Validation is only scored once the training error drops below this moving
            // target; the target is tightened by errorDelta after every scoring.
            double initialError = 1;
            const double errorDelta = 0.0001;
            const int initialIterations = 100;

            // Best validation F-measure so far and the serialized network that achieved it.
            double fmeasure = 0;
            byte[] bestNetwork = null;

            // Consecutive validation checks without improvement.
            int fails = 0;

            // Iteration index of the last validation check (used to detect stalls).
            int previousI = 0;

            var timer = new Stopwatch();
            timer.Start();

            for (int i = 0; ; i++)
            {
                // Hard wall-clock cap on training.
                if (timer.Elapsed.TotalMinutes > 90) break;

                trainer.Iteration();

                Logger.Info("Iteration #" + i + "|Error: " + (trainer.Error*100).ToString("F3") + "%|Target: " + (initialError*100).ToString("F3") + "%");

                // Diverged — give up entirely (the snapshot check below also returns null).
                if (double.IsNaN(trainer.Error)) break;
                // Warm-up: never score or stop during the first iterations.
                if (i < initialIterations) continue;

                if (trainer.Error > initialError)
                {
                    // Error has not reached the target for too many iterations — assume a stall.
                    if (i - previousI > 35) break;
                    continue;
                }

                previousI = i;

                // Score the current network on the validation set.
                var quality = ComputeSetQuality(validationItems, network, 0.01, normalizer);
                LogQuality(quality);
                if (quality.FMeasure() > fmeasure)
                {
                    // New best — snapshot the network so later overfitting cannot lose it.
                    fails = 0;
                    bestNetwork = SerializerToByteArray(network);
                    fmeasure = quality.FMeasure();
                }
                else
                {
                    // Stop once quality has both stagnated and degraded noticeably.
                    fails++;
                    if (fails > 15 && fmeasure - quality.FMeasure() > 0.004)
                        break;
                }

                // Tighten the error target for the next validation check.
                initialError = trainer.Error - errorDelta;
                if (initialError <= 0) break;
            }

            trainer.FinishTraining();

            timer.Stop();

            learningTime = timer.Elapsed.TotalMinutes;
            Logger.Info("Learning time: " + learningTime);

            if (bestNetwork == null || double.IsNaN(trainer.Error)) return null;

            // Return the best snapshot, not the final (possibly overfitted) state.
            return Deserialize<BasicNetwork>(bestNetwork);
        }

        /// <summary>
        /// Builds a fully-connected network with three TANH hidden layers sized relative
        /// to the input length, a single TANH output neuron, and randomized weights.
        /// </summary>
        /// <param name="inputLength">Number of input neurons (one per factor).</param>
        private static BasicNetwork CreateBasicNetwork(int inputLength)
        {
            int firstHiddenSize = inputLength * 2 / 3;
            int secondHiddenSize = inputLength / 3;

            var network = new BasicNetwork();
            // Input layer has no activation function; every layer gets a bias neuron.
            network.AddLayer(new BasicLayer(null, true, inputLength));
            network.AddLayer(new BasicLayer(new ActivationTANH(), true, firstHiddenSize));
            network.AddLayer(new BasicLayer(new ActivationTANH(), true, secondHiddenSize));
            network.AddLayer(new BasicLayer(new ActivationTANH(), true, secondHiddenSize));
            network.AddLayer(new BasicLayer(new ActivationTANH(), true, 1));

            network.Structure.FinalizeStructure();
            network.Reset();

            // Re-randomize uniformly in [-1, 1] with a fresh seed on top of Reset().
            new ConsistentRandomizer(-1, 1, new Random().Next()).Randomize(network);

            return network;
        }

        /// <summary>
        /// Diagnostic sweep that logs a message for every NaN or infinite value found in
        /// the inputs or output of any train item. Logs only; does not modify the items.
        /// </summary>
        private static void CheckNan(IEnumerable<TrainItem<ItemInfo>> trainItems)
        {
            foreach (var trainItem in trainItems)
            {
                if (trainItem.Inputs.Any(double.IsNaN))
                    Logger.Info("NaN input occured.");
                if (double.IsNaN(trainItem.Output))
                    Logger.Info("NaN output occured.");
                // double.IsInfinity covers both positive and negative infinity.
                if (trainItem.Inputs.Any(double.IsInfinity))
                    Logger.Info("Infinite input occured.");
                if (double.IsInfinity(trainItem.Output))
                    Logger.Info("Infinite output occured.");
            }
        }
    }
}
