﻿using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using NeuralNetwork;
using NeuralNetwork.Layers;
using NeuralNetwork.Neurons;

namespace NeuralNetwork.Training
{
    /// <summary>
    /// Trains a <c>Network</c> using simulated annealing: weights are perturbed by an
    /// amount proportional to a "temperature" that cools geometrically from
    /// <see cref="Start"/> down to <see cref="Stop"/> over <see cref="Cycles"/> steps.
    /// Improvements are kept; regressions are reverted and retried at the current temperature.
    /// </summary>
    public class TrainingSimulatedAnnealing : Training 
    {
        // Backing fields. "cycles" is stored as a double because the cooling-ratio
        // formula consumes it in floating-point arithmetic.
        private double cycles = 0;
        private double start = 1.0;
        private double stop = 0.0;

        /// <summary>
        /// Creates a simulated-annealing trainer.
        /// </summary>
        /// <param name="start">Initial temperature; must be positive and greater than <paramref name="stop"/>.</param>
        /// <param name="stop">Final temperature; must be positive and less than <paramref name="start"/>.</param>
        /// <param name="cycles">Number of cooling steps; must be greater than 1.</param>
        /// <exception cref="ArgumentOutOfRangeException">Any argument violates the constraints above.</exception>
        public TrainingSimulatedAnnealing(double start, double stop, int cycles)                                                      
        {
            Cycles = cycles;

            // Seed both backing fields first so the cross-field checks in the
            // property setters (Start > Stop) see the final pair of values
            // regardless of assignment order, then validate via the setters.
            this.start = start;
            this.stop = stop;
            Start = start;
            Stop = stop;
        }

        /// <summary>
        /// Initial (highest) temperature. Must be positive and greater than <see cref="Stop"/>.
        /// Positivity is required because <c>Randomize</c> divides by this value and
        /// <c>Train</c> computes <c>Math.Log(Stop / Start)</c>.
        /// </summary>
        public double Start
        {
            get 
            { 
                return start; 
            }
            set 
            { 
                // BUG FIX: the original required value > 1, which rejected the
                // field's own default of 1.0. Any positive value above Stop is valid.
                if ((value > 0) && (value > stop))
                    start = value;  
                else
                    throw new ArgumentOutOfRangeException(nameof(value), "Start has to be positive and bigger than Stop!");
            }
        }

        /// <summary>
        /// Final (lowest) temperature. Must be positive and less than <see cref="Start"/>.
        /// Positivity is required so the cooling ratio <c>Math.Log(Stop / Start)</c> is finite.
        /// </summary>
        public double Stop
        {
            get 
            { 
                return stop; 
            }
            set
            {
                // BUG FIX: the original required value > 1, which rejected every
                // sensible annealing stop temperature (< 1) and made the class
                // unusable; the intended constraint is 0 < value < Start.
                if ((value > 0) && (start > value))
                    stop = value;
                else
                    throw new ArgumentOutOfRangeException(nameof(value), "Stop has to be less than Start and bigger than 0!");
            }
        }

        /// <summary>
        /// Number of cooling steps. Must be greater than 1 (the cooling-ratio
        /// formula divides by Cycles - 1).
        /// </summary>
        public int Cycles
        {
            get 
            { 
                return (int)cycles; 
            }
            set
            {
                if (value > 1)
                    cycles = (double)value;
                else
                    throw new ArgumentOutOfRangeException(nameof(value), "Cycles has to be bigger than 1!");
            }
        }
        
        /// <summary>
        /// Perturbs every axon weight in the network by a random delta scaled by the
        /// current temperature. When <paramref name="retry"/> is true, each axon is
        /// first reverted to its previous weight so the perturbation restarts from
        /// the last accepted configuration.
        /// </summary>
        /// <param name="network">Network whose axon weights are adjusted in place.</param>
        /// <param name="temp">Current annealing temperature; scales the perturbation.</param>
        /// <param name="retry">Revert each axon before perturbing it again.</param>
        private void Randomize(Network network, double temp, bool retry)
        {
            double delta;
            foreach (Layer layer in network.layers)
            {
                // foreach over an empty axon list is a no-op, so no Count guard is needed.
                foreach (Neuron neuron in layer.neurons)
                {
                    foreach (Axon axon in neuron.axons)
                    {
                        if (retry) axon.Revert();
                        // Delta is uniform in (-0.5, 0.5), scaled up by the current
                        // temperature and normalized by the starting temperature.
                        delta = temp * ((network.rand.NextDouble() - 0.5) / Start); 
                        axon.AdjustWeight(delta);
                    }
                }
            }
        }

        /// <summary>
        /// Runs the annealing loop until the average error drops below
        /// <paramref name="limit"/> or the temperature cools below <see cref="Stop"/>,
        /// then reverts the network to the best configuration seen.
        /// </summary>
        /// <param name="network">Network to train; modified in place.</param>
        /// <param name="LearningSet">Input/expected-output pairs used to score each configuration.</param>
        /// <param name="limit">Target average error; training stops once the best score falls below it.</param>
        public void Train(Network network,
                          List<Test> LearningSet,
                          double limit)
        {
            bool firstRun = true;
            double best = limit;
            double temp = Start;
            Test ls = null;
            double error = 0;

            // PERF FIX: the geometric cooling ratio depends only on Start, Stop and
            // Cycles, so compute it once instead of on every loop iteration.
            // ratio = (Stop/Start)^(1/(Cycles-1)) brings temp from Start to Stop in Cycles steps.
            double ratio = Math.Exp(Math.Log(Stop / Start) / (Cycles - 1));

            network.log.Print("Starting Training using Simulated Annealing", Log.LogLevel.LOG_INFO);
            while ((best >= limit) && (temp >= Stop))
            {
                network.log.Print("-----------------------------------------------------------", Log.LogLevel.LOG_INFO);
                network.log.Print("- Temperature : " + temp.ToString("F2").Trim(), Log.LogLevel.LOG_INFO);

                // Score the current configuration: mean absolute error over the learning set.
                error = 0;
                for (int i = 0; i < LearningSet.Count; i++)
                {
                    ls = LearningSet[i];
                    network.input.Observe(ls.input);
                    error += network.output.Error(ls.outputs);
                    network.Reset();
                }
                error = Math.Abs(error / (double)LearningSet.Count);
                network.log.Print("- Error : " + error.ToString("F3").Trim(), Log.LogLevel.LOG_INFO);

                // If this configuration improved on the best, keep it (Prune) and
                // perturb forward. Otherwise revert to the last accepted weights
                // and retry at the current temperature.
                if (best > error)
                {
                    network.log.Print(" => Adjust ", Log.LogLevel.LOG_INFO);

                    network.Prune();
                    Randomize(network, temp, false);
                    
                    best = error;
                }
                else 
                {
                    // On the very first pass "best" is still the caller's limit,
                    // not a real score, so adopt the first measured error.
                    if (firstRun)
                    {
                        best = error;
                        firstRun = false;
                    }
                    
                    network.log.Print(" => Retry (best = " + best.ToString("F3") + ")", Log.LogLevel.LOG_INFO);                    
                    Randomize(network, temp, true);
                }

                // Geometric cooling step.
                temp *= ratio;
            }

            // Revert to the best-scoring configuration seen during the run.
            network.log.Print("-----------------------------------------------------------", Log.LogLevel.LOG_INFO);
            network.log.Print("Reverting to best configuration (score : " + best.ToString("F3") + ")", Log.LogLevel.LOG_INFO);
            network.Revert();
            network.log.Print("Learning completed", Log.LogLevel.LOG_INFO);
        }

    }
}
