﻿using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;

namespace neuro
{
  public class NN
  {
    /// <summary>
    /// A weighted directed edge between two nodes in adjacent layers.
    /// </summary>
    private class Connection
    {
      /// <summary>Multiplier applied to the source node's value during propagation.</summary>
      public double Weight { get; set; }

      /// <summary>
      /// Whether this connection participates in evaluation.
      /// NOTE(review): the NN constructor sets this from the 'enables' list, but
      /// Node.Update never reads it — confirm whether disabled connections are
      /// meant to be skipped.
      /// </summary>
      public bool Enabled { get; set; }

      /// <summary>Source node (in the earlier layer).</summary>
      public Node From { get; set; }

      /// <summary>Destination node (in the later layer).</summary>
      public Node To { get; set; }
    }

    /// <summary>
    /// A single neuron: holds its current activation value and links to the
    /// connections feeding into and out of it.
    /// </summary>
    private class Node
    {
      public Node()
      {
        Inbound = new List<Connection>();
        Outbound = new List<Connection>();
      }

      /// <summary>Current activation value of this node.</summary>
      public double Value { get; set; }

      /// <summary>Connections arriving from the previous layer.</summary>
      public IList<Connection> Inbound { get; set; }

      /// <summary>Connections leaving toward the next layer.</summary>
      public IList<Connection> Outbound { get; set; }

      /// <summary>
      /// Recomputes Value as the squashed weighted sum of enabled inbound
      /// connections. Fix: previously every inbound connection contributed
      /// even when its Enabled flag was false, making the constructor's
      /// 'enables' parameter a no-op (defaults are all-true, so default
      /// behavior is unchanged).
      /// </summary>
      public void Update()
      {
        double sum = Inbound
          .Where(connection => connection.Enabled)
          .Sum(connection => connection.From.Value * connection.Weight);
        Value = Sigmoid(sum);
      }

      /// <summary>
      /// Squashing function mapping R onto (-1, 1). Note this is a *decreasing*
      /// logistic variant (equal to -tanh(d/2)) because the exponent is +d;
      /// kept as-is since trained weights depend on it. Math.Exp replaces the
      /// equivalent but slower Math.Pow(Math.E, d).
      /// </summary>
      private static double Sigmoid(double d)
      {
        return -1 + 2 / (1 + Math.Exp(d));
      }
    }

    /// <summary>One layer of the network: an ordered list of nodes.</summary>
    private class Layer
    {
      public Layer()
      {
        Nodes = new List<Node>();
      }

      /// <summary>Nodes in this layer, in creation order.</summary>
      public IList<Node> Nodes { get; set; }
    }

    // Layers[0] is the input layer; Layers[Count - 1] is the output layer.
    private IList<Layer> Layers;

    /// <summary>
    /// Number of connections in a fully-connected feed-forward network with
    /// the given layer sizes: the sum of products of adjacent layer sizes.
    /// Returns 0 when fewer than two layers are given.
    /// </summary>
    public static int GetNumConnections(IList<int> layerSizes)
    {
      int pairCount = Math.Max(0, layerSizes.Count - 1);
      return Enumerable.Range(0, pairCount)
        .Sum(index => layerSizes[index] * layerSizes[index + 1]);
    }

    // Source of randomness for initial weights. Fix: made private (the field
    // had no access modifier) and readonly (it is only set at initialization).
    private readonly Random random = new Random();

    /// <summary>Lower bound (inclusive) for randomly initialized weights.</summary>
    public static double WeightMin = -5;

    /// <summary>Upper bound (exclusive) for randomly initialized weights.</summary>
    public static double WeightMax = 5;

    /// <summary>
    /// Builds a fully-connected feed-forward network.
    /// </summary>
    /// <param name="layerSizes">Node count per layer; must contain at least an input and an output layer.</param>
    /// <param name="weights">Optional connection weights, consumed in (layer, from-node, to-node) order; random in [WeightMin, WeightMax) when null.</param>
    /// <param name="enables">Optional per-connection enable flags, same order; all true when null.</param>
    public NN(IList<int> layerSizes, IList<double> weights = null, IList<bool> enables = null)
    {
      System.Diagnostics.Debug.Assert(layerSizes.Count >= 2);

      int numConnections = GetNumConnections(layerSizes);
      if (weights == null)
      {
        weights = new List<double>();
        for (int i = 0; i < numConnections; ++i)
        {
          weights.Add(WeightMin + random.NextDouble() * (WeightMax - WeightMin));
        }
      }
      if (enables == null)
      {
        enables = Enumerable.Repeat(true, numConnections).ToList();
      }
      // Fix: caller-supplied lists must cover every connection; previously a
      // short list failed later with an out-of-range exception halfway through
      // wiring, leaving the network partially constructed.
      System.Diagnostics.Debug.Assert(weights.Count >= numConnections);
      System.Diagnostics.Debug.Assert(enables.Count >= numConnections);

      int weighti = 0;
      int enablei = 0;

      // Create all nodes, layer by layer.
      Layers = new List<Layer>();
      for (int i = 0; i < layerSizes.Count; ++i)
      {
        Layer layer = new Layer();
        for (int j = 0; j < layerSizes[i]; ++j)
        {
          layer.Nodes.Add(new Node() { Value = 0 });
        }
        Layers.Add(layer);
      }

      // Fully connect each layer to the next, consuming weights/enables in order.
      for (int i = 0; i < Layers.Count - 1; ++i)
      {
        foreach (Node from in Layers[i].Nodes)
        {
          foreach (Node to in Layers[i + 1].Nodes)
          {
            Connection connection = new Connection()
            {
              Enabled = enables[enablei++],
              Weight = weights[weighti++],
              From = from,
              To = to,
            };
            from.Outbound.Add(connection);
            to.Inbound.Add(connection);
          }
        }
      }
    }

    /// <summary>
    /// Feeds <paramref name="input"/> forward through the network and returns
    /// the values of the output layer.
    /// </summary>
    public IList<double> Eval(IList<double> input)
    {
      System.Diagnostics.Debug.Assert(input.Count == Layers[0].Nodes.Count);

      // Load the input layer directly; input nodes are not squashed.
      IList<Node> inputNodes = Layers[0].Nodes;
      for (int index = 0; index < input.Count; ++index)
      {
        inputNodes[index].Value = input[index];
      }

      // Propagate forward, one layer at a time, starting after the input layer.
      for (int layerIndex = 1; layerIndex < Layers.Count; ++layerIndex)
      {
        foreach (Node node in Layers[layerIndex].Nodes)
        {
          node.Update();
        }
      }

      return Layers[Layers.Count - 1].Nodes.Select(node => node.Value).ToList();
    }

    /// <summary>One supervised example: an input vector and its expected output.</summary>
    public class TrainingData
    {
      /// <summary>Values for the input layer; length should match the first layer size.</summary>
      public IList<double> Input { get; set; }

      /// <summary>Expected values for the output layer; length should match the last layer size.</summary>
      public IList<double> Output { get; set; }
    }

    /// <summary>
    /// Total L1 error over a training set: the sum, across all samples and
    /// output nodes, of the absolute difference between the network's output
    /// and the expected output.
    /// </summary>
    public double GetError(IList<TrainingData> trainingData)
    {
      double total = 0;
      foreach (TrainingData sample in trainingData)
      {
        IList<double> actual = Eval(sample.Input);
        total += actual
          .Select((value, index) => Math.Abs(value - sample.Output[index]))
          .Sum();
      }
      return total;
    }

    /// <summary>
    /// Unpacks a GA list-value into a flat list of connection weights.
    /// Fix: direct casts replace unchecked 'as' casts, so a phenome with an
    /// unexpected shape throws InvalidCastException at the cast site instead
    /// of a NullReferenceException on the following member access.
    /// </summary>
    private static IList<double> ConvertWeights(GA.IValue value)
    {
      var listValue = (GA.Value<IList<GA.IValue>>)value;
      return listValue.Val.Select(val => ((GA.Value<double>)val).Val).ToList();
    }

    /// <summary>
    /// Materializes a network from a GA phenome whose first value holds the
    /// flattened weight list; all connections are left enabled.
    /// </summary>
    private static NN ConvertNN(IList<int> layerSizes, GA.Phenome phenome)
    {
      IList<double> weights = ConvertWeights(phenome.Values[0]);
      return new NN(layerSizes, weights);
    }

    /// <summary>
    /// GA fitness function: the negated total error, so lower error means
    /// higher fitness. Fix: removed an unused local that called
    /// ConvertWeights, decoding the phenome a second time for nothing
    /// (ConvertNN already decodes it).
    /// </summary>
    private static double TrainEval(IList<TrainingData> trainingData, IList<int> layerSizes, GA.Phenome phenome)
    {
      NN nn = ConvertNN(layerSizes, phenome);
      return -nn.GetError(trainingData);
    }

    /// <summary>
    /// Trains a network with a genetic algorithm over the connection weights.
    /// </summary>
    /// <param name="trainingData">Supervised examples; the first sample fixes the input/output layer sizes.</param>
    /// <param name="midLayers">Sizes of the hidden layers, in order.</param>
    /// <returns>The network decoded from the phenome the GA converges on.</returns>
    public static NN Train(IList<TrainingData> trainingData, IList<int> midLayers)
    {
      // Layer layout: input size, then the hidden layers, then output size.
      IList<int> layerSizes = new List<int>();
      layerSizes.Add(trainingData[0].Input.Count);
      foreach (int midLayer in midLayers)
      {
        layerSizes.Add(midLayer);
      }
      layerSizes.Add(trainingData[0].Output.Count);
      int numConnections = GetNumConnections(layerSizes);

      GA ga = new GA()
      {
        CrossProb = 0.5,
        MutateProb = 0.001,
        NumGens = 100,
        PopSize = 1000,
        Selection = GA.SelectionType.TOURNAMENT,
        ParamList = new List<GA.IParam>()
        {
          new GA.ListParam()
          {
            // Fix: build a distinct DoubleParam per connection.
            // Enumerable.Repeat shared ONE instance across every slot, so any
            // mutation of a param by the GA would silently alias all of them.
            Params = Enumerable.Range(0, numConnections)
              .Select(i => (GA.IParam)new GA.DoubleParam()
              {
                Min = WeightMin,
                Max = WeightMax,
                Step = 0.01,
              })
              .ToList(),
          },
        },
        Eval = phenome => TrainEval(trainingData, layerSizes, phenome),
      };

      return ConvertNN(layerSizes, ga.Run());
    }
  }
}
