﻿using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;

namespace NeuralNetworkPrototype
{
    //INode
    //A node in the network graph: exposes its parent nodes and its current
    //output value. (Named Input because it serves as the input to child nodes.)
    public interface INode
    {
        //The nodes feeding into this one (empty for input-layer nodes).
        List<INode> Parents { get; }
        //The node's current output value.
        float Input { get; }
    }

    //Node
    //Provides the base implementation for a node that knows its parents.
    public abstract class BaseNode : INode
    {
        //Parent nodes feeding into this node; fixed at construction.
        readonly List<INode> _parents;

        //Exposed so derived classes and external objects can traverse the graph.
        public List<INode> Parents
        {
            get { return _parents; }
        }

        //Overridden by concrete nodes to produce their output value.
        public abstract float Input
        {
            get;
        }

        //parents must not be null - pass an empty list for a parentless node
        //(as InputNode does). Guarding here avoids a confusing NullReferenceException
        //later when Parents.Count is read during evaluation.
        public BaseNode(List<INode> parents)
        {
            if (parents == null)
                throw new ArgumentNullException("parents");
            _parents = parents;
        }
    }

    //IInputNode
    //External objects should be able to send input nodes input data.
    public interface IInputNode
    {
        //Stores t as this node's current value.
        //(Could use a setter - but this looks more obvious.)
        void SendInput(float t);
    }

    //InputNode
    //Stores a value pushed in via SendInput and serves it back through Input.
    public class InputNode : BaseNode, IInputNode
    {
        //Most recently received value; 0 until SendInput is first called.
        float _value;

        //Input nodes sit at the front of the network and have no parents,
        //so the base is constructed with an empty parent list.
        public InputNode() : base(new List<INode>()) { }

        //See IInputNode - records the value later returned by Input.
        public void SendInput(float t)
        {
            _value = t;
        }

        //The last value sent via SendInput.
        public override float Input
        {
            get { return _value; }
        }
    }

    //IActivationMethod
    //External objects should be able to calculate an activation output
    //from a node's raw input value.
    public interface IActivationMethod
    {
        //Maps the raw input to the node's output value.
        float Calculate(float input);
    }

    //SigmoidFunction
    //Logistic sigmoid activation: squashes any input into the range (0, 1).
    public class SigmoidFunction : IActivationMethod
    {
        //Computes 1 / (1 + e^-x).
        public float Calculate(float input)
        {
            float expNegative = (float)Math.Exp(-input);
            return 1f / (1f + expNegative);
        }
    }

    //SummationFunction
    //Identity activation: passes the weighted sum through unchanged
    //(used for the output layer).
    public class SummationFunction : IActivationMethod
    {
        //Returns the input as-is.
        public float Calculate(float input)
        {
            return input;
        }
    }

    //INeuralNode
    //A node with mutable weights (hidden/output nodes); mutators read and
    //write these weights directly.
    public interface INeuralNode
    {
        //One weight per parent node, in parent order.
        List<float> Weights { set; get; }
    }

    //NeuralNode
    //These nodes are either hidden nodes, or output nodes.
    //Has weights, bias and activationMethod for determining the output.
    //Input sums all parent inputs * weights and applies the activation with bias.
    public class NeuralNode : BaseNode, INeuralNode
    {
        //One weight per parent; externally replaceable through Weights.
        List<float> _weights;
        //Constant added to the weighted sum before activation.
        float _bias;
        //Function applied to (weighted sum + bias) to produce the output.
        IActivationMethod _activationMethod;

        //See INeuralNode - whole list is readable and replaceable.
        public List<float> Weights
        {
            get { return _weights; }
            set { _weights = value; }
        }

        //Weighted sum of parent outputs plus bias, passed through the
        //activation function. Assumes one weight exists per parent.
        public override float Input
        {
            get
            {
                float weightedSum = 0f;
                for (int p = 0; p < Parents.Count; p++)
                {
                    weightedSum += _weights[p] * Parents[p].Input;
                }
                return _activationMethod.Calculate(weightedSum + _bias);
            }
        }

        //Starts every weight at 1 and the bias at 0.1.
        public NeuralNode(IActivationMethod activationMethod, List<INode> parents)
            : base(parents)
        {
            _activationMethod = activationMethod;
            _weights = Enumerable.Repeat(1f, Parents.Count).ToList();
            _bias = 0.1f;
        }
    }

    ////INeuralNetFactory
    ////Allows external objects to create neural networks with data
    //public interface INeuralNetFactory
    //{
    //    INeuralNet Make(List<int> definition_structure);   //3,4,5,2 => 3 inputs, 4 hidden, 5 hidden, 2 outputs
    //}

    //NeuralNetworkFactory
    //Creates a fully-connected feed-forwards Neural Network.
    public class NeuralNetworkFactory// : INeuralNetFactory
    {
        //Make
        //Builds a network from a layer-size list, e.g. {3, 4, 5, 2} =>
        //3 inputs, hidden layers of 4 and 5, 2 outputs. Layer 0 is InputNodes;
        //the last layer uses SummationFunction; every layer in between uses
        //SigmoidFunction. Each node's parents are the entire previous layer.
        //NOTE(review): 'type' is currently unused - only feed-forward networks
        //are produced; the parameter is kept for call-site compatibility.
        public INeuralNet Make(List<int> structure, string type = "Feed Forwards")
        {
            if (structure == null)
                throw new ArgumentNullException("structure");

            List<List<INode>> nodes = new List<List<INode>>();      //all nodes, grouped by layer
            List<IInputNode> inputNodes = new List<IInputNode>();   //first layer, for sending input
            List<INode> outputNodes = new List<INode>();            //last layer, for reading output

            for (int i = 0; i < structure.Count; i++)               //for every layer
            {
                List<INode> layer = new List<INode>();              //make a new layer
                nodes.Add(layer);

                for (int j = 0; j < structure[i]; j++)
                {
                    if (i == 0)                                     //input node layer
                    {
                        InputNode inn = new InputNode();
                        layer.Add(inn);
                        inputNodes.Add(inn);
                    }
                    else                                            //hidden or output layer
                    {
                        //Output layer sums linearly; hidden layers squash with sigmoid.
                        bool isOutputLayer = (i == structure.Count - 1);
                        IActivationMethod activationMethod = isOutputLayer
                            ? (IActivationMethod)new SummationFunction()
                            : new SigmoidFunction();

                        //Parents are every node in the previous layer.
                        NeuralNode n = new NeuralNode(activationMethod, nodes[i - 1]);
                        layer.Add(n);
                        if (isOutputLayer)
                            outputNodes.Add(n);
                    }
                }
            }
            return new NeuralNet(inputNodes, outputNodes, nodes);
        }
    }


    //INeuralNetMutator
    //Allows external objects to mutate a neural network's weights.
    public interface INeuralNetMutator
    {
        //Nuke
        //For each weight in targetNN, has 'chance' probability of replacing it
        //with a random value within [min, max].
        void Nuke(float min, float max, double chance, INeuralNet targetNN);

        //CrossBreed
        //Mutates targetNN into the offspring of the two parent networks a and b.
        //Chance refers to the likelihood that a weight will be altered.
        void CrossBreed(INeuralNet a, INeuralNet b, double chance, INeuralNet targetNN);
    }

    //BaseNeuralNetworkMutator
    //Basic state needed for initialising a Mutator.
    public abstract class BaseNeuralNetworkMutator
    {
        //Shared randomness source for all mutation operations.
        protected Random _random = null;

        public BaseNeuralNetworkMutator()
        {
            _random = new Random();
        }
    }

    //NeuralNetMutator
    //Concrete implementation of Neural Net Mutator.
    public class NeuralNetMutator : BaseNeuralNetworkMutator, INeuralNetMutator
    {
        //CrossBreed
        //See interface. Walks every weight shared by all three networks and,
        //with probability 'chance', copies that weight into 'child' from either
        //'a' or 'b' (picked 50/50). Layer 0 (input nodes) is skipped because
        //input nodes carry no weights. Layer/node/weight counts are clamped to
        //the minimum across the three networks so mismatched topologies are safe.
        public void CrossBreed(INeuralNet a, INeuralNet b, double chance, INeuralNet child)
        {
            //Get min amount of layers
            int layerCount = Math.Min(Math.Min(a.Nodes.Count, b.Nodes.Count), child.Nodes.Count);

            for (int i = 1; i < layerCount; i++)    //skip input - for each layer
            {
                //Get min amount of nodes in layer i
                int nodeCount = Math.Min(b.Nodes[i].Count, Math.Min(a.Nodes[i].Count, child.Nodes[i].Count));

                for (int j = 0; j < nodeCount; j++) //for each node
                {
                    //Hoist the casts out of the weight loop - previously they were
                    //repeated per weight, which the original flagged as expensive.
                    INeuralNode childNode = child.Nodes[i][j] as INeuralNode;
                    INeuralNode aNode = a.Nodes[i][j] as INeuralNode;
                    INeuralNode bNode = b.Nodes[i][j] as INeuralNode;

                    //Get min amount of weights in node j
                    int weightCount = Math.Min(childNode.Weights.Count,
                                               Math.Min(aNode.Weights.Count, bNode.Weights.Count));

                    for (int k = 0; k < weightCount; k++)   //for each weight
                    {
                        if (_random.NextDouble() < chance)  //random chance
                        {
                            //Take the gene from either parent with equal probability.
                            INeuralNode donor = (_random.NextDouble() < 0.5) ? aNode : bNode;
                            childNode.Weights[k] = donor.Weights[k];
                        }
                    }
                }
            }
        }

        //Nuke
        //See interface. With probability 'chance' per weight, replaces the weight
        //with a uniformly random value in [min, max). Skips layer 0 - input nodes
        //have no weights.
        //BUG FIX: the previous version computed offset = min + (max - min)/2 and
        //added (half-range * random), so results only ever landed in the UPPER
        //half of the range [midpoint, max) - e.g. Nuke(-2, 2, ...) could never
        //produce a negative weight. It now spans the full requested range.
        public void Nuke(float min, float max, double chance, INeuralNet target)
        {
            //Get # of layers
            int layerCount = target.Nodes.Count;
            for (int i = 1; i < layerCount; i++)    //skip i = 0, input layer
            {
                //Get # of nodes
                int nodeCount = target.Nodes[i].Count;
                for (int j = 0; j < nodeCount; j++)
                {
                    //Get # of weights
                    INeuralNode nn = target.Nodes[i][j] as INeuralNode;
                    int weightCount = nn.Weights.Count;
                    for (int k = 0; k < weightCount; k++)
                    {
                        if (_random.NextDouble() < chance)
                        {
                            //Uniform over the whole [min, max) range.
                            nn.Weights[k] = min + (max - min) * (float)_random.NextDouble();
                        }
                    }
                }
            }
        }
    }

    //INeuralNet
    //External objects can send input to get output.
    public interface INeuralNet
    {
        //Sets the input layer's values, then evaluates and returns the outputs.
        List<float> SendInputGetOutput(List<float> _inputs);
        //Re-evaluates the outputs using whatever inputs were last sent.
        List<float> GetOldOutput();
        //INeuralNetMutator Mutator { set; get; }
        //All nodes in the network, grouped by layer (layer 0 = input layer).
        List<List<INode>> Nodes { get; }
    }

    //NeuralNet
    //Holds the input layer, output layer, and the full layered node list.
    //Implementation for pushing input values through and reading outputs back.
    public class NeuralNet : INeuralNet
    {
        List<IInputNode> _inputNodes = null;    //first layer - accepts values
        List<INode> _outputNodes = null;        //last layer - produces results
        List<List<INode>> _nodes = null;        //every node, grouped by layer

        //See INeuralNet.
        public List<List<INode>> Nodes
        {
            get { return _nodes; }
        }

        public NeuralNet(List<IInputNode> inputs, List<INode> outputs, List<List<INode>> nodes)
        {
            _inputNodes = inputs;
            _outputNodes = outputs;
            _nodes = nodes;
        }

        //GetOldOutput
        //Receives output from the output nodes' Input, with input values already assumed.
        public List<float> GetOldOutput()
        {
            return _outputNodes.Select(node => node.Input).ToList();
        }

        //SendInputGetOutput
        //Copies the given values into the input nodes (extra values on either
        //side are ignored), then evaluates and returns the output values.
        public List<float> SendInputGetOutput(List<float> inputs)
        {
            int count = Math.Min(_inputNodes.Count, inputs.Count);
            for (int i = 0; i < count; i++)
            {
                _inputNodes[i].SendInput(inputs[i]);
            }
            return GetOldOutput();
        }
    }

    //class Program
    //{
    //    static void Main(string[] args)
    //    {

    //        NeuralNetworkFactory fff = new NeuralNetworkFactory();

    //        INeuralNet NNa = fff.Make(new int[] { 3, 4, 4, 2 }.ToList());
    //        INeuralNet NNb = fff.Make(new int[] { 3, 4, 4, 2 }.ToList());
    //        INeuralNet NNab = fff.Make(new int[] { 3, 4, 4, 2 }.ToList());

    //        INeuralNetMutator Mutator = new NeuralNetMutator();

    //        //NNa.Mutator = new NeuralNetMutator();
    //        //NNb.Mutator = new NeuralNetMutator();
    //        //NNab.Mutator = new NeuralNetMutator();

    //        Mutator.Nuke(-2, 2, 1, NNa);
    //        Mutator.Nuke(-2, 2, 1, NNb);
    //        Mutator.Nuke(-2, 2, 1, NNab);
    //        Mutator.CrossBreed(NNa, NNb, 0.8, NNab);



    //        //List<float> o = NN.SendInputGetOutput(new float[] { 5f, -3f, 17f }.ToList());



    //        //for (int i = 0; i < 4000; i++ )
    //        //{
    //        //    Console.WriteLine(o[0]);
    //        //    Console.WriteLine(o[1]);
    //        //    NN.Mutator.Nuke(-2, 2, 0.1f);  //refactor
    //        //    o = NN.SendInputGetOutput(new float[] { 50f, -3f, 17f }.ToList());
    //        //}

    //        while (true) { }
    //    }
    //}
}
