using System;
using System.Collections.Generic;
using System.Globalization;
using System.IO;
using System.Linq;
using System.Text;
using System.Xml;
using GraphAlgorithms;

namespace MachineLearning {
    class NeuralNetwork {

        // Layers of neurons: index 0 is the input layer, the last index is the output layer.
        List<List<NeuralUnit>> _neurons = null;
        public List<List<NeuralUnit>> Neurons {
            get { return _neurons; }
        }

        Random _rand = null; // random number generator used to initialize weights

        double _eta = 0.03; // learning rate
        public double Eta {
            get { return _eta; }
            set { _eta = value; }
        }

        double _alpha = 0.01; // momentum coefficient
        public double Alpha {
            get { return _alpha; }
            set { _alpha = value; }
        }

        string _name = null;
        public string Name {
            get { return _name; }
            set { _name = value; }
        }

        SquashingFunction _squash = null; // activation ("squashing") function shared by all units

        int _last = 0; // index of the output layer in _neurons

        /// <summary>
        /// Builds a fully connected feed-forward network with randomly initialized weights.
        /// </summary>
        /// <param name="layers">layers.Length is the number of layers and layers[i] is the number
        /// of units in layer i; layer 0 is the input layer, the last entry is the output layer.</param>
        /// <param name="learning_rate">Gradient-descent step size (eta).</param>
        /// <param name="momentum">Momentum coefficient (alpha).</param>
        public NeuralNetwork(int[] layers, double learning_rate, double momentum) {
            _eta = learning_rate;
            _alpha = momentum;
            _squash = new Sigmoid();
            // random number generator for the starting weights
            _rand = new Random();
            // one list of units per layer
            _neurons = new List<List<NeuralUnit>>();
            for (int i = 0; i < layers.Length; i++) {
                _neurons.Add(new List<NeuralUnit>());
                for (int j = 0; j < layers[i]; j++)
                    _neurons[i].Add(new NeuralUnit(getRandomWeight(), _squash));
            }
            _last = _neurons.Count - 1;
            // input-layer units take a single input: their bias slot x0
            foreach (NeuralUnit p in _neurons[0]) {
                p.Inputs = new Input[1];
                p.Inputs[0] = p.XZero;
            }
            // every other unit takes one input per unit in the previous layer, plus the bias x0
            for (int i = 1; i < _neurons.Count; i++) {
                foreach (NeuralUnit p in _neurons[i]) {
                    p.Inputs = new Input[_neurons[i - 1].Count + 1];
                    p.Inputs[0] = p.XZero;
                }
            }
            // Wire the layers together: each unit's output feeds every unit in the next layer.
            // Each Input holds a shared reference to the producer's Oput, so updating a unit's
            // output is immediately visible to every consumer. Edge weights start random.
            // Inputs are filled starting at index 1 because index 0 is the bias input x0.
            for (int i = 0; i < _last; i++) {
                for (int j = 0; j < _neurons[i].Count; j++) { //producing layer
                    NeuralUnit p1 = _neurons[i][j];
                    foreach (NeuralUnit p2 in _neurons[i + 1]) { //consuming layer
                        p2.Inputs[j+1] = new Input(ref p1.Oput, getRandomWeight());
                    }
                }
            }
            // name the units "<layer letter><index>": 105 is 'i', so layer 0 -> i0, i1, ...;
            // layer 1 -> j0, j1, ...; and so on
            for (int i = 0; i < _neurons.Count; i++) {
                for (int j = 0; j < _neurons[i].Count; j++) {
                    _neurons[i][j].Name = ((char)(i + 105)).ToString() + j.ToString();
                    _neurons[i][j].Oput.Name = ((char)(i + 105)).ToString() + j.ToString();
                }
            }

        }

        /// <summary>
        /// Restores a network from an ANX XML file previously produced by saveNetwork.
        /// </summary>
        public NeuralNetwork(string fileName) {
            XmlDocument doc = new XmlDocument();
            // 'using' guarantees the file handle is released even if parsing throws
            using (StreamReader sr = new StreamReader(fileName)) {
                doc.LoadXml(sr.ReadToEnd());
            }
            instantiateAnnFromFile(doc);
        }

        /// <summary>
        /// Rebuilds the network (topology, weights, and wiring) from a parsed ANX document.
        /// Attribute order is positional and must match what saveNetwork emits.
        /// </summary>
        private void instantiateAnnFromFile(XmlDocument doc) {
            XmlNode node = null;
            // locate the <network> element and read the name, squashing function,
            // layer count, eta, and alpha from its attributes
            foreach (XmlNode n in doc.ChildNodes) {
                if (n.Name == "network") {
                    _name = n.Attributes[0].Value;
                    string squashFunction = n.Attributes[1].Value;
                    string layers = n.Attributes[2].Value;
                    string learningRate = n.Attributes[3].Value;
                    string learningMomentum = n.Attributes[4].Value;
                    // sigmoid is the default squashing function
                    _squash = new Sigmoid();
                    if (squashFunction == "tanh")
                        _squash = new Tanh();
                    // create the (still empty) layer lists
                    _neurons = new List<List<NeuralUnit>>();
                    int num = int.Parse(layers, CultureInfo.InvariantCulture);
                    for (int i = 0; i < num; i++)
                        _neurons.Add(new List<NeuralUnit>());
                    // invariant culture: the file format always uses '.' as the decimal separator
                    _eta = double.Parse(learningRate, CultureInfo.InvariantCulture);
                    _alpha = double.Parse(learningMomentum, CultureInfo.InvariantCulture);
                    node = n;
                    break;
                }
            }
            if (node == null)
                throw new InvalidOperationException("ANX file contains no <network> element");
            // parse each <layer>/<unit>: the input weights and the names of connected units
            int currentLayer = 0;
            foreach (XmlNode n in node.ChildNodes) {
                if (n.Name == "layer") {
                    foreach (XmlNode n2 in n.ChildNodes) {
                        if (n2.Name == "unit") {
                            // weights attribute is a comma-terminated list: "w0,w1,...,"
                            List<double> inputWeights = new List<double>();
                            string weights = n2.Attributes[1].Value;
                            while (weights.Contains(",")) {
                                int i = weights.IndexOf(",");
                                string weight = weights.Substring(0, i);
                                if (weight != ",")
                                    inputWeights.Add(double.Parse(weight, CultureInfo.InvariantCulture));
                                weights = weights.Remove(0, i + 1);
                            }
                            NeuralUnit nu = new NeuralUnit(_squash);
                            nu.Name = n2.Attributes[0].Value;
                            nu.Oput.Name = nu.Name;
                            nu.Inputs = new Input[inputWeights.Count];
                            // slot 0 is the bias input x0; restore its saved weight
                            nu.Inputs[0] = nu.XZero;
                            nu.Inputs[0].Weight = inputWeights[0];
                            for (int i = 1; i < inputWeights.Count; i++)
                                nu.Inputs[i] = new Input(inputWeights[i]);

                            // connected_units is a comma-terminated list of producer unit names
                            List<string> connectedUnits = new List<string>();
                            string units = n2.Attributes[2].Value;
                            while (units.Contains(",")) {
                                int i = units.IndexOf(",");
                                string name = units.Substring(0, i);
                                if (name != ",")
                                    connectedUnits.Add(name);
                                units = units.Remove(0, i + 1);
                            }
                            nu.ConnectedUnits = connectedUnits.ToArray();
                            _neurons[currentLayer].Add(nu);
                        }
                    }
                    currentLayer++;
                }
            }

            _last = _neurons.Count - 1;
            // Wire the units together: flatten the layers, then resolve each ConnectedUnits
            // name to the producing unit's output. The global name-based lookup (rather than
            // a previous-layer-only search) also supports networks with feedback connections.
            List<NeuralUnit> neurons = new List<NeuralUnit>();
            for (int i = 0; i < _neurons.Count; i++) {
                for (int j = 0; j < _neurons[i].Count; j++) {
                    neurons.Add(_neurons[i][j]);
                }
            }
            for (int i = 0; i < neurons.Count; i++) {
                for (int j = 0; j < neurons[i].ConnectedUnits.Length; j++) {
                    for (int k = 0; k < neurons.Count; k++) {
                        // input j+1 corresponds to connected unit j (input 0 is the bias x0)
                        if (neurons[k].Name == neurons[i].ConnectedUnits[j])
                            neurons[i].Inputs[j + 1].Oput = neurons[k].Oput;
                    }
                }
            }

        }


        /// <summary>
        /// Serializes the network to the ANX XML format, writes it to fileName,
        /// and returns the XML text that was written.
        /// </summary>
        public string saveNetwork(string fileName) {
            StringBuilder sb = new StringBuilder("<?xml version=\"1.0\" encoding=\"utf-8\"?>\r\n");
            sb.Append("<!-- (ANX) Artificial Neural Network Exchange Format -->\r\n");
            sb.Append("<network name=\"" + _name + "\" squashing_function=\"" + _squash.Name + "\"");
            // invariant culture keeps '.' as the decimal separator; the current culture could
            // emit ',' (e.g. de-DE), which would corrupt the comma-delimited weight lists
            sb.Append(" layers=\"" + _neurons.Count + "\" eta=\"" + _eta.ToString(CultureInfo.InvariantCulture)
                + "\" alpha=\"" + _alpha.ToString(CultureInfo.InvariantCulture) + "\" >\r\n");
            for (int i = 0; i < _neurons.Count; i++) {
                sb.Append("\t<layer name=\"" + i + "\" unit_count=\"" + _neurons[i].Count + "\">\r\n");
                for (int j = 0; j < _neurons[i].Count; j++) {
                    sb.Append("\t\t<unit name=\"" + _neurons[i][j].Name + "\" weights=\"");
                    // the bias weight (input 0) is included as the first entry
                    for (int k = 0; k < _neurons[i][j].Inputs.Length; k++) {
                        sb.Append(_neurons[i][j].Inputs[k].Weight.ToString(CultureInfo.InvariantCulture) + ",");
                    }
                    sb.Append("\" connected_units=\"");
                    // skip input 0: the bias has no producing unit
                    for (int k = 1; k < _neurons[i][j].Inputs.Length; k++) {
                        sb.Append(_neurons[i][j].Inputs[k].Oput.Name + ",");
                    }
                    sb.Append("\" />\r\n");
                }
                sb.Append("\t</layer>\r\n");
            }
            sb.Append("</network>");
            // 'using' guarantees the stream is flushed and closed even if the write throws
            using (StreamWriter sw = new StreamWriter(fileName)) {
                sw.WriteLine(sb.ToString());
            }
            return sb.ToString();
        }

        // Generate a random initial weight in [-0.0999, 0.0999]
        // (0.0001 * Next(-999, 1000); Random.Next's upper bound is exclusive).
        double getRandomWeight() {
            return 0.0001 * (double)_rand.Next(-999, 1000); //range may be wider than ideal
        }

        /// <summary>
        /// Feeds an input vector forward through the network and returns the output-layer
        /// activations. inputs.Length must match the number of units in the input layer.
        /// </summary>
        public double[] classify(double[] inputs) {
            // one output per output-layer unit
            double[] outputs = new double[_neurons[_last].Count];
            // drive the input layer directly with the supplied values
            for (int i = 0; i < inputs.Length; i++)
                _neurons[0][i].computeOutput(inputs[i]);
            // propagate layer by layer toward the output
            for (int i = 1; i < _neurons.Count; i++) {
                foreach (NeuralUnit p in _neurons[i])
                    p.computeOutput();
            }

            for (int i = 0; i < outputs.Length; i++)
                outputs[i] = _neurons[_last][i].Oput.Value;
            return outputs;
        }

        /// <summary>
        /// Runs one backpropagation step for a single (input, target) pair and returns
        /// the squared error of the forward pass made before the weight update.
        /// </summary>
        /// <param name="input">input vector; length must match the input layer</param>
        /// <param name="lesson">target output vector; length must match the output layer</param>
        public double train(double[] input, double[] lesson) {
            double[] result = classify(input);
            double deltaW = 0.0;
            // output-layer deltas: δ = o(1-o)(t-o)  (OputPrime supplies the o(1-o) factor)
            NeuralUnit opt = null;
            for (int i = 0; i < _neurons[_last].Count; i++) {
                opt = _neurons[_last][i];
                opt.Error = opt.OputPrime * (lesson[i] - result[i]);
            }
            // Hidden-layer deltas, walking backwards from the last hidden layer.
            // sumArray[k] accumulates ∑(w_h,k * δ) over the units of layer i+1 for unit k
            // of layer i, which keeps this pass O(n^3) instead of the naive O(n^4).
            double[] sumArray;
            for (int i = _neurons.Count - 2; i > 0; i--) {
                sumArray = new double[_neurons[i].Count];
                foreach (NeuralUnit nu in _neurons[i + 1]) {
                    // ∑_(k∈outputs)(w_h,k * δ_k); inputs start at 1 because slot 0 is the bias x0
                    for (int k = 0; k < nu.Inputs.Length - 1; k++) {
                        sumArray[k] += nu.Inputs[k + 1].Weight * nu.Error;
                    }
                }
                for (int j = 0; j < _neurons[i].Count; j++) {
                    // δ = o * (1-o) * ∑_(k∈outputs)(w_k * δ_k)
                    _neurons[i][j].Error = _neurons[i][j].OputPrime * sumArray[j];
                }
            }

            // weight update; the input layer has no trainable incoming edges
            for (int i = 1; i < _neurons.Count; i++) {
                foreach (NeuralUnit p in _neurons[i]) {
                    foreach (Input ipt in p.Inputs) {
                        // ∆w = ηδo + α∆w(n-1)
                        deltaW = _eta * p.Error * ipt.Oput.Value + _alpha * ipt.Momentum;
                        // w = w + ∆w 
                        ipt.Weight += deltaW;
                        ipt.Momentum = deltaW; // remember ∆w for the momentum term next step
                    }
                }
            }
            // squared error of the pre-update forward pass
            double squaredError = 0.0;
            for (int i = 0; i < result.Length; i++)
                squaredError += Math.Pow(result[i] - lesson[i], 2);
            return squaredError;
        }

        /// <summary>
        /// Classifies the input and returns the squared error against the target
        /// WITHOUT updating any weights (for validation / early stopping).
        /// </summary>
        public double validate(double[] input, double[] lesson) {
            double[] result = classify(input);
            //calculate squared error
            double squaredError = 0.0;
            for (int i = 0; i < result.Length; i++)
                squaredError += Math.Pow(result[i] - lesson[i], 2);
            return squaredError;
        }
        
    }
}
