﻿using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Diagnostics;

namespace StockLearner
{

    /// <summary>
    /// A three-layer (input / hidden / output) fully connected feed-forward
    /// neural network. Hidden neurons use a sigmoid activation, output
    /// neurons are linear (see Neuron.Activation). Each call to train()
    /// performs one online backpropagation step over a single sample.
    /// </summary>
    class NeuralNetwork
    {
        public Layer inputLayer, hiddenLayer, outputLayer;
        public Random rand = new Random();
        // Learning rate applied to every weight update in train().
        double rate;

        /// <summary>
        /// Builds the network. Layer adds one bias neuron to the input and
        /// hidden layers (bias == true); the output layer has none.
        /// </summary>
        /// <param name="inputs">Number of real inputs (excluding bias).</param>
        /// <param name="hiddens">Number of hidden neurons (excluding bias).</param>
        /// <param name="outputs">Number of output neurons.</param>
        /// <param name="learningRate">Step size for weight updates.</param>
        public NeuralNetwork(int inputs, int hiddens, int outputs, double learningRate)
        {
            inputLayer = new Layer(inputs, 0, true);
            hiddenLayer = new Layer(hiddens, 1, true);
            outputLayer = new Layer(outputs, 2, false);
            rate = learningRate;
            // Fully connect input -> hidden and hidden -> output.
            // NOTE(review): the hidden-layer bias neuron also receives input
            // connections here; this is harmless because Bias.passthrough()
            // is a no-op and Bias.output() always returns 1.
            foreach (Neuron inp in inputLayer)
                foreach (Neuron hid in hiddenLayer)
                    inp.addOutput(hid);
            foreach (Neuron hid in hiddenLayer)
                foreach (Neuron outp in outputLayer)
                    hid.addOutput(outp);
        }

        /// <summary>
        /// Runs one backpropagation step for a single (input, expected)
        /// sample and returns the summed signed output error.
        /// Hidden-layer weights are updated FIRST, using the pre-update
        /// output-layer weights, as standard backprop requires.
        /// </summary>
        /// <param name="ins">Input values; must match the input layer size.</param>
        /// <param name="expected">Target outputs; one per output neuron.</param>
        /// <returns>Sum of signed errors (expected - actual) over all outputs.</returns>
        /// <exception cref="ArgumentException">
        /// If expected.Count does not match the output layer size, or the
        /// forward pass rejects ins (see input()).
        /// </exception>
        public double train(List<double> ins, List<double> expected)
        {
            if (expected.Count != outputLayer.Count)
            {
                Trace.WriteLine("Received " + expected.Count + " targets, expected " + outputLayer.Count);
                throw new ArgumentException("Expected-output count does not match the output layer size.", nameof(expected));
            }
            double totalerror = 0;
            // Run the inputs through the net to populate sum/output_ fields.
            this.input(ins);
            // Update weights from input layer into hidden layer.
            foreach (Neuron node in hiddenLayer)
            {
                // Back-propagated error at this hidden node: output errors
                // weighted by the (still pre-update) hidden->output weights.
                // Output neurons are linear, so their activation derivative
                // is 1 and is omitted from the error term.
                double error = 0;
                for (int i = 0; i < expected.Count; i++)
                    error += node.outputs[i].weight * (expected[i] - node.outputs[i].output.output());
                // Gradient-descent update for each incoming weight:
                // delta = rate * error * f'(sum) * upstream activation.
                foreach (Connection con in node.inputs)
                {
                    con.weight += rate
                        * error
                        * Neuron.DerivActivation(node.sum, 1)
                        * con.input.output()
                        ;
                }
            }

            // Update weights from hidden layer into output layer.
            for (int i = 0; i < outputLayer.Count; i++)
            {
                double error = expected[i] - outputLayer[i].output();
                totalerror += error;
                foreach (Connection con in outputLayer[i].inputs)
                {
                    con.weight += rate
                        * error
                        * Neuron.DerivActivation(outputLayer[i].sum, 2)
                        * con.input.output()
                        ;
                }
            }
            return totalerror;
        }

        /// <summary>
        /// Forward pass: loads ins into the input layer (slot 0 is the bias
        /// neuron, so real inputs start at index 1), propagates through the
        /// hidden and output layers, and returns the output activations.
        /// </summary>
        /// <param name="ins">Input values; count must equal inputLayer.Count - 1.</param>
        /// <returns>One activation per output neuron, in layer order.</returns>
        /// <exception cref="ArgumentException">If ins has the wrong count.</exception>
        public List<double> input(List<double> ins)
        {
            if (ins.Count + 1 != inputLayer.Count)
            {
                Trace.WriteLine("Received " + ins.Count + " inputs, expected " + (inputLayer.Count - 1));
                throw new ArgumentException("Input count does not match the input layer size.", nameof(ins));
            }
            for (int i = 0; i < ins.Count; i++)
            {
                // Index 0 of the input layer is the bias neuron; skip it.
                inputLayer[i + 1].output_ = ins[i];
            }
            for (int i = 0; i < hiddenLayer.Count; i++)
            {
                hiddenLayer[i].passthrough();
            }
            List<double> output = new List<double>();
            for (int i = 0; i < outputLayer.Count; i++)
            {
                outputLayer[i].passthrough();
                output.Add(outputLayer[i].output_);
            }
            return output;
        }
    }

    /// <summary>
    /// A single network unit. Holds incoming and outgoing connections, the
    /// raw weighted input sum, and the cached activation (output_).
    /// The layer index selects the activation function: layer 2 (output
    /// layer) is linear, every other layer is sigmoid.
    /// </summary>
    class Neuron
    {
        public List<Connection> inputs = new List<Connection>();
        public List<Connection> outputs = new List<Connection>();
        public double sum;
        public double output_ = double.NaN;
        public int layer;

        /// <summary>Creates a neuron tagged with its layer index.</summary>
        public Neuron(int layer)
        {
            this.layer = layer;
        }

        /// <summary>
        /// Wires this neuron to a downstream target, registering the new
        /// connection on both endpoints.
        /// </summary>
        public void addOutput(Neuron target)
        {
            var link = new Connection(this, target);
            outputs.Add(link);
            target.inputs.Add(link);
        }

        /// <summary>
        /// Returns the cached activation. If passthrough() has never run
        /// (output_ is still NaN) this logs a note and falls back to 1.0
        /// instead of throwing — a deliberate best-effort default.
        /// </summary>
        public virtual double output()
        {
            if (!double.IsNaN(output_))
                return output_;
            Trace.WriteLine("out exception");
            return 1.0;
        }

        /// <summary>
        /// Forward step: accumulates the weighted upstream activations into
        /// sum, then caches the activated value in output_.
        /// </summary>
        public virtual void passthrough()
        {
            double total = 0;
            foreach (Connection link in inputs)
                total += link.input.output() * link.weight;
            sum = total;
            output_ = Neuron.Activation(sum, this.layer);
        }

        /// <summary>Activation: identity on the output layer (2), sigmoid elsewhere.</summary>
        public static double Activation(double d, int layer)
        {
            return layer == 2 ? d : Sigmoid(d);
        }

        /// <summary>Derivative of Activation with respect to its input.</summary>
        public static double DerivActivation(double d, int layer)
        {
            return layer == 2 ? 1 : DerivSigmoid(d);
        }

        /// <summary>Logistic sigmoid: 1 / (1 + e^-d).</summary>
        public static double Sigmoid(double d)
        {
            return 1 / (1 + Math.Exp(-d));
        }

        /// <summary>Sigmoid derivative expressed via the sigmoid value: s * (1 - s).</summary>
        public static double DerivSigmoid(double d)
        {
            double s = Neuron.Sigmoid(d);
            return s * (1 - s);
        }
    }

    /// <summary>
    /// A bias unit: always outputs the constant 1 and ignores the forward
    /// pass entirely (incoming connections, if any, have no effect).
    /// </summary>
    class Bias : Neuron
    {
        public Bias(int layer) : base(layer)
        {
        }

        /// <summary>Constant activation of 1.</summary>
        public override double output() => 1;

        /// <summary>No-op: a bias neuron computes nothing.</summary>
        public override void passthrough()
        {
        }
    }

    /// <summary>
    /// A layer of neurons, usable directly as a List&lt;Neuron&gt;.
    /// When bias is true, a Bias unit is placed at index 0 before the
    /// regular neurons, so real neurons start at index 1.
    /// </summary>
    class Layer : List<Neuron>
    {
        public int layer;

        /// <param name="size">Number of regular neurons (excluding bias).</param>
        /// <param name="layer">Layer index stamped onto every neuron.</param>
        /// <param name="bias">Whether to prepend a Bias unit.</param>
        public Layer(int size, int layer, bool bias)
        {
            this.layer = layer;
            if (bias)
            {
                Add(new Bias(layer));
            }
            for (int n = size; n > 0; n--)
            {
                Add(new Neuron(layer));
            }
        }
    }

    /// <summary>
    /// A weighted directed edge between two neurons.
    /// </summary>
    class Connection
    {
        // Shared RNG for weight initialization. The original code created a
        // new Random() inside the constructor; on .NET Framework, Random is
        // seeded from the tick count, so every connection constructed within
        // the same timer tick received an IDENTICAL initial weight — the
        // whole network started symmetric and could not learn properly.
        // A single static instance produces a distinct value per connection.
        // (Not thread-safe; construction here is single-threaded.)
        private static readonly Random s_rand = new Random();

        public Neuron input;
        public Neuron output;
        public double weight;

        /// <summary>
        /// Links inNeuron -> outNeuron with a small random initial weight
        /// uniformly distributed in [-0.5, 0.5).
        /// </summary>
        public Connection(Neuron inNeuron, Neuron outNeuron)
        {
            input = inNeuron;
            output = outNeuron;
            weight = s_rand.NextDouble() - 0.5;
        }
    }
}
