﻿using System;
using System.Collections.Generic;
using System.Text;

namespace NeuralNetwork
{
    /// <summary>
    /// Singleton wrapper around <see cref="BackPropNeuralNet"/>: trains a
    /// fixed-topology (16-2-1) network with back-propagation toward an
    /// all-zero target, and evaluates a network from saved weights.
    /// </summary>
    public class BackProNeuralAlg
    {
        private static BackProNeuralAlg _Instance = null;

        // Guards lazy creation of the singleton. The original unsynchronized
        // check-then-create was racy: two threads could each observe null and
        // construct separate instances.
        private static readonly object _gate = new object();

        /// <summary>Returns the process-wide instance, creating it on first use.</summary>
        public static BackProNeuralAlg GetInstance()
        {
            if (_Instance == null)
            {
                lock (_gate)
                {
                    if (_Instance == null)
                        _Instance = new BackProNeuralAlg();
                }
            }
            return _Instance;
        }

        // Network topology: 16 inputs, 2 hidden neurons, 1 output.
        public static int numInput = 16;
        public static int numHidden = 2;
        public static int  numOutput = 1;

        float learnRate = 0.5f;  // learning rate - controls the magnitude of the increase in the change in weights.
        float momentum = 0.1f; // momentum - to discourage oscillation.

        /// <summary>
        /// Trains a fresh network on the input vector <paramref name="x"/>
        /// until the error against an all-zero target drops below the
        /// threshold or the epoch limit is reached.
        /// </summary>
        /// <param name="x">Input vector; presumably of length <see cref="numInput"/> — TODO confirm against BackPropNeuralNet.ComputeOutputs.</param>
        /// <returns>The trained network's flattened weights and biases.</returns>
        public float [] Training(float []x)
        {
            BackPropNeuralNet bnn = new BackPropNeuralNet(numInput, numHidden, numOutput);
            float[] yValues = new float[numOutput];

            // Target vector. A new float[] is already zero-initialized, so the
            // original explicit tValues.SetValue(0, 0) call was redundant and
            // has been removed (no behavior change).
            float[] tValues = new float[numOutput];

            int maxEpochs = 10000;
            float errorThresh = 0.00001f;
            Console.WriteLine("\nSetting max epochs = " + maxEpochs + " and error threshold = " + errorThresh.ToString("F6"));

            int epoch = 0;
            double error = double.MaxValue;
            Console.WriteLine("\nBeginning training using back-propagation\n");

            while (epoch < maxEpochs) // train
            {
                // Progress trace every 20 epochs.
                if (epoch % 20 == 0) Console.WriteLine("epoch = " + epoch);

                yValues = bnn.ComputeOutputs(x);
                error = Helpers.Error(tValues, yValues);
                if (error < errorThresh)
                {
                    Console.WriteLine("Found weights and bias values that meet the error criterion at epoch " + epoch);
                    break;
                }

                // BUG FIX: the original passed learnRate twice, so the declared
                // momentum field was never used and the learning rate doubled as
                // the momentum term. Pass momentum as the third argument.
                bnn.UpdateWeights(tValues, learnRate, momentum);
                ++epoch;
            } // train loop

            float[] finalWeights = bnn.GetWeights();
            return finalWeights;
        }

        /// <summary>
        /// Feeds <paramref name="x"/> forward through a network configured
        /// with previously trained weights.
        /// </summary>
        /// <param name="weighs">Flattened weights/biases, e.g. as returned by <see cref="Training"/>. (Parameter name kept as-is for named-argument compatibility.)</param>
        /// <param name="x">Input vector to evaluate.</param>
        /// <returns>The network's output values.</returns>
        public float[] Test(float[] weighs,float []x)
        {
            BackPropNeuralNet b = new BackPropNeuralNet(numInput,numHidden, numOutput);
            b.SetWeights(weighs);
            return b.ComputeOutputs(x);
        }
    }
}
