/* libwatopt++ - Optimization Toolkit
 *
 * Copyright (c) 2007 Christopher Alexander Watford
 * <christopher.watford@gmail.com>
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to
 * deal in the Software without restriction, including without limitation the
 * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
 * sell copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
 * IN THE SOFTWARE.
 *
 * $Id: backprop_network.cpp 7 2014-07-21 20:29:08Z christopher.watford@gmail.com $
 */

#include <cmath>
#include <cstdlib>
#include <ctime>
#include <fstream>
#include <iostream>
#include <vector>

#include <backprop_network.hpp>

#include "utils.hpp"

namespace watopt
{

   /// Construct a fully-connected 3-layer network (inputs -> hidden -> outputs).
   ///
   /// @param inputs  number of input nodes
   /// @param hidden  number of hidden nodes
   /// @param outputs number of output nodes
   /// @param rate    learning rate applied to every weight/bias update
   ///
   /// Weight matrices are stored flat in row-major order (see HN()/ON());
   /// biases for hidden and output nodes share a single array of size
   /// hidden+outputs. The weights are left uninitialized here — callers must
   /// invoke InitializeWeights() before training.
   NN_BackPropagate::NN_BackPropagate(size_t inputs, size_t hidden, size_t outputs, double rate)
   : _m_InputNodes(inputs)
   , _m_HiddenNodes(hidden)
   , _m_OutputNodes(outputs)
   , _m_LearningRate(rate)
   , _m_HiddenWeights(new double[hidden * inputs])   // hidden x inputs matrix
   , _m_OutputWeights(new double[hidden * outputs])  // outputs x hidden matrix
   , _m_Bias(new double[hidden + outputs])           // hidden biases, then output biases
   , _m_HiddenDelta(new double[hidden * inputs])     // scratch: input->hidden weight deltas
   , _m_OutputDelta(new double[hidden * outputs])    // scratch: hidden->output weight deltas
   , _m_BiasDelta(new double[hidden + outputs])      // scratch: bias deltas
   , __hiddenOutput(new double[hidden])              // scratch: hidden activations
   , __output(new double[outputs])                   // scratch: output activations
   , __err(new double[hidden + outputs])             // scratch: error signals, hidden then output
   {
      // Only the RNG is prepared here; weights stay uninitialized until
      // InitializeWeights() runs.
      seed_random();
   }

   /// Release all heap storage owned by the network.
   ///
   /// Every member was allocated with new[] in the constructor, so each must
   /// be released with delete[]; the previous scalar `delete` on these array
   /// allocations was undefined behaviour.
   NN_BackPropagate::~NN_BackPropagate(void)
   {
	   delete [] _m_HiddenWeights;
	   delete [] _m_OutputWeights;
	   delete [] _m_Bias;
	   delete [] _m_HiddenDelta;
	   delete [] _m_OutputDelta;
	   delete [] _m_BiasDelta;
	   delete [] __hiddenOutput;
	   delete [] __output;
	   delete [] __err;
   }

   // Row-offset helpers into the flattened, row-major weight matrices
   // (only meaningful inside NN_BackPropagate member functions):
   //   HN(h) = start index of hidden node h's row in _m_HiddenWeights
   //           (one weight per input node);
   //   ON(o) = start index of output node o's row in _m_OutputWeights
   //           (one weight per hidden node).
   #define		HN(nn) ((nn) * _m_InputNodes)
   #define		ON(nn) ((nn) * _m_HiddenNodes)

   /// Seed every weight and bias with a fresh random value from rand01().
   ///
   /// Must be called once before training. The rand01() draw order is
   /// identical to the original (row weights first, then the node's bias),
   /// so a deterministic RNG yields the same initial network.
   void
   NN_BackPropagate::InitializeWeights(void)
   {
	   /* input -> hidden weights, plus one bias per hidden node */
	   for(size_t hh = 0; hh < _m_HiddenNodes; ++hh)
	   {
		   double *row = &_m_HiddenWeights[HN(hh)];
		   for(size_t ii = 0; ii < _m_InputNodes; ++ii)
			   row[ii] = rand01();

		   _m_Bias[hh] = rand01();
	   }

	   /* hidden -> output weights, plus one bias per output node */
	   for(size_t nn = 0; nn < _m_OutputNodes; ++nn)
	   {
		   double *row = &_m_OutputWeights[ON(nn)];
		   for(size_t hh = 0; hh < _m_HiddenNodes; ++hh)
			   row[hh] = rand01();

		   _m_Bias[_m_HiddenNodes + nn] = rand01();
	   }
   }

   /// Run one training pass over a whole set of input/target pairs.
   ///
   /// @param inputs       trainingSize rows of _m_InputNodes values, flattened
   /// @param outputs      trainingSize rows of _m_OutputNodes target values
   /// @param trainingSize number of (input, target) pairs
   /// @param randomize    if true, present the pairs in shuffled order
   void
   NN_BackPropagate::TrainWithSet(const double *inputs, const double *outputs, size_t trainingSize, bool randomize)
   {
      // Presentation order 0..trainingSize-1. A std::vector cleans itself up
      // (the previous raw new[] was released with scalar delete — undefined
      // behaviour for array allocations — and leaked if training threw).
      std::vector<size_t> firingOrder(trainingSize);
      for(size_t ii = 0; ii < trainingSize; ii++)
         firingOrder[ii] = ii;

      // randomize that order if requested (guard &v[0] on an empty set)
      if(randomize && trainingSize > 0)
         shuffle<size_t>(&firingOrder[0], trainingSize);

      // present each (input, target) pair in the chosen order
      for(size_t TT = 0; TT < trainingSize; TT++)
      {
         TrainWithValues(inputs + (firingOrder[TT] * _m_InputNodes),
                         outputs + (firingOrder[TT] * _m_OutputNodes));
      }
   }

   /// Feed-forward evaluation of the network.
   ///
   /// @param inputs       _m_InputNodes input values
   /// @param hiddenOutput [out] _m_HiddenNodes hidden-layer activations
   /// @param outputs      [out] _m_OutputNodes final activations
   ///
   /// Both layers use the logistic sigmoid 1/(1+e^-net). Accumulation order
   /// (weighted sum first, bias added last) matches the original exactly so
   /// floating-point results are bit-identical.
   void
   NN_BackPropagate::operator()(const double* inputs, double *hiddenOutput, double *outputs) const
   {
	   /* input layer -> hidden layer */
	   for(size_t hh = 0; hh < _m_HiddenNodes; ++hh)
	   {
		   double net = 0.0;
		   for(size_t ii = 0; ii < _m_InputNodes; ++ii)
			   net += _m_HiddenWeights[HN(hh) + ii] * inputs[ii];
		   net += _m_Bias[hh];

		   hiddenOutput[hh] = 1.0 / (1.0 + std::exp(-net));
	   }

	   /* hidden layer -> output layer */
	   for(size_t nn = 0; nn < _m_OutputNodes; ++nn)
	   {
		   double net = 0.0;
		   for(size_t hh = 0; hh < _m_HiddenNodes; ++hh)
			   net += _m_OutputWeights[ON(nn) + hh] * hiddenOutput[hh];
		   net += _m_Bias[_m_HiddenNodes + nn];

		   outputs[nn] = 1.0 / (1.0 + std::exp(-net));
	   }
   }

   /// One step of plain gradient-descent backpropagation on a single
   /// (input, target) pair.
   ///
   /// @param inputs  _m_InputNodes input values
   /// @param outputs _m_OutputNodes target values (NOT an output parameter)
   ///
   /// Order matters throughout: all error signals and deltas are computed
   /// from the CURRENT weights first, and only then are the weights updated.
   /// Scratch arrays: __err holds hidden-node error signals in [0, hidden)
   /// and output-node error signals in [hidden, hidden+outputs).
   void
   NN_BackPropagate::TrainWithValues(const double *inputs, const double *outputs)
   {
	   /* run the NN on the input values */
	   this->operator()(inputs, __hiddenOutput, __output);

	   /* calculate error signals */
	   /* output nodes: (target - o) * o * (1 - o), i.e. error scaled by the
	    * derivative of the logistic sigmoid */
	   for(size_t nn = 0; nn < _m_OutputNodes; nn++)
	   {
		   double oo = __output[nn];
		   __err[_m_HiddenNodes+nn] = (outputs[nn]-oo)*(oo)*(1.0 - oo);
	   }

	   /* hidden nodes: back-propagate each output error through the (still
	    * unmodified) hidden->output weights, then scale by the sigmoid
	    * derivative of the hidden activation */
	   for(size_t hh = 0; hh < _m_HiddenNodes; hh++)
	   {
		   __err[hh] = 0.0;
		   for(size_t nn = 0; nn < _m_OutputNodes; nn++)
		   {
			   __err[hh] += (__err[_m_HiddenNodes+nn]*_m_OutputWeights[ON(nn) + hh]);
		   }

		   double oo = __hiddenOutput[hh];
		   __err[hh] *= (oo) * (1.0 - oo);
	   }

	   /* calculate hidden deltas: rate * input * error signal */
	   for(size_t hh = 0; hh < _m_HiddenNodes; hh++)
	   {
		   for(size_t ii = 0; ii < _m_InputNodes; ii++)
		   {
			   _m_HiddenDelta[HN(hh) + ii] = _m_LearningRate * inputs[ii] * __err[hh];
		   }

		   _m_BiasDelta[hh] = _m_LearningRate * __err[hh];
	   }

	   /* calculate output deltas: rate * hidden activation * error signal */
	   for(size_t nn = 0; nn < _m_OutputNodes; nn++)
	   {
		   for(size_t hh = 0; hh < _m_HiddenNodes; hh++)
		   {
			   _m_OutputDelta[ON(nn) + hh]
				   = _m_LearningRate * __hiddenOutput[hh] * __err[_m_HiddenNodes+nn];
		   }

		   _m_BiasDelta[_m_HiddenNodes+nn] = _m_LearningRate * __err[_m_HiddenNodes+nn];
	   }

	   /* roll in new weights — only now are the weights mutated, so every
	    * delta above was computed from the pre-update network */
	   for(size_t hh = 0; hh < _m_HiddenNodes; hh++)
	   {
		   for(size_t ii = 0; ii < _m_InputNodes; ii++)
		   {
			   _m_HiddenWeights[HN(hh)+ii] += _m_HiddenDelta[HN(hh)+ii];
		   }

		   _m_Bias[hh] += _m_BiasDelta[hh];
	   }

	   for(size_t nn = 0; nn < _m_OutputNodes; nn++)
	   {
		   for(size_t hh = 0; hh < _m_HiddenNodes; hh++)
		   {
			   _m_OutputWeights[ON(nn)+hh] += _m_OutputDelta[ON(nn)+hh];
		   }

		   _m_Bias[_m_HiddenNodes+nn] += _m_BiasDelta[_m_HiddenNodes+nn];
	   }
   }

}
