/**
 * @author Zvonimir Fras <zvonimir@frasbros.com>
 */

#include "./neuralnetwork.h"


/**
 * Constructs an empty network (no layers, no parent object for neurons)
 * with the default learning rate of 0.3.
 */
NeuralNetwork::NeuralNetwork( QObject *parent )
   : QObject( parent ),
     m_networkParent( 0 ),
     m_learningRate( 0.3 )
{
}

int NeuralNetwork::load(const QString& filename)
{
   if( m_tmpFile.isOpen() ) m_tmpFile.close();
   m_tmpFile.setFileName( filename );
   if( !m_tmpFile.open( QIODevice::ReadOnly ) ) return -1;
   
   QString line = m_readNextLine();
   if( line.isEmpty() ) return -1;
   
   QStringList params = line.split( QRegExp("\\s+"), QString::SkipEmptyParts );
   
   QList< int > structure;
   foreach( QString param, params )
   {
      bool ok = false;
      int cnt = param.toInt( &ok ); if( !ok ) return -2;
      structure.append( cnt );
   }
   
   // create network
   this->random( structure );
   
   // now set weights ( set w0s + set synapse weights )
   
   // set w0s
   foreach( QList< Neuron* > layer, m_layers )
   {
      line = m_readNextLine();
      if( line.isEmpty() ) return -2;
      
      params = line.split( QRegExp("\\s+"), QString::SkipEmptyParts );
      if( params.isEmpty() ) return -2;
      
      // integrity check
      if( params.size() != layer.size() ) return -2;
      
      for( int i=0;i<params.size() && i<layer.size();i++ )
      {
	 QString &param = params[i];
	 bool ok = false;
	 float w0 = param.toFloat( &ok ); if( !ok ) return -2;
	 layer[i]->m_w0 = w0;
      }
   }
   
   // set synapse weights
   for( int i=0;i<m_layers.size()-1;i++ )
   {
      foreach( Neuron* n, m_layers[i] )
      {
	 line = m_readNextLine();
	 params = line.split( QRegExp("\\s+"), QString::SkipEmptyParts );
	 if( params.size() != n->m_rightSynapses.size() ) return -2;
	 
	 for( int j=0;j<params.size();j++ )
	 {
	    bool ok = false;
	    float w = params[j].toFloat( &ok ); if( !ok ) return -2;
	    n->m_rightSynapses[j]->setWeight( w );
	 }
      }
   }
   
   return 0;
}



/**
 * Saves the network to @p filename in the format understood by load().
 *
 * @param overwrite when false, an existing file is left untouched.
 * @return 0 on success, 1 if the file exists and @p overwrite is false,
 *         -1 if the file could not be opened for writing.
 */
int NeuralNetwork::save(const QString& filename, bool overwrite)
{
   QFile file( filename );
   if( !overwrite && file.exists() ) return 1;
   
   if( !file.open( QIODevice::WriteOnly ) ) return -1;
   
   // write header
   // NOTE: file-format comments are written as plain (untranslated) text.
   // They were previously run through tr(), which could write localized
   // text into the data file. (Also fixes the "automaticly" typo.)
   file.write( "# File automatically generated\n\n" );
   
   
   // write network structure
   foreach( QList< Neuron* > layer, m_layers )
   {
      file.write( QString("%1 ").arg( layer.size() ).toUtf8() );
   }
   file.write( "# network structure, neuron count in layers\n\n" );
   
   
   // write w0s
   file.write( "# w0(s) layer by layer\n# each layer in separate line\n" );
   foreach( QList< Neuron* > layer, m_layers )
   {
      foreach( Neuron *n, layer )
      {
	 file.write( QString( "%1 " ).arg( n->m_w0 ).toUtf8() );
      }
      file.write( "\n" );
   }
   file.write( "\n" );
   
   
   // write synapse weights
   file.write( "# from 1st to last-1 layer\n#   foreach neuron in layer\n" );
   file.write( "#      synapse weights from neuron to neurons in next layer (each neuron separate line)\n" );
   for( int i = 0; i < m_layers.size()-1; i++ )
   {
      foreach( Neuron *n, m_layers[i] )
      {
	 foreach( Synapse *syn, n->m_rightSynapses )
	 {
	    file.write( QString("%1 ").arg( syn->weight() ).toUtf8() );
	 }
	 file.write( "\n" );
      }
      file.write( "\n" );
   }
   file.close();
   return 0;
}


/**
 * Destroys the current network: deleting m_networkParent also deletes
 * every Neuron created as its child (QObject parent/child ownership),
 * then the layer lists of (now dangling) pointers are dropped.
 */
void NeuralNetwork::clear()
{
   delete m_networkParent;
   m_networkParent = 0;
   m_layers.clear();
}



void NeuralNetwork::random(const QList< int >& structure)
{
   this->clear();
   m_networkParent = new QObject( this ); // new net, new parent
   
   // build layers
   foreach( int cnt, structure )
   {
      QList< Neuron* > layer;
      
      for( int i=0;i<cnt;i++ )
      {
	 layer.append( new Neuron( m_networkParent ) );
      }
      
      m_layers.append( layer );
   }
   
   
   // fully connect layers
   for( int i=0;i<m_layers.size()-1;i++ ) // first to last - 1 
   {
      QList< Neuron* > &tmpLayer = m_layers[i];
      QList< Neuron* > &nextLayer = m_layers[i+1];
      
      foreach( Neuron *n, tmpLayer )
      {
	 foreach( Neuron *m, nextLayer )
	 {
	    n->addSuccessor( m );
	 }
      }
   }
   
   
   // create a synapse list
   updateSynapseList();
}


// TODO optimize
QList< float > NeuralNetwork::output(const QList< float >& input)
{
   QList< float > retVal;
   if( input.size() != m_layers.first().size() ) return retVal;
   
   // force input and get ouput
   QList< Neuron* > &firstLayer = m_layers.first();
   int firstLayerSize = firstLayer.size();
   for( int i=0;i<firstLayerSize;i++ )
   {
      firstLayer[i]->directInput( input[i] );
   }
   
   QList< Neuron *> &lastLayer = m_layers.last();
   int lastLayerSize = lastLayer.size();
   for( int i=0;i<lastLayerSize;i++ )
   {
      retVal.append( lastLayer[i]->getOutput() );
   }
   
   return retVal;
}




/// Returns the synapse at position @p i of the flat synapse index
/// (see updateSynapseList()). No bounds checking is performed.
Synapse* NeuralNetwork::synapseByIndex(int i) const
{
   return m_synapseList.at( i );
}




/// Number of synapses in the flat synapse index.
int NeuralNetwork::synapseCount() const
{
   return m_synapseList.count(); // count() is equivalent to size() for QList
}





int NeuralNetwork::backpropagate(const QList< float >& output, const QList< float >& target)
{
   if( output.size() != target.size()
    || output.size() != m_layers.last().size()
    || target.size() != m_layers.last().size() ) return -1;
   
   QList< float > error;
   
   // calculate error
   for( int i=0;i<output.size();i++ )
   {
      error.append( output[i] * (1.0f - output[i]) * (target[i] - output[i]) );
   }
   
   return backpropagate( error );
}



/**
 * Core backpropagation step. Expects @p error to already contain the
 * output-layer deltas (one per output neuron); propagates the error
 * backwards through the hidden layers and adjusts synapse weights and
 * neuron biases (m_w0) using m_learningRate.
 *
 * @return 0 on success, -1 when the network is empty or @p error does not
 *         match the output-layer size.
 */
int NeuralNetwork::backpropagate(const QList< float >& error)
{
   if( m_layers.isEmpty() || error.size() != m_layers.last().size() ) return -1;
   
   // put error in last layer
   for( int i=0;i<m_layers.last().size();i++ )
   {
      m_layers.last()[i]->m_error = error[i];
   }
   
   // calculate error for all other layers
   // (i > 0: the input layer is deliberately skipped — its neurons are fed
   // via directInput(), so no error term is computed for them)
   for( int i=m_layers.size()-2;i>0;i-- )
   {
      foreach( Neuron *neuron, m_layers[i] )
      {
	 // sum( w * E ) — weighted error contribution of the next layer
	 float sum = 0.0f;
	 foreach( Synapse *syn, neuron->m_rightSynapses )
	 {
	    sum += syn->weight() * syn->rightNeuron()->m_error;
	 }
	 
	 // out*(1-out) matches the derivative form used in
	 // backpropagate(output, target) — presumably a logistic activation
	 // in Neuron; verify there.
	 float output = neuron->getOutput();
	 neuron->m_error = sum * output * (1 - output);
      }
   }
   
   // weight corrections:
   //   w += lr * (error of right neuron) * (output of left neuron)
   // NOTE(review): the loop stops before the last layer, so the w0 of the
   // output-layer neurons is never adjusted — in standard backprop the
   // output biases are updated too; confirm whether this is intentional.
   for( int i=0;i<m_layers.size()-1;i++ )
   {
      foreach( Neuron *neuron, m_layers[i] )
      {
	 foreach( Synapse *syn, neuron->m_rightSynapses )
	 {
	    syn->setWeight(
	       syn->weight() + m_learningRate * syn->rightNeuron()->m_error * neuron->getOutput()
	       );
	 }
	 // w0 corrections
	 // NOTE(review): for i == 0 this uses an m_error that is never
	 // recomputed in this pass (the error loop above skips layer 0) —
	 // harmless only if input neurons ignore w0; verify in Neuron.
	 neuron->m_w0 += m_learningRate * neuron->m_error;
      }
   }
   
   return 0;
}





float NeuralNetwork::totalError()
{
   if( m_layers.isEmpty() || m_layers.last().isEmpty() ) return -1;
   
   float sum = 0.0;
   foreach( Neuron *neuron, m_layers.last() ) sum += neuron->m_error * neuron->m_error;
   return sum / m_layers.last().size();
}




/**
 * Copy constructor: deep-copies @p ann by delegating to operator=().
 * Members are given defined values first, so the assignment operator
 * never reads indeterminate memory (m_networkParent and m_learningRate
 * were previously left uninitialized until operator= assigned them).
 */
NeuralNetwork::NeuralNetwork(const NeuralNetwork& ann) : QObject( 0 )
{
   m_networkParent = 0;
   m_learningRate = 0.3; // overwritten by operator=, but never indeterminate
   *this = ann;
}


/**
 * Deep copy assignment: clones the neurons layer by layer, rebuilds the
 * connectivity (fully connected layers assumed) and copies all synapse
 * weights via the flat synapse index.
 */
NeuralNetwork& NeuralNetwork::operator=(const NeuralNetwork& ann)
{
   // Guard against self-assignment: without it, m_layers.clear() below
   // would also empty ann.m_layers (same object) and the copy loops would
   // destroy the network instead of preserving it.
   if( this == &ann ) return *this;
   
   // create new network parent
   // NOTE(review): a previously existing m_networkParent is not deleted
   // here (pre-existing behavior); it stays parented to this object and is
   // only released by clear() or when the NeuralNetwork is destroyed.
   m_networkParent = new QObject( this );
   
   // copy neurons — parent them to m_networkParent (not `this`), so that
   // clear(), which deletes only m_networkParent, actually frees them
   QList< Neuron* > tmpLayer;
   m_layers.clear();
   for( int j = 0; j < ann.m_layers.size(); j++ )
   {
      const QList< Neuron* > &layer = ann.m_layers[j];
      tmpLayer.clear();
      foreach( Neuron *neuron, layer )
      {
	 Neuron *tmpNeuron = new Neuron( m_networkParent );
	 *tmpNeuron = *neuron;
	 tmpLayer.append( tmpNeuron );
      }
      m_layers.append( tmpLayer );
   }
   
   // copy and connect synapses
   // fully connected layers assumed
   for( int i = 0; i < m_layers.size()-1; i++ ) // first to last - 1
   {
      QList< Neuron* > &currentLayer = m_layers[i];
      QList< Neuron* > &nextLayer = m_layers[i+1];
      
      foreach( Neuron *n, currentLayer )
      {
	 foreach( Neuron *m, nextLayer )
	 {
	    n->addSuccessor( m );
	 }
      }
   }
   
   updateSynapseList();
   
   // apply weights: both networks were built identically, so their flat
   // synapse indices correspond one-to-one
   for( int i = 0; i < ann.synapseCount(); i++ )
   {
      this->synapseByIndex(i)->setWeight( ann.synapseByIndex(i)->weight() );
   }

   m_learningRate = ann.m_learningRate;
   return *this;
}







/// Neuron count of the output (last) layer; 0 for an empty network.
int NeuralNetwork::outputSize()
{
   return m_layers.isEmpty() ? 0 : m_layers.last().size();
}

/// Neuron count of the input (first) layer; 0 for an empty network.
int NeuralNetwork::inputSize()
{
   return m_layers.isEmpty() ? 0 : m_layers.first().size();
}

/// Sets the learning rate used by backpropagate() (constructor default: 0.3).
void NeuralNetwork::setLearningRate(float lr)
{
   m_learningRate = lr;
}

/// Current learning rate used by backpropagate().
float NeuralNetwork::learningRate()
{
   return m_learningRate;
}



void NeuralNetwork::updateSynapseList()
{
   m_synapseList.clear();
   for( int j = 0; j < m_layers.size(); j++ )
   {
      const QList< Neuron* > &layer = m_layers[j];
      
      foreach( Neuron *neuron, layer )
      {
	 foreach( Synapse *synapse, neuron->m_leftSynapses )
	 {
	    m_synapseList.append( synapse );
	 }
      }
   }
}


/// Returns the neuron count of each layer, first to last
/// (same format random() and load() consume).
QList< int > NeuralNetwork::networkStructure()
{
   QList< int > layerSizes;
   for( int i = 0; i < m_layers.size(); i++ )
      layerSizes.append( m_layers[i].size() );
   return layerSizes;
}





/**
 * Reads the next meaningful line from m_tmpFile: leading blanks are
 * stripped, everything from the first '#' on is discarded, and lines
 * that end up empty are skipped. Returns a null QString when the file
 * is not open or the end of file is reached.
 */
QString NeuralNetwork::m_readNextLine()
{
   if( !m_tmpFile.isOpen() ) return QString();
   
   while( !m_tmpFile.atEnd() )
   {
      QString line = QString::fromUtf8( m_tmpFile.readLine().data() );
      if( line.isEmpty() ) continue;
      
      // strip leading blanks (spaces, tabs, newlines)
      int pos = 0;
      while( pos < line.size()
	  && ( line[pos] == ' ' || line[pos] == '\t' || line[pos] == '\n' ) ) pos++;
      line.remove( 0, pos );
      
      // drop everything from the first '#' on (comment)
      int hash = line.indexOf( '#' );
      if( hash >= 0 ) line.truncate( hash );
      
      if( !line.isEmpty() ) return line;
   }
   
   return QString();
}
