//
//  WalkTrainingNode.cpp
//  ChampionRun
//
//  Created by MHD Yamen Saraiji on 11/10/13.
//
//

#include "WalkTrainingNode.h"

#include <vector>


namespace cocos2d
{
    
    
    WalkTrainingNode::WalkTrainingNode()
    {
        // Topology: one (mean, std-dev) pair per sliding window plus the
        // global (mean, std-dev) pair as inputs; one output per training label.
        const unsigned int inputCount  = m_windows.size() * 2 + 2;
        const unsigned int hiddenCount = 10;
        const unsigned int outputCount = GetTrainingLabels().size();

        m_detectNet = new FANN::neural_net();
        m_detectNet->create_standard(3, inputCount, hiddenCount, outputCount);

        // Symmetric sigmoid everywhere: activations lie in [-1, 1].
        m_detectNet->set_activation_function_hidden(FANN::SIGMOID_SYMMETRIC);
        m_detectNet->set_activation_function_output(FANN::SIGMOID_SYMMETRIC);
        m_detectNet->set_training_algorithm(FANN::TRAIN_QUICKPROP);

        // Start in a clean idle state with randomized weights.
        RestartTraining();
    }
    WalkTrainingNode::~WalkTrainingNode()
    {
        // Free the collected training vectors, then the network itself
        // (the two are independent; neither references the other).
        _cleanData();
        delete m_detectNet;
    }
    
    void WalkTrainingNode::_cleanData()
    {
        for (int i=0; i<m_trainingSet.size(); ++i) {
            delete m_trainingSet[i];
        }
        m_trainingSet.clear();
    }
    
    void WalkTrainingNode::_updateHistogram()
    {
        WalkNodeBase::_updateHistogram();
        
        if(m_status==ESampling)
        {
            //add training vector
            m_trainingSet.push_back(CreateVector());
        }else if(m_status==ETraining)
        {
            m_detectNet->train_epoch(m_netData);
        }else if(m_status==EFinished)
        {
            TrainingVector *v= CreateVector();
            float* result=m_detectNet->run(&v->inputs[0]);
            float maxV=0;
            m_detectedCycle=0;
            for(int i=0;i<GetTrainingLabels().size();++i)
            {
                if(result[i]>maxV)
                {
                    maxV=result[i];
                    m_detectedCycle=i;
                }
            }
            delete v;
        }
    }
    
    // Builds one training sample from the current histogram statistics.
    // Input layout: [mean, stdDev, w0.mean, w0.dev, w1.mean, w1.dev, ...];
    // output is a one-hot vector with a 1 at the label being sampled.
    // Caller owns the returned pointer.
    WalkTrainingNode::TrainingVector* WalkTrainingNode::CreateVector()
    {
        TrainingVector* sample = new TrainingVector();
        sample->inputs.resize(m_detectNet->get_num_input());

        sample->inputs[0] = m_mean;
        sample->inputs[1] = m_stdDev;

        size_t slot = 2;
        for (size_t w = 0; w < m_windows.size(); ++w)
        {
            sample->inputs[slot++] = m_windows[w]->Mean();
            sample->inputs[slot++] = m_windows[w]->Dev();
        }

        sample->output.resize(GetTrainingLabels().size(), 0);
        sample->output[m_currentTraining] = 1;
        return sample;
    }
    
    // Packs the collected samples into the pointer tables FANN expects and
    // hands them to m_netData. FANN::training_data::set_train_data deep-copies
    // the values, so the tables only need to live for this call — the original
    // raw new[] arrays were never freed (leak); std::vector releases them on
    // return automatically.
    void WalkTrainingNode::_prepareNetwork()
    {
        const unsigned int sampleCount = m_trainingSet.size();
        const unsigned int inputCount  = m_detectNet->get_num_input();
        const unsigned int outputCount = m_detectNet->get_num_output();

        std::vector<float*> inputs(sampleCount);
        std::vector<float*> outputs(sampleCount);
        for (unsigned int i = 0; i < sampleCount; ++i)
        {
            inputs[i]  = &m_trainingSet[i]->inputs[0];
            outputs[i] = &m_trainingSet[i]->output[0];
        }

        // Guard &v[0] on an empty vector (undefined behavior).
        m_netData.set_train_data(sampleCount, inputCount,
                                 sampleCount ? &inputs[0] : 0,
                                 outputCount,
                                 sampleCount ? &outputs[0] : 0);
    }
    
    // Resets the node to a fresh, untrained state: drops all collected
    // samples, re-randomizes the network weights, and returns to EIdle.
    void WalkTrainingNode::RestartTraining()
    {
        _cleanData();
        m_detectNet->randomize_weights(0, 1);

        m_status             = EIdle;
        m_currentTraining    = 0;
        m_lastTrainingSample = 0;
        m_detectedCycle      = 0;
    }
    // Advances to the next training label. On the last label, sampling is
    // complete: the FANN data set is built and the node enters the training
    // phase. Returns true while more labels remain, false once training starts.
    bool WalkTrainingNode::NextTraining()
    {
        const bool onLastLabel =
            (m_currentTraining == GetTrainingLabels().size() - 1);

        if (onLastLabel)
        {
            _prepareNetwork();
            m_status = ETraining;
            return false;
        }

        // Move to the next label; remember where its samples begin so
        // GetCurrentSamples() can count only the new ones.
        m_currentTraining++;
        m_lastTrainingSample = m_trainingSet.size();
        m_status = EIdle;
        return true;
    }
    
    // Begins sampling for the current label. Only valid from the idle state;
    // any other state is a no-op.
    void WalkTrainingNode::StartTraining()
    {
        if (m_status == EIdle)
            m_status = ESampling;
    }
    // Switches to detection mode; from now on _updateHistogram classifies
    // each frame instead of collecting or training.
    void WalkTrainingNode::EndTraining()
    {
        m_status = EFinished;
    }
    int WalkTrainingNode::GetRemainingTraining()
    {
        return GetTrainingLabels().size()-m_currentTraining;
    }
    
    // Display text for the label currently being sampled.
    std::string WalkTrainingNode::GetCurrentTrainingLabel()
    {
        return GetTrainingLabels()[m_currentTraining];
    }
    
    // Mean squared error reported by FANN for the most recent epoch.
    float WalkTrainingNode::GetError()
    {
        return m_detectNet->get_MSE();
    }
    int WalkTrainingNode::GetCurrentSamples()
    {
        return m_trainingSet.size()-m_lastTrainingSample;
    }
}