#include "QMLPNet.h"
#include "Sigmod.h"
#include <iostream>

QMLPNet::QMLPNet(int _ninput, int _nhidden, int _noutput, std::vector<int> _hiddenNum, double _lrate)
{
    // Record topology and learning parameters.
    ninput = _ninput;
    nhidden = _nhidden;
    noutput = _noutput;
    hiddenNum = _hiddenNum;
    learnRate = _lrate;

    // Layers 0.._nhidden-1 are hidden layers sized by _hiddenNum;
    // the final layer is the output layer with _noutput units.
    net.resize(_nhidden + 1);
    for (int layer = 0; layer < _nhidden; ++layer)
    {
        net[layer].resize(_hiddenNum[layer]);
    }
    net[_nhidden].resize(_noutput);

    // One transfer function instance shared by every perceptron.
    tFunction = new Sigmod();

    // Create the perceptrons layer by layer and wire each new unit
    // to every unit of the layer below it.
    for (std::size_t layer = 0; layer < net.size(); ++layer)
    {
        // The first layer reads the raw inputs; deeper layers read the
        // outputs of the previous layer. Loop-invariant, so hoisted.
        int fanIn = (layer == 0) ? _ninput : static_cast<int>(net[layer - 1].size());
        for (std::size_t unit = 0; unit < net[layer].size(); ++unit)
        {
            QPerceptron * neuron = new QPerceptron(tFunction, fanIn, learnRate, static_cast<int>(unit));
            net[layer][unit] = neuron;
            if (layer > 0)
            {
                for (std::size_t prev = 0; prev < net[layer - 1].size(); ++prev)
                {
                    net[layer - 1][prev]->Link(neuron);
                }
            }
        }
    }
}

QMLPNet::~QMLPNet()
{
    // Release the perceptrons BEFORE the shared transfer function:
    // every QPerceptron was constructed with a pointer to tFunction,
    // so deleting tFunction first would leave each perceptron holding
    // a dangling pointer while it is being destroyed. (Original code
    // deleted tFunction first.)
    for (std::size_t i = 0; i < net.size(); ++i)
    {
        for (std::size_t j = 0; j < net[i].size(); ++j)
        {
            delete net[i][j];
        }
    }
    delete tFunction;
}

std::vector<double> QMLPNet::Simulate(std::vector<double> _input)
{
    std::vector<double> tmp;
    std::vector<double> ret = _input;
    for (int i = 0; i < net.size(); ++i)
    {
        tmp = ret;
        ret.clear();
        for (int j = 0; j < net[i].size(); ++j)
        {
            ret.push_back(net[i][j]->Simulate(tmp));
        }
    }
    return ret;
}

void QMLPNet::Train(std::vector<double> _input, std::vector<double> t)
{
    // Forward pass first, so each perceptron holds its current output
    // when the back-propagation factors are computed.
    Simulate(_input);

    const int last = static_cast<int>(net.size()) - 1;

    // Backward sweep: the output layer is given its target value t[unit];
    // hidden layers receive 0 — presumably their error signal is derived
    // from the linked units above rather than from this argument.
    for (int layer = last; layer >= 0; --layer)
    {
        const bool isOutput = (layer == last);
        for (std::size_t unit = 0; unit < net[layer].size(); ++unit)
        {
            net[layer][unit]->UpdateBPFactor(isOutput ? t[unit] : 0);
        }
    }

    // Second backward sweep applies the weight updates.
    for (int layer = last; layer >= 0; --layer)
    {
        for (std::size_t unit = 0; unit < net[layer].size(); ++unit)
        {
            net[layer][unit]->UpdateWeight();
        }
    }
}

void QMLPNet::RandomInit()
{
    // Ask every perceptron in every layer to randomly initialize itself.
    for (std::size_t layer = 0; layer < net.size(); ++layer)
    {
        for (std::size_t unit = 0; unit < net[layer].size(); ++unit)
        {
            net[layer][unit]->RandomInit();
        }
    }
}