#include "NeuralNetwork.h"

#include "RandomGsl.h"
#include "BasicDebug.h"

#include <assert.h>

#include <iostream>
#include <vector>

#include <QtCore/QString>

namespace jcrada {

    /**
     * Builds an empty network shell: hidden_layers + 2 slots (input and
     * output layers included), each initially holding no layer.
     */
    NeuralNetwork::NeuralNetwork( int hidden_layers ) :
    _number_of_layers( hidden_layers + 2 ),
    _layers( new NeuralLayer*[_number_of_layers] ) {
        // Every slot starts empty; layers are attached later via setLayer().
        int slot = 0;
        while (slot < getNumberOfLayers()) {
            setLayer(slot, NULL);
            ++slot;
        }
    }

    /**
     * Builds a fully-populated network.
     * Layout: slot 0 is the input layer, slots 1..n-2 the hidden layers,
     * slot n-1 the output layer.
     */
    NeuralNetwork::NeuralNetwork(
            int inputs, int hidden_layers, int neurons_per_layer, int outputs ) {
        setNumberOfLayers(hidden_layers + 2);
        _layers = new NeuralLayer*[getNumberOfLayers()];

        // Each input neuron takes exactly one external input value.
        _layers[0] = new NeuralLayer(inputs, 1);

        const int last = getNumberOfLayers() - 1;
        if (hidden_layers == 0) {
            // No hidden layers: the output layer connects straight to the inputs.
            _layers[1] = new NeuralLayer(outputs, inputs);
        } else {
            // First hidden layer is fed by the input layer...
            _layers[1] = new NeuralLayer(neurons_per_layer, inputs);
            // ...inner hidden layers are fed by the previous hidden layer...
            for (int slot = 2; slot < last; ++slot) {
                _layers[slot] = new NeuralLayer(neurons_per_layer, neurons_per_layer);
            }
            // ...and the output layer is fed by the last hidden layer.
            _layers[last] = new NeuralLayer(outputs, neurons_per_layer);
        }
    }

    /**
     * Releases every owned layer, then the layer array itself.
     */
    NeuralNetwork::~NeuralNetwork( ) {
        if (_layers == NULL) {
            return;
        }
        const int count = getNumberOfLayers();
        for (int slot = 0; slot < count; ++slot) {
            delete _layers[slot];
        }
        delete [] _layers;
    }

    /**
     * Propagates signals forward through the network: the outputs of each
     * layer are copied into the inputs of the next layer.
     */
    void NeuralNetwork::process( ) {
        for (int i = 0; i < getNumberOfLayers() - 1; ++i) {
            // Snapshot layer i's outputs before writing them into layer i+1.
            // std::vector replaces the raw new[]/delete[] pair: the buffer is
            // released even if output() throws (the original leaked in that case).
            std::vector<double> outputs(getLayer(i).getNumberOfNeurons());
            for (int j = 0; j < getLayer(i).getNumberOfNeurons(); ++j) {
                outputs[j] = getLayer(i).getNeuron(j).output();
            }
            // Input k of every neuron in layer i+1 is driven by the output of
            // neuron k in layer i. Assumes each neuron in layer i+1 has at most
            // as many inputs as layer i has neurons (guaranteed by the
            // populating constructor) — TODO confirm for hand-built networks.
            for (int j = 0; j < getLayer(i + 1).getNumberOfNeurons(); ++j) {
                for (int k = 0; k < getLayer(i + 1).getNeuron(j).getNumberOfInputs(); ++k) {
                    getLayer(i + 1).getNeuron(j).setInput(k, outputs[k]);
                }
            }
        }
    }

    /**
     * Deep copy: allocates a network with the same layer count and clones
     * every layer into it. The caller owns the returned object.
     */
    NeuralNetwork* NeuralNetwork::clone( ) const {
        NeuralNetwork* copy = new NeuralNetwork(getNumberOfHiddenLayers());
        const int count = copy->getNumberOfLayers();
        for (int slot = 0; slot < count; ++slot) {
            copy->setLayer(slot, getLayer(slot).clone());
        }
        return copy;
    }

    /**
     * Deserializes a network from a <NeuralNetwork> DOM element: reads the
     * "number-of-layers" attribute, then one <NeuralLayer> child per slot.
     * On parse failure *ok is set to false and a partially-built network is
     * still returned; the caller owns the result.
     */
    NeuralNetwork* NeuralNetwork::fromXml( QDomElement& xml, bool* ok ) {
        // Tolerate a null ok pointer: QString::toInt accepts NULL, but the
        // original code dereferenced ok unconditionally below and crashed.
        bool local_ok = true;
        if (ok == NULL) {
            ok = &local_ok;
        }
        int number_of_layers = xml.attribute("number-of-layers", "0").toInt(ok);
        NeuralNetwork* result = new NeuralNetwork(number_of_layers - 2);

        QDomNode child = xml.firstChild();
        int i = 0;
        while (!child.isNull()) {
            QDomElement e = child.toElement();
            if (e.tagName() == "NeuralLayer") {
                assert(i < number_of_layers);
                // NOTE(review): NeuralLayer::fromXml may overwrite *ok and so
                // mask an earlier failure from toInt above — confirm intent.
                result->setLayer(i++, NeuralLayer::fromXml(e, ok));
            }
            child = child.nextSibling();
        }
        if (!*ok) {
            BDEBUG("Error in NeuralNetwork::fromXml");
        }
        return result;
    }

    /**
     * Serializes the network as a <NeuralNetwork> element with a
     * "number-of-layers" attribute and one child element per layer.
     * Appends to parent when given, otherwise directly to the document.
     */
    void NeuralNetwork::toXml( QDomDocument& xml, QDomElement* parent ) const {
        QDomElement network = xml.createElement("NeuralNetwork");
        const int count = getNumberOfLayers();
        network.setAttribute("number-of-layers", count);
        for (int slot = 0; slot < count; ++slot) {
            getLayer(slot).toXml(xml, &network);
        }
        if (parent == NULL) {
            xml.appendChild(network);
        } else {
            parent->appendChild(network);
        }
    }

    int NeuralNetwork::main( int argc, char** argv ) {
        (void) argc;
        (void) argv;
        //        NeuralNetwork nn(6 * 3, 6 / 2, (6 * 3) / 2, 6 * 3);
        NeuralNetwork nn(3, 1, 2, 3);
        RandomGsl r(0);
        for (int i = 0; i < nn.getNumberOfLayers(); ++i) {
            for (int j = 0; j < nn.getLayer(i).getNumberOfNeurons(); ++j) {
                nn.getLayer(i).getNeuron(j).setActivationFunction(Neuron::AF_NONE);
                for (int k = 0; k < nn.getLayer(i).getNeuron(j).getNumberOfInputs(); ++k) {
                    nn.getLayer(i).getNeuron(j).setWeight(k, r.nextDouble(-1.0, 1.0));
                }
            }
        }
        for (int i = 0; i < nn.getInputLayer().getNumberOfNeurons(); ++i) {
            nn.getInputLayer().getNeuron(i).setInput(0, r.nextDouble(-1.0, 1.0));
        }
        nn.process();

        QDomDocument doc;
        nn.toXml(doc);
        QString xml = doc.toString();
        std::cout << xml.toStdString() << std::endl;
        LOGP(xml.toStdString());

        doc.clear();
        QString * error_msg = new QString();
        int* error_line = new int();
        int* error_col = new int();
        doc.setContent(xml, error_msg, error_line, error_col);
        std::cout << "Error: " << error_msg->toStdString() << " at [" << *error_line << " , "
                << *error_col << "]" << std::endl;
        delete error_msg;
        delete error_line;
        delete error_col;

        QDomElement root = doc.firstChild().toElement();
        NeuralNetwork nnxml(8);
        doc.clear();
        nnxml.toXml(doc);
        xml = doc.toString();
        std::cout << xml.toStdString() << std::endl;
        return 0;
    }

}
