/*
Neural network main class implementation.
Implements the complete neural network training and inference pipeline.
Supports multi-layer network architectures and parameter optimization.
*/
#include "NeuralNetwork.h"
#include "NeuronLayer.h"
#include <cmath>
#include <iostream>
#include <stdexcept>

// Builds the network from a topology spec (neurons per layer), wiring every
// neuron in layer i-1 to every neuron in layer i (fully connected).
// Throws std::invalid_argument for an empty topology or a non-positive layer size.
NeuralNetwork::NeuralNetwork(const std::vector<int>& topology, double learningRate, double momentum)
    : learningRate(learningRate), momentum(momentum) {
    if (topology.empty()) {
        throw std::invalid_argument("Topology cannot be empty");
    }
    // Neuron ids are assigned sequentially across the whole network.
    int nextNeuronId = 0;
    const size_t lastIndex = topology.size() - 1;
    for (size_t idx = 0; idx < topology.size(); ++idx) {
        const int count = topology[idx];
        if (count <= 0) {
            throw std::invalid_argument("Number of neurons must be positive");
        }
        // First layer is INPUT, last is OUTPUT, everything in between HIDDEN.
        const LayerType kind = (idx == 0) ? LayerType::INPUT
                             : (idx == lastIndex) ? LayerType::OUTPUT
                                                  : LayerType::HIDDEN;
        layers.emplace_back(std::make_shared<NeuronLayer>(idx, kind, count, nextNeuronId));
        if (kind == LayerType::INPUT) {
            std::cout << "Input layer created with " << count << " neurons" << std::endl;
        } else if (kind == LayerType::OUTPUT) {
            std::cout << "Output layer created with " << count << " neurons" << std::endl;
        } else {
            std::cout << "Hidden layer " << idx << " created with " << count << " neurons" << std::endl;
        }
        nextNeuronId += count;
    }
    // Fully connect each adjacent layer pair: one Connection per
    // (source neuron, destination neuron) pair, destination-major order.
    for (size_t dst = 1; dst < layers.size(); ++dst) {
        const auto& prev = layers[dst - 1];
        const auto& curr = layers[dst];
        for (int to = 0; to < curr->getNumNeurons(); ++to) {
            for (int from = 0; from < prev->getNumNeurons(); ++from) {
                connections.emplace_back(std::make_shared<Connection>(prev->getNeurons()[from]->getId(),
                                                                      curr->getNeurons()[to]->getId()));
            }
        }
    }
}
NeuralNetwork::~NeuralNetwork() {}
// Runs one forward pass: loads `inputs` into the input layer, then propagates
// each layer's outputs into the next layer in order.
// Throws std::runtime_error if the network has no layers, and
// std::invalid_argument if the input size does not match the input layer.
void NeuralNetwork::feedForward(const std::vector<double>& inputs) {
    if (layers.empty()) {
        throw std::runtime_error("No layers in the network");
    }
    // static_cast avoids the signed/unsigned comparison warning
    // (getNumNeurons() returns int, size() returns size_t).
    if (inputs.size() != static_cast<std::size_t>(layers[0]->getNumNeurons())) {
        throw std::invalid_argument("Input size does not match input layer size");
    }
    layers[0]->forward(inputs);
    for (size_t i = 1; i < layers.size(); ++i) {
        // Bind by const reference to avoid copying the activation vector
        // on every layer (binds a temporary safely if getOutputs() returns by value).
        const auto& previousOutputs = layers[i - 1]->getOutputs();
        layers[i]->forward(previousOutputs);
    }
}
// Backpropagates the error for `target` through the network: the output layer
// computes its deltas from the target, then each hidden layer computes its
// deltas from the layer downstream of it. The input layer (index 0) holds no
// trainable parameters and is skipped.
// Throws std::runtime_error if the network has no layers, and
// std::invalid_argument if the target size does not match the output layer
// (mirrors the validation feedForward performs on its input).
void NeuralNetwork::backPropagate(const std::vector<double>& target) {
    if (layers.empty()) {
        throw std::runtime_error("No layers in the network");
    }
    if (target.size() != static_cast<std::size_t>(layers.back()->getNumNeurons())) {
        throw std::invalid_argument("Target size does not match output layer size");
    }
    layers.back()->backward(target);
    // Walk hidden layers from back to front. Using an unsigned index counting
    // down from size()-1 avoids the wraparound the old `int i = size() - 2`
    // loop hit on a single-layer network.
    for (std::size_t idx = layers.size() - 1; idx > 1; --idx) {
        // Const reference avoids copying the error vector per layer.
        const auto& downstreamDeltas = layers[idx]->getErrors();
        layers[idx - 1]->backward(downstreamDeltas);
    }
}
void NeuralNetwork::calculateGradients() {
    for (const auto& connection : connections) {
        connection->calculateGradient();
    }
}
// Applies one weight update step (learning rate + momentum) to every layer
// that has incoming weights — i.e. all layers except the input layer.
void NeuralNetwork::updateWeights() {
    if (layers.empty()) {
        return;
    }
    for (auto it = layers.begin() + 1; it != layers.end(); ++it) {
        (*it)->updateWeights(learningRate, momentum);
    }
}
// Applies one bias update step (learning rate + momentum) to every layer
// except the input layer, which carries no biases to train.
void NeuralNetwork::updateBias() {
    if (layers.empty()) {
        return;
    }
    for (auto it = layers.begin() + 1; it != layers.end(); ++it) {
        (*it)->updateBias(learningRate, momentum);
    }
}
// Returns a copy of the output layer's current activations (valid after a
// feedForward call). Assumes at least one layer exists — the constructor
// rejects an empty topology, so layers.back() is safe here.
// NOTE(review): the `const` on the by-value return type blocks move semantics
// at call sites; consider plain std::vector<double> (needs a matching change
// in the header declaration).
const std::vector<double> NeuralNetwork::getOutput() const {
    return layers.back()->getOutputs();
}
// Prints a human-readable summary of the network: layer/neuron/connection
// counts, the hyperparameters, then a per-layer detail dump.
void NeuralNetwork::print() const {
    std::cout << "Total layers: " << layers.size() << std::endl;
    size_t neuronCount = 0;
    for (size_t i = 0; i < layers.size(); ++i) {
        neuronCount += layers[i]->getNumNeurons();
    }
    std::cout << "Total neurons: " << neuronCount << std::endl;
    std::cout << "Total connections: " << connections.size() << std::endl;
    std::cout << "Learning rate: " << learningRate << std::endl;
    std::cout << "Momentum: " << momentum << std::endl;
    std::cout << "Layers details:" << std::endl;
    for (const auto& layer : layers) {
        layer->print();
    }
}