/*
Neuron implementation
Implements the basic computational unit of a neural network
Supports activation function computation and bias management
*/
#include "Neuron.h"
#include "Connection.h"
#include "ActivationFunction.h"
#include "LossFunction.h"
#include <algorithm>
#include <cctype>
#include <cmath>
#include <string>
#include <vector>
#include <memory>
#include <iostream>
#include <stdexcept>

/// Constructs a neuron at (layerIndex, positionInLayer) with the given
/// activation and loss functions. Bias, output and delta start at 0.0.
/// @throws std::invalid_argument on negative indices, empty names, or names
///         outside the supported sets ({sigmoid, relu, tanh} / {mse}).
Neuron::Neuron(int id, int layerIndex, int positionInLayer, const std::string& activationName, const std::string& lossName)
    : id(id), layerIndex(layerIndex), positionInLayer(positionInLayer), bias(0.0), output(0.0), delta(0.0) {
    if (layerIndex < 0) {
        throw std::invalid_argument("Layer index must be non-negative");
    }
    if (positionInLayer < 0) {
        throw std::invalid_argument("Position in layer must be non-negative");
    }
    if (activationName.empty()) {
        throw std::invalid_argument("Activation function name cannot be empty");
    }
    if (lossName.empty()) {
        throw std::invalid_argument("Loss function name cannot be empty");
    }
    // Lower-case via unsigned char: passing a plain (possibly negative) char
    // to std::tolower is undefined behavior.
    const auto toLowerCopy = [](std::string s) {
        std::transform(s.begin(), s.end(), s.begin(),
                       [](unsigned char c) { return static_cast<char>(std::tolower(c)); });
        return s;
    };
    const std::string lowerActivationName = toLowerCopy(activationName);
    if (lowerActivationName != "sigmoid" && lowerActivationName != "relu" && lowerActivationName != "tanh") {
        throw std::invalid_argument("Invalid activation function name");
    }
    const std::string lowerLossName = toLowerCopy(lossName);
    if (lowerLossName != "mse") {
        throw std::invalid_argument("Invalid loss function name");
    }
    // (A previous isnan/isinf check on `bias` was removed: bias is initialized
    // to 0.0 in the member-init list above, so it could never fire.)
    // Hand the factories the normalized names so any casing accepted by the
    // validation above is also accepted downstream.
    // NOTE(review): assumes the factories key on lowercase names, matching the
    // validation set — confirm against ActivationFactory/LossFactory.
    this->activationFunction = ActivationFactory<double, 1>::create(lowerActivationName);
    this->lossFunction = LossFactory::create(lowerLossName);
}
// All members are smart pointers or containers of smart pointers, so the
// compiler-generated destructor releases everything.
Neuron::~Neuron() = default;
int Neuron::getId() const {
    return id;
}
void Neuron::setId(int id) {
    this->id = id;
}
int Neuron::getLayerIndex() const {
    return layerIndex;
}
void Neuron::setLayerIndex(int layerIndex) {
    this->layerIndex = layerIndex;
}
int Neuron::getPositionInLayer() const {
    return positionInLayer;
}
void Neuron::setPositionInLayer(int positionInLayer) {
    this->positionInLayer = positionInLayer;
}
const std::vector<std::shared_ptr<Connection> >& Neuron::getUpstreamConnections() const {
    return upstreamConnections;
}
// Register an incoming connection; ownership is shared with the caller.
void Neuron::addUpstreamConnection(const std::shared_ptr<Connection>& connection) {
    upstreamConnections.emplace_back(connection);
}
const std::vector<std::shared_ptr<Connection> >& Neuron::getDownstreamConnections() const {
    return downstreamConnections;
}
// Register an outgoing connection; ownership is shared with the caller.
void Neuron::addDownstreamConnection(const std::shared_ptr<Connection>& connection) {
    downstreamConnections.emplace_back(connection);
}
double Neuron::getBias() const {
    return bias;
}
void Neuron::setBias(double bias) {
    this->bias = bias;
}
double Neuron::getOutput() const {
    return output;
}
void Neuron::setOutput(double output) {
    this->output = output;
}
double Neuron::getDelta() const {
    return delta;
}
void Neuron::setDelta(double delta) {
    this->delta = delta;
}
void Neuron::calculateOutput(const std::vector<std::pair<double, double> >& inputsWeights) {
    double inputSum = 0.0;
    for (const auto& inputWeight : inputsWeights) {
        inputSum += inputWeight.first * inputWeight.second;
    }
    output = this->activationFunction->activate(inputSum + this->bias);
}
// Input-layer neurons (layer 0) pass their input straight through unchanged.
// @throws std::logic_error if called on a neuron outside layer 0.
void Neuron::calculateInputLayerOutput(const double input) {
    if (layerIndex == 0) {
        output = input;
        return;
    }
    throw std::logic_error("calculateInputLayerOutput can only be called on input layer neurons");
}
// Forward pass for a non-input neuron: delegates to calculateOutput.
// NOTE(review): the guard only rejects layer 0, so output-layer neurons are
// also admitted here — confirm that is intended.
void Neuron::calculateHiddenLayerOutput(const std::vector<std::pair<double, double> >& inputsWeights) {
    const bool isInputLayer = (layerIndex == 0);
    if (isInputLayer) {
        throw std::logic_error("calculateHiddenLayerOutput can only be called on hidden layer neurons");
    }
    calculateOutput(inputsWeights);
}
// Forward pass for an output-layer neuron. A neuron qualifies when its
// layerIndex is the -1 sentinel OR it has no downstream connections.
// `labelValue` is accepted for signature symmetry with
// calculateOutputLayerDelta but is not needed for the forward computation
// ([[maybe_unused]] silences the unused-parameter warning).
// @throws std::logic_error when called on a non-output neuron.
void Neuron::calculateOutputLayerOutput(const std::vector<std::pair<double, double> >& inputsWeights, [[maybe_unused]] const double labelValue) {
    if (layerIndex != -1 && !downstreamConnections.empty()) {
        throw std::logic_error("calculateOutputLayerOutput can only be called on output layer neurons");
    }
    calculateOutput(inputsWeights);
}
void Neuron::calculateHiddenLayerDelta() {
    double inputSum = 0.0;
    double downstreamDeltaSum = 0.0;
    for (const auto& conn : upstreamConnections) {
        inputSum += conn->getWeight() * conn->getUpstreamNeuron()->getOutput();
    }
    for (const auto& conn : downstreamConnections) {
        downstreamDeltaSum += conn->getWeight() * conn->getDownstreamNeuron()->getDelta();
    }
    delta = this->activationFunction->derivative(inputSum + this->bias) * downstreamDeltaSum;
}
void Neuron::calculateOutputLayerDelta(const double labelValue) {
    double inputSum = 0.0;
    for (const auto& conn : upstreamConnections) {
        inputSum += conn->getWeight() * conn->getUpstreamNeuron()->getOutput();
    }
    double loss_derivative = this->lossFunction->derivative(output, labelValue);
    double activation_derivative = this->activationFunction->derivative(inputSum + this->bias);
    delta = 0 - loss_derivative * activation_derivative;
}
// Applies one gradient step to every incoming weight and clamps the result to
// [-1, 1]. Uses std::clamp for consistency with the momentum overload below
// (the previous std::max/std::min nesting computed the same saturation).
void Neuron::updateInputWeights(const double learningRate) {
    for (const auto& conn : upstreamConnections) {
        const double stepped = conn->getWeight() + learningRate * conn->getGradient();
        conn->setWeight(std::clamp(stepped, -1.0, 1.0));
    }
}
// Gradient step with an extra momentum term, clamped to [-1, 1].
// NOTE(review): the momentum term scales the CURRENT weight (acting like a
// growth/decay factor), not the previous update as classical momentum does —
// confirm this is intentional.
void Neuron::updateInputWeights(const double learningRate, const double momentum) {
    for (const auto& conn : upstreamConnections) {
        const double current = conn->getWeight();
        const double stepped = current + learningRate * conn->getGradient() + momentum * current;
        conn->setWeight(std::clamp(stepped, -1.0, 1.0));
    }
}
// Nudges the bias along the stored delta, scaled by the learning rate.
// Unlike the weight updates, the bias is not clamped.
void Neuron::updateBias(const double learningRate) {
    bias = bias + learningRate * delta;
}
// Bias update with an extra momentum term (unclamped).
// NOTE(review): as in updateInputWeights, momentum scales the CURRENT bias
// rather than the previous step — confirm intended.
void Neuron::updateBias(const double learningRate, const double momentum) {
    const double step = learningRate * delta + momentum * bias;
    bias = bias + step;
}
void Neuron::print() const {
    std::cout << "Neuron: " << id << " (" << layerIndex << ", " << positionInLayer << ")" << std::endl;
    std::cout << "  Bias: " << bias << std::endl;
    std::cout << "  Output: " << output << std::endl;
    std::cout << "  Delta: " << delta << std::endl;
    std::cout << "  Upstream connections: " << upstreamConnections.size() << std::endl;
    for (const auto& conn : upstreamConnections) {
        std::cout << "    " << conn->getUpstreamNeuronId() << " -> " << conn->getDownstreamNeuronId()
        << " (weight: " << conn->getWeight() << ", gradient: " << conn->getGradient() << ")" << std::endl;
    }
    std::cout << "  Downstream connections: " << downstreamConnections.size() << std::endl;
    for (const auto& conn : downstreamConnections) {
        std::cout << "    " << conn->getUpstreamNeuronId() << " -> " << conn->getDownstreamNeuronId()
        << " (weight: " << conn->getWeight() << ", gradient: " << conn->getGradient() << ")" << std::endl;
    }
    std::cout << "  Activation function: " << ActivationFactory<double, 1>::getName(activationFunction.get()) << endl;
    std::cout << "  Loss function: " << LossFactory::getName(lossFunction.get()) << std::endl;
    std::cout << std::endl;
}