/*
 * @Author: Tyler
 * @Date: 2025-10-15 15:50:18
 * @Description: 
 * Copyright (c) 2025 by User/Corp, All Rights Reserved.
 */
#include "Tensor.h"
#include "../include/ActivationFunction.h"

#include <algorithm>
#include <cmath>

// Sigmoid Implementation
template<typename T, int Dynamic>
T Sigmoid<T, Dynamic>::activate(T x) const {
    // Clip the input to [-100, 100] so exp(-x) cannot overflow/underflow.
    // FIX: use T-typed bounds — mixing the double literals with T (e.g.
    // T = float) breaks std::min/std::max template argument deduction.
    x = std::max(T(-100.0), std::min(T(100.0), x));
    // sigmoid(x) = 1 / (1 + e^-x)
    return T(1.0 / (1.0 + std::exp(-x)));
}
template<typename T, int Dynamic>
T Sigmoid<T, Dynamic>::derivative(T x) const {
    // d/dx sigmoid(x) = sigmoid(x) * (1 - sigmoid(x))
    const T s = activate(x);
    return T(s * (1.0 - s));
}
// FIX: the template header must be unconditional. It was previously
// wrapped in #ifdef USE_USER_DEFINED_TENSOR, so with the macro undefined
// T and Dynamic were undeclared and the definition could not compile.
template <typename T, int Dynamic>
Tensor<T, Dynamic>& Sigmoid<T, Dynamic>::activateToTensor(Tensor<T, Dynamic>& tensor) const {
    // Element-wise sigmoid, clipped to [-100, 100] like the scalar
    // activate() so large-magnitude inputs cannot overflow exp().
    tensor = tensor.unaryExpr([](T x) -> T {
        x = std::max(T(-100.0), std::min(T(100.0), x));
        return T(1.0 / (1.0 + std::exp(-x)));
    });
    return tensor;
}
// FIX: template header made unconditional (was #ifdef-wrapped, which left
// T/Dynamic undeclared when USE_USER_DEFINED_TENSOR was not defined).
template <typename T, int Dynamic>
Tensor<T, Dynamic>& Sigmoid<T, Dynamic>::derivativeToTensor(Tensor<T, Dynamic>& tensor) const {
    // Element-wise sigmoid derivative: s(x) * (1 - s(x)).
    // FIX: explicit `-> T` return type — `sig * (1.0 - sig)` deduces
    // double, which mismatches a float-valued tensor's scalar type.
    tensor = tensor.unaryExpr([this](T x) -> T {
        const T sig = activate(x);
        return T(sig * (1.0 - sig));
    });
    return tensor;
}

// ReLU Implementation
template<typename T, int Dynamic>
T ReLU<T, Dynamic>::activate(T x) const {
    // ReLU: pass positive values through, zero out the rest.
    if (x > 0) {
        return x;
    }
    return T(0);
}
template<typename T, int Dynamic>
T ReLU<T, Dynamic>::derivative(T x) const {
    // Subgradient: 1 for x > 0, otherwise 0 (including x == 0).
    return x > 0 ? T(1) : T(0);
}
// FIX: template header made unconditional (was #ifdef-wrapped, which left
// T/Dynamic undeclared when USE_USER_DEFINED_TENSOR was not defined).
template <typename T, int Dynamic>
Tensor<T, Dynamic>& ReLU<T, Dynamic>::activateToTensor(Tensor<T, Dynamic>& tensor) const {
    // Element-wise operation: clamp values below 0 up to 0.
    // FIX: use T(0) instead of the double literal 0.0 so the scalar type
    // matches the tensor's (mixed scalar types break cwiseMax for float).
    tensor = tensor.cwiseMax(T(0));
    return tensor;
}
// FIX: template header made unconditional (was #ifdef-wrapped, which left
// T/Dynamic undeclared when USE_USER_DEFINED_TENSOR was not defined).
template <typename T, int Dynamic>
Tensor<T, Dynamic>& ReLU<T, Dynamic>::derivativeToTensor(Tensor<T, Dynamic>& tensor) const {
    // Element-wise subgradient: 1 where x > 0, else 0.
    // FIX: explicit `-> T` return type — `x > 0 ? 1 : 0` deduces int,
    // which does not match the tensor's scalar type.
    tensor = tensor.unaryExpr([](T x) -> T {
        return x > 0 ? T(1) : T(0);
    });
    return tensor;
}

// Tanh Implementation
template <typename T, int Dynamic>
T Tanh<T, Dynamic>::activate(T x) const {
    // Hyperbolic tangent, delegated to the standard library.
    return std::tanh(x);
}
template <typename T, int Dynamic>
T Tanh<T, Dynamic>::derivative(T x) const {
    // d/dx tanh(x) = 1 - tanh(x)^2
    const T th = activate(x);
    return T(1.0 - th * th);
}
// FIX: template header made unconditional (was #ifdef-wrapped, which left
// T/Dynamic undeclared when USE_USER_DEFINED_TENSOR was not defined).
template <typename T, int Dynamic>
Tensor<T, Dynamic>& Tanh<T, Dynamic>::activateToTensor(Tensor<T, Dynamic>& tensor) const {
    // Element-wise hyperbolic tangent.
    tensor = tensor.unaryExpr([](T x) -> T {
        return std::tanh(x);
    });
    return tensor;
}
// FIX: template header made unconditional (was #ifdef-wrapped, which left
// T/Dynamic undeclared when USE_USER_DEFINED_TENSOR was not defined).
template <typename T, int Dynamic>
Tensor<T, Dynamic>& Tanh<T, Dynamic>::derivativeToTensor(Tensor<T, Dynamic>& tensor) const {
    // Element-wise tanh derivative: 1 - tanh(x)^2.
    // FIX: explicit `-> T` return type — `1.0 - t * t` deduces double,
    // which mismatches a float-valued tensor's scalar type.
    tensor = tensor.unaryExpr([this](T x) -> T {
        const T t = activate(x);
        return T(1.0 - t * t);
    });
    return tensor;
}
// Softmax Implementation
template <typename T, int Dynamic>
T Softmax<T, Dynamic>::activate(T x) const {
    // NOTE(review): a scalar "softmax" is unusual — this evaluates a
    // 2-way softmax over the pair {x, 1 - x}. Confirm this is the intent;
    // a true softmax needs the whole vector (see activateToTensor).
    const T ex = std::exp(x);
    return T(ex / (ex + std::exp(1.0 - x)));
}
template <typename T, int Dynamic>
T Softmax<T, Dynamic>::derivative(T x) const {
    // Logistic-style derivative: s * (1 - s) with s = activate(x).
    const T s = activate(x);
    return T(s * (1.0 - s));
}
// FIX: template header made unconditional (was #ifdef-wrapped, which left
// T/Dynamic undeclared when USE_USER_DEFINED_TENSOR was not defined).
template <typename T, int Dynamic>
Tensor<T, Dynamic>& Softmax<T, Dynamic>::activateToTensor(Tensor<T, Dynamic>& tensor) const {
    // softmax(x)_i = exp(x_i) / sum_j exp(x_j), over the whole tensor.
    // NOTE(review): no max-subtraction is performed, so large inputs can
    // overflow exp(); consider shifting by the maximum coefficient if the
    // Tensor type exposes it.
    Tensor<T, Dynamic> exps = tensor.unaryExpr([](T x) -> T {
        return T(std::exp(x));
    });
    const T denom = exps.sum();  // normalization constant (a scalar)
    tensor = exps.unaryExpr([denom](T e) -> T {
        return e / denom;
    });
    return tensor;
}
// FIX: template header made unconditional (was #ifdef-wrapped, which left
// T/Dynamic undeclared when USE_USER_DEFINED_TENSOR was not defined).
template <typename T, int Dynamic>
Tensor<T, Dynamic>& Softmax<T, Dynamic>::derivativeToTensor(Tensor<T, Dynamic>& tensor) const {
    // Element-wise derivative: the diagonal of the softmax Jacobian,
    // d s_i / d x_i = s_i * (1 - s_i), matching the scalar derivative().
    //
    // BUG FIX: the previous nested i/j loop overwrote tensor(i) on every
    // inner iteration, so only the last j survived — it computed neither
    // the diagonal nor the full Jacobian (which cannot fit in a tensor of
    // the input's shape anyway).
    activateToTensor(tensor);  // tensor now holds the softmax values in place
    tensor = tensor.unaryExpr([](T s) -> T {
        return T(s * (1.0 - s));
    });
    return tensor;
}

// Factory Implementation
// FIX: template header made unconditional (was #ifdef-wrapped, which left
// T/Dynamic undeclared when USE_USER_DEFINED_TENSOR was not defined).
template <typename T, int Dynamic>
unique_ptr<ActivationFunction<T, Dynamic> > ActivationFactory<T, Dynamic>::create(const string& name) {
    // Map a lowercase activation name to a freshly allocated instance.
    // Throws invalid_argument for any unrecognized name.
    if (name == "sigmoid") {
        return make_unique<Sigmoid<T, Dynamic> >();
    }
    if (name == "relu") {
        return make_unique<ReLU<T, Dynamic> >();
    }
    if (name == "tanh") {
        return make_unique<Tanh<T, Dynamic> >();
    }
    if (name == "softmax") {
        return make_unique<Softmax<T, Dynamic> >();
    }
    // FIX: the old message concatenated the name straight onto the text
    // ("...check the namesigmoid"); add a separator before the name.
    throw invalid_argument("Invalid activation function name, please check the name: " + name);
}
// FIX: template header made unconditional (was #ifdef-wrapped, which left
// T/Dynamic undeclared when USE_USER_DEFINED_TENSOR was not defined).
template <typename T, int Dynamic>
const string& ActivationFactory<T, Dynamic>::getName(const ActivationFunction<T, Dynamic>* func) {
    // Reverse lookup of create(): recover the canonical name from a
    // polymorphic pointer via RTTI. Returns "unknown" for a null or
    // unrecognized pointer (dynamic_cast of nullptr yields nullptr).
    if (dynamic_cast<const Sigmoid<T, Dynamic>*>(func)) {
        static const string name = "sigmoid";
        return name;
    } else if (dynamic_cast<const ReLU<T, Dynamic>*>(func)) {
        static const string name = "relu";
        return name;
    } else if (dynamic_cast<const Tanh<T, Dynamic>*>(func)) {
        static const string name = "tanh";
        return name;
    } else if (dynamic_cast<const Softmax<T, Dynamic>*>(func)) {
        static const string name = "softmax";
        return name;
    } else {
        static const string unknown = "unknown";
        return unknown;
    }
}

// Explicit template instantiation
template class ActivationFactory<double, 1>;
template class ActivationFactory<double, 3>;