/*
 * @Author: Tyler
 * @Date: 2025-10-06 15:30:10
 * @Description: Loss function interfaces (MSE, categorical crossentropy)
 *               and a factory for creating them by name.
 * Copyright (c) 2025 by User/Corp, All Rights Reserved.
 */
#ifndef LOSS_FUNCTION_H
#define LOSS_FUNCTION_H

#include "Macros.h"
#ifdef USE_USER_DEFINED_TENSOR 
    #undef USE_EIGEN_TENSOR
    #include "Tensor.h"
#endif
#ifdef USE_EIGEN_TENSOR
    #undef USE_USER_DEFINED_TENSOR
    #include <Eigen/Core>
    #include <Eigen/Dense>
#endif
#include <string>
#include <memory>

#ifdef USE_EIGEN_TENSOR
// Enable Eigen parallelization.
// NOTE(review): EIGEN_USE_THREADS must be defined BEFORE the Eigen headers
// are included, but <Eigen/Core>/<Eigen/Dense> are already included above —
// as written this definition has no effect. Move it ahead of the Eigen
// #include lines (or pass it on the compiler command line).
#define EIGEN_USE_THREADS
#endif

using namespace std;
#ifdef USE_EIGEN_TENSOR
using namespace Eigen;
#endif
#ifdef USE_USER_DEFINED_TENSOR
using namespace UserDefinedTensor;
#endif

class LossFunction {
public:
    virtual double compute(double predicted, double actual) const = 0;
    virtual double derivative(double predicted, double actual) const = 0;
    virtual Tensor<double, 1> computeToTensor(const Tensor<double, 1>& predicted, const Tensor<double, 1>& actual) const = 0;
    virtual Tensor<double, 1> derivativeToTensor(const Tensor<double, 1>& predicted, const Tensor<double, 1>& actual) const = 0;
    virtual ~LossFunction() = default;
};

// Mean Squared Error Loss
class MeanSquaredError : public LossFunction {
public:
    double compute(double predicted, double actual) const override;
    double derivative(double predicted, double actual) const override;
    Tensor<double, 1> computeToTensor(const Tensor<double, 1>& predicted, const Tensor<double, 1>& actual) const override;
    Tensor<double, 1> derivativeToTensor(const Tensor<double, 1>& predicted, const Tensor<double, 1>& actual) const override;
    ~MeanSquaredError() = default;
};

// Categorical Crossentropy Loss
class CategoricalCrossentropy : public LossFunction {
public:
    double compute(double predicted, double actual) const override;
    double derivative(double predicted, double actual) const override;
    Tensor<double, 1> computeToTensor(const Tensor<double, 1>& predicted, const Tensor<double, 1>& actual) const override;
    Tensor<double, 1> derivativeToTensor(const Tensor<double, 1>& predicted, const Tensor<double, 1>& actual) const override;
    ~CategoricalCrossentropy() = default;
};

/* Additional loss functions can be added here as needed. Guidelines:
 *  - Handle edge cases and invalid inputs appropriately; add checks for
 *    NaN or infinite values where the application requires them.
 *  - Include comments for clarity and cover each implementation with
 *    unit tests to ensure correctness.
 *  - Keep new losses conforming to the LossFunction interface so they
 *    integrate seamlessly with the training loop and LossFactory.
 */

// Loss Function Factory
class LossFactory {
public:
    static unique_ptr<LossFunction> create(const string& name);
    static const string& getName(const LossFunction* func);
};

#endif // LOSS_FUNCTION_H