#pragma once

#include <cstdint>
#include <memory>
#include <sstream>
#include <stdexcept>
#include <string>
#include <vector>

#include "tensor.h"
#include "core/exp.h"
#include "core/add.h"
#include "core/mul.h"

namespace  ldl
{
namespace core
{
template<typename T>
class Function : public std::enable_shared_from_this<Function<T>>
{
public:
    virtual ~Function() = default;

    /// Helper: render a tensor shape as a human-readable string, e.g. "[2, 3]".
    static std::string shapeToString(const std::vector<int64_t>& shape) {
        std::ostringstream oss;
        oss << "[";
        for (size_t i = 0; i < shape.size(); ++i) {
            if (i != 0) oss << ", ";
            oss << shape[i];
        }
        oss << "]";
        return oss.str();
    }

    /// Apply the function to a list of input tensors.
    /// Stores the inputs, runs forward() (which fills m_outputs), and records
    /// this Function as the creator of every output so the computation graph
    /// can be traversed during backpropagation.
    std::vector<std::shared_ptr<Tensor<T>>> operator()(const std::vector<std::shared_ptr<Tensor<T>>>& inputs) {
        m_inputs = inputs;
        forward();
        record_creator();
        return m_outputs;
    }

    /// Single-input convenience overload; returns the first output tensor.
    std::shared_ptr<Tensor<T>> operator()(std::shared_ptr<Tensor<T>> input) {
        // BUG FIX: the previous code wrote m_inputs[0] on a default-empty
        // vector, which is undefined behavior. assign() sizes the vector
        // correctly regardless of its current state.
        m_inputs.assign(1, std::move(input));
        forward();
        record_creator();
        if (m_outputs.empty()) {
            // Guard against a forward() implementation that produced nothing,
            // instead of indexing m_outputs[0] out of bounds.
            throw std::runtime_error("Function::forward() produced no outputs");
        }
        return m_outputs[0];
    }

    /// Compute m_outputs from m_inputs. Implementations must populate m_outputs.
    virtual void forward() = 0;

    /// Seed the gradients on the outputs, then propagate them to the inputs.
    /// @param output_grads one gradient per output tensor, matching shapes.
    /// @throws std::runtime_error on count or shape mismatch.
    virtual void backward(const std::vector<TensorData<T>>& output_grads) {
        set_output_grad(output_grads);
        backward();
    }

    /// Propagate the gradients already stored on m_outputs back to m_inputs.
    virtual void backward() = 0;

    /// Validate and store incoming output gradients on the output tensors.
    /// @throws std::runtime_error if the number of gradients or any gradient
    ///         shape does not match the corresponding output.
    virtual void set_output_grad(const std::vector<TensorData<T>>& output_grads)
    {
        if (output_grads.size() != m_outputs.size()) {
            throw std::runtime_error(
                "Output gradients size mismatch. Expected " + 
                std::to_string(m_outputs.size()) + 
                ", got " + 
                std::to_string(output_grads.size())
            );
        }
        
        for (size_t i = 0; i < m_outputs.size(); ++i) {
            if (output_grads[i].shape() != m_outputs[i]->shape()) {
                throw std::runtime_error(
                    "Gradient shape mismatch at index " + 
                    std::to_string(i) + 
                    ". Expected " + 
                    shapeToString(m_outputs[i]->shape()) + 
                    ", got " + 
                    shapeToString(output_grads[i].shape())
                );
            }
            m_outputs[i]->grad() = output_grads[i];
        }
    }

    /// Mutable access to the stored input tensors (used during backprop).
    std::vector<std::shared_ptr<Tensor<T>>>& inputs()
    {
        return m_inputs;
    }

protected:
    std::vector<std::shared_ptr<Tensor<T>>> m_inputs{};   // inputs captured by operator()
    std::vector<std::shared_ptr<Tensor<T>>> m_outputs{};  // outputs produced by forward()

private:
    // Record this Function as the creator of every output tensor so the
    // graph can be walked backwards. Requires *this to be owned by a
    // shared_ptr (precondition of shared_from_this).
    void record_creator() {
        for (auto& item : m_outputs) {
            item->set_creator(this->shared_from_this());
        }
    }
};
}
}
