// ReLU activation layer
namespace qing {
    /// Element-wise ReLU activation layer.
    ///
    /// forward():  y[i] = max(0, x[i]), caching the input for the backward pass.
    /// backward(): dL/dx[i] = dL/dy[i] where the cached input was > 0, else 0.
    /// ReLU has no trainable parameters, so update_parameters() is a no-op.
    class ReLU : public Layer {
    private:
        // Input saved by the most recent forward() call; consumed by backward().
        // Plain member instead of unique_ptr<vector<double>> — the extra heap
        // indirection bought nothing and cost an allocation plus a pointer chase.
        std::vector<double> input_cache;
        int dimension;      // fixed input/output length
        bool is_training;   // training-mode flag (ReLU behaves identically in eval mode)

    public:
        /// @param dim input/output dimension; must be positive.
        /// @throws std::invalid_argument if dim <= 0.
        // NOTE(review): consider marking this ctor `explicit`; left implicit so
        // any existing callers relying on int->ReLU conversion keep compiling.
        ReLU(int dim)
            // Guard the cast so a negative dim cannot become a huge size_t
            // before the validity check in the body runs.
            : input_cache(dim > 0 ? static_cast<std::size_t>(dim) : 0, 0.0),
              dimension(dim),
              is_training(true) {
            if (dim <= 0) {
                throw std::invalid_argument("ReLU dimension must be positive");
            }
        }

        /// Applies ReLU element-wise and caches the input for backward().
        /// @param input vector of length `dimension`.
        /// @return vector of the same length with negatives clamped to 0.
        /// @throws std::invalid_argument on a size mismatch.
        std::vector<double> forward(const std::vector<double>& input) override {
            // Cast silences the signed/unsigned comparison (size_t vs int);
            // dimension is guaranteed positive by the constructor.
            if (input.size() != static_cast<std::size_t>(dimension)) {
                throw std::invalid_argument("Input size does not match ReLU dimension");
            }

            input_cache = input;
            std::vector<double> output(input.size());
            std::transform(input.begin(), input.end(), output.begin(),
                           [](double x) { return std::max(0.0, x); });
            return output;
        }

        /// Routes each incoming gradient through only where the cached forward
        /// input was positive (the ReLU subgradient, with g(0) defined as 0).
        /// Must be called after forward(); before any forward() the cache is
        /// all zeros, so the returned gradient is all zeros.
        /// @param grad_output upstream gradient of length `dimension`.
        /// @return gradient with respect to the forward input.
        /// @throws std::invalid_argument on a size mismatch.
        std::vector<double> backward(const std::vector<double>& grad_output) override {
            if (grad_output.size() != static_cast<std::size_t>(dimension)) {
                throw std::invalid_argument("Gradient size does not match ReLU dimension");
            }

            std::vector<double> grad_input(grad_output.size());
            for (std::size_t i = 0; i < grad_input.size(); ++i) {
                grad_input[i] = input_cache[i] > 0.0 ? grad_output[i] : 0.0;
            }
            return grad_input;
        }

        /// ReLU has no trainable parameters — intentionally a no-op.
        void update_parameters(double /*learning_rate*/) override {}

        std::string get_layer_type() const override {
            return "ReLU";
        }

        int get_input_dim() const override {
            return dimension;
        }

        int get_output_dim() const override {
            return dimension;
        }

        void set_training(bool training) override {
            is_training = training;
        }

        /// @return 0 — ReLU is parameter-free.
        int get_parameter_count() const override {
            return 0;
        }
    };
}