// activations.h
#pragma once
 
#include <ATen/ATen.h>
#include <vector>
 
// ---------------------------------------------------------------------------
// Activation function declarations.
//
// Each activation is declared in two flavors:
//   *_aten : operates on an at::Tensor (dispatched through ATen).
//   *_cpu  : reference implementation over a flat std::vector<float>.
//
// All functions take their input by const reference and return the result
// by value; [[nodiscard]] turns an accidentally discarded result into a
// compiler warning, since calling these for side effects is meaningless.
// ---------------------------------------------------------------------------

// ReLU: max(0, x)
[[nodiscard]] at::Tensor relu_aten(const at::Tensor& input);
[[nodiscard]] std::vector<float> relu_cpu(const std::vector<float>& input);

// Sigmoid: 1 / (1 + exp(-x))
[[nodiscard]] at::Tensor sigmoid_aten(const at::Tensor& input);
[[nodiscard]] std::vector<float> sigmoid_cpu(const std::vector<float>& input);

// Tanh: hyperbolic tangent
[[nodiscard]] at::Tensor tanh_aten(const at::Tensor& input);
[[nodiscard]] std::vector<float> tanh_cpu(const std::vector<float>& input);

// LeakyReLU: x if x > 0, else negative_slope * x.
// @param negative_slope  multiplier applied to negative inputs.
[[nodiscard]] at::Tensor leaky_relu_aten(const at::Tensor& input, double negative_slope);
[[nodiscard]] std::vector<float> leaky_relu_cpu(const std::vector<float>& input, float negative_slope);

// ELU: x if x > 0, else alpha * (exp(x) - 1).
// @param alpha  saturation value for the negative branch.
[[nodiscard]] at::Tensor elu_aten(const at::Tensor& input, double alpha);
[[nodiscard]] std::vector<float> elu_cpu(const std::vector<float>& input, float alpha);

// SELU: scaled ELU with fixed alpha and scale — presumably the standard
// constants from Klambauer et al. (alpha ~= 1.67326, scale ~= 1.05070);
// confirm against the implementation.
[[nodiscard]] at::Tensor selu_aten(const at::Tensor& input);
[[nodiscard]] std::vector<float> selu_cpu(const std::vector<float>& input);

// GELU: Gaussian Error Linear Unit, x * Phi(x).
[[nodiscard]] at::Tensor gelu_aten(const at::Tensor& input);
[[nodiscard]] std::vector<float> gelu_cpu(const std::vector<float>& input);

// Softplus: log(1 + exp(x)), a smooth approximation of ReLU.
[[nodiscard]] at::Tensor softplus_aten(const at::Tensor& input);
[[nodiscard]] std::vector<float> softplus_cpu(const std::vector<float>& input);

// Swish (a.k.a. SiLU): x * sigmoid(x).
[[nodiscard]] at::Tensor swish_aten(const at::Tensor& input);
[[nodiscard]] std::vector<float> swish_cpu(const std::vector<float>& input);

// Mish: x * tanh(softplus(x)).
[[nodiscard]] at::Tensor mish_aten(const at::Tensor& input);
[[nodiscard]] std::vector<float> mish_cpu(const std::vector<float>& input);
