// main.cpp 
#include <iostream>
#include <vector>
#include <cmath>
#include "activations.h"
 
const float EPSILON = 1e-5;  // tolerance for element-wise float comparison

// Returns true when both vectors have the same length and every pair of
// elements differs by no more than EPSILON; on the first mismatch it logs
// the offending index and values, then returns false.
bool compare_results(const std::vector<float>& result1, const std::vector<float>& result2) {
    const size_t count = result1.size();
    if (count != result2.size()) return false;
    for (size_t idx = 0; idx < count; ++idx) {
        const float diff = std::abs(result1[idx] - result2[idx]);
        if (diff <= EPSILON) continue;  // within tolerance — keep scanning
        std::cout << "Mismatch at index " << idx << ": "
                  << result1[idx] << " vs " << result2[idx] << std::endl;
        return false;
    }
    return true;
}
 
int main() {
    std::vector<float> input_data = {-2.0f, -1.0f, 0.0f, 1.0f, 2.0f};
 
    // ReLU
    auto input_tensor = at::from_blob(input_data.data(),  {5}, at::kFloat);
    auto output_aten = relu_aten(input_tensor);
    auto output_cpu = relu_cpu(input_data);
    std::cout << output_aten << std::endl;
    std::cout << output_cpu << std::endl;
 
    // Sigmoid
    output_aten = sigmoid_aten(input_tensor);
    output_cpu = sigmoid_cpu(input_data);
    std::cout << output_aten << std::endl;
    std::cout << output_cpu << std::endl;
 
    // Tanh
    output_aten = tanh_aten(input_tensor);
    output_cpu = tanh_cpu(input_data);
    std::cout << output_aten << std::endl;
    std::cout << output_cpu << std::endl;
 
    // LeakyReLU 
    output_aten = leaky_relu_aten(input_tensor, 0.1);
    output_cpu = leaky_relu_cpu(input_data, 0.1f);
 
    // ELU
    output_aten = elu_aten(input_tensor, 1.0);
    output_cpu = elu_cpu(input_data, 1.0f);
 
    // SELU
    output_aten = selu_aten(input_tensor);
    output_cpu = selu_cpu(input_data);
 
    // GELU 
    output_aten = gelu_aten(input_tensor);
    output_cpu = gelu_cpu(input_data);
 
    // Softplus
    output_aten = softplus_aten(input_tensor);
    output_cpu = softplus_cpu(input_data);
 
    // Swish
    output_aten = swish_aten(input_tensor);
    output_cpu = swish_cpu(input_data);
 
    // Mish
    output_aten = mish_aten(input_tensor);
    output_cpu = mish_cpu(input_data);
 
    return 0;
}
