#include <iostream>
#include <torch/torch.h>
#include <cmath>

// Maximum allowed element-wise deviation between the libtorch and the
// hand-rolled CPU implementations. Typed constant instead of a #define:
// scoped, type-safe, and visible to the debugger (same name/value, so all
// existing uses compile unchanged).
constexpr double TOLERANCE = 1e-5;

#if 0
// Sigmoid activation (manual libtorch formulation): 1 / (1 + exp(-x)).
// Disabled duplicate of the sigmoid_torch definition below — compiled out via
// #if 0 and kept for reference only. Enabling it would cause a redefinition error.
torch::Tensor sigmoid_torch(const torch::Tensor& input) {
	return 1.0 / (1.0 + torch::exp(-input));
}
#endif

// 定义 Sigmoid 激活函数 (使用 libtorch)
// Sigmoid activation (libtorch implementation).
// @param input tensor of any shape
// @return tensor of the same shape with 1 / (1 + exp(-x)) applied element-wise
torch::Tensor sigmoid_torch(const torch::Tensor& input) {
	return torch::sigmoid(input);
}

// 定义 Sigmoid 激活函数 (CPU 实现)
// Sigmoid activation (hand-rolled CPU reference implementation).
// Generalized: the original used accessor<float, 2> and therefore only worked
// on 2-D float tensors; this version accepts float tensors of any rank by
// iterating over a flat contiguous view. Non-contiguous inputs are handled
// via an explicit contiguous() copy.
// @param input float32 tensor of any shape
// @return tensor of the same shape with 1 / (1 + exp(-x)) applied element-wise
torch::Tensor sigmoid_cpu(const torch::Tensor& input) {
	torch::Tensor src = input.contiguous();
	// empty_like suffices: every element is written below (zeros_like would zero-fill needlessly).
	torch::Tensor result = torch::empty_like(src);
	const float* in = src.data_ptr<float>();
	float* out = result.data_ptr<float>();
	const int64_t count = src.numel();
	for (int64_t i = 0; i < count; ++i) {
		out[i] = 1.0f / (1.0f + std::exp(-in[i]));
	}
	return result;
}

// 定义 ReLU 激活函数 (使用 libtorch)
// ReLU activation (libtorch implementation): element-wise max(0, x).
// @param input tensor of any shape
// @return tensor of the same shape with negatives clamped to zero
torch::Tensor relu_torch(const torch::Tensor& input) {
	torch::Tensor rectified = torch::relu(input);
	return rectified;
}

// 定义 ReLU 激活函数 (CPU 实现)
// ReLU activation (hand-rolled CPU reference implementation).
// Generalized: the original used accessor<float, 2> and therefore only worked
// on 2-D float tensors; this version accepts float tensors of any rank by
// iterating over a flat contiguous view.
// @param input float32 tensor of any shape
// @return tensor of the same shape with max(0, x) applied element-wise
torch::Tensor relu_cpu(const torch::Tensor& input) {
	torch::Tensor src = input.contiguous();
	// empty_like suffices: every element is written below.
	torch::Tensor result = torch::empty_like(src);
	const float* in = src.data_ptr<float>();
	float* out = result.data_ptr<float>();
	const int64_t count = src.numel();
	for (int64_t i = 0; i < count; ++i) {
		out[i] = std::max(0.0f, in[i]);
	}
	return result;
}

// 定义 Tanh 激活函数 (使用 libtorch)
// Tanh activation (libtorch implementation).
// @param input tensor of any shape
// @return tensor of the same shape with tanh applied element-wise
torch::Tensor tanh_torch(const torch::Tensor& input) {
	torch::Tensor output = torch::tanh(input);
	return output;
}

// 定义 Tanh 激活函数 (CPU 实现)
// Tanh activation (hand-rolled CPU reference implementation).
// Generalized: the original used accessor<float, 2> and therefore only worked
// on 2-D float tensors; this version accepts float tensors of any rank by
// iterating over a flat contiguous view.
// @param input float32 tensor of any shape
// @return tensor of the same shape with tanh applied element-wise
torch::Tensor tanh_cpu(const torch::Tensor& input) {
	torch::Tensor src = input.contiguous();
	// empty_like suffices: every element is written below.
	torch::Tensor result = torch::empty_like(src);
	const float* in = src.data_ptr<float>();
	float* out = result.data_ptr<float>();
	const int64_t count = src.numel();
	for (int64_t i = 0; i < count; ++i) {
		out[i] = std::tanh(in[i]);
	}
	return result;
}

// 定义 Softmax 激活函数 (使用 libtorch)
// Softmax activation (libtorch implementation).
// @param input floating-point tensor
// @param dim   dimension to normalize over (defaults to the last dimension)
// @return tensor of the same shape; entries along `dim` sum to 1
torch::Tensor softmax_torch(const torch::Tensor& input, int64_t dim = -1) {
	torch::Tensor probabilities = torch::softmax(input, dim);
	return probabilities;
}

// 定义 Softmax 激活函数 (CPU 实现)
// Softmax activation (CPU reference implementation).
// Bug fix: the original computed exp(input) directly, which overflows to inf
// for moderately large inputs (~ >88 for float32), yielding inf/inf = NaN
// after the division. Subtracting the per-slice maximum first is the standard
// numerically stable formulation and is mathematically identical
// (exp(x - m) / sum(exp(x - m)) == exp(x) / sum(exp(x))).
// @param input floating-point tensor
// @param dim   dimension to normalize over (defaults to the last dimension)
// @return tensor of the same shape; entries along `dim` are in (0, 1] and sum to 1
torch::Tensor softmax_cpu(const torch::Tensor& input, int64_t dim = -1) {
	// torch::max with a dim returns (values, indices); we need only the values.
	torch::Tensor max_vals = std::get<0>(torch::max(input, dim, /*keepdim=*/true));
	torch::Tensor exp_shifted = torch::exp(input - max_vals);
	torch::Tensor sum_exp = torch::sum(exp_shifted, dim, /*keepdim=*/true);
	return exp_shifted / sum_exp;
}

// 定义 silu 激活函数 (使用 libtorch)
// SiLU (a.k.a. swish) activation: x * sigmoid(x), libtorch implementation.
// @param input tensor of any shape
// @return tensor of the same shape with x * sigmoid(x) applied element-wise
torch::Tensor silu_torch(const torch::Tensor& input) {
	return sigmoid_torch(input) * input;
}

// 定义 silu 激活函数 (CPU 实现)
// SiLU (a.k.a. swish) activation: x * sigmoid(x), CPU reference implementation.
// Delegates the sigmoid factor to sigmoid_cpu above.
// @param input float32 tensor accepted by sigmoid_cpu
// @return tensor of the same shape with x * sigmoid(x) applied element-wise
torch::Tensor silu_cpu(const torch::Tensor& input) {
	return input * sigmoid_cpu(input);
}


// 测试函数
void testActivationFunctions() {
	torch::Tensor input = torch::randn({2, 3});
	std::cout << "Input Tensor: " << std::endl << input << std::endl;

	// Sigmoid 测试
	torch::Tensor sigmoid_torch_result = sigmoid_torch(input);
	torch::Tensor sigmoid_cpu_result = sigmoid_cpu(input);
	std::cout << "Sigmoid (torch) Result: " << std::endl << sigmoid_torch_result << std::endl;
	std::cout << "Sigmoid (CPU) Result: " << std::endl << sigmoid_cpu_result << std::endl;
	torch::Tensor sigmoid_diff = torch::abs(sigmoid_torch_result - sigmoid_cpu_result);
	float sigmoid_max_diff = torch::max(sigmoid_diff).item<float>();
	std::cout << "Sigmoid Max Difference: " << sigmoid_max_diff << std::endl;
	if (sigmoid_max_diff < TOLERANCE) {
		std::cout << "Sigmoid Difference is within tolerance." << std::endl;
	} else {
		std::cout << "Sigmoid Difference exceeds tolerance." << std::endl;
	}

	// ReLU 测试
	torch::Tensor relu_torch_result = relu_torch(input);
	torch::Tensor relu_cpu_result = relu_cpu(input);
	std::cout << "ReLU (torch) Result: " << std::endl << relu_torch_result << std::endl;
	std::cout << "ReLU (CPU) Result: " << std::endl << relu_cpu_result << std::endl;
	torch::Tensor relu_diff = torch::abs(relu_torch_result - relu_cpu_result);
	float relu_max_diff = torch::max(relu_diff).item<float>();
	std::cout << "ReLU Max Difference: " << relu_max_diff << std::endl;
	if (relu_max_diff < TOLERANCE) {
		std::cout << "ReLU Difference is within tolerance." << std::endl;
	} else {
		std::cout << "ReLU Difference exceeds tolerance." << std::endl;
	}

	// Tanh 测试
	torch::Tensor tanh_torch_result = tanh_torch(input);
	torch::Tensor tanh_cpu_result = tanh_cpu(input);
	std::cout << "Tanh (torch) Result: " << std::endl << tanh_torch_result << std::endl;
	std::cout << "Tanh (CPU) Result: " << std::endl << tanh_cpu_result << std::endl;
	torch::Tensor tanh_diff = torch::abs(tanh_torch_result - tanh_cpu_result);
	float tanh_max_diff = torch::max(tanh_diff).item<float>();
	std::cout << "Tanh Max Difference: " << tanh_max_diff << std::endl;
	if (tanh_max_diff < TOLERANCE) {
		std::cout << "Tanh Difference is within tolerance." << std::endl;
	} else {
		std::cout << "Tanh Difference exceeds tolerance." << std::endl;
	}

	// Softmax 测试
	torch::Tensor softmax_torch_result = softmax_torch(input, 1);
	torch::Tensor softmax_cpu_result = softmax_cpu(input, 1);
	std::cout << "Softmax (torch) Result: " << std::endl << softmax_torch_result << std::endl;
	std::cout << "Softmax (CPU) Result: " << std::endl << softmax_cpu_result << std::endl;
	torch::Tensor softmax_diff = torch::abs(softmax_torch_result - softmax_cpu_result);
	float softmax_max_diff = torch::max(softmax_diff).item<float>();
	std::cout << "Softmax Max Difference: " << softmax_max_diff << std::endl;
	if (softmax_max_diff < TOLERANCE) {
		std::cout << "Softmax Difference is within tolerance." << std::endl;
	} else {
		std::cout << "Softmax Difference exceeds tolerance." << std::endl;
	}

	// SiLU 测试
	torch::Tensor silu_torch_result = silu_torch(input);
	torch::Tensor silu_cpu_result = silu_cpu(input);
	std::cout << "SiLU (torch) Result: " << std::endl << silu_torch_result << std::endl;
	std::cout << "SiLU (CPU) Result: " << std::endl << silu_cpu_result << std::endl;
	torch::Tensor silu_diff = torch::abs(silu_torch_result - silu_cpu_result);
	float silu_max_diff = torch::max(silu_diff).item<float>();
	std::cout << "SiLU Max Difference: " << silu_max_diff << std::endl;
	if (silu_max_diff < TOLERANCE) {
		std::cout << "SiLU Difference is within tolerance." << std::endl;
	} else {
		std::cout << "SiLU Difference exceeds tolerance." << std::endl;
	}
}

// Program entry point: runs the activation-function comparison suite.
int main() {
	testActivationFunctions();
	return 0;
}
