-- ReLU activation: returns x for positive inputs, 0 otherwise.
local function relu(x)
    if x > 0 then
        return x
    end
    return 0
end

-- Derivative of ReLU: 1 where the input is positive, 0 elsewhere
-- (the subgradient at x == 0 is taken as 0).
local function relu_derivative(x)
    if x > 0 then
        return 1
    end
    return 0
end

-- Neuron "class" table; instances delegate method lookup to it via __index.
local Neuron = {}
Neuron.__index = Neuron

-- Constructor: build a neuron with `input_size` weights and a bias,
-- all initialized uniformly in [-0.1, 0.1).
-- `use_activation` selects whether forward() applies ReLU.
function Neuron:new(input_size, use_activation)
    local obj = setmetatable({}, self)
    -- Bias is drawn first, then the weights, so the math.random()
    -- consumption order stays deterministic for a given seed.
    obj.bias = math.random() * 0.2 - 0.1
    local weights = {}
    for idx = 1, input_size do
        weights[idx] = math.random() * 0.2 - 0.1
    end
    obj.weights = weights
    obj.use_activation = use_activation
    return obj
end

-- Forward pass: weighted sum of inputs plus bias, optionally passed
-- through ReLU. Caches the result in self.output and returns it.
function Neuron:forward(inputs)
    local total = self.bias
    for idx = 1, #inputs do
        total = total + inputs[idx] * self.weights[idx]
    end
    -- Note: an `and/or` ternary is unsafe here because relu(total)
    -- can legitimately be 0, so use an explicit branch.
    local result
    if self.use_activation then
        result = relu(total)  -- hidden layer: ReLU
    else
        result = total        -- output layer: linear
    end
    self.output = result
    return result
end

-- Backward pass: gradient-ascent step on weights and bias.
-- `delta` is the error signal for this neuron; the local gradient is
-- the ReLU derivative at the cached output (1 for a linear neuron).
function Neuron:backward(inputs, delta, learning_rate)
    local grad = 1
    if self.use_activation then
        grad = relu_derivative(self.output)
    end
    -- learning_rate * delta * grad evaluates left-to-right exactly as
    -- in-line, so hoisting it preserves float results bit-for-bit.
    local step = learning_rate * delta * grad
    for idx = 1, #inputs do
        self.weights[idx] = self.weights[idx] + step * inputs[idx]
    end
    self.bias = self.bias + step
end

-- Layer "class" table; a layer is an ordered collection of Neurons.
local Layer = {}
Layer.__index = Layer

-- Constructor: a layer of `num_neurons` neurons, each taking
-- `input_size` inputs; `use_activation` is forwarded to every neuron.
function Layer:new(input_size, num_neurons, use_activation)
    local obj = setmetatable({}, self)
    local neurons = {}
    for idx = 1, num_neurons do
        neurons[idx] = Neuron:new(input_size, use_activation)
    end
    obj.neurons = neurons
    return obj
end

-- Forward pass: returns the array of every neuron's output for `inputs`.
function Layer:forward(inputs)
    local outputs = {}
    for idx = 1, #self.neurons do
        outputs[idx] = self.neurons[idx]:forward(inputs)
    end
    return outputs
end

-- Backward pass: update each neuron with its matching error signal
-- from `deltas` (deltas[i] belongs to neurons[i]).
function Layer:backward(inputs, deltas, learning_rate)
    for idx = 1, #self.neurons do
        self.neurons[idx]:backward(inputs, deltas[idx], learning_rate)
    end
end

-- NeuralNetwork "class" table: one hidden layer (ReLU) + one linear
-- output layer.
local NeuralNetwork = {}
NeuralNetwork.__index = NeuralNetwork

-- Constructor: wire up a 2-layer network. The hidden layer applies
-- ReLU; the output layer is linear (no activation).
function NeuralNetwork:new(input_size, hidden_size, output_size)
    local net = setmetatable({}, self)
    net.hidden_layer = Layer:new(input_size, hidden_size, true)
    net.output_layer = Layer:new(hidden_size, output_size, false)
    return net
end

-- Forward pass through both layers; returns the scalar prediction
-- (the network is assumed to have a single output neuron).
function NeuralNetwork:forward(inputs)
    local hidden = self.hidden_layer:forward(inputs)
    return self.output_layer:forward(hidden)[1]
end

-- Backward pass: one SGD step toward `target` for the sample `inputs`.
--
-- Fix: the original ran self:forward(inputs) (hidden + output layer)
-- and then re-ran self.hidden_layer:forward(inputs) a second time.
-- Weights do not change between those calls and forward() is
-- deterministic, so one forward pass reusing the hidden activations
-- produces identical updates at roughly half the cost.
function NeuralNetwork:backward(inputs, target, learning_rate)
    -- Single forward pass; keep the hidden activations for reuse.
    local hidden_outputs = self.hidden_layer:forward(inputs)
    local output = self.output_layer:forward(hidden_outputs)[1]

    -- Output layer is linear, so its delta is just the raw error.
    local output_delta = target - output

    -- Propagate the error back through the output weights to get each
    -- hidden neuron's error signal.
    local hidden_errors = {}
    for j = 1, #self.hidden_layer.neurons do
        local err = 0
        for _, neuron in ipairs(self.output_layer.neurons) do
            err = err + output_delta * neuron.weights[j]
        end
        hidden_errors[j] = err
    end

    -- Update output layer first (as in the original): its weight update
    -- uses the pre-update hidden activations either way.
    self.output_layer:backward(hidden_outputs, {output_delta}, learning_rate)
    self.hidden_layer:backward(inputs, hidden_errors, learning_rate)
end

-- Train with per-sample SGD for `epochs` passes over the data.
-- Prints the mean squared error every 100 epochs. Note the error is
-- measured with a fresh forward pass AFTER the update for each sample,
-- matching the original behavior.
function NeuralNetwork:train(inputs, targets, epochs, learning_rate)
    local n = #inputs
    for epoch = 1, epochs do
        local sum_sq_error = 0
        for i = 1, n do
            local sample, target = inputs[i], targets[i]

            -- One gradient step on this sample.
            self:backward(sample, target, learning_rate)

            -- Accumulate the post-update squared error.
            local residual = target - self:forward(sample)
            sum_sq_error = sum_sq_error + residual ^ 2
        end

        if epoch % 100 == 0 then
            print(string.format("Epoch %d, Error: %.6f", epoch, sum_sq_error / n))
        end
    end
end

-- Evaluate the network on a held-out set, printing each prediction
-- next to its target.
--
-- Fix: the original format string hard-coded exactly three input
-- features; this version formats however many features each sample
-- has (output is byte-identical for 3-feature inputs).
function NeuralNetwork:test(inputs, targets)
    for i = 1, #inputs do
        local predicted = self:forward(inputs[i])
        -- Render each feature as %.2f, joined with ", ".
        local parts = {}
        for j = 1, #inputs[i] do
            parts[j] = string.format("%.2f", inputs[i][j])
        end
        print(string.format("Input: (%s), Predicted Output: %.6f, Target: %.6f",
            table.concat(parts, ", "), predicted, targets[i]))
    end
end

-- Generate `num_samples` random (input, target) pairs.
-- Inputs x1, x2, x3 are uniform in [0, 1); the target function is
--   y = 5*x1 + 10*x2^3 + 5*sin(x3)
-- (the original comment misstated this formula; the code is unchanged).
local function generate_data(num_samples)
    local inputs, targets = {}, {}
    for sample = 1, num_samples do
        -- Draw the three features in a fixed order so the random
        -- sequence is reproducible for a given seed.
        local x1 = math.random()
        local x2 = math.random()
        local x3 = math.random()

        inputs[sample] = {x1, x2, x3}
        targets[sample] = x1 * 5 + 10 * x2 ^ 3 + 5 * math.sin(x3)
    end
    return inputs, targets
end

-- Entry point: generate data, split 80/20 train/test, train, evaluate.
local function main()
    -- 100 random samples of the synthetic regression task.
    local num_samples = 100
    local inputs, targets = generate_data(num_samples)

    -- First 80% for training, remainder for testing.
    local split = num_samples * 0.8
    local train_inputs, train_targets = {}, {}
    local test_inputs, test_targets = {}, {}
    for i = 1, num_samples do
        if i <= split then
            train_inputs[#train_inputs + 1] = inputs[i]
            train_targets[#train_targets + 1] = targets[i]
        else
            test_inputs[#test_inputs + 1] = inputs[i]
            test_targets[#test_targets + 1] = targets[i]
        end
    end

    -- Network hyperparameters.
    local input_size = 3
    local hidden_size = 5
    local output_size = 1
    local epochs = 4000
    local learning_rate = 0.03

    local network = NeuralNetwork:new(input_size, hidden_size, output_size)

    -- Train, then report held-out predictions.
    network:train(train_inputs, train_targets, epochs, learning_rate)

    print("\nTesting the neural network:")
    network:test(test_inputs, test_targets)
end

-- Run the demo.
main()

