-- Logistic (sigmoid) activation: maps any real x into the open interval (0, 1).
function sigmoid(x)
    local e = math.exp(-x)
    return 1 / (1 + e)
end

-- Derivative of the sigmoid expressed in terms of the sigmoid's OUTPUT:
-- if y = sigmoid(z), then dy/dz = y * (1 - y).
-- Callers must pass the activation output, not the raw weighted sum
-- (train() does exactly that with `output`).
function sigmoid_derivative(x)
    local one_minus = 1 - x
    return x * one_minus
end

-- Model parameters for a single neuron with three inputs: three weights
-- plus a bias, each randomly initialized in [0, 1).
-- NOTE(review): math.randomseed is never called; on Lua <= 5.3 the initial
-- parameters are therefore identical on every run — confirm if intended
-- (Lua 5.4 auto-seeds math.random).
local weights = {math.random(), math.random(), math.random()}
local bias = math.random()
local learning_rate = 0.1

-- Forward pass: sigmoid of (bias + dot product of inputs and weights).
function forward(inputs)
    local activation = bias
    for index, value in ipairs(inputs) do
        activation = activation + value * weights[index]
    end
    return sigmoid(activation)
end

-- One training step: forward pass followed by a gradient-descent update.
-- inputs: array of input values (same length as `weights`).
-- target: desired network output for these inputs.
-- Side effects: mutates the shared `weights` array and `bias`.
function train(inputs, target)
    -- Forward pass.
    local output = forward(inputs)

    -- Prediction error. Named `err` (not `error`) so the built-in
    -- error() function is not shadowed inside this scope.
    local err = target - output

    -- The step factor is identical for every weight, so compute it once
    -- instead of re-evaluating sigmoid_derivative(output) inside the loop.
    -- Evaluation order (lr * err) * sd matches the original expression,
    -- so the floating-point results are bit-identical.
    local delta = learning_rate * err * sigmoid_derivative(output)

    -- Gradient-descent update for each weight and the bias.
    for i = 1, #weights do
        weights[i] = weights[i] + delta * inputs[i]
    end
    bias = bias + delta
end

-- Print the current weights and bias.
-- Generalized to any number of weights instead of hard-coding exactly
-- three (output is unchanged for the 3-weight case: print's default
-- tab-separated formatting). `table.unpack` is 5.2+; fall back to the
-- 5.1 global `unpack` so the script runs on either version.
function print_weights()
    local expand = table.unpack or unpack
    print("Weights:", expand(weights))
    print("Bias:", bias)
end

-- Training set: all 8 binary input triples with their target outputs.
-- In every sample the target equals inputs[1], so the network is being
-- trained to reproduce the first input and ignore the other two.
local training_data = {
    {inputs = {0, 0, 1}, target = 0},
    {inputs = {1, 1, 1}, target = 1},
    {inputs = {1, 0, 1}, target = 1},
    {inputs = {1, 1, 0}, target = 1},
    {inputs = {1, 0, 0}, target = 1},
    {inputs = {0, 1, 1}, target = 0},
    {inputs = {0, 0, 0}, target = 0},
    {inputs = {0, 1, 0}, target = 0}
}

-- Training: run every sample through one gradient-descent step,
-- repeated for 10000 epochs.
for _ = 1, 10000 do
    for _, sample in ipairs(training_data) do
        train(sample.inputs, sample.target)
    end
end

-- Evaluation: run each training sample through the trained network,
-- then dump the final parameters.
print("Testing trained network:")
for _, sample in ipairs(training_data) do
    local prediction = forward(sample.inputs)
    print("Input: " .. table.concat(sample.inputs, ", ") .. " | Output: " .. prediction)
end

print_weights()