-- Define a single-neuron neural network "class" using the standard Lua
-- prototype pattern: a table whose __index metafield points to itself,
-- so instances created with setmetatable() find methods on this table.
local NeuralNetwork = {}
NeuralNetwork.__index = NeuralNetwork

-- Create a new neural network instance.
-- Generalized from a hard-coded 3-weight layout: callers may now pass the
-- number of inputs; calling new() with no argument behaves exactly as before.
-- @tparam[opt=3] number num_inputs number of input connections (one weight each)
-- @treturn table a NeuralNetwork instance with small random weights and bias
function NeuralNetwork.new(num_inputs)
    num_inputs = num_inputs or 3  -- default preserves backward compatibility
    local self = setmetatable({}, NeuralNetwork)
    -- Initialize weights and bias with small random values in [0, 0.1)
    -- so training starts near the sigmoid's steepest region.
    self.weights = {}
    for i = 1, num_inputs do
        self.weights[i] = math.random() * 0.1
    end
    self.bias = math.random() * 0.1
    self.learning_rate = 0.1
    return self
end

-- Sigmoid activation function: maps any real number into the open
-- interval (0, 1), with sigmoid(0) == 0.5.
local function sigmoid(x)
    local e = math.exp(-x)
    return 1 / (1 + e)
end

-- Derivative of the sigmoid, expressed in terms of the sigmoid's
-- OUTPUT y rather than its input: if y = sigmoid(z), then
-- dSigmoid/dz = y * (1 - y). Callers must therefore pass an activation
-- value, not a raw pre-activation.
local function sigmoid_derivative(y)
    local slope = y * (1 - y)
    return slope
end

-- Forward pass: weighted sum of the inputs plus bias, squashed by sigmoid.
-- Generalized from a hard-coded three-term sum to a loop over the weight
-- table, so it works for any input count; results are identical for the
-- original three-input case.
-- @tparam table inputs array of numbers, one per weight
-- @treturn number activation in the open interval (0, 1)
function NeuralNetwork:forward(inputs)
    -- Weighted sum: z = sum(w_i * x_i) + b
    local z = self.bias
    for i = 1, #self.weights do
        z = z + self.weights[i] * inputs[i]
    end
    return sigmoid(z)
end

-- Train on a single example with one step of gradient descent.
-- Fixes: the local `error` shadowed Lua's built-in error() function within
-- this scope (renamed to `err`); dead commented-out code removed; trailing
-- semicolons dropped for consistency with the rest of the file.
-- @tparam table inputs array of numbers, one per weight
-- @tparam number target desired output for this example
function NeuralNetwork:train(inputs, target)
    -- Forward pass: compute the current prediction.
    local output = self:forward(inputs)

    -- Prediction error: target minus actual output.
    local err = target - output

    -- Shared factor of every update:
    -- learning_rate * error * dSigmoid/dz (expressed via the output value).
    local common_adjust = self.learning_rate * err * sigmoid_derivative(output)

    -- Backpropagation: each weight moves by the common factor scaled by
    -- its input (delta_w_i = common_adjust * x_i).
    for i = 1, #self.weights do
        self.weights[i] = self.weights[i] + common_adjust * inputs[i]
    end

    -- The bias behaves like a weight whose input is always 1.
    self.bias = self.bias + common_adjust
end

-- Print the current weights and bias to stdout.
-- Generalized from three hard-coded weight indices to any weight count.
function NeuralNetwork:print_weights()
    -- print() tab-separates its arguments, so joining the weights with
    -- "\t" reproduces the original "Weights:\tw1\tw2\tw3" output exactly.
    print("Weights:", table.concat(self.weights, "\t"))
    print("Bias:", self.bias)
end

-- Demo: train the network on a tiny dataset where the target equals the
-- first input, then print the learned predictions and parameters.

-- Seed the RNG so each run starts from different random weights.
-- (Without this, Lua 5.1-5.3 produce the identical sequence every run.)
math.randomseed(os.time())

local nn = NeuralNetwork.new()

-- Training data: target is 1 exactly when the first input is 1.
local training_data = {
    {inputs = {0, 0, 1}, target = 0},
    {inputs = {1, 1, 1}, target = 1},
    {inputs = {1, 0, 1}, target = 1},
    {inputs = {1, 1, 0}, target = 1},
    {inputs = {1, 0, 0}, target = 1},
    {inputs = {0, 1, 1}, target = 0},
    {inputs = {0, 0, 0}, target = 0},
    {inputs = {0, 1, 0}, target = 0}
}

-- Train: 10000 epochs, one gradient-descent step per example per epoch.
for _ = 1, 10000 do
    for _, data in ipairs(training_data) do
        nn:train(data.inputs, data.target)
    end
end

-- Show the trained network's output on every training example.
print("After Training:")
for _, data in ipairs(training_data) do
    local output = nn:forward(data.inputs)
    print(string.format("Inputs: %s, %s, %s -> Output: %.4f",
        data.inputs[1], data.inputs[2], data.inputs[3], output))
end

-- Print the final weights and bias.
-- (The original also computed an unused `out = nn:forward({0, 0, 0})`;
-- that dead statement has been removed.)
nn:print_weights()
