/*
 * To change this license header, choose License Headers in Project Properties.
 * To change this template file, choose Tools | Templates
 * and open the template in the editor.
 */

/**
 *
 * @author chenjinlong
 */
/**
 * Fully-connected 2-4-4-1 network trained with plain stochastic gradient
 * descent (one sample at a time). Layer sizes are fixed: 2 inputs, two
 * sigmoid hidden layers of 4 units, and a single linear output unit.
 *
 * <p>Inherits {@code weights}, {@code biases} and {@code sigmoid} from
 * {@link AbstractNeuralNetwork} — NOTE(review): assumes the superclass
 * initializes them with shapes weights[0]=2x4, weights[1]=4x4,
 * weights[2]=4x1; confirm against the base class.
 */
public class GradientDescentNN extends AbstractNeuralNetwork {
    /** Step size applied to every weight/bias update. */
    private final double learningRate;

    /**
     * @param learningRate SGD step size (typically a small positive value)
     */
    public GradientDescentNN(double learningRate) {
        this.learningRate = learningRate;
    }

    /**
     * Trains the network on the given dataset for up to {@code epochs}
     * passes, stopping early once the mean squared error per epoch drops
     * below 1e-3. Progress is printed every 100 epochs.
     *
     * @param inputs  training samples; each row holds 2 features
     * @param targets expected scalar output per sample (same length as inputs)
     * @param epochs  maximum number of passes over the dataset
     */
    public void train(double[][] inputs, double[] targets, int epochs) {
        for (int epoch = 0; epoch < epochs; epoch++) {
            double totalLoss = 0;

            for (int i = 0; i < inputs.length; i++) {
                totalLoss += trainOne(inputs[i], targets[i]);
            }

            double avgLoss = totalLoss / inputs.length;

            if (epoch % 100 == 0) {
                System.out.println("梯度下降 轮次 " + epoch + ", 损失: " + avgLoss);
            }

            // Early stopping once the average squared error is small enough.
            if (avgLoss < 0.001) break;
        }
    }

    /**
     * Performs one forward/backward pass for a single sample and applies
     * the SGD update to every layer.
     *
     * <p>FIX: all gradients are now computed from the <em>pre-update</em>
     * weights before any weight is modified. The previous version updated
     * the output-layer (and then layer-2) weights first and then used
     * those already-modified weights to backpropagate the gradient,
     * which is incorrect backpropagation.
     *
     * @param input  2-element feature vector
     * @param target expected scalar output
     * @return squared error of this sample (before the update)
     */
    private double trainOne(double[] input, double target) {
        // ---- Forward pass ----
        double[] hidden1 = new double[4];
        double[] hidden2 = new double[4];
        double output;

        // Hidden layer 1: sigmoid(W0^T x + b0)
        for (int i = 0; i < 4; i++) {
            double sum = biases[0][i];
            for (int j = 0; j < 2; j++) {
                sum += input[j] * weights[0][j][i];
            }
            hidden1[i] = sigmoid(sum);
        }

        // Hidden layer 2: sigmoid(W1^T h1 + b1)
        for (int i = 0; i < 4; i++) {
            double sum = biases[1][i];
            for (int j = 0; j < 4; j++) {
                sum += hidden1[j] * weights[1][j][i];
            }
            hidden2[i] = sigmoid(sum);
        }

        // Output layer (linear activation): W2^T h2 + b2
        output = biases[2][0];
        for (int i = 0; i < 4; i++) {
            output += hidden2[i] * weights[2][i][0];
        }

        double error = output - target;
        double loss = error * error;

        // ---- Backward pass: compute ALL gradients before updating ----
        // Linear output, so d(output)/d(pre-activation) = 1.
        double outputGradient = error;

        // Hidden layer 2 gradients, using the CURRENT (pre-update) output
        // weights. sigmoid'(z) expressed via the activation: h * (1 - h).
        double[] hidden2Gradients = new double[4];
        for (int i = 0; i < 4; i++) {
            double upstreamGradient = outputGradient * weights[2][i][0];
            hidden2Gradients[i] = upstreamGradient * hidden2[i] * (1 - hidden2[i]);
        }

        // Hidden layer 1 gradients, using the CURRENT (pre-update) layer-2
        // weights.
        double[] hidden1Gradients = new double[4];
        for (int i = 0; i < 4; i++) {
            double sum = 0;
            for (int j = 0; j < 4; j++) {
                sum += weights[1][i][j] * hidden2Gradients[j];
            }
            hidden1Gradients[i] = sum * hidden1[i] * (1 - hidden1[i]);
        }

        // ---- Apply SGD updates (safe now that all gradients are cached) ----
        // Output layer.
        for (int i = 0; i < 4; i++) {
            weights[2][i][0] -= learningRate * outputGradient * hidden2[i];
        }
        biases[2][0] -= learningRate * outputGradient;

        // Hidden layer 2.
        for (int i = 0; i < 4; i++) {
            for (int j = 0; j < 4; j++) {
                weights[1][i][j] -= learningRate * hidden2Gradients[j] * hidden1[i];
            }
        }
        for (int i = 0; i < 4; i++) {
            biases[1][i] -= learningRate * hidden2Gradients[i];
        }

        // Hidden layer 1.
        for (int i = 0; i < 2; i++) {
            for (int j = 0; j < 4; j++) {
                weights[0][i][j] -= learningRate * hidden1Gradients[j] * input[i];
            }
        }
        for (int i = 0; i < 4; i++) {
            biases[0][i] -= learningRate * hidden1Gradients[i];
        }

        return loss;
    }
}