/*
 * To change this license header, choose License Headers in Project Properties.
 * To change this template file, choose Tools | Templates
 * and open the template in the editor.
 */
package network;

/**
 *
 * @author Raise
 */
import network.SoftmaxLayer;
import network.MaxPoolLayer;
import network.FullyConnectedLayer;
import network.ConvLayer;
import network.Layer;
import date.ImageData;
import java.util.ArrayList;
import java.util.List;
import java.util.Arrays;
import java.util.Random;

/**
 * A small convolutional neural network for 28x28 grayscale image
 * classification (10 classes), trained with per-sample SGD and
 * cross-entropy loss.
 *
 * <p>Architecture: Conv(5x5, 8 filters) -> Pool(2x2) -> FC(1152, 128)
 * -> FC(128, 10) -> Softmax. Not thread-safe: training mutates layer
 * state and the learning rate.
 */
public class CNN {

    /** Layers applied in order on forward, reverse order on backward. */
    private final List<Layer> layers;
    /** Current SGD step size; decayed every 5 epochs during training. */
    private double learningRate;
    /** Seeded RNG so weight initialization is reproducible. */
    private final Random rand;

    /**
     * Builds the fixed network architecture.
     *
     * @param learningRate initial learning rate for gradient updates
     * @param seed         RNG seed for reproducible weight initialization
     */
    public CNN(double learningRate, long seed) {
        this.layers = new ArrayList<>();
        this.learningRate = learningRate;
        this.rand = new Random(seed);
        initializeNetwork();
    }

    /**
     * Assembles the layer stack:
     * Conv -> ReLU -> Pool -> FC -> ReLU -> FC -> Softmax.
     */
    private void initializeNetwork() {
        System.out.println("初始化CNN网络...");

        // Input: 28x28 grayscale image.
        layers.add(new ConvLayer(28, 28, 5, 8, rand));      // output: 24x24x8 (activation included)
        layers.add(new MaxPoolLayer(24, 24, 8, 2));         // output: 12x12x8 = 1152
        layers.add(new FullyConnectedLayer(1152, 128, true, rand)); // hidden layer
        layers.add(new FullyConnectedLayer(128, 10, false, rand));  // output layer
        layers.add(new SoftmaxLayer(10));                   // softmax over 10 classes

        System.out.println("CNN网络初始化完成");
        System.out.println("网络结构: 28x28 -> Conv(5x5,8) -> Pool(2x2) -> FC(128) -> FC(10) -> Softmax");
    }

    /**
     * Runs one forward pass through all layers.
     *
     * @param input flattened 28x28 pixel vector
     * @return the final layer's output (class probabilities after softmax)
     */
    public double[] forward(double[] input) {
        double[] current = input;
        for (Layer layer : layers) {
            current = layer.forward(current);
        }
        return current;
    }

    /**
     * Propagates the loss gradient backwards through all layers,
     * updating each layer's parameters with the current learning rate.
     *
     * @param gradient gradient of the loss w.r.t. the network output
     */
    public void backward(double[] gradient) {
        double[] currentGradient = gradient;
        for (int i = layers.size() - 1; i >= 0; i--) {
            currentGradient = layers.get(i).backward(currentGradient, learningRate);
        }
    }

    /**
     * Trains the network with per-sample SGD, printing loss and accuracy
     * per epoch. The learning rate is multiplied by 0.8 every 5 epochs.
     *
     * @param trainingData labeled images; must be non-null and non-empty
     * @param epochs       number of passes over the training data
     * @throws IllegalArgumentException if {@code trainingData} is null or empty
     */
    public void train(List<ImageData> trainingData, int epochs) {
        // Guard: an empty set would make the per-epoch averages 0/0 = NaN.
        if (trainingData == null || trainingData.isEmpty()) {
            throw new IllegalArgumentException("trainingData must not be empty");
        }

        System.out.println("开始训练CNN...");

        // The last layer is always the softmax added in initializeNetwork();
        // hoist the cast out of the per-sample loop.
        SoftmaxLayer softmaxLayer = (SoftmaxLayer) layers.get(layers.size() - 1);

        for (int epoch = 0; epoch < epochs; epoch++) {
            double totalLoss = 0;
            int correct = 0;

            for (ImageData data : trainingData) {
                // Forward pass.
                double[] output = forward(data.getPixels());
                int predicted = argmax(output);

                if (predicted == data.getLabel()) {
                    correct++;
                }

                // Loss and output-layer gradient from the softmax layer.
                totalLoss += softmaxLayer.crossEntropyLoss(data.getLabel());
                double[] gradient = softmaxLayer.crossEntropyGradient(data.getLabel());

                // Backward pass with parameter update.
                backward(gradient);
            }

            double accuracy = (double) correct / trainingData.size();
            double avgLoss = totalLoss / trainingData.size();

            System.out.printf("Epoch %d: 损失=%.4f, 准确率=%.4f\n",
                    epoch + 1, avgLoss, accuracy);

            // Learning-rate decay: shrink by 20% every 5 epochs.
            if (epoch % 5 == 0 && epoch > 0) {
                learningRate *= 0.8;
                System.out.printf("调整学习率: %.6f\n", learningRate);
            }
        }
    }

    /**
     * Classifies a single input image.
     *
     * @param input flattened 28x28 pixel vector
     * @return the index of the most probable class (0-9)
     */
    public int predict(double[] input) {
        double[] output = forward(input);
        return argmax(output);
    }

    /**
     * Returns the index of the largest value in the array
     * (first occurrence wins on ties).
     */
    private int argmax(double[] array) {
        int maxIndex = 0;
        for (int i = 1; i < array.length; i++) {
            if (array[i] > array[maxIndex]) {
                maxIndex = i;
            }
        }
        return maxIndex;
    }

}
