package com.cu.machinelearning.neural;

import java.util.Random;

/**
 * Forward-only ResNet demo: runnable for inference, but without backpropagation
 * (no training is implemented).
 */
public class ResNetDemo3 {
    // 激活函数：ReLU
    public static double relu(double x) {
        return Math.max(0, x);
    }

    // 对数组应用ReLU激活
    public static double[] relu(double[] x) {
        double[] result = new double[x.length];
        for (int i = 0; i < x.length; i++) {
            result[i] = relu(x[i]);
        }
        return result;
    }

    // 对三维数组应用ReLU激活
    public static double[][][] relu3D(double[][][] x) {
        int depth = x.length;
        int height = x[0].length;
        int width = x[0][0].length;

        double[][][] result = new double[depth][height][width];
        for (int d = 0; d < depth; d++) {
            for (int i = 0; i < height; i++) {
                for (int j = 0; j < width; j++) {
                    result[d][i][j] = relu(x[d][i][j]);
                }
            }
        }
        return result;
    }

    // 卷积操作
    public static double[][][] convolution(double[][][] input, double[][][][] kernel,
                                           double[] bias, int stride) {
        int inputDepth = input.length;
        int inputHeight = input[0].length;
        int inputWidth = input[0][0].length;
        int numKernels = kernel.length;
        int kernelSize = kernel[0].length;

        // 计算输出尺寸
        int outputHeight = (inputHeight - kernelSize) / stride + 1;
        int outputWidth = (inputWidth - kernelSize) / stride + 1;

        double[][][] output = new double[numKernels][outputHeight][outputWidth];

        // 执行卷积
        for (int k = 0; k < numKernels; k++) {
            for (int i = 0; i < outputHeight; i++) {
                for (int j = 0; j < outputWidth; j++) {
                    double sum = 0.0;
                    for (int d = 0; d < inputDepth; d++) {
                        for (int x = 0; x < kernelSize; x++) {
                            for (int y = 0; y < kernelSize; y++) {
                                sum += input[d][i * stride + x][j * stride + y] *
                                        kernel[k][x][y][d];
                            }
                        }
                    }
                    output[k][i][j] = sum + bias[k];
                }
            }
        }
        return output;
    }

    // 1x1卷积（用于调整通道数）
    public static double[][][] conv1x1(double[][][] input, double[][][][] kernel,
                                       double[] bias) {
        return convolution(input, kernel, bias, 1);
    }

    // 最大池化
    public static double[][][] maxPooling(double[][][] input, int poolSize, int stride) {
        int depth = input.length;
        int inputHeight = input[0].length;
        int inputWidth = input[0][0].length;

        int outputHeight = (inputHeight - poolSize) / stride + 1;
        int outputWidth = (inputWidth - poolSize) / stride + 1;

        double[][][] output = new double[depth][outputHeight][outputWidth];

        for (int d = 0; d < depth; d++) {
            for (int i = 0; i < outputHeight; i++) {
                for (int j = 0; j < outputWidth; j++) {
                    double max = 0.0;
                    for (int x = 0; x < poolSize; x++) {
                        for (int y = 0; y < poolSize; y++) {
                            max = Math.max(max, input[d][i * stride + x][j * stride + y]);
                        }
                    }
                    output[d][i][j] = max;
                }
            }
        }
        return output;
    }

    // 残差块：包含两个卷积层和跳跃连接
    static class ResidualBlock {
        private double[][][][] conv1Weights;
        private double[] conv1Bias;
        private double[][][][] conv2Weights;
        private double[] conv2Bias;
        private double[][][][] shortcutWeights;  // 1x1卷积用于调整维度
        private double[] shortcutBias;
        private boolean useShortcutConv;

        public ResidualBlock(int inputChannels, int outputChannels, int stride, Random random) {
            // 判断是否需要使用1x1卷积调整维度
            useShortcutConv = (inputChannels != outputChannels) || (stride != 1);

            // 初始化第一个卷积层 (3x3)
            conv1Weights = initializeConvWeights(outputChannels, 3, inputChannels, random);
            conv1Bias = new double[outputChannels];
            initializeBias(conv1Bias);

            // 初始化第二个卷积层 (3x3)
            conv2Weights = initializeConvWeights(outputChannels, 3, outputChannels, random);
            conv2Bias = new double[outputChannels];
            initializeBias(conv2Bias);

            // 如果需要，初始化用于调整维度的1x1卷积
            if (useShortcutConv) {
                shortcutWeights = initializeConvWeights(outputChannels, 1, inputChannels, random);
                shortcutBias = new double[outputChannels];
                initializeBias(shortcutBias);
            }
        }

        // 前向传播
        public double[][][] forward(double[][][] input) {
            // 主路径
            double[][][] x = convolution(input, conv1Weights, conv1Bias, 1);
            x = relu3D(x);
            x = convolution(x, conv2Weights, conv2Bias, 1);

            // 跳跃连接路径
            double[][][] shortcut;
            if (useShortcutConv) {
                shortcut = conv1x1(input, shortcutWeights, shortcutBias);
            } else {
                shortcut = input;  // 直接连接，维度相同
            }

            // 残差连接：主路径输出 + 跳跃连接输出
            int depth = x.length;
            int height = x[0].length;
            int width = x[0][0].length;

            double[][][] output = new double[depth][height][width];
            for (int d = 0; d < depth; d++) {
                for (int i = 0; i < height; i++) {
                    for (int j = 0; j < width; j++) {
                        output[d][i][j] = relu(x[d][i][j] + shortcut[d][i][j]);
                    }
                }
            }

            return output;
        }

        // 初始化卷积核权重
        private double[][][][] initializeConvWeights(int numKernels, int kernelSize,
                                                     int inputDepth, Random random) {
            double[][][][] weights = new double[numKernels][kernelSize][kernelSize][inputDepth];
            double scale = Math.sqrt(2.0 / (kernelSize * kernelSize * inputDepth));

            for (int k = 0; k < numKernels; k++) {
                for (int i = 0; i < kernelSize; i++) {
                    for (int j = 0; j < kernelSize; j++) {
                        for (int d = 0; d < inputDepth; d++) {
                            weights[k][i][j][d] = random.nextGaussian() * scale;
                        }
                    }
                }
            }
            return weights;
        }

        // 初始化偏置
        private void initializeBias(double[] bias) {
            for (int i = 0; i < bias.length; i++) {
                bias[i] = 0.1;
            }
        }
    }

    // 简化版ResNet模型
    static class SimpleResNet {
        private double[][][][] conv1Weights;  // 初始卷积层
        private double[] conv1Bias;
        private ResidualBlock[] blocks;       // 残差块数组
        private double[][] fcWeights;         // 全连接层
        private double[] fcBias;
        private int numClasses;

        public SimpleResNet(int numClasses, Random random) {
            this.numClasses = numClasses;

            // 初始化第一个卷积层
            conv1Weights = initializeConvWeights(64, 7, 3, random);  // 7x7卷积，输出64通道
            conv1Bias = new double[64];
            initializeBias(conv1Bias);

            // 创建残差块
            blocks = new ResidualBlock[4];
            blocks[0] = new ResidualBlock(64, 64, 1, random);   // 输入输出通道相同
            blocks[1] = new ResidualBlock(64, 128, 2, random);  // 通道加倍，步长2
            blocks[2] = new ResidualBlock(128, 256, 2, random); // 通道加倍，步长2
            blocks[3] = new ResidualBlock(256, 512, 2, random); // 通道加倍，步长2

            // 初始化全连接层
            fcWeights = new double[numClasses][512];  // 假设最后特征图大小为1x1
            fcBias = new double[numClasses];
            initializeFcWeights(fcWeights, fcBias, random);
        }

        // 前向传播
        public double[] forward(double[][][] input) {
            // 初始卷积层
            double[][][] x = convolution(input, conv1Weights, conv1Bias, 2);
            x = relu3D(x);
            x = maxPooling(x, 3, 2);  // 3x3最大池化

            // 通过所有残差块
            for (ResidualBlock block : blocks) {
                x = block.forward(x);
            }

            // 全局平均池化
            double[] features = globalAvgPool(x);

            // 全连接层输出
            double[] logits = fullyConnected(features, fcWeights, fcBias);

            // Softmax激活
            return softmax(logits);
        }

        // 全局平均池化
        private double[] globalAvgPool(double[][][] input) {
            int depth = input.length;
            int height = input[0].length;
            int width = input[0][0].length;

            double[] result = new double[depth];
            for (int d = 0; d < depth; d++) {
                double sum = 0.0;
                for (int i = 0; i < height; i++) {
                    for (int j = 0; j < width; j++) {
                        sum += input[d][i][j];
                    }
                }
                result[d] = sum / (height * width);
            }
            return result;
        }

        // 全连接层
        private double[] fullyConnected(double[] input, double[][] weights, double[] bias) {
            int outputSize = weights.length;
            double[] output = new double[outputSize];

            for (int i = 0; i < outputSize; i++) {
                double sum = 0.0;
                for (int j = 0; j < input.length; j++) {
                    sum += input[j] * weights[i][j];
                }
                output[i] = sum + bias[i];
            }
            return output;
        }

        // Softmax激活函数
        private double[] softmax(double[] input) {
            double[] output = new double[input.length];
            double expSum = 0.0;
            double maxVal = input[0];

            // 防止数值溢出
            for (double val : input) {
                maxVal = Math.max(maxVal, val);
            }

            for (int i = 0; i < input.length; i++) {
                output[i] = Math.exp(input[i] - maxVal);
                expSum += output[i];
            }

            for (int i = 0; i < output.length; i++) {
                output[i] /= expSum;
            }

            return output;
        }

        // 初始化卷积权重
        private double[][][][] initializeConvWeights(int numKernels, int kernelSize,
                                                     int inputDepth, Random random) {
            double[][][][] weights = new double[numKernels][kernelSize][kernelSize][inputDepth];
            double scale = Math.sqrt(2.0 / (kernelSize * kernelSize * inputDepth));

            for (int k = 0; k < numKernels; k++) {
                for (int i = 0; i < kernelSize; i++) {
                    for (int j = 0; j < kernelSize; j++) {
                        for (int d = 0; d < inputDepth; d++) {
                            weights[k][i][j][d] = random.nextGaussian() * scale;
                        }
                    }
                }
            }
            return weights;
        }

        // 初始化偏置
        private void initializeBias(double[] bias) {
            for (int i = 0; i < bias.length; i++) {
                bias[i] = 0.1;
            }
        }

        // 初始化全连接层权重
        private void initializeFcWeights(double[][] weights, double[] bias, Random random) {
            int rows = weights.length;
            int cols = weights[0].length;
            double scale = Math.sqrt(2.0 / cols);

            for (int i = 0; i < rows; i++) {
                for (int j = 0; j < cols; j++) {
                    weights[i][j] = random.nextGaussian() * scale;
                }
            }

            initializeBias(bias);
        }

        // 预测
        public int predict(double[][][] input) {
            double[] outputs = forward(input);
            int maxIndex = 0;
            double maxVal = outputs[0];

            for (int i = 1; i < outputs.length; i++) {
                if (outputs[i] > maxVal) {
                    maxVal = outputs[i];
                    maxIndex = i;
                }
            }

            return maxIndex;
        }
    }

    /** Demo driver: builds the network, runs one random image through it. */
    public static void main(String[] args) {
        // Fixed seed so the demo is reproducible.
        Random rng = new Random(42);

        // Forward-only ResNet configured for a 10-class task.
        SimpleResNet model = new SimpleResNet(10, rng);

        // Random 3-channel 224x224 "image" as test input.
        double[][][] image = new double[3][224][224];
        for (int channel = 0; channel < 3; channel++) {
            for (int row = 0; row < 224; row++) {
                for (int col = 0; col < 224; col++) {
                    image[channel][row][col] = rng.nextGaussian() * 0.1;
                }
            }
        }

        // Forward pass: report the predicted class index.
        int prediction = model.predict(image);
        System.out.println("预测类别: " + prediction);

        // And the full probability distribution over classes.
        double[] probabilities = model.forward(image);
        System.out.println("预测概率分布:");
        for (int i = 0; i < probabilities.length; i++) {
            System.out.printf("类别 %d: %.4f\n", i, probabilities[i]);
        }
    }
}
