#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <math.h>

// Network hyperparameters
#define IMAGE_SIZE 28
#define INPUT_SIZE (IMAGE_SIZE * IMAGE_SIZE)  // 784
#define HIDDEN1_SIZE 128
#define HIDDEN2_SIZE 10  // second (output) layer: 10 neurons, one per digit
#define OUTPUT_SIZE 10

// Global buffers shared between main() and the helpers below
char name[100];      // user-typed file name (bounded by scanf "%99s")
char judge[100];     // Y/N answer to the "continue?" prompt
char fullpath[100];  // "images/" + name, built with snprintf
float image[IMAGE_SIZE][IMAGE_SIZE];  // current input image, pixels scaled to [0,1]

// Neural network weights and biases (heap-allocated in init_neural_network)
double* w1;  // input -> hidden-1 weights, flat row-major [784][128]
double* b1;  // hidden-1 biases [128]
double* w2;  // hidden-1 -> output weights, flat row-major [128][10]
double* b2;  // output-layer biases [10]

// Rectified linear unit: passes positive values through, clamps the rest to zero.
double relu(double x) {
    if (x > 0) {
        return x;
    }
    return 0;
}

double softmax(double* x, int index, int size) {
    double max_val = x[0];
    for (int i = 1; i < size; i++) {
        if (x[i] > max_val) {
            max_val = x[i];
        }
    }
    
    double sum = 0.0;
    for (int i = 0; i < size; i++) {
        sum += exp(x[i] - max_val);
    }
    
    return exp(x[index] - max_val) / sum;
}
// Load `size` whitespace-separated doubles from `filename` into `array`.
// The weight files are mandatory, so any open/parse failure terminates the
// program with exit status 1.
void load_weights_or_biases(const char* filename, double* array, int size) {
    FILE* file = fopen(filename, "r");
    if (file == NULL) {
        // Diagnostics go to stderr so they are not interleaved with the
        // program's normal stdout output (fix: was printf/stdout).
        fprintf(stderr, "Error opening file: %s\n", filename);
        exit(1);
    }

    for (int i = 0; i < size; i++) {
        if (fscanf(file, "%lf", &array[i]) != 1) {
            fprintf(stderr, "Error reading file: %s at line %d\n", filename, i + 1);
            fclose(file);  // tidy up the stream before bailing out
            exit(1);
        }
    }

    fclose(file);
}

// Load a rows x cols weight matrix (whitespace-separated doubles) from
// `filename` into the flat, row-major buffer `matrix`. Any open/parse
// failure terminates the program with exit status 1, matching
// load_weights_or_biases.
void load_matrix_weights(const char* filename, double* matrix, int rows, int cols) {
    FILE* file = fopen(filename, "r");
    if (file == NULL) {
        // Diagnostics go to stderr, not stdout (fix: was printf/stdout).
        fprintf(stderr, "Error opening file: %s\n", filename);
        exit(1);
    }

    for (int i = 0; i < rows * cols; i++) {
        if (fscanf(file, "%lf", &matrix[i]) != 1) {
            fprintf(stderr, "Error reading file: %s at line %d\n", filename, i + 1);
            fclose(file);  // tidy up the stream before bailing out
            exit(1);
        }
    }

    fclose(file);
}

// Fully-connected layer forward pass: output = input * W + bias, with an
// optional ReLU. `weights` is a flat row-major [input_size][output_size]
// matrix, so column `o` of row `i` lives at weights[i * output_size + o].
void dense_layer(double* input, double* output, double* weights, double* biases,
                int input_size, int output_size, int use_relu) {
    for (int o = 0; o < output_size; o++) {
        // Accumulate the dot product first, then the bias — same summation
        // order as a straightforward matrix-vector product.
        double acc = 0.0;
        for (int in = 0; in < input_size; in++) {
            acc += input[in] * weights[in * output_size + o];
        }
        acc += biases[o];

        // Inline ReLU clamp when requested.
        output[o] = (use_relu && acc < 0.0) ? 0.0 : acc;
    }
}

// Read a 28x28 8-bit binary PGM image into A, scaling each pixel to [0,1].
// Returns 0 on success, nonzero on any failure so the interactive caller can
// skip the bad file and keep running (fix: header/pixel errors previously
// called exit(1), killing the whole session, while an open failure merely
// returned 1 — error handling is now uniform).
// NOTE(review): assumes a fixed 4-line header (e.g. "P5", comment line,
// dimensions, maxval) and exactly IMAGE_SIZE*IMAGE_SIZE raw bytes; PGM files
// without a comment line would be misparsed — confirm against the data set.
int read_pgm(const char* filename, float A[IMAGE_SIZE][IMAGE_SIZE]) {
    FILE* fp = fopen(filename, "rb");
    if (fp == NULL) {
        // filename+7 skips the hard-coded "images/" prefix added by the caller.
        printf("%s是无效文件\n",filename+7);
        return 1;
    }

    // Skip the PGM header (first 4 lines).
    char buffer[256];
    for (int i = 0; i < 4; i++) {
        if (fgets(buffer, sizeof(buffer), fp) == NULL) {
            fprintf(stderr, "Error reading PGM header\n");
            fclose(fp);
            return 1;
        }
    }

    // Read raw pixel bytes, normalizing each one to the [0,1] range.
    for (int i = 0; i < IMAGE_SIZE; i++) {
        for (int j = 0; j < IMAGE_SIZE; j++) {
            int pixel = fgetc(fp);
            if (pixel == EOF) {
                fprintf(stderr, "Unexpected end of file\n");
                fclose(fp);
                return 1;
            }
            A[i][j] = (float)pixel / 255.0f;
        }
    }
    fclose(fp);
    return 0;
}

// Run the network on a 28x28 image and return the predicted class (0-9).
// The final softmax is omitted on purpose: softmax is strictly monotonic,
// so the argmax of the raw logits is identical to the argmax of the
// probabilities. This removes the original per-class softmax calls, each
// of which re-scanned the whole logit vector (O(k^2) exp/max/sum work).
int predict(float input[IMAGE_SIZE][IMAGE_SIZE]) {
    // Flatten the 2D image into the network's 784-element input vector.
    double flat_input[INPUT_SIZE];
    for (int i = 0; i < IMAGE_SIZE; i++) {
        for (int j = 0; j < IMAGE_SIZE; j++) {
            flat_input[i * IMAGE_SIZE + j] = input[i][j];
        }
    }

    // Hidden layer: 784 -> 128 with ReLU.
    double hidden1[HIDDEN1_SIZE];
    dense_layer(flat_input, hidden1, w1, b1, INPUT_SIZE, HIDDEN1_SIZE, 1);

    // Output layer: 128 -> 10, raw logits (no activation).
    double output[HIDDEN2_SIZE];
    dense_layer(hidden1, output, w2, b2, HIDDEN1_SIZE, HIDDEN2_SIZE, 0);

    // Argmax over the logits — same winner the softmax probabilities give.
    int prediction = 0;
    for (int i = 1; i < OUTPUT_SIZE; i++) {
        if (output[i] > output[prediction]) {
            prediction = i;
        }
    }

    return prediction;
}

// Allocate the network's weight/bias buffers and load them from the four
// text files (W1.txt, B1.txt, W2.txt, B2.txt) in the working directory.
// Exits with status 1 on allocation or load failure.
void init_neural_network() {
    // Idiomatic C allocation: no cast on malloc, and sizes taken from the
    // dereferenced pointer (`sizeof *p`) so the element type can never
    // silently drift from the pointer's declared type.
    w1 = malloc(INPUT_SIZE * HIDDEN1_SIZE * sizeof *w1);
    b1 = malloc(HIDDEN1_SIZE * sizeof *b1);
    w2 = malloc(HIDDEN1_SIZE * HIDDEN2_SIZE * sizeof *w2);
    b2 = malloc(HIDDEN2_SIZE * sizeof *b2);

    if (!w1 || !b1 || !w2 || !b2) {
        // Error diagnostics belong on stderr; exit() releases the memory.
        fprintf(stderr, "Memory allocation failed\n");
        exit(1);
    }

    // Load weights and biases; progress messages stay on stdout.
    printf("Loading weights...\n");
    printf("Loading W1.txt (%d x %d)...\n", INPUT_SIZE, HIDDEN1_SIZE);
    load_matrix_weights("W1.txt", w1, INPUT_SIZE, HIDDEN1_SIZE);

    printf("Loading B1.txt (%d)...\n", HIDDEN1_SIZE);
    load_weights_or_biases("B1.txt", b1, HIDDEN1_SIZE);

    printf("Loading W2.txt (%d x %d)...\n", HIDDEN1_SIZE, HIDDEN2_SIZE);
    load_matrix_weights("W2.txt", w2, HIDDEN1_SIZE, HIDDEN2_SIZE);

    printf("Loading B2.txt (%d)...\n", HIDDEN2_SIZE);
    load_weights_or_biases("B2.txt", b2, HIDDEN2_SIZE);

    printf("All weights loaded successfully!\n");
}


// Release the network buffers. The global pointers are reset to NULL so a
// second call (or a stray use after teardown) cannot double-free or touch
// freed memory — free(NULL) is a harmless no-op.
void free_neural_network() {
    free(w1);
    w1 = NULL;
    free(b1);
    b1 = NULL;
    free(w2);
    w2 = NULL;
    free(b2);
    b2 = NULL;
}

// Interactive driver: repeatedly read an image name from stdin, classify
// images/<name>, and ask whether to continue.
// Fix: every scanf return value is now checked — previously, once stdin hit
// EOF (closed pipe, redirected input exhausted) scanf kept failing and the
// loop spun forever reprompting. EOF now ends the session cleanly.
int main() {
    init_neural_network();

    while (1) {
        printf("请输入文件名: ");
        // Bounded read ("%99s") prevents overflow of name[100]; a non-1
        // return means EOF or input error, so stop instead of looping.
        if (scanf("%99s", name) != 1) {
            break;
        }

        // Build "images/<name>"; snprintf guarantees NUL termination.
        snprintf(fullpath, sizeof(fullpath), "images/%s", name);

        // Skip unreadable/invalid files and reprompt.
        int sign = read_pgm(fullpath, image);
        if (sign) continue;

        int result = predict(image);
        printf("%s: %d\n", name, result);

        // Ask whether to continue; insist on one of Y/y/N/n.
        printf("你想继续吗? [Y/N]: ");
        if (scanf("%99s", judge) != 1) {
            break;
        }
        while (strcmp(judge, "Y") != 0 && strcmp(judge, "y") != 0 &&
               strcmp(judge, "N") != 0 && strcmp(judge, "n") != 0) {
            printf("输错了,请重新输入[Y/N]：");
            if (scanf("%99s", judge) != 1) {
                // EOF while reprompting: treat as "no" and fall through.
                strcpy(judge, "N");
                break;
            }
        }
        if (strcmp(judge, "Y") != 0 && strcmp(judge, "y") != 0) {
            printf("Bye欢迎下次使用");
            break;
        }
    }

    // Release the weight/bias buffers before exiting.
    free_neural_network();
    return 0;
}