#include "mnist.h"
#include "linear_layer.h"
#include "maxpool_layer.h"
#include "softmax_layer.h"
#include "convolutional_layer.h"
#include "data_loader.h"
#include "image.h"
#include <string.h>

#define BATCH 8
#define CHANNEL 1
#define WIDTH 224
#define HEIGHT 224

/*
void mnist_train() {
    convolutional_layer conv1 =  make_convolutional_layer(BATCH, HEIGHT, WIDTH, CHANNEL, 96, 1, 11, 4, 2, CONVOLUTIONAL, 1, 0, 0, 1);
    maxpool_layer maxpool1 = make_maxpool_layer(BATCH, 55, 55, 96, 3, 2, 0);

    convolutional_layer conv2 = make_convolutional_layer(BATCH, 27, 27, 96, 256, 1, 5, 1, 1, CONVOLUTIONAL, 1, 0, 0, 1);
    maxpool_layer maxpool2 = make_maxpool_layer(BATCH, 27, 27, 256, 3, 2, 0);

    convolutional_layer conv3 = make_convolutional_layer(BATCH, 13, 13, 256, 384, 1, 3, 1, 1, CONVOLUTIONAL, 1, 0, 0, 1);
    maxpool_layer maxpool3 = make_maxpool_layer(BATCH, 13, 13, 384, 2, 2, 0);

    convolutional_layer conv4 = make_convolutional_layer(BATCH, 13, 13, 384, 384, 1, 3, 1, 1, CONVOLUTIONAL, 1, 0, 0, 1);
    maxpool_layer maxpool4 = make_maxpool_layer(BATCH, 13, 13, 384, 2, 2, 0);

    convolutional_layer conv5 = make_convolutional_layer(BATCH, 13, 13, 384, 256, 1, 3, 1, 1, CONVOLUTIONAL, 1, 0, 0, 1);
    maxpool_layer maxpool5 = make_maxpool_layer(BATCH, 13, 13, 256, 3, 2, 0);

    linear_layer connect1 = make_linear_layer(BATCH, 6 * 6 * 256, 4096, LINEAR, 1, 1);
    linear_layer connect2 = make_linear_layer(BATCH, 4096, 4096, LINEAR, 1, 1);
    linear_layer classes = make_linear_layer(BATCH, 4096, 1000, LINEAR, 1, 1);

    softmax_layer loss = make_softmax_layer(BATCH, 1000, 1);
    struct network net;
    net.batch = 1;

    image* out;
    int batch = 0;
    read_images("/mnt/disk1/code_work/cpp_work/cuda_work/cform/data/train-images-idx3-ubyte", &out, &batch);

    int* label;
    int labelBatch = 0;
    read_labels("/mnt/disk1/code_work/cpp_work/cuda_work/cform/data/train-labels-idx1-ubyte", &label, &labelBatch);
    printf("batch %d labelBatch %d\n", batch, labelBatch);
    printf("%d\n", label[2]);

    // 创建索引数组
    int* indices = (int*)malloc(batch * sizeof(int));
    if (indices == NULL) {
        fprintf(stderr, "Memory allocation failed\n");
        return;
    }

    // 初始化索引数组
    for (int i = 0; i < batch; i++) {
        indices[i] = i;
    }

    shuffle(indices, batch, sizeof(int));
    reorder_array(out, indices, batch, sizeof(image));
    reorder_array(label, indices, labelBatch, sizeof(int));

    int dataCnt = batch / BATCH;
    int imgSize = 224 * 224;
    net.batch = batch;
    net.input = (float*)malloc(sizeof(float) * imgSize * BATCH);
    for (int i = 0; i < dataCnt; i++) {
        fprintf(stderr, "for dataCnt begin  =================>\n");
        fprintf(stderr, "padImgData begin  =================>\n");
        image padImgData = resize_image(out[i * BATCH], 224, 224);
        memcpy(net.input, padImgData.data, sizeof(float) * imgSize);
        fprintf(stderr, "padImgData end  =================>\n");

        image padImgData1 = resize_image(out[i * BATCH + 1], 224, 224);
        memcpy(net.input + imgSize, padImgData1.data, sizeof(float) * imgSize);
        fprintf(stderr, "padImgData1  =================>\n");

        image padImgData2 = resize_image(out[i * BATCH + 2], 224, 224);
        memcpy(net.input + imgSize * 2, padImgData2.data, sizeof(float) * imgSize);
        fprintf(stderr, "padImgData2  =================>\n");

        image padImgData3 = resize_image(out[i * BATCH + 3], 224, 224);
        memcpy(net.input + imgSize * 3, padImgData3.data, sizeof(float) * imgSize);
        fprintf(stderr, "padImgData3  =================>\n");

        image padImgData4 = resize_image(out[i * BATCH + 4], 224, 224);
        memcpy(net.input + imgSize * 4, padImgData4.data, sizeof(float) * imgSize);
        fprintf(stderr, "padImgData4  =================>\n");

        image padImgData5 = resize_image(out[i * BATCH + 5], 224, 224);
        memcpy(net.input + imgSize * 5, padImgData5.data, sizeof(float) * imgSize);
        fprintf(stderr, "padImgData5  =================>\n");

        image padImgData6 = resize_image(out[i * BATCH + 6], 224, 224);
        memcpy(net.input + imgSize * 6, padImgData6.data, sizeof(float) * imgSize);
        fprintf(stderr, "padImgData6  =================>\n");

        image padImgData7 = resize_image(out[i * BATCH + 7], 224, 224);
        memcpy(net.input + imgSize * 7, padImgData7.data, sizeof(float) * imgSize);
        fprintf(stderr, "padImgData7  =================>\n");

        // forward
        conv1.forward(conv1, net);
        net.input = conv1.output;

        maxpool1.forward(maxpool1, net);
        net.input = maxpool1.output;

        conv2.forward(conv2, net);
        net.input = conv2.output;

        maxpool2.forward(maxpool2, net);
        net.input = maxpool2.output;

        conv3.forward(conv3, net);
        net.input = conv3.output;

        maxpool3.forward(maxpool3, net);
        net.input = maxpool3.output;

        conv4.forward(conv4, net);
        net.input = conv4.output;

        maxpool4.forward(maxpool4, net);
        net.input = maxpool4.output;

        conv5.forward(conv5, net);
        net.input = conv5.output;

        maxpool5.forward(maxpool5, net);
        net.input = maxpool5.output;

        connect1.forward(connect1, net);
        net.input = connect1.output;

        connect2.forward(connect2, net);
        net.input = connect2.output;

        classes.forward(classes, net);
        net.input = classes.output;

        loss.forward(loss, net);
        net.input = loss.output;
        net.truth = loss.output;
        loss.delta = loss.output;
        net.delta = loss.output;

        // backward
        loss.backward(loss, net);
        net.input = loss.output;

        classes.backward(classes, net);
        net.input = classes.output;

        connect2.backward(connect2, net);
        net.input = connect2.output;

        connect1.backward(connect1, net);
        net.input = connect1.output;

        maxpool5.backward(maxpool5, net);
        net.input = maxpool5.output;
        
        net.workspace = calloc(1, get_workspace_size(conv5));
        conv5.backward(conv5, net);
        net.input = conv5.output;

        maxpool4.backward(maxpool4, net);
        net.input = maxpool4.output;

        conv4.backward(conv4, net);
        net.input = conv4.output;

        maxpool3.backward(maxpool3, net);
        net.input = maxpool3.output;

        conv3.backward(conv3, net);
        net.input = conv3.output;

        maxpool2.backward(maxpool2, net);
        net.input = maxpool2.output;

        conv2.backward(conv2, net);
        net.input = conv2.output;

        maxpool1.backward(maxpool1, net);
        net.input = maxpool1.output;

        conv1.backward(conv1, net);
        net.input = conv1.output;

        // update network
        update_args a = {0};
        a.learning_rate = 0.001;
        a.momentum = 0.9;
        a.decay = 0.0001;
        

        conv1.update(conv1, a);
        net.input = conv1.output;

        // maxpool1.update(maxpool1, a);
        // net.input = maxpool1.output;

        conv2.update(conv2, a);
        net.input = conv2.output;

        // maxpool2.update(maxpool2, a);
        // net.input = maxpool2.output;

        conv3.update(conv3, a);
        net.input = conv3.output;

        // maxpool3.update(maxpool3, a);
        // net.input = maxpool3.output;

        conv4.update(conv4, a);
        net.input = conv4.output;

        // maxpool4.update(maxpool4, a);
        // net.input = maxpool4.output;

        conv5.update(conv5, a);
        net.input = conv5.output;

        // maxpool5.update(maxpool5, a);
        // net.input = maxpool5.output;

        connect1.update(connect1, a);
        net.input = connect1.output;

        connect2.update(connect2, a);
        net.input = connect2.output;

        classes.update(classes, a);
        net.input = classes.output;
        
        fprintf(stderr, "for dataCnt end\n");
    }
    free(net.input);
}
*/

/*
 * Load the network described by cfgfile and (eventually) train it on MNIST.
 * The training loop itself is still TODO (see the commented-out sketch below).
 *
 * cfgfile: path to a darknet-style network .cfg file.
 */
void mnist_train(char *cfgfile) {
    char *base = basecfg(cfgfile);
    printf("%s\n", base);

    /* BUG FIX: weightfile was declared but never initialized, so an
     * indeterminate pointer was passed to load_network() -- undefined
     * behavior. NULL is the conventional "no pretrained weights, use
     * random initialization" value for load_network. */
    char *weightfile = NULL;
    network *net = load_network(cfgfile, weightfile, 0);
    if (net == NULL) {
        fprintf(stderr, "Failed to load network from %s\n", cfgfile);
        free(base); /* basecfg() heap-allocates its result -- TODO confirm */
        return;
    }
    printf("Learning Rate: %g, Momentum: %g, Decay: %g\n", net->learning_rate, net->momentum, net->decay);

    // char *backup_directory = "";
    // int classes = 10;
    // int N = 50000;

    // char **labels = get_labels("data/cifar/labels.txt");
    // int epoch = (*net->seen) / N;
    // data train = load_all_cifar10();
    // while (get_current_batch(net) < net->max_batches || net->max_batches == 0) {
    //     clock_t time = clock();

    //     float loss = train_network_sgd(net, train, 1);
    // }

    free(base); /* basecfg() heap-allocates its result -- TODO confirm */
}

/* Evaluate the trained network on the MNIST test set. TODO: not implemented. */
void mnist_test() {

}

/* Persist trained network weights to disk. TODO: not implemented. */
void mnist_save() {

}

/* Restore network weights from disk. TODO: not implemented. */
void mnist_load() {
    
}