
#include "glann.h"

#include <fcntl.h>
#include <stdio.h>
#include <stdlib.h>
#include <sys/stat.h>
#include <sys/types.h>
#include <unistd.h>

#include "pilink.h"

/*
 * Fill a weight matrix with pseudo-random values in [-1.0, 1.0).
 * Does nothing when the matrix storage is missing or either
 * dimension is zero.  Uses rand(); seed with srand() elsewhere if
 * reproducibility matters.
 */
void glann_weight_init(struct glann_weight_s *weight)
{
    int cur, fwd;

    if (weight->w == NULL) {
        return;
    }

    if (weight->current_layer_num == 0 || weight->forward_layer_num == 0) {
        return;
    }

    for (cur = 0; cur < weight->current_layer_num; cur++) {
        for (fwd = 0; fwd < weight->forward_layer_num; fwd++) {
            /* rand()%10000 -> [0,9999]; /5000 -> [0,2); -1 -> [-1,1) */
            float value = (float)(rand() % 10000) / 5000.f - 1.f;
            glann_weight_set(weight, cur, fwd, value);
        }
    }
}

/*
 * Weights between two layers are stored as a row-major matrix: one row
 * per neuron of the current layer, one column per neuron of the forward
 * (previous) layer, so the row stride is forward_layer_num.
 *
 * BUG FIX: the stride used to be current_layer_num while the column
 * index is the forward-neuron index; whenever forward_layer_num >
 * current_layer_num, distinct (current, forward) pairs collided and the
 * index could run past the allocation.  Both accessors are changed
 * together so get/set stay consistent.
 */
float glann_weight_get(struct glann_weight_s *weight, int neure_of_current, int neure_of_forward)
{
    return weight->w[neure_of_current * weight->forward_layer_num + neure_of_forward];
}

void glann_weight_set(struct glann_weight_s *weight, int neure_of_current, int neure_of_forward, float nw)
{
    /* Same row-major layout as glann_weight_get. */
    weight->w[neure_of_current * weight->forward_layer_num + neure_of_forward] = nw;
}

/*
 * Leaky rectified linear activation: identity for non-negative input,
 * input scaled by 0.01 otherwise.
 */
float LeakyReLU(float input)
{
    return (input < 0) ? input * 0.01f : input;
}

/*
 * Derivative of LeakyReLU: 1 on the positive side, 0.01 elsewhere
 * (including exactly zero).
 */
float DevLeakyReLU(float input)
{
    return (input > 0) ? 1.f : 0.01f;
}



/*
 * Allocate the layers and inter-layer weight matrices described by
 * layers_describe (one neuron count per layer, layers_num entries) and
 * randomize all weights.
 *
 * Returns 0 on success, a negative code on failure:
 *   -1 fewer than two layers, -2 a layer with zero neurons,
 *   -3/-4/-5/-6 allocation failures.  On failure everything allocated
 *   so far is released before returning.
 */
int glann_network_create(struct glann_network_s *network, unsigned short *layers_describe, int layers_num)
{
    int i, j;

    /* A usable network needs at least an input and an output layer. */
    if (layers_num < 2) {
        return -1;
    }

    for (i = 0; i < layers_num; i++) {
        if (layers_describe[i] < 1) {
            return -2;
        }
    }

    network->layer_num = layers_num;
    network->layers    = (struct glann_layer_s  *)glann_malloc(network->layer_num * sizeof(struct glann_layer_s));
    network->weights   = (struct glann_weight_s *)glann_malloc((network->layer_num - 1) * sizeof(struct glann_weight_s));

    if (network->layers == NULL) {
        return -3;
    }

    if (network->weights == NULL) {
        glann_free(network->layers);
        return -4;
    }

    for (i = 0; i < network->layer_num; i++) {
        network->layers[i].nuere_num = (int)layers_describe[i];
        network->layers[i].neures    = (struct glann_neure_s *)glann_malloc(network->layers[i].nuere_num * sizeof(struct glann_neure_s));

        if (network->layers[i].neures == NULL) {
            /* Roll back the neuron arrays allocated so far. */
            for (j = 0; j < i; j++) {
                glann_free(network->layers[j].neures);
            }
            glann_free(network->layers);
            glann_free(network->weights);
            return -5;
        }

        /* Every neuron starts with the LeakyReLU activation pair. */
        for (j = 0; j < network->layers[i].nuere_num; j++) {
            network->layers[i].neures[j].act_fun = LeakyReLU;
            network->layers[i].neures[j].dev_fun = DevLeakyReLU;
        }
    }

    for (i = 0; i < network->layer_num - 1; i++) {
        network->weights[i].forward_layer_num = network->layers[i].nuere_num;
        network->weights[i].current_layer_num = network->layers[i + 1].nuere_num;
        /* NOTE(review): the extra factor of 4 over-allocates each matrix
         * 4x; kept as a safety margin for the historical get/set indexing
         * convention -- confirm before removing. */
        network->weights[i].w                 = (float *)glann_malloc(4 * network->weights[i].forward_layer_num * network->weights[i].current_layer_num * sizeof(float));

        if (network->weights[i].w == NULL) {
            /* BUG FIX: release the contents BEFORE the arrays holding
             * them.  The old code freed network->layers first and then
             * walked the freed array (use-after-free). */
            for (j = 0; j < network->layer_num; j++) {
                glann_free(network->layers[j].neures);
            }
            for (j = 0; j < i; j++) {
                glann_free(network->weights[j].w);
            }
            glann_free(network->layers);
            glann_free(network->weights);
            return -6;
        }

        glann_weight_init(&(network->weights[i]));
    }

    network->learn_rate = 0.0002f;
    return 0;
}

/*
 * Release everything glann_network_create() allocated and reset the
 * descriptor so the network can be created again.  Safe to call on a
 * network that was never created (layer_num == 0 / NULL pointers).
 */
void glann_network_destory(struct glann_network_s *network)
{
    if (network->layer_num > 0 && network->layers != NULL) {
        for (int i = 0; i < network->layer_num; i++) {
            if (network->layers[i].neures != NULL) {
                /* BUG FIX: memory obtained via glann_malloc() must be
                 * returned via glann_free(), not plain free(). */
                glann_free(network->layers[i].neures);
            }
        }

        glann_free(network->layers);
    }

    /* There are layer_num - 1 weight matrices, one per layer gap. */
    if (network->layer_num > 0 && network->weights != NULL) {
        for (int i = 0; i < network->layer_num - 1; i++) {
            if (network->weights[i].w != NULL) {
                glann_free(network->weights[i].w);
            }
        }

        glann_free(network->weights);
    }

    network->layer_num  = 0;
    network->layers     = NULL;
    network->weights    = NULL;
    network->learn_rate = 0;
}


/*
 * Forward pass: copy dat->input into the input layer, propagate through
 * every layer, then write the raw outputs and their normalized share of
 * the output sum into dat->output / dat->probability.
 *
 * Returns 0 on success, -1 if the network has no layers, -2/-3 on an
 * input/output size mismatch, -4 when the output sum is too small to
 * normalize (probabilities are left untouched in that case).
 */
int glann_network_run(struct glann_network_s *network, struct glann_iodata_s *dat)
{
    int i, j, k;
    struct glann_layer_s *input_layer = network->layers;

    if (input_layer == NULL) {
        return -1;
    }

    if (dat->input_size != input_layer->nuere_num) {
        return -2;
    }

    if (dat->output_size != network->layers[network->layer_num - 1].nuere_num) {
        return -3;
    }

    /* Input layer: activation applied directly to the raw inputs. */
    for (i = 0; i < input_layer->nuere_num; i++) {
        input_layer->neures[i].input  = dat->input[i];
        input_layer->neures[i].output = input_layer->neures[i].act_fun(input_layer->neures[i].input);
    }

    for (i = 1; i < network->layer_num; i++) {
        struct glann_layer_s  *current_layer = &(network->layers[i]);
        struct glann_layer_s  *forward_layer = &(network->layers[i-1]);
        struct glann_weight_s *weights       = &(network->weights[i-1]);

        for (j = 0; j < current_layer->nuere_num; j++) {
            current_layer->neures[j].input = 0;
            for (k = 0; k < forward_layer->nuere_num; k++) {
                current_layer->neures[j].input += forward_layer->neures[k].output * glann_weight_get(weights, j, k);
            }
            /* BUG FIX: use neuron j's own activation; the old code always
             * called neures[0]'s act_fun for every neuron in the layer. */
            current_layer->neures[j].output = current_layer->neures[j].act_fun(current_layer->neures[j].input);
        }
    }

    float total_result = 0;

    for (i = 0; i < dat->output_size; i++) {
        dat->output[i] = network->layers[network->layer_num - 1].neures[i].output;
        total_result += dat->output[i];
    }

    /* Avoid dividing by a (near-)zero or negative sum when normalizing. */
    if (total_result < 1e-10f) {
        return -4;
    }

    for (i = 0; i < dat->output_size; i++) {
        dat->probability[i] = dat->output[i] / total_result;
    }

    return 0;
}

/*
 * One step of online backpropagation against the targets in
 * dat->output.  dat doubles as scratch for the forward pass, so the
 * caller's expected outputs are copied into the neurons' goal fields
 * before running the network.
 *
 * Returns 0 on success, -1 on an output size mismatch or when the
 * forward pass fails.
 */
int glann_network_train(struct glann_network_s *network, struct glann_iodata_s *dat)
{
    int i, j, k;

    struct glann_layer_s *output_layer = &(network->layers[network->layer_num-1]);
    if (output_layer->nuere_num != dat->output_size) {
        return -1;
    }

    /* Remember the targets before the forward pass overwrites dat. */
    for (i = 0; i < output_layer->nuere_num; i++) {
        output_layer->neures[i].goal = dat->output[i];
    }

    /* BUG FIX: the forward-pass result used to be ignored; training on a
     * failed run would update weights from stale neuron outputs. */
    if (glann_network_run(network, dat) != 0) {
        return -1;
    }

    /* Output layer: delta rule on the goal/output error. */
    struct glann_layer_s  *current_layer = &(network->layers[network->layer_num-1]);
    struct glann_layer_s  *forward_layer = &(network->layers[network->layer_num-2]);
    struct glann_weight_s *weight        = &(network->weights[network->layer_num-2]);

    for (j = 0; j < current_layer->nuere_num; j++) {
        struct glann_neure_s *neu = &(current_layer->neures[j]);
        float sigma =  neu->dev_fun(neu->input) * (neu->goal - neu->output);
        sigma *= network->learn_rate;
        for (k = 0; k < forward_layer->nuere_num; k++) {
            float w = glann_weight_get(weight, j, k);
            w += sigma * forward_layer->neures[k].output;
            glann_weight_set(weight, j, k, w);
        }
    }

    /* Hidden layers, walking backwards towards the input layer. */
    for (i = network->layer_num-2; i > 0; i--) {
        struct glann_layer_s  *forward_layer   = &(network->layers[i-1]);
        struct glann_layer_s  *current_layer   = &(network->layers[i]);
        struct glann_layer_s  *backward_layer  = &(network->layers[i+1]);

        struct glann_weight_s *current_weight  = &(network->weights[i-1]);
        struct glann_weight_s *backward_weight = &(network->weights[i]);

        for (j = 0; j < current_layer->nuere_num; j++) {
            float sigma = 0;
            struct glann_neure_s *cneu = &(current_layer->neures[j]);

            /* Accumulate the error fed back from the next layer.
             * NOTE(review): dev_fun is given the OUTPUT here while the
             * output layer above passes the INPUT; the two agree for
             * LeakyReLU (it preserves sign) but confirm before adding
             * other activation functions. */
            for (k = 0; k < backward_layer->nuere_num; k++) {
                struct glann_neure_s *bneu = &(backward_layer->neures[k]);
                sigma += glann_weight_get(backward_weight, k, j) * bneu->dev_fun(bneu->output) * (bneu->goal - bneu->output);
            }

            sigma = network->learn_rate * cneu->dev_fun(cneu->output) * sigma;

            /* Update incoming weights and recompute this neuron's goal,
             * which becomes the target for the next iteration inward. */
            cneu->goal = 0;
            for (k = 0; k < forward_layer->nuere_num; k++) {
                float w = glann_weight_get(current_weight, j, k);
                struct glann_neure_s *fneu = &(forward_layer->neures[k]);
                w += sigma * fneu->output;
                cneu->goal += w * fneu->output;
                glann_weight_set(current_weight, j, k, w);
            }

            cneu->goal = cneu->act_fun(cneu->goal);
        }
    }
    return 0;
}


/*
 * Serialize the network into a pilink stream at `path`: one GLANN_INFO
 * message describing the topology followed by one GLANN_WEIGHT message
 * per weight.
 *
 * Returns 0 on success, -1 if the file cannot be created, -2 if the
 * network has more layers than the info message can describe, -3 on a
 * failed or short write.
 */
int glann_network_save(struct glann_network_s *network, char *path)
{
    unsigned char buf[544];

    int fd = open(path, O_CREAT | O_TRUNC | O_WRONLY, 0666);

    if (fd < 0) {
        printf("Can't open file %s\r\nFailed to save model\r\n", path);
        return -1;
    }

    if (network->layer_num > PILINK_HANDLER_BUFFER_LENGTH / 8) {
        printf("Can't save model, too much network layers\r\n");
        close(fd);   /* BUG FIX: the descriptor leaked on this path */
        return -2;
    }

    struct pilink_struct_glann_info_s model_info;

    model_info.layer_num     = network->layer_num;
    model_info.learning_rate = network->learn_rate;

    for (int i = 0; i < network->layer_num; i++) {
        model_info.neure_num[i]  = network->layers[i].nuere_num;
        model_info.neure_type[i] = 0;   /* only one neuron type exists so far */
    }

    int len = pilink_msg_pack(buf, 0x00, PILINK_DATA_TYPE_GLANN_INFO, (unsigned char *)&model_info, sizeof(model_info));

    /* BUG FIX: write() results used to be ignored; detect short writes. */
    if (write(fd, buf, len) != len) {
        close(fd);
        return -3;
    }

    for (int i = 0; i < network->layer_num - 1; i++) {
        for (int j = 0; j < network->layers[i].nuere_num; j++) {
            for (int k = 0; k < network->layers[i + 1].nuere_num; k++) {
                struct pilink_struct_glann_weight_s weight;

                weight.layer         = i;
                weight.neure_forward = j;
                weight.neure_current = k;
                weight.weight        = glann_weight_get(&(network->weights[i]), k, j);
                weight.timestamp     = 0x1122334455667788;   /* placeholder stamp */

                int wlen = pilink_msg_pack(buf, 0x00, PILINK_DATA_TYPE_GLANN_WEIGHT, (unsigned char *)&weight, sizeof(weight));

                if (write(fd, buf, wlen) != wlen) {
                    close(fd);
                    return -3;
                }
            }
        }
    }

    close(fd);

    return 0;
}

/*
 * Rebuild a network from a pilink stream written by
 * glann_network_save(): the GLANN_INFO message recreates the topology,
 * then each GLANN_WEIGHT message (validated against that topology)
 * fills in one weight.
 *
 * Returns 0 on success, -1 if the file cannot be opened / the network
 * cannot be created / a weight arrives before the network exists,
 * -2 if fewer valid weights were found than the topology requires.
 */
int glann_network_load(struct glann_network_s *network, char *path)
{
    int  len;
    unsigned char buf[256];

    int total_weights = 0;
    int valid_weights = 0;

    struct pilink_handler_s handler;

    int fd = open(path, O_RDONLY);

    if (fd < 0) {
        printf("Can't open file %s\r\nFailed to load model\r\n", path);
        return -1;
    }

    pilink_handler_init(&handler);

    do {
        len = read(fd, buf, sizeof(buf));

        for (int i = 0; i < len; i++) {
            /* pilink_handler_parse returns 1 when a full message is ready */
            if (pilink_handler_parse(&handler, buf[i]) != 1) {
                continue;
            }

            if (handler.port == PILINK_DATA_TYPE_GLANN_INFO) {
                struct pilink_struct_glann_info_s *model_info = (struct pilink_struct_glann_info_s *)pilink_handler_get_data(&handler);

                glann_network_destory(network);
                /* BUG FIX: creation failure used to go unnoticed and the
                 * NULL layers were dereferenced right below. */
                if (glann_network_create(network, model_info->neure_num, model_info->layer_num) != 0) {
                    printf("Fatal error: failed to create network from model info\r\n");
                    close(fd);
                    return -1;
                }
                network->learn_rate = model_info->learning_rate;
                printf("Found network info: \r\n");
                printf("structure of network is :");
                for (int j = 0; j < network->layer_num; j++) {
                    printf("%d ", network->layers[j].nuere_num);
                }
                printf("\r\n");

                /* Number of weights the complete model must contain. */
                for (int j = 0; j < network->layer_num - 1; j++) {
                    total_weights += network->layers[j].nuere_num * network->layers[j+1].nuere_num;
                }
            }

            if (handler.port == PILINK_DATA_TYPE_GLANN_WEIGHT) {
                struct pilink_struct_glann_weight_s *weight = (struct pilink_struct_glann_weight_s *)pilink_handler_get_data(&handler);

                if (network->weights == NULL) {
                    printf("Fatal error: network is not be created\r\n");
                    close(fd);
                    return -1;
                }

                int weight_is_valid = 1;

                /* BUG FIX: validate the layer index BEFORE using it to
                 * index network->layers; the old code read out of bounds
                 * whenever the layer was bad. */
                if (weight->layer >= network->layer_num - 1) {
                    weight_is_valid = 0;
                    printf("layer of weight %d is out of network\r\n", weight->layer);
                } else {
                    if (weight->neure_forward >= network->layers[weight->layer].nuere_num) {
                        weight_is_valid = 0;
                        printf("forward neure index %d is out of network\r\n", weight->neure_forward);
                    }

                    if (weight->neure_current >= network->layers[weight->layer + 1].nuere_num) {
                        weight_is_valid = 0;
                        /* BUG FIX: report neure_current, not neure_forward */
                        printf("current neure index %d is out of network\r\n", weight->neure_current);
                    }
                }

                if (weight_is_valid) {
                    glann_weight_set(&(network->weights[weight->layer]), weight->neure_current, weight->neure_forward, weight->weight);
                    valid_weights++;
                }
            }
        }
    } while (len == 256);

    close(fd);

    if (valid_weights != total_weights) {
        printf("[WARNING]%d weights expected, but only %d weights found in file %s, the model is incomplete\r\n", \
                    total_weights, valid_weights, path);
        return -2;
    }

    return 0;
}