#include "tool.h"

#include <cmath>    // std::exp
#include <cstdlib>  // malloc/free, rand/srand
#include <ctime>    // time() for RNG seeding


// Fully-connected layer holding an input_num x output_num weight matrix and
// a bias matrix of the same shape.
// NOTE(review): bias is stored per (input, output) pair rather than per
// output neuron; forward() sums it over all inputs — unusual, but kept.
Linear::Linear(int input_num,int output_num){
    this->input_num=input_num;
    this->output_num=output_num;
    this->weight=(float**)malloc(sizeof(float*)*input_num);
    this->bias=(float**)malloc(sizeof(float*)*input_num);
    // Seed the RNG once per process, not once per layer: re-seeding with
    // time(NULL) in every constructor handed identical initial weights to
    // every layer constructed within the same second.
    static bool seeded=false;
    if(!seeded){
        srand(time(NULL));
        seeded=true;
    }
    for(int i=0;i<input_num;i++){
        weight[i]=(float*)malloc(sizeof(float)*output_num);
        bias[i]  =(float*)malloc(sizeof(float)*output_num);
        for(int j=0;j<output_num;j++){
            // Initialize weights and biases uniformly in [-0.1, 0.1).
            weight[i][j]=(rand()%2000-1000)/10000.0;
            bias[i][j]=(rand()%2000-1000)/10000.0;
        }
    }
}

// Release the storage allocated in the constructor. The previous empty
// destructor leaked every weight/bias row plus both pointer arrays for
// each Linear instance.
Linear::~Linear(){
    if(this->weight!=nullptr){
        for(int i=0;i<this->input_num;i++){
            free(this->weight[i]);
        }
        free(this->weight);
        this->weight=nullptr;
    }
    if(this->bias!=nullptr){
        for(int i=0;i<this->input_num;i++){
            free(this->bias[i]);
        }
        free(this->bias);
        this->bias=nullptr;
    }
}

// Forward pass: for each output unit, accumulate input[j]*weight[j][out]
// plus bias[j][out] over every input j (bias is summed across inputs by
// design of this layer's storage). Returns a malloc'd array of
// output_num floats the caller must free, or nullptr when the supplied
// length does not match the layer's input width.
float* Linear::forward(float* input,int input_num){
    if(input_num!=this->input_num){
        std::cout<<"输入维度与模型不符("<<input_num<<","<<this->input_num<<")"<<std::endl;
        return nullptr;
    }
    float* output=(float*)malloc(sizeof(float)*this->output_num);
    for(int out=0;out<this->output_num;out++){
        float acc=0;
        for(int in=0;in<this->input_num;in++){
            acc+=input[in]*this->weight[in][out]+this->bias[in][out];
        }
        output[out]=acc;
    }
    return output;
}

// SGD parameter update:
//   weight[i][j] -= lr * gradients[j] * input[i]
//   bias[i][j]   -= lr * gradients[j]
// value_num is the layer's input width, gra_num its output width.
void Linear::update_para(float* input,float* gradients,float learning_rate,int value_num,int gra_num){
    for(int in=0;in<value_num;in++){
        float* w_row=this->weight[in];
        float* b_row=this->bias[in];
        for(int out=0;out<gra_num;out++){
            float step=learning_rate*gradients[out];
            w_row[out]-=step*input[in];
            b_row[out]-=step;
        }
    }
}

// Activation function. Despite the name this is the ELU activation:
// identity for x > 0, e^x - 1 otherwise (smooth, avoids dead units).
// Returns a malloc'd array of len_input floats that the caller must free.
float* Func::relu(float* input,int len_input){
    float *output=(float*)malloc(sizeof(float)*len_input);
    for(int i=0;i<len_input;i++){
        // std::exp(x) replaces pow(E, x): it is faster and exact for base e,
        // whereas pow went through the generic power routine with an
        // approximated constant as the base.
        output[i]=input[i]>0?input[i]:(std::exp(input[i])-1);
    }
    return output;
}

// Index of the largest element of input (first occurrence wins).
// Returns 0 for an empty array.
int Func::argmax(float* input,int len_input){
    if(len_input<=0){
        return 0;
    }
    // Start from the first element instead of the old magic sentinel (-10),
    // which silently returned index 0 whenever every value was <= -10.
    float max=input[0];
    int pos=0;
    for(int i=1;i<len_input;i++){
        if(input[i]>max){
            max=input[i];
            pos=i;
        }
    }
    return pos;
}

// Softmax: exponentiate and normalize so the outputs sum to 1.
// The previous version returned raw exp(x) values without dividing by
// their sum, so the result was not a probability distribution (which is
// what gradient::gra_softmax assumes its input y to be).
// Returns a malloc'd array of len_input floats the caller must free.
float* Func::softmax(float* input,int len_input){
    float* output=(float*)malloc(sizeof(float)*len_input);
    if(len_input<=0){
        return output;
    }
    // Subtract the maximum before exponentiating: standard trick to avoid
    // overflow for large inputs without changing the result.
    float max=input[0];
    for(int i=1;i<len_input;i++){
        if(input[i]>max){
            max=input[i];
        }
    }
    float sum=0;
    for(int i=0;i<len_input;i++){
        output[i]=std::exp(input[i]-max);
        sum+=output[i];
    }
    for(int i=0;i<len_input;i++){
        output[i]/=sum;
    }
    return output;
}

// Mean squared error between output and target, both of length num
// (num_Tmp is the target's length, checked against num).
// Prints an error and returns 0 when the two lengths disagree.
float Func::MSELoss(float* output,float* target,int num,int num_Tmp){
    if(num!=num_Tmp){
        std::cout<<"标签数量错误！"<<std::endl;
        return 0;
    }
    float total=0;
    for(int i=0;i<num;i++){
        float diff=target[i]-output[i];
        total+=diff*diff;
    }
    return total/num;
}

// Gradient of the MSE loss w.r.t. each output:
//   d/dy_i (1/n) * sum (y-t)^2  =  2*(y_i - t_i) / n
// Returns a malloc'd array of num_output floats the caller must free.
float* gradient::gra_loss(float* output,float* target,int num_output){
    float* gradients=(float*)malloc(sizeof(float)*num_output);
    for(int k=0;k<num_output;k++){
        float diff=output[k]-target[k];
        gradients[k]=(2*diff)/(num_output*1.0);
    }
    return gradients;
}

// Backprop through the ELU-style activation in Func::relu: upstream
// gradient t scaled by the local derivative — 1 for x > 0, e^x otherwise.
// num_input is unused; num_t is the length of t and of the result.
// Returns a malloc'd array of num_t floats the caller must free.
float* gradient::gra_relu(float* input,int num_input,float* t,int num_t){
    float* gradients=(float*)malloc(sizeof(float)*num_t);
    for(int k=0;k<num_t;k++){
        // double keeps the intermediate at the same precision as the
        // original inline expression.
        double slope=input[k]>0?1.0:(pow(E,input[k]));
        gradients[k]=t[k]*slope;
    }
    return gradients;
}

// Push the upstream gradient t back through the weight matrix:
//   gradients[i] = sum_j weight[i][j] * t[j]
// h_num is the number of inputs (rows), num_weight the number of outputs
// (columns / length of t). Returns a malloc'd array of h_num floats the
// caller must free.
float* gradient::gra_weight(int h_num,float** weight,float* t,int num_weight){
    float* gradients=(float*)malloc(sizeof(float)*h_num);
    for(int row=0;row<h_num;row++){
        const float* w_row=weight[row];
        float acc=0;
        for(int col=0;col<num_weight;col++){
            acc+=w_row[col]*t[col];
        }
        gradients[row]=acc;
    }
    return gradients;
}

// Sums each row of the softmax Jacobian over the outputs y:
//   dy_i/dx_j = y_i*(1-y_i) when i==j, and -y_j*y_i otherwise.
// NOTE(review): no upstream gradient is multiplied in — this is the
// Jacobian row-sum, equivalent to backprop with an all-ones upstream
// gradient. The original author flagged the same doubt in the comment
// below; verify against the callers before relying on this.
// Returns a malloc'd array of num_y floats the caller must free.
float* gradient::gra_softmax(float* y,int num_y){
    // int num_y=Func::arraylenth(y);
    float* gradients=(float*)malloc(sizeof(float)*num_y);
    for(int i=0;i<num_y;i++){
        float sum_tmp=0;
        for(int j=0;j<num_y;j++){
            if(i==j){
                sum_tmp+=y[j]*(1-y[j]);// multiply by the upstream gradient??? (author's note)
            }
            else{
                sum_tmp+=-y[j]*y[i];
            }
        }
        gradients[i]=sum_tmp;
    }
    return gradients;
}
// Debug print: label immediately followed by a single int, then a newline.
void tag::taghere(std::string content,int number){
    std::cout<<content<<number<<std::endl;
}
// Debug print: label immediately followed by a single float, then a newline.
void tag::taghere(std::string content,float number){
    std::cout<<content<<number<<std::endl;
}
// Debug print: label on its own line, then num floats, each followed by
// a comma, then a newline.
void tag::taghere(std::string content,float* arr,int num){
    std::cout<<content<<std::endl;
    int idx=0;
    while(idx<num){
        std::cout<<arr[idx]<<",";
        ++idx;
    }
    std::cout<<std::endl;
}
// Debug print: label on its own line, then num ints, each followed by
// a comma, then a newline.
void tag::taghere(std::string content,int* arr,int num){
    std::cout<<content<<std::endl;
    int idx=0;
    while(idx<num){
        std::cout<<arr[idx]<<",";
        ++idx;
    }
    std::cout<<std::endl;
}
// Debug print: label, then a height x width float matrix — one
// comma-separated row per line, followed by a trailing blank line.
void tag::taghere(std::string content,float** arr,int width,int height){
    std::cout<<content<<std::endl;
    for(int row=0;row<height;row++){
        for(int col=0;col<width;col++){
            std::cout<<arr[row][col]<<",";
        }
        std::cout<<std::endl;
    }
    std::cout<<std::endl;
}
// Debug print: label, then a height x width int matrix — one
// comma-separated row per line, followed by a trailing blank line.
void tag::taghere(std::string content,int** arr,int width,int height){
    std::cout<<content<<std::endl;
    for(int row=0;row<height;row++){
        for(int col=0;col<width;col++){
            std::cout<<arr[row][col]<<",";
        }
        std::cout<<std::endl;
    }
    std::cout<<std::endl;
}