#include "LinearRegression.h"

#include <cstdio>
#include <iostream>
#include <random>

// Construct the model with initial parameters and synthesize a small
// training set of 10 points sampled from the ground-truth line.
LinearRegression::LinearRegression(double w, double b):
    w_(w),
    b_(b)
{
    // Seed a Mersenne Twister engine from a hardware entropy source.
    std::random_device seeder;
    std::mt19937 engine(seeder());
    // Sample integer x values uniformly from [0, 100].
    std::uniform_int_distribution<> range(0, 100);
    for (unsigned long n = 0; n < 10; ++n) {
        const double x = static_cast<double>(range(engine));
        // Pair each x with the label produced by the ground-truth line.
        datas.emplace_back(point{x, GetTestY(x)});
        printf("%f %f\n", datas[n].x, datas[n].y);
    }
    printf("-----------------------------\n");
}

// Train the model: iterate batch gradient descent until the cost falls
// below the convergence threshold or the iteration budget is exhausted,
// then print ground-truth vs. learned parameters for comparison.
void LinearRegression::Run()
{
    for(unsigned long i = 0; i < 8000000; ++i){
        double costJ = this->CalcCostJ();
        printf("costJ= %f\n", costJ);
        if(costJ <= 0.00001){
            // Fixed format specifier: i is unsigned long, so %lu (the
            // original %ld was a signed/unsigned mismatch, which is UB).
            printf("模型训练完成，训练次数count= %lu\n", i);
            break;
        }
        this->CalcGradientDescent();
    }
    // Ground-truth parameters (tW_, tB_) vs. the learned model (w_, b_).
    printf("real  w= %f b= %f\n", this->tW_, this->tB_);
    printf("model w= %f b= %f\n", this->w_, this->b_);
}

// Hypothesis h(x) = w*x + b evaluated with the current model parameters.
double LinearRegression::ModelValue(double x)
{
    const double slopeTerm = this->w_ * x;
    return slopeTerm + this->b_;
}

// Mean-squared-error cost: J(w,b) = (1/2m) * sum_i (h(x_i) - y_i)^2.
// Returns 0.0 for an empty training set (the original divided by zero).
double LinearRegression::CalcCostJ()
{
    if (datas.empty()) {
        // Guard: avoid division by zero when there is no training data.
        return 0.0;
    }
    double sum = 0;
    for (const auto& p : datas) {
        const double diff = ModelValue(p.x) - p.y;
        sum += diff * diff;
    }
    return sum / (2 * datas.size());
}

// One batch gradient-descent step: compute the partial derivatives of the
// MSE cost with respect to w_ and b_ over the whole data set, then move
// both parameters against the gradient. No-op on an empty training set
// (the original divided by zero in that case).
void LinearRegression::CalcGradientDescent()
{
    if (datas.empty()) {
        // Guard: avoid division by zero when there is no training data.
        return;
    }
    double sumW = 0.0;  // accumulates dJ/dw terms: diff * x
    double sumB = 0.0;  // accumulates dJ/db terms: diff
    for (const auto& p : datas) {
        const double diff = ModelValue(p.x) - p.y;
        sumW += diff * p.x;
        sumB += diff;
    }
    const double partialW = sumW / datas.size();
    const double partialB = sumB / datas.size();
    // Small learning rate: x ranges up to 100, so x^2 terms reach ~1e4 and
    // a larger step would diverge.
    const double alpha = 1.0e-5 * 2;
    this->w_ -= alpha * partialW;
    this->b_ -= alpha * partialB;
    printf("w= %f b= %f\n", this->w_, this->b_);
}

double LinearRegression::GetTestY(double x)
{
    return (tW_ * x + tB_);
}
