'''
@ Author:as.ck
@ Date:2021-3-15
@ Function: 随机梯度和小批量梯度Demo
@ Reference: https://blog.csdn.net/weixin_43462348/article/details/102640104
             https://blog.csdn.net/m2284089331/article/details/76492521
1. 批量梯度下降GD，一次梯度更新需要使用所有样本数据
2. 随机梯度下降-SGD，一次梯度更新使用一个样本数据
3. 小批量梯度下降-MBGD，一次梯度更新使用一小部分（batch）数据更新

'''
print(__doc__)

import numpy as np
import matplotlib.pyplot as plt
from mpl_toolkits.mplot3d import axes3d
from matplotlib import style
import math


def SGDMethod(data, w, alpha=0.01, numit=5000):
    """Run one full stochastic-gradient-descent pass over the data.

    Takes one gradient step per sample (least-squares linear regression).
    NOTE(review): despite the "stochastic" name, samples are visited in
    their stored order — no shuffling is performed here.

    Args:
        data: dict with 'x' (m x n feature array) and 'y' (length-m targets).
        w: current weight vector, shape (n,).
        alpha: learning rate.
        numit: unused; kept only for backward compatibility with callers.

    Returns:
        The updated weight vector — a NEW array; the input ``w`` is not
        mutated (each step rebinds via ``w = w - ...``).
    """
    x, y = data['x'], data['y']
    m, _ = x.shape

    # Per-sample gradient of the squared error: x_i * (x_i . w - y_i).
    for i in range(m):
        grad = x[i].T * (np.dot(x[i], w) - y[i])
        w = w - alpha * grad
    return w
 
def MBGDMethod(data, w, alpha=1e-3, num=5000, batch_size=20):
    """Run one full mini-batch-gradient-descent pass over the data.

    Splits the samples into consecutive batches of ``batch_size`` and takes
    one gradient step per batch.  The learning rate and batch size interact:
    the gradient is SUMMED (not averaged) over the batch, so a larger batch
    generally needs a smaller ``alpha``.

    Args:
        data: dict with 'x' (m x n feature array) and 'y' (length-m targets).
        w: weight vector, shape (n,); updated IN PLACE and also returned
           (callers rely on the in-place mutation).
        alpha: learning rate.
        num: unused; kept only for backward compatibility with callers.
        batch_size: samples per gradient step.  A trailing partial batch
            (the last ``m % batch_size`` samples) is skipped, matching the
            original behaviour.

    Returns:
        The updated weight vector (the same array object as ``w``).
    """
    x, y = data['x'], data['y']
    m, _ = x.shape

    # Batch gradient: X_b^T (X_b w - y_b).  Plain ndarray ops replace the
    # original deprecated np.mat arithmetic and its flattening workaround.
    for i in range(m // batch_size):
        lo = i * batch_size
        hi = lo + batch_size
        x_batch, y_batch = x[lo:hi], y[lo:hi]
        grad = x_batch.T.dot(x_batch.dot(w) - y_batch)
        w -= alpha * grad  # in-place on purpose: see docstring
    return w

def train(data, epochs=100, batchsizes=10, tollerances=1e-2, method="SGD"):
    """Train linear-regression weights with the chosen gradient method.

    Args:
        data: dict with 'x' (m x n feature array) and 'y' (length-m targets).
        epochs: number of full passes over the data.
        batchsizes: unused; kept for backward compatibility (MBGDMethod's
            own default batch size applies instead).
        tollerances: unused; kept for backward compatibility — no early
            stopping is implemented.  NOTE(review): name is a typo for
            "tolerances" but is preserved so keyword callers keep working.
        method: "SGD" or "MBGD".

    Returns:
        The learned weight vector, shape (n,).  If ``method`` is unknown, a
        message is printed and the all-zeros initial vector is returned.
    """
    _, n = data['x'].shape
    w = np.zeros(n)

    if method == "SGD":
        for ep in range(epochs):
            w = SGDMethod(data, w)
    elif method == "MBGD":
        for ep in range(epochs):
            # BUG FIX: the return value used to be discarded; training only
            # worked because MBGDMethod mutates w in place.  Assigning makes
            # the data flow explicit and correct either way.
            w = MBGDMethod(data, w)
    else:
        print("不存在的方法！")
    return w


def generated_data(sample_num=100):
    """Build a toy noise-free linear dataset with true weights [5, 7].

    Args:
        sample_num: number of samples to generate.

    Returns:
        (x, y): features of shape (sample_num, 2) — two evenly spaced
        ramps over [0, 9] and [4, 13] — and targets y = 5*x1 + 7*x2 of
        shape (sample_num,).
    """
    feature_a = np.linspace(0, 9, sample_num)
    feature_b = np.linspace(4, 13, sample_num)
    features = np.stack((feature_a, feature_b), axis=1)
    targets = features.dot(np.array([5, 7]))
    return features, targets


if __name__ == '__main__':
    # Demo: fit y = 5*x1 + 7*x2 with both optimizers and compare weights.
    features, targets = generated_data(100)
    data = {'x': features, 'y': targets}  # plain dict standing in for a struct
    w_sgd = train(data)
    w_mbgd = train(data, method="MBGD")
    print("w_sgd=", w_sgd, ", w_mbgd=", w_mbgd)