import numpy
from numpy.linalg import inv
from matplotlib import pyplot
pyplot.rcParams['font.sans-serif'] = 'SimHei'  # Chinese-capable font so the plot labels below render
# Linear regression
data=numpy.loadtxt('./ex0.txt',delimiter='\t')  # tab-separated file; assumes 3 columns -- TODO confirm against ex0.txt
X=data[:,:2]  # design matrix: first two columns (column 0 presumably a constant-1 intercept term -- verify in the data file)
Y=data[:,2]  # targets: third column
length=Y.shape[0]  # number of training samples
# Plain (unweighted) ordinary least squares fit, kept commented out for reference:
# W=(inv((X.T@X))@X.T)@Y
# Y_pre=X@W
# pyplot.scatter(X[:,1],Y,s=1.5)
# pyplot.plot(X[:,1],Y_pre,c='red')
# pyplot.show()
# print(W)
def ssE(y1,y2):
    """Return the sum of squared errors (SSE) between two arrays."""
    residual = y1 - y2
    return numpy.sum(residual ** 2)
def s2(y_pre):
    """Return the population variance (ddof=0) of the predicted values."""
    arr = numpy.asarray(y_pre)
    return numpy.mean((arr - arr.mean()) ** 2)
# Locally weighted linear regression
def Gauss_kernel_function(x0,x,k):
    """Gaussian (RBF) kernel weight of points x relative to the query x0.

    The weight peaks at 1 when x == x0. The bandwidth k acts as the
    standard deviation of the bell: a larger k spreads weight over more
    neighbours (flatter, simpler model); a smaller k concentrates weight
    tightly around x0.
    """
    squared_distance = (x - x0) ** 2
    return numpy.exp(-squared_distance / (2 * k ** 2))
def fun(x,w):
    """Evaluate the fitted line at x: intercept w[0] plus slope w[1] times x."""
    return x * w[1] + w[0]
# Y_pre=[]
# X_test=[]
# for x0 in numpy.arange(0,1,0.005):
#     weight=Gauss_kernel_function(x0,X[:,1],k=0.1)
#     weightmat=numpy.diag(weight)
#     W=inv(X.T@weightmat@X)@X.T@weightmat@Y
#     Y_pre.append(fun(x0,W))
#     X_test.append(x0)
# pyplot.scatter(X[:,1],Y,s=1.5)
# pyplot.plot(X_test,Y_pre,c='red')
# pyplot.show()
# Sweep the bandwidth k and observe how the model's bias and variance change:
# as k grows, bias increases and variance decreases, i.e. the model gets simpler.
Y_pre=numpy.empty(length)
S2=numpy.empty(100)
SSE=numpy.empty(100)
K=numpy.arange(0.01,1.01,0.01)  # 100 bandwidth values: 0.01, 0.02, ..., 1.00
for k in range(len(K)):
    for i in range(length):
        # Weight every training point by its Gaussian similarity to query X[i,1]
        weight=Gauss_kernel_function(X[i,1],X[:,1],k=K[k])
        weightmat=numpy.diag(weight)
        # Closed-form weighted least squares: W = (X^T M X)^-1 X^T M Y
        W=inv(X.T@weightmat@X)@X.T@weightmat@Y
        Y_pre[i]=fun(X[i,1], W)  # prediction at the training point itself
    S2[k]=s2(Y_pre)  # variance of the predictions over the training samples
    SSE[k]=ssE(Y_pre,Y)  # bias measured as SSE on the training samples
pyplot.plot(K,S2,label='方差')
pyplot.xlabel('k')
pyplot.legend()
pyplot.show()
pyplot.plot(K,SSE,label='偏差')
pyplot.legend()
pyplot.xlabel('k')
pyplot.show()