# x1, x2 --> y : multivariate (nonlinear) regression

# Linear: a continuous, straight-line functional relationship
# Nonlinear: a nonlinear equation (formula) that joins irregular scattered points
# into a continuous curve — sigmoid, ReLU, tanh, ...

import numpy as np
import keras
from keras import layers
import matplotlib.pyplot as plt

# Synthetic training data: two input features sampled along line segments,
# with a noisy quadratic target.
x1 = np.linspace(0, 2, 20)
x2 = np.linspace(1, 3, 20)
noise = np.random.normal(0, 0.05, x1.shape)
y = (x1 + noise) ** 2 + x2 ** 2 + noise
print(y.shape)

# 3D plot of the raw data as a red line.
fig, ax = plt.subplots(subplot_kw={"projection": "3d"})
ax.plot(x1, x2, y, 'r-')

# BUG FIX: the original did `plt.xlabel = "x1"` etc., which *assigned strings
# over the pyplot functions* instead of calling them — no axis was ever
# labelled, and the pyplot API was clobbered. For 3D axes the correct calls
# are the Axes3D setters (clabel is for contour labels, not the z axis).
ax.set_xlabel("x1")
ax.set_ylabel("x2")
ax.set_zlabel("y")

# Pair the two feature vectors column-wise into a (20, 2) design matrix,
# one row per sample (equivalent to the original stack/ndmin dance).
x1x2 = np.column_stack((x1, x2))
print(x1x2)

# -------------- Define and train the neural network (nonlinear regression) ------------------

# Small feed-forward net: 2 inputs -> two tanh hidden layers -> 1 output value.
model = keras.Sequential([
    layers.Flatten(input_shape=(2,)),     # input: 2 features per sample
    layers.Dense(10, activation="tanh"),  # hidden layer
    layers.Dense(10, activation="tanh"),  # hidden layer
    layers.Dense(1)                       # output: single regression value
])

model.compile(
    optimizer=keras.optimizers.SGD(learning_rate=0.01),
    loss=keras.losses.MSE,
    # BUG FIX: 'acc' (classification accuracy) is meaningless for a continuous
    # regression target; track mean absolute error instead.
    metrics=['mae']
)

model.fit(x1x2, y, epochs=1000)

# Run the trained model on the training inputs and overlay its predictions
# as blue dots on the same 3D axes as the raw data.
predict_y = model.predict(x1x2)
ax.plot(x1, x2, predict_y[:, 0], 'bo')

plt.show()