import numpy as np
import tensorflow as tf
import matplotlib.pyplot as plt
from sklearn.linear_model import LinearRegression
import time

# Synthetic training data: 20 points along the line y = x over [0, 10],
# perturbed with unit Gaussian noise.
# Cast to float32 so the arrays match the dtype of the float32 tf.Variable
# parameters defined below — mixing float64 numpy arrays with float32
# tensors raises a TensorFlow dtype-mismatch error.
x = (np.linspace(0, 10, 20) + np.random.randn(20)).astype(np.float32)
y = (np.linspace(0, 10, 20) + np.random.randn(20)).astype(np.float32)

# Plot the raw training points (the figure is only displayed by the
# plt.show() call at the end of the script).
plt.scatter(x, y)


# Trainable parameters of the line y = W * x + B, both initialized to 0
# (float32 by default).
W = tf.Variable(0.)  # slope
B = tf.Variable(0.)  # intercept

# Define the linear model.
def linear_regression(x):
    """Return the model prediction ``W * x + B`` for input ``x``.

    ``x`` is cast to the variables' dtype (float32) so that plain float64
    numpy input — such as the raw output of ``np.linspace`` used elsewhere
    in this script — does not trigger a TensorFlow dtype-mismatch error
    when multiplied with the float32 variable ``W``.
    """
    return W * tf.cast(x, W.dtype) + B

# Define the loss function.
def mean_square_loss(y_pred, y_true):
    """Return the mean squared error between predictions and targets.

    ``y_true`` is cast to ``y_pred``'s dtype so that float64 numpy targets
    can be compared against float32 model output without TensorFlow
    raising a dtype-mismatch error.
    """
    y_true = tf.cast(y_true, y_pred.dtype)
    return tf.reduce_mean(tf.square(y_pred - y_true))


# Plain stochastic gradient descent with Keras' default learning rate (0.01).
optimizer = tf.optimizers.SGD()

# Define one optimization step.
def run_optimization():
    """Run a single SGD step on the globals ``W`` and ``B``.

    Records the forward pass on a gradient tape, differentiates the MSE
    loss with respect to ``W`` and ``B``, and applies the gradients via
    the module-level optimizer.

    Returns:
        The scalar loss tensor for this step, so callers can log it
        without recomputing a forward pass. (Previously the loss was
        computed and discarded; callers ignoring the return value are
        unaffected.)
    """
    with tf.GradientTape() as g:
        pred = linear_regression(x)
        loss = mean_square_loss(pred, y)

    # Compute d(loss)/dW and d(loss)/dB.
    gradients = g.gradient(loss, [W, B])

    # Update W and B in place.
    optimizer.apply_gradients(zip(gradients, [W, B]))
    return loss

# Train for 5000 SGD steps, reporting progress every 100 steps.
start_time = time.time()
step = 0
while step < 5000:
    run_optimization()
    # Show intermediate results every 100 steps.
    if step % 100 == 0:
        pred = linear_regression(x)
        loss = mean_square_loss(pred, y)
        print(f'step:{step}, loss:{loss}, W:{W.numpy()}, B:{B.numpy()}')
    step += 1

end_time = time.time()
print(f"Total time: {end_time - start_time} seconds")

# Fit the same line with scikit-learn as a closed-form cross-check on the
# iteratively trained TensorFlow parameters.
sklinear = LinearRegression().fit(x[:, np.newaxis], y)

print(sklinear.coef_)
print(sklinear.intercept_)
print("Hello Tensorflow!")

# Overlay the fitted line on the earlier scatter plot and display the figure.
xa = np.linspace(0, 10, 20)
fitted = linear_regression(xa)
plt.plot(xa, fitted)
plt.show()