import tensorflow as tf
import numpy as np
from sklearn.datasets import make_blobs
import numpy as np
import matplotlib.pyplot as plt

# Generate a 2-class toy dataset (make_blobs defaults to 100 samples with
# 2 features) and visualise it; plt.show() blocks until the window is closed.
data, target = make_blobs(centers=2)
plt.scatter(data[:, 0], data[:, 1], c=target)
plt.show()

# Convert to float32 tensors for TensorFlow ops (labels become 0.0 / 1.0).
data = tf.constant(data, dtype=tf.float32)
target = tf.constant(target, dtype=tf.float32)

# Logistic-regression parameters: weight matrix mapping 2 features -> 1 logit
# (small random init) and a scalar bias starting at zero.
W = tf.Variable(np.random.randn(2, 1) * 0.02, dtype=tf.float32)
B = tf.Variable(0., dtype=tf.float32)


def sigmoid(x):
    """Forward pass of the logistic-regression model.

    Computes sigmoid(x @ W + B) using the module-level parameters W and B,
    returning a probability per sample.
    """
    logits = tf.add(tf.matmul(x, W), B)
    return tf.math.sigmoid(logits)


# Loss function.
def cross_entropy_loss(y_true, y_pred):
    """Mean binary cross-entropy between labels and predicted probabilities.

    Args:
        y_true: 1-D tensor of 0/1 labels, shape (n,).
        y_pred: predicted probabilities, shape (n, 1) or (n,).

    Returns:
        Scalar tensor: mean of -[y*log(p) + (1-y)*log(1-p)].
    """
    # Flatten to 1-D so the loss works for any batch size — the original
    # hard-coded shape=[100] (make_blobs' default) and broke otherwise.
    y_pred = tf.reshape(y_pred, shape=[-1])
    # Small epsilon keeps log() away from log(0) = -inf.
    delta = 1e-7
    return tf.reduce_mean(-(tf.multiply(y_true, tf.math.log(y_pred + delta))
                            + tf.multiply(1 - y_true, tf.math.log(1 - y_pred + delta))))


# Plain stochastic gradient descent with the default learning rate.
optimizer = tf.optimizers.SGD()


def run_optimization():
    """Run one SGD step: forward pass, loss, gradients, parameter update."""
    # Record the forward computation so the tape can differentiate it.
    with tf.GradientTape() as tape:
        predictions = sigmoid(data)
        loss = cross_entropy_loss(target, predictions)
    # Gradients of the loss with respect to both trainable parameters.
    grads = tape.gradient(loss, [W, B])
    # In-place update of W and B.
    optimizer.apply_gradients(zip(grads, [W, B]))


# Compute classification accuracy.
def accuracy(y_true, y_pred):
    """Fraction of samples whose thresholded prediction matches the label.

    Args:
        y_true: 1-D tensor of 0/1 labels, shape (n,).
        y_pred: predicted probabilities, shape (n, 1) or (n,).

    Returns:
        Python float in [0, 1].
    """
    # Flatten to 1-D so this works for any dataset size — the original
    # hard-coded shape=[100] (make_blobs' default) and broke otherwise.
    y_pred = tf.reshape(y_pred, shape=[-1])
    # Probabilities above 0.5 are treated as the positive class.
    predicted = y_pred.numpy() > 0.5
    return (predicted == y_true.numpy()).mean()


# Training loop: 5000 SGD steps, logging accuracy and loss every 100 steps.
for i in range(5000):
    run_optimization()
    if i % 100 == 0:
        pred = sigmoid(data)
        acc = accuracy(target, pred)
        loss = cross_entropy_loss(target, pred)
        print(f'训练次数：{i}， 准确率：{acc}，损失：{loss} ')
