import tensorflow as tf
import numpy as np
from datetime import datetime
def add_layer(inputs, Weights, biases, n_layer, activation_function=None):
    """Apply one fully-connected layer: activation(inputs @ Weights + biases).

    `n_layer` only labels the TensorBoard name scope.  When
    `activation_function` is None the layer output is purely linear.
    """
    layer_name = 'layer%s' % n_layer
    with tf.name_scope(layer_name):
        with tf.name_scope('Wx_plus_b'):
            pre_activation = tf.matmul(inputs, Weights) + biases
        # Guard clause: linear layer when no activation was supplied.
        if activation_function is None:
            return pre_activation
        return activation_function(pre_activation)
def loss(predicted_y, target_y):
    """Return the mean squared error between predictions and targets."""
    squared_error = tf.math.square(predicted_y - target_y)
    return tf.math.reduce_mean(squared_error)
def train_one_step(optimizer, x_data, y_data):
    """Run one forward/backward pass and apply a single optimizer update.

    Reads the module-level parameters Wl, bl, Wp, bp and mutates them via
    `optimizer.apply_gradients`.  Returns the scalar loss for this step.
    """
    params = [Wl, bl, Wp, bp]
    with tf.GradientTape() as tape:
        hidden = add_layer(x_data, Wl, bl, n_layer=1, activation_function=tf.nn.relu)
        prediction = add_layer(hidden, Wp, bp, n_layer=2, activation_function=None)
        with tf.name_scope('loss'):
            loss_val = loss(prediction, y_data)
    with tf.name_scope('trains'):
        # Gradients w.r.t. all four parameters.  (Original note: using only
        # Wl, bl would also produce a result, but every variable is updated.)
        gradients = tape.gradient(loss_val, params)
        optimizer.apply_gradients(zip(gradients, params))
    return loss_val
@tf.function
def train(optimizer, x_data, y_data, step=0):
    """Run one training step and log the loss scalar to TensorBoard.

    Fix: the summary step was hard-coded to 0, so repeated calls overwrote
    a single TensorBoard data point instead of producing a loss curve.
    `step` defaults to 0 for backward compatibility; pass the iteration
    index to get a real time axis.

    NOTE(review): passing a distinct Python int per call retraces the
    tf.function each time — in a long training loop prefer passing a
    tf.Tensor (e.g. tf.constant(i, dtype=tf.int64)).
    """
    loss_val = train_one_step(optimizer, x_data, y_data)
    with summary_writer.as_default():
        tf.summary.scalar('loss', loss_val, step=step)
    return loss_val
# ---- Data: noisy quadratic y = x^2 - 0.5 + N(0, 0.05) on [-1, 1] ----
x_data = np.linspace(-1, 1, 300)[:, np.newaxis]
noise = np.random.normal(0, 0.05, x_data.shape)
y_data = np.square(x_data) - 0.5 + noise
# TF defaults to float32; cast once at the boundary instead of per step.
x_data = x_data.astype(np.float32)
y_data = y_data.astype(np.float32)

# ---- Model parameters: 1 -> 10 (hidden, ReLU) -> 1 (linear output) ----
Wl = tf.Variable(tf.random.normal((1, 10)), name='Wl')
Wp = tf.Variable(tf.random.normal((10, 1)), name='Wp')
# Small positive biases so ReLU units start in their active region.
bl = tf.Variable(tf.zeros((1, 10)) + 0.1, name='bl')
bp = tf.Variable(tf.zeros((1, 1)) + 0.1, name='bp')

optimizer = tf.optimizers.SGD(learning_rate=0.1)  # optimizer

# ---- TensorBoard setup ----
stamp = datetime.now().strftime("%Y%m%d-%H%M%S")
logdir = 'logs/%s' % stamp
summary_writer = tf.summary.create_file_writer(logdir)  # create file writer

# Record the tf.function graph so it can be inspected in TensorBoard.
# BUG FIX: the original passed profiler_outdir= to trace_export() while
# trace_on() never enabled the profiler — the argument was ignored by
# older TF releases and is rejected by newer ones.  Graph-only tracing
# keeps the original effective behavior.
tf.summary.trace_on(graph=True)
last_loss = train(optimizer, x_data, y_data)
with summary_writer.as_default():
    tf.summary.trace_export(name="my_func_trace", step=0)