import tensorflow as tf
import numpy as np
try:
    from tensorflow.keras import layers
except ImportError:
    # Older TF releases only expose keras under the private python package.
    # Catch ImportError specifically: a bare `except:` would also swallow
    # KeyboardInterrupt/SystemExit and hide unrelated failures.
    from tensorflow.python.keras import layers


def campare_single():
    """Compare a hand-written dense layer against ``layers.Dense``.

    Builds one ``Dense(256, relu)`` layer, extracts its kernel and bias,
    runs the same input through a manual ``relu(x @ w + b)`` computation
    with those exact weights, and prints the element-wise difference
    (expected to be all zeros).

    NOTE(review): the name looks like a typo for ``compare_single``; kept
    as-is because it is invoked by name in the ``__main__`` block.
    """
    x = tf.random.normal([2, 784])

    def hand_single(x, w1, b1):
        # Manual fully-connected layer: affine transform followed by ReLU.
        # Parameters are always passed explicitly; the original default
        # arguments allocated throwaway tensors/Variables on every call.
        o1 = x @ w1 + b1
        return tf.nn.relu(o1)

    def tensor_single(x):
        # Layer-API equivalent: Dense creates its own kernel/bias on first call.
        fc = layers.Dense(256, activation=tf.nn.relu)
        h1 = fc(x)
        print('待优化张量表:', fc.trainable_variables)
        print('无需优化:', fc.non_trainable_variables)
        print('以上两种的合集:', fc.variables)
        print('权重矩阵:', fc.kernel)
        print('偏置:', fc.bias)
        return h1, fc.kernel, fc.bias

    h1_layer, kernel, bias = tensor_single(x)
    h1_manual = hand_single(x, kernel, bias)
    # Same weights, same math => the difference should be exactly zero.
    print(h1_layer - h1_manual)


def hand_multi():
    """Forward pass of a 4-layer MLP built from raw Variables.

    Network: [b, 784] -> 256 -> 128 -> 64 -> 10, ReLU on the hidden
    layers, no activation on the output layer.

    Returns:
        The output-layer logits, shape [b, 10]. (The original discarded
        this result.)
    """
    x = tf.random.normal([2, 784])
    # Hidden layer 1 parameters
    w1 = tf.Variable(tf.random.truncated_normal([784, 256], stddev=0.1))
    b1 = tf.Variable(tf.zeros([256]))
    # Hidden layer 2 parameters
    w2 = tf.Variable(tf.random.truncated_normal([256, 128], stddev=0.1))
    b2 = tf.Variable(tf.zeros([128]))
    # Hidden layer 3 parameters
    w3 = tf.Variable(tf.random.truncated_normal([128, 64], stddev=0.1))
    b3 = tf.Variable(tf.zeros([64]))
    # Output layer parameters
    w4 = tf.Variable(tf.random.truncated_normal([64, 10], stddev=0.1))
    b4 = tf.Variable(tf.zeros([10]))

    # NOTE(review): the tape records the forward ops so a loss could later
    # be differentiated via tape.gradient(...); no gradient is taken here.
    with tf.GradientTape() as tape:
        # Bias adds rely on implicit broadcasting throughout (the original
        # used an explicit broadcast_to on layer 1 only — the + operator
        # broadcasts [256] across the batch dimension by itself).
        # Hidden layer 1: [b, 784] => [b, 256]
        h1 = tf.nn.relu(x @ w1 + b1)
        # Hidden layer 2: [b, 256] => [b, 128]
        h2 = tf.nn.relu(h1 @ w2 + b2)
        # Hidden layer 3: [b, 128] => [b, 64]
        h3 = tf.nn.relu(h2 @ w3 + b3)
        # Output layer: [b, 64] => [b, 10], raw logits (no activation)
        h4 = h3 @ w4 + b4
    return h4

def tensor_multi():
    """Build the same 4-layer MLP twice with the Keras API.

    First layer-by-layer with individual ``Dense`` objects, then as a
    single ``tf.keras.Sequential`` container.

    Returns:
        Tuple ``(h4, out)``: logits from the layer-by-layer pass and from
        the Sequential model, each of shape [2, 10]. (The original
        discarded both results.)
    """
    x = tf.random.normal([2, 784])
    fc1 = layers.Dense(256, activation=tf.nn.relu)  # hidden layer 1
    fc2 = layers.Dense(128, activation=tf.nn.relu)  # hidden layer 2
    fc3 = layers.Dense(64, activation=tf.nn.relu)   # hidden layer 3
    fc4 = layers.Dense(10, activation=None)         # output layer

    h1 = fc1(x)   # through hidden layer 1
    h2 = fc2(h1)  # through hidden layer 2
    h3 = fc3(h2)  # through hidden layer 3
    h4 = fc4(h3)  # network output (logits)

    # BUG FIX: Sequential is not exported by the keras `layers` module;
    # `layers.Sequential` raises AttributeError. It lives at
    # tf.keras.Sequential (a Model subclass).
    model = tf.keras.Sequential([
        layers.Dense(256, activation=tf.nn.relu),  # hidden layer 1
        layers.Dense(128, activation=tf.nn.relu),  # hidden layer 2
        layers.Dense(64, activation=tf.nn.relu),   # hidden layer 3
        layers.Dense(10, activation=None),         # output layer
    ])

    out = model(x)
    return h4, out

if __name__ == '__main__':
    # Demo entry point: run only the single-layer comparison
    # (hand_multi / tensor_multi are available but not invoked here).
    campare_single()