from tensorflow.keras import layers
import tensorflow as tf

# Build a simple three-layer classifier with the Sequential API,
# adding one layer at a time.
model = tf.keras.Sequential()

# Two densely-connected hidden layers of 64 units each:
for _ in range(2):
    model.add(layers.Dense(64, activation='relu'))

# A softmax output layer with 10 units:
model.add(layers.Dense(10, activation='softmax'))

# Demonstrations of common Dense-layer configuration options.  Each call
# just constructs a layer to show the API; the results are discarded and
# none of these layers is added to a model.

# A sigmoid layer, configured by activation name:
layers.Dense(64, activation='sigmoid')
# ...or equivalently, by passing the activation callable itself:
layers.Dense(64, activation=tf.keras.activations.sigmoid)

# A linear layer with L1 regularization (factor 0.01) on the kernel matrix:
layers.Dense(64, kernel_regularizer=tf.keras.regularizers.l1(0.01))

# A linear layer with L2 regularization (factor 0.01) on the bias vector:
layers.Dense(64, bias_regularizer=tf.keras.regularizers.l2(0.01))

# A linear layer whose kernel is initialized to a random orthogonal matrix:
layers.Dense(64, kernel_initializer='orthogonal')

# A linear layer whose bias vector is initialized to the constant 2.0:
layers.Dense(64, bias_initializer=tf.keras.initializers.Constant(2.0))

# The same three-layer network, rebuilt with an explicit input shape
# declared on the first layer.
model = tf.keras.Sequential()
# A densely-connected layer of 64 units, taking 32-feature inputs:
model.add(layers.Dense(64, activation='relu', input_shape=(32,)))
# A second hidden layer of 64 units:
model.add(layers.Dense(64, activation='relu'))
# A softmax output layer over 10 classes:
model.add(layers.Dense(10, activation='softmax'))

# Configure training: Adam optimizer with categorical cross-entropy loss.
model.compile(
    optimizer=tf.keras.optimizers.Adam(0.001),
    loss='categorical_crossentropy',
    metrics=['accuracy'],
)

import numpy as np

# Synthetic training data: 1000 samples of 32 features each, with random
# 10-dimensional targets (demo only -- these are not real one-hot labels).
data = np.random.random((1000, 32))
labels = np.random.random((1000, 10))

# A small held-out set of 100 samples used for validation during training.
val_data = np.random.random((100, 32))
val_labels = np.random.random((100, 10))

# Train for 10 epochs in mini-batches of 32, validating after each epoch.
model.fit(
    data,
    labels,
    epochs=10,
    batch_size=32,
    validation_data=(val_data, val_labels),
)


# The same training run, but feeding tf.data pipelines instead of raw
# NumPy arrays; batching is handled by the dataset itself.
dataset = tf.data.Dataset.from_tensor_slices((data, labels)).batch(32)
val_dataset = tf.data.Dataset.from_tensor_slices((val_data, val_labels)).batch(32)

model.fit(dataset, epochs=10, validation_data=val_dataset)


# The same network expressed with the functional API: wire an Input
# tensor through the layers explicitly, then wrap the graph in a Model.
inputs = tf.keras.Input(shape=(32,))

hidden = layers.Dense(64, activation='relu')(inputs)
hidden = layers.Dense(64, activation='relu')(hidden)
predictions = layers.Dense(10, activation='softmax')(hidden)

model = tf.keras.Model(inputs=inputs, outputs=predictions)

# Configure training: RMSprop optimizer with categorical cross-entropy loss.
model.compile(
    optimizer=tf.keras.optimizers.RMSprop(0.001),
    loss='categorical_crossentropy',
    metrics=['accuracy'],
)

# Train for 5 epochs in mini-batches of 32.
model.fit(data, labels, batch_size=32, epochs=5)


class MyModel(tf.keras.Model):
    """A two-layer classifier built by subclassing tf.keras.Model.

    Layers are declared in ``__init__``; the forward pass is defined
    in ``call``.
    """

    def __init__(self, num_classes=10):
        super(MyModel, self).__init__(name='my_model')
        # Fix: was stored as `self.num_class`, inconsistent with the
        # constructor parameter name `num_classes`.
        self.num_classes = num_classes

        self.dense_1 = layers.Dense(32, activation='relu')
        # NOTE(review): a sigmoid output combined with the
        # categorical_crossentropy loss used below is unusual for
        # multi-class classification -- softmax may be intended.
        # Left unchanged to preserve behavior.
        self.dense_2 = layers.Dense(num_classes, activation='sigmoid')

    def call(self, inputs):
        """Forward pass: inputs -> dense_1 (relu) -> dense_2 (sigmoid)."""
        x = self.dense_1(inputs)
        return self.dense_2(x)

# A subclassed model compiles and trains exactly like a built-in one.
model = MyModel(num_classes=10)
model.compile(
    optimizer=tf.keras.optimizers.RMSprop(0.001),
    loss='categorical_crossentropy',
    metrics=['accuracy'],
)
model.fit(data, labels, batch_size=32, epochs=5)

class MyLayer(layers.Layer):
    """A custom layer implementing a bias-free linear transform y = x @ W."""

    def __init__(self, output_dim, **kwargs):
        self.output_dim = output_dim
        super().__init__(**kwargs)

    def build(self, input_shape):
        # Create the kernel lazily, once the input feature size is known.
        self.kernel = self.add_weight(
            name='kernel',
            shape=(input_shape[1], self.output_dim),
            initializer='uniform',
            trainable=True,
        )

    def call(self, inputs):
        # Matrix-multiply the inputs by the kernel (no bias, no activation).
        return tf.matmul(inputs, self.kernel)

    def get_config(self):
        # Merge our constructor argument into the base layer config so the
        # layer can be re-created from its serialized form.
        config = super().get_config()
        return dict(config, output_dim=self.output_dim)

    @classmethod
    def from_config(cls, config):
        # Rebuild the layer from the dict produced by get_config().
        return cls(**config)

# Use the custom layer inside a Sequential model, followed by a softmax.
model = tf.keras.Sequential([
    MyLayer(10),
    layers.Activation('softmax'),
])

# Training configuration.
model.compile(
    optimizer=tf.keras.optimizers.RMSprop(0.001),
    loss='categorical_crossentropy',
    metrics=['accuracy'],
)

# Train for 5 epochs.
model.fit(data, labels, batch_size=32, epochs=5)

# Stop early if validation loss has not improved for 2 epochs, and write
# TensorBoard logs to ./logs.
callbacks = [
    tf.keras.callbacks.EarlyStopping(patience=2, monitor='val_loss'),
    tf.keras.callbacks.TensorBoard(log_dir='./logs'),
]

# Train again with the callbacks attached and validation data supplied.
model.fit(
    data,
    labels,
    batch_size=32,
    epochs=5,
    callbacks=callbacks,
    validation_data=(val_data, val_labels),
)





