import tensorflow as tf

# Smoke-exercise a set of TensorFlow 2.x API symbols.
# Bare attribute accesses verify the symbol exists (they raise AttributeError
# otherwise); the call expressions additionally verify the callable runs.

tf.math.tan  # was tf.nn.tan, which does not exist (tf.nn has tanh, not tan)
tf.keras.Model.fit
tf.keras.Sequential
tf.keras.layers.Dense
tf.keras.layers.InputLayer

tokenizer = tf.keras.preprocessing.text.Tokenizer()
# Tokenizer has no `tokenize` method; its conversion API is texts_to_sequences.
tokenizer.texts_to_sequences

# from_tensor_slices requires a `tensors` argument; calling it with none is a
# TypeError, so supply a minimal concrete input.
tf.data.Dataset.from_tensor_slices([1, 2, 3])

tf.keras.layers.Conv2D

# tf.losses is only a v2 alias for tf.keras.losses; use the canonical path
# for consistency with the other tf.keras references in this file.
tf.keras.losses.SparseCategoricalCrossentropy()

# NOTE(review): this downloads the CIFAR-10 archive (~170 MB) on first run —
# a heavyweight network side effect at module import time; confirm intended.
tf.keras.datasets.cifar10.load_data()