from sklearn.datasets import load_iris
from sklearn.preprocessing import StandardScaler
import tensorflow as tf

# Fix TF's global RNG so dataset shuffling below is reproducible.
tf.random.set_seed(777)

# Load the iris features/labels and standardize the features
# (zero mean, unit variance per column).
x, y = load_iris(return_X_y=True)
scaler = StandardScaler()
x = scaler.fit_transform(x)
for name, arr in (('x', x), ('y', y)):
    print(name, arr.shape)
M, N = x.shape  # M = number of samples, N = number of features

BATCH_SIZE = 64

# Build the input pipeline: shuffle over the full dataset (buffer = M,
# i.e. a perfect shuffle), group into batches, and prefetch so the next
# batch is prepared while the current one is consumed.
# NOTE: tf.data.experimental.AUTOTUNE is deprecated; tf.data.AUTOTUNE
# is the supported spelling since TF 2.4.
ds = tf.data.Dataset.from_tensor_slices((x, y))\
    .shuffle(M)\
    .batch(BATCH_SIZE)\
    .prefetch(buffer_size=tf.data.AUTOTUNE)

# Walk the pipeline once: dump the first batch's contents for a sanity
# check, then report every batch's shape (the final batch may be smaller
# than BATCH_SIZE since 150 is not a multiple of 64).
for batch_idx, (batch_x, batch_y) in enumerate(ds):
    if batch_idx == 0:
        print(batch_x[:5])
        print(batch_y)
    print(batch_x.shape)
    print(batch_y.shape)
