import tensorflow as tf
import matplotlib.pyplot as plt
import numpy as np

# Fix RNG seeds so anything random downstream (e.g. the default shuffle
# inside tf.data.Dataset.list_files) is reproducible across runs.
np.random.seed(777)
tf.random.set_seed(777)

BATCH_SIZE = 4
# Directory holding the training JPEGs (files named like cat.*.jpg / dog.*.jpg).
# NOTE(review): relative path — assumes the script is launched from its own
# directory; confirm against how the script is actually run.
img_dir = r'../../../../large_data/DL1/_many_files/catdog_data/fast_data2/train'


def my_read_file(path, img_size=(200, 200)):
    """Load one image file and derive its binary label from the filename.

    Args:
        path: Scalar string tensor holding the path of one image file.
        img_size: Target (height, width) for the resize. Defaults to the
            previously hard-coded (200, 200) so existing callers (e.g.
            ``ds.map(my_read_file)``) behave exactly as before.

    Returns:
        (img, y): ``img`` is a float32 tensor of shape (*img_size, 3) with
        pixel values scaled into [0, 1]; ``y`` is a scalar float label —
        0.0 when the basename starts with "cat.", 1.0 otherwise (dog).
    """
    # Label from the basename: ".../cat.<anything>" -> 0.0, everything else -> 1.0.
    y = tf.where(tf.strings.regex_full_match(path, r'.*[\\/]cat\.[^\\/]+'), 0., 1.)

    img = tf.io.read_file(path)
    img = tf.io.decode_jpeg(img, channels=3)
    # tf.image.resize returns float32; divide by 255 to normalize into [0, 1].
    img = tf.image.resize(img, img_size) / 255.
    return img, y

# ATTENTION: list_files() takes a GLOB PATTERN, not a bare directory.
# Passing only the directory makes TF try to open the directory itself as a
# regular file, which fails at iteration time with errors like:
#   W ... OP_REQUIRES failed at whole_file_read_ops.cc:
#   Unknown: NewRandomAccessFile failed to Create/Open: ...\train : access denied
# ds = tf.data.Dataset.list_files(img_dir)  # <- wrong: dir only, causes the error above
#
# num_parallel_calls parallelizes the JPEG read/decode/resize work, matching
# the AUTOTUNE policy already used by prefetch().
ds = tf.data.Dataset.list_files(img_dir + '/*.jpg')\
    .map(my_read_file, num_parallel_calls=tf.data.experimental.AUTOTUNE)\
    .batch(BATCH_SIZE, drop_remainder=False)\
    .prefetch(tf.data.experimental.AUTOTUNE)

# Preview the first spr*spc images in a subplot grid to sanity-check the pipeline.
spr = 5   # subplot rows
spc = 8   # subplot columns
spn = 0   # 1-based index of the next subplot cell to fill
for bx, by in ds:
    # >= so we stop as soon as the grid is full, instead of fetching and
    # decoding one extra (unused) batch as the old `>` check did.
    if spn >= spr * spc:
        break
    for i, bxi in enumerate(bx):
        spn += 1
        if spn > spr * spc:
            break
        byi = by[i]
        plt.subplot(spr, spc, spn)
        plt.axis('off')
        # Label is a numeric tensor; cast so the title renders as "0"/"1".
        # (For string tensors use bytes.decode instead.)
        plt.title(str(byi.numpy().astype(np.int32)))
        plt.imshow(bxi)

# BUG FIX: plt.show() was never called, so with a non-interactive backend the
# figure was silently discarded and nothing ever appeared on screen.
plt.show()

print('OVER')
