#!/usr/bin/python3
# -*- coding: utf-8 -*-
"""
Author :      wu
Description : Improve the performance of tf.data input pipelines.
"""

import time
from datetime import datetime

import tensorflow as tf


# 1.prefetch
def prefetch_test():
    """Compare iteration time with and without Dataset.prefetch.

    prefetch overlaps producing the next element with consuming the
    current one, so the source's 1s sleep and the training step's 1s
    sleep run concurrently instead of back to back.
    """

    def slow_source():
        # Simulate an expensive data source: 1 second per element.
        for item in range(10):
            time.sleep(1)
            yield item

    def train_step():
        # Simulate a 1-second training step.
        time.sleep(1)

    dataset = tf.data.Dataset.from_generator(slow_source, output_types=tf.int32)

    started = datetime.now()
    for _ in dataset:
        train_step()
    tf.print("time cost:{}".format(datetime.now() - started))

    started = datetime.now()
    for _ in dataset.prefetch(buffer_size=tf.data.experimental.AUTOTUNE):
        train_step()
    tf.print("prefetch time cost:{}".format(datetime.now() - started))


# 2.interleave
def interleave_test():
    """Read lines from several CSV files interleaved via Dataset.interleave."""
    csv_files = tf.data.Dataset.list_files("../data/titanic/*.csv")
    # skip(1) drops each file's header row before its lines are interleaved.
    lines = csv_files.interleave(
        lambda path: tf.data.TextLineDataset(path).skip(1)
    )
    for record in lines.take(8):
        tf.print(record)


# 3.map
def map_parallel_test():
    """Compare a sequential map() against map() with parallel calls.

    Times one full pass over the cifar2 training images with the default
    sequential map, then again with num_parallel_calls=AUTOTUNE so image
    decoding/resizing runs on multiple threads.
    """

    def load_image(img_path, size=(32, 32)):
        # Label is 1 for files under the "automobile" class folder, else 0.
        # Bug fix: the original pattern ".*/automobile/*.jpg" let "/*" match
        # zero-or-more slashes and then "." match exactly ONE basename
        # character before "jpg", so real names like ".../automobile/123.jpg"
        # never fully matched and every label came out 0.
        label = 1 if tf.strings.regex_full_match(img_path, ".*automobile.*") else 0
        img = tf.io.read_file(img_path)
        img = tf.image.decode_jpeg(img)
        img = tf.image.resize(img, size)

        return img, label

    start_time = datetime.now()
    ds = tf.data.Dataset.list_files("../data/cifar2/train/*/*.jpg")
    ds_map = ds.map(load_image)
    for _ in ds_map:
        pass
    tf.print("map time cost:{}".format(datetime.now() - start_time))

    start_time = datetime.now()
    ds_map_parallel = ds.map(load_image, num_parallel_calls=tf.data.experimental.AUTOTUNE)
    for _ in ds_map_parallel:
        pass
    tf.print("map parallel time cost:{}".format(datetime.now() - start_time))


# 4.cache
def cache_test():
    """Compare repeated epochs over a slow source with and without cache().

    Without cache() the 1s-per-element generator is replayed every epoch;
    with cache() only the first epoch pays the generation cost.
    """

    def slow_source():
        # 1 second per element: only the first full pass is expensive.
        for item in range(10):
            time.sleep(1)
            yield item

    def train_step():
        pass

    started = datetime.now()
    dataset = tf.data.Dataset.from_generator(slow_source, output_types=tf.int32)
    for _ in tf.range(5):
        for _ in dataset:
            train_step()
    tf.print("no cache time cost:{}".format(datetime.now() - started))

    started = datetime.now()
    cached = tf.data.Dataset.from_generator(slow_source, output_types=tf.int32).cache()
    for _ in tf.range(5):
        for _ in cached:
            train_step()
    tf.print("cache time cost:{}".format(datetime.now() - started))


# 5.map + batch
def map_batch_test():
    """Time map-then-batch versus batch-then-map on a simple square op.

    Batching first lets the mapped function run once per batch (vectorized)
    instead of once per element.
    """
    dataset = tf.data.Dataset.range(10000)

    started = datetime.now()
    mapped_then_batched = dataset.map(lambda v: v ** 2).batch(8)
    for _ in mapped_then_batched:
        pass
    tf.print("map_batch time cost:{}".format(datetime.now() - started))

    started = datetime.now()
    batched_then_mapped = dataset.batch(8).map(lambda v: v ** 2)
    for _ in batched_then_mapped:
        pass
    tf.print("batch_map time cost:{}".format(datetime.now() - started))


def main():
    """Run every pipeline-performance demo in order."""
    for demo in (
        prefetch_test,
        interleave_test,
        map_parallel_test,
        cache_test,
        map_batch_test,
    ):
        demo()


if __name__ == "__main__":
    main()
