# -*- coding: utf-8 -*-
# !/usr/bin/python3
"""
Author :      wu
Description : Different ways of building tf.data input pipelines
"""

import os

import tensorflow as tf
from tensorflow.keras.preprocessing.image import ImageDataGenerator
import numpy as np
import pandas as pd
from sklearn import datasets
import matplotlib.pyplot as plt


# Load the classic iris dataset (feature matrix plus integer class labels).
iris = datasets.load_iris()

# Build a pipeline straight from numpy arrays.
print("------ numpy dataset -------")
data = tf.data.Dataset.from_tensor_slices((iris["data"], iris["target"]))

# Peek at the first five (features, label) pairs.
for features, target in data.take(5):
    tf.print(target, features)

# Build a pipeline from a pandas DataFrame (dict-of-columns form).
print("------ dataframe dataset -------")
frame = pd.DataFrame(iris["data"], columns=iris.feature_names)
data2 = tf.data.Dataset.from_tensor_slices((frame.to_dict("list"), iris["target"]))

# Each element is a (feature dict, label) pair.
for features, target in data2.take(2):
    tf.print(target, features)
# Build a dataset on top of Keras' directory-based image iterator.
print("------ generator dataset -------")
augmenter = ImageDataGenerator(rescale=1.0/255)
image_generator = augmenter.flow_from_directory("../data/cifar2/test/",
                                                target_size=(32, 32),
                                                batch_size=20,
                                                class_mode="binary")
# Class-name -> integer-label mapping inferred from the sub-directories.
classdict = image_generator.class_indices
print(classdict)


def generator():
    """Yield (image_batch, label_batch) pairs from the module-level Keras iterator."""
    yield from image_generator


# Wrap the Python generator as a tf.data.Dataset.
data3 = tf.data.Dataset.from_generator(generator, output_types=(tf.float32, tf.int32))

# Unbatch to single images and preview nine of them in a 3x3 grid.
for idx, (image, target) in enumerate(data3.unbatch().take(9)):
    axes = plt.subplot(3, 3, idx + 1)
    axes.imshow(image.numpy())
    axes.set_title("label {}".format(target))
plt.show()


# Build a batched dataset directly from CSV files.
print("------ csv dataset -------")
data4 = tf.data.experimental.make_csv_dataset(
    file_pattern=["../data/titanic/train.csv"],
    batch_size=3,
    label_name="Survived",
    na_value="",
    num_epochs=1,
    ignore_errors=True,
)

# Each element is a (column-name -> tensor dict, label batch) pair.
for batch_features, batch_labels in data4.take(2):
    tf.print(batch_labels, batch_features)


# text..
print("------ text dataset -------")
# Skip the header row of EACH file. A single .skip(1) applied to one
# TextLineDataset over both files would only drop train.csv's header and
# let test.csv's header line leak into the data.
data5 = tf.data.Dataset.from_tensor_slices(
    ["../data/titanic/train.csv", "../data/titanic/test.csv"]
).flat_map(lambda path: tf.data.TextLineDataset(path).skip(1))

for line in data5.take(5):
    tf.print(line)


# file_path
print("------ from file dataset -------")
# Use a fresh name (data6) — the original rebound data5, silently clobbering
# the text dataset above and breaking the file's data1..data7 numbering.
data6 = tf.data.Dataset.list_files("../data/cifar2/train/*/*.jpg")
for file in data6.take(2):
    tf.print(file)


# tf_records
def create_tfrecords(inpath, outpath):
    """Serialize every image under ``inpath`` into a TFRecord file at ``outpath``.

    ``inpath`` must contain one sub-directory per class; the label stored with
    each image is the index of its class directory.

    :param inpath: directory whose sub-directories are the classes
    :param outpath: path of the TFRecord file to write
    """
    # Sort so the class -> label-index mapping is deterministic
    # (os.listdir returns entries in arbitrary order).
    dirs = sorted(os.listdir(inpath))

    # Context manager guarantees the writer is flushed and closed even if
    # reading/serializing an image raises.
    with tf.io.TFRecordWriter(outpath) as writer:
        for index, name in enumerate(dirs):
            class_path = os.path.join(inpath, name)
            for img_name in sorted(os.listdir(class_path)):
                img = tf.io.read_file(os.path.join(class_path, img_name))
                example = tf.train.Example(
                    features=tf.train.Features(feature={
                        "label": tf.train.Feature(int64_list=tf.train.Int64List(value=[index])),
                        "img_raw": tf.train.Feature(bytes_list=tf.train.BytesList(value=[img.numpy()]))
                    })
                )
                writer.write(example.SerializeToString())


create_tfrecords("../data/cifar2/test/", "./cifar2.tfrecord")


def parse_example(proto):
    """Decode one serialized Example into a (32x32 image tensor, int64 label) pair."""
    feature_spec = {
        "img_raw": tf.io.FixedLenFeature([], tf.string),
        "label": tf.io.FixedLenFeature([], tf.int64),
    }
    parsed = tf.io.parse_single_example(proto, feature_spec)
    image = tf.image.resize(tf.image.decode_jpeg(parsed["img_raw"]), (32, 32))
    return image, parsed["label"]


# Rebuild an image dataset from the TFRecord file written above.
data7 = (
    tf.data.TFRecordDataset("./cifar2.tfrecord")
    .map(parse_example)
    .shuffle(100)
)

print("------ tf record dataset -------")
plt.figure(figsize=(6, 6))
# Preview nine decoded images; rescale to [0, 1] for imshow.
for idx, (image, target) in enumerate(data7.take(9)):
    axes = plt.subplot(3, 3, idx + 1)
    axes.imshow((image/255).numpy())
    axes.set_title("label {}".format(target))
plt.show()


def main():
    """Placeholder entry point; all demos in this file run at import time."""
    return None


if __name__ == "__main__":
    # Entry-point guard; main() is currently a no-op — the demos above
    # already execute at import time.
    main()

