from tfrecord import TFReader, TFWriter
from tqdm import tqdm
import glob
import tensorflow as tf

# Feature schema shared by writer_tfrecord and read_tf_record.
# Each entry maps a feature name to (feature kind, value dtype) as expected
# by TFWriter/TFReader: all features are variable-length lists.
tf_dummy_schema = {

    # Tokenized input ids for the model (variable-length int list).
    "input_ids": ("var_len", "int"),
    # 1/0 mask marking real tokens vs padding (variable-length int list).
    "attention_mask": ("var_len", "int"),

    # Segment ids (e.g. sentence A/B) per token (variable-length int list).
    "token_type_ids": ("var_len", "int"),

    # Flattened label tensor; the reader reshapes it back to (128, 57).
    "output": ("var_len", "float")
}


def writer_tfrecord(record_path, tag, model_dir, input_ids, attention_mask,
                    token_type_ids, labels, output_size=128 * 57):
    """Serialize aligned example arrays into a TFRecord file.

    One record is written per example, with the label tensor flattened to a
    1-D float list so it fits the var_len "output" feature in
    ``tf_dummy_schema``. The reader side (``read_tf_record``) reshapes it
    back to (128, 57).

    Args:
        record_path: Directory/path where the TFRecord file is created.
        tag: Dataset tag (e.g. "train") passed through to ``TFWriter``.
        model_dir: Model directory passed through to ``TFWriter``.
        input_ids: Sequence of per-example numpy arrays of token ids.
        attention_mask: Sequence of per-example numpy mask arrays.
        token_type_ids: Sequence of per-example numpy segment-id arrays.
        labels: Sequence of per-example numpy label arrays; each must contain
            exactly ``output_size`` elements.
        output_size: Flattened length of one label tensor. Defaults to
            ``128 * 57`` to match ``read_tf_record``'s reshape.
    """
    tf_writer = TFWriter(tf_dummy_schema, record_path, tag=tag,
                         model_dir=model_dir, overwrite=True)

    # Iterate examples in lockstep; tqdm gives a progress bar over the
    # (assumed equal-length) input arrays.
    for i in tqdm(range(len(input_ids))):

        train_inputs = {
            'input_ids': input_ids[i].astype(int),
            'attention_mask': attention_mask[i].astype(int),
            'token_type_ids': token_type_ids[i].astype(int),
            # Flatten the label tensor so it can be stored as a var_len
            # float feature; .tolist() converts to plain Python floats.
            'output': labels[i].reshape(output_size).astype(float).tolist()
        }

        tf_writer.write_record(train_inputs)


def read_tf_record(record_patern, batch_size):
    """Build a batched tf.data pipeline from TFRecord files.

    Globs ``record_patern`` for record files, reads them with ``TFReader``
    using ``tf_dummy_schema``, and reshapes each flattened "output" feature
    back to its (128, 57) label matrix.

    Args:
        record_patern: Glob pattern (resolved via ``tf.io.gfile.glob``)
            matching the TFRecord files to read.
        batch_size: Batch size passed to ``TFReader.read_record``.

    Returns:
        A ``tf.data.Dataset`` of batched feature dicts whose "output"
        entry has shape (128, 57) per example.
    """

    def label_encode(example):
        # Undo the flattening done on the writer side.
        example["output"] = tf.reshape(example["output"], (128, 57))
        return example

    all_files = tf.io.gfile.glob(record_patern)

    print(record_patern, "All files", all_files)
    tf_reader = TFReader(tf_dummy_schema, all_files)
    dataset = tf_reader.read_record(batch_size)

    # BUG FIX: Dataset.map returns a NEW dataset; the original discarded the
    # result, so labels were never reshaped. Assign it back.
    dataset = dataset.map(label_encode)

    return dataset
