# -*- coding: utf-8 -*-  
'''
Converts the output of baidu_nlu_generator into TFRecord files.

Created on 2021-10-01
@author: luoyi
'''
import tensorflow as tf

import utils.conf as conf
from utils.dictionary import Dictionaries
from utils.relationship import Relationship


#    TFRecord writer utility for the baidu_nlu dataset
class TFRecordWriter:
    '''Serializes question samples produced by baidu_nlu_generator into a TFRecord file.'''
    def __init__(self, q_iter=None, dictionary=Dictionaries.instance(), relationship=Relationship.instance()):
        '''
            @param q_iter: data source exposing data_iterator(fpath, count) yielding
                           (sen, predicate, subject_, object_) tuples
            @param dictionary: word <-> id dictionary
            @param relationship: relation-name / tag to id mapper
        '''
        self._q_iter = q_iter
        self._dictionary = dictionary
        self._relationship = relationship
    
    #    Iterate the source data and write every sample to a TFRecord file
    def write(self, 
              fpath=conf.DATASET_BAIDU.get_question_train_data_path(), 
              tfrecord_fpath=conf.DATASET_BAIDU.get_tfrecord_train_data_path(), 
              count=-1):
        '''
            @param fpath: path of the source question data
            @param tfrecord_fpath: output TFRecord path (parent dir created if missing)
            @param count: maximum number of samples to take (-1 = all)
            @return: number of samples written
        '''
        conf.mkfiledir_ifnot_exises(tfrecord_fpath)
        
        #    context manager guarantees the writer is flushed and closed even if
        #    the iterator or serialization raises (the old code leaked the handle on error)
        num_written = 0
        with tf.io.TFRecordWriter(tfrecord_fpath) as fw:
            for sen, predicate, subject_, object_ in self._q_iter.data_iterator(fpath=fpath, count=count):
                fw.write(self.tfrecord_sample(sen, predicate, subject_, object_))
                num_written += 1
        return num_written
    
    #    Build one serialized tf.train.Example
    def tfrecord_sample(self, sen, predicate, subject_, object_):
        '''
            @param sen: question sentence (character sequence)
            @param predicate: relation name
            @param subject_: (word, loc or None, type, q) — q truthy means the question asks about the subject
            @param object_: (word, loc or None, type, q) — q truthy means the question asks about the object
            @return: serialized tf.train.Example bytes
        '''
        #    split the sentence into characters, map to word ids, and wrap with [CLS]/[EOS]
        sen = list(sen)
        wids = self._dictionary.word2idx_slist(['[CLS]'] + sen + ['[EOS]'])
        #    single-sentence input: every position belongs to segment 1
        senids = [1] * len(wids)
        
        #    relation name -> relation id
        rid = self._relationship.rel_to_id(predicate)
        
        #    sequence-labelling ids for subject/object spans; default 0 is the [PAD] tag
        pids = [0] * len(sen)
        #    label the subject span only when the question is NOT about the subject
        if (not subject_[3]):
            word_s = subject_[0]
            loc_s = subject_[1]
            st = subject_[2]
            stids = self._relationship.st_to_id(word_s, st)
            #    loc is an inclusive [start, end] pair — NOTE(review): assumed from the +1; confirm against generator
            pids[loc_s[0] : loc_s[1]+1] = stids
        #    otherwise label the object span when the question is NOT about the object
        elif (not object_[3]):
            word_o = object_[0]
            loc_o = object_[1]
            ot = object_[2]
            otids = self._relationship.ot_to_id(word_o, ot)
            pids[loc_o[0] : loc_o[1]+1] = otids
        #    prepend/append the tags for [CLS] (0) and [EOS] (5) so pids aligns with wids
        pids = [0] + pids + [5]
        
        feature = {'X_wids': tf.train.Feature(int64_list=tf.train.Int64List(value=wids)),                       #    word id sequence fed to BERT
                   'X_senids': tf.train.Feature(int64_list=tf.train.Int64List(value=senids)),                   #    segment id sequence fed to BERT
                   
                   'Y_rid': tf.train.Feature(int64_list=tf.train.Int64List(value=[rid])),                       #    intent id (relation id)
                   'Y_pids': tf.train.Feature(int64_list=tf.train.Int64List(value=pids)),                       #    subject/object entity tags, same length as wids
                   }
        sample = tf.train.Example(features=tf.train.Features(feature=feature))
        return sample.SerializeToString()


#    Dataset reader
class TFRecordReader:
    '''Reads the TFRecord files back into (X, Y) tensor datasets.'''
    def __init__(self, max_sen_len=conf.NLU.get_max_sen_len()):
        '''
            @param max_sen_len: fixed sequence length every tensor is truncated/padded to
        '''
        self._max_sen_len = max_sen_len
        
        self.init_features()
    
    def init_features(self):
        #    TFRecord field schema (mirrors TFRecordWriter.tfrecord_sample)
        self._features = {
                'X_wids': tf.io.VarLenFeature(tf.int64),
                'X_senids': tf.io.VarLenFeature(tf.int64),
                
                'Y_rid': tf.io.VarLenFeature(tf.int64),
                'Y_pids': tf.io.VarLenFeature(tf.int64),
            }
    
    #    Truncate or right-pad a 1-D tensor to self._max_sen_len
    def _fit_length(self, t, pad_value=0):
        '''
            @param t: 1-D int64 tensor
            @param pad_value: value used to pad on the right when t is too short
            @return: Tensor(max_sen_len, )
        '''
        max_sen_len = self._max_sen_len
        #    NOTE(review): len() needs a statically known leading dim; fine in eager
        #    mode but may fail inside a graph-traced Dataset.map — confirm at runtime
        if (len(t) > max_sen_len): return t[:max_sen_len]
        if (len(t) < max_sen_len):
            #    direct 1-D pad; equivalent to the old stack/zeros_like/pad/[0,:] trick
            return tf.pad(t, paddings=[[0, max_sen_len - len(t)]], constant_values=pad_value)
        return t
    
    #    Parse one serialized Example into fixed-length (X, Y) tensors
    def tfrecord_read(self, sample):
        '''
            @param sample: serialized tf.train.Example
            @return: (X, Y) where
                     X = Tensor(2, max_sen_len): rows are word ids and segment ids
                     Y = Tensor(2, max_sen_len): rows are the intent id and the entity tags
        '''
        parsed = tf.io.parse_single_example(sample, features=self._features)
        
        #    X rows padded with 0 ([PAD] id)
        X_wids = self._fit_length(tf.sparse.to_dense(parsed['X_wids']), pad_value=0)
        X_senids = self._fit_length(tf.sparse.to_dense(parsed['X_senids']), pad_value=0)
        X = tf.stack([X_wids, X_senids], axis=0)
        
        #    Y rows padded with -1 so padding positions can be masked out downstream.
        #    Fix: Y rows are now also truncated — previously an over-long Y_pids
        #    produced a negative padding amount and tf.pad raised.
        Y_rid = self._fit_length(tf.sparse.to_dense(parsed['Y_rid']), pad_value=-1)
        Y_pids = self._fit_length(tf.sparse.to_dense(parsed['Y_pids']), pad_value=-1)
        Y = tf.stack([Y_rid, Y_pids], axis=0)
        
        return X, Y
    
    #    Build a tf.data pipeline over one TFRecord file
    def tensor_db(self, 
                  tfrecord_path,
                  batch_size=conf.DATASET_BAIDU.get_batch_size(), 
                  epochs=conf.DATASET_BAIDU.get_epochs(),
                  shuffle_buffer_rate=conf.DATASET_BAIDU.get_shuffle_buffer_rate()):
        '''
            @param tfrecord_path: TFRecord file path
            @param batch_size: batch size (<=0 disables batching)
            @param epochs: repeat count (<=0 disables repeat)
            @param shuffle_buffer_rate: shuffle buffer = rate * batch_size (<=0 disables shuffle)
            @return: tf.data.Dataset yielding (X, Y) batches
        '''
        db = tf.data.TFRecordDataset(tfrecord_path)
        db = db.map(self.tfrecord_read)
        
        if (shuffle_buffer_rate > 0): db = db.shuffle(buffer_size=shuffle_buffer_rate * batch_size)
        #    drop_remainder keeps every batch exactly batch_size
        if (batch_size > 0): db = db.batch(batch_size, drop_remainder=True)
        if (epochs > 0): db = db.repeat(epochs)
        return db
    
    #    Training set
    def train_tensor_db(self,
                        tfrecord_path=conf.DATASET_BAIDU.get_tfrecord_train_data_path(),
                        batch_size=conf.DATASET_BAIDU.get_batch_size(), 
                        epochs=conf.DATASET_BAIDU.get_epochs(),
                        shuffle_buffer_rate=conf.DATASET_BAIDU.get_shuffle_buffer_rate()):
        return self.tensor_db(tfrecord_path, batch_size, epochs, shuffle_buffer_rate)
    #    Validation set
    def val_tensor_db(self,
                      tfrecord_path=conf.DATASET_BAIDU.get_tfrecord_val_data_path(),
                      batch_size=conf.DATASET_BAIDU.get_batch_size(), 
                      epochs=conf.DATASET_BAIDU.get_epochs(),
                      shuffle_buffer_rate=conf.DATASET_BAIDU.get_shuffle_buffer_rate()):
        return self.tensor_db(tfrecord_path, batch_size, epochs, shuffle_buffer_rate)
