# -*- coding: utf-8 -*-  
'''
Parser for resources/conf.yml

@author: luoyi
Created on 2021-10-02
'''
import yaml
import os
import sys


#    Project root directory (every other relative path is joined onto it).
#    NOTE(review): derived by splitting this file's absolute path on the
#    literal substring 'NLU' — assumes the project directory is named "NLU"
#    and that "NLU" does not occur earlier in the path; confirm on deploy.
ROOT_PATH = os.path.abspath(os.path.dirname(__file__)).split('NLU')[0]
ROOT_PATH = ROOT_PATH + "NLU"


#    Absolute path of the YAML configuration file.
CONF_PATH = ROOT_PATH + "/resources/conf.yml"
#    Load the conf.yml configuration file.
def load_conf_yaml(yaml_path=CONF_PATH):
    '''Read the YAML config file and build one typed parser per section.

    @param yaml_path: YAML file to read (defaults to CONF_PATH)
    @return: tuple (raw_config_dict, Dataset_baiduConfParser,
                    DictionaryConfParser, BertConfParser, NluConfParser)
    @raise KeyError: when a required section/key is missing from the file
    '''
    print('加载配置文件:' + yaml_path)
    #    Context manager guarantees the handle is closed even when
    #    yaml.safe_load raises (the previous code never closed the file).
    with open(yaml_path, 'r', encoding='utf-8') as f:
        c = yaml.safe_load(f)

    #    Each key is forwarded explicitly so a missing entry fails fast
    #    with a KeyError naming the offending key (and extra YAML keys
    #    are ignored rather than breaking the constructors).
    db = c['dataset_baidu']
    dataset_baidu = Dataset_baiduConfParser(
                schemas_path=db['schemas_path'],
                train_data_path=db['train_data_path'],
                val_data_path=db['val_data_path'],
                rel_id_path=db['rel_id_path'],
                id_rel_path=db['id_rel_path'],
                question_schemas_path=db['question_schemas_path'],
                question_train_data_path=db['question_train_data_path'],
                question_val_data_path=db['question_val_data_path'],
                tfrecord_train_data_path=db['tfrecord_train_data_path'],
                tfrecord_val_data_path=db['tfrecord_val_data_path'],
                batch_size=db['batch_size'],
                epochs=db['epochs'],
                shuffle_buffer_rate=db['shuffle_buffer_rate'],
                )

    dc = c['dictionary']
    dictionary = DictionaryConfParser(
                dictionaries_chchars_path=dc['dictionaries_chchars_path'],
                dictionaries_word2dict_pkl_path=dc['dictionaries_word2dict_pkl_path'],
                dictionaries_dict2word_pkl_path=dc['dictionaries_dict2word_pkl_path'],
                )

    cb = c['bert']
    bert = BertConfParser(
                neg_prob=cb['neg_prob'],
                rewrite_prob=cb['rewrite_prob'],
                rewrite_max=cb['rewrite_max'],
                rewrite_mask=cb['rewrite_mask'],
                rewrite_original=cb['rewrite_original'],
                rewrite_random=cb['rewrite_random'],
                max_sen_len=cb['max_sen_len'],
                max_sen=cb['max_sen'],
                d_model=cb['d_model'],
                n_head_attention=cb['n_head_attention'],
                dropout_rate=cb['dropout_rate'],
                f_model=cb['f_model'],
                n_block=cb['n_block'],
                lamud_loss_pre_nsp=cb['lamud_loss_pre_nsp'],
                lamud_loss_pre_mlm=cb['lamud_loss_pre_mlm'],
                learning_rate=cb['learning_rate'],
                model_save_weights_path=cb['model_save_weights_path'],
                tensorboard_dir_path=cb['tensorboard_dir_path'],
                )

    cn = c['nlu']
    nlu = NluConfParser(
                max_sen_len=cn['max_sen_len'],
                loss_lamda_crf=cn['loss_lamda_crf'],
                loss_lamda_inform=cn['loss_lamda_inform'],
                learning_rate=cn['learning_rate'],
                tensorboard_dir_path=cn['tensorboard_dir_path'],
                model_save_weights_path=cn['model_save_weights_path'],
                )

    return c, dataset_baidu, dictionary, bert, nlu



class Dataset_baiduConfParser:
    '''Accessor object for the `dataset_baidu` section of conf.yml.

    Path-valued settings are resolved to absolute paths on read via
    convert_to_abspath; scalar hyperparameters are returned as stored.
    '''
    def __init__(self,
                schemas_path='temp/data/baidu/original/all_50_schemas',
                train_data_path='temp/data/baidu/original/train_data.json',
                val_data_path='temp/data/baidu/original/dev_data.json',
                rel_id_path='temp/data/baidu/relationships/rel_id.pkl',
                id_rel_path='temp/data/baidu/relationships/id_rel.pkl',
                question_schemas_path='temp/data/baidu/original/nlu_all_50_schemas',
                question_train_data_path='temp/data/baidu/question/train_data.json',
                question_val_data_path='temp/data/baidu/question/val_data.json',
                tfrecord_train_data_path='temp/data/baidu/tfrecord/train_data.tfrecord',
                tfrecord_val_data_path='temp/data/baidu/tfrecord/val_data.tfrecord',
                batch_size=2,
                epochs=10,
                shuffle_buffer_rate=-1,
                ):
        #    Stash every argument as a leading-underscore attribute.
        for attr_name, attr_value in (
                ('schemas_path', schemas_path),
                ('train_data_path', train_data_path),
                ('val_data_path', val_data_path),
                ('rel_id_path', rel_id_path),
                ('id_rel_path', id_rel_path),
                ('question_schemas_path', question_schemas_path),
                ('question_train_data_path', question_train_data_path),
                ('question_val_data_path', question_val_data_path),
                ('tfrecord_train_data_path', tfrecord_train_data_path),
                ('tfrecord_val_data_path', tfrecord_val_data_path),
                ('batch_size', batch_size),
                ('epochs', epochs),
                ('shuffle_buffer_rate', shuffle_buffer_rate),
                ):
            setattr(self, '_' + attr_name, attr_value)

    #    Path getters — resolved against the project root on every access.
    def get_schemas_path(self):
        return convert_to_abspath(self._schemas_path)

    def get_train_data_path(self):
        return convert_to_abspath(self._train_data_path)

    def get_val_data_path(self):
        return convert_to_abspath(self._val_data_path)

    def get_rel_id_path(self):
        return convert_to_abspath(self._rel_id_path)

    def get_id_rel_path(self):
        return convert_to_abspath(self._id_rel_path)

    def get_question_schemas_path(self):
        return convert_to_abspath(self._question_schemas_path)

    def get_question_train_data_path(self):
        return convert_to_abspath(self._question_train_data_path)

    def get_question_val_data_path(self):
        return convert_to_abspath(self._question_val_data_path)

    def get_tfrecord_train_data_path(self):
        return convert_to_abspath(self._tfrecord_train_data_path)

    def get_tfrecord_val_data_path(self):
        return convert_to_abspath(self._tfrecord_val_data_path)

    #    Scalar getters — returned exactly as configured.
    def get_batch_size(self):
        return self._batch_size

    def get_epochs(self):
        return self._epochs

    def get_shuffle_buffer_rate(self):
        return self._shuffle_buffer_rate

class DictionaryConfParser:
    '''Accessor object for the `dictionary` section of conf.yml.

    All three settings are file paths and are resolved to absolute
    paths (via convert_to_abspath) when read.
    '''
    def __init__(self,
                dictionaries_chchars_path='temp/dictionaries/chchars_strokes.txt',
                dictionaries_word2dict_pkl_path='temp/dictionaries/word2dict.pkl',
                dictionaries_dict2word_pkl_path='temp/dictionaries/dict2word.pkl',
                ):
        (self._dictionaries_chchars_path,
         self._dictionaries_word2dict_pkl_path,
         self._dictionaries_dict2word_pkl_path) = (
            dictionaries_chchars_path,
            dictionaries_word2dict_pkl_path,
            dictionaries_dict2word_pkl_path,
        )

    def get_dictionaries_chchars_path(self):
        return convert_to_abspath(self._dictionaries_chchars_path)

    def get_dictionaries_word2dict_pkl_path(self):
        return convert_to_abspath(self._dictionaries_word2dict_pkl_path)

    def get_dictionaries_dict2word_pkl_path(self):
        return convert_to_abspath(self._dictionaries_dict2word_pkl_path)

class BertConfParser:
    '''Accessor object for the `bert` section of conf.yml.

    Holds BERT pre-training hyperparameters; the two path settings are
    resolved to absolute paths (via convert_to_abspath) when read.
    '''
    def __init__(self,
                neg_prob=0.5,
                rewrite_prob=0.15,
                rewrite_max=10,
                rewrite_mask=0.8,
                rewrite_original=0.1,
                rewrite_random=0.1,
                max_sen_len=128,
                max_sen=2,
                d_model=768,
                n_head_attention=12,
                dropout_rate=0.1,
                f_model=3072,
                n_block=12,
                lamud_loss_pre_nsp=1,
                lamud_loss_pre_mlm=1,
                learning_rate=0.0001,
                model_save_weights_path='temp/models/bert',
                tensorboard_dir_path='logs/bert/tensorboard',
                ):
        #    Sampling / token-rewrite probabilities
        self._neg_prob, self._rewrite_prob = neg_prob, rewrite_prob
        self._rewrite_max, self._rewrite_mask = rewrite_max, rewrite_mask
        self._rewrite_original, self._rewrite_random = rewrite_original, rewrite_random
        #    Architecture dimensions
        self._max_sen_len, self._max_sen = max_sen_len, max_sen
        self._d_model, self._n_head_attention = d_model, n_head_attention
        self._dropout_rate, self._f_model = dropout_rate, f_model
        self._n_block = n_block
        #    Loss weighting and optimisation
        self._lamud_loss_pre_nsp = lamud_loss_pre_nsp
        self._lamud_loss_pre_mlm = lamud_loss_pre_mlm
        self._learning_rate = learning_rate
        #    Output locations
        self._model_save_weights_path = model_save_weights_path
        self._tensorboard_dir_path = tensorboard_dir_path

    def get_neg_prob(self):
        return self._neg_prob

    def get_rewrite_prob(self):
        return self._rewrite_prob

    def get_rewrite_max(self):
        return self._rewrite_max

    def get_rewrite_mask(self):
        return self._rewrite_mask

    def get_rewrite_original(self):
        return self._rewrite_original

    def get_rewrite_random(self):
        return self._rewrite_random

    def get_max_sen_len(self):
        return self._max_sen_len

    def get_max_sen(self):
        return self._max_sen

    def get_d_model(self):
        return self._d_model

    def get_n_head_attention(self):
        return self._n_head_attention

    def get_dropout_rate(self):
        return self._dropout_rate

    def get_f_model(self):
        return self._f_model

    def get_n_block(self):
        return self._n_block

    def get_lamud_loss_pre_nsp(self):
        return self._lamud_loss_pre_nsp

    def get_lamud_loss_pre_mlm(self):
        return self._lamud_loss_pre_mlm

    def get_learning_rate(self):
        return self._learning_rate

    #    Path getters — resolved against the project root on access.
    def get_model_save_weights_path(self):
        return convert_to_abspath(self._model_save_weights_path)

    def get_tensorboard_dir_path(self):
        return convert_to_abspath(self._tensorboard_dir_path)

class NluConfParser:
    '''Accessor object for the `nlu` section of conf.yml.

    Holds NLU-model hyperparameters; the two path settings are resolved
    to absolute paths (via convert_to_abspath) when read.
    '''
    def __init__(self,
                max_sen_len=128,
                loss_lamda_crf=1,
                loss_lamda_inform=1,
                learning_rate=0.001,
                tensorboard_dir_path='logs/nlu/tensorboard',
                model_save_weights_path='temp/models/nlu',
                ):
        self._max_sen_len = max_sen_len
        self._loss_lamda_crf = loss_lamda_crf
        self._loss_lamda_inform = loss_lamda_inform
        self._learning_rate = learning_rate
        self._tensorboard_dir_path = tensorboard_dir_path
        self._model_save_weights_path = model_save_weights_path

    def get_max_sen_len(self):
        return self._max_sen_len

    def get_loss_lamda_crf(self):
        return self._loss_lamda_crf

    def get_loss_lamda_inform(self):
        return self._loss_lamda_inform

    def get_learning_rate(self):
        return self._learning_rate

    #    Path getters — resolved against the project root on access.
    def get_tensorboard_dir_path(self):
        return convert_to_abspath(self._tensorboard_dir_path)

    def get_model_save_weights_path(self):
        return convert_to_abspath(self._model_save_weights_path)



#    Resolve a configured path to an absolute path.
def convert_to_abspath(path):
    '''Resolve a configured path against the project root.

    Paths that already start with "/" are returned unchanged; anything
    else is prefixed with ROOT_PATH + "/".

    NOTE(review): only POSIX-style absolute paths ("/...") are detected;
    confirm behaviour if Windows paths are ever used.
    '''
    return path if path.startswith("/") else ROOT_PATH + "/" + path
    
#    Ensure the parent directory of a file exists, creating it if needed.
def mkfiledir_ifnot_exises(filepath):
    '''Create the parent directory of `filepath` if it does not exist.

    @param filepath: path of a file whose parent directory must exist
    '''
    _dir = os.path.dirname(filepath)
    #    `_dir` is '' for a bare filename — nothing to create (the old
    #    code crashed with FileNotFoundError on os.makedirs('')).
    #    exist_ok avoids the old check-then-create race when another
    #    process creates the directory between the two calls.
    if _dir and not os.path.exists(_dir):
        os.makedirs(_dir, exist_ok=True)
#    Ensure a directory exists, creating it if needed.
def mkdir_ifnot_exises(_dir):
    '''Create directory `_dir` if it does not already exist.

    @param _dir: directory path
    '''
    if (not os.path.exists(_dir)):
        #    exist_ok closes the race between the exists() check above and
        #    the creation (another process may create the dir in between).
        os.makedirs(_dir, exist_ok=True)


#    Write a configuration dict to a YAML file.
def write_conf(_dict, file_path):
    '''Dump `_dict` as YAML to `file_path`.

    The path is resolved against the project root, the parent directory
    is created when missing, and any existing file is replaced.

    @param _dict: configuration dict to serialise
    @param file_path: target file path (relative paths are resolved
                      against the project root)
    '''
    file_path = convert_to_abspath(file_path)
    mkfiledir_ifnot_exises(file_path)

    #    Delete any stale file first (keeps the original replace
    #    semantics, e.g. breaking a symlink instead of writing through it).
    if (os.path.exists(file_path)):
        os.remove(file_path)

    #    `with` closes the handle even if safe_dump raises (the previous
    #    code leaked the handle on a dump error).
    with open(file_path, mode='w', encoding='utf-8') as fw:
        yaml.safe_dump(_dict, fw)


#    Append a path to sys.path.
def append_sys_path(path):
    '''Resolve `path` against the project root and append it to sys.path.

    @param path: path to add (relative paths are resolved via
                 convert_to_abspath)
    '''
    abs_path = convert_to_abspath(path)
    sys.path.append(abs_path)
    #    NOTE(review): prints the entire sys.path — looks like leftover
    #    debug output; kept for behavioural parity.
    print(sys.path)


#    Module-level config objects exposed to importers.
#    NOTE: conf.yml is read once, as a side effect, at import time.
ALL_DICT, DATASET_BAIDU, DICTIONARY, BERT, NLU = load_conf_yaml()


