# -*- coding:utf-8 -*-

from utils.io_utils import get_yml, convert_abspath
from utils.data_utils import dict_merge
from utils.log_utils import ClampLog, log_debug
from utils.data_utils import DictObj
from config.glob.global_pool import global_pool

"""
加载配置
"""


def validate_config(config):
    """
    Validate the merged configuration object.

    Uses ``assert`` so that a failed check raises ``AssertionError`` with a
    message naming the missing key (existing callers rely on this behavior).

    :param config: DictObj-style config; missing keys read as falsy values
    :return: True when every required key is present
    :raises AssertionError: when a required key is missing
    """
    with ClampLog('validating config'):
        # "net" is always required
        assert config.net and config.net.module, \
            '"net" and "net.module" have to exist in config'
        # all four xs/ys shape and dtype entries are required
        assert config.xs_shape and config.ys_shape, \
            '"xs_shape" and "ys_shape" have to exist in config'
        assert config.xs_dtype and config.ys_dtype, \
            '"xs_dtype" and "ys_dtype" have to exist in config'
        # "batch_size" is always required
        assert config.batch_size, \
            '"batch_size" has to exist in config'
        # "embed" is optional, but if present it must be fully specified:
        # either a pretrained model, or a dataset together with a size
        if config.embed:
            assert config.embed.module, \
                'If "embed" exists in config, "embed.module" has to exist'
            assert config.embed.model or (config.embed.dataset and config.embed.size), \
                'If "embed" exists in config, "embed.model" has to exist or' \
                ' both "embed.dataset", "embed.size" have to exist'
        # training runs require the reader/epoch/save/loss/optimizer/lr section
        if config.is_train:
            assert config.reader and config.reader.dataset and config.reader.module, \
                'If train, "reader", "reader.dataset" and "reader.module" have to exist in config'
            assert config.epoch, \
                'If train, "epoch" has to exist in config'
            assert config.save, \
                'If train, "save" has to exist in config'
            assert config.loss and config.loss.reduction, \
                'If train, "loss" and "loss.reduction" have to exist in config'
            if config.loss.self_losser:
                # custom loss implementation supplied by the user
                assert config.loss.losser, \
                    'If train, "loss.self_losser"=true, "loss.losser" has to exist in config'
            else:
                # built-in loss selected by name
                assert config.loss.name, \
                    'If train, "loss.self_losser"=false, "loss.name" has to exist in config'
            assert config.optimizer and config.optimizer.name, \
                'If train, "optimizer" and "optimizer.name" have to exist in config'
            assert config.learning_rate and config.learning_rate.name and config.learning_rate.value, \
                'If train, "learning_rate", "learning_rate.name" and "learning_rate.value" have to exist in config'
            if config.learning_rate.name == 'exp_decay':
                # exponential decay schedule needs both decay parameters
                assert config.learning_rate.decay_rate and config.learning_rate.decay_step, \
                    'If "learning_rate.name"="exp_decay", "learning_rate.decay_rate" and' \
                    ' "learning_rate.decay_step" have to exist in config'
        else:
            # prediction runs only need the mode and a model to load
            assert config.predict_mod, \
                'If predict, "predict_mod" has to exist in config'
            assert config.load_model_dir, \
                'If predict, "load_model_dir" has to exist in config'
        return True


def load_config(path, is_train):
    """
    Load the user config, merge it onto the defaults, validate, and publish.

    :param path: path to the user-supplied yml config file
    :param is_train: whether this run is a training run
    """
    with ClampLog('loading config'):
        # user settings take precedence over the shipped default config
        merged = dict_merge(
            get_yml(path),
            get_yml(convert_abspath('config/core/default/default_train.yml')),
        )
        log_debug(merged, form='yml')
        config = DictObj(merged)
        config.is_train = is_train
        # only a config that passes validation becomes globally visible
        if validate_config(config):
            global_pool.config = config
