import os
import sys
from pathlib import Path
import yaml

# Project root directory: the parent of the directory containing this module.
BASE_DIR = Path(__file__).resolve().parent.parent


class ImproperlyConfigured(Exception):
    """Raised when the server configuration is missing or invalid."""


# Absolute path to the YAML configuration file at the project root.
CONFIG_PATH = os.path.join(BASE_DIR, 'config.yaml')
# Load settings from the configuration file.
if os.path.exists(CONFIG_PATH):
    with open(CONFIG_PATH, "r") as f:
        # yaml.load accepts the stream directly; FullLoader avoids the
        # arbitrary-object construction of the legacy default loader.
        # NOTE(review): config.yaml is assumed to be a trusted local file;
        # switch to yaml.safe_load if that ever changes.
        config_dict = yaml.load(f, Loader=yaml.FullLoader)
        if config_dict is None:
            # An empty YAML file parses to None; normalize to an empty dict.
            config_dict = {}
else:
    sys.stderr.write(f"请确认配置文件{CONFIG_PATH}是否存在!! 配置文件信息请见 README.MD#Configuration\n")
    # Bug fix: previously config_dict was left undefined here, so the first
    # _get_config() call raised NameError instead of ImproperlyConfigured.
    config_dict = {}


def _get_config(field, default=None):
    """Return the value of *field* from the loaded config, or *default*.

    Raises:
        ImproperlyConfigured: if *field* is absent and no default was given
            (i.e. the field is mandatory).

    NOTE(review): because ``None`` doubles as the "mandatory" sentinel, an
    optional field cannot have an explicit default of ``None``.
    """
    # Idiom fix: membership test directly on the dict, not on .keys().
    if field in config_dict:
        return config_dict[field]
    if default is None:
        raise ImproperlyConfigured(f"缺失必须的配置信息 {field}")
    # Field missing but optional: warn on stderr and fall back to the default.
    sys.stderr.write(f"缺失配置信息{field}，现使用默认值{default}.\n")
    return default


# Optional settings (a default is supplied if the config omits them).
# NOTE(review): "LOGIN_LEVEL" looks like it may be a misspelling of
# "LOG_LEVEL" given its "debug" default — confirm against consumers.
LOGIN_LEVEL = _get_config("LOGIN_LEVEL", "debug")
# Sequence-length limits for code and natural-language inputs.
MAX_CODE_LENGTH = _get_config("MAX_CODE_LENGTH", 500)
MAX_NL_LENGTH = _get_config("MAX_NL_LENGTH", 50)
MIN_NL_LENGTH = _get_config("MIN_NL_LENGTH", 1)
# Special vocabulary tokens (unknown / padding / begin / end of sequence).
UNK_TOKEN = _get_config("UNK_TOKEN", "<unk>")
PAD_TOKEN = _get_config("PAD_TOKEN", "<pad>")
BOS_TOKEN = _get_config("BOS_TOKEN", "<s>")
EOS_TOKEN = _get_config("EOS_TOKEN", "</s>")
# Minimum occurrence count for a token to enter the vocabulary.
VOCAB_MIN_FREQ = _get_config("VOCAB_MIN_FREQ", 2)
# Mandatory settings (no default): _get_config raises ImproperlyConfigured
# if any of these is missing from config.yaml.
BATCH_SIZE = _get_config("BATCH_SIZE")
SEQ2SEQ_PARAM = _get_config("SEQ2SEQ")
CONSUM_PARAM = _get_config("CONSUM")
DEVICE = _get_config("DEVICE")
DEVICE_ID = _get_config("DEVICE_ID", 0)
LR = _get_config("LR", 0.001)
# Resolve DEVICE: anything other than the literal "cpu" is mapped to a
# torch.device, silently degrading to CPU when CUDA is unavailable.
# (Idiom fix: inverted the original `if DEVICE == "cpu": pass / else:`
# empty-branch construct.)
# NOTE(review): when DEVICE == "cpu" it stays a plain str, otherwise it
# becomes a torch.device — downstream code must tolerate both types.
if DEVICE != "cpu":
    import torch

    DEVICE = torch.device("cuda" if torch.cuda.is_available() else "cpu")
