import sys

sys.path.append("..")

from config import config
import nni
import torch


def get_default_parameters():
    """Return the baseline hyper-parameter dict, including the knobs NNI tunes.

    These values act as defaults; the NNI tuner's trial parameters are
    expected to be layered on top of them by the caller.
    """
    return {
        # base
        "cuda": 2,
        "seed": 2022,
        # sampler
        "sampler": "uniform",
        # loss: one of bpr_for_sample / bpr_for_user
        "loss": 'bpr_for_user',
        # training
        "batch_size": 500,
        "lr": 0.05,
        "train_way": "per_user",  # per_sample or per_user
        "is_use_early_stop": True,
    }


if __name__ == "__main__":
    # For NNI tuning/testing use only.
    from main import main

    try:
        # Start from the hard-coded defaults, then overlay whatever
        # hyper-parameters the NNI tuner hands us for this trial.
        trial_params = nni.get_next_parameter()
        config.update(get_default_parameters())
        config.update(trial_params)
        # Logging placeholder kept from the original (currently disabled):
        # log_file_name = params["project_name"] + "_" + current_time + ".log"
        # logger = logCof(logger, "../log/", log_file_name)
        # logger.info(params)
        main(config, None)
    except Exception as err:
        # logger.exception(err)  # re-enable once the logger is wired up
        raise