# ################################
# Model: GPT2LMHeadModel + NLL
# Authors:
# Pooneh Mousavi 2023
# ################################

# URL for the gpt2 model
gpt_hub: gpt2
gpt_folder: gpt2_result/save/gpt_checkpoint

# Special tokens
bos_token: "BOS"
eos_token: "EOS"
system_token: "SPK_1"
user_token: "SPK_2"

tokenizer: !ref <gpt_hub>

additional_special_tokens: [
    !ref <system_token>,
    !ref <user_token>
]

special_tokens: [
    !ref <bos_token>,
    !ref <eos_token>,
    !ref <system_token>,
    !ref <user_token>
]

attr_to_special_tokens:
    "bos_token": !ref <bos_token>
    "eos_token": !ref <eos_token>
    "additional_special_tokens": !ref <additional_special_tokens>

# history_window, i.e. how many user-system exchanges to consider as context.
max_history: 5

# Decoder settings
freeze_gptmodel: True
num_beams: 3
max_new_tokens: 50
top_k: 45
top_p: 0.9

# GPT model
model: !new:custom.HuggingFaceGPT_expanded
    source: !ref <gpt_hub>
    freeze: !ref <freeze_gptmodel>
    save_path: !ref <gpt_folder>
    max_new_tokens: !ref <max_new_tokens>
    num_beams: !ref <num_beams>
    top_k: !ref <top_k>
    top_p: !ref <top_p>

# Masks
padding_mask: !name:speechbrain.lobes.models.transformer.Transformer.get_key_padding_mask

pretrainer: !new:speechbrain.utils.parameter_transfer.Pretrainer
    loadables:
        model: !ref <model>

modules:
    model: !ref <model>
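
# ################################
# Usage (sketch)
# ################################
# A minimal sketch of how a hyperparams file like this one is typically
# consumed with HyperPyYAML, kept in comments so the file stays valid YAML.
# It assumes the companion custom.py defining HuggingFaceGPT_expanded is
# importable, that the wrapper exposes the underlying transformers model as
# `.model` (as SpeechBrain's HuggingFaceGPT lobe does), and that the file
# name and checkpoint source below are placeholders.
#
#   from hyperpyyaml import load_hyperpyyaml
#   from transformers import GPT2Tokenizer
#
#   with open("hyperparams.yaml") as fin:
#       hparams = load_hyperpyyaml(fin)
#
#   # Build the tokenizer from the hub name and register the special tokens
#   # declared above; the model's embedding table must then be resized to
#   # match the enlarged vocabulary.
#   tokenizer = GPT2Tokenizer.from_pretrained(hparams["tokenizer"])
#   tokenizer.add_special_tokens(hparams["attr_to_special_tokens"])
#   hparams["model"].model.resize_token_embeddings(len(tokenizer))
#
#   # Fetch the checkpoint listed under `loadables` and load it into the
#   # instantiated model.
#   pretrainer = hparams["pretrainer"]
#   pretrainer.collect_files(default_source="path/to/pretrained")  # placeholder source
#   pretrainer.load_collected()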