# EasyPrompt / utils.py
# Author: Trace2333 — "modified files to test remote POST" (commit 115460a)
import os
from transformers import AutoTokenizer, AutoModelForCausalLM
def get_tok_and_model(path_for_model):
    """Load a locally cached tokenizer and causal-LM model.

    Parameters
    ----------
    path_for_model : str
        Filesystem path to a locally cached Hugging Face model directory.

    Returns
    -------
    tuple
        ``(tokenizer, model)`` — the tokenizer configured with left padding
        and a usable pad token, and the ``AutoModelForCausalLM`` instance.

    Raises
    ------
    RuntimeError
        If ``path_for_model`` does not exist (no cached model to load).
    """
    if not os.path.exists(path_for_model):
        raise RuntimeError("no cached model.")
    # Left padding so prompts in a batch are right-aligned for generation.
    tok = AutoTokenizer.from_pretrained(path_for_model, padding_side='left')
    if tok.pad_token_id is None:
        # Default for open-ended generation: reuse EOS as the pad token
        # rather than hard-coding GPT-2's id for every model. Fall back to
        # 50256 (GPT-2's EOS) only when the tokenizer defines no EOS, which
        # preserves the original behavior for the GPT-2 case.
        tok.pad_token_id = tok.eos_token_id if tok.eos_token_id is not None else 50256
    model = AutoModelForCausalLM.from_pretrained(path_for_model)
    return tok, model