OSError: Error no file named pytorch_model.bin, tf_model.h5, model.ckpt.index or flax_model.msgpack found in directory /path/to/local/model.

#3 opened by ibrim

import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

# Load the tokenizer and model from the local directory.
tokenizer = AutoTokenizer.from_pretrained("/raid/users/Beluga", use_fast=False)
model = AutoModelForCausalLM.from_pretrained(
    "/raid/users/Beluga",
    torch_dtype=torch.float16,
    low_cpu_mem_usage=True,
    trust_remote_code=True,
    device_map="auto",
)

# Stable Beluga prompt format: system prompt, then a user turn, then the assistant turn.
system_prompt = "### System:\nYou are Stable Beluga 13B, an AI that follows instructions extremely well. Help as much as you can. Remember, be safe, and don't do anything illegal.\n\n"
message = "Write me a poem please"
prompt = f"{system_prompt}### User: {message}\n\n### Assistant:\n"

inputs = tokenizer(prompt, return_tensors="pt").to("cuda")
output = model.generate(**inputs, do_sample=True, top_p=0.95, top_k=0, max_new_tokens=256)

print(tokenizer.decode(output[0], skip_special_tokens=True))
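
The OSError at the top means from_pretrained could not find any weight file it recognizes (pytorch_model.bin, a sharded *.bin set with its index, tf_model.h5, model.ckpt.index, or flax_model.msgpack) inside the local directory, so the copy or download there is probably incomplete. Below is a minimal sketch for checking the directory and re-fetching the full snapshot; it assumes the local path /raid/users/Beluga from the code above and the Hub repo id stabilityai/StableBeluga-13B (adjust both to your setup), and that your huggingface_hub version is recent enough to support snapshot_download's local_dir argument.

import os
from huggingface_hub import snapshot_download

model_dir = "/raid/users/Beluga"

# See what the directory actually contains; the loader needs the weight
# shards plus their index file, not just config.json and the tokenizer files.
print(sorted(os.listdir(model_dir)))

# Re-download everything from the Hub into the same directory if the
# weights are missing or incomplete.
snapshot_download(repo_id="stabilityai/StableBeluga-13B", local_dir=model_dir)

Once the snapshot finishes, the from_pretrained calls above should find the weights without any code changes.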
