# Provenance: Hugging Face repo by user "hohieu", commit 841b0ff ("init project"), 873 bytes.
# coding: utf8
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer
# Load vinai/PhoGPT-4B and sample an answer for a fixed Vietnamese prompt
# ("write a social essay about traffic safety"), printing the decoded text.
model_path = "vinai/PhoGPT-4B"
device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')
print(device)

# NOTE(review): PhoGPT is MPT-based; from_pretrained may require
# trust_remote_code=True depending on the transformers version — confirm.
model = AutoModelForCausalLM.from_pretrained(model_path)
# BUG FIX: the model must live on the same device as the input tensors,
# otherwise generate() raises a device-mismatch error on CUDA.
model.to(device)
model.eval()  # inference mode: disables dropout etc.

tokenizer = AutoTokenizer.from_pretrained(model_path)
inputs = tokenizer(
    '### Câu hỏi: Viết bài văn nghị luận xã hội về an toàn giao thông \n### Trả lời:',
    return_tensors='pt',
).to(device)
print(inputs)

# No gradients are needed for generation — saves memory and time.
with torch.no_grad():
    outputs = model.generate(
        inputs=inputs["input_ids"],              # already on `device`
        attention_mask=inputs["attention_mask"],  # already on `device`
        do_sample=True,       # stochastic sampling (temperature/top-k/top-p below)
        temperature=1.0,
        top_k=50,
        top_p=0.9,
        max_new_tokens=1024,
        eos_token_id=tokenizer.eos_token_id,
        pad_token_id=tokenizer.pad_token_id,
    )

# skip_special_tokens drops markers like <s>/</s> from the printed answer.
response = tokenizer.decode(outputs[0], skip_special_tokens=True)
print(response)