# Burman-GPT / app.py
from transformers import AutoTokenizer, AutoModelForCausalLM
import torch
# Load tokenizer and model
tokenizer = AutoTokenizer.from_pretrained("simbolo-ai/Myanmarsar-GPT")
model = AutoModelForCausalLM.from_pretrained("simbolo-ai/Myanmarsar-GPT")
# Move model to GPU if available
device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
model.to(device)
model.eval()  # inference mode: disables dropout for generation
# Input text
input_text = "Marketing"
input_ids = tokenizer.encode(input_text, return_tensors='pt').to(device)
# Generate output
output = model.generate(
    input_ids,
    max_length=256,
    do_sample=True,
    temperature=0.7,
    top_k=50,
    top_p=0.95,
)
# Decode and print
print(tokenizer.decode(output[0], skip_special_tokens=True))
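
# A minimal sketch (not part of the original script): the same tokenize -> generate -> decode
# steps wrapped in a reusable helper, with gradient tracking disabled during inference.
# The function name `generate_text` and its defaults are assumptions added for illustration.
def generate_text(prompt: str, max_length: int = 256) -> str:
    # Encode the prompt and move it to the same device as the model
    ids = tokenizer.encode(prompt, return_tensors="pt").to(device)
    # No gradients are needed for generation; this saves memory
    with torch.no_grad():
        out = model.generate(
            ids,
            max_length=max_length,
            do_sample=True,
            temperature=0.7,
            top_k=50,
            top_p=0.95,
        )
    return tokenizer.decode(out[0], skip_special_tokens=True)

# Example usage (hypothetical prompt):
# print(generate_text("Marketing"))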