# xglm-1b/app.py
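# Minimal Gradio Space: free-form text generation with Facebook's multilingual XGLM model.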
import torch
import gradio as gr
from transformers import XGLMTokenizer, XGLMForCausalLM

# Load the tokenizer and model weights (the Space is named xglm-1b, but the checkpoint used is facebook/xglm-1.7B)
tokenizer = XGLMTokenizer.from_pretrained("facebook/xglm-1.7B")
model = XGLMForCausalLM.from_pretrained("facebook/xglm-1.7B", low_cpu_mem_usage=False)

def generate(text):
    # Tokenize the prompt and sample a continuation of up to 2048 total tokens
    inputs = tokenizer(text, return_tensors="pt")
    outputs = model.generate(**inputs, do_sample=True, max_length=2048)
    return tokenizer.decode(outputs[0], skip_special_tokens=True)

# Plain text-in / text-out Gradio interface
gr.Interface(fn=generate, inputs="text", outputs="text").launch()