import torch
import gradio as gr
from transformers import GPT2LMHeadModel, GPT2Tokenizer

# Run on GPU if available, otherwise fall back to CPU
device = "cuda" if torch.cuda.is_available() else "cpu"

# Fine-tuned model and tokenizer
trained_tokenizer = GPT2Tokenizer.from_pretrained("Kumarkishalaya/GPT-2-next-word-prediction")
trained_model = GPT2LMHeadModel.from_pretrained("Kumarkishalaya/GPT-2-next-word-prediction").to(device)

# Base GPT-2 model and tokenizer (not used by the demo below)
untrained_tokenizer = GPT2Tokenizer.from_pretrained("gpt2")
untrained_model = GPT2LMHeadModel.from_pretrained("gpt2").to(device)

def generate(commentary_text):
    # Tokenize the prompt and move it to the same device as the model
    input_ids = trained_tokenizer(commentary_text, return_tensors="pt")["input_ids"].to(device)
    # Deterministic beam search: continue the prompt up to 60 tokens
    output = trained_model.generate(input_ids, max_length=60, num_beams=5, do_sample=False)
    return trained_tokenizer.decode(output[0], skip_special_tokens=True)

demo = gr.Interface(fn=generate, inputs="text", outputs="text")
demo.launch()
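# Optional sanity check (hypothetical prompt; place this call before demo.launch(),
# since launch() blocks the script while the Gradio server is running):
#
#   print(generate("The bowler runs in and"))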