Kumarkishalaya committed
Commit: ed738dc
Parent: 8cc2393

Update app.py

Files changed (1): app.py (+3, -1)
app.py CHANGED
@@ -1,10 +1,12 @@
 from transformers import GPT2LMHeadModel, GPT2Tokenizer
 import gradio as gr
+import torch
 
 trained_tokenizer = GPT2Tokenizer.from_pretrained("Kumarkishalaya/GPT-2-next-word-prediction")
 trained_model = GPT2LMHeadModel.from_pretrained("Kumarkishalaya/GPT-2-next-word-prediction")
 untrained_model = GPT2Tokenizer.from_pretrained("gpt2")
 untrained_tokenizer = ("gpt2")
+device = "cuda" if torch.cuda.is_available() else "cpu"
 
 def generate(commentary_text):
     input_ids = trained_tokenizer(commentary_text, return_tensors="pt")
@@ -13,7 +15,7 @@ def generate(commentary_text):
     return tokenizer_finetuned.decode(output[0])
 
 # Create Gradio interface
-iface = gr.Interface(fn=generate_text,
+iface = gr.Interface(fn=generate,
     inputs="text",
     outputs="text",
     title="GPT-2 Text Generation",