ai-forever committed
Commit 75c52ad
1 Parent(s): 0454021
Files changed (1)
  1. app.py +5 -2
app.py CHANGED
@@ -3,6 +3,8 @@ import gradio as gr
 from transformers import GPT2LMHeadModel, GPT2Tokenizer
 tokenizer = GPT2Tokenizer.from_pretrained("sberbank-ai/mGPT")
 model = GPT2LMHeadModel.from_pretrained("sberbank-ai/mGPT")
+model.cuda()
+model.eval()
 
 description = "Multilingual generation with mGPT"
 title = "Generate your own example"
@@ -16,7 +18,7 @@ article = (
 )
 
 def generate(prompt: str):
-    input_ids = tokenizer.encode(prompt, return_tensors="pt").cuda(device)
+    input_ids = tokenizer.encode(prompt, return_tensors="pt").cuda()
     out = model.generate(input_ids,
                          min_length=100,
                          max_length=200,
@@ -36,7 +38,8 @@ interface = gr.Interface.load("huggingface/sberbank-ai/mGPT",
                               outputs='text',
                               thumbnail = 'https://habrastorage.org/r/w1560/getpro/habr/upload_files/26a/fa1/3e1/26afa13e1d1a56f54c7b0356761af7b8.png',
                               theme = "peach",
-                              article = article
+                              article = article,
+                              cache_examples=True
 )
 
 interface.launch(enable_queue=True)
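
In short, the commit moves the model to the GPU once at startup (model.cuda() plus model.eval()) and drops the explicit device argument from the per-request .cuda(device) call, so inputs now go to the default CUDA device. A minimal sketch of the resulting generation path is below; it assumes a CUDA-capable machine (as on the Space), omits any generation arguments not visible in the diff, and adds a torch.no_grad() wrapper and a tokenizer.decode() step so the example is self-contained; neither appears in the diff.

import torch
from transformers import GPT2LMHeadModel, GPT2Tokenizer

# Load mGPT once at startup and keep it on the GPU in eval mode,
# mirroring the lines added in this commit.
tokenizer = GPT2Tokenizer.from_pretrained("sberbank-ai/mGPT")
model = GPT2LMHeadModel.from_pretrained("sberbank-ai/mGPT")
model.cuda()   # assumes a CUDA device is available
model.eval()

def generate(prompt: str):
    # Inputs go to the default CUDA device; the commit removes the explicit
    # device argument used by the old .cuda(device) call.
    input_ids = tokenizer.encode(prompt, return_tensors="pt").cuda()
    with torch.no_grad():  # inference-only wrapper, added in this sketch
        out = model.generate(input_ids,
                             min_length=100,
                             max_length=200)
    # Decoding step added here for completeness; the diff truncates the function body.
    return tokenizer.decode(out[0], skip_special_tokens=True)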