unilm committed
Commit 0ee073b
1 Parent(s): 6f11bf1

Update app.py

Files changed (1)
  1. app.py +2 -2
app.py CHANGED
@@ -17,17 +17,17 @@ def generate(plain_text):
     outputs = prompter_model.generate(input_ids, do_sample=False, max_new_tokens=75, num_beams=8, num_return_sequences=8, eos_token_id=eos_id, pad_token_id=eos_id, length_penalty=-1.0)
     output_texts = prompter_tokenizer.batch_decode(outputs, skip_special_tokens=True)
     res = output_texts[0].replace(plain_text+" Rephrase:", "").strip()
-    print("[I] Prompter input: %s" % plain_text)
-    print("[I] Prompter output: %s \n------------\n" % res)
     return res
 
 txt = grad.Textbox(lines=1, label="Initial Text", placeholder="Input Prompt")
 out = grad.Textbox(lines=1, label="Optimized Prompt")
+examples = ["A rabbit is wearing a space suit", "Several railroad tracks with one train passing by", "A basket ball court in a military barracks, looks like an old grass mat after years of water damage.", "Cats dancing in a space club"]
 
 grad.Interface(fn=generate,
                inputs=txt,
                outputs=out,
                title="Promptist",
+               examples=examples,
                allow_flagging='never',
                cache_examples=False,
                theme="default").launch(enable_queue=True, debug=True)