Wootang01 committed
Commit 0fab62c
1 Parent(s): 3d62006

Update app.py

Files changed (1)
app.py +8 -1
app.py CHANGED
@@ -45,6 +45,13 @@ description = "Paste or write a text. Provide a short answer or noun keywords. S
 context = gr.inputs.Textbox(lines=5, placeholder="Enter paragraph/context here...")
 answer = gr.inputs.Textbox(lines=3, placeholder="Enter answer/keyword here...")
 question = gr.outputs.Textbox( type="auto", label="Question")
+examples = [
+  ["""Fears of a new Covid-19 cluster linked to a hotpot restaurant have surfaced amid Hong Kong’s Omicron-fuelled fifth wave, while infections tied to an investment bank continued to expand, triggering the evacuation of residents in a building after vertical transmission of the virus was detected.
+  On Wednesday, hundreds thronged Covid-19 testing stations in Tuen Mun, with some residents complaining of long waiting times and chaotic arrangements. Authorities have deemed the district a high-risk area because of a higher number of infections.
+  Health officials said sewage testing would be conducted in Tuen Mun to monitor the spread of the coronavirus, but a string of preliminary-positive cases detected across the city suggested a wider, more worrying situation.
+  """, "a higher number of infections"]
+
+]

 def generate_question(context,answer):
   return get_question(context,answer,question_model,question_tokenizer)
@@ -52,5 +59,5 @@ def generate_question(context,answer):
 iface = gr.Interface(
   fn=generate_question,
   inputs=[context,answer],
-  outputs=question, title=title, description=description)
+  outputs=question, title=title, description=description, examples=examples)
 iface.launch(debug=False)
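For context, here is a minimal, self-contained sketch of what the updated block in app.py does after this commit. It uses the same legacy gr.inputs / gr.outputs API as the file; the generate_question stub, the title and description strings, and the shortened example passage are placeholders standing in for values defined elsewhere in app.py (the real get_question call, question_model, and question_tokenizer are not shown in this diff).

import gradio as gr

# Placeholders for values defined earlier in the real app.py (not shown in this diff).
title = "Question generator"  # assumed placeholder
description = "Paste or write a text. Provide a short answer or noun keywords."  # truncated in the diff; placeholder here

# Stub standing in for get_question(context, answer, question_model, question_tokenizer),
# which app.py defines elsewhere; replaced here so the sketch runs on its own.
def generate_question(context, answer):
    return f"What does the passage say about '{answer}'?"

context = gr.inputs.Textbox(lines=5, placeholder="Enter paragraph/context here...")
answer = gr.inputs.Textbox(lines=3, placeholder="Enter answer/keyword here...")
question = gr.outputs.Textbox(type="auto", label="Question")

# Each example is a [context, answer] pair in the same order as `inputs`;
# selecting one in the UI pre-fills both textboxes.
examples = [
    ["Health officials said sewage testing would be conducted in Tuen Mun "
     "to monitor the spread of the coronavirus.",
     "sewage testing"],
]

iface = gr.Interface(
    fn=generate_question,
    inputs=[context, answer],
    outputs=question,
    title=title,
    description=description,
    examples=examples,
)
iface.launch(debug=False)

Gradio renders the examples as clickable rows beneath the interface, so visitors can try the question generator without pasting their own text.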