potsawee committed
Commit 3bfc23d
1 Parent(s): 523e127

add more info

Files changed (1)
  1. app.py +1 -26
app.py CHANGED
@@ -13,31 +13,6 @@ translator.to(device)
 summarizer.to(device)
 
 
-# def generate_multiple_choice_question(
-#     context
-# ):
-#     num_questions = 1
-#     question_item = question_generation_sampling(
-#         g1_model, g1_tokenizer,
-#         g2_model, g2_tokenizer,
-#         context, num_questions, device
-#     )[0]
-#     question = question_item['question']
-#     options = question_item['options']
-#     options[0] = f"{options[0]} [ANSWER]"
-#     random.shuffle(options)
-#     output_string = f"Question: {question}\n[A] {options[0]}\n[B] {options[1]}\n[C] {options[2]}\n[D] {options[3]}"
-#     return output_string
-#
-# demo = gr.Interface(
-#     fn=generate_multiple_choice_question,
-#     inputs=gr.Textbox(lines=8, placeholder="Context Here..."),
-#     outputs=gr.Textbox(lines=5, placeholder="Question: \n[A] \n[B] \n[C] \n[D] "),
-#     title="Multiple-choice Question Generator",
-#     description="Provide some context (e.g. news article or any passage) in the context box and click **Submit**. The models currently support English only. This demo is a part of MQAG - https://github.com/potsawee/mqag0.",
-#     allow_flagging='never'
-# )
-
 def generate_output(
     task,
     text,
@@ -76,7 +51,7 @@ demo = gr.Interface(
     # examples=[["Building a translation demo with Gradio is so easy!", "eng_Latn", "spa_Latn"]],
     cache_examples=False,
     title="English🇬🇧 to Thai🇹🇭 | Translation or Summarization",
-    description="Provide some text (in English) & select one of the tasks (Translation or Summarization). Note that currently the model only supports text up to 1024 tokens. The base architecture is mt5-large with the embeddings filtered to only English and Thai tokens and fine-tuned to XSum (Eng2Thai) Dataset (https://huggingface.co/datasets/potsawee/xsum_eng2thai).",
+    description="Provide some text (in English) & select one of the tasks (Translation or Summarization). Note that currently the model only supports text up to 1024 tokens. The base architecture is mt5-large with the embeddings filtered to only English and Thai tokens and fine-tuned to XSum (Eng2Thai) Dataset (https://huggingface.co/datasets/potsawee/xsum_eng2thai). This is only after training for 1 epoch of xsum (the quality is not production-ready), just a quick proof-of-concept about fine-tuning on translated texts.",
     allow_flagging='never'
 
 )
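For context, here is a minimal sketch of how the resulting app.py plausibly fits together after this commit. Only translator, summarizer, device, the generate_output(task, text, ...) signature, and the gr.Interface arguments visible in the diff come from the source; the checkpoint names, tokenizer handling, generation settings, and input widgets below are assumptions for illustration, not the actual implementation.

import torch
import gradio as gr
from transformers import AutoTokenizer, AutoModelForSeq2SeqLM

device = torch.device("cuda" if torch.cuda.is_available() else "cpu")

# Placeholder checkpoints: the diff only says the models are mt5-large based,
# vocabulary-filtered to English/Thai, and fine-tuned on XSum (Eng2Thai).
TRANSLATOR_CKPT = "google/mt5-small"  # assumption, not the actual checkpoint
SUMMARIZER_CKPT = "google/mt5-small"  # assumption, not the actual checkpoint

tokenizer = AutoTokenizer.from_pretrained(TRANSLATOR_CKPT)
translator = AutoModelForSeq2SeqLM.from_pretrained(TRANSLATOR_CKPT)
summarizer = AutoModelForSeq2SeqLM.from_pretrained(SUMMARIZER_CKPT)
translator.to(device)
summarizer.to(device)

def generate_output(task, text):
    # Dispatch on the selected task; both models expose the same seq2seq API.
    model = translator if task == "Translation" else summarizer
    # The demo description caps input at 1024 tokens, so truncate accordingly.
    inputs = tokenizer(text, return_tensors="pt", truncation=True, max_length=1024).to(device)
    output_ids = model.generate(**inputs, max_new_tokens=512)
    return tokenizer.decode(output_ids[0], skip_special_tokens=True)

demo = gr.Interface(
    fn=generate_output,
    inputs=[
        gr.Radio(["Translation", "Summarization"], label="Task"),  # assumed widget
        gr.Textbox(lines=8, placeholder="English text here..."),   # assumed widget
    ],
    outputs=gr.Textbox(lines=8),
    cache_examples=False,
    title="English🇬🇧 to Thai🇹🇭 | Translation or Summarization",
    allow_flagging='never',
)

if __name__ == "__main__":
    demo.launch()

Sharing a single mt5 tokenizer across both models is itself an assumption made for brevity; the real app may well load a separate tokenizer per checkpoint.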