# Question generation demo: given a context passage and an answer span,
# the fine-tuned T5 model generates a matching question.
from transformers import AutoModelWithLMHead, AutoTokenizer
import gradio as grad

# Load the tokenizer and model fine-tuned for answer-aware question generation.
text2text_tkn = AutoTokenizer.from_pretrained("mrm8488/t5-base-finetuned-question-generation-ap")
mdl = AutoModelWithLMHead.from_pretrained("mrm8488/t5-base-finetuned-question-generation-ap")
def text2text(answer, context):
    # Format the prompt the way the fine-tuned model expects.
    input_text = "answer: %s context: %s </s>" % (answer, context)
    features = text2text_tkn([input_text], return_tensors='pt')
    # Generate the question token IDs from the encoded prompt.
    output = mdl.generate(input_ids=features['input_ids'],
                          attention_mask=features['attention_mask'],
                          max_length=64)
    # Decode back to text, dropping special tokens such as <pad> and </s>.
    response = text2text_tkn.decode(output[0], skip_special_tokens=True)
    return response
txt = grad.Textbox(lines=5, label="Context (English)", placeholder="Context")
ans = grad.Textbox(lines=1, label="Answer")
out = grad.Textbox(lines=1, label="Generated Question")
# Gradio maps inputs positionally, so pass the answer box first and the
# context box second to match text2text(answer, context).
grad.Interface(text2text, inputs=[ans, txt], outputs=out).launch()