from transformers import pipeline
import wikipedia
import random
import gradio as gr
# Extractive question-answering pipeline: ELECTRA base fine-tuned on SQuAD 2.0
model_name = "deepset/electra-base-squad2"
nlp = pipeline('question-answering', model=model_name, tokenizer=model_name)
def get_wiki_article(topic):
    # Find the closest-matching Wikipedia article for the topic.
    # If a disambiguation page is hit, pick a random non-disambiguation option.
    try:
        search = wikipedia.search(topic, results=1)[0]
    except wikipedia.DisambiguationError as e:
        choices = [x for x in e.options if ('disambiguation' not in x) and ('All pages' not in x) and (x != topic)]
        search = random.choice(choices)
    try:
        p = wikipedia.page(search)
    except wikipedia.DisambiguationError as e:
        choices = [x for x in e.options if ('disambiguation' not in x) and ('All pages' not in x) and (x != topic)]
        s = random.choice(choices)
        p = wikipedia.page(s)
    return p.content, p.url
def get_answer(topic, question):
    # Fetch the article, then run extractive QA over its full text.
    w_art, w_url = get_wiki_article(topic)
    qa = {'question': question, 'context': w_art}
    res = nlp(qa)
    return res['answer'], w_url, {'confidence': res['score']}
inputs = [
    gr.Textbox(lines=5, label="Topic"),
    gr.Textbox(lines=5, label="Question"),
]
outputs = [
    gr.Textbox(type='text', label="Answer"),
    gr.Textbox(type='text', label="Wikipedia Reference Article"),
    gr.Label(label="Confidence in answer (assuming the correct wikipedia article)"),
]
title = "Question Answering with ELECTRA and Wikipedia"
description = 'Please note that topics with long articles may take around a minute to process. If you get an error, try double-checking the spelling or using a more specific topic (e.g. George H. Bush instead of George Bush).'
article = ''
examples = [
    ['Politics of the United States', 'What does the future hold?'],
    ['Unabomber', 'What radicalized him?'],
    ['Roman Empire', 'Why did it collapse?'],
    ['American Civil War', 'What was the cause?'],
    ['Donald Trump', 'Why do his supporters like him?'],
    ['Donald Trump', 'Why do his detractors dislike him?'],
    ['Bernie Sanders', 'Why do his supporters like him?'],
    ['Bernie Sanders', 'Why do his detractors dislike him?'],
    ['George H. Bush', 'Did he pursue higher education?'],
    ['John Deere Tractors', 'Do their customers like them?'],
    ['Michael Jordan', 'Was he successful outside of basketball?'],
]
gr.Interface(get_answer, inputs, outputs, title=title, description=description, article=article,
             theme="darkdefault", examples=examples,
             flagging_options=["strongly related", "related", "neutral", "unrelated", "strongly unrelated"]).launch()