File size: 2,554 Bytes
d512ec9
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
42f61dc
 
d512ec9
 
42f61dc
 
6531e1f
d512ec9
 
 
 
9a1359f
d512ec9
1f02018
0dfd317
1b2665e
 
 
 
 
 
d512ec9
1b2665e
 
b255b53
d512ec9
 
 
61f374e
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
from transformers import pipeline
import wikipedia
import random
import gradio as gr

# Extractive QA model: ELECTRA-base fine-tuned on SQuAD 2.0.
model_name = "deepset/electra-base-squad2"
# Loaded once at import time; pipeline downloads weights on first run.
nlp = pipeline('question-answering', model=model_name, tokenizer=model_name)


def get_wiki_article(topic):
    """Fetch the Wikipedia article best matching *topic*.

    Parameters
    ----------
    topic : str
        Free-text topic to search Wikipedia for.

    Returns
    -------
    tuple[str, str]
        (full article plain-text content, canonical article URL).

    Raises
    ------
    ValueError
        If the search returns no results at all.
    wikipedia.exceptions.DisambiguationError
        If a disambiguation page is hit and no usable alternative option
        remains after filtering.
    """
    def _pick_alternative(err, exclude):
        # A disambiguation page was hit: choose a random concrete option,
        # skipping meta entries and the term that caused the ambiguity.
        options = [o for o in err.options
                   if 'disambiguation' not in o
                   and 'All pages' not in o
                   and o != exclude]
        if not options:
            # Nothing usable to fall back to — surface the original error
            # instead of crashing random.choice on an empty sequence.
            raise err
        return random.choice(options)

    try:
        results = wikipedia.search(topic, results=1)
        if not results:
            raise ValueError(f"No Wikipedia results found for topic: {topic!r}")
        search = results[0]
    except wikipedia.exceptions.DisambiguationError as e:
        search = _pick_alternative(e, topic)

    try:
        p = wikipedia.page(search)
    except wikipedia.exceptions.DisambiguationError as e:
        # The resolved title can itself be ambiguous; retry with an option.
        p = wikipedia.page(_pick_alternative(e, topic))
    return p.content, p.url

def get_answer(topic, question):
    """Answer *question* from the Wikipedia article matching *topic*.

    Returns (answer text, source article URL, {'confidence': model score}).
    """
    article_text, article_url = get_wiki_article(topic)
    result = nlp({'question': question, 'context': article_text})
    return result['answer'], article_url, {'confidence': result['score']}


# --- Gradio UI wiring -------------------------------------------------------
inputs = [
          gr.Textbox(lines=5, label="Topic"),
          gr.Textbox(lines=5, label="Question")
]
outputs = [
            gr.Textbox(type='text', label="Answer"),
            gr.Textbox(type='text', label="Wikipedia Reference Article"),
            gr.Label(label="Confidence in answer (assuming the correct wikipedia article)"),
]

title = "Question Answering with ELECTRA and Wikipedia"
description = 'Please note that topics with long articles may take around a minute. If you get an error, please try double checking spelling, or try a more specific topic (e.g. George H. Bush instead of George Bush).'
article = ''
examples = [
    ['Politics of the United States', 'what does the future hold?'],
    ["Unabomber", "What radicalized him?"],
    ['Roman Empire', 'why did it collapse?'],
    ['American Civil War', 'What was the cause?'],
    ['Donald Trump', 'Why do his supporters like him?'],
    ['Donald Trump', 'Why do his detractors dislike him?'],
    ['Bernie Sanders', 'Why do his supporters like him?'],
    ['Bernie Sanders', 'Why do his detractors dislike him?'],
    ["George H. Bush", "Did he pursue higher education?"],
    ["John Deere Tractors", "Do their customers like them?"],
    ['Michael Jordan', 'Was he successful outside of basketball?'],
]

# Build and launch the demo. Fixed typo in the user-facing flagging option:
# "stongly unrelated" -> "strongly unrelated".
gr.Interface(get_answer, inputs, outputs, title=title, description=description, article=article,
             theme="darkdefault", examples=examples,
             flagging_options=["strongly related", "related", "neutral", "unrelated", "strongly unrelated"]).launch()