# Dependencies (install if needed, e.g. in a notebook):
# !pip install transformers gradio tensorflow

import tensorflow as tf
import gradio as gr

from transformers import AutoTokenizer, TFAutoModelForQuestionAnswering, pipeline


# Load the tokenizer and TensorFlow model for BERT-large fine-tuned on SQuAD
tokenizer = AutoTokenizer.from_pretrained("bert-large-uncased-whole-word-masking-finetuned-squad")
model = TFAutoModelForQuestionAnswering.from_pretrained(
    "bert-large-uncased-whole-word-masking-finetuned-squad", return_dict=False
)

# Build an extractive question-answering pipeline from the model and tokenizer
nlp = pipeline("question-answering", model=model, tokenizer=tokenizer)

# creating the function: the QA pipeline returns a dict with keys 'score',
# 'start', 'end', and 'answer'; this helper returns just the answer text
def func(context, question):
    result = nlp(question=question, context=context)
    return result['answer']

example_1 = "(1) My name is Ajulor Christian, I am a data scientist and machine learning engineer"
qst_1 = "What is Christian's profession?"

example_2 = "(2) Natural Language Processing (NLP) allows machines to break down and interpret human language. It's at the core of tools we use every day – from translation software, chatbots, spam filters, and search engines, to grammar correction software, voice assistants, and social media monitoring tools."
qst_2 = "What is NLP used for?"
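
# Optional sanity check: call the helper directly on the first example before
# launching the UI (the printed string is whatever answer span the model extracts)
print(func(example_1, qst_1))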

# creating the interface: two text inputs (context, question) and one text output
app = gr.Interface(fn=func, inputs=['textbox', 'text'], outputs='textbox',
                   title='Question Answering bot', theme='dark-grass',
                   description='Input context and question, then get answers!',
                   examples=[[example_1, qst_1],
                             [example_2, qst_2]])

# launching the app
app.launch(inline=False)
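
# Note: to expose a temporary public URL (useful in Colab or other hosted
# notebooks), the app could instead be launched with app.launch(share=True).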