#!pip install transformers

# importing necessary libraries
import tensorflow as tf
from transformers import pipeline
from transformers import AutoTokenizer, TFAutoModelForQuestionAnswering

# loading the BERT model fine-tuned on SQuAD and its tokenizer
tokenizer = AutoTokenizer.from_pretrained("bert-large-uncased-whole-word-masking-finetuned-squad")
model = TFAutoModelForQuestionAnswering.from_pretrained(
    "bert-large-uncased-whole-word-masking-finetuned-squad", return_dict=False
)
nlp = pipeline("question-answering", model=model, tokenizer=tokenizer)

#!pip install gradio
import gradio as gr

# creating the function that the interface will call
def func(context, question):
    result = nlp(question=question, context=context)
    return result['answer']

# example contexts and questions shown in the interface
example_1 = "(1) My name is Ajulor Christian, I am a data scientist and machine learning engineer"
qst_1 = "what is christian's profession?"

example_2 = "(2) Natural Language Processing (NLP) allows machines to break down and interpret human language. It's at the core of tools we use every day – from translation software, chatbots, spam filters, and search engines, to grammar correction software, voice assistants, and social media monitoring tools."
qst_2 = "What is NLP used for?"

# creating the interface
app = gr.Interface(
    fn=func,
    inputs=['textbox', 'text'],
    outputs='textbox',
    title='Question Answering bot',
    theme='dark-grass',
    description='Input context and question, then get answers!',
    examples=[[example_1, qst_1], [example_2, qst_2]],
)

# launching the app
app.launch(inline=False)
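
# Optional sanity check (a sketch, not part of the original tutorial): the
# question-answering pipeline can also be called directly, without the Gradio UI.
# It returns a dict with 'answer', 'score', 'start', and 'end' keys; func()
# simply extracts the 'answer' field. Uncomment to try it before launching:
#   print(nlp(question=qst_1, context=example_1))
#   print(func(example_2, qst_2))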