Ezi committed on
Commit
37683fd
1 Parent(s): dd3d237

Code Question Answering

individual_tab_as_python_file/CodeLLM_Question_answering.py ADDED
@@ -0,0 +1,37 @@
+ import tensorflow as tf
+ 
+ # pip install transformers
+ from transformers import pipeline
+ 
+ # importing the tokenizer and the TensorFlow question-answering model class
+ from transformers import AutoTokenizer, TFAutoModelForQuestionAnswering
+ 
+ tokenizer = AutoTokenizer.from_pretrained("bert-large-uncased-whole-word-masking-finetuned-squad")
+ model = TFAutoModelForQuestionAnswering.from_pretrained("bert-large-uncased-whole-word-masking-finetuned-squad", return_dict=False)
+ 
+ nlp = pipeline("question-answering", model=model, tokenizer=tokenizer)
+ 
+ # pip install gradio
+ import gradio as gr
+ 
+ # creating the function that extracts an answer span from the given context
+ def func(context, question):
+     result = nlp(question=question, context=context)
+     return result['answer']
+ 
+ example_1 = "(1) My name is Ajulor Christian, I am a data scientist and machine learning engineer"
+ qst_1 = "what is christian's profession?"
+ 
+ example_2 = "(2) Natural Language Processing (NLP) allows machines to break down and interpret human language. It's at the core of tools we use every day – from translation software, chatbots, spam filters, and search engines, to grammar correction software, voice assistants, and social media monitoring tools."
+ qst_2 = "What is NLP used for?"
+ 
+ # creating the Gradio interface
+ gr.Interface(fn=func, inputs=['textbox', 'text'], outputs='textbox',
+              title='Question Answering bot', theme='dark-grass',
+              description='Input context and question, then get answers!',
+              examples=[[example_1, qst_1],
+                        [example_2, qst_2]]
+              ).launch()
+ 
+ # launching the app
+ # app.launch(inline=False)
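
For reference, the question-answering pipeline in this file can also be exercised without the Gradio UI. The snippet below is a minimal sketch (not part of the commit) that reuses the func helper defined above on the first example; the expected output is only indicative.

# Minimal sketch, not part of this commit: call the QA helper directly.
context = "My name is Ajulor Christian, I am a data scientist and machine learning engineer"
question = "what is christian's profession?"
print(func(context, question))  # should print a span such as "data scientist and machine learning engineer"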