# Streamlit app: extractive question answering on academic essays
import streamlit as st
from transformers import pipeline
from transformers import AutoModelForQuestionAnswering, AutoTokenizer
st.set_page_config(page_title="Automated Question Answering System") # set page title
# page heading, centered
st.markdown("<h2 style='text-align: center;'>Question Answering on Academic Essays</h2>", unsafe_allow_html=True)
# short description of the task shown under the heading
st.markdown("<h3 style='text-align: left; color:#F63366; font-size:18px;'><b>What is extractive question answering about?<b></h3>", unsafe_allow_html=True)
st.write("Extractive question answering is a Natural Language Processing task where text is provided for a model so that the model can refer to it and make predictions about where the answer to a question is.")
# store the model in cache resources to enhance efficiency (ref: https://docs.streamlit.io/library/advanced-features/caching)
@st.cache_resource(show_spinner=True)
def question_model():
    """Load and return the extractive question-answering pipeline.

    Cached with st.cache_resource so the checkpoint is downloaded and
    loaded only once per server process, not on every script rerun.

    Returns:
        transformers.Pipeline: a "question-answering" pipeline backed by
        the fine-tuned DeBERTa-v3 checkpoint below.
    """
    # fine-tuned checkpoint for extractive QA on academic essays
    model_name = "kxx-kkk/FYP_deberta-v3-base-squad2_mrqa"
    tokenizer = AutoTokenizer.from_pretrained(model_name)
    model = AutoModelForQuestionAnswering.from_pretrained(model_name)
    question_answerer = pipeline("question-answering", model=model, tokenizer=tokenizer)
    return question_answerer
# choose the source with different tabs
tab1, tab2 = st.tabs(["Input text", "Upload File"])

# --- Tab 1: type/paste the essay directly ---
with tab1:
    # sample context/question used by the "Try example" button
    sample_question = "What is NLP?"
    with open("sample.txt", "r") as text_file:
        sample_text = text_file.read()

    # Button to try the example.
    # When clicked, write the sample directly into the widgets' session-state
    # slots BEFORE the widgets are instantiated: for a keyed widget that
    # already has session state, a value= argument is ignored by Streamlit,
    # so assigning st.session_state is the reliable way to populate them.
    example = st.button("Try example")
    if example:
        st.session_state["contextInput"] = sample_text
        st.session_state["questionInput"] = sample_question

    # widgets read their current value from session state via their keys
    context = st.text_area("Enter the essay below:", key="contextInput", height=330)
    question = st.text_input(label="Enter the question: ", key="questionInput")

    # perform question answering when "get answer" button clicked
    # (key= keeps this button's widget ID distinct from the identically
    # labelled button in tab 2)
    button = st.button("Get answer", key="getAnswerTab1")
    if button:
        with st.spinner(text="Loading question model..."):
            question_answerer = question_model()
        with st.spinner(text="Getting answer..."):
            answer = question_answerer(context=context, question=question)
            answer = answer["answer"]
            # display the result in a bordered container
            container = st.container(border=True)
            container.write("<h5><b>Answer:</b></h5>" + answer + "<br>", unsafe_allow_html=True)
# --- Tab 2: upload a .txt file as the essay ---
with tab2:
    # provide upload place
    uploaded_file = st.file_uploader("Choose a .txt file to upload", type=["txt"])
    # transfer file to context and allow asking a question, then perform QA
    if uploaded_file is not None:
        # decode the uploaded bytes as UTF-8 text
        raw_text = str(uploaded_file.read(), "utf-8")
        context = st.text_area("", value=raw_text, height=330)
        question = st.text_input(label="Enter your question", value=sample_question)

        # perform question answering when "get answer" button clicked.
        # key= is required: a second keyless st.button("Get answer") would
        # collide with tab 1's button and raise a DuplicateWidgetID error.
        button = st.button("Get answer", key="getAnswerTab2")
        if button:
            with st.spinner(text="Loading question model..."):
                question_answerer = question_model()
            with st.spinner(text="Getting answer..."):
                answer = question_answerer(context=context, question=question)
                answer = answer["answer"]
                # display the result in a bordered container
                container = st.container(border=True)
                container.write("<h5><b>Answer:</b></h5>" + answer + "<br>", unsafe_allow_html=True)

# bottom padding under both tabs
st.markdown("<br><br><br><br><br>", unsafe_allow_html=True)