import streamlit as st
from transformers import pipeline, AutoTokenizer, AutoModelForSeq2SeqLM

# Load the Flan-Alpaca model (a T5-based instruction-following seq2seq model,
# so it is used through the text2text-generation pipeline rather than an
# extractive question-answering pipeline)
model_name = "declare-lab/flan-alpaca-base"
model = AutoModelForSeq2SeqLM.from_pretrained(model_name)
tokenizer = AutoTokenizer.from_pretrained(model_name)
qa_pipeline = pipeline("text2text-generation", model=model, tokenizer=tokenizer)
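# Note: Streamlit re-runs this entire script on every interaction, so the model
# above is reloaded each time. Wrapping the load in a function decorated with
# st.cache_resource is one common way to avoid repeated loads.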

def main():
    # Set app title
    st.title("Flan Alpaca Large Model")

    # Create input for user's question
    question = st.text_input("Enter your question here:")

    # Generate an answer when the question is submitted
    if st.button("Submit") and question:
        # Flan-Alpaca is a text-to-text model, so the question is passed
        # directly as the prompt and the generated text is the answer
        answer = qa_pipeline(question, max_length=128)[0]["generated_text"]
        # Display answer in output box
        st.write("Answer: ", answer)

if __name__ == "__main__":
    main()