# NOTE(review): the three lines below were Hugging Face web-page residue
# ("varun500's picture" / "Update app.py" / commit hash "770a02f") pasted
# into the source; commented out so the file parses as Python.
# varun500's picture
# Update app.py
# 770a02f
import streamlit as st
from transformers import AutoTokenizer, AutoModelForCausalLM
import torch
import numpy as np
from transformers import (
AutoModelForCausalLM,
AutoTokenizer,
PreTrainedModel,
PreTrainedTokenizer
)
from transformers import AutoTokenizer, AutoModelForSeq2SeqLM
# Load the Flan-Alpaca tokenizer and seq2seq model once at import time so
# Streamlit reuses them across reruns instead of reloading per interaction.
# NOTE(review): the UI title says "Large" but this loads the *base*
# checkpoint ("declare-lab/flan-alpaca-base") — confirm which is intended.
tokenizer = AutoTokenizer.from_pretrained("declare-lab/flan-alpaca-base")
model = AutoModelForSeq2SeqLM.from_pretrained("declare-lab/flan-alpaca-base")
def main():
    """Streamlit UI: take a question and answer it with the Flan-Alpaca model.

    Reads the module-level `tokenizer` and `model` loaded at import time.
    """
    # Set app title
    st.title("Flan Alpaca Large Model")
    # Create input for user's question
    question = st.text_input("Enter your question here:")
    # Create button to submit question
    if st.button("Submit"):
        # BUG FIX: the original called `qa_pipeline(...)`, a name that is
        # never defined or imported anywhere in this file, so Submit always
        # raised NameError. Use the seq2seq model that IS loaded above.
        inputs = tokenizer(question, return_tensors="pt")
        # max_new_tokens bounds generation length; greedy decode is fine
        # for a simple QA demo.
        output_ids = model.generate(**inputs, max_new_tokens=128)
        answer = tokenizer.decode(output_ids[0], skip_special_tokens=True)
        # Display answer in output box
        st.write("Answer: ", answer)


if __name__ == "__main__":
    main()