# Q&A Chatbot
# from langchain.llms import OpenAI
from langchain import HuggingFaceHub  # newer LangChain versions expose this as langchain_community.llms.HuggingFaceHub
from dotenv import load_dotenv

load_dotenv()  # take environment variables from .env

import streamlit as st
import os

## Read the Hugging Face API token from the environment and re-export it for LangChain
huggingface_token = os.getenv("HUGGINGFACEHUB_API_TOKEN")
if huggingface_token:
    os.environ["HUGGINGFACEHUB_API_TOKEN"] = huggingface_token

## Load the Hugging Face Hub model (the commented-out OpenAI model is kept for reference)
llm_huggingface = HuggingFaceHub(
    repo_id="google/flan-t5-xxl",
    model_kwargs={"temperature": 0.6, "max_length": 64},
)

## Function to query the model and return its response
def get_llm_response(question):
    # llm = OpenAI(model_name="text-davinci-003", temperature=0.5)
    return llm_huggingface(question)

## Initialize our Streamlit app
st.set_page_config(page_title="Q&A Demo")
st.title("Q&A Chatbot using LangChain")

st.sidebar.title("Options")
st.sidebar.info("Welcome to the Q&A Chatbot!")
st.write("Ask me anything and I'll do my best to answer!")

question = st.text_input("Question: ", key="input")
submit = st.button("Ask the question")

## If the ask button is clicked, query the model and display the answer
if submit:
    response = get_llm_response(question)
    st.subheader("The Answer is")
    st.write(response)
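
## Usage sketch (assumptions: this file is saved as app.py and a local .env file
## defines HUGGINGFACEHUB_API_TOKEN; package names below are the standard PyPI ones):
##   pip install streamlit langchain python-dotenv huggingface_hub
##   streamlit run app.py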