# Q&A Chatbot
import os

import streamlit as st
from dotenv import load_dotenv
from langchain.llms import OpenAI

# Load environment variables (e.g. OPENAI_API_KEY) from a local .env file
load_dotenv()


# Function to load the OpenAI model and get a response for a question
def get_openai_response(question):
    llm = OpenAI(
        model="gpt-3.5-turbo-instruct",
        temperature=0.6,
        openai_api_key=os.environ["OPENAI_API_KEY"],
    )
    response = llm(question)
    return response


# Initialize our Streamlit app
st.set_page_config(page_title="Q&A DEMO")
st.header("Langchain Application")

question = st.text_input("Input: ", key="input")
submit = st.button("Ask the Question")

# Only call the model once the button is pressed
if submit:
    response = get_openai_response(question)
    st.subheader("The response is")
    st.write(response)
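# How to run (assumed setup, not part of the original script): place a .env file
# next to this script containing a line such as
#   OPENAI_API_KEY=sk-...
# load_dotenv() above reads it into the environment, and the app can be launched with
#   streamlit run app.py
# where "app.py" is a placeholder for whatever name this file is saved under.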