from langchain.llms import OpenAI
import streamlit as st
from dotenv import load_dotenv
import os

# Load the OpenAI key from a local .env file (expects OPEN_AI_KEY=<your key>);
# the key should never be hardcoded in the source.
load_dotenv()

# print(llm.predict("what is the difference between client and user"))

# Query the OpenAI LLM and return its answer to the question
def get_open_ai_response(question):
    llm = OpenAI(openai_api_key=os.getenv("OPEN_AI_KEY"), temperature=0.6)
    response = llm(question)
    return response

# Build the Streamlit app
st.set_page_config(page_title="Q&A Application")
st.header("LangChain Application")

input_text = st.text_input("Input: ", key="input")
submit = st.button("Ask the Question")

# Only call the LLM once the button is clicked and the input is non-empty,
# so the API is not hit on every Streamlit rerun
if submit and input_text:
    response = get_open_ai_response(input_text)
    st.subheader("The Response is")
    st.write(response)
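# Usage (a sketch, assuming this file is saved as app.py and that a .env file
# containing a line like OPEN_AI_KEY=<your OpenAI API key> sits in the same
# directory -- the file name app.py is an assumption, not from the source):
#   streamlit run app.py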