# Q&A Chatbot
import os
from langchain.llms import OpenAI
from dotenv import load_dotenv
import streamlit as st
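# load_dotenv() below reads a local .env file (assumed to contain a line like
# OPEN_API_KEY=<your OpenAI API key>) into the process environment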
load_dotenv()
# Function to load the OpenAI model and return its response for a question
def get_openai_response(question):
    # The API key is read from the OPEN_API_KEY environment variable loaded above
    llm = OpenAI(openai_api_key=os.getenv("OPEN_API_KEY"),
                 model_name="text-davinci-003", temperature=0.5)
    response = llm(question)
    return response
# Initialize the Streamlit app
st.set_page_config(page_title="Q&A Demo")
st.header("Langchain Application")
# Get user input (the widget key must be a string; avoid shadowing the built-in input())
user_input = st.text_input("Input: ", key="input")
# How the input is handled:
# 1. The user's input is sent to the get_openai_response function
# 2. get_openai_response loads the OpenAI model and calls llm() to get the response
#    (instead of calling llm directly, the predict / predict_messages helpers can be used,
#     or a chain built with a PromptTemplate, as in the sketch below)
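# A minimal sketch of the PromptTemplate + chain alternative mentioned above,
# assuming the same legacy langchain version as the imports at the top of this file.
# The helper name, prompt text, and chain setup here are illustrative, not part of the original app.
from langchain.prompts import PromptTemplate
from langchain.chains import LLMChain

def get_openai_response_with_chain(question):
    llm = OpenAI(openai_api_key=os.getenv("OPEN_API_KEY"),
                 model_name="text-davinci-003", temperature=0.5)
    prompt = PromptTemplate(
        input_variables=["question"],
        template="Answer the following question concisely:\n{question}",
    )
    chain = LLMChain(llm=llm, prompt=prompt)
    # chain.run fills the template with the question and returns the model's text output
    return chain.run(question=question)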
submit = st.button("Ask the question")
# If the "Ask the question" button is clicked, fetch and display the response
if submit:
    response = get_openai_response(user_input)
    st.subheader("The response is")
    st.write(response)
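
# To run the app locally: streamlit run app.py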