# Q&A Chatbot
import os
from langchain.llms import OpenAI
from dotenv import load_dotenv
import streamlit as st

load_dotenv()
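# load_dotenv() above reads key=value pairs from a .env file in the project
# directory; it is assumed to contain a line like OPENAI_API_KEY=<your key>.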

# Function to load the OpenAI model and get a response for the given question
def get_openai_response(question):
    llm = OpenAI(
        openai_api_key=os.getenv("OPENAI_API_KEY"),  # must match the variable name in your .env file
        model_name="text-davinci-003",
        temperature=0.5,
    )
    response = llm(question)
    return response

# Initialize the Streamlit app
st.set_page_config(page_title="Q&A Demo")
st.header("LangChain Application")

# Get user input (renamed from `input` to avoid shadowing the Python built-in)
user_input = st.text_input("Input: ", key="input")

# How the response is produced once the button below is clicked:
#
# 1. The text from the input box is sent to the get_openai_response function.
# 2. get_openai_response loads the OpenAI model and calls it directly with the
#    question (llm(question)) to get the answer.
# (Instead of calling the llm directly, we could also use its predict method.)
# (We could also wrap the model in a PromptTemplate and an LLMChain; see the
#  commented-out sketch below.)
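
# A minimal sketch of the PromptTemplate / LLMChain alternative mentioned above,
# assuming the same classic LangChain API used in this file. It is commented out
# so it does not run alongside the direct llm(question) call; the helper name
# get_openai_response_with_chain and the prompt wording are only illustrative.
#
# from langchain.prompts import PromptTemplate
# from langchain.chains import LLMChain
#
# def get_openai_response_with_chain(question):
#     prompt = PromptTemplate(
#         input_variables=["question"],
#         template="Answer the following question concisely:\n{question}",
#     )
#     llm = OpenAI(openai_api_key=os.getenv("OPENAI_API_KEY"),
#                  model_name="text-davinci-003", temperature=0.5)
#     chain = LLMChain(llm=llm, prompt=prompt)
#     # chain.run fills the template with the question and calls the model,
#     # roughly equivalent to llm.predict(prompt.format(question=question))
#     return chain.run(question)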

submit = st.button("Ask the question")

# If the 'Ask the question' button is clicked, query the model and show the answer
if submit:
    response = get_openai_response(user_input)
    st.subheader("The response is")
    st.write(response)
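
# To start the app locally (assuming this file is saved as app.py):
#   streamlit run app.py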