File size: 1,949 Bytes
ba1fe9e
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
## Conversational Q&A Chatbot
import os

import streamlit as st
from langchain.schema import HumanMessage, SystemMessage, AIMessage
from langchain_openai import AzureChatOpenAI

# SECURITY: credentials must come from the environment (or a secrets manager),
# never from source control. A real key was previously hard-coded on this line;
# it is compromised and must be rotated in the Azure portal.
AZURE_OPENAI_KEY = os.environ["AZURE_OPENAI_KEY"]
AZURE_OPENAI_ENDPOINT = os.environ.get(
    "AZURE_OPENAI_ENDPOINT", "https://mlsdaiinstance.openai.azure.com/"
)
AZURE_OPENAI_VERSION = "2024-05-01-preview"

EMBEDDING_MODEL = "text-embedding-ada-002"
CHAT_MODEL = "gpt-35-turbo"

# Azure OpenAI chat client. temperature=0 makes answers as deterministic as
# the service allows, which suits a factual Q&A bot.
llm = AzureChatOpenAI(
    openai_api_type="azure",
    openai_api_version=AZURE_OPENAI_VERSION,
    openai_api_key=AZURE_OPENAI_KEY,
    azure_endpoint=AZURE_OPENAI_ENDPOINT,
    deployment_name=CHAT_MODEL,
    temperature=0,
)

## Streamlit UI: page chrome and one-time chat-history initialization.
st.set_page_config(page_title="Conversational Q&A Chatbot")
st.header("Hey, Let's Chat")


# st.session_state persists across Streamlit reruns, so the conversation
# history survives each widget interaction. Seed it once with the system
# prompt. (Fixed typo: "assitant" -> "assistant".)
if 'flow_messages' not in st.session_state:
    st.session_state['flow_messages'] = [
        SystemMessage(content="You are an AI assistant who answers the questions asked truthfully!")
    ]

## Function to query the chat model with the running conversation history.
def get_chatmodel_response(question):
    """Append *question* to the session history, query the LLM with the full
    history, record the AI reply in the history, and return the reply text.

    Side effects: mutates st.session_state['flow_messages'] (two appends).
    """
    st.session_state['flow_messages'].append(HumanMessage(content=question))

    # llm(messages) — calling the model directly — is deprecated in LangChain;
    # invoke() is the supported Runnable entry point and returns an AIMessage.
    response = llm.invoke(st.session_state['flow_messages'])

    st.session_state['flow_messages'].append(AIMessage(content=response.content))
    return response.content


input = st.text_input("Input: ", key="input")
response = get_chatmodel_response(input)

submit = st.button("Ask the question")

## If ask button is clicked
if submit:
    st.subheader("The Response is")
    st.write(response)