import streamlit as st
from openai import OpenAI
from pandas import DataFrame  # used for type hints below; may also be re-exported by app_access_db
from app_config import *
from app_access_db import *

model = "gpt-3.5-turbo"
# ------------------------------------------------------------------------------------------------
# SIDEBAR
# ------------------------------------------------------------------------------------------------
st.sidebar.title('OpenAI Business Chat')
st.sidebar.write('This chat bot is built with the Tools and Function calling feature of OpenAI so it can answer questions about applications and the performance of officers')
st.sidebar.markdown("""
### A sample database with the following structure
- application
    - app_number
    - amount
    - amount_paid
    - state (APPROVED, REJECTED, PENDING_PAYMENT, PAID)
    - office_code [FK]
    - service_code [FK]
    - date_created
    - date_paid
    - date_processed
- office
    - office_name
    - office_location_code [FK]
- location
    - location_name
    - location_code
- service
    - service_code
    - service_name
### The chatbot will provide answers from that database, for example
- The number of applications rejected in a location during the current month
- The trend of applications in particular states, for a location
- Any other question you think is relevant for this DB
""")
def onchange_openai_key():
    print(st.session_state.openai_key)
openai_key = st.sidebar.text_input('OpenAI key', on_change=onchange_openai_key, key='openai_key')
def submit_openai_key(model=model):
    if openai_key is None or openai_key == '':
        st.sidebar.write('Please provide the key first')
        return
    else:
        client = OpenAI(api_key=openai_key)
        # Simple sanity-check completion to confirm the key works
        completion = client.chat.completions.create(
            model=model,
            messages=[
                {"role": "system", "content": "You are an assistant giving a simple and short answer to a child's question"},
                {"role": "user", "content": "count from 0 to 10"}
            ]
        )
        st.sidebar.write(f'Simple count: {completion.choices[0].message.content}')
submit_key = st.sidebar.button(label='Submit', on_click=submit_openai_key)
# ------------------------------------------------------------------------------------------------
# CHAT
# ------------------------------------------------------------------------------------------------
st.title('OpenAI Business Chat')
st.write(f'Ask any question that can be answered by the LLM {model}.')
def askQuestion(model=model, question=''):
    if openai_key is None or openai_key == '':
        print('Please provide the key first')
        return 'LLM API is not defined. Please provide the key first'
    else:
        client = OpenAI(api_key=openai_key)
        completion = client.chat.completions.create(
            model=model,
            messages=[
                {"role": "system", "content": f'{query_context}'},
                {"role": "user", "content": f'{question}'}
            ]
        )
        return completion.choices[0].message.content
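# query_context comes from app_config and is not shown in this file. askQuestion() treats the
# completion as a bare SQL statement (it is rendered with language='sql' and passed to run_query
# below), so the prompt is assumed to read roughly like this hypothetical example:
#
#   query_context = (
#       "You translate business questions into a single SQL SELECT statement over the "
#       "application, office, location and service tables described above. "
#       "Return only the SQL, with no explanation or markdown fences."
#   )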
class AssistantMessage:
    """Holds the generated SQL and the DataFrame obtained by running it."""
    def __init__(self):
        self.sql: str = ''
        self.response_data: DataFrame = DataFrame()
def displayAssistantMessage(assistantMessage: AssistantMessage):
    with st.chat_message("assistant"):
        st.code(assistantMessage.sql, language='sql')
        st.code(assistantMessage.response_data, language='markdown')
        # Two columns: plot the first column against the second as a bar chart
        if assistantMessage.response_data.columns.size == 2:
            st.bar_chart(assistantMessage.response_data, x=assistantMessage.response_data.columns[0], y=assistantMessage.response_data.columns[1])
        # Single column: show the value as a metric
        if assistantMessage.response_data.columns.size == 1:
            st.metric(label=assistantMessage.response_data.columns[0], value=f'{assistantMessage.response_data.values[0]}')
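# Example of the rendering rules above (illustrative): a two-column result such as
# (location_name, rejected_count) is drawn as a bar chart, while a single-column aggregate
# such as a COUNT(*) is shown with st.metric.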
# Initialize chat history
if "messages" not in st.session_state:
st.session_state.messages = []
# Display chat messages from history on app rerun
for message in st.session_state.messages:
    if message["role"] == "user":
        with st.chat_message(message["role"]):
            st.markdown(message["content"])
    elif message["role"] == "assistant":
        displayAssistantMessage(message["content"])
# React to user input
if prompt := st.chat_input("What is up?"):
    with st.status('Running', expanded=True) as status:
        # Display user message in chat message container
        st.chat_message("user").markdown(prompt)
        # Add user message to chat history
        st.session_state.messages.append({"role": "user", "content": prompt})
        # Ask the LLM for a SQL query answering the question, then run it against the database
        response = askQuestion(question=prompt)
        st.code(response, language='sql')
        response_data = run_query(response)
        # Display assistant response in chat message container
        assistant_msg = AssistantMessage()
        assistant_msg.sql = response
        assistant_msg.response_data = response_data
        displayAssistantMessage(assistant_msg)
        # Add assistant response to chat history
        st.session_state.messages.append({"role": "assistant", "content": assistant_msg})
        status.update(label='Response of last question', state="complete", expanded=True)
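# run_query() is imported from app_access_db and is expected to execute the generated SQL and
# return a pandas DataFrame. A minimal sketch, assuming the sample database is a local SQLite
# file (hypothetical; the real module may use a different driver or connection string):
#
#   import sqlite3
#   import pandas as pd
#
#   def run_query(sql: str) -> pd.DataFrame:
#       with sqlite3.connect("sample.db") as conn:
#           return pd.read_sql_query(sql, conn)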