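# Streamlit front end for the "Assist" store-associate app: builds a LangChain
# conversational agent around the backend helpers in be.py and renders a chat UI.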
from langchain.tools import DuckDuckGoSearchRun
from langchain.agents.tools import tool
from langchain.agents import Tool, load_tools, initialize_agent, AgentType
from langchain.prompts import PromptTemplate
from langchain.chains import LLMChain
# Streamlit UI Callback
from langchain.callbacks import StreamlitCallbackHandler
from langchain.chains import LLMMathChain
from langchain.memory import ConversationBufferMemory
from langchain_openai import ChatOpenAI
import streamlit as st
from dotenv import load_dotenv
from sqlalchemy import create_engine, text, URL
from htmlTemplates import css, bot_template, user_template
from openai import OpenAI
import os
import time
from be import config, get_db_chain, outreach_sms_message, get_store_address, data_visualization
from PIL import Image
from langchain_core.messages import HumanMessage, AIMessage
import matplotlib.pyplot as plt
import re
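
# Build a conversational ReAct agent for a single user question, wiring in the
# query-generation, store-address, outreach-SMS, and data-visualization tools and
# sharing chat memory through st.session_state.memory.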
def conversation_agent(question):
    llm = ChatOpenAI(temperature=0.5, model="gpt-3.5-turbo")  # create the LLM
    # search = DuckDuckGoSearchRun()
    llm_math_chain = LLMMathChain.from_llm(llm=llm, verbose=True)
    tools = [
        Tool(
            name="Calculator",
            func=llm_math_chain.run,
            description="useful when you need to answer questions with math"
        )
    ]
    ######### CREATING ALL THE TOOLS FOR THE AGENT TO USE #####################
    # Create the final SMS message
    outreach_sms_message_tool = Tool(
        name="Outreach SMS Message",
        func=outreach_sms_message,
        description="Create an outreach SMS message for the customer. Pass both user input and the Store Address as ONE SINGLE INPUT STRING. Use this Tool only to create an outreach SMS or Text message. At the end always include the Store Address for appointment confirmation messages"
    )
    # tools.append(outreach_sms_message_tool)
    # Creating a Query Tool - to generate SQL statements and query the database
    get_db_chain_tool = Tool(
        name='Query Generation Tool',
        func=get_db_chain,
        description="ONLY use this tool for query generation and to fetch any information. Use this to Create MYSQL Query with the ORIGINAL User Input question to pull customer, store, product information. MySQL database connections are established within this tool. Use this tool first. "
                    "During SQL Query Generation, make sure the SELECT list of columns are in the GROUP BY clause. "
                    "Use this to get the store address from the database"
    )
    # create the tool for finding the store details
    get_store_address_tool = Tool(
        name="Get Store Address",
        func=get_store_address,
        description="Use this tool with store number to get the store address. INPUT to this tool is Store number. Do not use this tool if you don't have Store number as input"
    )
    # create the tool for data visualization
    data_visualization_tool = Tool(
        name="Data Visualization Tool",
        func=data_visualization,
        description="Query Generation tool has to be called first to extract the data before using this tool, DO NOT call this tool as a first tool. ONLY USE THIS TOOL for data visualization when the user question is to 'visualize' or 'summarize' the data or info. This tool is used to create python code to generate charts for data visualization with user question, column names, and pandas dataframe. DO NOT create sample dataframe, always use the provided dataframe in df."
    )
    # tools.append(get_db_chain_tool)
    # Final tool list for the agent (this replaces the earlier list, so the Calculator tool is not included)
    tools = [get_db_chain_tool, get_store_address_tool, outreach_sms_message_tool, data_visualization_tool]
    conversational_agent = initialize_agent(
        agent="conversational-react-description",
        tools=tools,
        llm=llm,
        verbose=True,
        max_iterations=10,
        return_intermediate_steps=True,
        memory=st.session_state.memory,
        handle_parsing_errors=True
    )
    # print(conversational_agent.agent.llm_chain.prompt.template)
    # response = conversational_agent.invoke(question)
    response = conversational_agent({"input": question})
    return response
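
# Module-level OpenAI client; openai.OpenAI() reads OPENAI_API_KEY from the environment.
# Note: `client` is not referenced elsewhere in this file.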
client = OpenAI()
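
# Streamlit entry point: renders the sidebar and chat history, then routes each
# submitted question through conversation_agent() and appends both sides of the
# exchange to st.session_state.chat_history.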
def main():
    img = Image.open('assist_logo.jpg')
    # user_avatar = Image.open('renga_profile.jpg')
    # ai_avatar = Image.open('Designer.png')
    load_dotenv()  # load env parameters
    st.set_page_config(page_title="Assist", page_icon=img)  # must be the first Streamlit command
    st.set_option('deprecation.showPyplotGlobalUse', False)
    st.write(css, unsafe_allow_html=True)
    # Logo and image next to each other with a space column separating them out for rendering in small devices
    st.title(':blue[Assist] Store Associates')
    with st.sidebar:
        st.image('assist_logo.jpg', width=120)
        st.sidebar.header("Assist App for Store Associates")
        st.write("Assist store associates to get information on Customers, Stores, Product, Sales Analytics, Inventory Management and help with customer outreach")
        st.write(" ")
        st.write("Tasks I can help with:")
        st.write("a. Extract Data/info")
        st.write("b. Outreach message")
        st.write("c. Send Text to Customers")
        st.write("d. Search websites and look up Product prices & other info")
        st.write("e. Generate charts for greater visualization")
if "chat_history" not in st.session_state:
st.session_state.memory = ConversationBufferMemory(memory_key="chat_history")
st.session_state.chat_history = []
# ini chat history
if "messages" not in st.session_state:
st.session_state.messages = []
if "sql_query_cleaned" not in st.session_state:
st.session_state.sql_query_cleaned = []
if "sql_query_output" not in st.session_state:
st.session_state.sql_query_output = []
    for message in st.session_state.chat_history:
        if isinstance(message, HumanMessage):
            with st.chat_message("Human"):
                st.markdown(message.content)
        else:
            with st.chat_message("AI"):
                st.markdown(message.content)
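    # Chat input: echo the question, run it through the agent, and append both the
    # question and the agent's answer to st.session_state.chat_history.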
    user_question = st.chat_input("Type your question")
    # st.button("Charts", type="primary")
    if user_question is not None:
        # Print the last question from the user
        with st.chat_message("Human"):
            st.markdown(user_question)
        with st.spinner("Processing"):
            st.session_state.chat_history.append(HumanMessage(user_question))
            # st.write("memory:", st.session_state.memory)
            # st.write("messages:", st.session_state.messages)
            # st.write(user_template.replace("{{MSG}}", user_question), unsafe_allow_html=True)
            # Print the answer from the assistant
            with st.chat_message("AI"):
                assistant_response = conversation_agent(user_question)
                # print(type(assistant_response))
                # print(assistant_response)
                print("intermediate steps ::::::::::::::::::::::::::::", assistant_response["intermediate_steps"])
                assistant_response_output = assistant_response["output"]
                print("Assistant response output:", assistant_response_output)
                st.markdown(assistant_response_output)
                # if 'plt.show()' in assistant_response_output:
                #     print("before executing the python code")
                st.session_state.chat_history.append(AIMessage(assistant_response_output))
                # st.write(bot_template.replace("{{MSG}}", assistant_response_output), unsafe_allow_html=True)
    # if st.button("Charts") and st.session_state.chat_history is not None:
    #     code = data_visualization(question)
    #     exec(code)
    #     st.pyplot()
    # question = history(count)

if __name__ == '__main__':
    main()

# if st.button:
#     question = "Top 3 sales in store 4057"
#     output = data_visualization(question)