# NOTE(review): removed scraped page chrome ("Spaces:", "Runtime error" x2)
# captured during extraction — it was never part of the program source.
""" | |
Generative AI Chatbot through Document Sources | |
""" | |
import boto3 | |
import gradio as gr | |
from langchain.chains import RetrievalQA | |
from langchain.embeddings.openai import OpenAIEmbeddings | |
#from langchain.llms import ChatOpenAI | |
from langchain.vectorstores import Chroma | |
from langchain.chat_models import ChatOpenAI | |
# Get OpenAI API key from SSM Parameter Store
# SSM path where the OpenAI API key is stored (fetched with decryption below,
# so presumably a SecureString parameter — confirm in AWS).
API_KEY_PARAMETER_PATH = '/openai/api_key'
# Module-level SSM client; region is hard-coded to us-east-1.
ssm_client = boto3.client('ssm', region_name='us-east-1')
def get_openai_api_key(client, parameter_path):
    """
    Get the OpenAI API key from the SSM Parameter Store.

    Args:
        client: boto3 SSM client
        parameter_path: path to the SSM Parameter Store parameter

    Returns:
        The decrypted parameter value (the OpenAI API key).

    Raises:
        Exception: if no parameter exists at ``parameter_path``; the original
            ``ParameterNotFound`` error is chained as the cause.
    """
    try:
        response = client.get_parameter(
            Name=parameter_path,
            WithDecryption=True,  # SecureString values must be decrypted to be usable
        )
        return response['Parameter']['Value']
    except client.exceptions.ParameterNotFound as err:
        # Chain the AWS exception so the original error context is preserved.
        raise Exception(f'Parameter {parameter_path} not found in SSM Parameter Store') from err
# Get the API key from the SSM Parameter Store at import time so the
# embeddings and LLM clients constructed below can use it.
openai_api_key = get_openai_api_key(client=ssm_client, parameter_path=API_KEY_PARAMETER_PATH)
def OpenAIWithChroma(persist_directory='./chroma.db', model_name='gpt-3.5-turbo-16k', chain_type="stuff"):
    """
    Create a retrieval chatbot with OpenAI LLM and Chroma.

    Args:
        persist_directory: directory holding the persisted Chroma database
        model_name: name of the OpenAI chat model
        chain_type: type of chain to use for the retrieval chatbot

    Returns:
        RetrievalQA: retrieval chatbot that also returns source documents
    """
    # connect to local Chroma, embedding queries with the OpenAI embedding API
    embeddings = OpenAIEmbeddings(openai_api_key=openai_api_key)
    db = Chroma(persist_directory=persist_directory, embedding_function=embeddings)
    # connect to OpenAI LLM with Chroma; temperature=0 keeps answers deterministic
    llm = ChatOpenAI(model_name=model_name, temperature=0, openai_api_key=openai_api_key, max_tokens=5000)
    # BUG FIX: pass the chain_type argument through instead of hard-coding "stuff",
    # which silently ignored the parameter.
    chain = RetrievalQA.from_chain_type(
        llm,
        chain_type=chain_type,
        retriever=db.as_retriever(),
        return_source_documents=True,
    )
    return chain
def message_construction(result):
    """
    Format a RetrievalQA result dict as a Markdown chat reply.

    Args:
        result: mapping with 'result' (answer text) and 'source_documents'
            (objects exposing .metadata['source'] and .page_content)

    Returns:
        Markdown string: the bot answer followed by a bulleted list of
        source files with a 200-character escaped preview of each.
    """
    answer = f"**Bot Answer:** \n{result['result']}\n"
    parts = ["**Source Documents:**\n"]
    for doc in result['source_documents']:
        # unicode_escape keeps newlines/control chars visible in the preview
        preview = doc.page_content[0:200].encode('unicode_escape').decode('utf-8')
        parts.append(f"* *{doc.metadata['source']}* - {preview}...\n")
    return answer + "\n" + "".join(parts)
# Build the retrieval chain once at startup; reused for every chat request.
retrieval_chain = OpenAIWithChroma()
# ---------------------------------------------------------------------------
# Gradio UI: a markdown header listing the document sources and example
# prompts, plus a chatbot + textbox wired to the retrieval chain above.
# ---------------------------------------------------------------------------
with gr.Blocks(theme=gr.themes.Default(
    primary_hue="blue",
    secondary_hue="yellow"
)) as demo:
    gr.Markdown("""
# Neurons Lab: Generative AI Chatbot through Document Sources
## Document Sources
1. [Generative AI in Finance and Banking: The Current State and Future Implications](https://www.leewayhertz.com/generative-ai-in-finance-and-banking/#Variational-Autoencoders-(VAEs))
2. [McKinsey & Company: The economic potential of generative AI](https://www.mckinsey.com/~/media/mckinsey/business%20functions/mckinsey%20digital/our%20insights/the%20economic%20potential%20of%20generative%20ai%20the%20next%20productivity%20frontier/the-economic-potential-of-generative-ai-the-next-productivity-frontier-vf.pdf)
3. [Deloitte: Generative AI is all the rage](https://www2.deloitte.com/content/dam/Deloitte/us/Documents/deloitte-analytics/us-ai-institute-gen-ai-for-enterprises.pdf)
## Prompt Examples
- Provide Generative AI use cases for financial services. Print in table view with columns: Use Case Name, Description
- Provide Generative AI models that fit for Financial Services. Print in table view with columns: Model Name, Model Description, Areas of Application in Finance.
- Provide real world example on how Generative AI change Financial Services sector.
- What is difference between traditional AI and Generative AI?
- Summarise the economic potential of generative AI
- How does Generative AI change a future of work?
- How Generative AI can personalise customer experience in finance?
""")
    chatbot = gr.Chatbot()
    msg = gr.Textbox()
    clear = gr.ClearButton([msg, chatbot])

    def respond(message, chat_history):
        """Run the user query through the retrieval chain and append the
        Markdown-formatted answer (with sources) to the chat history."""
        result = retrieval_chain({"query": message})
        bot_message = message_construction(result)
        chat_history.append((message, bot_message))
        # Return "" so the textbox is cleared after submission.
        return "", chat_history

    msg.submit(respond, [msg, chatbot], [msg, chatbot])

demo.launch()