# chatbot/app.py
# Gradio customer-support chatbot for Modelwise, backed by a DeepLake
# vector store and OpenAI models via LangChain.
import os

import gradio as gr
from langchain import OpenAI, PromptTemplate
from langchain.chat_models import ChatOpenAI
from langchain.embeddings.openai import OpenAIEmbeddings
from langchain.vectorstores import DeepLake

# SECURITY: never hardcode credentials in source control. The OpenAI API key
# and Activeloop token that were previously embedded here are compromised the
# moment they are committed and must be rotated. Supply them via the runtime
# environment (e.g. Hugging Face Spaces secrets) instead.
for _required in ("OPENAI_API_KEY", "ACTIVELOOP_TOKEN"):
    if not os.environ.get(_required):
        raise RuntimeError(
            f"Missing required environment variable: {_required}. "
            "Set it before launching the app."
        )
# Lazily-built, process-wide retrieval resources. Building the embeddings
# client and opening the DeepLake dataset is expensive; doing it once instead
# of on every chat turn keeps response latency down.
_RESOURCES = {}


def _get_db():
    """Return the (cached) read-only DeepLake vector store for the KB."""
    if "db" not in _RESOURCES:
        embeddings = OpenAIEmbeddings(model="text-embedding-ada-002")
        org_id = "utkarshtiwari"
        dataset_name = "modelwise-dataset"
        dataset_path = f"hub://{org_id}/{dataset_name}"
        _RESOURCES["db"] = DeepLake(
            dataset_path=dataset_path,
            read_only=True,
            embedding_function=embeddings,
        )
    return _RESOURCES["db"]


def predict(query, history):
    """Answer a customer question using retrieval-augmented generation.

    Args:
        query: The customer's question (str).
        history: Prior chat turns supplied by gr.ChatInterface; unused here
            because each answer is grounded only in retrieved context.

    Returns:
        The model's answer as a string.
    """
    template = """You are an exceptional customer support chatbot for the company Modelwise that gently answers questions related to the company.
You know the following context information.
{chunks_formatted}
Answer the following question from a customer. Use only information from the context. Do not provide wrong answers and do not make up any answers.
If you don't know the answer, say you don't know the answer and request the customer to contact Arnold and provide his contact details.
If you know the answer, don't ask the customer to contact Arnold unless the customer specifically asks for someone's contact details.
Question: {query}
Answer:"""
    prompt = PromptTemplate(
        input_variables=["chunks_formatted", "query"],
        template=template,
    )

    # Retrieve the most relevant knowledge-base chunks for this query.
    docs = _get_db().similarity_search(query)
    chunks_formatted = "\n\n".join(doc.page_content for doc in docs)

    prompt_formatted = prompt.format(chunks_formatted=chunks_formatted, query=query)

    # Generate the answer with a deterministic (temperature=0) completion.
    llm = OpenAI(model="text-davinci-003", temperature=0)
    answer = llm(prompt_formatted)
    print(answer)
    return answer
# Only launch the web UI when run as a script, so the module can be imported
# (e.g. for testing) without starting a server.
if __name__ == "__main__":
    gr.ChatInterface(predict).launch()