Spaces:
Paused
Paused
# Standard library
import json
import os

# Third-party
import openai
from langchain import OpenAI
from llama_index import (
    GPTSimpleVectorIndex,
    LLMPredictor,
    PromptHelper,
    QuestionAnswerPrompt,
    ServiceContext,
    SimpleDirectoryReader,
)

# SECURITY: never hard-code an API key in source — a committed key is leaked
# and must be revoked. The key is expected in the OPENAI_API_KEY environment
# variable (e.g. configured as a platform secret); fail fast if it is absent.
if "OPENAI_API_KEY" not in os.environ:
    raise RuntimeError("Set the OPENAI_API_KEY environment variable before running.")
# Load the knowledge-base documents. SimpleDirectoryReader reads files from a
# LOCAL directory — a remote URL is not a valid path. Inside the hosted Space
# the repository's Data_Gochat folder is present locally, so point at that.
documents = SimpleDirectoryReader('Data_Gochat').load_data()

# LLM used to synthesize answers from retrieved document chunks.
llm_predictor = LLMPredictor(llm=OpenAI(temperature=0, model_name="text-davinci-003"))

# Prompt sizing: model context window, tokens reserved for the completion,
# and the token overlap between consecutive document chunks.
MAX_INPUT_SIZE = 4096
NUM_OUTPUT = 256
MAX_CHUNK_OVERLAP = 20
prompt_helper = PromptHelper(MAX_INPUT_SIZE, NUM_OUTPUT, MAX_CHUNK_OVERLAP)

service_context = ServiceContext.from_defaults(
    llm_predictor=llm_predictor, prompt_helper=prompt_helper
)

# In-memory vector index the chatbot queries at runtime.
index = GPTSimpleVectorIndex.from_documents(documents, service_context=service_context)
## Define Chat BOT Class to generate Response , handle chat history, | |
class Chatbot:
    """Wrapper around a llama_index vector index that keeps a rolling chat
    history and answers website-visitor questions about Gochat247."""

    def __init__(self, api_key, index):
        """Store the query index and configure the OpenAI key.

        api_key: OpenAI secret key (assigned onto the global openai module).
        index:   vector index built over the company documents.
        """
        self.index = index
        openai.api_key = api_key
        # List of {"role": ..., "content": ...} dicts, oldest first.
        self.chat_history = []
        QA_PROMPT_TMPL = (
            "Answer without 'Answer:' word please."
            "you are in a converation with Gochat247's web site visitor\n"
            "user got into this conversation to learn more about Gochat247"
            "you will act like Gochat247 Virtual AI BOT. Be friendy and welcoming\n"
            "The Context of the conversstion should be always limited to learing more about Gochat247 as a company providing Business Process Outosuricng and AI Customer expeeince soltuion /n"
            "The below is the previous chat with the user\n"
            "---------------------\n"
            "{context_str}"
            "\n---------------------\n"
            "Given the context information and the chat history, and not prior knowledge\n"
            "\nanswer the question : {query_str}\n"
            "\n it is ok if you don not know the answer. and ask for infomration \n"
            "Please provide a brief and concise but friendly response."
        )
        self.QA_PROMPT = QuestionAnswerPrompt(QA_PROMPT_TMPL)

    def generate_response(self, user_input):
        """Query the index with the user's message, record both sides of the
        exchange in chat_history, and return the assistant message dict."""
        # Fold the last few turns into a transcript for the prompt.
        prompt = "\n".join(
            f"{message['role']}: {message['content']}"
            for message in self.chat_history[-5:]
        )
        prompt += f"\nUser: {user_input}"
        # NOTE(review): llama_index normally fills {context_str} from retrieved
        # nodes at query time; setting an attribute on the template may have no
        # effect — confirm the history actually reaches the LLM.
        self.QA_PROMPT.context_str = prompt
        # BUGFIX: query this bot's own index (self.index) instead of silently
        # relying on the module-level global `index`.
        response = self.index.query(user_input, text_qa_template=self.QA_PROMPT)
        message = {"role": "assistant", "content": response.response}
        self.chat_history.append({"role": "user", "content": user_input})
        self.chat_history.append(message)
        return message

    def load_chat_history(self, filename):
        """Replace chat_history with the JSON list stored in *filename*;
        keep the current history unchanged if the file does not exist."""
        try:
            with open(filename, 'r') as f:
                self.chat_history = json.load(f)
        except FileNotFoundError:
            pass

    def save_chat_history(self, filename):
        """Persist chat_history to *filename* as JSON."""
        with open(filename, 'w') as f:
            json.dump(self.chat_history, f)
## Instantiate the shared chatbot the web UI delegates to.
# SECURITY: the key is read from the environment instead of being hard-coded;
# a key committed to source control is leaked and must be revoked.
bot = Chatbot(os.environ["OPENAI_API_KEY"], index=index)

import gradio as gr
import time
# Gradio chat UI: a chat pane, a textbox, and a clear button wired to the bot.
with gr.Blocks() as demo:
    chatbot = gr.Chatbot(label="GoChat247_Demo")
    msg = gr.Textbox()
    clear = gr.Button("Clear")

    def _queue_user_turn(user_message, history):
        """Append the visitor's message with the answer pending, clear the box."""
        return "", history + [[user_message, None]]

    def _answer_last_turn(history):
        """Fill in the bot's reply for the most recent visitor message."""
        pending_question = history[-1][0]
        reply = bot.generate_response(pending_question)
        history[-1][1] = reply["content"]
        time.sleep(1)  # small pause so the reply does not appear instantaneous
        return history

    msg.submit(
        _queue_user_turn, [msg, chatbot], [msg, chatbot], queue=False
    ).then(_answer_last_turn, chatbot, chatbot)
    clear.click(lambda: None, None, chatbot, queue=False)

if __name__ == "__main__":
    demo.launch()