Ahmed-14 committed on
Commit
8b63472
1 Parent(s): bc42cf0

Upload app.py

Files changed (1)
  1. app.py +133 -0
app.py ADDED
@@ -0,0 +1,133 @@
+
+ # import logging
+ import os
+ os.environ['OPENAI_API_KEY'] = "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"
+ import openai
+ import json
+
+
+ # create a logger with a file handler
+ # logger = logging.getLogger("chatbot_logger")
+ # handler = logging.FileHandler("chatbot.log")
+ # logger.addHandler(handler)
+ # logger.setLevel(logging.INFO)
+
+ from llama_index import SimpleDirectoryReader, GPTSimpleVectorIndex, LLMPredictor, PromptHelper, ServiceContext, QuestionAnswerPrompt
+ from langchain import OpenAI
+
+
+ # SimpleDirectoryReader reads a local folder; the Data_Gochat directory from
+ # https://huggingface.co/spaces/waelabou/Gochat247Demo is part of this Space's
+ # repo, so it is loaded by its local path rather than by URL.
+ documents = SimpleDirectoryReader('Data_Gochat').load_data()
+
+
+ # Set up the LLM, prompt helper, and service context
+
+ llm_predictor = LLMPredictor(llm=OpenAI(temperature=0, model_name="text-davinci-003"))
+
+ # define prompt helper
+ # set maximum input size
+ max_input_size = 4096
+ # set number of output tokens
+ num_output = 256
+ # set maximum chunk overlap
+ max_chunk_overlap = 20
+ prompt_helper = PromptHelper(max_input_size, num_output, max_chunk_overlap)
+
+ service_context = ServiceContext.from_defaults(llm_predictor=llm_predictor, prompt_helper=prompt_helper)
+
+
+ # Build the vector index over the loaded documents
+ index = GPTSimpleVectorIndex.from_documents(documents, service_context=service_context)
+
+
+ ## Chatbot class: generates responses and keeps the chat history
+ class Chatbot:
+
+     def __init__(self, api_key, index):
+         self.index = index
+         openai.api_key = api_key
+         self.chat_history = []
+
+         QA_PROMPT_TMPL = (
+             "Answer without prefixing the reply with 'Answer:'.\n"
+             "You are in a conversation with a visitor to Gochat247's web site.\n"
+             "The user got into this conversation to learn more about Gochat247.\n"
+             "You will act as Gochat247's virtual AI bot. Be friendly and welcoming.\n"
+             # "you will be friendly and welcoming\n"
+             "The context of the conversation should always be limited to learning more about Gochat247 as a company providing Business Process Outsourcing and AI customer experience solutions.\n"
+             "Below is the previous chat with the user:\n"
+             "---------------------\n"
+             "{context_str}"
+             "\n---------------------\n"
+             "Given the context information and the chat history, and not prior knowledge,\n"
+             "answer the question: {query_str}\n"
+             "It is OK if you do not know the answer; ask for more information instead.\n"
+             "Please provide a brief and concise but friendly response."
+         )
+
+         self.QA_PROMPT = QuestionAnswerPrompt(QA_PROMPT_TMPL)
+
+
+     def generate_response(self, user_input):
+         # fold the last few turns of history plus the new message into the prompt text
+         prompt = "\n".join([f"{message['role']}: {message['content']}" for message in self.chat_history[-5:]])
+         prompt += f"\nUser: {user_input}"
+         self.QA_PROMPT.context_str = prompt
+         response = self.index.query(user_input, text_qa_template=self.QA_PROMPT)
+
+         message = {"role": "assistant", "content": response.response}
+         self.chat_history.append({"role": "user", "content": user_input})
+         self.chat_history.append(message)
+         return message
+
+     def load_chat_history(self, filename):
+         try:
+             with open(filename, 'r') as f:
+                 self.chat_history = json.load(f)
+         except FileNotFoundError:
+             pass
+
+     def save_chat_history(self, filename):
+         with open(filename, 'w') as f:
+             json.dump(self.chat_history, f)
+
+
+ ## Instantiate the chatbot with the OpenAI API key and the vector index
+
+ bot = Chatbot("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", index=index)
+
+
+ import gradio as gr
+ import time
+
+
+ with gr.Blocks() as demo:
+     chatbot = gr.Chatbot(label="GoChat247_Demo")
+     msg = gr.Textbox()
+     clear = gr.Button("Clear")
+
+     def user(user_message, history):
+         # append the user's turn to the chat display and clear the textbox
+         return "", history + [[user_message, None]]
+
+     def agent(history):
+         # answer the latest user message and fill in the assistant's side of the turn
+         last_user_message = history[-1][0]
+         agent_message = bot.generate_response(last_user_message)
+         history[-1][1] = agent_message["content"]
+         time.sleep(1)
+         return history
+
+     msg.submit(user, [msg, chatbot], [msg, chatbot], queue=False).then(
+         agent, chatbot, chatbot
+     )
+     clear.click(lambda: None, None, chatbot, queue=False)
+
+
+ if __name__ == "__main__":
+     demo.launch(share=True)
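
app.py defines load_chat_history() and save_chat_history() on the Chatbot class but never calls them, so the conversation is not persisted across restarts. Below is a minimal sketch of how they could be wired in; it assumes the `bot` instance and the `agent` callback from app.py above, and the chat_history.json filename and the agent_with_persistence helper are made up for illustration, not part of this commit.

# Persistence sketch (illustrative only, not part of the committed app.py).
# Assumes `bot` is the Chatbot instance created in app.py above.
HISTORY_FILE = "chat_history.json"  # hypothetical filename

# Restore any previously saved conversation at startup.
bot.load_chat_history(HISTORY_FILE)

def agent_with_persistence(history):
    # Same flow as agent() above, but saves the chat history after each reply.
    last_user_message = history[-1][0]
    agent_message = bot.generate_response(last_user_message)
    history[-1][1] = agent_message["content"]
    bot.save_chat_history(HISTORY_FILE)
    return history

# Usage: pass agent_with_persistence instead of agent to .then() in the Blocks setup.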