Ahmed-14 committed on
Commit
bbf9a23
1 Parent(s): bc4e3db

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +97 -103
app.py CHANGED
@@ -1,147 +1,141 @@
1
 
2
  import os
3
 
4
- # OPENAI_API_KEY = os.environ['Open_AI_Key']
5
- # HF_Key = os.environ['HF_Key']
6
-
7
- print('OPENAI_API_KEY' in os.environ)
8
- print('HF_Key' in os.environ)
9
-
10
- print(os.environ['OPENAI_API_KEY'])
11
- print(os.environ['HF_Key'])
12
 
13
  import openai
14
  import json
15
 
16
 
17
 
18
- # from llama_index import GPTSimpleVectorIndex, LLMPredictor, PromptHelper, ServiceContext, QuestionAnswerPrompt
19
- # from langchain import OpenAI
20
 
21
 
22
- # # handling data on space
23
 
24
- # from huggingface_hub import HfFileSystem
25
- # fs = HfFileSystem(token=HF_Key)
26
 
27
- # text_list = fs.ls("datasets/GoChat/Gochat247_Data/Data", detail=False)
28
 
29
- # data = fs.read_text(text_list[0])
30
 
31
- # from llama_index import Document
32
- # doc = Document(data)
33
- # docs = []
34
- # docs.append(doc)
35
 
36
 
37
- # # define LLM
38
- # llm_predictor = LLMPredictor(llm=OpenAI(temperature=0, model_name="text-davinci-003"))
39
 
40
- # # define prompt helper
41
- # # set maximum input size
42
- # max_input_size = 4096
43
- # # set number of output tokens
44
- # num_output = 256
45
- # # set maximum chunk overlap
46
- # max_chunk_overlap = 20
47
- # prompt_helper = PromptHelper(max_input_size, num_output, max_chunk_overlap)
48
 
49
- # service_context = ServiceContext.from_defaults(llm_predictor=llm_predictor, prompt_helper=prompt_helper)
50
 
51
- # index = GPTSimpleVectorIndex.from_documents(docs)
52
 
53
 
54
- # ## Define Chat BOT Class to generate Response , handle chat history,
55
- # class Chatbot:
56
 
57
- # def __init__(self, index):
58
- # self.index = index
59
- # openai.api_key = OPENAI_API_KEY
60
- # self.chat_history = []
61
 
62
- # QA_PROMPT_TMPL = (
63
- # "Answer without 'Answer:' word."
64
- # "you are in a converation with Gochat247's web site visitor\n"
65
- # "user got into this conversation to learn more about Gochat247"
66
- # "you will act like Gochat247 Virtual AI BOT. Be friendy and welcoming\n"
67
- # "you will be friendy and welcoming\n"
68
- # "The Context of the conversstion should be always limited to learing more about Gochat247 as a company providing Business Process Outosuricng and AI Customer expeeince soltuion /n"
69
- # "The below is the previous chat with the user\n"
70
- # "---------------------\n"
71
- # "{context_str}"
72
- # "\n---------------------\n"
73
- # "Given the context information and the chat history, and not prior knowledge\n"
74
- # "\nanswer the question : {query_str}\n"
75
- # "\n it is ok if you don not know the answer. and ask for infomration \n"
76
- # "Please provide a brief and concise but friendly response.")
77
 
78
- # self.QA_PROMPT = QuestionAnswerPrompt(QA_PROMPT_TMPL)
79
 
80
 
81
- # def generate_response(self, user_input):
82
 
83
- # prompt = "\n".join([f"{message['role']}: {message['content']}" for message in self.chat_history[-5:]])
84
- # prompt += f"\nUser: {user_input}"
85
- # self.QA_PROMPT.context_str = prompt
86
- # response = index.query(user_input, text_qa_template=self.QA_PROMPT)
87
-
88
- # message = {"role": "assistant", "content": response.response}
89
- # self.chat_history.append({"role": "user", "content": user_input})
90
- # self.chat_history.append(message)
91
- # return message
92
 
93
- # def load_chat_history(self, filename):
94
- # try:
95
- # with open(filename, 'r') as f:
96
- # self.chat_history = json.load(f)
97
- # except FileNotFoundError:
98
- # pass
99
-
100
- # def save_chat_history(self, filename):
101
- # with open(filename, 'w') as f:
102
- # json.dump(self.chat_history, f)
103
 
104
- # ## Define Chat BOT Class to generate Response , handle chat history,
105
 
106
- # bot = Chatbot(index=index)
107
 
108
- # import webbrowser
109
 
110
- # import gradio as gr
111
- # import time
112
 
113
- # with gr.Blocks(theme='SebastianBravo/simci_css') as demo:
114
- # with gr.Column(scale=4):
115
- # title = 'GoChat247 AI BOT'
116
- # chatbot = gr.Chatbot(label='GoChat247 AI BOT')
117
- # msg = gr.Textbox()
118
- # clear = gr.Button("Clear")
119
 
120
 
121
- # def user(user_message, history):
122
- # return "", history + [[user_message, None]]
123
 
124
- # def agent(history):
125
- # last_user_message = history[-1][0]
126
- # agent_message = bot.generate_response(last_user_message)
127
- # history[-1][1] = agent_message ["content"]
128
- # time.sleep(1)
129
- # return history
130
 
131
- # msg.submit(user, [msg, chatbot], [msg, chatbot], queue=False).then(agent, chatbot, chatbot)
132
- # clear.click(lambda: None, None, chatbot, queue=False)
133
- # print(webbrowser.get())
134
 
135
- # # handling dark_theme
136
 
137
 
138
 
139
- # # def apply_dark_theme(url):
140
- # # if not url.endswith('?__theme=dark'):
141
- # # webbrowser.open_new(url + '?__theme=dark')
142
 
143
- # # gradioURL = 'http://localhost:7860/'
144
- # # apply_dark_theme(gradioURL)
145
 
146
- # if __name__ == "__main__":
147
- # demo.launch()
 
1
 
2
import os

# Required credentials, read from the Space's environment at startup.
# A missing variable raises KeyError immediately, so misconfiguration
# is caught before any model or UI work begins.
open_ai_key = os.environ["OPENAI_API_KEY"]
hf_key = os.environ["HF_KEY"]
 
 
 
 
 
 
6
 
7
  import openai
8
  import json
9
 
10
 
11
 
12
+ from llama_index import GPTSimpleVectorIndex, LLMPredictor, PromptHelper, ServiceContext, QuestionAnswerPrompt
13
+ from langchain import OpenAI
14
 
15
 
16
# handling data on space: pull the knowledge-base text from the private
# Hugging Face dataset repo using the token read above.
from huggingface_hub import HfFileSystem
from llama_index import Document

fs = HfFileSystem(token=hf_key)

# Only the first file found in the data folder is used as the bot's context.
text_list = fs.ls("datasets/GoChat/Gochat247_Data/Data", detail=False)
data = fs.read_text(text_list[0])

# Wrap the raw text in a single llama_index Document for indexing.
docs = [Document(data)]
29
 
30
 
31
# define LLM used for response synthesis.
llm_predictor = LLMPredictor(llm=OpenAI(temperature=0, model_name="text-davinci-003"))

# define prompt helper
# set maximum input size (model context window, in tokens)
max_input_size = 4096
# set number of output tokens reserved for the completion
num_output = 256
# set maximum chunk overlap between adjacent text chunks
max_chunk_overlap = 20
prompt_helper = PromptHelper(max_input_size, num_output, max_chunk_overlap)

service_context = ServiceContext.from_defaults(llm_predictor=llm_predictor, prompt_helper=prompt_helper)

# BUG FIX: the service context above was built but never handed to the index,
# so the custom LLM and prompt-helper settings were silently ignored and the
# library defaults were used instead. Pass it explicitly.
index = GPTSimpleVectorIndex.from_documents(docs, service_context=service_context)
46
 
47
 
48
## Define Chat BOT Class to generate Response , handle chat history,
class Chatbot:
    """Chat wrapper around a llama_index vector index.

    Holds the conversation history, builds the question-answer prompt with
    recent context, and persists/restores the history as JSON.
    """

    def __init__(self, index):
        # Index queried for every response; also sets the OpenAI key globally.
        self.index = index
        openai.api_key = open_ai_key
        self.chat_history = []

        QA_PROMPT_TMPL = (
            "Answer without 'Answer:' word."
            "you are in a converation with Gochat247's web site visitor\n"
            "user got into this conversation to learn more about Gochat247"
            "you will act like Gochat247 Virtual AI BOT. Be friendy and welcoming\n"
            "you will be friendy and welcoming\n"
            "The Context of the conversstion should be always limited to learing more about Gochat247 as a company providing Business Process Outosuricng and AI Customer expeeince soltuion /n"
            "The below is the previous chat with the user\n"
            "---------------------\n"
            "{context_str}"
            "\n---------------------\n"
            "Given the context information and the chat history, and not prior knowledge\n"
            "\nanswer the question : {query_str}\n"
            "\n it is ok if you don not know the answer. and ask for infomration \n"
            "Please provide a brief and concise but friendly response.")

        self.QA_PROMPT = QuestionAnswerPrompt(QA_PROMPT_TMPL)

    def generate_response(self, user_input):
        """Answer *user_input* via the index and record both turns in history.

        Returns the assistant message as ``{"role": ..., "content": ...}``.
        """
        # Include at most the last five messages as conversational context.
        prompt = "\n".join(f"{message['role']}: {message['content']}" for message in self.chat_history[-5:])
        prompt += f"\nUser: {user_input}"
        # NOTE(review): assigning context_str on the prompt object likely has no
        # effect — the template's {context_str} is filled by the query call.
        # Preserved as-is; confirm against the llama_index prompt API.
        self.QA_PROMPT.context_str = prompt
        # BUG FIX: query the index held by this instance instead of reaching
        # for the module-level `index` global, which only worked by accident.
        response = self.index.query(user_input, text_qa_template=self.QA_PROMPT)

        message = {"role": "assistant", "content": response.response}
        self.chat_history.append({"role": "user", "content": user_input})
        self.chat_history.append(message)
        return message

    def load_chat_history(self, filename):
        """Restore chat history from a JSON file; missing file is a no-op."""
        try:
            with open(filename, 'r') as f:
                self.chat_history = json.load(f)
        except FileNotFoundError:
            pass

    def save_chat_history(self, filename):
        """Write the current chat history to *filename* as JSON."""
        with open(filename, 'w') as f:
            json.dump(self.chat_history, f)
97
 
98
+ ## Define Chat BOT Class to generate Response , handle chat history,
99
 
100
# Modules needed by the web UI below.
import webbrowser
import time

import gradio as gr

# One shared Chatbot instance serves every visitor session.
bot = Chatbot(index=index)
106
 
107
# -- Gradio chat UI --
with gr.Blocks(theme='SebastianBravo/simci_css') as demo:
    with gr.Column(scale=4):
        title = 'GoChat247 AI BOT'
        chatbot = gr.Chatbot(label='GoChat247 AI BOT')
        msg = gr.Textbox()
        clear = gr.Button("Clear")

    def user(user_message, history):
        # Clear the textbox and append the user's turn with the bot
        # reply still pending (None).
        return "", history + [[user_message, None]]

    def agent(history):
        # Answer the most recent user turn and fill in the pending slot.
        last_user_message = history[-1][0]
        agent_message = bot.generate_response(last_user_message)
        history[-1][1] = agent_message["content"]
        time.sleep(1)
        return history

    # Submit first records the user turn, then chains into the bot reply.
    msg.submit(user, [msg, chatbot], [msg, chatbot], queue=False).then(agent, chatbot, chatbot)
    clear.click(lambda: None, None, chatbot, queue=False)

print(webbrowser.get())
128
 
129
+ # handling dark_theme
130
 
131
 
132
 
133
+ # def apply_dark_theme(url):
134
+ # if not url.endswith('?__theme=dark'):
135
+ # webbrowser.open_new(url + '?__theme=dark')
136
 
137
+ # gradioURL = 'http://localhost:7860/'
138
+ # apply_dark_theme(gradioURL)
139
 
140
if __name__ == "__main__":
    # Start the Gradio server when executed as a script.
    demo.launch()