Ahmed-14 committed
Commit 894a65b
1 Parent(s): 084810c

Update app.py

Files changed (1)
  1. app.py +102 -97
app.py CHANGED
@@ -1,139 +1,144 @@
 
  import os
- OPENAI_API_KEY = os.environ['Open_AI_Key']
- HF_Key = os.environ['HF_Key']
  import openai
  import json



- from llama_index import GPTSimpleVectorIndex, LLMPredictor, PromptHelper, ServiceContext, QuestionAnswerPrompt
- from langchain import OpenAI


- # handling data on space

- from huggingface_hub import HfFileSystem
- fs = HfFileSystem(token=HF_Key)

- text_list = fs.ls("datasets/GoChat/Gochat247_Data/Data", detail=False)

- data = fs.read_text(text_list[0])

- from llama_index import Document
- doc = Document(data)
- docs = []
- docs.append(doc)


- # define LLM
- llm_predictor = LLMPredictor(llm=OpenAI(temperature=0, model_name="text-davinci-003"))

- # define prompt helper
- # set maximum input size
- max_input_size = 4096
- # set number of output tokens
- num_output = 256
- # set maximum chunk overlap
- max_chunk_overlap = 20
- prompt_helper = PromptHelper(max_input_size, num_output, max_chunk_overlap)

- service_context = ServiceContext.from_defaults(llm_predictor=llm_predictor, prompt_helper=prompt_helper)

- index = GPTSimpleVectorIndex.from_documents(docs)


- ## Define Chat BOT Class to generate Response , handle chat history,
- class Chatbot:

-     def __init__(self, index):
-         self.index = index
-         openai.api_key = OPENAI_API_KEY
-         self.chat_history = []

-         QA_PROMPT_TMPL = (
-             "Answer without 'Answer:' word."
-             "you are in a converation with Gochat247's web site visitor\n"
-             "user got into this conversation to learn more about Gochat247"
-             "you will act like Gochat247 Virtual AI BOT. Be friendy and welcoming\n"
-             "you will be friendy and welcoming\n"
-             "The Context of the conversstion should be always limited to learing more about Gochat247 as a company providing Business Process Outosuricng and AI Customer expeeince soltuion /n"
-             "The below is the previous chat with the user\n"
-             "---------------------\n"
-             "{context_str}"
-             "\n---------------------\n"
-             "Given the context information and the chat history, and not prior knowledge\n"
-             "\nanswer the question : {query_str}\n"
-             "\n it is ok if you don not know the answer. and ask for infomration \n"
-             "Please provide a brief and concise but friendly response.")

-         self.QA_PROMPT = QuestionAnswerPrompt(QA_PROMPT_TMPL)


-     def generate_response(self, user_input):

-         prompt = "\n".join([f"{message['role']}: {message['content']}" for message in self.chat_history[-5:]])
-         prompt += f"\nUser: {user_input}"
-         self.QA_PROMPT.context_str = prompt
-         response = index.query(user_input, text_qa_template=self.QA_PROMPT)
-
-         message = {"role": "assistant", "content": response.response}
-         self.chat_history.append({"role": "user", "content": user_input})
-         self.chat_history.append(message)
-         return message

-     def load_chat_history(self, filename):
-         try:
-             with open(filename, 'r') as f:
-                 self.chat_history = json.load(f)
-         except FileNotFoundError:
-             pass
-
-     def save_chat_history(self, filename):
-         with open(filename, 'w') as f:
-             json.dump(self.chat_history, f)

- ## Define Chat BOT Class to generate Response , handle chat history,

- bot = Chatbot(index=index)

- import webbrowser

- import gradio as gr
- import time

- with gr.Blocks(theme='SebastianBravo/simci_css') as demo:
-     with gr.Column(scale=4):
-         title = 'GoChat247 AI BOT'
-         chatbot = gr.Chatbot(label='GoChat247 AI BOT')
-         msg = gr.Textbox()
-         clear = gr.Button("Clear")


-     def user(user_message, history):
-         return "", history + [[user_message, None]]

-     def agent(history):
-         last_user_message = history[-1][0]
-         agent_message = bot.generate_response(last_user_message)
-         history[-1][1] = agent_message["content"]
-         time.sleep(1)
-         return history

-     msg.submit(user, [msg, chatbot], [msg, chatbot], queue=False).then(agent, chatbot, chatbot)
-     clear.click(lambda: None, None, chatbot, queue=False)
-     print(webbrowser.get())

- # handling dark_theme



- # def apply_dark_theme(url):
- #     if not url.endswith('?__theme=dark'):
- #         webbrowser.open_new(url + '?__theme=dark')

- # gradioURL = 'http://localhost:7860/'
- # apply_dark_theme(gradioURL)

- if __name__ == "__main__":
-     demo.launch()
 
  import os
+
+ # OPENAI_API_KEY = os.environ['Open_AI_Key']
+ # HF_Key = os.environ['HF_Key']
+
+ print('OPENAI_API_KEY' in os.environ)
+ print('HF_Key' in os.environ)
+
  import openai
  import json



+ # from llama_index import GPTSimpleVectorIndex, LLMPredictor, PromptHelper, ServiceContext, QuestionAnswerPrompt
+ # from langchain import OpenAI


+ # # handling data on space

+ # from huggingface_hub import HfFileSystem
+ # fs = HfFileSystem(token=HF_Key)

+ # text_list = fs.ls("datasets/GoChat/Gochat247_Data/Data", detail=False)

+ # data = fs.read_text(text_list[0])

+ # from llama_index import Document
+ # doc = Document(data)
+ # docs = []
+ # docs.append(doc)


+ # # define LLM
+ # llm_predictor = LLMPredictor(llm=OpenAI(temperature=0, model_name="text-davinci-003"))

+ # # define prompt helper
+ # # set maximum input size
+ # max_input_size = 4096
+ # # set number of output tokens
+ # num_output = 256
+ # # set maximum chunk overlap
+ # max_chunk_overlap = 20
+ # prompt_helper = PromptHelper(max_input_size, num_output, max_chunk_overlap)

+ # service_context = ServiceContext.from_defaults(llm_predictor=llm_predictor, prompt_helper=prompt_helper)

+ # index = GPTSimpleVectorIndex.from_documents(docs)


+ # ## Define Chat BOT Class to generate Response , handle chat history,
+ # class Chatbot:

+ #     def __init__(self, index):
+ #         self.index = index
+ #         openai.api_key = OPENAI_API_KEY
+ #         self.chat_history = []

+ #         QA_PROMPT_TMPL = (
+ #             "Answer without 'Answer:' word."
+ #             "you are in a converation with Gochat247's web site visitor\n"
+ #             "user got into this conversation to learn more about Gochat247"
+ #             "you will act like Gochat247 Virtual AI BOT. Be friendy and welcoming\n"
+ #             "you will be friendy and welcoming\n"
+ #             "The Context of the conversstion should be always limited to learing more about Gochat247 as a company providing Business Process Outosuricng and AI Customer expeeince soltuion /n"
+ #             "The below is the previous chat with the user\n"
+ #             "---------------------\n"
+ #             "{context_str}"
+ #             "\n---------------------\n"
+ #             "Given the context information and the chat history, and not prior knowledge\n"
+ #             "\nanswer the question : {query_str}\n"
+ #             "\n it is ok if you don not know the answer. and ask for infomration \n"
+ #             "Please provide a brief and concise but friendly response.")

+ #         self.QA_PROMPT = QuestionAnswerPrompt(QA_PROMPT_TMPL)


+ #     def generate_response(self, user_input):

+ #         prompt = "\n".join([f"{message['role']}: {message['content']}" for message in self.chat_history[-5:]])
+ #         prompt += f"\nUser: {user_input}"
+ #         self.QA_PROMPT.context_str = prompt
+ #         response = index.query(user_input, text_qa_template=self.QA_PROMPT)
+
+ #         message = {"role": "assistant", "content": response.response}
+ #         self.chat_history.append({"role": "user", "content": user_input})
+ #         self.chat_history.append(message)
+ #         return message

+ #     def load_chat_history(self, filename):
+ #         try:
+ #             with open(filename, 'r') as f:
+ #                 self.chat_history = json.load(f)
+ #         except FileNotFoundError:
+ #             pass
+
+ #     def save_chat_history(self, filename):
+ #         with open(filename, 'w') as f:
+ #             json.dump(self.chat_history, f)

+ # ## Define Chat BOT Class to generate Response , handle chat history,

+ # bot = Chatbot(index=index)

+ # import webbrowser

+ # import gradio as gr
+ # import time

+ # with gr.Blocks(theme='SebastianBravo/simci_css') as demo:
+ #     with gr.Column(scale=4):
+ #         title = 'GoChat247 AI BOT'
+ #         chatbot = gr.Chatbot(label='GoChat247 AI BOT')
+ #         msg = gr.Textbox()
+ #         clear = gr.Button("Clear")


+ #     def user(user_message, history):
+ #         return "", history + [[user_message, None]]

+ #     def agent(history):
+ #         last_user_message = history[-1][0]
+ #         agent_message = bot.generate_response(last_user_message)
+ #         history[-1][1] = agent_message["content"]
+ #         time.sleep(1)
+ #         return history

+ #     msg.submit(user, [msg, chatbot], [msg, chatbot], queue=False).then(agent, chatbot, chatbot)
+ #     clear.click(lambda: None, None, chatbot, queue=False)
+ #     print(webbrowser.get())

+ # # handling dark_theme



+ # # def apply_dark_theme(url):
+ # #     if not url.endswith('?__theme=dark'):
+ # #         webbrowser.open_new(url + '?__theme=dark')

+ # # gradioURL = 'http://localhost:7860/'
+ # # apply_dark_theme(gradioURL)

+ # if __name__ == "__main__":
+ #     demo.launch()
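
This commit comments out the entire app and replaces the direct os.environ['Open_AI_Key'] / os.environ['HF_Key'] lookups with two print(... in os.environ) checks, apparently to confirm whether the Space secrets are configured at all (a direct os.environ[...] access raises KeyError when the variable is missing). A minimal sketch of a more defensive way to read the same secrets, assuming the variable names used in this file and a hypothetical get_secret helper:

import os

def get_secret(name: str) -> str:
    # Hypothetical helper: fetch a required secret and fail with a clear
    # message instead of an unhandled KeyError when it is not configured.
    value = os.environ.get(name)
    if not value:
        raise RuntimeError(f"Environment variable '{name}' is not set")
    return value

OPENAI_API_KEY = get_secret('Open_AI_Key')
HF_Key = get_secret('HF_Key')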