oliverwang15 committed
Commit: 753b62e
Parent: 863cd0c

init

Files changed:
- app.py (+63, -0)
- backend.py (+148, -0)
- requirements.txt (+1, -0)
app.py
ADDED
@@ -0,0 +1,63 @@
import gradio as gr

# Backend (defined in backend.py) wraps the OpenAI Assistants API calls
from backend import Backend

with gr.Blocks() as demo:
    backend = Backend()
    with gr.Row():
        gr.Markdown(f'<center> <h1> <b> DAN_PDF_CHAT </b> </h1> </center>')
    with gr.Row():
        with gr.Column(scale=0.5):
            with gr.Group():
                gr.Markdown(f'<center> <h3> <b> Setup for the Agent </b> </h3> </center>')
                openai_key = gr.Textbox(
                    label='Enter your OpenAI API key here',
                    type='password')
                assistant_id = gr.Textbox(
                    label='Enter the OpenAI assistant ID here, or you can use the default one',
                    value='asst_FXsUUX2RacJ5GxEs6sCXL7nY',
                    type='password',
                )
            with gr.Group():
                gr.Markdown(f'<center> <h3> <b> Setup for the User </b> </h3> </center>')
                file = gr.File(label='Upload your .txt or .pdf file here', file_types=['.txt', '.pdf'], file_count='single')
                btn_submit_txt_online = gr.Button(value='Submit passage')

        with gr.Column(scale=1):
            # with gr.Group():
            chatbot = gr.Chatbot(show_copy_button=True)
            question_box = gr.Textbox(label='Enter your question here',
                                      placeholder='What is the animal mentioned in this passage?',
                                      value='What is the animal mentioned in this passage?'
                                      )
            with gr.Row():
                btn_submit_question_txt = gr.Button(value='Submit')
                btn_reset_question_txt = gr.Button(value='Reset')
                btn_show_html = gr.Button(value='Show reference')
                btn_hide_html = gr.Button(value='Hide reference')

    with gr.Row():
        html = gr.HTML(visible=False, label='HTML', value='<h1> References would be shown HERE.</h1>')

    # Upload the passage and initialise the assistant, file, and thread
    btn_submit_txt_online.click(
        fn=backend.submit_passage,
        inputs=[openai_key, assistant_id, file],
    )

    # Ask a question; the backend returns the updated chat history and the reference HTML
    btn_submit_question_txt.click(
        fn=backend.submit_question,
        inputs=[question_box],
        # outputs = [chatbot],
        outputs=[chatbot, html],
    )

    # Show or hide the reference panel
    btn_show_html.click(
        fn=lambda: gr.update(visible=True),
        outputs=html,
    )
    btn_hide_html.click(
        fn=lambda: gr.update(visible=False),
        outputs=html,
    )

demo.queue()
demo.launch(show_error=True)
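
The event wiring above follows Gradio's click-handler contract: the callback's positional parameters are filled from inputs in order, and its return values are routed to outputs in order, which is why backend.submit_question returns both the chat history and the reference HTML. A minimal, self-contained sketch of that contract, using hypothetical answer(), box, chat, and ref names that are not part of this Space:

import gradio as gr

def answer(question):
    # One (user, assistant) pair per chat turn, plus HTML for a reference panel
    history = [[question, "stub answer"]]
    reference_html = "<p>stub reference</p>"
    return history, reference_html

with gr.Blocks() as sketch:
    box = gr.Textbox(label="Question")
    chat = gr.Chatbot()
    ref = gr.HTML(visible=False)
    gr.Button("Submit").click(fn=answer, inputs=[box], outputs=[chat, ref])

sketch.queue()
sketch.launch()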
backend.py
ADDED
@@ -0,0 +1,148 @@
import time
import json

import gradio as gr
from openai import OpenAI


class Backend:
    def __init__(self):
        # Appended to every question so the assistant answers in a parseable format
        self.return_instruction = """ Please only return in the following JSON format:
        {{
            "Answer": "",
            "Reference Sentences": [""]
        }}"""

        self.chat_history = []

    def load_agent(self, openai_api_key, assistant_id):
        client = OpenAI(api_key=openai_api_key)
        assistant = client.beta.assistants.retrieve(assistant_id=assistant_id)
        return client, assistant

    def update_file(self, file_path):
        # Upload the local file to OpenAI so the assistant can read it
        with open(file_path, 'rb') as f:
            file = self.client.files.create(file=f, purpose='assistants')
        return file

    def create_thread(self):
        thread = self.client.beta.threads.create()
        return thread

    def delete_thread(self, thread):
        self.client.beta.threads.delete(thread.id)

    def create_message(self, question, thread, file):
        message = self.client.beta.threads.messages.create(
            thread_id=thread.id,
            role="user",
            content=question + self.return_instruction,
            file_ids=[file.id]
        )
        return message

    def delete_message(self, message):
        self.client.beta.threads.messages.delete(message.id)

    def create_run(self, thread, assistant):
        run = self.client.beta.threads.runs.create(
            thread_id=thread.id,
            assistant_id=assistant.id,
            # instructions="""Please read the PDF and answer the questions asked by users with professional knowledge."""
        )
        return run

    def delete_run(self, run):
        self.client.beta.threads.runs.delete(run.id)

    def get_messages(self, thread):
        messages = self.client.beta.threads.messages.list(
            thread_id=thread.id
        )
        return messages

    def parse_message(self, question, messages):
        # The newest message comes first in the list; take its text content
        mess = json.loads(messages.json())
        output = mess['data'][0]['content'][0]['text']['value']
        print(output)
        raw_output = output
        try:
            # Keep only the text between the first "{" and the last "}",
            # then evaluate it as a dict (assumes the assistant followed return_instruction)
            output = output.split("{")[1:]
            output = "{" + "".join(output)
            output = output.split("}")[:-1]
            output = "".join(output) + "}"
            print(output)
            output = eval(output)
            answer = output['Answer']
            reference = output['Reference Sentences']
        except Exception:
            # Parsing failed: remove the message and fall back to the raw reply
            self.delete_message(self.message)
            answer = raw_output
            reference = []
        reference = ' '.join(reference)
        reference = self.processing_html(reference)
        self.chat_history.append([question, answer])

        return self.chat_history, reference

    def parse_message_raw(self, question, messages):
        mess = json.loads(messages.json())
        output = mess['data'][0]['content'][0]['text']['value']

        self.chat_history.append([question, output])

        return self.chat_history

    def processing_html(self, text):
        return f'<center><p> {text} </p></center>'

    def submit_passage(self, openai_key, assistant_id, file):
        # Connect to OpenAI and retrieve the assistant
        self.client, self.assistant = self.load_agent(openai_key, assistant_id)

        # Upload the file
        self.file = self.update_file(file.name)

        # Create a new conversation
        self.thread = self.create_thread()

        gr.Info("Upload successful. You can now chat with the assistant. Enjoy!")

    def submit_question(self, question):
        # print(question)
        # print(self.thread.id)
        # print(self.file.id)
        # Create a new message
        self.message = self.create_message(question, self.thread, self.file)

        # Create a new run
        run = self.create_run(self.thread, self.assistant)

        # Wait for the run to complete
        while True:
            run = self.client.beta.threads.runs.retrieve(thread_id=self.thread.id, run_id=run.id)
            if run.status not in ["queued", "in_progress"]:
                break
            time.sleep(1)

        # Get the answer
        messages = self.get_messages(self.thread)
        answer, reference = self.parse_message(question, messages)

        return answer, reference

    def submit_question_another(self, question):
        # Create a new message
        self.message = self.create_message(question, self.thread, self.file)

        # Create a new run
        run = self.create_run(self.thread, self.assistant)

        # Wait for the run to complete
        while True:
            run = self.client.beta.threads.runs.retrieve(thread_id=self.thread.id, run_id=run.id)
            if run.status not in ["queued", "in_progress"]:
                break
            time.sleep(1)

        # Get the answer
        messages = self.get_messages(self.thread)
        answer = self.parse_message_raw(question, messages)

        return answer
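
parse_message recovers the assistant's JSON by slicing between the first "{" and the last "}" and then calling eval, which breaks whenever the reply is not a valid Python literal. Below is a stand-alone, safer sketch of the same extraction; the extract_answer helper is hypothetical and assumes the reply contains one JSON object in the format requested by return_instruction:

import json

def extract_answer(raw_text):
    # Locate the outermost braces and parse that span as JSON
    start = raw_text.find("{")
    end = raw_text.rfind("}")
    if start == -1 or end <= start:
        return raw_text, []  # no JSON object found: fall back to the raw reply
    try:
        payload = json.loads(raw_text[start:end + 1])
    except json.JSONDecodeError:
        return raw_text, []
    return payload.get("Answer", raw_text), payload.get("Reference Sentences", [])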
requirements.txt
ADDED
@@ -0,0 +1 @@
openai
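
Only openai is pinned here; assuming this runs as a Gradio Space, the gradio package itself is installed from the Space's sdk metadata rather than from requirements.txt. A hypothetical requirements sketch for running app.py locally instead:

# local-run requirements (sketch; versions deliberately left unpinned)
gradio
openai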