Update app.py
app.py
CHANGED
@@ -28,7 +28,7 @@ with gr.Blocks() as demo:
     msg = gr.Textbox(label="simple wikipedia semantic search query", placeholder="for example, \"medieval battles\"")
     clear = gr.ClearButton([msg, chatbot])
 
-    def respond(message, chat_history):
+    def _search(message, chat_history):
         batch_dict = tokenizer(["query: " + message], max_length=512, padding=True, truncation=True, return_tensors='pt')
 
         outputs = model(**batch_dict)
@@ -49,11 +49,28 @@ with gr.Blocks() as demo:
         # Get corresponding 'text' for top k similar points
         top_k_text = df['title'].iloc[top_k_idx].tolist()
 
-        bot_message = "\n".join(f"{i+1}. {top_k_text[i]}" for i in range(len(top_k_text)))
+        bot_message = "\n".join(f"{i+1}. {top_k_text[i]} // {top_k_idx[i]}" for i in range(len(top_k_text)))
 
-        chat_history.append((message, bot_message))
+        chat_history.append((message, "results:\n" + bot_message))
         return "", chat_history
 
+    def _retrieve(message, chat_history):
+        idx = int(message)
+        for _, m in chat_history[::-1]:
+            if m.startswith("results:\n"):
+                for n in m.split("\n")[1:]:
+                    if str(idx) == n.split(".")[0]:
+                        df_idx = int(n.split(" // ")[-1])
+                        chat_history.append((message, f"contents of {n}:\n{df[df_idx]}"))
+        return "", chat_history
+
+    def respond(message, chat_history):
+        try:
+            int(message)
+            return _retrieve(message, chat_history)
+        except:
+            return _search(message, chat_history)
+
     msg.submit(respond, [msg, chatbot], [msg, chatbot])
 
 demo.launch()
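For reference, a minimal, self-contained sketch of the "rank. title // index" round trip that the new helpers rely on: _search numbers each hit and appends its dataframe index after " // ", while _retrieve walks the chat history backwards to the most recent "results:" message and parses the rank and index back out when the user replies with a number. The sample titles, indices, and history below are made up for illustration and are not part of the Space.

# Illustrative round trip of the result-line format used by _search/_retrieve.
titles = ["Battle of Hastings", "Battle of Agincourt"]   # stand-ins for df['title'] hits
indices = [101, 202]                                      # stand-ins for top_k_idx

# _search side: "<rank>. <title> // <dataframe index>", one line per hit
bot_message = "\n".join(f"{i+1}. {titles[i]} // {indices[i]}" for i in range(len(titles)))
history = [("medieval battles", "results:\n" + bot_message)]

# _retrieve side: the user replies with a rank ("2"); recover the dataframe index
message = "2"
for _, reply in history[::-1]:
    if reply.startswith("results:\n"):
        for line in reply.split("\n")[1:]:
            if line.split(".")[0] == message:
                df_idx = int(line.split(" // ")[-1])
                print(df_idx)  # -> 202, the row to look up in df
        break

Note that df[df_idx] in the new _retrieve treats df_idx as a column label; if the intent is to fetch a row by position, pandas' df.iloc[df_idx] (or df['text'].iloc[df_idx], matching the 'text' comment above) is the usual call.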