Update app.py
app.py CHANGED
@@ -472,46 +472,16 @@ def perform_ai_lookup(query):
     )
     st.write('🔍Run of Multi-Agent System Paper Summary Spec is Complete')
     st.markdown(response2)
-
-    # ArXiv searcher ~-<>-~ Paper References - Update with RAG
+
     client = Client("awacke1/Arxiv-Paper-Search-And-QA-RAG-Pattern")
-
-
-
-
-
-        "Arxiv Search - Latest - (EXPERIMENTAL)",
-        "mistralai/Mixtral-8x7B-Instruct-v0.1",  # Literal['mistralai/Mixtral-8x7B-Instruct-v0.1', 'mistralai/Mistral-7B-Instruct-v0.2', 'google/gemma-7b-it', 'None'] in 'LLM Model' Dropdown component
+    result = client.predict(
+        message=response2,
+        llm_results_use=5,
+        database_choice="Semantic Search",
+        llm_model_picked="mistralai/Mistral-7B-Instruct-v0.2",
         api_name="/update_with_rag_md"
     )
-
-    responseall = response2 + response1[0] + response1[1]
-    st.markdown(responseall)
-
-
-    """Perform AI lookup using Gradio client."""
-    st.write("Performing AI Lookup...")
-    client = Client("awacke1/Arxiv-Paper-Search-And-QA-RAG-Pattern")
-    result1 = client.predict(
-        prompt=query,
-        llm_model_picked="mistralai/Mixtral-8x7B-Instruct-v0.1",
-        stream_outputs=True,
-        api_name="/ask_llm"
-    )
-    st.markdown("### Mixtral-8x7B-Instruct-v0.1 Result")
-    st.markdown(result1)
-    result2 = client.predict(
-        prompt=query,
-        llm_model_picked="mistralai/Mistral-7B-Instruct-v0.2",
-        stream_outputs=True,
-        api_name="/ask_llm"
-    )
-    st.markdown("### Mistral-7B-Instruct-v0.2 Result")
-    st.markdown(result2)
-    combined_result = f"{result1}\n\n{result2}"
-    #return combined_result
-
-    return responseall
+    return result

 def display_file_content(file_path):
     """Display file content with editing capabilities."""