awacke1 committed
Commit 20488d8 · verified · 1 Parent(s): 445b0aa

Update app.py

Files changed (1):
  1. app.py  +8 -13
app.py CHANGED
```diff
@@ -54,7 +54,7 @@ LOCAL_APP_URL = "https://huggingface.co/spaces/awacke1/AzureCosmosDBUI"
 CosmosDBUrl = 'https://portal.azure.com/#@AaronCWackergmail.onmicrosoft.com/resource/subscriptions/003fba60-5b3f-48f4-ab36-3ed11bc40816/resourceGroups/datasets/providers/Microsoft.DocumentDB/databaseAccounts/acae-afd/dataExplorer'
 
 # 🤖 Anthropic configuration - Teaching machines to be more human (and funnier)
-client = anthropic.Anthropic(api_key=os.environ.get("ANTHROPIC_API_KEY"))
+anthropicclient = anthropic.Anthropic(api_key=os.environ.get("ANTHROPIC_API_KEY"))
 
 # 🧠 Initialize session state - Because even apps need a good memory
 if "chat_history" not in st.session_state:
```
```diff
@@ -315,18 +315,18 @@ def archive_current_container(database_name, container_name, client):
 # 🔍 Search glossary - Finding needles in digital haystacks
 def search_glossary(query):
     st.markdown(f"### 🔍 SearchGlossary for: {query}")
-    # Dropdown for model selection
     model_options = ['mistralai/Mixtral-8x7B-Instruct-v0.1', 'mistralai/Mistral-7B-Instruct-v0.2', 'google/gemma-7b-it', 'None']
-    #model_choice = st.selectbox('🧠 Select LLM Model', options=model_options, index=1)
-    # Dropdown for database selection
+    model_choice = st.selectbox('🧠 Select LLM Model', options=model_options, index=1, key=f"model_choice_{id(query)}")
     database_options = ['Semantic Search', 'Arxiv Search - Latest - (EXPERIMENTAL)']
-    #database_choice = st.selectbox('📚 Select Database', options=database_options, index=0)
+    database_choice = st.selectbox('📚 Select Database', options=database_options, index=0, key=f"database_choice_{id(query)}")
+
     # 🕵️‍♂️ Searching the glossary for: query
     all_results = ""
-    #st.markdown(f"- {query}")
-
+    # Limit the query display to 80 characters
+    display_query = query[:80] + "..." if len(query) > 80 else query
+    st.markdown(f"🕵️‍♂️ Running ArXiV AI Analysis with Query: {display_query} - ML model: {model_choice} and Option: {database_options}")
 
-    # 🔍 ArXiv RAG researcher expert ~-<>-~ Paper Summary & Ask LLM
+    # 🔍 ArXiV RAG researcher expert ~-<>-~ Paper Summary & Ask LLM
     client = Client("awacke1/Arxiv-Paper-Search-And-QA-RAG-Pattern")
     # 🔍 ArXiv RAG researcher expert ~-<>-~ Paper Summary & Ask LLM - api_name: /ask_llm
     result = client.predict(
```
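
This hunk turns the previously commented-out dropdowns back on and gives each one an explicit `key`. Streamlit identifies widgets by their parameters unless a `key` is supplied, so creating the same selectbox twice in one script run without distinct keys raises a `DuplicateWidgetID` error; deriving the key from the query keeps repeated `search_glossary` calls apart. A standalone sketch of the pattern (the helper name is made up; the option lists are copied from the diff):

```python
import streamlit as st

def render_search_controls(query: str):
    model_options = ['mistralai/Mixtral-8x7B-Instruct-v0.1',
                     'mistralai/Mistral-7B-Instruct-v0.2',
                     'google/gemma-7b-it', 'None']
    # Per-call key so repeated invocations do not collide on the same widget ID.
    model_choice = st.selectbox('🧠 Select LLM Model', options=model_options,
                                index=1, key=f"model_choice_{id(query)}")

    database_options = ['Semantic Search', 'Arxiv Search - Latest - (EXPERIMENTAL)']
    database_choice = st.selectbox('📚 Select Database', options=database_options,
                                   index=0, key=f"database_choice_{id(query)}")

    # Truncate long queries so the status line stays readable.
    display_query = query[:80] + "..." if len(query) > 80 else query
    st.markdown(f"🕵️ Query: {display_query} | model: {model_choice} | database: {database_choice}")
    return model_choice, database_choice
```

One caveat: `id(query)` is only unique per string object and can differ between reruns; a key derived from the query text itself would distinguish calls just as well while staying stable across reruns.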
```diff
@@ -351,11 +351,6 @@ def search_glossary(query):
     st.markdown(result2)
     #st.code(result2, language="python", line_numbers=True)
 
-
-
-
-
-
     # 🔍 ArXiv RAG researcher expert ~-<>-~ Paper Summary & Ask LLM - api_name: /update_with_rag_md
     response2 = client.predict(
         message=query, # str in 'parameter_13' Textbox component
```
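
The removed blank lines aside, this hunk shows the second endpoint the function calls on the same Space: `/ask_llm` earlier for a direct answer, `/update_with_rag_md` here for the retrieval-augmented markdown summary. A hedged sketch of that two-call pattern, assuming `Client` is `gradio_client.Client`; the parameter lists are assumptions (the diff only confirms `message=query` for `/update_with_rag_md`), so the real endpoints may expect more arguments:

```python
from gradio_client import Client

def query_arxiv_space(query: str):
    # One Client per call keeps the sketch self-contained; reusing a single
    # instance avoids re-fetching the Space's config on every search.
    client = Client("awacke1/Arxiv-Paper-Search-And-QA-RAG-Pattern")

    # Direct LLM answer (endpoint name taken from the comment in the diff).
    result = client.predict(message=query, api_name="/ask_llm")

    # Retrieval-augmented markdown summary of matching papers.
    response2 = client.predict(message=query, api_name="/update_with_rag_md")

    return result, response2
```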
 