Update app.py
app.py
CHANGED
@@ -271,38 +271,30 @@ def generate_chunked_response(prompt, model, max_tokens=10000, num_calls=3, temp
     print(f"Final clean response: {final_response[:100]}...")
     return final_response

-def duckduckgo_search(query,
+def duckduckgo_search(query,time=None):
     params = {
-
-
-
-
-
-
-
-
-        "intitle": intitle,
-        "intext": intext,
-        "related": related,
-        "info": info,
-        "define": define
-    }
-    with DDGS() as ddgs:
-        results = ddgs.text(query, params=params, max_results=5)
-        return results
+        "q": query,
+        "region": region,
+        "language": language,
+        "time": time,
+    }
+    with DDGS() as ddgs:
+        results = ddgs.text(query, params=params, max_results=5)
+        return results

 class CitingSources(BaseModel):
     sources: List[str] = Field(
         ...,
         description="List of sources to cite. Should be an URL of the source."
     )
-
-def chatbot_interface(message, history, use_web_search, model, temperature, num_calls, safe, region, language, time, filetype, site, inurl, intitle, intext, related, info, define):
+def chatbot_interface(message, history, use_web_search, model, temperature, num_calls, region, language, time):
     if not message.strip():
         return "", history
+
     history = history + [(message, "")]
+
     try:
-        for response in respond(message, history, model, temperature, num_calls, use_web_search,
+        for response in respond(message, history, model, temperature, num_calls, use_web_search, region, language, time):
             history[-1] = (message, response)
             yield history
     except gr.CancelledError:
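As written, the new duckduckgo_search packs its options into a params dict that is handed to DDGS().text(), with region and language coming from the enclosing scope. For readers who want to try the search step on its own, a self-contained sketch that passes equivalent options as keyword arguments is below; it assumes a duckduckgo_search release whose DDGS.text() accepts region, safesearch, timelimit and max_results (an assumption about the installed library, not a description of app.py).

from duckduckgo_search import DDGS

def duckduckgo_search_sketch(query, region="wt-wt", timelimit=None, max_results=5):
    # Assumed duckduckgo_search API: DDGS.text(keywords, region=..., safesearch=...,
    # timelimit=..., max_results=...) returning dicts with 'title', 'body' and 'href' keys.
    with DDGS() as ddgs:
        return ddgs.text(query, region=region, safesearch="moderate",
                         timelimit=timelimit, max_results=max_results)

# Hypothetical usage:
# results = duckduckgo_search_sketch("gradio chatbot streaming", timelimit="d")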
@@ -321,19 +313,21 @@ def retry_last_response(history, use_web_search, model, temperature, num_calls):

     return chatbot_interface(last_user_msg, history, use_web_search, model, temperature, num_calls)

-def respond(message, history, model, temperature, num_calls, use_web_search,
+def respond(message, history, model, temperature, num_calls, use_web_search, region, language, time, selected_docs):
     logging.info(f"User Query: {message}")
     logging.info(f"Model Used: {model}")
     logging.info(f"Search Type: {'Web Search' if use_web_search else 'PDF Search'}")
-
+
+    logging.info(f"Selected Documents: {selected_docs}")
+
     try:
         if use_web_search:
-            for main_content, sources in get_response_with_search(message, model, num_calls=num_calls, temperature=temperature,
+            for main_content, sources in get_response_with_search(message, model, num_calls=num_calls, temperature=temperature, region=region, language=language, time=time):
                response = f"{main_content}\n\n{sources}"
                first_line = response.split('\n')[0] if response else ''
-                logging.info(f"Generated Response (first line): {first_line}")
+                # logging.info(f"Generated Response (first line): {first_line}")
                yield response
-
+        else:
            embed = get_embeddings()
            if os.path.exists("faiss_database"):
                database = FAISS.load_local("faiss_database", embed, allow_dangerous_deserialization=True)
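chatbot_interface and respond are both generators: respond yields progressively longer answers, and chatbot_interface rewrites the last (message, response) pair on every partial result so the Gradio chat window streams. A stripped-down sketch of that relay pattern, with hypothetical stand-in functions and no Gradio or network calls:

def respond_sketch(message):
    # Stand-in for respond(): yield progressively longer partial answers.
    partial = ""
    for word in ["Searching", "the", "web", "for:", message]:
        partial += word + " "
        yield partial.strip()

def chatbot_interface_sketch(message, history):
    # Mirrors the relay in app.py: overwrite the last history entry on each yield
    # so the chat window updates as the answer grows.
    history = history + [(message, "")]
    for response in respond_sketch(message):
        history[-1] = (message, response)
        yield history

for hist in chatbot_interface_sketch("gradio streaming", []):
    print(hist[-1][1])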
@@ -431,20 +425,16 @@ After writing the document, please provide a list of sources used in your respon
    if not full_response:
        yield "I apologize, but I couldn't generate a response at this time. Please try again later."

-def get_response_with_search(query, model, num_calls=3, temperature=0.2,
-    search_results = duckduckgo_search(query
-
-
-
-
-    retriever = web_search_database.as_retriever(search_kwargs={"k": 5})
-    relevant_docs = retriever.get_relevant_documents(query)
-    context = "\n".join([doc.page_content for doc in relevant_docs])
-    prompt = f"""Using the following context from web search results:
+def get_response_with_search(query, model, num_calls=3, temperature=0.2, region=None, language=None, time=None):
+    search_results = duckduckgo_search(query)
+    context = "\n".join(f"{result['title']}\n{result['body']}\nSource: {result['href']}\n"
+                        for result in search_results if 'body' in result)
+
+    prompt = f"""Using the following context:
{context}
Write a detailed and complete research document that fulfills the following user request: '{query}'
After writing the document, please provide a list of sources used in your response."""
-
+
    if model == "@cf/meta/llama-3.1-8b-instruct":
        # Use Cloudflare API
        for response in get_response_from_cloudflare(prompt="", context=context, query=query, num_calls=num_calls, temperature=temperature, search_type="web"):
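The rewritten get_response_with_search skips the FAISS retriever and builds the prompt context directly from the search hits, which (per the f-string above) are dicts with 'title', 'body' and 'href' keys. A minimal standalone sketch of that step over made-up results:

# Hypothetical search hits in the shape the f-string above expects.
search_results = [
    {"title": "Gradio docs", "body": "Build ML web apps.", "href": "https://gradio.app"},
    {"title": "No snippet here", "href": "https://example.org"},  # dropped by the 'body' check
]

# Same join/filter expression as in get_response_with_search.
context = "\n".join(
    f"{result['title']}\n{result['body']}\nSource: {result['href']}\n"
    for result in search_results
    if "body" in result
)

prompt = f"""Using the following context:
{context}
Write a detailed and complete research document that fulfills the following user request: 'example query'
After writing the document, please provide a list of sources used in your response."""
print(prompt)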
@@ -460,7 +450,7 @@ After writing the document, please provide a list of sources used in your respon
                max_tokens=10000,
                temperature=temperature,
                stream=True,
-
+            ):
                if message.choices and message.choices[0].delta and message.choices[0].delta.content:
                    chunk = message.choices[0].delta.content
                    main_content += chunk
@@ -591,24 +581,14 @@ demo = gr.ChatInterface(
        gr.Dropdown(choices=MODELS, label="Select Model", value=MODELS[3]),
        gr.Slider(minimum=0.1, maximum=1.0, value=0.2, step=0.1, label="Temperature"),
        gr.Slider(minimum=1, maximum=5, value=1, step=1, label="Number of API Calls"),
-        use_web_search,
-        document_selector,
-        gr.Dropdown(choices=instruction_choices, label="Select Entity Type for Summary", value="None"),
-        gr.Checkbox(label="Safe Search", value=True),
        gr.Textbox(label="Region", placeholder="US"),
        gr.Textbox(label="Language", placeholder="en"),
        gr.Textbox(label="Time", placeholder="d"),
-
-
-        gr.Textbox(label="Inurl", placeholder="github.com"),
-        gr.Textbox(label="Intitle", placeholder="github"),
-        gr.Textbox(label="Intext", placeholder="github"),
-        gr.Textbox(label="Related", placeholder="github.com"),
-        gr.Textbox(label="Info", placeholder="github.com"),
-        gr.Textbox(label="Define", placeholder="github")
+        use_web_search,
+        document_selector
    ],
    title="AI-powered Web Search and PDF Chat Assistant",
-    description="Chat with your PDFs
+    description="Chat with your PDFs or use web search to answer questions. Toggle between Web Search and PDF Chat in Additional Inputs below.",
    theme=gr.themes.Soft(
        primary_hue="orange",
        secondary_hue="amber",
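gr.ChatInterface passes the additional_inputs values to the chat function positionally, after message and history, so the order of the list above determines which widget feeds which parameter of respond. A minimal illustration of that binding rule with hypothetical components (assuming a Gradio version whose ChatInterface accepts additional_inputs), rather than the app's full list:

import gradio as gr

def respond_sketch(message, history, model, temperature):
    # Positional binding: model comes from the Dropdown, temperature from the Slider,
    # in the same order as additional_inputs below.
    return f"model={model}, temperature={temperature}, you said: {message}"

demo_sketch = gr.ChatInterface(
    respond_sketch,
    additional_inputs=[
        gr.Dropdown(choices=["model-a", "model-b"], label="Select Model", value="model-a"),
        gr.Slider(minimum=0.1, maximum=1.0, value=0.2, step=0.1, label="Temperature"),
    ],
)
# demo_sketch.launch()  # uncomment to try it locally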
@@ -637,12 +617,12 @@ demo = gr.ChatInterface(
    analytics_enabled=False,
    textbox=gr.Textbox(placeholder=custom_placeholder, container=False, scale=7),
    chatbot = gr.Chatbot(
-
-
-
-
-
-
+        show_copy_button=True,
+        likeable=True,
+        layout="bubble",
+        height=400,
+        value=initial_conversation()
+    )
)

# Add file upload functionality