Spaces: Running on Zero
Update chatbot.py

chatbot.py CHANGED (+0 -2)
@@ -261,7 +261,6 @@ def fetch_and_extract(link, max_chars_per_page):
             visible_text = visible_text[:max_chars_per_page] + "..."
         return {"link": link, "text": visible_text}
     except requests.exceptions.RequestException as e:
-        print(f"Error fetching or processing {link}: {e}")
         return {"link": link, "text": None}
 
 def search(term, max_results=2, max_chars_per_page=8000, max_threads=10):
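
The first hunk drops an error print from the tail of fetch_and_extract's try/except. For orientation, here is a minimal sketch of how such a fetch-and-extract helper typically fits together; only the lines visible in the hunk come from the actual file, and the requests/BeautifulSoup details (timeout, parser choice, get_text call) are assumptions:

import requests
from bs4 import BeautifulSoup

def fetch_and_extract(link, max_chars_per_page):
    # Fetch a page and return its visible text, truncated to max_chars_per_page.
    try:
        response = requests.get(link, timeout=10)  # timeout is an assumed value
        response.raise_for_status()
        soup = BeautifulSoup(response.text, "html.parser")  # parser choice is an assumption
        visible_text = soup.get_text(separator=" ", strip=True)
        if len(visible_text) > max_chars_per_page:
            visible_text = visible_text[:max_chars_per_page] + "..."
        return {"link": link, "text": visible_text}
    except requests.exceptions.RequestException:
        # The print removed by this commit sat here; fetch failures now return None silently.
        return {"link": link, "text": None}
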
@@ -367,7 +366,6 @@ def model_inference(
             output += response.token.text
             yield output
         update_history(output, user_prompt)
-        print(history)
         return
     else:
         if user_prompt["text"].strip() == "" and not user_prompt["files"]:
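
The second hunk removes a print(history) from the end of the streaming branch of model_inference: tokens are accumulated into output, the growing string is yielded so the UI updates live, and the finished reply is then written into the chat history. A rough sketch of that pattern, assuming a huggingface_hub InferenceClient token stream and stand-ins for the app's own history/update_history helpers (parameter values are illustrative):

from huggingface_hub import InferenceClient

history = []  # stand-in for the app's chat history store

def update_history(output, user_prompt):
    # stand-in for the helper called in the hunk above
    history.append((user_prompt.get("text", ""), output))

def stream_reply(client: InferenceClient, prompt: str, user_prompt: dict):
    output = ""
    for response in client.text_generation(
        prompt, max_new_tokens=512, stream=True, details=True
    ):
        output += response.token.text  # each streamed event carries one generated token
        yield output                   # yield the partial reply so the chat UI can refresh
    update_history(output, user_prompt)
    # the removed line printed the full history here after every completed turn
    return

With the print gone, a completed turn no longer dumps the whole conversation history to stdout (the Space's container logs).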