Spaces:
Sleeping
Sleeping
Update app.py
Browse files
app.py
CHANGED
@@ -1,25 +1,30 @@
|
|
1 |
import gradio as gr
|
2 |
import random
|
3 |
from scraperr import scrape_google
|
4 |
-
from pdf_converter import
|
5 |
|
6 |
|
7 |
|
8 |
-
async def scrape_websites(topic, num_links, num_results_per_link=10):
|
9 |
-
|
|
|
|
|
10 |
# Ensure output_text is a list
|
11 |
if not isinstance(output_text, list):
|
12 |
output_text = [output_text]
|
|
|
13 |
# Select random links based on the user's input
|
14 |
selected_links = random.sample(output_text, min(num_links, len(output_text)))
|
|
|
15 |
# Convert the list of strings to a single string
|
16 |
output_string = "\n".join(selected_links)
|
|
|
17 |
return output_string
|
18 |
|
19 |
-
def convert_to_pdf(url):
|
20 |
-
|
21 |
-
|
22 |
-
|
23 |
|
24 |
html = """
|
25 |
<div style="text-align:center; max-width: 900px; margin: 0 auto; margin-top:5px">
|
|
|
1 |
import gradio as gr
|
2 |
import random
|
3 |
from scraperr import scrape_google
|
4 |
+
from pdf_converter import create_pdf
|
5 |
|
6 |
|
7 |
|
8 |
+
async def scrape_websites(topic, num_links, num_results_per_link=10) -> str:
    """Scrape Google for *topic* and return a random selection of result links.

    Args:
        topic: Search query forwarded to ``scrape_google``.
        num_links: How many of the scraped links to keep. Coerced to ``int``
            and clamped to ``[0, len(results)]`` so float values from UI
            widgets (e.g. a Gradio slider) or negative inputs cannot make
            ``random.sample`` raise ``ValueError``.
        num_results_per_link: Result count requested from the scraper.

    Returns:
        The selected links joined with newlines ("" when nothing was found).
    """
    output_text = await scrape_google(topic, num_results_per_link)

    # Ensure output_text is a list
    if not isinstance(output_text, list):
        output_text = [output_text]

    # random.sample requires a non-negative int sample size; clamp to what
    # is actually available so odd UI inputs can't crash the app.
    sample_size = max(0, min(int(num_links), len(output_text)))

    # Select random links based on the user's input
    selected_links = random.sample(output_text, sample_size)

    # Convert the list of strings to a single string
    return "\n".join(selected_links)
|
23 |
|
24 |
+
def convert_to_pdf(url) -> str:
    """Render the webpage at *url* to a PDF file and return its path."""
    # All of the real work lives in the pdf_converter helper module;
    # this wrapper just exposes it to the Gradio interface.
    return create_pdf(url)
|
28 |
|
29 |
html = """
|
30 |
<div style="text-align:center; max-width: 900px; margin: 0 auto; margin-top:5px">
|