Update app.py
app.py (CHANGED)
Hunk @@ -9,23 +9,36 @@ import os

Before (old lines 9-31; lines the diff view did not preserve are marked with …):

```python
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)

def …
    """…
    try:
        …
        text = …
        …
        return …
    except Exception as e:
        logger.error(f"…
        return …

def process_url(url, link_types):
    """Process URL and generate llms.txt content"""
```
Hunk @@ -33,131 +46,87 @@

Before (old lines 33-163; lines the diff view did not preserve are marked with …):

```python
        return "", "Please enter a URL"

    try:
        …
        try:
            # Perform …
            …

            # Read …
            …

            # …
            title = …
            meta_desc = …

            all_links = []

            # Process links based on selected types
            if link_types and "All links" not in link_types:
                for link_type in link_types:
                    …
                    if …
                        …
                        all_links.append(link_content)
                        all_links.append('\n\n')
            else:
                …
            final_content …
        finally:
            # Cleanup
            if os.path.exists(…
                os.remove(…
        …
        return final_content, f"Successfully crawled website. Found {len(all_links)} sections."

    except Exception as e:
        logger.error(f"Error processing …
        return "", f"Error: {str(e)}"

# …
…
.… {
    background-color: #3452db !important;
}

.primary-btn:hover {
    background-color: #2a41af !important;
}
"""

# Create custom theme with specific color
theme = gr.themes.Soft(
    primary_hue=gr.themes.colors.Color(
        name="blue",
        c50="#eef1ff",
        c100="#e0e5ff",
        c200="#c3cbff",
        c300="#a5b2ff",
        c400="#8798ff",
        c500="#6a7eff",
        c600="#3452db",  # Main color
        c700="#2a41af",
        c800="#1f3183",
        c900="#152156",
        c950="#0a102b",
    )
)

with gr.Blocks(theme=theme, css=css) as iface:
    with gr.Row():
        gr.Markdown("# Generate an `llms.txt` file")

    with gr.Row():
        url_input = gr.Textbox(
            label="Enter the home page of a website:",
            placeholder="example: https://example.com",
            lines=1,
        )

    with gr.Row():
        link_types = gr.Dropdown(
            label="Select types of links to extract (leave empty to get all links)",
            choices=["<header> links", "<nav> links", "<footer> links", "All links"],
            multiselect=True,
            value=["All links"]
        )

    …
    with gr.Row():
        output = gr.Textbox(
            label="Generated llms.txt Content",
            lines=20,
            show_copy_button=True
            …
        )

    …
        outputs=[output, status],
    )

if __name__ == "__main__":
    iface.launch()
```
After, hunk @@ -9,23 +9,36 @@ (new lines 9-44):

```python
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)

def safe_crawl(url, output_file):
    """Safely perform web crawl with timeout"""
    try:
        adv.crawl(url, output_file,
                  follow_links=False,  # Only crawl the main page
                  custom_settings={'CLOSESPIDER_TIMEOUT': 30})  # 30 second timeout
        return True
    except Exception as e:
        logger.error(f"Crawl error: {str(e)}")
        return False

def process_links(df, link_type=None):
    """Process links based on type"""
    try:
        if link_type:
            mask = df['source'].str.contains(f'<{link_type}', case=False, na=False)
            df = df[mask]

        all_links = []
        for _, row in df.iterrows():
            if row['text'] and str(row['text']).strip():
                text = str(row['text']).strip()
                text = re.sub(r'\s+', ' ', text)
                link = str(row['link']).strip()
                all_links.append(f"## {text}\n[{text}]({link})")

        return all_links
    except Exception as e:
        logger.error(f"Link processing error: {str(e)}")
        return []

def process_url(url, link_types):
    """Process URL and generate llms.txt content"""
```
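Since `process_links` does all of the per-link formatting, it can be sanity-checked without running a crawl. A minimal sketch, assuming `app.py` imports cleanly as a module; the rows are invented, with column names mirroring the three fields `process_links` reads (`source`, `text`, `link`). In the full app the frame comes from `adv.crawlytics.links`, shown in the next hunk:

```python
import pandas as pd

from app import process_links  # the module in this diff

# Hypothetical link rows; only the columns process_links reads.
sample = pd.DataFrame({
    "source": ['<nav class="main">', '<footer id="foot">'],
    "text":   ["  About\n  Us  ", "Contact"],
    "link":   ["https://example.com/about", "https://example.com/contact"],
})

print(process_links(sample, "nav"))
# ['## About Us\n[About Us](https://example.com/about)']

print(len(process_links(sample)))  # no tag filter: both rows are kept
```

The whitespace collapse via `re.sub(r'\s+', ' ', text)` is what turns the multi-line anchor text into the single-line `About Us` heading. On the crawling side, `CLOSESPIDER_TIMEOUT` is a standard Scrapy setting that advertools forwards through `custom_settings`, so a slow site stops the spider after roughly 30 seconds instead of hanging the Space.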
After, hunk @@ -33,131 +46,87 @@ (new lines 46-132):

```python
        return "", "Please enter a URL"

    try:
        # Ensure URL has protocol
        if not url.startswith(('http://', 'https://')):
            url = 'https://' + url

        # Create temporary file
        output_file = f"{token_hex(4)}.jsonl"

        try:
            # Perform crawl
            if not safe_crawl(url, output_file):
                return "", "Crawl failed or timed out"

            # Read results
            df = pd.read_json(output_file, lines=True)

            # Get basic info
            title = df['title'].iloc[0] if not pd.isna(df['title'].iloc[0]) else "Untitled"
            meta_desc = df['meta_desc'].iloc[0] if not pd.isna(df['meta_desc'].iloc[0]) else ""

            # Process links
            link_df = adv.crawlytics.links(df)
            all_links = []

            if link_types and "All links" not in link_types:
                for link_type in link_types:
                    type_name = re.search(r'<(\w+)>', link_type)
                    if type_name:
                        links = process_links(link_df, type_name.group(1))
                        all_links.extend(links)
            else:
                all_links = process_links(link_df)

            # Create content
            content_parts = [
                f"# {title}",
                f"> {meta_desc}",
                "\n\n".join(all_links)
            ]
            final_content = "\n\n".join(content_parts)

            return final_content, f"Found {len(all_links)} links"

        finally:
            # Cleanup
            if os.path.exists(output_file):
                os.remove(output_file)

    except Exception as e:
        logger.error(f"Error processing {url}: {str(e)}")
        return "", f"Error: {str(e)}"

# Create interface
iface = gr.Interface(
    fn=process_url,
    inputs=[
        gr.Textbox(
            label="Enter website URL",
            placeholder="example: example.com"
        ),
        gr.Dropdown(
            choices=["<header> links", "<nav> links", "<footer> links", "All links"],
            label="Select link types",
            multiselect=True,
            value=["All links"]
        )
    ],
    outputs=[
        gr.Textbox(
            label="Generated llms.txt",
            lines=20,
            show_copy_button=True
        ),
        gr.Textbox(
            label="Status"
        )
    ],
    title="LLMs.txt Generator",
    description="Generate an llms.txt file from a website",
    theme=gr.themes.Soft(),
    allow_flagging="never"
)

if __name__ == "__main__":
    iface.launch()
```
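For reference, the `content_parts` assembly in `process_url` yields a document of this shape; the values below are invented stand-ins, but the structure (an H1 title, a blockquote meta description, one H2 section per link) follows directly from the code above:

```python
# Illustrative stand-ins for title, meta_desc, and one formatted link.
content_parts = [
    "# Example Domain",
    "> A placeholder meta description.",
    "## About Us\n[About Us](https://example.com/about)",
]
print("\n\n".join(content_parts))
```

Two small wiring notes: `re.search(r'<(\w+)>', "<header> links").group(1)` evaluates to `"header"`, which is how the dropdown labels become tag filters, and because `process_url` returns a `(content, status)` tuple, the two `gr.Textbox` entries in `outputs` each receive one element.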