import base64
import io
import re

import numpy as np
import requests
import torch
import gradio as gr
from PIL import Image, ImageOps
# Helper to load an image from a URL and return it as a float tensor
def loadImageFromUrl(url):
    response = requests.get(url, timeout=10)
    if response.status_code != 200:
        raise Exception(f"Failed to load image from {url}")
    i = Image.open(io.BytesIO(response.content))
    i = ImageOps.exif_transpose(i)
    if i.mode != "RGBA":
        i = i.convert("RGBA")
    # Composite the image onto a black background using its alpha channel
    alpha = i.split()[-1]
    image = Image.new("RGB", i.size, (0, 0, 0))
    image.paste(i, mask=alpha)
    # Convert to a 1 x H x W x 3 float tensor in [0, 1]
    image = np.array(image).astype(np.float32) / 255.0
    image = torch.from_numpy(image)[None,]
    return image
# Fetch data from multiple booru platforms
def fetch_booru_images(site, Tags, exclude_tags, score, count, Safe, Questionable, Explicit):
    # Clean and format tags
    def clean_tag_list(tags):
        return [item.strip().replace(' ', '_') for item in tags.split(',') if item.strip()]
    Tags = '+'.join(clean_tag_list(Tags)) if Tags else ''
    exclude_tags = '+'.join('-' + tag for tag in clean_tag_list(exclude_tags))
    # Exclude the ratings whose checkbox is unchecked
    rating_filters = []
    if not Safe:
        rating_filters.extend(["rating:safe", "rating:general"])
    if not Questionable:
        rating_filters.extend(["rating:questionable", "rating:sensitive"])
    if not Explicit:
        rating_filters.append("rating:explicit")
    rating_filters = '+'.join(f'-{r}' for r in rating_filters)
    score_filter = f"score:>{score}"
    # Build query and collapse any duplicate '+' separators
    base_query = f"tags=sort:random+{Tags}+{exclude_tags}+{score_filter}+{rating_filters}&limit={count}&json=1"
    base_query = re.sub(r"\++", "+", base_query)
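    # For illustration only (hypothetical inputs, not from the original app): with
    # Tags="blue sky, 1girl", exclude_tags="text", score=10, count=5 and only Safe
    # checked, base_query would come out roughly as
    #   tags=sort:random+blue_sky+1girl+-text+score:>10+-rating:questionable+-rating:sensitive+-rating:explicit&limit=5&json=1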
    # Fetch data based on site
    if site == "Gelbooru":
        url = f"https://gelbooru.com/index.php?page=dapi&s=post&q=index&{base_query}"
        response = requests.get(url, timeout=10).json()
        posts = response.get("post", [])
    elif site == "Rule34":
        url = f"https://api.rule34.xxx/index.php?page=dapi&s=post&q=index&{base_query}"
        response = requests.get(url, timeout=10).json()
        posts = response
    elif site == "Xbooru":
        url = f"https://xbooru.com/index.php?page=dapi&s=post&q=index&{base_query}"
        response = requests.get(url, timeout=10).json()
        posts = response
    else:
        return [], [], []
    # Extract image URLs, tags, and post URLs, keeping the three lists aligned
    image_urls = []
    tags_list = []
    post_urls = []
    for post in posts:
        file_url = post.get("file_url")
        # Reformat the raw tag string for display: comma-separated, spaces
        # instead of underscores, and escaped parentheses
        tags = post.get("tags", "").replace(" ", ", ").replace("_", " ").replace("(", "\\(").replace(")", "\\)").strip()
        post_id = post.get("id", "")
        if file_url:
            image_urls.append(file_url)
            tags_list.append(tags)
            if site == "Gelbooru":
                post_urls.append(f"https://gelbooru.com/index.php?page=post&s=view&id={post_id}")
            elif site == "Rule34":
                post_urls.append(f"https://rule34.xxx/index.php?page=post&s=view&id={post_id}")
            elif site == "Xbooru":
                post_urls.append(f"https://xbooru.com/index.php?page=post&s=view&id={post_id}")
    return image_urls, tags_list, post_urls
# Main function to fetch and return processed images
def booru_gradio(Tags, exclude_tags, score, count, Safe, Questionable, Explicit, site):
    image_urls, tags_list, post_urls = fetch_booru_images(site, Tags, exclude_tags, score, count, Safe, Questionable, Explicit)
    if not image_urls:
        return [], [], [], []
    image_data, kept_tags, kept_post_urls, kept_image_urls = [], [], [], []
    for url, tags, post_url in zip(image_urls, tags_list, post_urls):
        try:
            image = loadImageFromUrl(url)
            # Convert the 1 x H x W x 3 float tensor back to a uint8 PIL image
            image = (image * 255).clamp(0, 255).cpu().numpy().astype(np.uint8)[0]
            image_data.append(Image.fromarray(image))
            kept_tags.append(tags)
            kept_post_urls.append(post_url)
            kept_image_urls.append(url)
        except Exception as e:
            # Skip images that fail to download so the output lists stay aligned
            print(f"Error loading image from {url}: {e}")
    return image_data, kept_tags, kept_post_urls, kept_image_urls
# Update UI on image click
def on_select(evt: gr.SelectData, tags_list, post_url_list, image_url_list):
    idx = evt.index
    if idx < len(tags_list):
        return tags_list[idx], post_url_list[idx], image_url_list[idx]
    return "No tags", "", ""