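# HuggingFace Trending Board
# A Gradio app that renders trending Spaces, Models, and Datasets from the
# Hugging Face Hub as ranked cards, including AI Rising Rate / Popularity
# scores and Selenium-based screenshots for Space cards.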
import os
import random
import base64
import requests
from selenium import webdriver
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC
from selenium.webdriver.common.by import By
from selenium.common.exceptions import WebDriverException, TimeoutException
from PIL import Image
from io import BytesIO
from datetime import datetime
import gradio as gr
from typing import Tuple
import time
from pathlib import Path  # added
# μŠ€ν¬λ¦°μƒ· μΊμ‹œ 디렉토리 μ„€μ •
CACHE_DIR = Path("screenshot_cache")
CACHE_DIR.mkdir(exist_ok=True)
# In-memory screenshot cache (global)
SCREENSHOT_CACHE = {}
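# Note: screenshots are cached at two levels: CACHE_DIR on disk (reused across
# restarts) and SCREENSHOT_CACHE in memory (per process).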
def get_cached_screenshot(url: str) -> str:
    """Return a cached screenshot for the URL, capturing a new one if needed."""
    # URL-safe base64 keeps '/' characters out of the cache filename
    cache_file = CACHE_DIR / f"{base64.urlsafe_b64encode(url.encode()).decode()}.png"
    if cache_file.exists():
        with open(cache_file, "rb") as f:
            return base64.b64encode(f.read()).decode()
    # Not cached on disk yet: capture, then persist the PNG for later runs
    screenshot = take_screenshot(url)
    if screenshot:
        with open(cache_file, "wb") as f:
            f.write(base64.b64decode(screenshot))
    return screenshot
def take_screenshot(url):
"""μ›Ήμ‚¬μ΄νŠΈ μŠ€ν¬λ¦°μƒ· 촬영 ν•¨μˆ˜ (λ‘œλ”© λŒ€κΈ° μ‹œκ°„ μΆ”κ°€)"""
if url in SCREENSHOT_CACHE:
return SCREENSHOT_CACHE[url]
if not url.startswith('http'):
url = f"https://{url}"
options = webdriver.ChromeOptions()
options.add_argument('--headless')
options.add_argument('--no-sandbox')
options.add_argument('--disable-dev-shm-usage')
options.add_argument('--window-size=1080,720')
try:
driver = webdriver.Chrome(options=options)
driver.get(url)
        # Explicit wait: up to 10 seconds for the <body> element to be present
try:
WebDriverWait(driver, 10).until(
EC.presence_of_element_located((By.TAG_NAME, "body"))
)
except TimeoutException:
print(f"νŽ˜μ΄μ§€ λ‘œλ”© νƒ€μž„μ•„μ›ƒ: {url}")
# μΆ”κ°€ λŒ€κΈ° μ‹œκ°„μ„ 2초둜 증가
time.sleep(2) # 1μ΄ˆμ—μ„œ 2초둜 λ³€κ²½
# JavaScript μ‹€ν–‰ μ™„λ£Œ λŒ€κΈ°
driver.execute_script("return document.readyState") == "complete"
# μŠ€ν¬λ¦°μƒ· 촬영
screenshot = driver.get_screenshot_as_png()
img = Image.open(BytesIO(screenshot))
buffered = BytesIO()
img.save(buffered, format="PNG")
base64_image = base64.b64encode(buffered.getvalue()).decode()
# μΊμ‹œμ— μ €μž₯
SCREENSHOT_CACHE[url] = base64_image
return base64_image
except WebDriverException as e:
print(f"μŠ€ν¬λ¦°μƒ· 촬영 μ‹€νŒ¨: {str(e)} for URL: {url}")
return None
except Exception as e:
print(f"μ˜ˆμƒμΉ˜ λͺ»ν•œ 였λ₯˜: {str(e)} for URL: {url}")
return None
finally:
if 'driver' in locals():
driver.quit()
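# Note: take_screenshot assumes a Chrome/Chromium binary is available in the
# runtime environment; recent Selenium releases (4.6+) can resolve a matching
# chromedriver automatically via Selenium Manager.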
def calculate_rising_rate(created_date: str, rank: int) -> int:
"""AI Rising Rate 계산"""
    # Score based on the creation date (newer items score higher, up to 300 points)
    try:
        created = datetime.strptime(created_date.split('T')[0], '%Y-%m-%d')
        days_diff = (datetime.now() - created).days
        date_score = max(0, 300 - days_diff)
    except ValueError:
        date_score = 0  # missing or malformed creation date
    # Score based on rank (up to 300 points)
    rank_score = max(0, 300 - rank)
    # Total score (0-600)
    total_score = date_score + rank_score
    # Map the total score to a star count (1-5)
if total_score <= 100:
stars = 1
elif total_score <= 200:
stars = 2
elif total_score <= 300:
stars = 3
elif total_score <= 400:
stars = 4
else:
stars = 5
return stars
def get_popularity_grade(likes: int, stars: int) -> tuple:
"""AI Popularity Score λ“±κΈ‰ 계산"""
# 기본 점수 (likes)
base_score = min(likes, 10000) # μ΅œλŒ€ 10000점
# 별점 μΆ”κ°€ 점수 (별 ν•˜λ‚˜λ‹Ή 500점)
star_score = stars * 500
# 총점
total_score = base_score + star_score
# λ“±κΈ‰ ν…Œμ΄λΈ” (18단계)
grades = [
(9000, "AAA+"), (8500, "AAA"), (8000, "AAA-"),
(7500, "AA+"), (7000, "AA"), (6500, "AA-"),
(6000, "A+"), (5500, "A"), (5000, "A-"),
(4500, "BBB+"), (4000, "BBB"), (3500, "BBB-"),
(3000, "BB+"), (2500, "BB"), (2000, "BB-"),
(1500, "B+"), (1000, "B"), (500, "B-")
]
for threshold, grade in grades:
if total_score >= threshold:
return grade, total_score
return "B-", total_score
# Replace the hardware_info section inside get_card with the following:
def get_rating_info(item: dict, index: int) -> str:
"""평가 정보 HTML 생성"""
created = item.get('createdAt', '').split('T')[0]
likes = int(str(item.get('likes', '0')).replace(',', ''))
# AI Rising Rate 계산
stars = calculate_rising_rate(created, index + 1)
star_html = "β˜…" * stars + "β˜†" * (5 - stars) # μ±„μ›Œμ§„ 별과 빈 별 μ‘°ν•©
# AI Popularity Score 계산
grade, score = get_popularity_grade(likes, stars)
# 등급별 색상 μ„€μ •
grade_colors = {
'AAA': '#FFD700', 'AA': '#FFA500', 'A': '#FF4500',
'BBB': '#4169E1', 'BB': '#1E90FF', 'B': '#00BFFF'
}
grade_base = grade.rstrip('+-')
grade_color = grade_colors.get(grade_base, '#666666')
return f"""
<div style='
margin-top: 15px;
padding: 15px;
background: rgba(255,255,255,0.4);
border-radius: 10px;
font-size: 0.9em;
box-shadow: 0 2px 10px rgba(0,0,0,0.1);'>
<div style='
display: grid;
grid-template-columns: repeat(2, 1fr);
gap: 15px;'>
<div style='
color: #333;
display: flex;
flex-direction: column;
gap: 5px;'>
<span style='font-weight: bold;'>AI Rising Rate:</span>
<span style='
color: #FF8C00;
font-size: 1.4em;
letter-spacing: 2px;
text-shadow: 1px 1px 2px rgba(0,0,0,0.1);'>{star_html}</span>
</div>
<div style='
color: #333;
display: flex;
flex-direction: column;
gap: 5px;'>
<span style='font-weight: bold;'>AI Popularity Score:</span>
<span style='
font-size: 1.2em;
font-weight: bold;
color: {grade_color};
text-shadow: 1px 1px 2px rgba(0,0,0,0.1);'>{grade} ({score:,})</span>
</div>
</div>
</div>
"""
def get_hardware_info(item: dict) -> tuple:
"""ν•˜λ“œμ›¨μ–΄ 정보 μΆ”μΆœ"""
try:
# runtime 정보 확인
runtime = item.get('runtime', {})
# CPU 정보 처리
cpu_info = runtime.get('cpu', 'Standard')
# GPU 정보 처리
gpu_info = "None"
if runtime.get('accelerator') == "gpu":
gpu_type = runtime.get('gpu', {}).get('name', '')
gpu_memory = runtime.get('gpu', {}).get('memory', '')
if gpu_type:
gpu_info = f"{gpu_type}"
if gpu_memory:
gpu_info += f" ({gpu_memory}GB)"
        # Check for the @spaces.GPU decorator
if '@spaces.GPU' in str(item.get('sdk_version', '')):
if gpu_info == "None":
gpu_info = "GPU Enabled"
        # SDK info
sdk = item.get('sdk', 'N/A')
print(f"Debug - Runtime Info: {runtime}") # 디버그 좜λ ₯
print(f"Debug - GPU Info: {gpu_info}") # 디버그 좜λ ₯
return cpu_info, gpu_info, sdk
except Exception as e:
print(f"Error parsing hardware info: {str(e)}")
return 'Standard', 'None', 'N/A'
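# Note: get_hardware_info is defined here but is not currently referenced by
# get_card below.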
def get_card(item: dict, index: int, card_type: str = "space") -> str:
"""톡합 μΉ΄λ“œ HTML 생성"""
    item_id = item.get('id', '')
    # Split "author/name" ids; ids without a namespace keep the full id as the title
    author, _, title = item_id.partition('/')
    if not title:
        author, title = 'N/A', item_id
    likes = format(item.get('likes', 0), ',')
    created = item.get('createdAt', '').split('T')[0]
    # short_description from cardData
    short_description = item.get('cardData', {}).get('short_description', '')
    # Header HTML with the title and short_description (built here but not interpolated into the card below)
title_html = f"""
<h3 style='
margin: 0 0 15px 0;
color: #333;
font-size: 1.3em;
line-height: 1.4;
display: -webkit-box;
-webkit-line-clamp: 2;
-webkit-box-orient: vertical;
overflow: hidden;
text-overflow: ellipsis;
text-shadow: 1px 1px 1px rgba(255,255,255,0.8);'>
{title}
{f'<span style="display: block; font-size: 0.7em; color: #666; margin-top: 5px; font-weight: normal; font-style: italic;">{short_description}</span>' if short_description else ''}
</h3>
"""
# URL μ •μ˜
if card_type == "space":
url = f"https://huggingface.co/spaces/{item_id}"
elif card_type == "model":
url = f"https://huggingface.co/{item_id}"
else: # dataset
url = f"https://huggingface.co/datasets/{item_id}"
    # Metadata
tags = item.get('tags', [])
pipeline_tag = item.get('pipeline_tag', '')
license = item.get('license', '')
sdk = item.get('sdk', 'N/A')
    # AI rating info
rating_info = get_rating_info(item, index)
    # Gradient settings per card type
if card_type == "space":
gradient_colors = """
            rgba(255, 182, 193, 0.7), /* pastel pink */
            rgba(173, 216, 230, 0.7), /* pastel blue */
            rgba(255, 218, 185, 0.7) /* pastel peach */
"""
bg_content = f"""
background-image: url(data:image/png;base64,{get_cached_screenshot(url) if get_cached_screenshot(url) else ''});
background-size: cover;
background-position: center;
"""
type_icon = "🎯"
type_label = "SPACE"
elif card_type == "model":
gradient_colors = """
            rgba(110, 142, 251, 0.7), /* model blue */
rgba(130, 158, 251, 0.7),
rgba(150, 174, 251, 0.7)
"""
bg_content = f"""
background: linear-gradient(135deg, #6e8efb, #4a6cf7);
padding: 15px;
"""
type_icon = "πŸ€–"
type_label = "MODEL"
else: # dataset
gradient_colors = """
            rgba(255, 107, 107, 0.7), /* dataset red */
rgba(255, 127, 127, 0.7),
rgba(255, 147, 147, 0.7)
"""
bg_content = f"""
background: linear-gradient(135deg, #ff6b6b, #ff8787);
padding: 15px;
"""
type_icon = "πŸ“Š"
type_label = "DATASET"
content_bg = f"""
background: linear-gradient(135deg, {gradient_colors});
backdrop-filter: blur(10px);
"""
    # Tag display (models and datasets only)
tags_html = ""
if card_type != "space":
tags_html = f"""
<div style='
position: absolute;
top: 50%;
left: 50%;
transform: translate(-50%, -50%);
display: flex;
flex-wrap: wrap;
gap: 5px;
justify-content: center;
width: 90%;'>
{' '.join([f'''
<span style='
background: rgba(255,255,255,0.2);
padding: 5px 10px;
border-radius: 15px;
color: white;
font-size: 0.8em;'>
#{tag}
</span>
''' for tag in tags[:5]])}
</div>
"""
    # Return the card HTML
return f"""
<div class="card" style='
position: relative;
border: none;
padding: 0;
margin: 10px;
border-radius: 20px;
box-shadow: 0 10px 20px rgba(0,0,0,0.1);
background: white;
transition: all 0.3s ease;
overflow: hidden;
min-height: 400px;
cursor: pointer;
transform-origin: center;'
onmouseover="this.style.transform='scale(0.98) translateY(5px)'; this.style.boxShadow='0 5px 15px rgba(0,0,0,0.2)';"
onmouseout="this.style.transform='scale(1) translateY(0)'; this.style.boxShadow='0 10px 20px rgba(0,0,0,0.1)';"
onclick="window.open('{url}', '_blank')">
        <!-- Top section -->
<div style='
width: 100%;
height: 200px;
{bg_content}
position: relative;'>
            <!-- Rank badge -->
<div style='
position: absolute;
top: 10px;
left: 10px;
background: rgba(0,0,0,0.7);
color: white;
padding: 5px 15px;
border-radius: 20px;
font-weight: bold;
font-size: 0.9em;
backdrop-filter: blur(5px);'>
#{index + 1}
</div>
            <!-- Type badge -->
<div style='
position: absolute;
top: 10px;
right: 10px;
background: rgba(255,255,255,0.9);
padding: 5px 15px;
border-radius: 20px;
font-weight: bold;
font-size: 0.8em;'>
{type_icon} {type_label}
</div>
{tags_html}
</div>
<!-- μ½˜ν…μΈ  μ˜μ—­ -->
<div style='
padding: 20px;
{content_bg}
border-radius: 0 0 20px 20px;
border-top: 1px solid rgba(255,255,255,0.5);'>
<h3 style='
margin: 0 0 15px 0;
color: #333;
font-size: 1.3em;
line-height: 1.4;
display: -webkit-box;
-webkit-line-clamp: 2;
-webkit-box-orient: vertical;
overflow: hidden;
text-overflow: ellipsis;
text-shadow: 1px 1px 1px rgba(255,255,255,0.8);'>
{title}
</h3>
<div style='
display: grid;
grid-template-columns: repeat(2, 1fr);
gap: 10px;
font-size: 0.9em;
background: rgba(255,255,255,0.3);
padding: 10px;
border-radius: 10px;'>
<div style='color: #444;'>
<span style='margin-right: 5px;'>πŸ‘€</span> {author}
</div>
<div style='color: #444;'>
<span style='margin-right: 5px;'>❀️</span> {likes}
</div>
<div style='color: #444; grid-column: span 2;'>
<span style='margin-right: 5px;'>πŸ“…</span> {created}
</div>
</div>
{rating_info}
</div>
</div>
"""
def get_trending_spaces(search_query="", sort_by="rank", progress=gr.Progress()) -> Tuple[str, str]:
"""νŠΈλ Œλ”© 슀페이슀 κ°€μ Έμ˜€κΈ°"""
url = "https://huggingface.co/api/spaces"
try:
progress(0, desc="Fetching spaces data...")
params = {
'full': 'true',
            'limit': 10  # note: only 10 spaces are fetched here, not the full top 300
}
response = requests.get(url, params=params)
response.raise_for_status()
spaces = response.json()
        # Filter by the search query (only when one is provided)
if search_query:
filtered_spaces = []
            # Fetch the data again for searching
            params['limit'] = 10
response = requests.get(url, params=params)
all_spaces = response.json()
filtered_spaces = [space for space in all_spaces if search_query.lower() in
(space.get('id', '') + space.get('title', '')).lower()]
            spaces = filtered_spaces[:300]  # keep the top 300 only
        # Sort (skip when sorting by rank)
        if sort_by != "rank":  # keep the original API order for rank
if sort_by == "rising_rate":
spaces.sort(key=lambda x: calculate_rising_rate(x.get('createdAt', ''), 0), reverse=True)
elif sort_by == "popularity":
spaces.sort(key=lambda x: get_popularity_grade(
int(str(x.get('likes', '0')).replace(',', '')),
calculate_rising_rate(x.get('createdAt', ''), 0))[1],
reverse=True)
progress(0.1, desc="Creating gallery...")
html_content = """
<div style='padding: 20px; background: #f5f5f5;'>
<div style='display: grid; grid-template-columns: repeat(auto-fill, minmax(300px, 1fr)); gap: 20px;'>
"""
for idx, space in enumerate(spaces):
html_content += get_card(space, idx, "space")
progress((0.1 + 0.9 * idx/len(spaces)), desc=f"Loading space {idx+1}/{len(spaces)}...")
html_content += "</div></div>"
progress(1.0, desc="Complete!")
return html_content, f"Found {len(spaces)} spaces"
except Exception as e:
error_html = f'<div style="color: red; padding: 20px;">Error: {str(e)}</div>'
return error_html, f"Error: {str(e)}"
def get_models(search_query="", sort_by="rank", progress=gr.Progress()) -> Tuple[str, str]:
"""인기 λͺ¨λΈ κ°€μ Έμ˜€κΈ°"""
url = "https://huggingface.co/api/models"
try:
progress(0, desc="Fetching models data...")
params = {
'full': 'true',
            'limit': 300  # default: keep the top 300
}
response = requests.get(url, params=params)
response.raise_for_status()
models = response.json()
        # Filter by the search query (only when one is provided)
if search_query:
filtered_models = []
            # Fetch the data again for searching
            params['limit'] = 300
response = requests.get(url, params=params)
all_models = response.json()
filtered_models = [model for model in all_models if search_query.lower() in
(model.get('id', '') + model.get('title', '')).lower()]
            models = filtered_models[:300]  # keep the top 300 only
        # Sort (skip when sorting by rank)
        if sort_by != "rank":  # keep the original API order for rank
if sort_by == "rising_rate":
models.sort(key=lambda x: calculate_rising_rate(x.get('createdAt', ''), 0), reverse=True)
elif sort_by == "popularity":
models.sort(key=lambda x: get_popularity_grade(
int(str(x.get('likes', '0')).replace(',', '')),
calculate_rising_rate(x.get('createdAt', ''), 0))[1],
reverse=True)
progress(0.1, desc="Creating gallery...")
html_content = """
<div style='padding: 20px; background: #f5f5f5;'>
<div style='display: grid; grid-template-columns: repeat(auto-fill, minmax(300px, 1fr)); gap: 20px;'>
"""
for idx, model in enumerate(models):
html_content += get_card(model, idx, "model")
progress((0.1 + 0.9 * idx/len(models)), desc=f"Loading model {idx+1}/{len(models)}...")
html_content += "</div></div>"
progress(1.0, desc="Complete!")
return html_content, f"Found {len(models)} models"
except Exception as e:
error_html = f'<div style="color: red; padding: 20px;">Error: {str(e)}</div>'
return error_html, f"Error: {str(e)}"
def get_datasets(search_query="", sort_by="rank", progress=gr.Progress()) -> Tuple[str, str]:
"""인기 데이터셋 κ°€μ Έμ˜€κΈ°"""
url = "https://huggingface.co/api/datasets"
try:
progress(0, desc="Fetching datasets data...")
params = {
'full': 'true',
            'limit': 300  # default: keep the top 300
}
response = requests.get(url, params=params)
response.raise_for_status()
datasets = response.json()
        # Filter by the search query (only when one is provided)
if search_query:
filtered_datasets = []
            # Fetch the data again for searching
            params['limit'] = 300
response = requests.get(url, params=params)
all_datasets = response.json()
filtered_datasets = [dataset for dataset in all_datasets if search_query.lower() in
(dataset.get('id', '') + dataset.get('title', '')).lower()]
            datasets = filtered_datasets[:300]  # keep the top 300 only
        # Sort (skip when sorting by rank)
        if sort_by != "rank":  # keep the original API order for rank
if sort_by == "rising_rate":
datasets.sort(key=lambda x: calculate_rising_rate(x.get('createdAt', ''), 0), reverse=True)
elif sort_by == "popularity":
datasets.sort(key=lambda x: get_popularity_grade(
int(str(x.get('likes', '0')).replace(',', '')),
calculate_rising_rate(x.get('createdAt', ''), 0))[1],
reverse=True)
progress(0.1, desc="Creating gallery...")
html_content = """
<div style='padding: 20px; background: #f5f5f5;'>
<div style='display: grid; grid-template-columns: repeat(auto-fill, minmax(300px, 1fr)); gap: 20px;'>
"""
for idx, dataset in enumerate(datasets):
html_content += get_card(dataset, idx, "dataset")
progress((0.1 + 0.9 * idx/len(datasets)), desc=f"Loading dataset {idx+1}/{len(datasets)}...")
html_content += "</div></div>"
progress(1.0, desc="Complete!")
return html_content, f"Found {len(datasets)} datasets"
except Exception as e:
error_html = f'<div style="color: red; padding: 20px;">Error: {str(e)}</div>'
return error_html, f"Error: {str(e)}"
# Sorting helper
def sort_items(items, sort_by):
if sort_by == "rank":
        return items  # already ordered by rank
elif sort_by == "rising_rate":
return sorted(items, key=lambda x: calculate_rising_rate(x.get('createdAt', ''), 0), reverse=True)
elif sort_by == "popularity":
return sorted(items, key=lambda x: get_popularity_grade(int(str(x.get('likes', '0')).replace(',', '')),
calculate_rising_rate(x.get('createdAt', ''), 0))[1], reverse=True)
return items
# Generic API fetch helper
def fetch_items(item_type, search_query="", sort_by="rank", limit=1000):
"""μ•„μ΄ν…œ κ°€μ Έμ˜€κΈ° (spaces/models/datasets)"""
base_url = f"https://huggingface.co/api/{item_type}"
params = {
'full': 'true',
'limit': limit,
'search': search_query
}
try:
response = requests.get(base_url, params=params)
response.raise_for_status()
items = response.json()
        # Filter by the search query
if search_query:
items = [item for item in items if search_query.lower() in
(item.get('id', '') + item.get('title', '')).lower()]
        # Sort
items = sort_items(items, sort_by)
        return items[:300]  # return the top 300 only
except Exception as e:
print(f"Error fetching items: {e}")
return []
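# Note: sort_items and fetch_items are standalone helpers; the tab callbacks
# above (get_trending_spaces, get_models, get_datasets) fetch and sort inline
# and do not call them.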
def create_interface():
with gr.Blocks(title="HuggingFace Trending Board", css="""
.search-sort-container {
background: linear-gradient(135deg, rgba(255,255,255,0.95), rgba(240,240,255,0.95));
border-radius: 15px;
padding: 20px;
margin: 10px 0;
box-shadow: 0 4px 6px rgba(0,0,0,0.1);
}
.search-box {
border: 2px solid #e1e1e1;
border-radius: 10px;
padding: 12px;
transition: all 0.3s ease;
background: linear-gradient(135deg, #ffffff, #f8f9ff);
width: 150%;
margin-right: -50%;
}
.search-box:focus {
border-color: #7b61ff;
box-shadow: 0 0 0 2px rgba(123,97,255,0.2);
background: linear-gradient(135deg, #ffffff, #f0f3ff);
}
/* μ •λ ¬ λΌλ””μ˜€ λ²„νŠΌ μ»¨ν…Œμ΄λ„ˆ μˆ˜μ • */
.sort-radio {
display: flex !important;
justify-content: flex-start !important;
gap: 10px !important;
background: linear-gradient(135deg, #f5f5ff, #f0f0ff);
padding: 12px;
border-radius: 10px;
width: 100% !important;
}
/* λΌλ””μ˜€ λ²„νŠΌ κ·Έλ£Ή μ»¨ν…Œμ΄λ„ˆ μˆ˜μ • */
.sort-radio > div {
display: flex !important;
gap: 10px !important;
width: auto !important;
}
/* 각 λΌλ””μ˜€ λ²„νŠΌ ν•­λͺ© μˆ˜μ • */
.sort-radio > div > div {
width: 100px !important; /* κ³ μ • λ„ˆλΉ„ μ„€μ • */
flex: none !important; /* flex μ„±μž₯ 방지 */
}
/* λΌλ””μ˜€ λ²„νŠΌ λ ˆμ΄λΈ” μˆ˜μ • */
.sort-radio label {
width: 100px !important; /* κ³ μ • λ„ˆλΉ„ μ„€μ • */
padding: 8px 5px !important;
text-align: center !important;
background: linear-gradient(135deg, #ffffff, #f8f9ff);
border-radius: 8px;
cursor: pointer;
transition: all 0.3s ease;
border: 1px solid rgba(123,97,255,0.1);
white-space: nowrap !important;
font-size: 0.9em !important;
display: block !important;
}
.sort-radio label:hover {
background: linear-gradient(135deg, #f0f3ff, #e8ecff);
border-color: rgba(123,97,255,0.3);
}
.sort-radio input:checked + label {
background: linear-gradient(135deg, #7b61ff, #6366f1);
color: white;
border-color: transparent;
}
    /* Refresh button style */
.refresh-btn {
background: linear-gradient(135deg, #7b61ff, #6366f1);
color: white;
border: none;
padding: 10px 20px;
border-radius: 10px;
cursor: pointer;
transition: all 0.3s ease;
width: 120px;
        height: 80px !important; /* double height */
display: flex;
align-items: center;
justify-content: center;
margin-left: auto;
        font-size: 1.2em !important; /* larger font */
}
.refresh-btn:hover {
transform: translateY(-2px);
box-shadow: 0 4px 12px rgba(99,102,241,0.4);
background: linear-gradient(135deg, #8b71ff, #7376f1);
}
""") as interface:
gr.Markdown("""
# πŸ€— HuggingFace Trending TOP 300 Board
<div style='margin-bottom: 20px; padding: 10px; background: linear-gradient(135deg, rgba(123,97,255,0.1), rgba(99,102,241,0.1)); border-radius: 10px;'>
Explore, search, and sort through the top AI content from HuggingFace
</div>
""")
with gr.Tabs() as tabs:
            # Spaces tab
with gr.Tab("🎯 Trending Spaces"):
gr.Markdown("Shows top 300 trending spaces with AI ratings")
with gr.Row(elem_classes="search-sort-container"):
with gr.Column(scale=2):
spaces_search = gr.Textbox(
label="πŸ” Search Spaces",
placeholder="Enter keywords to search...",
elem_classes="search-box"
)
with gr.Column(scale=2):
spaces_sort = gr.Radio(
choices=["rank", "rising_rate", "popularity"],
value="rank",
label="πŸ“Š Sort by",
interactive=True,
elem_classes="sort-radio"
)
with gr.Column(scale=1):
spaces_refresh_btn = gr.Button(
"πŸ”„ Refresh",
variant="primary",
elem_classes="refresh-btn"
)
spaces_gallery = gr.HTML()
spaces_status = gr.Markdown("Loading...")
            # Models tab
with gr.Tab("πŸ€– Trending Models"):
gr.Markdown("Shows top 300 trending models with AI ratings")
with gr.Row(elem_classes="search-sort-container"):
with gr.Column(scale=2):
models_search = gr.Textbox(
label="πŸ” Search Models",
placeholder="Enter keywords to search...",
elem_classes="search-box"
)
with gr.Column(scale=2):
models_sort = gr.Radio(
choices=["rank", "rising_rate", "popularity"],
value="rank",
label="πŸ“Š Sort by",
interactive=True,
elem_classes="sort-radio"
)
with gr.Column(scale=1):
models_refresh_btn = gr.Button(
"πŸ”„ Refresh",
variant="primary",
elem_classes="refresh-btn"
)
models_gallery = gr.HTML()
models_status = gr.Markdown("Loading...")
            # Datasets tab
with gr.Tab("πŸ“Š Trending Datasets"):
gr.Markdown("Shows top 300 trending datasets with AI ratings")
with gr.Row(elem_classes="search-sort-container"):
with gr.Column(scale=2):
datasets_search = gr.Textbox(
label="πŸ” Search Datasets",
placeholder="Enter keywords to search...",
elem_classes="search-box"
)
with gr.Column(scale=2):
datasets_sort = gr.Radio(
choices=["rank", "rising_rate", "popularity"],
value="rank",
label="πŸ“Š Sort by",
interactive=True,
elem_classes="sort-radio"
)
with gr.Column(scale=1):
datasets_refresh_btn = gr.Button(
"πŸ”„ Refresh",
variant="primary",
elem_classes="refresh-btn"
)
datasets_gallery = gr.HTML()
datasets_status = gr.Markdown("Loading...")
# Event handlers
spaces_refresh_btn.click(
fn=get_trending_spaces,
inputs=[spaces_search, spaces_sort],
outputs=[spaces_gallery, spaces_status]
)
models_refresh_btn.click(
fn=get_models,
inputs=[models_search, models_sort],
outputs=[models_gallery, models_status]
)
datasets_refresh_btn.click(
fn=get_datasets,
inputs=[datasets_search, datasets_sort],
outputs=[datasets_gallery, datasets_status]
)
        # Auto-refresh when the search text changes
spaces_search.change(
fn=get_trending_spaces,
inputs=[spaces_search, spaces_sort],
outputs=[spaces_gallery, spaces_status]
)
models_search.change(
fn=get_models,
inputs=[models_search, models_sort],
outputs=[models_gallery, models_status]
)
datasets_search.change(
fn=get_datasets,
inputs=[datasets_search, datasets_sort],
outputs=[datasets_gallery, datasets_status]
)
        # Auto-refresh when the sort option changes
spaces_sort.change(
fn=get_trending_spaces,
inputs=[spaces_search, spaces_sort],
outputs=[spaces_gallery, spaces_status]
)
models_sort.change(
fn=get_models,
inputs=[models_search, models_sort],
outputs=[models_gallery, models_status]
)
datasets_sort.change(
fn=get_datasets,
inputs=[datasets_search, datasets_sort],
outputs=[datasets_gallery, datasets_status]
)
        # Load initial data
interface.load(
fn=get_trending_spaces,
inputs=[spaces_search, spaces_sort],
outputs=[spaces_gallery, spaces_status]
)
interface.load(
fn=get_models,
inputs=[models_search, models_sort],
outputs=[models_gallery, models_status]
)
interface.load(
fn=get_datasets,
inputs=[datasets_search, datasets_sort],
outputs=[datasets_gallery, datasets_status]
)
return interface
if __name__ == "__main__":
try:
demo = create_interface()
demo.launch(
share=True,
inbrowser=True,
show_api=False
)
except Exception as e:
print(f"Error launching app: {e}")