|
import gradio as gr |
|
import pandas as pd |
|
import os |
|
import time |
|
import threading |
|
import tempfile |
|
import logging |
|
import random |
|
import uuid |
|
import shutil |
|
import glob |
|
from datetime import datetime |
|
import requests |
|
import json |
|
from dotenv import load_dotenv |
|
|
|
|
|
# Load environment variables (e.g. API_ENDPOINT) from a local .env file.
load_dotenv()

# Root logging at WARNING; this module's logger is used for app messages.
logging.basicConfig(level=logging.WARNING)
logger = logging.getLogger(__name__)

# Process-wide, in-memory session registries (no persistence):
# session_temp_files: session_id -> list of temp file paths owned by it
# session_data: session_id -> {'last_activity': epoch seconds}
session_temp_files = {}
session_data = {}
|
|
|
def get_api_client():
    """Build a minimal client for the remote Gradio app named by API_ENDPOINT.

    The returned object exposes ``predict(api_name=..., **kwargs)``, which
    POSTs ``{"data": [...]}`` to ``<base_url>/call<api_name>`` with the
    positional-argument order each remote endpoint expects, and returns the
    ``data`` list of the JSON response.

    Raises:
        ValueError: if the API_ENDPOINT environment variable is unset.
        Exception: wrapping any transport/HTTP failure at call time.
    """
    endpoint = os.getenv('API_ENDPOINT')
    if not endpoint:
        raise ValueError("API_ENDPOINT νκ²½λ³μκ° νμν©λλ€.")

    # (kwarg name, default) pairs in the positional order each remote
    # endpoint expects. Endpoints absent from this table take no arguments
    # (/reset_interface, /get_session_id). This replaces six duplicated
    # elif branches that repeated the same literals.
    search_spec = [
        ('keyword', ''), ('korean_only', True),
        ('apply_main_keyword', 'λ©μΈν€μλ μ μ©'), ('exclude_zero_volume', False),
    ]
    analyze_spec = [('analysis_keywords', ''), ('selected_category', 'μ 체 보기')]
    arg_specs = {
        "/process_search_results": search_spec,
        "/search_with_loading": search_spec,
        "/filter_and_sort_table": [
            ('selected_cat', 'μ 체 보기'), ('keyword_sort', 'μ λ ¬ μμ'),
            ('total_volume_sort', 'μ λ ¬ μμ'), ('usage_count_sort', 'μ λ ¬ μμ'),
            ('selected_volume_range', 'μ 체'), ('exclude_zero_volume', False),
        ],
        "/update_category_selection": [('selected_cat', 'μ 체 보기')],
        "/process_analyze_results": analyze_spec,
        "/analyze_with_loading": analyze_spec,
    }

    def make_request(api_name, **kwargs):
        try:
            # A bare Space name is expanded to its hf.space URL.
            if not endpoint.startswith('http'):
                base_url = f"https://{endpoint}.hf.space"
            else:
                base_url = endpoint

            url = f"{base_url}/call{api_name}"
            data = [kwargs.get(name, default)
                    for name, default in arg_specs.get(api_name, [])]

            response = requests.post(url, json={"data": data}, timeout=60)
            if response.status_code == 200:
                return response.json().get('data', [])
            # Re-wrapped by the except below, matching the original's chain.
            raise Exception(f"API νΈμΆ μ€ν¨: {response.status_code}")
        except Exception as e:
            raise Exception(f"API μ°κ²° μ€λ₯: {str(e)}")

    # Anonymous object with a gradio_client-like ``predict`` interface.
    return type('APIClient', (), {'predict': lambda self, **kwargs: make_request(kwargs.pop('api_name'), **kwargs)})()
|
|
|
def cleanup_huggingface_temp_folders():
    """Best-effort removal of stale session files from shared temp dirs.

    Deletes session_*.xlsx / session_*.csv files older than one hour from
    the system temp dir, /tmp and /var/tmp. Every error is swallowed so
    startup/shutdown never fails because of cleanup.
    """
    try:
        # Candidate directories; on some hosts these overlap — harmless.
        temp_dirs = [tempfile.gettempdir(), "/tmp", "/var/tmp"]
        cleanup_count = 0

        for temp_dir in temp_dirs:
            if os.path.exists(temp_dir):
                try:
                    # Only files this app creates (see create_session_temp_file).
                    session_files = glob.glob(os.path.join(temp_dir, "session_*.xlsx"))
                    session_files.extend(glob.glob(os.path.join(temp_dir, "session_*.csv")))

                    for file_path in session_files:
                        try:
                            # Keep files younger than 1 hour — may be in use.
                            if os.path.getmtime(file_path) < time.time() - 3600:
                                os.remove(file_path)
                                cleanup_count += 1
                        except Exception:
                            pass  # deliberate best-effort: skip locked/raced files
                except Exception:
                    pass  # unreadable dir — skip it
        logger.info(f"β
μμ ν΄λ μ 리 μλ£ - {cleanup_count}κ° νμΌ μμ ")
    except Exception as e:
        logger.error(f"μμ ν΄λ μ 리 μ€ μ€λ₯: {e}")
|
|
|
def setup_clean_temp_environment():
    """Create a fresh app-specific temp directory and export it via env var.

    Clears stale session files first, recreates the directory from scratch,
    and stores its path in CONTROL_TOWER_TEMP (read by get_app_temp_dir()).
    Falls back to the system temp dir on any error.
    """
    try:
        cleanup_huggingface_temp_folders()

        app_temp_dir = os.path.join(tempfile.gettempdir(), "control_tower_app")
        # Recreate from scratch so no stale files survive a restart.
        if os.path.exists(app_temp_dir):
            shutil.rmtree(app_temp_dir, ignore_errors=True)
        os.makedirs(app_temp_dir, exist_ok=True)

        # Consumed by get_app_temp_dir().
        os.environ['CONTROL_TOWER_TEMP'] = app_temp_dir

        logger.info(f"β
μ ν리μΌμ΄μ
μ μ© μμ λλ ν 리 μ€μ : {app_temp_dir}")
        return app_temp_dir
    except Exception as e:
        logger.error(f"μμ νκ²½ μ€μ μ€ν¨: {e}")
        return tempfile.gettempdir()
|
|
|
def get_app_temp_dir():
    """Return the app-specific temp directory, or the system temp dir.

    CONTROL_TOWER_TEMP is populated by setup_clean_temp_environment(); when
    it is absent we fall back to the platform default.
    """
    configured = os.environ.get('CONTROL_TOWER_TEMP')
    return configured if configured is not None else tempfile.gettempdir()
|
|
|
def get_session_id():
    """Return a session id from the remote API, or a local UUID4 string.

    Any failure — missing endpoint, network error, empty response — degrades
    to a locally generated identifier so callers always get a usable id.
    """
    try:
        api = get_api_client()
        response = api.predict(api_name="/get_session_id")
        if response:
            return response[0]
        return str(uuid.uuid4())
    except Exception:
        return str(uuid.uuid4())
|
|
|
def cleanup_session_files(session_id, delay=300):
    """Schedule deletion of *session_id*'s temp files after *delay* seconds.

    Runs on a daemon thread so application shutdown is never blocked. The
    registry entry is dropped before the files are deleted so concurrent
    lookups do not see half-cleaned state.
    """
    def cleanup():
        time.sleep(delay)
        if session_id in session_temp_files:
            # Copy first so the registry entry can be removed immediately.
            files_to_remove = session_temp_files[session_id].copy()
            del session_temp_files[session_id]

            for file_path in files_to_remove:
                try:
                    if os.path.exists(file_path):
                        os.remove(file_path)
                        logger.info(f"μΈμ
{session_id[:8]}... μμ νμΌ μμ : {file_path}")
                except Exception as e:
                    logger.error(f"μΈμ
{session_id[:8]}... νμΌ μμ μ€λ₯: {e}")

    threading.Thread(target=cleanup, daemon=True).start()
|
|
|
def register_session_file(session_id, file_path):
    """Record *file_path* as owned by *session_id* for later cleanup.

    Registered paths are deleted by cleanup_session_files() /
    cleanup_old_sessions().
    """
    # setdefault replaces the original's explicit membership check + assign.
    session_temp_files.setdefault(session_id, []).append(file_path)
|
|
|
def cleanup_old_sessions():
    """Delete registered files and state for sessions idle for over an hour."""
    current_time = time.time()
    sessions_to_remove = []

    # Collect ids first — don't mutate session_data while iterating it.
    for session_id, data in session_data.items():
        if current_time - data.get('last_activity', 0) > 3600:
            sessions_to_remove.append(session_id)

    for session_id in sessions_to_remove:
        if session_id in session_temp_files:
            for file_path in session_temp_files[session_id]:
                try:
                    if os.path.exists(file_path):
                        os.remove(file_path)
                        logger.info(f"μ€λλ μΈμ
{session_id[:8]}... νμΌ μμ : {file_path}")
                except Exception as e:
                    logger.error(f"μ€λλ μΈμ
νμΌ μμ μ€λ₯: {e}")
            del session_temp_files[session_id]

        if session_id in session_data:
            del session_data[session_id]
            logger.info(f"μ€λλ μΈμ
λ°μ΄ν° μμ : {session_id[:8]}...")
|
|
|
def update_session_activity(session_id):
    """Stamp the current time as *session_id*'s last activity.

    Creates the session entry on first touch; cleanup_old_sessions() uses
    this timestamp to expire idle sessions.
    """
    entry = session_data.setdefault(session_id, {})
    entry['last_activity'] = time.time()
|
|
|
def create_session_temp_file(session_id, suffix='.xlsx'):
    """Create an empty, uniquely named temp file for *session_id*.

    The file lives in the app temp directory, is registered for session
    cleanup, and its path is returned. *suffix* selects the extension.
    """
    stamp = datetime.now().strftime("%Y%m%d_%H%M%S")
    nonce = str(random.randint(1000, 9999))
    path = os.path.join(
        get_app_temp_dir(),
        f"session_{session_id[:8]}_{stamp}_{nonce}{suffix}",
    )

    # Touch the file so downstream copies have a concrete target.
    open(path, 'w').close()

    register_session_file(session_id, path)
    return path
|
|
|
def search_with_loading(keyword, korean_only, apply_main_keyword, exclude_zero_volume):
    """Remote proxy for /search_with_loading; returns the first result or ''.

    Any failure (connection, bad payload) is logged and mapped to ''.
    """
    try:
        response = get_api_client().predict(
            keyword=keyword,
            korean_only=korean_only,
            apply_main_keyword=apply_main_keyword,
            exclude_zero_volume=exclude_zero_volume,
            api_name="/search_with_loading",
        )
        return response[0] if response else ""
    except Exception as e:
        logger.error(f"search_with_loading API νΈμΆ μ€λ₯: {e}")
        return ""
|
|
|
def process_search_results(keyword, korean_only, apply_main_keyword, exclude_zero_volume):
    """Remote proxy for /process_search_results.

    Returns (table_html, category_choices, volume_choices, selected_category,
    download_path). The remote download file is copied into a session-local
    temp file so it survives remote-side cleanup; every failure is mapped to
    a user-facing fallback tuple instead of raising.
    """
    try:
        response = get_api_client().predict(
            keyword=keyword,
            korean_only=korean_only,
            apply_main_keyword=apply_main_keyword,
            exclude_zero_volume=exclude_zero_volume,
            api_name="/process_search_results",
        )

        if len(response) < 5:
            # Remote answered but without the expected payload shape.
            return (
                "<p>κ²μ κ²°κ³Όκ° μμ΅λλ€.</p>",
                ["μ 체 보기"], ["μ 체"], "μ 체 보기", None
            )

        table_html, cat_choices, vol_choices, selected_cat, remote_file = response[:5]

        local_file = None
        if remote_file:
            # Copy into a session-owned file; download must not depend on
            # the remote Space keeping its temp file alive.
            sid = get_session_id()
            local_file = create_session_temp_file(sid, '.xlsx')
            try:
                shutil.copy2(remote_file, local_file)
            except Exception as e:
                logger.error(f"νμΌ λ³΅μ¬ μ€λ₯: {e}")
                local_file = None

        return table_html, cat_choices, vol_choices, selected_cat, local_file
    except Exception as e:
        logger.error(f"process_search_results API νΈμΆ μ€λ₯: {e}")
        return (
            "<p>μλΉμ€ μ°κ²°μ λ¬Έμ κ° λ°μνμ΅λλ€. μ μ ν λ€μ μλν΄μ£ΌμΈμ.</p>",
            ["μ 체 보기"], ["μ 체"], "μ 체 보기", None
        )
|
|
|
def filter_and_sort_table(selected_cat, keyword_sort, total_volume_sort, usage_count_sort, selected_volume_range, exclude_zero_volume):
    """Remote proxy for /filter_and_sort_table; returns the table HTML.

    Failures are logged and mapped to a user-facing error fragment.
    """
    try:
        response = get_api_client().predict(
            selected_cat=selected_cat,
            keyword_sort=keyword_sort,
            total_volume_sort=total_volume_sort,
            usage_count_sort=usage_count_sort,
            selected_volume_range=selected_volume_range,
            exclude_zero_volume=exclude_zero_volume,
            api_name="/filter_and_sort_table",
        )
        return response[0] if response else ""
    except Exception as e:
        logger.error(f"filter_and_sort_table API νΈμΆ μ€λ₯: {e}")
        return "<p>νν°λ§ μλΉμ€ μ°κ²°μ λ¬Έμ κ° λ°μνμ΅λλ€.</p>"
|
|
|
def update_category_selection(selected_cat):
    """Sync the analysis category dropdown with the table's category filter.

    Falls back to echoing *selected_cat* when the remote call fails or
    returns nothing.
    """
    try:
        response = get_api_client().predict(
            selected_cat=selected_cat,
            api_name="/update_category_selection",
        )
        new_value = response[0] if response else selected_cat
        return gr.update(value=new_value)
    except Exception as e:
        logger.error(f"update_category_selection API νΈμΆ μ€λ₯: {e}")
        return gr.update(value=selected_cat)
|
|
|
def analyze_with_loading(analysis_keywords, selected_category):
    """Remote proxy for /analyze_with_loading; returns the first result or ''.

    Any failure is logged and mapped to ''.
    """
    try:
        response = get_api_client().predict(
            analysis_keywords=analysis_keywords,
            selected_category=selected_category,
            api_name="/analyze_with_loading",
        )
        return response[0] if response else ""
    except Exception as e:
        logger.error(f"analyze_with_loading API νΈμΆ μ€λ₯: {e}")
        return ""
|
|
|
def process_analyze_results(analysis_keywords, selected_category):
    """Remote proxy for /process_analyze_results.

    Returns (summary_html, download_path). The remote report file is copied
    into a session-local temp file; failures yield user-facing messages
    instead of raising.
    """
    try:
        response = get_api_client().predict(
            analysis_keywords=analysis_keywords,
            selected_category=selected_category,
            api_name="/process_analyze_results",
        )

        if len(response) < 2:
            return "λΆμ κ²°κ³Όκ° μμ΅λλ€.", None

        summary_html, remote_file = response[:2]

        local_file = None
        if remote_file:
            # Copy so the download does not depend on the remote temp file.
            sid = get_session_id()
            local_file = create_session_temp_file(sid, '.xlsx')
            try:
                shutil.copy2(remote_file, local_file)
            except Exception as e:
                logger.error(f"λΆμ κ²°κ³Ό νμΌ λ³΅μ¬ μ€λ₯: {e}")
                local_file = None

        return summary_html, local_file
    except Exception as e:
        logger.error(f"process_analyze_results API νΈμΆ μ€λ₯: {e}")
        return "λΆμ μλΉμ€ μ°κ²°μ λ¬Έμ κ° λ°μνμ΅λλ€. μ μ ν λ€μ μλν΄μ£ΌμΈμ.", None
|
|
|
def reset_interface():
    """Remote proxy for /reset_interface; falls back to local defaults.

    Returns the remote reset tuple when available, otherwise
    get_default_reset_values().
    """
    try:
        response = get_api_client().predict(api_name="/reset_interface")
        return response if response else get_default_reset_values()
    except Exception as e:
        logger.error(f"reset_interface API νΈμΆ μ€λ₯: {e}")
        return get_default_reset_values()
|
|
|
def get_default_reset_values():
    """Default values for the 16 components targeted by the reset action.

    Order matches the outputs list of reset_btn.click in create_app().
    """
    defaults = (
        "", True, False, "λ©μΈν€μλ μ μ©", "", ["μ 체 보기"], "μ 체 보기",
        ["μ 체"], "μ 체", "μ λ ¬ μμ", "μ λ ¬ μμ", ["μ 체 보기"], "μ 체 보기",
        "", "", None,
    )
    return defaults
|
|
|
|
|
def wrapper_search_with_loading(keyword, korean_only, apply_main_keyword, exclude_zero_volume):
    """Kick off the remote loading step, then toggle the loading UI.

    Returns updates that show the progress section and hide the empty
    placeholder table.
    """
    search_with_loading(keyword, korean_only, apply_main_keyword, exclude_zero_volume)
    show_progress = gr.update(visible=True)
    hide_placeholder = gr.update(visible=False)
    return show_progress, hide_placeholder
|
|
|
def wrapper_process_search_results(keyword, korean_only, apply_main_keyword, exclude_zero_volume):
    """Run the search and map the API result onto the Gradio outputs.

    A successful search reveals the filter/analysis/execution sections and
    hides the empty placeholder; a failure does the opposite. The progress
    section is always hidden afterwards.
    """
    table_html, cat_choices, vol_choices, selected_cat, download_file = (
        process_search_results(keyword, korean_only, apply_main_keyword, exclude_zero_volume)
    )

    # Success = non-empty HTML that is not one of the failure messages.
    has_results = (
        bool(table_html)
        and "κ²μ κ²°κ³Όκ° μμ΅λλ€" not in table_html
        and "λ¬Έμ κ° λ°μνμ΅λλ€" not in table_html
    )

    return (
        table_html,
        cat_choices,
        vol_choices,
        pd.DataFrame(),                      # fresh state for downstream events
        selected_cat,
        download_file,
        gr.update(visible=has_results),      # keyword_analysis_section
        gr.update(visible=has_results),      # category_analysis_section
        gr.update(visible=False),            # progress_section
        gr.update(visible=not has_results),  # empty placeholder table
        gr.update(visible=has_results),      # execution_section
        keyword
    )
|
|
|
def wrapper_analyze_with_loading(analysis_keywords, selected_category, state_df):
    """Trigger the remote loading step and reveal the progress section.

    *state_df* is accepted to match the event signature but unused here.
    """
    analyze_with_loading(analysis_keywords, selected_category)
    show_progress = gr.update(visible=True)
    return show_progress
|
|
|
def wrapper_process_analyze_results(analysis_keywords, selected_category, state_df):
    """Run the analysis and map its result onto the output components.

    *state_df* is accepted to match the event signature but unused here.
    """
    summary_html, report_file = process_analyze_results(analysis_keywords, selected_category)
    return (
        summary_html,
        report_file,
        gr.update(visible=True),    # show the analysis output section
        gr.update(visible=False),   # hide the progress indicator
    )
|
|
|
|
|
def start_session_cleanup_scheduler():
    """Run session/temp-folder cleanup every 10 minutes on a daemon thread."""
    def _sweep_forever():
        while True:
            time.sleep(600)  # 10 minutes between sweeps
            cleanup_old_sessions()
            cleanup_huggingface_temp_folders()

    threading.Thread(target=_sweep_forever, daemon=True).start()
|
|
|
def cleanup_on_startup():
    """Run full cleanup at application start; return the app temp dir."""
    logger.info("𧹠컨νΈλ‘€ νμ μ ν리μΌμ΄μ
μμ - μ΄κΈ° μ 리 μμ
μμ...")

    cleanup_huggingface_temp_folders()
    app_temp_dir = setup_clean_temp_environment()

    # Reset the in-memory registries; on-disk files were handled above.
    global session_temp_files, session_data
    session_temp_files.clear()
    session_data.clear()

    logger.info(f"β
μ΄κΈ° μ 리 μμ
μλ£ - μ± μ μ© λλ ν 리: {app_temp_dir}")
    return app_temp_dir
|
|
|
|
|
def create_app(): |
|
fontawesome_html = """ |
|
<link rel="stylesheet" href="https://cdnjs.cloudflare.com/ajax/libs/font-awesome/6.0.0/css/all.min.css"> |
|
<link rel="stylesheet" href="https://cdn.jsdelivr.net/gh/orioncactus/pretendard/dist/web/static/pretendard.css"> |
|
<link rel="stylesheet" href="https://fonts.googleapis.com/css2?family=Noto+Sans+KR:wght@300;400;500;700&display=swap"> |
|
""" |
|
|
|
|
|
try: |
|
with open('style.css', 'r', encoding='utf-8') as f: |
|
custom_css = f.read() |
|
except: |
|
custom_css = """ |
|
:root { |
|
--primary-color: #FB7F0D; |
|
--secondary-color: #ff9a8b; |
|
} |
|
.custom-button { |
|
background: linear-gradient(135deg, var(--primary-color), var(--secondary-color)) !important; |
|
color: white !important; |
|
border-radius: 30px !important; |
|
height: 45px !important; |
|
font-size: 16px !important; |
|
font-weight: bold !important; |
|
width: 100% !important; |
|
} |
|
""" |
|
|
|
with gr.Blocks(css=custom_css, theme=gr.themes.Default( |
|
primary_hue="orange", |
|
secondary_hue="orange", |
|
font=[gr.themes.GoogleFont("Noto Sans KR"), "ui-sans-serif", "system-ui"] |
|
)) as demo: |
|
gr.HTML(fontawesome_html) |
|
|
|
|
|
keyword_state = gr.State("") |
|
|
|
|
|
with gr.Column(elem_classes="custom-frame fade-in"): |
|
gr.HTML('<div class="section-title"><i class="fas fa-search"></i> κ²μ μ
λ ₯</div>') |
|
|
|
with gr.Row(): |
|
with gr.Column(scale=1): |
|
keyword = gr.Textbox( |
|
label="λ©μΈ ν€μλ", |
|
placeholder="μ: μ€μ§μ΄" |
|
) |
|
with gr.Column(scale=1): |
|
search_btn = gr.Button( |
|
"λ©μΈν€μλ λΆμ", |
|
elem_classes="custom-button" |
|
) |
|
|
|
with gr.Accordion("μ΅μ
μ€μ ", open=False): |
|
with gr.Row(): |
|
with gr.Column(scale=1): |
|
korean_only = gr.Checkbox( |
|
label="νκΈλ§ μΆμΆ", |
|
value=True |
|
) |
|
with gr.Column(scale=1): |
|
exclude_zero_volume = gr.Checkbox( |
|
label="κ²μλ 0 ν€μλ μ μΈ", |
|
value=False |
|
) |
|
|
|
with gr.Row(): |
|
with gr.Column(scale=1): |
|
apply_main_keyword = gr.Radio( |
|
["λ©μΈν€μλ μ μ©", "λ©μΈν€μλ λ―Έμ μ©"], |
|
label="μ‘°ν© λ°©μ", |
|
value="λ©μΈν€μλ μ μ©" |
|
) |
|
with gr.Column(scale=1): |
|
gr.HTML("") |
|
|
|
|
|
with gr.Column(elem_classes="custom-frame fade-in", visible=False) as progress_section: |
|
gr.HTML('<div class="section-title"><i class="fas fa-spinner"></i> λΆμ μ§ν μν</div>') |
|
progress_html = gr.HTML(""" |
|
<div style="padding: 15px; background-color: #f9f9f9; border-radius: 5px; margin: 10px 0; border: 1px solid #ddd;"> |
|
<div style="margin-bottom: 10px; display: flex; align-items: center;"> |
|
<i class="fas fa-spinner fa-spin" style="color: #FB7F0D; margin-right: 10px;"></i> |
|
<span>ν€μλ λ°μ΄ν°λ₯Ό λΆμμ€μ
λλ€. μ μλ§ κΈ°λ€λ €μ£ΌμΈμ...</span> |
|
</div> |
|
<div style="background-color: #e9ecef; height: 10px; border-radius: 5px; overflow: hidden;"> |
|
<div class="progress-bar"></div> |
|
</div> |
|
</div> |
|
""") |
|
|
|
|
|
with gr.Column(elem_classes="custom-frame fade-in") as main_keyword_section: |
|
gr.HTML('<div class="section-title"><i class="fas fa-table"></i> λ©μΈν€μλ λΆμ κ²°κ³Ό</div>') |
|
|
|
empty_table_html = gr.HTML(""" |
|
<table class="empty-table"> |
|
<thead> |
|
<tr> |
|
<th>μλ²</th> |
|
<th>μ‘°ν© ν€μλ</th> |
|
<th>PCκ²μλ</th> |
|
<th>λͺ¨λ°μΌκ²μλ</th> |
|
<th>μ΄κ²μλ</th> |
|
<th>κ²μλꡬκ°</th> |
|
<th>ν€μλ μ¬μ©μμμ</th> |
|
<th>ν€μλ μ¬μ©νμ</th> |
|
<th>μν λ±λ‘ μΉ΄ν
κ³ λ¦¬</th> |
|
</tr> |
|
</thead> |
|
<tbody> |
|
<tr> |
|
<td colspan="9" style="padding: 30px; text-align: center;"> |
|
κ²μμ μ€ννλ©΄ μ¬κΈ°μ κ²°κ³Όκ° νμλ©λλ€ |
|
</td> |
|
</tr> |
|
</tbody> |
|
</table> |
|
""") |
|
|
|
with gr.Column(visible=False) as keyword_analysis_section: |
|
with gr.Row(): |
|
with gr.Column(scale=1): |
|
category_filter = gr.Dropdown( |
|
choices=["μ 체 보기"], |
|
label="μΉ΄ν
κ³ λ¦¬ νν°", |
|
value="μ 체 보기", |
|
interactive=True |
|
) |
|
with gr.Column(scale=1): |
|
total_volume_sort = gr.Dropdown( |
|
choices=["μ λ ¬ μμ", "μ€λ¦μ°¨μ", "λ΄λ¦Όμ°¨μ"], |
|
label="μ΄κ²μλ μ λ ¬", |
|
value="μ λ ¬ μμ", |
|
interactive=True |
|
) |
|
|
|
with gr.Row(): |
|
with gr.Column(scale=1): |
|
search_volume_filter = gr.Dropdown( |
|
choices=["μ 체"], |
|
label="κ²μλ κ΅¬κ° νν°", |
|
value="μ 체", |
|
interactive=True |
|
) |
|
with gr.Column(scale=1): |
|
usage_count_sort = gr.Dropdown( |
|
choices=["μ λ ¬ μμ", "μ€λ¦μ°¨μ", "λ΄λ¦Όμ°¨μ"], |
|
label="ν€μλ μ¬μ©νμ μ λ ¬", |
|
value="μ λ ¬ μμ", |
|
interactive=True |
|
) |
|
|
|
gr.HTML("<div class='data-container' id='table_container'></div>") |
|
table_output = gr.HTML(elem_classes="fade-in") |
|
|
|
|
|
with gr.Column(elem_classes="custom-frame fade-in", visible=False) as category_analysis_section: |
|
gr.HTML('<div class="section-title"><i class="fas fa-chart-bar"></i> ν€μλ λΆμ</div>') |
|
|
|
with gr.Row(): |
|
with gr.Column(scale=1): |
|
analysis_keywords = gr.Textbox( |
|
label="ν€μλ μ
λ ₯ (μ΅λ 20κ°, μΌν λλ μν°λ‘ ꡬλΆ)", |
|
placeholder="μ: μ€μ§μ΄λ³Άμ, μ€μ§μ΄ μμ§, μ€μ§μ΄ μ리...", |
|
lines=5 |
|
) |
|
|
|
with gr.Column(scale=1): |
|
selected_category = gr.Dropdown( |
|
label="λΆμν μΉ΄ν
κ³ λ¦¬(λΆμ μ λ°λμ μ νν΄μ£ΌμΈμ)", |
|
choices=["μ 체 보기"], |
|
value="μ 체 보기", |
|
interactive=True |
|
) |
|
|
|
|
|
with gr.Column(elem_classes="execution-section", visible=False) as execution_section: |
|
gr.HTML('<div class="section-title"><i class="fas fa-play-circle"></i> μ€ν</div>') |
|
with gr.Row(): |
|
with gr.Column(scale=1): |
|
analyze_btn = gr.Button( |
|
"μΉ΄ν
κ³ λ¦¬ μΌμΉ λΆμ", |
|
elem_classes=["execution-button", "primary-button"] |
|
) |
|
with gr.Column(scale=1): |
|
reset_btn = gr.Button( |
|
"λͺ¨λ μ
λ ₯ μ΄κΈ°ν", |
|
elem_classes=["execution-button", "secondary-button"] |
|
) |
|
|
|
|
|
with gr.Column(elem_classes="custom-frame fade-in", visible=False) as analysis_output_section: |
|
gr.HTML('<div class="section-title"><i class="fas fa-list-ul"></i> λΆμ κ²°κ³Ό μμ½</div>') |
|
|
|
analysis_result = gr.HTML(elem_classes="fade-in") |
|
|
|
with gr.Row(): |
|
download_output = gr.File( |
|
label="ν€μλ λͺ©λ‘ λ€μ΄λ‘λ", |
|
visible=True |
|
) |
|
|
|
|
|
state_df = gr.State() |
|
|
|
|
|
search_btn.click( |
|
fn=wrapper_search_with_loading, |
|
inputs=[keyword, korean_only, apply_main_keyword, exclude_zero_volume], |
|
outputs=[progress_section, empty_table_html] |
|
).then( |
|
fn=wrapper_process_search_results, |
|
inputs=[keyword, korean_only, apply_main_keyword, exclude_zero_volume], |
|
outputs=[ |
|
table_output, category_filter, search_volume_filter, |
|
state_df, selected_category, download_output, |
|
keyword_analysis_section, category_analysis_section, |
|
progress_section, empty_table_html, execution_section, |
|
keyword_state |
|
] |
|
) |
|
|
|
|
|
category_filter.change( |
|
fn=filter_and_sort_table, |
|
inputs=[ |
|
category_filter, gr.Textbox(value="μ λ ¬ μμ", visible=False), |
|
total_volume_sort, usage_count_sort, |
|
search_volume_filter, exclude_zero_volume |
|
], |
|
outputs=[table_output] |
|
) |
|
|
|
category_filter.change( |
|
fn=update_category_selection, |
|
inputs=[category_filter], |
|
outputs=[selected_category] |
|
) |
|
|
|
total_volume_sort.change( |
|
fn=filter_and_sort_table, |
|
inputs=[ |
|
category_filter, gr.Textbox(value="μ λ ¬ μμ", visible=False), |
|
total_volume_sort, usage_count_sort, |
|
search_volume_filter, exclude_zero_volume |
|
], |
|
outputs=[table_output] |
|
) |
|
|
|
usage_count_sort.change( |
|
fn=filter_and_sort_table, |
|
inputs=[ |
|
category_filter, gr.Textbox(value="μ λ ¬ μμ", visible=False), |
|
total_volume_sort, usage_count_sort, |
|
search_volume_filter, exclude_zero_volume |
|
], |
|
outputs=[table_output] |
|
) |
|
|
|
search_volume_filter.change( |
|
fn=filter_and_sort_table, |
|
inputs=[ |
|
category_filter, gr.Textbox(value="μ λ ¬ μμ", visible=False), |
|
total_volume_sort, usage_count_sort, |
|
search_volume_filter, exclude_zero_volume |
|
], |
|
outputs=[table_output] |
|
) |
|
|
|
exclude_zero_volume.change( |
|
fn=filter_and_sort_table, |
|
inputs=[ |
|
category_filter, gr.Textbox(value="μ λ ¬ μμ", visible=False), |
|
total_volume_sort, usage_count_sort, |
|
search_volume_filter, exclude_zero_volume |
|
], |
|
outputs=[table_output] |
|
) |
|
|
|
|
|
analyze_btn.click( |
|
fn=wrapper_analyze_with_loading, |
|
inputs=[analysis_keywords, selected_category, state_df], |
|
outputs=[progress_section] |
|
).then( |
|
fn=wrapper_process_analyze_results, |
|
inputs=[analysis_keywords, selected_category, state_df], |
|
outputs=[analysis_result, download_output, analysis_output_section, progress_section] |
|
) |
|
|
|
|
|
reset_btn.click( |
|
fn=reset_interface, |
|
inputs=[], |
|
outputs=[ |
|
keyword, korean_only, exclude_zero_volume, apply_main_keyword, |
|
table_output, category_filter, category_filter, |
|
search_volume_filter, search_volume_filter, |
|
total_volume_sort, usage_count_sort, |
|
selected_category, selected_category, |
|
analysis_keywords, analysis_result, download_output |
|
] |
|
) |
|
|
|
return demo |
|
|
|
if __name__ == "__main__":
    print("===== Application Startup at %s =====" % time.strftime("%Y-%m-%d %H:%M:%S"))
    logger.info("π 컨νΈλ‘€ νμ μ ν리μΌμ΄μ
μμ...")

    # Clean stale temp files and create the app-specific temp directory.
    app_temp_dir = cleanup_on_startup()

    # Background sweeper for idle sessions / stale temp files.
    start_session_cleanup_scheduler()

    # Fail fast when API_ENDPOINT is missing (get_api_client raises).
    try:
        test_client = get_api_client()
        logger.info("β
API μ°κ²° ν
μ€νΈ μ±κ³΅")
    except Exception as e:
        logger.error("β API μ°κ²° μ€ν¨ - νκ²½λ³μ API_ENDPOINTλ₯Ό νμΈνμΈμ")
        print("β API_ENDPOINT νκ²½λ³μκ° μ€μ λμ§ μμμ΅λλ€.")
        print("π‘ .env νμΌμ λ€μκ³Ό κ°μ΄ μ€μ νμΈμ:")
        print("API_ENDPOINT=your-endpoint-here")
        raise SystemExit(1)

    logger.info("===== 컨νΈλ‘€ νμ μ ν리μΌμ΄μ
μμ μλ£ at %s =====", time.strftime("%Y-%m-%d %H:%M:%S"))
    logger.info(f"π μμ νμΌ μ μ₯ μμΉ: {app_temp_dir}")

    try:
        app = create_app()
        print("π Gradio μ ν리μΌμ΄μ
μ΄ μμλ©λλ€...")
        app.launch(
            share=False,
            server_name="0.0.0.0",   # listen on all interfaces (container host)
            server_port=7860,
            max_threads=40,
            auth=None,
            show_error=True,
            quiet=False,
            favicon_path=None,
            ssl_verify=False,
            inbrowser=False,
            prevent_thread_lock=False
        )
    except Exception as e:
        logger.error(f"μ ν리μΌμ΄μ
μ€ν μ€ν¨: {e}")
        print(f"β μ ν리μΌμ΄μ
μ€ν μ€ν¨: {e}")
        raise SystemExit(1)
    finally:
        # Best-effort final cleanup on shutdown.
        logger.info("𧹠컨νΈλ‘€ νμ μ ν리μΌμ΄μ
μ’
λ£ - μ΅μ’
μ 리 μμ
...")
        try:
            cleanup_huggingface_temp_folders()
            if os.path.exists(app_temp_dir):
                shutil.rmtree(app_temp_dir, ignore_errors=True)
            logger.info("β
μ΅μ’
μ 리 μλ£")
        except Exception as e:
            logger.error(f"μ΅μ’
μ 리 μ€ μ€λ₯: {e}")