# demo-generator / app.py
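"""Demo Generator: analyze a Hugging Face model and generate a ready-to-deploy Gradio demo.

The app inspects a model's pipeline task, emits a task-specific app.py, and packages
app.py, requirements.txt, and README.md into a ZIP that can be uploaded to a Space.
"""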
import gradio as gr
from huggingface_hub import HfApi, model_info
import tempfile
import zipfile
import os
from datetime import datetime
api = HfApi()
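# NOTE: the HfApi client is currently unused; model metadata is fetched through the
# module-level model_info() helper below.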
def analyze_model(model_name):
"""ํ—ˆ๊น…ํŽ˜์ด์Šค ๋ชจ๋ธ ๋ถ„์„"""
if not model_name:
return "๋ชจ๋ธ๋ช…์„ ์ž…๋ ฅํ•ด์ฃผ์„ธ์š”.", None, None
try:
# ๋ชจ๋ธ ์ •๋ณด ๊ฐ€์ ธ์˜ค๊ธฐ
info = model_info(model_name)
# ํŒŒ์ดํ”„๋ผ์ธ ํƒœ์Šคํฌ ํ™•์ธ
pipeline_tag = getattr(info, 'pipeline_tag', None)
# ๋ชจ๋ธ ํƒ€์ž… ๋ถ„์„
model_type = "unknown"
if hasattr(info, 'config') and info.config:
model_type = info.config.get('model_type', 'unknown')
# ๋ผ์ด๋ธŒ๋Ÿฌ๋ฆฌ ํ™•์ธ
library = getattr(info, 'library_name', 'transformers')
# ์–ธ์–ด ํ™•์ธ
languages = getattr(info, 'language', ['en'])
if isinstance(languages, str):
languages = [languages]
analysis_result = f"""
## ๐Ÿ” ๋ชจ๋ธ ๋ถ„์„ ๊ฒฐ๊ณผ
### ๊ธฐ๋ณธ ์ •๋ณด
- **๋ชจ๋ธ๋ช…**: {model_name}
- **ํƒœ์Šคํฌ**: {pipeline_tag or 'Unknown'}
- **๋ชจ๋ธ ํƒ€์ž…**: {model_type}
- **๋ผ์ด๋ธŒ๋Ÿฌ๋ฆฌ**: {library}
- **์–ธ์–ด**: {', '.join(languages[:3])}
- **๋‹ค์šด๋กœ๋“œ**: {getattr(info, 'downloads', 0):,}ํšŒ
### ๋ฐ๋ชจ ์ƒ์„ฑ ๊ฐ€๋Šฅ ์—ฌ๋ถ€
"""
        # Check whether the task is supported
        supported_tasks = {
            'text-classification': '✅ A text classification demo can be generated',
            'question-answering': '✅ A question answering demo can be generated',
            'text-generation': '✅ A text generation demo can be generated',
            'summarization': '✅ A summarization demo can be generated',
            'translation': '✅ A translation demo can be generated',
            'fill-mask': '✅ A fill-mask demo can be generated',
            'token-classification': '✅ A named entity recognition demo can be generated'
        }
        if pipeline_tag in supported_tasks:
            analysis_result += supported_tasks[pipeline_tag]
            demo_possible = True
        else:
            analysis_result += f"⚠️ The '{pipeline_tag}' task is not supported yet."
            demo_possible = False
return analysis_result, info, demo_possible
except Exception as e:
return f"โŒ ๋ชจ๋ธ ๋ถ„์„ ์‹คํŒจ: {str(e)}\n\n๋ชจ๋ธ๋ช…์ด ์ •ํ™•ํ•œ์ง€ ํ™•์ธํ•ด์ฃผ์„ธ์š”.", None, False
def generate_demo_code(model_info, model_name, demo_title, demo_description):
"""ํƒœ์Šคํฌ๋ณ„ ๋ฐ๋ชจ ์ฝ”๋“œ ์ƒ์„ฑ"""
if not model_info:
return "๋จผ์ € ๋ชจ๋ธ์„ ๋ถ„์„ํ•ด์ฃผ์„ธ์š”.", None
pipeline_tag = getattr(model_info, 'pipeline_tag', None)
if not demo_title:
demo_title = f"{model_name.split('/')[-1]} Demo"
if not demo_description:
demo_description = f"Demo for {model_name}"
    # Generate code for the detected task
if pipeline_tag == 'text-classification':
app_code = generate_text_classification_demo(model_name, demo_title, demo_description)
elif pipeline_tag == 'question-answering':
app_code = generate_qa_demo(model_name, demo_title, demo_description)
elif pipeline_tag == 'text-generation':
app_code = generate_text_generation_demo(model_name, demo_title, demo_description)
elif pipeline_tag == 'summarization':
app_code = generate_summarization_demo(model_name, demo_title, demo_description)
elif pipeline_tag == 'translation':
app_code = generate_translation_demo(model_name, demo_title, demo_description)
elif pipeline_tag == 'fill-mask':
app_code = generate_fill_mask_demo(model_name, demo_title, demo_description)
elif pipeline_tag == 'token-classification':
app_code = generate_token_classification_demo(model_name, demo_title, demo_description)
else:
return f"โŒ '{pipeline_tag}' ํƒœ์Šคํฌ๋Š” ์•„์ง ์ง€์›๋˜์ง€ ์•Š์Šต๋‹ˆ๋‹ค.", None
return app_code, pipeline_tag
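# Each generate_*_demo() below returns the source of a standalone app.py as an f-string
# template. Only {model_name}, {title}, and {description} are substituted here; every brace
# that must survive into the generated file is doubled ({{ }}) so Python does not try to
# format it at generation time.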
def generate_text_classification_demo(model_name, title, description):
"""ํ…์ŠคํŠธ ๋ถ„๋ฅ˜ ๋ฐ๋ชจ ์ฝ”๋“œ ์ƒ์„ฑ"""
return f'''import gradio as gr
from transformers import pipeline
# ๋ชจ๋ธ ๋กœ๋“œ
classifier = pipeline("text-classification", model="{model_name}")
def classify_text(text):
"""ํ…์ŠคํŠธ ๋ถ„๋ฅ˜ ์ˆ˜ํ–‰"""
if not text.strip():
return "ํ…์ŠคํŠธ๋ฅผ ์ž…๋ ฅํ•ด์ฃผ์„ธ์š”."
try:
results = classifier(text)
# ๊ฒฐ๊ณผ ํฌ๋งทํŒ…
output = "## ๐ŸŽฏ ๋ถ„๋ฅ˜ ๊ฒฐ๊ณผ\\n\\n"
for i, result in enumerate(results):
label = result['label']
score = result['score']
confidence = f"{score:.1%}"
output += f"**{i+1}. {label}**: {confidence}\\n"
return output
except Exception as e:
return f"โŒ ์˜ค๋ฅ˜๊ฐ€ ๋ฐœ์ƒํ–ˆ์Šต๋‹ˆ๋‹ค: {{str(e)}}"
# Gradio interface
with gr.Blocks(title="{title}", theme=gr.themes.Soft()) as demo:
    gr.Markdown("# {title}")
    gr.Markdown("{description}")
    with gr.Row():
        with gr.Column(scale=1):
            text_input = gr.Textbox(
                label="Text to classify",
                placeholder="Enter text here...",
                lines=5
            )
            classify_btn = gr.Button("🎯 Classify", variant="primary")
            # Example buttons
            gr.Markdown("### 📝 Example texts")
            examples = [
                ["This movie was really fun! Highly recommended."],
                ["The service was quite disappointing."],
                ["It seems like a fairly average product."]
            ]
            example_btns = []
            for example in examples:
                btn = gr.Button(example[0][:30] + "...", size="sm")
                btn.click(lambda x=example[0]: x, outputs=text_input)
        with gr.Column(scale=1):
            output = gr.Markdown("Enter text and click Classify.")
    classify_btn.click(
        fn=classify_text,
        inputs=text_input,
        outputs=output
    )
    gr.Markdown(f"""
### ℹ️ Model Info
- **Model**: [{model_name}](https://huggingface.co/{model_name})
- **Task**: Text classification
- **Description**: {description}
""")

if __name__ == "__main__":
    demo.launch()'''
def generate_qa_demo(model_name, title, description):
"""์งˆ์˜์‘๋‹ต ๋ฐ๋ชจ ์ฝ”๋“œ ์ƒ์„ฑ"""
return f'''import gradio as gr
from transformers import pipeline
# ๋ชจ๋ธ ๋กœ๋“œ
qa_pipeline = pipeline("question-answering", model="{model_name}")
def answer_question(context, question):
"""์งˆ์˜์‘๋‹ต ์ˆ˜ํ–‰"""
if not context.strip() or not question.strip():
return "์ปจํ…์ŠคํŠธ์™€ ์งˆ๋ฌธ์„ ๋ชจ๋‘ ์ž…๋ ฅํ•ด์ฃผ์„ธ์š”."
try:
result = qa_pipeline(question=question, context=context)
answer = result['answer']
score = result['score']
confidence = f"{score:.1%}"
output = f"""
## ๐Ÿ’ก ๋‹ต๋ณ€ ๊ฒฐ๊ณผ
**๋‹ต๋ณ€**: {answer}
**์‹ ๋ขฐ๋„**: {confidence}
**์‹œ์ž‘ ์œ„์น˜**: {result.get('start', 'N/A')}
**๋ ์œ„์น˜**: {result.get('end', 'N/A')}
"""
return output
except Exception as e:
return f"โŒ ์˜ค๋ฅ˜๊ฐ€ ๋ฐœ์ƒํ–ˆ์Šต๋‹ˆ๋‹ค: {{str(e)}}"
# Gradio interface
with gr.Blocks(title="{title}", theme=gr.themes.Soft()) as demo:
    gr.Markdown("# {title}")
    gr.Markdown("{description}")
    with gr.Row():
        with gr.Column():
            context_input = gr.Textbox(
                label="📄 Context (background information)",
                placeholder="Enter the text the answer should be found in...",
                lines=8
            )
            question_input = gr.Textbox(
                label="❓ Question",
                placeholder="Enter a question...",
                lines=2
            )
            qa_btn = gr.Button("💡 Find Answer", variant="primary")
        with gr.Column():
            output = gr.Markdown("Enter a context and a question, then click the button.")
    qa_btn.click(
        fn=answer_question,
        inputs=[context_input, question_input],
        outputs=output
    )
    gr.Markdown(f"""
### ℹ️ Model Info
- **Model**: [{model_name}](https://huggingface.co/{model_name})
- **Task**: Question answering
- **Description**: {description}
""")

if __name__ == "__main__":
    demo.launch()'''
def generate_text_generation_demo(model_name, title, description):
"""ํ…์ŠคํŠธ ์ƒ์„ฑ ๋ฐ๋ชจ ์ฝ”๋“œ ์ƒ์„ฑ"""
return f'''import gradio as gr
from transformers import pipeline
# ๋ชจ๋ธ ๋กœ๋“œ
generator = pipeline("text-generation", model="{model_name}")
def generate_text(prompt, max_length, temperature, top_p):
"""ํ…์ŠคํŠธ ์ƒ์„ฑ ์ˆ˜ํ–‰"""
if not prompt.strip():
return "ํ”„๋กฌํ”„ํŠธ๋ฅผ ์ž…๋ ฅํ•ด์ฃผ์„ธ์š”."
try:
results = generator(
prompt,
max_length=max_length,
temperature=temperature,
top_p=top_p,
num_return_sequences=1,
do_sample=True,
pad_token_id=generator.tokenizer.eos_token_id
)
generated_text = results[0]['generated_text']
# ์›๋ณธ ํ”„๋กฌํ”„ํŠธ ์ œ๊ฑฐํ•˜๊ณ  ์ƒ์„ฑ๋œ ๋ถ€๋ถ„๋งŒ ํ‘œ์‹œ
if generated_text.startswith(prompt):
generated_text = generated_text[len(prompt):]
output = f"""
## โœจ ์ƒ์„ฑ๋œ ํ…์ŠคํŠธ
**์ž…๋ ฅ**: {prompt}
**์ƒ์„ฑ ๊ฒฐ๊ณผ**:
{generated_text}
---
*์„ค์ •: ์ตœ๋Œ€ ๊ธธ์ด={max_length}, ์˜จ๋„={temperature}, Top-p={top_p}*
"""
return output
except Exception as e:
return f"โŒ ์˜ค๋ฅ˜๊ฐ€ ๋ฐœ์ƒํ–ˆ์Šต๋‹ˆ๋‹ค: {{str(e)}}"
# Gradio interface
with gr.Blocks(title="{title}", theme=gr.themes.Soft()) as demo:
    gr.Markdown("# {title}")
    gr.Markdown("{description}")
    with gr.Row():
        with gr.Column(scale=1):
            prompt_input = gr.Textbox(
                label="✍️ Prompt",
                placeholder="Enter the text to start generating from...",
                lines=4
            )
            gr.Markdown("### ⚙️ Generation Settings")
            max_length = gr.Slider(
                label="Max length",
                minimum=10,
                maximum=200,
                value=50,
                step=10
            )
            temperature = gr.Slider(
                label="Temperature (creativity)",
                minimum=0.1,
                maximum=2.0,
                value=0.7,
                step=0.1
            )
            top_p = gr.Slider(
                label="Top-p (diversity)",
                minimum=0.1,
                maximum=1.0,
                value=0.9,
                step=0.1
            )
            generate_btn = gr.Button("✨ Generate", variant="primary")
        with gr.Column(scale=2):
            output = gr.Markdown("Enter a prompt and click Generate.")
    generate_btn.click(
        fn=generate_text,
        inputs=[prompt_input, max_length, temperature, top_p],
        outputs=output
    )
    gr.Markdown(f"""
### ℹ️ Model Info
- **Model**: [{model_name}](https://huggingface.co/{model_name})
- **Task**: Text generation
- **Description**: {description}
""")

if __name__ == "__main__":
    demo.launch()'''
def generate_summarization_demo(model_name, title, description):
"""์š”์•ฝ ๋ฐ๋ชจ ์ฝ”๋“œ ์ƒ์„ฑ"""
return f'''import gradio as gr
from transformers import pipeline
# ๋ชจ๋ธ ๋กœ๋“œ
summarizer = pipeline("summarization", model="{model_name}")
def summarize_text(text, max_length, min_length):
"""ํ…์ŠคํŠธ ์š”์•ฝ ์ˆ˜ํ–‰"""
if not text.strip():
return "์š”์•ฝํ•  ํ…์ŠคํŠธ๋ฅผ ์ž…๋ ฅํ•ด์ฃผ์„ธ์š”."
if len(text.split()) < 10:
return "์š”์•ฝํ•˜๊ธฐ์—๋Š” ํ…์ŠคํŠธ๊ฐ€ ๋„ˆ๋ฌด ์งง์Šต๋‹ˆ๋‹ค. ๋” ๊ธด ํ…์ŠคํŠธ๋ฅผ ์ž…๋ ฅํ•ด์ฃผ์„ธ์š”."
try:
results = summarizer(
text,
max_length=max_length,
min_length=min_length,
do_sample=False
)
summary = results[0]['summary_text']
# ํ†ต๊ณ„ ๊ณ„์‚ฐ
original_words = len(text.split())
summary_words = len(summary.split())
compression_ratio = (1 - summary_words / original_words) * 100
output = f"""
## ๐Ÿ“ ์š”์•ฝ ๊ฒฐ๊ณผ
**์š”์•ฝ๋ฌธ**:
{summary}
---
### ๐Ÿ“Š ํ†ต๊ณ„
- **์›๋ฌธ ๋‹จ์–ด ์ˆ˜**: {original_words:,}๊ฐœ
- **์š”์•ฝ๋ฌธ ๋‹จ์–ด ์ˆ˜**: {summary_words:,}๊ฐœ
- **์••์ถ•๋ฅ **: {compression_ratio:.1f}%
"""
return output
except Exception as e:
return f"โŒ ์˜ค๋ฅ˜๊ฐ€ ๋ฐœ์ƒํ–ˆ์Šต๋‹ˆ๋‹ค: {{str(e)}}"
# Gradio interface
with gr.Blocks(title="{title}", theme=gr.themes.Soft()) as demo:
    gr.Markdown("# {title}")
    gr.Markdown("{description}")
    with gr.Row():
        with gr.Column(scale=1):
            text_input = gr.Textbox(
                label="📄 Text to summarize",
                placeholder="Enter a long text...",
                lines=10
            )
            gr.Markdown("### ⚙️ Summarization Settings")
            max_length = gr.Slider(
                label="Max summary length",
                minimum=20,
                maximum=150,
                value=50,
                step=10
            )
            min_length = gr.Slider(
                label="Min summary length",
                minimum=5,
                maximum=50,
                value=10,
                step=5
            )
            summarize_btn = gr.Button("📝 Summarize", variant="primary")
        with gr.Column(scale=1):
            output = gr.Markdown("Enter text and click Summarize.")
    summarize_btn.click(
        fn=summarize_text,
        inputs=[text_input, max_length, min_length],
        outputs=output
    )
    gr.Markdown(f"""
### ℹ️ Model Info
- **Model**: [{model_name}](https://huggingface.co/{model_name})
- **Task**: Summarization
- **Description**: {description}
""")

if __name__ == "__main__":
    demo.launch()'''
def generate_translation_demo(model_name, title, description):
"""๋ฒˆ์—ญ ๋ฐ๋ชจ ์ฝ”๋“œ ์ƒ์„ฑ"""
return f'''import gradio as gr
from transformers import pipeline
# ๋ชจ๋ธ ๋กœ๋“œ
translator = pipeline("translation", model="{model_name}")
def translate_text(text):
"""๋ฒˆ์—ญ ์ˆ˜ํ–‰"""
if not text.strip():
return "๋ฒˆ์—ญํ•  ํ…์ŠคํŠธ๋ฅผ ์ž…๋ ฅํ•ด์ฃผ์„ธ์š”."
try:
results = translator(text)
translated = results[0]['translation_text']
output = f"""
## ๐ŸŒ ๋ฒˆ์—ญ ๊ฒฐ๊ณผ
**์›๋ฌธ**: {text}
**๋ฒˆ์—ญ๋ฌธ**: {translated}
---
*๋ฌธ์ž ์ˆ˜: {len(text)} โ†’ {len(translated)}*
"""
return output
except Exception as e:
return f"โŒ ์˜ค๋ฅ˜๊ฐ€ ๋ฐœ์ƒํ–ˆ์Šต๋‹ˆ๋‹ค: {{str(e)}}"
# Gradio interface
with gr.Blocks(title="{title}", theme=gr.themes.Soft()) as demo:
    gr.Markdown("# {title}")
    gr.Markdown("{description}")
    with gr.Row():
        with gr.Column():
            text_input = gr.Textbox(
                label="🌐 Text to translate",
                placeholder="Enter text to translate...",
                lines=6
            )
            translate_btn = gr.Button("🌐 Translate", variant="primary")
        with gr.Column():
            output = gr.Markdown("Enter text and click Translate.")
    translate_btn.click(
        fn=translate_text,
        inputs=text_input,
        outputs=output
    )
    gr.Markdown(f"""
### ℹ️ Model Info
- **Model**: [{model_name}](https://huggingface.co/{model_name})
- **Task**: Translation
- **Description**: {description}
""")

if __name__ == "__main__":
    demo.launch()'''
def generate_fill_mask_demo(model_name, title, description):
"""๋นˆ์นธ ์ฑ„์šฐ๊ธฐ ๋ฐ๋ชจ ์ฝ”๋“œ ์ƒ์„ฑ"""
return f'''import gradio as gr
from transformers import pipeline
# ๋ชจ๋ธ ๋กœ๋“œ
fill_mask = pipeline("fill-mask", model="{model_name}")
def predict_mask(text):
"""๋งˆ์Šคํฌ ์˜ˆ์ธก ์ˆ˜ํ–‰"""
if not text.strip():
return "ํ…์ŠคํŠธ๋ฅผ ์ž…๋ ฅํ•ด์ฃผ์„ธ์š”."
if "[MASK]" not in text:
return "ํ…์ŠคํŠธ์— [MASK] ํ† ํฐ์„ ํฌํ•จํ•ด์ฃผ์„ธ์š”."
try:
results = fill_mask(text)
output = f"## ๐ŸŽฏ ์˜ˆ์ธก ๊ฒฐ๊ณผ\\n\\n**์›๋ฌธ**: {text}\\n\\n### ํ›„๋ณด๋“ค:\\n"
for i, result in enumerate(results[:5]):
token = result['token_str']
score = result['score']
sequence = result['sequence']
confidence = f"{score:.1%}"
output += f"**{i+1}. {token}** ({confidence})\\n"
output += f" *{sequence}*\\n\\n"
return output
except Exception as e:
return f"โŒ ์˜ค๋ฅ˜๊ฐ€ ๋ฐœ์ƒํ–ˆ์Šต๋‹ˆ๋‹ค: {{str(e)}}"
# Gradio interface
with gr.Blocks(title="{title}", theme=gr.themes.Soft()) as demo:
    gr.Markdown("# {title}")
    gr.Markdown("{description}")
    with gr.Row():
        with gr.Column():
            text_input = gr.Textbox(
                label="🎯 Text with a [MASK] to predict",
                placeholder="Enter a sentence containing a [MASK] token...",
                lines=4,
                value="The weather today is really [MASK]."
            )
            predict_btn = gr.Button("🎯 Predict", variant="primary")
            gr.Markdown("### 📝 Examples")
            examples = [
                "The weather today is really [MASK].",
                "This movie is [MASK] interesting.",
                "The capital of Korea is [MASK]."
            ]
            for example in examples:
                btn = gr.Button(example, size="sm")
                btn.click(lambda x=example: x, outputs=text_input)
        with gr.Column():
            output = gr.Markdown("Enter text containing a [MASK] token and click Predict.")
    predict_btn.click(
        fn=predict_mask,
        inputs=text_input,
        outputs=output
    )
    gr.Markdown(f"""
### ℹ️ Model Info
- **Model**: [{model_name}](https://huggingface.co/{model_name})
- **Task**: Fill mask
- **Description**: {description}
""")

if __name__ == "__main__":
    demo.launch()'''
def generate_token_classification_demo(model_name, title, description):
"""๊ฐœ์ฒด๋ช… ์ธ์‹ ๋ฐ๋ชจ ์ฝ”๋“œ ์ƒ์„ฑ"""
return f'''import gradio as gr
from transformers import pipeline
# ๋ชจ๋ธ ๋กœ๋“œ
ner_pipeline = pipeline("token-classification", model="{model_name}", aggregation_strategy="simple")
def recognize_entities(text):
"""๊ฐœ์ฒด๋ช… ์ธ์‹ ์ˆ˜ํ–‰"""
if not text.strip():
return "ํ…์ŠคํŠธ๋ฅผ ์ž…๋ ฅํ•ด์ฃผ์„ธ์š”."
try:
results = ner_pipeline(text)
if not results:
return "์ธ์‹๋œ ๊ฐœ์ฒด๊ฐ€ ์—†์Šต๋‹ˆ๋‹ค."
output = f"## ๐Ÿท๏ธ ๊ฐœ์ฒด๋ช… ์ธ์‹ ๊ฒฐ๊ณผ\\n\\n**์›๋ฌธ**: {text}\\n\\n### ์ธ์‹๋œ ๊ฐœ์ฒด๋“ค:\\n"
for i, entity in enumerate(results):
word = entity['word']
label = entity['entity_group']
score = entity['score']
start = entity['start']
end = entity['end']
confidence = f"{score:.1%}"
output += f"**{i+1}. {word}** โ†’ *{label}* ({confidence})\\n"
output += f" ์œ„์น˜: {start}-{end}\\n\\n"
return output
except Exception as e:
return f"โŒ ์˜ค๋ฅ˜๊ฐ€ ๋ฐœ์ƒํ–ˆ์Šต๋‹ˆ๋‹ค: {{str(e)}}"
# Gradio interface
with gr.Blocks(title="{title}", theme=gr.themes.Soft()) as demo:
    gr.Markdown("# {title}")
    gr.Markdown("{description}")
    with gr.Row():
        with gr.Column():
            text_input = gr.Textbox(
                label="🏷️ Text for entity recognition",
                placeholder="Enter text containing person, place, or organization names...",
                lines=5,
                value="Samsung Electronics chairman Lee Jae-yong held a press conference in Seoul."
            )
            ner_btn = gr.Button("🏷️ Recognize Entities", variant="primary")
        with gr.Column():
            output = gr.Markdown("Enter text and click the button.")
    ner_btn.click(
        fn=recognize_entities,
        inputs=text_input,
        outputs=output
    )
    gr.Markdown(f"""
### ℹ️ Model Info
- **Model**: [{model_name}](https://huggingface.co/{model_name})
- **Task**: Named entity recognition
- **Description**: {description}
""")

if __name__ == "__main__":
    demo.launch()'''
def create_demo_package(app_code, pipeline_tag, model_name, demo_title, demo_description):
"""์™„์ „ํ•œ ๋ฐ๋ชจ ํŒจํ‚ค์ง€ ์ƒ์„ฑ"""
if not app_code:
return "๋จผ์ € ๋ฐ๋ชจ ์ฝ”๋“œ๋ฅผ ์ƒ์„ฑํ•ด์ฃผ์„ธ์š”.", None
    try:
        # Create a temporary working directory
        temp_dir = tempfile.mkdtemp()
        demo_dir = os.path.join(temp_dir, demo_title.lower().replace(' ', '-'))
        os.makedirs(demo_dir, exist_ok=True)
        # 1. Write app.py
        with open(os.path.join(demo_dir, "app.py"), 'w', encoding='utf-8') as f:
            f.write(app_code)
        # 2. Write requirements.txt
        requirements = """gradio
transformers
torch
"""
        with open(os.path.join(demo_dir, "requirements.txt"), 'w', encoding='utf-8') as f:
            f.write(requirements)
        # 3. Write README.md
        space_name = demo_title.lower().replace(' ', '-')
readme_content = f"""---
title: {demo_title}
emoji: ๐Ÿค–
colorFrom: blue
colorTo: purple
sdk: gradio
sdk_version: 5.31.0
app_file: app.py
pinned: false
models:
- {model_name}
---
# {demo_title}
{demo_description}
## ๐Ÿš€ ์‚ฌ์šฉ๋ฒ•
์ด ๋ฐ๋ชจ๋Š” [{model_name}](https://huggingface.co/{model_name}) ๋ชจ๋ธ์„ ์‚ฌ์šฉํ•ฉ๋‹ˆ๋‹ค.
### ํƒœ์Šคํฌ: {pipeline_tag}
{get_task_description(pipeline_tag)}
## ๐Ÿ› ๏ธ ๋กœ์ปฌ ์‹คํ–‰
```bash
pip install -r requirements.txt
python app.py
```
## ๐Ÿ“ ๋ชจ๋ธ ์ •๋ณด
- **๋ชจ๋ธ**: [{model_name}](https://huggingface.co/{model_name})
- **ํƒœ์Šคํฌ**: {pipeline_tag}
- **๋ผ์ด๋ธŒ๋Ÿฌ๋ฆฌ**: transformers
---
*์ด ๋ฐ๋ชจ๋Š” [Demo Generator](https://huggingface.co/spaces/your-username/demo-generator)๋กœ ์ž๋™ ์ƒ์„ฑ๋˜์—ˆ์Šต๋‹ˆ๋‹ค.*
"""
with open(os.path.join(demo_dir, "README.md"), 'w', encoding='utf-8') as f:
f.write(readme_content)
        # 4. Create the ZIP archive
zip_path = os.path.join(temp_dir, f"{space_name}_demo.zip")
with zipfile.ZipFile(zip_path, 'w', zipfile.ZIP_DEFLATED) as zipf:
for root, dirs, files in os.walk(demo_dir):
for file in files:
file_path = os.path.join(root, file)
arcname = os.path.relpath(file_path, demo_dir)
zipf.write(file_path, arcname)
success_msg = f"""
## โœ… ๋ฐ๋ชจ ์ƒ์„ฑ ์™„๋ฃŒ!
**{demo_title}** ๋ฐ๋ชจ๊ฐ€ ์„ฑ๊ณต์ ์œผ๋กœ ์ƒ์„ฑ๋˜์—ˆ์Šต๋‹ˆ๋‹ค!
### ๐Ÿ“ฆ ํฌํ•จ๋œ ํŒŒ์ผ๋“ค:
- `app.py`: ๋ฉ”์ธ ๋ฐ๋ชจ ์• ํ”Œ๋ฆฌ์ผ€์ด์…˜
- `requirements.txt`: ํ•„์š”ํ•œ ๋ผ์ด๋ธŒ๋Ÿฌ๋ฆฌ ๋ชฉ๋ก
- `README.md`: Space ์„ค๋ช… ๋ฐ ์„ค์ •
### ๐Ÿš€ ๋ฐฐํฌ ๋ฐฉ๋ฒ•:
1. ์•„๋ž˜ ZIP ํŒŒ์ผ์„ ๋‹ค์šด๋กœ๋“œํ•˜์„ธ์š”
2. ํ—ˆ๊น…ํŽ˜์ด์Šค์—์„œ ์ƒˆ Space๋ฅผ ์ƒ์„ฑํ•˜์„ธ์š”
3. ํŒŒ์ผ๋“ค์„ ์—…๋กœ๋“œํ•˜๊ฑฐ๋‚˜ Git์œผ๋กœ pushํ•˜์„ธ์š”
4. ์ž๋™์œผ๋กœ ๋นŒ๋“œ๋˜๊ณ  ๋ฐฐํฌ๋ฉ๋‹ˆ๋‹ค!
### ๐ŸŽฏ ํƒœ์Šคํฌ: {pipeline_tag}
### ๐Ÿค– ๋ชจ๋ธ: {model_name}
"""
return success_msg, zip_path
except Exception as e:
return f"โŒ ํŒจํ‚ค์ง€ ์ƒ์„ฑ ์ค‘ ์˜ค๋ฅ˜๊ฐ€ ๋ฐœ์ƒํ–ˆ์Šต๋‹ˆ๋‹ค: {str(e)}", None
def get_task_description(pipeline_tag):
"""ํƒœ์Šคํฌ๋ณ„ ์„ค๋ช… ๋ฐ˜ํ™˜"""
descriptions = {
'text-classification': 'ํ…์ŠคํŠธ๋ฅผ ์ž…๋ ฅํ•˜๋ฉด ์นดํ…Œ๊ณ ๋ฆฌ๋ณ„๋กœ ๋ถ„๋ฅ˜ํ•ฉ๋‹ˆ๋‹ค. ๊ฐ์ •๋ถ„์„, ์ฃผ์ œ๋ถ„๋ฅ˜ ๋“ฑ์— ์‚ฌ์šฉ๋ฉ๋‹ˆ๋‹ค.',
'question-answering': '์ปจํ…์ŠคํŠธ์™€ ์งˆ๋ฌธ์„ ์ž…๋ ฅํ•˜๋ฉด ๋‹ต๋ณ€์„ ์ฐพ์•„์ค๋‹ˆ๋‹ค.',
'text-generation': '์ฃผ์–ด์ง„ ํ”„๋กฌํ”„ํŠธ๋ฅผ ์ด์–ด์„œ ํ…์ŠคํŠธ๋ฅผ ์ƒ์„ฑํ•ฉ๋‹ˆ๋‹ค.',
'summarization': '๊ธด ํ…์ŠคํŠธ๋ฅผ ์š”์•ฝํ•ด์ค๋‹ˆ๋‹ค.',
'translation': 'ํ…์ŠคํŠธ๋ฅผ ๋‹ค๋ฅธ ์–ธ์–ด๋กœ ๋ฒˆ์—ญํ•ฉ๋‹ˆ๋‹ค.',
'fill-mask': '[MASK] ํ† ํฐ์ด ํฌํ•จ๋œ ๋ฌธ์žฅ์—์„œ ๋นˆ์นธ์„ ์ฑ„์›Œ์ค๋‹ˆ๋‹ค.',
'token-classification': 'ํ…์ŠคํŠธ์—์„œ ์‚ฌ๋žŒ๋ช…, ์ง€๋ช…, ๊ธฐ๊ด€๋ช… ๋“ฑ์˜ ๊ฐœ์ฒด๋ฅผ ์ธ์‹ํ•ฉ๋‹ˆ๋‹ค.'
}
return descriptions.get(pipeline_tag, '๋‹ค์–‘ํ•œ NLP ํƒœ์Šคํฌ๋ฅผ ์ˆ˜ํ–‰ํ•ฉ๋‹ˆ๋‹ค.')
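# Example of using the pieces programmatically (a sketch; normally the Gradio UI below drives them):
#   analysis, info, ok = analyze_model("klue/bert-base")
#   code, task = generate_demo_code(info, "klue/bert-base", "My Demo", "A demo description")
#   message, zip_path = create_demo_package(code, task, "klue/bert-base", "My Demo", "A demo description")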
# Gradio interface
with gr.Blocks(title="🎮 Demo Generator", theme=gr.themes.Soft()) as demo:
    # State holders
    model_info_state = gr.State()
    demo_possible_state = gr.State()
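    # gr.State keeps the ModelInfo object and the "demo possible" flag returned by
    # analyze_model() between button clicks, without rendering them in the UI.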
gr.Markdown("# ๐ŸŽฎ Hugging Face Demo Generator")
gr.Markdown("๋ชจ๋ธ๋ช…๋งŒ ์ž…๋ ฅํ•˜๋ฉด ๋ฐ”๋กœ ์ž‘๋™ํ•˜๋Š” ๋ฐ๋ชจ Space๋ฅผ ์ž๋™์œผ๋กœ ์ƒ์„ฑํ•ด๋“œ๋ฆฝ๋‹ˆ๋‹ค!")
    with gr.Row():
        with gr.Column(scale=1):
            gr.Markdown("### 🔍 Model Analysis")
            model_name_input = gr.Textbox(
                label="Model name",
                placeholder="e.g. klue/bert-base",
                info="Enter the exact model name from the Hugging Face Hub"
            )
            analyze_btn = gr.Button("🔍 Analyze Model", variant="primary")
            gr.Markdown("### ⚙️ Demo Settings")
            demo_title = gr.Textbox(
                label="Demo title",
                placeholder="e.g. Korean BERT Sentiment Demo",
                value=""
            )
            demo_description = gr.Textbox(
                label="Demo description",
                placeholder="e.g. A BERT model demo for Korean sentiment analysis",
                lines=3
            )
            generate_btn = gr.Button("🎮 Generate Demo", variant="secondary")
            create_package_btn = gr.Button("📦 Create Full Package", variant="secondary")
        with gr.Column(scale=2):
            gr.Markdown("### 📊 Analysis Results")
            analysis_output = gr.Markdown("Enter a model name and click Analyze.")
            gr.Markdown("### 💻 Generated Demo Code")
            code_output = gr.Code(language="python", label="app.py")
            gr.Markdown("### 📦 Download")
            download_file = gr.File(label="Full demo package", visible=False)
    # Event handlers
    def analyze_and_store(model_name):
        analysis, info, possible = analyze_model(model_name)
        return analysis, info, possible

    def generate_and_show_code(info, model_name, title, description):
        if not info:
            return "Please analyze a model first."
        code, task = generate_demo_code(info, model_name, title, description)
        return code

    def create_package_and_download(info, model_name, title, description, code):
        if not info or not code:
            return "Please generate the demo code first.", None
pipeline_tag = getattr(info, 'pipeline_tag', 'unknown')
result, zip_path = create_demo_package(code, pipeline_tag, model_name, title, description)
if zip_path:
return result, gr.File(value=zip_path, visible=True)
else:
return result, None
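    # Note: create_package_and_download returns a gr.File(...) instance; in Gradio 4+/5,
    # returning a component from a callback updates that output component in place
    # (here: set the ZIP path and make the download visible).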
analyze_btn.click(
fn=analyze_and_store,
inputs=model_name_input,
outputs=[analysis_output, model_info_state, demo_possible_state]
)
generate_btn.click(
fn=generate_and_show_code,
inputs=[model_info_state, model_name_input, demo_title, demo_description],
outputs=code_output
)
create_package_btn.click(
fn=create_package_and_download,
inputs=[model_info_state, model_name_input, demo_title, demo_description, code_output],
outputs=[analysis_output, download_file]
)
gr.Markdown("""
### ๐Ÿ’ก ์‚ฌ์šฉ๋ฒ• ๊ฐ€์ด๋“œ
#### 1๏ธโƒฃ ๋ชจ๋ธ ๋ถ„์„
- Hugging Face Hub์˜ ์ •ํ™•ํ•œ ๋ชจ๋ธ๋ช… ์ž…๋ ฅ (์˜ˆ: `klue/bert-base`)
- ๋ชจ๋ธ์˜ ํƒœ์Šคํฌ์™€ ํ˜ธํ™˜์„ฑ ์ž๋™ ํ™•์ธ
#### 2๏ธโƒฃ ๋ฐ๋ชจ ์„ค์ •
- **์ œ๋ชฉ**: Space์—์„œ ๋ณด์—ฌ์งˆ ๋ฐ๋ชจ ์ œ๋ชฉ
- **์„ค๋ช…**: ๋ฐ๋ชจ์˜ ์šฉ๋„์™€ ๊ธฐ๋Šฅ ์„ค๋ช…
#### 3๏ธโƒฃ ์ฝ”๋“œ ์ƒ์„ฑ
- ํƒœ์Šคํฌ์— ๋งž๋Š” ์ตœ์ ํ™”๋œ Gradio ์ธํ„ฐํŽ˜์ด์Šค ์ž๋™ ์ƒ์„ฑ
- ๋ฐ”๋กœ ๋ณต์‚ฌํ•ด์„œ ์‚ฌ์šฉ ๊ฐ€๋Šฅํ•œ ์™„์ „ํ•œ ์ฝ”๋“œ
#### 4๏ธโƒฃ ํŒจํ‚ค์ง€ ๋‹ค์šด๋กœ๋“œ
- `app.py`, `requirements.txt`, `README.md` ํฌํ•จ
- ZIP ๋‹ค์šด๋กœ๋“œ ํ›„ ๋ฐ”๋กœ Space์— ์—…๋กœ๋“œ ๊ฐ€๋Šฅ
### ๐ŸŽฏ ์ง€์›ํ•˜๋Š” ํƒœ์Šคํฌ
- **ํ…์ŠคํŠธ ๋ถ„๋ฅ˜**: ๊ฐ์ •๋ถ„์„, ์ฃผ์ œ๋ถ„๋ฅ˜, ์–ธ์–ด๊ฐ์ง€ ๋“ฑ
- **์งˆ์˜์‘๋‹ต**: ๋ฌธ์„œ ๊ธฐ๋ฐ˜ QA ์‹œ์Šคํ…œ
- **ํ…์ŠคํŠธ ์ƒ์„ฑ**: ์–ธ์–ด๋ชจ๋ธ ๊ธฐ๋ฐ˜ ํ…์ŠคํŠธ ์™„์„ฑ
- **์š”์•ฝ**: ๊ธด ํ…์ŠคํŠธ์˜ ํ•ต์‹ฌ ์š”์•ฝ
- **๋ฒˆ์—ญ**: ์–ธ์–ด ๊ฐ„ ๋ฒˆ์—ญ
- **๋นˆ์นธ ์ฑ„์šฐ๊ธฐ**: BERT ์Šคํƒ€์ผ ๋งˆ์Šคํฌ ์˜ˆ์ธก
- **๊ฐœ์ฒด๋ช… ์ธ์‹**: ์‚ฌ๋žŒ๋ช…, ์ง€๋ช…, ๊ธฐ๊ด€๋ช… ๋“ฑ ์ถ”์ถœ
### ๐Ÿ”— ์›Œํฌํ”Œ๋กœ์šฐ ์—ฐ๊ณ„
์ด ๋„๊ตฌ๋Š” ๋‹ค๋ฅธ ๋„๊ตฌ๋“ค๊ณผ ์™„๋ฒฝํ•˜๊ฒŒ ์—ฐ๊ณ„๋ฉ๋‹ˆ๋‹ค:
1. **[Model Search Tool](https://huggingface.co/spaces/your-username/model-search-tool)**: ์ตœ์  ๋ชจ๋ธ ๊ฒ€์ƒ‰
2. **[Dataset Converter](https://huggingface.co/spaces/your-username/dataset-converter)**: ๋ฐ์ดํ„ฐ ์ค€๋น„
3. **Demo Generator**: ๋ฐ๋ชจ ์ƒ์„ฑ โ† *ํ˜„์žฌ ๋„๊ตฌ*
4. **[Model Card Generator](https://huggingface.co/spaces/your-username/model-card-generator)**: ๋ฌธ์„œํ™”
""")
if __name__ == "__main__":
demo.launch()