# title-generator / script.py
# (Hugging Face page header captured along with the file, kept as comments
#  so the script stays valid Python: author oriolgds, unverified commit
#  d6aca47 'Revert "Some optimizations"', raw / history / blame, 5.93 kB)
# Build the complete source of a Hugging Face Space that generates titles
# using Llama-3.2-1B-Instruct with a Gradio interface and API.
# NOTE: inside this (non-raw) triple-quoted literal, "\\n" is an escaped
# backslash so the written app.py contains the two-character escape "\n",
# and trailing "\\" becomes a single "\" (bash line continuation) in the
# generated markdown.
app_code = '''import gradio as gr
import os
from huggingface_hub import InferenceClient

# Obtener el token de HF desde los secrets
HF_TOKEN = os.environ.get("HF_TOKEN")

# Inicializar el cliente de inferencia con el modelo Llama
client = InferenceClient(
    model="meta-llama/Llama-3.2-1B-Instruct",
    token=HF_TOKEN
)

def generate_title(text_or_history, max_length=50):
    """
    Genera un título a partir de texto o historial de conversación
    Args:
        text_or_history: Puede ser texto simple o una lista de mensajes
        max_length: Longitud máxima del título
    Returns:
        El título generado
    """
    try:
        # Si es una lista (historial), convertirla a texto
        if isinstance(text_or_history, list):
            # Formatear el historial como conversación
            conversation_text = "\\n".join([
                f"{msg.get('role', 'user')}: {msg.get('content', '')}"
                for msg in text_or_history
            ])
        else:
            conversation_text = str(text_or_history)

        # Crear el prompt para generar título
        prompt = f"""Based on the following conversation or text, generate a short, concise title (maximum 10 words):
{conversation_text}
Title:"""

        # Generar el título usando el modelo
        messages = [
            {"role": "user", "content": prompt}
        ]

        response = ""
        for message in client.chat_completion(
            messages=messages,
            max_tokens=max_length,
            temperature=0.7,
            stream=True
        ):
            token = message.choices[0].delta.content
            if token:
                response += token

        # Limpiar el título (quitar saltos de línea extra, etc.)
        title = response.strip().split("\\n")[0]
        return title
    except Exception as e:
        return f"Error: {str(e)}"

# Crear la interfaz de Gradio
with gr.Blocks(title="Title Generator with Llama 3.2") as demo:
    gr.Markdown("# 📝 AI Title Generator")
    gr.Markdown("Generate concise titles from text or conversation history using Llama 3.2-1B-Instruct")

    with gr.Tab("Text Input"):
        text_input = gr.Textbox(
            label="Enter your text",
            placeholder="Paste your text or conversation here...",
            lines=10
        )
        text_button = gr.Button("Generate Title", variant="primary")
        text_output = gr.Textbox(label="Generated Title", lines=2)

        text_button.click(
            fn=generate_title,
            inputs=[text_input],
            outputs=[text_output]
        )

    with gr.Tab("History/List Input"):
        gr.Markdown("Enter conversation history as JSON format:")
        gr.Markdown('Example: `[{"role": "user", "content": "Hello"}, {"role": "assistant", "content": "Hi there!"}]`')
        history_input = gr.Textbox(
            label="Conversation History (JSON)",
            placeholder='[{"role": "user", "content": "Your message here"}]',
            lines=10
        )
        history_button = gr.Button("Generate Title", variant="primary")
        history_output = gr.Textbox(label="Generated Title", lines=2)

        def process_history(history_json):
            try:
                import json
                history_list = json.loads(history_json)
                return generate_title(history_list)
            except json.JSONDecodeError:
                return "Error: Invalid JSON format"

        history_button.click(
            fn=process_history,
            inputs=[history_input],
            outputs=[history_output]
        )

    gr.Markdown("---")
    gr.Markdown("### API Usage")
    gr.Markdown("""
You can use this API with CURL:
```bash
curl -X POST "https://YOUR-SPACE-URL/call/generate_title" \\
-H "Content-Type: application/json" \\
-d '{"data": ["Your text here"]}'
```
""")

# Lanzar la aplicación con API habilitada
if __name__ == "__main__":
    demo.launch(show_api=True)
'''
# Persist the generated application source to app.py.
# (Indentation of the write call was lost in extraction; restored here.)
with open('app.py', 'w', encoding='utf-8') as app_file:
    app_file.write(app_code)
# requirements.txt: minimal dependency list the Space needs at runtime.
requirements = '''gradio>=4.0.0
huggingface_hub>=0.19.0
'''

# Write the pin-list next to the app (indentation restored after extraction).
with open('requirements.txt', 'w', encoding='utf-8') as req_file:
    req_file.write(requirements)
# README.md with the YAML front-matter that Hugging Face Spaces requires
# (sdk, app_file, license, ...) followed by usage instructions.
# NOTE: trailing "\\" in the CURL example becomes a single "\" (bash line
# continuation) in the written README, since this literal is not raw.
readme = '''---
title: Title Generator with Llama 3.2
emoji: 📝
colorFrom: blue
colorTo: purple
sdk: gradio
sdk_version: 4.44.0
app_file: app.py
pinned: false
license: mit
---
# Title Generator with Llama 3.2-1B-Instruct
Generate concise titles from text or conversation history using Meta's Llama 3.2-1B-Instruct model.
## Features
- 📝 Generate titles from plain text
- 💬 Generate titles from conversation history
- 🚀 Fast inference with Llama 3.2-1B
- 🔌 RESTful API support for integration
## Setup
1. Go to your Space settings
2. Add a new secret: `HF_TOKEN` with your Hugging Face token
3. Make sure you have access to `meta-llama/Llama-3.2-1B-Instruct` (accept the gated model)
## API Usage
### CURL Example
```bash
curl -X POST "https://YOUR-SPACE-URL/call/generate_title" \\
-H "Content-Type: application/json" \\
-d '{"data": ["Your text or conversation here"]}'
```
### Python Example
```python
from gradio_client import Client
client = Client("YOUR-SPACE-URL")
result = client.predict("Your text here", api_name="/generate_title")
print(result)
```
## License
MIT License
'''

# Write the README (indentation of the write call restored after extraction).
with open('README.md', 'w', encoding='utf-8') as readme_file:
    readme_file.write(readme)
# Final summary: list every file that was written to disk.
for status_line in (
    "✅ Archivos generados exitosamente:",
    "- app.py",
    "- requirements.txt",
    "- README.md",
    "\n📦 Archivos listos para subir a Hugging Face Space",
):
    print(status_line)