Upload 6 files
- Dockerfile.1 +37 -0
- app.py +84 -48
- start.sh +144 -76
- vince.sh +95 -0
- vincie_service.py +56 -58
Dockerfile.1
ADDED
FROM nvidia/cuda:12.4.1-devel-ubuntu22.04

ENV DEBIAN_FRONTEND=noninteractive
ENV HF_HOME=/app/model_cache
ENV HF_HUB_CACHE=/app/model_cache/hub
ENV TORCH_HOME=/app/model_cache/torch
WORKDIR /app

# System packages and toolchain
RUN apt-get update && apt-get install -y --no-install-recommends \
    python3.10 python3.10-dev python3.10-distutils python3-pip \
    git wget curl ffmpeg libglib2.0-0 libsm6 libxext6 libxrender-dev \
    libgomp1 build-essential ninja-build cmake \
    && rm -rf /var/lib/apt/lists/*

RUN ln -sf /usr/bin/python3.10 /usr/bin/python3 && \
    ln -sf /usr/bin/python3.10 /usr/bin/python && \
    python3 -m pip install --upgrade pip

# PyTorch 2.6.0 + cu124
RUN pip install --no-cache-dir --index-url https://download.pytorch.org/whl/cu124 \
    torch==2.6.0 torchvision torchaudio

# Base Python dependencies for the app (FlashAttention/Apex are not installed here)
COPY requirements.txt .
RUN pip install --no-cache-dir -r requirements.txt \
    && pip install --no-cache-dir "tiktoken>=0.7,<0.9"

# Copy the app and start script
COPY . .
RUN chmod +x ./start.sh

# Non-root user
RUN useradd -m -u 1000 appuser && chown -R appuser:appuser /app
USER appuser
ENV PORT=7860
CMD ["./start.sh"]
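For a local smoke test, the image can be built and run roughly as follows; this is a sketch, the tag and port mapping are illustrative, and -f is needed because the file is named Dockerfile.1 rather than Dockerfile:

# illustrative local build/run; --gpus requires the NVIDIA container toolkit
docker build -f Dockerfile.1 -t vincie-service .
docker run --gpus all -p 7860:7860 -e PORT=7860 vincie-service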
app.py
CHANGED
#!/usr/bin/env python3
import os
import glob
import gradio as gr
from pathlib import Path
from typing import List, Tuple, Optional

from vincie_service import VincieService

svc = VincieService()  # uses /app/VINCIE and /app/ckpt/VINCIE-3B by default

def setup() -> str:
    """
    Prepares the repository and the models (idempotent).
    """
    try:
        svc.ensure_repo()
        svc.ensure_model()
    except Exception as e:
        return f"Setup falhou: {e}"
    return "Setup OK"

def _list_media(out_dir: Path, max_images: int = 24) -> Tuple[List[str], Optional[str]]:
    """
    Scans the output directory for images and video, returning paths ordered by mtime.
    """
    exts_img = ("*.png", "*.jpg", "*.jpeg", "*.webp")
    imgs: List[Path] = []
    for pat in exts_img:
        imgs += list(out_dir.rglob(pat))
    imgs = sorted(imgs, key=lambda p: p.stat().st_mtime)
    imgs_paths = [str(p) for p in imgs[-max_images:]] if imgs else []

    vids = sorted(out_dir.rglob("*.mp4"), key=lambda p: p.stat().st_mtime)
    vid_path = str(vids[-1]) if vids else None
    return imgs_paths, vid_path

def ui_multi_turn(input_image, turns_text):
    """
    Runs multi-turn editing and returns the media found in the output directory.
    """
    if not input_image or not str(input_image).strip():
        return [], None, "Forneça uma imagem."
    if not turns_text or not turns_text.strip():
        return [], None, "Forneça turns (um por linha)."

    turns = [ln.strip() for ln in turns_text.splitlines() if ln.strip()]
    try:
        out_dir = svc.multi_turn_edit(input_image, turns)
    except Exception as e:
        return [], None, f"Erro na geração: {e}"

    imgs, vid = _list_media(Path(out_dir))
    status = f"Gerado em: {str(out_dir)}"
    return imgs, vid, status

def ui_multi_concept(files, descs_text, final_prompt):
    """
    Runs multi-concept composition and returns the media found in the output directory.
    """
    if not files:
        return [], None, "Envie imagens de conceito."
    if not descs_text or not descs_text.strip():
        return [], None, "Forneça descrições (uma por linha)."
    if not final_prompt or not final_prompt.strip():
        return [], None, "Forneça o prompt final."

    descs = [ln.strip() for ln in descs_text.splitlines() if ln.strip()]
    if len(descs) != len(files):
        return [], None, f"Número de descrições ({len(descs)}) difere do número de imagens ({len(files)})."

    try:
        out_dir = svc.multi_concept_compose(files, descs, final_prompt)
    except Exception as e:
        return [], None, f"Erro na geração: {e}"

    imgs, vid = _list_media(Path(out_dir))
    status = f"Gerado em: {str(out_dir)}"
    return imgs, vid, status

with gr.Blocks(title="VINCIE Service") as demo:
    gr.Markdown("# 🎨 VINCIE Service — Multi-turn e Multi-concept")

    with gr.Row():
        setup_btn = gr.Button("Preparar (repo + modelos)")
        setup_out = gr.Textbox(label="Status", interactive=False)
    setup_btn.click(fn=setup, outputs=setup_out)

    with gr.Tab("🔄 Multi-turn Editing"):
        with gr.Row():
            img = gr.Image(type="filepath", label="Imagem inicial")
            turns = gr.Textbox(lines=8, label="Turns (um por linha)")
        run1 = gr.Button("Executar")
        out_gallery = gr.Gallery(label="Imagens", columns=4, height="auto")
        out_video = gr.Video(label="Vídeo (se houver)")
        out_status = gr.Textbox(label="Saída", interactive=False)
        run1.click(ui_multi_turn, inputs=[img, turns], outputs=[out_gallery, out_video, out_status])

    with gr.Tab("🧩 Multi-concept Compose"):
        files = gr.File(file_count="multiple", type="filepath", label="Imagens de conceito")
        descs = gr.Textbox(lines=8, label="Descrições (uma por linha, na mesma ordem das imagens)")
        final_prompt = gr.Textbox(lines=2, label="Prompt final")
        run2 = gr.Button("Executar")
        out_gallery2 = gr.Gallery(label="Imagens", columns=4, height="auto")
        out_video2 = gr.Video(label="Vídeo (se houver)")
        out_status2 = gr.Textbox(label="Saída", interactive=False)
        run2.click(ui_multi_concept, inputs=[files, descs, final_prompt], outputs=[out_gallery2, out_video2, out_status2])

if __name__ == "__main__":
    demo.launch(
        server_name="0.0.0.0",
        server_port=int(os.getenv("PORT", "7860")),
        allowed_paths=["/app/outputs", "/app/ckpt"]
    )
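For a quick check outside the container, the UI can be started directly; a sketch assuming the dependencies from requirements.txt (plus torch and gradio) are already installed:

# PORT is read by app.py; the 0.0.0.0 bind and allowed_paths are hard-coded in demo.launch()
PORT=7860 python app.py
# then open http://localhost:7860 and run "Preparar (repo + modelos)" before generating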
start.sh
CHANGED
#!/usr/bin/env bash
set -euo pipefail

echo "🚀 Starting runtime bootstrap..."

# ========= General config =========
# Hub repo that hosts the pre-built wheels (change if needed)
export SELF_HF_REPO_ID="${SELF_HF_REPO_ID:-XCarleX/Apex-l40s}"

# Target arch for Ada / L40S
export TORCH_CUDA_ARCH_LIST="${TORCH_CUDA_ARCH_LIST:-8.9}"

# Build parallelism (tune for the machine)
export MAX_JOBS="${MAX_JOBS:-64}"
export CMAKE_BUILD_PARALLEL_LEVEL="${CMAKE_BUILD_PARALLEL_LEVEL:-64}"
export NVCC_APPEND_FLAGS="${NVCC_APPEND_FLAGS:---threads 16}"

# Persistent caches
export HF_HOME="${HF_HOME:-/app/model_cache}"
export HF_HUB_CACHE="${HF_HUB_CACHE:-$HF_HOME/hub}"
export TORCH_HOME="${TORCH_HOME:-$HF_HOME/torch}"
mkdir -p /app/wheels /app/cuda_cache "$HF_HOME" "$TORCH_HOME"
export CUDA_CACHE_PATH="/app/cuda_cache"

# Show Torch/ABI state
python - <<'PY'
import torch, os
print("Torch:", torch.__version__, "CUDA:", torch.version.cuda, "SM:", os.environ.get("TORCH_CUDA_ARCH_LIST"))
print("ABI:", int(torch._C._GLIBCXX_USE_CXX11_ABI))
PY

# ========= Hugging Face Hub utils (with optional transfer acceleration) =========
python -m pip install -q -U "huggingface_hub[hf_transfer]" || python -m pip install -q -U huggingface_hub
export HF_HUB_ENABLE_HF_TRANSFER="${HF_HUB_ENABLE_HF_TRANSFER:-1}"

# Function: fetch a wheel from our own repo (wheels/ folder)
fetch_wheel () {
  # KIND is passed via the environment ("flash" | "apex"), e.g. KIND=flash fetch_wheel;
  # reading "$1" here would trip set -u because no positional argument is passed.
  python - <<'PY'
import os, re
from huggingface_hub import HfApi, hf_hub_download
repo_id = os.environ["SELF_HF_REPO_ID"]
kind = os.environ["KIND"]
pat = r"wheels/flash_attn-.*\.whl" if kind=="flash" else r"wheels/apex-.*\.whl"
api = HfApi()
try:
    files = api.list_repo_files(repo_id=repo_id, repo_type="model")
    cands = [f for f in files if re.match(pat, f)]
    if cands:
        target = sorted(cands)[-1]
        p = hf_hub_download(repo_id=repo_id, filename=target, repo_type="model")
        print(p)
except Exception as e:
    pass
PY
}

# Function: upload a freshly built wheel to the repo (requires HF_TOKEN with write access)
upload_wheel () {
  # WHEEL_PATH is passed via the environment, e.g. WHEEL_PATH=/path/to.whl upload_wheel
  if [ "${HF_UPLOAD_WHEELS:-0}" != "1" ]; then
    return 0
  fi
  python - <<'PY'
import os
from huggingface_hub import HfApi
repo_id = os.environ["SELF_HF_REPO_ID"]
wheel = os.environ["WHEEL_PATH"]
dst = "wheels/" + os.path.basename(wheel)
api = HfApi()
api.upload_file(path_or_fileobj=wheel, path_in_repo=dst, repo_id=repo_id, repo_type="model")
print("Uploaded:", dst)
PY
}

# ========= FlashAttention (reuse wheel or build) =========
if python - >/dev/null 2>&1 <<'PY'
import flash_attn, torch
print("ok")
PY
then
  echo "✅ flash-attn já instalado"
else
  echo "ℹ️ Buscando wheel de flash-attn em ${SELF_HF_REPO_ID}..."
  if W=$(KIND=flash fetch_wheel) && [ -n "${W:-}" ]; then
    echo "📦 Instalando flash-attn do repo: $W"
    python -m pip install -U "$W"
  else
    echo "⚙️ Compilando flash-attn e salvando wheel local..."
    python -m pip wheel --no-build-isolation --no-binary :all: flash-attn==2.7.4.post1 -w /app/wheels
    python -m pip install -U /app/wheels/flash_attn-2.7.4.post1-*.whl
    if WHEEL_PATH=$(ls /app/wheels/flash_attn-2.7.4.post1-*.whl 2>/dev/null | head -n1); then
      WHEEL_PATH="$WHEEL_PATH" upload_wheel || true
    fi
  fi
fi

# ========= NVIDIA Apex (core; reuse wheel or build) =========
# Check whether fused_layer_norm_cuda is present
if python - >/dev/null 2>&1 <<'PY'
import importlib
from apex.normalization import FusedLayerNorm
importlib.import_module("fused_layer_norm_cuda")
print("ok")
PY
then
  echo "✅ Apex com fused_layer_norm_cuda já instalado"
else
  echo "ℹ️ Buscando wheel de Apex em ${SELF_HF_REPO_ID}..."
  if W=$(KIND=apex fetch_wheel) && [ -n "${W:-}" ]; then
    echo "📦 Instalando Apex do repo: $W"
    python -m pip install -U "$W" || true
  fi

  # Validate again after trying the repo wheel
  if ! python - >/dev/null 2>&1 <<'PY'
import importlib
from apex.normalization import FusedLayerNorm
importlib.import_module("fused_layer_norm_cuda")
print("ok")
PY
  then
    echo "⚙️ Compilando Apex (core) e salvando wheel local..."
    rm -rf /tmp/apex && git clone --depth 1 https://github.com/NVIDIA/apex /tmp/apex
    pushd /tmp/apex >/dev/null
    export APEX_CPP_EXT=1 APEX_CUDA_EXT=1 APEX_ALL_CONTRIB_EXT=0
    python -m pip wheel --no-build-isolation . -w /app/wheels
    python -m pip install -U /app/wheels/apex-0.1-*.whl || python -m pip install -v --no-build-isolation .
    popd >/dev/null

    # Optional wheel upload
    if WHEEL_PATH=$(ls /app/wheels/apex-0.1-*.whl 2>/dev/null | head -n1); then
      WHEEL_PATH="$WHEEL_PATH" upload_wheel || true
    fi

    # Final validation
    python - <<'PY'
import importlib
from apex.normalization import FusedLayerNorm, FusedRMSNorm
print("Apex OK:", FusedLayerNorm, FusedRMSNorm)
importlib.import_module("fused_layer_norm_cuda")
print("fused_layer_norm_cuda OK")
PY
  fi
fi

# ========= VINCIE: ensure repo and checkpoints (hf_hub_download + local_dir) =========
python - <<'PY'
from vincie_service import VincieService
svc = VincieService()
svc.ensure_repo()
svc.ensure_model()
print("Service pronto:", svc.ready())
PY

# ========= Start the app (or Vince.sh if present) =========
if [ -x "./Vince.sh" ]; then
  echo "▶️ Executando Vince.sh"
  exec ./Vince.sh
else
  echo "▶️ Executando app-32.py"
  exec python app-32.py
fi
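The wheel reuse/build flow is driven entirely by environment variables; a sketch of a typical invocation (the token and job counts are illustrative):

# reuse pre-built wheels from SELF_HF_REPO_ID and, if a build happens, upload the result
SELF_HF_REPO_ID=XCarleX/Apex-l40s \
HF_UPLOAD_WHEELS=1 HF_TOKEN=hf_xxx \
TORCH_CUDA_ARCH_LIST=8.9 MAX_JOBS=32 \
./start.sh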
vince.sh
ADDED
#!/usr/bin/env bash
set -euo pipefail

echo "🚀 Complete AI Video Suite - Docker Container Starting..."
echo "🐳 Container: $(hostname)"
echo "👤 User: $(whoami)"
echo "🎮 GPUs: $(nvidia-smi --list-gpus | wc -l || echo '0')"

if command -v nvidia-smi >/dev/null 2>&1; then
  echo "💾 CUDA Memory:"
  nvidia-smi --query-gpu=memory.total,memory.used --format=csv,noheader,nounits | nl
fi

# Basics
export CUDA_VISIBLE_DEVICES=${CUDA_VISIBLE_DEVICES:-0,1,2,3,4,5,6,7}
export HF_HOME=${HF_HOME:-/app/model_cache}
export OUTPUT_DIR=${OUTPUT_DIR:-/app/outputs}
mkdir -p "$HF_HOME" "$OUTPUT_DIR" /app/ckpt/VINCIE-3B

# Clone VINCIE if it does not exist yet
if [ ! -d "/app/VINCIE" ]; then
  echo "📦 Clonando VINCIE..."
  git clone https://github.com/ByteDance-Seed/VINCIE /app/VINCIE
fi

# Download the model
python3 - <<'PY'
from huggingface_hub import snapshot_download
import os
save_dir = '/app/ckpt/VINCIE-3B'
os.makedirs(save_dir, exist_ok=True)
try:
    print('📥 Baixando VINCIE-3B...')
    snapshot_download(
        repo_id='ByteDance-Seed/VINCIE-3B',
        local_dir=save_dir,
        resume_download=True,
        local_dir_use_symlinks=False
    )
    print('✅ Modelo ok')
except Exception as e:
    print(f'⚠️ Download falhou: {e}')
PY

# Make sure dit.pth and vae.pth exist, fetching them by name
python3 - <<'PY'
import os
from huggingface_hub import hf_hub_download

save_dir = '/app/ckpt/VINCIE-3B'
os.makedirs(save_dir, exist_ok=True)

needed = ['dit.pth', 'vae.pth']
for fn in needed:
    p = os.path.join(save_dir, fn)
    if not (os.path.exists(p) and os.path.getsize(p) > 1024*1024):
        print(f'Baixando {fn}...')
        hf_hub_download(repo_id='ByteDance-Seed/VINCIE-3B', filename=fn, local_dir=save_dir)
print('✅ Checkpoints ok')
PY

# Compatibility with the repo-relative path (ckpt/VINCIE-3B)
mkdir -p /app/VINCIE/ckpt
ln -sfn /app/ckpt/VINCIE-3B /app/VINCIE/ckpt/VINCIE-3B

# Check flash-attn (without reinstalling into the user site)
python - <<'PY'
try:
    import torch, flash_attn
    print(f"✅ Flash-attn pronto (Torch {torch.__version__}, CUDA {torch.version.cuda})")
except Exception as e:
    print(f"⚠️ Flash-attn indisponível: {e}")
    print("➡️ Seguindo com fallback (atenção nativa do PyTorch).")
PY

# Copy the service files
cp /app/vincie_service.py /app/VINCIE/ 2>/dev/null || echo "vincie_service.py não encontrado"
cp /app/app.py /app/VINCIE/ 2>/dev/null || echo "app.py não encontrado"

# Decide which app to run
if [ -f "/app/VINCIE/app.py" ]; then
  echo "🚀 Iniciando VINCIE Service..."
  cd /app/VINCIE
  python3 app.py --listen --port ${PORT:-7861}
elif [ -f "/app/app.py" ]; then
  echo "🚀 Iniciando app.py..."
  python3 /app/app.py --listen --port ${PORT:-7860}
else
  echo "❌ Nenhum app.py encontrado"
  exit 1
fi
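vince.sh reproduces the previous start.sh bootstrap (clone, checkpoint download, flash-attn check, app dispatch) and can also be run on its own inside the container; a sketch:

# assumes it runs from /app with the checkpoint and output directories writable
chmod +x vince.sh && PORT=7861 ./vince.sh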
vincie_service.py
CHANGED
@@ -12,10 +12,10 @@ class VincieService:
    """
    Service that:
    - ensures the VINCIE repo is present
    - downloads dit.pth and vae.pth via hf_hub_download (local_dir)
    - creates the symlink /app/VINCIE/ckpt/VINCIE-3B -> /app/ckpt/VINCIE-3B
    - runs main.py with Hydra/YACS overrides (multi-turn and multi-concept)
    - provides a fallback (shim) for apex.normalization when Apex is missing
    """

    def __init__(

@@ -34,8 +34,6 @@ class VincieService:
        self.assets_dir = self.repo_dir / "assets"
        self.output_root = Path("/app/outputs")
        self.output_root.mkdir(parents=True, exist_ok=True)
        (self.repo_dir / "ckpt").mkdir(parents=True, exist_ok=True)

    # ---------- Setup ----------

@@ -45,56 +43,21 @@ class VincieService:
        if not self.repo_dir.exists():
            subprocess.run(["git", "clone", git_url, str(self.repo_dir)], check=True)

    def ensure_model(self, hf_token: Optional[str] = None) -> None:
        """
        Downloads only the required files from the ByteDance-Seed/VINCIE-3B repo:
        - dit.pth
        - vae.pth
        Uses hf_hub_download with local_dir and creates the compatibility symlink.
        """
        self.ckpt_dir.mkdir(parents=True, exist_ok=True)
        token = hf_token or os.getenv("HF_TOKEN") or os.getenv("HUGGINGFACE_TOKEN")

        def _need(p: Path) -> bool:
            try:
                return not (p.exists() and p.stat().st_size > 1_000_000)
            except FileNotFoundError:
                return True

        for fname in ["dit.pth", "vae.pth"]:
            dst = self.ckpt_dir / fname

@@ -113,19 +76,58 @@ class VincieService:
        link = self.repo_dir / "ckpt" / "VINCIE-3B"
        try:
            if link.is_symlink() or link.exists():
                try:
                    link.unlink()
                except IsADirectoryError:
                    pass
            if not link.exists():
                link.symlink_to(self.ckpt_dir, target_is_directory=True)
        except Exception as e:
            print("Aviso: falha ao criar symlink de ckpt:", e)

    def ensure_apex(self, enable_shim: bool = True) -> None:
        """
        If Apex is not present, injects a minimal shim for FusedRMSNorm/FusedLayerNorm
        backed by torch.nn, so code paths that import apex.normalization do not fail.
        """
        try:
            import importlib
            importlib.import_module("apex.normalization")
            return
        except Exception:
            if not enable_shim:
                return

        shim_root = Path("/app/shims")
        apex_pkg = shim_root / "apex"
        apex_pkg.mkdir(parents=True, exist_ok=True)
        (apex_pkg / "__init__.py").write_text("from .normalization import *\n")

        (apex_pkg / "normalization.py").write_text(
            "import torch\n"
            "import torch.nn as nn\n"
            "\n"
            "class FusedRMSNorm(nn.Module):\n"
            "    def __init__(self, normalized_shape, eps=1e-6, elementwise_affine=True):\n"
            "        super().__init__()\n"
            "        self.mod = nn.RMSNorm(normalized_shape, eps=eps, elementwise_affine=elementwise_affine)\n"
            "    def forward(self, x):\n"
            "        return self.mod(x)\n"
            "\n"
            "class FusedLayerNorm(nn.Module):\n"
            "    def __init__(self, normalized_shape, eps=1e-5, elementwise_affine=True):\n"
            "        super().__init__()\n"
            "        self.mod = nn.LayerNorm(normalized_shape, eps=eps, elementwise_affine=elementwise_affine)\n"
            "    def forward(self, x):\n"
            "        return self.mod(x)\n"
        )

        # Make the shim visible to this process and to subprocesses
        sys.path.insert(0, str(shim_root))
        os.environ["PYTHONPATH"] = f"{str(shim_root)}:{os.environ.get('PYTHONPATH','')}"

    def ready(self) -> bool:
        """Checks that the repo/config and the required checkpoints exist."""
        have_repo = self.repo_dir.exists() and self.generate_yaml.exists()
        dit_ok = (self.ckpt_dir / "dit.pth").exists()
        vae_ok = (self.ckpt_dir / "vae.pth").exists()

@@ -144,7 +146,6 @@ class VincieService:
            *overrides,
            f"generation.output.dir={str(work_output)}",
        ]
        env = os.environ.copy()
        subprocess.run(cmd, cwd=self.repo_dir, check=True, env=env)

@@ -157,11 +158,9 @@ class VincieService:
        out_dir_name: Optional[str] = None,
    ) -> Path:
        """
        Equivalent to the official example:
          generation.positive_prompt.image_path=[...]
          generation.positive_prompt.prompts=[...]
        """
        out_dir = self.output_root / (out_dir_name or f"multi_turn_{self._slug(input_image)}")
        image_json = json.dumps([str(input_image)])

@@ -184,9 +183,8 @@ class VincieService:
        out_dir_name: Optional[str] = None,
    ) -> Path:
        """
        Uses image_path as the list of concept images and prompts = [p1, p2, ..., final],
        keeping compatibility with the VINCIE pipeline.
        """
        out_dir = self.output_root / (out_dir_name or "multi_concept")
        imgs_json = json.dumps([str(p) for p in concept_images])
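As wired up in app.py and start.sh, the service class can also be driven directly; a minimal sketch, run from /app (the input image path and the prompts are illustrative):

python - <<'PY'
from vincie_service import VincieService

svc = VincieService()
svc.ensure_repo()    # clones https://github.com/ByteDance-Seed/VINCIE if missing
svc.ensure_model()   # fetches dit.pth / vae.pth into the checkpoint directory
svc.ensure_apex()    # installs the torch.nn-based shim when Apex is absent
print("ready:", svc.ready())

# illustrative inputs; multi_turn_edit returns the output directory as a Path
out = svc.multi_turn_edit("/app/assets/example.png", ["add a red hat", "turn day into night"])
print("outputs in:", out)
PY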
|