#!/bin/bash
# =============================================================================
# ComfyUI Provisioning Script for Vast.ai
#
# Unified setup: installs custom nodes, downloads all models (LoRAs, VAE,
# checkpoints) from HuggingFace (private + public) and CivitAI.
#
# Uses aria2c for fast parallel downloads (16 connections per file).
#
# Environment variables (set in Vast.ai template Docker Options):
# HF_TOKEN - Required. HuggingFace token for private repos.
# CIVITAI_TOKEN - Required. CivitAI API token for gated models.
#
# Usage:
# # Set in Vast.ai template environment:
# HF_TOKEN=hf_xxx CIVITAI_TOKEN=xxx ./provision.sh
#
# # Or source the ai-dock venv first (if using ai-dock template):
# source /venv/main/bin/activate && ./provision.sh
# =============================================================================
# -u: abort on unset variables; pipefail: a pipeline fails if any stage fails.
# Deliberately NO -e: individual download failures are reported as warnings
# (see download_batch) and must not abort the whole provisioning run.
set -uo pipefail
# =============================================================================
# Configuration
# =============================================================================
# Base paths — overridable via environment (Vast.ai templates set WORKSPACE).
WORKSPACE="${WORKSPACE:-/workspace}"
COMFYUI_DIR="${COMFYUI_DIR:-${WORKSPACE}/ComfyUI}"
# Standard ComfyUI model subdirectories (created in main before downloads).
LORA_DIR="${COMFYUI_DIR}/models/loras"
VAE_DIR="${COMFYUI_DIR}/models/vae"
CKPT_DIR="${COMFYUI_DIR}/models/checkpoints"
CLIP_DIR="${COMFYUI_DIR}/models/clip"
TEXT_ENC_DIR="${COMFYUI_DIR}/models/text_encoders"
DIFFUSION_DIR="${COMFYUI_DIR}/models/diffusion_models"
UPSCALE_DIR="${COMFYUI_DIR}/models/upscale_models"
ULTRALYTICS_DIR="${COMFYUI_DIR}/models/ultralytics/bbox"
NODES_DIR="${COMFYUI_DIR}/custom_nodes"
# Activate venv if available (ai-dock template) so pip installs land in it.
if [ -f /venv/main/bin/activate ]; then
source /venv/main/bin/activate
fi
# =============================================================================
# Model Definitions
#
# All arrays below use the same entry format consumed by download_batch:
#   "filename|url"
# The filename part is the local name written into the target directory;
# the url part is passed verbatim to the downloader.
# =============================================================================
# HuggingFace LoRAs — private repo (requires HF_TOKEN)
# Format: "filename|url"
HF_PRIVATE_LORAS=(
"Z-Isla_3K_0002_000003500.safetensors|https://huggingface.co/cozyga/test/resolve/main/Z-Isla_3K_0002_000003500.safetensors"
"z-kyla-v1.safetensors|https://huggingface.co/cozyga/test/resolve/main/z-kyla-v1.safetensors"
"m1a_zit_3.safetensors|https://huggingface.co/cozyga/test/resolve/main/m1a_zit_3.safetensors"
"c3lia_v1_000003600.safetensors|https://huggingface.co/cozyga/test/resolve/main/c3lia_v1_000003600.safetensors"
"c3lia_v1_000003900.safetensors|https://huggingface.co/cozyga/test/resolve/main/c3lia_v1_000003900.safetensors"
"qwen_c3lia_000003600.safetensors|https://huggingface.co/cozyga/test/resolve/main/qwen_c3lia_000003600.safetensors"
"s0fia_v2_000004800.safetensors|https://huggingface.co/cozyga/test/resolve/main/s0fia_v2_000004800.safetensors"
"s0fia_v2_000003700.safetensors|https://huggingface.co/cozyga/test/resolve/main/s0fia_v2_000003700.safetensors"
"s0fia_v2_000004300.safetensors|https://huggingface.co/cozyga/test/resolve/main/s0fia_v2_000004300.safetensors"
)
# HuggingFace LoRAs — public repos (no token strictly needed, but HF_TOKEN
# is still passed through harmlessly by download_batch)
HF_PUBLIC_LORAS=(
"l0ki_zit_v1_000003000.safetensors|https://huggingface.co/XLB666/qwenzitloras/resolve/main/l0ki_zit_v1_000003000.safetensors"
"l0ki_qwen_v1_000002800.safetensors|https://huggingface.co/XLB666/qwenzitloras/resolve/main/l0ki_qwen_v1_000002800.safetensors"
"dmd2_sdxl_4step_lora_fp16.safetensors|https://huggingface.co/tianweiy/DMD2/resolve/main/dmd2_sdxl_4step_lora_fp16.safetensors"
)
# CivitAI LoRAs (requires CIVITAI_TOKEN)
# Format: "filename|url"
# Explicit filenames avoid the CivitAI redirect long-filename bug
CIVITAI_LORAS=(
"zittitslider.safetensors|https://civitai.com/api/download/models/2478366"
"QwenEditVersatilePoses.safetensors|https://civitai.com/api/download/models/2457989?type=Model&format=SafeTensor"
"sdxlcloseuppussy.safetensors|https://civitai.com/api/download/models/2376235?type=Model&format=SafeTensor"
"amateurphotography.safetensors|https://civitai.com/api/download/models/2363467"
"nextscene.safetensors|https://civitai.com/api/download/models/2337214?type=Model&format=SafeTensor"
)
# CivitAI VAE models
CIVITAI_VAE=(
"zImage_vae.safetensors|https://civitai.com/api/download/models/2442479"
)
# CivitAI Checkpoints
CIVITAI_CHECKPOINTS=(
"checkpoint.safetensors|https://civitai.com/api/download/models/798204?type=Model&format=SafeTensor&size=full&fp=fp16"
"LustifyENDGAME.safetensors|https://civitai.com/api/download/models/1094291?type=Model&format=SafeTensor&size=pruned&fp=fp16"
"analogMadnessXL.safetensors|https://civitai.com/api/download/models/2207703?type=Model&format=SafeTensor&size=full&fp=fp16"
)
# --- HuggingFace public models (various directories) ---
HF_PUBLIC_TEXT_ENCODERS=(
"t5xxl_fp8_e4m3fn_scaled.safetensors|https://huggingface.co/comfyanonymous/flux_text_encoders/resolve/main/t5xxl_fp8_e4m3fn_scaled.safetensors"
"qwen_2.5_vl_7b_fp8_scaled.safetensors|https://huggingface.co/Comfy-Org/Qwen-Image_ComfyUI/resolve/main/split_files/text_encoders/qwen_2.5_vl_7b_fp8_scaled.safetensors"
)
HF_PUBLIC_CLIP=(
"qwen_3_4b.safetensors|https://huggingface.co/Comfy-Org/z_image_turbo/resolve/main/split_files/text_encoders/qwen_3_4b.safetensors"
)
HF_PUBLIC_DIFFUSION_MODELS=(
"z_image_turbo_bf16.safetensors|https://huggingface.co/Comfy-Org/z_image_turbo/resolve/main/split_files/diffusion_models/z_image_turbo_bf16.safetensors"
)
HF_PUBLIC_VAE=(
"qwen_image_vae.safetensors|https://huggingface.co/Comfy-Org/Qwen-Image_ComfyUI/resolve/main/split_files/vae/qwen_image_vae.safetensors"
"flux_vae.safetensors|https://huggingface.co/StableDiffusionVN/Flux/resolve/main/Vae/flux_vae.safetensors"
)
HF_PUBLIC_CHECKPOINTS=(
"Qwen-Rapid-AIO-SFW-v23.safetensors|https://huggingface.co/Phr00t/Qwen-Image-Edit-Rapid-AIO/resolve/main/v23/Qwen-Rapid-AIO-SFW-v23.safetensors"
"Qwen-Rapid-AIO-NSFW-v23.safetensors|https://huggingface.co/Phr00t/Qwen-Image-Edit-Rapid-AIO/resolve/main/v23/Qwen-Rapid-AIO-NSFW-v23.safetensors"
)
# YOLO detector weights; URL is pinned to a specific commit for reproducibility.
HF_PUBLIC_ULTRALYTICS=(
"FacesV1.pt|https://huggingface.co/ashllay/YOLO_Models/resolve/e07b01219ff1807e1885015f439d788b038f49bd/bbox/FacesV1.pt"
)
# --- HuggingFace private models — cozyga/test (non-lora) ---
HF_PRIVATE_CHECKPOINTS=(
"lustifySDXLNSFW_endgame.safetensors|https://huggingface.co/cozyga/test/resolve/main/lustifySDXLNSFW_endgame.safetensors"
)
HF_PRIVATE_UPSCALE=(
"1x-ITF-SkinDiffDetail-Lite-v1.pth|https://huggingface.co/cozyga/test/resolve/main/1x-ITF-SkinDiffDetail-Lite-v1.pth"
)
HF_PRIVATE_ULTRALYTICS=(
"nipple.pt|https://huggingface.co/cozyga/test/resolve/main/nipple.pt"
"pussyV2.pt|https://huggingface.co/cozyga/test/resolve/main/pussyV2.pt"
)
# Custom nodes to install/update (git clone URL; directory name is the last
# path component — see install_nodes)
NODES=(
"https://github.com/ltdrdata/ComfyUI-Manager"
"https://github.com/ltdrdata/ComfyUI-Impact-Pack"
"https://github.com/ltdrdata/ComfyUI-Impact-Subpack"
"https://github.com/cubiq/ComfyUI_essentials"
"https://github.com/yolain/ComfyUI-Easy-Use"
"https://github.com/rgthree/rgthree-comfy"
"https://github.com/chflame163/ComfyUI_LayerStyle"
"https://github.com/numz/ComfyUI-SeedVR2_VideoUpscaler"
"https://github.com/EllangoK/ComfyUI-post-processing-nodes"
"https://github.com/wallish77/wlsh_nodes"
"https://github.com/chrisgoringe/cg-image-filter"
"https://github.com/Azornes/Comfyui-Resolution-Master"
"https://github.com/ltdrdata/was-node-suite-comfyui"
"https://github.com/DanrisiUA/ComfyUI-ZImage-LoRA-Merger"
# NOTE: moonwhaler/comfyui-seedvr2-tilingups returns 404 — removed
)
# Extra pip packages (installed after nodes)
PIP_PACKAGES=(
# "package-name"
)
# =============================================================================
# Functions
# =============================================================================
# Print a status banner with the resolved paths and token availability.
# SECURITY FIX: the previous version used "${HF_TOKEN:+set}${HF_TOKEN:-MISSING}",
# which printed the word "set" *followed by the full token value* whenever the
# token was defined, leaking the secret into logs. We now only ever print
# "set" or "MISSING", never the value itself.
# Globals (read): WORKSPACE, COMFYUI_DIR, HF_TOKEN, CIVITAI_TOKEN
# Outputs: banner on stdout. Returns: 0.
print_header() {
  local hf_status="MISSING"
  local civitai_status="MISSING"
  if [ -n "${HF_TOKEN:-}" ]; then
    hf_status="set"
  fi
  if [ -n "${CIVITAI_TOKEN:-}" ]; then
    civitai_status="set"
  fi
  echo ""
  echo "=============================================="
  echo " ComfyUI Provisioning — Vast.ai"
  echo "=============================================="
  echo " Workspace: ${WORKSPACE}"
  echo " ComfyUI: ${COMFYUI_DIR}"
  echo " HF_TOKEN: ${hf_status}"
  echo " CIVITAI: ${civitai_status}"
  echo "=============================================="
  echo ""
}
# Ensure aria2c is available for fast parallel downloads.
# Vast.ai containers are typically Debian/Ubuntu, so apt-get is assumed.
# Returns 0 if aria2c is present or installed; 1 if it cannot be installed
# (callers then fall back to wget in download_if_missing).
install_aria2() {
  command -v aria2c &>/dev/null && return 0

  echo "Installing aria2c for fast parallel downloads..."
  # Build the apt-get invocation, prefixing sudo only when not running as root.
  local -a apt_cmd=(apt-get)
  if [ "$(id -u)" -ne 0 ]; then
    if ! command -v sudo &>/dev/null; then
      echo "ERROR: aria2 not found and cannot install (not root, no sudo)."
      echo "Falling back to wget."
      return 1
    fi
    apt_cmd=(sudo apt-get)
  fi
  "${apt_cmd[@]}" update -qq && "${apt_cmd[@]}" install -y -qq aria2
}
# Check that HF_TOKEN is set and accepted by the HuggingFace whoami endpoint.
# Globals (read): HF_TOKEN
# Outputs: validation result / warning on stdout.
# Returns: 0 when the token validates, 1 otherwise (callers treat as non-fatal).
validate_hf_token() {
  if [ -z "${HF_TOKEN:-}" ]; then
    echo "WARNING: HF_TOKEN is not set. Private HuggingFace downloads will fail."
    return 1
  fi

  local http_code
  http_code=$(curl -s -o /dev/null -w "%{http_code}" \
    -H "Authorization: Bearer ${HF_TOKEN}" \
    "https://huggingface.co/api/whoami-v2")

  if [ "$http_code" -ne 200 ]; then
    echo "WARNING: HF_TOKEN validation failed (HTTP ${http_code}). Private downloads may fail."
    return 1
  fi
  echo "HF_TOKEN: validated"
}
# Check that CIVITAI_TOKEN is set and accepted by the CivitAI API
# (hidden-models listing requires a valid token).
# Globals (read): CIVITAI_TOKEN
# Outputs: validation result / warning on stdout.
# Returns: 0 when the token validates, 1 otherwise (callers treat as non-fatal).
validate_civitai_token() {
  if [ -z "${CIVITAI_TOKEN:-}" ]; then
    echo "WARNING: CIVITAI_TOKEN is not set. CivitAI downloads will fail."
    return 1
  fi

  local http_code
  http_code=$(curl -s -o /dev/null -w "%{http_code}" \
    -H "Authorization: Bearer ${CIVITAI_TOKEN}" \
    "https://civitai.com/api/v1/models?hidden=1&limit=1")

  if [ "$http_code" -ne 200 ]; then
    echo "WARNING: CIVITAI_TOKEN validation failed (HTTP ${http_code}). CivitAI downloads may fail."
    return 1
  fi
  echo "CIVITAI_TOKEN: validated"
}
# Download a file if it doesn't already exist.
# Uses aria2c with 16 parallel connections for speed; CivitAI URLs use
# curl -L instead (their redirect chains break aria2c's auth handling);
# wget is the last-resort fallback.
#
# FIX: the previous revision had every "${filename}" expansion corrupted into
# the command substitution "$(unknown)", which expanded to empty — the skip
# check tested the directory itself, downloads targeted "${dir}/", and the
# final size check always failed. All occurrences restored to "${filename}".
# Also added curl --fail so an HTTP error page is never saved as a model file.
#
# Usage: download_if_missing <dir> <filename> <url> [auth_header]
#   dir         - target directory (created if missing)
#   filename    - local file name to write
#   url         - source URL
#   auth_header - optional bearer token value
# Returns: 0 on success or skip, 1 when the result is missing/empty.
download_if_missing() {
  local dir="$1"
  local filename="$2"
  local url="$3"
  local auth_header="${4:-}"

  mkdir -p "$dir"

  # Idempotency: never re-download an existing file (script is re-runnable).
  if [ -f "${dir}/${filename}" ]; then
    echo " [skip] ${filename}"
    return 0
  fi

  echo " [downloading] ${filename}..."
  # CivitAI uses redirect chains that aria2c can't follow with auth.
  # Use curl -L for CivitAI URLs, aria2c for everything else.
  if [[ "$url" =~ civitai\.com ]]; then
    if [ -n "$auth_header" ]; then
      curl --fail -L -# -H "Authorization: Bearer ${auth_header}" \
        -o "${dir}/${filename}" "$url"
    else
      curl --fail -L -# -o "${dir}/${filename}" "$url"
    fi
  elif command -v aria2c &>/dev/null; then
    # -c resumes partial downloads from a previous interrupted run.
    if [ -n "$auth_header" ]; then
      aria2c -x 16 -s 16 -k 1M -c \
        --header="Authorization: Bearer ${auth_header}" \
        -d "$dir" -o "$filename" "$url" 2>&1 | tail -1
    else
      aria2c -x 16 -s 16 -k 1M -c \
        -d "$dir" -o "$filename" "$url" 2>&1 | tail -1
    fi
  else
    # Fallback: wget
    if [ -n "$auth_header" ]; then
      wget -q --show-progress \
        --header="Authorization: Bearer ${auth_header}" \
        -O "${dir}/${filename}" "$url"
    else
      wget -q --show-progress -O "${dir}/${filename}" "$url"
    fi
  fi

  # Verify file was actually downloaded (not a 0-byte error)
  if [ ! -s "${dir}/${filename}" ]; then
    echo " [FAILED] ${filename} — downloaded file is empty, removing"
    rm -f "${dir}/${filename}"
    return 1
  fi
}
# Download a batch of files from "filename|url" entries via download_if_missing.
# Individual failures are counted and summarised as a warning — the batch
# itself never aborts the script.
# Usage: download_batch <dir> <auth_token> <array_entries...>
download_batch() {
  local target_dir="$1"
  local token="$2"
  shift 2

  local entry name url
  local fail_count=0
  for entry in "$@"; do
    name="${entry%%|*}"
    url="${entry#*|}"
    if ! download_if_missing "$target_dir" "$name" "$url" "$token"; then
      fail_count=$((fail_count + 1))
    fi
  done

  if [ "$fail_count" -gt 0 ]; then
    echo " WARNING: ${fail_count} download(s) failed in ${target_dir}"
  fi
}
# Install or update every custom node listed in NODES.
# Existing checkouts are git-pulled; missing ones are cloned (recursively).
# Each node's requirements.txt, when present, is pip-installed.
# All per-node failures are warnings — provisioning continues.
# Globals (read): NODES, NODES_DIR
install_nodes() {
  (( ${#NODES[@]} > 0 )) || return 0

  echo "=== Installing Custom Nodes ==="
  mkdir -p "$NODES_DIR"

  local repo_url node_name node_dir req_file
  for repo_url in "${NODES[@]}"; do
    node_name="${repo_url##*/}"
    node_dir="${NODES_DIR}/${node_name}"
    req_file="${node_dir}/requirements.txt"

    if [ -d "$node_dir" ]; then
      echo " [update] ${node_name}"
      (cd "$node_dir" && git pull -q) || echo " [WARN] git pull failed for ${node_name}, skipping"
    else
      echo " [clone] ${node_name}"
      if ! git clone --recursive -q "$repo_url" "$node_dir"; then
        echo " [WARN] git clone failed for ${node_name}, skipping"
        continue
      fi
    fi

    if [ -f "$req_file" ]; then
      pip install --no-cache-dir -q -r "$req_file" || echo " [WARN] pip install failed for ${node_name}"
    fi
  done
  echo ""
}
# Install the extra pip packages listed in PIP_PACKAGES (no-op when empty).
# Globals (read): PIP_PACKAGES
install_pip_packages() {
  (( ${#PIP_PACKAGES[@]} > 0 )) || return 0

  echo "=== Installing pip packages ==="
  pip install --no-cache-dir -q "${PIP_PACKAGES[@]}"
  echo ""
}
# =============================================================================
# Main
#
# Sequence: banner -> tooling -> token checks -> directories -> nodes/pip ->
# model downloads (grouped by source and target dir) -> summary counts.
# Every step is best-effort: failures are reported but never abort the run,
# so a partially-provisioned instance still boots ComfyUI.
# =============================================================================
print_header
# Install aria2c for fast downloads
install_aria2 || true
# Validate tokens (non-fatal — script continues with warnings)
validate_hf_token || true
validate_civitai_token || true
echo ""
# Create all model directories
mkdir -p "$LORA_DIR" "$VAE_DIR" "$CKPT_DIR" "$CLIP_DIR" "$TEXT_ENC_DIR" \
"$DIFFUSION_DIR" "$UPSCALE_DIR" "$ULTRALYTICS_DIR"
# --- Custom Nodes ---
install_nodes
# --- Pip Packages ---
install_pip_packages
# --- HuggingFace Private LoRAs ---
echo "=== HuggingFace Private LoRAs (cozyga/test) ==="
download_batch "$LORA_DIR" "${HF_TOKEN:-}" "${HF_PRIVATE_LORAS[@]}"
echo ""
# --- HuggingFace Public LoRAs ---
# (HF_TOKEN is passed even for public repos; HuggingFace accepts it harmlessly)
echo "=== HuggingFace Public LoRAs ==="
download_batch "$LORA_DIR" "${HF_TOKEN:-}" "${HF_PUBLIC_LORAS[@]}"
echo ""
# --- CivitAI LoRAs ---
echo "=== CivitAI LoRAs ==="
download_batch "$LORA_DIR" "${CIVITAI_TOKEN:-}" "${CIVITAI_LORAS[@]}"
echo ""
# --- CivitAI VAE ---
echo "=== CivitAI VAE ==="
download_batch "$VAE_DIR" "${CIVITAI_TOKEN:-}" "${CIVITAI_VAE[@]}"
echo ""
# --- CivitAI Checkpoints ---
echo "=== CivitAI Checkpoints ==="
download_batch "$CKPT_DIR" "${CIVITAI_TOKEN:-}" "${CIVITAI_CHECKPOINTS[@]}"
echo ""
# --- HuggingFace Public Text Encoders ---
echo "=== HuggingFace Text Encoders ==="
download_batch "$TEXT_ENC_DIR" "${HF_TOKEN:-}" "${HF_PUBLIC_TEXT_ENCODERS[@]}"
echo ""
# --- HuggingFace Public CLIP ---
echo "=== HuggingFace CLIP ==="
download_batch "$CLIP_DIR" "${HF_TOKEN:-}" "${HF_PUBLIC_CLIP[@]}"
echo ""
# --- HuggingFace Public Diffusion Models ---
echo "=== HuggingFace Diffusion Models ==="
download_batch "$DIFFUSION_DIR" "${HF_TOKEN:-}" "${HF_PUBLIC_DIFFUSION_MODELS[@]}"
echo ""
# --- HuggingFace Public VAE ---
echo "=== HuggingFace VAE ==="
download_batch "$VAE_DIR" "${HF_TOKEN:-}" "${HF_PUBLIC_VAE[@]}"
echo ""
# --- HuggingFace Public Checkpoints ---
echo "=== HuggingFace Checkpoints (Qwen) ==="
download_batch "$CKPT_DIR" "${HF_TOKEN:-}" "${HF_PUBLIC_CHECKPOINTS[@]}"
echo ""
# --- HuggingFace Public Ultralytics ---
echo "=== HuggingFace Ultralytics (YOLO) ==="
download_batch "$ULTRALYTICS_DIR" "${HF_TOKEN:-}" "${HF_PUBLIC_ULTRALYTICS[@]}"
echo ""
# --- HuggingFace Private Checkpoints (cozyga/test) ---
echo "=== HuggingFace Private Checkpoints (cozyga/test) ==="
download_batch "$CKPT_DIR" "${HF_TOKEN:-}" "${HF_PRIVATE_CHECKPOINTS[@]}"
echo ""
# --- HuggingFace Private Upscale Models (cozyga/test) ---
echo "=== HuggingFace Private Upscale Models (cozyga/test) ==="
download_batch "$UPSCALE_DIR" "${HF_TOKEN:-}" "${HF_PRIVATE_UPSCALE[@]}"
echo ""
# --- HuggingFace Private Ultralytics (cozyga/test) ---
echo "=== HuggingFace Private Ultralytics (cozyga/test) ==="
download_batch "$ULTRALYTICS_DIR" "${HF_TOKEN:-}" "${HF_PRIVATE_ULTRALYTICS[@]}"
echo ""
# =============================================================================
# Summary
#
# Counts top-level files per model directory. Entries are "Label|dir" pairs
# split on the first '|' — the same delimiter convention as the model arrays.
# =============================================================================
echo "=============================================="
echo " Provisioning complete"
echo "=============================================="
echo ""
for label_dir in \
"LoRAs|${LORA_DIR}" \
"Checkpoints|${CKPT_DIR}" \
"VAE|${VAE_DIR}" \
"CLIP|${CLIP_DIR}" \
"Text Encoders|${TEXT_ENC_DIR}" \
"Diffusion Models|${DIFFUSION_DIR}" \
"Upscale Models|${UPSCALE_DIR}" \
"Ultralytics|${ULTRALYTICS_DIR}"; do
label="${label_dir%%|*}"
dir="${label_dir#*|}"
# tr strips the padding some wc implementations emit
count=$(find "$dir" -maxdepth 1 -type f 2>/dev/null | wc -l | tr -d ' ')
echo " ${label}: ${count} files"
done
echo ""
echo "Application will start now."