# dl_models_addon/scripts/colab_sd_models.py
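"""Colab model downloader tab for the Stable Diffusion WebUI.

Adds a "models" tab that builds wget commands from user-supplied direct links
(Civitai links get the API token appended), checks the total download size
against the free disk space in Colab, downloads checkpoints, LoRAs and
textual-inversion embeddings in parallel, and lets the user list and delete
downloaded model files.
"""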
from concurrent.futures import ThreadPoolExecutor, as_completed
from pathlib import Path
from re import search
from shutil import disk_usage
from subprocess import PIPE, Popen, STDOUT, run
import gradio as gr
from requests import get as requests_get, head as requests_head
from modules import script_callbacks, sd_models, shared
from modules.paths_internal import data_path
DL_COMMAND = 'wget -nv -t 10 --show-progress --progress=bar:force -q --content-disposition "{link}" -P "{dl_path}"'
WEBUI_ROOT = Path(data_path)
LINKS_FILE = WEBUI_ROOT / 'links.txt'
MODELS_FOLDER_PATH = Path(sd_models.model_path)
LORAS_FOLDER_PATH = Path(shared.cmd_opts.lora_dir)
EMBEDDINGS_FOLDER_PATH = Path(shared.cmd_opts.embeddings_dir)
CIVITAI_TOKEN = '542c1d6077168822e1b49e30e3437a5d'
def del_null_model():
    # remove the 'nullModel.ckpt' placeholder checkpoint, if it exists
    null_model_path = MODELS_FOLDER_PATH / 'nullModel.ckpt'
    try:
        null_model_path.unlink(missing_ok=True)
    except OSError:
        pass
def find_mount_point():
    path = Path(__file__).resolve()
    while not path.is_mount():
        path = path.parent
    return path
def free_space():
    total, used, free = disk_usage(find_mount_point())
    power = 2 ** 10
    n = 0
    power_labels = {0: '', 1: 'Kilo', 2: 'Mega', 3: 'Giga', 4: 'Tera'}
    while free > power:
        free /= power
        n += 1
    return f'{free:.2f} {power_labels[n]}bytes'
def extract_url(command_with_url):
    pattern = r'["\']?((?:https?|ftp|ftps)://[^\s"\'<>]+)["\']?'
    match = search(pattern, command_with_url)
    return match.group(1) if match else None
def hf_size(url: str) -> int:
    # For Hugging Face links, the '/raw/' counterpart of a '/resolve/' URL returns
    # the Git LFS pointer file, whose 'size <bytes>' field is the real file size.
    try:
        modified_url = url.replace('resolve', 'raw')
        response = requests_get(modified_url, timeout=10)
        response.raise_for_status()
        content = response.text
        size_str = content.split('size')[-1].strip().split()[0]
        return int(size_str) if size_str.isdigit() else 0
    except Exception:
        return 0
def cv_size(url: str) -> int:
    # Civitai download links end with the model-version id; query parameters
    # (e.g. the appended ?token=...) are stripped before querying the API.
    try:
        model_version_id = url.split('/')[-1].split('?')[0]
        response = requests_get(f'https://civitai.com/api/v1/model-versions/{model_version_id}?token={CIVITAI_TOKEN}', timeout=10)
        response.raise_for_status()
        files = response.json().get('files', [])
        if files:
            size_kb = files[0].get('sizeKB', 0)
            return int(size_kb * 1024)
        return 0
    except Exception:
        return 0
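# get_file_size() below tries several sources in order until one reports a size:
# the Hugging Face raw-pointer trick, the Civitai API, an HTTP HEAD request
# (Content-Length / Content-Disposition), and finally `curl -sI` and
# `wget --spider --server-response` as external fallbacks; any failure simply
# falls through to the next method.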
def get_file_size(command_with_url: str) -> int:
    url = extract_url(command_with_url)
    if not url:
        print(f'no link found in line `{command_with_url}`')
        return 0
    file_size = 0
    if 'huggingface' in url:
        file_size = hf_size(url)
    elif 'civitai' in url:
        file_size = cv_size(url)
    if file_size:
        return file_size
    try:
        response = requests_head(url, allow_redirects=True, timeout=10)
        response.raise_for_status()
        content_length = response.headers.get('Content-Length')
        if content_length and content_length.isdigit():
            return int(content_length)
        content_disposition = response.headers.get('Content-Disposition')
        if content_disposition:
            size_str = next((part.split('=')[1] for part in content_disposition.split(';') if 'size' in part), None)
            if size_str and size_str.isdigit():
                return int(size_str)
    except Exception:
        pass
    try:
        result = run(['curl', '-sI', url], capture_output=True, text=True, timeout=30)
        if result.returncode == 0:
            for line in result.stdout.splitlines():
                if 'content-length' in line.lower():
                    return int(line.split(':')[1].strip())
    except Exception:
        pass
    try:
        result = run(['wget', '--spider', '--server-response', url], capture_output=True, text=True, timeout=30)
        if result.returncode == 0:
            for line in result.stderr.splitlines():
                if 'content-length' in line.lower():
                    return int(line.split(':')[1].strip())
    except Exception:
        pass
    return 0
def get_total_file_size(urls: list):
    if not urls:
        return 0
    total_file_size = 0
    with ThreadPoolExecutor(max_workers=len(urls)) as executor:
        futures = [executor.submit(get_file_size, url) for url in urls]
        for future in as_completed(futures):
            total_file_size += future.result()
    return total_file_size
def bytes_convert(size_bytes):
    if size_bytes >= 1073741824:
        return f'{round(size_bytes / 1073741824, 2)} GB'
    else:
        return f'{round(size_bytes / 1048576, 2)} MB'
def get_own_links(ownmodels, ownloras, ownembeddings):
    dl_commands = []
    for text, dlpath in zip([ownmodels, ownloras, ownembeddings], [MODELS_FOLDER_PATH, LORAS_FOLDER_PATH, EMBEDDINGS_FOLDER_PATH]):
        for line in text.split('\n'):
            line = line.strip()
            if not line:
                continue
            if 'civitai' in line:
                # Civitai downloads require the API token as a query parameter
                link = line + ('&token=' if '?' in line else '?token=') + CIVITAI_TOKEN
            else:
                link = line
            dl_commands.append(DL_COMMAND.format(link=link, dl_path=dlpath.resolve().as_posix()))
    LINKS_FILE.write_text('\n'.join(dl_commands).strip(), encoding='utf-8')
    print('download list created...')
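# A generated line in links.txt looks roughly like this (illustrative link and
# destination path; the real token value comes from CIVITAI_TOKEN above):
# wget -nv -t 10 --show-progress --progress=bar:force -q --content-disposition "https://civitai.com/api/download/models/110660?token=<token>" -P "/path/to/models/Stable-diffusion"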
def get_models_paths():
    file_paths = []
    for file in MODELS_FOLDER_PATH.rglob('*'):
        if file.is_file():
            file_paths.append(file.resolve().as_posix())
    return '\n'.join(file_paths)
def del_models(inputs):
    files_to_delete = inputs.split('\n')
    for file in files_to_delete:
        if file and file != 'None':
            try:
                (MODELS_FOLDER_PATH / file).unlink()
                print(f'model deleted: {file}')
            except OSError as e:
                print(f'ERROR: {e.filename} - {e.strerror}.')
        else:
            print('nothing to delete, or nothing selected for deletion')
def downloader(command_with_url):
    # run a single wget command and yield its combined stdout/stderr line by line
    process = Popen(command_with_url, shell=True, stdout=PIPE, stderr=STDOUT)
    while True:
        output = process.stdout.readline().decode('utf-8', errors='replace')
        if output == '' and process.poll() is not None:
            break
        if output:
            yield output.strip()
    return process.poll()
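# downloader() is a generator, so the wget process only runs while the returned
# iterator is drained; parallel_download() below therefore drains each generator
# inside its own worker thread so the downloads actually run concurrently.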
def parallel_download(commands_with_urls):
    def run_and_print(command):
        # consume the downloader generator inside the worker thread
        for line in downloader(command):
            print(line)
    if not commands_with_urls:
        return
    with ThreadPoolExecutor(max_workers=len(commands_with_urls)) as executor:
        futures = [executor.submit(run_and_print, command) for command in commands_with_urls]
        for future in as_completed(futures):
            future.result()
def on_ui_tabs():
    with gr.Blocks() as models_list:
        gr.HTML(
            '<div class="models_top_container"><div class="models_top_header_text"><h1 class="models_dl_header">select and download models</h1><p>keep in mind the very limited disk space in Colab!</p></div><div class="freespaceinfo"><div id="frespace_output"><span>free space in Colab: <span id="frespace_out">press the button</span></span></div><div id="freespace_get"></div></div></div>')
        gr.HTML('<div class="ownfiles_header"><h2>here you can provide direct download links for models, LoRAs and embeddings</h2></div>')
        with gr.Row():
            plhd = 'paste each link on a new line!\nexample links:\nhttps://models.tensorplay.ai/104249\nhttps://civitai.com/api/download/models/110660\nhttps://huggingface.co/2ch/gay/resolve/main/lora/BettercocksFlaccid.safetensors'
            ownmodels = gr.Textbox(label="models", placeholder=plhd, info="direct links to Checkpoints", lines=5, elem_id="ownmodels")
            ownloras = gr.Textbox(label="LoRAs", placeholder=plhd, info="direct links to LoRAs", lines=5, elem_id="ownloras")
            ownembeddings = gr.Textbox(label="embeddings", placeholder=plhd, info="direct links to Textual Inversions", lines=5, elem_id="ownembeddings")
        download_button = gr.Button('start download', elem_id='general_download_button')
        button = gr.Button('download from links', elem_id='ownlinks_download_button')
        button.click(get_own_links, inputs=[ownmodels, ownloras, ownembeddings])
        download_button = gr.Button('download models', elem_id='checkboxes_download_button')
        def start_download():
            try:
                urls = LINKS_FILE.read_text(encoding='utf-8').splitlines()
                LINKS_FILE.unlink(missing_ok=True)
                total_file_size = get_total_file_size(urls)
                total, used, free = disk_usage(find_mount_point())
                if total_file_size <= (free - 1073741824):
                    print(f'download of {bytes_convert(total_file_size)} has started, please wait!')
                    parallel_download(urls)
                    del_null_model()
                    return 'the download function has finished!'
                else:
                    msg = f'too many files! you are trying to download {bytes_convert(total_file_size)} while only {bytes_convert(free)} is free (and at least 1 GB must remain unused on the disk!).'
                    print(msg)
                    return msg
            except Exception as e:
                print(f'ERROR: {e}')
                return f'ERROR: {e}'
        dl_result_box = gr.Textbox(label='', elem_id='dlresultbox')
        download_button.click(start_download, outputs=dl_result_box)
        gr.HTML('<div class="downloads_result_container"><div class="models_porgress_loader"></div><div id="downloads_start_text">the download task has started, see the Colab cell output for details...</div><div id="downloads_result_text"><span class="finish_dl_func"></span><span class="dl_progress_info"></span></div></div>')
        space_textbox = gr.Textbox(label="", elem_id="free_space_area")
        space_button = gr.Button("check free space", elem_id="free_space_button")
        space_button.click(fn=free_space, outputs=space_textbox)
        gr.HTML('<hr class="divider"/><div id="filemanager"><h2 class="current_models_files">model files that can be deleted to free up space:</h2><div id="files_checkbox"></div><div class="filebuttons"><div id="delete_files_button"></div><div id="refresh_files_button"></div></div></div>')
        files_textbox = gr.Textbox(label='', elem_id='files_area')
        files_button = gr.Button('installed models', elem_id='files_button')
        files_button.click(fn=get_models_paths, outputs=files_textbox)
        delete_textbox = gr.Textbox(label='', elem_id='delete_area')
        delete_button = gr.Button('delete', elem_id='delete_button')
        delete_button.click(fn=del_models, inputs=delete_textbox, outputs=delete_textbox)
    return (models_list, 'models', 'models_list'),
script_callbacks.on_ui_tabs(on_ui_tabs)