update download code & zip support & add download adetailer models
- files_cells/notebooks/en/auto-cleaner_en.ipynb +1 -10
- files_cells/notebooks/en/downloading_en.ipynb +56 -82
- files_cells/notebooks/en/launch_en.ipynb +34 -52
- files_cells/notebooks/en/widgets_en.ipynb +4 -7
- files_cells/notebooks/ru/auto-cleaner_ru.ipynb +1 -10
- files_cells/notebooks/ru/downloading_ru.ipynb +55 -81
- files_cells/notebooks/ru/launch_ru.ipynb +34 -52
- files_cells/notebooks/ru/widgets_ru.ipynb +4 -7
- files_cells/python/en/auto-cleaner_en.py +1 -10
- files_cells/python/en/downloading_en.py +55 -81
- files_cells/python/en/launch_en.py +34 -52
- files_cells/python/en/widgets_en.py +5 -8
- files_cells/python/ru/auto-cleaner_ru.py +1 -10
- files_cells/python/ru/downloading_ru.py +54 -80
- files_cells/python/ru/launch_ru.py +34 -52
- files_cells/python/ru/widgets_ru.py +4 -7
files_cells/notebooks/en/auto-cleaner_en.ipynb
CHANGED
@@ -19,14 +19,6 @@
  "source": [
  "##~ AutoCleaner V3.6 CODE | BY: ANXETY ~##\n",
  "\n",
- "# --change log--\n",
- "\"\"\"\n",
- "V3.6 | 13.03.24\n",
- "Fixed selection window\n",
- "Dynamic update of memory display\n",
- "\"\"\"\n",
- "\n",
- "\n",
  "import os\n",
  "import time\n",
  "import ipywidgets as widgets\n",
@@ -39,8 +31,7 @@
  " free_plan = (os.sysconf('SC_PAGE_SIZE') * os.sysconf('SC_PHYS_PAGES') / (1024. ** 3) <= 20)\n",
  " environments = {\n",
  " 'COLAB_GPU': ('Google Colab', \"/root\" if free_plan else \"/content\"),\n",
- " 'KAGGLE_URL_BASE': ('Kaggle', \"/kaggle/working/content\"),\n",
- " 'SAGEMAKER_INTERNAL_IMAGE_URI': ('SageMaker Studio Lab', \"/home/studio-lab-user/content\")\n",
+ " 'KAGGLE_URL_BASE': ('Kaggle', \"/kaggle/working/content\")\n",
  " }\n",
  "\n",
  " for env_var, (environment, path) in environments.items():\n",
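For context, the environment-detection cell that all of these notebooks share now reduces to a Colab/Kaggle lookup once the SageMaker entry is dropped. A minimal sketch of that logic follows; the `detect_environment` wrapper, the `os.environ` membership test and the fallback return are assumptions, since the hunk only shows the dictionary and the loop header.

    import os

    def detect_environment():
        # Total RAM <= 20 GiB is treated as a free-tier plan (drives the Colab root path).
        free_plan = (os.sysconf('SC_PAGE_SIZE') * os.sysconf('SC_PHYS_PAGES') / (1024. ** 3) <= 20)
        environments = {
            'COLAB_GPU': ('Google Colab', "/root" if free_plan else "/content"),
            'KAGGLE_URL_BASE': ('Kaggle', "/kaggle/working/content"),
        }
        for env_var, (environment, path) in environments.items():
            if env_var in os.environ:  # assumption: presence of the variable marks the platform
                return environment, path, free_plan
        return 'Unknown', '/content', free_plan  # assumption: fallback for local runs

    env, root_path, free_plan = detect_environment()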
files_cells/notebooks/en/downloading_en.ipynb
CHANGED
@@ -23,6 +23,8 @@
  "import re\n",
  "import time\n",
  "import json\n",
+ "import shutil\n",
+ "import zipfile\n",
  "import requests\n",
  "import subprocess\n",
  "from datetime import timedelta\n",
@@ -37,8 +39,7 @@
  " free_plan = (os.sysconf('SC_PAGE_SIZE') * os.sysconf('SC_PHYS_PAGES') / (1024. ** 3) <= 20)\n",
  " environments = {\n",
  " 'COLAB_GPU': ('Google Colab', \"/root\" if free_plan else \"/content\"),\n",
- " 'KAGGLE_URL_BASE': ('Kaggle', \"/kaggle/working/content\"),\n",
- " 'SAGEMAKER_INTERNAL_IMAGE_URI': ('SageMaker Studio Lab', \"/home/studio-lab-user/content\")\n",
+ " 'KAGGLE_URL_BASE': ('Kaggle', \"/kaggle/working/content\")\n",
  " }\n",
  "\n",
  " for env_var, (environment, path) in environments.items():\n",
@@ -50,21 +51,11 @@
  "# ----------------------------------------------\n",
  "\n",
  "\n",
- "# === ONLY SAGEMAKER ===\n",
- "if env == \"SageMaker Studio Lab\":\n",
- " print(\"Updating dependencies, may take some time...\")\n",
- " !pip install -q --upgrade torchsde\n",
- " !pip install -q --upgrade pip\n",
- " !pip install -q --upgrade psutil\n",
- "\n",
- " clear_output()\n",
- "\n",
- "\n",
- "# ================ LIBRARIES ================\n",
+ "# ================ LIBRARIES V2 ================\n",
  "flag_file = f\"{root_path}/libraries_installed.txt\"\n",
  "\n",
  "if not os.path.exists(flag_file):\n",
- " print(\"💿 Installing the libraries, it's going to take a while
+ " print(\"💿 Installing the libraries, it's going to take a while:\\n\")\n",
  "\n",
  " install_lib = {\n",
  " \"gdown\": \"pip install -U gdown\",\n",
@@ -81,16 +72,6 @@
  " \"Kaggle\": {\n",
  " \"xformers\": \"pip install -q xformers==0.0.23.post1 triton==2.1.0\",\n",
  " \"torch\": \"pip install -q torch==2.1.2+cu121 torchvision==0.16.2+cu121 torchaudio==2.1.2 --extra-index-url https://download.pytorch.org/whl/cu121\"\n",
- " },\n",
- " \"SageMaker Studio Lab\": {\n",
- " \"opencv\": \"pip install -q opencv-python-headless\",\n",
- " \"huggingface\": \"pip install -q huggingface-hub\",\n",
- " \"conda_update\": \"conda update -q -n base conda\",\n",
- " \"conda_aria2\": \"conda install -q -y aria2\",\n",
- " \"conda_glib\": \"conda install -q -y glib\",\n",
- " \"tensorflow\": \"pip install tensorflow\",\n",
- " \"xformers\": \"pip install -q xformers==0.0.23.post1 triton==2.1.0\",\n",
- " \"torch\": \"pip install -q torch==2.1.2+cu121 torchvision==0.16.2+cu121 torchaudio==2.1.2 --extra-index-url https://download.pytorch.org/whl/cu121\"\n",
  " }\n",
  " }\n",
  "\n",
@@ -108,7 +89,6 @@
  " !curl -s -OL https://github.com/DEX-1101/sd-webui-notebook/raw/main/res/new_tunnel --output-dir {root_path}\n",
  " !curl -s -Lo /usr/bin/cl https://github.com/cloudflare/cloudflared/releases/latest/download/cloudflared-linux-amd64 && chmod +x /usr/bin/cl\n",
  " !curl -sLO https://github.com/openziti/zrok/releases/download/v0.4.23/zrok_0.4.23_linux_amd64.tar.gz && tar -xzf zrok_0.4.23_linux_amd64.tar.gz -C /usr/bin && rm -f zrok_0.4.23_linux_amd64.tar.gz\n",
- " !wget -P /home/studio-lab-user https://huggingface.co/NagisaNao/fast_repo/resolve/main/sagemaker/FULL_DELETED_NOTEBOOK.ipynb\n",
  " del cap\n",
  "\n",
  " clear_output()\n",
@@ -156,14 +136,10 @@
  "loras_dir = f\"{webui_path}/models/Lora\"\n",
  "extensions_dir = f\"{webui_path}/extensions\"\n",
  "control_dir = f\"{webui_path}/models/ControlNet\"\n",
+ "adetailer_dir = f\"{webui_path}/models/adetailer/\"\n",
  "\n",
  "\n",
  "# ================= MAIN CODE =================\n",
- "# --- Obsolescence warning ---\n",
- "if env == \"SageMaker Studio Lab\":\n",
- " print(\"You are using the 'SageMaker' environment - this environment is outdated so many bugs will not be fixed and it will be cut in functionality. To save memory and/or to avoid bugs.\\n\\n\")\n",
- "\n",
- "\n",
  "if not os.path.exists(webui_path):\n",
  " start_install = int(time.time())\n",
  " print(\"⌚ Unpacking Stable Diffusion...\", end='')\n",
@@ -183,8 +159,6 @@
  " install_time = timedelta(seconds=time.time()-start_install)\n",
  " print(\"\\r🚀 Unpacking is complete! For\",\"%02d:%02d:%02d ⚡\\n\" % (install_time.seconds / 3600, (install_time.seconds / 60) % 60, install_time.seconds % 60), end='', flush=True)\n",
  "else:\n",
- " if env == \"SageMaker Studio Lab\":\n",
- " !echo -n {start_colab} > {webui_path}/static/colabTimer.txt\n",
  " print(\"🚀 All unpacked... Skip. ⚡\")\n",
  " start_colab = float(open(f'{webui_path}/static/colabTimer.txt', 'r').read())\n",
  " time_since_start = str(timedelta(seconds=time.time()-start_colab)).split('.')[0]\n",
@@ -207,10 +181,7 @@
  "\n",
  " ## Update extensions\n",
  " if latest_exstensions:\n",
- "
- " !{'for dir in ' + webui_path + '/extensions/*/; do cd \\\"$dir\\\" && git reset --hard && git pull; done'}\n",
- " else:\n",
- " !{'for dir in /home/studio-lab-user/content/sdw/extensions/*/; do cd \\\"$dir\\\" && git fetch origin && git pull; done'}\n",
+ " !{'for dir in ' + webui_path + '/extensions/*/; do cd \\\"$dir\\\" && git reset --hard && git pull; done'}\n",
  "\n",
  " # My Chinese friend, you broke the images again in the latest update... >W<'\n",
  " %cd {webui_path}/extensions/Encrypt-Image\n",
@@ -219,24 +190,18 @@
  " print(f\"\\r✨ {action} Completed!\")\n",
  "\n",
  "\n",
- "# === FIXING
+ "# === FIXING EXTENSIONS ===\n",
  "anxety_repos = \"https://huggingface.co/NagisaNao/fast_repo/resolve/main\"\n",
  "\n",
  "with capture.capture_output() as cap:\n",
+ " # --- Umi-Wildcard ---\n",
+ " !sed -i '521s/open=\\(False\\|True\\)/open=False/' {webui_path}/extensions/Umi-AI-Wildcards/scripts/wildcard_recursive.py # Closed accordion by default\n",
+ "\n",
  " # --- Encrypt-Image ---\n",
- " !sed -i '9,37d' {webui_path}/extensions/Encrypt-Image/javascript/encrypt_images_info.js\n",
+ " !sed -i '9,37d' {webui_path}/extensions/Encrypt-Image/javascript/encrypt_images_info.js # Removes the weird text in webui\n",
  "\n",
  " # --- Additional-Networks ---\n",
- " !wget -O {webui_path}/extensions/additional-networks/scripts/metadata_editor.py {anxety_repos}/extensions/Additional-Networks/fix/metadata_editor.py\n",
- "\n",
- " # --= SageMaker =--\n",
- " if env == \"SageMaker Studio Lab\":\n",
- " with capture.capture_output() as cap:\n",
- " # --- SuperMerger Remove ---\n",
- " if os.path.exists(f\"{webui_path}/extensions/supermerger\"):\n",
- " !rm -rf {webui_path}/extensions/supermerger\n",
- " # --- Launch (Style) ---\n",
- " !wget -O {webui_path}/modules/styles.py {anxety_repos}/sagemaker/fixing/webui/styles.py\n",
+ " !wget -O {webui_path}/extensions/additional-networks/scripts/metadata_editor.py {anxety_repos}/extensions/Additional-Networks/fix/metadata_editor.py # Fixing an error due to old style\n",
  "del cap\n",
  "\n",
  "\n",
@@ -259,8 +224,9 @@
  " {\"url\": \"https://huggingface.co/XpucT/Anime/resolve/main/Anime_v2.safetensors\", \"name\": \"Anime_v2.safetensors\"},\n",
  " {\"url\": \"https://huggingface.co/XpucT/Anime/resolve/main/Anime_v2-inpainting.safetensors\", \"name\": \"Anime_v2-inpainting.safetensors\"}\n",
  " ],\n",
- " \"2.BluMix [Anime] [V7]\": [\n",
- " {\"url\": \"https://civitai.com/api/download/models/361779\", \"name\": \"BluMix_v7.safetensors\"}\n",
+ " \"2.BluMix [Anime] [V7] + INP\": [\n",
+ " {\"url\": \"https://civitai.com/api/download/models/361779\", \"name\": \"BluMix_v7.safetensors\"},\n",
+ " {\"url\": \"https://civitai.com/api/download/models/363850\", \"name\": \"BluMix_v7-inpainting.safetensors\"}\n",
  " ],\n",
  " \"3.Cetus-Mix [Anime] [V4] + INP\": [\n",
  " {\"url\": \"https://civitai.com/api/download/models/130298\", \"name\": \"CetusMix_V4.safetensors\"},\n",
@@ -363,19 +329,18 @@
  "\n",
  "extension_repo = []\n",
  "prefixes = {\n",
- "
- " \"
- " \"
- " \"
- " \"
- " \"
- " \"
+ " \"model\": models_dir,\n",
+ " \"vae\": vaes_dir,\n",
+ " \"lora\": loras_dir,\n",
+ " \"embed\": embeddings_dir,\n",
+ " \"extension\": extensions_dir,\n",
+ " \"control\": control_dir,\n",
+ " \"adetailer\": adetailer_dir\n",
  "}\n",
  "\n",
- "!mkdir -p {models_dir} {vaes_dir} {loras_dir} {embeddings_dir} {extensions_dir} {control_dir}\n",
+ "!mkdir -p {models_dir} {vaes_dir} {loras_dir} {embeddings_dir} {extensions_dir} {control_dir} {adetailer_dir}\n",
  "\n",
  "url = \"\"\n",
- "ControlNet_url = \"\"\n",
  "hf_token = optional_huggingface_token if optional_huggingface_token else \"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\"\n",
  "user_header = f\"\\\"Authorization: Bearer {hf_token}\\\"\"\n",
  "\n",
@@ -389,7 +354,7 @@
  " if file_name:\n",
  " url = re.sub(r'\\[.*?\\]', '', url)\n",
  "\n",
- " for prefix,
+ " for prefix, dir in prefixes.items():\n",
  " if original_url.startswith(f\"{prefix}:\"):\n",
  " if prefix != \"extension\":\n",
  " manual_download(url, dir, file_name=file_name)\n",
@@ -400,7 +365,8 @@
  " basename = url.split(\"/\")[-1] if file_name is None else file_name\n",
  " header_option = f\"--header={user_header}\"\n",
  "\n",
- " print(\"\\033[32m---\"*45 + f\"\\n\\033[33mURL: \\033[34m{url}\\n\\033[33mSAVE DIR: \\033[34m{dst_dir}\\n\\033[33mFILE NAME: \\033[34m{file_name}\\033[32m\\n~~~\\033[0m\")\n",
+ " # print(\"\\033[32m---\"*45 + f\"\\n\\033[33mURL: \\033[34m{url}\\n\\033[33mSAVE DIR: \\033[34m{dst_dir}\\n\\033[33mFILE NAME: \\033[34m{file_name}\\033[32m\\n~~~\\033[0m\")\n",
+ " print(url, dst_dir, file_name)\n",
  "\n",
  " # I do it at my own risk..... Fucking CivitAi >:(\n",
  " civitai_token = \"62c0c5956b2f9defbd844d754000180b\"\n",
@@ -447,6 +413,22 @@
  " url, dst_dir, file_name = link_or_path.split()\n",
  " manual_download(url, dst_dir, file_name)\n",
  "\n",
+ " unpucking_zip_files()\n",
+ "\n",
+ "## unpucking zip files\n",
+ "def unpucking_zip_files():\n",
+ " directories = [models_dir, vaes_dir, embeddings_dir, loras_dir , extensions_dir, control_dir , adetailer_dir]\n",
+ "\n",
+ " for directory in directories:\n",
+ " for root, dirs, files in os.walk(directory):\n",
+ " for file in files:\n",
+ " if file.endswith(\".zip\"):\n",
+ " zip_path = os.path.join(root, file)\n",
+ " extract_path = os.path.splitext(zip_path)[0]\n",
+ " with zipfile.ZipFile(zip_path, 'r') as zip_ref:\n",
+ " zip_ref.extractall(extract_path)\n",
+ " os.remove(zip_path)\n",
+ "\n",
  "''' submodels - added urls '''\n",
  "\n",
  "submodels = []\n",
@@ -489,6 +471,8 @@
  "unique_urls = []\n",
  "\n",
  "def process_file_download(file_url):\n",
+ " files_urls = \"\"\n",
+ "\n",
  " if file_url.startswith(\"http\"):\n",
  " if \"blob\" in file_url:\n",
  " file_url = file_url.replace(\"blob\", \"raw\")\n",
@@ -503,13 +487,17 @@
  " if any(f'# {tag}' in line.lower() for tag in prefixes):\n",
  " current_tag = next((tag for tag in prefixes if tag in line.lower()))\n",
  "\n",
- " urls = [url.strip() for url in line.split(',')]\n",
+ " urls = [url.split('#')[0].strip() for url in line.split(',')] # filter urls\n",
  " for url in urls:\n",
  " if url.startswith(\"http\") and url not in unique_urls:\n",
- "
+ " # handle_manual(f\"{current_tag}:{url}\")\n",
+ " files_urls += f\"{current_tag}:{url}, \"\n",
  " unique_urls.append(url)\n",
  "\n",
+ " return files_urls\n",
+ "\n",
  "# fix all possible errors/options and function call\n",
+ "file_urls = \"\"\n",
  "if custom_file_urls:\n",
  " for custom_file_url in custom_file_urls.replace(',', '').split():\n",
  " if not custom_file_url.endswith('.txt'):\n",
@@ -519,17 +507,14 @@
  " custom_file_url = f'{root_path}/{custom_file_url}'\n",
  "\n",
  " try:\n",
- " process_file_download(custom_file_url)\n",
+ " file_urls += process_file_download(custom_file_url)\n",
  " except FileNotFoundError:\n",
  " pass\n",
  "\n",
  "# url prefixing\n",
- "urls = [
- "for
- "
- " prefixed_urls = [f\"{prefix}:{url}\" for url in urls[i].replace(',', '').split()]\n",
- " if prefixed_urls:\n",
- " url += \", \".join(prefixed_urls) + \", \"\n",
+ "urls = [Model_url, Vae_url, LoRA_url, Embedding_url, Extensions_url]\n",
+ "prefixed_urls = [f\"{prefix}:{url}\" for prefix, url in zip(prefixes.keys(), urls) if url]\n",
+ "url += \", \".join(prefixed_urls) + \", \" + file_urls\n",
  "\n",
  "if detailed_download == \"on\":\n",
  " print(\"\\n\\n\\033[33m# ====== Detailed Download ====== #\\n\\033[0m\")\n",
@@ -596,18 +581,7 @@
  "with capture.capture_output() as cap:\n",
  " for file in files_umi:\n",
  " !aria2c --optimize-concurrent-downloads --console-log-level=error --summary-interval=10 -j5 -x16 -s16 -k1M -c -d {save_dir_path} {file}\n",
- "del cap\n",
- "\n",
- "\n",
- "# === ONLY SAGEMAKER ===\n",
- "if env == \"SageMaker Studio Lab\":\n",
- " with capture.capture_output() as cap:\n",
- " !rm -rf /home/studio-lab-user/.conda/envs/studiolab-safemode\n",
- " !rm -rf /home/studio-lab-user/.conda/envs/sagemaker-distribution\n",
- " !rm -rf /home/studio-lab-user/.conda/pkgs/cache\n",
- " !pip cache purge\n",
- " !rm -rf ~/.cache\n",
- " del cap"
+ "del cap"
  ],
  "metadata": {
  "id": "2lJmbqrs3Mu8"
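The zip handling added above is the "zip support" half of this commit: once the queued downloads finish, each model directory is walked and any `.zip` archive is extracted into a folder named after it, then deleted. Note that the call to `unpucking_zip_files()` sits above the `def` in the cell; that works because the call only executes when the surrounding download routine runs, by which time the function exists. A minimal standalone sketch of the same logic, with the directory list passed in rather than read from notebook globals:

    import os
    import zipfile

    def unpack_zip_files(directories):
        # Extract every .zip found under the given directories, then remove the archive.
        for directory in directories:
            for root, _, files in os.walk(directory):
                for file in files:
                    if file.endswith(".zip"):
                        zip_path = os.path.join(root, file)
                        extract_path = os.path.splitext(zip_path)[0]  # folder named after the archive
                        with zipfile.ZipFile(zip_path, 'r') as zip_ref:
                            zip_ref.extractall(extract_path)
                        os.remove(zip_path)

    # In the notebook the equivalent call runs after the download loop, e.g.:
    # unpack_zip_files([models_dir, vaes_dir, embeddings_dir, loras_dir, extensions_dir, control_dir, adetailer_dir])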
files_cells/notebooks/en/launch_en.ipynb
CHANGED
@@ -37,8 +37,7 @@
  " free_plan = (os.sysconf('SC_PAGE_SIZE') * os.sysconf('SC_PHYS_PAGES') / (1024. ** 3) <= 20)\n",
  " environments = {\n",
  " 'COLAB_GPU': ('Google Colab', \"/root\" if free_plan else \"/content\"),\n",
- " 'KAGGLE_URL_BASE': ('Kaggle', \"/kaggle/working/content\"),\n",
- " 'SAGEMAKER_INTERNAL_IMAGE_URI': ('SageMaker Studio Lab', \"/home/studio-lab-user/content\")\n",
+ " 'KAGGLE_URL_BASE': ('Kaggle', \"/kaggle/working/content\")\n",
  " }\n",
  "\n",
  " for env_var, (environment, path) in environments.items():\n",
@@ -63,31 +62,30 @@
  "\n",
  "\n",
  "# ======================== TUNNEL ========================\n",
- "
- "
- "
- "
- "
- "
- "
- "
- "
- "
- "
- "
- "\n",
- "
- "\n",
- "
- "
- "
- "
- "
- "\n",
- "
- "
- "
- " tunnel.add_tunnel(command=\"zrok share public http://localhost:{port}/ --headless\", name=\"zrok\", pattern=re.compile(r\"[\\w-]+\\.share\\.zrok\\.io\"))\n",
+ "import cloudpickle as pickle\n",
+ "\n",
+ "def get_public_ip(version='ipv4'):\n",
+ " try:\n",
+ " url = f'https://api64.ipify.org?format=json&{version}=true'\n",
+ " response = requests.get(url)\n",
+ " data = response.json()\n",
+ " public_ip = data['ip']\n",
+ " return public_ip\n",
+ " except Exception as e:\n",
+ " print(f\"Error getting public {version} address:\", e)\n",
+ "\n",
+ "public_ipv4 = get_public_ip(version='ipv4')\n",
+ "\n",
+ "tunnel_class = pickle.load(open(f\"{root_path}/new_tunnel\", \"rb\"), encoding=\"utf-8\")\n",
+ "tunnel_port= 1769\n",
+ "tunnel = tunnel_class(tunnel_port)\n",
+ "tunnel.add_tunnel(command=\"cl tunnel --url localhost:{port}\", name=\"cl\", pattern=re.compile(r\"[\\w-]+\\.trycloudflare\\.com\"))\n",
+ "tunnel.add_tunnel(command=\"lt --port {port}\", name=\"lt\", pattern=re.compile(r\"[\\w-]+\\.loca\\.lt\"), note=\"Password : \" + \"\\033[32m\" + public_ipv4 + \"\\033[0m\" + \" rerun cell if 404 error.\")\n",
+ "\n",
+ "''' add zrok tunnel '''\n",
+ "if zrok_token:\n",
+ " get_ipython().system('zrok enable {zrok_token} &> /dev/null')\n",
+ " tunnel.add_tunnel(command=\"zrok share public http://localhost:{port}/ --headless\", name=\"zrok\", pattern=re.compile(r\"[\\w-]+\\.share\\.zrok\\.io\"))\n",
  "# ======================== TUNNEL ========================\n",
  "\n",
  "\n",
@@ -98,36 +96,20 @@
  "!sed -i 's/\"sd_checkpoint_hash\": \".*\"/\"sd_checkpoint_hash\": \"\"/g; s/\"sd_model_checkpoint\": \".*\"/\"sd_model_checkpoint\": \"\"/g; s/\"sd_vae\": \".*\"/\"sd_vae\": \"None\"/g' {webui_path}/config.json\n",
  "\n",
  "\n",
- "
- "
- "
- " commandline_arguments += f\" --port=1769\"\n",
+ "with tunnel:\n",
+ " %cd {webui_path}\n",
+ " commandline_arguments += f\" --port=1769\"\n",
  "\n",
- " if ngrok_token:\n",
- " commandline_arguments += ' --ngrok ' + ngrok_token\n",
- " if env != \"Google Colab\":\n",
- " commandline_arguments += f\" --encrypt-pass=1769\"\n",
- "\n",
- " !COMMANDLINE_ARGS=\"{commandline_arguments}\" python launch.py\n",
- "\n",
- " start_colab = float(open(f'{webui_path}/static/colabTimer.txt', 'r').read())\n",
- " time_since_start = str(timedelta(seconds=time.time()-start_colab)).split('.')[0]\n",
- " print(f\"\\n⌚️ \\033[0mYou have been conducting this session for - \\033[33m{time_since_start}\\033[0m\\n\\n\")\n",
- "\n",
- "else:\n",
  " if ngrok_token:\n",
- " %cd {webui_path}\n",
- "\n",
  " commandline_arguments += ' --ngrok ' + ngrok_token\n",
+ " if env != \"Google Colab\":\n",
+ " commandline_arguments += f\" --encrypt-pass=1769\"\n",
  "\n",
- "
- "\n",
- " start_colab = float(open(f'{webui_path}/static/colabTimer.txt', 'r').read())\n",
- " time_since_start = str(timedelta(seconds=time.time()-start_colab)).split('.')[0]\n",
- " print(f\"\\n⌚️ \\033[0mYou have been conducting this session for - \\033[33m{time_since_start}\\033[0m\\n\\n\")\n",
+ " !COMMANDLINE_ARGS=\"{commandline_arguments}\" python launch.py\n",
  "\n",
- "
- "
+ " start_colab = float(open(f'{webui_path}/static/colabTimer.txt', 'r').read())\n",
+ " time_since_start = str(timedelta(seconds=time.time()-start_colab)).split('.')[0]\n",
+ " print(f\"\\n⌚️ \\033[0mYou have been conducting this session for - \\033[33m{time_since_start}\\033[0m\\n\\n\")"
  ]
  }
 ]
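The rewritten launch cell replaces the per-environment branches with a single `with tunnel:` block. The tunnel object comes from a cloudpickle blob (`{root_path}/new_tunnel`, fetched by the downloading cell); only its `add_tunnel(...)` method and context-manager behaviour are visible in the diff, so the sketch below treats the rest of its interface as an assumption, and the placeholder values marked below stand in for variables set by earlier cells. The public IPv4 address is looked up via ipify only so it can be shown as the localtunnel password note.

    import re
    import requests
    import cloudpickle as pickle

    root_path = "/content"   # placeholder: set by the environment-detection cell
    zrok_token = ""          # placeholder: set by the widgets cell

    def get_public_ip(version='ipv4'):
        try:
            response = requests.get(f'https://api64.ipify.org?format=json&{version}=true')
            return response.json()['ip']
        except Exception as e:
            print(f"Error getting public {version} address:", e)

    public_ipv4 = get_public_ip()

    # The pickled Tunnel class is project-specific; constructor/port handling is assumed from usage.
    tunnel_class = pickle.load(open(f"{root_path}/new_tunnel", "rb"), encoding="utf-8")
    tunnel = tunnel_class(1769)  # the WebUI is started with --port=1769
    tunnel.add_tunnel(command="cl tunnel --url localhost:{port}", name="cl",
                      pattern=re.compile(r"[\w-]+\.trycloudflare\.com"))
    tunnel.add_tunnel(command="lt --port {port}", name="lt",
                      pattern=re.compile(r"[\w-]+\.loca\.lt"),
                      note="Password : " + public_ipv4 + " rerun cell if 404 error.")
    if zrok_token:
        get_ipython().system(f'zrok enable {zrok_token} &> /dev/null')  # f-string used here for clarity
        tunnel.add_tunnel(command="zrok share public http://localhost:{port}/ --headless",
                          name="zrok", pattern=re.compile(r"[\w-]+\.share\.zrok\.io"))

    with tunnel:
        # every registered tunnel is kept alive for the lifetime of this block;
        # the notebook runs `python launch.py` here.
        pass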
files_cells/notebooks/en/widgets_en.ipynb
CHANGED
@@ -32,8 +32,7 @@
  " free_plan = (os.sysconf('SC_PAGE_SIZE') * os.sysconf('SC_PHYS_PAGES') / (1024. ** 3) <= 20)\n",
  " environments = {\n",
  " 'COLAB_GPU': ('Google Colab', \"/root\" if free_plan else \"/content\"),\n",
- " 'KAGGLE_URL_BASE': ('Kaggle', \"/kaggle/working/content\"),\n",
- " 'SAGEMAKER_INTERNAL_IMAGE_URI': ('SageMaker Studio Lab', \"/home/studio-lab-user/content\")\n",
+ " 'KAGGLE_URL_BASE': ('Kaggle', \"/kaggle/working/content\")\n",
  " }\n",
  "\n",
  " for env_var, (environment, path) in environments.items():\n",
@@ -71,8 +70,8 @@
  "blur_fields = args.blur_fields\n",
  "\n",
  "## ---\n",
- "\"\"\" WTF
- "fix_heigh_img = \"-810px\" if env
+ "\"\"\" WTF KAGGLE - WHAT THE FUCK IS THE DIFFERENCE OF 35 PIXELS!?!?!? \"\"\"\n",
+ "fix_heigh_img = \"-810px\" if env == \"Kaggle\" else \"-775px\"\n",
  "\n",
  "\"\"\" transperent fields \"\"\"\n",
  "t_bg_alpha = \"1\" if not args.transparent else \"0.65\"\n",
@@ -468,7 +467,7 @@
  "model_header = widgets.HTML('<div class=\"header\">Model Selection<div>')\n",
  "model_options = ['none',\n",
  " '1.Anime (by XpucT) + INP',\n",
- " '2.BluMix [Anime] [V7]',\n",
+ " '2.BluMix [Anime] [V7] + INP',\n",
  " '3.Cetus-Mix [Anime] [V4] + INP',\n",
  " '4.Counterfeit [Anime] [V3] + INP',\n",
  " '5.CuteColor [Anime] [V3]',\n",
@@ -532,8 +531,6 @@
  "additional_widget_list = [additional_header, latest_changes_widget, HR, controlnet_widget, controlnet_Num_widget, commit_hash_widget, optional_huggingface_token_widget, ngrok_widget, zrok_widget, HR, commandline_arguments_widget]\n",
  "if free_plan and env == \"Google Colab\": # remove ngrok from colab\n",
  " additional_widget_list.remove(ngrok_widget)\n",
- "if env == \"SageMaker Studio Lab\": # remove zrok from sagemaker\n",
- " additional_widget_list.remove(zrok_widget)\n",
  "# ```\n",
  "all_additional_box = widgets.VBox(additional_widget_list).add_class(\"container\").add_class(\"image_3\")\n",
  "display(all_additional_box)\n",
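With the SageMaker branch gone, the widgets cell only drops the ngrok field on a free Colab plan. A small self-contained sketch of that filtering step; the widget constructors here are simplified stand-ins for the notebook's styled widgets:

    import ipywidgets as widgets
    from IPython.display import display

    env, free_plan = "Google Colab", True  # provided by the detection cell in the notebook

    ngrok_widget = widgets.Text(description='Ngrok token:')
    zrok_widget = widgets.Text(description='Zrok token:')
    additional_widget_list = [ngrok_widget, zrok_widget]

    if free_plan and env == "Google Colab":  # remove ngrok from colab
        additional_widget_list.remove(ngrok_widget)

    display(widgets.VBox(additional_widget_list))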
files_cells/notebooks/ru/auto-cleaner_ru.ipynb
CHANGED
@@ -19,14 +19,6 @@
  "source": [
  "##~ AutoCleaner V3.6 CODE | BY: ANXETY ~##\n",
  "\n",
- "# --change log--\n",
- "\"\"\"\n",
- "V3.6 | 13.03.24\n",
- "Fixed selection window\n",
- "Dynamic update of memory display\n",
- "\"\"\"\n",
- "\n",
- "\n",
  "import os\n",
  "import time\n",
  "import ipywidgets as widgets\n",
@@ -39,8 +31,7 @@
  " free_plan = (os.sysconf('SC_PAGE_SIZE') * os.sysconf('SC_PHYS_PAGES') / (1024. ** 3) <= 20)\n",
  " environments = {\n",
  " 'COLAB_GPU': ('Google Colab', \"/root\" if free_plan else \"/content\"),\n",
- " 'KAGGLE_URL_BASE': ('Kaggle', \"/kaggle/working/content\"),\n",
- " 'SAGEMAKER_INTERNAL_IMAGE_URI': ('SageMaker Studio Lab', \"/home/studio-lab-user/content\")\n",
+ " 'KAGGLE_URL_BASE': ('Kaggle', \"/kaggle/working/content\")\n",
  " }\n",
  "\n",
  " for env_var, (environment, path) in environments.items():\n",
files_cells/notebooks/ru/downloading_ru.ipynb
CHANGED
@@ -23,6 +23,8 @@
  "import re\n",
  "import time\n",
  "import json\n",
+ "import shutil\n",
+ "import zipfile\n",
  "import requests\n",
  "import subprocess\n",
  "from datetime import timedelta\n",
@@ -37,8 +39,7 @@
  " free_plan = (os.sysconf('SC_PAGE_SIZE') * os.sysconf('SC_PHYS_PAGES') / (1024. ** 3) <= 20)\n",
  " environments = {\n",
  " 'COLAB_GPU': ('Google Colab', \"/root\" if free_plan else \"/content\"),\n",
- " 'KAGGLE_URL_BASE': ('Kaggle', \"/kaggle/working/content\"),\n",
- " 'SAGEMAKER_INTERNAL_IMAGE_URI': ('SageMaker Studio Lab', \"/home/studio-lab-user/content\")\n",
+ " 'KAGGLE_URL_BASE': ('Kaggle', \"/kaggle/working/content\")\n",
  " }\n",
  "\n",
  " for env_var, (environment, path) in environments.items():\n",
@@ -50,21 +51,11 @@
  "# ----------------------------------------------\n",
  "\n",
  "\n",
- "# === ONLY SAGEMAKER ===\n",
- "if env == \"SageMaker Studio Lab\":\n",
- " print(\"Обновление зависимостей, может занять некоторое время...\")\n",
- " !pip install -q --upgrade torchsde\n",
- " !pip install -q --upgrade pip\n",
- " !pip install -q --upgrade psutil\n",
- "\n",
- " clear_output()\n",
- "\n",
- "\n",
  "# ================ LIBRARIES V2 ================\n",
  "flag_file = f\"{root_path}/libraries_installed.txt\"\n",
  "\n",
  "if not os.path.exists(flag_file):\n",
- " print(\"💿 Установка библиотек, это займет какое-то
+ " print(\"💿 Установка библиотек, это займет какое-то время:\\n\")\n",
  "\n",
  " install_lib = {\n",
  " \"gdown\": \"pip install -U gdown\",\n",
@@ -81,16 +72,6 @@
  " \"Kaggle\": {\n",
  " \"xformers\": \"pip install -q xformers==0.0.23.post1 triton==2.1.0\",\n",
  " \"torch\": \"pip install -q torch==2.1.2+cu121 torchvision==0.16.2+cu121 torchaudio==2.1.2 --extra-index-url https://download.pytorch.org/whl/cu121\"\n",
- " },\n",
- " \"SageMaker Studio Lab\": {\n",
- " \"opencv\": \"pip install -q opencv-python-headless\",\n",
- " \"huggingface\": \"pip install -q huggingface-hub\",\n",
- " \"conda_update\": \"conda update -q -n base conda\",\n",
- " \"conda_aria2\": \"conda install -q -y aria2\",\n",
- " \"conda_glib\": \"conda install -q -y glib\",\n",
- " \"tensorflow\": \"pip install tensorflow\",\n",
- " \"xformers\": \"pip install -q xformers==0.0.23.post1 triton==2.1.0\",\n",
- " \"torch\": \"pip install -q torch==2.1.2+cu121 torchvision==0.16.2+cu121 torchaudio==2.1.2 --extra-index-url https://download.pytorch.org/whl/cu121\"\n",
  " }\n",
  " }\n",
  "\n",
@@ -108,7 +89,6 @@
  " !curl -s -OL https://github.com/DEX-1101/sd-webui-notebook/raw/main/res/new_tunnel --output-dir {root_path}\n",
  " !curl -s -Lo /usr/bin/cl https://github.com/cloudflare/cloudflared/releases/latest/download/cloudflared-linux-amd64 && chmod +x /usr/bin/cl\n",
  " !curl -sLO https://github.com/openziti/zrok/releases/download/v0.4.23/zrok_0.4.23_linux_amd64.tar.gz && tar -xzf zrok_0.4.23_linux_amd64.tar.gz -C /usr/bin && rm -f zrok_0.4.23_linux_amd64.tar.gz\n",
- " !wget -P /home/studio-lab-user https://huggingface.co/NagisaNao/fast_repo/resolve/main/sagemaker/FULL_DELETED_NOTEBOOK.ipynb\n",
  " del cap\n",
  "\n",
  " clear_output()\n",
@@ -156,14 +136,10 @@
  "loras_dir = f\"{webui_path}/models/Lora\"\n",
  "extensions_dir = f\"{webui_path}/extensions\"\n",
  "control_dir = f\"{webui_path}/models/ControlNet\"\n",
+ "adetailer_dir = f\"{webui_path}/models/adetailer/\"\n",
  "\n",
  "\n",
  "# ================= MAIN CODE =================\n",
- "# --- Obsolescence warning ---\n",
- "if env == \"SageMaker Studio Lab\":\n",
- " print(\"Вы используете среду 'SageMaker' - эта среда устарела, поэтому многие ошибки не будут исправлены, а ее функциональность будет урезана. Для экономии памяти и/или во избежание ошибок.\\n\\n\")\n",
- "\n",
- "\n",
  "if not os.path.exists(webui_path):\n",
  " start_install = int(time.time())\n",
  " print(\"⌚ Распоковка Stable Diffusion...\", end='')\n",
@@ -183,8 +159,6 @@
  " install_time = timedelta(seconds=time.time()-start_install)\n",
  " print(\"\\r🚀 Распаковка Завершена! За\",\"%02d:%02d:%02d ⚡\\n\" % (install_time.seconds / 3600, (install_time.seconds / 60) % 60, install_time.seconds % 60), end='', flush=True)\n",
  "else:\n",
- " if env == \"SageMaker Studio Lab\":\n",
- " !echo -n {start_colab} > {webui_path}/static/colabTimer.txt\n",
  " print(\"🚀 Все распакованно... Пропуск. ⚡\")\n",
  " start_colab = float(open(f'{webui_path}/static/colabTimer.txt', 'r').read())\n",
  " time_since_start = str(timedelta(seconds=time.time()-start_colab)).split('.')[0]\n",
@@ -207,10 +181,7 @@
  "\n",
  " ## Update extensions\n",
  " if latest_exstensions:\n",
- "
- " !{'for dir in ' + webui_path + '/extensions/*/; do cd \\\"$dir\\\" && git reset --hard && git pull; done'}\n",
- " else:\n",
- " !{'for dir in /home/studio-lab-user/content/sdw/extensions/*/; do cd \\\"$dir\\\" && git fetch origin && git pull; done'}\n",
+ " !{'for dir in ' + webui_path + '/extensions/*/; do cd \\\"$dir\\\" && git reset --hard && git pull; done'}\n",
  "\n",
  " # My Chinese friend, you broke the images again in the latest update... >W<'\n",
  " %cd {webui_path}/extensions/Encrypt-Image\n",
@@ -219,24 +190,18 @@
  " print(f\"\\r✨ {action} Завершено!\")\n",
  "\n",
  "\n",
- "# === FIXING
+ "# === FIXING EXTENSIONS ===\n",
  "anxety_repos = \"https://huggingface.co/NagisaNao/fast_repo/resolve/main\"\n",
  "\n",
  "with capture.capture_output() as cap:\n",
+ " # --- Umi-Wildcard ---\n",
+ " !sed -i '521s/open=\\(False\\|True\\)/open=False/' {webui_path}/extensions/Umi-AI-Wildcards/scripts/wildcard_recursive.py # Closed accordion by default\n",
+ "\n",
  " # --- Encrypt-Image ---\n",
- " !sed -i '9,37d' {webui_path}/extensions/Encrypt-Image/javascript/encrypt_images_info.js\n",
+ " !sed -i '9,37d' {webui_path}/extensions/Encrypt-Image/javascript/encrypt_images_info.js # Removes the weird text in webui\n",
  "\n",
  " # --- Additional-Networks ---\n",
- " !wget -O {webui_path}/extensions/additional-networks/scripts/metadata_editor.py {anxety_repos}/extensions/Additional-Networks/fix/metadata_editor.py\n",
- "\n",
- " # --= SageMaker =--\n",
- " if env == \"SageMaker Studio Lab\":\n",
- " with capture.capture_output() as cap:\n",
- " # --- SuperMerger Remove ---\n",
- " if os.path.exists(f\"{webui_path}/extensions/supermerger\"):\n",
- " !rm -rf {webui_path}/extensions/supermerger\n",
- " # --- Launch (Style) ---\n",
- " !wget -O {webui_path}/modules/styles.py {anxety_repos}/sagemaker/fixing/webui/styles.py\n",
+ " !wget -O {webui_path}/extensions/additional-networks/scripts/metadata_editor.py {anxety_repos}/extensions/Additional-Networks/fix/metadata_editor.py # Fixing an error due to old style\n",
  "del cap\n",
  "\n",
  "\n",
@@ -259,8 +224,9 @@
  " {\"url\": \"https://huggingface.co/XpucT/Anime/resolve/main/Anime_v2.safetensors\", \"name\": \"Anime_v2.safetensors\"},\n",
  " {\"url\": \"https://huggingface.co/XpucT/Anime/resolve/main/Anime_v2-inpainting.safetensors\", \"name\": \"Anime_v2-inpainting.safetensors\"}\n",
  " ],\n",
- " \"2.BluMix [Anime] [V7]\": [\n",
- " {\"url\": \"https://civitai.com/api/download/models/361779\", \"name\": \"BluMix_v7.safetensors\"}\n",
+ " \"2.BluMix [Anime] [V7] + INP\": [\n",
+ " {\"url\": \"https://civitai.com/api/download/models/361779\", \"name\": \"BluMix_v7.safetensors\"},\n",
+ " {\"url\": \"https://civitai.com/api/download/models/363850\", \"name\": \"BluMix_v7-inpainting.safetensors\"}\n",
  " ],\n",
  " \"3.Cetus-Mix [Anime] [V4] + INP\": [\n",
  " {\"url\": \"https://civitai.com/api/download/models/130298\", \"name\": \"CetusMix_V4.safetensors\"},\n",
@@ -363,19 +329,18 @@
  "\n",
  "extension_repo = []\n",
  "prefixes = {\n",
- "
- " \"
- " \"
- " \"
- " \"
- " \"
- " \"
+ " \"model\": models_dir,\n",
+ " \"vae\": vaes_dir,\n",
+ " \"lora\": loras_dir,\n",
+ " \"embed\": embeddings_dir,\n",
+ " \"extension\": extensions_dir,\n",
+ " \"control\": control_dir,\n",
+ " \"adetailer\": adetailer_dir\n",
  "}\n",
  "\n",
- "!mkdir -p {models_dir} {vaes_dir} {loras_dir} {embeddings_dir} {extensions_dir} {control_dir}\n",
+ "!mkdir -p {models_dir} {vaes_dir} {loras_dir} {embeddings_dir} {extensions_dir} {control_dir} {adetailer_dir}\n",
  "\n",
  "url = \"\"\n",
- "ControlNet_url = \"\"\n",
  "hf_token = optional_huggingface_token if optional_huggingface_token else \"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\"\n",
  "user_header = f\"\\\"Authorization: Bearer {hf_token}\\\"\"\n",
  "\n",
@@ -389,7 +354,7 @@
  " if file_name:\n",
  " url = re.sub(r'\\[.*?\\]', '', url)\n",
  "\n",
- " for prefix,
+ " for prefix, dir in prefixes.items():\n",
  " if original_url.startswith(f\"{prefix}:\"):\n",
  " if prefix != \"extension\":\n",
  " manual_download(url, dir, file_name=file_name)\n",
@@ -400,7 +365,8 @@
  " basename = url.split(\"/\")[-1] if file_name is None else file_name\n",
  " header_option = f\"--header={user_header}\"\n",
  "\n",
- " print(\"\\033[32m---\"*45 + f\"\\n\\033[33mURL: \\033[34m{url}\\n\\033[33mSAVE DIR: \\033[34m{dst_dir}\\n\\033[33mFILE NAME: \\033[34m{file_name}\\033[32m\\n~~~\\033[0m\")\n",
+ " # print(\"\\033[32m---\"*45 + f\"\\n\\033[33mURL: \\033[34m{url}\\n\\033[33mSAVE DIR: \\033[34m{dst_dir}\\n\\033[33mFILE NAME: \\033[34m{file_name}\\033[32m\\n~~~\\033[0m\")\n",
+ " print(url, dst_dir, file_name)\n",
  "\n",
  " # I do it at my own risk..... Fucking CivitAi >:(\n",
  " civitai_token = \"62c0c5956b2f9defbd844d754000180b\"\n",
@@ -447,6 +413,22 @@
  " url, dst_dir, file_name = link_or_path.split()\n",
  " manual_download(url, dst_dir, file_name)\n",
  "\n",
+ " unpucking_zip_files()\n",
+ "\n",
+ "## unpucking zip files\n",
+ "def unpucking_zip_files():\n",
+ " directories = [models_dir, vaes_dir, embeddings_dir, loras_dir , extensions_dir, control_dir , adetailer_dir]\n",
+ "\n",
+ " for directory in directories:\n",
+ " for root, dirs, files in os.walk(directory):\n",
+ " for file in files:\n",
+ " if file.endswith(\".zip\"):\n",
+ " zip_path = os.path.join(root, file)\n",
+ " extract_path = os.path.splitext(zip_path)[0]\n",
+ " with zipfile.ZipFile(zip_path, 'r') as zip_ref:\n",
+ " zip_ref.extractall(extract_path)\n",
+ " os.remove(zip_path)\n",
+ "\n",
  "''' submodels - added urls '''\n",
  "\n",
  "submodels = []\n",
@@ -489,6 +471,8 @@
  "unique_urls = []\n",
  "\n",
  "def process_file_download(file_url):\n",
+ " files_urls = \"\"\n",
+ "\n",
  " if file_url.startswith(\"http\"):\n",
  " if \"blob\" in file_url:\n",
  " file_url = file_url.replace(\"blob\", \"raw\")\n",
@@ -503,13 +487,17 @@
  " if any(f'# {tag}' in line.lower() for tag in prefixes):\n",
  " current_tag = next((tag for tag in prefixes if tag in line.lower()))\n",
  "\n",
- " urls = [url.strip() for url in line.split(',')]\n",
+ " urls = [url.split('#')[0].strip() for url in line.split(',')] # filter urls\n",
  " for url in urls:\n",
  " if url.startswith(\"http\") and url not in unique_urls:\n",
- "
+ " # handle_manual(f\"{current_tag}:{url}\")\n",
+ " files_urls += f\"{current_tag}:{url}, \"\n",
  " unique_urls.append(url)\n",
  "\n",
+ " return files_urls\n",
+ "\n",
  "# fix all possible errors/options and function call\n",
+ "file_urls = \"\"\n",
  "if custom_file_urls:\n",
  " for custom_file_url in custom_file_urls.replace(',', '').split():\n",
  " if not custom_file_url.endswith('.txt'):\n",
@@ -519,17 +507,14 @@
  " custom_file_url = f'{root_path}/{custom_file_url}'\n",
  "\n",
  " try:\n",
- " process_file_download(custom_file_url)\n",
+ " file_urls += process_file_download(custom_file_url)\n",
  " except FileNotFoundError:\n",
  " pass\n",
  "\n",
  "# url prefixing\n",
- "urls = [
- "for
- "
- " prefixed_urls = [f\"{prefix}:{url}\" for url in urls[i].replace(',', '').split()]\n",
- " if prefixed_urls:\n",
- " url += \", \".join(prefixed_urls) + \", \"\n",
+ "urls = [Model_url, Vae_url, LoRA_url, Embedding_url, Extensions_url]\n",
+ "prefixed_urls = [f\"{prefix}:{url}\" for prefix, url in zip(prefixes.keys(), urls) if url]\n",
+ "url += \", \".join(prefixed_urls) + \", \" + file_urls\n",
  "\n",
  "if detailed_download == \"on\":\n",
  " print(\"\\n\\n\\033[33m# ====== Подробная Загрузка ====== #\\n\\033[0m\")\n",
@@ -596,18 +581,7 @@
  "with capture.capture_output() as cap:\n",
  " for file in files_umi:\n",
  " !aria2c --optimize-concurrent-downloads --console-log-level=error --summary-interval=10 -j5 -x16 -s16 -k1M -c -d {save_dir_path} {file}\n",
- "del cap\n",
- "\n",
- "\n",
- "# === ONLY SAGEMAKER ===\n",
- "if env == \"SageMaker Studio Lab\":\n",
- " with capture.capture_output() as cap:\n",
- " !rm -rf /home/studio-lab-user/.conda/envs/studiolab-safemode\n",
- " !rm -rf /home/studio-lab-user/.conda/envs/sagemaker-distribution\n",
- " !rm -rf /home/studio-lab-user/.conda/pkgs/cache\n",
- " !pip cache purge\n",
- " !rm -rf ~/.cache\n",
- " del cap"
+ "del cap"
  ],
  "metadata": {
  "id": "2lJmbqrs3Mu8"
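Both downloading cells now route every download through the same prefix table, with `adetailer` as the new key, and the widget fields are folded into the same `prefix:url` format before parsing. A rough sketch of that routing under stated assumptions: the directory values for `model`, `vae` and `embed` follow the usual A1111 layout (only the Lora, extensions, ControlNet and adetailer paths appear in the diff), and the `route` helper is hypothetical, standing in for the work the notebook does inside `manual_download`/`handle_manual`.

    import re

    webui_path = "/content/sdw"  # assumption: set by the environment cell
    prefixes = {
        "model":     f"{webui_path}/models/Stable-diffusion",  # assumed path
        "vae":       f"{webui_path}/models/VAE",               # assumed path
        "lora":      f"{webui_path}/models/Lora",
        "embed":     f"{webui_path}/embeddings",               # assumed path
        "extension": f"{webui_path}/extensions",
        "control":   f"{webui_path}/models/ControlNet",
        "adetailer": f"{webui_path}/models/adetailer",
    }

    def route(line):
        # Yield (target_dir, url, file_name) for each "prefix:url[name]" entry in a comma-separated line.
        for chunk in filter(None, (c.strip() for c in line.split(','))):
            prefix, url = chunk.split(':', 1)
            name_tag = re.search(r'\[(.*?)\]', url)
            name = name_tag.group(1) if name_tag else None
            yield prefixes[prefix], re.sub(r'\[.*?\]', '', url).strip(), name

    # Widget fields are turned into the same format; zip() pairs the first five dict keys
    # with these five fields in order, relying on dict insertion order.
    Model_url, Vae_url, LoRA_url, Embedding_url, Extensions_url = "", "", "", "", ""  # filled by the widgets cell
    urls = [Model_url, Vae_url, LoRA_url, Embedding_url, Extensions_url]
    line = ", ".join(f"{prefix}:{url}" for prefix, url in zip(prefixes.keys(), urls) if url)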
files_cells/notebooks/ru/launch_ru.ipynb
CHANGED
@@ -37,8 +37,7 @@
     "    free_plan = (os.sysconf('SC_PAGE_SIZE') * os.sysconf('SC_PHYS_PAGES') / (1024. ** 3) <= 20)\n",
     "    environments = {\n",
     "        'COLAB_GPU': ('Google Colab', \"/root\" if free_plan else \"/content\"),\n",
-    "        'KAGGLE_URL_BASE': ('Kaggle', \"/kaggle/working/content\"),\n",
-    "        'SAGEMAKER_INTERNAL_IMAGE_URI': ('SageMaker Studio Lab', \"/home/studio-lab-user/content\")\n",
+    "        'KAGGLE_URL_BASE': ('Kaggle', \"/kaggle/working/content\")\n",
     "    }\n",
     "\n",
     "    for env_var, (environment, path) in environments.items():\n",
@@ -63,31 +62,30 @@
     "\n",
     "\n",
     "# ======================== TUNNEL ========================\n",
-    "\n",
-    "\n",
-    "\n",
-    "    tunnel.add_tunnel(command=\"zrok share public http://localhost:{port}/ --headless\", name=\"zrok\", pattern=re.compile(r\"[\\w-]+\\.share\\.zrok\\.io\"))\n",
+    "import cloudpickle as pickle\n",
+    "\n",
+    "def get_public_ip(version='ipv4'):\n",
+    "    try:\n",
+    "        url = f'https://api64.ipify.org?format=json&{version}=true'\n",
+    "        response = requests.get(url)\n",
+    "        data = response.json()\n",
+    "        public_ip = data['ip']\n",
+    "        return public_ip\n",
+    "    except Exception as e:\n",
+    "        print(f\"Error getting public {version} address:\", e)\n",
+    "\n",
+    "public_ipv4 = get_public_ip(version='ipv4')\n",
+    "\n",
+    "tunnel_class = pickle.load(open(f\"{root_path}/new_tunnel\", \"rb\"), encoding=\"utf-8\")\n",
+    "tunnel_port= 1769\n",
+    "tunnel = tunnel_class(tunnel_port)\n",
+    "tunnel.add_tunnel(command=\"cl tunnel --url localhost:{port}\", name=\"cl\", pattern=re.compile(r\"[\\w-]+\\.trycloudflare\\.com\"))\n",
+    "tunnel.add_tunnel(command=\"lt --port {port}\", name=\"lt\", pattern=re.compile(r\"[\\w-]+\\.loca\\.lt\"), note=\"Password : \" + \"\\033[32m\" + public_ipv4 + \"\\033[0m\" + \" rerun cell if 404 error.\")\n",
+    "\n",
+    "''' add zrok tunnel '''\n",
+    "if zrok_token:\n",
+    "    get_ipython().system('zrok enable {zrok_token} &> /dev/null')\n",
+    "    tunnel.add_tunnel(command=\"zrok share public http://localhost:{port}/ --headless\", name=\"zrok\", pattern=re.compile(r\"[\\w-]+\\.share\\.zrok\\.io\"))\n",
     "# ======================== TUNNEL ========================\n",
     "\n",
     "\n",
@@ -98,36 +96,20 @@
     "!sed -i 's/\"sd_checkpoint_hash\": \".*\"/\"sd_checkpoint_hash\": \"\"/g; s/\"sd_model_checkpoint\": \".*\"/\"sd_model_checkpoint\": \"\"/g; s/\"sd_vae\": \".*\"/\"sd_vae\": \"None\"/g' {webui_path}/config.json\n",
     "\n",
     "\n",
-    "    commandline_arguments += f\" --port=1769\"\n",
+    "with tunnel:\n",
+    "    %cd {webui_path}\n",
+    "    commandline_arguments += f\" --port=1769\"\n",
     "\n",
-    "    if ngrok_token:\n",
-    "        commandline_arguments += ' --ngrok ' + ngrok_token\n",
-    "        if env != \"Google Colab\":\n",
-    "            commandline_arguments += f\" --encrypt-pass=1769\"\n",
-    "\n",
-    "    !COMMANDLINE_ARGS=\"{commandline_arguments}\" python launch.py\n",
-    "\n",
-    "    start_colab = float(open(f'{webui_path}/static/colabTimer.txt', 'r').read())\n",
-    "    time_since_start = str(timedelta(seconds=time.time()-start_colab)).split('.')[0]\n",
-    "    print(f\"\\n⌚️ \\033[0mВы проводите эту сессию в течение - \\033[33m{time_since_start}\\033[0m\\n\\n\")\n",
-    "\n",
-    "else:\n",
     "    if ngrok_token:\n",
-    "        %cd {webui_path}\n",
-    "\n",
     "        commandline_arguments += ' --ngrok ' + ngrok_token\n",
+    "        if env != \"Google Colab\":\n",
+    "            commandline_arguments += f\" --encrypt-pass=1769\"\n",
     "\n",
-    "\n",
-    "        start_colab = float(open(f'{webui_path}/static/colabTimer.txt', 'r').read())\n",
-    "        time_since_start = str(timedelta(seconds=time.time()-start_colab)).split('.')[0]\n",
-    "        print(f\"\\n⌚️ \\033[0mВы проводите эту сессию в течение - \\033[33m{time_since_start}\\033[0m\\n\\n\")\n",
+    "    !COMMANDLINE_ARGS=\"{commandline_arguments}\" python launch.py\n",
     "\n",
+    "    start_colab = float(open(f'{webui_path}/static/colabTimer.txt', 'r').read())\n",
+    "    time_since_start = str(timedelta(seconds=time.time()-start_colab)).split('.')[0]\n",
+    "    print(f\"\\n⌚️ \\033[0mВы проводите эту сессию в течение - \\033[33m{time_since_start}\\033[0m\\n\\n\")"
   ]
  }
]
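For reference, the tunnel block added above boils down to the flow sketched below. `new_tunnel` is the pickled helper fetched earlier in the downloading cell; only the calls visible in the diff (`tunnel_class(port)`, `add_tunnel(...)`, the `with tunnel:` context) are assumed here, nothing else about its interface:

```python
import re
import requests
import cloudpickle as pickle

def get_public_ip(version='ipv4'):
    # Only used to show the localtunnel password in the cell output.
    try:
        response = requests.get(f'https://api64.ipify.org?format=json&{version}=true')
        return response.json()['ip']
    except Exception as e:
        print(f"Error getting public {version} address:", e)

root_path = "/content"  # assumption: the Colab root path used elsewhere in the notebooks
tunnel_class = pickle.load(open(f"{root_path}/new_tunnel", "rb"), encoding="utf-8")

tunnel = tunnel_class(1769)  # the port the webui is launched on
tunnel.add_tunnel(command="cl tunnel --url localhost:{port}", name="cl",
                  pattern=re.compile(r"[\w-]+\.trycloudflare\.com"))
tunnel.add_tunnel(command="lt --port {port}", name="lt",
                  pattern=re.compile(r"[\w-]+\.loca\.lt"),
                  note="Password: " + str(get_public_ip()))

with tunnel:   # prints the public URLs and keeps them alive while the webui runs
    pass       # launch.py is started here in the real cell
```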
files_cells/notebooks/ru/widgets_ru.ipynb
CHANGED
@@ -32,8 +32,7 @@
     "    free_plan = (os.sysconf('SC_PAGE_SIZE') * os.sysconf('SC_PHYS_PAGES') / (1024. ** 3) <= 20)\n",
     "    environments = {\n",
     "        'COLAB_GPU': ('Google Colab', \"/root\" if free_plan else \"/content\"),\n",
-    "        'KAGGLE_URL_BASE': ('Kaggle', \"/kaggle/working/content\"),\n",
-    "        'SAGEMAKER_INTERNAL_IMAGE_URI': ('SageMaker Studio Lab', \"/home/studio-lab-user/content\")\n",
+    "        'KAGGLE_URL_BASE': ('Kaggle', \"/kaggle/working/content\")\n",
     "    }\n",
     "\n",
     "    for env_var, (environment, path) in environments.items():\n",
@@ -71,8 +70,8 @@
     "blur_fields = args.blur_fields\n",
     "\n",
     "## ---\n",
-    "\"\"\" WTF
-    "fix_heigh_img = \"-810px\" if env
+    "\"\"\" WTF KAGGLE - WHAT THE FUCK IS THE DIFFERENCE OF 35 PIXELS!?!?!? \"\"\"\n",
+    "fix_heigh_img = \"-810px\" if env == \"Kaggle\" else \"-775px\"\n",
     "\n",
     "\"\"\" transperent fields \"\"\"\n",
     "t_bg_alpha = \"1\" if not args.transparent else \"0.65\"\n",
@@ -468,7 +467,7 @@
     "model_header = widgets.HTML('<div class=\"header\">Выбор Модели<div>')\n",
     "model_options = ['none',\n",
     "                 '1.Anime (by XpucT) + INP',\n",
-    "                 '2.BluMix [Anime] [V7]',\n",
+    "                 '2.BluMix [Anime] [V7] + INP',\n",
     "                 '3.Cetus-Mix [Anime] [V4] + INP',\n",
     "                 '4.Counterfeit [Anime] [V3] + INP',\n",
     "                 '5.CuteColor [Anime] [V3]',\n",
@@ -532,8 +531,6 @@
     "additional_widget_list = [additional_header, latest_changes_widget, HR, controlnet_widget, controlnet_Num_widget, commit_hash_widget, optional_huggingface_token_widget, ngrok_widget, zrok_widget, HR, commandline_arguments_widget]\n",
     "if free_plan and env == \"Google Colab\": # remove ngrok from colab\n",
     "    additional_widget_list.remove(ngrok_widget)\n",
-    "if env == \"SageMaker Studio Lab\": # remove zrok from sagemaker\n",
-    "    additional_widget_list.remove(zrok_widget)\n",
     "# ```\n",
     "all_additional_box = widgets.VBox(additional_widget_list).add_class(\"container\").add_class(\"image_3\")\n",
     "display(all_additional_box)\n",
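The renamed dropdown entry only works if it matches the key used in `model_list` inside the downloading cell; otherwise the selection silently downloads nothing. A hedged sketch of that lookup, with `model_list` abbreviated to the entry touched by this commit:

```python
# Illustrative only: the full model_list and the dropdown come from the real cells.
model_list = {
    "2.BluMix [Anime] [V7] + INP": [
        {"url": "https://civitai.com/api/download/models/361779", "name": "BluMix_v7.safetensors"},
        {"url": "https://civitai.com/api/download/models/363850", "name": "BluMix_v7-inpainting.safetensors"},
    ],
}

selected = "2.BluMix [Anime] [V7] + INP"   # value coming from the model selection widget
for item in model_list.get(selected, []):  # empty list if the labels ever drift apart
    print(item["url"], "->", item["name"])
```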
files_cells/python/en/auto-cleaner_en.py
CHANGED
@@ -1,13 +1,5 @@
 ##~ AutoCleaner V3.6 CODE | BY: ANXETY ~##
 
-# --change log--
-"""
-V3.6 | 13.03.24
-Fixed selection window
-Dynamic update of memory display
-"""
-
-
 import os
 import time
 import ipywidgets as widgets
@@ -20,8 +12,7 @@ def detect_environment():
     free_plan = (os.sysconf('SC_PAGE_SIZE') * os.sysconf('SC_PHYS_PAGES') / (1024. ** 3) <= 20)
     environments = {
         'COLAB_GPU': ('Google Colab', "/root" if free_plan else "/content"),
-        'KAGGLE_URL_BASE': ('Kaggle', "/kaggle/working/content"),
-        'SAGEMAKER_INTERNAL_IMAGE_URI': ('SageMaker Studio Lab', "/home/studio-lab-user/content")
+        'KAGGLE_URL_BASE': ('Kaggle', "/kaggle/working/content")
     }
 
     for env_var, (environment, path) in environments.items():
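After this change `detect_environment()` only distinguishes Google Colab and Kaggle. A sketch of the whole function as it likely reads now; the loop body and the failure branch are assumptions, only the dictionary and the `free_plan` check are taken from the diff:

```python
import os

def detect_environment():
    # Colab's free tier exposes roughly 20 GB of RAM or less; used to pick the root path.
    free_plan = (os.sysconf('SC_PAGE_SIZE') * os.sysconf('SC_PHYS_PAGES') / (1024. ** 3) <= 20)
    environments = {
        'COLAB_GPU': ('Google Colab', "/root" if free_plan else "/content"),
        'KAGGLE_URL_BASE': ('Kaggle', "/kaggle/working/content")
    }
    for env_var, (environment, path) in environments.items():
        if env_var in os.environ:        # assumption: membership test on os.environ
            return environment, path
    raise RuntimeError("Unsupported environment")  # assumption: behaviour outside Colab/Kaggle

env, root_path = detect_environment()
```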
files_cells/python/en/downloading_en.py
CHANGED
@@ -4,6 +4,8 @@ import os
 import re
 import time
 import json
+import shutil
+import zipfile
 import requests
 import subprocess
 from datetime import timedelta
@@ -18,8 +20,7 @@ def detect_environment():
     free_plan = (os.sysconf('SC_PAGE_SIZE') * os.sysconf('SC_PHYS_PAGES') / (1024. ** 3) <= 20)
     environments = {
         'COLAB_GPU': ('Google Colab', "/root" if free_plan else "/content"),
-        'KAGGLE_URL_BASE': ('Kaggle', "/kaggle/working/content"),
-        'SAGEMAKER_INTERNAL_IMAGE_URI': ('SageMaker Studio Lab', "/home/studio-lab-user/content")
+        'KAGGLE_URL_BASE': ('Kaggle', "/kaggle/working/content")
     }
 
     for env_var, (environment, path) in environments.items():
@@ -31,21 +32,11 @@ webui_path = f"{root_path}/sdw"
 # ----------------------------------------------
 
 
-# === ONLY SAGEMAKER ===
-if env == "SageMaker Studio Lab":
-    print("Updating dependencies, may take some time...")
-    get_ipython().system('pip install -q --upgrade torchsde')
-    get_ipython().system('pip install -q --upgrade pip')
-    get_ipython().system('pip install -q --upgrade psutil')
-
-    clear_output()
-
-
-# ================ LIBRARIES ================
+# ================ LIBRARIES V2 ================
 flag_file = f"{root_path}/libraries_installed.txt"
 
 if not os.path.exists(flag_file):
-    print("💿 Installing the libraries, it's going to take a while
+    print("💿 Installing the libraries, it's going to take a while:\n")
 
     install_lib = {
         "gdown": "pip install -U gdown",
@@ -62,16 +53,6 @@ if not os.path.exists(flag_file):
         "Kaggle": {
             "xformers": "pip install -q xformers==0.0.23.post1 triton==2.1.0",
             "torch": "pip install -q torch==2.1.2+cu121 torchvision==0.16.2+cu121 torchaudio==2.1.2 --extra-index-url https://download.pytorch.org/whl/cu121"
-        },
-        "SageMaker Studio Lab": {
-            "opencv": "pip install -q opencv-python-headless",
-            "huggingface": "pip install -q huggingface-hub",
-            "conda_update": "conda update -q -n base conda",
-            "conda_aria2": "conda install -q -y aria2",
-            "conda_glib": "conda install -q -y glib",
-            "tensorflow": "pip install tensorflow",
-            "xformers": "pip install -q xformers==0.0.23.post1 triton==2.1.0",
-            "torch": "pip install -q torch==2.1.2+cu121 torchvision==0.16.2+cu121 torchaudio==2.1.2 --extra-index-url https://download.pytorch.org/whl/cu121"
         }
     }
 
@@ -89,7 +70,6 @@ if not os.path.exists(flag_file):
         get_ipython().system('curl -s -OL https://github.com/DEX-1101/sd-webui-notebook/raw/main/res/new_tunnel --output-dir {root_path}')
         get_ipython().system('curl -s -Lo /usr/bin/cl https://github.com/cloudflare/cloudflared/releases/latest/download/cloudflared-linux-amd64 && chmod +x /usr/bin/cl')
         get_ipython().system('curl -sLO https://github.com/openziti/zrok/releases/download/v0.4.23/zrok_0.4.23_linux_amd64.tar.gz && tar -xzf zrok_0.4.23_linux_amd64.tar.gz -C /usr/bin && rm -f zrok_0.4.23_linux_amd64.tar.gz')
-        get_ipython().system('wget -P /home/studio-lab-user https://huggingface.co/NagisaNao/fast_repo/resolve/main/sagemaker/FULL_DELETED_NOTEBOOK.ipynb')
     del cap
 
     clear_output()
@@ -137,14 +117,10 @@ embeddings_dir = f"{webui_path}/embeddings"
 loras_dir = f"{webui_path}/models/Lora"
 extensions_dir = f"{webui_path}/extensions"
 control_dir = f"{webui_path}/models/ControlNet"
+adetailer_dir = f"{webui_path}/models/adetailer/"
 
 
 # ================= MAIN CODE =================
-# --- Obsolescence warning ---
-if env == "SageMaker Studio Lab":
-    print("You are using the 'SageMaker' environment - this environment is outdated so many bugs will not be fixed and it will be cut in functionality. To save memory and/or to avoid bugs.\n\n")
-
-
 if not os.path.exists(webui_path):
     start_install = int(time.time())
     print("⌚ Unpacking Stable Diffusion...", end='')
@@ -164,8 +140,6 @@ if not os.path.exists(webui_path):
     install_time = timedelta(seconds=time.time()-start_install)
     print("\r🚀 Unpacking is complete! For","%02d:%02d:%02d ⚡\n" % (install_time.seconds / 3600, (install_time.seconds / 60) % 60, install_time.seconds % 60), end='', flush=True)
 else:
-    if env == "SageMaker Studio Lab":
-        get_ipython().system('echo -n {start_colab} > {webui_path}/static/colabTimer.txt')
     print("🚀 All unpacked... Skip. ⚡")
     start_colab = float(open(f'{webui_path}/static/colabTimer.txt', 'r').read())
     time_since_start = str(timedelta(seconds=time.time()-start_colab)).split('.')[0]
@@ -188,10 +162,7 @@ if latest_webui or latest_exstensions:
 
     ## Update extensions
     if latest_exstensions:
-        get_ipython().system('{\'for dir in \' + webui_path + \'/extensions/*/; do cd \\"$dir\\" && git reset --hard && git pull; done\'}')
-    else:
-        get_ipython().system('{\'for dir in /home/studio-lab-user/content/sdw/extensions/*/; do cd \\"$dir\\" && git fetch origin && git pull; done\'}')
+        get_ipython().system('{\'for dir in \' + webui_path + \'/extensions/*/; do cd \\"$dir\\" && git reset --hard && git pull; done\'}')
 
     # My Chinese friend, you broke the images again in the latest update... >W<'
     get_ipython().run_line_magic('cd', '{webui_path}/extensions/Encrypt-Image')
@@ -200,24 +171,18 @@ if latest_webui or latest_exstensions:
     print(f"\r✨ {action} Completed!")
 
 
-# === FIXING
+# === FIXING EXTENSIONS ===
 anxety_repos = "https://huggingface.co/NagisaNao/fast_repo/resolve/main"
 
 with capture.capture_output() as cap:
+    # --- Umi-Wildcard ---
+    get_ipython().system("sed -i '521s/open=\\(False\\|True\\)/open=False/' {webui_path}/extensions/Umi-AI-Wildcards/scripts/wildcard_recursive.py # Closed accordion by default")
+
    # --- Encrypt-Image ---
-    get_ipython().system("sed -i '9,37d' {webui_path}/extensions/Encrypt-Image/javascript/encrypt_images_info.js")
+    get_ipython().system("sed -i '9,37d' {webui_path}/extensions/Encrypt-Image/javascript/encrypt_images_info.js # Removes the weird text in webui")
 
     # --- Additional-Networks ---
-    get_ipython().system('wget -O {webui_path}/extensions/additional-networks/scripts/metadata_editor.py {anxety_repos}/extensions/Additional-Networks/fix/metadata_editor.py')
-
-    # --= SageMaker =--
-    if env == "SageMaker Studio Lab":
-        with capture.capture_output() as cap:
-            # --- SuperMerger Remove ---
-            if os.path.exists(f"{webui_path}/extensions/supermerger"):
-                get_ipython().system('rm -rf {webui_path}/extensions/supermerger')
-            # --- Launch (Style) ---
-            get_ipython().system('wget -O {webui_path}/modules/styles.py {anxety_repos}/sagemaker/fixing/webui/styles.py')
+    get_ipython().system('wget -O {webui_path}/extensions/additional-networks/scripts/metadata_editor.py {anxety_repos}/extensions/Additional-Networks/fix/metadata_editor.py # Fixing an error due to old style')
 del cap
 
 
@@ -240,8 +205,9 @@ model_list = {
         {"url": "https://huggingface.co/XpucT/Anime/resolve/main/Anime_v2.safetensors", "name": "Anime_v2.safetensors"},
         {"url": "https://huggingface.co/XpucT/Anime/resolve/main/Anime_v2-inpainting.safetensors", "name": "Anime_v2-inpainting.safetensors"}
     ],
-    "2.BluMix [Anime] [V7]": [
-        {"url": "https://civitai.com/api/download/models/361779", "name": "BluMix_v7.safetensors"}
+    "2.BluMix [Anime] [V7] + INP": [
+        {"url": "https://civitai.com/api/download/models/361779", "name": "BluMix_v7.safetensors"},
+        {"url": "https://civitai.com/api/download/models/363850", "name": "BluMix_v7-inpainting.safetensors"}
     ],
     "3.Cetus-Mix [Anime] [V4] + INP": [
         {"url": "https://civitai.com/api/download/models/130298", "name": "CetusMix_V4.safetensors"},
@@ -344,19 +310,18 @@ controlnet_list = {
 
 extension_repo = []
 prefixes = {
+    "model": models_dir,
+    "vae": vaes_dir,
+    "lora": loras_dir,
+    "embed": embeddings_dir,
+    "extension": extensions_dir,
+    "control": control_dir,
+    "adetailer": adetailer_dir
 }
 
-get_ipython().system('mkdir -p {models_dir} {vaes_dir} {loras_dir} {embeddings_dir} {extensions_dir} {control_dir}')
+get_ipython().system('mkdir -p {models_dir} {vaes_dir} {loras_dir} {embeddings_dir} {extensions_dir} {control_dir} {adetailer_dir}')
 
 url = ""
-ControlNet_url = ""
 hf_token = optional_huggingface_token if optional_huggingface_token else "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"
 user_header = f"\"Authorization: Bearer {hf_token}\""
 
@@ -370,7 +335,7 @@ def handle_manual(url):
     if file_name:
         url = re.sub(r'\[.*?\]', '', url)
 
-    for prefix,
+    for prefix, dir in prefixes.items():
         if original_url.startswith(f"{prefix}:"):
             if prefix != "extension":
                 manual_download(url, dir, file_name=file_name)
@@ -381,7 +346,8 @@ def manual_download(url, dst_dir, file_name):
     basename = url.split("/")[-1] if file_name is None else file_name
     header_option = f"--header={user_header}"
 
-    print("\033[32m---"*45 + f"\n\033[33mURL: \033[34m{url}\n\033[33mSAVE DIR: \033[34m{dst_dir}\n\033[33mFILE NAME: \033[34m{file_name}\033[32m\n~~~\033[0m")
+    # print("\033[32m---"*45 + f"\n\033[33mURL: \033[34m{url}\n\033[33mSAVE DIR: \033[34m{dst_dir}\n\033[33mFILE NAME: \033[34m{file_name}\033[32m\n~~~\033[0m")
+    print(url, dst_dir, file_name)
 
     # I do it at my own risk..... Fucking CivitAi >:(
     civitai_token = "62c0c5956b2f9defbd844d754000180b"
@@ -428,6 +394,22 @@ def download(url):
         url, dst_dir, file_name = link_or_path.split()
         manual_download(url, dst_dir, file_name)
 
+    unpucking_zip_files()
+
+## unpucking zip files
+def unpucking_zip_files():
+    directories = [models_dir, vaes_dir, embeddings_dir, loras_dir, extensions_dir, control_dir, adetailer_dir]
+
+    for directory in directories:
+        for root, dirs, files in os.walk(directory):
+            for file in files:
+                if file.endswith(".zip"):
+                    zip_path = os.path.join(root, file)
+                    extract_path = os.path.splitext(zip_path)[0]
+                    with zipfile.ZipFile(zip_path, 'r') as zip_ref:
+                        zip_ref.extractall(extract_path)
+                    os.remove(zip_path)
+
 ''' submodels - added urls '''
 
 submodels = []
@@ -470,6 +452,8 @@ for submodel in submodels:
 unique_urls = []
 
 def process_file_download(file_url):
+    files_urls = ""
+
     if file_url.startswith("http"):
         if "blob" in file_url:
             file_url = file_url.replace("blob", "raw")
@@ -484,13 +468,17 @@ def process_file_download(file_url):
             if any(f'# {tag}' in line.lower() for tag in prefixes):
                 current_tag = next((tag for tag in prefixes if tag in line.lower()))
 
-            urls = [url.strip() for url in line.split(',')]
+            urls = [url.split('#')[0].strip() for url in line.split(',')]  # filter urls
             for url in urls:
                 if url.startswith("http") and url not in unique_urls:
+                    # handle_manual(f"{current_tag}:{url}")
+                    files_urls += f"{current_tag}:{url}, "
                     unique_urls.append(url)
 
+    return files_urls
+
 # fix all possible errors/options and function call
+file_urls = ""
 if custom_file_urls:
     for custom_file_url in custom_file_urls.replace(',', '').split():
         if not custom_file_url.endswith('.txt'):
@@ -500,17 +488,14 @@ if custom_file_urls:
             custom_file_url = f'{root_path}/{custom_file_url}'
 
         try:
-            process_file_download(custom_file_url)
+            file_urls += process_file_download(custom_file_url)
         except FileNotFoundError:
            pass
 
 # url prefixing
-urls = [
-for
-    prefixed_urls = [f"{prefix}:{url}" for url in urls[i].replace(',', '').split()]
-    if prefixed_urls:
-        url += ", ".join(prefixed_urls) + ", "
+urls = [Model_url, Vae_url, LoRA_url, Embedding_url, Extensions_url]
+prefixed_urls = [f"{prefix}:{url}" for prefix, url in zip(prefixes.keys(), urls) if url]
+url += ", ".join(prefixed_urls) + ", " + file_urls
 
 if detailed_download == "on":
     print("\n\n\033[33m# ====== Detailed Download ====== #\n\033[0m")
@@ -576,14 +561,3 @@ with capture.capture_output() as cap:
         get_ipython().system('aria2c --optimize-concurrent-downloads --console-log-level=error --summary-interval=10 -j5 -x16 -s16 -k1M -c -d {save_dir_path} {file}')
 del cap
 
-
-# === ONLY SAGEMAKER ===
-if env == "SageMaker Studio Lab":
-    with capture.capture_output() as cap:
-        get_ipython().system('rm -rf /home/studio-lab-user/.conda/envs/studiolab-safemode')
-        get_ipython().system('rm -rf /home/studio-lab-user/.conda/envs/sagemaker-distribution')
-        get_ipython().system('rm -rf /home/studio-lab-user/.conda/pkgs/cache')
-        get_ipython().system('pip cache purge')
-        get_ipython().system('rm -rf ~/.cache')
-        del cap
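The `prefixes` map and the reworked `process_file_download()` above define a small text format for batch downloads: a `# tag` header line selects the target directory, and the comma-separated URLs under it are rewritten into `tag:url` pairs that `handle_manual()` later routes to `models_dir`, `adetailer_dir`, and so on. A hypothetical `custom.txt` and the string it would produce (file contents and URLs are invented for illustration):

```python
# custom.txt (hypothetical contents):
#
#   # model
#   https://example.com/modelA.safetensors, https://example.com/modelB.safetensors
#   # adetailer
#   https://example.com/face_yolov8n.pt
#
# process_file_download() would then return something like:
file_urls = ("model:https://example.com/modelA.safetensors, "
             "model:https://example.com/modelB.safetensors, "
             "adetailer:https://example.com/face_yolov8n.pt, ")
# This string is appended to the main `url` variable and split back into
# (prefix, download URL) pairs, each saved into the directory mapped in `prefixes`.
```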
files_cells/python/en/launch_en.py
CHANGED
@@ -13,8 +13,7 @@ def detect_environment():
     free_plan = (os.sysconf('SC_PAGE_SIZE') * os.sysconf('SC_PHYS_PAGES') / (1024. ** 3) <= 20)
     environments = {
         'COLAB_GPU': ('Google Colab', "/root" if free_plan else "/content"),
-        'KAGGLE_URL_BASE': ('Kaggle', "/kaggle/working/content"),
-        'SAGEMAKER_INTERNAL_IMAGE_URI': ('SageMaker Studio Lab', "/home/studio-lab-user/content")
+        'KAGGLE_URL_BASE': ('Kaggle', "/kaggle/working/content")
     }
 
     for env_var, (environment, path) in environments.items():
@@ -39,31 +38,30 @@ commandline_arguments = settings['commandline_arguments']
 
 
 # ======================== TUNNEL ========================
-    tunnel.add_tunnel(command="zrok share public http://localhost:{port}/ --headless", name="zrok", pattern=re.compile(r"[\w-]+\.share\.zrok\.io"))
+import cloudpickle as pickle
+
+def get_public_ip(version='ipv4'):
+    try:
+        url = f'https://api64.ipify.org?format=json&{version}=true'
+        response = requests.get(url)
+        data = response.json()
+        public_ip = data['ip']
+        return public_ip
+    except Exception as e:
+        print(f"Error getting public {version} address:", e)
+
+public_ipv4 = get_public_ip(version='ipv4')
+
+tunnel_class = pickle.load(open(f"{root_path}/new_tunnel", "rb"), encoding="utf-8")
+tunnel_port= 1769
+tunnel = tunnel_class(tunnel_port)
+tunnel.add_tunnel(command="cl tunnel --url localhost:{port}", name="cl", pattern=re.compile(r"[\w-]+\.trycloudflare\.com"))
+tunnel.add_tunnel(command="lt --port {port}", name="lt", pattern=re.compile(r"[\w-]+\.loca\.lt"), note="Password : " + "\033[32m" + public_ipv4 + "\033[0m" + " rerun cell if 404 error.")
+
+''' add zrok tunnel '''
+if zrok_token:
+    get_ipython().system('zrok enable {zrok_token} &> /dev/null')
+    tunnel.add_tunnel(command="zrok share public http://localhost:{port}/ --headless", name="zrok", pattern=re.compile(r"[\w-]+\.share\.zrok\.io"))
 # ======================== TUNNEL ========================
 
 
@@ -74,34 +72,18 @@ get_ipython().system('sed -i \'s#"additional_networks_extra_lora_path": ".*model
 get_ipython().system('sed -i \'s/"sd_checkpoint_hash": ".*"/"sd_checkpoint_hash": ""/g; s/"sd_model_checkpoint": ".*"/"sd_model_checkpoint": ""/g; s/"sd_vae": ".*"/"sd_vae": "None"/g\' {webui_path}/config.json')
 
 
-    commandline_arguments += f" --port=1769"
+with tunnel:
+    get_ipython().run_line_magic('cd', '{webui_path}')
+    commandline_arguments += f" --port=1769"
 
-    if ngrok_token:
-        commandline_arguments += ' --ngrok ' + ngrok_token
-        if env != "Google Colab":
-            commandline_arguments += f" --encrypt-pass=1769"
-
-    get_ipython().system('COMMANDLINE_ARGS="{commandline_arguments}" python launch.py')
-
-    start_colab = float(open(f'{webui_path}/static/colabTimer.txt', 'r').read())
-    time_since_start = str(timedelta(seconds=time.time()-start_colab)).split('.')[0]
-    print(f"\n⌚️ \033[0mYou have been conducting this session for - \033[33m{time_since_start}\033[0m\n\n")
-
-else:
     if ngrok_token:
-        get_ipython().run_line_magic('cd', '{webui_path}')
-
         commandline_arguments += ' --ngrok ' + ngrok_token
+        if env != "Google Colab":
+            commandline_arguments += f" --encrypt-pass=1769"
 
-        start_colab = float(open(f'{webui_path}/static/colabTimer.txt', 'r').read())
-        time_since_start = str(timedelta(seconds=time.time()-start_colab)).split('.')[0]
-        print(f"\n⌚️ \033[0mYou have been conducting this session for - \033[33m{time_since_start}\033[0m\n\n")
+    get_ipython().system('COMMANDLINE_ARGS="{commandline_arguments}" python launch.py')
 
+    start_colab = float(open(f'{webui_path}/static/colabTimer.txt', 'r').read())
+    time_since_start = str(timedelta(seconds=time.time()-start_colab)).split('.')[0]
+    print(f"\n⌚️ \033[0mYou have been conducting this session for - \033[33m{time_since_start}\033[0m\n\n")
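The launch script ends up assembling a single argument string and then runs `launch.py` inside the tunnel context. A short sketch of how the pieces visible in the diff combine; the base arguments and the ngrok token below are placeholders:

```python
commandline_arguments = "--xformers --no-half-vae"   # placeholder for the user's settings value
ngrok_token = ""                                     # placeholder
env = "Kaggle"                                       # value from detect_environment()

commandline_arguments += " --port=1769"
if ngrok_token:
    commandline_arguments += ' --ngrok ' + ngrok_token
    if env != "Google Colab":
        commandline_arguments += " --encrypt-pass=1769"

# The real cell then executes, inside `with tunnel:`:
#   COMMANDLINE_ARGS="{commandline_arguments}" python launch.py
print(commandline_arguments)
```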
files_cells/python/en/widgets_en.py
CHANGED
@@ -13,8 +13,7 @@ def detect_environment():
     free_plan = (os.sysconf('SC_PAGE_SIZE') * os.sysconf('SC_PHYS_PAGES') / (1024. ** 3) <= 20)
     environments = {
         'COLAB_GPU': ('Google Colab', "/root" if free_plan else "/content"),
-        'KAGGLE_URL_BASE': ('Kaggle', "/kaggle/working/content"),
-        'SAGEMAKER_INTERNAL_IMAGE_URI': ('SageMaker Studio Lab', "/home/studio-lab-user/content")
+        'KAGGLE_URL_BASE': ('Kaggle', "/kaggle/working/content")
     }
 
     for env_var, (environment, path) in environments.items():
@@ -38,7 +37,7 @@ parser.add_argument('-b', '--blur', type=str, help='Blur level for the image', m
 parser.add_argument('-y', type=int, help='Y coordinate for the image in px', metavar='', default=0)
 parser.add_argument('-x', type=int, help='X coordinate for the image in px', metavar='', default=0)
 parser.add_argument('-s', '--scale', type=int, help='Scale image in %%', metavar='', default=100)
-parser.add_argument('-m', '--mode',action='store_true', help='Removes repetitive image tiles')
+parser.add_argument('-m', '--mode', action='store_true', help='Removes repetitive image tiles')
 parser.add_argument('-t', '--transparent', action='store_true', help='Makes input/selection fields 35%% more transparent')
 parser.add_argument('-bf', '--blur-fields', type=str, help='Background blur level for input/selection fields', metavar='', default=2)
 args = parser.parse_args()
@@ -52,8 +51,8 @@ scale_img = args.scale
 blur_fields = args.blur_fields
 
 ## ---
-""" WTF
-fix_heigh_img = "-810px" if env
+""" WTF KAGGLE - WHAT THE FUCK IS THE DIFFERENCE OF 35 PIXELS!?!?!? """
+fix_heigh_img = "-810px" if env == "Kaggle" else "-775px"
 
 """ transperent fields """
 t_bg_alpha = "1" if not args.transparent else "0.65"
@@ -449,7 +448,7 @@ HR = widgets.HTML('<hr>')
 model_header = widgets.HTML('<div class="header">Model Selection<div>')
 model_options = ['none',
                  '1.Anime (by XpucT) + INP',
-                 '2.BluMix [Anime] [V7]',
+                 '2.BluMix [Anime] [V7] + INP',
                  '3.Cetus-Mix [Anime] [V4] + INP',
                  '4.Counterfeit [Anime] [V3] + INP',
                  '5.CuteColor [Anime] [V3]',
@@ -513,8 +512,6 @@ commandline_arguments_widget = widgets.Text(description='Arguments:', value=comm
 additional_widget_list = [additional_header, latest_changes_widget, HR, controlnet_widget, controlnet_Num_widget, commit_hash_widget, optional_huggingface_token_widget, ngrok_widget, zrok_widget, HR, commandline_arguments_widget]
 if free_plan and env == "Google Colab": # remove ngrok from colab
     additional_widget_list.remove(ngrok_widget)
-if env == "SageMaker Studio Lab": # remove zrok from sagemaker
-    additional_widget_list.remove(zrok_widget)
 # ```
 all_additional_box = widgets.VBox(additional_widget_list).add_class("container").add_class("image_3")
 display(all_additional_box)
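The two style tweaks above feed values into the injected CSS: the background offset now keys off the Kaggle environment, and the field opacity off the `-t` flag. A minimal reproduction of that interplay; parsing an empty argument list stands in for running the widget script without flags:

```python
import argparse

parser = argparse.ArgumentParser()
parser.add_argument('-m', '--mode', action='store_true', help='Removes repetitive image tiles')
parser.add_argument('-t', '--transparent', action='store_true',
                    help='Makes input/selection fields 35%% more transparent')
args = parser.parse_args([])          # notebook-style run: no real CLI arguments

env = "Kaggle"                        # value returned by detect_environment()
fix_heigh_img = "-810px" if env == "Kaggle" else "-775px"   # background offset per platform
t_bg_alpha = "1" if not args.transparent else "0.65"        # field opacity used in the CSS
print(fix_heigh_img, t_bg_alpha)
```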
files_cells/python/ru/auto-cleaner_ru.py
CHANGED
@@ -1,13 +1,5 @@
 ##~ AutoCleaner V3.6 CODE | BY: ANXETY ~##
 
-# --change log--
-"""
-V3.6 | 13.03.24
-Fixed selection window
-Dynamic update of memory display
-"""
-
-
 import os
 import time
 import ipywidgets as widgets
@@ -20,8 +12,7 @@ def detect_environment():
     free_plan = (os.sysconf('SC_PAGE_SIZE') * os.sysconf('SC_PHYS_PAGES') / (1024. ** 3) <= 20)
     environments = {
         'COLAB_GPU': ('Google Colab', "/root" if free_plan else "/content"),
-        'KAGGLE_URL_BASE': ('Kaggle', "/kaggle/working/content"),
-        'SAGEMAKER_INTERNAL_IMAGE_URI': ('SageMaker Studio Lab', "/home/studio-lab-user/content")
+        'KAGGLE_URL_BASE': ('Kaggle', "/kaggle/working/content")
     }
 
     for env_var, (environment, path) in environments.items():
files_cells/python/ru/downloading_ru.py
CHANGED
@@ -4,6 +4,8 @@ import os
|
|
4 |
import re
|
5 |
import time
|
6 |
import json
|
|
|
|
|
7 |
import requests
|
8 |
import subprocess
|
9 |
from datetime import timedelta
|
@@ -18,8 +20,7 @@ def detect_environment():
|
|
18 |
free_plan = (os.sysconf('SC_PAGE_SIZE') * os.sysconf('SC_PHYS_PAGES') / (1024. ** 3) <= 20)
|
19 |
environments = {
|
20 |
'COLAB_GPU': ('Google Colab', "/root" if free_plan else "/content"),
|
21 |
-
'KAGGLE_URL_BASE': ('Kaggle', "/kaggle/working/content")
|
22 |
-
'SAGEMAKER_INTERNAL_IMAGE_URI': ('SageMaker Studio Lab', "/home/studio-lab-user/content")
|
23 |
}
|
24 |
|
25 |
for env_var, (environment, path) in environments.items():
|
@@ -31,21 +32,11 @@ webui_path = f"{root_path}/sdw"
|
|
31 |
# ----------------------------------------------
|
32 |
|
33 |
|
34 |
-
# === ONLY SAGEMAKER ===
|
35 |
-
if env == "SageMaker Studio Lab":
|
36 |
-
print("Обновление зависимостей, может занять некоторое время...")
|
37 |
-
get_ipython().system('pip install -q --upgrade torchsde')
|
38 |
-
get_ipython().system('pip install -q --upgrade pip')
|
39 |
-
get_ipython().system('pip install -q --upgrade psutil')
|
40 |
-
|
41 |
-
clear_output()
|
42 |
-
|
43 |
-
|
44 |
# ================ LIBRARIES V2 ================
|
45 |
flag_file = f"{root_path}/libraries_installed.txt"
|
46 |
|
47 |
if not os.path.exists(flag_file):
|
48 |
-
print("💿 Установка библиотек, это займет какое-то
|
49 |
|
50 |
install_lib = {
|
51 |
"gdown": "pip install -U gdown",
|
@@ -62,16 +53,6 @@ if not os.path.exists(flag_file):
|
|
62 |
"Kaggle": {
|
63 |
"xformers": "pip install -q xformers==0.0.23.post1 triton==2.1.0",
|
64 |
"torch": "pip install -q torch==2.1.2+cu121 torchvision==0.16.2+cu121 torchaudio==2.1.2 --extra-index-url https://download.pytorch.org/whl/cu121"
|
65 |
-
},
|
66 |
-
"SageMaker Studio Lab": {
|
67 |
-
"opencv": "pip install -q opencv-python-headless",
|
68 |
-
"huggingface": "pip install -q huggingface-hub",
|
69 |
-
"conda_update": "conda update -q -n base conda",
|
70 |
-
"conda_aria2": "conda install -q -y aria2",
|
71 |
-
"conda_glib": "conda install -q -y glib",
|
72 |
-
"tensorflow": "pip install tensorflow",
|
73 |
-
"xformers": "pip install -q xformers==0.0.23.post1 triton==2.1.0",
|
74 |
-
"torch": "pip install -q torch==2.1.2+cu121 torchvision==0.16.2+cu121 torchaudio==2.1.2 --extra-index-url https://download.pytorch.org/whl/cu121"
|
75 |
}
|
76 |
}
|
77 |
|
@@ -89,7 +70,6 @@ if not os.path.exists(flag_file):
|
|
89 |
get_ipython().system('curl -s -OL https://github.com/DEX-1101/sd-webui-notebook/raw/main/res/new_tunnel --output-dir {root_path}')
|
90 |
get_ipython().system('curl -s -Lo /usr/bin/cl https://github.com/cloudflare/cloudflared/releases/latest/download/cloudflared-linux-amd64 && chmod +x /usr/bin/cl')
|
91 |
get_ipython().system('curl -sLO https://github.com/openziti/zrok/releases/download/v0.4.23/zrok_0.4.23_linux_amd64.tar.gz && tar -xzf zrok_0.4.23_linux_amd64.tar.gz -C /usr/bin && rm -f zrok_0.4.23_linux_amd64.tar.gz')
|
92 |
-
get_ipython().system('wget -P /home/studio-lab-user https://huggingface.co/NagisaNao/fast_repo/resolve/main/sagemaker/FULL_DELETED_NOTEBOOK.ipynb')
|
93 |
del cap
|
94 |
|
95 |
clear_output()
|
@@ -137,14 +117,10 @@ embeddings_dir = f"{webui_path}/embeddings"
|
|
137 |
loras_dir = f"{webui_path}/models/Lora"
|
138 |
extensions_dir = f"{webui_path}/extensions"
|
139 |
control_dir = f"{webui_path}/models/ControlNet"
|
|
|
140 |
|
141 |
|
142 |
# ================= MAIN CODE =================
|
143 |
-
# --- Obsolescence warning ---
|
144 |
-
if env == "SageMaker Studio Lab":
|
145 |
-
print("Вы используете среду 'SageMaker' - эта среда устарела, поэтому многие ошибки не будут исправлены, а ее функциональность будет урезана. Для экономии памяти и/или во избежание ошибок.\n\n")
|
146 |
-
|
147 |
-
|
148 |
if not os.path.exists(webui_path):
|
149 |
start_install = int(time.time())
|
150 |
print("⌚ Распоковка Stable Diffusion...", end='')
|
@@ -164,8 +140,6 @@ if not os.path.exists(webui_path):
|
|
164 |
install_time = timedelta(seconds=time.time()-start_install)
|
165 |
print("\r🚀 Распаковка Завершена! За","%02d:%02d:%02d ⚡\n" % (install_time.seconds / 3600, (install_time.seconds / 60) % 60, install_time.seconds % 60), end='', flush=True)
|
166 |
else:
|
167 |
-
if env == "SageMaker Studio Lab":
|
168 |
-
get_ipython().system('echo -n {start_colab} > {webui_path}/static/colabTimer.txt')
|
169 |
print("🚀 Все распакованно... Пропуск. ⚡")
|
170 |
start_colab = float(open(f'{webui_path}/static/colabTimer.txt', 'r').read())
|
171 |
time_since_start = str(timedelta(seconds=time.time()-start_colab)).split('.')[0]
|
@@ -188,10 +162,7 @@ if latest_webui or latest_exstensions:
|
|
188 |
|
189 |
## Update extensions
|
190 |
if latest_exstensions:
|
191 |
-
|
192 |
-
get_ipython().system('{\'for dir in \' + webui_path + \'/extensions/*/; do cd \\"$dir\\" && git reset --hard && git pull; done\'}')
|
193 |
-
else:
|
194 |
-
get_ipython().system('{\'for dir in /home/studio-lab-user/content/sdw/extensions/*/; do cd \\"$dir\\" && git fetch origin && git pull; done\'}')
|
195 |
|
196 |
# My Chinese friend, you broke the images again in the latest update... >W<'
|
197 |
get_ipython().run_line_magic('cd', '{webui_path}/extensions/Encrypt-Image')
|
@@ -200,24 +171,18 @@ if latest_webui or latest_exstensions:
|
|
200 |
print(f"\r✨ {action} Завершено!")
|
201 |
|
202 |
|
203 |
-
# === FIXING
|
204 |
anxety_repos = "https://huggingface.co/NagisaNao/fast_repo/resolve/main"
|
205 |
|
206 |
with capture.capture_output() as cap:
|
|
|
|
|
|
|
207 |
# --- Encrypt-Image ---
|
208 |
-
get_ipython().system("sed -i '9,37d' {webui_path}/extensions/Encrypt-Image/javascript/encrypt_images_info.js")
|
209 |
|
210 |
# --- Additional-Networks ---
|
211 |
-
get_ipython().system('wget -O {webui_path}/extensions/additional-networks/scripts/metadata_editor.py {anxety_repos}/extensions/Additional-Networks/fix/metadata_editor.py')
|
212 |
-
|
213 |
-
# --= SageMaker =--
|
214 |
-
if env == "SageMaker Studio Lab":
|
215 |
-
with capture.capture_output() as cap:
|
216 |
-
# --- SuperMerger Remove ---
|
217 |
-
if os.path.exists(f"{webui_path}/extensions/supermerger"):
|
218 |
-
get_ipython().system('rm -rf {webui_path}/extensions/supermerger')
|
219 |
-
# --- Launch (Style) ---
|
220 |
-
get_ipython().system('wget -O {webui_path}/modules/styles.py {anxety_repos}/sagemaker/fixing/webui/styles.py')
|
221 |
del cap
|
222 |
|
223 |
|
@@ -240,8 +205,9 @@ model_list = {
|
|
240 |
{"url": "https://huggingface.co/XpucT/Anime/resolve/main/Anime_v2.safetensors", "name": "Anime_v2.safetensors"},
|
241 |
{"url": "https://huggingface.co/XpucT/Anime/resolve/main/Anime_v2-inpainting.safetensors", "name": "Anime_v2-inpainting.safetensors"}
|
242 |
],
|
243 |
-
"2.BluMix [Anime] [V7]": [
|
244 |
-
{"url": "https://civitai.com/api/download/models/361779", "name": "BluMix_v7.safetensors"}
|
|
|
245 |
],
|
246 |
"3.Cetus-Mix [Anime] [V4] + INP": [
|
247 |
{"url": "https://civitai.com/api/download/models/130298", "name": "CetusMix_V4.safetensors"},
|
@@ -344,19 +310,18 @@ controlnet_list = {
|
|
344 |
|
345 |
extension_repo = []
|
346 |
prefixes = {
|
347 |
-
|
348 |
-
"
|
349 |
-
"
|
350 |
-
"
|
351 |
-
"
|
352 |
-
"
|
353 |
-
"
|
354 |
}
|
355 |
|
356 |
-
get_ipython().system('mkdir -p {models_dir} {vaes_dir} {loras_dir} {embeddings_dir} {extensions_dir} {control_dir}')
|
357 |
|
358 |
url = ""
|
359 |
-
ControlNet_url = ""
|
360 |
hf_token = optional_huggingface_token if optional_huggingface_token else "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"
|
361 |
user_header = f"\"Authorization: Bearer {hf_token}\""
|
362 |
|
@@ -370,7 +335,7 @@ def handle_manual(url):
|
|
370 |
if file_name:
|
371 |
url = re.sub(r'\[.*?\]', '', url)
|
372 |
|
373 |
-
for prefix,
|
374 |
if original_url.startswith(f"{prefix}:"):
|
375 |
if prefix != "extension":
|
376 |
manual_download(url, dir, file_name=file_name)
|
@@ -381,7 +346,8 @@ def manual_download(url, dst_dir, file_name):
|
|
381 |
basename = url.split("/")[-1] if file_name is None else file_name
|
382 |
header_option = f"--header={user_header}"
|
383 |
|
384 |
-
print("\033[32m---"*45 + f"\n\033[33mURL: \033[34m{url}\n\033[33mSAVE DIR: \033[34m{dst_dir}\n\033[33mFILE NAME: \033[34m{file_name}\033[32m\n~~~\033[0m")
|
|
|
385 |
|
386 |
# I do it at my own risk..... Fucking CivitAi >:(
|
387 |
civitai_token = "62c0c5956b2f9defbd844d754000180b"
|
@@ -428,6 +394,22 @@ def download(url):
|
|
428 |
url, dst_dir, file_name = link_or_path.split()
|
429 |
manual_download(url, dst_dir, file_name)
|
430 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
431 |
''' submodels - added urls '''
|
432 |
|
433 |
submodels = []
|
@@ -470,6 +452,8 @@ for submodel in submodels:
|
|
470 |
unique_urls = []
|
471 |
|
472 |
def process_file_download(file_url):
|
|
|
|
|
473 |
if file_url.startswith("http"):
|
474 |
if "blob" in file_url:
|
475 |
file_url = file_url.replace("blob", "raw")
|
@@ -484,13 +468,17 @@ def process_file_download(file_url):
|
|
484 |
if any(f'# {tag}' in line.lower() for tag in prefixes):
|
485 |
current_tag = next((tag for tag in prefixes if tag in line.lower()))
|
486 |
|
487 |
-
urls = [url.strip() for url in line.split(',')]
|
488 |
for url in urls:
|
489 |
if url.startswith("http") and url not in unique_urls:
|
490 |
-
|
|
|
491 |
unique_urls.append(url)
|
492 |
|
|
|
|
|
493 |
# fix all possible errors/options and function call
|
|
|
494 |
if custom_file_urls:
|
495 |
for custom_file_url in custom_file_urls.replace(',', '').split():
|
496 |
if not custom_file_url.endswith('.txt'):
|
@@ -500,17 +488,14 @@ if custom_file_urls:
|
|
500 |
custom_file_url = f'{root_path}/{custom_file_url}'
|
501 |
|
502 |
try:
|
503 |
-
process_file_download(custom_file_url)
|
504 |
except FileNotFoundError:
|
505 |
pass
|
506 |
|
507 |
# url prefixing
|
508 |
-
urls = [
|
509 |
-
for
|
510 |
-
|
511 |
-
prefixed_urls = [f"{prefix}:{url}" for url in urls[i].replace(',', '').split()]
|
512 |
-
if prefixed_urls:
|
513 |
-
url += ", ".join(prefixed_urls) + ", "
|
514 |
|
515 |
if detailed_download == "on":
|
516 |
print("\n\n\033[33m# ====== Подробная Загрузка ====== #\n\033[0m")
|
@@ -576,14 +561,3 @@ with capture.capture_output() as cap:
|
|
576 |
get_ipython().system('aria2c --optimize-concurrent-downloads --console-log-level=error --summary-interval=10 -j5 -x16 -s16 -k1M -c -d {save_dir_path} {file}')
|
577 |
del cap
|
578 |
|
579 |
-
|
580 |
-
# === ONLY SAGEMAKER ===
|
581 |
-
if env == "SageMaker Studio Lab":
|
582 |
-
with capture.capture_output() as cap:
|
583 |
-
get_ipython().system('rm -rf /home/studio-lab-user/.conda/envs/studiolab-safemode')
|
584 |
-
get_ipython().system('rm -rf /home/studio-lab-user/.conda/envs/sagemaker-distribution')
|
585 |
-
get_ipython().system('rm -rf /home/studio-lab-user/.conda/pkgs/cache')
|
586 |
-
get_ipython().system('pip cache purge')
|
587 |
-
get_ipython().system('rm -rf ~/.cache')
|
588 |
-
del cap
|
589 |
-
|
|
|
4 |
import re
|
5 |
import time
|
6 |
import json
|
7 |
+
import shutil
|
8 |
+
import zipfile
|
9 |
import requests
|
10 |
import subprocess
|
11 |
from datetime import timedelta
|
|
|
20 |
free_plan = (os.sysconf('SC_PAGE_SIZE') * os.sysconf('SC_PHYS_PAGES') / (1024. ** 3) <= 20)
|
21 |
environments = {
|
22 |
'COLAB_GPU': ('Google Colab', "/root" if free_plan else "/content"),
|
23 |
+
'KAGGLE_URL_BASE': ('Kaggle', "/kaggle/working/content")
|
|
|
24 |
}
|
25 |
|
26 |
for env_var, (environment, path) in environments.items():
|
|
|
32 |
# ----------------------------------------------
|
33 |
|
34 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
35 |
# ================ LIBRARIES V2 ================
|
36 |
flag_file = f"{root_path}/libraries_installed.txt"
|
37 |
|
38 |
if not os.path.exists(flag_file):
|
39 |
+
print("💿 Установка библиотек, это займет какое-то время:\n")
|
40 |
|
41 |
install_lib = {
|
42 |
"gdown": "pip install -U gdown",
|
|
|
53 |
"Kaggle": {
|
54 |
"xformers": "pip install -q xformers==0.0.23.post1 triton==2.1.0",
|
55 |
"torch": "pip install -q torch==2.1.2+cu121 torchvision==0.16.2+cu121 torchaudio==2.1.2 --extra-index-url https://download.pytorch.org/whl/cu121"
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
56 |
}
|
57 |
}
|
58 |
|
|
|
70 |
get_ipython().system('curl -s -OL https://github.com/DEX-1101/sd-webui-notebook/raw/main/res/new_tunnel --output-dir {root_path}')
|
71 |
get_ipython().system('curl -s -Lo /usr/bin/cl https://github.com/cloudflare/cloudflared/releases/latest/download/cloudflared-linux-amd64 && chmod +x /usr/bin/cl')
|
72 |
get_ipython().system('curl -sLO https://github.com/openziti/zrok/releases/download/v0.4.23/zrok_0.4.23_linux_amd64.tar.gz && tar -xzf zrok_0.4.23_linux_amd64.tar.gz -C /usr/bin && rm -f zrok_0.4.23_linux_amd64.tar.gz')
|
|
|
73 |
del cap
|
74 |
|
75 |
clear_output()
|
|
|
117 |
loras_dir = f"{webui_path}/models/Lora"
|
118 |
extensions_dir = f"{webui_path}/extensions"
|
119 |
control_dir = f"{webui_path}/models/ControlNet"
|
120 |
+
adetailer_dir = f"{webui_path}/models/adetailer/"
|
121 |
|
122 |
|
123 |
# ================= MAIN CODE =================
|
|
|
|
|
|
|
|
|
|
|
124 |
if not os.path.exists(webui_path):
|
125 |
start_install = int(time.time())
|
126 |
print("⌚ Распоковка Stable Diffusion...", end='')
|
|
|
140 |
install_time = timedelta(seconds=time.time()-start_install)
|
141 |
print("\r🚀 Распаковка Завершена! За","%02d:%02d:%02d ⚡\n" % (install_time.seconds / 3600, (install_time.seconds / 60) % 60, install_time.seconds % 60), end='', flush=True)
|
142 |
else:
|
|
|
|
|
143 |
print("🚀 Все распакованно... Пропуск. ⚡")
|
144 |
start_colab = float(open(f'{webui_path}/static/colabTimer.txt', 'r').read())
|
145 |
time_since_start = str(timedelta(seconds=time.time()-start_colab)).split('.')[0]
|
|
|
  ## Update extensions
  if latest_exstensions:
+     get_ipython().system('{\'for dir in \' + webui_path + \'/extensions/*/; do cd \\"$dir\\" && git reset --hard && git pull; done\'}')

  # My Chinese friend, you broke the images again in the latest update... >W<'
  get_ipython().run_line_magic('cd', '{webui_path}/extensions/Encrypt-Image')

  print(f"\r✨ {action} Завершено!")


+ # === FIXING EXTENSIONS ===
  anxety_repos = "https://huggingface.co/NagisaNao/fast_repo/resolve/main"

  with capture.capture_output() as cap:
+     # --- Umi-Wildcard ---
+     get_ipython().system("sed -i '521s/open=\\(False\\|True\\)/open=False/' {webui_path}/extensions/Umi-AI-Wildcards/scripts/wildcard_recursive.py  # Closed accordion by default")
+
      # --- Encrypt-Image ---
+     get_ipython().system("sed -i '9,37d' {webui_path}/extensions/Encrypt-Image/javascript/encrypt_images_info.js  # Removes the weird text in webui")

      # --- Additional-Networks ---
+     get_ipython().system('wget -O {webui_path}/extensions/additional-networks/scripts/metadata_editor.py {anxety_repos}/extensions/Additional-Networks/fix/metadata_editor.py  # Fixing an error due to old style')
  del cap
      {"url": "https://huggingface.co/XpucT/Anime/resolve/main/Anime_v2.safetensors", "name": "Anime_v2.safetensors"},
      {"url": "https://huggingface.co/XpucT/Anime/resolve/main/Anime_v2-inpainting.safetensors", "name": "Anime_v2-inpainting.safetensors"}
  ],
+ "2.BluMix [Anime] [V7] + INP": [
+     {"url": "https://civitai.com/api/download/models/361779", "name": "BluMix_v7.safetensors"},
+     {"url": "https://civitai.com/api/download/models/363850", "name": "BluMix_v7-inpainting.safetensors"}
  ],
  "3.Cetus-Mix [Anime] [V4] + INP": [
      {"url": "https://civitai.com/api/download/models/130298", "name": "CetusMix_V4.safetensors"},
  extension_repo = []
  prefixes = {
+     "model": models_dir,
+     "vae": vaes_dir,
+     "lora": loras_dir,
+     "embed": embeddings_dir,
+     "extension": extensions_dir,
+     "control": control_dir,
+     "adetailer": adetailer_dir
  }

+ get_ipython().system('mkdir -p {models_dir} {vaes_dir} {loras_dir} {embeddings_dir} {extensions_dir} {control_dir} {adetailer_dir}')

  url = ""
  hf_token = optional_huggingface_token if optional_huggingface_token else "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"
  user_header = f"\"Authorization: Bearer {hf_token}\""
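A note on the prefixes map above: every entry in the download list can be written as prefix:url, and the prefix decides the target directory, so the new adetailer prefix is routed exactly like model, vae, lora and the rest. A minimal sketch of that routing, assuming placeholder directory paths and a made-up link (not the repo's exact code):

# Placeholder stand-ins for the *_dir variables defined above.
prefixes = {
    "model": "models/Stable-diffusion",
    "vae": "models/VAE",
    "lora": "models/Lora",
    "embed": "embeddings",
    "extension": "extensions",
    "control": "models/ControlNet",
    "adetailer": "models/adetailer",
}

def route(link):
    # Split "prefix:url" at the first colon and look up the destination directory.
    prefix, _, url = link.partition(":")
    dst = prefixes.get(prefix)
    return (url, dst) if dst else None

print(route("adetailer:https://example.com/face_yolov8n.pt"))
# ('https://example.com/face_yolov8n.pt', 'models/adetailer')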
      if file_name:
          url = re.sub(r'\[.*?\]', '', url)

+     for prefix, dir in prefixes.items():
          if original_url.startswith(f"{prefix}:"):
              if prefix != "extension":
                  manual_download(url, dir, file_name=file_name)

      basename = url.split("/")[-1] if file_name is None else file_name
      header_option = f"--header={user_header}"

+     # print("\033[32m---"*45 + f"\n\033[33mURL: \033[34m{url}\n\033[33mSAVE DIR: \033[34m{dst_dir}\n\033[33mFILE NAME: \033[34m{file_name}\033[32m\n~~~\033[0m")
+     print(url, dst_dir, file_name)

      # I do it at my own risk..... Fucking CivitAi >:(
      civitai_token = "62c0c5956b2f9defbd844d754000180b"

          url, dst_dir, file_name = link_or_path.split()
          manual_download(url, dst_dir, file_name)

+     unpucking_zip_files()
+
+ ## unpucking zip files
+ def unpucking_zip_files():
+     directories = [models_dir, vaes_dir, embeddings_dir, loras_dir, extensions_dir, control_dir, adetailer_dir]
+
+     for directory in directories:
+         for root, dirs, files in os.walk(directory):
+             for file in files:
+                 if file.endswith(".zip"):
+                     zip_path = os.path.join(root, file)
+                     extract_path = os.path.splitext(zip_path)[0]
+                     with zipfile.ZipFile(zip_path, 'r') as zip_ref:
+                         zip_ref.extractall(extract_path)
+                     os.remove(zip_path)
+
  ''' submodels - added urls '''

  submodels = []
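The unpucking_zip_files() helper added above is what the new zip support boils down to: after the downloads finish, every model directory is walked, each *.zip is extracted into a folder named after the archive, and the archive is deleted. A small self-contained demo of the same logic, using a temporary directory and a dummy archive rather than the real model folders:

import os
import tempfile
import zipfile

with tempfile.TemporaryDirectory() as tmp:
    # Pretend this is one of the model folders and a download dropped "pack.zip" into it.
    archive = os.path.join(tmp, "pack.zip")
    with zipfile.ZipFile(archive, "w") as zf:
        zf.writestr("face_yolov8n.pt", b"dummy bytes")

    # Same walk-extract-remove pass as unpucking_zip_files().
    for root, _, files in os.walk(tmp):
        for name in files:
            if name.endswith(".zip"):
                zip_path = os.path.join(root, name)
                extract_path = os.path.splitext(zip_path)[0]   # ".../pack.zip" -> ".../pack"
                with zipfile.ZipFile(zip_path, "r") as zf:
                    zf.extractall(extract_path)
                os.remove(zip_path)

    print(os.listdir(tmp))   # ['pack'] - archive gone, contents extracted next to it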
  unique_urls = []

  def process_file_download(file_url):
+     files_urls = ""
+
      if file_url.startswith("http"):
          if "blob" in file_url:
              file_url = file_url.replace("blob", "raw")

          if any(f'# {tag}' in line.lower() for tag in prefixes):
              current_tag = next((tag for tag in prefixes if tag in line.lower()))

+             urls = [url.split('#')[0].strip() for url in line.split(',')]  # filter urls
              for url in urls:
                  if url.startswith("http") and url not in unique_urls:
+                     # handle_manual(f"{current_tag}:{url}")
+                     files_urls += f"{current_tag}:{url}, "
                      unique_urls.append(url)

+     return files_urls
+
  # fix all possible errors/options and function call
+ file_urls = ""
  if custom_file_urls:
      for custom_file_url in custom_file_urls.replace(',', '').split():
          if not custom_file_url.endswith('.txt'):

          custom_file_url = f'{root_path}/{custom_file_url}'

          try:
+             file_urls += process_file_download(custom_file_url)
          except FileNotFoundError:
              pass

  # url prefixing
+ urls = [Model_url, Vae_url, LoRA_url, Embedding_url, Extensions_url]
+ prefixed_urls = [f"{prefix}:{url}" for prefix, url in zip(prefixes.keys(), urls) if url]
+ url += ", ".join(prefixed_urls) + ", " + file_urls
  if detailed_download == "on":
      print("\n\n\033[33m# ====== Подробная Загрузка ====== #\n\033[0m")

      get_ipython().system('aria2c --optimize-concurrent-downloads --console-log-level=error --summary-interval=10 -j5 -x16 -s16 -k1M -c -d {save_dir_path} {file}')
  del cap
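For reference, the actual fetching is done by aria2c with multiple connections, resume support and, for Hugging Face links, the "Authorization: Bearer" header built from hf_token above. A hedged sketch of an equivalent standalone call; the helper name and values are illustrative, not code from the repo:

import subprocess

def aria2_download(url, dst_dir, file_name, hf_token=None):
    cmd = [
        "aria2c", "--console-log-level=error",
        "-c", "-x16", "-s16", "-k1M",        # resume, 16 connections, 1 MiB pieces
        "-d", dst_dir, "-o", file_name,
    ]
    if hf_token and "huggingface.co" in url:
        cmd.append(f"--header=Authorization: Bearer {hf_token}")
    cmd.append(url)
    subprocess.run(cmd, check=True)

# aria2_download("https://huggingface.co/XpucT/Anime/resolve/main/Anime_v2.safetensors",
#                "models/Stable-diffusion", "Anime_v2.safetensors", hf_token="hf_...")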
files_cells/python/ru/launch_ru.py
CHANGED
@@ -13,8 +13,7 @@ def detect_environment():
      free_plan = (os.sysconf('SC_PAGE_SIZE') * os.sysconf('SC_PHYS_PAGES') / (1024. ** 3) <= 20)
      environments = {
          'COLAB_GPU': ('Google Colab', "/root" if free_plan else "/content"),
-         'KAGGLE_URL_BASE': ('Kaggle', "/kaggle/working/content"),
-         'SAGEMAKER_INTERNAL_IMAGE_URI': ('SageMaker Studio Lab', "/home/studio-lab-user/content")
      }

      for env_var, (environment, path) in environments.items():
@@ -39,31 +38,30 @@ commandline_arguments = settings['commandline_arguments']


  # ======================== TUNNEL ========================
- (24 removed lines of the previous tunnel setup; their content is not preserved in this view)
- tunnel.add_tunnel(command="zrok share public http://localhost:{port}/ --headless", name="zrok", pattern=re.compile(r"[\w-]+\.share\.zrok\.io"))
  # ======================== TUNNEL ========================

@@ -74,34 +72,18 @@ get_ipython().system('sed -i \'s#"additional_networks_extra_lora_path": ".*model
  get_ipython().system('sed -i \'s/"sd_checkpoint_hash": ".*"/"sd_checkpoint_hash": ""/g; s/"sd_model_checkpoint": ".*"/"sd_model_checkpoint": ""/g; s/"sd_vae": ".*"/"sd_vae": "None"/g\' {webui_path}/config.json')

-     commandline_arguments += f" --port=1769"

-     if ngrok_token:
-         commandline_arguments += ' --ngrok ' + ngrok_token
-     if env != "Google Colab":
-         commandline_arguments += f" --encrypt-pass=1769"
-
-     get_ipython().system('COMMANDLINE_ARGS="{commandline_arguments}" python launch.py')
-
-     start_colab = float(open(f'{webui_path}/static/colabTimer.txt', 'r').read())
-     time_since_start = str(timedelta(seconds=time.time()-start_colab)).split('.')[0]
-     print(f"\n⌚️ \033[0mВы проводите эту сессию в течение - \033[33m{time_since_start}\033[0m\n\n")
-
- else:
      if ngrok_token:
-         get_ipython().run_line_magic('cd', '{webui_path}')
-
          commandline_arguments += ' --ngrok ' + ngrok_token

-     start_colab = float(open(f'{webui_path}/static/colabTimer.txt', 'r').read())
-     time_since_start = str(timedelta(seconds=time.time()-start_colab)).split('.')[0]
-     print(f"\n⌚️ \033[0mВы проводите эту сессию в течение - \033[33m{time_since_start}\033[0m\n\n")
      free_plan = (os.sysconf('SC_PAGE_SIZE') * os.sysconf('SC_PHYS_PAGES') / (1024. ** 3) <= 20)
      environments = {
          'COLAB_GPU': ('Google Colab', "/root" if free_plan else "/content"),
+         'KAGGLE_URL_BASE': ('Kaggle', "/kaggle/working/content")
      }

      for env_var, (environment, path) in environments.items():
  # ======================== TUNNEL ========================
+ import cloudpickle as pickle
+
+ def get_public_ip(version='ipv4'):
+     try:
+         url = f'https://api64.ipify.org?format=json&{version}=true'
+         response = requests.get(url)
+         data = response.json()
+         public_ip = data['ip']
+         return public_ip
+     except Exception as e:
+         print(f"Error getting public {version} address:", e)
+
+ public_ipv4 = get_public_ip(version='ipv4')
+
+ tunnel_class = pickle.load(open(f"{root_path}/new_tunnel", "rb"), encoding="utf-8")
+ tunnel_port = 1769
+ tunnel = tunnel_class(tunnel_port)
+ tunnel.add_tunnel(command="cl tunnel --url localhost:{port}", name="cl", pattern=re.compile(r"[\w-]+\.trycloudflare\.com"))
+ tunnel.add_tunnel(command="lt --port {port}", name="lt", pattern=re.compile(r"[\w-]+\.loca\.lt"), note="Password : " + "\033[32m" + public_ipv4 + "\033[0m" + " rerun cell if 404 error.")
+
+ ''' add zrok tunnel '''
+ if zrok_token:
+     get_ipython().system('zrok enable {zrok_token} &> /dev/null')
+     tunnel.add_tunnel(command="zrok share public http://localhost:{port}/ --headless", name="zrok", pattern=re.compile(r"[\w-]+\.share\.zrok\.io"))
  # ======================== TUNNEL ========================
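The add_tunnel(pattern=...) arguments are regexes that, presumably, the pickled tunnel helper applies to each client's console output to pull out the public hostname (the helper's internals are not shown in this diff). A small illustration of what such a pattern matches; the sample log line is made up:

import re

patterns = {
    "cl":   re.compile(r"[\w-]+\.trycloudflare\.com"),
    "lt":   re.compile(r"[\w-]+\.loca\.lt"),
    "zrok": re.compile(r"[\w-]+\.share\.zrok\.io"),
}

sample_output = "INF | Your quick Tunnel has been created! Visit it at: https://odd-name-1234.trycloudflare.com"
match = patterns["cl"].search(sample_output)
print(match.group(0) if match else "no tunnel URL yet")   # odd-name-1234.trycloudflare.com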
  get_ipython().system('sed -i \'s/"sd_checkpoint_hash": ".*"/"sd_checkpoint_hash": ""/g; s/"sd_model_checkpoint": ".*"/"sd_model_checkpoint": ""/g; s/"sd_vae": ".*"/"sd_vae": "None"/g\' {webui_path}/config.json')


+ with tunnel:
+     get_ipython().run_line_magic('cd', '{webui_path}')
+     commandline_arguments += f" --port=1769"

      if ngrok_token:
          commandline_arguments += ' --ngrok ' + ngrok_token
+     if env != "Google Colab":
+         commandline_arguments += f" --encrypt-pass=1769"

+     get_ipython().system('COMMANDLINE_ARGS="{commandline_arguments}" python launch.py')

+ start_colab = float(open(f'{webui_path}/static/colabTimer.txt', 'r').read())
+ time_since_start = str(timedelta(seconds=time.time()-start_colab)).split('.')[0]
+ print(f"\n⌚️ \033[0mВы проводите эту сессию в течение - \033[33m{time_since_start}\033[0m\n\n")
files_cells/python/ru/widgets_ru.py
CHANGED
@@ -13,8 +13,7 @@ def detect_environment():
      free_plan = (os.sysconf('SC_PAGE_SIZE') * os.sysconf('SC_PHYS_PAGES') / (1024. ** 3) <= 20)
      environments = {
          'COLAB_GPU': ('Google Colab', "/root" if free_plan else "/content"),
-         'KAGGLE_URL_BASE': ('Kaggle', "/kaggle/working/content"),
-         'SAGEMAKER_INTERNAL_IMAGE_URI': ('SageMaker Studio Lab', "/home/studio-lab-user/content")
      }

      for env_var, (environment, path) in environments.items():
@@ -52,8 +51,8 @@ scale_img = args.scale
  blur_fields = args.blur_fields

  ## ---
- """ WTF …
- fix_heigh_img = "-810px" if env …

  """ transperent fields """
  t_bg_alpha = "1" if not args.transparent else "0.65"
@@ -449,7 +448,7 @@ HR = widgets.HTML('<hr>')
  model_header = widgets.HTML('<div class="header">Выбор Модели<div>')
  model_options = ['none',
                   '1.Anime (by XpucT) + INP',
-                  '2.BluMix [Anime] [V7]',
                   '3.Cetus-Mix [Anime] [V4] + INP',
                   '4.Counterfeit [Anime] [V3] + INP',
                   '5.CuteColor [Anime] [V3]',
@@ -513,8 +512,6 @@ commandline_arguments_widget = widgets.Text(description='Аргументы:', v
  additional_widget_list = [additional_header, latest_changes_widget, HR, controlnet_widget, controlnet_Num_widget, commit_hash_widget, optional_huggingface_token_widget, ngrok_widget, zrok_widget, HR, commandline_arguments_widget]
  if free_plan and env == "Google Colab":   # remove ngrok from colab
      additional_widget_list.remove(ngrok_widget)
- if env == "SageMaker Studio Lab":   # remove zrok from sagemaker
-     additional_widget_list.remove(zrok_widget)
  # ```
  all_additional_box = widgets.VBox(additional_widget_list).add_class("container").add_class("image_3")
  display(all_additional_box)
      free_plan = (os.sysconf('SC_PAGE_SIZE') * os.sysconf('SC_PHYS_PAGES') / (1024. ** 3) <= 20)
      environments = {
          'COLAB_GPU': ('Google Colab', "/root" if free_plan else "/content"),
+         'KAGGLE_URL_BASE': ('Kaggle', "/kaggle/working/content")
      }

      for env_var, (environment, path) in environments.items():
  blur_fields = args.blur_fields

  ## ---
+ """ WTF KAGGLE - WHAT THE FUCK IS THE DIFFERENCE OF 35 PIXELS!?!?!? """
+ fix_heigh_img = "-810px" if env == "Kaggle" else "-775px"

  """ transperent fields """
  t_bg_alpha = "1" if not args.transparent else "0.65"
  model_header = widgets.HTML('<div class="header">Выбор Модели<div>')
  model_options = ['none',
                   '1.Anime (by XpucT) + INP',
+                  '2.BluMix [Anime] [V7] + INP',
                   '3.Cetus-Mix [Anime] [V4] + INP',
                   '4.Counterfeit [Anime] [V3] + INP',
                   '5.CuteColor [Anime] [V3]',
  additional_widget_list = [additional_header, latest_changes_widget, HR, controlnet_widget, controlnet_Num_widget, commit_hash_widget, optional_huggingface_token_widget, ngrok_widget, zrok_widget, HR, commandline_arguments_widget]
  if free_plan and env == "Google Colab":   # remove ngrok from colab
      additional_widget_list.remove(ngrok_widget)
  # ```
  all_additional_box = widgets.VBox(additional_widget_list).add_class("container").add_class("image_3")
  display(all_additional_box)
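The renamed widget option '2.BluMix [Anime] [V7] + INP' matches the key added to model_list in the downloading script; presumably the selected option string is used directly as the lookup key, so the two have to stay in sync. An illustrative lookup, with the dict trimmed to two entries for brevity:

# Illustration only: the option string chosen in the widget doubles as the model_list key.
model_list = {
    "1.Anime (by XpucT) + INP": [{"url": "https://huggingface.co/XpucT/Anime/resolve/main/Anime_v2.safetensors", "name": "Anime_v2.safetensors"}],
    "2.BluMix [Anime] [V7] + INP": [{"url": "https://civitai.com/api/download/models/361779", "name": "BluMix_v7.safetensors"}],
}

selected = "2.BluMix [Anime] [V7] + INP"   # value coming from the model dropdown widget
for item in model_list.get(selected, []):
    print(f"would download {item['name']} from {item['url']}")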