jnkr36 committed
Commit 28270d3
1 Parent(s): f0ed034

Upload comfyui_colab.ipynb

Files changed (1):
  notebooks/comfyui_colab.ipynb +237 -0
notebooks/comfyui_colab.ipynb ADDED
@@ -0,0 +1,237 @@
+ {
+ "cells": [
+ {
+ "cell_type": "markdown",
+ "metadata": {
+ "id": "aaaaaaaaaa"
+ },
+ "source": [
+ "Git clone the repo and install the requirements. (ignore the pip errors about protobuf)"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {
+ "id": "bbbbbbbbbb"
+ },
+ "outputs": [],
+ "source": [
+ "#@title Environment Setup\n",
+ "\n",
+ "from pathlib import Path\n",
+ "\n",
+ "OPTIONS = {}\n",
+ "\n",
+ "USE_GOOGLE_DRIVE = False #@param {type:\"boolean\"}\n",
+ "UPDATE_COMFY_UI = True #@param {type:\"boolean\"}\n",
+ "WORKSPACE = 'ComfyUI'\n",
+ "OPTIONS['USE_GOOGLE_DRIVE'] = USE_GOOGLE_DRIVE\n",
+ "OPTIONS['UPDATE_COMFY_UI'] = UPDATE_COMFY_UI\n",
+ "\n",
+ "if OPTIONS['USE_GOOGLE_DRIVE']:\n",
+ " !echo \"Mounting Google Drive...\"\n",
+ " %cd /\n",
+ " \n",
+ " from google.colab import drive\n",
+ " drive.mount('/content/drive')\n",
+ "\n",
+ " WORKSPACE = \"/content/drive/MyDrive/ComfyUI\"\n",
+ " %cd /content/drive/MyDrive\n",
+ "\n",
+ "![ ! -d $WORKSPACE ] && echo -= Initial setup ComfyUI =- && git clone https://github.com/comfyanonymous/ComfyUI\n",
+ "%cd $WORKSPACE\n",
+ "\n",
+ "if OPTIONS['UPDATE_COMFY_UI']:\n",
+ " !echo -= Updating ComfyUI =-\n",
+ " !git pull\n",
+ "\n",
+ "!echo -= Install dependencies =-\n",
+ "!pip install xformers==0.0.16 -r requirements.txt --extra-index-url https://download.pytorch.org/whl/cu117"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {
+ "id": "cccccccccc"
+ },
+ "source": [
+ "Download some models/checkpoints/vae or custom comfyui nodes (uncomment the commands for the ones you want)"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {
+ "id": "dddddddddd"
+ },
+ "outputs": [],
+ "source": [
+ "# Checkpoints\n",
+ "\n",
+ "# SD1.5\n",
+ "!wget -c https://huggingface.co/runwayml/stable-diffusion-v1-5/resolve/main/v1-5-pruned-emaonly.ckpt -P ./models/checkpoints/\n",
+ "\n",
+ "# SD2\n",
+ "#!wget -c https://huggingface.co/stabilityai/stable-diffusion-2-1-base/resolve/main/v2-1_512-ema-pruned.safetensors -P ./models/checkpoints/\n",
+ "#!wget -c https://huggingface.co/stabilityai/stable-diffusion-2-1/resolve/main/v2-1_768-ema-pruned.safetensors -P ./models/checkpoints/\n",
+ "\n",
+ "# Some SD1.5 anime style\n",
+ "#!wget -c https://huggingface.co/WarriorMama777/OrangeMixs/resolve/main/Models/AbyssOrangeMix2/AbyssOrangeMix2_hard.safetensors -P ./models/checkpoints/\n",
+ "#!wget -c https://huggingface.co/WarriorMama777/OrangeMixs/resolve/main/Models/AbyssOrangeMix3/AOM3A1_orangemixs.safetensors -P ./models/checkpoints/\n",
+ "#!wget -c https://huggingface.co/WarriorMama777/OrangeMixs/resolve/main/Models/AbyssOrangeMix3/AOM3A3_orangemixs.safetensors -P ./models/checkpoints/\n",
+ "#!wget -c https://huggingface.co/Linaqruf/anything-v3.0/resolve/main/anything-v3-fp16-pruned.safetensors -P ./models/checkpoints/\n",
+ "\n",
+ "# Waifu Diffusion 1.5 (anime style SD2.x 768-v)\n",
+ "#!wget -c https://huggingface.co/waifu-diffusion/wd-1-5-beta2/resolve/main/checkpoints/wd-1-5-beta2-fp16.safetensors -P ./models/checkpoints/\n",
+ "\n",
+ "\n",
+ "# VAE\n",
+ "!wget -c https://huggingface.co/stabilityai/sd-vae-ft-mse-original/resolve/main/vae-ft-mse-840000-ema-pruned.safetensors -P ./models/vae/\n",
+ "#!wget -c https://huggingface.co/WarriorMama777/OrangeMixs/resolve/main/VAEs/orangemix.vae.pt -P ./models/vae/\n",
+ "#!wget -c https://huggingface.co/hakurei/waifu-diffusion-v1-4/resolve/main/vae/kl-f8-anime2.ckpt -P ./models/vae/\n",
+ "\n",
+ "\n",
+ "# Loras\n",
+ "#!wget -c https://civitai.com/api/download/models/10350 -O ./models/loras/theovercomer8sContrastFix_sd21768.safetensors #theovercomer8sContrastFix SD2.x 768-v\n",
+ "#!wget -c https://civitai.com/api/download/models/10638 -O ./models/loras/theovercomer8sContrastFix_sd15.safetensors #theovercomer8sContrastFix SD1.x\n",
+ "\n",
+ "\n",
+ "# T2I-Adapter\n",
+ "#!wget -c https://huggingface.co/TencentARC/T2I-Adapter/resolve/main/models/t2iadapter_depth_sd14v1.pth -P ./models/controlnet/\n",
+ "#!wget -c https://huggingface.co/TencentARC/T2I-Adapter/resolve/main/models/t2iadapter_seg_sd14v1.pth -P ./models/controlnet/\n",
+ "#!wget -c https://huggingface.co/TencentARC/T2I-Adapter/resolve/main/models/t2iadapter_sketch_sd14v1.pth -P ./models/controlnet/\n",
+ "#!wget -c https://huggingface.co/TencentARC/T2I-Adapter/resolve/main/models/t2iadapter_keypose_sd14v1.pth -P ./models/controlnet/\n",
+ "#!wget -c https://huggingface.co/TencentARC/T2I-Adapter/resolve/main/models/t2iadapter_openpose_sd14v1.pth -P ./models/controlnet/\n",
+ "#!wget -c https://huggingface.co/TencentARC/T2I-Adapter/resolve/main/models/t2iadapter_color_sd14v1.pth -P ./models/controlnet/\n",
+ "#!wget -c https://huggingface.co/TencentARC/T2I-Adapter/resolve/main/models/t2iadapter_canny_sd14v1.pth -P ./models/controlnet/\n",
+ "\n",
+ "# T2I Styles Model\n",
+ "#!wget -c https://huggingface.co/TencentARC/T2I-Adapter/resolve/main/models/t2iadapter_style_sd14v1.pth -P ./models/style_models/\n",
+ "\n",
+ "# CLIPVision model (needed for styles model)\n",
+ "#!wget -c https://huggingface.co/openai/clip-vit-large-patch14/resolve/main/pytorch_model.bin -O ./models/clip_vision/clip_vit14.bin\n",
+ "\n",
+ "\n",
+ "# ControlNet\n",
+ "#!wget -c https://huggingface.co/webui/ControlNet-modules-safetensors/resolve/main/control_depth-fp16.safetensors -P ./models/controlnet/\n",
+ "#!wget -c https://huggingface.co/webui/ControlNet-modules-safetensors/resolve/main/control_scribble-fp16.safetensors -P ./models/controlnet/\n",
+ "#!wget -c https://huggingface.co/webui/ControlNet-modules-safetensors/resolve/main/control_openpose-fp16.safetensors -P ./models/controlnet/\n",
+ "\n",
+ "\n",
+ "# Controlnet Preprocessor nodes by Fannovel16\n",
+ "#!cd custom_nodes && git clone https://github.com/Fannovel16/comfy_controlnet_preprocessors; cd comfy_controlnet_preprocessors && python install.py\n",
+ "\n",
+ "# ESRGAN upscale model\n",
+ "#!wget -c https://huggingface.co/sberbank-ai/Real-ESRGAN/resolve/main/RealESRGAN_x2.pth -P ./models/upscale_models/\n",
+ "#!wget -c https://huggingface.co/sberbank-ai/Real-ESRGAN/resolve/main/RealESRGAN_x4.pth -P ./models/upscale_models/\n",
+ "\n",
+ "\n"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {
+ "id": "kkkkkkkkkkkkkk"
+ },
+ "source": [
+ "### Run ComfyUI with localtunnel (Recommended Way)\n",
+ "\n",
+ "\n"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {
+ "id": "jjjjjjjjjjjjj"
+ },
+ "outputs": [],
+ "source": [
+ "!npm install -g localtunnel\n",
+ "\n",
+ "import subprocess\n",
+ "import threading\n",
+ "import time\n",
+ "import socket\n",
+ "def iframe_thread(port):\n",
+ " while True:\n",
+ " time.sleep(0.5)\n",
+ " sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)\n",
+ " result = sock.connect_ex(('127.0.0.1', port))\n",
+ " if result == 0:\n",
+ " break\n",
+ " sock.close()\n",
+ " print(\"\\nComfyUI finished loading, trying to launch localtunnel (if it gets stuck here localtunnel is having issues)\")\n",
+ " p = subprocess.Popen([\"lt\", \"--port\", \"{}\".format(port)], stdout=subprocess.PIPE)\n",
+ " for line in p.stdout:\n",
+ " print(line.decode(), end='')\n",
+ "\n",
+ "\n",
+ "threading.Thread(target=iframe_thread, daemon=True, args=(8188,)).start()\n",
+ "\n",
+ "!python main.py --dont-print-server"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {
+ "id": "gggggggggg"
+ },
+ "source": [
+ "### Run ComfyUI with colab iframe (use only in case the previous way with localtunnel doesn't work)\n",
+ "\n",
+ "You should see the ui appear in an iframe. If you get a 403 error, it's your firefox settings or an extension that's messing things up.\n",
+ "\n",
+ "If you want to open it in another window use the link.\n",
+ "\n",
+ "Note that some UI features like live image previews won't work because the colab iframe blocks websockets."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {
+ "id": "hhhhhhhhhh"
+ },
+ "outputs": [],
+ "source": [
+ "import threading\n",
+ "import time\n",
+ "import socket\n",
+ "def iframe_thread(port):\n",
+ " while True:\n",
+ " time.sleep(0.5)\n",
+ " sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)\n",
+ " result = sock.connect_ex(('127.0.0.1', port))\n",
+ " if result == 0:\n",
+ " break\n",
+ " sock.close()\n",
+ " from google.colab import output\n",
+ " output.serve_kernel_port_as_iframe(port, height=1024)\n",
+ " print(\"to open it in a window you can open this link here:\")\n",
+ " output.serve_kernel_port_as_window(port)\n",
+ "\n",
+ "threading.Thread(target=iframe_thread, daemon=True, args=(8188,)).start()\n",
+ "\n",
+ "!python main.py --dont-print-server"
+ ]
+ }
+ ],
+ "metadata": {
+ "accelerator": "GPU",
+ "colab": {
+ "provenance": []
+ },
+ "gpuClass": "standard",
+ "kernelspec": {
+ "display_name": "Python 3",
+ "name": "python3"
+ },
+ "language_info": {
+ "name": "python"
+ }
+ },
+ "nbformat": 4,
+ "nbformat_minor": 0
+ }
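
For readers skimming the diff: both launch cells in the uploaded notebook use the same pattern of polling the local ComfyUI port before exposing it. The sketch below restates that pattern as a standalone script, not as part of the committed file. The port 8188 and the `lt --port` invocation from the localtunnel npm package are taken from the notebook; the helper name `wait_for_port` and its timeout handling are illustrative additions.

# Minimal sketch (not part of the committed notebook): wait for a local HTTP
# server to accept connections, then expose it with localtunnel's `lt` CLI.
# Port 8188 and `lt --port <n>` come from the notebook; wait_for_port and the
# timeout are hypothetical conveniences added for illustration.
import socket
import subprocess
import time


def wait_for_port(port: int, host: str = "127.0.0.1", timeout: float = 300.0) -> bool:
    """Poll until something is listening on (host, port) or the timeout expires."""
    deadline = time.time() + timeout
    while time.time() < deadline:
        with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as sock:
            if sock.connect_ex((host, port)) == 0:  # 0 means the connect succeeded
                return True
        time.sleep(0.5)
    return False


if __name__ == "__main__":
    if wait_for_port(8188):
        # Stream localtunnel's output so the generated public URL is visible.
        proc = subprocess.Popen(["lt", "--port", "8188"], stdout=subprocess.PIPE)
        for line in proc.stdout:
            print(line.decode(), end="")
    else:
        print("server never came up on port 8188")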