# -*- coding: utf-8 -*-
"""fast_stable_diffusion_AUTOMATIC1111.ipynb
Automatically generated by Colaboratory.
Original file is located at
https://colab.research.google.com/github/TheLastBen/fast-stable-diffusion/blob/main/fast_stable_diffusion_AUTOMATIC1111.ipynb
# **Colab Pro notebook from https://github.com/TheLastBen/fast-stable-diffusion** *Alternatives : [RunPod](https://www.runpod.io/console/gpu-browse?template=runpod-stable-unified) | [Paperspace](https://console.paperspace.com/github/TheLastBen/PPS?machine=Free-GPU)*
##**[Support](https://ko-fi.com/thelastben)**
"""
#@markdown # Connect Google Drive
from google.colab import drive
from IPython.display import display, clear_output
import ipywidgets as widgets
import os
def inf(msg, style, wdth):
    btn = widgets.Button(description=msg, disabled=True, button_style=style, layout=widgets.Layout(min_width=wdth))
    display(btn)
Shared_Drive = "" #@param {type:"string"}
#@markdown - Leave empty if you're not using a shared drive
print("Connecting...")
drive.mount('/content/gdrive')
if Shared_Drive!="" and os.path.exists("/content/gdrive/Shareddrives"):
    mainpth="Shareddrives/"+Shared_Drive
else:
    mainpth="MyDrive"
clear_output()
inf('\u2714 Done','success', '50px')
#@markdown ---
# Commented out IPython magic to ensure Python compatibility.
#@markdown # Install/Update AUTOMATIC1111 repo
from IPython.utils import capture
from IPython.display import display, clear_output
from subprocess import getoutput
import ipywidgets as widgets
import sys
import re
import fileinput
import os
import time
import base64
import gdown
from gdown.download import get_url_from_gdrive_confirmation
import requests
from urllib.request import urlopen, Request
from urllib.parse import urlparse, parse_qs, unquote
from tqdm import tqdm
import six
blasphemy=base64.b64decode(("d2VidWk=").encode('ascii')).decode('ascii')
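# 'd2VidWk=' is base64 for 'webui'; decoding it here lets the notebook build the
# repo and folder names (stable-diffusion-$blasphemy) without the literal string.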
if not os.path.exists("/content/gdrive"):
    print('Gdrive not connected, using colab storage ...')
    time.sleep(4)
    mainpth="MyDrive"
    !mkdir -p /content/gdrive/$mainpth
    Shared_Drive=""

if Shared_Drive!="" and not os.path.exists("/content/gdrive/Shareddrives"):
    print('Shared drive not detected, using default MyDrive')
    mainpth="MyDrive"
with capture.capture_output() as cap:
    def inf(msg, style, wdth):
        btn = widgets.Button(description=msg, disabled=True, button_style=style, layout=widgets.Layout(min_width=wdth))
        display(btn)
    fgitclone = "git clone --depth 1"
    # %mkdir -p /content/gdrive/$mainpth/sd
    # %cd /content/gdrive/$mainpth/sd
    !git clone -q --branch master https://github.com/AUTOMATIC1111/stable-diffusion-$blasphemy
    !mkdir -p /content/gdrive/$mainpth/sd/stable-diffusion-$blasphemy/cache/
    os.environ['TRANSFORMERS_CACHE']=f"/content/gdrive/{mainpth}/sd/stable-diffusion-"+blasphemy+"/cache"
    os.environ['TORCH_HOME'] = f"/content/gdrive/{mainpth}/sd/stable-diffusion-"+blasphemy+"/cache"
with capture.capture_output() as cap:
    # %cd /content/gdrive/$mainpth/sd/stable-diffusion-$blasphemy/
    !git reset --hard
    !git checkout master
    time.sleep(1)
    !rm webui.sh
    !git pull
clear_output()
inf('\u2714 Done','success', '50px')
#@markdown ---
# Commented out IPython magic to ensure Python compatibility.
#@markdown # Requirements
print('Installing requirements...')
with capture.capture_output() as cap:
    # %cd /content/
    !wget -q -i https://raw.githubusercontent.com/TheLastBen/fast-stable-diffusion/main/Dependencies/A1111.txt
    !dpkg -i *.deb
    if not os.path.exists('/content/gdrive/'+mainpth+'/sd/stablediffusiond'): #restore later
        !tar -C /content/gdrive/$mainpth --zstd -xf sd_mrep.tar.zst
        !tar -C / --zstd -xf gcolabdeps.tar.zst
        !rm *.deb | rm *.zst | rm *.txt
    if not os.path.exists('gdrive/'+mainpth+'/sd/libtcmalloc/libtcmalloc_minimal.so.4'):
        # %env CXXFLAGS=-std=c++14
        !wget -q https://github.com/gperftools/gperftools/releases/download/gperftools-2.5/gperftools-2.5.tar.gz && tar zxf gperftools-2.5.tar.gz && mv gperftools-2.5 gperftools
        !wget -q https://github.com/TheLastBen/fast-stable-diffusion/raw/main/AUTOMATIC1111_files/Patch
        # %cd /content/gperftools
        !patch -p1 < /content/Patch
        !./configure --enable-minimal --enable-libunwind --enable-frame-pointers --enable-dynamic-sized-delete-support --enable-sized-delete --enable-emergency-malloc; make -j4
        !mkdir -p /content/gdrive/$mainpth/sd/libtcmalloc && cp .libs/libtcmalloc*.so* /content/gdrive/$mainpth/sd/libtcmalloc
        # %env LD_PRELOAD=/content/gdrive/$mainpth/sd/libtcmalloc/libtcmalloc_minimal.so.4
        # %cd /content
        !rm *.tar.gz Patch && rm -r /content/gperftools
    else:
        # %env LD_PRELOAD=/content/gdrive/$mainpth/sd/libtcmalloc/libtcmalloc_minimal.so.4
        pass  # the magic above is commented out in the .py export; keep the branch valid
    os.environ['TF_CPP_MIN_LOG_LEVEL'] = '3'
    os.environ['PYTHONWARNINGS'] = 'ignore'
    !sed -i 's@text = _formatwarnmsg(msg)@text =\"\"@g' /usr/lib/python3.10/warnings.py
    !pip install open-clip-torch==2.20.0 -qq --no-deps
    !pip install fastapi==0.94.0 -qq
clear_output()
inf('\u2714 Done','success', '50px')
#@markdown ---
#@markdown # Model Download/Load
Use_Temp_Storage = False #@param {type:"boolean"}
#@markdown - If not, make sure you have enough space on your gdrive
#@markdown ---
Model_Version = "SDXL" #@param ["SDXL", "1.5", "v1.5 Inpainting", "V2.1-768px"]
#@markdown Or
PATH_to_MODEL = "" #@param {type:"string"}
#@markdown - Insert the full path of your custom model or of a folder containing multiple models
#@markdown Or
MODEL_LINK = "" #@param {type:"string"}
def getsrc(url):
    parsed_url = urlparse(url)
    if parsed_url.netloc == 'civitai.com':
        src='civitai'
    elif parsed_url.netloc == 'drive.google.com':
        src='gdrive'
    elif parsed_url.netloc == 'huggingface.co':
        src='huggingface'
    else:
        src='others'
    return src
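# How getsrc() classifies a link, with illustrative (not original) URLs:
#   getsrc('https://civitai.com/api/download/models/1234')        -> 'civitai'
#   getsrc('https://drive.google.com/file/d/FILE_ID/view')        -> 'gdrive'
#   getsrc('https://huggingface.co/org/repo/resolve/main/x.ckpt') -> 'huggingface'
#   anything else, including an empty string                      -> 'others'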
src=getsrc(MODEL_LINK)
def get_name(url, gdrive):
    if not gdrive:
        response = requests.get(url, allow_redirects=False)
        if "Location" in response.headers:
            redirected_url = response.headers["Location"]
            quer = parse_qs(urlparse(redirected_url).query)
            if "response-content-disposition" in quer:
                disp_val = quer["response-content-disposition"][0].split(";")
                for vals in disp_val:
                    if vals.strip().startswith("filename="):
                        filenm=unquote(vals.split("=", 1)[1].strip())
                        return filenm.replace("\"","")
    else:
        headers = {"User-Agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/39.0.2171.95 Safari/537.36"}
        lnk="https://drive.google.com/uc?id={id}&export=download".format(id=url[url.find("/d/")+3:url.find("/view")])
        res = requests.session().get(lnk, headers=headers, stream=True, verify=True)
        res = requests.session().get(get_url_from_gdrive_confirmation(res.text), headers=headers, stream=True, verify=True)
        content_disposition = six.moves.urllib_parse.unquote(res.headers["Content-Disposition"])
        filenm = re.search(r"filename\*=UTF-8''(.*)", content_disposition).groups()[0].replace(os.path.sep, "_")
        return filenm
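# get_name() recovers the filename to save under: for direct links it follows one
# redirect and reads 'filename=' from the response-content-disposition query
# parameter; for Google Drive links it goes through gdown's confirmation page and
# parses the Content-Disposition header instead.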
def dwn(url, dst, msg):
    file_size = None
    req = Request(url, headers={"User-Agent": "torch.hub"})
    u = urlopen(req)
    meta = u.info()
    if hasattr(meta, 'getheaders'):
        content_length = meta.getheaders("Content-Length")
    else:
        content_length = meta.get_all("Content-Length")
    if content_length is not None and len(content_length) > 0:
        file_size = int(content_length[0])

    with tqdm(total=file_size, disable=False, mininterval=0.5,
              bar_format=msg+' |{bar:20}| {percentage:3.0f}%') as pbar:
        with open(dst, "wb") as f:
            while True:
                buffer = u.read(8192)
                if len(buffer) == 0:
                    break
                f.write(buffer)
                pbar.update(len(buffer))
            f.close()
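# dwn() streams a URL to disk in 8 KB chunks behind a tqdm progress bar.
# Hypothetical call: dwn('https://example.com/model.safetensors', '/content/model.safetensors', 'Downloading the model')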
def sdmdls(ver, Use_Temp_Storage):
    if ver=='1.5':
        if Use_Temp_Storage:
            os.makedirs('/content/temp_models', exist_ok=True)
            model='/content/temp_models/v1-5-pruned-emaonly.safetensors'
        else:
            model='/content/gdrive/'+mainpth+'/sd/stable-diffusion-'+blasphemy+'/models/Stable-diffusion/v1-5-pruned-emaonly.safetensors'
        link='https://huggingface.co/runwayml/stable-diffusion-v1-5/resolve/main/v1-5-pruned-emaonly.safetensors'
    elif ver=='V2.1-768px':
        if Use_Temp_Storage:
            os.makedirs('/content/temp_models', exist_ok=True)
            model='/content/temp_models/v2-1_768-ema-pruned.safetensors'
        else:
            model='/content/gdrive/'+mainpth+'/sd/stable-diffusion-'+blasphemy+'/models/Stable-diffusion/v2-1_768-ema-pruned.safetensors'
        link='https://huggingface.co/stabilityai/stable-diffusion-2-1/resolve/main/v2-1_768-ema-pruned.safetensors'
    elif ver=='v1.5 Inpainting':
        if Use_Temp_Storage:
            os.makedirs('/content/temp_models', exist_ok=True)
            model='/content/temp_models/sd-v1-5-inpainting.ckpt'
        else:
            model='/content/gdrive/'+mainpth+'/sd/stable-diffusion-'+blasphemy+'/models/Stable-diffusion/sd-v1-5-inpainting.ckpt'
        link='https://huggingface.co/runwayml/stable-diffusion-inpainting/resolve/main/sd-v1-5-inpainting.ckpt'
    elif ver=='SDXL':
        if Use_Temp_Storage:
            os.makedirs('/content/temp_models', exist_ok=True)
            model='/content/temp_models/sd_xl_base_1.0.safetensors'
        else:
            model='/content/gdrive/'+mainpth+'/sd/stable-diffusion-'+blasphemy+'/models/Stable-diffusion/sd_xl_base_1.0.safetensors'
        link='https://huggingface.co/stabilityai/stable-diffusion-xl-base-1.0/resolve/main/sd_xl_base_1.0.safetensors'

    if not os.path.exists(model):
        !gdown --fuzzy -O $model $link
        if os.path.exists(model):
            clear_output()
            inf('\u2714 Done','success', '50px')
        else:
            inf('\u2718 Something went wrong, try again','danger', "250px")
    else:
        clear_output()
        inf('\u2714 Model already exists','primary', '300px')

    return model
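# sdmdls() maps the Model_Version dropdown to an official checkpoint and downloads
# it with gdown only when it is not already at the target path:
#   '1.5'             -> v1-5-pruned-emaonly.safetensors   (runwayml/stable-diffusion-v1-5)
#   'V2.1-768px'      -> v2-1_768-ema-pruned.safetensors   (stabilityai/stable-diffusion-2-1)
#   'v1.5 Inpainting' -> sd-v1-5-inpainting.ckpt           (runwayml/stable-diffusion-inpainting)
#   'SDXL'            -> sd_xl_base_1.0.safetensors        (stabilityai/stable-diffusion-xl-base-1.0)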
if (PATH_to_MODEL !=''):
    if os.path.exists(str(PATH_to_MODEL)):
        inf('\u2714 Using the trained model.','success', '200px')
    else:
        while not os.path.exists(str(PATH_to_MODEL)):
            inf('\u2718 Wrong path, use the colab file explorer to copy the path : ','danger', "400px")
            PATH_to_MODEL=input()
        if os.path.exists(str(PATH_to_MODEL)):
            inf('\u2714 Using the custom model.','success', '200px')
    model=PATH_to_MODEL

elif MODEL_LINK != "":

    if src=='civitai':
        modelname=get_name(MODEL_LINK, False)
        if Use_Temp_Storage:
            os.makedirs('/content/temp_models', exist_ok=True)
            model=f'/content/temp_models/{modelname}'
        else:
            model=f'/content/gdrive/{mainpth}/sd/stable-diffusion-{blasphemy}/models/Stable-diffusion/{modelname}'
        if not os.path.exists(model):
            dwn(MODEL_LINK, model, 'Downloading the custom model')
            clear_output()
        else:
            inf('\u2714 Model already exists','primary', '300px')
    elif src=='gdrive':
        modelname=get_name(MODEL_LINK, True)
        if Use_Temp_Storage:
            os.makedirs('/content/temp_models', exist_ok=True)
            model=f'/content/temp_models/{modelname}'
        else:
            model=f'/content/gdrive/{mainpth}/sd/stable-diffusion-{blasphemy}/models/Stable-diffusion/{modelname}'
        if not os.path.exists(model):
            gdown.download(url=MODEL_LINK, output=model, quiet=False, fuzzy=True)
            clear_output()
        else:
            inf('\u2714 Model already exists','primary', '300px')
    else:
        modelname=os.path.basename(MODEL_LINK)
        if Use_Temp_Storage:
            os.makedirs('/content/temp_models', exist_ok=True)
            model=f'/content/temp_models/{modelname}'
        else:
            model=f'/content/gdrive/{mainpth}/sd/stable-diffusion-{blasphemy}/models/Stable-diffusion/{modelname}'
        if not os.path.exists(model):
            gdown.download(url=MODEL_LINK, output=model, quiet=False, fuzzy=True)
            clear_output()
        else:
            inf('\u2714 Model already exists','primary', '700px')

    if os.path.exists(model) and os.path.getsize(model) > 1810671599:
        inf('\u2714 Model downloaded, using the custom model.','success', '300px')
    else:
        !rm "$model"
        inf('\u2718 Wrong link, check that the link is valid','danger', "300px")

else:
    model=sdmdls(Model_Version, Use_Temp_Storage)
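# Selection precedence: an existing PATH_to_MODEL wins, then a non-empty MODEL_LINK
# (fetched with dwn() or gdown depending on the source), otherwise the Model_Version
# dropdown handled by sdmdls() above.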
#@markdown ---
#@markdown # Download LoRA
LoRA_LINK = "" #@param {type:"string"}
os.makedirs('/content/gdrive/'+mainpth+'/sd/stable-diffusion-'+blasphemy+'/models/Lora', exist_ok=True)

src=getsrc(LoRA_LINK)

if src=='civitai':
    modelname=get_name(LoRA_LINK, False)
    loramodel=f'/content/gdrive/{mainpth}/sd/stable-diffusion-{blasphemy}/models/Lora/{modelname}'
    if not os.path.exists(loramodel):
        dwn(LoRA_LINK, loramodel, 'Downloading the LoRA model')
        clear_output()
    else:
        inf('\u2714 Model already exists','primary', '300px')
elif src=='gdrive':
    modelname=get_name(LoRA_LINK, True)
    loramodel=f'/content/gdrive/{mainpth}/sd/stable-diffusion-{blasphemy}/models/Lora/{modelname}'
    if not os.path.exists(loramodel):
        gdown.download(url=LoRA_LINK, output=loramodel, quiet=False, fuzzy=True)
        clear_output()
    else:
        inf('\u2714 Model already exists','primary', '300px')
else:
    modelname=os.path.basename(LoRA_LINK)
    loramodel=f'/content/gdrive/{mainpth}/sd/stable-diffusion-{blasphemy}/models/Lora/{modelname}'
    if not os.path.exists(loramodel):
        gdown.download(url=LoRA_LINK, output=loramodel, quiet=False, fuzzy=True)
        clear_output()
    else:
        inf('\u2714 Model already exists','primary', '700px')

if os.path.exists(loramodel):
    inf('\u2714 LoRA downloaded','success', '300px')
else:
    inf('\u2718 Wrong link, check that the link is valid','danger', "300px")
#@markdown ---
# Commented out IPython magic to ensure Python compatibility.
#@markdown # ControlNet
from torch.hub import download_url_to_file
from urllib.parse import urlparse
import re
from subprocess import run
Model = "None" #@param [ "None", "All (21GB)", "Canny", "Depth", "Lineart", "MLSD", "Normal", "OpenPose", "Scribble", "Seg", "ip2p", "Shuffle", "Inpaint", "Softedge", "Lineart_Anime", "Tile", "T2iadapter_Models"]
v2_Model = "None" #@param [ "None", "All", "Canny", "Depth", "HED", "OpenPose", "Scribble"]
#@markdown - Download/update ControlNet extension and its models
def download(url, model_dir):
    filename = os.path.basename(urlparse(url).path)
    pth = os.path.abspath(os.path.join(model_dir, filename))
    if not os.path.exists(pth):
        print('Downloading: '+os.path.basename(url))
        download_url_to_file(url, pth, hash_prefix=None, progress=True)
    else:
        print(f"The model {filename} already exists")
Canny='https://huggingface.co/lllyasviel/ControlNet-v1-1/resolve/main/control_v11p_sd15_canny.pth'
Depth='https://huggingface.co/lllyasviel/ControlNet-v1-1/resolve/main/control_v11f1p_sd15_depth.pth'
Lineart='https://huggingface.co/lllyasviel/ControlNet-v1-1/resolve/main/control_v11p_sd15_lineart.pth'
MLSD='https://huggingface.co/lllyasviel/ControlNet-v1-1/resolve/main/control_v11p_sd15_mlsd.pth'
Normal='https://huggingface.co/lllyasviel/ControlNet-v1-1/resolve/main/control_v11p_sd15_normalbae.pth'
OpenPose='https://huggingface.co/lllyasviel/ControlNet-v1-1/resolve/main/control_v11p_sd15_openpose.pth'
Scribble='https://huggingface.co/lllyasviel/ControlNet-v1-1/resolve/main/control_v11p_sd15_scribble.pth'
Seg='https://huggingface.co/lllyasviel/ControlNet-v1-1/resolve/main/control_v11p_sd15_seg.pth'
ip2p='https://huggingface.co/lllyasviel/ControlNet-v1-1/resolve/main/control_v11e_sd15_ip2p.pth'
Shuffle='https://huggingface.co/lllyasviel/ControlNet-v1-1/resolve/main/control_v11e_sd15_shuffle.pth'
Inpaint='https://huggingface.co/lllyasviel/ControlNet-v1-1/resolve/main/control_v11p_sd15_inpaint.pth'
Softedge='https://huggingface.co/lllyasviel/ControlNet-v1-1/resolve/main/control_v11p_sd15_softedge.pth'
Lineart_Anime='https://huggingface.co/lllyasviel/ControlNet-v1-1/resolve/main/control_v11p_sd15s2_lineart_anime.pth'
Tile='https://huggingface.co/lllyasviel/ControlNet-v1-1/resolve/main/control_v11f1e_sd15_tile.pth'
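# The variable names above deliberately match the entries of the 'Model' dropdown,
# so the chosen entry is resolved to its URL with globals()[Model] further down.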
with capture.capture_output() as cap:
    # %cd /content/gdrive/$mainpth/sd/stable-diffusion-$blasphemy/extensions
    if not os.path.exists("sd-webui-controlnet"):
        !git clone https://github.com/Mikubill/sd-$blasphemy-controlnet.git
        # %cd /content
    else:
        # %cd sd-webui-controlnet
        !git reset --hard
        !git pull
        # %cd /content

mdldir='/content/gdrive/'+mainpth+'/sd/stable-diffusion-'+blasphemy+'/extensions/sd-webui-controlnet/models'
for filename in os.listdir(mdldir):
    if "_sd14v1" in filename:
        renamed = re.sub("_sd14v1", "-fp16", filename)
        os.rename(os.path.join(mdldir, filename), os.path.join(mdldir, renamed))
!wget -q -O CN_models.txt https://github.com/TheLastBen/fast-stable-diffusion/raw/main/AUTOMATIC1111_files/CN_models.txt
!wget -q -O CN_models_v2.txt https://github.com/TheLastBen/fast-stable-diffusion/raw/main/AUTOMATIC1111_files/CN_models_v2.txt
with open("CN_models.txt", 'r') as f:
mdllnk = f.read().splitlines()
with open("CN_models_v2.txt", 'r') as d:
mdllnk_v2 = d.read().splitlines()
!rm CN_models.txt CN_models_v2.txt
with capture.capture_output() as cap:
cfgnames=[os.path.basename(url).split('.')[0]+'.yaml' for url in mdllnk_v2]
# %cd /content/gdrive/$mainpth/sd/stable-diffusion-$blasphemy/extensions/sd-webui-controlnet/models
for name in cfgnames:
run(['cp', 'cldm_v21.yaml', name])
# %cd /content
if Model == "All (21GB)":
for lnk in mdllnk:
download(lnk, mdldir)
clear_output()
elif Model == "T2iadapter_Models":
mdllnk=list(filter(lambda x: 't2i' in x, mdllnk))
for lnk in mdllnk:
download(lnk, mdldir)
clear_output()
elif Model == "None":
pass
clear_output()
else:
download(globals()[Model], mdldir)
clear_output()
Canny='https://huggingface.co/thibaud/controlnet-sd21/resolve/main/control_v11p_sd21_canny.safetensors'
Depth='https://huggingface.co/thibaud/controlnet-sd21/resolve/main/control_v11p_sd21_depth.safetensors'
HED='https://huggingface.co/thibaud/controlnet-sd21/resolve/main/control_v11p_sd21_hed.safetensors'
OpenPose='https://huggingface.co/thibaud/controlnet-sd21/resolve/main/control_v11p_sd21_openposev2.safetensors'
Scribble='https://huggingface.co/thibaud/controlnet-sd21/resolve/main/control_v11p_sd21_scribble.safetensors'
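# Same pattern for the SD 2.1 ControlNet models: the names match the 'v2_Model'
# dropdown and are resolved with globals()[v2_Model] below.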
if v2_Model == "All":
for lnk_v2 in mdllnk_v2:
download(lnk_v2, mdldir)
clear_output()
inf('\u2714 Done','success', '50px')
elif v2_Model == "None":
pass
clear_output()
inf('\u2714 Done','success', '50px')
else:
download(globals()[v2_Model], mdldir)
clear_output()
inf('\u2714 Done','success', '50px')
#@markdown ---
# Commented out IPython magic to ensure Python compatibility.
#@markdown # Start Stable-Diffusion
from IPython.utils import capture
import time
import sys
import fileinput
from pyngrok import ngrok, conf
import re
Use_Cloudflare_Tunnel = False #@param {type:"boolean"}
#@markdown - Offers better Gradio responsiveness
Ngrok_token = "" #@param {type:"string"}
#@markdown - Input your ngrok token if you want to use ngrok server
User = "" #@param {type:"string"}
Password= "" #@param {type:"string"}
#@markdown - Add credentials to your Gradio interface (optional)
auth=f"--gradio-auth {User}:{Password}"
if User =="" or Password=="":
    auth=""
with capture.capture_output() as cap:
    # %cd /content/gdrive/$mainpth/sd/stable-diffusion-$blasphemy/modules/
    !wget -q -O extras.py https://raw.githubusercontent.com/AUTOMATIC1111/stable-diffusion-$blasphemy/master/modules/extras.py
    !wget -q -O sd_models.py https://raw.githubusercontent.com/AUTOMATIC1111/stable-diffusion-$blasphemy/master/modules/sd_models.py
    !wget -q -O /usr/local/lib/python3.10/dist-packages/gradio/blocks.py https://raw.githubusercontent.com/TheLastBen/fast-stable-diffusion/main/AUTOMATIC1111_files/blocks.py
    # %cd /content/gdrive/$mainpth/sd/stable-diffusion-$blasphemy/
    !sed -i 's@ui.create_ui().*@ui.create_ui();shared.demo.queue(concurrency_count=999999,status_update_rate=0.1)@' /content/gdrive/$mainpth/sd/stable-diffusion-$blasphemy/webui.py
    !sed -i 's@possible_sd_paths =.*@possible_sd_paths = [\"/content/gdrive/{mainpth}/sd/stablediffusion\"]@' /content/gdrive/$mainpth/sd/stable-diffusion-$blasphemy/modules/paths.py
    !sed -i 's@\.\.\/@src/@g' /content/gdrive/$mainpth/sd/stable-diffusion-$blasphemy/modules/paths.py
    !sed -i 's@src/generative-models@generative-models@g' /content/gdrive/$mainpth/sd/stable-diffusion-$blasphemy/modules/paths.py
    !sed -i 's@print(\"No module.*@@' /content/gdrive/$mainpth/sd/stablediffusion/ldm/modules/diffusionmodules/model.py
    !sed -i 's@\["sd_model_checkpoint"\]@\["sd_model_checkpoint", "sd_vae", "CLIP_stop_at_last_layers", "inpainting_mask_weight", "initial_noise_multiplier"\]@g' /content/gdrive/$mainpth/sd/stable-diffusion-$blasphemy/modules/shared.py
share=''
if Ngrok_token!="":
    ngrok.kill()
    srv=ngrok.connect(7860, pyngrok_config=conf.PyngrokConfig(auth_token=Ngrok_token) , bind_tls=True).public_url

    for line in fileinput.input('/usr/local/lib/python3.10/dist-packages/gradio/blocks.py', inplace=True):
        if line.strip().startswith('self.server_name ='):
            line = f' self.server_name = "{srv[8:]}"\n'
        if line.strip().startswith('self.protocol = "https"'):
            line = ' self.protocol = "https"\n'
        if line.strip().startswith('if self.local_url.startswith("https") or self.is_colab'):
            line = ''
        if line.strip().startswith('else "http"'):
            line = ''
        sys.stdout.write(line)

elif Use_Cloudflare_Tunnel:
    with capture.capture_output() as cap:
        !pkill cloudflared
        time.sleep(4)
        !nohup cloudflared tunnel --url http://localhost:7860 > /content/srv.txt 2>&1 &
        time.sleep(4)
        with open('/content/srv.txt', "r") as file: text = file.read()
        srv= re.findall(r"https?://(?:\S+?\.)?trycloudflare\.com\S*", text)[0]

        for line in fileinput.input('/usr/local/lib/python3.10/dist-packages/gradio/blocks.py', inplace=True):
            if line.strip().startswith('self.server_name ='):
                line = f' self.server_name = "{srv[8:]}"\n'
            if line.strip().startswith('self.protocol = "https"'):
                line = ' self.protocol = "https"\n'
            if line.strip().startswith('if self.local_url.startswith("https") or self.is_colab'):
                line = ''
            if line.strip().startswith('else "http"'):
                line = ''
            sys.stdout.write(line)
        !rm /content/srv.txt

else:
    share='--share'

ckptdir=''
if os.path.exists('/content/temp_models'):
    ckptdir='--ckpt-dir /content/temp_models'
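# If the checkpoint was downloaded to temp storage, --ckpt-dir points the UI at
# /content/temp_models so it is still found at launch.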
try:
    model  # raises NameError if no model was set in the cells above
    if os.path.isfile(model):
        !python /content/gdrive/$mainpth/sd/stable-diffusion-$blasphemy/webui.py $share --api --disable-safe-unpickle --enable-insecure-extension-access --no-download-sd-model --no-half-vae --ckpt "$model" --xformers $auth --disable-console-progressbars --upcast-sampling $ckptdir
    else:
        !python /content/gdrive/$mainpth/sd/stable-diffusion-$blasphemy/webui.py $share --api --disable-safe-unpickle --enable-insecure-extension-access --no-download-sd-model --no-half-vae --ckpt-dir "$model" --xformers $auth --disable-console-progressbars --upcast-sampling
except:
    !python /content/gdrive/$mainpth/sd/stable-diffusion-$blasphemy/webui.py $share --api --disable-safe-unpickle --enable-insecure-extension-access --no-download-sd-model --no-half-vae --xformers $auth --disable-console-progressbars --upcast-sampling $ckptdir