# Hugging Face Spaces app (the Space's status banner showed "Runtime error").
# Standard library
import os
import random
import subprocess
import time

# Third-party
import gradio as gr
import requests
import torch
from huggingface_hub import HfApi, login, whoami

# Shared Hub client used by the upload step in duplicate().
api = HfApi()

# Repo kinds the Hub accepts as upload targets (choices for the UI dropdown).
REPO_TYPES = ["model", "dataset", "space"]
def duplicate(source_url_model, source_url_vae, dst_repo, token, new_name, dst_repo_path, repo_type): | |
try: | |
_ = whoami(token) | |
# ^ this will throw if token is invalid | |
# make sure the user fills out the other required paths. | |
if not dst_repo_path[len(dst_repo_path)-1] == '/': | |
raise Exception("Your destination path *must* end with a /") | |
if not source_url_model: | |
raise Exception("You haven't chosen a model file to download!") | |
if not source_url_vae: | |
raise Exception("You haven't chosen a VAE file to download!") | |
if not dst_repo: | |
raise Exception("You haven't chosen a repo to download to") | |
login(token=token) | |
# keep things separate, partly in case people download different files with same name (`download.zip`). Especially, it also allows saving filename to work | |
dir="/home/user/apps/downloads/"+str(int(time.time()))+str(random.getrandbits(8))+"/" | |
subprocess.check_call([r"mkdir","-p",dir]) | |
subprocess.check_call([r"aria2c","-x16","--split=16","-o","source.ckpt",source_url_model,"--dir="+dir]) | |
subprocess.check_call([r"aria2c","-x16","--split=16","-o","vae.ckpt",source_url_vae,"--dir="+dir]) | |
#USE AT YOUR OWN RISK | |
#local path to runwayML SD 1.5 checkpoint (https://huggingface.co/runwayml/stable-diffusion-v1-5) | |
ckpt_15 = dir+"source.ckpt" | |
#local path to StabilityAI finetuned autoencoder (https://huggingface.co/stabilityai/sd-vae-ft-mse) | |
ckpt_vae = dir+"vae.ckpt" | |
#path to save merged model to | |
ckpt_out = dir+"source_vae.ckpt" | |
pl_sd = torch.load(ckpt_15, map_location="cpu") | |
sd = pl_sd["state_dict"] | |
over_sd = torch.load(ckpt_vae,map_location="cpu")["state_dict"] | |
sdk = sd.keys() | |
for key in over_sd.keys(): | |
if "first_stage_model."+key in sdk: | |
sd["first_stage_model."+key] = over_sd[key] | |
print(key,"overwritten") | |
torch.save(pl_sd,ckpt_out) | |
if new_name: | |
dst_repo_path=dst_repo_path | |
else: | |
dst_repo_path=dst_repo_path+"model+vae.ckpt" | |
api.upload_file( | |
path_or_fileobj=dir+"source_vae.ckpt", | |
path_in_repo=dst_repo_path, | |
repo_id=dst_repo, | |
repo_type=repo_type | |
) | |
# now clean up | |
os.remove(dir+files[0]) | |
os.rmdir(dir) | |
match repo_type: | |
case "space": | |
repo_url=f"https://hf.co/spaces/{dst_repo}" | |
case "dataset": | |
repo_url=f"https://hf.co/datasets/{dst_repo}" | |
case "model": | |
repo_url=f"https://hf.co/{dst_repo}" | |
return ( | |
f'Find your repo <a href=\'{repo_url}\' target="_blank" style="text-decoration:underline">here</a>', | |
"sp.jpg", | |
) | |
except Exception as e: | |
blames=["grandma","my boss","your boss","God","you","you. It's *all* your fault.","the pope"] | |
blameweights=(1,1,1,1,4,2,1) | |
excuses=["I blame it all on "+random.choices(blames,weights=blameweights)[0],"It's my fault, sorry.","I did it on purpose.","That file doesn't want to be downloaded.","You nincompoop!"] | |
excusesweights=(12,1,1,2,3) | |
excuse=random.choices(excuses,weights=excusesweights)[0] | |
return ( | |
f""" | |
### Error 😢😢😢 | |
{e} | |
<i>""" + excuse+"</i>", | |
None, | |
) | |
# Web UI: seven inputs feeding duplicate(), two outputs (message + image).
_inputs = [
    gr.Textbox(placeholder="Source URL for model (e.g. civitai.com/api/download/models/4324322534)"),
    gr.Textbox(placeholder="Source URL for VAE (e.g. civitai.com/api/download/models/4324322534)"),
    gr.Textbox(placeholder="Destination repository (e.g. osanseviero/dst)"),
    gr.Textbox(placeholder="Write access token", type="password"),
    gr.Textbox(placeholder="Post-download name of your file, if you want it changed (e.g. stupidmodel_stupidvae.safetensors)"),
    gr.Textbox(placeholder="Destination for your file within your repo. Don't include the filename, end path with a / (e.g. /models/Stable-diffusion/)"),
    gr.Dropdown(choices=REPO_TYPES, value="model"),
]
_outputs = [
    gr.Markdown(label="output"),
    gr.Image(show_label=False),
]
interface = gr.Interface(
    fn=duplicate,
    inputs=_inputs,
    outputs=_outputs,
    title="Merge a VAE with a model!",
    description="Merge a VAE with your model, and export to your Hugging Face repository! You need to specify a write token obtained in https://hf.co/settings/tokens. This Space is a an experimental demo. CKPT format only; I just ripped off someone else's script, I have no idea how this works...",
    article="<p>credit to <a href='https://gist.github.com/Quasimondo/f344659f57dc15bd7892a969bd58ac67'>Quasimodo's script</a></p><p>Find your write token at <a href='https://huggingface.co/settings/tokens' target='_blank'>token settings</a></p>",
    allow_flagging="never",
    live=False,
)
interface.launch(enable_queue=True)