import requests
import os
import gradio as gr
from huggingface_hub import HfApi, update_repo_visibility
from slugify import slugify
import re
import uuid
from typing import Optional
import json
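# CivitAI usernames allowed to bypass the NSFW screening in check_nsfw below.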
TRUSTED_UPLOADERS = ["KappaNeuro", "CiroN2022"]
def get_json_data(url):
    api_url = f"https://civitai.com/api/v1/models/{url.split('/')[4]}"
    try:
        response = requests.get(api_url)
        response.raise_for_status()
        return response.json()
    except requests.exceptions.RequestException as e:
        print(f"Error fetching JSON data: {e}")
        return None

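# Uploaders in TRUSTED_UPLOADERS bypass the check; otherwise reject a model that is
# flagged NSFW at the model level or has any gallery image rated other than "None".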
def check_nsfw(profile: Optional[gr.OAuthProfile], json_data):
    if profile is not None and profile.preferred_username in TRUSTED_UPLOADERS:
        return True
    if json_data["nsfw"]:
        return False
    for model_version in json_data["modelVersions"]:
        for image in model_version["images"]:
            if image["nsfw"] != "None":
                return False
    return True

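# Only SDXL LoRAs are supported: find the first SDXL 1.0/0.9 model version with a
# primary file and queue its weights plus the gallery images (and prompts) for download.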
def extract_info(json_data):
    if json_data["type"] == "LORA":
        for model_version in json_data["modelVersions"]:
            if model_version["baseModel"] in ["SDXL 1.0", "SDXL 0.9"]:
                for file in model_version["files"]:
                    if file["primary"]:
                        # Start by adding the primary file to the list
                        urls_to_download = [{"url": file["downloadUrl"], "filename": file["name"], "type": "weightName"}]
                        # Then append all image URLs to the list
                        for image in model_version["images"]:
                            urls_to_download.append({
                                "url": image["url"],
                                "filename": os.path.basename(image["url"]),
                                "type": "imageName",
                                # some gallery images carry no generation metadata
                                "prompt": image["meta"]["prompt"] if image["meta"] and "prompt" in image["meta"] else ""
                            })
                        info = {
                            "urls_to_download": urls_to_download,
                            "id": model_version["id"],
                            "modelId": model_version["modelId"],
                            "name": json_data["name"],
                            "description": json_data["description"],
                            "trainedWords": model_version["trainedWords"],
                            "creator": json_data["creator"]["username"],
                            "tags": json_data["tags"]
                        }
                        return info
    return None

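# Download every queued file into the working folder, keeping the image prompts
# (with any <...> tags stripped) so they can be reused as widget prompts in the model card.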
def download_files(info, folder="."):
    downloaded_files = {
        "imageName": [],
        "imagePrompt": [],
        "weightName": []
    }
    for item in info["urls_to_download"]:
        download_file(item["url"], item["filename"], folder)
        downloaded_files[item["type"]].append(item["filename"])
        if item["type"] == "imageName":
            prompt_clean = re.sub(r'<.*?>', '', item["prompt"])
            downloaded_files["imagePrompt"].append(prompt_clean)
    return downloaded_files

def download_file(url, filename, folder="."):
    try:
        response = requests.get(url)
        response.raise_for_status()
        with open(f"{folder}/{filename}", 'wb') as f:
            f.write(response.content)
    except requests.exceptions.RequestException as e:
        raise gr.Error(f"Error downloading file: {e}")

def process_url(url, profile, do_download=True, folder="."):
    json_data = get_json_data(url)
    if json_data:
        if check_nsfw(profile, json_data):
            info = extract_info(json_data)
            if info:
                if do_download:
                    downloaded_files = download_files(info, folder)
                else:
                    downloaded_files = []
                return info, downloaded_files
            else:
                raise gr.Error("Only SDXL LoRAs are supported for now")
        else:
            raise gr.Error("This model has content tagged as unsafe by CivitAI")
    else:
        raise gr.Error("Something went wrong in fetching CivitAI API")

def create_readme(info, downloaded_files, is_author=True, folder="."):
    readme_content = ""
    # link to the model page (modelId), not the specific version id
    original_url = f"https://civitai.com/models/{info['modelId']}"
    non_author_disclaimer = f'This model was originally uploaded on [CivitAI]({original_url}), by [{info["creator"]}](https://civitai.com/user/{info["creator"]}/models). The information below was provided by the author on CivitAI:'
    default_tags = ["text-to-image", "stable-diffusion", "lora", "diffusers"]
    civit_tags = [t for t in info["tags"] if t not in default_tags]
    widget_prompts = "\n- text: ".join(downloaded_files["imagePrompt"])
    tags = default_tags + civit_tags
    unpacked_tags = "\n- ".join(tags)
    content = f"""---
license: other
tags:
- {unpacked_tags}
base_model: stabilityai/stable-diffusion-xl-base-1.0
instance_prompt: {info['trainedWords'][0] if 'trainedWords' in info and len(info['trainedWords']) > 0 else ''}
widget:
- text: {widget_prompts}
---
# {info["name"]}
{non_author_disclaimer if not is_author else ''}
![Image 0]({downloaded_files["imageName"][0]})
> {downloaded_files["imagePrompt"][0]}
{info["description"]}
"""
    for index, (image, prompt) in enumerate(zip(downloaded_files["imageName"], downloaded_files["imagePrompt"])):
        if index == 1:
            content += f"## Image examples for the model:\n![Image {index}]({image})\n> {prompt}\n"
        elif index > 1:
            content += f"\n![Image {index}]({image})\n> {prompt}\n"
    readme_content += content + "\n"
    print(readme_content)
    with open(f"{folder}/README.md", "w") as file:
        file.write(readme_content)

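# Query CivitAI's internal tRPC endpoint for a creator's profile, using a session
# cookie supplied through the COOKIE_INFO environment variable.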
def get_creator(username):
    url = f"https://civitai.com/api/trpc/user.getCreator?input=%7B%22json%22%3A%7B%22username%22%3A%22{username}%22%2C%22authed%22%3Atrue%7D%7D"
    headers = {
        "authority": "civitai.com",
        "accept": "*/*",
        "accept-language": "en-BR,en;q=0.9,pt-BR;q=0.8,pt;q=0.7,es-ES;q=0.6,es;q=0.5,de-LI;q=0.4,de;q=0.3,en-GB;q=0.2,en-US;q=0.1,sk;q=0.1",
        "content-type": "application/json",
        "cookie": f'{os.environ["COOKIE_INFO"]}',
        "if-modified-since": "Tue, 22 Aug 2023 07:18:52 GMT",
        "referer": f"https://civitai.com/user/{username}/models",
        "sec-ch-ua": "\"Not.A/Brand\";v=\"8\", \"Chromium\";v=\"114\", \"Google Chrome\";v=\"114\"",
        "sec-ch-ua-mobile": "?0",
        "sec-ch-ua-platform": "macOS",
        "sec-fetch-dest": "empty",
        "sec-fetch-mode": "cors",
        "sec-fetch-site": "same-origin",
        "user-agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/114.0.0.0 Safari/537.36"
    }
    response = requests.get(url, headers=headers)
    return response.json()

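# Return the username from the first Hugging Face link on the creator's CivitAI profile, if any.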
def extract_huggingface_username(username):
    data = get_creator(username)
    links = data.get('result', {}).get('data', {}).get('json', {}).get('links', [])
    for link in links:
        url = link.get('url', '')
        if url.startswith('https://huggingface.co/') or url.startswith('https://www.huggingface.co/'):
            username = url.split('/')[-1]
            return username
    return None

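# Ownership check: the Hugging Face username listed on the model creator's CivitAI profile
# must match the account logged into this Space (multimodalart, as the Space owner, is exempt).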
def check_civit_link(profile: Optional[gr.OAuthProfile], url):
    info, _ = process_url(url, profile, do_download=False)
    hf_username = extract_huggingface_username(info['creator'])
    if profile.preferred_username == "multimodalart":
        return '', gr.update(interactive=True), gr.update(visible=False), gr.update(visible=True)
    if not hf_username:
        no_username_text = f'If you are {info["creator"]} on CivitAI, hi! Your CivitAI profile does not seem to have information about your Hugging Face account. Please visit <a href="https://civitai.com/user/account" target="_blank">https://civitai.com/user/account</a> and include it there<br><img width="60%" src="https://i.imgur.com/hCbo9uL.png" /><br>(if you are not {info["creator"]}, you cannot submit their model at this time)'
        return no_username_text, gr.update(interactive=False), gr.update(visible=True), gr.update(visible=False)
    if profile.preferred_username != hf_username:
        unmatched_username_text = '<h4>Oops, the Hugging Face account in your CivitAI profile seems to be different from the one you are using here. Please visit <a href="https://civitai.com/user/account">https://civitai.com/user/account</a> and update it there<br><img src="https://i.imgur.com/hCbo9uL.png" /></h4>'
        return unmatched_username_text, gr.update(interactive=False), gr.update(visible=True), gr.update(visible=False)
    else:
        return '', gr.update(interactive=True), gr.update(visible=False), gr.update(visible=True)

def swap_fill(profile: Optional[gr.OAuthProfile]):
    if profile is None:
        return gr.update(visible=True), gr.update(visible=False)
    else:
        return gr.update(visible=False), gr.update(visible=True)

def show_output():
    return gr.update(visible=True)

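# Main action: download the model into a per-request folder, write the README, upload
# everything to a private staging repo under the Space's own account, make it public,
# then hand it over to the requesting user through the multimodalart/transfer_repos Space
# (retrying with a numeric suffix on name collisions).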
def upload_civit_to_hf(profile: Optional[gr.OAuthProfile], url, progress=gr.Progress(track_tqdm=True)):
    if profile is None or not profile.name:
        raise gr.Error("Are you sure you are logged in?")
    folder = str(uuid.uuid4())
    os.makedirs(folder, exist_ok=False)
    info, downloaded_files = process_url(url, profile, folder=folder)
    create_readme(info, downloaded_files, folder=folder)
    try:
        api = HfApi(token=os.environ["HUGGING_FACE_HUB_TOKEN"])
        username = api.whoami()["name"]
        slug_name = slugify(info["name"])
        repo_id = f"{username}/{profile.preferred_username}-{slug_name}"
        api.create_repo(repo_id=repo_id, private=True, exist_ok=True)
        api.upload_folder(
            folder_path=folder,
            repo_id=repo_id,
            repo_type="model",
        )
        api.update_repo_visibility(repo_id=repo_id, private=False)
    except Exception as e:
        raise gr.Error(f"Something went wrong uploading the model: {e}")
    transfer_repos = gr.load("multimodalart/transfer_repos", hf_token=os.environ["HUGGING_FACE_HUB_TOKEN"], src="spaces")
    user_repo_id = f"{profile.preferred_username}/{slug_name}"
    response_code = transfer_repos(repo_id, user_repo_id)
    i = 0
    while response_code != "200":
        message = None
        if response_code == "409":
            if i < 3:
                user_repo_id = f"{profile.preferred_username}/{slug_name}-{i}"
                response_code = transfer_repos(repo_id, user_repo_id)
                i += 1
            else:
                message = "It seems this model has already been uploaded to your account."
        elif response_code == "404":
            message = "Something went wrong with the model upload. Try again."
        else:
            message = f"Unexpected response code: {response_code}."
        if message:
            api.delete_repo(repo_id=repo_id, repo_type="model")
            raise gr.Error(message)
    return f'''# Model uploaded to 🤗!
## Access it here [{user_repo_id}](https://huggingface.co/{user_repo_id}) '''

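# CSS: relabel the login button ("Authorize this app before uploading your model") and
# grey out / disable the upload form until the user has logged in.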
css = '''
#login {
    font-size: 0px;
    width: 100% !important;
    margin: 0 auto;
}
#logout {
    width: 100% !important;
    margin-top: 4em;
}
#login:after {
    content: 'Authorize this app before uploading your model';
    visibility: visible;
    display: block;
    font-size: var(--button-large-text-size);
}
#login:disabled {
    font-size: var(--button-large-text-size);
}
#login:disabled:after {
    content: '';
}
#disabled_upload {
    opacity: 0.5;
    pointer-events: none;
}
'''

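# UI: two copies of the upload form, a disabled one shown while logged out and an
# enabled one shown after login, swapped by swap_fill when the page loads.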
with gr.Blocks(css=css) as demo:
    gr.Markdown('''# Upload your CivitAI SDXL LoRA to Hugging Face 🤗
Get diffusers compatibility, a free GPU-based Inference Widget, and the possibility to submit to the [LoRA the Explorer](https://huggingface.co/spaces/multimodalart/LoraTheExplorer) space
''')
    gr.LoginButton(elem_id="login")
    with gr.Column(elem_id="disabled_upload") as disabled_area:
        with gr.Row():
            submit_source_civit = gr.Textbox(
                label="CivitAI model URL",
                info="URL of the CivitAI model, for now only SDXL LoRAs are supported",
            )
            submit_button_civit = gr.Button("Upload model to Hugging Face and submit", interactive=False)
    with gr.Column(visible=False) as enabled_area:
        with gr.Row():
            submit_source_civit = gr.Textbox(
                label="CivitAI model URL",
                info="URL of the CivitAI model, for now only SDXL LoRAs are supported",
            )
        instructions = gr.HTML("")
        try_again_button = gr.Button("I have added my HF profile to my account (it may take 1 minute to refresh)", visible=False)
        submit_button_civit = gr.Button("Upload model to Hugging Face", interactive=False)
        output = gr.Markdown(label="Output progress", visible=False)
    demo.load(fn=swap_fill, outputs=[disabled_area, enabled_area])
    submit_source_civit.change(fn=check_civit_link, inputs=[submit_source_civit], outputs=[instructions, submit_button_civit, try_again_button, submit_button_civit])
    try_again_button.click(fn=check_civit_link, inputs=[submit_source_civit], outputs=[instructions, submit_button_civit, try_again_button, submit_button_civit])
    submit_button_civit.click(fn=show_output, inputs=[], outputs=[output]).then(fn=upload_civit_to_hf, inputs=[submit_source_civit], outputs=[output])
    gr.LogoutButton(elem_id="logout")

demo.queue()
demo.launch()