hdparmar committed
Commit feba05f
1 Parent(s): 55ddab3

Upload sd_to_diff_hub.ipynb with huggingface_hub

Files changed (1)
  1. sd_to_diff_hub.ipynb +268 -0
sd_to_diff_hub.ipynb ADDED
@@ -0,0 +1,268 @@
+ {
+ "cells": [
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "24817911-0228-4434-bd3f-9ca50e9d8763",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "pwd"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "9c703396-7a83-4bb8-8faf-6f51550c0b4e",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "!(python scripts/convert_sd_to_diffusers.py \\\n",
+ " --checkpoint_path logs/2023-10-22T14-53-45_pokemon/checkpoints/epoch=000202.ckpt \\\n",
+ " --original_config_file configs/stable-diffusion/pokemon.yaml \\\n",
+ " --dump_path tf_202 \\\n",
+ " --use_ema \\\n",
+ ")"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "0f86f81a-21ce-4bac-8e9d-517bd0200b9c",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "from huggingface_hub import notebook_login\n",
+ "\n",
+ "notebook_login()"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "1b2be122-0c7d-4c99-84f0-70955d356721",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "# @title Upload a locally saved pipeline to the hub\n",
+ "\n",
+ "# Code to upload a pipeline saved locally to the hub\n",
+ "from huggingface_hub import HfApi, ModelCard, create_repo, get_full_repo_name\n",
+ "\n",
+ "# Set up repo and upload files\n",
+ "model_name = \"tradfusion-v2\" # @param What you want it called on the hub\n",
+ "local_folder_name = \"tf_202\" # @param Created by the script or one you created via image_pipe.save_pretrained('save_name')\n",
+ "description = \"Fine-tuned Stable Diffusion Model on Irish Traditional Tunes Spectrograms\" # @param\n",
+ "checkpoint_path = \"logs/2023-10-22T14-53-45_pokemon/checkpoints\"\n",
+ "hub_model_id = get_full_repo_name(model_name)\n",
+ "create_repo(hub_model_id)\n",
+ "api = HfApi()\n",
+ "api.upload_folder(\n",
+ " folder_path=f\"{local_folder_name}/feature_extractor\", path_in_repo=\"feature_extractor\", repo_id=hub_model_id\n",
+ ")\n",
+ "api.upload_folder(\n",
+ " folder_path=f\"{local_folder_name}/safety_checker\", path_in_repo=\"safety_checker\", repo_id=hub_model_id\n",
+ ")\n",
+ "api.upload_folder(\n",
+ " folder_path=f\"{local_folder_name}/scheduler\", path_in_repo=\"scheduler\", repo_id=hub_model_id\n",
+ ")\n",
+ "api.upload_folder(\n",
+ " folder_path=f\"{local_folder_name}/text_encoder\", path_in_repo=\"text_encoder\", repo_id=hub_model_id\n",
+ ")\n",
+ "api.upload_folder(\n",
+ " folder_path=f\"{local_folder_name}/tokenizer\", path_in_repo=\"tokenizer\", repo_id=hub_model_id\n",
+ ")\n",
+ "api.upload_folder(\n",
+ " folder_path=f\"{local_folder_name}/unet\", path_in_repo=\"unet\", repo_id=hub_model_id\n",
+ ")\n",
+ "api.upload_folder(\n",
+ " folder_path=f\"{local_folder_name}/vae\", path_in_repo=\"vae\", repo_id=hub_model_id\n",
+ ")\n",
+ "api.upload_file(\n",
+ " path_or_fileobj=f\"{local_folder_name}/model_index.json\",\n",
+ " path_in_repo=\"model_index.json\",\n",
+ " repo_id=hub_model_id,\n",
+ ")\n",
+ "api.upload_file(\n",
+ " path_or_fileobj=f\"{checkpoint_path}/epoch=000202.ckpt\",\n",
+ " path_in_repo=\"checkpoint_epoch202.ckpt\",\n",
+ " repo_id=hub_model_id,\n",
+ ")\n",
+ "# Add a model card (optional but nice!)\n",
+ "content = f\"\"\"\n",
+ "---\n",
+ "license: mit\n",
+ "tags:\n",
+ "- pytorch\n",
+ "- diffusers\n",
+ "- unconditional-image-generation\n",
+ "- text-to-image\n",
+ "- diffusion-models-class\n",
+ "---\n",
+ "\n",
+ "# Example Fine-Tuned Model for Unit 2 of the [Diffusion Models Class 🧨](https://github.com/huggingface/diffusion-models-class)\n",
+ "\n",
+ "{description}\n",
+ "\n",
+ "## Usage\n",
+ "\n",
+ "```python\n",
+ "from diffusers import StableDiffusionPipeline\n",
+ "\n",
+ "pipeline = StableDiffusionPipeline.from_pretrained('{hub_model_id}')\n",
+ "image = pipeline().images[0]\n",
+ "image\n",
+ "```\n",
+ "\"\"\"\n",
+ "\n",
+ "card = ModelCard(content)\n",
+ "card.push_to_hub(hub_model_id)"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "d4efff90-7300-4017-b0af-3ffafdb1eb9e",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "# @title Upload a locally saved pipeline to the hub\n",
+ "\n",
+ "# Code to upload a pipeline saved locally to the hub\n",
+ "from huggingface_hub import HfApi, ModelCard, create_repo, get_full_repo_name\n",
+ "\n",
+ "# Set up repo and upload files\n",
+ "model_name = \"tradfusion-v2-training-files\" # @param What you want it called on the hub\n",
+ "local_folder_name = \"logs/2023-10-22T14-53-45_pokemon\" # @param Created by the script or one you created via image_pipe.save_pretrained('save_name')\n",
+ "#description = \"Fine-tuned Stable Diffusion Model on Irish Traditional Tunes Spectrograms\" # @param\n",
+ "checkpoint_path = \"logs/2023-10-22T14-53-45_pokemon/checkpoints\"\n",
+ "hub_model_id = get_full_repo_name(model_name)\n",
+ "#create_repo(hub_model_id)\n",
+ "api = HfApi()\n",
+ "api.upload_folder(\n",
+ " folder_path=f\"{local_folder_name}/configs\", path_in_repo=\"configs\", repo_id=hub_model_id\n",
+ ")\n",
+ "api.upload_folder(\n",
+ " folder_path=f\"{local_folder_name}/images\", path_in_repo=\"images\", repo_id=hub_model_id\n",
+ ")\n",
+ "api.upload_folder(\n",
+ " folder_path=f\"{local_folder_name}/testtube/version_0/tf\", path_in_repo=\"tf\", repo_id=hub_model_id\n",
+ ")\n",
+ "api.upload_file(\n",
+ " path_or_fileobj=f\"{local_folder_name}/testtube/version_0/metrics.csv\",\n",
+ " path_in_repo=\"metrics.csv\",\n",
+ " repo_id=hub_model_id,\n",
+ ")\n",
+ "api.upload_file(\n",
+ " path_or_fileobj=f\"{checkpoint_path}/epoch=000169.ckpt\",\n",
+ " path_in_repo=\"checkpoint_epoch169.ckpt\",\n",
+ " repo_id=hub_model_id,\n",
+ ")\n",
+ "api.upload_file(\n",
+ " path_or_fileobj=f\"{checkpoint_path}/epoch=000183.ckpt\",\n",
+ " path_in_repo=\"checkpoint_epoch183.ckpt\",\n",
+ " repo_id=hub_model_id,\n",
+ ")\n",
+ "api.upload_file(\n",
+ " path_or_fileobj=f\"{checkpoint_path}/epoch=000189.ckpt\",\n",
+ " path_in_repo=\"checkpoint_epoch189.ckpt\",\n",
+ " repo_id=hub_model_id,\n",
+ ")\n",
+ "api.upload_file(\n",
+ " path_or_fileobj=f\"{checkpoint_path}/epoch=000193.ckpt\",\n",
+ " path_in_repo=\"checkpoint_epoch193.ckpt\",\n",
+ " repo_id=hub_model_id,\n",
+ ")\n",
+ "api.upload_file(\n",
+ " path_or_fileobj=f\"{checkpoint_path}/epoch=000196.ckpt\",\n",
+ " path_in_repo=\"checkpoint_epoch196.ckpt\",\n",
+ " repo_id=hub_model_id,\n",
+ ")\n",
+ "api.upload_file(\n",
+ " path_or_fileobj=f\"{checkpoint_path}/epoch=000199.ckpt\",\n",
+ " path_in_repo=\"checkpoint_epoch199.ckpt\",\n",
+ " repo_id=hub_model_id,\n",
+ ")\n",
+ "api.upload_file(\n",
+ " path_or_fileobj=f\"{checkpoint_path}/epoch=000202.ckpt\",\n",
+ " path_in_repo=\"checkpoint_epoch202.ckpt\",\n",
+ " repo_id=hub_model_id,\n",
+ ")\n",
+ "api.upload_file(\n",
+ " path_or_fileobj=f\"{checkpoint_path}/last.ckpt\",\n",
+ " path_in_repo=\"checkpoint_last.ckpt\",\n",
+ " repo_id=hub_model_id,\n",
+ ")\n",
+ "# Add a model card (optional but nice!)\n",
+ "content = f\"\"\"\n",
+ "---\n",
+ "license: mit\n",
+ "tags:\n",
+ "- pytorch\n",
+ "- diffusers\n",
+ "- text-to-image\n",
+ "---\n",
+ "\n",
+ "## Usage\n",
+ "\n",
+ "Files logged during training: TensorBoard logs, generated images, metrics and various checkpoints\n",
+ "\"\"\"\n",
+ "\n",
+ "#card = ModelCard(content)\n",
+ "#card.push_to_hub(hub_model_id)"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "66cafa2d-ba4b-4086-8694-fb52911e453c",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "# @title Upload a locally saved pipeline to the hub\n",
+ "\n",
+ "# Code to upload a pipeline saved locally to the hub\n",
+ "from huggingface_hub import HfApi, ModelCard, create_repo, get_full_repo_name\n",
+ "\n",
+ "# Set up repo and upload files\n",
+ "model_name = \"tradfusion-v2-training-files\" # @param What you want it called on the hub\n",
+ "local_folder_name = \"wo\" # @param Created by the script or one you created via image_pipe.save_pretrained('save_name')\n",
+ "checkpoint_path = \"logs/2023-10-22T14-53-45_pokemon/checkpoints\"\n",
+ "hub_model_id = get_full_repo_name(model_name)\n",
+ "\n",
+ "api = HfApi()\n",
+ "api.upload_file(\n",
+ " path_or_fileobj=f\"{checkpoint_path}/epoch=000202.ckpt\",\n",
+ " path_in_repo=\"checkpoint_epoch202.ckpt\",\n",
+ " repo_id=hub_model_id,\n",
+ ")\n",
+ "api.upload_file(\n",
+ " path_or_fileobj=f\"{checkpoint_path}/last.ckpt\",\n",
+ " path_in_repo=\"checkpoint_last.ckpt\",\n",
+ " repo_id=hub_model_id,\n",
+ ")\n"
+ ]
+ }
+ ],
+ "metadata": {
+ "kernelspec": {
+ "display_name": "Python 3 (ipykernel)",
+ "language": "python",
+ "name": "python3"
+ },
+ "language_info": {
+ "codemirror_mode": {
+ "name": "ipython",
+ "version": 3
+ },
+ "file_extension": ".py",
+ "mimetype": "text/x-python",
+ "name": "python",
+ "nbconvert_exporter": "python",
+ "pygments_lexer": "ipython3",
+ "version": "3.10.6"
+ }
+ },
+ "nbformat": 4,
+ "nbformat_minor": 5
+ }
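
Note on the pipeline-upload cell in this notebook: the converted dump folder (`tf_202`) already contains every pipeline component folder plus `model_index.json`, so the eight separate `upload_folder`/`upload_file` calls could probably be collapsed into one recursive upload. A minimal sketch under that assumption, reusing the notebook's names; `exist_ok=True` is an addition so the call also works when the repo already exists:

```python
# Sketch: push the whole converted diffusers folder in a single call.
# upload_folder is recursive, so feature_extractor/, unet/, vae/, ...
# and model_index.json all land in the repo root.
from huggingface_hub import HfApi, create_repo, get_full_repo_name

model_name = "tradfusion-v2"
local_folder_name = "tf_202"
hub_model_id = get_full_repo_name(model_name)

create_repo(hub_model_id, exist_ok=True)  # no error if the repo was already created
api = HfApi()
api.upload_folder(folder_path=local_folder_name, repo_id=hub_model_id)
```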
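To sanity-check the conversion before (or after) uploading, the pipeline can be loaded straight from the local dump folder, mirroring the usage snippet in the model card. A minimal sketch; the prompt is only an illustrative placeholder:

```python
# Sketch: load the converted pipeline from the local dump folder and sample once.
# Passing hub_model_id instead of "tf_202" would load the uploaded copy from the Hub.
from diffusers import StableDiffusionPipeline

pipeline = StableDiffusionPipeline.from_pretrained("tf_202")
pipeline = pipeline.to("cuda")  # optional; CPU also works, just slowly

image = pipeline("an Irish traditional tune spectrogram").images[0]  # placeholder prompt
image.save("sample_spectrogram.png")
```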
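For pulling one of the raw training checkpoints back down from the `tradfusion-v2-training-files` repo, `hf_hub_download` fetches a single file by its path in the repo. A minimal sketch; the `hdparmar/` namespace is an assumption taken from the commit author, standing in for whatever `get_full_repo_name` returned for that account:

```python
# Sketch: download one uploaded checkpoint from the training-files repo.
from huggingface_hub import hf_hub_download

ckpt_path = hf_hub_download(
    repo_id="hdparmar/tradfusion-v2-training-files",  # assumed namespace (commit author)
    filename="checkpoint_epoch202.ckpt",              # name used by the upload cell above
)
print(ckpt_path)  # local cache path of the .ckpt file
```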