jmemon committed
Commit 7c7dfcd
1 Parent(s): 673b5cb

Upload folder using huggingface_hub

Files changed (28)
  1. .DS_Store +0 -0
  2. .gitignore +2 -0
  3. config.py +1 -1
  4. logs/ddpm-paintings-128-finetuned-cifar10/events.out.tfevents.1701696166.coffee.14798.0 +3 -0
  5. logs/ddpm-paintings-128-finetuned-cifar10/events.out.tfevents.1701704512.coffee.17529.0 +3 -0
  6. logs/ddpm-paintings-128-finetuned-cifar10/events.out.tfevents.1701706238.coffee.17871.0 +3 -0
  7. logs/ddpm-paintings-128-finetuned-cifar10/events.out.tfevents.1701706320.coffee.17904.0 +3 -0
  8. logs/ddpm-paintings-128-finetuned-cifar10/events.out.tfevents.1701706355.coffee.17921.0 +3 -0
  9. logs/ddpm-paintings-128-finetuned-cifar10/events.out.tfevents.1701706396.coffee.17940.0 +3 -0
  10. logs/ddpm-paintings-128-finetuned-cifar10/events.out.tfevents.1701706427.coffee.17968.0 +3 -0
  11. logs/ddpm-paintings-128-finetuned-cifar10/events.out.tfevents.1701706694.coffee.18009.0 +3 -0
  12. logs/ddpm-paintings-128-finetuned-cifar10/events.out.tfevents.1701706759.coffee.18061.0 +3 -0
  13. logs/ddpm-paintings-128-finetuned-cifar10/events.out.tfevents.1701709667.coffee.18436.0 +3 -0
  14. logs/ddpm-paintings-128-finetuned-cifar10/events.out.tfevents.1701709707.coffee.18461.0 +3 -0
  15. logs/ddpm-paintings-128-finetuned-cifar10/events.out.tfevents.1701710486.coffee.18593.0 +3 -0
  16. logs/ddpm-paintings-128-finetuned-cifar10/events.out.tfevents.1701710600.coffee.18648.0 +3 -0
  17. logs/ddpm-paintings-128-finetuned-cifar10/events.out.tfevents.1701722303.coffee.24669.0 +3 -0
  18. main.py +9 -1
  19. model/adapter_model.bin +3 -0
  20. model/model_index.json +12 -0
  21. model/scheduler/scheduler_config.json +18 -0
  22. samples/0000.png +0 -0
  23. samples/0001.png +0 -0
  24. samples/0002.png +0 -0
  25. samples/0003.png +0 -0
  26. samples/0004.png +0 -0
  27. samples/0005.png +0 -0
  28. test.py +23 -0
.DS_Store ADDED
Binary file (6.15 kB).
 
.gitignore ADDED
@@ -0,0 +1,2 @@
+ .DS_Store
+ logs/
config.py CHANGED
@@ -13,7 +13,7 @@ class TrainingConfig:
  save_image_epochs = 1
  save_model_epochs = 3
  mixed_precision = 'fp16' # `no` for float32, `fp16` for automatic mixed precision
- output_dir = 'ddpm-paintings-128-finetuned-cifar10' # the model name locally and on the HF Hub
+ output_dir = '.'

  push_to_hub = True # whether to upload the saved model to the HF Hub
  hub_model_id = 'jmemon/ddpm-paintings-128-finetuned-cifar10' # the name of the repository to create on the HF Hub
logs/ddpm-paintings-128-finetuned-cifar10/events.out.tfevents.1701696166.coffee.14798.0 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:b2fbf486914eb9ed63fdbcf637c2874ca608a32f1ec948a4567e37a8e2e412f3
+ size 427942
logs/ddpm-paintings-128-finetuned-cifar10/events.out.tfevents.1701704512.coffee.17529.0 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:7c99374f9a97092f9da24ebc79289a21d0e48e40598c677c8206b1f453c2b050
+ size 88
logs/ddpm-paintings-128-finetuned-cifar10/events.out.tfevents.1701706238.coffee.17871.0 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:ad7c95af04ab4ca658cd6ff68cd19799a5a716bf216360661c17e0ab65703596
+ size 88
logs/ddpm-paintings-128-finetuned-cifar10/events.out.tfevents.1701706320.coffee.17904.0 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:6c1e50dc4abd11c4f69c7d8dfe55847463efea052c836e9bc0ede0a4758ec337
+ size 88
logs/ddpm-paintings-128-finetuned-cifar10/events.out.tfevents.1701706355.coffee.17921.0 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:fdfd823412a65a0fa5ec2dfd1d86372d8c2b9df003c13c23820b51346d1c87f0
+ size 88
logs/ddpm-paintings-128-finetuned-cifar10/events.out.tfevents.1701706396.coffee.17940.0 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:47a8f4b3cf0725aa3e31522a93854744d597385e487cb3437bf1fe4a07832ca0
+ size 88
logs/ddpm-paintings-128-finetuned-cifar10/events.out.tfevents.1701706427.coffee.17968.0 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:227a05ca228c15869d1e63691296fade396090d1d5d5d7f552c0c3708d573484
+ size 88
logs/ddpm-paintings-128-finetuned-cifar10/events.out.tfevents.1701706694.coffee.18009.0 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:d760dca6e153561312573110ddaf00fc6b3baad015096308f866191db4689005
+ size 88
logs/ddpm-paintings-128-finetuned-cifar10/events.out.tfevents.1701706759.coffee.18061.0 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:392a36a3b8dc632533b02446f3eec6e03d682ca19c6aad3bdd0e0307480cf585
+ size 88
logs/ddpm-paintings-128-finetuned-cifar10/events.out.tfevents.1701709667.coffee.18436.0 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:5c07f5a545a94f71490a26297e1a3832bb9d0048d7b49c82ef63a7c4c52f7d09
+ size 88
logs/ddpm-paintings-128-finetuned-cifar10/events.out.tfevents.1701709707.coffee.18461.0 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:9dce021c919f28197d4d63e701a42d8b148b776b40d4792e54f17d198c5673a2
+ size 88
logs/ddpm-paintings-128-finetuned-cifar10/events.out.tfevents.1701710486.coffee.18593.0 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:ace7ada8ac72db2c8bad4bebe20bb9899aab29a9c8fa08591e1798ef0087b485
+ size 88
logs/ddpm-paintings-128-finetuned-cifar10/events.out.tfevents.1701710600.coffee.18648.0 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:d98b7e9040a9c3d71d7737d66be4b6a155b4d1445c965788a4a58883aff70800
+ size 856186
logs/ddpm-paintings-128-finetuned-cifar10/events.out.tfevents.1701722303.coffee.24669.0 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:9be8f366f0e44f95b899a7dec8e402557300425358d8b264cf5739617b6dcd14
+ size 88
main.py CHANGED
@@ -85,11 +85,19 @@ if __name__ == '__main__':
      'google/ddpm-celebahq-256',
      safetensors=True
  ).to('mps')
-
+
  scheduler = DDPMScheduler.from_pretrained(
      'google/ddpm-celebahq-256'
  )

+ """unet=UNet2DModel.from_pretrained(
+     'jmemon/ddpm-paintings-128-finetuned-celebahq'
+ ).to('mps')
+
+ scheduler = DDPMScheduler.from_pretrained(
+     'jmemon/ddpm-paintings-128-finetuned-celebahq'
+ )"""
+
  lora_config = LoraConfig(
      r=8,
      lora_alpha=8,
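The diff only shows the first two LoraConfig arguments. For context, the usual peft pattern for applying such a config to the UNet is sketched below; the r and lora_alpha values come from the diff above, while target_modules is a placeholder assumption and is not part of this commit.

# Illustrative sketch only, not the committed code.
from peft import LoraConfig, get_peft_model

lora_config = LoraConfig(r=8, lora_alpha=8, target_modules=['to_q', 'to_v'])  # target_modules assumed
unet = get_peft_model(unet, lora_config)   # wraps the base UNet with trainable LoRA layers
unet.print_trainable_parameters()          # sanity check: only the adapter weights should be trainable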
model/adapter_model.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:a3d8671f9f0b0f8a1501a2584cbcc9386908b51245b0eabeae39faa31ce520bb
+ size 455281714
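The LoRA weights are uploaded as model/adapter_model.bin rather than merged into the UNet, so they have to be re-attached to the base model at load time. A minimal sketch of one way to do that with peft is below; it assumes an adapter_config.json written by peft's save_pretrained sits next to adapter_model.bin in model/, which this diff does not show.

# Hypothetical sketch: re-attach the uploaded LoRA adapter to the base UNet.
from diffusers import UNet2DModel
from peft import PeftModel

base_unet = UNet2DModel.from_pretrained('google/ddpm-celebahq-256')
unet = PeftModel.from_pretrained(base_unet, 'model')  # assumes model/adapter_config.json exists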
model/model_index.json ADDED
@@ -0,0 +1,12 @@
+ {
+   "_class_name": "DDPMPipeline",
+   "_diffusers_version": "0.24.0",
+   "scheduler": [
+     "diffusers",
+     "DDPMScheduler"
+   ],
+   "unet": [
+     null,
+     null
+   ]
+ }
model/scheduler/scheduler_config.json ADDED
@@ -0,0 +1,18 @@
+ {
+   "_class_name": "DDPMScheduler",
+   "_diffusers_version": "0.24.0",
+   "beta_end": 0.02,
+   "beta_schedule": "linear",
+   "beta_start": 0.0001,
+   "clip_sample": true,
+   "clip_sample_range": 1.0,
+   "dynamic_thresholding_ratio": 0.995,
+   "num_train_timesteps": 1000,
+   "prediction_type": "epsilon",
+   "sample_max_value": 1.0,
+   "steps_offset": 0,
+   "thresholding": false,
+   "timestep_spacing": "leading",
+   "trained_betas": null,
+   "variance_type": "fixed_small"
+ }
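Since the scheduler config is plain JSON under model/scheduler/, it can be instantiated locally without touching the Hub. A minimal sketch, assuming a diffusers version compatible with 0.24.0 and the repository root as the working directory:

from diffusers import DDPMScheduler

# from_pretrained picks up model/scheduler/scheduler_config.json from this commit
scheduler = DDPMScheduler.from_pretrained('model/scheduler')
print(scheduler.config.num_train_timesteps)  # 1000 in this config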
samples/0000.png ADDED
samples/0001.png ADDED
samples/0002.png ADDED
samples/0003.png ADDED
samples/0004.png ADDED
samples/0005.png ADDED
test.py ADDED
@@ -0,0 +1,23 @@
+ from pathlib import Path
+
+ from diffusers import DDPMPipeline, UNet2DModel, DDPMScheduler
+ from diffusers.utils import make_image_grid
+ import torch
+
+
+ if __name__ == '__main__':
+
+     unet=UNet2DModel.from_pretrained(
+         'jmemon/ddpm-paintings-128-finetuned-celebahq'
+     )
+
+     scheduler = DDPMScheduler.from_pretrained(
+         'jmemon/ddpm-paintings-128-finetuned-celebahq'
+     )
+
+     pipeline = DDPMPipeline(unet=unet, scheduler=scheduler).to('mps')
+     pipeline.enable_attention_slicing()
+
+     images = pipeline(batch_size=4, generator=torch.manual_seed(0), num_inference_steps=50).images
+     grid = make_image_grid(images, 2, 2)
+     grid.save(Path(__file__).parent / 'grid.png')
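test.py hard-codes the 'mps' device, so as written it only runs on an Apple Silicon Mac with the PyTorch MPS backend. A small sketch of a device-selection fallback (this is a suggested variation, not part of the commit; the rest of the script would stay the same):

import torch

# Pick whichever backend is available instead of assuming 'mps'.
if torch.backends.mps.is_available():
    device = 'mps'
elif torch.cuda.is_available():
    device = 'cuda'
else:
    device = 'cpu'

pipeline = DDPMPipeline(unet=unet, scheduler=scheduler).to(device)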