jmemon committed
Commit bf9d7e5 · 1 Parent(s): dd0a5af

Files: Epoch -1

ddpm-paintings-128-finetuned-cifar10/logs/ddpm-paintings-128-finetuned-cifar10/events.out.tfevents.1701706759.coffee.18061.0 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:392a36a3b8dc632533b02446f3eec6e03d682ca19c6aad3bdd0e0307480cf585
+ size 88
ddpm-paintings-128-finetuned-cifar10/logs/ddpm-paintings-128-finetuned-cifar10/events.out.tfevents.1701709667.coffee.18436.0 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:5c07f5a545a94f71490a26297e1a3832bb9d0048d7b49c82ef63a7c4c52f7d09
+ size 88
main.py CHANGED
@@ -90,7 +90,13 @@ if __name__ == '__main__':
      'google/ddpm-celebahq-256'
  )
 
- lora_config = LoraConfig(r=8, lora_alpha=8, target_modules=['to_k','to_v'], lora_dropout=0.1, bias='none')
+ lora_config = LoraConfig(
+     r=8,
+     lora_alpha=8,
+     modules_to_save=['unet'],
+     target_modules=['to_k','to_v'],
+     lora_dropout=0.1,
+     bias='none')
  lora_unet = get_peft_model(unet, lora_config)
 
  print_trainable_parameters(lora_unet)
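
The change expands the one-line LoraConfig and adds modules_to_save=['unet']. Below is a minimal sketch of the resulting setup, assuming the UNet is loaded with diffusers' UNet2DModel.from_pretrained; the surrounding training loop and the script's print_trainable_parameters helper are not part of this hunk, so PEFT's built-in summary method stands in for the latter here.

```python
# Minimal sketch of the LoRA setup after this commit (assumptions noted above).
from diffusers import UNet2DModel
from peft import LoraConfig, get_peft_model

# Assumed loading call; the hunk only shows the checkpoint name being passed.
unet = UNet2DModel.from_pretrained('google/ddpm-celebahq-256')

lora_config = LoraConfig(
    r=8,                              # LoRA rank
    lora_alpha=8,                     # scaling factor (alpha / r = 1.0 here)
    modules_to_save=['unet'],         # added in this commit: modules kept fully trainable and saved with the adapter
    target_modules=['to_k', 'to_v'],  # inject LoRA into attention key/value projections
    lora_dropout=0.1,
    bias='none',                      # leave bias terms frozen
)
lora_unet = get_peft_model(unet, lora_config)

# PeftModel's built-in summary, standing in for the script's helper.
lora_unet.print_trainable_parameters()
```

Targeting to_k and to_v restricts the adapters to the attention key/value projections, while modules_to_save lists submodules that PEFT sets as trainable and stores in the checkpoint alongside the LoRA weights.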