jmemon committed on
Commit
f86feda
·
1 Parent(s): bf9d7e5

Files: Epoch -1

Browse files
ddpm-paintings-128-finetuned-cifar10/logs/ddpm-paintings-128-finetuned-cifar10/events.out.tfevents.1701709707.coffee.18461.0 ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:9dce021c919f28197d4d63e701a42d8b148b776b40d4792e54f17d198c5673a2
3
+ size 88
ddpm-paintings-128-finetuned-cifar10/logs/ddpm-paintings-128-finetuned-cifar10/events.out.tfevents.1701710486.coffee.18593.0 ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:ace7ada8ac72db2c8bad4bebe20bb9899aab29a9c8fa08591e1798ef0087b485
3
+ size 88
main.py CHANGED
@@ -93,7 +93,7 @@ if __name__ == '__main__':
93
  lora_config = LoraConfig(
94
  r=8,
95
  lora_alpha=8,
96
- modules_to_save=['unet'],
97
  target_modules=['to_k','to_v'],
98
  lora_dropout=0.1,
99
  bias='none')
@@ -133,13 +133,15 @@ if __name__ == '__main__':
133
  token='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'
134
  )
135
 
136
- pipeline.save_pretrained(
 
 
137
  str(Path(config.output_dir) / 'model'),
138
  #push_to_hub=True,
139
  repo_id=config.hub_model_id,
140
  commit_message=f'Model: Epoch {epoch}',
141
  token='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'
142
- )
143
 
144
  exit()
145
  global_step = 0
 
93
  lora_config = LoraConfig(
94
  r=8,
95
  lora_alpha=8,
96
+ #modules_to_save=['model'],
97
  target_modules=['to_k','to_v'],
98
  lora_dropout=0.1,
99
  bias='none')
 
133
  token='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'
134
  )
135
 
136
+ torch.save(pipeline.unet.state_dict(), Path(config.output_dir) / 'model' / 'adapter_model.bin')
137
+
138
+ """pipeline.save_pretrained(
139
  str(Path(config.output_dir) / 'model'),
140
  #push_to_hub=True,
141
  repo_id=config.hub_model_id,
142
  commit_message=f'Model: Epoch {epoch}',
143
  token='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'
144
+ )"""
145
 
146
  exit()
147
  global_step = 0