Don't save full model for lora
Browse files — scripts/finetune.py (+3 −2)
scripts/finetune.py
CHANGED
@@ -228,11 +228,12 @@ def train(
(old version)
228         logging.info(
229             f"Training Completed!!! Saving pre-trained model to {cfg.output_dir}"
230         )
231 -       # TODO do we need this fix? https://huggingface.co/docs/accelerate/usage_guides/fsdp#saving-and-loading
232 -       trainer.save_model(cfg.output_dir)
233
234         if cfg.adapter == 'lora':
235             trainer.save_pretrained(cfg.output_dir)
236
237
238     if __name__ == "__main__":
|
|
228 |
logging.info(
|
229 |
f"Training Completed!!! Saving pre-trained model to {cfg.output_dir}"
|
230 |
)
|
|
|
|
|
231 |
|
232 |
if cfg.adapter == 'lora':
|
233 |
trainer.save_pretrained(cfg.output_dir)
|
234 |
+
else:
|
235 |
+
# TODO do we need this fix? https://huggingface.co/docs/accelerate/usage_guides/fsdp#saving-and-loading
|
236 |
+
trainer.save_model(cfg.output_dir)
|
237 |
|
238 |
|
239 |
if __name__ == "__main__":
|