muellerzr (HF staff) committed
Commit 13e3de7
1 Parent(s): 18db29a

Add missing comma between optimizer and scheduler, as reported in https://github.com/huggingface/accelerate/issues/2661

code_samples/base/accelerate CHANGED
@@ -1,7 +1,7 @@
 <pre>
 from accelerate import Accelerator
 accelerator = Accelerator()
-train_dataloader, model, optimizer scheduler = accelerator.prepare(
+train_dataloader, model, optimizer, scheduler = accelerator.prepare(
     dataloader, model, optimizer, scheduler
 )
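For context, a minimal self-contained sketch of the corrected call; the toy model, optimizer, scheduler, and dataset below are illustrative placeholders, not taken from the code samples.

# Illustrative sketch only: the toy model/optimizer/scheduler/dataset are placeholders.
import torch
from torch.utils.data import DataLoader, TensorDataset
from accelerate import Accelerator

model = torch.nn.Linear(4, 2)
optimizer = torch.optim.AdamW(model.parameters(), lr=1e-3)
scheduler = torch.optim.lr_scheduler.StepLR(optimizer, step_size=10)
dataloader = DataLoader(TensorDataset(torch.randn(8, 4), torch.randint(0, 2, (8,))), batch_size=4)

accelerator = Accelerator()
# With the missing comma restored, this is a valid four-way unpacking of prepare()'s return value.
train_dataloader, model, optimizer, scheduler = accelerator.prepare(
    dataloader, model, optimizer, scheduler
)

prepare() returns the wrapped objects in the same order they were passed in, which is why all four names are needed on the left-hand side.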
 
code_samples/base/basic CHANGED
@@ -2,7 +2,7 @@
 <pre>
 +from accelerate import Accelerator
 +accelerator = Accelerator()
-+dataloader, model, optimizer scheduler = accelerator.prepare(
++dataloader, model, optimizer, scheduler = accelerator.prepare(
 +  dataloader, model, optimizer, scheduler
 +)
 
code_samples/base/checkpointing CHANGED
@@ -2,7 +2,7 @@
 <pre>
 from accelerate import Accelerator
 accelerator = Accelerator()
-dataloader, model, optimizer scheduler = accelerator.prepare(
+dataloader, model, optimizer, scheduler = accelerator.prepare(
     dataloader, model, optimizer, scheduler
 )
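The checkpointing sample relies on the same prepare() call, since save_state/load_state only capture objects registered with the Accelerator. A hedged sketch, reusing the placeholder objects from the snippet above; "my_checkpoint" is an arbitrary example directory, not part of the sample.

# Sketch only; "my_checkpoint" is an example path, and dataloader/model/optimizer/scheduler
# are the placeholder objects defined in the earlier snippet.
accelerator = Accelerator()
dataloader, model, optimizer, scheduler = accelerator.prepare(
    dataloader, model, optimizer, scheduler
)
accelerator.save_state("my_checkpoint")  # saves model, optimizer, scheduler and RNG states
accelerator.load_state("my_checkpoint")  # restores everything prepare() registered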
 
code_samples/base/experiment_tracking CHANGED
@@ -3,7 +3,7 @@
 from accelerate import Accelerator
 -accelerator = Accelerator()
 +accelerator = Accelerator(log_with="wandb")
-train_dataloader, model, optimizer scheduler = accelerator.prepare(
+train_dataloader, model, optimizer, scheduler = accelerator.prepare(
     dataloader, model, optimizer, scheduler
 )
 +accelerator.init_trackers()
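A sketch of how the tracking sample fits together once the comma is fixed; "my_project", the logged metric, and the final end_training() call are illustrative additions, not part of the sample file, and the other objects are the placeholders from the first snippet.

# Illustrative flow; "my_project" and the logged value are made-up examples.
from accelerate import Accelerator

accelerator = Accelerator(log_with="wandb")
train_dataloader, model, optimizer, scheduler = accelerator.prepare(
    dataloader, model, optimizer, scheduler
)
accelerator.init_trackers("my_project")        # an example project name is passed here
accelerator.log({"train_loss": 0.42}, step=1)  # forwarded to the configured wandb tracker
accelerator.end_training()                     # closes the tracker run
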
code_samples/base/gradient_accumulation CHANGED
@@ -4,7 +4,7 @@ from accelerate import Accelerator
 accelerator = Accelerator(
 +  gradient_accumulation_steps=2,
 )
-dataloader, model, optimizer scheduler = accelerator.prepare(
+dataloader, model, optimizer, scheduler = accelerator.prepare(
     dataloader, model, optimizer, scheduler
 )
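
Finally, a sketch of the training loop the gradient_accumulation sample implies; the loop body is illustrative and assumes the placeholder objects from the first snippet.

# Illustrative loop; batch shapes and the loss come from the toy objects above.
import torch
from accelerate import Accelerator

accelerator = Accelerator(gradient_accumulation_steps=2)
dataloader, model, optimizer, scheduler = accelerator.prepare(
    dataloader, model, optimizer, scheduler
)
for inputs, targets in dataloader:
    with accelerator.accumulate(model):  # gradients sync and the step takes effect every 2nd batch
        loss = torch.nn.functional.cross_entropy(model(inputs), targets)
        accelerator.backward(loss)
        optimizer.step()
        scheduler.step()
        optimizer.zero_grad()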