winglian committed on
Commit
98b1bce
1 Parent(s): 0048202

pr comments addressed

Browse files
examples/lora-openllama-3b/config.yml CHANGED
@@ -13,7 +13,7 @@ dataset_prepared_path: last_run_prepared
13
  val_set_size: 0.02
14
  adapter: lora
15
  lora_model_dir:
16
- sequence_len: 512
17
  max_packed_sequence_len:
18
  lora_r: 8
19
  lora_alpha: 16
@@ -43,7 +43,7 @@ train_on_inputs: false
43
  group_by_length: false
44
  bf16: false
45
  fp16: true
46
- tf32: true
47
  gradient_checkpointing: true
48
  early_stopping_patience:
49
  resume_from_checkpoint:
 
13
  val_set_size: 0.02
14
  adapter: lora
15
  lora_model_dir:
16
+ sequence_len: 256
17
  max_packed_sequence_len:
18
  lora_r: 8
19
  lora_alpha: 16
 
43
  group_by_length: false
44
  bf16: false
45
  fp16: true
46
+ tf32: false
47
  gradient_checkpointing: true
48
  early_stopping_patience:
49
  resume_from_checkpoint: