nroggendorff committed on
Commit
3baa40e
1 Parent(s): ee095db

Update train.py

Browse files
Files changed (1) hide show
  1. train.py +3 -6
train.py CHANGED
@@ -8,10 +8,10 @@ from datasets import load_dataset
8
  from tokenizers import ByteLevelBPETokenizer
9
 
10
  MAX_SEQ_LENGTH = 512
11
- BATCH_SIZE = 16
12
  EPOCHS = 4
13
  LEARNING_RATE = 2e-4
14
- FACTOR = 128
15
  VOCAB_SIZE = 32000
16
  INPUT_DATASET = "nroggendorff/oak"
17
  OUTPUT_REPO = "smallama"
@@ -104,10 +104,7 @@ def train_model(model, tokenizer, dataset, push):
104
  weight_decay=DECAY,
105
  gradient_accumulation_steps=GRADIENT_ACCUMULATION_STEPS,
106
  fp16=FP16,
107
- max_grad_norm=CLIPPING,
108
- evaluation_strategy="steps",
109
- eval_steps=10,
110
- logging_steps=10
111
  )
112
 
113
  optimizer = AdamW(model.parameters(), lr=args.learning_rate)
 
8
  from tokenizers import ByteLevelBPETokenizer
9
 
10
  MAX_SEQ_LENGTH = 512
11
+ BATCH_SIZE = 64
12
  EPOCHS = 4
13
  LEARNING_RATE = 2e-4
14
+ FACTOR = 4
15
  VOCAB_SIZE = 32000
16
  INPUT_DATASET = "nroggendorff/oak"
17
  OUTPUT_REPO = "smallama"
 
104
  weight_decay=DECAY,
105
  gradient_accumulation_steps=GRADIENT_ACCUMULATION_STEPS,
106
  fp16=FP16,
107
+ max_grad_norm=CLIPPING
 
 
 
108
  )
109
 
110
  optimizer = AdamW(model.parameters(), lr=args.learning_rate)