wangjin2000 committed
Commit d9e008a · verified · 1 Parent(s): b9700ce

Update app.py

Files changed (1): app.py +4 -4
app.py CHANGED
@@ -91,7 +91,7 @@ def train_function_no_sweeps(base_model_path): #, train_dataset, test_dataset)
         "lr": 5.701568055793089e-04,
         "lr_scheduler_type": "cosine",
         "max_grad_norm": 0.5,
-        "num_train_epochs": 2, #3, jw 20240628
+        "num_train_epochs": 1, #3, jw 20240628
         "per_device_train_batch_size": 12,
         "r": 2,
         "weight_decay": 0.2,
@@ -185,8 +185,7 @@ def train_function_no_sweeps(base_model_path): #, train_dataset, test_dataset)
         seed=8893,
         fp16=True,
         #report_to='wandb'
-        report_to=None,
-        class_weights=class_weights, #jw, 20240628
+        report_to=None
     )
 
     # Initialize Trainer
@@ -197,7 +196,8 @@ def train_function_no_sweeps(base_model_path): #, train_dataset, test_dataset)
         eval_dataset=test_dataset,
         tokenizer=tokenizer,
         data_collator=DataCollatorForTokenClassification(tokenizer=tokenizer),
-        compute_metrics=compute_metrics
+        compute_metrics=compute_metrics,
+        class_weights=class_weights, #add class_weights as input, jw 20240628
     )
 
     # Train and Save Model
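
Note: the commit moves class_weights out of the TrainingArguments block (which does not accept that keyword) and passes it to the trainer's constructor instead. For that keyword to be consumed, the trainer in app.py would have to be a custom Trainer subclass that applies the weights in its loss. The diff does not show that subclass, so the sketch below is only a plausible reading of the change; the class name WeightedTrainer, the token-classification loss, and the -100 padding label are assumptions, not confirmed by the commit.

import torch
from transformers import Trainer

class WeightedTrainer(Trainer):
    """Hypothetical Trainer subclass that accepts per-class weights
    and uses them in a weighted cross-entropy loss."""

    def __init__(self, *args, class_weights=None, **kwargs):
        super().__init__(*args, **kwargs)
        # class_weights: 1-D tensor of shape (num_labels,)
        self.class_weights = class_weights

    def compute_loss(self, model, inputs, return_outputs=False, **kwargs):
        labels = inputs.pop("labels")
        outputs = model(**inputs)
        logits = outputs.logits  # (batch, seq_len, num_labels)
        weights = (
            self.class_weights.to(logits.device)
            if self.class_weights is not None
            else None
        )
        # ignore_index=-100 skips padded/special tokens, the usual
        # convention for token classification with HF collators
        loss_fct = torch.nn.CrossEntropyLoss(weight=weights, ignore_index=-100)
        loss = loss_fct(logits.view(-1, logits.size(-1)), labels.view(-1))
        return (loss, outputs) if return_outputs else loss

With such a subclass in place, the added keyword in the third hunk would be consumed at construction time, e.g. WeightedTrainer(..., compute_metrics=compute_metrics, class_weights=class_weights), which matches the trailing comma added after compute_metrics.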