# Install the dependency first (shell, not Python):
#   pip install transformers
from transformers import Trainer, TrainingArguments
# Load the training and validation data.
# NOTE(review): placeholders — replace `...` with real Dataset objects
# (e.g. from `datasets.load_dataset`) before running.
train_data = ...
validation_data = ...

# Define the model architecture and hyperparameters.
model_name = "bert-base-cased"  # pretrained checkpoint to fine-tune
num_labels = 2                  # binary classification head
# Define the training arguments.
training_args = TrainingArguments(
    output_dir="./output",           # directory to save the trained model
    num_train_epochs=3,              # number of training epochs
    per_device_train_batch_size=32,  # batch size per device for training
    per_device_eval_batch_size=64,   # batch size per device for evaluation
    warmup_steps=500,                # number of LR warmup steps
    weight_decay=0.01,               # L2 regularization coefficient
    learning_rate=3e-5,              # peak learning rate
    adam_epsilon=1e-8,               # epsilon for the Adam optimizer
    max_grad_norm=1.0,               # maximum gradient norm for clipping
    save_steps=1000,                 # save a checkpoint every N steps
    save_total_limit=2,              # keep at most N checkpoints on disk
)
# Initialize the trainer.
# BUG FIX: Trainer has no `model_name`/`num_labels` parameters — it takes an
# instantiated model via `model=`. Build the model from the checkpoint first.
# The datasets also belong here (Trainer constructor), not in `train()`.
from transformers import AutoModelForSequenceClassification

model = AutoModelForSequenceClassification.from_pretrained(
    model_name, num_labels=num_labels
)

trainer = Trainer(
    model=model,
    args=training_args,
    train_dataset=train_data,
    eval_dataset=validation_data,
    # data_collator was referenced but never defined; omitting it lets
    # Trainer fall back to its default collator.
)
# Train the model.
# BUG FIX: Trainer.train() does not accept datasets as positional arguments —
# the train/eval datasets are supplied to the Trainer constructor instead.
trainer.train()
# (page-scrape residue from the original source: "Downloads last month: 0")