experiment_group: training
run_name: roberta-base_2024-04-23T15-11-06
seed: 42
model:
  name: roberta-base
  revision: null
  seed: 42
  base_model: roberta-base
estimator:
  accelerator: gpu
  precision: bf16-true
  deterministic: true
  tf32_mode: high
  convert_to_bettertransformer: false
fit:
  max_epochs: 20
  min_epochs: null
  optimizer_kwargs:
    name: adamw
    lr: 3.0e-05
    init_kwargs:
      fused: true
  scheduler_kwargs:
    name: constant_schedule_with_warmup
    num_warmup_steps: 2000
  log_interval: 100
  enable_progress_bar: true
  limit_train_batches: null
data:
  batch_size: 32
  eval_batch_size: 128
  shuffle: true
  replacement: false
  data_seed: 42
  drop_last: false
  num_workers: 8
  pin_memory: true
  persistent_workers: false
  multiprocessing_context: null
  max_length: 512
  root_path: /home/pl487/coreset-project
  data_path: /home/pl487/coreset-project/data/processed
  dataset: mnli
  dataset_split: train
evaluation: null
loggers:
  tensorboard:
    _target_: energizer.loggers.TensorBoardLogger
    root_dir: ./
    name: tb_logs
    version: null
callbacks:
  timer:
    _target_: energizer.active_learning.callbacks.Timer
  lr_monitor:
    _target_: energizer.callbacks.lr_monitor.LearningRateMonitor
  model_checkpoint:
    _target_: energizer.callbacks.model_checkpoint.ModelCheckpoint
    dirpath: .checkpoints
    stage: train
    frequency: 1:epoch
user:
  id: pl487