# sescore_english_webnlg17 / hparams.yaml
# Encoder fine-tuning schedule: keep the encoder frozen for the first 30% of an epoch,
# and keep its embedding layer frozen for the entire run.
nr_frozen_epochs: 0.3
keep_embeddings_frozen: true
# Optimization: AdamW, with a lower learning rate for the encoder than for the
# estimator head, plus layer-wise learning-rate decay across encoder layers.
optimizer: AdamW
encoder_learning_rate: 1.0e-05
learning_rate: 3.1e-05
layerwise_decay: 0.95
# Encoder: RoBERTa-large. Sentence representations are the average of token states
# taken from a learned mix of all encoder layers.
encoder_model: RoBERTa
pretrained_model: roberta-large
pool: avg
layer: mix
dropout: 0.15
batch_size: 4
# COMET-style reference-free regression metric: scores a candidate sentence against
# the source without a human reference.
class_identifier: referenceless_regression_metric
# Training CSV (the filename suggests synthetic perturbations: deletions and
# mask-infilling with XLM/mBART); no validation split is configured.
train_data: data/jan_28_webnlg_num_0.5_del_0.5_mask_0.5_xlm_mbart.csv
validation_data: null
# Estimator head: a feed-forward regressor with hidden layers of 2048 and 1024 units,
# Tanh activations, and no activation on the final output.
hidden_sizes:
- 2048
- 1024
activations: Tanh
final_activation: null
load_weights_from_checkpoint: null
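
# Usage sketch (an assumption, not taken from this repo): if the checkpoint trained
# with these hyperparameters follows the unbabel-comet API that this hparams schema
# comes from, scoring looks roughly like the Python below. The checkpoint path and
# example sentences are placeholders.
#
#   from comet import load_from_checkpoint
#
#   model = load_from_checkpoint("checkpoints/sescore_english_webnlg17.ckpt")
#   # Reference-free input: each sample needs only the source and the candidate.
#   data = [{"src": "The cat sat on the mat.", "mt": "A cat is sitting on the mat."}]
#   output = model.predict(data, batch_size=4, gpus=0)
#   print(output)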