# africomet-qe-stl / hparams.yaml
# Source: Hugging Face, commit e4b8931 ("Update hparams.yaml" by jwang214, verified)
---
# Hyperparameters for a referenceless (source-only, QE-style) regression metric.
# NOTE(review): key set matches a COMET-style hparams.yaml — confirm the consuming
# framework before renaming or retyping any key.

# Training schedule / optimization
nr_frozen_epochs: 0.3
class_identifier: referenceless_regression_metric
keep_embeddings_frozen: true  # canonical lowercase boolean (was "True")
optimizer: AdamW
encoder_learning_rate: 1.0e-06
learning_rate: 1.5e-05
layerwise_decay: 0.95

# Encoder / pooling configuration
encoder_model: XLM-RoBERTa
pretrained_model: Davlan/afro-xlmr-large
pool: avg
layer: mix
layer_transformation: sparsemax
layer_norm: false  # canonical lowercase boolean (was "False")
loss: mse
dropout: 0.1
batch_size: 4

# Datasets (paths relative to the training working directory — confirm)
train_data:
  - data/1720-da.mlqe-src.csv
validation_data:
  - data/wmt-ende-newstest2021.csv
  - data/wmt-enru-newstest2021.csv
  - data/wmt-zhen-newstest2021.csv

# Regression head
hidden_sizes:
  - 3072
  - 1024
activations: Tanh