# wmt20-comet-qe-da / hparams.yaml
# Training Seed 3
activations: Tanh
batch_size: 2
class_identifier: referenceless_regression_metric  # quality estimation: scores src/mt pairs without a reference
dropout: 0.1
encoder_learning_rate: 1.0e-05
encoder_model: XLM-RoBERTa
hidden_sizes:
- 2048
- 1024
keep_embeddings_frozen: true  # embedding layer is not updated during training
layer: mix  # learned weighted mix of all encoder layers
layerwise_decay: 0.95  # per-layer learning-rate decay for the encoder
learning_rate: 3.0e-05
load_weights_from_checkpoint: null
optimizer: Adam
pool: avg  # average pooling over token embeddings to build the sentence representation
pretrained_model: xlm-roberta-large
train_data: data/scores_1719.csv
validation_data: data/scores_1719.csv
final_activation: Sigmoid
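A minimal usage sketch (not part of the hparams file itself), showing how a checkpoint trained with this configuration is typically loaded and run. It assumes the `unbabel-comet` Python package (version 2.x) is installed and that the checkpoint is published on the Hugging Face Hub as `Unbabel/wmt20-comet-qe-da`; the example sentences are placeholders.

```python
# Sketch: scoring translations with this reference-free (QE) COMET checkpoint.
# Assumes: pip install unbabel-comet  (comet >= 2.0)
from comet import download_model, load_from_checkpoint

# Download the checkpoint this hparams.yaml belongs to and load it.
model_path = download_model("Unbabel/wmt20-comet-qe-da")
model = load_from_checkpoint(model_path)

# class_identifier is referenceless_regression_metric, so each sample
# only needs the source segment and the machine translation.
data = [
    {"src": "Dem Feuer konnte Einhalt geboten werden", "mt": "The fire could be stopped"},
    {"src": "Schulen und Kindergärten wurden eröffnet.", "mt": "Schools and kindergartens opened"},
]

# predict() returns per-segment scores and a corpus-level system score.
output = model.predict(data, batch_size=8, gpus=0)
print(output.scores)        # one quality score per src/mt pair
print(output.system_score)  # average over all segments
```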