nouhadziri committed
Commit 3db57d2
1 Parent(s): e4f4d97

hyperparameters added

Files changed (1):
  1. hparams.yaml +42 -0
hparams.yaml ADDED
@@ -0,0 +1,42 @@
+accumulate_grad_batches: 1
+adafactor: false
+adam_epsilon: 1.0e-08
+attention_dropout: null
+cache_dir: null
+config_name: ''
+deterministic: false
+do_eval: false
+do_test: false
+do_train: true
+dropout_rate: null
+eval_batch_size: 16
+eval_dataset_path: ''
+force_reinit: false
+fp16: true
+gpus: -1
+gradient_clip_val: 1.0
+learning_rate: 1.0e-05
+lr_scheduler: linear
+max_epochs: 10
+max_history: 1
+max_seq_length: 0
+min_delta: 0.0
+model_name_or_path: roberta-large
+num_workers: 8
+output_dir: ./checkpoints/roberta-large-faithcritic
+overwrite_output_dir: false
+pad_to_multiple_of: 8
+patience: 5
+predict_dataset_path: null
+save_last: false
+seed: 42
+test_dataset_path: null
+test_task: FaithCritic
+tokenizer_name: null
+train_batch_size: 16
+train_dataset_path: ''
+train_task: FaithCritic
+val_check_interval: 0.5
+warmup_ratio: 0.08
+warmup_steps: 0
+weight_decay: 0.1