Tianduo committed on
Commit
709cc08
1 Parent(s): 5a97a6e

Upload HFRoBERTaGen

Browse files
Files changed (1) hide show
  1. config.json +3 -0
config.json CHANGED
@@ -10,8 +10,11 @@
10
  "decoder_heads": 8,
11
  "decoder_layers": 2,
12
  "dropout": 0.1,
 
13
  "label_smooth": 0.05,
14
  "lr": 1e-05,
 
 
15
  "torch_dtype": "float32",
16
  "transformers_version": "4.26.1",
17
  "use_adapter": true
 
10
  "decoder_heads": 8,
11
  "decoder_layers": 2,
12
  "dropout": 0.1,
13
+ "eval_bz": 100,
14
  "label_smooth": 0.05,
15
  "lr": 1e-05,
16
+ "max_in_len": 128,
17
+ "max_out_len": 64,
18
  "torch_dtype": "float32",
19
  "transformers_version": "4.26.1",
20
  "use_adapter": true