{
"model_name": "FacebookAI/xlm-roberta-large",
"name": "token level gliner",
"max_width": 100,
"hidden_size": 768,
"dropout": 0.1,
"fine_tune": true,
"subtoken_pooling": "first",
"span_mode": "token_level",
"num_steps": 120000,
"train_batch_size": 2,
"eval_every": 500,
"warmup_ratio": 0.1,
"scheduler_type": "cosine",
"gradient_accumulation_steps": 2,
"loss_alpha": -1,
"loss_gamma": 0,
"label_smoothing": 0,
"loss_reduction": "sum",
"lr_encoder": "3e-5",
"lr_others": "3e-5",
"weight_decay_encoder": 0.01,
"weight_decay_other": 0.01,
"root_dir": "gliner_logs",
"train_data": "/home/ltngoc/ngoclt/OriGliner/GLiNER/data/pilener_train.json",
"val_data_dir": "/home/ltngoc/ngoclt/OriGliner/GLiNER/data/viner",
"log_dir": "xlm-roberta",
"prev_path": "/home/ltngoc/ngoclt/OriGliner/GLiNER/xlm-roberta/model_78000",
"save_total_limit": 10,
"size_sup": -1,
"max_types": 12000,
"shuffle_types": true,
"random_drop": true,
"max_neg_type_ratio": 1,
"max_len": 512,
"freeze_token_rep": false
}