{
    "output_path": "/var/home/mkhalilia/results/arabicner/sharedtask2023/wojood_seed1_nested_lr0.00001_clip_no",
    "train_path": "/var/home/mkhalilia/data/wojood/seed1/nested/train.txt",
    "val_path": "/var/home/mkhalilia/data/wojood/seed1/nested/val.txt",
    "test_path": "/var/home/mkhalilia/data/wojood/seed1/nested/test.txt",
    "bert_model": "aubmindlab/bert-base-arabertv2",
    "gpus": [0],
    "log_interval": 100,
    "batch_size": 16,
    "num_workers": 8,
    "data_config": {
        "fn": "arabiner.data.datasets.NestedTagsDataset",
        "kwargs": {
            "max_seq_len": 512
        }
    },
    "trainer_config": {
        "fn": "arabiner.trainers.BertNestedTrainer",
        "kwargs": {
            "max_epochs": 50
        }
    },
    "network_config": {
        "fn": "arabiner.nn.BertNestedTagger",
        "kwargs": {
            "dropout": 0.1,
            "bert_model": "aubmindlab/bert-base-arabertv2",
            "num_labels": [4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4]
        }
    },
    "optimizer": {
        "fn": "torch.optim.AdamW",
        "kwargs": {
            "lr": 1e-05
        }
    },
    "lr_scheduler": {
        "fn": "torch.optim.lr_scheduler.ExponentialLR",
        "kwargs": {
            "gamma": 1
        }
    },
    "loss": {
        "fn": "torch.nn.CrossEntropyLoss",
        "kwargs": {}
    },
    "overwrite": true,
    "seed": 1
}