{
    "output_path": "/var/home/nhamad/WojoodNER/output-flat",
    "train_path": "/var/home/nhamad/WojoodNER/flat/train.txt",
    "val_path": "/var/home/nhamad/WojoodNER/flat/val.txt",
    "test_path": "/var/home/nhamad/WojoodNER/flat/test.txt",
    "bert_model": "aubmindlab/bert-base-arabertv2",
    "gpus": [
        0
    ],
    "log_interval": 10,
    "batch_size": 8,
    "num_workers": 0,
    "data_config": {
        "fn": "arabiner.data.datasets.DefaultDataset",
        "kwargs": {
            "max_seq_len": 512
        }
    },
    "trainer_config": {
        "fn": "arabiner.trainers.BertTrainer",
        "kwargs": {
            "max_epochs": 50
        }
    },
    "network_config": {
        "fn": "arabiner.nn.BertSeqTagger",
        "kwargs": {
            "dropout": 0.1,
            "bert_model": "aubmindlab/bert-base-arabertv2",
            "num_labels": 108
        }
    },
    "optimizer": {
        "fn": "torch.optim.AdamW",
        "kwargs": {
            "lr": 0.0001
        }
    },
    "lr_scheduler": {
        "fn": "torch.optim.lr_scheduler.ExponentialLR",
        "kwargs": {
            "gamma": 1
        }
    },
    "loss": {
        "fn": "torch.nn.CrossEntropyLoss",
        "kwargs": {}
    },
    "overwrite": false,
    "seed": 1
}
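
A minimal sketch of how the "fn"/"kwargs" entries above (e.g. "torch.nn.CrossEntropyLoss", "arabiner.trainers.BertTrainer") are typically resolved into objects: the dotted path is imported and the class is instantiated with the given keyword arguments. The helper name `load_object` is an assumption for illustration, not an ArabiNER API.

    # Hypothetical helper, assuming fn is a dotted import path and kwargs a dict
    # of constructor arguments, as in the config above.
    import importlib

    def load_object(fn: str, kwargs: dict):
        """Import the dotted path `fn` and instantiate it with `kwargs`."""
        module_path, class_name = fn.rsplit(".", 1)
        cls = getattr(importlib.import_module(module_path), class_name)
        return cls(**kwargs)

    # Example: build the loss from the "loss" section of this config.
    loss = load_object("torch.nn.CrossEntropyLoss", {})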