{ "output_path": "/content/output/", "train_path": "/content/ArabicNER/ANERCorp/train.txt", "test_path": "/content/ArabicNER/ANERCorp/test.txt", "val_path": "/content/ArabicNER/ANERCorp/val.txt", "seed": 1, "max_epochs": 50, "batch_size": 8, "bert_model": "UBC-NLP/ARBERTv2", "num_workers": 1, "gpus": [ 0 ], "learning_rate": 1e-05, "max_seq_len": 512, "overwrite": "True", "log_interval": 10, "network_config": { "fn": "arabiner.nn.BertSeqTagger", "kwargs": { "dropout": 0.1, "bert_model": "aubmindlab/bert-base-arabertv2", "num_labels": 9 } }, "optimizer_config": { "fn": "torch.optim.Adam", "kwargs": { "lr": 1e-05 } }, "trainer_config": { "fn": "arabiner.trainers.BertTrainer", "kwargs": { "max_epochs": 50 } }, "data_config": { "fn": "arabiner.data.datasets.DefaultDataset", "kwargs": { "max_seq_len": 512, "bert_model": "aubmindlab/bert-base-arabertv2" } }, "optimizer": { "fn": "torch.optim.AdamW", "kwargs": { "lr": 1e-05 } }, "lr_scheduler": { "fn": "torch.optim.lr_scheduler.ExponentialLR", "kwargs": { "gamma": 1 } }, "loss": { "fn": "torch.nn.CrossEntropyLoss", "kwargs": {} } }