Add missing config.json file for ReLiK model
Description:
This pull request adds the missing config.json file to the sapienzanlp/relik-entity-linking-base repository. The absence of this file was causing errors when attempting to load the model.
Changes Made:
Added a config.json file with the necessary configuration parameters for the ReLiK model. The num_labels value is set to 2 and may need to be adjusted for the specific task.
Reason for Changes:
The missing config.json file was preventing users from loading the model, resulting in errors.
Adding this file ensures that the model can be loaded correctly and used for entity linking tasks.
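For reference, this is roughly how the checkpoint should load once config.json is present. This is a minimal sketch using the standard transformers API; the ReLiK library itself may wrap this checkpoint with its own loader, and the exact loading path is an assumption, not part of this PR.

# Minimal sketch: verify the model loads once config.json is present.
# Assumes loading through the standard transformers API; the ReLiK library
# may use its own loader on top of this checkpoint.
from transformers import AutoConfig, AutoModelForTokenClassification

repo_id = "sapienzanlp/relik-entity-linking-base"

config = AutoConfig.from_pretrained(repo_id)   # previously failed because config.json was missing
model = AutoModelForTokenClassification.from_pretrained(repo_id, config=config)

print(config.model_type, config.num_labels)    # expected: "bert", 2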
- config.json +24 -0
config.json
ADDED
@@ -0,0 +1,24 @@
+{
+  "architectures": ["BertForTokenClassification"],
+  "attention_probs_dropout_prob": 0.1,
+  "classifier_dropout": null,
+  "gradient_checkpointing": false,
+  "hidden_act": "gelu",
+  "hidden_dropout_prob": 0.1,
+  "hidden_size": 768,
+  "initializer_range": 0.02,
+  "intermediate_size": 3072,
+  "layer_norm_eps": 1e-12,
+  "max_position_embeddings": 512,
+  "model_type": "bert",
+  "num_attention_heads": 12,
+  "num_hidden_layers": 12,
+  "num_labels": 2,
+  "output_attentions": false,
+  "output_hidden_states": false,
+  "pad_token_id": 0,
+  "position_embedding_type": "absolute",
+  "transformers_version": "4.6.1",
+  "use_cache": true,
+  "vocab_size": 30522
+}
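As a quick local sanity check (illustrative only, not part of the PR), the added file can be validated by parsing it and building a BertConfig from the resulting dict:

# Quick local check of the added file (illustrative, not part of the PR).
import json
from transformers import BertConfig

with open("config.json") as f:
    config_dict = json.load(f)   # fails if the JSON is malformed

config = BertConfig.from_dict(config_dict)
print(config.hidden_size, config.num_hidden_layers, config.vocab_size)  # 768 12 30522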