eliasbe committed
Commit: 5816eb5
Parent(s): 84c4490

Let's test this

Files changed (1)
  1. config.json +37 -21
config.json CHANGED
@@ -1,7 +1,7 @@
 {
   "_name_or_path": "eliasbe/XLMR-ENIS-finetuned-ner",
   "architectures": [
-    "XLMRobertaForTokenClassification"
+    "RobertaForTokenClassification"
   ],
   "attention_probs_dropout_prob": 0.1,
   "bos_token_id": 0,
@@ -11,39 +11,55 @@
   "hidden_act": "gelu",
   "hidden_dropout_prob": 0.1,
   "hidden_size": 768,
-  "id2label": {
+  "id2label": {
     "0": "O",
-    "7": "B-MISC",
-    "8": "I-MISC",
-    "1": "B-PER",
-    "2": "I-PER",
-    "3": "B-ORG",
-    "4": "I-ORG",
-    "5": "B-LOC",
-    "6": "I-LOC"
+    "1": "B-Date",
+    "2": "B-Location",
+    "3": "B-Miscellaneous",
+    "4": "B-Money",
+    "5": "B-Organization",
+    "6": "B-Percent",
+    "7": "B-Person",
+    "8": "B-Time",
+    "9": "I-Date",
+    "10": "I-Location",
+    "11": "I-Miscellaneous",
+    "12": "I-Money",
+    "13": "I-Organization",
+    "14": "I-Percent",
+    "15": "I-Person",
+    "16": "I-Time"
   },
   "initializer_range": 0.02,
   "intermediate_size": 3072,
-  "label2id": {
-    "B-LOC": 5,
-    "B-MISC": 7,
-    "B-ORG": 3,
-    "B-PER": 1,
-    "I-LOC": 6,
-    "I-MISC": 8,
-    "I-ORG": 4,
-    "I-PER": 2,
+  "label2id": {
+    "B-Date": 1,
+    "B-Location": 2,
+    "B-Miscellaneous": 3,
+    "B-Money": 4,
+    "B-Organization": 5,
+    "B-Percent": 6,
+    "B-Person": 7,
+    "B-Time": 8,
+    "I-Date": 9,
+    "I-Location": 10,
+    "I-Miscellaneous": 11,
+    "I-Money": 12,
+    "I-Organization": 13,
+    "I-Percent": 14,
+    "I-Person": 15,
+    "I-Time": 16,
     "O": 0
   },
   "layer_norm_eps": 1e-05,
   "max_position_embeddings": 514,
-  "model_type": "xlm-roberta",
+  "model_type": "roberta",
   "num_attention_heads": 12,
   "num_hidden_layers": 12,
   "pad_token_id": 1,
   "position_embedding_type": "absolute",
   "torch_dtype": "float32",
-  "transformers_version": "4.10.3",
+  "transformers_version": "4.11.2",
   "type_vocab_size": 1,
   "use_cache": true,
   "vocab_size": 50005