akaashp15 committed on
Commit
fa29e74
1 Parent(s): e69aba1

Upload config

Files changed (1)
  1. config.json +3 -26
config.json CHANGED
@@ -1,36 +1,13 @@
 {
-  "_name_or_path": "distilbert-base-uncased",
-  "activation": "gelu",
+  "activation": "relu",
   "architectures": [
-    "DistilBertForTokenClassification"
+    "DistilBertForMaskedLM"
   ],
-  "attention_dropout": 0.1,
+  "attention_dropout": 0.4,
   "dim": 768,
   "dropout": 0.1,
   "hidden_dim": 3072,
-  "id2label": {
-    "0": "O",
-    "1": "B-PER",
-    "2": "I-PER",
-    "3": "B-ORG",
-    "4": "I-ORG",
-    "5": "B-LOC",
-    "6": "I-LOC",
-    "7": "B-MISC",
-    "8": "I-MISC"
-  },
   "initializer_range": 0.02,
-  "label2id": {
-    "B-LOC": 5,
-    "B-MISC": 7,
-    "B-ORG": 3,
-    "B-PER": 1,
-    "I-LOC": 6,
-    "I-MISC": 8,
-    "I-ORG": 4,
-    "I-PER": 2,
-    "O": 0
-  },
   "max_position_embeddings": 512,
   "model_type": "distilbert",
   "n_heads": 12,