philschmid (HF staff) · Update config.json · b9b7d39
{
  "_name_or_path": "distilroberta-base",
  "architectures": [
    "RobertaForTokenClassification"
  ],
  "attention_probs_dropout_prob": 0.1,
  "bos_token_id": 0,
  "eos_token_id": 2,
  "finetuning_task": "ner",
  "gradient_checkpointing": false,
  "hidden_act": "gelu",
  "hidden_dropout_prob": 0.1,
  "hidden_size": 768,
  "id2label": {
    "0": "O",
    "1": "B-PER",
    "2": "I-PER",
    "3": "B-ORG",
    "4": "I-ORG",
    "5": "B-LOC",
    "6": "I-LOC"
  },
  "initializer_range": 0.02,
  "intermediate_size": 3072,
  "label2id": {
    "O": 0,
    "B-PER": 1,
    "I-PER": 2,
    "B-ORG": 3,
    "I-ORG": 4,
    "B-LOC": 5,
    "I-LOC": 6
  },
  "layer_norm_eps": 1e-05,
  "max_position_embeddings": 514,
  "model_type": "roberta",
  "num_attention_heads": 12,
  "num_hidden_layers": 6,
  "pad_token_id": 1,
  "position_embedding_type": "absolute",
  "transformers_version": "4.6.1",
  "type_vocab_size": 1,
  "use_cache": true,
  "vocab_size": 50265
}
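
A minimal sketch of how a config like this is consumed downstream, assuming the fine-tuned checkpoint is published alongside it in the same repository; the repository id below is a placeholder, not the actual model name.

# Minimal usage sketch, assuming the checkpoint that goes with this config
# lives in the same repository. The repo id is a placeholder assumption.
from transformers import AutoConfig, AutoTokenizer, AutoModelForTokenClassification, pipeline

repo_id = "philschmid/REPLACE-WITH-MODEL-ID"  # placeholder: substitute the real repository id

# The config carries the NER label map the pipeline uses to decode predictions.
config = AutoConfig.from_pretrained(repo_id)
print(config.id2label)  # {0: 'O', 1: 'B-PER', 2: 'I-PER', 3: 'B-ORG', 4: 'I-ORG', 5: 'B-LOC', 6: 'I-LOC'}

tokenizer = AutoTokenizer.from_pretrained(repo_id)
model = AutoModelForTokenClassification.from_pretrained(repo_id)

# "ner" is the token-classification pipeline; entity labels come from id2label above.
ner = pipeline("ner", model=model, tokenizer=tokenizer)
print(ner("Hugging Face is based in New York City."))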