cujabes committed on
Commit cb00e25
1 Parent(s): 1b18c14

Delete config.json with huggingface_hub
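
The commit message indicates the file was removed through the huggingface_hub client rather than the web UI. As a minimal sketch (not taken from this commit), such a deletion is typically issued with HfApi.delete_file; the repo id below is copied from the "_name_or_path" field in the deleted config, and authentication is assumed to come from a previously saved token (e.g. `huggingface-cli login`):

    from huggingface_hub import HfApi

    # Sketch: delete a single file from a model repo via huggingface_hub.
    # "Delete config.json with huggingface_hub" is the default commit
    # message delete_file generates for this path.
    api = HfApi()
    api.delete_file(
        path_in_repo="config.json",
        repo_id="cujabes/koelectra-small-v3-discriminator",
        repo_type="model",
        commit_message="Delete config.json with huggingface_hub",
    )
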

Files changed (1)
  1. config.json +0 -92
config.json DELETED
@@ -1,92 +0,0 @@
- {
-   "_name_or_path": "cujabes/koelectra-small-v3-discriminator",
-   "architectures": [
-     "ElectraForTokenClassification"
-   ],
-   "attention_probs_dropout_prob": 0.1,
-   "classifier_dropout": null,
-   "embedding_size": 128,
-   "hidden_act": "gelu",
-   "hidden_dropout_prob": 0.1,
-   "hidden_size": 256,
-   "id2label": {
-     "0": "O",
-     "1": "PER-B",
-     "10": "LOC-I",
-     "11": "CVL-B",
-     "12": "CVL-I",
-     "13": "DAT-B",
-     "14": "DAT-I",
-     "15": "TIM-B",
-     "16": "TIM-I",
-     "17": "NUM-B",
-     "18": "NUM-I",
-     "19": "EVT-B",
-     "2": "PER-I",
-     "20": "EVT-I",
-     "21": "ANM-B",
-     "22": "ANM-I",
-     "23": "PLT-B",
-     "24": "PLT-I",
-     "25": "MAT-B",
-     "26": "MAT-I",
-     "27": "TRM-B",
-     "28": "TRM-I",
-     "3": "FLD-B",
-     "4": "FLD-I",
-     "5": "AFW-B",
-     "6": "AFW-I",
-     "7": "ORG-B",
-     "8": "ORG-I",
-     "9": "LOC-B"
-   },
-   "initializer_range": 0.02,
-   "intermediate_size": 1024,
-   "label2id": {
-     "AFW-B": 5,
-     "AFW-I": 6,
-     "ANM-B": 21,
-     "ANM-I": 22,
-     "CVL-B": 11,
-     "CVL-I": 12,
-     "DAT-B": 13,
-     "DAT-I": 14,
-     "EVT-B": 19,
-     "EVT-I": 20,
-     "FLD-B": 3,
-     "FLD-I": 4,
-     "LOC-B": 9,
-     "LOC-I": 10,
-     "MAT-B": 25,
-     "MAT-I": 26,
-     "NUM-B": 17,
-     "NUM-I": 18,
-     "O": 0,
-     "ORG-B": 7,
-     "ORG-I": 8,
-     "PER-B": 1,
-     "PER-I": 2,
-     "PLT-B": 23,
-     "PLT-I": 24,
-     "TIM-B": 15,
-     "TIM-I": 16,
-     "TRM-B": 27,
-     "TRM-I": 28
-   },
-   "layer_norm_eps": 1e-12,
-   "max_position_embeddings": 512,
-   "model_type": "electra",
-   "num_attention_heads": 4,
-   "num_hidden_layers": 12,
-   "pad_token_id": 0,
-   "position_embedding_type": "absolute",
-   "summary_activation": "gelu",
-   "summary_last_dropout": 0.1,
-   "summary_type": "first",
-   "summary_use_proj": true,
-   "torch_dtype": "float32",
-   "transformers_version": "4.37.2",
-   "type_vocab_size": 2,
-   "use_cache": true,
-   "vocab_size": 80000
- }
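
For reference, the deleted config described a KoELECTRA-small encoder (12 layers, hidden size 256, 80,000-token vocabulary) with a token-classification head over 29 Korean NER labels. A hedged sketch of rebuilding an equivalent configuration locally with transformers' ElectraConfig, should the file need to be recreated (all values are copied from the diff above; the model class mirrors the listed "architectures" entry and its weights are randomly initialised, not the original checkpoint):

    from transformers import ElectraConfig, ElectraForTokenClassification

    # Label set copied from the id2label block of the deleted config.json.
    id2label = {
        0: "O", 1: "PER-B", 2: "PER-I", 3: "FLD-B", 4: "FLD-I",
        5: "AFW-B", 6: "AFW-I", 7: "ORG-B", 8: "ORG-I", 9: "LOC-B",
        10: "LOC-I", 11: "CVL-B", 12: "CVL-I", 13: "DAT-B", 14: "DAT-I",
        15: "TIM-B", 16: "TIM-I", 17: "NUM-B", 18: "NUM-I", 19: "EVT-B",
        20: "EVT-I", 21: "ANM-B", 22: "ANM-I", 23: "PLT-B", 24: "PLT-I",
        25: "MAT-B", 26: "MAT-I", 27: "TRM-B", 28: "TRM-I",
    }

    # Architecture hyperparameters copied from the deleted file.
    config = ElectraConfig(
        vocab_size=80000,
        embedding_size=128,
        hidden_size=256,
        num_hidden_layers=12,
        num_attention_heads=4,
        intermediate_size=1024,
        max_position_embeddings=512,
        id2label=id2label,
        label2id={v: k for k, v in id2label.items()},
    )

    # Instantiating the head named in "architectures" yields a model with
    # the same shape as the original, but freshly initialised weights.
    model = ElectraForTokenClassification(config)
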