{ "_name_or_path": "Leo97/KoELECTRA-small-v3-modu-ner", "architectures": [ "ElectraForTokenClassification" ], "attention_probs_dropout_prob": 0.1, "classifier_dropout": null, "embedding_size": 128, "hidden_act": "gelu", "hidden_dropout_prob": 0.1, "hidden_size": 256, "id2label": { "0": "O", "1": "B-PS", "10": "I-OG", "11": "B-LC", "12": "I-LC", "13": "B-CV", "14": "I-CV", "15": "B-DT", "16": "I-DT", "17": "B-TI", "18": "I-TI", "19": "B-QT", "2": "I-PS", "20": "I-QT", "21": "B-EV", "22": "I-EV", "23": "B-AM", "24": "I-AM", "25": "B-PT", "26": "I-PT", "27": "B-MT", "28": "I-MT", "29": "B-TM", "3": "B-FD", "30": "I-TM", "4": "I-FD", "5": "B-TR", "6": "I-TR", "7": "B-AF", "8": "I-AF", "9": "B-OG" }, "initializer_range": 0.02, "intermediate_size": 1024, "label2id": { "B-AF": "7", "B-AM": "23", "B-CV": "13", "B-DT": "15", "B-EV": "21", "B-FD": "3", "B-LC": "11", "B-MT": "27", "B-OG": "9", "B-PS": "1", "B-PT": "25", "B-QT": "19", "B-TI": "17", "B-TM": "29", "B-TR": "5", "I-AF": "8", "I-AM": "24", "I-CV": "14", "I-DT": "16", "I-EV": "22", "I-FD": "4", "I-LC": "12", "I-MT": "28", "I-OG": "10", "I-PS": "2", "I-PT": "26", "I-QT": "20", "I-TI": "18", "I-TM": "30", "I-TR": "6", "O": "0" }, "layer_norm_eps": 1e-12, "max_position_embeddings": 512, "model_type": "electra", "num_attention_heads": 4, "num_hidden_layers": 12, "pad_token_id": 0, "position_embedding_type": "absolute", "summary_activation": "gelu", "summary_last_dropout": 0.1, "summary_type": "first", "summary_use_proj": true, "torch_dtype": "float32", "transformers_version": "4.33.1", "type_vocab_size": 2, "use_cache": true, "vocab_size": 35000 }