{ "_name_or_path": "studio-ousia/luke-japanese-large", "architectures": [ "LukeForSequenceClassification" ], "attention_probs_dropout_prob": 0.1, "bert_model_name": "models/luke-japanese-large/hf_xlm_roberta", "bos_token_id": 0, "classifier_dropout": null, "cls_entity_prediction": false, "entity_emb_size": 256, "entity_vocab_size": 570505, "eos_token_id": 2, "hidden_act": "gelu", "hidden_dropout_prob": 0.1, "hidden_size": 1024, "id2label": { "0": "NEUTRAL", "1": "\u79c1\u751f\u6d3b\u306e\u5e73\u7a4f\u306e\u4fb5\u5bb3", "2": "\u540d\u8a89\u611f\u60c5\u306e\u4fb5\u5bb3", "3": "\u540d\u8a89\u6a29\u306e\u4fb5\u5bb3" }, "initializer_range": 0.02, "intermediate_size": 4096, "label2id": { "NEUTRAL": 0, "\u540d\u8a89\u611f\u60c5\u306e\u4fb5\u5bb3": 2, "\u540d\u8a89\u6a29\u306e\u4fb5\u5bb3": 3, "\u79c1\u751f\u6d3b\u306e\u5e73\u7a4f\u306e\u4fb5\u5bb3": 1 }, "layer_norm_eps": 1e-05, "max_position_embeddings": 514, "model_type": "luke", "num_attention_heads": 16, "num_hidden_layers": 24, "pad_token_id": 1, "position_embedding_type": "absolute", "problem_type": "single_label_classification", "torch_dtype": "float32", "transformers_version": "4.23.1", "type_vocab_size": 1, "use_cache": true, "use_entity_aware_attention": true, "vocab_size": 32772 }