{ "_name_or_path": "microsoft/phi-2", "architectures": [ "PhiForSequenceClassification" ], "attention_dropout": 0.0, "auto_map": { "AutoConfig": "microsoft/phi-2--configuration_phi.PhiConfig", "AutoModelForCausalLM": "microsoft/phi-2--modeling_phi.PhiForCausalLM" }, "bos_token_id": 50256, "embd_pdrop": 0.0, "eos_token_id": 50256, "finetuning_task": "text-classification", "hidden_act": "gelu_new", "hidden_size": 2560, "id2label": { "0": "0", "1": "1", "2": "10", "3": "11", "4": "12", "5": "13", "6": "14", "7": "15", "8": "16", "9": "17", "10": "18", "11": "19", "12": "2", "13": "20", "14": "21", "15": "22", "16": "3", "17": "4", "18": "5", "19": "6", "20": "7", "21": "8", "22": "9" }, "initializer_range": 0.02, "intermediate_size": 10240, "label2id": { "0": 0, "1": 1, "10": 2, "11": 3, "12": 4, "13": 5, "14": 6, "15": 7, "16": 8, "17": 9, "18": 10, "19": 11, "2": 12, "20": 13, "21": 14, "22": 15, "3": 16, "4": 17, "5": 18, "6": 19, "7": 20, "8": 21, "9": 22 }, "layer_norm_eps": 1e-05, "max_position_embeddings": 2048, "model_type": "phi", "num_attention_heads": 32, "num_hidden_layers": 32, "num_key_value_heads": 32, "pad_token_id": 50256, "partial_rotary_factor": 0.4, "problem_type": "single_label_classification", "qk_layernorm": false, "resid_pdrop": 0.1, "rope_scaling": null, "rope_theta": 10000.0, "tie_word_embeddings": false, "torch_dtype": "bfloat16", "transformers_version": "4.39.0.dev0", "use_cache": true, "vocab_size": 50295 }