{
  "auto_map": {
    "AutoConfig": "modeling_llava.LlavaConfig",
    "AutoModel": "modeling_llava.LlavaForCausalLM",
    "AutoModelForCausalLM": "modeling_llava.LlavaForCausalLM"
  },
  "model_type": "mc-llava",
  "ignore_index": -100,
  "image_token_index": 50297,
  "projector_hidden_act": "gelu",
  "projector_tokens_num": 1,
  "text_config": {
    "_name_or_path": "vince62s/phi-2-psy",
    "add_cross_attention": false,
    "architectures": [
      "PhiForCausalLM"
    ],
    "attention_dropout": 0.0,
    "auto_map": {
      "AutoConfig": "vince62s/phi-2-psy--configuration_phi.PhiConfig",
      "AutoModelForCausalLM": "vince62s/phi-2-psy--modeling_phi.PhiForCausalLM"
    },
    "bad_words_ids": null,
    "begin_suppress_tokens": null,
    "bos_token_id": null,
    "chunk_size_feed_forward": 0,
    "cross_attention_hidden_size": null,
    "decoder_start_token_id": null,
    "diversity_penalty": 0.0,
    "do_sample": false,
    "early_stopping": false,
    "embd_pdrop": 0.0,
    "encoder_no_repeat_ngram_size": 0,
    "eos_token_id": null,
    "exponential_decay_length_penalty": null,
    "finetuning_task": null,
    "forced_bos_token_id": null,
    "forced_eos_token_id": null,
    "hidden_act": "gelu_new",
    "hidden_size": 2560,
    "id2label": {
      "0": "LABEL_0",
      "1": "LABEL_1"
    },
    "initializer_range": 0.02,
    "intermediate_size": 10240,
    "is_decoder": false,
    "is_encoder_decoder": false,
    "label2id": {
      "LABEL_0": 0,
      "LABEL_1": 1
    },
    "layer_norm_eps": 1e-05,
    "length_penalty": 1.0,
    "max_length": 20,
    "max_position_embeddings": 2048,
    "min_length": 0,
    "model_type": "phi",
    "no_repeat_ngram_size": 0,
    "num_attention_heads": 32,
    "num_beam_groups": 1,
    "num_beams": 1,
    "num_hidden_layers": 32,
    "num_key_value_heads": 32,
    "num_return_sequences": 1,
    "output_attentions": false,
    "output_hidden_states": false,
    "output_scores": false,
    "pad_token_id": null,
    "partial_rotary_factor": 0.4,
    "prefix": null,
    "problem_type": null,
    "pruned_heads": {},
    "qk_layernorm": false,
    "remove_invalid_values": false,
    "repetition_penalty": 1.0,
    "resid_pdrop": 0.1,
    "return_dict": true,
    "return_dict_in_generate": false,
    "rope_scaling": null,
    "rope_theta": 10000.0,
    "sep_token_id": null,
    "suppress_tokens": null,
    "task_specific_params": null,
    "temperature": 1.0,
    "tf_legacy_loss": false,
    "tie_encoder_decoder": false,
    "tie_word_embeddings": false,
    "tokenizer_class": null,
    "top_k": 50,
    "top_p": 1.0,
    "torch_dtype": "bfloat16",
    "torchscript": false,
    "typical_p": 1.0,
    "use_bfloat16": false,
    "use_cache": true,
    "vocab_size": 51200
  },
  "torch_dtype": "bfloat16",
  "transformers_version": "4.37.2",
  "vision_config": {
    "hidden_size": 1152,
    "image_size": 384,
    "intermediate_size": 4304,
    "model_type": "siglip_vision_model",
    "num_attention_heads": 16,
    "num_hidden_layers": 27,
    "patch_size": 14
  },
  "vision_embed_dim": 1152,
  "vision_tower_name": "google/siglip-so400m-patch14-384",
  "vocab_size": 51200
}
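
A minimal loading sketch, assuming this file is saved as config.json in a model repository alongside modeling_llava.py; the repository id below is a placeholder (the config does not name one), and the "auto_map" entries route the Auto classes to the custom code, which requires trust_remote_code=True:

import torch
from transformers import AutoConfig, AutoModelForCausalLM

repo_id = "your-namespace/mc-llava"  # hypothetical placeholder, not from this config

# "auto_map" resolves AutoConfig / AutoModelForCausalLM to LlavaConfig /
# LlavaForCausalLM defined in modeling_llava.py, hence trust_remote_code=True.
config = AutoConfig.from_pretrained(repo_id, trust_remote_code=True)
model = AutoModelForCausalLM.from_pretrained(
    repo_id,
    torch_dtype=torch.bfloat16,  # matches "torch_dtype": "bfloat16" above
    trust_remote_code=True,
)

print(config.model_type)         # "mc-llava"
print(config.image_token_index)  # 50297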