{ "_name_or_path": "/hub/midm-7b-awq", "activation_function": "silu", "architectures": [ "MidmLMHeadModel" ], "attn_pdrop": 0.0, "auto_map": { "AutoConfig": "configuration_midm.MidmBitextConfig", "AutoModelForCausalLM": "modeling_midm.MidmLMHeadModel" }, "bos_token_id": 2, "embd_pdrop": 0.0, "eos_token_id": 3, "initializer_range": 0.02, "layer_norm_epsilon": 1e-05, "model_type": "midm-bitext-S", "n_embd": 4096, "n_head": 32, "n_inner": 10880, "n_layer": 32, "n_positions": 8192, "normalization_type": "layernorm", "pad_token_id": 1, "reorder_and_upcast_attn": false, "resid_pdrop": 0.0, "rotary_percentage": 0.5, "scale_attn_by_inverse_layer_idx": false, "scale_attn_weights": true, "scale_qk_by_inverse_layer_idx": true, "summary_activation": null, "summary_first_dropout": 0.1, "summary_proj_to_labels": true, "summary_type": "cls_index", "summary_use_proj": true, "tie_word_embeddings": false, "torch_dtype": "float32", "transformers_version": "4.36.2", "use_absolute_position_embedding": false, "use_cache": true, "use_rotary_position_embedding": true, "vocab_size": 72192 }