{
"metadata": {
"total_size": 17075361792
},
"weight_map": {
"model/embed_tokens/embedding": "flax_model-00001-of-00004.msgpack",
"model/layers/0/input_layernorm/weight": "flax_model-00001-of-00004.msgpack",
"model/layers/0/mlp/down_proj/kernel": "flax_model-00001-of-00004.msgpack",
"model/layers/0/mlp/gate_proj/kernel": "flax_model-00001-of-00004.msgpack",
"model/layers/0/mlp/up_proj/kernel": "flax_model-00001-of-00004.msgpack",
"model/layers/0/post_attention_layernorm/weight": "flax_model-00001-of-00004.msgpack",
"model/layers/0/self_attn/k_proj/kernel": "flax_model-00001-of-00004.msgpack",
"model/layers/0/self_attn/o_proj/kernel": "flax_model-00001-of-00004.msgpack",
"model/layers/0/self_attn/q_proj/kernel": "flax_model-00001-of-00004.msgpack",
"model/layers/0/self_attn/v_proj/kernel": "flax_model-00001-of-00004.msgpack",
"model/layers/1/input_layernorm/weight": "flax_model-00001-of-00004.msgpack",
"model/layers/1/mlp/down_proj/kernel": "flax_model-00001-of-00004.msgpack",
"model/layers/1/mlp/gate_proj/kernel": "flax_model-00001-of-00004.msgpack",
"model/layers/1/mlp/up_proj/kernel": "flax_model-00001-of-00004.msgpack",
"model/layers/1/post_attention_layernorm/weight": "flax_model-00001-of-00004.msgpack",
"model/layers/1/self_attn/k_proj/kernel": "flax_model-00001-of-00004.msgpack",
"model/layers/1/self_attn/o_proj/kernel": "flax_model-00001-of-00004.msgpack",
"model/layers/1/self_attn/q_proj/kernel": "flax_model-00001-of-00004.msgpack",
"model/layers/1/self_attn/v_proj/kernel": "flax_model-00001-of-00004.msgpack",
"model/layers/10/input_layernorm/weight": "flax_model-00001-of-00004.msgpack",
"model/layers/10/mlp/down_proj/kernel": "flax_model-00001-of-00004.msgpack",
"model/layers/10/mlp/gate_proj/kernel": "flax_model-00001-of-00004.msgpack",
"model/layers/10/mlp/up_proj/kernel": "flax_model-00001-of-00004.msgpack",
"model/layers/10/post_attention_layernorm/weight": "flax_model-00001-of-00004.msgpack",
"model/layers/10/self_attn/k_proj/kernel": "flax_model-00001-of-00004.msgpack",
"model/layers/10/self_attn/o_proj/kernel": "flax_model-00001-of-00004.msgpack",
"model/layers/10/self_attn/q_proj/kernel": "flax_model-00001-of-00004.msgpack",
"model/layers/10/self_attn/v_proj/kernel": "flax_model-00001-of-00004.msgpack",
"model/layers/11/input_layernorm/weight": "flax_model-00001-of-00004.msgpack",
"model/layers/11/mlp/down_proj/kernel": "flax_model-00001-of-00004.msgpack",
"model/layers/11/mlp/gate_proj/kernel": "flax_model-00001-of-00004.msgpack",
"model/layers/11/mlp/up_proj/kernel": "flax_model-00001-of-00004.msgpack",
"model/layers/11/post_attention_layernorm/weight": "flax_model-00001-of-00004.msgpack",
"model/layers/11/self_attn/k_proj/kernel": "flax_model-00001-of-00004.msgpack",
"model/layers/11/self_attn/o_proj/kernel": "flax_model-00001-of-00004.msgpack",
"model/layers/11/self_attn/q_proj/kernel": "flax_model-00001-of-00004.msgpack",
"model/layers/11/self_attn/v_proj/kernel": "flax_model-00001-of-00004.msgpack",
"model/layers/12/input_layernorm/weight": "flax_model-00001-of-00004.msgpack",
"model/layers/12/mlp/down_proj/kernel": "flax_model-00001-of-00004.msgpack",
"model/layers/12/mlp/gate_proj/kernel": "flax_model-00001-of-00004.msgpack",
"model/layers/12/mlp/up_proj/kernel": "flax_model-00001-of-00004.msgpack",
"model/layers/12/post_attention_layernorm/weight": "flax_model-00001-of-00004.msgpack",
"model/layers/12/self_attn/k_proj/kernel": "flax_model-00001-of-00004.msgpack",
"model/layers/12/self_attn/o_proj/kernel": "flax_model-00001-of-00004.msgpack",
"model/layers/12/self_attn/q_proj/kernel": "flax_model-00001-of-00004.msgpack",
"model/layers/12/self_attn/v_proj/kernel": "flax_model-00001-of-00004.msgpack",
"model/layers/13/input_layernorm/weight": "flax_model-00001-of-00004.msgpack",
"model/layers/13/mlp/down_proj/kernel": "flax_model-00001-of-00004.msgpack",
"model/layers/13/mlp/gate_proj/kernel": "flax_model-00001-of-00004.msgpack",
"model/layers/13/mlp/up_proj/kernel": "flax_model-00001-of-00004.msgpack",
"model/layers/13/post_attention_layernorm/weight": "flax_model-00001-of-00004.msgpack",
"model/layers/13/self_attn/k_proj/kernel": "flax_model-00001-of-00004.msgpack",
"model/layers/13/self_attn/o_proj/kernel": "flax_model-00001-of-00004.msgpack",
"model/layers/13/self_attn/q_proj/kernel": "flax_model-00001-of-00004.msgpack",
"model/layers/13/self_attn/v_proj/kernel": "flax_model-00001-of-00004.msgpack",
"model/layers/14/input_layernorm/weight": "flax_model-00001-of-00004.msgpack",
"model/layers/14/mlp/down_proj/kernel": "flax_model-00002-of-00004.msgpack",
"model/layers/14/mlp/gate_proj/kernel": "flax_model-00002-of-00004.msgpack",
"model/layers/14/mlp/up_proj/kernel": "flax_model-00002-of-00004.msgpack",
"model/layers/14/post_attention_layernorm/weight": "flax_model-00002-of-00004.msgpack",
"model/layers/14/self_attn/k_proj/kernel": "flax_model-00002-of-00004.msgpack",
"model/layers/14/self_attn/o_proj/kernel": "flax_model-00002-of-00004.msgpack",
"model/layers/14/self_attn/q_proj/kernel": "flax_model-00002-of-00004.msgpack",
"model/layers/14/self_attn/v_proj/kernel": "flax_model-00002-of-00004.msgpack",
"model/layers/15/input_layernorm/weight": "flax_model-00002-of-00004.msgpack",
"model/layers/15/mlp/down_proj/kernel": "flax_model-00002-of-00004.msgpack",
"model/layers/15/mlp/gate_proj/kernel": "flax_model-00002-of-00004.msgpack",
"model/layers/15/mlp/up_proj/kernel": "flax_model-00002-of-00004.msgpack",
"model/layers/15/post_attention_layernorm/weight": "flax_model-00002-of-00004.msgpack",
"model/layers/15/self_attn/k_proj/kernel": "flax_model-00002-of-00004.msgpack",
"model/layers/15/self_attn/o_proj/kernel": "flax_model-00002-of-00004.msgpack",
"model/layers/15/self_attn/q_proj/kernel": "flax_model-00002-of-00004.msgpack",
"model/layers/15/self_attn/v_proj/kernel": "flax_model-00002-of-00004.msgpack",
"model/layers/16/input_layernorm/weight": "flax_model-00002-of-00004.msgpack",
"model/layers/16/mlp/down_proj/kernel": "flax_model-00002-of-00004.msgpack",
"model/layers/16/mlp/gate_proj/kernel": "flax_model-00002-of-00004.msgpack",
"model/layers/16/mlp/up_proj/kernel": "flax_model-00002-of-00004.msgpack",
"model/layers/16/post_attention_layernorm/weight": "flax_model-00002-of-00004.msgpack",
"model/layers/16/self_attn/k_proj/kernel": "flax_model-00002-of-00004.msgpack",
"model/layers/16/self_attn/o_proj/kernel": "flax_model-00002-of-00004.msgpack",
"model/layers/16/self_attn/q_proj/kernel": "flax_model-00002-of-00004.msgpack",
"model/layers/16/self_attn/v_proj/kernel": "flax_model-00002-of-00004.msgpack",
"model/layers/17/input_layernorm/weight": "flax_model-00002-of-00004.msgpack",
"model/layers/17/mlp/down_proj/kernel": "flax_model-00002-of-00004.msgpack",
"model/layers/17/mlp/gate_proj/kernel": "flax_model-00002-of-00004.msgpack",
"model/layers/17/mlp/up_proj/kernel": "flax_model-00002-of-00004.msgpack",
"model/layers/17/post_attention_layernorm/weight": "flax_model-00002-of-00004.msgpack",
"model/layers/17/self_attn/k_proj/kernel": "flax_model-00002-of-00004.msgpack",
"model/layers/17/self_attn/o_proj/kernel": "flax_model-00002-of-00004.msgpack",
"model/layers/17/self_attn/q_proj/kernel": "flax_model-00002-of-00004.msgpack",
"model/layers/17/self_attn/v_proj/kernel": "flax_model-00002-of-00004.msgpack",
"model/layers/18/input_layernorm/weight": "flax_model-00002-of-00004.msgpack",
"model/layers/18/mlp/down_proj/kernel": "flax_model-00002-of-00004.msgpack",
"model/layers/18/mlp/gate_proj/kernel": "flax_model-00002-of-00004.msgpack",
"model/layers/18/mlp/up_proj/kernel": "flax_model-00002-of-00004.msgpack",
"model/layers/18/post_attention_layernorm/weight": "flax_model-00002-of-00004.msgpack",
"model/layers/18/self_attn/k_proj/kernel": "flax_model-00002-of-00004.msgpack",
"model/layers/18/self_attn/o_proj/kernel": "flax_model-00002-of-00004.msgpack",
"model/layers/18/self_attn/q_proj/kernel": "flax_model-00002-of-00004.msgpack",
"model/layers/18/self_attn/v_proj/kernel": "flax_model-00002-of-00004.msgpack",
"model/layers/19/input_layernorm/weight": "flax_model-00002-of-00004.msgpack",
"model/layers/19/mlp/down_proj/kernel": "flax_model-00002-of-00004.msgpack",
"model/layers/19/mlp/gate_proj/kernel": "flax_model-00002-of-00004.msgpack",
"model/layers/19/mlp/up_proj/kernel": "flax_model-00002-of-00004.msgpack",
"model/layers/19/post_attention_layernorm/weight": "flax_model-00002-of-00004.msgpack",
"model/layers/19/self_attn/k_proj/kernel": "flax_model-00002-of-00004.msgpack",
"model/layers/19/self_attn/o_proj/kernel": "flax_model-00002-of-00004.msgpack",
"model/layers/19/self_attn/q_proj/kernel": "flax_model-00002-of-00004.msgpack",
"model/layers/19/self_attn/v_proj/kernel": "flax_model-00002-of-00004.msgpack",
"model/layers/2/input_layernorm/weight": "flax_model-00002-of-00004.msgpack",
"model/layers/2/mlp/down_proj/kernel": "flax_model-00002-of-00004.msgpack",
"model/layers/2/mlp/gate_proj/kernel": "flax_model-00002-of-00004.msgpack",
"model/layers/2/mlp/up_proj/kernel": "flax_model-00002-of-00004.msgpack",
"model/layers/2/post_attention_layernorm/weight": "flax_model-00002-of-00004.msgpack",
"model/layers/2/self_attn/k_proj/kernel": "flax_model-00002-of-00004.msgpack",
"model/layers/2/self_attn/o_proj/kernel": "flax_model-00002-of-00004.msgpack",
"model/layers/2/self_attn/q_proj/kernel": "flax_model-00002-of-00004.msgpack",
"model/layers/2/self_attn/v_proj/kernel": "flax_model-00002-of-00004.msgpack",
"model/layers/20/input_layernorm/weight": "flax_model-00002-of-00004.msgpack",
"model/layers/20/mlp/down_proj/kernel": "flax_model-00002-of-00004.msgpack",
"model/layers/20/mlp/gate_proj/kernel": "flax_model-00002-of-00004.msgpack",
"model/layers/20/mlp/up_proj/kernel": "flax_model-00002-of-00004.msgpack",
"model/layers/20/post_attention_layernorm/weight": "flax_model-00002-of-00004.msgpack",
"model/layers/20/self_attn/k_proj/kernel": "flax_model-00002-of-00004.msgpack",
"model/layers/20/self_attn/o_proj/kernel": "flax_model-00002-of-00004.msgpack",
"model/layers/20/self_attn/q_proj/kernel": "flax_model-00002-of-00004.msgpack",
"model/layers/20/self_attn/v_proj/kernel": "flax_model-00002-of-00004.msgpack",
"model/layers/21/input_layernorm/weight": "flax_model-00002-of-00004.msgpack",
"model/layers/21/mlp/down_proj/kernel": "flax_model-00002-of-00004.msgpack",
"model/layers/21/mlp/gate_proj/kernel": "flax_model-00002-of-00004.msgpack",
"model/layers/21/mlp/up_proj/kernel": "flax_model-00002-of-00004.msgpack",
"model/layers/21/post_attention_layernorm/weight": "flax_model-00002-of-00004.msgpack",
"model/layers/21/self_attn/k_proj/kernel": "flax_model-00002-of-00004.msgpack",
"model/layers/21/self_attn/o_proj/kernel": "flax_model-00002-of-00004.msgpack",
"model/layers/21/self_attn/q_proj/kernel": "flax_model-00002-of-00004.msgpack",
"model/layers/21/self_attn/v_proj/kernel": "flax_model-00002-of-00004.msgpack",
"model/layers/22/input_layernorm/weight": "flax_model-00002-of-00004.msgpack",
"model/layers/22/mlp/down_proj/kernel": "flax_model-00003-of-00004.msgpack",
"model/layers/22/mlp/gate_proj/kernel": "flax_model-00003-of-00004.msgpack",
"model/layers/22/mlp/up_proj/kernel": "flax_model-00003-of-00004.msgpack",
"model/layers/22/post_attention_layernorm/weight": "flax_model-00003-of-00004.msgpack",
"model/layers/22/self_attn/k_proj/kernel": "flax_model-00003-of-00004.msgpack",
"model/layers/22/self_attn/o_proj/kernel": "flax_model-00003-of-00004.msgpack",
"model/layers/22/self_attn/q_proj/kernel": "flax_model-00003-of-00004.msgpack",
"model/layers/22/self_attn/v_proj/kernel": "flax_model-00003-of-00004.msgpack",
"model/layers/23/input_layernorm/weight": "flax_model-00003-of-00004.msgpack",
"model/layers/23/mlp/down_proj/kernel": "flax_model-00003-of-00004.msgpack",
"model/layers/23/mlp/gate_proj/kernel": "flax_model-00003-of-00004.msgpack",
"model/layers/23/mlp/up_proj/kernel": "flax_model-00003-of-00004.msgpack",
"model/layers/23/post_attention_layernorm/weight": "flax_model-00003-of-00004.msgpack",
"model/layers/23/self_attn/k_proj/kernel": "flax_model-00003-of-00004.msgpack",
"model/layers/23/self_attn/o_proj/kernel": "flax_model-00003-of-00004.msgpack",
"model/layers/23/self_attn/q_proj/kernel": "flax_model-00003-of-00004.msgpack",
"model/layers/23/self_attn/v_proj/kernel": "flax_model-00003-of-00004.msgpack",
"model/layers/24/input_layernorm/weight": "flax_model-00003-of-00004.msgpack",
"model/layers/24/mlp/down_proj/kernel": "flax_model-00003-of-00004.msgpack",
"model/layers/24/mlp/gate_proj/kernel": "flax_model-00003-of-00004.msgpack",
"model/layers/24/mlp/up_proj/kernel": "flax_model-00003-of-00004.msgpack",
"model/layers/24/post_attention_layernorm/weight": "flax_model-00003-of-00004.msgpack",
"model/layers/24/self_attn/k_proj/kernel": "flax_model-00003-of-00004.msgpack",
"model/layers/24/self_attn/o_proj/kernel": "flax_model-00003-of-00004.msgpack",
"model/layers/24/self_attn/q_proj/kernel": "flax_model-00003-of-00004.msgpack",
"model/layers/24/self_attn/v_proj/kernel": "flax_model-00003-of-00004.msgpack",
"model/layers/25/input_layernorm/weight": "flax_model-00003-of-00004.msgpack",
"model/layers/25/mlp/down_proj/kernel": "flax_model-00003-of-00004.msgpack",
"model/layers/25/mlp/gate_proj/kernel": "flax_model-00003-of-00004.msgpack",
"model/layers/25/mlp/up_proj/kernel": "flax_model-00003-of-00004.msgpack",
"model/layers/25/post_attention_layernorm/weight": "flax_model-00003-of-00004.msgpack",
"model/layers/25/self_attn/k_proj/kernel": "flax_model-00003-of-00004.msgpack",
"model/layers/25/self_attn/o_proj/kernel": "flax_model-00003-of-00004.msgpack",
"model/layers/25/self_attn/q_proj/kernel": "flax_model-00003-of-00004.msgpack",
"model/layers/25/self_attn/v_proj/kernel": "flax_model-00003-of-00004.msgpack",
"model/layers/26/input_layernorm/weight": "flax_model-00003-of-00004.msgpack",
"model/layers/26/mlp/down_proj/kernel": "flax_model-00003-of-00004.msgpack",
"model/layers/26/mlp/gate_proj/kernel": "flax_model-00003-of-00004.msgpack",
"model/layers/26/mlp/up_proj/kernel": "flax_model-00003-of-00004.msgpack",
"model/layers/26/post_attention_layernorm/weight": "flax_model-00003-of-00004.msgpack",
"model/layers/26/self_attn/k_proj/kernel": "flax_model-00003-of-00004.msgpack",
"model/layers/26/self_attn/o_proj/kernel": "flax_model-00003-of-00004.msgpack",
"model/layers/26/self_attn/q_proj/kernel": "flax_model-00003-of-00004.msgpack",
"model/layers/26/self_attn/v_proj/kernel": "flax_model-00003-of-00004.msgpack",
"model/layers/27/input_layernorm/weight": "flax_model-00003-of-00004.msgpack",
"model/layers/27/mlp/down_proj/kernel": "flax_model-00003-of-00004.msgpack",
"model/layers/27/mlp/gate_proj/kernel": "flax_model-00003-of-00004.msgpack",
"model/layers/27/mlp/up_proj/kernel": "flax_model-00003-of-00004.msgpack",
"model/layers/27/post_attention_layernorm/weight": "flax_model-00003-of-00004.msgpack",
"model/layers/27/self_attn/k_proj/kernel": "flax_model-00003-of-00004.msgpack",
"model/layers/27/self_attn/o_proj/kernel": "flax_model-00003-of-00004.msgpack",
"model/layers/27/self_attn/q_proj/kernel": "flax_model-00003-of-00004.msgpack",
"model/layers/27/self_attn/v_proj/kernel": "flax_model-00003-of-00004.msgpack",
"model/layers/3/input_layernorm/weight": "flax_model-00003-of-00004.msgpack",
"model/layers/3/mlp/down_proj/kernel": "flax_model-00003-of-00004.msgpack",
"model/layers/3/mlp/gate_proj/kernel": "flax_model-00003-of-00004.msgpack",
"model/layers/3/mlp/up_proj/kernel": "flax_model-00003-of-00004.msgpack",
"model/layers/3/post_attention_layernorm/weight": "flax_model-00003-of-00004.msgpack",
"model/layers/3/self_attn/k_proj/kernel": "flax_model-00003-of-00004.msgpack",
"model/layers/3/self_attn/o_proj/kernel": "flax_model-00003-of-00004.msgpack",
"model/layers/3/self_attn/q_proj/kernel": "flax_model-00003-of-00004.msgpack",
"model/layers/3/self_attn/v_proj/kernel": "flax_model-00003-of-00004.msgpack",
"model/layers/4/input_layernorm/weight": "flax_model-00003-of-00004.msgpack",
"model/layers/4/mlp/down_proj/kernel": "flax_model-00003-of-00004.msgpack",
"model/layers/4/mlp/gate_proj/kernel": "flax_model-00003-of-00004.msgpack",
"model/layers/4/mlp/up_proj/kernel": "flax_model-00003-of-00004.msgpack",
"model/layers/4/post_attention_layernorm/weight": "flax_model-00003-of-00004.msgpack",
"model/layers/4/self_attn/k_proj/kernel": "flax_model-00003-of-00004.msgpack",
"model/layers/4/self_attn/o_proj/kernel": "flax_model-00003-of-00004.msgpack",
"model/layers/4/self_attn/q_proj/kernel": "flax_model-00003-of-00004.msgpack",
"model/layers/4/self_attn/v_proj/kernel": "flax_model-00003-of-00004.msgpack",
"model/layers/5/input_layernorm/weight": "flax_model-00003-of-00004.msgpack",
"model/layers/5/mlp/down_proj/kernel": "flax_model-00003-of-00004.msgpack",
"model/layers/5/mlp/gate_proj/kernel": "flax_model-00003-of-00004.msgpack",
"model/layers/5/mlp/up_proj/kernel": "flax_model-00003-of-00004.msgpack",
"model/layers/5/post_attention_layernorm/weight": "flax_model-00003-of-00004.msgpack",
"model/layers/5/self_attn/k_proj/kernel": "flax_model-00003-of-00004.msgpack",
"model/layers/5/self_attn/o_proj/kernel": "flax_model-00003-of-00004.msgpack",
"model/layers/5/self_attn/q_proj/kernel": "flax_model-00003-of-00004.msgpack",
"model/layers/5/self_attn/v_proj/kernel": "flax_model-00003-of-00004.msgpack",
"model/layers/6/input_layernorm/weight": "flax_model-00003-of-00004.msgpack",
"model/layers/6/mlp/down_proj/kernel": "flax_model-00004-of-00004.msgpack",
"model/layers/6/mlp/gate_proj/kernel": "flax_model-00004-of-00004.msgpack",
"model/layers/6/mlp/up_proj/kernel": "flax_model-00004-of-00004.msgpack",
"model/layers/6/post_attention_layernorm/weight": "flax_model-00004-of-00004.msgpack",
"model/layers/6/self_attn/k_proj/kernel": "flax_model-00004-of-00004.msgpack",
"model/layers/6/self_attn/o_proj/kernel": "flax_model-00004-of-00004.msgpack",
"model/layers/6/self_attn/q_proj/kernel": "flax_model-00004-of-00004.msgpack",
"model/layers/6/self_attn/v_proj/kernel": "flax_model-00004-of-00004.msgpack",
"model/layers/7/input_layernorm/weight": "flax_model-00004-of-00004.msgpack",
"model/layers/7/mlp/down_proj/kernel": "flax_model-00004-of-00004.msgpack",
"model/layers/7/mlp/gate_proj/kernel": "flax_model-00004-of-00004.msgpack",
"model/layers/7/mlp/up_proj/kernel": "flax_model-00004-of-00004.msgpack",
"model/layers/7/post_attention_layernorm/weight": "flax_model-00004-of-00004.msgpack",
"model/layers/7/self_attn/k_proj/kernel": "flax_model-00004-of-00004.msgpack",
"model/layers/7/self_attn/o_proj/kernel": "flax_model-00004-of-00004.msgpack",
"model/layers/7/self_attn/q_proj/kernel": "flax_model-00004-of-00004.msgpack",
"model/layers/7/self_attn/v_proj/kernel": "flax_model-00004-of-00004.msgpack",
"model/layers/8/input_layernorm/weight": "flax_model-00004-of-00004.msgpack",
"model/layers/8/mlp/down_proj/kernel": "flax_model-00004-of-00004.msgpack",
"model/layers/8/mlp/gate_proj/kernel": "flax_model-00004-of-00004.msgpack",
"model/layers/8/mlp/up_proj/kernel": "flax_model-00004-of-00004.msgpack",
"model/layers/8/post_attention_layernorm/weight": "flax_model-00004-of-00004.msgpack",
"model/layers/8/self_attn/k_proj/kernel": "flax_model-00004-of-00004.msgpack",
"model/layers/8/self_attn/o_proj/kernel": "flax_model-00004-of-00004.msgpack",
"model/layers/8/self_attn/q_proj/kernel": "flax_model-00004-of-00004.msgpack",
"model/layers/8/self_attn/v_proj/kernel": "flax_model-00004-of-00004.msgpack",
"model/layers/9/input_layernorm/weight": "flax_model-00004-of-00004.msgpack",
"model/layers/9/mlp/down_proj/kernel": "flax_model-00004-of-00004.msgpack",
"model/layers/9/mlp/gate_proj/kernel": "flax_model-00004-of-00004.msgpack",
"model/layers/9/mlp/up_proj/kernel": "flax_model-00004-of-00004.msgpack",
"model/layers/9/post_attention_layernorm/weight": "flax_model-00004-of-00004.msgpack",
"model/layers/9/self_attn/k_proj/kernel": "flax_model-00004-of-00004.msgpack",
"model/layers/9/self_attn/o_proj/kernel": "flax_model-00004-of-00004.msgpack",
"model/layers/9/self_attn/q_proj/kernel": "flax_model-00004-of-00004.msgpack",
"model/layers/9/self_attn/v_proj/kernel": "flax_model-00004-of-00004.msgpack",
"model/norm/weight": "flax_model-00004-of-00004.msgpack"
}
}