diff --git "a/pytorch_model.bin.index.json" "b/pytorch_model.bin.index.json" new file mode 100644--- /dev/null +++ "b/pytorch_model.bin.index.json" @@ -0,0 +1,3882 @@ +{ + "metadata": { + "total_size": 30451385804 + }, + "weight_map": { + "priors.0.conditioner_blocks.0.cond.model.0.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.conditioner_blocks.0.cond.model.0.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.conditioner_blocks.0.cond.model.1.0.model.0.model.1.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.conditioner_blocks.0.cond.model.1.0.model.0.model.1.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.conditioner_blocks.0.cond.model.1.0.model.0.model.3.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.conditioner_blocks.0.cond.model.1.0.model.0.model.3.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.conditioner_blocks.0.cond.model.1.0.model.1.model.1.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.conditioner_blocks.0.cond.model.1.0.model.1.model.1.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.conditioner_blocks.0.cond.model.1.0.model.1.model.3.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.conditioner_blocks.0.cond.model.1.0.model.1.model.3.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.conditioner_blocks.0.cond.model.1.0.model.10.model.1.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.conditioner_blocks.0.cond.model.1.0.model.10.model.1.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.conditioner_blocks.0.cond.model.1.0.model.10.model.3.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.conditioner_blocks.0.cond.model.1.0.model.10.model.3.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.conditioner_blocks.0.cond.model.1.0.model.11.model.1.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.conditioner_blocks.0.cond.model.1.0.model.11.model.1.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.conditioner_blocks.0.cond.model.1.0.model.11.model.3.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.conditioner_blocks.0.cond.model.1.0.model.11.model.3.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.conditioner_blocks.0.cond.model.1.0.model.12.model.1.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.conditioner_blocks.0.cond.model.1.0.model.12.model.1.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.conditioner_blocks.0.cond.model.1.0.model.12.model.3.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.conditioner_blocks.0.cond.model.1.0.model.12.model.3.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.conditioner_blocks.0.cond.model.1.0.model.13.model.1.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.conditioner_blocks.0.cond.model.1.0.model.13.model.1.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.conditioner_blocks.0.cond.model.1.0.model.13.model.3.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.conditioner_blocks.0.cond.model.1.0.model.13.model.3.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.conditioner_blocks.0.cond.model.1.0.model.14.model.1.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.conditioner_blocks.0.cond.model.1.0.model.14.model.1.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.conditioner_blocks.0.cond.model.1.0.model.14.model.3.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.conditioner_blocks.0.cond.model.1.0.model.14.model.3.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.conditioner_blocks.0.cond.model.1.0.model.15.model.1.bias": 
"pytorch_model-00001-of-00004.bin", + "priors.0.conditioner_blocks.0.cond.model.1.0.model.15.model.1.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.conditioner_blocks.0.cond.model.1.0.model.15.model.3.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.conditioner_blocks.0.cond.model.1.0.model.15.model.3.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.conditioner_blocks.0.cond.model.1.0.model.2.model.1.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.conditioner_blocks.0.cond.model.1.0.model.2.model.1.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.conditioner_blocks.0.cond.model.1.0.model.2.model.3.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.conditioner_blocks.0.cond.model.1.0.model.2.model.3.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.conditioner_blocks.0.cond.model.1.0.model.3.model.1.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.conditioner_blocks.0.cond.model.1.0.model.3.model.1.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.conditioner_blocks.0.cond.model.1.0.model.3.model.3.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.conditioner_blocks.0.cond.model.1.0.model.3.model.3.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.conditioner_blocks.0.cond.model.1.0.model.4.model.1.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.conditioner_blocks.0.cond.model.1.0.model.4.model.1.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.conditioner_blocks.0.cond.model.1.0.model.4.model.3.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.conditioner_blocks.0.cond.model.1.0.model.4.model.3.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.conditioner_blocks.0.cond.model.1.0.model.5.model.1.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.conditioner_blocks.0.cond.model.1.0.model.5.model.1.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.conditioner_blocks.0.cond.model.1.0.model.5.model.3.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.conditioner_blocks.0.cond.model.1.0.model.5.model.3.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.conditioner_blocks.0.cond.model.1.0.model.6.model.1.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.conditioner_blocks.0.cond.model.1.0.model.6.model.1.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.conditioner_blocks.0.cond.model.1.0.model.6.model.3.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.conditioner_blocks.0.cond.model.1.0.model.6.model.3.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.conditioner_blocks.0.cond.model.1.0.model.7.model.1.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.conditioner_blocks.0.cond.model.1.0.model.7.model.1.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.conditioner_blocks.0.cond.model.1.0.model.7.model.3.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.conditioner_blocks.0.cond.model.1.0.model.7.model.3.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.conditioner_blocks.0.cond.model.1.0.model.8.model.1.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.conditioner_blocks.0.cond.model.1.0.model.8.model.1.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.conditioner_blocks.0.cond.model.1.0.model.8.model.3.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.conditioner_blocks.0.cond.model.1.0.model.8.model.3.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.conditioner_blocks.0.cond.model.1.0.model.9.model.1.bias": "pytorch_model-00001-of-00004.bin", + 
"priors.0.conditioner_blocks.0.cond.model.1.0.model.9.model.1.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.conditioner_blocks.0.cond.model.1.0.model.9.model.3.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.conditioner_blocks.0.cond.model.1.0.model.9.model.3.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.conditioner_blocks.0.cond.model.1.1.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.conditioner_blocks.0.cond.model.1.1.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.conditioner_blocks.0.cond.model.2.0.model.0.model.1.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.conditioner_blocks.0.cond.model.2.0.model.0.model.1.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.conditioner_blocks.0.cond.model.2.0.model.0.model.3.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.conditioner_blocks.0.cond.model.2.0.model.0.model.3.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.conditioner_blocks.0.cond.model.2.0.model.1.model.1.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.conditioner_blocks.0.cond.model.2.0.model.1.model.1.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.conditioner_blocks.0.cond.model.2.0.model.1.model.3.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.conditioner_blocks.0.cond.model.2.0.model.1.model.3.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.conditioner_blocks.0.cond.model.2.0.model.10.model.1.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.conditioner_blocks.0.cond.model.2.0.model.10.model.1.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.conditioner_blocks.0.cond.model.2.0.model.10.model.3.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.conditioner_blocks.0.cond.model.2.0.model.10.model.3.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.conditioner_blocks.0.cond.model.2.0.model.11.model.1.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.conditioner_blocks.0.cond.model.2.0.model.11.model.1.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.conditioner_blocks.0.cond.model.2.0.model.11.model.3.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.conditioner_blocks.0.cond.model.2.0.model.11.model.3.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.conditioner_blocks.0.cond.model.2.0.model.12.model.1.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.conditioner_blocks.0.cond.model.2.0.model.12.model.1.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.conditioner_blocks.0.cond.model.2.0.model.12.model.3.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.conditioner_blocks.0.cond.model.2.0.model.12.model.3.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.conditioner_blocks.0.cond.model.2.0.model.13.model.1.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.conditioner_blocks.0.cond.model.2.0.model.13.model.1.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.conditioner_blocks.0.cond.model.2.0.model.13.model.3.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.conditioner_blocks.0.cond.model.2.0.model.13.model.3.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.conditioner_blocks.0.cond.model.2.0.model.14.model.1.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.conditioner_blocks.0.cond.model.2.0.model.14.model.1.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.conditioner_blocks.0.cond.model.2.0.model.14.model.3.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.conditioner_blocks.0.cond.model.2.0.model.14.model.3.weight": "pytorch_model-00001-of-00004.bin", + 
"priors.0.conditioner_blocks.0.cond.model.2.0.model.15.model.1.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.conditioner_blocks.0.cond.model.2.0.model.15.model.1.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.conditioner_blocks.0.cond.model.2.0.model.15.model.3.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.conditioner_blocks.0.cond.model.2.0.model.15.model.3.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.conditioner_blocks.0.cond.model.2.0.model.2.model.1.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.conditioner_blocks.0.cond.model.2.0.model.2.model.1.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.conditioner_blocks.0.cond.model.2.0.model.2.model.3.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.conditioner_blocks.0.cond.model.2.0.model.2.model.3.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.conditioner_blocks.0.cond.model.2.0.model.3.model.1.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.conditioner_blocks.0.cond.model.2.0.model.3.model.1.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.conditioner_blocks.0.cond.model.2.0.model.3.model.3.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.conditioner_blocks.0.cond.model.2.0.model.3.model.3.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.conditioner_blocks.0.cond.model.2.0.model.4.model.1.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.conditioner_blocks.0.cond.model.2.0.model.4.model.1.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.conditioner_blocks.0.cond.model.2.0.model.4.model.3.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.conditioner_blocks.0.cond.model.2.0.model.4.model.3.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.conditioner_blocks.0.cond.model.2.0.model.5.model.1.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.conditioner_blocks.0.cond.model.2.0.model.5.model.1.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.conditioner_blocks.0.cond.model.2.0.model.5.model.3.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.conditioner_blocks.0.cond.model.2.0.model.5.model.3.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.conditioner_blocks.0.cond.model.2.0.model.6.model.1.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.conditioner_blocks.0.cond.model.2.0.model.6.model.1.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.conditioner_blocks.0.cond.model.2.0.model.6.model.3.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.conditioner_blocks.0.cond.model.2.0.model.6.model.3.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.conditioner_blocks.0.cond.model.2.0.model.7.model.1.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.conditioner_blocks.0.cond.model.2.0.model.7.model.1.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.conditioner_blocks.0.cond.model.2.0.model.7.model.3.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.conditioner_blocks.0.cond.model.2.0.model.7.model.3.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.conditioner_blocks.0.cond.model.2.0.model.8.model.1.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.conditioner_blocks.0.cond.model.2.0.model.8.model.1.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.conditioner_blocks.0.cond.model.2.0.model.8.model.3.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.conditioner_blocks.0.cond.model.2.0.model.8.model.3.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.conditioner_blocks.0.cond.model.2.0.model.9.model.1.bias": 
"pytorch_model-00001-of-00004.bin", + "priors.0.conditioner_blocks.0.cond.model.2.0.model.9.model.1.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.conditioner_blocks.0.cond.model.2.0.model.9.model.3.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.conditioner_blocks.0.cond.model.2.0.model.9.model.3.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.conditioner_blocks.0.cond.model.2.1.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.conditioner_blocks.0.cond.model.2.1.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.conditioner_blocks.0.ln.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.conditioner_blocks.0.ln.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.conditioner_blocks.0.x_emb.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.pos_emb.pos_emb": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.0.attn.c_attn.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.0.attn.c_attn.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.0.attn.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.0.attn.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.0.ln_0.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.0.ln_0.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.0.ln_1.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.0.ln_1.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.0.mlp.c_fc.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.0.mlp.c_fc.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.0.mlp.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.0.mlp.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.1.attn.c_attn.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.1.attn.c_attn.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.1.attn.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.1.attn.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.1.ln_0.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.1.ln_0.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.1.ln_1.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.1.ln_1.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.1.mlp.c_fc.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.1.mlp.c_fc.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.1.mlp.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.1.mlp.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.10.attn.c_attn.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.10.attn.c_attn.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.10.attn.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.10.attn.c_proj.weight": 
"pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.10.ln_0.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.10.ln_0.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.10.ln_1.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.10.ln_1.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.10.mlp.c_fc.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.10.mlp.c_fc.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.10.mlp.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.10.mlp.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.11.attn.c_attn.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.11.attn.c_attn.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.11.attn.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.11.attn.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.11.ln_0.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.11.ln_0.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.11.ln_1.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.11.ln_1.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.11.mlp.c_fc.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.11.mlp.c_fc.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.11.mlp.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.11.mlp.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.12.attn.c_attn.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.12.attn.c_attn.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.12.attn.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.12.attn.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.12.ln_0.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.12.ln_0.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.12.ln_1.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.12.ln_1.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.12.mlp.c_fc.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.12.mlp.c_fc.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.12.mlp.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.12.mlp.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.13.attn.c_attn.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.13.attn.c_attn.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.13.attn.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.13.attn.c_proj.weight": "pytorch_model-00001-of-00004.bin", + 
"priors.0.prior.transformer._attn_mods.13.ln_0.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.13.ln_0.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.13.ln_1.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.13.ln_1.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.13.mlp.c_fc.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.13.mlp.c_fc.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.13.mlp.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.13.mlp.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.14.attn.c_attn.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.14.attn.c_attn.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.14.attn.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.14.attn.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.14.ln_0.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.14.ln_0.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.14.ln_1.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.14.ln_1.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.14.mlp.c_fc.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.14.mlp.c_fc.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.14.mlp.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.14.mlp.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.15.attn.c_attn.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.15.attn.c_attn.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.15.attn.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.15.attn.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.15.ln_0.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.15.ln_0.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.15.ln_1.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.15.ln_1.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.15.mlp.c_fc.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.15.mlp.c_fc.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.15.mlp.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.15.mlp.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.16.attn.c_attn.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.16.attn.c_attn.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.16.attn.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.16.attn.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.16.ln_0.bias": 
"pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.16.ln_0.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.16.ln_1.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.16.ln_1.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.16.mlp.c_fc.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.16.mlp.c_fc.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.16.mlp.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.16.mlp.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.17.attn.c_attn.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.17.attn.c_attn.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.17.attn.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.17.attn.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.17.ln_0.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.17.ln_0.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.17.ln_1.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.17.ln_1.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.17.mlp.c_fc.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.17.mlp.c_fc.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.17.mlp.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.17.mlp.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.18.attn.c_attn.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.18.attn.c_attn.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.18.attn.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.18.attn.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.18.ln_0.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.18.ln_0.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.18.ln_1.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.18.ln_1.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.18.mlp.c_fc.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.18.mlp.c_fc.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.18.mlp.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.18.mlp.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.19.attn.c_attn.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.19.attn.c_attn.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.19.attn.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.19.attn.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.19.ln_0.bias": "pytorch_model-00001-of-00004.bin", + 
"priors.0.prior.transformer._attn_mods.19.ln_0.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.19.ln_1.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.19.ln_1.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.19.mlp.c_fc.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.19.mlp.c_fc.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.19.mlp.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.19.mlp.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.2.attn.c_attn.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.2.attn.c_attn.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.2.attn.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.2.attn.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.2.ln_0.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.2.ln_0.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.2.ln_1.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.2.ln_1.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.2.mlp.c_fc.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.2.mlp.c_fc.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.2.mlp.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.2.mlp.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.20.attn.c_attn.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.20.attn.c_attn.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.20.attn.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.20.attn.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.20.ln_0.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.20.ln_0.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.20.ln_1.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.20.ln_1.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.20.mlp.c_fc.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.20.mlp.c_fc.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.20.mlp.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.20.mlp.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.21.attn.c_attn.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.21.attn.c_attn.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.21.attn.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.21.attn.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.21.ln_0.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.21.ln_0.weight": 
"pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.21.ln_1.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.21.ln_1.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.21.mlp.c_fc.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.21.mlp.c_fc.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.21.mlp.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.21.mlp.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.22.attn.c_attn.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.22.attn.c_attn.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.22.attn.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.22.attn.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.22.ln_0.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.22.ln_0.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.22.ln_1.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.22.ln_1.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.22.mlp.c_fc.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.22.mlp.c_fc.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.22.mlp.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.22.mlp.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.23.attn.c_attn.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.23.attn.c_attn.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.23.attn.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.23.attn.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.23.ln_0.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.23.ln_0.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.23.ln_1.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.23.ln_1.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.23.mlp.c_fc.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.23.mlp.c_fc.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.23.mlp.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.23.mlp.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.24.attn.c_attn.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.24.attn.c_attn.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.24.attn.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.24.attn.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.24.ln_0.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.24.ln_0.weight": "pytorch_model-00001-of-00004.bin", + 
"priors.0.prior.transformer._attn_mods.24.ln_1.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.24.ln_1.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.24.mlp.c_fc.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.24.mlp.c_fc.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.24.mlp.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.24.mlp.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.25.attn.c_attn.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.25.attn.c_attn.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.25.attn.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.25.attn.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.25.ln_0.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.25.ln_0.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.25.ln_1.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.25.ln_1.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.25.mlp.c_fc.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.25.mlp.c_fc.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.25.mlp.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.25.mlp.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.26.attn.c_attn.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.26.attn.c_attn.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.26.attn.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.26.attn.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.26.ln_0.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.26.ln_0.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.26.ln_1.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.26.ln_1.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.26.mlp.c_fc.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.26.mlp.c_fc.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.26.mlp.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.26.mlp.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.27.attn.c_attn.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.27.attn.c_attn.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.27.attn.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.27.attn.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.27.ln_0.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.27.ln_0.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.27.ln_1.bias": 
"pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.27.ln_1.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.27.mlp.c_fc.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.27.mlp.c_fc.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.27.mlp.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.27.mlp.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.28.attn.c_attn.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.28.attn.c_attn.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.28.attn.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.28.attn.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.28.ln_0.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.28.ln_0.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.28.ln_1.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.28.ln_1.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.28.mlp.c_fc.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.28.mlp.c_fc.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.28.mlp.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.28.mlp.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.29.attn.c_attn.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.29.attn.c_attn.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.29.attn.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.29.attn.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.29.ln_0.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.29.ln_0.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.29.ln_1.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.29.ln_1.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.29.mlp.c_fc.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.29.mlp.c_fc.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.29.mlp.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.29.mlp.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.3.attn.c_attn.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.3.attn.c_attn.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.3.attn.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.3.attn.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.3.ln_0.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.3.ln_0.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.3.ln_1.bias": "pytorch_model-00001-of-00004.bin", + 
"priors.0.prior.transformer._attn_mods.3.ln_1.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.3.mlp.c_fc.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.3.mlp.c_fc.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.3.mlp.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.3.mlp.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.30.attn.c_attn.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.30.attn.c_attn.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.30.attn.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.30.attn.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.30.ln_0.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.30.ln_0.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.30.ln_1.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.30.ln_1.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.30.mlp.c_fc.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.30.mlp.c_fc.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.30.mlp.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.30.mlp.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.31.attn.c_attn.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.31.attn.c_attn.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.31.attn.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.31.attn.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.31.ln_0.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.31.ln_0.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.31.ln_1.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.31.ln_1.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.31.mlp.c_fc.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.31.mlp.c_fc.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.31.mlp.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.31.mlp.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.32.attn.c_attn.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.32.attn.c_attn.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.32.attn.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.32.attn.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.32.ln_0.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.32.ln_0.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.32.ln_1.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.32.ln_1.weight": 
"pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.32.mlp.c_fc.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.32.mlp.c_fc.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.32.mlp.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.32.mlp.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.33.attn.c_attn.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.33.attn.c_attn.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.33.attn.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.33.attn.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.33.ln_0.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.33.ln_0.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.33.ln_1.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.33.ln_1.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.33.mlp.c_fc.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.33.mlp.c_fc.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.33.mlp.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.33.mlp.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.34.attn.c_attn.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.34.attn.c_attn.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.34.attn.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.34.attn.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.34.ln_0.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.34.ln_0.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.34.ln_1.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.34.ln_1.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.34.mlp.c_fc.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.34.mlp.c_fc.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.34.mlp.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.34.mlp.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.35.attn.c_attn.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.35.attn.c_attn.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.35.attn.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.35.attn.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.35.ln_0.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.35.ln_0.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.35.ln_1.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.35.ln_1.weight": "pytorch_model-00001-of-00004.bin", + 
"priors.0.prior.transformer._attn_mods.35.mlp.c_fc.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.35.mlp.c_fc.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.35.mlp.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.35.mlp.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.36.attn.c_attn.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.36.attn.c_attn.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.36.attn.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.36.attn.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.36.ln_0.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.36.ln_0.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.36.ln_1.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.36.ln_1.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.36.mlp.c_fc.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.36.mlp.c_fc.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.36.mlp.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.36.mlp.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.37.attn.c_attn.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.37.attn.c_attn.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.37.attn.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.37.attn.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.37.ln_0.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.37.ln_0.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.37.ln_1.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.37.ln_1.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.37.mlp.c_fc.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.37.mlp.c_fc.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.37.mlp.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.37.mlp.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.38.attn.c_attn.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.38.attn.c_attn.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.38.attn.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.38.attn.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.38.ln_0.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.38.ln_0.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.38.ln_1.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.38.ln_1.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.38.mlp.c_fc.bias": 
"pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.38.mlp.c_fc.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.38.mlp.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.38.mlp.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.39.attn.c_attn.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.39.attn.c_attn.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.39.attn.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.39.attn.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.39.ln_0.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.39.ln_0.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.39.ln_1.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.39.ln_1.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.39.mlp.c_fc.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.39.mlp.c_fc.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.39.mlp.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.39.mlp.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.4.attn.c_attn.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.4.attn.c_attn.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.4.attn.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.4.attn.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.4.ln_0.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.4.ln_0.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.4.ln_1.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.4.ln_1.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.4.mlp.c_fc.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.4.mlp.c_fc.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.4.mlp.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.4.mlp.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.40.attn.c_attn.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.40.attn.c_attn.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.40.attn.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.40.attn.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.40.ln_0.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.40.ln_0.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.40.ln_1.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.40.ln_1.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.40.mlp.c_fc.bias": "pytorch_model-00001-of-00004.bin", + 
"priors.0.prior.transformer._attn_mods.40.mlp.c_fc.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.40.mlp.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.40.mlp.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.41.attn.c_attn.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.41.attn.c_attn.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.41.attn.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.41.attn.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.41.ln_0.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.41.ln_0.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.41.ln_1.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.41.ln_1.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.41.mlp.c_fc.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.41.mlp.c_fc.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.41.mlp.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.41.mlp.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.42.attn.c_attn.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.42.attn.c_attn.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.42.attn.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.42.attn.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.42.ln_0.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.42.ln_0.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.42.ln_1.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.42.ln_1.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.42.mlp.c_fc.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.42.mlp.c_fc.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.42.mlp.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.42.mlp.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.43.attn.c_attn.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.43.attn.c_attn.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.43.attn.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.43.attn.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.43.ln_0.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.43.ln_0.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.43.ln_1.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.43.ln_1.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.43.mlp.c_fc.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.43.mlp.c_fc.weight": 
"pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.43.mlp.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.43.mlp.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.44.attn.c_attn.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.44.attn.c_attn.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.44.attn.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.44.attn.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.44.ln_0.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.44.ln_0.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.44.ln_1.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.44.ln_1.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.44.mlp.c_fc.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.44.mlp.c_fc.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.44.mlp.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.44.mlp.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.45.attn.c_attn.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.45.attn.c_attn.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.45.attn.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.45.attn.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.45.ln_0.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.45.ln_0.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.45.ln_1.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.45.ln_1.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.45.mlp.c_fc.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.45.mlp.c_fc.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.45.mlp.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.45.mlp.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.46.attn.c_attn.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.46.attn.c_attn.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.46.attn.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.46.attn.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.46.ln_0.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.46.ln_0.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.46.ln_1.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.46.ln_1.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.46.mlp.c_fc.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.46.mlp.c_fc.weight": "pytorch_model-00001-of-00004.bin", + 
"priors.0.prior.transformer._attn_mods.46.mlp.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.46.mlp.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.47.attn.c_attn.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.47.attn.c_attn.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.47.attn.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.47.attn.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.47.ln_0.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.47.ln_0.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.47.ln_1.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.47.ln_1.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.47.mlp.c_fc.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.47.mlp.c_fc.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.47.mlp.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.47.mlp.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.48.attn.c_attn.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.48.attn.c_attn.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.48.attn.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.48.attn.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.48.ln_0.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.48.ln_0.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.48.ln_1.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.48.ln_1.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.48.mlp.c_fc.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.48.mlp.c_fc.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.48.mlp.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.48.mlp.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.49.attn.c_attn.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.49.attn.c_attn.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.49.attn.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.49.attn.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.49.ln_0.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.49.ln_0.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.49.ln_1.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.49.ln_1.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.49.mlp.c_fc.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.49.mlp.c_fc.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.49.mlp.c_proj.bias": 
"pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.49.mlp.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.5.attn.c_attn.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.5.attn.c_attn.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.5.attn.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.5.attn.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.5.ln_0.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.5.ln_0.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.5.ln_1.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.5.ln_1.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.5.mlp.c_fc.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.5.mlp.c_fc.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.5.mlp.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.5.mlp.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.50.attn.c_attn.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.50.attn.c_attn.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.50.attn.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.50.attn.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.50.ln_0.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.50.ln_0.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.50.ln_1.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.50.ln_1.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.50.mlp.c_fc.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.50.mlp.c_fc.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.50.mlp.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.50.mlp.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.51.attn.c_attn.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.51.attn.c_attn.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.51.attn.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.51.attn.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.51.ln_0.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.51.ln_0.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.51.ln_1.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.51.ln_1.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.51.mlp.c_fc.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.51.mlp.c_fc.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.51.mlp.c_proj.bias": "pytorch_model-00001-of-00004.bin", + 
"priors.0.prior.transformer._attn_mods.51.mlp.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.52.attn.c_attn.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.52.attn.c_attn.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.52.attn.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.52.attn.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.52.ln_0.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.52.ln_0.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.52.ln_1.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.52.ln_1.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.52.mlp.c_fc.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.52.mlp.c_fc.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.52.mlp.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.52.mlp.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.53.attn.c_attn.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.53.attn.c_attn.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.53.attn.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.53.attn.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.53.ln_0.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.53.ln_0.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.53.ln_1.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.53.ln_1.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.53.mlp.c_fc.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.53.mlp.c_fc.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.53.mlp.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.53.mlp.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.54.attn.c_attn.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.54.attn.c_attn.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.54.attn.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.54.attn.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.54.ln_0.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.54.ln_0.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.54.ln_1.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.54.ln_1.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.54.mlp.c_fc.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.54.mlp.c_fc.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.54.mlp.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.54.mlp.c_proj.weight": 
"pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.55.attn.c_attn.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.55.attn.c_attn.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.55.attn.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.55.attn.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.55.ln_0.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.55.ln_0.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.55.ln_1.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.55.ln_1.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.55.mlp.c_fc.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.55.mlp.c_fc.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.55.mlp.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.55.mlp.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.56.attn.c_attn.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.56.attn.c_attn.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.56.attn.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.56.attn.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.56.ln_0.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.56.ln_0.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.56.ln_1.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.56.ln_1.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.56.mlp.c_fc.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.56.mlp.c_fc.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.56.mlp.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.56.mlp.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.57.attn.c_attn.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.57.attn.c_attn.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.57.attn.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.57.attn.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.57.ln_0.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.57.ln_0.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.57.ln_1.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.57.ln_1.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.57.mlp.c_fc.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.57.mlp.c_fc.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.57.mlp.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.57.mlp.c_proj.weight": "pytorch_model-00001-of-00004.bin", + 
"priors.0.prior.transformer._attn_mods.58.attn.c_attn.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.58.attn.c_attn.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.58.attn.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.58.attn.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.58.ln_0.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.58.ln_0.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.58.ln_1.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.58.ln_1.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.58.mlp.c_fc.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.58.mlp.c_fc.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.58.mlp.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.58.mlp.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.59.attn.c_attn.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.59.attn.c_attn.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.59.attn.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.59.attn.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.59.ln_0.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.59.ln_0.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.59.ln_1.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.59.ln_1.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.59.mlp.c_fc.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.59.mlp.c_fc.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.59.mlp.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.59.mlp.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.6.attn.c_attn.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.6.attn.c_attn.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.6.attn.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.6.attn.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.6.ln_0.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.6.ln_0.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.6.ln_1.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.6.ln_1.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.6.mlp.c_fc.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.6.mlp.c_fc.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.6.mlp.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.6.mlp.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.60.attn.c_attn.bias": 
"pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.60.attn.c_attn.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.60.attn.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.60.attn.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.60.ln_0.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.60.ln_0.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.60.ln_1.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.60.ln_1.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.60.mlp.c_fc.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.60.mlp.c_fc.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.60.mlp.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.60.mlp.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.61.attn.c_attn.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.61.attn.c_attn.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.61.attn.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.61.attn.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.61.ln_0.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.61.ln_0.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.61.ln_1.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.61.ln_1.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.61.mlp.c_fc.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.61.mlp.c_fc.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.61.mlp.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.61.mlp.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.62.attn.c_attn.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.62.attn.c_attn.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.62.attn.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.62.attn.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.62.ln_0.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.62.ln_0.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.62.ln_1.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.62.ln_1.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.62.mlp.c_fc.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.62.mlp.c_fc.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.62.mlp.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.62.mlp.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.63.attn.c_attn.bias": "pytorch_model-00001-of-00004.bin", + 
"priors.0.prior.transformer._attn_mods.63.attn.c_attn.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.63.attn.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.63.attn.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.63.ln_0.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.63.ln_0.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.63.ln_1.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.63.ln_1.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.63.mlp.c_fc.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.63.mlp.c_fc.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.63.mlp.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.63.mlp.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.64.attn.c_attn.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.64.attn.c_attn.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.64.attn.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.64.attn.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.64.ln_0.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.64.ln_0.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.64.ln_1.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.64.ln_1.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.64.mlp.c_fc.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.64.mlp.c_fc.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.64.mlp.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.64.mlp.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.65.attn.c_attn.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.65.attn.c_attn.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.65.attn.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.65.attn.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.65.ln_0.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.65.ln_0.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.65.ln_1.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.65.ln_1.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.65.mlp.c_fc.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.65.mlp.c_fc.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.65.mlp.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.65.mlp.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.66.attn.c_attn.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.66.attn.c_attn.weight": 
"pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.66.attn.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.66.attn.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.66.ln_0.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.66.ln_0.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.66.ln_1.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.66.ln_1.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.66.mlp.c_fc.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.66.mlp.c_fc.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.66.mlp.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.66.mlp.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.67.attn.c_attn.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.67.attn.c_attn.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.67.attn.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.67.attn.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.67.ln_0.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.67.ln_0.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.67.ln_1.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.67.ln_1.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.67.mlp.c_fc.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.67.mlp.c_fc.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.67.mlp.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.67.mlp.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.68.attn.c_attn.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.68.attn.c_attn.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.68.attn.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.68.attn.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.68.ln_0.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.68.ln_0.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.68.ln_1.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.68.ln_1.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.68.mlp.c_fc.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.68.mlp.c_fc.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.68.mlp.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.68.mlp.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.69.attn.c_attn.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.69.attn.c_attn.weight": "pytorch_model-00001-of-00004.bin", + 
"priors.0.prior.transformer._attn_mods.69.attn.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.69.attn.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.69.ln_0.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.69.ln_0.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.69.ln_1.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.69.ln_1.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.69.mlp.c_fc.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.69.mlp.c_fc.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.69.mlp.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.69.mlp.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.7.attn.c_attn.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.7.attn.c_attn.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.7.attn.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.7.attn.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.7.ln_0.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.7.ln_0.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.7.ln_1.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.7.ln_1.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.7.mlp.c_fc.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.7.mlp.c_fc.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.7.mlp.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.7.mlp.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.70.attn.c_attn.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.70.attn.c_attn.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.70.attn.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.70.attn.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.70.ln_0.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.70.ln_0.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.70.ln_1.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.70.ln_1.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.70.mlp.c_fc.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.70.mlp.c_fc.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.70.mlp.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.70.mlp.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.71.attn.c_attn.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.71.attn.c_attn.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.71.attn.c_proj.bias": 
"pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.71.attn.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.71.ln_0.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.71.ln_0.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.71.ln_1.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.71.ln_1.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.71.mlp.c_fc.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.71.mlp.c_fc.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.71.mlp.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.71.mlp.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.8.attn.c_attn.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.8.attn.c_attn.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.8.attn.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.8.attn.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.8.ln_0.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.8.ln_0.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.8.ln_1.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.8.ln_1.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.8.mlp.c_fc.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.8.mlp.c_fc.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.8.mlp.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.8.mlp.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.9.attn.c_attn.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.9.attn.c_attn.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.9.attn.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.9.attn.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.9.ln_0.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.9.ln_0.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.9.ln_1.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.9.ln_1.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.9.mlp.c_fc.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.9.mlp.c_fc.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.9.mlp.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.transformer._attn_mods.9.mlp.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.x_emb.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.prior.x_out.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.y_emb.artist_emb.emb.weight": "pytorch_model-00001-of-00004.bin", + "priors.0.y_emb.bow_genre_emb.emb.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.conditioner_blocks.0.cond.model.0.bias": 
"pytorch_model-00001-of-00004.bin", + "priors.1.conditioner_blocks.0.cond.model.0.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.conditioner_blocks.0.cond.model.1.0.model.0.model.1.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.conditioner_blocks.0.cond.model.1.0.model.0.model.1.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.conditioner_blocks.0.cond.model.1.0.model.0.model.3.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.conditioner_blocks.0.cond.model.1.0.model.0.model.3.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.conditioner_blocks.0.cond.model.1.0.model.1.model.1.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.conditioner_blocks.0.cond.model.1.0.model.1.model.1.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.conditioner_blocks.0.cond.model.1.0.model.1.model.3.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.conditioner_blocks.0.cond.model.1.0.model.1.model.3.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.conditioner_blocks.0.cond.model.1.0.model.10.model.1.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.conditioner_blocks.0.cond.model.1.0.model.10.model.1.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.conditioner_blocks.0.cond.model.1.0.model.10.model.3.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.conditioner_blocks.0.cond.model.1.0.model.10.model.3.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.conditioner_blocks.0.cond.model.1.0.model.11.model.1.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.conditioner_blocks.0.cond.model.1.0.model.11.model.1.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.conditioner_blocks.0.cond.model.1.0.model.11.model.3.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.conditioner_blocks.0.cond.model.1.0.model.11.model.3.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.conditioner_blocks.0.cond.model.1.0.model.12.model.1.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.conditioner_blocks.0.cond.model.1.0.model.12.model.1.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.conditioner_blocks.0.cond.model.1.0.model.12.model.3.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.conditioner_blocks.0.cond.model.1.0.model.12.model.3.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.conditioner_blocks.0.cond.model.1.0.model.13.model.1.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.conditioner_blocks.0.cond.model.1.0.model.13.model.1.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.conditioner_blocks.0.cond.model.1.0.model.13.model.3.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.conditioner_blocks.0.cond.model.1.0.model.13.model.3.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.conditioner_blocks.0.cond.model.1.0.model.14.model.1.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.conditioner_blocks.0.cond.model.1.0.model.14.model.1.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.conditioner_blocks.0.cond.model.1.0.model.14.model.3.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.conditioner_blocks.0.cond.model.1.0.model.14.model.3.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.conditioner_blocks.0.cond.model.1.0.model.15.model.1.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.conditioner_blocks.0.cond.model.1.0.model.15.model.1.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.conditioner_blocks.0.cond.model.1.0.model.15.model.3.bias": "pytorch_model-00001-of-00004.bin", + 
"priors.1.conditioner_blocks.0.cond.model.1.0.model.15.model.3.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.conditioner_blocks.0.cond.model.1.0.model.2.model.1.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.conditioner_blocks.0.cond.model.1.0.model.2.model.1.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.conditioner_blocks.0.cond.model.1.0.model.2.model.3.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.conditioner_blocks.0.cond.model.1.0.model.2.model.3.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.conditioner_blocks.0.cond.model.1.0.model.3.model.1.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.conditioner_blocks.0.cond.model.1.0.model.3.model.1.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.conditioner_blocks.0.cond.model.1.0.model.3.model.3.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.conditioner_blocks.0.cond.model.1.0.model.3.model.3.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.conditioner_blocks.0.cond.model.1.0.model.4.model.1.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.conditioner_blocks.0.cond.model.1.0.model.4.model.1.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.conditioner_blocks.0.cond.model.1.0.model.4.model.3.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.conditioner_blocks.0.cond.model.1.0.model.4.model.3.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.conditioner_blocks.0.cond.model.1.0.model.5.model.1.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.conditioner_blocks.0.cond.model.1.0.model.5.model.1.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.conditioner_blocks.0.cond.model.1.0.model.5.model.3.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.conditioner_blocks.0.cond.model.1.0.model.5.model.3.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.conditioner_blocks.0.cond.model.1.0.model.6.model.1.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.conditioner_blocks.0.cond.model.1.0.model.6.model.1.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.conditioner_blocks.0.cond.model.1.0.model.6.model.3.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.conditioner_blocks.0.cond.model.1.0.model.6.model.3.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.conditioner_blocks.0.cond.model.1.0.model.7.model.1.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.conditioner_blocks.0.cond.model.1.0.model.7.model.1.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.conditioner_blocks.0.cond.model.1.0.model.7.model.3.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.conditioner_blocks.0.cond.model.1.0.model.7.model.3.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.conditioner_blocks.0.cond.model.1.0.model.8.model.1.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.conditioner_blocks.0.cond.model.1.0.model.8.model.1.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.conditioner_blocks.0.cond.model.1.0.model.8.model.3.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.conditioner_blocks.0.cond.model.1.0.model.8.model.3.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.conditioner_blocks.0.cond.model.1.0.model.9.model.1.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.conditioner_blocks.0.cond.model.1.0.model.9.model.1.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.conditioner_blocks.0.cond.model.1.0.model.9.model.3.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.conditioner_blocks.0.cond.model.1.0.model.9.model.3.weight": 
"pytorch_model-00001-of-00004.bin", + "priors.1.conditioner_blocks.0.cond.model.1.1.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.conditioner_blocks.0.cond.model.1.1.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.conditioner_blocks.0.cond.model.2.0.model.0.model.1.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.conditioner_blocks.0.cond.model.2.0.model.0.model.1.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.conditioner_blocks.0.cond.model.2.0.model.0.model.3.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.conditioner_blocks.0.cond.model.2.0.model.0.model.3.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.conditioner_blocks.0.cond.model.2.0.model.1.model.1.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.conditioner_blocks.0.cond.model.2.0.model.1.model.1.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.conditioner_blocks.0.cond.model.2.0.model.1.model.3.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.conditioner_blocks.0.cond.model.2.0.model.1.model.3.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.conditioner_blocks.0.cond.model.2.0.model.10.model.1.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.conditioner_blocks.0.cond.model.2.0.model.10.model.1.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.conditioner_blocks.0.cond.model.2.0.model.10.model.3.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.conditioner_blocks.0.cond.model.2.0.model.10.model.3.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.conditioner_blocks.0.cond.model.2.0.model.11.model.1.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.conditioner_blocks.0.cond.model.2.0.model.11.model.1.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.conditioner_blocks.0.cond.model.2.0.model.11.model.3.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.conditioner_blocks.0.cond.model.2.0.model.11.model.3.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.conditioner_blocks.0.cond.model.2.0.model.12.model.1.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.conditioner_blocks.0.cond.model.2.0.model.12.model.1.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.conditioner_blocks.0.cond.model.2.0.model.12.model.3.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.conditioner_blocks.0.cond.model.2.0.model.12.model.3.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.conditioner_blocks.0.cond.model.2.0.model.13.model.1.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.conditioner_blocks.0.cond.model.2.0.model.13.model.1.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.conditioner_blocks.0.cond.model.2.0.model.13.model.3.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.conditioner_blocks.0.cond.model.2.0.model.13.model.3.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.conditioner_blocks.0.cond.model.2.0.model.14.model.1.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.conditioner_blocks.0.cond.model.2.0.model.14.model.1.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.conditioner_blocks.0.cond.model.2.0.model.14.model.3.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.conditioner_blocks.0.cond.model.2.0.model.14.model.3.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.conditioner_blocks.0.cond.model.2.0.model.15.model.1.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.conditioner_blocks.0.cond.model.2.0.model.15.model.1.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.conditioner_blocks.0.cond.model.2.0.model.15.model.3.bias": 
"pytorch_model-00001-of-00004.bin", + "priors.1.conditioner_blocks.0.cond.model.2.0.model.15.model.3.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.conditioner_blocks.0.cond.model.2.0.model.2.model.1.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.conditioner_blocks.0.cond.model.2.0.model.2.model.1.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.conditioner_blocks.0.cond.model.2.0.model.2.model.3.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.conditioner_blocks.0.cond.model.2.0.model.2.model.3.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.conditioner_blocks.0.cond.model.2.0.model.3.model.1.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.conditioner_blocks.0.cond.model.2.0.model.3.model.1.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.conditioner_blocks.0.cond.model.2.0.model.3.model.3.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.conditioner_blocks.0.cond.model.2.0.model.3.model.3.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.conditioner_blocks.0.cond.model.2.0.model.4.model.1.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.conditioner_blocks.0.cond.model.2.0.model.4.model.1.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.conditioner_blocks.0.cond.model.2.0.model.4.model.3.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.conditioner_blocks.0.cond.model.2.0.model.4.model.3.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.conditioner_blocks.0.cond.model.2.0.model.5.model.1.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.conditioner_blocks.0.cond.model.2.0.model.5.model.1.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.conditioner_blocks.0.cond.model.2.0.model.5.model.3.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.conditioner_blocks.0.cond.model.2.0.model.5.model.3.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.conditioner_blocks.0.cond.model.2.0.model.6.model.1.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.conditioner_blocks.0.cond.model.2.0.model.6.model.1.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.conditioner_blocks.0.cond.model.2.0.model.6.model.3.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.conditioner_blocks.0.cond.model.2.0.model.6.model.3.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.conditioner_blocks.0.cond.model.2.0.model.7.model.1.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.conditioner_blocks.0.cond.model.2.0.model.7.model.1.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.conditioner_blocks.0.cond.model.2.0.model.7.model.3.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.conditioner_blocks.0.cond.model.2.0.model.7.model.3.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.conditioner_blocks.0.cond.model.2.0.model.8.model.1.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.conditioner_blocks.0.cond.model.2.0.model.8.model.1.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.conditioner_blocks.0.cond.model.2.0.model.8.model.3.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.conditioner_blocks.0.cond.model.2.0.model.8.model.3.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.conditioner_blocks.0.cond.model.2.0.model.9.model.1.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.conditioner_blocks.0.cond.model.2.0.model.9.model.1.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.conditioner_blocks.0.cond.model.2.0.model.9.model.3.bias": "pytorch_model-00001-of-00004.bin", + 
"priors.1.conditioner_blocks.0.cond.model.2.0.model.9.model.3.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.conditioner_blocks.0.cond.model.2.1.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.conditioner_blocks.0.cond.model.2.1.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.conditioner_blocks.0.ln.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.conditioner_blocks.0.ln.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.conditioner_blocks.0.x_emb.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.pos_emb.pos_emb": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.0.attn.c_attn.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.0.attn.c_attn.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.0.attn.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.0.attn.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.0.ln_0.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.0.ln_0.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.0.ln_1.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.0.ln_1.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.0.mlp.c_fc.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.0.mlp.c_fc.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.0.mlp.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.0.mlp.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.1.attn.c_attn.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.1.attn.c_attn.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.1.attn.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.1.attn.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.1.ln_0.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.1.ln_0.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.1.ln_1.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.1.ln_1.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.1.mlp.c_fc.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.1.mlp.c_fc.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.1.mlp.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.1.mlp.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.10.attn.c_attn.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.10.attn.c_attn.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.10.attn.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.10.attn.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.10.ln_0.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.10.ln_0.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.10.ln_1.bias": 
"pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.10.ln_1.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.10.mlp.c_fc.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.10.mlp.c_fc.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.10.mlp.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.10.mlp.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.11.attn.c_attn.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.11.attn.c_attn.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.11.attn.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.11.attn.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.11.ln_0.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.11.ln_0.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.11.ln_1.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.11.ln_1.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.11.mlp.c_fc.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.11.mlp.c_fc.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.11.mlp.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.11.mlp.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.12.attn.c_attn.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.12.attn.c_attn.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.12.attn.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.12.attn.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.12.ln_0.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.12.ln_0.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.12.ln_1.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.12.ln_1.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.12.mlp.c_fc.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.12.mlp.c_fc.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.12.mlp.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.12.mlp.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.13.attn.c_attn.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.13.attn.c_attn.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.13.attn.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.13.attn.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.13.ln_0.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.13.ln_0.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.13.ln_1.bias": "pytorch_model-00001-of-00004.bin", + 
"priors.1.prior.transformer._attn_mods.13.ln_1.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.13.mlp.c_fc.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.13.mlp.c_fc.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.13.mlp.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.13.mlp.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.14.attn.c_attn.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.14.attn.c_attn.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.14.attn.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.14.attn.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.14.ln_0.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.14.ln_0.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.14.ln_1.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.14.ln_1.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.14.mlp.c_fc.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.14.mlp.c_fc.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.14.mlp.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.14.mlp.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.15.attn.c_attn.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.15.attn.c_attn.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.15.attn.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.15.attn.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.15.ln_0.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.15.ln_0.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.15.ln_1.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.15.ln_1.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.15.mlp.c_fc.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.15.mlp.c_fc.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.15.mlp.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.15.mlp.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.16.attn.c_attn.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.16.attn.c_attn.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.16.attn.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.16.attn.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.16.ln_0.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.16.ln_0.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.16.ln_1.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.16.ln_1.weight": 
"pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.16.mlp.c_fc.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.16.mlp.c_fc.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.16.mlp.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.16.mlp.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.17.attn.c_attn.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.17.attn.c_attn.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.17.attn.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.17.attn.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.17.ln_0.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.17.ln_0.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.17.ln_1.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.17.ln_1.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.17.mlp.c_fc.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.17.mlp.c_fc.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.17.mlp.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.17.mlp.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.18.attn.c_attn.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.18.attn.c_attn.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.18.attn.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.18.attn.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.18.ln_0.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.18.ln_0.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.18.ln_1.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.18.ln_1.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.18.mlp.c_fc.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.18.mlp.c_fc.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.18.mlp.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.18.mlp.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.19.attn.c_attn.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.19.attn.c_attn.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.19.attn.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.19.attn.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.19.ln_0.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.19.ln_0.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.19.ln_1.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.19.ln_1.weight": "pytorch_model-00001-of-00004.bin", + 
"priors.1.prior.transformer._attn_mods.19.mlp.c_fc.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.19.mlp.c_fc.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.19.mlp.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.19.mlp.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.2.attn.c_attn.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.2.attn.c_attn.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.2.attn.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.2.attn.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.2.ln_0.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.2.ln_0.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.2.ln_1.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.2.ln_1.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.2.mlp.c_fc.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.2.mlp.c_fc.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.2.mlp.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.2.mlp.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.20.attn.c_attn.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.20.attn.c_attn.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.20.attn.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.20.attn.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.20.ln_0.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.20.ln_0.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.20.ln_1.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.20.ln_1.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.20.mlp.c_fc.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.20.mlp.c_fc.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.20.mlp.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.20.mlp.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.21.attn.c_attn.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.21.attn.c_attn.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.21.attn.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.21.attn.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.21.ln_0.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.21.ln_0.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.21.ln_1.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.21.ln_1.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.21.mlp.c_fc.bias": 
"pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.21.mlp.c_fc.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.21.mlp.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.21.mlp.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.22.attn.c_attn.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.22.attn.c_attn.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.22.attn.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.22.attn.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.22.ln_0.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.22.ln_0.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.22.ln_1.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.22.ln_1.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.22.mlp.c_fc.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.22.mlp.c_fc.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.22.mlp.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.22.mlp.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.23.attn.c_attn.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.23.attn.c_attn.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.23.attn.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.23.attn.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.23.ln_0.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.23.ln_0.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.23.ln_1.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.23.ln_1.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.23.mlp.c_fc.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.23.mlp.c_fc.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.23.mlp.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.23.mlp.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.24.attn.c_attn.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.24.attn.c_attn.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.24.attn.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.24.attn.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.24.ln_0.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.24.ln_0.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.24.ln_1.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.24.ln_1.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.24.mlp.c_fc.bias": "pytorch_model-00001-of-00004.bin", + 
"priors.1.prior.transformer._attn_mods.24.mlp.c_fc.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.24.mlp.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.24.mlp.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.25.attn.c_attn.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.25.attn.c_attn.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.25.attn.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.25.attn.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.25.ln_0.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.25.ln_0.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.25.ln_1.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.25.ln_1.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.25.mlp.c_fc.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.25.mlp.c_fc.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.25.mlp.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.25.mlp.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.26.attn.c_attn.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.26.attn.c_attn.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.26.attn.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.26.attn.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.26.ln_0.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.26.ln_0.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.26.ln_1.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.26.ln_1.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.26.mlp.c_fc.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.26.mlp.c_fc.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.26.mlp.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.26.mlp.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.27.attn.c_attn.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.27.attn.c_attn.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.27.attn.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.27.attn.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.27.ln_0.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.27.ln_0.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.27.ln_1.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.27.ln_1.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.27.mlp.c_fc.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.27.mlp.c_fc.weight": 
"pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.27.mlp.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.27.mlp.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.28.attn.c_attn.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.28.attn.c_attn.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.28.attn.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.28.attn.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.28.ln_0.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.28.ln_0.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.28.ln_1.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.28.ln_1.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.28.mlp.c_fc.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.28.mlp.c_fc.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.28.mlp.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.28.mlp.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.29.attn.c_attn.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.29.attn.c_attn.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.29.attn.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.29.attn.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.29.ln_0.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.29.ln_0.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.29.ln_1.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.29.ln_1.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.29.mlp.c_fc.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.29.mlp.c_fc.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.29.mlp.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.29.mlp.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.3.attn.c_attn.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.3.attn.c_attn.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.3.attn.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.3.attn.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.3.ln_0.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.3.ln_0.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.3.ln_1.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.3.ln_1.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.3.mlp.c_fc.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.3.mlp.c_fc.weight": "pytorch_model-00001-of-00004.bin", + 
"priors.1.prior.transformer._attn_mods.3.mlp.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.3.mlp.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.30.attn.c_attn.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.30.attn.c_attn.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.30.attn.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.30.attn.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.30.ln_0.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.30.ln_0.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.30.ln_1.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.30.ln_1.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.30.mlp.c_fc.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.30.mlp.c_fc.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.30.mlp.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.30.mlp.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.31.attn.c_attn.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.31.attn.c_attn.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.31.attn.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.31.attn.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.31.ln_0.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.31.ln_0.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.31.ln_1.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.31.ln_1.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.31.mlp.c_fc.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.31.mlp.c_fc.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.31.mlp.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.31.mlp.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.32.attn.c_attn.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.32.attn.c_attn.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.32.attn.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.32.attn.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.32.ln_0.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.32.ln_0.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.32.ln_1.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.32.ln_1.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.32.mlp.c_fc.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.32.mlp.c_fc.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.32.mlp.c_proj.bias": 
"pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.32.mlp.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.33.attn.c_attn.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.33.attn.c_attn.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.33.attn.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.33.attn.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.33.ln_0.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.33.ln_0.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.33.ln_1.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.33.ln_1.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.33.mlp.c_fc.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.33.mlp.c_fc.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.33.mlp.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.33.mlp.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.34.attn.c_attn.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.34.attn.c_attn.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.34.attn.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.34.attn.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.34.ln_0.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.34.ln_0.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.34.ln_1.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.34.ln_1.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.34.mlp.c_fc.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.34.mlp.c_fc.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.34.mlp.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.34.mlp.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.35.attn.c_attn.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.35.attn.c_attn.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.35.attn.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.35.attn.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.35.ln_0.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.35.ln_0.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.35.ln_1.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.35.ln_1.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.35.mlp.c_fc.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.35.mlp.c_fc.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.35.mlp.c_proj.bias": "pytorch_model-00001-of-00004.bin", + 
"priors.1.prior.transformer._attn_mods.35.mlp.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.36.attn.c_attn.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.36.attn.c_attn.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.36.attn.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.36.attn.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.36.ln_0.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.36.ln_0.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.36.ln_1.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.36.ln_1.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.36.mlp.c_fc.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.36.mlp.c_fc.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.36.mlp.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.36.mlp.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.37.attn.c_attn.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.37.attn.c_attn.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.37.attn.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.37.attn.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.37.ln_0.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.37.ln_0.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.37.ln_1.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.37.ln_1.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.37.mlp.c_fc.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.37.mlp.c_fc.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.37.mlp.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.37.mlp.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.38.attn.c_attn.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.38.attn.c_attn.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.38.attn.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.38.attn.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.38.ln_0.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.38.ln_0.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.38.ln_1.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.38.ln_1.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.38.mlp.c_fc.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.38.mlp.c_fc.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.38.mlp.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.38.mlp.c_proj.weight": 
"pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.39.attn.c_attn.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.39.attn.c_attn.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.39.attn.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.39.attn.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.39.ln_0.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.39.ln_0.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.39.ln_1.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.39.ln_1.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.39.mlp.c_fc.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.39.mlp.c_fc.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.39.mlp.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.39.mlp.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.4.attn.c_attn.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.4.attn.c_attn.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.4.attn.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.4.attn.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.4.ln_0.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.4.ln_0.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.4.ln_1.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.4.ln_1.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.4.mlp.c_fc.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.4.mlp.c_fc.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.4.mlp.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.4.mlp.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.40.attn.c_attn.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.40.attn.c_attn.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.40.attn.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.40.attn.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.40.ln_0.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.40.ln_0.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.40.ln_1.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.40.ln_1.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.40.mlp.c_fc.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.40.mlp.c_fc.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.40.mlp.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.40.mlp.c_proj.weight": "pytorch_model-00001-of-00004.bin", + 
"priors.1.prior.transformer._attn_mods.41.attn.c_attn.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.41.attn.c_attn.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.41.attn.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.41.attn.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.41.ln_0.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.41.ln_0.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.41.ln_1.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.41.ln_1.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.41.mlp.c_fc.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.41.mlp.c_fc.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.41.mlp.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.41.mlp.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.42.attn.c_attn.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.42.attn.c_attn.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.42.attn.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.42.attn.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.42.ln_0.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.42.ln_0.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.42.ln_1.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.42.ln_1.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.42.mlp.c_fc.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.42.mlp.c_fc.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.42.mlp.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.42.mlp.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.43.attn.c_attn.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.43.attn.c_attn.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.43.attn.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.43.attn.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.43.ln_0.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.43.ln_0.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.43.ln_1.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.43.ln_1.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.43.mlp.c_fc.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.43.mlp.c_fc.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.43.mlp.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.43.mlp.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.44.attn.c_attn.bias": 
"pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.44.attn.c_attn.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.44.attn.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.44.attn.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.44.ln_0.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.44.ln_0.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.44.ln_1.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.44.ln_1.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.44.mlp.c_fc.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.44.mlp.c_fc.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.44.mlp.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.44.mlp.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.45.attn.c_attn.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.45.attn.c_attn.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.45.attn.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.45.attn.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.45.ln_0.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.45.ln_0.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.45.ln_1.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.45.ln_1.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.45.mlp.c_fc.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.45.mlp.c_fc.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.45.mlp.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.45.mlp.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.46.attn.c_attn.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.46.attn.c_attn.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.46.attn.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.46.attn.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.46.ln_0.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.46.ln_0.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.46.ln_1.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.46.ln_1.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.46.mlp.c_fc.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.46.mlp.c_fc.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.46.mlp.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.46.mlp.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.47.attn.c_attn.bias": "pytorch_model-00001-of-00004.bin", + 
"priors.1.prior.transformer._attn_mods.47.attn.c_attn.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.47.attn.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.47.attn.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.47.ln_0.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.47.ln_0.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.47.ln_1.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.47.ln_1.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.47.mlp.c_fc.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.47.mlp.c_fc.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.47.mlp.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.47.mlp.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.48.attn.c_attn.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.48.attn.c_attn.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.48.attn.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.48.attn.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.48.ln_0.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.48.ln_0.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.48.ln_1.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.48.ln_1.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.48.mlp.c_fc.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.48.mlp.c_fc.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.48.mlp.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.48.mlp.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.49.attn.c_attn.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.49.attn.c_attn.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.49.attn.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.49.attn.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.49.ln_0.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.49.ln_0.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.49.ln_1.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.49.ln_1.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.49.mlp.c_fc.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.49.mlp.c_fc.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.49.mlp.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.49.mlp.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.5.attn.c_attn.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.5.attn.c_attn.weight": 
"pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.5.attn.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.5.attn.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.5.ln_0.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.5.ln_0.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.5.ln_1.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.5.ln_1.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.5.mlp.c_fc.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.5.mlp.c_fc.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.5.mlp.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.5.mlp.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.50.attn.c_attn.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.50.attn.c_attn.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.50.attn.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.50.attn.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.50.ln_0.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.50.ln_0.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.50.ln_1.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.50.ln_1.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.50.mlp.c_fc.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.50.mlp.c_fc.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.50.mlp.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.50.mlp.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.51.attn.c_attn.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.51.attn.c_attn.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.51.attn.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.51.attn.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.51.ln_0.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.51.ln_0.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.51.ln_1.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.51.ln_1.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.51.mlp.c_fc.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.51.mlp.c_fc.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.51.mlp.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.51.mlp.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.52.attn.c_attn.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.52.attn.c_attn.weight": "pytorch_model-00001-of-00004.bin", + 
"priors.1.prior.transformer._attn_mods.52.attn.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.52.attn.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.52.ln_0.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.52.ln_0.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.52.ln_1.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.52.ln_1.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.52.mlp.c_fc.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.52.mlp.c_fc.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.52.mlp.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.52.mlp.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.53.attn.c_attn.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.53.attn.c_attn.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.53.attn.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.53.attn.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.53.ln_0.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.53.ln_0.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.53.ln_1.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.53.ln_1.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.53.mlp.c_fc.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.53.mlp.c_fc.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.53.mlp.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.53.mlp.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.54.attn.c_attn.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.54.attn.c_attn.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.54.attn.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.54.attn.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.54.ln_0.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.54.ln_0.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.54.ln_1.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.54.ln_1.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.54.mlp.c_fc.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.54.mlp.c_fc.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.54.mlp.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.54.mlp.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.55.attn.c_attn.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.55.attn.c_attn.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.55.attn.c_proj.bias": 
"pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.55.attn.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.55.ln_0.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.55.ln_0.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.55.ln_1.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.55.ln_1.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.55.mlp.c_fc.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.55.mlp.c_fc.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.55.mlp.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.55.mlp.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.56.attn.c_attn.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.56.attn.c_attn.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.56.attn.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.56.attn.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.56.ln_0.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.56.ln_0.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.56.ln_1.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.56.ln_1.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.56.mlp.c_fc.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.56.mlp.c_fc.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.56.mlp.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.56.mlp.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.57.attn.c_attn.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.57.attn.c_attn.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.57.attn.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.57.attn.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.57.ln_0.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.57.ln_0.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.57.ln_1.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.57.ln_1.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.57.mlp.c_fc.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.57.mlp.c_fc.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.57.mlp.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.57.mlp.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.58.attn.c_attn.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.58.attn.c_attn.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.58.attn.c_proj.bias": "pytorch_model-00001-of-00004.bin", + 
"priors.1.prior.transformer._attn_mods.58.attn.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.58.ln_0.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.58.ln_0.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.58.ln_1.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.58.ln_1.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.58.mlp.c_fc.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.58.mlp.c_fc.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.58.mlp.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.58.mlp.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.59.attn.c_attn.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.59.attn.c_attn.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.59.attn.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.59.attn.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.59.ln_0.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.59.ln_0.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.59.ln_1.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.59.ln_1.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.59.mlp.c_fc.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.59.mlp.c_fc.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.59.mlp.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.59.mlp.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.6.attn.c_attn.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.6.attn.c_attn.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.6.attn.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.6.attn.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.6.ln_0.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.6.ln_0.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.6.ln_1.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.6.ln_1.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.6.mlp.c_fc.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.6.mlp.c_fc.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.6.mlp.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.6.mlp.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.60.attn.c_attn.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.60.attn.c_attn.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.60.attn.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.60.attn.c_proj.weight": 
"pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.60.ln_0.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.60.ln_0.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.60.ln_1.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.60.ln_1.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.60.mlp.c_fc.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.60.mlp.c_fc.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.60.mlp.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.60.mlp.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.61.attn.c_attn.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.61.attn.c_attn.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.61.attn.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.61.attn.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.61.ln_0.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.61.ln_0.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.61.ln_1.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.61.ln_1.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.61.mlp.c_fc.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.61.mlp.c_fc.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.61.mlp.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.61.mlp.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.62.attn.c_attn.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.62.attn.c_attn.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.62.attn.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.62.attn.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.62.ln_0.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.62.ln_0.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.62.ln_1.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.62.ln_1.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.62.mlp.c_fc.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.62.mlp.c_fc.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.62.mlp.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.62.mlp.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.63.attn.c_attn.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.63.attn.c_attn.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.63.attn.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.63.attn.c_proj.weight": "pytorch_model-00001-of-00004.bin", + 
"priors.1.prior.transformer._attn_mods.63.ln_0.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.63.ln_0.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.63.ln_1.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.63.ln_1.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.63.mlp.c_fc.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.63.mlp.c_fc.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.63.mlp.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.63.mlp.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.64.attn.c_attn.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.64.attn.c_attn.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.64.attn.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.64.attn.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.64.ln_0.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.64.ln_0.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.64.ln_1.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.64.ln_1.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.64.mlp.c_fc.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.64.mlp.c_fc.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.64.mlp.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.64.mlp.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.65.attn.c_attn.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.65.attn.c_attn.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.65.attn.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.65.attn.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.65.ln_0.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.65.ln_0.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.65.ln_1.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.65.ln_1.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.65.mlp.c_fc.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.65.mlp.c_fc.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.65.mlp.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.65.mlp.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.66.attn.c_attn.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.66.attn.c_attn.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.66.attn.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.66.attn.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.66.ln_0.bias": 
"pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.66.ln_0.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.66.ln_1.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.66.ln_1.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.66.mlp.c_fc.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.66.mlp.c_fc.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.66.mlp.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.66.mlp.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.67.attn.c_attn.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.67.attn.c_attn.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.67.attn.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.67.attn.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.67.ln_0.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.67.ln_0.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.67.ln_1.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.67.ln_1.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.67.mlp.c_fc.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.67.mlp.c_fc.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.67.mlp.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.67.mlp.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.68.attn.c_attn.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.68.attn.c_attn.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.68.attn.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.68.attn.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.68.ln_0.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.68.ln_0.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.68.ln_1.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.68.ln_1.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.68.mlp.c_fc.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.68.mlp.c_fc.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.68.mlp.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.68.mlp.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.69.attn.c_attn.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.69.attn.c_attn.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.69.attn.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.69.attn.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.69.ln_0.bias": "pytorch_model-00001-of-00004.bin", + 
"priors.1.prior.transformer._attn_mods.69.ln_0.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.69.ln_1.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.69.ln_1.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.69.mlp.c_fc.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.69.mlp.c_fc.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.69.mlp.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.69.mlp.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.7.attn.c_attn.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.7.attn.c_attn.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.7.attn.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.7.attn.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.7.ln_0.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.7.ln_0.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.7.ln_1.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.7.ln_1.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.7.mlp.c_fc.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.7.mlp.c_fc.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.7.mlp.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.7.mlp.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.70.attn.c_attn.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.70.attn.c_attn.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.70.attn.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.70.attn.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.70.ln_0.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.70.ln_0.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.70.ln_1.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.70.ln_1.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.70.mlp.c_fc.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.70.mlp.c_fc.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.70.mlp.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.70.mlp.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.71.attn.c_attn.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.71.attn.c_attn.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.71.attn.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.71.attn.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.71.ln_0.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.71.ln_0.weight": 
"pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.71.ln_1.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.71.ln_1.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.71.mlp.c_fc.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.71.mlp.c_fc.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.71.mlp.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.71.mlp.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.8.attn.c_attn.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.8.attn.c_attn.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.8.attn.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.8.attn.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.8.ln_0.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.8.ln_0.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.8.ln_1.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.8.ln_1.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.8.mlp.c_fc.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.8.mlp.c_fc.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.8.mlp.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.8.mlp.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.9.attn.c_attn.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.9.attn.c_attn.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.9.attn.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.9.attn.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.9.ln_0.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.9.ln_0.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.9.ln_1.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.9.ln_1.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.9.mlp.c_fc.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.9.mlp.c_fc.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.9.mlp.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.transformer._attn_mods.9.mlp.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.x_emb.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.prior.x_out.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.y_emb.artist_emb.emb.weight": "pytorch_model-00001-of-00004.bin", + "priors.1.y_emb.bow_genre_emb.emb.weight": "pytorch_model-00001-of-00004.bin", + "priors.2.prime_prior.pos_emb.pos_emb": "pytorch_model-00001-of-00004.bin", + "priors.2.prime_prior.start_token": "pytorch_model-00001-of-00004.bin", + "priors.2.prime_prior.transformer._attn_mods.0.attn.c_attn.bias": "pytorch_model-00001-of-00004.bin", + "priors.2.prime_prior.transformer._attn_mods.0.attn.c_attn.weight": 
"pytorch_model-00001-of-00004.bin", + "priors.2.prime_prior.transformer._attn_mods.0.attn.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.2.prime_prior.transformer._attn_mods.0.attn.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.2.prime_prior.transformer._attn_mods.0.ln_0.bias": "pytorch_model-00001-of-00004.bin", + "priors.2.prime_prior.transformer._attn_mods.0.ln_0.weight": "pytorch_model-00001-of-00004.bin", + "priors.2.prime_prior.transformer._attn_mods.0.ln_1.bias": "pytorch_model-00001-of-00004.bin", + "priors.2.prime_prior.transformer._attn_mods.0.ln_1.weight": "pytorch_model-00001-of-00004.bin", + "priors.2.prime_prior.transformer._attn_mods.0.mlp.c_fc.bias": "pytorch_model-00001-of-00004.bin", + "priors.2.prime_prior.transformer._attn_mods.0.mlp.c_fc.weight": "pytorch_model-00001-of-00004.bin", + "priors.2.prime_prior.transformer._attn_mods.0.mlp.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.2.prime_prior.transformer._attn_mods.0.mlp.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.2.prime_prior.transformer._attn_mods.1.attn.c_attn.bias": "pytorch_model-00001-of-00004.bin", + "priors.2.prime_prior.transformer._attn_mods.1.attn.c_attn.weight": "pytorch_model-00001-of-00004.bin", + "priors.2.prime_prior.transformer._attn_mods.1.attn.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.2.prime_prior.transformer._attn_mods.1.attn.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.2.prime_prior.transformer._attn_mods.1.ln_0.bias": "pytorch_model-00001-of-00004.bin", + "priors.2.prime_prior.transformer._attn_mods.1.ln_0.weight": "pytorch_model-00001-of-00004.bin", + "priors.2.prime_prior.transformer._attn_mods.1.ln_1.bias": "pytorch_model-00001-of-00004.bin", + "priors.2.prime_prior.transformer._attn_mods.1.ln_1.weight": "pytorch_model-00001-of-00004.bin", + "priors.2.prime_prior.transformer._attn_mods.1.mlp.c_fc.bias": "pytorch_model-00001-of-00004.bin", + "priors.2.prime_prior.transformer._attn_mods.1.mlp.c_fc.weight": "pytorch_model-00001-of-00004.bin", + "priors.2.prime_prior.transformer._attn_mods.1.mlp.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.2.prime_prior.transformer._attn_mods.1.mlp.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.2.prime_prior.transformer._attn_mods.10.attn.c_attn.bias": "pytorch_model-00001-of-00004.bin", + "priors.2.prime_prior.transformer._attn_mods.10.attn.c_attn.weight": "pytorch_model-00001-of-00004.bin", + "priors.2.prime_prior.transformer._attn_mods.10.attn.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.2.prime_prior.transformer._attn_mods.10.attn.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.2.prime_prior.transformer._attn_mods.10.ln_0.bias": "pytorch_model-00001-of-00004.bin", + "priors.2.prime_prior.transformer._attn_mods.10.ln_0.weight": "pytorch_model-00001-of-00004.bin", + "priors.2.prime_prior.transformer._attn_mods.10.ln_1.bias": "pytorch_model-00001-of-00004.bin", + "priors.2.prime_prior.transformer._attn_mods.10.ln_1.weight": "pytorch_model-00001-of-00004.bin", + "priors.2.prime_prior.transformer._attn_mods.10.mlp.c_fc.bias": "pytorch_model-00001-of-00004.bin", + "priors.2.prime_prior.transformer._attn_mods.10.mlp.c_fc.weight": "pytorch_model-00001-of-00004.bin", + "priors.2.prime_prior.transformer._attn_mods.10.mlp.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.2.prime_prior.transformer._attn_mods.10.mlp.c_proj.weight": "pytorch_model-00001-of-00004.bin", + 
"priors.2.prime_prior.transformer._attn_mods.11.attn.c_attn.bias": "pytorch_model-00001-of-00004.bin", + "priors.2.prime_prior.transformer._attn_mods.11.attn.c_attn.weight": "pytorch_model-00001-of-00004.bin", + "priors.2.prime_prior.transformer._attn_mods.11.attn.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.2.prime_prior.transformer._attn_mods.11.attn.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.2.prime_prior.transformer._attn_mods.11.ln_0.bias": "pytorch_model-00001-of-00004.bin", + "priors.2.prime_prior.transformer._attn_mods.11.ln_0.weight": "pytorch_model-00001-of-00004.bin", + "priors.2.prime_prior.transformer._attn_mods.11.ln_1.bias": "pytorch_model-00001-of-00004.bin", + "priors.2.prime_prior.transformer._attn_mods.11.ln_1.weight": "pytorch_model-00001-of-00004.bin", + "priors.2.prime_prior.transformer._attn_mods.11.mlp.c_fc.bias": "pytorch_model-00001-of-00004.bin", + "priors.2.prime_prior.transformer._attn_mods.11.mlp.c_fc.weight": "pytorch_model-00001-of-00004.bin", + "priors.2.prime_prior.transformer._attn_mods.11.mlp.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.2.prime_prior.transformer._attn_mods.11.mlp.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.2.prime_prior.transformer._attn_mods.12.attn.c_attn.bias": "pytorch_model-00001-of-00004.bin", + "priors.2.prime_prior.transformer._attn_mods.12.attn.c_attn.weight": "pytorch_model-00001-of-00004.bin", + "priors.2.prime_prior.transformer._attn_mods.12.attn.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.2.prime_prior.transformer._attn_mods.12.attn.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.2.prime_prior.transformer._attn_mods.12.ln_0.bias": "pytorch_model-00001-of-00004.bin", + "priors.2.prime_prior.transformer._attn_mods.12.ln_0.weight": "pytorch_model-00001-of-00004.bin", + "priors.2.prime_prior.transformer._attn_mods.12.ln_1.bias": "pytorch_model-00001-of-00004.bin", + "priors.2.prime_prior.transformer._attn_mods.12.ln_1.weight": "pytorch_model-00001-of-00004.bin", + "priors.2.prime_prior.transformer._attn_mods.12.mlp.c_fc.bias": "pytorch_model-00001-of-00004.bin", + "priors.2.prime_prior.transformer._attn_mods.12.mlp.c_fc.weight": "pytorch_model-00001-of-00004.bin", + "priors.2.prime_prior.transformer._attn_mods.12.mlp.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.2.prime_prior.transformer._attn_mods.12.mlp.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.2.prime_prior.transformer._attn_mods.13.attn.c_attn.bias": "pytorch_model-00001-of-00004.bin", + "priors.2.prime_prior.transformer._attn_mods.13.attn.c_attn.weight": "pytorch_model-00001-of-00004.bin", + "priors.2.prime_prior.transformer._attn_mods.13.attn.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.2.prime_prior.transformer._attn_mods.13.attn.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.2.prime_prior.transformer._attn_mods.13.ln_0.bias": "pytorch_model-00001-of-00004.bin", + "priors.2.prime_prior.transformer._attn_mods.13.ln_0.weight": "pytorch_model-00001-of-00004.bin", + "priors.2.prime_prior.transformer._attn_mods.13.ln_1.bias": "pytorch_model-00001-of-00004.bin", + "priors.2.prime_prior.transformer._attn_mods.13.ln_1.weight": "pytorch_model-00001-of-00004.bin", + "priors.2.prime_prior.transformer._attn_mods.13.mlp.c_fc.bias": "pytorch_model-00001-of-00004.bin", + "priors.2.prime_prior.transformer._attn_mods.13.mlp.c_fc.weight": "pytorch_model-00001-of-00004.bin", + 
"priors.2.prime_prior.transformer._attn_mods.13.mlp.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.2.prime_prior.transformer._attn_mods.13.mlp.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.2.prime_prior.transformer._attn_mods.14.attn.c_attn.bias": "pytorch_model-00001-of-00004.bin", + "priors.2.prime_prior.transformer._attn_mods.14.attn.c_attn.weight": "pytorch_model-00001-of-00004.bin", + "priors.2.prime_prior.transformer._attn_mods.14.attn.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.2.prime_prior.transformer._attn_mods.14.attn.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.2.prime_prior.transformer._attn_mods.14.ln_0.bias": "pytorch_model-00001-of-00004.bin", + "priors.2.prime_prior.transformer._attn_mods.14.ln_0.weight": "pytorch_model-00001-of-00004.bin", + "priors.2.prime_prior.transformer._attn_mods.14.ln_1.bias": "pytorch_model-00001-of-00004.bin", + "priors.2.prime_prior.transformer._attn_mods.14.ln_1.weight": "pytorch_model-00001-of-00004.bin", + "priors.2.prime_prior.transformer._attn_mods.14.mlp.c_fc.bias": "pytorch_model-00001-of-00004.bin", + "priors.2.prime_prior.transformer._attn_mods.14.mlp.c_fc.weight": "pytorch_model-00001-of-00004.bin", + "priors.2.prime_prior.transformer._attn_mods.14.mlp.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.2.prime_prior.transformer._attn_mods.14.mlp.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.2.prime_prior.transformer._attn_mods.15.attn.c_attn.bias": "pytorch_model-00001-of-00004.bin", + "priors.2.prime_prior.transformer._attn_mods.15.attn.c_attn.weight": "pytorch_model-00001-of-00004.bin", + "priors.2.prime_prior.transformer._attn_mods.15.attn.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.2.prime_prior.transformer._attn_mods.15.attn.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.2.prime_prior.transformer._attn_mods.15.ln_0.bias": "pytorch_model-00001-of-00004.bin", + "priors.2.prime_prior.transformer._attn_mods.15.ln_0.weight": "pytorch_model-00001-of-00004.bin", + "priors.2.prime_prior.transformer._attn_mods.15.ln_1.bias": "pytorch_model-00001-of-00004.bin", + "priors.2.prime_prior.transformer._attn_mods.15.ln_1.weight": "pytorch_model-00001-of-00004.bin", + "priors.2.prime_prior.transformer._attn_mods.15.mlp.c_fc.bias": "pytorch_model-00001-of-00004.bin", + "priors.2.prime_prior.transformer._attn_mods.15.mlp.c_fc.weight": "pytorch_model-00001-of-00004.bin", + "priors.2.prime_prior.transformer._attn_mods.15.mlp.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.2.prime_prior.transformer._attn_mods.15.mlp.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.2.prime_prior.transformer._attn_mods.16.attn.c_attn.bias": "pytorch_model-00001-of-00004.bin", + "priors.2.prime_prior.transformer._attn_mods.16.attn.c_attn.weight": "pytorch_model-00001-of-00004.bin", + "priors.2.prime_prior.transformer._attn_mods.16.attn.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.2.prime_prior.transformer._attn_mods.16.attn.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.2.prime_prior.transformer._attn_mods.16.ln_0.bias": "pytorch_model-00001-of-00004.bin", + "priors.2.prime_prior.transformer._attn_mods.16.ln_0.weight": "pytorch_model-00001-of-00004.bin", + "priors.2.prime_prior.transformer._attn_mods.16.ln_1.bias": "pytorch_model-00001-of-00004.bin", + "priors.2.prime_prior.transformer._attn_mods.16.ln_1.weight": "pytorch_model-00001-of-00004.bin", + 
"priors.2.prime_prior.transformer._attn_mods.16.mlp.c_fc.bias": "pytorch_model-00001-of-00004.bin", + "priors.2.prime_prior.transformer._attn_mods.16.mlp.c_fc.weight": "pytorch_model-00001-of-00004.bin", + "priors.2.prime_prior.transformer._attn_mods.16.mlp.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.2.prime_prior.transformer._attn_mods.16.mlp.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.2.prime_prior.transformer._attn_mods.17.attn.c_attn.bias": "pytorch_model-00001-of-00004.bin", + "priors.2.prime_prior.transformer._attn_mods.17.attn.c_attn.weight": "pytorch_model-00001-of-00004.bin", + "priors.2.prime_prior.transformer._attn_mods.17.attn.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.2.prime_prior.transformer._attn_mods.17.attn.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.2.prime_prior.transformer._attn_mods.17.ln_0.bias": "pytorch_model-00001-of-00004.bin", + "priors.2.prime_prior.transformer._attn_mods.17.ln_0.weight": "pytorch_model-00001-of-00004.bin", + "priors.2.prime_prior.transformer._attn_mods.17.ln_1.bias": "pytorch_model-00001-of-00004.bin", + "priors.2.prime_prior.transformer._attn_mods.17.ln_1.weight": "pytorch_model-00001-of-00004.bin", + "priors.2.prime_prior.transformer._attn_mods.17.mlp.c_fc.bias": "pytorch_model-00001-of-00004.bin", + "priors.2.prime_prior.transformer._attn_mods.17.mlp.c_fc.weight": "pytorch_model-00001-of-00004.bin", + "priors.2.prime_prior.transformer._attn_mods.17.mlp.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.2.prime_prior.transformer._attn_mods.17.mlp.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.2.prime_prior.transformer._attn_mods.2.attn.c_attn.bias": "pytorch_model-00001-of-00004.bin", + "priors.2.prime_prior.transformer._attn_mods.2.attn.c_attn.weight": "pytorch_model-00001-of-00004.bin", + "priors.2.prime_prior.transformer._attn_mods.2.attn.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.2.prime_prior.transformer._attn_mods.2.attn.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.2.prime_prior.transformer._attn_mods.2.ln_0.bias": "pytorch_model-00001-of-00004.bin", + "priors.2.prime_prior.transformer._attn_mods.2.ln_0.weight": "pytorch_model-00001-of-00004.bin", + "priors.2.prime_prior.transformer._attn_mods.2.ln_1.bias": "pytorch_model-00001-of-00004.bin", + "priors.2.prime_prior.transformer._attn_mods.2.ln_1.weight": "pytorch_model-00001-of-00004.bin", + "priors.2.prime_prior.transformer._attn_mods.2.mlp.c_fc.bias": "pytorch_model-00001-of-00004.bin", + "priors.2.prime_prior.transformer._attn_mods.2.mlp.c_fc.weight": "pytorch_model-00001-of-00004.bin", + "priors.2.prime_prior.transformer._attn_mods.2.mlp.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.2.prime_prior.transformer._attn_mods.2.mlp.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.2.prime_prior.transformer._attn_mods.3.attn.c_attn.bias": "pytorch_model-00001-of-00004.bin", + "priors.2.prime_prior.transformer._attn_mods.3.attn.c_attn.weight": "pytorch_model-00001-of-00004.bin", + "priors.2.prime_prior.transformer._attn_mods.3.attn.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.2.prime_prior.transformer._attn_mods.3.attn.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.2.prime_prior.transformer._attn_mods.3.ln_0.bias": "pytorch_model-00001-of-00004.bin", + "priors.2.prime_prior.transformer._attn_mods.3.ln_0.weight": "pytorch_model-00001-of-00004.bin", + "priors.2.prime_prior.transformer._attn_mods.3.ln_1.bias": 
"pytorch_model-00001-of-00004.bin", + "priors.2.prime_prior.transformer._attn_mods.3.ln_1.weight": "pytorch_model-00001-of-00004.bin", + "priors.2.prime_prior.transformer._attn_mods.3.mlp.c_fc.bias": "pytorch_model-00001-of-00004.bin", + "priors.2.prime_prior.transformer._attn_mods.3.mlp.c_fc.weight": "pytorch_model-00001-of-00004.bin", + "priors.2.prime_prior.transformer._attn_mods.3.mlp.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.2.prime_prior.transformer._attn_mods.3.mlp.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.2.prime_prior.transformer._attn_mods.4.attn.c_attn.bias": "pytorch_model-00001-of-00004.bin", + "priors.2.prime_prior.transformer._attn_mods.4.attn.c_attn.weight": "pytorch_model-00001-of-00004.bin", + "priors.2.prime_prior.transformer._attn_mods.4.attn.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.2.prime_prior.transformer._attn_mods.4.attn.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.2.prime_prior.transformer._attn_mods.4.ln_0.bias": "pytorch_model-00001-of-00004.bin", + "priors.2.prime_prior.transformer._attn_mods.4.ln_0.weight": "pytorch_model-00001-of-00004.bin", + "priors.2.prime_prior.transformer._attn_mods.4.ln_1.bias": "pytorch_model-00001-of-00004.bin", + "priors.2.prime_prior.transformer._attn_mods.4.ln_1.weight": "pytorch_model-00001-of-00004.bin", + "priors.2.prime_prior.transformer._attn_mods.4.mlp.c_fc.bias": "pytorch_model-00001-of-00004.bin", + "priors.2.prime_prior.transformer._attn_mods.4.mlp.c_fc.weight": "pytorch_model-00001-of-00004.bin", + "priors.2.prime_prior.transformer._attn_mods.4.mlp.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.2.prime_prior.transformer._attn_mods.4.mlp.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.2.prime_prior.transformer._attn_mods.5.attn.c_attn.bias": "pytorch_model-00001-of-00004.bin", + "priors.2.prime_prior.transformer._attn_mods.5.attn.c_attn.weight": "pytorch_model-00001-of-00004.bin", + "priors.2.prime_prior.transformer._attn_mods.5.attn.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.2.prime_prior.transformer._attn_mods.5.attn.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.2.prime_prior.transformer._attn_mods.5.ln_0.bias": "pytorch_model-00001-of-00004.bin", + "priors.2.prime_prior.transformer._attn_mods.5.ln_0.weight": "pytorch_model-00001-of-00004.bin", + "priors.2.prime_prior.transformer._attn_mods.5.ln_1.bias": "pytorch_model-00001-of-00004.bin", + "priors.2.prime_prior.transformer._attn_mods.5.ln_1.weight": "pytorch_model-00001-of-00004.bin", + "priors.2.prime_prior.transformer._attn_mods.5.mlp.c_fc.bias": "pytorch_model-00001-of-00004.bin", + "priors.2.prime_prior.transformer._attn_mods.5.mlp.c_fc.weight": "pytorch_model-00001-of-00004.bin", + "priors.2.prime_prior.transformer._attn_mods.5.mlp.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.2.prime_prior.transformer._attn_mods.5.mlp.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.2.prime_prior.transformer._attn_mods.6.attn.c_attn.bias": "pytorch_model-00001-of-00004.bin", + "priors.2.prime_prior.transformer._attn_mods.6.attn.c_attn.weight": "pytorch_model-00001-of-00004.bin", + "priors.2.prime_prior.transformer._attn_mods.6.attn.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.2.prime_prior.transformer._attn_mods.6.attn.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.2.prime_prior.transformer._attn_mods.6.ln_0.bias": "pytorch_model-00001-of-00004.bin", + 
"priors.2.prime_prior.transformer._attn_mods.6.ln_0.weight": "pytorch_model-00001-of-00004.bin", + "priors.2.prime_prior.transformer._attn_mods.6.ln_1.bias": "pytorch_model-00001-of-00004.bin", + "priors.2.prime_prior.transformer._attn_mods.6.ln_1.weight": "pytorch_model-00001-of-00004.bin", + "priors.2.prime_prior.transformer._attn_mods.6.mlp.c_fc.bias": "pytorch_model-00001-of-00004.bin", + "priors.2.prime_prior.transformer._attn_mods.6.mlp.c_fc.weight": "pytorch_model-00001-of-00004.bin", + "priors.2.prime_prior.transformer._attn_mods.6.mlp.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.2.prime_prior.transformer._attn_mods.6.mlp.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.2.prime_prior.transformer._attn_mods.7.attn.c_attn.bias": "pytorch_model-00001-of-00004.bin", + "priors.2.prime_prior.transformer._attn_mods.7.attn.c_attn.weight": "pytorch_model-00001-of-00004.bin", + "priors.2.prime_prior.transformer._attn_mods.7.attn.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.2.prime_prior.transformer._attn_mods.7.attn.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.2.prime_prior.transformer._attn_mods.7.ln_0.bias": "pytorch_model-00001-of-00004.bin", + "priors.2.prime_prior.transformer._attn_mods.7.ln_0.weight": "pytorch_model-00001-of-00004.bin", + "priors.2.prime_prior.transformer._attn_mods.7.ln_1.bias": "pytorch_model-00001-of-00004.bin", + "priors.2.prime_prior.transformer._attn_mods.7.ln_1.weight": "pytorch_model-00001-of-00004.bin", + "priors.2.prime_prior.transformer._attn_mods.7.mlp.c_fc.bias": "pytorch_model-00001-of-00004.bin", + "priors.2.prime_prior.transformer._attn_mods.7.mlp.c_fc.weight": "pytorch_model-00001-of-00004.bin", + "priors.2.prime_prior.transformer._attn_mods.7.mlp.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.2.prime_prior.transformer._attn_mods.7.mlp.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.2.prime_prior.transformer._attn_mods.8.attn.c_attn.bias": "pytorch_model-00001-of-00004.bin", + "priors.2.prime_prior.transformer._attn_mods.8.attn.c_attn.weight": "pytorch_model-00001-of-00004.bin", + "priors.2.prime_prior.transformer._attn_mods.8.attn.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.2.prime_prior.transformer._attn_mods.8.attn.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.2.prime_prior.transformer._attn_mods.8.ln_0.bias": "pytorch_model-00001-of-00004.bin", + "priors.2.prime_prior.transformer._attn_mods.8.ln_0.weight": "pytorch_model-00001-of-00004.bin", + "priors.2.prime_prior.transformer._attn_mods.8.ln_1.bias": "pytorch_model-00001-of-00004.bin", + "priors.2.prime_prior.transformer._attn_mods.8.ln_1.weight": "pytorch_model-00001-of-00004.bin", + "priors.2.prime_prior.transformer._attn_mods.8.mlp.c_fc.bias": "pytorch_model-00001-of-00004.bin", + "priors.2.prime_prior.transformer._attn_mods.8.mlp.c_fc.weight": "pytorch_model-00001-of-00004.bin", + "priors.2.prime_prior.transformer._attn_mods.8.mlp.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.2.prime_prior.transformer._attn_mods.8.mlp.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.2.prime_prior.transformer._attn_mods.9.attn.c_attn.bias": "pytorch_model-00001-of-00004.bin", + "priors.2.prime_prior.transformer._attn_mods.9.attn.c_attn.weight": "pytorch_model-00001-of-00004.bin", + "priors.2.prime_prior.transformer._attn_mods.9.attn.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.2.prime_prior.transformer._attn_mods.9.attn.c_proj.weight": 
"pytorch_model-00001-of-00004.bin", + "priors.2.prime_prior.transformer._attn_mods.9.ln_0.bias": "pytorch_model-00001-of-00004.bin", + "priors.2.prime_prior.transformer._attn_mods.9.ln_0.weight": "pytorch_model-00001-of-00004.bin", + "priors.2.prime_prior.transformer._attn_mods.9.ln_1.bias": "pytorch_model-00001-of-00004.bin", + "priors.2.prime_prior.transformer._attn_mods.9.ln_1.weight": "pytorch_model-00001-of-00004.bin", + "priors.2.prime_prior.transformer._attn_mods.9.mlp.c_fc.bias": "pytorch_model-00001-of-00004.bin", + "priors.2.prime_prior.transformer._attn_mods.9.mlp.c_fc.weight": "pytorch_model-00001-of-00004.bin", + "priors.2.prime_prior.transformer._attn_mods.9.mlp.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.2.prime_prior.transformer._attn_mods.9.mlp.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.2.prime_prior.x_emb.weight": "pytorch_model-00001-of-00004.bin", + "priors.2.prime_state_ln.bias": "pytorch_model-00001-of-00004.bin", + "priors.2.prime_state_ln.weight": "pytorch_model-00001-of-00004.bin", + "priors.2.prime_state_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.2.prime_state_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.2.prime_x_out.weight": "pytorch_model-00001-of-00004.bin", + "priors.2.prior.pos_emb.pos_emb": "pytorch_model-00001-of-00004.bin", + "priors.2.prior.transformer._attn_mods.0.attn.c_attn.bias": "pytorch_model-00001-of-00004.bin", + "priors.2.prior.transformer._attn_mods.0.attn.c_attn.weight": "pytorch_model-00001-of-00004.bin", + "priors.2.prior.transformer._attn_mods.0.attn.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.2.prior.transformer._attn_mods.0.attn.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.2.prior.transformer._attn_mods.0.ln_0.bias": "pytorch_model-00001-of-00004.bin", + "priors.2.prior.transformer._attn_mods.0.ln_0.weight": "pytorch_model-00001-of-00004.bin", + "priors.2.prior.transformer._attn_mods.0.ln_1.bias": "pytorch_model-00001-of-00004.bin", + "priors.2.prior.transformer._attn_mods.0.ln_1.weight": "pytorch_model-00001-of-00004.bin", + "priors.2.prior.transformer._attn_mods.0.mlp.c_fc.bias": "pytorch_model-00001-of-00004.bin", + "priors.2.prior.transformer._attn_mods.0.mlp.c_fc.weight": "pytorch_model-00001-of-00004.bin", + "priors.2.prior.transformer._attn_mods.0.mlp.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.2.prior.transformer._attn_mods.0.mlp.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.2.prior.transformer._attn_mods.1.attn.c_attn.bias": "pytorch_model-00001-of-00004.bin", + "priors.2.prior.transformer._attn_mods.1.attn.c_attn.weight": "pytorch_model-00001-of-00004.bin", + "priors.2.prior.transformer._attn_mods.1.attn.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.2.prior.transformer._attn_mods.1.attn.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.2.prior.transformer._attn_mods.1.ln_0.bias": "pytorch_model-00001-of-00004.bin", + "priors.2.prior.transformer._attn_mods.1.ln_0.weight": "pytorch_model-00001-of-00004.bin", + "priors.2.prior.transformer._attn_mods.1.ln_1.bias": "pytorch_model-00001-of-00004.bin", + "priors.2.prior.transformer._attn_mods.1.ln_1.weight": "pytorch_model-00001-of-00004.bin", + "priors.2.prior.transformer._attn_mods.1.mlp.c_fc.bias": "pytorch_model-00001-of-00004.bin", + "priors.2.prior.transformer._attn_mods.1.mlp.c_fc.weight": "pytorch_model-00001-of-00004.bin", + "priors.2.prior.transformer._attn_mods.1.mlp.c_proj.bias": "pytorch_model-00001-of-00004.bin", + 
"priors.2.prior.transformer._attn_mods.1.mlp.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.2.prior.transformer._attn_mods.10.attn.c_attn.bias": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.10.attn.c_attn.weight": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.10.attn.c_proj.bias": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.10.attn.c_proj.weight": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.10.ln_0.bias": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.10.ln_0.weight": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.10.ln_1.bias": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.10.ln_1.weight": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.10.mlp.c_fc.bias": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.10.mlp.c_fc.weight": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.10.mlp.c_proj.bias": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.10.mlp.c_proj.weight": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.11.attn.c_attn.bias": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.11.attn.c_attn.weight": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.11.attn.c_proj.bias": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.11.attn.c_proj.weight": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.11.ln_0.bias": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.11.ln_0.weight": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.11.ln_1.bias": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.11.ln_1.weight": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.11.mlp.c_fc.bias": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.11.mlp.c_fc.weight": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.11.mlp.c_proj.bias": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.11.mlp.c_proj.weight": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.12.attn.c_attn.bias": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.12.attn.c_attn.weight": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.12.attn.c_proj.bias": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.12.attn.c_proj.weight": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.12.ln_0.bias": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.12.ln_0.weight": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.12.ln_1.bias": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.12.ln_1.weight": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.12.mlp.c_fc.bias": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.12.mlp.c_fc.weight": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.12.mlp.c_proj.bias": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.12.mlp.c_proj.weight": 
"pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.13.attn.c_attn.bias": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.13.attn.c_attn.weight": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.13.attn.c_proj.bias": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.13.attn.c_proj.weight": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.13.ln_0.bias": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.13.ln_0.weight": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.13.ln_1.bias": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.13.ln_1.weight": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.13.mlp.c_fc.bias": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.13.mlp.c_fc.weight": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.13.mlp.c_proj.bias": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.13.mlp.c_proj.weight": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.14.attn.c_attn.bias": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.14.attn.c_attn.weight": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.14.attn.c_proj.bias": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.14.attn.c_proj.weight": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.14.ln_0.bias": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.14.ln_0.weight": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.14.ln_1.bias": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.14.ln_1.weight": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.14.mlp.c_fc.bias": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.14.mlp.c_fc.weight": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.14.mlp.c_proj.bias": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.14.mlp.c_proj.weight": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.15.attn.c_attn.bias": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.15.attn.c_attn.weight": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.15.attn.c_proj.bias": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.15.attn.c_proj.weight": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.15.ln_0.bias": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.15.ln_0.weight": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.15.ln_1.bias": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.15.ln_1.weight": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.15.mlp.c_fc.bias": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.15.mlp.c_fc.weight": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.15.mlp.c_proj.bias": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.15.mlp.c_proj.weight": "pytorch_model-00002-of-00004.bin", + 
"priors.2.prior.transformer._attn_mods.16.attn.c_attn.bias": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.16.attn.c_attn.weight": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.16.attn.c_proj.bias": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.16.attn.c_proj.weight": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.16.ln_0.bias": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.16.ln_0.weight": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.16.ln_1.bias": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.16.ln_1.weight": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.16.mlp.c_fc.bias": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.16.mlp.c_fc.weight": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.16.mlp.c_proj.bias": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.16.mlp.c_proj.weight": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.17.attn.c_attn.bias": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.17.attn.c_attn.weight": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.17.attn.c_proj.bias": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.17.attn.c_proj.weight": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.17.ln_0.bias": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.17.ln_0.weight": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.17.ln_1.bias": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.17.ln_1.weight": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.17.mlp.c_fc.bias": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.17.mlp.c_fc.weight": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.17.mlp.c_proj.bias": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.17.mlp.c_proj.weight": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.18.attn.c_attn.bias": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.18.attn.c_attn.weight": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.18.attn.c_enc_kv.bias": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.18.attn.c_enc_kv.weight": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.18.attn.c_proj.bias": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.18.attn.c_proj.weight": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.18.ln_0.bias": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.18.ln_0.weight": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.18.ln_1.bias": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.18.ln_1.weight": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.18.mlp.c_fc.bias": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.18.mlp.c_fc.weight": "pytorch_model-00002-of-00004.bin", + 
"priors.2.prior.transformer._attn_mods.18.mlp.c_proj.bias": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.18.mlp.c_proj.weight": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.19.attn.c_attn.bias": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.19.attn.c_attn.weight": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.19.attn.c_proj.bias": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.19.attn.c_proj.weight": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.19.ln_0.bias": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.19.ln_0.weight": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.19.ln_1.bias": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.19.ln_1.weight": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.19.mlp.c_fc.bias": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.19.mlp.c_fc.weight": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.19.mlp.c_proj.bias": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.19.mlp.c_proj.weight": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.2.attn.c_attn.bias": "pytorch_model-00001-of-00004.bin", + "priors.2.prior.transformer._attn_mods.2.attn.c_attn.weight": "pytorch_model-00001-of-00004.bin", + "priors.2.prior.transformer._attn_mods.2.attn.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.2.prior.transformer._attn_mods.2.attn.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.2.prior.transformer._attn_mods.2.ln_0.bias": "pytorch_model-00001-of-00004.bin", + "priors.2.prior.transformer._attn_mods.2.ln_0.weight": "pytorch_model-00001-of-00004.bin", + "priors.2.prior.transformer._attn_mods.2.ln_1.bias": "pytorch_model-00001-of-00004.bin", + "priors.2.prior.transformer._attn_mods.2.ln_1.weight": "pytorch_model-00001-of-00004.bin", + "priors.2.prior.transformer._attn_mods.2.mlp.c_fc.bias": "pytorch_model-00001-of-00004.bin", + "priors.2.prior.transformer._attn_mods.2.mlp.c_fc.weight": "pytorch_model-00001-of-00004.bin", + "priors.2.prior.transformer._attn_mods.2.mlp.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.2.prior.transformer._attn_mods.2.mlp.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.2.prior.transformer._attn_mods.20.attn.c_attn.bias": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.20.attn.c_attn.weight": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.20.attn.c_proj.bias": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.20.attn.c_proj.weight": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.20.ln_0.bias": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.20.ln_0.weight": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.20.ln_1.bias": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.20.ln_1.weight": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.20.mlp.c_fc.bias": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.20.mlp.c_fc.weight": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.20.mlp.c_proj.bias": 
"pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.20.mlp.c_proj.weight": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.21.attn.c_attn.bias": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.21.attn.c_attn.weight": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.21.attn.c_proj.bias": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.21.attn.c_proj.weight": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.21.ln_0.bias": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.21.ln_0.weight": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.21.ln_1.bias": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.21.ln_1.weight": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.21.mlp.c_fc.bias": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.21.mlp.c_fc.weight": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.21.mlp.c_proj.bias": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.21.mlp.c_proj.weight": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.22.attn.c_attn.bias": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.22.attn.c_attn.weight": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.22.attn.c_proj.bias": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.22.attn.c_proj.weight": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.22.ln_0.bias": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.22.ln_0.weight": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.22.ln_1.bias": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.22.ln_1.weight": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.22.mlp.c_fc.bias": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.22.mlp.c_fc.weight": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.22.mlp.c_proj.bias": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.22.mlp.c_proj.weight": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.23.attn.c_attn.bias": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.23.attn.c_attn.weight": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.23.attn.c_proj.bias": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.23.attn.c_proj.weight": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.23.ln_0.bias": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.23.ln_0.weight": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.23.ln_1.bias": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.23.ln_1.weight": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.23.mlp.c_fc.bias": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.23.mlp.c_fc.weight": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.23.mlp.c_proj.bias": "pytorch_model-00002-of-00004.bin", + 
"priors.2.prior.transformer._attn_mods.23.mlp.c_proj.weight": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.24.attn.c_attn.bias": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.24.attn.c_attn.weight": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.24.attn.c_proj.bias": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.24.attn.c_proj.weight": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.24.ln_0.bias": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.24.ln_0.weight": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.24.ln_1.bias": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.24.ln_1.weight": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.24.mlp.c_fc.bias": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.24.mlp.c_fc.weight": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.24.mlp.c_proj.bias": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.24.mlp.c_proj.weight": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.25.attn.c_attn.bias": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.25.attn.c_attn.weight": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.25.attn.c_proj.bias": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.25.attn.c_proj.weight": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.25.ln_0.bias": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.25.ln_0.weight": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.25.ln_1.bias": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.25.ln_1.weight": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.25.mlp.c_fc.bias": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.25.mlp.c_fc.weight": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.25.mlp.c_proj.bias": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.25.mlp.c_proj.weight": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.26.attn.c_attn.bias": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.26.attn.c_attn.weight": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.26.attn.c_proj.bias": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.26.attn.c_proj.weight": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.26.ln_0.bias": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.26.ln_0.weight": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.26.ln_1.bias": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.26.ln_1.weight": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.26.mlp.c_fc.bias": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.26.mlp.c_fc.weight": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.26.mlp.c_proj.bias": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.26.mlp.c_proj.weight": 
"pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.27.attn.c_attn.bias": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.27.attn.c_attn.weight": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.27.attn.c_proj.bias": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.27.attn.c_proj.weight": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.27.ln_0.bias": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.27.ln_0.weight": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.27.ln_1.bias": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.27.ln_1.weight": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.27.mlp.c_fc.bias": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.27.mlp.c_fc.weight": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.27.mlp.c_proj.bias": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.27.mlp.c_proj.weight": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.28.attn.c_attn.bias": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.28.attn.c_attn.weight": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.28.attn.c_enc_kv.bias": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.28.attn.c_enc_kv.weight": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.28.attn.c_proj.bias": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.28.attn.c_proj.weight": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.28.ln_0.bias": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.28.ln_0.weight": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.28.ln_1.bias": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.28.ln_1.weight": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.28.mlp.c_fc.bias": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.28.mlp.c_fc.weight": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.28.mlp.c_proj.bias": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.28.mlp.c_proj.weight": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.29.attn.c_attn.bias": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.29.attn.c_attn.weight": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.29.attn.c_proj.bias": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.29.attn.c_proj.weight": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.29.ln_0.bias": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.29.ln_0.weight": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.29.ln_1.bias": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.29.ln_1.weight": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.29.mlp.c_fc.bias": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.29.mlp.c_fc.weight": "pytorch_model-00002-of-00004.bin", + 
"priors.2.prior.transformer._attn_mods.29.mlp.c_proj.bias": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.29.mlp.c_proj.weight": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.3.attn.c_attn.bias": "pytorch_model-00001-of-00004.bin", + "priors.2.prior.transformer._attn_mods.3.attn.c_attn.weight": "pytorch_model-00001-of-00004.bin", + "priors.2.prior.transformer._attn_mods.3.attn.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.2.prior.transformer._attn_mods.3.attn.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.2.prior.transformer._attn_mods.3.ln_0.bias": "pytorch_model-00001-of-00004.bin", + "priors.2.prior.transformer._attn_mods.3.ln_0.weight": "pytorch_model-00001-of-00004.bin", + "priors.2.prior.transformer._attn_mods.3.ln_1.bias": "pytorch_model-00001-of-00004.bin", + "priors.2.prior.transformer._attn_mods.3.ln_1.weight": "pytorch_model-00001-of-00004.bin", + "priors.2.prior.transformer._attn_mods.3.mlp.c_fc.bias": "pytorch_model-00001-of-00004.bin", + "priors.2.prior.transformer._attn_mods.3.mlp.c_fc.weight": "pytorch_model-00001-of-00004.bin", + "priors.2.prior.transformer._attn_mods.3.mlp.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.2.prior.transformer._attn_mods.3.mlp.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.2.prior.transformer._attn_mods.30.attn.c_attn.bias": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.30.attn.c_attn.weight": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.30.attn.c_proj.bias": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.30.attn.c_proj.weight": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.30.ln_0.bias": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.30.ln_0.weight": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.30.ln_1.bias": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.30.ln_1.weight": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.30.mlp.c_fc.bias": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.30.mlp.c_fc.weight": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.30.mlp.c_proj.bias": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.30.mlp.c_proj.weight": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.31.attn.c_attn.bias": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.31.attn.c_attn.weight": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.31.attn.c_proj.bias": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.31.attn.c_proj.weight": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.31.ln_0.bias": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.31.ln_0.weight": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.31.ln_1.bias": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.31.ln_1.weight": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.31.mlp.c_fc.bias": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.31.mlp.c_fc.weight": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.31.mlp.c_proj.bias": 
"pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.31.mlp.c_proj.weight": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.32.attn.c_attn.bias": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.32.attn.c_attn.weight": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.32.attn.c_proj.bias": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.32.attn.c_proj.weight": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.32.ln_0.bias": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.32.ln_0.weight": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.32.ln_1.bias": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.32.ln_1.weight": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.32.mlp.c_fc.bias": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.32.mlp.c_fc.weight": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.32.mlp.c_proj.bias": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.32.mlp.c_proj.weight": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.33.attn.c_attn.bias": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.33.attn.c_attn.weight": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.33.attn.c_proj.bias": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.33.attn.c_proj.weight": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.33.ln_0.bias": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.33.ln_0.weight": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.33.ln_1.bias": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.33.ln_1.weight": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.33.mlp.c_fc.bias": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.33.mlp.c_fc.weight": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.33.mlp.c_proj.bias": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.33.mlp.c_proj.weight": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.34.attn.c_attn.bias": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.34.attn.c_attn.weight": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.34.attn.c_proj.bias": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.34.attn.c_proj.weight": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.34.ln_0.bias": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.34.ln_0.weight": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.34.ln_1.bias": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.34.ln_1.weight": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.34.mlp.c_fc.bias": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.34.mlp.c_fc.weight": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.34.mlp.c_proj.bias": "pytorch_model-00002-of-00004.bin", + 
"priors.2.prior.transformer._attn_mods.34.mlp.c_proj.weight": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.35.attn.c_attn.bias": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.35.attn.c_attn.weight": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.35.attn.c_proj.bias": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.35.attn.c_proj.weight": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.35.ln_0.bias": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.35.ln_0.weight": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.35.ln_1.bias": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.35.ln_1.weight": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.35.mlp.c_fc.bias": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.35.mlp.c_fc.weight": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.35.mlp.c_proj.bias": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.35.mlp.c_proj.weight": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.36.attn.c_attn.bias": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.36.attn.c_attn.weight": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.36.attn.c_proj.bias": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.36.attn.c_proj.weight": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.36.ln_0.bias": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.36.ln_0.weight": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.36.ln_1.bias": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.36.ln_1.weight": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.36.mlp.c_fc.bias": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.36.mlp.c_fc.weight": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.36.mlp.c_proj.bias": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.36.mlp.c_proj.weight": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.37.attn.c_attn.bias": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.37.attn.c_attn.weight": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.37.attn.c_proj.bias": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.37.attn.c_proj.weight": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.37.ln_0.bias": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.37.ln_0.weight": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.37.ln_1.bias": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.37.ln_1.weight": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.37.mlp.c_fc.bias": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.37.mlp.c_fc.weight": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.37.mlp.c_proj.bias": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.37.mlp.c_proj.weight": 
"pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.38.attn.c_attn.bias": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.38.attn.c_attn.weight": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.38.attn.c_enc_kv.bias": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.38.attn.c_enc_kv.weight": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.38.attn.c_proj.bias": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.38.attn.c_proj.weight": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.38.ln_0.bias": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.38.ln_0.weight": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.38.ln_1.bias": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.38.ln_1.weight": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.38.mlp.c_fc.bias": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.38.mlp.c_fc.weight": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.38.mlp.c_proj.bias": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.38.mlp.c_proj.weight": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.39.attn.c_attn.bias": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.39.attn.c_attn.weight": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.39.attn.c_proj.bias": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.39.attn.c_proj.weight": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.39.ln_0.bias": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.39.ln_0.weight": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.39.ln_1.bias": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.39.ln_1.weight": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.39.mlp.c_fc.bias": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.39.mlp.c_fc.weight": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.39.mlp.c_proj.bias": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.39.mlp.c_proj.weight": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.4.attn.c_attn.bias": "pytorch_model-00001-of-00004.bin", + "priors.2.prior.transformer._attn_mods.4.attn.c_attn.weight": "pytorch_model-00001-of-00004.bin", + "priors.2.prior.transformer._attn_mods.4.attn.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.2.prior.transformer._attn_mods.4.attn.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.2.prior.transformer._attn_mods.4.ln_0.bias": "pytorch_model-00001-of-00004.bin", + "priors.2.prior.transformer._attn_mods.4.ln_0.weight": "pytorch_model-00001-of-00004.bin", + "priors.2.prior.transformer._attn_mods.4.ln_1.bias": "pytorch_model-00001-of-00004.bin", + "priors.2.prior.transformer._attn_mods.4.ln_1.weight": "pytorch_model-00001-of-00004.bin", + "priors.2.prior.transformer._attn_mods.4.mlp.c_fc.bias": "pytorch_model-00001-of-00004.bin", + "priors.2.prior.transformer._attn_mods.4.mlp.c_fc.weight": "pytorch_model-00001-of-00004.bin", + 
"priors.2.prior.transformer._attn_mods.4.mlp.c_proj.bias": "pytorch_model-00001-of-00004.bin", + "priors.2.prior.transformer._attn_mods.4.mlp.c_proj.weight": "pytorch_model-00001-of-00004.bin", + "priors.2.prior.transformer._attn_mods.40.attn.c_attn.bias": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.40.attn.c_attn.weight": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.40.attn.c_proj.bias": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.40.attn.c_proj.weight": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.40.ln_0.bias": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.40.ln_0.weight": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.40.ln_1.bias": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.40.ln_1.weight": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.40.mlp.c_fc.bias": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.40.mlp.c_fc.weight": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.40.mlp.c_proj.bias": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.40.mlp.c_proj.weight": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.41.attn.c_attn.bias": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.41.attn.c_attn.weight": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.41.attn.c_proj.bias": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.41.attn.c_proj.weight": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.41.ln_0.bias": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.41.ln_0.weight": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.41.ln_1.bias": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.41.ln_1.weight": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.41.mlp.c_fc.bias": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.41.mlp.c_fc.weight": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.41.mlp.c_proj.bias": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.41.mlp.c_proj.weight": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.42.attn.c_attn.bias": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.42.attn.c_attn.weight": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.42.attn.c_proj.bias": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.42.attn.c_proj.weight": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.42.ln_0.bias": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.42.ln_0.weight": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.42.ln_1.bias": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.42.ln_1.weight": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.42.mlp.c_fc.bias": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.42.mlp.c_fc.weight": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.42.mlp.c_proj.bias": 
"pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.42.mlp.c_proj.weight": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.43.attn.c_attn.bias": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.43.attn.c_attn.weight": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.43.attn.c_proj.bias": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.43.attn.c_proj.weight": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.43.ln_0.bias": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.43.ln_0.weight": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.43.ln_1.bias": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.43.ln_1.weight": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.43.mlp.c_fc.bias": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.43.mlp.c_fc.weight": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.43.mlp.c_proj.bias": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.43.mlp.c_proj.weight": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.44.attn.c_attn.bias": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.44.attn.c_attn.weight": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.44.attn.c_proj.bias": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.44.attn.c_proj.weight": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.44.ln_0.bias": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.44.ln_0.weight": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.44.ln_1.bias": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.44.ln_1.weight": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.44.mlp.c_fc.bias": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.44.mlp.c_fc.weight": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.44.mlp.c_proj.bias": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.44.mlp.c_proj.weight": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.45.attn.c_attn.bias": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.45.attn.c_attn.weight": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.45.attn.c_proj.bias": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.45.attn.c_proj.weight": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.45.ln_0.bias": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.45.ln_0.weight": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.45.ln_1.bias": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.45.ln_1.weight": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.45.mlp.c_fc.bias": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.45.mlp.c_fc.weight": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.45.mlp.c_proj.bias": "pytorch_model-00003-of-00004.bin", + 
"priors.2.prior.transformer._attn_mods.45.mlp.c_proj.weight": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.46.attn.c_attn.bias": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.46.attn.c_attn.weight": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.46.attn.c_proj.bias": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.46.attn.c_proj.weight": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.46.ln_0.bias": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.46.ln_0.weight": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.46.ln_1.bias": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.46.ln_1.weight": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.46.mlp.c_fc.bias": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.46.mlp.c_fc.weight": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.46.mlp.c_proj.bias": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.46.mlp.c_proj.weight": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.47.attn.c_attn.bias": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.47.attn.c_attn.weight": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.47.attn.c_proj.bias": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.47.attn.c_proj.weight": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.47.ln_0.bias": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.47.ln_0.weight": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.47.ln_1.bias": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.47.ln_1.weight": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.47.mlp.c_fc.bias": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.47.mlp.c_fc.weight": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.47.mlp.c_proj.bias": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.47.mlp.c_proj.weight": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.48.attn.c_attn.bias": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.48.attn.c_attn.weight": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.48.attn.c_enc_kv.bias": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.48.attn.c_enc_kv.weight": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.48.attn.c_proj.bias": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.48.attn.c_proj.weight": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.48.ln_0.bias": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.48.ln_0.weight": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.48.ln_1.bias": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.48.ln_1.weight": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.48.mlp.c_fc.bias": "pytorch_model-00003-of-00004.bin", + 
"priors.2.prior.transformer._attn_mods.48.mlp.c_fc.weight": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.48.mlp.c_proj.bias": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.48.mlp.c_proj.weight": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.49.attn.c_attn.bias": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.49.attn.c_attn.weight": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.49.attn.c_proj.bias": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.49.attn.c_proj.weight": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.49.ln_0.bias": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.49.ln_0.weight": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.49.ln_1.bias": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.49.ln_1.weight": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.49.mlp.c_fc.bias": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.49.mlp.c_fc.weight": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.49.mlp.c_proj.bias": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.49.mlp.c_proj.weight": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.5.attn.c_attn.bias": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.5.attn.c_attn.weight": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.5.attn.c_proj.bias": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.5.attn.c_proj.weight": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.5.ln_0.bias": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.5.ln_0.weight": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.5.ln_1.bias": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.5.ln_1.weight": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.5.mlp.c_fc.bias": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.5.mlp.c_fc.weight": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.5.mlp.c_proj.bias": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.5.mlp.c_proj.weight": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.50.attn.c_attn.bias": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.50.attn.c_attn.weight": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.50.attn.c_proj.bias": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.50.attn.c_proj.weight": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.50.ln_0.bias": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.50.ln_0.weight": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.50.ln_1.bias": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.50.ln_1.weight": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.50.mlp.c_fc.bias": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.50.mlp.c_fc.weight": 
"pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.50.mlp.c_proj.bias": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.50.mlp.c_proj.weight": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.51.attn.c_attn.bias": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.51.attn.c_attn.weight": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.51.attn.c_proj.bias": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.51.attn.c_proj.weight": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.51.ln_0.bias": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.51.ln_0.weight": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.51.ln_1.bias": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.51.ln_1.weight": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.51.mlp.c_fc.bias": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.51.mlp.c_fc.weight": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.51.mlp.c_proj.bias": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.51.mlp.c_proj.weight": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.52.attn.c_attn.bias": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.52.attn.c_attn.weight": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.52.attn.c_proj.bias": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.52.attn.c_proj.weight": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.52.ln_0.bias": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.52.ln_0.weight": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.52.ln_1.bias": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.52.ln_1.weight": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.52.mlp.c_fc.bias": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.52.mlp.c_fc.weight": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.52.mlp.c_proj.bias": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.52.mlp.c_proj.weight": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.53.attn.c_attn.bias": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.53.attn.c_attn.weight": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.53.attn.c_proj.bias": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.53.attn.c_proj.weight": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.53.ln_0.bias": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.53.ln_0.weight": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.53.ln_1.bias": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.53.ln_1.weight": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.53.mlp.c_fc.bias": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.53.mlp.c_fc.weight": "pytorch_model-00003-of-00004.bin", + 
"priors.2.prior.transformer._attn_mods.53.mlp.c_proj.bias": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.53.mlp.c_proj.weight": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.54.attn.c_attn.bias": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.54.attn.c_attn.weight": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.54.attn.c_proj.bias": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.54.attn.c_proj.weight": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.54.ln_0.bias": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.54.ln_0.weight": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.54.ln_1.bias": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.54.ln_1.weight": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.54.mlp.c_fc.bias": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.54.mlp.c_fc.weight": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.54.mlp.c_proj.bias": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.54.mlp.c_proj.weight": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.55.attn.c_attn.bias": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.55.attn.c_attn.weight": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.55.attn.c_proj.bias": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.55.attn.c_proj.weight": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.55.ln_0.bias": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.55.ln_0.weight": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.55.ln_1.bias": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.55.ln_1.weight": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.55.mlp.c_fc.bias": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.55.mlp.c_fc.weight": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.55.mlp.c_proj.bias": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.55.mlp.c_proj.weight": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.56.attn.c_attn.bias": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.56.attn.c_attn.weight": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.56.attn.c_proj.bias": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.56.attn.c_proj.weight": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.56.ln_0.bias": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.56.ln_0.weight": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.56.ln_1.bias": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.56.ln_1.weight": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.56.mlp.c_fc.bias": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.56.mlp.c_fc.weight": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.56.mlp.c_proj.bias": 
"pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.56.mlp.c_proj.weight": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.57.attn.c_attn.bias": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.57.attn.c_attn.weight": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.57.attn.c_proj.bias": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.57.attn.c_proj.weight": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.57.ln_0.bias": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.57.ln_0.weight": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.57.ln_1.bias": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.57.ln_1.weight": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.57.mlp.c_fc.bias": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.57.mlp.c_fc.weight": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.57.mlp.c_proj.bias": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.57.mlp.c_proj.weight": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.58.attn.c_attn.bias": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.58.attn.c_attn.weight": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.58.attn.c_enc_kv.bias": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.58.attn.c_enc_kv.weight": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.58.attn.c_proj.bias": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.58.attn.c_proj.weight": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.58.ln_0.bias": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.58.ln_0.weight": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.58.ln_1.bias": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.58.ln_1.weight": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.58.mlp.c_fc.bias": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.58.mlp.c_fc.weight": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.58.mlp.c_proj.bias": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.58.mlp.c_proj.weight": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.59.attn.c_attn.bias": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.59.attn.c_attn.weight": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.59.attn.c_proj.bias": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.59.attn.c_proj.weight": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.59.ln_0.bias": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.59.ln_0.weight": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.59.ln_1.bias": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.59.ln_1.weight": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.59.mlp.c_fc.bias": "pytorch_model-00003-of-00004.bin", + 
"priors.2.prior.transformer._attn_mods.59.mlp.c_fc.weight": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.59.mlp.c_proj.bias": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.59.mlp.c_proj.weight": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.6.attn.c_attn.bias": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.6.attn.c_attn.weight": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.6.attn.c_proj.bias": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.6.attn.c_proj.weight": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.6.ln_0.bias": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.6.ln_0.weight": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.6.ln_1.bias": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.6.ln_1.weight": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.6.mlp.c_fc.bias": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.6.mlp.c_fc.weight": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.6.mlp.c_proj.bias": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.6.mlp.c_proj.weight": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.60.attn.c_attn.bias": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.60.attn.c_attn.weight": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.60.attn.c_proj.bias": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.60.attn.c_proj.weight": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.60.ln_0.bias": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.60.ln_0.weight": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.60.ln_1.bias": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.60.ln_1.weight": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.60.mlp.c_fc.bias": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.60.mlp.c_fc.weight": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.60.mlp.c_proj.bias": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.60.mlp.c_proj.weight": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.61.attn.c_attn.bias": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.61.attn.c_attn.weight": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.61.attn.c_proj.bias": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.61.attn.c_proj.weight": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.61.ln_0.bias": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.61.ln_0.weight": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.61.ln_1.bias": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.61.ln_1.weight": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.61.mlp.c_fc.bias": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.61.mlp.c_fc.weight": 
"pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.61.mlp.c_proj.bias": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.61.mlp.c_proj.weight": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.62.attn.c_attn.bias": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.62.attn.c_attn.weight": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.62.attn.c_proj.bias": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.62.attn.c_proj.weight": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.62.ln_0.bias": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.62.ln_0.weight": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.62.ln_1.bias": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.62.ln_1.weight": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.62.mlp.c_fc.bias": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.62.mlp.c_fc.weight": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.62.mlp.c_proj.bias": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.62.mlp.c_proj.weight": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.63.attn.c_attn.bias": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.63.attn.c_attn.weight": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.63.attn.c_proj.bias": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.63.attn.c_proj.weight": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.63.ln_0.bias": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.63.ln_0.weight": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.63.ln_1.bias": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.63.ln_1.weight": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.63.mlp.c_fc.bias": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.63.mlp.c_fc.weight": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.63.mlp.c_proj.bias": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.63.mlp.c_proj.weight": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.64.attn.c_attn.bias": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.64.attn.c_attn.weight": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.64.attn.c_proj.bias": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.64.attn.c_proj.weight": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.64.ln_0.bias": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.64.ln_0.weight": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.64.ln_1.bias": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.64.ln_1.weight": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.64.mlp.c_fc.bias": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.64.mlp.c_fc.weight": "pytorch_model-00003-of-00004.bin", + 
"priors.2.prior.transformer._attn_mods.64.mlp.c_proj.bias": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.64.mlp.c_proj.weight": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.65.attn.c_attn.bias": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.65.attn.c_attn.weight": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.65.attn.c_proj.bias": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.65.attn.c_proj.weight": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.65.ln_0.bias": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.65.ln_0.weight": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.65.ln_1.bias": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.65.ln_1.weight": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.65.mlp.c_fc.bias": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.65.mlp.c_fc.weight": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.65.mlp.c_proj.bias": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.65.mlp.c_proj.weight": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.66.attn.c_attn.bias": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.66.attn.c_attn.weight": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.66.attn.c_proj.bias": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.66.attn.c_proj.weight": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.66.ln_0.bias": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.66.ln_0.weight": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.66.ln_1.bias": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.66.ln_1.weight": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.66.mlp.c_fc.bias": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.66.mlp.c_fc.weight": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.66.mlp.c_proj.bias": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.66.mlp.c_proj.weight": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.67.attn.c_attn.bias": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.67.attn.c_attn.weight": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.67.attn.c_proj.bias": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.67.attn.c_proj.weight": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.67.ln_0.bias": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.67.ln_0.weight": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.67.ln_1.bias": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.67.ln_1.weight": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.67.mlp.c_fc.bias": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.67.mlp.c_fc.weight": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.67.mlp.c_proj.bias": 
"pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.67.mlp.c_proj.weight": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.68.attn.c_attn.bias": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.68.attn.c_attn.weight": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.68.attn.c_enc_kv.bias": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.68.attn.c_enc_kv.weight": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.68.attn.c_proj.bias": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.68.attn.c_proj.weight": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.68.ln_0.bias": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.68.ln_0.weight": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.68.ln_1.bias": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.68.ln_1.weight": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.68.mlp.c_fc.bias": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.68.mlp.c_fc.weight": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.68.mlp.c_proj.bias": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.68.mlp.c_proj.weight": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.69.attn.c_attn.bias": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.69.attn.c_attn.weight": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.69.attn.c_proj.bias": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.69.attn.c_proj.weight": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.69.ln_0.bias": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.69.ln_0.weight": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.69.ln_1.bias": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.69.ln_1.weight": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.69.mlp.c_fc.bias": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.69.mlp.c_fc.weight": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.69.mlp.c_proj.bias": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.69.mlp.c_proj.weight": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.7.attn.c_attn.bias": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.7.attn.c_attn.weight": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.7.attn.c_proj.bias": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.7.attn.c_proj.weight": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.7.ln_0.bias": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.7.ln_0.weight": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.7.ln_1.bias": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.7.ln_1.weight": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.7.mlp.c_fc.bias": "pytorch_model-00002-of-00004.bin", + 
"priors.2.prior.transformer._attn_mods.7.mlp.c_fc.weight": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.7.mlp.c_proj.bias": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.7.mlp.c_proj.weight": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.70.attn.c_attn.bias": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.70.attn.c_attn.weight": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.70.attn.c_proj.bias": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.70.attn.c_proj.weight": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.70.ln_0.bias": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.70.ln_0.weight": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.70.ln_1.bias": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.70.ln_1.weight": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.70.mlp.c_fc.bias": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.70.mlp.c_fc.weight": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.70.mlp.c_proj.bias": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.70.mlp.c_proj.weight": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.71.attn.c_attn.bias": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.71.attn.c_attn.weight": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.71.attn.c_proj.bias": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.71.attn.c_proj.weight": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.71.ln_0.bias": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.71.ln_0.weight": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.71.ln_1.bias": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.71.ln_1.weight": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.71.mlp.c_fc.bias": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.71.mlp.c_fc.weight": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.71.mlp.c_proj.bias": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.71.mlp.c_proj.weight": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.72.attn.c_attn.bias": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.72.attn.c_attn.weight": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.72.attn.c_proj.bias": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.72.attn.c_proj.weight": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.72.ln_0.bias": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.72.ln_0.weight": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.72.ln_1.bias": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.72.ln_1.weight": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.72.mlp.c_fc.bias": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.72.mlp.c_fc.weight": 
"pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.72.mlp.c_proj.bias": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.72.mlp.c_proj.weight": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.73.attn.c_attn.bias": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.73.attn.c_attn.weight": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.73.attn.c_proj.bias": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.73.attn.c_proj.weight": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.73.ln_0.bias": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.73.ln_0.weight": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.73.ln_1.bias": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.73.ln_1.weight": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.73.mlp.c_fc.bias": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.73.mlp.c_fc.weight": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.73.mlp.c_proj.bias": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.73.mlp.c_proj.weight": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.74.attn.c_attn.bias": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.74.attn.c_attn.weight": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.74.attn.c_proj.bias": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.74.attn.c_proj.weight": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.74.ln_0.bias": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.74.ln_0.weight": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.74.ln_1.bias": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.74.ln_1.weight": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.74.mlp.c_fc.bias": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.74.mlp.c_fc.weight": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.74.mlp.c_proj.bias": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.74.mlp.c_proj.weight": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.75.attn.c_attn.bias": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.75.attn.c_attn.weight": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.75.attn.c_proj.bias": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.75.attn.c_proj.weight": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.75.ln_0.bias": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.75.ln_0.weight": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.75.ln_1.bias": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.75.ln_1.weight": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.75.mlp.c_fc.bias": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.75.mlp.c_fc.weight": "pytorch_model-00003-of-00004.bin", + 
"priors.2.prior.transformer._attn_mods.75.mlp.c_proj.bias": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.75.mlp.c_proj.weight": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.76.attn.c_attn.bias": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.76.attn.c_attn.weight": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.76.attn.c_proj.bias": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.76.attn.c_proj.weight": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.76.ln_0.bias": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.76.ln_0.weight": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.76.ln_1.bias": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.76.ln_1.weight": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.76.mlp.c_fc.bias": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.76.mlp.c_fc.weight": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.76.mlp.c_proj.bias": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.76.mlp.c_proj.weight": "pytorch_model-00003-of-00004.bin", + "priors.2.prior.transformer._attn_mods.77.attn.c_attn.bias": "pytorch_model-00004-of-00004.bin", + "priors.2.prior.transformer._attn_mods.77.attn.c_attn.weight": "pytorch_model-00004-of-00004.bin", + "priors.2.prior.transformer._attn_mods.77.attn.c_proj.bias": "pytorch_model-00004-of-00004.bin", + "priors.2.prior.transformer._attn_mods.77.attn.c_proj.weight": "pytorch_model-00004-of-00004.bin", + "priors.2.prior.transformer._attn_mods.77.ln_0.bias": "pytorch_model-00004-of-00004.bin", + "priors.2.prior.transformer._attn_mods.77.ln_0.weight": "pytorch_model-00004-of-00004.bin", + "priors.2.prior.transformer._attn_mods.77.ln_1.bias": "pytorch_model-00004-of-00004.bin", + "priors.2.prior.transformer._attn_mods.77.ln_1.weight": "pytorch_model-00004-of-00004.bin", + "priors.2.prior.transformer._attn_mods.77.mlp.c_fc.bias": "pytorch_model-00004-of-00004.bin", + "priors.2.prior.transformer._attn_mods.77.mlp.c_fc.weight": "pytorch_model-00004-of-00004.bin", + "priors.2.prior.transformer._attn_mods.77.mlp.c_proj.bias": "pytorch_model-00004-of-00004.bin", + "priors.2.prior.transformer._attn_mods.77.mlp.c_proj.weight": "pytorch_model-00004-of-00004.bin", + "priors.2.prior.transformer._attn_mods.78.attn.c_attn.bias": "pytorch_model-00004-of-00004.bin", + "priors.2.prior.transformer._attn_mods.78.attn.c_attn.weight": "pytorch_model-00004-of-00004.bin", + "priors.2.prior.transformer._attn_mods.78.attn.c_enc_kv.bias": "pytorch_model-00004-of-00004.bin", + "priors.2.prior.transformer._attn_mods.78.attn.c_enc_kv.weight": "pytorch_model-00004-of-00004.bin", + "priors.2.prior.transformer._attn_mods.78.attn.c_proj.bias": "pytorch_model-00004-of-00004.bin", + "priors.2.prior.transformer._attn_mods.78.attn.c_proj.weight": "pytorch_model-00004-of-00004.bin", + "priors.2.prior.transformer._attn_mods.78.ln_0.bias": "pytorch_model-00004-of-00004.bin", + "priors.2.prior.transformer._attn_mods.78.ln_0.weight": "pytorch_model-00004-of-00004.bin", + "priors.2.prior.transformer._attn_mods.78.ln_1.bias": "pytorch_model-00004-of-00004.bin", + "priors.2.prior.transformer._attn_mods.78.ln_1.weight": "pytorch_model-00004-of-00004.bin", + 
"priors.2.prior.transformer._attn_mods.78.mlp.c_fc.bias": "pytorch_model-00004-of-00004.bin", + "priors.2.prior.transformer._attn_mods.78.mlp.c_fc.weight": "pytorch_model-00004-of-00004.bin", + "priors.2.prior.transformer._attn_mods.78.mlp.c_proj.bias": "pytorch_model-00004-of-00004.bin", + "priors.2.prior.transformer._attn_mods.78.mlp.c_proj.weight": "pytorch_model-00004-of-00004.bin", + "priors.2.prior.transformer._attn_mods.8.attn.c_attn.bias": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.8.attn.c_attn.weight": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.8.attn.c_proj.bias": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.8.attn.c_proj.weight": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.8.ln_0.bias": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.8.ln_0.weight": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.8.ln_1.bias": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.8.ln_1.weight": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.8.mlp.c_fc.bias": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.8.mlp.c_fc.weight": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.8.mlp.c_proj.bias": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.8.mlp.c_proj.weight": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.9.attn.c_attn.bias": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.9.attn.c_attn.weight": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.9.attn.c_proj.bias": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.9.attn.c_proj.weight": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.9.ln_0.bias": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.9.ln_0.weight": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.9.ln_1.bias": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.9.ln_1.weight": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.9.mlp.c_fc.bias": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.9.mlp.c_fc.weight": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.9.mlp.c_proj.bias": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.transformer._attn_mods.9.mlp.c_proj.weight": "pytorch_model-00002-of-00004.bin", + "priors.2.prior.x_emb.weight": "pytorch_model-00001-of-00004.bin", + "priors.2.prior.x_out.weight": "pytorch_model-00004-of-00004.bin", + "priors.2.y_emb.absolute_pos_emb.emb.weight": "pytorch_model-00001-of-00004.bin", + "priors.2.y_emb.artist_emb.emb.weight": "pytorch_model-00001-of-00004.bin", + "priors.2.y_emb.bow_genre_emb.emb.weight": "pytorch_model-00001-of-00004.bin", + "priors.2.y_emb.relative_pos_emb.emb.weight": "pytorch_model-00001-of-00004.bin", + "priors.2.y_emb.total_length_emb.emb.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.bottleneck.level_blocks.0.k": "pytorch_model-00001-of-00004.bin", + "vqvae.bottleneck.level_blocks.1.k": "pytorch_model-00001-of-00004.bin", + "vqvae.bottleneck.level_blocks.2.k": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.0.level_blocks.0.model.0.bias": "pytorch_model-00001-of-00004.bin", 
+ "vqvae.decoders.0.level_blocks.0.model.0.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.0.level_blocks.0.model.1.0.model.0.model.1.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.0.level_blocks.0.model.1.0.model.0.model.1.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.0.level_blocks.0.model.1.0.model.0.model.3.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.0.level_blocks.0.model.1.0.model.0.model.3.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.0.level_blocks.0.model.1.0.model.1.model.1.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.0.level_blocks.0.model.1.0.model.1.model.1.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.0.level_blocks.0.model.1.0.model.1.model.3.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.0.level_blocks.0.model.1.0.model.1.model.3.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.0.level_blocks.0.model.1.0.model.2.model.1.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.0.level_blocks.0.model.1.0.model.2.model.1.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.0.level_blocks.0.model.1.0.model.2.model.3.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.0.level_blocks.0.model.1.0.model.2.model.3.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.0.level_blocks.0.model.1.0.model.3.model.1.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.0.level_blocks.0.model.1.0.model.3.model.1.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.0.level_blocks.0.model.1.0.model.3.model.3.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.0.level_blocks.0.model.1.0.model.3.model.3.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.0.level_blocks.0.model.1.0.model.4.model.1.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.0.level_blocks.0.model.1.0.model.4.model.1.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.0.level_blocks.0.model.1.0.model.4.model.3.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.0.level_blocks.0.model.1.0.model.4.model.3.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.0.level_blocks.0.model.1.0.model.5.model.1.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.0.level_blocks.0.model.1.0.model.5.model.1.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.0.level_blocks.0.model.1.0.model.5.model.3.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.0.level_blocks.0.model.1.0.model.5.model.3.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.0.level_blocks.0.model.1.0.model.6.model.1.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.0.level_blocks.0.model.1.0.model.6.model.1.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.0.level_blocks.0.model.1.0.model.6.model.3.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.0.level_blocks.0.model.1.0.model.6.model.3.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.0.level_blocks.0.model.1.0.model.7.model.1.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.0.level_blocks.0.model.1.0.model.7.model.1.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.0.level_blocks.0.model.1.0.model.7.model.3.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.0.level_blocks.0.model.1.0.model.7.model.3.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.0.level_blocks.0.model.1.1.bias": "pytorch_model-00001-of-00004.bin", + 
"vqvae.decoders.0.level_blocks.0.model.1.1.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.0.level_blocks.0.model.2.0.model.0.model.1.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.0.level_blocks.0.model.2.0.model.0.model.1.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.0.level_blocks.0.model.2.0.model.0.model.3.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.0.level_blocks.0.model.2.0.model.0.model.3.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.0.level_blocks.0.model.2.0.model.1.model.1.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.0.level_blocks.0.model.2.0.model.1.model.1.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.0.level_blocks.0.model.2.0.model.1.model.3.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.0.level_blocks.0.model.2.0.model.1.model.3.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.0.level_blocks.0.model.2.0.model.2.model.1.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.0.level_blocks.0.model.2.0.model.2.model.1.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.0.level_blocks.0.model.2.0.model.2.model.3.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.0.level_blocks.0.model.2.0.model.2.model.3.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.0.level_blocks.0.model.2.0.model.3.model.1.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.0.level_blocks.0.model.2.0.model.3.model.1.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.0.level_blocks.0.model.2.0.model.3.model.3.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.0.level_blocks.0.model.2.0.model.3.model.3.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.0.level_blocks.0.model.2.0.model.4.model.1.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.0.level_blocks.0.model.2.0.model.4.model.1.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.0.level_blocks.0.model.2.0.model.4.model.3.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.0.level_blocks.0.model.2.0.model.4.model.3.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.0.level_blocks.0.model.2.0.model.5.model.1.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.0.level_blocks.0.model.2.0.model.5.model.1.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.0.level_blocks.0.model.2.0.model.5.model.3.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.0.level_blocks.0.model.2.0.model.5.model.3.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.0.level_blocks.0.model.2.0.model.6.model.1.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.0.level_blocks.0.model.2.0.model.6.model.1.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.0.level_blocks.0.model.2.0.model.6.model.3.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.0.level_blocks.0.model.2.0.model.6.model.3.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.0.level_blocks.0.model.2.0.model.7.model.1.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.0.level_blocks.0.model.2.0.model.7.model.1.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.0.level_blocks.0.model.2.0.model.7.model.3.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.0.level_blocks.0.model.2.0.model.7.model.3.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.0.level_blocks.0.model.2.1.bias": "pytorch_model-00001-of-00004.bin", + 
"vqvae.decoders.0.level_blocks.0.model.2.1.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.0.level_blocks.0.model.3.0.model.0.model.1.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.0.level_blocks.0.model.3.0.model.0.model.1.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.0.level_blocks.0.model.3.0.model.0.model.3.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.0.level_blocks.0.model.3.0.model.0.model.3.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.0.level_blocks.0.model.3.0.model.1.model.1.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.0.level_blocks.0.model.3.0.model.1.model.1.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.0.level_blocks.0.model.3.0.model.1.model.3.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.0.level_blocks.0.model.3.0.model.1.model.3.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.0.level_blocks.0.model.3.0.model.2.model.1.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.0.level_blocks.0.model.3.0.model.2.model.1.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.0.level_blocks.0.model.3.0.model.2.model.3.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.0.level_blocks.0.model.3.0.model.2.model.3.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.0.level_blocks.0.model.3.0.model.3.model.1.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.0.level_blocks.0.model.3.0.model.3.model.1.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.0.level_blocks.0.model.3.0.model.3.model.3.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.0.level_blocks.0.model.3.0.model.3.model.3.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.0.level_blocks.0.model.3.0.model.4.model.1.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.0.level_blocks.0.model.3.0.model.4.model.1.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.0.level_blocks.0.model.3.0.model.4.model.3.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.0.level_blocks.0.model.3.0.model.4.model.3.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.0.level_blocks.0.model.3.0.model.5.model.1.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.0.level_blocks.0.model.3.0.model.5.model.1.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.0.level_blocks.0.model.3.0.model.5.model.3.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.0.level_blocks.0.model.3.0.model.5.model.3.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.0.level_blocks.0.model.3.0.model.6.model.1.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.0.level_blocks.0.model.3.0.model.6.model.1.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.0.level_blocks.0.model.3.0.model.6.model.3.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.0.level_blocks.0.model.3.0.model.6.model.3.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.0.level_blocks.0.model.3.0.model.7.model.1.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.0.level_blocks.0.model.3.0.model.7.model.1.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.0.level_blocks.0.model.3.0.model.7.model.3.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.0.level_blocks.0.model.3.0.model.7.model.3.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.0.level_blocks.0.model.3.1.bias": "pytorch_model-00001-of-00004.bin", + 
"vqvae.decoders.0.level_blocks.0.model.3.1.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.0.out.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.0.out.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.1.level_blocks.0.model.0.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.1.level_blocks.0.model.0.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.1.level_blocks.0.model.1.0.model.0.model.1.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.1.level_blocks.0.model.1.0.model.0.model.1.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.1.level_blocks.0.model.1.0.model.0.model.3.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.1.level_blocks.0.model.1.0.model.0.model.3.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.1.level_blocks.0.model.1.0.model.1.model.1.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.1.level_blocks.0.model.1.0.model.1.model.1.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.1.level_blocks.0.model.1.0.model.1.model.3.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.1.level_blocks.0.model.1.0.model.1.model.3.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.1.level_blocks.0.model.1.0.model.2.model.1.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.1.level_blocks.0.model.1.0.model.2.model.1.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.1.level_blocks.0.model.1.0.model.2.model.3.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.1.level_blocks.0.model.1.0.model.2.model.3.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.1.level_blocks.0.model.1.0.model.3.model.1.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.1.level_blocks.0.model.1.0.model.3.model.1.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.1.level_blocks.0.model.1.0.model.3.model.3.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.1.level_blocks.0.model.1.0.model.3.model.3.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.1.level_blocks.0.model.1.1.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.1.level_blocks.0.model.1.1.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.1.level_blocks.0.model.2.0.model.0.model.1.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.1.level_blocks.0.model.2.0.model.0.model.1.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.1.level_blocks.0.model.2.0.model.0.model.3.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.1.level_blocks.0.model.2.0.model.0.model.3.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.1.level_blocks.0.model.2.0.model.1.model.1.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.1.level_blocks.0.model.2.0.model.1.model.1.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.1.level_blocks.0.model.2.0.model.1.model.3.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.1.level_blocks.0.model.2.0.model.1.model.3.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.1.level_blocks.0.model.2.0.model.2.model.1.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.1.level_blocks.0.model.2.0.model.2.model.1.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.1.level_blocks.0.model.2.0.model.2.model.3.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.1.level_blocks.0.model.2.0.model.2.model.3.weight": "pytorch_model-00001-of-00004.bin", + 
"vqvae.decoders.1.level_blocks.0.model.2.0.model.3.model.1.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.1.level_blocks.0.model.2.0.model.3.model.1.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.1.level_blocks.0.model.2.0.model.3.model.3.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.1.level_blocks.0.model.2.0.model.3.model.3.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.1.level_blocks.0.model.2.1.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.1.level_blocks.0.model.2.1.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.1.level_blocks.0.model.3.0.model.0.model.1.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.1.level_blocks.0.model.3.0.model.0.model.1.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.1.level_blocks.0.model.3.0.model.0.model.3.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.1.level_blocks.0.model.3.0.model.0.model.3.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.1.level_blocks.0.model.3.0.model.1.model.1.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.1.level_blocks.0.model.3.0.model.1.model.1.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.1.level_blocks.0.model.3.0.model.1.model.3.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.1.level_blocks.0.model.3.0.model.1.model.3.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.1.level_blocks.0.model.3.0.model.2.model.1.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.1.level_blocks.0.model.3.0.model.2.model.1.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.1.level_blocks.0.model.3.0.model.2.model.3.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.1.level_blocks.0.model.3.0.model.2.model.3.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.1.level_blocks.0.model.3.0.model.3.model.1.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.1.level_blocks.0.model.3.0.model.3.model.1.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.1.level_blocks.0.model.3.0.model.3.model.3.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.1.level_blocks.0.model.3.0.model.3.model.3.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.1.level_blocks.0.model.3.1.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.1.level_blocks.0.model.3.1.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.1.level_blocks.1.model.0.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.1.level_blocks.1.model.0.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.1.level_blocks.1.model.1.0.model.0.model.1.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.1.level_blocks.1.model.1.0.model.0.model.1.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.1.level_blocks.1.model.1.0.model.0.model.3.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.1.level_blocks.1.model.1.0.model.0.model.3.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.1.level_blocks.1.model.1.0.model.1.model.1.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.1.level_blocks.1.model.1.0.model.1.model.1.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.1.level_blocks.1.model.1.0.model.1.model.3.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.1.level_blocks.1.model.1.0.model.1.model.3.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.1.level_blocks.1.model.1.0.model.2.model.1.bias": 
"pytorch_model-00001-of-00004.bin", + "vqvae.decoders.1.level_blocks.1.model.1.0.model.2.model.1.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.1.level_blocks.1.model.1.0.model.2.model.3.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.1.level_blocks.1.model.1.0.model.2.model.3.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.1.level_blocks.1.model.1.0.model.3.model.1.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.1.level_blocks.1.model.1.0.model.3.model.1.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.1.level_blocks.1.model.1.0.model.3.model.3.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.1.level_blocks.1.model.1.0.model.3.model.3.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.1.level_blocks.1.model.1.1.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.1.level_blocks.1.model.1.1.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.1.level_blocks.1.model.2.0.model.0.model.1.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.1.level_blocks.1.model.2.0.model.0.model.1.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.1.level_blocks.1.model.2.0.model.0.model.3.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.1.level_blocks.1.model.2.0.model.0.model.3.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.1.level_blocks.1.model.2.0.model.1.model.1.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.1.level_blocks.1.model.2.0.model.1.model.1.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.1.level_blocks.1.model.2.0.model.1.model.3.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.1.level_blocks.1.model.2.0.model.1.model.3.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.1.level_blocks.1.model.2.0.model.2.model.1.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.1.level_blocks.1.model.2.0.model.2.model.1.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.1.level_blocks.1.model.2.0.model.2.model.3.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.1.level_blocks.1.model.2.0.model.2.model.3.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.1.level_blocks.1.model.2.0.model.3.model.1.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.1.level_blocks.1.model.2.0.model.3.model.1.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.1.level_blocks.1.model.2.0.model.3.model.3.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.1.level_blocks.1.model.2.0.model.3.model.3.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.1.level_blocks.1.model.2.1.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.1.level_blocks.1.model.2.1.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.1.out.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.1.out.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.2.level_blocks.0.model.0.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.2.level_blocks.0.model.0.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.2.level_blocks.0.model.1.0.model.0.model.1.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.2.level_blocks.0.model.1.0.model.0.model.1.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.2.level_blocks.0.model.1.0.model.0.model.3.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.2.level_blocks.0.model.1.0.model.0.model.3.weight": "pytorch_model-00001-of-00004.bin", + 
"vqvae.decoders.2.level_blocks.0.model.1.0.model.1.model.1.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.2.level_blocks.0.model.1.0.model.1.model.1.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.2.level_blocks.0.model.1.0.model.1.model.3.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.2.level_blocks.0.model.1.0.model.1.model.3.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.2.level_blocks.0.model.1.0.model.2.model.1.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.2.level_blocks.0.model.1.0.model.2.model.1.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.2.level_blocks.0.model.1.0.model.2.model.3.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.2.level_blocks.0.model.1.0.model.2.model.3.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.2.level_blocks.0.model.1.0.model.3.model.1.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.2.level_blocks.0.model.1.0.model.3.model.1.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.2.level_blocks.0.model.1.0.model.3.model.3.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.2.level_blocks.0.model.1.0.model.3.model.3.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.2.level_blocks.0.model.1.1.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.2.level_blocks.0.model.1.1.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.2.level_blocks.0.model.2.0.model.0.model.1.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.2.level_blocks.0.model.2.0.model.0.model.1.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.2.level_blocks.0.model.2.0.model.0.model.3.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.2.level_blocks.0.model.2.0.model.0.model.3.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.2.level_blocks.0.model.2.0.model.1.model.1.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.2.level_blocks.0.model.2.0.model.1.model.1.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.2.level_blocks.0.model.2.0.model.1.model.3.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.2.level_blocks.0.model.2.0.model.1.model.3.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.2.level_blocks.0.model.2.0.model.2.model.1.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.2.level_blocks.0.model.2.0.model.2.model.1.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.2.level_blocks.0.model.2.0.model.2.model.3.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.2.level_blocks.0.model.2.0.model.2.model.3.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.2.level_blocks.0.model.2.0.model.3.model.1.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.2.level_blocks.0.model.2.0.model.3.model.1.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.2.level_blocks.0.model.2.0.model.3.model.3.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.2.level_blocks.0.model.2.0.model.3.model.3.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.2.level_blocks.0.model.2.1.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.2.level_blocks.0.model.2.1.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.2.level_blocks.0.model.3.0.model.0.model.1.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.2.level_blocks.0.model.3.0.model.0.model.1.weight": "pytorch_model-00001-of-00004.bin", + 
"vqvae.decoders.2.level_blocks.0.model.3.0.model.0.model.3.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.2.level_blocks.0.model.3.0.model.0.model.3.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.2.level_blocks.0.model.3.0.model.1.model.1.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.2.level_blocks.0.model.3.0.model.1.model.1.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.2.level_blocks.0.model.3.0.model.1.model.3.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.2.level_blocks.0.model.3.0.model.1.model.3.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.2.level_blocks.0.model.3.0.model.2.model.1.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.2.level_blocks.0.model.3.0.model.2.model.1.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.2.level_blocks.0.model.3.0.model.2.model.3.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.2.level_blocks.0.model.3.0.model.2.model.3.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.2.level_blocks.0.model.3.0.model.3.model.1.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.2.level_blocks.0.model.3.0.model.3.model.1.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.2.level_blocks.0.model.3.0.model.3.model.3.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.2.level_blocks.0.model.3.0.model.3.model.3.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.2.level_blocks.0.model.3.1.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.2.level_blocks.0.model.3.1.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.2.level_blocks.1.model.0.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.2.level_blocks.1.model.0.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.2.level_blocks.1.model.1.0.model.0.model.1.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.2.level_blocks.1.model.1.0.model.0.model.1.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.2.level_blocks.1.model.1.0.model.0.model.3.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.2.level_blocks.1.model.1.0.model.0.model.3.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.2.level_blocks.1.model.1.0.model.1.model.1.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.2.level_blocks.1.model.1.0.model.1.model.1.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.2.level_blocks.1.model.1.0.model.1.model.3.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.2.level_blocks.1.model.1.0.model.1.model.3.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.2.level_blocks.1.model.1.0.model.2.model.1.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.2.level_blocks.1.model.1.0.model.2.model.1.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.2.level_blocks.1.model.1.0.model.2.model.3.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.2.level_blocks.1.model.1.0.model.2.model.3.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.2.level_blocks.1.model.1.0.model.3.model.1.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.2.level_blocks.1.model.1.0.model.3.model.1.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.2.level_blocks.1.model.1.0.model.3.model.3.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.2.level_blocks.1.model.1.0.model.3.model.3.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.2.level_blocks.1.model.1.1.bias": 
"pytorch_model-00001-of-00004.bin", + "vqvae.decoders.2.level_blocks.1.model.1.1.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.2.level_blocks.1.model.2.0.model.0.model.1.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.2.level_blocks.1.model.2.0.model.0.model.1.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.2.level_blocks.1.model.2.0.model.0.model.3.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.2.level_blocks.1.model.2.0.model.0.model.3.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.2.level_blocks.1.model.2.0.model.1.model.1.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.2.level_blocks.1.model.2.0.model.1.model.1.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.2.level_blocks.1.model.2.0.model.1.model.3.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.2.level_blocks.1.model.2.0.model.1.model.3.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.2.level_blocks.1.model.2.0.model.2.model.1.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.2.level_blocks.1.model.2.0.model.2.model.1.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.2.level_blocks.1.model.2.0.model.2.model.3.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.2.level_blocks.1.model.2.0.model.2.model.3.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.2.level_blocks.1.model.2.0.model.3.model.1.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.2.level_blocks.1.model.2.0.model.3.model.1.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.2.level_blocks.1.model.2.0.model.3.model.3.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.2.level_blocks.1.model.2.0.model.3.model.3.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.2.level_blocks.1.model.2.1.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.2.level_blocks.1.model.2.1.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.2.level_blocks.2.model.0.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.2.level_blocks.2.model.0.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.2.level_blocks.2.model.1.0.model.0.model.1.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.2.level_blocks.2.model.1.0.model.0.model.1.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.2.level_blocks.2.model.1.0.model.0.model.3.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.2.level_blocks.2.model.1.0.model.0.model.3.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.2.level_blocks.2.model.1.0.model.1.model.1.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.2.level_blocks.2.model.1.0.model.1.model.1.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.2.level_blocks.2.model.1.0.model.1.model.3.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.2.level_blocks.2.model.1.0.model.1.model.3.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.2.level_blocks.2.model.1.0.model.2.model.1.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.2.level_blocks.2.model.1.0.model.2.model.1.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.2.level_blocks.2.model.1.0.model.2.model.3.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.2.level_blocks.2.model.1.0.model.2.model.3.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.2.level_blocks.2.model.1.0.model.3.model.1.bias": "pytorch_model-00001-of-00004.bin", + 
"vqvae.decoders.2.level_blocks.2.model.1.0.model.3.model.1.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.2.level_blocks.2.model.1.0.model.3.model.3.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.2.level_blocks.2.model.1.0.model.3.model.3.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.2.level_blocks.2.model.1.1.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.2.level_blocks.2.model.1.1.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.2.level_blocks.2.model.2.0.model.0.model.1.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.2.level_blocks.2.model.2.0.model.0.model.1.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.2.level_blocks.2.model.2.0.model.0.model.3.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.2.level_blocks.2.model.2.0.model.0.model.3.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.2.level_blocks.2.model.2.0.model.1.model.1.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.2.level_blocks.2.model.2.0.model.1.model.1.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.2.level_blocks.2.model.2.0.model.1.model.3.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.2.level_blocks.2.model.2.0.model.1.model.3.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.2.level_blocks.2.model.2.0.model.2.model.1.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.2.level_blocks.2.model.2.0.model.2.model.1.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.2.level_blocks.2.model.2.0.model.2.model.3.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.2.level_blocks.2.model.2.0.model.2.model.3.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.2.level_blocks.2.model.2.0.model.3.model.1.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.2.level_blocks.2.model.2.0.model.3.model.1.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.2.level_blocks.2.model.2.0.model.3.model.3.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.2.level_blocks.2.model.2.0.model.3.model.3.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.2.level_blocks.2.model.2.1.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.2.level_blocks.2.model.2.1.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.2.out.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.decoders.2.out.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.0.level_blocks.0.model.0.0.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.0.level_blocks.0.model.0.0.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.0.level_blocks.0.model.0.1.model.0.model.1.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.0.level_blocks.0.model.0.1.model.0.model.1.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.0.level_blocks.0.model.0.1.model.0.model.3.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.0.level_blocks.0.model.0.1.model.0.model.3.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.0.level_blocks.0.model.0.1.model.1.model.1.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.0.level_blocks.0.model.0.1.model.1.model.1.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.0.level_blocks.0.model.0.1.model.1.model.3.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.0.level_blocks.0.model.0.1.model.1.model.3.weight": "pytorch_model-00001-of-00004.bin", + 
"vqvae.encoders.0.level_blocks.0.model.0.1.model.2.model.1.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.0.level_blocks.0.model.0.1.model.2.model.1.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.0.level_blocks.0.model.0.1.model.2.model.3.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.0.level_blocks.0.model.0.1.model.2.model.3.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.0.level_blocks.0.model.0.1.model.3.model.1.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.0.level_blocks.0.model.0.1.model.3.model.1.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.0.level_blocks.0.model.0.1.model.3.model.3.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.0.level_blocks.0.model.0.1.model.3.model.3.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.0.level_blocks.0.model.0.1.model.4.model.1.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.0.level_blocks.0.model.0.1.model.4.model.1.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.0.level_blocks.0.model.0.1.model.4.model.3.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.0.level_blocks.0.model.0.1.model.4.model.3.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.0.level_blocks.0.model.0.1.model.5.model.1.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.0.level_blocks.0.model.0.1.model.5.model.1.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.0.level_blocks.0.model.0.1.model.5.model.3.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.0.level_blocks.0.model.0.1.model.5.model.3.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.0.level_blocks.0.model.0.1.model.6.model.1.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.0.level_blocks.0.model.0.1.model.6.model.1.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.0.level_blocks.0.model.0.1.model.6.model.3.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.0.level_blocks.0.model.0.1.model.6.model.3.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.0.level_blocks.0.model.0.1.model.7.model.1.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.0.level_blocks.0.model.0.1.model.7.model.1.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.0.level_blocks.0.model.0.1.model.7.model.3.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.0.level_blocks.0.model.0.1.model.7.model.3.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.0.level_blocks.0.model.1.0.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.0.level_blocks.0.model.1.0.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.0.level_blocks.0.model.1.1.model.0.model.1.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.0.level_blocks.0.model.1.1.model.0.model.1.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.0.level_blocks.0.model.1.1.model.0.model.3.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.0.level_blocks.0.model.1.1.model.0.model.3.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.0.level_blocks.0.model.1.1.model.1.model.1.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.0.level_blocks.0.model.1.1.model.1.model.1.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.0.level_blocks.0.model.1.1.model.1.model.3.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.0.level_blocks.0.model.1.1.model.1.model.3.weight": "pytorch_model-00001-of-00004.bin", + 
"vqvae.encoders.0.level_blocks.0.model.1.1.model.2.model.1.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.0.level_blocks.0.model.1.1.model.2.model.1.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.0.level_blocks.0.model.1.1.model.2.model.3.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.0.level_blocks.0.model.1.1.model.2.model.3.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.0.level_blocks.0.model.1.1.model.3.model.1.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.0.level_blocks.0.model.1.1.model.3.model.1.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.0.level_blocks.0.model.1.1.model.3.model.3.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.0.level_blocks.0.model.1.1.model.3.model.3.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.0.level_blocks.0.model.1.1.model.4.model.1.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.0.level_blocks.0.model.1.1.model.4.model.1.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.0.level_blocks.0.model.1.1.model.4.model.3.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.0.level_blocks.0.model.1.1.model.4.model.3.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.0.level_blocks.0.model.1.1.model.5.model.1.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.0.level_blocks.0.model.1.1.model.5.model.1.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.0.level_blocks.0.model.1.1.model.5.model.3.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.0.level_blocks.0.model.1.1.model.5.model.3.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.0.level_blocks.0.model.1.1.model.6.model.1.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.0.level_blocks.0.model.1.1.model.6.model.1.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.0.level_blocks.0.model.1.1.model.6.model.3.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.0.level_blocks.0.model.1.1.model.6.model.3.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.0.level_blocks.0.model.1.1.model.7.model.1.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.0.level_blocks.0.model.1.1.model.7.model.1.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.0.level_blocks.0.model.1.1.model.7.model.3.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.0.level_blocks.0.model.1.1.model.7.model.3.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.0.level_blocks.0.model.2.0.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.0.level_blocks.0.model.2.0.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.0.level_blocks.0.model.2.1.model.0.model.1.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.0.level_blocks.0.model.2.1.model.0.model.1.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.0.level_blocks.0.model.2.1.model.0.model.3.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.0.level_blocks.0.model.2.1.model.0.model.3.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.0.level_blocks.0.model.2.1.model.1.model.1.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.0.level_blocks.0.model.2.1.model.1.model.1.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.0.level_blocks.0.model.2.1.model.1.model.3.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.0.level_blocks.0.model.2.1.model.1.model.3.weight": "pytorch_model-00001-of-00004.bin", + 
"vqvae.encoders.0.level_blocks.0.model.2.1.model.2.model.1.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.0.level_blocks.0.model.2.1.model.2.model.1.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.0.level_blocks.0.model.2.1.model.2.model.3.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.0.level_blocks.0.model.2.1.model.2.model.3.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.0.level_blocks.0.model.2.1.model.3.model.1.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.0.level_blocks.0.model.2.1.model.3.model.1.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.0.level_blocks.0.model.2.1.model.3.model.3.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.0.level_blocks.0.model.2.1.model.3.model.3.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.0.level_blocks.0.model.2.1.model.4.model.1.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.0.level_blocks.0.model.2.1.model.4.model.1.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.0.level_blocks.0.model.2.1.model.4.model.3.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.0.level_blocks.0.model.2.1.model.4.model.3.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.0.level_blocks.0.model.2.1.model.5.model.1.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.0.level_blocks.0.model.2.1.model.5.model.1.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.0.level_blocks.0.model.2.1.model.5.model.3.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.0.level_blocks.0.model.2.1.model.5.model.3.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.0.level_blocks.0.model.2.1.model.6.model.1.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.0.level_blocks.0.model.2.1.model.6.model.1.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.0.level_blocks.0.model.2.1.model.6.model.3.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.0.level_blocks.0.model.2.1.model.6.model.3.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.0.level_blocks.0.model.2.1.model.7.model.1.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.0.level_blocks.0.model.2.1.model.7.model.1.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.0.level_blocks.0.model.2.1.model.7.model.3.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.0.level_blocks.0.model.2.1.model.7.model.3.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.0.level_blocks.0.model.3.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.0.level_blocks.0.model.3.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.1.level_blocks.0.model.0.0.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.1.level_blocks.0.model.0.0.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.1.level_blocks.0.model.0.1.model.0.model.1.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.1.level_blocks.0.model.0.1.model.0.model.1.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.1.level_blocks.0.model.0.1.model.0.model.3.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.1.level_blocks.0.model.0.1.model.0.model.3.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.1.level_blocks.0.model.0.1.model.1.model.1.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.1.level_blocks.0.model.0.1.model.1.model.1.weight": "pytorch_model-00001-of-00004.bin", + 
"vqvae.encoders.1.level_blocks.0.model.0.1.model.1.model.3.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.1.level_blocks.0.model.0.1.model.1.model.3.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.1.level_blocks.0.model.0.1.model.2.model.1.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.1.level_blocks.0.model.0.1.model.2.model.1.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.1.level_blocks.0.model.0.1.model.2.model.3.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.1.level_blocks.0.model.0.1.model.2.model.3.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.1.level_blocks.0.model.0.1.model.3.model.1.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.1.level_blocks.0.model.0.1.model.3.model.1.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.1.level_blocks.0.model.0.1.model.3.model.3.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.1.level_blocks.0.model.0.1.model.3.model.3.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.1.level_blocks.0.model.1.0.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.1.level_blocks.0.model.1.0.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.1.level_blocks.0.model.1.1.model.0.model.1.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.1.level_blocks.0.model.1.1.model.0.model.1.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.1.level_blocks.0.model.1.1.model.0.model.3.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.1.level_blocks.0.model.1.1.model.0.model.3.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.1.level_blocks.0.model.1.1.model.1.model.1.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.1.level_blocks.0.model.1.1.model.1.model.1.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.1.level_blocks.0.model.1.1.model.1.model.3.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.1.level_blocks.0.model.1.1.model.1.model.3.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.1.level_blocks.0.model.1.1.model.2.model.1.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.1.level_blocks.0.model.1.1.model.2.model.1.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.1.level_blocks.0.model.1.1.model.2.model.3.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.1.level_blocks.0.model.1.1.model.2.model.3.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.1.level_blocks.0.model.1.1.model.3.model.1.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.1.level_blocks.0.model.1.1.model.3.model.1.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.1.level_blocks.0.model.1.1.model.3.model.3.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.1.level_blocks.0.model.1.1.model.3.model.3.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.1.level_blocks.0.model.2.0.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.1.level_blocks.0.model.2.0.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.1.level_blocks.0.model.2.1.model.0.model.1.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.1.level_blocks.0.model.2.1.model.0.model.1.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.1.level_blocks.0.model.2.1.model.0.model.3.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.1.level_blocks.0.model.2.1.model.0.model.3.weight": "pytorch_model-00001-of-00004.bin", + 
"vqvae.encoders.1.level_blocks.0.model.2.1.model.1.model.1.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.1.level_blocks.0.model.2.1.model.1.model.1.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.1.level_blocks.0.model.2.1.model.1.model.3.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.1.level_blocks.0.model.2.1.model.1.model.3.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.1.level_blocks.0.model.2.1.model.2.model.1.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.1.level_blocks.0.model.2.1.model.2.model.1.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.1.level_blocks.0.model.2.1.model.2.model.3.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.1.level_blocks.0.model.2.1.model.2.model.3.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.1.level_blocks.0.model.2.1.model.3.model.1.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.1.level_blocks.0.model.2.1.model.3.model.1.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.1.level_blocks.0.model.2.1.model.3.model.3.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.1.level_blocks.0.model.2.1.model.3.model.3.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.1.level_blocks.0.model.3.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.1.level_blocks.0.model.3.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.1.level_blocks.1.model.0.0.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.1.level_blocks.1.model.0.0.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.1.level_blocks.1.model.0.1.model.0.model.1.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.1.level_blocks.1.model.0.1.model.0.model.1.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.1.level_blocks.1.model.0.1.model.0.model.3.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.1.level_blocks.1.model.0.1.model.0.model.3.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.1.level_blocks.1.model.0.1.model.1.model.1.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.1.level_blocks.1.model.0.1.model.1.model.1.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.1.level_blocks.1.model.0.1.model.1.model.3.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.1.level_blocks.1.model.0.1.model.1.model.3.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.1.level_blocks.1.model.0.1.model.2.model.1.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.1.level_blocks.1.model.0.1.model.2.model.1.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.1.level_blocks.1.model.0.1.model.2.model.3.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.1.level_blocks.1.model.0.1.model.2.model.3.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.1.level_blocks.1.model.0.1.model.3.model.1.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.1.level_blocks.1.model.0.1.model.3.model.1.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.1.level_blocks.1.model.0.1.model.3.model.3.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.1.level_blocks.1.model.0.1.model.3.model.3.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.1.level_blocks.1.model.1.0.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.1.level_blocks.1.model.1.0.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.1.level_blocks.1.model.1.1.model.0.model.1.bias": 
"pytorch_model-00001-of-00004.bin", + "vqvae.encoders.1.level_blocks.1.model.1.1.model.0.model.1.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.1.level_blocks.1.model.1.1.model.0.model.3.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.1.level_blocks.1.model.1.1.model.0.model.3.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.1.level_blocks.1.model.1.1.model.1.model.1.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.1.level_blocks.1.model.1.1.model.1.model.1.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.1.level_blocks.1.model.1.1.model.1.model.3.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.1.level_blocks.1.model.1.1.model.1.model.3.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.1.level_blocks.1.model.1.1.model.2.model.1.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.1.level_blocks.1.model.1.1.model.2.model.1.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.1.level_blocks.1.model.1.1.model.2.model.3.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.1.level_blocks.1.model.1.1.model.2.model.3.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.1.level_blocks.1.model.1.1.model.3.model.1.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.1.level_blocks.1.model.1.1.model.3.model.1.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.1.level_blocks.1.model.1.1.model.3.model.3.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.1.level_blocks.1.model.1.1.model.3.model.3.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.1.level_blocks.1.model.2.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.1.level_blocks.1.model.2.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.2.level_blocks.0.model.0.0.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.2.level_blocks.0.model.0.0.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.2.level_blocks.0.model.0.1.model.0.model.1.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.2.level_blocks.0.model.0.1.model.0.model.1.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.2.level_blocks.0.model.0.1.model.0.model.3.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.2.level_blocks.0.model.0.1.model.0.model.3.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.2.level_blocks.0.model.0.1.model.1.model.1.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.2.level_blocks.0.model.0.1.model.1.model.1.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.2.level_blocks.0.model.0.1.model.1.model.3.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.2.level_blocks.0.model.0.1.model.1.model.3.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.2.level_blocks.0.model.0.1.model.2.model.1.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.2.level_blocks.0.model.0.1.model.2.model.1.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.2.level_blocks.0.model.0.1.model.2.model.3.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.2.level_blocks.0.model.0.1.model.2.model.3.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.2.level_blocks.0.model.0.1.model.3.model.1.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.2.level_blocks.0.model.0.1.model.3.model.1.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.2.level_blocks.0.model.0.1.model.3.model.3.bias": "pytorch_model-00001-of-00004.bin", + 
"vqvae.encoders.2.level_blocks.0.model.0.1.model.3.model.3.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.2.level_blocks.0.model.1.0.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.2.level_blocks.0.model.1.0.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.2.level_blocks.0.model.1.1.model.0.model.1.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.2.level_blocks.0.model.1.1.model.0.model.1.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.2.level_blocks.0.model.1.1.model.0.model.3.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.2.level_blocks.0.model.1.1.model.0.model.3.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.2.level_blocks.0.model.1.1.model.1.model.1.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.2.level_blocks.0.model.1.1.model.1.model.1.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.2.level_blocks.0.model.1.1.model.1.model.3.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.2.level_blocks.0.model.1.1.model.1.model.3.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.2.level_blocks.0.model.1.1.model.2.model.1.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.2.level_blocks.0.model.1.1.model.2.model.1.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.2.level_blocks.0.model.1.1.model.2.model.3.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.2.level_blocks.0.model.1.1.model.2.model.3.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.2.level_blocks.0.model.1.1.model.3.model.1.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.2.level_blocks.0.model.1.1.model.3.model.1.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.2.level_blocks.0.model.1.1.model.3.model.3.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.2.level_blocks.0.model.1.1.model.3.model.3.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.2.level_blocks.0.model.2.0.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.2.level_blocks.0.model.2.0.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.2.level_blocks.0.model.2.1.model.0.model.1.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.2.level_blocks.0.model.2.1.model.0.model.1.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.2.level_blocks.0.model.2.1.model.0.model.3.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.2.level_blocks.0.model.2.1.model.0.model.3.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.2.level_blocks.0.model.2.1.model.1.model.1.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.2.level_blocks.0.model.2.1.model.1.model.1.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.2.level_blocks.0.model.2.1.model.1.model.3.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.2.level_blocks.0.model.2.1.model.1.model.3.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.2.level_blocks.0.model.2.1.model.2.model.1.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.2.level_blocks.0.model.2.1.model.2.model.1.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.2.level_blocks.0.model.2.1.model.2.model.3.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.2.level_blocks.0.model.2.1.model.2.model.3.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.2.level_blocks.0.model.2.1.model.3.model.1.bias": "pytorch_model-00001-of-00004.bin", + 
"vqvae.encoders.2.level_blocks.0.model.2.1.model.3.model.1.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.2.level_blocks.0.model.2.1.model.3.model.3.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.2.level_blocks.0.model.2.1.model.3.model.3.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.2.level_blocks.0.model.3.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.2.level_blocks.0.model.3.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.2.level_blocks.1.model.0.0.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.2.level_blocks.1.model.0.0.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.2.level_blocks.1.model.0.1.model.0.model.1.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.2.level_blocks.1.model.0.1.model.0.model.1.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.2.level_blocks.1.model.0.1.model.0.model.3.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.2.level_blocks.1.model.0.1.model.0.model.3.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.2.level_blocks.1.model.0.1.model.1.model.1.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.2.level_blocks.1.model.0.1.model.1.model.1.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.2.level_blocks.1.model.0.1.model.1.model.3.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.2.level_blocks.1.model.0.1.model.1.model.3.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.2.level_blocks.1.model.0.1.model.2.model.1.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.2.level_blocks.1.model.0.1.model.2.model.1.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.2.level_blocks.1.model.0.1.model.2.model.3.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.2.level_blocks.1.model.0.1.model.2.model.3.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.2.level_blocks.1.model.0.1.model.3.model.1.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.2.level_blocks.1.model.0.1.model.3.model.1.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.2.level_blocks.1.model.0.1.model.3.model.3.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.2.level_blocks.1.model.0.1.model.3.model.3.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.2.level_blocks.1.model.1.0.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.2.level_blocks.1.model.1.0.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.2.level_blocks.1.model.1.1.model.0.model.1.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.2.level_blocks.1.model.1.1.model.0.model.1.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.2.level_blocks.1.model.1.1.model.0.model.3.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.2.level_blocks.1.model.1.1.model.0.model.3.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.2.level_blocks.1.model.1.1.model.1.model.1.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.2.level_blocks.1.model.1.1.model.1.model.1.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.2.level_blocks.1.model.1.1.model.1.model.3.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.2.level_blocks.1.model.1.1.model.1.model.3.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.2.level_blocks.1.model.1.1.model.2.model.1.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.2.level_blocks.1.model.1.1.model.2.model.1.weight": 
"pytorch_model-00001-of-00004.bin", + "vqvae.encoders.2.level_blocks.1.model.1.1.model.2.model.3.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.2.level_blocks.1.model.1.1.model.2.model.3.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.2.level_blocks.1.model.1.1.model.3.model.1.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.2.level_blocks.1.model.1.1.model.3.model.1.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.2.level_blocks.1.model.1.1.model.3.model.3.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.2.level_blocks.1.model.1.1.model.3.model.3.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.2.level_blocks.1.model.2.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.2.level_blocks.1.model.2.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.2.level_blocks.2.model.0.0.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.2.level_blocks.2.model.0.0.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.2.level_blocks.2.model.0.1.model.0.model.1.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.2.level_blocks.2.model.0.1.model.0.model.1.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.2.level_blocks.2.model.0.1.model.0.model.3.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.2.level_blocks.2.model.0.1.model.0.model.3.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.2.level_blocks.2.model.0.1.model.1.model.1.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.2.level_blocks.2.model.0.1.model.1.model.1.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.2.level_blocks.2.model.0.1.model.1.model.3.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.2.level_blocks.2.model.0.1.model.1.model.3.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.2.level_blocks.2.model.0.1.model.2.model.1.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.2.level_blocks.2.model.0.1.model.2.model.1.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.2.level_blocks.2.model.0.1.model.2.model.3.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.2.level_blocks.2.model.0.1.model.2.model.3.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.2.level_blocks.2.model.0.1.model.3.model.1.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.2.level_blocks.2.model.0.1.model.3.model.1.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.2.level_blocks.2.model.0.1.model.3.model.3.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.2.level_blocks.2.model.0.1.model.3.model.3.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.2.level_blocks.2.model.1.0.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.2.level_blocks.2.model.1.0.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.2.level_blocks.2.model.1.1.model.0.model.1.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.2.level_blocks.2.model.1.1.model.0.model.1.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.2.level_blocks.2.model.1.1.model.0.model.3.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.2.level_blocks.2.model.1.1.model.0.model.3.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.2.level_blocks.2.model.1.1.model.1.model.1.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.2.level_blocks.2.model.1.1.model.1.model.1.weight": "pytorch_model-00001-of-00004.bin", + 
"vqvae.encoders.2.level_blocks.2.model.1.1.model.1.model.3.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.2.level_blocks.2.model.1.1.model.1.model.3.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.2.level_blocks.2.model.1.1.model.2.model.1.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.2.level_blocks.2.model.1.1.model.2.model.1.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.2.level_blocks.2.model.1.1.model.2.model.3.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.2.level_blocks.2.model.1.1.model.2.model.3.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.2.level_blocks.2.model.1.1.model.3.model.1.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.2.level_blocks.2.model.1.1.model.3.model.1.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.2.level_blocks.2.model.1.1.model.3.model.3.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.2.level_blocks.2.model.1.1.model.3.model.3.weight": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.2.level_blocks.2.model.2.bias": "pytorch_model-00001-of-00004.bin", + "vqvae.encoders.2.level_blocks.2.model.2.weight": "pytorch_model-00001-of-00004.bin" + } +}