# Currently only supports merging models that each consist of a single checkpoint file.
import os
import torch
import json 
from transformers import AutoModel,AutoModelForCausalLM
import importlib


# Checkpoint directories to merge. Each must be a single-file
# (non-sharded) checkpoint containing pytorch_model.bin + config.json.
checkpoint_dir=['/hy-tmp/thenlper/gte-large-zh','/hy-tmp/multi-r2/Fuser/add2begin/baichuan7bchat',]
# The attribute name each checkpoint's submodule will have in the newly
# constructed merged model (parameter names get prefixed with this alias).
alias_in_new_model=['sent2vec','model']
PYTORCH_CHECKPOINT_NAME='pytorch_model.bin'
CONFIG_NAME='config.json'
# Maps "<alias>.<param_name>" -> shard file name, i.e. the "weight_map"
# section of a Hugging Face sharded-checkpoint index file.
weight_dict={}
for shard_idx,(checkpoint,alias) in enumerate(zip(checkpoint_dir,alias_in_new_model),start=1):
    # Read the checkpoint's config to decide which Auto class can load it
    # (use a context manager so the file handle is not leaked).
    with open(os.path.join(checkpoint,CONFIG_NAME),encoding='utf-8') as f:
        config=json.load(f)
    with torch.inference_mode():
        # Architectures ending in "Model" (e.g. BertModel) are plain encoders
        # loadable via AutoModel; everything else is assumed to be a causal LM.
        if config['architectures'][0].endswith('Model'):
            model= AutoModel.from_pretrained(checkpoint,low_cpu_mem_usage=True,trust_remote_code=True)
        else:
            model= AutoModelForCausalLM.from_pretrained(checkpoint,low_cpu_mem_usage=True,trust_remote_code=True)
        # Every parameter of this checkpoint lives in the same shard file;
        # prefix each parameter name with the alias it will have in the
        # merged model.
        shard_name=f'shard{shard_idx}.bin'
        weight_dict.update({f'{alias}.{k}':shard_name for k in model.state_dict().keys()})
        print(weight_dict)


# Emit the index file that tells transformers which shard holds each weight.
with open('pytorch_model.bin.index.json','w',encoding='utf-8') as o:
    json.dump({"weight_map":weight_dict},o,ensure_ascii=False,allow_nan=False)

