from transformers import AutoTokenizer,AutoModelForCausalLM
from peft import PeftModel

# Merge a LoRA adapter into the ChatGLM3-6B base model and save the
# standalone merged weights for deployment (no PEFT dependency at inference).

# Raw string: a plain literal here contains invalid escape sequences
# (\p, \Z, \c) that warn on Python 3.12+ and would silently corrupt the
# path if a segment ever started with \n, \t, etc.
MODEL_PATH = r'F:\pretrain_model\ZhipuAI\chatglm3-6b'
ADAPTER_PATH = 'output/checkpoint-500/'
OUTPUT_PATH = 'output/merge_model'

# trust_remote_code is required: ChatGLM3 ships custom modeling code.
# low_cpu_mem_usage streams weights to avoid a full fp32 copy in RAM.
model = AutoModelForCausalLM.from_pretrained(
    MODEL_PATH,
    low_cpu_mem_usage=True,
    trust_remote_code=True,
)
tokenizer = AutoTokenizer.from_pretrained(MODEL_PATH, trust_remote_code=True)

# Attach the trained LoRA adapter on top of the frozen base weights.
p_model = PeftModel.from_pretrained(model, model_id=ADAPTER_PATH)

# Fold the adapter deltas into the base weights and drop the PEFT wrappers,
# yielding a plain transformers model.
merge_model = p_model.merge_and_unload()

merge_model.save_pretrained(OUTPUT_PATH)
# Save the tokenizer too, so the output directory is self-contained and
# loadable via from_pretrained without referencing the original base path.
tokenizer.save_pretrained(OUTPUT_PATH)