from peft import PeftModel
from transformers import AutoModelForCausalLM

BASE_MODEL_PATH = "/gdata/hpdesktop/webui/models/Llama-2-7b-hf"
ADAPTER_PATH = "/gdata/hpdesktop/zym/lora2checkPoint/checkpoint-2000"


def merge_lora_adapter(base_model_path, adapter_path, output_path=None):
    """Merge a LoRA adapter into its base causal language model.

    Loads the base model weights, applies the PEFT adapter on top, then
    folds the adapter deltas into the base weights so the result is a
    plain ``transformers`` model with no PEFT wrappers.

    Args:
        base_model_path: Directory containing the base model weights.
        adapter_path: Directory containing the trained LoRA adapter
            (e.g. a trainer checkpoint directory).
        output_path: If given, the merged model is persisted there with
            ``save_pretrained``; if ``None`` (default) the model is only
            returned, matching the original script's behavior.

    Returns:
        The merged ``AutoModelForCausalLM`` instance.
    """
    model = AutoModelForCausalLM.from_pretrained(base_model_path)
    model = PeftModel.from_pretrained(model, adapter_path)
    # merge_and_unload() bakes the LoRA weights into the base model and
    # strips the PEFT layers, leaving a standard transformers model.
    model = model.merge_and_unload()
    if output_path is not None:
        model.save_pretrained(output_path)
    return model


if __name__ == "__main__":
    # NOTE(review): the original script merged but never saved — its
    # save_pretrained() calls were commented out. Pass an output_path
    # (e.g. "./LoraModel2/2000") to persist the merged weights.
    merge_lora_adapter(BASE_MODEL_PATH, ADAPTER_PATH)