File size: 513 Bytes
dfb3b68
 
 
 
 
 
b491e93
dfb3b68
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
"""Load the open-calm-7b base model and apply the stormy LoRA adapter."""

import torch
from peft import PeftModel
from transformers import AutoModelForCausalLM
from transformers import AutoTokenizer

# Base causal-LM checkpoint and the LoRA adapter fine-tuned on top of it.
BASE_MODEL = "cyberagent/open-calm-7b"
LORA_WEIGHTS = "izumi-lab/stormy-7b-10ep"

# The tokenizer comes from the base model; a LoRA adapter does not alter
# the vocabulary.
tokenizer = AutoTokenizer.from_pretrained(BASE_MODEL)

# Load the 7B base model in fp16 and let accelerate shard/place layers
# automatically across available devices (device_map="auto" requires the
# `accelerate` package). The original `load_in_8bit=False` kwarg was a
# deprecated no-op (False is the default) and has been dropped.
model = AutoModelForCausalLM.from_pretrained(
    BASE_MODEL,
    torch_dtype=torch.float16,
    device_map="auto",
)

# Attach the LoRA weights on top of the base model. `token=True` replaces
# the deprecated `use_auth_token=True` (removed in recent
# transformers/huggingface_hub releases); it makes the hub client use the
# locally cached Hugging Face credentials when fetching the adapter.
model = PeftModel.from_pretrained(
    model, LORA_WEIGHTS, torch_dtype=torch.float16, token=True
)