import torch
from tqdm import tqdm
from peft import LoraConfig,get_peft_model
from modelscope import AutoTokenizer, AutoModel
from torch.utils.data import DataLoader, Dataset

# Local ChatGLM3-6B checkpoint previously downloaded via ModelScope.
model_dir = "C:\\Users\\16014\\.cache\\modelscope\\hub\\models\\ZhipuAI\\chatglm3-6b"

# NOTE(review): the original wrapped these loads in `torch.no_grad()`, which has
# no effect on weight loading and misleadingly suggests gradients are disabled
# for the training loop below — removed.
tokenizer = AutoTokenizer.from_pretrained(model_dir, trust_remote_code=True)
# Load weights in fp16 and move to GPU; LoRA adapters are attached later.
model = AutoModel.from_pretrained(model_dir, trust_remote_code=True).half().cuda()

# LoRA adapter configuration: low-rank updates are injected into the fused
# attention projection ("query_key_value") of each transformer layer.
lora_config = LoraConfig(
    task_type="CAUSAL_LM",  # decoder-only LM objective (alternative: SEQ_2_SEQ_LM)
    target_modules=["query_key_value"],
    r=8,  # rank of the low-rank decomposition
    lora_alpha=16,  # scaling factor (effective scale is alpha / r)
    lora_dropout=0.05,
    bias="none",  # keep all bias terms frozen
)

# Training hyper-parameters.
BATCH_SIZE = 1
LEARNING_RATE = 2e-4
device = "cuda"

# Wrap the base model so that only the LoRA adapter weights are trainable.
model = get_peft_model(model, lora_config)
model.print_trainable_parameters()

# Project-local dataset and collator (see get_data.py).
import get_data

train_dataset = get_data.ChatDataset()
data_collator = get_data.DataCollatorForChatDataset()
train_loader = DataLoader(
    train_dataset,
    batch_size=BATCH_SIZE,
    shuffle=True,
    collate_fn=data_collator,
)



# Token-level cross entropy; positions labelled -100 (prompt/padding) are ignored.
loss_fun = torch.nn.CrossEntropyLoss(ignore_index=-100)

optimizer = torch.optim.AdamW(model.parameters(), lr=LEARNING_RATE)
# Cosine decay of the learning rate from 2e-4 down to eta_min=2e-6.
# NOTE(review): T_max=2400 looks like total optimizer steps (epochs * steps
# per epoch) — confirm it matches 20 * len(train_loader) for this dataset.
lr_scheduler = torch.optim.lr_scheduler.CosineAnnealingLR(
    optimizer, T_max=2400, eta_min=2e-6, last_epoch=-1
)

# Manual causal-LM training loop: predict token t+1 from tokens <= t via an
# explicit one-token shift between inputs and labels.
for epoch in range(20):
    pbar = tqdm(train_loader, total=len(train_loader))
    for data_dict in pbar:
        optimizer.zero_grad()

        # Shift for next-token prediction: drop the last input token and the
        # first label token so positions line up.
        input_ids = data_dict["input_ids"].to(device)[:, :-1]
        labels = data_dict["labels"].to(device)[:, 1:]

        logits = model(input_ids)["logits"]
        # Use reshape (not view): the sliced labels tensor is non-contiguous
        # for batch sizes > 1, where .view(-1) raises a RuntimeError.
        loss = loss_fun(logits.reshape(-1, logits.size(-1)), labels.reshape(-1))

        loss.backward()
        optimizer.step()
        lr_scheduler.step()  # advance the LR schedule once per optimizer step

        pbar.set_description(
            f"epoch:{epoch + 1}, train_loss:{loss.item():.5f}, lr:{lr_scheduler.get_last_lr()[0] * 1000:.5f}")


# Persist only the LoRA adapter weights (not the full base model).
model.save_pretrained("./lora_saver/lora_query_key_value_20250720")



