import torch
from tqdm import tqdm
from peft import  LoraConfig, get_peft_model
from modelscope import AutoTokenizer, AutoModel
from torch.utils.data import DataLoader, Dataset

# model_dir = "../../chatglm3-6b"
# with torch.no_grad():
#     tokenizer = AutoTokenizer.from_pretrained(model_dir, trust_remote_code=True)
#     model = AutoModel.from_pretrained(model_dir, trust_remote_code=True).half().cuda()

# Demo: collate variable-length sequences into one rectangular batch.
# pad_sequence stacks [1, 2, 3] and [4, 5] into a (2, 3) tensor,
# filling the shorter row's tail with padding_value (0).
sequences = [torch.tensor([1, 2, 3]), torch.tensor([4, 5])]
res = torch.nn.utils.rnn.pad_sequence(
    sequences,
    batch_first=True,   # output shape (batch, max_len) rather than (max_len, batch)
    padding_value=0,
)
print(res)

