import os
import pickle
from torch.utils.data import Dataset
import torch

class ChatDataset(Dataset):
    """Torch dataset over a list of pre-tokenized id sequences.

    Sequences longer than ``max_len`` are truncated in ``__getitem__``;
    shorter ones are returned unchanged — padding is deferred until batch
    assembly (e.g. the DataLoader's collate function).
    """

    def __init__(self, input_list, max_len):
        super().__init__()
        self.input_list = input_list  # list of token-id sequences
        self.max_len = max_len        # truncation length per sample

    def __len__(self):
        """Number of samples in the dataset."""
        return len(self.input_list)

    def __getitem__(self, index):
        """Return sample *index* as a 1-D LongTensor, truncated to max_len."""
        ids = self.input_list[index][:self.max_len]
        return torch.tensor(ids, dtype=torch.long)
    

def demo():
    """Load the pickled medical training data and print a quick sanity check."""
    base_dir = os.path.dirname(os.path.abspath(__file__))
    pkl_path = os.path.join(base_dir, '../data/medical_train.pkl')
    # NOTE(review): pickle.load executes arbitrary code on load — only use
    # with trusted, locally produced data files.
    with open(pkl_path, 'rb') as f:
        samples = pickle.load(f)

    dataset = ChatDataset(samples, max_len=100)
    print(f'train_dataset[0]-->\n{dataset[0]}')
    print(f'len-->\n{len(dataset)}')



if __name__ == '__main__':
    # Run the quick dataset sanity check when executed as a script.
    demo()

    

    