#! -*- coding: utf-8 -*-
"""
@Author: AI
@Create Time: 20240625
@Info: 加载数据的回调函数
"""
import torch


def collate(batch):
    """Collate callback for a DataLoader.

    Right-pads every sequence field of each sample with zeros up to the
    longest ``seq_len`` in the batch, then stacks each field into a
    ``(batch, max_len)`` LongTensor.

    Args:
        batch: list of sample dicts, each containing ``input_ids``,
            ``token_type_ids`` and ``attention_mask`` (lists of ints) plus
            ``seq_len`` (int). Training samples additionally carry
            ``start_label`` and ``end_label``.

    Returns:
        dict mapping each field name to a ``(batch, max_len)`` LongTensor.
        The label fields are included only when present in the samples.
    """
    max_length = max(item['seq_len'] for item in batch)

    # Labels only exist during training; infer the mode from the first sample.
    keys = ['input_ids', 'token_type_ids', 'attention_mask']
    if 'start_label' in batch[0]:
        keys += ['start_label', 'end_label']

    output = {key: [] for key in keys}
    for item in batch:
        # Zero-padding is valid for all fields: pad token, segment id,
        # attention mask and span labels all use 0 for padding positions.
        padding = [0] * (max_length - item['seq_len'])
        for key in keys:
            output[key].append(item[key] + padding)

    # torch.tensor(..., dtype=torch.long) is the modern, recommended
    # spelling of the legacy torch.LongTensor(...) constructor.
    return {key: torch.tensor(value, dtype=torch.long)
            for key, value in output.items()}
