import pandas as pd
import torch
from torch.utils.data import Dataset, DataLoader

from __00__config import Config

# Module-level config instance; supplies bert_tokenizer, pad_size,
# batch_size and train_datapath to the functions below.
config = Config()
def load_raw_data(path):
	"""Read a CSV of (question, label) rows and return them as a list of pairs.

	Args:
		path: Path to a UTF-8, comma-separated CSV file containing at least
			the columns 'questions' and 'labels'.

	Returns:
		list[tuple]: One (text, label) tuple per row, in file order.
	"""
	# Load the data
	df = pd.read_csv(path, encoding='utf-8', sep=',')
	# Pair each question with its label; list(zip(...)) replaces the
	# original manual append loop (same result, idiomatic and faster).
	return list(zip(df['questions'], df['labels']))


# Custom Dataset over pre-loaded (text, label) pairs
class TextDataset(Dataset):
	"""Thin torch Dataset wrapping a list of (text, label) pairs."""

	def __init__(self, data_list):
		# Keep a reference to the already-loaded samples.
		self.data_list = data_list

	def __len__(self):
		"""Number of available samples."""
		return len(self.data_list)

	def __getitem__(self, index):
		"""Return the (text, label) pair at *index*.

		Out-of-range indices are clamped into [0, len - 1] rather than
		raising, so any integer index yields a valid sample.
		"""
		last = len(self.data_list) - 1
		if index < 0:
			index = 0
		elif index > last:
			index = last
		text, label = self.data_list[index]
		return text, label


# Collate function for the DataLoader
def collate_fn(batch):
	"""Collate a batch of (text, label) pairs into model-ready tensors.

	Args:
		batch: Iterable of (text, label) tuples as produced by TextDataset.

	Returns:
		tuple: (input_ids, attention_mask, labels) — the first two shaped
		(batch, pad_size); all are torch tensors.
	"""
	texts, labels = zip(*batch)
	# Tokenize the whole batch in one call. return_tensors='pt' makes the
	# tokenizer return torch tensors directly, replacing the original
	# manual torch.tensor(input_ids/attention_mask) conversions.
	encoded = config.bert_tokenizer.batch_encode_plus(
		list(texts),
		max_length=config.pad_size,  # target sequence length
		truncation=True,  # cut off sequences longer than pad_size
		padding='max_length',  # pad every sequence up to pad_size
		add_special_tokens=True,  # add the special [CLS] and [SEP] tokens
		return_attention_mask=True,  # mask distinguishes real tokens from padding
		return_tensors='pt'  # return torch tensors instead of Python lists
	)
	label_tensor = torch.tensor(labels)
	return encoded['input_ids'], encoded['attention_mask'], label_tensor


# DataLoader factory
def get_dataloader(data_path, is_train=True):
	"""Build a DataLoader over the CSV file at *data_path*.

	Args:
		data_path: CSV file with 'questions' and 'labels' columns.
		is_train: When True the loader reshuffles samples each epoch.

	Returns:
		torch.utils.data.DataLoader yielding collated
		(input_ids, attention_mask, labels) batches.
	"""
	# Load the raw samples and wrap them in the custom Dataset.
	dataset = TextDataset(load_raw_data(data_path))
	# Batch size comes from the shared config; collate_fn tokenizes per batch.
	return DataLoader(
		dataset=dataset,
		batch_size=config.batch_size,
		shuffle=is_train,
		collate_fn=collate_fn
	)


if __name__ == '__main__':
	# Smoke test: load the training data to confirm CSV parsing succeeds.
	# (The commented-out DataLoader/collate_fn exercise that used to live
	# here was dead code and has been removed; use get_dataloader() to
	# test the full pipeline.)
	data = load_raw_data(config.train_datapath)

	print('--------------------------------')