import sys
import os
from tqdm import tqdm
import torch

# Make the project's src/ directory importable: this script lives outside the
# package, so without prepending src/ to sys.path the `from config import ...`
# below would fail.
_this_dir = os.path.dirname(__file__)
project_root = os.path.abspath(os.path.join(_this_dir, '../src'))
sys.path.insert(0, project_root)
print(f'项目根目录 方便import测试: {project_root}')

from config import Bert_Config
from transformers import BertTokenizer

# --- Single-sentence variant (kept commented out for reference) ---
# text = '今天天气不错'


# params = Bert_Config()
# tokenizer = BertTokenizer.from_pretrained(params.bert_path)

# encoded_input = tokenizer(text, padding=True, truncation=True, return_tensors='pt')
# print(encoded_input)

# # Expected output:
# # {'input_ids': tensor([[ 101,  791, 1921, 1921, 3698,  679, 7231,  102]]), 
# #  'token_type_ids': tensor([[0, 0, 0, 0, 0, 0, 0, 0]]), 
# #  'attention_mask': tensor([[1, 1, 1, 1, 1, 1, 1, 1]])}





# Batched tokenization demo: encode the same sentence twice so the output
# shows how the tokenizer stacks input_ids / token_type_ids / attention_mask
# row-per-example when given a list of texts.
text = ['今天天气不错'] * 2

params = Bert_Config()
tokenizer = BertTokenizer.from_pretrained(params.bert_path)

# padding=True pads to the longest sequence in the batch; truncation=True caps
# at the model max length; return_tensors='pt' yields PyTorch tensors.
encoded_input = tokenizer(
    text,
    padding=True,
    truncation=True,
    return_tensors='pt',
)
print(encoded_input)

# Expected output:
# {'input_ids': tensor([[ 101,  791, 1921, 1921, 3698,  679, 7231,  102],
#                       [ 101,  791, 1921, 1921, 3698,  679, 7231,  102]]),
#  'token_type_ids': tensor([[0, 0, 0, 0, 0, 0, 0, 0],
#                            [0, 0, 0, 0, 0, 0, 0, 0]]),
#  'attention_mask': tensor([[1, 1, 1, 1, 1, 1, 1, 1],
#                            [1, 1, 1, 1, 1, 1, 1, 1]])}