from transformers import BertTokenizer, BartForConditionalGeneration
from torch.utils.data import Dataset, DataLoader
import torch

# Path to a local fine-tuned Chinese BART dialogue checkpoint.
# NOTE(review): hard-coded Windows path — could also be a Hub model id;
# adjust to your environment.
model_name = "D:/mygitisoft/python-ai/pytorch/dialogue/"
tokenizer = BertTokenizer.from_pretrained(model_name)
model = BartForConditionalGeneration.from_pretrained(model_name)
model.eval()  # inference only: disable dropout / training-mode layers

# Dialogue history, one string per turn; turns are joined with the
# tokenizer's separator token to form a single model input.
history = ["你会踢足球吗？"]
history_str = "对话历史：" + tokenizer.sep_token.join(history)

# Keep the full tokenizer output so generate() also receives the
# attention mask (passing input_ids alone drops it and triggers a
# transformers warning).
inputs = tokenizer(history_str, return_tensors='pt')
with torch.no_grad():  # no autograd bookkeeping needed for generation
    output_ids = model.generate(
        input_ids=inputs.input_ids,
        attention_mask=inputs.attention_mask,
        max_new_tokens=64,  # explicit bound instead of implicit config default
    )[0]
print(tokenizer.decode(output_ids, skip_special_tokens=True))
