


import numpy


from utils.transformer_utils import *
from transformer_dataset import PseudoDateDataset
from transformer import Transformer

def test(model: torch.nn.Module, x: torch.FloatTensor, y_true: torch.FloatTensor) -> torch.LongTensor:
	"""
	Transformer 测试 demo.
	:param model	: Transformer model.
	:param x		: Input tensor.
	:param y_true	: Ground truth tensor.
	"""
	x = x.unsqueeze(0)
	y_true = y_true.unsqueeze(0).unsqueeze(0)
	encoder_output = model.encoder(x)
	y_pred = y_true.clone()
	for step in range(14):
		decoder_output = model.decoder(encoder_output, y_pred)
		y = model.linear(decoder_output)
		y = torch.nn.Softmax(dim=-1)(y)
		y_pred = torch.cat((y_true, y), dim=1)
	return y_pred.squeeze(0)

if __name__ == '__main__':
	# Fixed demo hyper-parameters for the date-translation Transformer.
	model_config = {
		'd_input_encoder': 37,
		'd_input_decoder': 12,
		'd_output_encoder': 64,
		'd_output_decoder': 64,
		'd_output': 12,
		'd_hidden_encoder': 128,
		'd_hidden_decoder': 128,
		'n_head_encoder': 4,
		'n_head_decoder': 4,
		'n_position_encoder': 50,
		'n_position_decoder': 50,
	}
	model = Transformer(**model_config)
	dataset = PseudoDateDataset(size=10000)
	# Report which checkpoint epoch (if any) is available before loading.
	try:
		training_epoch = find_last_checkpoint_epoch('checkpoint')
		print('Load model with epoch {}'.format(training_epoch))
	except Exception as e:
		print('No models found in checkpoint: {}'.format(e))
	model = load_model('checkpoint', None, model)
	model.eval()
	x, y, raw = dataset[0]
	# Decode one sample, drop the start row, then map ids back to characters.
	probabilities = test(model, x, y[0])
	token_ids = numpy.argmax(probabilities.detach().numpy(), axis=1)[1:]
	predicted_chars = [dataset.inverse_machine_vocabulary[token] for token in token_ids]
	pred_str = ''.join(predicted_chars)
	human_readable_date = raw['human_readable_date']
	machine_readable_date = raw['machine_readable_date']
	print('[{}] --> [{}], answer: [{}]'.format(human_readable_date, pred_str, list(machine_readable_date)))
