import os
import sys
import logging

import torch
import numpy as np
from transformers import T5Tokenizer, T5ForConditionalGeneration

sys.path.append("..")
import helper

# ---- Configuration ----
opsets = 12                          # ONNX opset version used for all three exports
model_dir = "../../models/t5-small"  # local checkpoint directory (also the export target dir)
max_dec_num = 32                     # decoder sequence length used for the dummy export input
seq_len = 64                         # encoder input length (inputs are padded to this)
hidden_dim = 512                     # hidden size of t5-small — TODO confirm against model config

# Configure the root logger so the logging.debug/info calls below actually
# emit output; without this the default WARNING level silences all of them.
logging.basicConfig(level=logging.INFO)

tokenizer = T5Tokenizer.from_pretrained(model_dir)
model = T5ForConditionalGeneration.from_pretrained(model_dir)

# Tokenize one sample prompt, padded to the fixed export length.
contents = "summarize: studies have shown that owning a dog is good for you"
tokens = tokenizer(contents, padding="max_length", max_length=seq_len, return_tensors="pt")  # Batch size 1
input_ids = tokens.input_ids
attention_mask = tokens.attention_mask
logging.debug(f"ids:{input_ids} mask:{attention_mask}")
logging.info(f"ids shape:{input_ids.shape} dtype:{input_ids.dtype}")
logging.info(f"mask shape:{attention_mask.shape} dtype:{attention_mask.dtype}")


class T5Encoder(torch.nn.Module):
    """Wrapper exposing the T5 encoder as a single-output module for ONNX export.

    The underlying encoder returns a tuple/ModelOutput; only element 0
    (the hidden-state tensor) is returned so the exported graph has one output.
    """

    def __init__(self, encoder):
        super().__init__()
        self.encoder = encoder

    def forward(self, input_ids, attention_mask):
        outputs = self.encoder(input_ids=input_ids, attention_mask=attention_mask)
        # Element 0 is the hidden-state tensor (last_hidden_state for HF T5).
        return outputs[0]

class T5Decoder(torch.nn.Module):
    """Wrapper exposing the T5 decoder as a single-output module for ONNX export.

    Takes decoder token ids plus the encoder's attention mask and hidden
    states (cross-attention inputs) and returns only element 0 of the
    decoder's tuple/ModelOutput result.
    """

    def __init__(self, decoder):
        super().__init__()
        self.decoder = decoder

    def forward(self, input_ids, attention_mask, hidden_states):
        outputs = self.decoder(
            input_ids=input_ids,
            encoder_attention_mask=attention_mask,
            encoder_hidden_states=hidden_states,
        )
        # Element 0 is the decoder hidden-state tensor.
        return outputs[0]

class T5LMHead(torch.nn.Module):
    """Wrapper around the language-model head (hidden states -> logits) for ONNX export."""

    def __init__(self, lm_head):
        super().__init__()
        self.lm_head = lm_head

    def forward(self, hidden_states):
        # lm_head returns a tensor directly, so no tuple indexing is needed.
        return self.lm_head(hidden_states)

# ---- Export the encoder ----
encoder = T5Encoder(model.encoder)
logging.info(f"encoder input dtype input_ids:{input_ids.dtype} "
             f"attention_mask:{attention_mask.dtype}")
encoder_onnx_path = f"{model_dir}/encoder.onnx"
# NOTE: the deprecated `enable_onnx_checker` kwarg was removed from
# torch.onnx.export (torch >= 1.13) and raises TypeError there; the ONNX
# checker runs by default, so the flag is simply omitted.
torch.onnx.export(encoder,
                  args=(input_ids.int(), attention_mask.int()),
                  f=encoder_onnx_path,
                  opset_version=opsets,
                  input_names=["input_ids", "attention_mask"],
                  output_names=["hidden_states"],
                  dynamic_axes={
                      "input_ids": {0: "batch_size", 1: "seq_len"},
                      "attention_mask": {0: "batch_size", 1: "seq_len"},
                      "hidden_states": {0: "batch_size", 1: "seq_len"}}
                  )

# ---- Export the decoder ----
decoder = T5Decoder(model.decoder)
# Dummy inputs sized for tracing; only shapes/dtypes matter, and the
# traced axes are declared dynamic below.
dec_ids = torch.ones(1, max_dec_num)
hidden_states = torch.ones(1, seq_len, hidden_dim)
logging.info(f"decoder input dtype dec_ids:{dec_ids.dtype} "
             f"attention_mask:{attention_mask.dtype} hidden_states:{hidden_states.dtype}")
decoder_onnx_path = f"{model_dir}/decoder.onnx"
# NOTE: the deprecated `enable_onnx_checker` kwarg was removed from
# torch.onnx.export (torch >= 1.13); the checker runs by default.
torch.onnx.export(decoder,
                  args=(dec_ids.int(), attention_mask.int(), hidden_states.float()),
                  f=decoder_onnx_path,
                  opset_version=opsets,
                  input_names=["dec_ids", "encoder_attention_mask", "encoder_hidden_states"],
                  output_names=["dec_hidden_states"],
                  dynamic_axes={
                      "dec_ids": {0: "batch_size", 1: "dec_len"},
                      "encoder_attention_mask": {0: "batch_size", 1: "seq_len"},
                      "encoder_hidden_states": {0: "batch_size", 1: "seq_len"},
                      "dec_hidden_states": {0: "batch_size", 1: "dec_len"}}
                  )

# ---- Export the LM head ----
lm_head = T5LMHead(model.lm_head)
# Dummy input: (batch, hidden_dim) — a single decoder position per batch item.
hidden_states = torch.ones(1, hidden_dim)
logging.info(f"lm head input dtype hidden_states:{hidden_states.dtype}")
lm_onnx_path = f"{model_dir}/lm_head.onnx"
# NOTE: the deprecated `enable_onnx_checker` kwarg was removed from
# torch.onnx.export (torch >= 1.13); the checker runs by default.
torch.onnx.export(lm_head,
                  args=(hidden_states.float(),),
                  f=lm_onnx_path,
                  opset_version=opsets,
                  input_names=["hidden_states"],
                  output_names=["logits"],
                  dynamic_axes={
                      "hidden_states": {0: "batch_size"},
                      "logits": {0: "batch_size"}}
                  )
logging.info("[✓] onnx model export success.")