# 2021/8/30 21:41
# file name : xnli_torch2paddle.py

import sys
import os
sys.path.insert(0,os.path.join(os.path.dirname(os.path.abspath(__file__)),'..'))
from paddle_xlm.model.paddle_transformer import TransformerModel as paddle_TransformerModel
from xlm.model.transformer import TransformerModel as torch_TransformerModel
import torch
import paddle
paddle.set_device('cpu')
import pickle
from collections import OrderedDict
import numpy as np
# Load the pickled constructor kwargs shared by both model implementations,
# and build the torch (reference) and paddle (target) models from them.
with open('../../t2p_modle/xnli_model_args.bin', 'rb') as f:
    args = pickle.load(f)
print(args)
model_torch = torch_TransformerModel(**args)
model_paddle = paddle_TransformerModel(**args)

# map_location='cpu' so a checkpoint saved on GPU still loads on a CPU-only
# machine (the script already pins paddle to CPU above).
pytorch_state_dict = torch.load('../../t2p_modle/xnli_torch_model.bin',
                                map_location='cpu')

# paddle.nn.Linear stores weight as (in_features, out_features) — the
# transpose of torch.nn.Linear's (out_features, in_features). Without the
# transpose paddle skips the FFN weights with e.g.:
#   "Skip loading for ffns.0.lin1.weight. ffns.0.lin1.weight receives a
#    shape [4096, 1024], but the expected shape is [1024, 4096]."
# `args` is a mapping (it is splatted with **), so read the layer count from
# it when present; fall back to the original hard-coded 12 layers.
n_layers = dict(args).get('n_layers', 12)
transpose_keys = {
    f'ffns.{layer}.lin{i}.weight'
    for i in (1, 2)
    for layer in range(n_layers)
}

paddle_state_dict = OrderedDict()
for k, v in pytorch_state_dict.items():
    if k in transpose_keys:
        v = v.transpose(0, 1)
    # detach() is the documented way to drop the autograd graph; .cpu() is a
    # no-op for CPU tensors but keeps this safe for any device.
    paddle_state_dict[k] = v.detach().cpu().numpy().astype(np.float32)

paddle_dump_path = '../../t2p_modle/xnli_paddle_model.bin'
paddle.save(paddle_state_dict, paddle_dump_path)

# Round-trip check: reload the saved dict and load it into the paddle model
# so any key/shape mismatch surfaces immediately.
paddle_dict = paddle.load(paddle_dump_path)
model_paddle.load_dict(paddle_dict)

print()  # NOTE(review): leftover breakpoint anchor — harmless, kept as-is