# Testing hyperformer blocks.
# 23-10-25

import os
import sys
import jax
import jax.numpy as jnp
import numpy as np
import joblib
import haiku as hk
import mindspore as ms

from jax import Array, jit
from typing import Optional, Union, List
from jax.nn.initializers import lecun_normal, normal, zeros, ones
from mindspore import load_param_into_net, Tensor, nn, context

sys.path.append(os.path.dirname(sys.path[0]))

# Test for utils
# setting context & hyper parameters
context.set_context(mode=context.GRAPH_MODE, device_target="GPU")

from cybertron.utils.base import OuterProduct, Transition
from ms_cybertron.interaction.hyperformer import OuterProduct as ms_OuterProduct
from ms_cybertron.interaction.hyperformer import Transition as ms_Transition

EPSILON = 1e-2  # relative tolerance for the JAX-vs-MindSpore allclose comparison
rng = jax.random.PRNGKey(42)  # single PRNG key reused for both init and apply
print("=================Test Outerproduct====================")
np.random.seed(42)  # fix the NumPy seed so the test input is reproducible
test_x = np.random.uniform(size=(2, 3, 4)) # (B, A, F)

def forward_fn(x, training=False):
    """Instantiate an OuterProduct block and run it on `x`.

    Meant to be wrapped by hk.transform; `training` is accepted for
    interface parity but not used here.
    """
    block = OuterProduct(dim_feature=4,
                         dim_outerproduct=8,
                         key=rng)  # type: ignore
    return block(x)

# Vectorize over the leading batch axis, then lift to a pure (init, apply) pair.
# NOTE(review): in_axes=(0) has no trailing comma, so it is just the int 0
# (map axis 0 of the single positional argument) — presumably intended.
forward_fn = jax.vmap(forward_fn, in_axes=(0))
outer_product_fn = hk.transform(forward_fn, apply_rng=True)
params = outer_product_fn.init(rng, test_x)
# jit-compile apply and produce the reference output for the comparison below.
outer_product = jit(outer_product_fn.apply)(params, rng, test_x)
print("outer_product: ", outer_product.shape)

# Map the Haiku parameter tree onto the MindSpore module's named parameters.
_params = hk.data_structures.to_mutable_dict(params)
ms_outer_product_fn = ms_OuterProduct(act_dim=4, 
                                      num_output_channel=4, 
                                      dim_outer_product=8,)
ms_params = {}
# Weight matrices are transposed (.T) — assumes Haiku stores Linear weights as
# (in, out) while the MindSpore layer expects (out, in); biases copy unchanged.
# TODO(review): confirm against the ms_OuterProduct implementation.
ms_params["right_projection_weights"] = \
    ms.Parameter(Tensor(np.array(_params['outer_product']['right_weights']).T, ms.float32))
ms_params["right_projection_biases"] = \
    ms.Parameter(Tensor(np.array(_params['outer_product']['right_bias']), ms.float32))
ms_params["left_projection_weights"] = \
    ms.Parameter(Tensor(np.array(_params['outer_product']['left_weights']).T, ms.float32))
ms_params["left_projection_biases"] = \
    ms.Parameter(Tensor(np.array(_params['outer_product']['left_bias']), ms.float32))
ms_params["linear_output_weights"] = \
    ms.Parameter(Tensor(np.array(_params['outer_product']['output_weights']).T, ms.float32))
ms_params["o_biases"] = \
    ms.Parameter(Tensor(np.array(_params['outer_product']['output_bias']), ms.float32))
# LayerNorm scale/offset map to MindSpore's gamma/beta.
ms_params["layer_norm.gamma"] = \
    ms.Parameter(Tensor(np.array(_params['outer_product/~/norm_fn']['scale']), ms.float32))
ms_params["layer_norm.beta"] = \
    ms.Parameter(Tensor(np.array(_params['outer_product/~/norm_fn']['offset']), ms.float32))

load_param_into_net(ms_outer_product_fn, ms_params)
ms_test_x = Tensor(np.array(test_x), ms.float32)
mask = Tensor(np.ones((2, 3), dtype=np.bool_), ms.bool_)  # all-True (B, A) mask
mask_norm = None
ms_outer_product = ms_outer_product_fn(ms_test_x, mask, mask_norm)
print("ms_outer_product: ", ms_outer_product.shape)

print("[from jax] outer_product: \n", outer_product)
print("[from mindspore] outer_product: \n", ms_outer_product.asnumpy())
# NOTE: only rtol is set; atol stays at the NumPy default (1e-8).
out_same = np.allclose(outer_product, ms_outer_product.asnumpy(), rtol=EPSILON)
print(f"outer_product is same: {out_same} in accuracy of {EPSILON}")
# calculate the Frobenius norm of the element-wise difference
diff = outer_product - ms_outer_product.asnumpy()
norm = np.linalg.norm(diff)
print(f"norm of diff: {norm}")


print("=================Test Transition====================")
# Pair-representation-shaped input for the Transition block.
test_g = np.random.uniform(size=(2, 3, 3, 4)) # (B, A, A, F)

def forward_fn2(x, training=False):
    """Instantiate a Transition block and run it on `x`.

    Meant to be wrapped by hk.transform; `training` is accepted for
    interface parity but not used here.
    """
    layer = Transition(dim_feature=4, n_transition=2)  # type: ignore
    return layer(x)
# Same pipeline as the OuterProduct test: vmap over batch, transform to a pure
# (init, apply) pair, then jit-apply to get the JAX reference output.
# NOTE(review): in_axes=(0) is just the int 0 (no trailing comma).
forward_fn2 = jax.vmap(forward_fn2, in_axes=(0))
transition_fn = hk.transform(forward_fn2, apply_rng=True)
params = transition_fn.init(rng, test_g)
transition = jit(transition_fn.apply)(params, rng, test_g)
print("transition: ", transition.shape)

# Map the Haiku Transition parameters onto the MindSpore module's names.
_params = hk.data_structures.to_mutable_dict(params)
ms_transition_fn = ms_Transition(layer_norm_dim=4, 
                                 num_intermediate_factor=2, 
                                 )
ms_params = {}
# Weights transposed (.T) — assumes Haiku (in, out) vs MindSpore (out, in)
# layout; biases copy unchanged. TODO(review): confirm in ms_Transition.
ms_params['transition1_weights'] = \
    ms.Parameter(Tensor(np.array(_params['transition/~/transition_1']['w']).T, ms.float32))
ms_params['transition1_biases'] = \
    ms.Parameter(Tensor(np.array(_params['transition/~/transition_1']['b']), ms.float32))
ms_params['transition2_weights'] = \
    ms.Parameter(Tensor(np.array(_params['transition/~/transition_2']['w']).T, ms.float32))
ms_params['transition2_biases'] = \
    ms.Parameter(Tensor(np.array(_params['transition/~/transition_2']['b']), ms.float32))
# LayerNorm scale/offset map to gamma/beta.
ms_params['layer_norm.gamma'] = \
    ms.Parameter(Tensor(np.array(_params['transition/~/norm']['scale']), ms.float32))
ms_params['layer_norm.beta'] = \
    ms.Parameter(Tensor(np.array(_params['transition/~/norm']['offset']), ms.float32))

load_param_into_net(ms_transition_fn, ms_params)
ms_test_g = Tensor(np.array(test_g), ms.float32)
ms_transition = ms_transition_fn(ms_test_g, None)  # second arg: no mask
print("ms_transition: ", ms_transition.shape)

print("[from jax] transition: \n", transition)
print("[from mindspore] transition: \n", ms_transition.asnumpy())
# NOTE: only rtol is set; atol stays at the NumPy default (1e-8).
out_same = np.allclose(transition, ms_transition.asnumpy(), rtol=EPSILON)
print(f"transition is same: {out_same} in accuracy of {EPSILON}")
# calculate the Frobenius norm of the element-wise difference
diff = transition - ms_transition.asnumpy()
norm = np.linalg.norm(diff)
print(f"norm of diff: {norm}")
# Last modified: 2023-10-26