# Testing filter networks (DenseFilter / ResFilter): JAX/Haiku vs. MindSpore.
# 23-10-26

import os
import sys
import jax
import jax.numpy as jnp
import numpy as np
import joblib
import haiku as hk
import mindspore as ms
import mindspore.nn as ms_nn

from jax import Array, jit
from typing import Optional, Union, List
from jax.nn.initializers import lecun_normal, normal, zeros, ones
from mindspore import load_param_into_net, Tensor, nn, context

# Make the repository root importable so the local packages below resolve.
sys.path.append(os.path.dirname(sys.path[0]))

# Test for utils
# setting context & hyper parameters
# Run MindSpore in graph mode on GPU for all comparisons in this script.
context.set_context(mode=context.GRAPH_MODE, device_target="GPU")

# JAX/Haiku reference implementation vs. the MindSpore port under test.
from cybertron.utils.filter import DenseFilter
from ms_cybertron.filter import DenseFilter as ms_DenseFilter

# Absolute tolerance used by np.allclose when comparing framework outputs.
EPSILON = 1e-3
# Fixed PRNG key so Haiku parameter initialization is reproducible across runs.
rng = jax.random.PRNGKey(42)
# print("=================Test Dense====================")
# ## create and show data for test
# test_input = np.random.uniform(size=(2, 3, 4)) # (B, A, F)

# def dense_fn(x, training=False):
#     dense = DenseFilter(dim_in=4, dim_out=5, activation=jax.nn.relu)(x) # type: ignore
#     return dense
# dense_fn = jax.vmap(dense_fn, in_axes=(0,))
# ts_dense_fn = hk.transform(dense_fn, apply_rng=True)
# params = ts_dense_fn.init(rng, test_input)
# dense_out = jit(ts_dense_fn.apply)(params, rng, test_input)
# print(f"dense_out: shape of {dense_out.shape}")

# _name = 'dense_filter/~/mlp/~/linear'
# _params = hk.data_structures.to_mutable_dict(params)
# ms_dense_fn = ms_DenseFilter(dim_in=4, dim_out=5, activation=ms_nn.ReLU())
# ms_params = {}
# ms_params['dense_layers.mlp.0.weight'] = \
#     ms.Parameter(Tensor(np.array(_params[f'{_name}_0']['w']).T, ms.float32))
# ms_params['dense_layers.mlp.0.bias'] = \
#     ms.Parameter(Tensor(np.array(_params[f'{_name}_0']['b']), ms.float32))
# ms_params['dense_layers.mlp.1.weight'] = \
#     ms.Parameter(Tensor(np.array(_params[f'{_name}_1']['w']).T, ms.float32))
# ms_params['dense_layers.mlp.1.bias'] = \
#     ms.Parameter(Tensor(np.array(_params[f'{_name}_1']['b']), ms.float32))
# load_param_into_net(ms_dense_fn, ms_params)

# ms_test_input = Tensor(test_input, ms.float32)
# ms_dense_out = ms_dense_fn(ms_test_input)
# print(f"ms_dense_out: shape of {ms_dense_out.shape}")

# print("[from jax] dense_out: \n", dense_out)
# print("[from mindspore] ms_dense_out: \n", ms_dense_out.asnumpy())
# dense_same = np.allclose(dense_out, ms_dense_out.asnumpy(), atol=EPSILON)
# print(f"dense filter is the same: {dense_same} in accuracy of {EPSILON}.")

# # calculate the norm of diff
# dense_diff = dense_out - ms_dense_out.asnumpy()
# dense_norm = np.linalg.norm(dense_diff)
# print(f"norm of diff: {dense_norm}")

print("=================Test Residual====================")
from cybertron.utils.filter import ResFilter
from ms_cybertron.filter import ResFilter as ms_ResFilter

## create and show data for test
test_input = np.random.uniform(size=(2, 3, 4)) # (B, A, F)

def res_fn(x):
    """Apply the JAX/Haiku ResFilter (4 -> 5 features) to one (A, F) sample."""
    return ResFilter(dim_in=4, dim_out=5, activation=jax.nn.relu)(x) # type: ignore

# Batch over the leading axis, then turn the function into a Haiku
# init/apply pair so parameters can be extracted and copied below.
res_fn = jax.vmap(res_fn, in_axes=(0,))
ts_res_fn = hk.transform(res_fn, apply_rng=True)
params = ts_res_fn.init(rng, test_input)
res_out = jit(ts_res_fn.apply)(params, rng, test_input)
print(f"res_out: shape of {res_out.shape}")

# Copy the Haiku parameters into the MindSpore ResFilter. The `.T` on each
# weight converts Haiku's (in, out) Linear layout to the (out, in) layout
# MindSpore's Dense expects; biases are copied as-is.
_name = 'res_filter/~/'
_params = hk.data_structures.to_mutable_dict(params)
ms_res_fn = ms_ResFilter(dim_in=4, dim_out=5, activation=ms_nn.ReLU())
ms_params = {}
ms_params['linear.weight'] = \
    ms.Parameter(Tensor(np.array(_params[f'{_name}linear']['w']).T, ms.float32))
ms_params['linear.bias'] = \
    ms.Parameter(Tensor(np.array(_params[f'{_name}linear']['b']), ms.float32))
ms_params['residual.nonlinear.mlp.0.weight'] = \
    ms.Parameter(Tensor(np.array(_params[f'{_name}mlp/~/linear_0']['w']).T, ms.float32))
ms_params['residual.nonlinear.mlp.0.bias'] = \
    ms.Parameter(Tensor(np.array(_params[f'{_name}mlp/~/linear_0']['b']), ms.float32))
ms_params['residual.nonlinear.mlp.1.weight'] = \
    ms.Parameter(Tensor(np.array(_params[f'{_name}mlp/~/linear_1']['w']).T, ms.float32))
ms_params['residual.nonlinear.mlp.1.bias'] = \
    ms.Parameter(Tensor(np.array(_params[f'{_name}mlp/~/linear_1']['b']), ms.float32))
load_param_into_net(ms_res_fn, ms_params)

# Run the MindSpore network on the exact same input.
ms_test_input = Tensor(test_input, ms.float32)
ms_res_out = ms_res_fn(ms_test_input)
print(f"ms_res_out: shape of {ms_res_out.shape}")

print("[from jax] res_out: \n", res_out)
print("[from mindspore] ms_res_out: \n", ms_res_out.asnumpy())
res_same = np.allclose(res_out, ms_res_out.asnumpy(), atol=EPSILON)
print(f"res filter is the same: {res_same} in accuracy of {EPSILON}.")

# calculate the norm of the difference between the two outputs
res_diff = res_out - ms_res_out.asnumpy()
res_norm = np.linalg.norm(res_diff)
print(f"norm of diff: {res_norm}")





