# import mxnet as mx
import mxnet.ndarray as nd

from config import Config
from models.qa_net import DepthwiseSeparableConv
from models.qa_net import Embedding
from models.qa_net import EmbeddingHighway
from models.qa_net import EncoderBlock
from models.qa_net import Highway
from models.qa_net import QANet
from models.qa_net import SelfAttention, MultiHeadAttention
from models.qa_net import get_timing_signal_1d

# import json
# import numpy as np


def test_highway():
    """A Highway block must preserve the shape of its input tensor."""
    layer_count = 2
    channels = 5
    kernel = 3

    net = Highway(
        layer_count,
        channels,
        kernel,
        None,  # activation
        2,     # conv_dim
        0.0,   # dropout
    )
    net.initialize()

    in_shape = (3, channels, 20, 20)
    result = net(nd.ones(shape=in_shape))
    assert result.shape == in_shape


def test_depthwise_separable_conv():
    """Apply a depthwise-separable conv twice and print the resulting shape.

    NOTE(review): the shape assertions are intentionally left disabled, as
    in the original version of this test.
    """
    in_channels = 5
    out_channels = 5
    kernel = (3, 1)
    conv_dim = 1
    use_bias = False

    conv = DepthwiseSeparableConv(
        in_channels, out_channels, kernel, conv_dim, use_bias
    )
    conv.initialize()

    if conv_dim == 1:
        in_shape = (3, in_channels, 30)
        # Two consecutive applications, as a crude stability check.
        result = conv(conv(nd.ones(in_shape)))
        print(result.shape, in_shape)
        # assert (out.shape == shape)
    elif conv_dim == 2:
        in_shape = (3, in_channels, 30, 30)
        result = conv(conv(nd.ones(in_shape)))
        print(result.shape, in_shape)
        print(result)
        # assert(out.shape == shape)


def test_self_attn():
    """Smoke-test SelfAttention on a small random batch."""
    attn = SelfAttention(
        18,    # query_units
        2,     # num_heads
        True,  # use_bias
        6,     # out_ch
        0.0,   # dropout
    )
    attn.initialize()

    seq_len = 10
    inputs = nd.random.normal(scale=0.6, shape=(3, seq_len, 8))
    result = attn(inputs)
    print(result.shape)


def test_multi_head_attn():
    """Smoke-test MultiHeadAttention with a random input batch."""
    attn = MultiHeadAttention(10, 2)  # units, num_heads
    attn.initialize()

    seq_len = 10
    inputs = nd.random.normal(scale=0.6, shape=(3, seq_len, 8))
    print(attn(inputs))


def test_load_emb():
    """Load pretrained char/word embeddings from Config paths and embed a tiny batch."""
    emb = Embedding(
        pretrained_ch_emb_file=Config.char_emb_file,
        pretrained_word_emb_file=Config.word_emb_file,
    )
    emb.initialize()

    word_ids = nd.array([[1, 2], [3, 4]])
    char_ids = nd.array([[1, 2], [3, 4]])
    print(emb(char_ids, word_ids))


def test_encoder_block():
    """Smoke-test a forward pass through EncoderBlock."""
    channels = 8
    seq_len = 10

    # Draw the input before building the block so the global RNG state
    # matches the original test exactly.
    inputs = nd.random.normal(scale=0.6, shape=(3, channels, seq_len))

    block = EncoderBlock(
        30,        # num_blocks
        10,        # num_conv_layers
        channels,  # num_ch
        3,         # k
        10,        # query_units
        2,         # num_heads
        True,      # use_bias
        0.0,       # dropout
    )
    block.initialize()

    block(inputs)


def test_embedding_highway():
    """Run character and word embeddings through EmbeddingHighway and print the result."""
    ch_dim = 10
    word_dim = 20

    net = EmbeddingHighway(
        ch_dim,
        word_dim,
        30,   # out_ch
        3,    # k
        0.5,  # dropout
        2,    # num_highway
    )
    net.initialize()

    seq_len = 20
    max_chars = 6
    char_emb = nd.ones(shape=(3, seq_len, max_chars, ch_dim))
    word_emb = nd.ones(shape=(3, seq_len, word_dim))

    print(net(char_emb, word_emb))


def test_reshape():
    """Verify that nd.arange followed by reshape yields the requested shape.

    The original test built the array but asserted nothing; 10 * 5 * 4 == 200,
    so the reshape consumes every element exactly once and the shape check is safe.
    """
    a = nd.arange(0, 200).reshape(shape=(10, 5, 4))
    assert a.shape == (10, 5, 4)


def test_qa_net():
    """End-to-end forward pass through QANet with synthetic token ids."""
    net = QANet(Config())
    net.initialize()

    batch = 3
    q_len = 10
    p_len = 20
    ch_dim = 8

    # Paragraph and question character-level inputs.
    p_ch = nd.arange(0, batch * p_len * ch_dim).reshape((batch, p_len, ch_dim))
    q_ch = nd.arange(0, batch * q_len * ch_dim).reshape((batch, q_len, ch_dim))

    # Paragraph and question word-level inputs.
    p_word = nd.arange(0, batch * p_len).reshape((batch, p_len))
    q_word = nd.arange(0, batch * q_len).reshape((batch, q_len))

    print(net(p_ch, p_word, q_ch, q_word))


def test_get_timing_signal_1d():
    """Print the positional-encoding tensor produced for a tiny configuration."""
    signal = get_timing_signal_1d(10, 10, 3)
    print(signal)
