import time
import math
import argparse
import torch
import only_costh_all_size


def parse_args():
    """Parse the hyper-parameters for the wave2vector costh module.

    Uses ``parse_known_args`` so unrecognized command-line options are
    silently ignored instead of raising.
    """
    arg_parser = argparse.ArgumentParser(
        description="some hyper-parameters of wave2vector costh module")
    arg_parser.add_argument("--weight-dim0", type=int, default=768,
                            help="the first dimension of weight")
    arg_parser.add_argument("--weight-dim1", type=int, default=1211,
                            help="the second dimension of weight")
    parsed, _unknown = arg_parser.parse_known_args()
    return parsed


class W2vCosthFunction(torch.autograd.Function):
    """Autograd bridge to the `only_costh_all_size` CUDA extension."""

    @staticmethod
    def forward(ctx, w: torch.Tensor):
        # The extension returns a list whose first entry is the costh
        # output; stash the raw weight plus every extension output so the
        # backward kernel can reuse them.
        extension_outputs = only_costh_all_size.forward(w)
        ctx.save_for_backward(w, *extension_outputs)
        return extension_outputs[0]

    @staticmethod
    def backward(ctx, grad_costh_out: torch.Tensor):
        # Extension expects a contiguous upstream gradient followed by all
        # tensors saved during forward.
        return only_costh_all_size.backward(
            grad_costh_out.contiguous(), *ctx.saved_tensors)


class W2vCosth(torch.nn.Module):
    """Module wrapping the custom-kernel cosine computation of its weight."""

    def __init__(self, weight_dim0, weight_dim1):
        super(W2vCosth, self).__init__()
        self.w_dim0 = weight_dim0
        self.w_dim1 = weight_dim1
        self.w = torch.nn.Parameter(torch.empty(weight_dim0, weight_dim1))
        self.reset_params()

    def reset_params(self):
        # Linear-style init: uniform in +/- 1/sqrt(fan) with fan = w_dim1.
        bound = 1.0 / math.sqrt(self.w_dim1)
        for param in self.parameters():
            param.data.uniform_(-bound, bound)

    def forward(self):
        return W2vCosthFunction.apply(self.w)


class NativeW2vCosth(torch.nn.Module):
    """Pure-PyTorch reference: cosine = column-wise L2 normalization of w."""

    def __init__(self, weight_dim0, weight_dim1):
        super(NativeW2vCosth, self).__init__()
        self.w_dim0 = weight_dim0
        self.w_dim1 = weight_dim1
        self.w = torch.nn.Parameter(torch.empty(weight_dim0, weight_dim1))
        self.reset_params()

    def reset_params(self):
        # Same fan-based uniform init as the optimized module.
        bound = 1.0 / math.sqrt(self.w_dim1)
        for param in self.parameters():
            param.data.uniform_(-bound, bound)

    def forward(self):
        # Normalize each column to unit L2 norm; the clamp guards against
        # division by a zero-norm column.
        col_norm = self.w.norm(p=2, dim=0, keepdim=True).clamp(min=1e-12)
        return self.w / col_norm


if __name__ == "__main__":
    args = parse_args()
    assert args.weight_dim0 == 768, "The first dimension of weight should be 768"
    assert args.weight_dim1 == 1211, "The second dimension of weight should be 1211"

    only_costh_module = W2vCosth(args.weight_dim0, args.weight_dim1).to("cuda:0")
    native_only_costh_module = NativeW2vCosth(args.weight_dim0, args.weight_dim1).to("cuda:0")

    fwd_time = 0.0
    bkwd_time = 0.0
    native_fwd_time = 0.0
    native_bkwd_time = 0.0
    # First 1000 iterations are warm-up; the remaining 10000 are timed.
    for idx in range(11000):
        timed = idx >= 1000

        # CUDA kernels launch asynchronously, so the device must be
        # synchronized both before starting and before stopping each timer —
        # otherwise time.time() deltas measure launch overhead, not kernels.
        if timed:
            torch.cuda.synchronize()
            native_fwd_start = time.time()
        native_module_out = native_only_costh_module()
        if timed:
            torch.cuda.synchronize()
            native_fwd_time += time.time() - native_fwd_start

        if timed:
            torch.cuda.synchronize()
            fwd_start = time.time()
        module_out = only_costh_module()
        if timed:
            torch.cuda.synchronize()
            fwd_time += time.time() - fwd_start

        native_loss = native_module_out.sum()
        loss = module_out.sum()

        if timed:
            torch.cuda.synchronize()
            native_bkwd_start = time.time()
        native_loss.backward()
        if timed:
            torch.cuda.synchronize()
            native_bkwd_time += time.time() - native_bkwd_start

        if timed:
            torch.cuda.synchronize()
            bkwd_start = time.time()
        loss.backward()
        if timed:
            torch.cuda.synchronize()
            bkwd_time += time.time() - bkwd_start

    # total_seconds / 10000 iters * 1e6 us/s == total_seconds * 100 us/iter.
    print(f"Native forward time v.s. optim forward time is {native_fwd_time * 100:.5f} us/iter v.s. "
          f"{fwd_time * 100:.5f} us/iter.")
    print(f"Native backward time v.s. optim backward time is {native_bkwd_time * 100:.5f} us/iter v.s. "
          f"{bkwd_time * 100:.5f} us/iter.")

