import torch
import numpy as np


# Autograd experiment: matrix product mm1 = a @ b, then backprop an
# all-ones upstream gradient through it. For C = A @ B with upstream
# gradient G: dL/dA = G @ B.T and dL/dB = A.T @ G.
grad = torch.ones(2, 2)  # upstream gradient seed, same shape as mm1
a = torch.tensor([[6., 5., 4.], [11., 12., 13.]], requires_grad=True)
b = torch.tensor([[1., 2.], [3., 4.], [5., 6.]], requires_grad=True)

mm1 = a @ b          # (2, 3) @ (3, 2) -> (2, 2)
mm1.backward(grad)   # populates a.grad = grad @ b.T, b.grad = a.T @ grad

print(mm1)
print(a.grad)
print(b.grad)

# a = torch.tensor([1, 2, 3, 4])
# b = a.view(2, 2)
# c = b[0]
# d = c.view(2, 1) #.expand(2, 3).reshape(6, 1).flatten().reshape(1, 6)
# e = d.permute(1, 0)
# print(a.storage().data_ptr())
# print(b.storage().data_ptr())
# print(c.storage().data_ptr())
# print(d.storage().data_ptr())
# print(e.storage().data_ptr())
# print(a.is_contiguous())
# print(b.is_contiguous())
# print(c.is_contiguous())
# print(d.is_contiguous())
# print(e.is_contiguous())

# a = torch.tensor(np.array([float(x) for x in range(16)]).reshape([2, 2, 2, 2]), requires_grad=True)
# b = a.permute(1, 3, 0, 2)
# print(a.shape, a.stride())
# print(b.shape, b.stride())
# grad = np.array([1., 1., 2., 2., 3., 3., 4., 4.,
#                  5., 5., 6., 6., 7., 7., 8., 8]).reshape([2, 2, 2, 2])
# print(grad)
# b.backward(torch.tensor(grad))
# print(a.grad.flatten())
# print(a.grad.shape, a.grad.stride())

# a = torch.tensor([1., 2., 3., 3., 4., 5.], requires_grad=True)
# print(a)
# a0 = a.view(2, 3)
# print(a0)
# a1 = a0.permute(1, 0)
# print(a1)
# a2 = a1.reshape(3, 2)
# print(a2)
# a4 = a2[1]
# print(a4)
# a4.backward(torch.tensor([5., 6.]))
# print(a.grad)
