import torch

# t = torch.ones((2, 3))
# t_0 = torch.cat([t, t], dim=0)  # shape:torch.Size([4, 3])
# t_1 = torch.cat([t, t, t], dim=1)  # shape:torch.Size([2, 9])

# print("t_0:{} shape:{}\nt_1:{} shape:{}".format(t_0, t_0.shape, t_1, t_1.shape))

# t = torch.ones((2, 3))
# t_stack = torch.stack([t, t, t], dim=0)
# t_stack:tensor([[[1., 1., 1.],
#          [1., 1., 1.]],
#
#         [[1., 1., 1.],
#          [1., 1., 1.]],
#
#         [[1., 1., 1.],
#          [1., 1., 1.]]])
# shape:torch.Size([3, 2, 3])

# print("\nt_stack:{} shape:{}".format(t_stack, t_stack.shape))

# a = torch.ones((2, 7))  # 7
# list_of_tensors = torch.chunk(a, dim=1, chunks=3)  # 3
# for idx, t in enumerate(list_of_tensors):
#     print("第{}个张量：{}, shape is {}".format(idx + 1, t, t.shape))
# 第1个张量：tensor([[1., 1., 1.],
# #         [1., 1., 1.]]), shape is torch.Size([2, 3])
# # 第2个张量：tensor([[1., 1., 1.],
# #         [1., 1., 1.]]), shape is torch.Size([2, 3])
# # 第3个张量：tensor([[1.],
# #         [1.]]), shape is torch.Size([2, 1])

# t = torch.ones((2, 5))
# list_of_tensors = torch.split(t, [2, 1, 2], dim=1)
# for idx, t in enumerate(list_of_tensors):
#     print("第{}个张量：{}, shape is {}".format(idx, t, t.shape))
# 第0个张量：tensor([[1., 1.],
#         [1., 1.]]), shape is torch.Size([2, 2])
# 第1个张量：tensor([[1.],
#         [1.]]), shape is torch.Size([2, 1])
# 第2个张量：tensor([[1., 1.],
#         [1., 1.]]), shape is torch.Size([2, 2])


# t = torch.randint(0, 9, size=(3, 3))
# idx = torch.tensor([0, 2], dtype=torch.long)  # index must be a long tensor, not float
# t_select = torch.index_select(t, dim=0, index=idx)
# print("t:\n{}\nt_select:\n{}".format(t, t_select))
# # t:
# # tensor([[7, 0, 7],
# #         [0, 6, 0],
# #         [1, 7, 6]])
# # t_select:
# # tensor([[7, 0, 7],
# #         [1, 7, 6]])

# t = torch.randint(0, 9, size=(3, 3))
# mask = t.ge(5)  # ge means "greater than or equal"; also gt: greater than, le: less than or equal, lt: less than
# t_select = torch.masked_select(t, mask)
# print("t:\n{}\nmask:\n{}\nt_select:\n{} ".format(t, mask, t_select))
# t:
# tensor([[8, 7, 6],
#         [7, 4, 8],
#         [2, 4, 5]])
# mask:
# tensor([[ True,  True,  True],
#         [ True, False,  True],
#         [False, False,  True]])
# t_select:
# tensor([8, 7, 6, 7, 8, 5])

# t = torch.randperm(8)
# t_reshape = torch.reshape(t, (-1, 2, 2))    # -1
# print("t:{}\nt_reshape:\n{}".format(t, t_reshape))
# t[0] = 1024
# print("t:{}\nt_reshape:\n{}".format(t, t_reshape))
# print("t.data 内存地址:{}".format(id(t.data)))
# print("t_reshape.data 内存地址:{}".format(id(t_reshape.data)))
# t:tensor([7, 0, 2, 4, 1, 6, 3, 5])
# t_reshape:
# tensor([[[7, 0],
#          [2, 4]],
#
#         [[1, 6],
#          [3, 5]]])
# t:tensor([1024,    0,    2,    4,    1,    6,    3,    5])
# t_reshape:
# tensor([[[1024,    0],
#          [   2,    4]],
#
#         [[   1,    6],
#          [   3,    5]]])
# t.data 内存地址:2337882159928
# t_reshape.data 内存地址:2337882159928

# t = torch.rand((2, 3, 4))
# t_transpose = torch.transpose(t, dim0=1, dim1=2)
# print("t shape:{}\nt_transpose shape: {}".format(t.shape, t_transpose.shape))
# t shape:torch.Size([2, 3, 4])
# t_transpose shape: torch.Size([2, 4, 3])

# t = torch.rand((1, 2, 3, 1))
# t_sq = torch.squeeze(t)
# t_0 = torch.squeeze(t, dim=0)
# t_1 = torch.squeeze(t, dim=1)
# print(t.shape)     # torch.Size([1, 2, 3, 1])
# print(t_sq.shape)  # torch.Size([2, 3])
# print(t_0.shape)   # torch.Size([2, 3, 1])
# print(t_1.shape)   # torch.Size([1, 2, 3, 1])

# Demo: element-wise add with a scalar multiplier.
# torch.add(input, other, alpha=1) computes input + alpha * other.
# NOTE: the legacy positional form torch.add(t_0, 10, t_1) was removed in
# modern PyTorch and now raises a TypeError; the scalar multiplier must be
# passed via the alpha= keyword.
t_0 = torch.randn((3, 3))
t_1 = torch.ones_like(t_0)
t_add = torch.add(t_0, t_1, alpha=10)  # t_0 + 10 * t_1  (t_1 is all ones, so t_0 + 10)
print("t_0:\n{}\nt_1:\n{}\nt_add_10:\n{}".format(t_0, t_1, t_add))
# t_0:
# tensor([[-0.1121,  0.7929,  0.4945],
#         [ 0.2914,  0.1901,  1.6277],
#         [ 0.0409, -0.6093, -0.8580]])
# t_1:
# tensor([[1., 1., 1.],
#         [1., 1., 1.],
#         [1., 1., 1.]])
# t_add_10:
# tensor([[ 9.8879, 10.7929, 10.4945],
#         [10.2914, 10.1901, 11.6277],
#         [10.0409,  9.3907,  9.1420]])
















