import numpy as np
import torch


def fn1():
    """Demonstrate view/reshape: flatten a 4-D tensor and restore it.

    view() and reshape() expose the same API for this use case.
    """
    print("View/reshape------------------->")
    images = torch.randn(4, 1, 28, 28)
    print(images.shape)
    # Flatten each of the 4 samples into a single 784-element row.
    images = images.view(4, 28 * 28 * 1)
    print(images)
    print(images.shape)
    # Restore the original layout; the target shape must hold exactly the
    # same number of elements, otherwise the dimension info is lost.
    print(images.view(4, 1, 28, 28))


def fn2():
    """Demonstrate squeeze/unsqueeze: removing and inserting size-1 dims."""
    print("Squeese/unsqueeze 挤压、维度展开------------------->")
    imgs = torch.randn(4, 1, 28, 28)
    # unsqueeze(k) with k >= 0 inserts a new axis *before* index k;
    # a negative k counts from the end and inserts *after* that position.
    print(imgs.unsqueeze(1).shape)
    print(imgs.unsqueeze(-2).shape)

    vec = torch.tensor([1.1, 1.3])
    print(vec.shape)
    print(vec.unsqueeze(0))  # row form, shape (1, 2)
    print(vec.unsqueeze(1))  # column form, shape (2, 1)

    # Reshape x to (1, 32, 1, 1) so it can broadcast against y; the two
    # must be made rank-compatible before they can be combined.
    x = torch.rand(32)
    y = torch.rand(4, 32, 28, 28)
    print(x.shape)
    print(y.shape)
    x = x.unsqueeze(-1).unsqueeze(-1).unsqueeze(0)
    print(x.shape)

    w = torch.rand(1, 3, 1, 1)
    print(w.shape)
    # squeeze() with no argument drops every size-1 dimension at once.
    print(w.squeeze().shape)
    print(w.squeeze(0).shape)
    print(w.squeeze(-2).shape)
    # Squeezing a dimension whose size is not 1 is a silent no-op.
    print(w.squeeze(1).shape)


# Transpose / t / permute: dimension reordering.
def fn3():
    """Demonstrate t(), transpose(), permute() and the need for contiguous()."""
    print("Transpose/t/permute  转至------------------->")
    mat = torch.rand(2, 3)
    print(mat)
    # t() only works on 2-D tensors.
    print(mat.t())
    print(mat.t().shape)

    b = torch.rand(4, 3, 28, 28)
    print(b)
    # transpose(i, j) swaps exactly two dimensions.
    swapped = b.transpose(1, 3)
    print(swapped.shape)
    # After transpose the storage is no longer contiguous, so contiguous()
    # is required before view() can reshape the data.
    round_trip = b.transpose(1, 3).contiguous().view(4, 3 * 28 * 28).view(4, 28, 28, 3).transpose(1, 3)
    print(round_trip)
    # torch.all(torch.eq(...)) verifies element-wise equality everywhere.
    print(torch.all(torch.eq(b, round_trip)))
    print(torch.all(torch.eq(b, round_trip.contiguous())))

    # Reshaping straight back to (4, 3, 28, 28) without undoing the
    # transpose reinterprets the storage in a different order, so the
    # comparison generally reports False.
    print(torch.all(torch.eq(b, b.transpose(1, 3).contiguous().view(4, 3 * 28 * 28).view(4, 3, 28, 28))))
    # permute reorders all dimensions in a single call.
    print(b.permute(0, 2, 3, 1).shape)
    permuted_back = b.permute(0, 2, 3, 1).contiguous().view(4, 3 * 28 * 28).view(4, 28, 28, 3).permute(0, 3, 1, 2)
    print(torch.all(torch.eq(b, permuted_back)))
    print(permuted_back.shape)


# Expand / repeat: broadcasting a tensor to a larger shape. expand is
# preferred — it returns a view without copying; repeat materializes
# copies and therefore uses more memory.
def fn4():
    """Demonstrate expand() vs repeat() for growing tensor dimensions."""
    print("Expand/repeat------------------->")
    feat = torch.rand(4, 32, 28, 28)
    bias = torch.rand(1, 32, 1, 1)
    print(feat.shape)
    print(bias.shape)
    # expand can only stretch size-1 dimensions: here to 4 batches,
    # 32 channels, 28x28 spatial size.
    print(bias.expand(4, 32, 28, 28).shape)
    # -1 means "leave this dimension unchanged".
    print(bias.expand(-1, 32, 28, 28).shape)
    print(bias.expand(-1, 32, 28, -1).shape)
    # Expanding a non-1 dimension (e.g. 32 -> 33) would raise an error.

    c = torch.rand(4, 32, 1, 1)
    print(c.shape)
    # repeat() multiplies each dimension size by the given factor.
    print(c.repeat(4, 32, 28, 1).shape)
    print(c.repeat(1, 1, 28, 28).shape)


if __name__ == '__main__':
    # Uncomment the demo to run; only fn3 (transpose/permute) is active.
    # fn1()
    # fn2()
    fn3()
    # fn4()
