import torch

# --- Tensor creation: empty, random, zeros, and from Python data ---

# Uninitialized memory: values are whatever happened to be in the buffer.
x_uninitialized = torch.empty(5, 3)
# Uniform random values drawn from [0, 1).
x_random = torch.rand(5, 3)
# All zeros with an explicit 64-bit integer dtype.
x_zeros = torch.zeros(5, 3, dtype=torch.long)
# Built directly from a Python list; the int is promoted to float.
x_data = torch.tensor([5.5, 3])

# Print each tensor with its label (same output as printing one by one).
for label, tensor in (
    ("未初始化的张量：\n", x_uninitialized),
    ("随机初始化的张量：\n", x_random),
    ("全为0的张量：\n", x_zeros),
    ("使用数据创建的张量：\n", x_data),
):
    print(label, tensor)

# --- Elementwise addition and NumPy-style indexing ---

y = torch.rand(5, 3)

# Two equivalent spellings of the same addition: the `+` operator
# and the Tensor.add method (same as torch.add(x_random, y)).
result_add1 = x_random + y
result_add2 = x_random.add(y)
print("张量加法 (方法1)：\n", result_add1)
print("张量加法 (方法2)：\n", result_add2)

# Select the second column (index 1) across every row.
index_result = x_random[:, 1]
print("张量索引：\n", index_result)

# --- Reshaping with view(): the result shares the original storage ---

x_reshaped = torch.randn(4, 4)
# Flatten the 4x4 tensor into a single vector of 16 elements.
y_flat = x_reshaped.view(16)
# -1 asks PyTorch to infer that dimension (16 elements / 8 cols = 2 rows).
z_reshaped = x_reshaped.view(-1, 8)

# Report each shape with its label.
for label, shaped in (
    ("改变形状前：\n", x_reshaped),
    ("改变形状后 (y_flat)：\n", y_flat),
    ("改变形状后 (z_reshaped)：\n", z_reshaped),
):
    print(label, shaped.size())

# --- Minimal autograd example: y = w * x + b ---

# Leaf tensors whose gradients autograd will populate.
w = torch.tensor([1.0], requires_grad=True)
b = torch.tensor([0.0], requires_grad=True)
# Plain input; no gradient is tracked for x.
x = torch.tensor([3.0])

# Forward pass (method-call form of w * x + b) builds the graph.
y = w.mul(x).add(b)

# Backward pass: dy/dw = x = 3 and dy/db = 1.
y.backward()

print("w的梯度 (dy/dw)：\n", w.grad)
print("b的梯度 (dy/db)：\n", b.grad)

