# Minibatch stochastic gradient descent: features (n, p), labels (n, 1)
import torch
def data_iter(batch_size, features, labels):
    """Yield minibatches of (features, labels) in a random order.

    Args:
        batch_size: number of examples per minibatch; the final batch may
            be smaller when ``len(features)`` is not a multiple of it.
        features: indexable tensor of shape (n, p).
        labels: indexable tensor with n rows, aligned with ``features``.

    Yields:
        (X, y) pairs covering every example exactly once per pass.
    """
    num_examples = len(features)
    # Shuffle the example order so each epoch sees a different batch split.
    # torch.randperm avoids needing the (previously un-imported) random module.
    indices = torch.randperm(num_examples)
    for i in range(0, num_examples, batch_size):
        batch_indices = indices[i:min(i + batch_size, num_examples)]
        # Fixed: yield must be inside the loop (original yielded once, after
        # the loop), and features must be indexed by batch_indices (original
        # used the scalar batch_size, returning a single wrong row).
        yield features[batch_indices], labels[batch_indices]
# Hyperparameters.
p = 2            # number of input features per example
num_epochs = 10  # full passes over the training data
batch_size = 10  # minibatch size (was undefined in the original -- NameError)
eta = 0.03       # learning rate (was undefined in the original -- NameError)

# Model parameters for the linear model y_hat = X @ w + b:
# w is (p, 1), initialized with small Gaussian noise; b is a scalar bias.
w = torch.normal(0, 0.01, size=(p, 1), requires_grad=True)
b = torch.zeros(1, requires_grad=True)

# NOTE(review): `features` (shape (n, p)) and `labels` (shape (n, 1)) must be
# defined before this loop runs -- they are not defined in this file as shown.
for epoch in range(num_epochs):
    for X, y in data_iter(batch_size, features, labels):
        y_hat = X @ w + b  # @ performs matrix multiplication
        # Mean squared-error loss (the 1/2 factor simplifies the gradient).
        loss = ((y_hat - y) ** 2 / 2).mean()
        loss.backward()  # fixed typo: was `bachward()`
        # The SGD step must not be recorded by autograd, hence no_grad().
        with torch.no_grad():
            for param in [w, b]:
                # Fixed: step along the gradient (original used the parameter
                # itself, `eta * param`, which merely shrinks the weights).
                param -= eta * param.grad
                param.grad.zero_()  # reset for the next backward pass