from parameters import n

def train(net, loader, loss_func, optimizer, epochs, convergence_threshold=10):
    """Train *net* for up to *epochs* passes over *loader*.

    Stops early when the summed loss of an epoch drops to
    *convergence_threshold* or below, printing the number of iterations
    taken (epochs completed times ``n`` batches per epoch — ``n`` comes
    from the ``parameters`` module; presumably the batch count per epoch,
    TODO confirm).

    Args:
        net: model to train; called as ``net(batch_x)``.
        loader: iterable yielding ``(batch_x, batch_y)`` pairs.
        loss_func: callable ``loss_func(prediction, batch_y)`` returning a
            scalar loss tensor.
        optimizer: optimizer over ``net``'s parameters.
        epochs: maximum number of passes over *loader*.
        convergence_threshold: epoch-loss sum at or below which training
            stops early (default 10, matching the original behavior).

    Returns:
        list[float]: per-batch loss values recorded during training.
    """
    loss_list = []
    for epoch in range(epochs):
        epoch_loss = 0.0  # summed batch losses for this epoch
        for step, (batch_x, batch_y) in enumerate(loader):
            prediction = net(batch_x)
            loss = loss_func(prediction, batch_y)
            # .item() extracts the Python float; the deprecated
            # loss.data.tolist() access is replaced with the same value.
            batch_loss = loss.item()
            epoch_loss += batch_loss
            loss_list.append(batch_loss)
            optimizer.zero_grad()
            loss.backward()
            optimizer.step()
        if epoch_loss <= convergence_threshold:
            print("收敛前迭代次数：", (epoch + 1) * n)
            print()
            return loss_list
        print("epoch ", epoch + 1, "：loss ->", epoch_loss)
    return loss_list
