import matplotlib.pyplot as plt

def train(graph, inputs, labels, epoch=1, lr=0.0001):
    """Run the training loop for a computation graph.

    For each sample the graph runs a forward pass, a backward pass with
    learning rate *lr*, and is then cleared before the next sample.
    Prints the average loss once per epoch.

    Args:
        graph: computation graph exposing ``forward(x=, label=)``,
            ``backward(lr=)`` and ``clear()``.
        inputs: sequence of training samples fed to ``graph.forward``.
        labels: sequence of targets, aligned one-to-one with ``inputs``.
        epoch: number of full passes over the data.
        lr: learning rate forwarded to ``graph.backward``.

    Raises:
        ValueError: if ``inputs`` is empty (the average loss would divide
            by zero) or if ``inputs`` and ``labels`` differ in length.
    """
    if not inputs:
        raise ValueError('train() requires at least one input sample')
    if len(inputs) != len(labels):
        raise ValueError('inputs and labels must have the same length')
    for e in range(epoch):
        losses = []
        # zip pairs each sample with its label; no manual indexing needed
        for x, label in zip(inputs, labels):
            loss = graph.forward(x=x, label=label)
            graph.backward(lr=lr)
            graph.clear()  # reset accumulated graph state before next sample
            losses.append(loss)
        print(f'epoch {e}, avg_loss {sum(losses)/len(losses)}')

def eval(graph, inputs, labels, show=False, show_len=100):
    """Evaluate the graph on a dataset and return the average loss.

    For each sample the graph computes a loss via ``forward`` and a
    prediction via ``predict``, then is cleared.  Optionally plots the
    first *show_len* predictions against the labels.

    NOTE: the name shadows the builtin ``eval``; kept for backward
    compatibility with existing callers.

    Args:
        graph: computation graph exposing ``forward(x=, label=)``,
            ``predict(x=)`` and ``clear()``.
        inputs: sequence of evaluation samples.
        labels: sequence of targets, aligned one-to-one with ``inputs``.
        show: if True, plot predictions vs. labels with matplotlib.
        show_len: number of leading points to include in the plot.

    Returns:
        The average loss over all samples.

    Raises:
        ValueError: if ``inputs`` is empty (average would divide by zero)
            or if ``inputs`` and ``labels`` differ in length.
    """
    if not inputs:
        raise ValueError('eval() requires at least one input sample')
    if len(inputs) != len(labels):
        raise ValueError('inputs and labels must have the same length')
    # 1. Compute per-sample loss and prediction.
    losses = []
    predicts = []
    for x, label in zip(inputs, labels):
        losses.append(graph.forward(x=x, label=label))
        predicts.append(graph.predict(x=x))
        graph.clear()  # reset graph state before the next sample
    avg_loss = sum(losses) / len(losses)
    # 2. Plot predictions against labels to inspect the fit.
    if show:
        n = show_len
        axis = list(range(len(labels)))  # distinct name; don't reuse loop var
        plt.plot(axis[:n], labels[:n])
        plt.plot(axis[:n], predicts[:n])
        plt.show()
    return avg_loss
