class Trainer:
    """Training-loop driver for a two-graph (siamese-style) model.

    The model is called once per graph; the loss is computed by the model
    itself via ``model.compute_loss``.

    NOTE(review): ``loss_fn`` is stored but never used -- the loss comes from
    ``model.compute_loss``; confirm whether ``loss_fn`` can be dropped.
    NOTE(review): only the batches are moved to ``device`` here, never the
    model -- presumably callers do ``model.to(device)`` themselves; confirm.
    """

    def __init__(self, model, optimizer, loss_fn, device):
        self.model = model
        self.optimizer = optimizer
        self.loss_fn = loss_fn  # currently unused; kept for API compatibility
        self.device = device
        # Must be assigned by the caller before train() is invoked.
        self.data_loader = None

    def _train_one_step(self, graph0, graph1, gt):
        """Run one optimization step on a single (graph0, graph1, gt) batch.

        :param graph0: first input graph (already on ``self.device``).
        :param graph1: second input graph (already on ``self.device``).
        :param gt: ground truth for the pair.
        :return: scalar loss value for this step (``loss.item()``).
        """
        self.optimizer.zero_grad()
        node_embeddings_1 = self.model(graph0)
        node_embeddings_2 = self.model(graph1)
        loss = self.model.compute_loss(node_embeddings_1, node_embeddings_2, gt)
        loss.backward()
        self.optimizer.step()
        return loss.item()

    def train(self, num_epochs, logger):
        """Train the model for ``num_epochs`` epochs over ``self.data_loader``.

        :param num_epochs: number of epochs to run.
        :param logger: wandb-style logger; ``logger.log({'loss': ...})`` is
            called once per epoch with the summed epoch loss.
        :raises RuntimeError: if ``self.data_loader`` was never assigned.
        """
        if self.data_loader is None:
            # Fail fast with a clear message instead of the opaque
            # "TypeError: 'NoneType' object is not iterable".
            raise RuntimeError(
                "Trainer.data_loader must be set before calling train()"
            )
        for epoch in range(num_epochs):
            self.model.train()
            loss_epoch = 0
            for graph0, graph1, gt in self.data_loader:
                graph0 = graph0.to(self.device)
                graph1 = graph1.to(self.device)
                gt = gt.to(self.device)
                loss_epoch += self._train_one_step(graph0, graph1, gt)
            logger.log({'loss': loss_epoch})