import torch

from utils.logging_utils import get_logger

# Module-level logger from the project's logging helper; shared by all
# functions in this file.
logger = get_logger()

def total_loss_isnan_or_inf(total_loss):
    """Abort the process if the loss tensor contains any NaN or Inf.

    Args:
        total_loss: a torch tensor (any shape) holding the training loss.

    Returns:
        None when the loss is finite.

    Raises:
        SystemExit: with exit code 1 when NaN/Inf is detected. The original
            code used ``print`` + bare ``exit()``, which is a site-module
            convenience and terminates with status 0 even on this failure
            path; we log through the module logger and exit non-zero instead.
    """
    if torch.isnan(total_loss).any() or torch.isinf(total_loss).any():
        logger.error("损失值为 NaN/Inf！")
        raise SystemExit(1)

def model_parameters_isnan_or_inf(model):
    """Abort the process if any parameter of *model* contains NaN or Inf.

    Args:
        model: a ``torch.nn.Module`` exposing ``named_parameters()``, or
            ``None`` (in which case the check is skipped entirely).

    Returns:
        None when all parameters are finite (or model is None).

    Raises:
        SystemExit: with exit code 1 on the first bad parameter found. The
            original code used ``print`` + bare ``exit()`` (status 0); we log
            through the module logger and exit non-zero instead.
    """
    if model is None:
        return
    for name, param in model.named_parameters():
        if torch.isnan(param).any() or torch.isinf(param).any():
            logger.error(f"参数 {name} 包含 NaN/Inf！")
            raise SystemExit(1)

def loss_batch_handle(title, writer, avg_loss, global_step):
    """Record the per-batch average loss as a TensorBoard scalar.

    Args:
        title: tag name under which the scalar appears in TensorBoard.
        writer: a SummaryWriter-like object exposing ``add_scalar()``.
        avg_loss: averaged loss value for this step.
        global_step: monotonically increasing step index for the x-axis.
    """
    # Removed a stale block of commented-out print/logger calls; the writer
    # call below is the single source of truth for loss reporting.
    writer.add_scalar(title, avg_loss, global_step=global_step)


def lr_batch_handle(title, writer, current_lr, global_step):
    """Record the current learning rate as a TensorBoard scalar.

    Args:
        title: tag name under which the scalar appears in TensorBoard.
        writer: a SummaryWriter-like object exposing ``add_scalar()``.
        current_lr: learning rate in effect at this step.
        global_step: monotonically increasing step index for the x-axis.
    """
    record_scalar = writer.add_scalar
    record_scalar(title, current_lr, global_step=global_step)