import torch
from torchvision import transforms
from PIL import Image
from model.DMPHN import DMPHN_With_MDSF2
import os
import torchvision
import argparse
from zhangUtils.util import *

# Command-line configuration for the evaluation run.
parser = argparse.ArgumentParser(description='Test the DMPHN model.')
parser.add_argument(
    '--checkpoint_path',
    type=str,
    default='/root/code/hrs-mdsf/checkpoints/checkpoint_min_loss.pth',
    help='Path to the checkpoint file',
)
parser.add_argument(
    '--test_image_path',
    type=str,
    default='/root/data/NH-HAZE2/test/hazy',
    help='Path to the test images',
)
parser.add_argument(
    '--clean_image_path',
    type=str,
    default='/root/data/NH-HAZE2/test/clean',
    help='Path to the clean images for evaluation',
)
parser.add_argument(
    '--save_image_path',
    type=str,
    default='/root/code/hrs-mdsf/result',
    help='Path to save the results',
)

args = parser.parse_args()

# Load the model and restore the trained weights.
model = DMPHN_With_MDSF2()
model = torch.nn.DataParallel(model)
# map_location='cpu' lets the checkpoint load regardless of which device it
# was saved from; the parameters are moved to the GPU right after.
checkpoint = torch.load(args.checkpoint_path, map_location='cpu')
model.load_state_dict(checkpoint['model_state_dict'])
# BUG FIX: DataParallel requires the wrapped module's parameters to live on
# device_ids[0] before a forward pass. The inputs below are sent to CUDA, so
# without this move model(image) raises a CPU/GPU device-mismatch error.
model = model.cuda()

model.eval()
# Create the output directory on first run.
if not os.path.exists(args.save_image_path):
    os.makedirs(args.save_image_path)
    print(f'{args.save_image_path} mkdir success')

# Preprocessing pipeline: convert a PIL image into a float tensor in [0, 1]
# with channel-first (C, H, W) layout.
transform = transforms.Compose([transforms.ToTensor()])

# Run inference on every file under the test directory and save the restored
# images with their original filenames.
for root, _dirs, files in os.walk(args.test_image_path):
    for file in files:
        image_path = os.path.join(root, file)

        image = Image.open(image_path).convert('RGB')
        image = transform(image).unsqueeze(0).cuda()  # (1, 3, H, W)

        with torch.no_grad():
            output = model(image)

        # output[3] is taken as the final full-resolution restoration —
        # presumably the last stage of the multi-scale output list; TODO
        # confirm against DMPHN_With_MDSF2's return order.
        # BUG FIX: the previous per-image min-max rescaling stretched each
        # image's dynamic range independently, altering brightness/contrast
        # and corrupting the PSNR/SSIM comparison performed afterwards. The
        # network output is expected in [0, 1]; clamp out-of-range values
        # instead of renormalizing.
        restored = output[3].squeeze(0).clamp(0.0, 1.0)
        restored = restored.cpu().numpy().transpose(1, 2, 0)  # -> (H, W, 3)
        # Round to nearest instead of truncating when quantizing to uint8.
        restored = (restored * 255.0).round().astype('uint8')
        Image.fromarray(restored).save(os.path.join(args.save_image_path, file))
        print(f'{file} saved')


# Compare the saved results against the ground-truth clean images.
# `evaluation` comes from zhangUtils.util (star import at the top of the
# file) — presumably it pairs files between the two directories and returns
# average SSIM and PSNR; verify against its definition.
ssim_val, psnr_val = evaluation(args.clean_image_path, args.save_image_path)

print('ssim: ', ssim_val)
print('psnr: ', psnr_val)