# Training entry point for the DMPHN dehazing model on the NH-HAZE dataset.
from zhangUtils.train import *
from model import DMPHN
from loss import CustomLoss_function
from datasets import NH_HazeDataset
from torchvision import transforms
import torch
import argparse
# NOTE(review): StepLR is imported but never used below (the scheduler is
# MultiStepLR, referenced via torch.optim.lr_scheduler) — candidate for removal.
from torch.optim.lr_scheduler import StepLR
import os
from zhangUtils.util import *
import warnings

# Silence all warnings (e.g. torch/torchvision deprecation noise during long runs).
warnings.filterwarnings("ignore")

def _build_arg_parser():
    """Assemble the command-line parser for the DMPHN training run."""
    p = argparse.ArgumentParser(description="Deep Multi-Patch Hierarchical Network")
    p.add_argument("-e", "--epochs", type=int, default=10000)
    p.add_argument("-se", "--start_epoch", type=int, default=200)
    p.add_argument("-b", "--batchsize", type=int, default=8)
    p.add_argument("-c", "--cropsize", type=int, default=128)
    p.add_argument("-l", "--learning_rate", type=float, default=0.0001)
    p.add_argument("--data_path", type=str, default='/root/project/dataset/data', help="the path of NH-HAZE dataset")
    p.add_argument("--checkpoint_path", type=str, default='/root/project/code/hrs-mdsf/checkpoint/checkpoint_2142.pth', help="path to save checkpoints")
    return p


args = _build_arg_parser().parse_args()

# Refuse to run with an explicitly emptied dataset path.
if args.data_path == '':
    raise ValueError("data_path must be set")

# Model, loss, optimizer, and learning-rate schedule.
model = DMPHN()
criterion = CustomLoss_function()
# BUGFIX: honor the --learning_rate CLI flag instead of hard-coding 0.0001.
# The flag's default is 0.0001, so default behavior is unchanged.
optimizer = torch.optim.Adam(model.parameters(), lr=args.learning_rate)
scheduler = torch.optim.lr_scheduler.MultiStepLR(
    optimizer,
    milestones=[3000, 5000, 8000],  # epochs at which the LR is decayed
    gamma=0.5,                      # LR is halved at each milestone
)
# Training split: paired hazy/ground-truth images with on-the-fly random crop,
# rotation, and color augmentation.
# NOTE(review): hazed_image_files and dehazed_image_files both point to
# 'train/train.txt' (the original code carried a "make changes here" marker
# on this spot) — confirm whether the ground-truth list should be a
# different file.
_train_transform = transforms.Compose([
    transforms.ToTensor()
])
train_dataset = NH_HazeDataset(
    hazed_image_files=os.path.join(args.data_path, 'train/train.txt'),
    dehazed_image_files=os.path.join(args.data_path, 'train/train.txt'),
    root_dir=os.path.join(args.data_path, 'train/'),
    crop=True,
    crop_size=args.cropsize,
    rotation=True,
    color_augment=True,
    transform=_train_transform,
)
train_loader = torch.utils.data.DataLoader(
    train_dataset,
    batch_size=args.batchsize,
    shuffle=True,    # reshuffle training pairs every epoch
    num_workers=4,
)
# Evaluation split.
# NOTE(review): random crop, rotation, and color augmentation are enabled
# here too, and the loader shuffles — unusual for a test set and it makes
# evaluation non-deterministic. Preserved as-is; worth confirming.
_test_transform = transforms.Compose([
    transforms.ToTensor()
])
test_dataset = NH_HazeDataset(
    hazed_image_files=os.path.join(args.data_path, 'test/test.txt'),
    dehazed_image_files=os.path.join(args.data_path, 'test/test.txt'),
    root_dir=os.path.join(args.data_path, 'test/'),
    crop=True,
    crop_size=args.cropsize,
    rotation=True,
    color_augment=True,
    transform=_test_transform,
)

test_loader = torch.utils.data.DataLoader(
    test_dataset,
    batch_size=args.batchsize,
    shuffle=True,
    num_workers=0,  # single-process loading for evaluation
)


# Launch training; every `testEpoch` epochs the model is evaluated on the
# test loader, scored with SSIM (calculate_ssim comes from zhangUtils.util).
train(
    model=model,
    criterion=criterion,
    optimizer=optimizer,
    scheduler=scheduler,
    trainloader=train_loader,
    testloader=test_loader,
    epochs=args.epochs,
    testEpoch=20,
    checkpoint_path=args.checkpoint_path,
    metric=calculate_ssim,
)

# Environment pin: torch 1.12.1, py3.10, cuda 11.3, cudnn 8.3.2