import os
import pathlib
import json
import torch
from torch.utils.data import Dataset, DataLoader
import wandb
from DeepPPG.utils.saver import save_checkpoint
import copy
from typing import Dict, Tuple, List, Optional
from tqdm import tqdm
from contextlib import nullcontext

from DeepPPG.utils.saver import load_checkpoint, save_whole_model, load_whole_model
from DeepPPG.config import PredConfig
from DeepPPG.utils import loss



class BaseTrainer:
    """Skeleton trainer: owns the optimizer, the epoch loop, checkpointing,
    early stopping, and W&B logging. Subclasses implement the per-epoch
    forward pass (``forward_epoch``) and evaluation (``test_all`` /
    ``test_batch``); ``criterion`` and ``select_best_fn`` may also be
    overridden.
    """

    def __init__(self, model: torch.nn.Module,
                 config: PredConfig):
        """
        Initialize the BaseTrainer.

        :param model: The neural network model to be trained.
        :param config: Configuration object with training parameters
            (reads ``lr``, ``weight_decay``, ``resume``, ``model_save_dir``,
            ``num_epochs``, ``early_stopping``, ``metric``).
        """
        self.model = model
        self.config = config
        self.optimizer = torch.optim.Adam(model.parameters(), lr=config.lr, weight_decay=config.weight_decay)

        # Resume from an existing checkpoint if requested; otherwise start at epoch 1.
        if config.resume:
            self.start_epoch, _ = load_checkpoint(model, self.optimizer, config.model_save_dir)
        else:
            self.start_epoch = 1
        self._epoch = int(self.start_epoch)
        # NOTE(review): assumes PredConfig supports item assignment and
        # dict() conversion (dict-like) — confirm against DeepPPG.config.PredConfig.
        config["finish"] = False

        if config.model_save_dir is not None:
            # Create the save directory up front. Previously it was only
            # created inside train(), so dumping config.json here crashed on
            # a fresh run whose directory did not yet exist.
            os.makedirs(config.model_save_dir, exist_ok=True)
            with open(os.path.join(config.model_save_dir, "config.json"), "w") as f:
                json.dump(dict(config), f)

    def criterion(self, batch):
        """Compute the training loss for one batch. To be implemented in subclass."""
        raise NotImplementedError

    def forward_epoch(self, train: bool, dataloader, record_pred, **kwargs) -> Tuple[float, float]:
        """
        Run the model over one full epoch of ``dataloader``. To be implemented
        in subclass.

        :param train: If True, run in training mode (with backprop); otherwise
            evaluation mode.
        :param dataloader: DataLoader yielding batches for this epoch.
        :param record_pred: Whether to record per-sample predictions.
        :return: A pair of floats (``train()`` unpacks two values and discards
            them, relying on ``test_all`` for metrics).
        """
        raise NotImplementedError

    def select_best_fn(self, metric_dict: Dict) -> float:
        """
        Selects the best model based on the provided metric dictionary. May be
        overridden in a subclass.

        :param metric_dict: Dictionary of metrics to determine the best model.
        :return: Value of the selected metric (``val_<config.metric>``),
            which the training loop maximizes.
        """

        return metric_dict[f'val_{self.config.metric}']

    def train(self, train_loader, val_loader) -> Tuple[torch.nn.Module, float]:
        """
        Executes the training process over multiple epochs with early stopping.

        :param train_loader: DataLoader for the training split.
        :param val_loader: DataLoader for the validation split.
        :return: Tuple of the best model (reloaded from disk) and the best
            value of the selected metric.
        """
        early_stopping_count = 0
        best_metric = float('-inf')
        best_model_path = pathlib.Path(self.config.model_save_dir) / 'best_model.pt'
        current_model_path = pathlib.Path(self.config.model_save_dir) / 'current_model.pt'
        os.makedirs(self.config.model_save_dir, exist_ok=True)
        for epoch in range(self.start_epoch, self.config.num_epochs + 1):
            _, _ = self.forward_epoch(train=True, dataloader=train_loader, record_pred=False)
            train_metric_dict = self.test_all(self.model, train_loader, prefix="train")
            val_metric_dict = self.test_all(self.model, val_loader, prefix="val")
            # Always checkpoint the latest state so a crashed run can resume.
            save_checkpoint(epoch, self.model, self.optimizer, current_model_path)
            wandb.log({**train_metric_dict, **val_metric_dict, 'epoch': epoch})
            self._epoch += 1
            metric_to_maximize = self.select_best_fn({**train_metric_dict, **val_metric_dict})
            if metric_to_maximize > best_metric:
                best_metric = metric_to_maximize
                early_stopping_count = 0
                # deepcopy decouples the saved snapshot from the live model
                # that training continues to mutate.
                save_whole_model(copy.deepcopy(self.model), best_model_path)
            else:
                early_stopping_count += 1
                if early_stopping_count >= self.config.early_stopping:
                    print(f"Early stopping at epoch {epoch}")
                    break
        return load_whole_model(best_model_path), best_metric

    @staticmethod
    def test_all(model: torch.nn.Module, dataloader: DataLoader, prefix: str) -> Dict:
        """
        Tests the model on the provided dataset. To be implemented in subclass.

        :param model: The trained model.
        :param dataloader: DataLoader object for the dataset.
        :param prefix: Prefix for the metric names (e.g. "train" or "val").
        :return: Dictionary of metrics keyed as ``<prefix>_<name>``.
        """
        raise NotImplementedError

    @staticmethod
    def test_batch(model: torch.nn.Module, dataloader: DataLoader, prefix: str) -> Dict:
        """Per-batch evaluation counterpart of ``test_all``. To be implemented in subclass."""
        raise NotImplementedError