from typing import Optional, Callable

import torch
from sklearn.metrics import confusion_matrix
from torch.utils.data import DataLoader

from MyDataset import WindIcingDatasetV1, MyDataloader
from MyModel import WindIcingModelV1
from MyUtil import get_logger, metrics_function_v1, get_best_file


class WindIcingTesterV1:
    """Evaluate a trained ``WindIcingModelV1`` checkpoint on a test dataloader.

    Typical usage::

        tester = WindIcingTesterV1(no, criterion=..., device=...)
        tester.load_all()
        tester.load_test_dataloader_union(SomeDatasetCls)
        tester.test()
    """

    def __init__(self, no,
                 criterion=None,
                 device=torch.device('cpu'),

                 loss_function: Optional[Callable] = None,
                 metrics_function: Optional[Callable] = None,
                 ):
        """
        Args:
            no: experiment/run identifier; used to locate the best checkpoint
                file and to name the logger.
            criterion: torch loss callable invoked as ``criterion(preds, labels)``.
            device: device the model and batches are moved to for evaluation.
            loss_function: optional callable ``(preds, labels, criterion) -> loss``;
                takes precedence over ``criterion`` when both are given.
            metrics_function: optional callable ``(pred_classes, label_classes)``
                returning an object exposing ``.print()``.
        """
        self.no = no
        self.device = device
        self.criterion = criterion
        self.loss_function = loss_function
        self.metrics_function = metrics_function

        # Lazily populated by the load_* / set_* methods below.
        self.checkpoint = None
        self.config = None
        self.model = None
        self.test_loader = None

        self.checkpoint_path = get_best_file(no)

        self.logger = get_logger(no)

    def set_test_loader(self, test_loader):
        """Inject a prebuilt test dataloader. Returns self (fluent)."""
        self.test_loader = test_loader
        return self

    def set_config(self, config):
        """Inject a config dict, overriding the checkpoint's. Returns self (fluent)."""
        self.config = config
        return self

    def set_model(self, model):
        """Inject a prebuilt model, skipping checkpoint restore. Returns self (fluent)."""
        self.model = model
        return self

    def load_all(self):
        """Load checkpoint, config and model in order. Returns self (fluent)."""
        self.load_checkpoint()
        self.load_config()
        self.load_model()
        return self

    def load_checkpoint(self):
        """Load the best checkpoint from disk (once) and return it."""
        if self.checkpoint is None:
            self.logger.info(f'load checkpoint from {self.checkpoint_path}')
            # map_location keeps GPU-saved checkpoints loadable on CPU-only
            # hosts and places tensors directly on the evaluation device.
            self.checkpoint = torch.load(self.checkpoint_path,
                                         map_location=self.device)
        return self.checkpoint

    def load_config(self):
        """Extract the training config from the checkpoint (once) and return it."""
        if self.config is None:
            # Ensure the checkpoint is available even if load_checkpoint()
            # was not called explicitly beforehand.
            self.load_checkpoint()
            self.config = self.checkpoint['config']
            assert self.config['no'] == self.no, \
                f"checkpoint no {self.config['no']} != tester no {self.no}"
            self.logger.info(f'config: {self.config}')
        return self.config

    def load_model(self):
        """Rebuild the model from config and restore its weights (once)."""
        if self.model is None:
            # Config (and transitively the checkpoint) must be loaded first.
            self.load_config()
            self.model = WindIcingModelV1(
                seq_len=self.config['seq_len'],
                window_size=self.config['window_size'],
                feature_dim=self.config['feature_dim'],
                device=self.device,
            )
            self.model.load_state_dict(self.checkpoint['state_dict'])

        return self.model

    def test(self):
        """Run one evaluation pass over ``self.test_loader``.

        Logs progress roughly every 5% of batches, the mean batch loss (when a
        loss is configured) and the metrics produced by ``metrics_function``.
        """
        self.logger.info('begin testing')
        self.model.to(self.device)
        self.model.eval()
        # max(1, ...) guards against ZeroDivisionError when the loader
        # has fewer than 20 batches.
        log_interval = max(1, len(self.test_loader) // 20)
        with torch.no_grad():
            total_loss = 0.0
            pred_classes = []
            label_classes = []
            for idx, ((inputs, mask_indexes), labels) in enumerate(self.test_loader):
                # (batch_size, seq_len, window_size, feature_dim)
                inputs = inputs.to(self.device)
                # (batch_size,) — index where the mask starts for each sample
                mask_indexes = mask_indexes.to(self.device)
                # (batch_size,)
                labels = labels.to(self.device)

                # (batch_size, seq_len, class_num)
                outputs = self.model(inputs)
                # Take the output at the last real time step, i.e. one
                # position before the mask begins, for each sample.
                last_steps = mask_indexes - 1
                preds = outputs[torch.arange(last_steps.size(0)), last_steps]

                # preds are logits; argmax yields the predicted class ids.
                predicted_classes = preds.argmax(dim=1)

                pred_classes.extend(predicted_classes.cpu().numpy())
                label_classes.extend(labels.cpu().numpy())

                # Custom loss_function wins over the plain criterion.
                if self.loss_function is not None:
                    loss = self.loss_function(preds, labels, self.criterion)
                    total_loss += loss.item()
                elif self.criterion is not None:
                    loss = self.criterion(preds, labels)
                    total_loss += loss.item()

                if idx % log_interval == 0:
                    self.logger.info(f'idx: {idx} out of {len(self.test_loader)}')

        if self.loss_function is not None or self.criterion is not None:
            self.logger.info(f'Test loss: {total_loss / len(self.test_loader)}')

        if self.metrics_function is not None:
            metrics = self.metrics_function(pred_classes, label_classes)
            metrics.print()

    def load_test_dataloader_one(self, cls, num=15, batch_size=None, *args, **kwargs):
        """Build a test loader from a single dataset source via ``cls``.

        NOTE(review): this takes the FIRST loader returned by
        ``get_dataloader_one`` (nominally the train split) — presumably that
        call yields the whole single-source dataset; confirm against
        MyDataloader before relying on split semantics.
        """
        if batch_size is not None:
            self.config['batch_size'] = batch_size
        train_loader, _, _ = MyDataloader.get_dataloader_one(cls, self.config, num=num, *args, **kwargs)
        self.test_loader = train_loader
        return self.test_loader

    def load_test_dataloader_union(self, cls, batch_size=None, *args, **kwargs):
        """Build a test loader from the union split via ``cls``."""
        if batch_size is not None:
            self.config['batch_size'] = batch_size

        _, _, test_loader = MyDataloader.get_dataloader_union(cls, self.config, *args, **kwargs)

        self.test_loader = test_loader

        return self.test_loader

    def load_test_dataloader_ratio(self, cls, batch_size=None, *args, **kwargs):
        """Build a test loader from the ratio split (sizes read from config)."""
        if batch_size is not None:
            self.config['batch_size'] = batch_size

        _, _, test_loader = MyDataloader.get_dataloader_ratio(
            cls, self.config, train_size_15=self.config['train_size_15'], train_size_21=self.config['train_size_21'],
            test_size_21=self.config['test_size_21'], *args, **kwargs)

        self.test_loader = test_loader

        return self.test_loader
