import os
from typing import List, Union
import numpy as np
from sklearn.svm import SVC
from sklearn.ensemble import RandomForestClassifier, AdaBoostClassifier, ExtraTreesClassifier, BaggingClassifier
from sklearn.metrics import *
from deepforest import CascadeForestClassifier

from torch import nn
import torch
import joblib

from utils import *

class JobLibModel(object):
    """
        Base class for all the model class restored in a 'joblib' suffix file

        Wraps an sklearn-style estimator (anything exposing ``fit``/``predict``)
        and adds evaluation helpers plus joblib save/load.

        NOTE(review): subclasses also keep their own name-mangled reference to
        the estimator (e.g. ``_ASRandomForest__model``); after ``load_model``
        those references still point at the pre-load estimator -- verify no
        subclass relies on them after a load.
    """
    def __init__(self, template_model, model_type, model_name : str) -> None:
        super().__init__()
        self.__model = template_model   # the wrapped estimator instance
        self.__name = model_name        # human-readable name used in messages
        self.__model_type = model_type  # expected class, validated by load_model

    def fit(self, X : np.ndarray, y : np.ndarray) -> None:
        """Fit the wrapped estimator on features ``X`` and labels ``y``."""
        self.__model.fit(X, y)

    def score(self, X : np.ndarray, y : np.ndarray) -> float:
        """Return the estimator's own score, or accuracy computed from predictions.

        Raises:
            AttributeError: if the estimator has neither ``score`` nor ``predict``.
        """
        if hasattr(self.__model, "score"):
            return self.__model.score(X, y)
        elif hasattr(self.__model, "predict"):
            # sklearn metrics take (y_true, y_pred) in that order.
            return accuracy_score(y, self.__model.predict(X))
        else:
            raise AttributeError("{} doesn't have method score or predict, which means you cann't use the score method".format(type(self.__model)))

    def roc_auc(self, X : np.ndarray, y : np.ndarray) -> float:
        """Return the ROC-AUC of the estimator's predictions against ``y``.

        Raises:
            AttributeError: if the estimator has no ``predict`` method.
        """
        if hasattr(self.__model, "predict"):
            # BUG FIX: roc_auc_score is NOT symmetric -- the ground truth must
            # be the first argument; the arguments were previously swapped.
            return roc_auc_score(y, self.__model.predict(X))
        else:
            raise AttributeError("{} doesn't have method score or predict, which means you cann't use the score method".format(type(self.__model)))

    def confuse_matrix(self, X : np.ndarray, y : np.ndarray) -> np.ndarray:
        """Confusion matrix with true labels on rows, predictions on columns."""
        pre_lab = self.predict_array(X)
        # BUG FIX: pass (y_true, y_pred); the swapped order transposed the matrix.
        return confusion_matrix(y, pre_lab)

    def report(self, X : np.ndarray, y : np.ndarray, print_ : bool = True, return_str : bool = True) -> str:
        """Print or return sklearn's classification report for ``X`` vs ``y``.

        When ``print_`` is True the report is printed (and None returned);
        otherwise it is returned as a string (``return_str``) or as a dict.
        """
        pre_lab = self.predict_array(X)
        # BUG FIX: classification_report expects (y_true, y_pred); the swapped
        # order exchanged precision and recall in the report.
        if print_:
            print(classification_report(y, pre_lab, digits=3))
        elif return_str:
            return classification_report(y, pre_lab, digits=3)
        else:
            return classification_report(y, pre_lab, digits=3, output_dict=True)

    def load_model(self, path : str) -> bool:
        """Load an estimator from ``path`` after validating its type.

        Raises:
            TypeError: when the restored object is not a ``model_type`` instance.
        """
        check_path(path, ".joblib")
        loaded = joblib.load(path)
        # BUG FIX: validate BEFORE adopting the object, so a failed load no
        # longer leaves the wrapper holding a model of the wrong type.
        if not isinstance(loaded, self.__model_type):
            raise TypeError("Model restored in {} is not a {} model!".format(path, self.__name))
        self.__model = loaded
        return True

    def save_model(self, path : str) -> bool:
        """Dump the wrapped estimator to ``path``; return True when the file exists."""
        check_path(path=path, check_path=False, check_file=False, suffix=".joblib")
        joblib.dump(self.__model, path)
        saved = os.path.exists(path)
        if saved:
            print("{} model has been saved to \033[34m{}\033[0m".format(self.__name, path))
        # BUG FIX: honour the annotated bool return type (previously always None).
        return saved

    def predict_array(self, x : Union[np.ndarray, List], return_numpy : bool = True) -> Union[np.ndarray, List]:
        """Predict labels for a single 1D sample or a 2D batch.

        Returns an ndarray when ``return_numpy`` is True, else a plain list.

        Raises:
            ValueError: for arrays with more than two dimensions.
        """
        if not isinstance(x, np.ndarray):
            x = np.array(x, dtype="float32")
        if len(x.shape) == 1:
            # A single sample: promote it to a one-row batch.
            x = x.reshape(1, -1)
        elif len(x.shape) > 2:
            # BUG FIX: report the shape, not the whole array, as the message promises.
            raise ValueError("x must be a 1D or 2D array! But received array's shape is {}!".format(x.shape))
        pre_lab = self.__model.predict(x)
        if not return_numpy:
            pre_lab = pre_lab.tolist()
        return pre_lab

    def predict_wav_file(self, path : str, return_numpy : bool = True) -> Union[np.ndarray, List]:
        """Run prediction on the features extracted from the wav file at ``path``."""
        x = process_wav_file(path)
        return self.predict_array(x, return_numpy)

class ASSVM(JobLibModel):
    """Joblib-persistable wrapper around sklearn's SVC (same parameters)."""
    def __init__(self, C=1.0, kernel='rbf', degree=3, gamma='scale', coef0=0.0, shrinking=True, probability=False,
                 tol=1e-3, cache_size=200, class_weight=None, verbose=False, max_iter=-1, decision_function_shape='ovr',
                 break_ties=False, random_state=None) -> None:
        # BUG FIX: verbose, break_ties and random_state were hard-coded to
        # False/False/None, silently ignoring the caller's arguments; forward
        # them like every other parameter.
        self.__model : SVC = SVC(C=C, kernel=kernel, degree=degree, gamma=gamma, coef0=coef0, shrinking=shrinking,
                                 probability=probability, tol=tol, cache_size=cache_size, class_weight=class_weight,
                                 verbose=verbose, max_iter=max_iter, decision_function_shape=decision_function_shape,
                                 break_ties=break_ties, random_state=random_state)

        super().__init__(
            template_model=self.__model,
            model_type=SVC,
            model_name="SVC"
        )

class ASRandomForest(JobLibModel):
    """Joblib-persistable wrapper around sklearn's RandomForestClassifier."""

    def __init__(self, n_estimators=100, criterion="gini", max_depth=None, min_samples_split=2, min_samples_leaf=1, min_weight_fraction_leaf=0.,
                 max_features="auto", max_leaf_nodes=None, min_impurity_decrease=0., min_impurity_split=None, bootstrap=True, oob_score=False,
                 n_jobs=None, random_state=None, verbose=0, warm_start=False, class_weight=None, ccp_alpha=0.0, max_samples=None) -> None:

        # Remember whether OOB bookkeeping was requested so the property below
        # can guard access to the fitted attribute.
        self.__is_oob = oob_score
        # min_impurity_split is accepted for signature compatibility but is
        # deliberately not forwarded (it was removed from recent scikit-learn).
        forest_params = dict(
            n_estimators=n_estimators, criterion=criterion, max_depth=max_depth,
            min_samples_split=min_samples_split, min_samples_leaf=min_samples_leaf,
            min_weight_fraction_leaf=min_weight_fraction_leaf, max_features=max_features,
            max_leaf_nodes=max_leaf_nodes, min_impurity_decrease=min_impurity_decrease,
            bootstrap=bootstrap, oob_score=oob_score, n_jobs=n_jobs,
            random_state=random_state, verbose=verbose, warm_start=warm_start,
            class_weight=class_weight, ccp_alpha=ccp_alpha, max_samples=max_samples,
        )
        self.__model : RandomForestClassifier = RandomForestClassifier(**forest_params)

        super().__init__(
            template_model=self.__model,
            model_type=RandomForestClassifier,
            model_name="RandomForest"
        )

    @property
    def oob_score(self):
        """Out-of-bag accuracy of the fitted forest (requires oob_score=True)."""
        if not self.__is_oob:
            raise ValueError("Oob score isn't turned on! Please assign the parameter 'oob_score' as True!")
        return self.__model.oob_score_

class ASExtraTrees(JobLibModel):
    """Joblib-persistable wrapper around sklearn's ExtraTreesClassifier."""

    def __init__(self, n_estimators=100, criterion="gini", max_depth=None, min_samples_split=2, min_samples_leaf=1, 
                 min_weight_fraction_leaf=0, max_features="auto", max_leaf_nodes=None, min_impurity_decrease=0, 
                 min_impurity_split=None, bootstrap=False, oob_score=False, n_jobs=None, random_state=None, verbose=0, 
                 warm_start=False, class_weight=None, ccp_alpha=0, max_samples=None) -> None:

        # Keep the flag so the oob_score property can refuse access when OOB
        # bookkeeping was never enabled.
        self.__is_oob = oob_score
        # min_impurity_split is accepted for signature compatibility but is
        # deliberately not forwarded (removed in recent scikit-learn releases).
        tree_params = dict(
            n_estimators=n_estimators, criterion=criterion, max_depth=max_depth,
            min_samples_split=min_samples_split, min_samples_leaf=min_samples_leaf,
            min_weight_fraction_leaf=min_weight_fraction_leaf, max_features=max_features,
            max_leaf_nodes=max_leaf_nodes, min_impurity_decrease=min_impurity_decrease,
            bootstrap=bootstrap, oob_score=oob_score, n_jobs=n_jobs,
            random_state=random_state, verbose=verbose, warm_start=warm_start,
            class_weight=class_weight, ccp_alpha=ccp_alpha, max_samples=max_samples,
        )
        self.__model : ExtraTreesClassifier = ExtraTreesClassifier(**tree_params)

        super().__init__(
            template_model=self.__model, 
            model_type=ExtraTreesClassifier, 
            model_name="ExtraTrees"
        )

    @property
    def oob_score(self):
        """Out-of-bag accuracy of the fitted ensemble (requires oob_score=True)."""
        if not self.__is_oob:
            raise ValueError("Oob score isn't turned on! Please assign the parameter 'oob_score' as True!")
        return self.__model.oob_score_

class ASAdaBoostSVM(JobLibModel):
    """Joblib-persistable AdaBoost ensemble (intended for an SVM base estimator)."""

    def __init__(self, base_estimator=None, n_estimators : int = 100, learning_rate : float = 1.0, algorithm : str = 'SAMME', 
                       random_state : int = None) -> None:
        # Build the booster first, then hand it to the joblib base class.
        booster = AdaBoostClassifier(
            base_estimator=base_estimator,
            n_estimators=n_estimators,
            learning_rate=learning_rate,
            algorithm=algorithm,
            random_state=random_state
        )
        self.__model : AdaBoostClassifier = booster
        super().__init__(
            template_model=booster, 
            model_type=AdaBoostClassifier, 
            model_name="AdaBoost based on SVM"
        )


class ASBaggingSVM(JobLibModel):
    """Joblib-persistable Bagging ensemble (intended for an SVM base estimator).

    BUG FIX: the defaults for ``max_samples``/``max_features`` were the ints
    ``1``/``1``, which BaggingClassifier interprets as "draw exactly one
    sample / one feature" per base estimator -- degenerate models. The floats
    ``1.0``/``1.0`` mean "use 100%", matching scikit-learn's own defaults.
    Callers that passed explicit values are unaffected.
    """

    def __init__(self, base_estimator=None, n_estimators=10, *, max_samples=1.0, max_features=1.0, bootstrap=True, 
                       bootstrap_features=False, oob_score=False, warm_start=False, n_jobs=None, random_state=None, 
                       verbose=0) -> None:

        self.__model : BaggingClassifier = BaggingClassifier(
            base_estimator=base_estimator, n_estimators=n_estimators, max_samples=max_samples, max_features=max_features, bootstrap=bootstrap, 
            bootstrap_features=bootstrap_features, oob_score=oob_score, warm_start=warm_start, n_jobs=n_jobs, random_state=random_state, 
            verbose=verbose
        )
        super().__init__(
            template_model=self.__model, 
            model_type=BaggingClassifier, 
            model_name="Bagging based on SVM"
        ) 


# torch model
# TODO : may be used for constructing nerual network
class ASRNN(nn.Module):
    """LSTM classifier: final time-step hidden state -> 2-way softmax."""

    def __init__(self, input_dim : int, hidden_dim : int, layer_dim : int = 1):
        super().__init__()
        self.hidden_dim = hidden_dim
        self.input_dim = input_dim
        # batch_first: inputs are shaped (batch, seq, feature).
        self.lstm = nn.LSTM(input_dim, hidden_dim, layer_dim, batch_first=True)
        self.mlp1 = nn.Linear(hidden_dim, 32)
        self.mlp2 = nn.Linear(32, 2)

    def forward(self, x : torch.Tensor):
        """Map (batch, seq, input_dim) inputs to (batch, 2) class probabilities."""
        seq_out, _ = self.lstm(x)          # final-state tuple is unused
        last_step = seq_out[:, -1, :]      # hidden state at the last time step
        logits : torch.Tensor = self.mlp2(self.mlp1(last_step))
        return logits.softmax(dim=1)

class ASMLP(nn.Module):
    """MLP classifier stub.

    TODO(review): unfinished -- ``classfier`` (sic, typo kept: renaming the
    attribute would change the public interface) is an empty Sequential and no
    ``forward`` is defined; this module is not usable yet.
    """
    def __init__(self, input_size):
        super().__init__()
        # ``input_size`` is currently unused; presumably the in_features of the
        # first Linear layer once the network is filled in -- TODO confirm.
        self.classfier = nn.Sequential(

        )

class ASCNN(nn.Module):
    """Two-branch CNN classifier.

    Both branches share the same conv/pool layout and differ only in kernel
    size (a small "left" kernel vs a larger "right" kernel). The left branch
    is projected from 1728 to 480 features, concatenated with the right
    branch's 480 features, and classified into 2 softmax probabilities.
    NOTE(review): the Linear sizes fix the expected input spatial size --
    confirm against the caller's feature maps.
    """

    def __init__(self, left_lace_size : int = 3, right_lace_size : int = 7, input_channel : int = 1):
        super().__init__()
        # Same layer sequence for both branches; only the kernel size differs.
        self.conv1 = self._make_branch(input_channel, left_lace_size)
        self.conv2 = self._make_branch(input_channel, right_lace_size)

        self.fc1 = nn.Linear(1728, 480)
        self.classifier = nn.Sequential(
            nn.Linear(960, 128),
            nn.Linear(128, 32),
            nn.Linear(32, 2)
        )

    @staticmethod
    def _make_branch(in_channels : int, kernel_size : int) -> nn.Sequential:
        """Conv stack used by both branches (first conv has no ReLU, as designed)."""
        return nn.Sequential(
            nn.Conv2d(in_channels, 32, kernel_size),
            nn.BatchNorm2d(32),
            nn.MaxPool2d(3, 3),
            nn.Conv2d(32, 64, kernel_size),
            nn.ReLU(),
            nn.BatchNorm2d(64),
            nn.MaxPool2d(2, 2),
            nn.Conv2d(64, 32, kernel_size),
            nn.ReLU(),
            nn.BatchNorm2d(32),
            nn.MaxPool2d(2, 2),
        )

    def forward(self, x):
        """Return (batch, 2) class probabilities for the input image batch."""
        left = self.fc1(self.conv1(x).flatten(start_dim=1))
        right = self.conv2(x).flatten(start_dim=1)
        fused : torch.Tensor = self.classifier(torch.cat([left, right], dim=1))
        return fused.softmax(dim=1)

class ASCNNAE(nn.Module):
    """Convolutional autoencoder.

    Encoder downsamples by 4x overall (two MaxPool2d(2, 2) stages) to a
    256-channel bottleneck; the decoder mirrors it with two stride-2 transposed
    convolutions and a Sigmoid output in [0, 1]. Shape comments assume a
    single-channel 96x96 input -- confirm against the training pipeline.
    """

    def __init__(self):
        super().__init__()
        # Conv2d args: (in_channels, out_channels, kernel_size, stride, padding)
        encoder_layers = [
            nn.Conv2d(1, 64, 3, 1, 1),      # -> (N, 64, 96, 96)
            nn.BatchNorm2d(64),
            nn.Conv2d(64, 64, 3, 1, 1),     # -> (N, 64, 96, 96)
            nn.ELU(),
            nn.MaxPool2d(2, 2),             # -> (N, 64, 48, 48)
            nn.BatchNorm2d(64),
            nn.Conv2d(64, 64, 3, 1, 1),     # -> (N, 64, 48, 48)
            nn.ELU(),
            nn.BatchNorm2d(64),
            nn.Conv2d(64, 128, 3, 1, 1),    # -> (N, 128, 48, 48)
            nn.ELU(),
            nn.BatchNorm2d(128),
            nn.Conv2d(128, 128, 3, 1, 1),   # -> (N, 128, 48, 48)
            nn.ELU(),
            nn.BatchNorm2d(128),
            nn.Conv2d(128, 256, 3, 1, 1),   # -> (N, 256, 48, 48)
            nn.ELU(),
            nn.MaxPool2d(2, 2),             # -> (N, 256, 24, 24) bottleneck
            nn.BatchNorm2d(256),
        ]
        self.Encoder = nn.Sequential(*encoder_layers)

        # ConvTranspose2d args: (in, out, kernel, stride, padding[, output_padding])
        decoder_layers = [
            nn.ConvTranspose2d(256, 128, 3 ,1, 1),      # -> (N, 128, 24, 24)
            nn.ELU(),
            nn.BatchNorm2d(128),
            nn.ConvTranspose2d(128, 128, 3, 2, 1, 1),   # -> (N, 128, 48, 48) upsample x2
            nn.ELU(),
            nn.BatchNorm2d(128),
            nn.ConvTranspose2d(128, 64, 3, 1, 1),       # -> (N, 64, 48, 48)
            nn.ELU(),
            nn.BatchNorm2d(64),
            nn.ConvTranspose2d(64, 32, 3, 1, 1),        # -> (N, 32, 48, 48)
            nn.ELU(),
            nn.BatchNorm2d(32),
            nn.ConvTranspose2d(32, 32, 3, 1, 1),        # -> (N, 32, 48, 48)
            nn.ConvTranspose2d(32, 16, 3, 2, 1, 1),     # -> (N, 16, 96, 96) upsample x2
            nn.ELU(),
            nn.BatchNorm2d(16),
            nn.ConvTranspose2d(16, 1, 3, 1, 1),         # -> (N, 1, 96, 96)
            nn.Sigmoid(),                               # reconstruction in [0, 1]
        ]
        self.Decoder = nn.Sequential(*decoder_layers)

    def forward(self, x):
        """Return (bottleneck, reconstruction) for the input batch ``x``."""
        latent = self.Encoder(x)
        reconstruction = self.Decoder(latent)
        return latent, reconstruction
