import torch

import get_data_from_101 as get_data
import itertools
import copy
import os
import pickle
from sklearn import tree
from sklearn.linear_model import LinearRegression
from sklearn import svm
from sklearn import neighbors
from sklearn import ensemble
from sklearn.tree import ExtraTreeRegressor
from sklearn.neural_network import MLPRegressor
from sklearn.gaussian_process import GaussianProcessRegressor
from sklearn.metrics import r2_score
from scipy.stats import kendalltau
from tensorflow.keras.utils import to_categorical
import numpy as np
import matplotlib.pyplot as plt
from sklearn.neural_network import MLPClassifier
from sklearn.ensemble import RandomForestClassifier
from torch.utils.data import Dataset, DataLoader, TensorDataset
import torch as pt
from torch import nn
import pandas as pd
import gc
from sklearn.metrics import classification_report,confusion_matrix,accuracy_score
import joblib
# different methods
# 12


# model = [model_decision_tree_regression, model_linear_regression, model_svm, model_k_neighbor,
#         model_random_forest_regressor, model_adaboost_regressor, model_gradient_boosting_regressor,
#         model_bagging_regressor, model_extra_tree_regressor, model_gaussian_process_regressor, model_MLP_regressor]

# method = ['decision_tree', 'linear_regression', 'svm', 'knn', 'random_forest', 'adaboost', 'GBRT', 'Bagging',
#          'ExtraTree', 'Gaussian_Process', 'MLP']

# MAX full training time in the dataset; used to scale final_training_time
# into [0, 1] when normalization is requested
MAX_FTT = 5521.803059895833
# MAX trainable parameters in the dataset; used to scale trainable_parameters
# into [0, 1] when normalization is requested
MAX_TP = 49979274


def get_toy_metrics(num, type='train', train_num=400):
    """Fetch ``num`` NAS-Bench-101 records and preprocess them.

    Pipeline: sample record indices for the requested split, pull the raw
    metrics (structure, accuracy, training-time info), zero-pad the
    adjacency matrices and operation lists to a fixed length, and map the
    operation names to integer codes.
    """
    indices = get_data.get_data_index_from_101(num, type=type, train_num=train_num)
    records = get_data.get_corresponding_metrics_by_index(indices, type=type)
    records = get_data.padding_zero_in_matrix(records)
    return get_data.operations2integers(records)


"""  
420575: {'fixed_metrics': {'module_adjacency': array([[0, 1, 0, 0, 0, 1, 0],
                                                                   [0, 0, 1, 0, 0, 0, 0],
           [0, 0, 0, 1, 0, 0, 0],
           [0, 0, 0, 0, 1, 0, 1],
           [0, 0, 0, 0, 0, 1, 0],
           [0, 0, 0, 0, 0, 0, 1],
           [0, 0, 0, 0, 0, 0, 0]], dtype=int8), 
           'module_operations': ['input', 'conv3x3-bn-relu', 'conv3x3-bn-relu', 'maxpool3x3', 'conv1x1-bn-relu', 'conv1x1-bn-relu', 'output'], 
           'trainable_parameters': 6054282, 
           'module_integers': array([2, 2, 3, 1, 1])}, 

           'final_training_time': 1409.0293782552083, 
           'final_test_accuracy': 0.9034789005915324}, 
"""


# return: 1. X: a linear array: flattened adjacent matrix + integer operations
#         2. y: accuracy
# return: 1. X: a linear array: flattened adjacent matrix + integer operations
#         2. y: accuracy
def get_toy_data(important_metrics, create_more_metrics=True, select_upper_tri=False, max_creation=-1,
                 integers2one_hot=True, additional_metrics=False, normalization=True):
    """Convert NAS-Bench-101 records into (X, y) training data.

    Each sample is the flattened (margin-stripped) adjacency matrix followed
    by the module operations (optionally one-hot encoded over 4 classes)
    and — when ``additional_metrics`` is True — the trainable-parameter
    count and final training time (optionally normalised by MAX_TP/MAX_FTT).

    Args:
        important_metrics: dict keyed by architecture id; each value holds
            'fixed_metrics' plus 'final_test_accuracy' / 'final_training_time'.
        create_more_metrics: augment each architecture with isomorphic
            variants via Create_more_metrics.create_new_metrics.
        select_upper_tri, max_creation: forwarded to the augmentation.
        integers2one_hot: one-hot encode the operation integers.
        additional_metrics: append trainable parameters and training time
            (only honoured in the non-augmented branch, as before).
        normalization: divide the additional metrics by the dataset maxima.

    Returns:
        (X, y, more_metrics_num): feature vectors, accuracies, and the
        per-architecture count of augmented variants (empty when
        create_more_metrics is False).
    """
    X = []
    y = []
    more_metrics_num = []
    for index in important_metrics:
        fixed_metrics = important_metrics[index]['fixed_metrics']
        adjacent_matrix = fixed_metrics['module_adjacency']
        module_integers = fixed_metrics['module_integers']
        accuracy = important_metrics[index]['final_test_accuracy']
        if create_more_metrics:
            # BUGFIX: this project module was referenced without being
            # imported anywhere, so this branch always raised NameError.
            # Deferred import keeps the module optional for other callers.
            import Create_more_metrics
            more_metrics = Create_more_metrics.create_new_metrics(adjacent_matrix, module_integers, select_upper_tri,
                                                                  max_creation, InOut=False)
            more_metrics_num.append(len(more_metrics))  # number of augmented variants
            for same_metric in more_metrics:
                adjacent_matrix, module_integers = same_metric['module_adjacency'], same_metric['module_integers']
                X.append(_flatten_metrics(adjacent_matrix, module_integers, integers2one_hot))
                y.append(accuracy)  # every variant keeps the source accuracy
        else:
            input_metrics = _flatten_metrics(adjacent_matrix, module_integers, integers2one_hot)
            if additional_metrics:
                # BUGFIX: these were previously referenced without ever being
                # read from the record (NameError) and were never appended.
                trainable_parameters = fixed_metrics['trainable_parameters']
                final_training_time = important_metrics[index]['final_training_time']
                if normalization:
                    trainable_parameters = trainable_parameters / MAX_TP
                    final_training_time = final_training_time / MAX_FTT
                input_metrics.extend([trainable_parameters, final_training_time])
            X.append(input_metrics)
            y.append(accuracy)

    assert len(X) == len(y)
    print('Input {:} metrics, obtain {:} metrics'.format(len(important_metrics), len(X)))
    return X, y, more_metrics_num


def _flatten_metrics(adjacent_matrix, module_integers, integers2one_hot):
    """Flatten one adjacency matrix plus operation encoding into a feature list."""
    adjacent_matrix = get_data.delete_margin(adjacent_matrix)  # strip fixed input/output margin
    input_metrics = list(adjacent_matrix.flatten())
    if integers2one_hot:
        # to_categorical turns each integer into a 4-way one-hot vector
        module_integers = to_categorical(module_integers, 4, dtype='int8').flatten()
    input_metrics.extend(module_integers)
    return input_metrics


def get_upper_triangular_data(important_metrics, integers2one_hot=True, double_upper=False, additional_metrics=True,
                              normalization=True):
    """Build feature vectors from the upper triangle of each adjacency matrix.

    For every record: strip the matrix margin, flatten the upper-triangular
    part row by row (NOTE: the main diagonal IS included, via row[i:]),
    optionally append the upper triangle walked column-wise
    (``double_upper``), then append the operation encoding.

    NOTE(review): ``additional_metrics`` and ``normalization`` are accepted
    but currently unused in this function — confirm intent with callers.

    Returns:
        (X, Y): feature vectors and the matching final test accuracies.
    """
    X = []
    Y = []
    for key in important_metrics:
        record = important_metrics[key]
        fixed = record['fixed_metrics']
        matrix = fixed['module_adjacency']
        ops = fixed['module_integers']
        if integers2one_hot:
            # one-hot encode each operation id over 4 classes, then flatten
            ops = to_categorical(ops, 4, dtype='int8').flatten()

        matrix = get_data.delete_margin(matrix)
        matrix_arr = np.array(matrix)
        size = len(matrix)

        # Upper triangle, row-wise, diagonal included.
        features = []
        for row in range(size):
            features.extend(matrix[row][row:])

        if double_upper:
            # Also walk the upper triangle column-wise (diagonal included).
            for col in range(size):
                features.extend(matrix_arr[:col + 1, col].tolist())

        features.extend(ops)
        X.append(features)
        Y.append(record['final_test_accuracy'])
    return X, Y
# Candidate triple-ranking classifiers. Only model_ranforest is fitted in the
# __main__ block below; the MLP and SVM are retained for the commented-out
# experiments.
model_MLPClassifier = MLPClassifier(solver='adam', activation='relu', hidden_layer_sizes=(246,123,25),random_state=123,
                                 alpha=0.0001, batch_size=128)
model_svm=svm.SVC(cache_size=4096)
model_ranforest=RandomForestClassifier(n_estimators=150,criterion='entropy',random_state=42)
def getpair(X, L):
    """Expand samples into ordered triples for learning-to-rank.

    Every ordered permutation of three distinct samples becomes one training
    row (the three feature vectors concatenated). The label encodes where
    the first sample's score sits within its triple:
        2 -- first score is (weakly) the largest,
        1 -- first score is the middle value,
        0 -- first score is strictly the smallest.

    Args:
        X: non-empty sequence of equal-length feature vectors.
        L: matching sequence of scalar scores (e.g. accuracies).

    Returns:
        (features, labels): ``features`` with shape (P, 3 * d) where d is
        the per-sample feature length, and ``labels`` with shape (P,).
    """
    assert len(X) > 0
    feature_triples = np.array(list(itertools.permutations(X, r=3)))
    label_triples = np.array(list(itertools.permutations(L, r=3)))
    assert len(feature_triples) == len(label_triples)
    labels = []
    for y in label_triples:
        if y[0] >= y[1] and y[0] >= y[2]:
            labels.append(2)  # first is the best of the three
        elif (y[0] >= y[1] and y[0] <= y[2]) or (y[0] <= y[1] and y[0] >= y[2]):
            labels.append(1)  # first sits between the other two
        else:
            labels.append(0)  # first is the worst of the three
    # Flatten each (3, d) triple into one row. The width was previously
    # hard-coded to 123 (= 3 * 41); inferring it supports any feature size
    # while producing identical output for 41-wide inputs.
    return feature_triples.reshape(feature_triples.shape[0], -1), np.array(labels)

if __name__ == '__main__':
    # Train the random-forest ranker on ordered triples built from 210
    # training architectures, then evaluate on two freshly sampled test sets.
    train_num1 = 210
    important_metrics = get_toy_metrics(train_num1, type='train', train_num=train_num1)
    # Features: flattened upper-triangular adjacency + one-hot operations.
    # NOTE(review): additional_metrics/normalization are passed but unused
    # inside get_upper_triangular_data — confirm intent.
    x1,y1 = get_upper_triangular_data(important_metrics, integers2one_hot=True, double_upper=False,
                                   additional_metrics=True, normalization=True)
    X1, Y1 = getpair(x1, y1)  # expand into ordered triples + 3-class rank labels
    print("------------mlpssssssssssssss1-----------------------")
    model_ranforest.fit(X1, Y1.ravel())
    #joblib.dump(model_ranforest,'savemodelran.pkl')
    del X1, Y1  # the triple expansion is large; free it before evaluation
    gc.collect()
    print("------------pre-----------------------")
    for i in range(2):
        # Each iteration draws a fresh 210-architecture test split and
        # reports confusion matrix / classification report / accuracy.
        test_num = 210
        important_metrics = get_toy_metrics(test_num, type='test', train_num=test_num)
        x2, y2 = get_upper_triangular_data(important_metrics, integers2one_hot=True, double_upper=False,
                                           additional_metrics=True, normalization=True)
        X2, Y2 = getpair(x2, y2)
        ypred = model_ranforest.predict(X2)
        result = confusion_matrix(Y2, ypred)
        print("Confusion Matrix:")
        print(result)
        result1 = classification_report(Y2, ypred)
        print("Classification Report:", )
        print(result1)
        result2 = accuracy_score(Y2, ypred)
        print("Accuracy:", result2)
        del X2, Y2
        gc.collect()
    """`
    0.001635637711420352
    train_num1 = 100
    important_metrics = get_toy_metrics(train_num1, type='train', train_num=train_num1)
    x1, y1 = get_upper_triangular_data(important_metrics, integers2one_hot=True, double_upper=False,
                                       additional_metrics=True, normalization=True)
    X1, Y1 = getpair(x1, y1)
    #with open('ranforest.pickle.pickle', 'rb') as f:
        #model2 = pickle.load(f)
    print("------------mlpssssssssssssss2-----------------------")
    model2=joblib.load('savemodelran.pkl')
    model2.fit(X1, Y1.ravel())
    del X1, Y1
    gc.collect()
      print("------------svmssssssssssssss-----------------------",)
    model_svm.fit(X1, Y1.ravel())
    print("------------pre-----------------------")
    score = model_svm.score(X2)
    print("score", score)
    print("------------ranssssssssssssss-----------------------", )
    model_ranforest.fit(X1, Y1.ravel())
    print("------------pre-----------------------")
    score = model_ranforest.score(X2)
    print("score", score)
     model_MLPClassifier.fit(X1.reshape(X1.shape[0], X1.shape[1] * X1.shape[2]), Y1.ravel())
        print("------------pre-----------------------")
        del X1, Y1
        gc.collect()
        X2, Y2 = getpair(x2)
        score = model_MLPClassifier.fit(X2.reshape(X2.shape[0], X2.shape[1] * X2.shape[2]), Y2)
        print("score", score)


    print("------------fit-----------------------",i)
        model_svm.fit(X1, Y1.ravel())
    print("------------pre-----------------------")
    for i in range(4):
        test_num = 100
        important_metrics = get_toy_metrics(test_num, type='test', train_num=test_num)
        x2, y2 = get_upper_triangular_data(important_metrics, integers2one_hot=True, double_upper=False,
                                           additional_metrics=True, normalization=True)
        X2, Y2 = getpair(x2, y2)
        score = model_svm.score(X2, Y2)
        print("score", score)

    
    3
    
    
    
    
    
    
    
    
    
    
    
    
    
    
    
    
    
    
    
    
    
    
    
    
     important_metrics = get_toy_metrics(train_num2, type='train', train_num=train_num2)
    x2, y2 = get_upper_triangular_data(important_metrics, integers2one_hot=True, double_upper=True,
                                       additional_metrics=True, normalization=True)
    X2, Y2 = getpair(x2, y2)
    print("-------model_mlp.fit2------")
    model_MLPClassifier.fit(X2, Y2.ravel())
    model_svm.fit(X2,Y2.ravel())
    del x2, y2, X2, Y2
    gc.collect()
    print("------------ssssssssssssss-----------------------")
    important_metrics = get_toy_metrics(test_num, type='fixed_test', train_num=test_num)
    A, b = get_upper_triangular_data(important_metrics, integers2one_hot=True, double_upper=True,
                                     additional_metrics=True, normalization=True)
    X3, Y3 = getpair(A, b)
    score1 = model_MLPClassifier.score(X3,Y3)
    print("mlpscore1", score1)
    score2 = model_svm.score(X3, Y3)
    print('model_svm', score2)
    del A, b, X3, Y3
    gc.collect()
    print("------------ssssssssssssss-----------------------")
    important_metrics = get_toy_metrics(test_num, type='fixed_test', train_num=test_num)
    C, d = get_upper_triangular_data(important_metrics, integers2one_hot=True, double_upper=True,
                                     additional_metrics=True, normalization=True)
    X4, Y4 = getpair(C, d)
    score2=model_MLPClassifier.score(X4,Y4)
    scoreS = model_MLPClassifier.score(X4, Y4)
    print("mlpscore2", score2)
    print("SVMscore2", scoreS)
    del C, d, X4, Y4
    gc.collect()
    print("------------ssssssssssssss-----------------------")
    important_metrics = get_toy_metrics(test_num, type='fixed_test', train_num=test_num)
    E, f = get_upper_triangular_data(important_metrics, integers2one_hot=True, double_upper=True,
                                     additional_metrics=True, normalization=True)
    X5, Y5 = getpair(E, f)
    score3 = model_MLPClassifier.score(X5, Y5)
    print("mlpscore3", score3)
    scoreS=model_svm.score(X5,Y5)
    print("SVMscore2", scoreS)
    del E, f, X5, Y5
    gc.collect()
    """

    # print('X', X)
    # print('X[0]', X[0])
    # print(type(X1), type(Y1))
    # print('X1', X1)
    # print('X1_S',len(X1[0]))

    # print('Y1_S', len(Y1))
    # print('Y1', Y1)
    # print('a', Y1[0])
    #important_metrics = get_toy_metrics(test_num, type='fixed_test', train_num=train_num)
    #A, b = get_upper_triangular_data(important_metrics, integers2one_hot=True, double_upper=True,
                                     #additional_metrics=True, normalization=True)
    #X2, Y2 = get_pair(A, b)
    #print("-------model_svm.fit------")
    #model_MLPClassifier.fit(X1, Y1.ravel())
    #print("------------ssssssssssssss-----------------------")
    #score = model_MLPClassifier.score(X2, Y2)
    #print("mlpscore", score)
    # traindata2 = addbatch(X2, Y2, 64)
    # print(type(X2), type(Y2))
    # print(X2)
    # print()

    # print("-------model_svm.fit------")
    # model_svm.fit(X1, Y1.ravel())
    # print("------------ssssssssssssss-----------------------")
    # score2 = model_svm.score(X2, Y2)
    # print('model_svm', score2)
