# _*_ coding: UTF-8 _*_

import numpy as np
from numpy import concatenate
import matplotlib.pyplot as plt

from random import sample
from random import uniform
from time import time
from copy import deepcopy
from tqdm import trange
from collections import Counter
import os

from sklearn.model_selection import StratifiedShuffleSplit
from sklearn.metrics import accuracy_score

from scripts.utils.tools import timer
from scripts.utils.operators import Lifecycle
from scripts.utils.operators import lhs_uniform
from scripts.classifier.mpl_classifier import MPLClassifier

np.set_printoptions(linewidth=999, threshold=round(1e9))
plt.rc('font', family='Times New Roman')


class CASDLSO:
    """
    Classifier assisted stochastic dominant learning swarm optimizer.

    An MLP classifier is trained on pairs of already-evaluated particles to
    predict the pairwise dominance relation "is f(a) <= f(b)?".  During the
    learning phase the swarm consults the classifier instead of the expensive
    objective function; only a handful (Ns) of promising candidates receive a
    real evaluation per iteration.

    Comparison cases for a particle x1 against two exemplars x2, x3:
    case = 1 : f(x1) < f(x2) or f(x1) < f(x3), no better, no learning
    case = 2 : f(x2) < f(x3) < f(x1), learn from x2, x3
    case = 3 : f(x3) < f(x2) < f(x1), learn from x3, x2
    """

    def __init__(self, obj_func, dimension: int, bounds: tuple, NP: int, sample_size=100000, Ns=4):
        """
        :param obj_func: real (expensive) objective function to minimize
        :param dimension: number of decision variables
        :param bounds: (lower, upper) scalar bounds shared by all dimensions
        :param NP: swarm size
        :param sample_size: number of particle pairs used to train the classifier
        :param Ns: number of candidates given a real evaluation each iteration
        """
        self.obj_func = obj_func
        self.__d, self.__bounds, self.__NP = dimension, bounds, NP
        self.sample_size, self.__Ns = sample_size, Ns

        # Initialize swarm via Latin hypercube sampling scaled into the bounds.
        lhs = lhs_uniform(n=self.__NP, p=self.__d)
        x = (self.__bounds[0] + lhs * (self.__bounds[1] - self.__bounds[0]))  # position
        v = np.zeros(shape=(self.__NP, self.__d))  # velocity
        self.__swarm = [[x[i], v[i], 0] for i in range(self.__NP)]  # particle[i] structure: [x, v, fitness]
        for i, particle in enumerate(self.__swarm):
            self.__swarm[i][2] = self.obj_func(particle[0])
        self.gBest = min(self.__swarm, key=lambda p: p[2])[2]  # record only fitness

        self.__database = deepcopy(self.__swarm)  # database that stores the evaluated particles
        self.__classifier = MPLClassifier(n_input=self.__d * 2, n_output=2)  # pairwise dominance classifier
        self.__splitter = StratifiedShuffleSplit(n_splits=1, test_size=0.2, train_size=0.8)  # data splitter, omissible

        self.life = Lifecycle()
        self.life.update(eva=self.__NP)
        self.acc, self.loss = [], []  # classifier accuracy & loss in each iteration
        self.__updated = set()  # indices of particles moved in the latest learning pass
        self.cases = []

    def optimize(self, iterations=None, evaluations=None, deadline=None):
        """
        Run the optimizer until any configured stopping condition is met.

        :param iterations: maximum number of iterations, or None
        :param evaluations: maximum number of real evaluations, or None
        :param deadline: wall-clock deadline, or None
        :return: the Lifecycle object holding the convergence history
        """
        self.life.set_stopping_condition(iterations=iterations, evaluations=evaluations, deadline=deadline)

        while self.life.check_stopping_condition() is False:
            # Keep the database sorted by fitness; its NP best form the swarm.
            self.__database = sorted(self.__database, key=lambda p: p[2])

            self.__swarm = deepcopy(self.__database[:self.__NP])
            self.__train_model()
            self.__learning()
            self.__evaluate()

            self.gBest = min(p[2] for p in self.__database)
            self.life.update(it=1, eva=self.__Ns, gBest=self.gBest)
            print(self.life.it, self.life.eva, len(self.__database), self.acc[-1], self.gBest)

            # Live diagnostic plots: convergence, classifier accuracy and loss.
            plt.clf()
            plt.subplot(2, 2, 1)
            plt.plot(range(len(self.life.gBest)), self.life.gBest)
            plt.title(f'fit: {self.life.gBest[-1]:.2f}')
            plt.subplot(2, 2, 2)
            plt.plot(range(len(self.acc)), self.acc)
            plt.title(f'acc: {self.acc[-1]}')
            plt.subplot(2, 2, 3)
            plt.plot(range(len(self.loss)), self.loss)
            plt.title(f'loss: {self.loss[-1]}')
            plt.subplot(2, 2, 4)
            plt.pause(0.001)

        return self.life

    def __learning(self):
        """
        Move particles toward classifier-predicted better exemplars.

        For each particle xi two random exemplars x1, x2 are drawn.  xi is
        updated only when the classifier predicts both dominate xi; the
        better-ranked exemplar then acts as the primary attractor.  Predicted
        vs. true labels are accumulated for monitoring only.
        """
        updated, accuracy, consistency = [], [], []
        updated_times = [0 for _ in range(self.__NP)]
        _p, _t = [], []  # predicted / true dominance labels (monitoring only)
        self.__updated.clear()
        for _ in range(1000):
            for i in range(self.__NP):
                # Select two random exemplars from the swarm: xb1, xb2.
                swarm = list(range(0, self.__NP))
                swarm.remove(i)
                xb1, xb2 = sample(swarm, k=2)
                xi, x1, x2 = self.__swarm[i][0], self.__swarm[xb1][0], self.__swarm[xb2][0]
                # compare x1 and xi
                x = concatenate((x1, xi), axis=0)
                pred = self.__classifier.predict(X=[x])[0]
                assert pred == 1 or pred == 2
                _p.append(pred)
                _t.append(self.__true_case(x1, xi))
                if pred == 2:  # f(x1) > f(xi)
                    continue
                # compare x2 and xi
                x = concatenate((x2, xi), axis=0)
                pred = self.__classifier.predict(X=[x])[0]
                assert pred == 1 or pred == 2
                _p.append(pred)
                _t.append(self.__true_case(x2, xi))
                if pred == 2:  # f(x2) > f(xi)
                    continue
                # compare x1 and x2
                x = concatenate((x1, x2), axis=0)
                pred = self.__classifier.predict(X=[x])[0]
                assert pred == 1 or pred == 2
                _p.append(pred)
                # BUG FIX: the true label must describe the (x1, x2) pair that
                # was actually predicted on, not (x2, xi).
                _t.append(self.__true_case(x1, x2))
                if pred == 1:  # f(x1) <= f(x2) <= f(xi)
                    self.__swarm[i][1], self.__swarm[i][0] = self.__update(self.__swarm[i], x1, x2, beta=0.4)
                else:  # f(x2) <= f(x1) <= f(xi)
                    self.__swarm[i][1], self.__swarm[i][0] = self.__update(self.__swarm[i], x2, x1, beta=0.4)
                self.__updated.add(i)
                updated_times[i] += 1

            updated.append(len(self.__updated))
            accuracy.append(accuracy_score(_t, _p))  # accuracy is symmetric; (y_true, y_pred) is the documented order
            consistency.append(self.__consistency(self.__updated))

            # Possible early-exit heuristics kept for reference / tuning:
            # if len(self.__updated) > self.__NP * 0.8:
            #     break
            # if consistency[-1] < 0.8:
            #     break

            # Diagnostic plots of learning progress.
            plt.clf()
            plt.subplot(2, 2, 1)
            plt.plot(range(len(updated)), updated)
            plt.title(f'update : {updated[-1]}')
            plt.subplot(2, 2, 2)
            plt.plot(range(len(accuracy)), accuracy, label='accuracy')
            plt.plot(range(len(consistency)), consistency, label='consistency')
            plt.title(f'accuracy: {accuracy[-1]:.2f}, consistency: {consistency[-1]:.2f}')
            plt.legend(loc='upper right')
            plt.subplot(2, 2, 3)
            plt.bar(range(len(updated_times)), updated_times, width=0.1)
            plt.title(f'update_times')
            plt.tight_layout()
            plt.pause(1)

        plt.show()

    def __update(self, p, x2, x3, beta: float):
        """
        Update velocity and position of one particle.

        :param p: current particle [x, v, fitness]
        :param x2: pBest position (primary attractor)
        :param x3: gBest position (secondary attractor, weighted by beta)
        :param beta: weight of the secondary attractor
        :return: (velocity, position) tuple
        """
        x, v = p[0], p[1]
        r1 = np.random.uniform(low=0.0, high=1.0, size=self.__d)
        r2 = np.random.uniform(low=0.0, high=1.0, size=self.__d)
        r3 = np.random.uniform(low=0.0, high=1.0, size=self.__d)
        v = r1 * v + r2 * (x2 - x) + beta * r3 * (x3 - x)
        # Clamp the new position into the search box (vectorized equivalent of
        # the per-dimension boundary checks).
        x = np.clip(x + v, self.__bounds[0], self.__bounds[1])
        return v, x

    def __evaluate(self):
        """
        Select Ns promising candidates which are different from particles in the database for real FEs.
        """
        old_particles = self.__database[:round(self.__NP / 4)]  # The particles which are in database.
        if len(self.__updated) < self.__Ns:
            print(f'len(self.updated) < self.__Ns, len(self.updated)={len(self.__updated)}')
            new_particles = deepcopy(self.__swarm)
        else:
            new_particles = [self.__swarm[i] for i in self.__updated]  # updated particles
        dis = []
        for o in new_particles:
            max_dis = 0
            for p in old_particles:
                d = np.sqrt(sum((o[0] - p[0]) ** 2))
                if max_dis < d:
                    max_dis = d
            dis.append(max_dis)  # the maximal distance from o to each p in P
        dis_sorted = sorted(range(len(dis)), key=lambda k: dis[k])  # sort the distances ascending
        # NOTE(review): this keeps the Ns candidates with the SMALLEST maximal
        # distance, i.e. the ones most similar to the database — which seems to
        # contradict the docstring's "different from" intent.  Confirm whether
        # reverse=True was intended before changing behavior.
        candidates = [new_particles[i] for i in dis_sorted[:self.__Ns]]
        for i, p in enumerate(candidates):
            candidates[i][2] = self.obj_func(p[0])  # real (counted) evaluation
            self.__database.append(candidates[i])

    def __sample_data(self):
        """
        Build a labeled dataset of particle pairs and split it 80/20.

        Label 1 means f(a) <= f(b); label 2 means f(a) > f(b).
        NOTE(review): if all sampled pairs fall into one class,
        StratifiedShuffleSplit will raise — verify against small swarms.

        :return: X_train, y_train, X_test, y_test
        """
        X, y = [], []
        X_train, X_test, y_train, y_test = None, None, None, None
        while len(X) < self.sample_size:
            a, b = sample(self.__swarm, 2)
            if a[2] <= b[2]:
                case = 1  # f(a) <= f(b)
            else:
                case = 2  # f(a) > f(b)
            x = concatenate((a[0], b[0]), axis=0)
            X.append(x)
            y.append(case)
        # split the data
        for train_index, test_index in self.__splitter.split(X, y):
            X_train, X_test = [X[i] for i in train_index], [X[i] for i in test_index]
            y_train, y_test = [y[i] for i in train_index], [y[i] for i in test_index]
        return X_train, y_train, X_test, y_test

    def __train_model(self):
        """Retrain the dominance classifier and record its test accuracy and loss."""
        X_train, y_train, X_test, y_test = self.__sample_data()
        self.__classifier.fit(X=X_train, y=y_train)
        y_pred = self.__classifier.predict(X=X_test)
        self.acc.append(accuracy_score(y_test, y_pred))  # (y_true, y_pred) order; same value, conventional call
        self.loss.append(self.__classifier.loss_)

    def __true_case(self, x1, x2):
        """
        Return the ground-truth dominance label for (x1, x2).

        NOTE: this spends two real objective evaluations that are NOT counted
        in self.life — it is intended for monitoring only.

        :return: 1 if f(x1) <= f(x2), else 2
        """
        f1, f2 = self.obj_func(x1), self.obj_func(x2)
        if f1 <= f2:
            return 1
        else:
            return 2

    def __consistency(self, updated):
        """
        Estimate the classifier's order-consistency on updated particles.

        For random pairs (a, b) the classifier is queried in both orders.  A
        consistent classifier flips its label when the pair is flipped, so
        identical labels for (a, b) and (b, a) count as a conflict.

        :param updated: indices of updated particles
        :return: fraction of consistent pairs in [0, 1]
        """
        if len(updated) < 2:
            return 1.0  # cannot draw a pair; vacuously consistent (was a ValueError)
        indices = list(updated)  # random.sample() rejects sets since Python 3.11
        conflict = 0
        for _ in range(1000):
            a, b = sample(indices, k=2)
            a, b = self.__swarm[a][0], self.__swarm[b][0]
            x = concatenate((a, b), axis=0)
            pred1 = self.__classifier.predict(X=[x])[0]
            x = concatenate((b, a), axis=0)
            pred2 = self.__classifier.predict(X=[x])[0]
            if pred1 == pred2:  # same label both ways -> contradiction
                conflict += 1
        return (1000 - conflict) / 1000

