import math

import numpy as np

from optimizer.BaseOptimizer import BaseOptimizer
from utils.Ising import Ising


class Optimizer(BaseOptimizer):
    """Simulated Annealing (SA) optimizer.

    Uses a fast-SA style, temperature-scaled neighbour move and the
    Metropolis acceptance criterion.  Unlike classic SA, the temperature
    is cooled only after the best-so-far fitness has stagnated for
    ``max_stay`` consecutive calls to :meth:`run_once`.

    Adapted from:
    https://github.com/guofei9987/scikit-opt/blob/master/sko/SA.py

    NOTE(review): attributes ``n_dim``, ``n_part``, ``xs``, ``fits``,
    ``history_best_x``, ``history_best_fit`` and the ``evaluate`` method
    are presumably provided by ``BaseOptimizer`` — confirm against that
    class, which is not visible here.
    """

    optimizer_name = 'SA'

    def __init__(self, config: dict):
        # ``ising_J`` must exist before the base class runs its setup.
        self.ising_J = np.zeros((1, 1))
        super().__init__(config)

        # Annealing hyper-parameters.  Previously hard-coded; now
        # overridable through ``config`` with the same defaults, so
        # existing callers see identical behavior.
        self.T = config.get('T_initial', 100)            # initial temperature
        self.cooling_rate = config.get('cooling_rate', 0.7)  # geometric decay factor
        self.step_scale = config.get('step_scale', 20)   # neighbour-move magnitude
        self.max_stay = config.get('max_stay_counter', 100)  # stagnation limit

        # Per-generation record of the best solution / fitness so far.
        self.generation_best_X = [self.history_best_x]
        self.generation_best_Y = [self.history_best_fit]
        self.stay_counter = 0

    def get_new_x(self, x):
        """Return a neighbour of ``x`` via a temperature-scaled perturbation.

        The step size shrinks as ``T`` decreases (fast-SA style move).
        """
        u = np.random.uniform(-1, 1, size=self.n_dim)
        step = np.sign(u) * self.T * ((1 + 1.0 / self.T) ** np.abs(u) - 1.0)
        return x + self.step_scale * step

    def cool_down(self):
        """Geometric cooling: ``T <- cooling_rate * T``."""
        self.T = self.T * self.cooling_rate

    def isclose(self, a, b, rel_tol=1e-09, abs_tol=1e-30):
        """Float near-equality test.

        Delegates to :func:`math.isclose`, which implements the exact
        formula the original hand-rolled:
        ``abs(a - b) <= max(rel_tol * max(abs(a), abs(b)), abs_tol)``.
        """
        return math.isclose(a, b, rel_tol=rel_tol, abs_tol=abs_tol)

    def run_once(self, actions=None):
        """Advance every particle one SA step.

        For each particle: propose a neighbour, evaluate it, and accept
        it per the Metropolis criterion.  Afterwards, record the global
        best and cool the temperature if the best has stagnated.
        """
        for i in range(self.n_part):
            x_new = self.get_new_x(self.xs[i])
            y_new = self.evaluate(x_new)

            # Metropolis: always accept improvements (df < 0); accept a
            # worse move with probability exp(-df / T).  The short-circuit
            # ``or`` avoids evaluating exp() for improvements.
            df = y_new - self.fits[i]
            if df < 0 or np.exp(-df / self.T) > np.random.rand():
                self.xs[i], self.fits[i] = x_new, y_new
                if y_new < self.history_best_fit:
                    self.history_best_x, self.history_best_fit = x_new, y_new

        self.generation_best_Y.append(self.history_best_fit)
        self.generation_best_X.append(self.history_best_x)

        # Count consecutive generations with (numerically) unchanged best.
        if self.isclose(self.generation_best_Y[-1], self.generation_best_Y[-2]):
            self.stay_counter += 1
        else:
            self.stay_counter = 0

        # Cool down only after prolonged stagnation, then restart the count.
        if self.stay_counter > self.max_stay:
            self.cool_down()
            self.stay_counter = 0
