import numpy as np

from optimizer.BaseOptimizer import BaseOptimizer
from utils.Ising import Ising


class Optimizer(BaseOptimizer):
    """Random-search optimizer with a greedy sign-flip fine-tuning pass.

    Every iteration resamples all particle positions uniformly inside the
    bounds, then occasionally runs a coordinate-wise local search on
    particles whose ``real_clpso_flag`` counter is high (presumably a
    stagnation counter maintained by the base class -- TODO confirm).
    """

    optimizer_name = 'RANDOM-SEARCH'

    def __init__(self, config: dict):
        # Base probability of triggering the local fine-tune step.
        self.fine_tune_rate = 0.001
        # Placeholder coupling matrix; presumably consumed elsewhere
        # (Ising-style evaluation) -- NOTE(review): verify against base class.
        self.ising_J = np.zeros((1, 1))
        super().__init__(config)

    def run_once(self, actions=None):
        """Run one random-search iteration over all particles.

        ``actions`` is accepted for interface compatibility and ignored.
        """
        self.clip()
        self.best_update()

        # Resample every particle uniformly within [pos_min, pos_max].
        self.xs = np.random.uniform(self.pos_min, self.pos_max, self.xs.shape)
        for i in range(self.n_part):
            # Only long-stagnated particles are eligible for fine-tuning.
            if self.real_clpso_flag[i] <= 70:
                continue
            # The current best particle gets a 6x higher trigger chance.
            chance = self.fine_tune_rate * (1 + 5 * (i == self.best_index))
            if np.random.random() < chance:
                self.fine_tune(i)

    def fine_tune(self, i):
        """Greedy coordinate search: flip one coordinate's sign at a time.

        Sweeps the dimensions (resuming at the last improving dimension)
        and accepts the first sign flip that lowers the fitness; repeats
        whole sweeps until no single flip improves, then writes the best
        point back into particle ``i``.
        """
        print(f'finetune {i}')
        candidate = self.xs[i].copy()
        original_fit = self.fits[i]
        best_x = candidate
        best_fit = original_fit
        start_dim = 0
        fine_epoch = 0
        improved = True
        while improved:
            improved = False
            fine_epoch += 1
            for offset in range(self.n_dim):
                # Rotate the scan so it resumes at the last improving dim.
                dim = (offset + start_dim) % self.n_dim
                trial = candidate.copy()
                trial[dim] = - trial[dim]
                new_fit = self.evaluate(trial)
                if new_fit < best_fit:
                    best_x = trial
                    best_fit = new_fit
                    improved = True
                    print(f'fine_epoch:{fine_epoch} best:{new_fit}')
                    start_dim = dim
                    break

            if improved:
                candidate = best_x
        # Commit only if the local search actually found a better point.
        if best_fit != original_fit:
            self.xs[i] = best_x
            self.fits[i] = best_fit