import numpy as np
# NOTE(review): fixed import — the pagmo2 project's Python module is named
# "pygmo" (there is no importable module called "pagmo2"); the rest of this
# file uses the pygmo API (pg.problem, pg.de, pg.algorithm, pg.population).
import pygmo as pg

"""
Use a meta-heuristic algorithm to find optimal hyper-parameters.
"""

# Benchmark problem the optimizer's hyper-parameters are tuned on:
# the 100-dimensional Rosenbrock function.
problem = pg.problem(pg.rosenbrock(dim=100))


# NOTE(review): `use_named_args` and `dimensions` are not defined in this
# file — presumably `from skopt.utils import use_named_args` plus a skopt
# search-space definition; confirm against the rest of the project.
@use_named_args(dimensions=dimensions)
def meta_fitness(pop_size, CR, F, variant):
    """
    Meta-fitness for evaluating the performance of the
    optimizer on one or more problems.

    Runs pygmo Differential Evolution `num_runs` times with the given
    control parameters and returns the mean of the best fitness values
    found, which is the quantity the outer search minimizes.

    :param pop_size: Population size.
    :param CR: Control-parameter CR (crossover probability).
    :param F: Control-parameter F (differential weight).
    :param variant: DE variant, integer between 1 and 10.
    :return: Float to be minimized.
    """

    # Type-cast from int64 to int because pygmo requires that.
    pop_size = int(pop_size)
    variant = int(variant)

    # Print parameters.
    print("pop_size:", pop_size)
    print("CR: {0:.2f}".format(CR))
    print("F: {0:.2f}".format(F))
    # NOTE(review): `variant_names` is not defined in this file — presumably
    # a module-level mapping from variant number to a descriptive name.
    print("variant: {0} ({1})".format(variant,
                                      variant_names[variant]))

    # Number of generations for the optimizer's population, chosen so the
    # total number of problem fitness-evaluations (pop_size * generations)
    # stays roughly constant regardless of population size.
    # Floor at 1 so the optimizer still evolves at least once when
    # pop_size exceeds num_fitness_evals (integer division would give 0
    # generations, i.e. no evolution at all).
    num_generations = max(1, num_fitness_evals // pop_size)

    def _run():
        """Perform a single optimization run and return its best fitness."""
        # New optimizer instance using the given control parameters.
        uda = pg.de(gen=num_generations, CR=CR, F=F, variant=variant)
        optimizer = pg.algorithm(uda=uda)

        # Randomly initialized population of agents for this run.
        pop = pg.population(problem, size=pop_size)

        # Evolve the optimizer's population.
        pop = optimizer.evolve(pop)

        # Best fitness found in this optimization run.
        fitness = pop.champion_f[0]

        # Track the best solution found across all runs and all calls.
        global best_fitness, best_x
        if fitness < best_fitness:
            best_fitness = fitness
            best_x = np.copy(pop.champion_x)

        return fitness

    # Perform several independent optimization runs; averaging reduces the
    # noise from random population initialization.
    fitnesses = np.array([_run() for _ in range(num_runs)])

    # The meta-fitness is the mean of those fitness-values.
    # (Renamed from `meta_fitness` to avoid shadowing the function name.)
    mean_fitness = fitnesses.mean()

    # Print the meta-fitness.
    print("meta_fitness: {0:.2e}".format(mean_fitness))
    print()

    return mean_fitness


# Bayesian optimization of the meta-fitness over the hyper-parameter space.
# NOTE(review): `gp_minimize`, `dimensions` and `default_parameters` are not
# defined in this file — presumably `from skopt import gp_minimize` plus a
# search-space definition; confirm against the rest of the project.
search_result = gp_minimize(
    func=meta_fitness,
    dimensions=dimensions,
    acq_func='EI',  # Expected Improvement acquisition function.
    n_calls=50,
    x0=default_parameters,
)

# Report every evaluated parameter set, best (lowest) meta-fitness first.
ranked_results = sorted(zip(search_result.func_vals, search_result.x_iters))
for fitness_value, parameter_set in ranked_results:
    rounded_params = list(np.around(parameter_set, 2))
    print("Meta-fitness: {0:.2e}, Parameters: {1}".format(fitness_value,
                                                          rounded_params))
