from Goldenberry.optimization.base.GbBaseOptimizer import GbBaseOptimizer
from Goldenberry.optimization.base.GbSolution import GbIndividual
from Goldenberry.optimization.ga.GbGaMgr.IGASetup import IGASetup
import numpy as np
import random as ran
import copy as cp

class GeneticAlgorithm(GbBaseOptimizer, IGASetup):
    """Configurable genetic algorithm with two evolution strategies.

    The algorithm is assembled from injected collaborators (see the
    IGASetup setters): a cost function evaluated on batches of candidates,
    a solution representation (encode/decode between geno- and phenotype),
    an initial-population generator, a mutator, a crosser and a selector.
    ``search()`` dispatches to a generational or a steady-state loop
    depending on ``evol_strategy`` and returns the best individual found.
    """

    # --- run configuration (populated by setup()) ---
    cand_size = None          # population size; expected even (see generate_offspring_pop)
    max_iters = 0             # maximum number of generations
    crossover_method = None   # identifier forwarded to the crosser
    crossover_prob = 0        # probability a parent pair is crossed
    mutation_method = None    # identifier forwarded to the mutator
    mutation_prob = 0         # per-offspring mutation probability
    selection_method = None   # identifier forwarded to the selector
    evol_strategy = None      # 'Generational' selects generational_search; anything else steady-state
    callback_func = None      # optional progress callback(best, fraction_done)
    stop = False              # external stop flag checked by done(); cleared by reset()
    # NOTE: var_size is exposed as a read-only property below (delegates to the
    # cost function); the old shadowed class attribute was removed.

    # --- injected collaborators ---
    _cost_func = None
    _solution_rep = None
    _mutator = None
    _crosser = None
    _selector = None
    _init_pop = None
    _stop = False             # kept for backward compatibility; run-time code uses self.stop

    # --- GbBaseOptimizer interface ---
    def setup(self, cand_size = 20, max_iters = 100, crossover_method = None, crossover_prob = 0.0, mutation_method = None, mutation_prob = 0.0, selection_method = None, evol_strategy = None, callback_func = None, **kwargs):
        """Store the run parameters and reset run-time state.

        Any extra keyword arguments are copied verbatim onto the instance
        (legacy extension hook used by callers that configure by name).
        """
        self.cand_size = cand_size
        self.max_iters = max_iters
        self.crossover_method = crossover_method
        self.crossover_prob = crossover_prob
        self.mutation_method = mutation_method
        self.mutation_prob = mutation_prob
        self.selection_method = selection_method
        self.evol_strategy = evol_strategy
        # reset() clears self.stop and self.iters; the old dead local
        # assignment `stop = False` was removed.
        self.reset()
        self.callback_func = callback_func
        self.__dict__.update(**kwargs)

    def reset(self):
        """Clear the iteration counter, the stop flag and cost statistics."""
        self.iters = 0
        self.stop = False
        if self.cost_func is not None:
            self.cost_func.reset_statistics()

    def ready(self):
        """Return True when the mandatory collaborators are configured.

        Only the cost function and the solution representation are checked;
        the remaining collaborators are validated lazily by search().
        """
        return self.cost_func is not None and self._solution_rep is not None

    def search(self):
        """Run the configured evolution strategy and return the best individual."""
        self.reset()
        var_size = self.var_size
        init_pop_type = self._init_pop.get_type()
        init_pop_params = self._init_pop.get_params()

        range_min = self._solution_rep.get_range_min()
        range_max = self._solution_rep.get_range_max()
        solution_domain = self._solution_rep.get_solution_domain()
        encode_function = self._solution_rep.get_encode_function()

        self._selector.set_selection(self.selection_method)
        self._crosser.set_crossover(self.crossover_method)
        self._mutator.set_mutation(self.mutation_method)

        if self.evol_strategy == 'Generational':
            return self.generational_search(var_size, init_pop_type, init_pop_params,
                                            range_min, range_max, solution_domain,
                                            encode_function)
        return self.steady_state_search(var_size, init_pop_type, init_pop_params,
                                        range_min, range_max, solution_domain,
                                        encode_function)

    # --- IGASetup interface ---
    @property
    def cost_func(self):
        """The injected cost function (None until configured)."""
        return self._cost_func

    @cost_func.setter
    def cost_func(self, value):
        self._cost_func = value

    def set_solution_rep(self, value):
        """Inject the solution representation (encode/decode, ranges, domain)."""
        self._solution_rep = value

    def set_init_pop(self, value):
        """Inject the initial-population generator."""
        self._init_pop = value

    def set_mutator(self, value):
        """Inject the mutation operator."""
        self._mutator = value

    def set_crosser(self, value):
        """Inject the crossover operator."""
        self._crosser = value

    def set_selector(self, value):
        """Inject the parent-selection operator."""
        self._selector = value

    def get_mutator(self):
        """Return the injected mutation operator."""
        return self._mutator

    def get_crosser(self):
        """Return the injected crossover operator."""
        return self._crosser

    def get_selector(self):
        """Return the injected parent-selection operator."""
        return self._selector

    @property
    def var_size(self):
        """Number of decision variables, delegated to the cost function.

        Returns None while no cost function is configured.
        """
        if self.cost_func is not None:
            return self.cost_func.var_size
        return None

    # --- private search strategies ---
    def generational_search(self, var_size, init_pop_type, init_pop_params, range_min, range_max, solution_domain, encode_function):
        """Generational strategy: each offspring population fully replaces its parents.

        Returns the best GbIndividual seen across all generations, optionally
        re-evaluated once more when the cost function requests retest_last
        (useful for noisy objectives).
        """
        best = GbIndividual(None, float('-Inf'))
        population = self._init_pop.generate_init_pop(
            type = init_pop_type, params = init_pop_params,
            solution_domain = solution_domain, cand_size = self.cand_size,
            var_size = var_size, range_min = range_min, range_max = range_max)
        self._solution_rep.encode(population)
        while not self.done():
            self.iters += 1
            candidates = self.dump_pop_phenotypes(population)
            fits = self.cost_func(candidates)
            self.populate_fits(population, fits)
            # <= keeps the most recent individual among equal-cost bests,
            # matching the original behavior.
            if best.cost <= np.amax(fits):
                best = population[np.argmax(fits)]
            parents = self._selector.select_parents(self.cand_size, population, fits)
            population = self.generate_offspring_pop(parents)
            self._solution_rep.decode(population)

            if self.callback_func is not None:
                self.callback_func(best, self.iters / float(self.max_iters))

        # Optional final re-evaluation of the winner (e.g. noisy cost functions).
        if self.cost_func.retest_last:
            best.cost = self.cost_func(np.array([best.params]), is_last = True)[0]

        if self.callback_func is not None:
            self.callback_func(best, 1.0)
        return best

    def steady_state_search(self, var_size, init_pop_type, init_pop_params, range_min, range_max, solution_domain, encode_function):
        """Steady-state strategy: parents compete with offspring for survival.

        Each generation the elite of (offspring + parents) becomes the next
        population, so good parents can survive indefinitely. Returns the
        best GbIndividual found.
        """
        best = GbIndividual(None, float('-Inf'))
        population = self._init_pop.generate_init_pop(
            type = init_pop_type, params = init_pop_params,
            solution_domain = solution_domain, cand_size = self.cand_size,
            var_size = var_size, range_min = range_min, range_max = range_max)
        # Unlike the generational loop, the initial population is evaluated
        # once up front so elitism can compare parents with offspring.
        candidates = self.dump_pop_phenotypes(population)
        fits = self.cost_func(candidates)
        self.populate_fits(population, fits)
        self._solution_rep.encode(population)
        while not self.done():
            self.iters += 1
            if best.cost <= np.amax(fits):
                best = population[np.argmax(fits)]
            parents = self._selector.select_parents(self.cand_size, population, fits)
            offspring_pop = self.generate_offspring_pop(parents)
            self._solution_rep.decode(offspring_pop)
            candidates = self.dump_pop_phenotypes(offspring_pop)

            fits_new_pop = self.cost_func(candidates)
            self.populate_fits(offspring_pop, fits_new_pop)
            # Elitist replacement over the union of offspring and parents.
            population = self.get_elite(self.cand_size, offspring_pop + parents)
            fits = self.dump_cost(population)
            if self.callback_func is not None:
                self.callback_func(best, self.iters / float(self.max_iters))

        # Optional final re-evaluation of the winner (e.g. noisy cost functions).
        if self.cost_func.retest_last:
            best.cost = self.cost_func(np.array([best.params]), is_last = True)[0]

        if self.callback_func is not None:
            self.callback_func(best, 1.0)
        return best

    def done(self):
        """Return True once max_iters generations ran or a stop was requested."""
        return self.iters >= self.max_iters or self.stop

    def dump_pop_phenotypes(self, population):
        """Stack the phenotypes (params) of all individuals into a 2-D array.

        :param population: list of individuals with a ``params`` array each
        :returns: numpy array of shape (len(population), var_size)
        """
        # Single allocation instead of the old O(n^2) repeated concatenate;
        # also returns a proper ndarray for a one-element population.
        return np.array([individual.params for individual in population])

    def dump_cost(self, population):
        """Return the fitness of the population as a numpy array.

        :param population: list of individuals
        :returns: 1-D float numpy array of the population fitness
        """
        # dtype=float matches the old np.append-based accumulation.
        return np.array([individual.cost for individual in population], dtype=float)

    def populate_fits(self, population, fits):
        """Write each fitness value back onto the matching individual's cost."""
        for individual, fit in zip(population, fits):
            individual.cost = fit

    def get_elite(self, num_indi, population):
        """Return the ``num_indi`` fittest individuals, best first.

        A stable sort on the negated costs reproduces the tie behavior of
        repeated argmax extraction (earlier individuals win ties).
        """
        costs = self.dump_cost(population)
        order = np.argsort(-costs, kind='stable')[:num_indi]
        return [population[i] for i in order]

    def generate_offspring_pop(self, parents):
        """Build a new population of cand_size individuals from the parents.

        Parents are consumed in consecutive pairs; with probability
        ``crossover_prob`` a pair is crossed and both offspring mutated,
        otherwise the pair is passed through unchanged.

        NOTE(review): assumes cand_size is even and len(parents) >= cand_size
        -- an odd cand_size raises IndexError on the pass-through branch.
        """
        offspring_pop = []
        for x in range(0, self.cand_size, 2):
            if ran.uniform(0, 1) <= self.crossover_prob:
                offspring1, offspring2 = self._crosser.cross(parents[x:x+2])
                offspring_pop.append(self._mutator.mutate(offspring1, self.mutation_prob))
                offspring_pop.append(self._mutator.mutate(offspring2, self.mutation_prob))
            else:
                offspring_pop.append(parents[x])
                offspring_pop.append(parents[x+1])
        return offspring_pop