
from random import randrange
from abstract_search import LocalSearch
from algorithmState import *
from data import  load_hw3_data_1
from numpy import *
from classifier import *
from CrossValidation import create_cross_validation_idxs
class FirstChoiceLocalSearch(LocalSearch):
    """First-choice hill climbing.

    From the current state, successor states are evaluated in random order
    and the first one that improves the current score by more than
    SEARCH_THRESHOLD is adopted. The search stops (at a local optimum) when
    no successor improves on the current state.
    """

    # Minimum improvement required to accept a successor; guards against
    # accepting moves whose gain is only floating-point noise.
    SEARCH_THRESHOLD = 1e-10

    def __init__(self, starting_state):
        super(FirstChoiceLocalSearch, self).__init__(starting_state)

    def search(self, evaluation_set, evaluation_set_labels, *args, **kwargs):
        """Run first-choice local search and return the final (best) state.

        evaluation_set / evaluation_set_labels are forwarded to each
        candidate state's evaluate() to score it. Returns the state object
        reached when no improving successor exists.
        """
        # NOTE: self._LocalSearch__current_state is the name-mangled private
        # attribute of the LocalSearch base class.
        current_value = self._LocalSearch__current_state.evaluate(
            evaluation_set, evaluation_set_labels)
        while True:
            candidates = self._LocalSearch__current_state.get_next_states()
            improved = False
            # Try candidates in random order; discard ones that don't improve.
            while candidates:
                candidate = candidates.pop(randrange(len(candidates)))
                next_state_value = candidate[0].evaluate(
                    evaluation_set, evaluation_set_labels)
                print("next value: {} current value: {}, diff: {}".format(
                    next_state_value, current_value,
                    next_state_value - current_value))
                if next_state_value - current_value > self.SEARCH_THRESHOLD:
                    # First improving successor found: move to it and restart
                    # candidate generation from the new state.
                    self._LocalSearch__current_state = candidate[0]
                    current_value = next_state_value
                    improved = True
                    break
            if not improved:
                # No successor improves on the current state: local optimum.
                return self._LocalSearch__current_state


    
    
if __name__ == "__main__":

    # Load the train / evaluation / test splits.
    # (renamed `eval` -> `eval_set` to avoid shadowing the builtin)
    train, eval_set, test = load_hw3_data_1()
    train_data, train_labels = train

    # Convert to matrix form (of type numpy.array).
    data_matrix = array(train_data)
    label_array = array(train_labels)
    initial_data = (data_matrix, label_array)

    # Initialize the algorithm whose sample/feature subsets we optimize.
    algorithm_to_optimize = KNearestNeighbours(3)
    # algorithm_to_optimize = DecisionTree()

    # Operator index ranges: one operator per sample and per feature.
    num_samples = len(train_labels)
    num_features = len(train_data[0])

    # NOTE(review): the original iterated range(num_features) for the sample
    # operators too, leaving num_samples unused — fixed to range(num_samples)
    # to match the sets passed to algorithmState below.
    sample_operators = [sample_operator_i(i) for i in range(num_samples)]
    feature_operators = [feature_operator_i(i) for i in range(num_features)]

    # Pack them in Operators objects (defined in the algorithmState module):
    # first argument is the sample-index set, second the feature-index set.
    all_legal_operators = Operators(set(range(num_samples)), set(range(num_features)))
    all_operators = Operators(sample_operators, feature_operators)

    # Initialize the first state with all samples and all features selected.
    initial_state = algorithmState(
        set(range(num_samples)),
        set(range(num_features)),
        all_legal_operators,
        all_operators,
        initial_data,
        algorithm_to_optimize)

    eval_data, eval_labels = eval_set

    # Optimize the sample/feature selection on the evaluation split.
    optimizer = FirstChoiceLocalSearch(initial_state)
    best_state = optimizer.search(eval_data, eval_labels)

    # Test the optimized algorithm against the basic (unoptimized) one.
    test_data, test_labels = test
    optimized_score = best_state.evaluate(test_data, test_labels)
    optimized_samples = best_state.current_samples
    optimized_features = best_state.current_features

    basic_learner = KNearestNeighbours(3)
    # basic_learner = DecisionTree()
    basic_state = algorithmState(
        set(range(num_samples)),
        set(range(num_features)),
        all_legal_operators,
        all_operators,
        initial_data,
        basic_learner)

    basic_score = basic_state.evaluate(test_data, test_labels)
    basic_samples = basic_state.current_samples
    basic_features = basic_state.current_features

    print("optimized score: {}, basic score: {}".format(optimized_score, basic_score))
    print("optimized samples (there are {}): {}\nbasic samples (there are {}): {}".format(
        len(optimized_samples), optimized_samples, len(basic_samples), basic_samples))
    print("optimized features: {}\nbasic features: {}".format(optimized_features, basic_features))
    # Removed leftover `print "noa"` debug line: Python 2 statement syntax,
    # a SyntaxError under Python 3.











