import numpy as np

class Base_model(object):
    """Skeleton for Keras-style models.

    Handles hyper-parameter merging, lazy build/compile of the underlying
    ``self.tf_model``, infinite batch generation and generator-based training.
    Subclasses must implement :meth:`get_default_hpyer_parameters` and
    :meth:`build_model_arc`.
    """

    @classmethod
    def get_default_hpyer_parameters(cls):
        """Return the default hyper-parameter dict for this model.

        NOTE(review): the name is misspelled ("hpyer") but is kept unchanged
        for backward compatibility — subclasses override this exact name.
        """
        raise NotImplementedError

    def __init__(self, hyper_parameters=None):
        """Merge user-supplied hyper-parameters over the subclass defaults.

        Args:
            hyper_parameters: optional dict; keys override the defaults
                returned by :meth:`get_default_hpyer_parameters`.
        """
        self.tf_model = None
        self.hyper_parameters = self.get_default_hpyer_parameters()
        if hyper_parameters:
            self.hyper_parameters.update(hyper_parameters)

    def build_model(self):
        """Build and compile the model exactly once (idempotent)."""
        if self.tf_model is None:
            self.build_model_arc()
            self.compile_model()

    def build_model_arc(self):
        """Create the network architecture and assign it to ``self.tf_model``."""
        raise NotImplementedError

    def compile_model(self, **kwargs):
        """Compile ``self.tf_model``, filling in sensible defaults.

        Defaults (applied when the kwarg is missing OR explicitly ``None``):
        ``loss='categorical_crossentropy'``, ``optimizer='adam'``,
        ``metrics=['accuracy']``. Also prints the model summary.
        """
        if kwargs.get('loss') is None:
            kwargs['loss'] = 'categorical_crossentropy'

        if kwargs.get("optimizer") is None:
            kwargs['optimizer'] = 'adam'

        if kwargs.get('metrics') is None:
            kwargs['metrics'] = ['accuracy']

        self.tf_model.compile(**kwargs)
        self.tf_model.summary()

    def predict(self, x_data, batch_size=32, debug_info=False, predict_kwargs=None):
        """Run inference and return the raw model predictions.

        Args:
            x_data: model input(s) accepted by ``tf_model.predict``.
            batch_size: prediction batch size.
            debug_info: unused; kept for interface compatibility.
            predict_kwargs: extra kwargs forwarded to ``tf_model.predict``.
        """
        if predict_kwargs is None:
            predict_kwargs = {}

        pred = self.tf_model.predict(x_data, batch_size=batch_size, **predict_kwargs)
        return pred

    @staticmethod
    def _data_length(data):
        """Number of samples in ``data``; supports tuple-of-arrays input."""
        return len(data[0]) if isinstance(data, tuple) else len(data)

    @staticmethod
    def _batch_count(sample_count, batch_size):
        """Ceiling division: batches needed to cover ``sample_count`` samples.

        Bug fix: the previous ``// batch_size + 1`` produced an extra EMPTY
        batch whenever ``sample_count`` was an exact multiple of
        ``batch_size``.
        """
        return max(1, (sample_count + batch_size - 1) // batch_size)

    def get_data_generator(self, x_data, y_data, batch_size=32, shuffle=True):
        """Yield ``(x_batch, y_batch)`` tuples forever for generator training.

        Args:
            x_data: array of samples, or a tuple of per-input arrays for
                multi-input models (consistent with :meth:`fit`).
            y_data: label array indexable by sample index.
            batch_size: samples per yielded batch.
            shuffle: reshuffle the sample order at the start of each epoch.
        """
        sample_count = self._data_length(x_data)
        index_list = np.arange(sample_count)
        page_count = self._batch_count(sample_count, batch_size)

        while True:
            if shuffle:
                np.random.shuffle(index_list)

            for page in range(page_count):
                start_index = page * batch_size
                end_index = start_index + batch_size
                target_index = index_list[start_index:end_index]
                # Tuple input: index every component array the same way.
                if isinstance(x_data, tuple):
                    x_tensor = tuple(item[target_index] for item in x_data)
                else:
                    x_tensor = x_data[target_index]
                y_tensor = y_data[target_index]
                yield (x_tensor, y_tensor)

    def fit(self, x_train, y_train,
            x_validate=None, y_validate=None,
            batch_size=32,
            epochs=10,
            callbacks=None,
            fit_kwargs=None,
            shuffle=True):
        """Train the model with generator-fed batches.

        Args:
            x_train: training inputs (array or tuple of arrays).
            y_train: training labels.
            x_validate / y_validate: optional validation split.
            batch_size: batch size for both training and validation.
            epochs: number of epochs.
            callbacks: list of Keras callbacks, or None.
            fit_kwargs: extra kwargs forwarded to ``fit_generator``.
            shuffle: reshuffle sample order each epoch.

        Returns:
            The Keras ``History`` object from training.

        Bug fixes: ``if x_validate:`` raised ``ValueError`` on multi-element
        numpy arrays (truth value is ambiguous) — replaced with an explicit
        ``is not None`` check; step counts now use ceiling division so each
        epoch covers the data exactly once with no empty batch.
        """
        self.build_model()
        train_generator = self.get_data_generator(x_train, y_train, batch_size, shuffle)

        if fit_kwargs is None:
            fit_kwargs = {}

        validation_generator = None
        validation_steps = None
        if x_validate is not None:
            validation_generator = self.get_data_generator(x_validate,
                                                           y_validate,
                                                           batch_size,
                                                           shuffle)
            validation_steps = self._batch_count(self._data_length(x_validate),
                                                 batch_size)

        steps_per_epoch = self._batch_count(self._data_length(x_train), batch_size)

        # NOTE(review): fit_generator is deprecated in TF2 (Model.fit accepts
        # generators directly); kept to preserve the existing interface.
        return self.tf_model.fit_generator(
            train_generator,
            steps_per_epoch=steps_per_epoch,
            epochs=epochs,
            validation_data=validation_generator,
            validation_steps=validation_steps,
            callbacks=callbacks,
            **fit_kwargs
        )


