if '__main__' == __name__:
    import transformers
    import logging
    
    logging.basicConfig(level=logging.DEBUG)

    from transformers import AutoModelForSequenceClassification
    from transformers import AutoTokenizer
    from transformers import AutoConfig
    from transformers import DataCollatorWithPadding
    from transformers import Trainer
    from transformers import TrainerState
    from transformers import TrainerControl
    from transformers import TrainingArguments
    from transformers.trainer_utils import PREFIX_CHECKPOINT_DIR


    class State:
        """Minimal stand-in for a trainer state object.

        Exposes only the two attributes the log callback handlers read:
        ``log_history`` (list of logged dicts) and ``global_step`` (int).
        """

        def __init__(self) -> None:
            # Fresh per-instance containers; start at step zero.
            self.log_history, self.global_step = [], 0


    class LogCallback(transformers.TrainerCallback):
        """Trainer callback that debug-logs step/log-history info at lifecycle events.

        See: https://discuss.huggingface.co/t/logs-of-training-and-validation-loss/1974/3

        NOTE: in the original code this docstring was a bare string expression
        placed after ``__init__``, which made it a no-op statement rather than
        the class docstring; it has been moved here so it binds to ``__doc__``.
        """

        def __init__(self, log_path, *args, **kwargs):
            """Store *log_path* and forward any extra args to TrainerCallback."""
            super().__init__(*args, **kwargs)
            # Path the caller intends logs to be associated with (only echoed
            # in debug output here; nothing is written to it by this class).
            self.log_path = log_path

        def on_step_end(self, args, state, control, **kwargs):
            """Log progress after each training step.

            See: https://discuss.huggingface.co/t/logs-of-training-and-validation-loss/1974/6
            """
            logging.debug('on_step_end step: %d, len of log_history: %d, log path: %s', state.global_step, len(state.log_history), self.log_path)

        def on_epoch_end(self, args: TrainingArguments, state: TrainerState, control: TrainerControl, **kwargs):
            """Log progress at the end of each epoch."""
            logging.debug('on_epoch_end step: %d, len of log_history: %d', state.global_step, len(state.log_history))

        def on_evaluate(self, args: TrainingArguments, state: TrainerState, control: TrainerControl, **kwargs):
            """Log progress after an evaluation run."""
            logging.debug('on_evaluate step: %d, len of log_history: %d', state.global_step, len(state.log_history))

        def on_train_end(self, args, state, control, **kwargs):
            """Log the final state when training finishes."""
            logging.debug('on_train_end step: %d, len of log_history: %d', state.global_step, len(state.log_history))

        def on_save(self, args: TrainingArguments, state: TrainerState, control: TrainerControl, **kwargs):
            """Log the checkpoint folder name whenever a checkpoint is saved."""
            # Mirrors the Trainer's own checkpoint naming: "checkpoint-<global_step>".
            checkpoint_folder = f"{PREFIX_CHECKPOINT_DIR}-{state.global_step}"
            logging.debug('on_save step: %d, len of log_history: %d, checkpoint_folder: %s', state.global_step, len(state.log_history), checkpoint_folder)


    # Smoke-test: invoke on_step_end with stand-in arguments. Direct attribute
    # access replaces getattr(obj, 'literal'), which is a needless indirection
    # for a statically-known name. `args` and `control` are unused by
    # on_step_end, so bare object() placeholders suffice.
    callback = LogCallback('/tmp/aaa000')
    callback.on_step_end(object(), State(), object())
    