# Copyright 2017 Phil Ferriere. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
# Copyright 2020 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
ckpt_mgr.py

Maintains a directory containing only the best n checkpoints.

Written by Domenick Poster, modifications by Phil Ferriere

Modifications licensed under the MIT License (see LICENSE for details)

Based on:
    - https://github.com/vonclites/checkmate/blob/master/checkmate.py
        Written by Domenick Poster, Copyright (C) 2018 Domenick Poster
        Licensed under MIT License
"""

from __future__ import absolute_import, division, print_function
import os
import glob
import json
import numpy as np
import tensorflow as tf


class BestCheckpointSaver(object):
    """Maintains a directory containing only the best n checkpoints

    Inside the directory is a best_checkpoints JSON file containing a dictionary
    mapping of the best checkpoint filepaths to the values by which the checkpoints
    are compared.  Only the best n checkpoints are contained in the directory and JSON file.

    This is a light-weight wrapper class only intended to work in simple,
    non-distributed settings.  It is not intended to work with the tf.Estimator
    framework.
    """

    def __init__(self, save_dir, save_file, num_to_keep=5, maximize=True, saver=None):
        """Creates a `BestCheckpointSaver`

        `BestCheckpointSaver` acts as a wrapper class around a `tf.train.Saver`

        Args:
            save_dir: The directory in which the checkpoint files will be saved
            save_file: The prefix of the checkpoint filenames
            num_to_keep: The number of best checkpoint files to retain
            maximize: Define 'best' values to be the highest values.  For example,
              set this to True if selecting for the checkpoints with the highest
              given accuracy.  Or set to False to select for checkpoints with the
              lowest given error rate.
            saver: A `tf.train.Saver` to use for saving checkpoints.  A default
              `tf.train.Saver` will be created if none is provided.
        """
        self._num_to_keep = num_to_keep
        self._save_dir = save_dir
        self._save_file = save_file
        self._save_path = os.path.join(save_dir, f'{save_file}.ckpt')
        self._maximize = maximize
        # Only instantiate a default Saver when the caller did not supply one;
        # max_to_keep=None because this class does its own retention management.
        self._saver = saver if saver else tf.train.Saver(
            max_to_keep=None,
            save_relative_paths=True
        )

        # exist_ok avoids the check-then-create race of the former
        # `if not os.path.exists(...): os.makedirs(...)` pattern.
        os.makedirs(save_dir, exist_ok=True)
        self.best_checkpoints_file = os.path.join(save_dir, 'best_checkpoints')

    def save(self, ranking_value, sess, global_step_tensor):
        """Updates the set of best checkpoints based on the given result.

        Args:
            ranking_value: The ranking value by which to rank the checkpoint.
            sess: A tf.Session to use to save the checkpoint
            global_step_tensor: A `tf.Tensor` represent the global step

        Returns:
            The path prefix of the newly saved checkpoint, or None if the
            ranking value did not beat any of the retained checkpoints.
        """
        global_step = sess.run(global_step_tensor)
        current_ckpt = f'{self._save_file}.ckpt-{global_step}'
        ranking_value = float(ranking_value)

        # First save ever: bootstrap the JSON bookkeeping file.
        if not os.path.exists(self.best_checkpoints_file):
            self._save_best_checkpoints_file({current_ckpt: ranking_value})
            return self._saver.save(sess, self._save_path, global_step_tensor)

        best_checkpoints = self._load_best_checkpoints_file()

        # Still room in the top-n set: keep the checkpoint unconditionally.
        if len(best_checkpoints) < self._num_to_keep:
            best_checkpoints[current_ckpt] = ranking_value
            self._save_best_checkpoints_file(best_checkpoints)
            return self._saver.save(sess, self._save_path, global_step_tensor)

        # Set is full: keep only if the new value beats at least one
        # retained value (equivalent to the former `not all(...)` form).
        if self._maximize:
            should_save = any(ranking_value > v for v in best_checkpoints.values())
        else:
            should_save = any(ranking_value < v for v in best_checkpoints.values())
        if not should_save:
            return None

        best_checkpoint_list = self._sort(best_checkpoints)

        # The list is sorted best-first, so the last entry is the one to evict.
        worst_checkpoint = os.path.join(self._save_dir,
                                        best_checkpoint_list.pop(-1)[0])
        self._remove_outdated_checkpoint_files(worst_checkpoint)
        self._update_internal_saver_state(best_checkpoint_list)

        best_checkpoints = dict(best_checkpoint_list)
        best_checkpoints[current_ckpt] = ranking_value
        self._save_best_checkpoints_file(best_checkpoints)

        return self._saver.save(sess, self._save_path, global_step_tensor)

    def restore(self, sess, ckpt):
        """Restore from a checkpoint

        Args:
            sess: A tf.Session to use to save the checkpoint
            ckpt: Checkpoint file to restore from
        """
        self._saver.restore(sess, ckpt)

    def best_checkpoint(self, best_checkpoint_dir, maximize=True):
        """ Returns filepath to the best checkpoint

        Reads the best_checkpoints file in the best_checkpoint_dir directory.
        Returns the filepath in the best_checkpoints file associated with
        the highest value if maximize is True, or the filepath
        associated with the lowest value if maximize is False.

        Args:
            best_checkpoint_dir: Directory containing best_checkpoints JSON file
            maximize: If True, select the filepath associated
              with the highest value.  Otherwise, select the filepath associated
              with the lowest value.

        Returns:
            The full path to the best checkpoint file, or None if the
            best_checkpoints file is missing or empty.
        """
        best_checkpoints_file = os.path.join(best_checkpoint_dir, 'best_checkpoints')
        if not os.path.exists(best_checkpoints_file):
            return None
        with open(best_checkpoints_file, 'r') as f:
            best_checkpoints = json.load(f)
        if not best_checkpoints:
            # Guard against an empty/corrupt JSON dict instead of raising.
            return None
        # A single max/min pass replaces sorting the whole mapping; ties
        # resolve to the first-inserted key, matching the stable sort before.
        selector = max if maximize else min
        best_ckpt = selector(best_checkpoints, key=best_checkpoints.get)
        return os.path.join(best_checkpoint_dir, best_ckpt)

    def _save_best_checkpoints_file(self, updated_best_checkpoints):
        """Overwrites the JSON bookkeeping file with the given mapping."""
        with open(self.best_checkpoints_file, 'w') as f:
            json.dump(updated_best_checkpoints, f, indent=3)

    def _remove_outdated_checkpoint_files(self, worst_checkpoint):
        """Deletes the evicted checkpoint's data files and TF's state file.

        The 'checkpoint' state file may legitimately be absent (e.g. removed
        externally), so its deletion is guarded rather than allowed to raise
        FileNotFoundError.
        """
        state_file = os.path.join(self._save_dir, 'checkpoint')
        if os.path.exists(state_file):
            os.remove(state_file)
        # Checkpoints are stored as several files sharing one prefix
        # (.index, .meta, .data-*); remove them all.
        for ckpt_file in glob.glob(worst_checkpoint + '.*'):
            os.remove(ckpt_file)

    def _update_internal_saver_state(self, best_checkpoint_list):
        """Syncs the wrapped Saver's recent-checkpoint list with ours."""
        best_checkpoint_files = [
            (ckpt[0], np.inf)  # TODO: Try to use actual file timestamp
            for ckpt in best_checkpoint_list
        ]
        self._saver.set_last_checkpoints_with_time(best_checkpoint_files)

    def _load_best_checkpoints_file(self):
        """Returns the {checkpoint_name: ranking_value} mapping from disk."""
        with open(self.best_checkpoints_file, 'r') as f:
            best_checkpoints = json.load(f)
        return best_checkpoints

    def _sort(self, best_checkpoints):
        """Returns (name, value) pairs sorted best-first per self._maximize."""
        best_checkpoints = [
            (ckpt, best_checkpoints[ckpt])
            for ckpt in sorted(best_checkpoints,
                               key=best_checkpoints.get,
                               reverse=self._maximize)
        ]
        return best_checkpoints
