# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
# Copyright 2020 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import os
os.system('pip install easydict')
from random import shuffle
import tensorflow as tf
from tensorflow.python.keras import backend as K
from tensorflow.core.protobuf.rewriter_config_pb2 import RewriterConfig
from tensorflow.keras.callbacks import ModelCheckpoint

from npu_bridge.estimator import npu_ops

# Configure a TF session for the Ascend NPU and route Keras through it.
# The NpuOptimizer graph rewriter must be registered on the session config
# before the session is created, and Keras must be pointed at that session.
tf.logging.set_verbosity(tf.logging.ERROR)
sess_config = tf.ConfigProto()
custom_op = sess_config.graph_options.rewrite_options.custom_optimizers.add()
custom_op.name = "NpuOptimizer"
# "use_off_line" enables execution on the NPU device rather than host
# fallback — per npu_bridge convention; TODO confirm against CANN docs.
custom_op.parameter_map["use_off_line"].b = True
# Disable TF's generic remapping pass; it conflicts with the NPU rewriter.
sess_config.graph_options.rewrite_options.remapping = RewriterConfig.OFF
sess = tf.Session(config=sess_config)
K.set_session(sess)

from pathlib import Path
from data.read_tfrecord import get_dataset
# ####################################### modelarts  ##########################################
import moxing as mox

# NOTE: easydict is already installed by the `os.system('pip install easydict')`
# call at the top of this script; the duplicate install that used to live here
# was removed.
from model import SqueezeNet
from config import _C as cfg

# Stage all inputs from OBS onto the local ModelArts cache before training:
# train/val/test tfrecord directories plus any pre-existing checkpoint.
basedir = os.path.abspath(os.path.dirname(__file__))
print('Base dir is:', os.listdir(basedir))

_obs_to_cache = [
    (cfg.TRAINING_FILENAMES_OBS_PATH, cfg.TRAINING_FILENAMES_cache_PATH),
    (cfg.VALIDATION_FILENAMES_OBS_PATH, cfg.VALIDATION_FILENAMES_cache_PATH),
    (cfg.TEST_FILENAMES_OBS_PATH, cfg.TEST_FILENAMES_cache_PATH),
    (cfg.OBS_CKPT_PATH, cfg.CHECKPOINTS),
]
for _src, _dst in _obs_to_cache:
    mox.file.copy_parallel(_src, _dst)
    print('copy data from  ->', _src, 'to ->', _dst)



# Enumerate every staged .tfrecords file, pool them all together, shuffle,
# and re-split into train/validation/test shards.
def _tfrecord_files(root):
    # Recursively list all .tfrecords files under `root` as strings.
    return [str(x) for x in Path(root).rglob('*.tfrecords')]

train_files = _tfrecord_files(cfg.TRAINING_FILENAMES_cache_PATH)
val_files = _tfrecord_files(cfg.VALIDATION_FILENAMES_cache_PATH)
test_files = _tfrecord_files(cfg.TEST_FILENAMES_cache_PATH)

print('train files: ')
print(train_files)
# Fixed log label: the validation list was previously printed under a
# second 'train files: ' heading.
print('val files: ')
print(val_files)
print('test files: ')
print(test_files)

dataset_file_list = train_files + val_files + test_files

shuffle(dataset_file_list)

# load dataset — re-split the shuffled pool contiguously.
# The original slices ([:44], [45:49], [50:54]) silently dropped the files
# at indices 44 and 49 (an off-by-one from reading slice ends as inclusive);
# the bounds below keep every file and preserve the apparent 45/5/5 intent.
# TODO confirm the intended shard sizes against the dataset layout.
training_dataset = get_dataset(dataset_file_list[:45], batch_size=cfg.BATCH_SIZE)
validation_dataset = get_dataset(dataset_file_list[45:50], batch_size=cfg.BATCH_SIZE)
test_dataset = get_dataset(dataset_file_list[50:55], batch_size=cfg.BATCH_SIZE)


# Customize Callbacks
class CopyCheckPoint(tf.keras.callbacks.Callback):
    """Periodically mirror the local checkpoint file back to OBS.

    Every 400 training batches, if the checkpoint file exists on the local
    ModelArts cache, copy it to the OBS checkpoint path so training progress
    survives the job's ephemeral storage.
    """

    def on_train_batch_end(self, batch, logs=None):
        # Bug fix: `%` binds tighter than `+`, so the original condition
        # `batch + 1 % 400 == 0` parsed as `batch + (1 % 400) == 0`,
        # which is never true for batch >= 0 — the checkpoint was never
        # uploaded mid-training. The parenthesized form fires every 400th batch.
        if os.path.isfile(cfg.CHECKPOINTS) and (batch + 1) % 400 == 0:
            mox.file.copy_parallel(cfg.CHECKPOINTS, cfg.OBS_CKPT_PATH)
            print("Copy ckpt from", cfg.CHECKPOINTS, 'to', cfg.OBS_CKPT_PATH)

# Save only the best weights (by validation accuracy) to the local cache;
# CopyCheckPoint then mirrors that file up to OBS during training.
# Bug fix: save_best_only takes a bool — the original passed the string
# 'True', which only enabled the behavior by accident of truthiness
# (the string 'False' would have enabled it too).
# NOTE(review): monitor='val_acc' matches TF 1.x metric naming; under TF 2.x
# Keras the logged key is 'val_accuracy' — confirm if the runtime changes.
checkpoint = ModelCheckpoint(filepath=cfg.CHECKPOINTS, monitor='val_acc', mode='auto', save_best_only=True)
copyfile = CopyCheckPoint()
callbacks_list = [checkpoint, copyfile]

# load model & train

# Build SqueezeNet with a 200-way classification head — class count is
# hard-coded; presumably matches the tfrecord label space (TODO confirm).
Squeezenet = SqueezeNet(200)
top_5_acc = tf.keras.metrics.TopKCategoricalAccuracy(k=5)
# Exponential learning-rate decay: lr = 0.1 * 0.9 ** (step / 10000).
lr_schedule = tf.keras.optimizers.schedules.ExponentialDecay(
    initial_learning_rate=1e-1,
    decay_steps=10000,
    decay_rate=0.9)

# NOTE(review): momentum=0.09 is unusually low for SGD (0.9 is conventional)
# and may be a typo — confirm against the intended training recipe.
Squeezenet.compile(
optimizer=tf.keras.optimizers.SGD(learning_rate=lr_schedule, momentum=0.09),
loss= 'categorical_crossentropy',
metrics=['accuracy', top_5_acc],
)



Squeezenet.summary()

# if os.path.exists(cfg.CHECKPOINTS):
#     Squeezenet.load_weights(cfg.CHECKPOINTS)
#     print("load weights from", )

# Train with config-driven step counts; callbacks_list saves the best
# checkpoint locally and mirrors it to OBS during training.
Squeezenet.fit(training_dataset, steps_per_epoch=cfg.STEPS_PER_EPOCH, epochs=cfg.EPOCHS, validation_data=validation_dataset,validation_steps=cfg.VALIDATION_STEPS,callbacks=callbacks_list)

# Final held-out evaluation, then release the NPU session.
Squeezenet.evaluate(test_dataset)

sess.close()
print("Training End")