# Copyright 2021 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
import time
import mindspore.dataset as ds
from mindspore import context, Model
from mindspore.train.serialization import load_checkpoint, load_param_into_net

from models.net import DAM_Net, PredictWithSigmoid, DAMNetWithLoss, DAMTrainOneStepCell, ModelBuilder
# from models.net_test import DAMNetWithLoss, DAMTrainOneStepCell, PredictWithSigmoid, DAM_Net
from utils.metric import UbuntuTestMetric, DoubanTestMetric
from utils.callback import UbuntuTestCallBack, DoubanTestCallBack
import utils.config as config


def eval(args, checkpoint):
    """Evaluate a DAM checkpoint on the test set and append the metric to a log file.

    Args:
        args: parsed config namespace (from ``utils.config``); must provide
            ``test_data_path``, ``model_name``, ``model_path`` and
            ``eval_file_name``.
        checkpoint: checkpoint file-name suffix appended to
            ``model_path + model_name`` to locate the ``.ckpt`` file.

    Note: the function name shadows the builtin ``eval``; it is kept
    unchanged for backward compatibility with existing callers.
    """
    # Use a distinct local name so we do not shadow the module-level
    # ``utils.config`` import.
    cfg = args

    context.set_context(mode=context.PYNATIVE_MODE, device_target="Ascend", device_id=0)

    print("************Starting loading data: ", cfg.test_data_path)
    print(time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(time.time())))
    dataset = ds.MindDataset(cfg.test_data_path,
                             columns_list=["turns", "turn_len", "response", "response_len", "label"],
                             shuffle=False)
    dataset = dataset.batch(200, drop_remainder=True)
    dataset = dataset.repeat(1)
    # drop_remainder=True, so total samples = batch_size * number of batches.
    dataset_len = 200 * dataset.get_dataset_size()
    print("dataset: ", dataset)
    print("dataset_len: ", dataset_len)
    print("dataset_size: ", dataset.get_dataset_size())
    print("*************Finish loading data**************")
    print(time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(time.time())))

    dam_net = DAM_Net(cfg)
    train_net = DAMNetWithLoss(dam_net)
    train_net = DAMTrainOneStepCell(train_net, cfg)
    eval_net = PredictWithSigmoid(dam_net)
    # Metric choice depends on which corpus the model was trained for.
    if cfg.model_name == 'DAM_ubuntu':
        metric = UbuntuTestMetric()
    else:
        metric = DoubanTestMetric()
    model = Model(train_net, eval_network=eval_net, metrics={"Accuracy": metric})

    # Load the checkpoint parameters into the backbone network.
    ckpt_path = cfg.model_path + cfg.model_name + checkpoint
    print('loading checkpoint: ', ckpt_path)
    param_dict = load_checkpoint(ckpt_path)
    load_param_into_net(dam_net, param_dict)

    print('testing started...')
    res = model.eval(dataset, dataset_sink_mode=False)
    result = res["Accuracy"]

    with open(cfg.eval_file_name, 'a+', encoding='utf-8') as file_out:
        time_str = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime())
        # BUG FIX: ``res`` is a dict returned by ``model.eval`` — the original
        # ``time_str + '\n' + res`` raised TypeError (str + dict), so results
        # were never logged. Write the extracted metric value instead.
        out_str = time_str + '\n' + str(result) + '\n'
        file_out.write(out_str + '\n')
    print(res)


if __name__ == '__main__':
    # Evaluate the Ubuntu-corpus model against the given checkpoint suffix.
    eval(config.ubuntu_parse_args(), '_2-1_3906.ckpt')

