# Copyright 2017 Neural Networks and Deep Learning lab, MIPT
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from logging import getLogger
from pathlib import Path
from typing import Dict, Union, Optional, Iterable

from deeppavlov.core.commands.utils import parse_config
from deeppavlov.core.datahelpers.dstc2_reader import SimpleDSTC2DatasetReader
from deeppavlov.core.datahelpers.dstc2_iterator import DialogDatasetIterator
from deeppavlov.core.trainers.nn_trainer import NNTrainer

log = getLogger(__name__)


def data_iter(data_path='D:\\ChatBot\\DeepPavlov\\my_data'):
    """Read a DSTC2-style dataset from *data_path* and wrap it in a dialog iterator.

    Args:
        data_path: directory containing the dataset files.

    Returns:
        A ``DialogDatasetIterator`` over the dataset read from ``data_path``.
    """
    dataset = SimpleDSTC2DatasetReader().read(data_path)
    return DialogDatasetIterator(data=dataset)


def train_evaluate_model_from_config(config: Union[str, Path, dict],
                                     iterator=None, *,
                                     to_train: bool = True,
                                     evaluation_targets: Optional[Iterable[str]] = None,
                                     to_validate: Optional[bool] = None,
                                     start_epoch_num: Optional[int] = None) -> Dict[str, Dict[str, float]]:
    """Make training and evaluation of the model described in corresponding configuration file.

    Args:
        config: path to a configuration file or an already loaded config dict.
        iterator: data iterator; when ``None`` one is built from the config's
            ``dataset_reader.data_path``.
        to_train: whether to run training before evaluation.
        evaluation_targets: subset names (e.g. ``'valid'``, ``'test'``) to
            evaluate on; falls back to the config's ``train`` section when ``None``.
        to_validate: deprecated; use ``evaluation_targets`` instead.
        start_epoch_num: if given, overrides ``start_epoch_num`` in the train config.

    Returns:
        Mapping from evaluation target name to its metrics dict.
    """
    config = parse_config(config)

    if iterator is None:
        # Pass the data path declared in the config to data_iter(); previously
        # the path was computed here but never used, so the iterator silently
        # fell back to data_iter's hard-coded default directory.
        data_path = config['dataset_reader']['data_path']
        iterator = data_iter(data_path)

    if 'train' not in config:
        log.warning('Train config is missing. Populating with default values')
    # Default to an empty dict so the accesses below don't raise TypeError
    # when the config has no 'train' section (the warning above promises
    # default values, but config.get('train') alone would return None).
    train_config = config.get('train', {})

    if start_epoch_num is not None:
        train_config['start_epoch_num'] = start_epoch_num

    # Migrate the deprecated validate_best/test_best flags to the
    # evaluation_targets list expected by the trainer.
    if 'evaluation_targets' not in train_config and ('validate_best' in train_config
                                                     or 'test_best' in train_config):
        log.warning('"validate_best" and "test_best" parameters are deprecated.'
                    ' Please, use "evaluation_targets" list instead')

        train_config['evaluation_targets'] = []
        if train_config.pop('validate_best', True):
            train_config['evaluation_targets'].append('valid')
        if train_config.pop('test_best', True):
            train_config['evaluation_targets'].append('test')

    trainer = NNTrainer(config['chainer'], **train_config)

    if to_train:
        trainer.train(iterator)

    if to_validate is not None:
        if evaluation_targets is None:
            log.warning('"to_validate" parameter is deprecated and will be removed in future versions.'
                        ' Please, use "evaluation_targets" list instead')
            evaluation_targets = ['test']
            if to_validate:
                evaluation_targets.append('valid')
        else:
            # logging.Logger.warn is deprecated; use warning() like the rest
            # of this module does.
            log.warning('Both "evaluation_targets" and "to_validate" parameters are specified.'
                        ' "to_validate" is deprecated and will be ignored')

    # The iterator is guaranteed non-None here (it is built above when the
    # caller passes none), so evaluation always runs.
    res = trainer.evaluate(iterator, evaluation_targets, print_reports=True)
    trainer.get_chainer().destroy()

    res = {k: v['metrics'] for k, v in res.items()}

    return res
