#!/usr/bin/env python
# coding: utf8

import logging
import os
import time
from os.path import join
from pathlib import Path

import tensorflow as tf
from tensorflow.python.framework import graph_util, graph_io

from .model import model_fn, InputProviderFactory, EstimatorSpecBuilder
from .model.provider import get_default_model_provider
from .utils.configuration import load_configuration

logger = logging.getLogger("segment")


class Publish(object):
    """ A wrapper class for exporting a trained separation model.

    Supports exporting as a TensorFlow SavedModel, as a frozen
    (constants-only) GraphDef, and as a TFLite model built either from a
    live session or from a previously exported SavedModel.
    """

    def __init__(self, params_descriptor):
        """ Default constructor.

        :param params_descriptor: Descriptor for TF params to be used.
        """
        self._params = load_configuration(params_descriptor)
        # Lazily-built, cached collaborators (see the _get_* accessors).
        self._builder = None
        self._input_provider = None
        self._features = None

    def _get_default_model_dir(self, model_dir):
        """ Transform a model descriptor string like 'segment:2stems'
        into an actual local path, using the default model provider.

        :param model_dir: Model descriptor (or path) to resolve.
        :returns: Resolved local model directory.
        """
        model_provider = get_default_model_provider()
        return model_provider.get(model_dir)

    def _get_input_provider(self):
        """ Lazily create and cache the input provider for current params. """
        if self._input_provider is None:
            self._input_provider = InputProviderFactory.get(self._params)
        return self._input_provider

    def _get_features(self):
        """ Lazily create and cache the input placeholder dictionary. """
        if self._features is None:
            self._features = self._get_input_provider().get_input_dict_placeholders()
        return self._features

    def _create_estimator(self, params):
        """
            Initialize tensorflow estimator that will perform separation

            Params:
            - params: a dictionary of parameters for building the model

            Returns:
                a tensorflow estimator
        """
        # Load model. NOTE: this mutates the caller's dict in place so the
        # resolved path is visible to subsequent calls.
        params['model_dir'] = self._get_default_model_dir(params['model_dir'])
        # Setup config
        # session_config = tf.compat.v1.ConfigProto()
        # session_config.gpu_options.per_process_gpu_memory_fraction = 0.7
        # config = tf.estimator.RunConfig(session_config=session_config)
        # Setup estimator
        estimator = tf.estimator.Estimator(
            model_fn=model_fn,
            model_dir=params['model_dir'],
            params=params,
            # config=config
        )
        return estimator

    def _to_export(self, estimator, directory):
        """ Export the given estimator as a SavedModel under `directory`.

        :param estimator: Estimator instance to export.
        :param directory: Target directory; a timestamped version
                          subdirectory is created by TensorFlow.
        """
        input_provider = self._get_input_provider()

        def receiver():
            # The placeholders double as both raw receiver tensors and
            # model features.
            features = input_provider.get_input_dict_placeholders()
            return tf.estimator.export.ServingInputReceiver(features, features)

        estimator.export_saved_model(directory, receiver)

    def _get_builder(self):
        """ Lazily create and cache the estimator spec builder. """
        if self._builder is None:
            self._builder = EstimatorSpecBuilder(self._get_features(), self._params)
        return self._builder

    def export_model(self, export_dir):
        """ Export the separation model as a TensorFlow SavedModel.

        :param export_dir: Directory the SavedModel is written into.
        """
        estimator = self._create_estimator(self._params)
        self._to_export(estimator, directory=export_dir)

    def export_frozen_model(self, export_dir):
        """ Export the model as a frozen (variables-to-constants) graph.

        The frozen graph is written to
        ``<export_dir>/<epoch-timestamp>/saved_model.pb``.

        :param export_dir: Directory the frozen model is written into.
        """
        tf.compat.v1.disable_eager_execution()
        latest_checkpoint = tf.train.latest_checkpoint(
            self._get_default_model_dir(self._params['model_dir']))
        # The return value is unused, but the call is required: accessing
        # `.outputs` constructs the inference graph in the default graph
        # before the Saver collects its variables.
        _ = self._get_builder().outputs
        # NOTE(review): output node name is hard-coded for the current
        # network architecture -- confirm it matches the exported model.
        output_names = ['conv2d_18/truediv']
        saver = tf.compat.v1.train.Saver()
        with tf.compat.v1.Session() as sess:
            saver.restore(sess, latest_checkpoint)
            graph_def = sess.graph.as_graph_def()
            graphdef_frozen = graph_util.convert_variables_to_constants(
                sess, graph_def, output_names)
            model_dir = join(export_dir, str(int(time.time())))
            # makedirs (vs mkdir) also creates a missing export_dir parent
            # and, with exist_ok, avoids a check-then-create race.
            os.makedirs(model_dir, exist_ok=True)
            graph_io.write_graph(
                graphdef_frozen, model_dir, 'saved_model.pb', as_text=False)

    def convert_tflite_from_session(self, export_dir):
        """ Convert the model to TFLite directly from a restored session.

        The converted model is written to
        ``<export_dir>/tflite/model.tflite``.

        :param export_dir: Directory the TFLite model is written into.
        """
        tf.compat.v1.disable_eager_execution()
        latest_checkpoint = tf.train.latest_checkpoint(
            self._get_default_model_dir(self._params['model_dir']))
        features = self._get_features()
        # Builds the inference graph; the outputs become the converter's
        # output tensors below.
        model_outputs = self._get_builder().outputs
        saver = tf.compat.v1.train.Saver()
        with tf.compat.v1.Session() as sess:
            saver.restore(sess, latest_checkpoint)
            input_tensors = [self._get_input_provider().get_feed_tensor(features)]
            output_tensors = [model_outputs]
            converter = tf.compat.v1.lite.TFLiteConverter.from_session(
                sess, input_tensors, output_tensors)
            tflite_model = converter.convert()
            tflite_model_dir = join(export_dir, 'tflite')
            # Create the target directory: open() does not create parents.
            os.makedirs(tflite_model_dir, exist_ok=True)
            with open(join(tflite_model_dir, 'model.tflite'), "wb") as f:
                f.write(tflite_model)

    def convert_tflite_from_saved_model(self, export_dir):
        """ Convert the newest SavedModel under `export_dir` to TFLite.

        The converted model is written to
        ``<export_dir>/<latest-version>/tflite/model.tflite``.

        :param export_dir: Directory containing SavedModel version
                           subdirectories (epoch-timestamp names).
        :raises ValueError: If no version subdirectory exists.
        """
        versions = [
            model for model in Path(export_dir).iterdir()
            if model.is_dir()]
        if not versions:
            raise ValueError(
                'No SavedModel version directory found in {}'.format(export_dir))
        # Version directories are epoch timestamps; lexicographic order
        # matches numeric order while the names share the same length.
        latest = str(sorted(versions)[-1])
        converter = tf.lite.TFLiteConverter.from_saved_model(saved_model_dir=latest)
        tflite_model = converter.convert()
        tflite_model_dir = join(latest, 'tflite')
        # Create the target directory: open() does not create parents.
        os.makedirs(tflite_model_dir, exist_ok=True)
        with open(join(tflite_model_dir, 'model.tflite'), "wb") as f:
            f.write(tflite_model)
