"""
This module defines common interface for PaddlePaddle and PaddleLite.
"""
import os
import numpy as np
import logging


logger = logging.getLogger('scujcc.wrapper')

class Predictor:
    """Abstract interface shared by all predictor backends.

    Concrete subclasses (PaddlePaddle, PaddleLite, EdgeBoard) follow the
    same four-step workflow: load -> set_input -> run -> get_output.
    """

    def load(self, j):
        """Load a model; *j* identifies the model (e.g. a directory path)."""

    def set_input(self, data, index):
        """Bind *data* (a numpy array) to the input slot at *index*."""

    def get_output(self, index):
        """Return the output tensor at *index*; castable to a numpy array."""

    def run(self):
        """Execute one inference pass over the currently bound inputs."""


class PaddlePaddlePredictor(Predictor):
    """Predictor backed by the PaddlePaddle (fluid) CPU executor.

    ``paddle`` is imported inside methods so this module can still be
    imported on machines where only another backend is installed.
    """

    def __init__(self):
        import paddle as pd
        import paddle.fluid as fluid
        pd.enable_static()
        # CPU-only execution; every feed/fetch goes through one Executor.
        self.place = fluid.CPUPlace()
        self.exe = fluid.Executor(self.place)

    def load(self, model_dir):
        """Load an inference model from *model_dir*.

        Supports both the "combined" layout (single ``model`` plus
        ``params`` file) and the per-variable-file layout.
        """
        import paddle as pd
        import paddle.fluid as fluid
        pd.enable_static()
        if os.path.exists(os.path.join(model_dir, "params")):
            # Combined format: one model file and one params file.
            program, feed, fetch = fluid.io.load_inference_model(
                model_dir, self.exe, model_filename='model', params_filename="params")
        else:
            logger.debug("not combined: %s", model_dir)
            program, feed, fetch = fluid.io.load_inference_model(model_dir, self.exe)

        self.program = program
        self.feed = feed    # list of input variable names
        self.fetch = fetch  # list of fetch targets
        # One slot per declared input, filled via set_input().
        self.inputs = [None] * len(self.feed)

    def set_input(self, data, index):
        """Store numpy array *data* as the feed for input slot *index*."""
        self.inputs[index] = data

    def run(self):
        """Run one inference pass; returns the raw (non-numpy) results."""
        feeds = dict(zip(self.feed, self.inputs))
        self.results = self.exe.run(program=self.program,
                                    feed=feeds, fetch_list=self.fetch, return_numpy=False)
        # Keep numpy copies alongside the raw tensors for convenience.
        self.outputs = [np.array(res) for res in self.results]
        return self.results

    def get_output(self, index):
        """Return the raw result tensor at *index* (castable via np.array)."""
        return self.results[index]


class PaddleLitePredictor(Predictor):
    """Paddle-Lite interface wrapper (CxxConfig-based predictor)."""

    def __init__(self):
        # Created lazily in load(); None until a model is loaded.
        self.predictor = None

    def load(self, model_dir):
        """Create the Paddle-Lite predictor from the model in *model_dir*."""
        from paddlelite.lite import Place
        from paddlelite.lite import CxxConfig
        from paddlelite.lite import create_paddle_predictor as CreatePaddlePredictor
        from paddlelite.lite import TargetType
        from paddlelite.lite import PrecisionType
        from paddlelite.lite import DataLayoutType

        # Preference order for kernel placement; Lite falls back down the list.
        valid_places = (
            Place(TargetType.X86, PrecisionType.FP16, DataLayoutType.NHWC),
            Place(TargetType.Host, PrecisionType.FP32),
            Place(TargetType.CUDA, PrecisionType.FP32),
        )

        config = CxxConfig()
        if os.path.exists(model_dir + "/params"):
            # Combined format: explicit model/params file paths take
            # priority over the directory setting.
            config.set_model_file(model_dir + "/model")
            config.set_param_file(model_dir + "/params")
        else:
            # Per-variable format: point Lite at the whole directory.
            config.set_model_dir(model_dir)
        config.set_valid_places(valid_places)

        self.predictor = CreatePaddlePredictor(config)

    def set_input(self, data, index):
        """Copy numpy array *data* into the input tensor at *index*."""
        tensor = self.predictor.get_input(index)
        tensor.from_numpy(data)

    def run(self):
        """Execute one inference pass."""
        self.predictor.run()

    def get_output(self, index):
        """Return the output at *index* as a numpy array."""
        return self.predictor.get_output(index).numpy()

class EdgeBoard181Predictor(Predictor):
    """Wrapper around the EdgeBoard 1.8.1 soft-core predictor interface."""

    def __init__(self):
        # Created in load(); None until a model is loaded.
        self.predictor = None

    def load(self, model_dir):
        """Load a model from *model_dir*.

        Returns 0 on success and -1 on failure; callers check this
        return code instead of catching exceptions.
        """
        from edgeboard import PaddleLitePredictor
        self.predictor = PaddleLitePredictor()
        # For simplicity, rely on the default file names produced when
        # exporting a model with Paddle ("model" + "params").
        model_fullpath = os.path.join(model_dir, 'model')
        param_fullpath = os.path.join(model_dir, 'params')
        logger.debug('Loading model file:{0}'.format(model_fullpath))
        logger.debug('Loading model param:{0}'.format(param_fullpath))

        self.predictor.set_model_file(model_fullpath)
        self.predictor.set_param_file(param_fullpath)
        try:
            self.predictor.load()
            print("Predictor Init Success!!!")
            return 0
        except Exception as e:
            # Broad catch is deliberate: this is a boundary that reports
            # failure via the -1 return code rather than raising.
            print("Error: Predictor Init Failed!!!")
            print(e)

        return -1

    def set_input(self, data, index):
        """Bind numpy array *data* to the input tensor at *index*."""
        self.predictor.set_input(data, index)

    def run(self):
        """Execute one inference pass."""
        self.predictor.run()

    def get_output(self, index):
        """Return the output tensor at *index*."""
        return self.predictor.get_output(index)