# -*- coding: utf-8 -*-
# @Time    : 2020/11/18 10:19
# @Author  : DuJiabao
# @File    : modelForTFLite.py
# @Description : 
import tflite_runtime.interpreter as tflite



class ModelForTFLite(object):
    """Wrapper exposing a TFLite model through a plain-callable interface.

    Loads a .tflite flatbuffer once, allocates its tensors, and lets callers
    run inference with ``model(input_data)`` instead of driving the
    interpreter (set_tensor / invoke / get_tensor) by hand.
    """

    def __init__(self, model_path):
        """Load the TFLite model and prepare it for inference.

        :param model_path: filesystem path to the .tflite model file
        """
        # Load the TFLite model and allocate tensors.
        self.interpreter = tflite.Interpreter(model_path=model_path)
        self.interpreter.allocate_tensors()

        # Get input and output tensor metadata (kept public for callers).
        self.input_details = self.interpreter.get_input_details()
        self.output_details = self.interpreter.get_output_details()

        # Expected shape of the first input tensor.
        self.input_shape = self.input_details[0]['shape']

        # Cache the first input/output tensor indices once so __call__
        # avoids the repeated list/dict lookups on every inference.
        self._input_index = self.input_details[0]['index']
        self._output_index = self.output_details[0]['index']

    def __call__(self, input_data):
        """Run one inference pass and return the prediction.

        :param input_data: array matching the model's first input tensor
            (same dtype and shape the original model expects)
        :return: a copy of the first output tensor's data
        """
        self.interpreter.set_tensor(self._input_index, input_data)
        self.interpreter.invoke()

        # `get_tensor()` returns a copy of the tensor data.
        # Use `tensor()` in order to get a pointer to the tensor.
        return self.interpreter.get_tensor(self._output_index)


if __name__ == '__main__':
    # No standalone behavior: this module is meant to be imported, and
    # ModelForTFLite instantiated by the caller with a .tflite model path.
    pass
