import time

import cv2
import numpy as np
import platform
from rknnlite.api import RKNNLite
from Grade import DensenetGrade

# Preprocessing/grading helper, constructed once at import time.
# NOTE(review): only its `.infer(img_path)` method is used below, and the
# result is fed into rknn_lite.inference — presumably it returns the
# preprocessed image tensor; confirm against the Grade module.
infer = DensenetGrade()

# device tree node used to detect the Rockchip SoC (rk356x/rk3588)
DEVICE_COMPATIBLE_NODE = '/proc/device-tree/compatible'

def get_host():
    # get platform and device type
    system = platform.system()
    machine = platform.machine()
    os_machine = system + '-' + machine
    if os_machine == 'Linux-aarch64':
        try:
            with open(DEVICE_COMPATIBLE_NODE) as f:
                device_compatible_str = f.read()
                if 'rk3588' in device_compatible_str:
                    host = 'RK3588'
                elif 'rk3562' in device_compatible_str:
                    host = 'RK3562'
                else:
                    host = 'RK3566_RK3568'
        except IOError:
            print('Read device node {} failed.'.format(DEVICE_COMPATIBLE_NODE))
            exit(-1)
    else:
        host = os_machine
    return host

# Expected model input resolution (224x224).
# NOTE(review): not referenced anywhere in this file — confirm whether the
# preprocessing in Grade.DensenetGrade relies on it or it is dead.
INPUT_SIZE = 224

# Per-SoC RKNN model files, selected at runtime based on get_host().
RK3566_RK3568_RKNN_MODEL = 'resnet18_for_rk3566_rk3568.rknn'
# NOTE(review): RK3588 loads a densenet121 model while the other targets
# load resnet18 — confirm this asymmetry is intentional.
RK3588_RKNN_MODEL = 'densenet121.rknn'
RK3562_RKNN_MODEL = 'resnet18_for_rk3562.rknn'


def show_outputs(outputs):
    """Print the predicted class index and its probability.

    Args:
        outputs: list whose first element is a 1-D sequence of per-class
            probabilities (the caller applies softmax before calling this).
    """
    scores = outputs[0]
    index = int(np.argmax(scores))
    # Bug fix: the original printed the entire `outputs` list as the
    # "probability" (概率); report the winning class's probability instead.
    pi = scores[index]
    print(f"等级：{index},概率：{pi}")


if __name__ == '__main__':

    # Pick the RKNN model file matching the detected SoC; bail out on
    # unsupported platforms (e.g. when run on a desktop host).
    host_name = get_host()
    if host_name == 'RK3566_RK3568':
        rknn_model = RK3566_RK3568_RKNN_MODEL
    elif host_name == 'RK3562':
        rknn_model = RK3562_RKNN_MODEL
    elif host_name == 'RK3588':
        rknn_model = RK3588_RKNN_MODEL
    else:
        print("This demo cannot run on the current platform: {}".format(host_name))
        exit(-1)

    rknn_lite = RKNNLite()

    # load RKNN model
    t1 = time.perf_counter()  # start of the timed init phase
    print('--> Load RKNN model')
    ret = rknn_lite.load_rknn(rknn_model)
    if ret != 0:
        print('Load RKNN model failed')
        exit(ret)
    print('done')

    # Preprocess the input image via the DensenetGrade helper.
    # NOTE(review): `infer.infer` appears to return the preprocessed image
    # (it is passed straight to rknn_lite.inference below) — confirm
    # against the Grade module.
    img_path = "dense121-ori.jpg"
    img = infer.infer(img_path)

    # init runtime environment
    print('--> Init runtime environment')

    # run on RK356x/RK3588 with Debian OS, do not need specify target.
    if host_name == 'RK3588':
        # On RK3588, restrict execution to a single NPU core.
        ret = rknn_lite.init_runtime(core_mask=RKNNLite.NPU_CORE_0)
    else:
        ret = rknn_lite.init_runtime()
    if ret != 0:
        print('Init runtime environment failed')
        exit(ret)
    print('done')
    t3 = time.perf_counter()  # end of init phase / start of inference phase

    # Inference
    print('--> Running model')
    outputs = rknn_lite.inference(inputs=[img])
    # Normalize the first output with a softmax so show_outputs receives
    # probabilities. NOTE(review): assumes the model emits raw logits —
    # if the .rknn graph already ends in softmax this is applied twice.
    x = outputs[0]
    output = np.exp(x)/np.sum(np.exp(x))
    outputs = [output]
    # outputs = torch.softmax(outputs).squeeze().cpu().numpy().tolist()
    show_outputs(outputs)
    t2 = time.perf_counter()  # note: includes the show_outputs print time
    print(f"init time cost:{t3-t1}\ninfer time cost:{t2-t3}")
    print('done')

    rknn_lite.release()
