
#include "person_track.h"
#define N_PERSON_ANCHOR 15
/* Anchor boxes for the person-detection YOLO head.
 * Each row is {cx, cy, w, h} in feature-map pixel units: the anchor center is
 * fixed at (7.5, 7.5) and only width/height vary across the 15 anchors.
 * NOTE(review): presumably generated by k-means clustering on the training
 * set's person boxes — confirm against the model's training pipeline. */
static float person_anchors[][4] = {
    {7.500000, 7.500000, 6.444802, 17.378922},
    {7.500000, 7.500000, 27.070000, 32.411266},
    {7.499999, 7.500000, 59.323946, 145.040344},
    {7.500000, 7.500000, 10.376238, 12.129096},
    {7.500000, 7.500000, 78.554626, 86.740006},
    {7.500000, 7.500000, 15.523774, 41.475006},
    {7.500000, 7.500000, 186.322494, 216.810502},
    {7.500000, 7.499999, 42.645004, 54.964464},
    {7.500000, 7.500000, 4.783142, 9.161744},
    {7.500000, 7.500000, 2.699996, 4.678788},
    {7.500000, 7.500000, 37.044770, 98.219254},
    {7.500000, 7.500000, 23.190628, 64.775010},
    {7.500000, 7.500000, 16.653870, 20.397190},
    {7.500000, 7.500000, 9.910644, 27.630906},
    {7.500000, 7.500000, 106.485000, 175.425660}};

/* Run the person-detection kmodel on an image and track detections across
 * frames.
 *
 * pos_args[0]: py_kpu_net object (loaded kmodel context)
 * pos_args[1]: image object whose pix_ai buffer is fed to the KPU
 *
 * Returns: a list of facelandmark-style result objects (x1,y1,x2,y2 box,
 * landmark points zeroed), or mp_const_none if pos_args[0] is not a kpu_net.
 * Raises ValueError on image-format mismatch, OSError on KPU run failure.
 */
STATIC mp_obj_t py_kpu_class_run_person(size_t n_args, const mp_obj_t *pos_args, mp_map_t *kw_args)
{
    (void)n_args;
    (void)kw_args;

    sipeed_kpu_err_t ret;
    static uint16_t n_box_limit = 10;
    static float *boxes[10];
    static uint16_t n_obj = 10;
    static obj_t objects[10];
    /* static so tracker identity state survives between calls; note it is
     * re-initialized on every call below, matching the original behavior. */
    static tracker_t tracker;
    init_tracker(&tracker, objects, n_obj);

    static float *output = NULL;
    static size_t output_size;
    static uint16_t n_result;

    if (mp_obj_get_type(pos_args[0]) == &py_kpu_net_obj_type)
    {
        /* 1. Resolve model context and validate the input image. */
        py_kpu_net_obj_t *kpu_net = MP_OBJ_TO_PTR(pos_args[0]);
        image_t *arg_img = py_image_cobj(pos_args[1]);
        uint16_t w0 = 0, h0 = 0, ch0 = 0;
        int kmodel_type = sipeed_kpu_model_get_type(kpu_net->kmodel_ctx);
        mp_printf(&mp_plat_print, "kmodel type:%d\r\n", kmodel_type);
        if (kmodel_type == 3)
        {
            sipeed_kpu_model_get_input_shape(kpu_net->kmodel_ctx, &w0, &h0, &ch0);
        }

        if (check_img_format(arg_img, w0, h0, ch0, kmodel_type))
        {
            mp_raise_ValueError("[MAIXPY]kpu: person check img format err!\r\n");
            return mp_const_none;
        }

        /* 2. Run the person detector on the KPU and wait for completion. */
        ret = sipeed_kpu_model_run(kpu_net->kmodel_ctx, arg_img->pix_ai, K210_DMA_CH_KPU, ai_done, NULL);
        if (ret == SIPEED_KPU_ERR_RUN_MODEL)
        {
            mp_raise_msg(&mp_type_OSError, "Cannot run kmodel.\n");
        }
        else if (ret == SIPEED_KPU_ERR_OUTPUTS_NODONE)
        {
            mp_raise_msg(&mp_type_OSError, "You haven't set all outputs shape!\n");
        }
        while (!g_ai_done_flag) {};
        g_ai_done_flag = 0;

        /* 3. Decode boxes (15x20 feature map, conf 0.7, NMS 0.15) and update
         * the tracker with the new detections. */
        ret = sipeed_kpu_get_output(kpu_net->kmodel_ctx, 0, (uint8_t **)&output, &output_size);

        n_result = person_get_boxes(boxes, n_box_limit, output, 15, 20, person_anchors, N_PERSON_ANCHOR, 0.7, 0.15);
        update_tracker(&tracker, boxes, n_result, HUMAN, DIST_THRESH);

        /* 4. Collect valid tracked objects into a temporary list.
         * BUGFIX: element size must match the struct actually stored
         * (py_kpu_facelandmark_list_data_t), not py_kpu_netinfo_list_data_t. */
        list_t out;
        list_init(&out, sizeof(py_kpu_facelandmark_list_data_t));
        for (uint16_t i = 0; i < tracker.max_n_object; ++i)
        {
            obj_t *obj = &(tracker.objects[i]);
            if (!(obj->is_valid))
                continue;

            float half_w = obj->w / 2;
            float half_h = obj->h / 2;
            /* BUGFIX: clamp in signed arithmetic — a center near the image
             * edge can make cx - half_w negative, which would wrap to a huge
             * value if converted straight to uint32_t. Clamp to the 320x240
             * frame with a 1-pixel margin, as before. */
            int32_t x1 = (int32_t)(obj->cx - half_w);
            int32_t y1 = (int32_t)(obj->cy - half_h);
            int32_t x2 = (int32_t)(obj->cx + half_w);
            int32_t y2 = (int32_t)(obj->cy + half_h);

            if (x1 <= 0)
                x1 = 1;
            if (x2 >= 319)
                x2 = 318;
            if (y1 <= 0)
                y1 = 1;
            if (y2 >= 239)
                y2 = 238;

            py_kpu_facelandmark_list_data_t data;
            data.x1 = (uint32_t)x1;
            data.y1 = (uint32_t)y1;
            data.x2 = (uint32_t)x2;
            data.y2 = (uint32_t)y2;

            mp_printf(&mp_plat_print, "return's person x1=%d,y1=%d\r\n", data.x1, data.y1);

            /* Landmark points are unused for person detection; zero them so
             * the shared facelandmark result type is fully initialized. */
            data.p0x = 0;
            data.p0y = 0;
            data.p1x = 0;
            data.p1y = 0;
            data.p2x = 0;
            data.p2y = 0;
            data.p3x = 0;
            data.p3y = 0;
            data.p4x = 0;
            data.p4y = 0;
            list_push_back(&out, &data);
        }

        /* 5. Convert the temporary list into a MicroPython list of result
         * objects, draining `out` as we go. */
        mp_obj_list_t *objects_list = mp_obj_new_list(list_size(&out), NULL);
        for (size_t index = 0; list_size(&out); index++)
        {
            py_kpu_facelandmark_list_data_t lnk_data;
            list_pop_front(&out, &lnk_data);

            py_kpu_class_facelandmark_find_obj_t *o = m_new_obj(py_kpu_class_facelandmark_find_obj_t);
            o->base.type = &py_kpu_class_facelandmark_find_type;
            o->x1 = mp_obj_new_int(lnk_data.x1);
            o->y1 = mp_obj_new_int(lnk_data.y1);
            o->x2 = mp_obj_new_int(lnk_data.x2);
            o->y2 = mp_obj_new_int(lnk_data.y2);
            o->p0x = mp_obj_new_int(lnk_data.p0x);
            o->p0y = mp_obj_new_int(lnk_data.p0y);
            o->p1x = mp_obj_new_int(lnk_data.p1x);
            o->p1y = mp_obj_new_int(lnk_data.p1y);
            o->p2x = mp_obj_new_int(lnk_data.p2x);
            o->p2y = mp_obj_new_int(lnk_data.p2y);
            o->p3x = mp_obj_new_int(lnk_data.p3x);
            o->p3y = mp_obj_new_int(lnk_data.p3y);
            o->p4x = mp_obj_new_int(lnk_data.p4x);
            o->p4y = mp_obj_new_int(lnk_data.p4y);

            /* Explicit pointer->mp_obj_t conversion instead of relying on an
             * implicit void* cast. */
            objects_list->items[index] = MP_OBJ_FROM_PTR(o);
        }
        return MP_OBJ_FROM_PTR(objects_list);
    }
    return mp_const_none;
}
STATIC MP_DEFINE_CONST_FUN_OBJ_KW(py_kpu_class_run_person_obj, 2, py_kpu_class_run_person);