import argparse
import numpy as np
import tritonclient.http as httpclient
import cv2
import time
from Iutils import diff_precision

import sys
import os.path as osp
sys.path.append(osp.dirname(osp.dirname(osp.dirname(osp.abspath(__file__)))))
from sam_onnx.utils import draw_masks


def get_argparser():
    """Build the command-line parser for the Triton SAM2 client.

    Returns:
        argparse.ArgumentParser: parser with ``--url``, ``--model-name``
        and ``--client-timeout`` options (all optional, with defaults).
    """
    parser = argparse.ArgumentParser()

    parser.add_argument(
        "-u",
        "--url",
        type=str,
        required=False,
        default='localhost:38017',
        # Help text fixed: it previously claimed the default was localhost:8000.
        help="Inference server URL. Default is localhost:38017.",
    )
    parser.add_argument(
        "-m",
        "--model-name",
        type=str,
        required=False,
        default="ensemble_sam2.1_pipeline_fp16",
        help="model name in triton server",
    )
    parser.add_argument(
        "-t",
        "--client-timeout",
        type=float,
        required=False,
        default=3,
        help="Client timeout in seconds. Default is 3.",
    )

    return parser


def triton_infer(opts):
    """Run asynchronous HTTP inference against the Triton SAM2 ensemble.

    Args:
        opts: parsed argparse namespace additionally carrying ``input``
            (list of HxWx3 uint8 images), ``point_coords``, ``label_id``,
            ``mask_input`` and ``has_mask_input`` (see ``main``).

    Returns:
        list[np.ndarray] | None: flattened per-image results in request
        order — ``[masks_0, iou_0, masks_1, iou_1, ...]`` — or ``None``
        when the server cannot be reached or an error occurs.
    """
    # (ensemble input name, Triton datatype) pairs, in the order the
    # ensemble expects; must stay aligned with input_list below.
    input_names_types = [("esb_images", "UINT8"),
                         ("esb_point_coords", "FP32"),
                         ("esb_point_labels", "FP32"),
                         ("esb_mask_input", "FP32"),
                         ("esb_has_mask_input", "FP32"),]
    output_names = ["esb_masks",
                    "esb_iou_predictions",]

    def ping_server(triton_client):
        # Liveness probe: treat any client/network error as "not live"
        # instead of letting it propagate (was a bare except before).
        try:
            return triton_client.is_server_live()
        except Exception:
            return False

    def infer_async_http(opts):
        # Fire one async request per image; the same prompt (points,
        # labels, prior mask) is reused for every image.
        async_request_list = []
        for image in opts.input:  # renamed: `input` shadowed the builtin
            input_list = [
                np.expand_dims(image, axis=0),  # add batch dim -> (1, H, W, 3)
                np.array([[opts.point_coords]], dtype=np.float32),
                np.array([[opts.label_id]], dtype=np.float32),
                opts.mask_input,
                opts.has_mask_input,
            ]

            inputs = []
            for (name, dtype), arr in zip(input_names_types, input_list):
                infer_input = httpclient.InferInput(name, arr.shape, dtype)
                infer_input.set_data_from_numpy(arr)
                inputs.append(infer_input)

            outputs = [httpclient.InferRequestedOutput(name)
                       for name in output_names]

            # Asynchronous inference call.
            async_request = triton_client.async_infer(
                model_name=opts.model_name,
                inputs=inputs,
                outputs=outputs
            )
            async_request_list.append(async_request)
        return async_request_list

    try:
        triton_client = httpclient.InferenceServerClient(
            url=opts.url,
            concurrency=1,
            connection_timeout=opts.client_timeout,
        )

        if ping_server(triton_client):
            requests_list = infer_async_http(opts)

            # Gather results in request order: masks then IoU per image.
            outputs = []
            for async_request in requests_list:
                result = async_request.get_result()
                outputs.append(result.as_numpy(output_names[0]).astype(np.float32))
                outputs.append(result.as_numpy(output_names[1]).astype(np.float32))
            return outputs
        else:
            print('triton-server ping bad')
    except Exception as e:
        print("channel creation failed: " + str(e))
        print('triton-server exception')


def main():
    """Entry point: build the SAM2 prompt, call the Triton server and save
    a mask-overlay image.

    Returns:
        The raw output list from ``triton_infer`` on success, ``None`` on
        failure (the exception is printed, not re-raised).
    """
    opts = get_argparser().parse_args()
    img_list = [
        "/data/Racing_Terriers.jpg",
    ]
    opts.input = [cv2.imread(img) for img in img_list]
    # Prompt: three (x, y) points and their corresponding label ids.
    opts.point_coords = [[345, 300], [0, 100], [600, 700]]
    opts.label_id = [0, 2, 3]
    # No prior low-res mask: zero (1, 1, 256, 256) mask with flag set to 0.
    opts.mask_input = np.zeros((1, 1, 1024 // 4, 1024 // 4), dtype=np.float32)
    opts.has_mask_input = np.array([[0]], dtype=np.float32)

    try:
        print('running triton_infer...')
        st = time.time()
        outputs = triton_infer(opts)
        print(f'triton-server total inference time:{time.time() - st:.4f}s')

        # Binarize the first image's masks and draw them on the input.
        mask_threshold = 0
        dic = {0: (outputs[0][0] > mask_threshold).astype(np.uint8).squeeze()}
        masked_img = draw_masks(opts.input[0], dic)
        # osp.join with a single argument was a no-op; use the path directly.
        cv2.imwrite("/data/sam2_masked_img_t.jpg", masked_img)

        return outputs
    except Exception as e:
        print("Exception: " + str(e))
        # Service call failed; local inference could be used as a fallback.


if __name__ == '__main__':
    # Keep the result bound at module level so it is inspectable when the
    # script is run with `python -i`. (Removed a redundant trailing `pass`.)
    outputs = main()
