import json
import triton_python_backend_utils as pb_utils
import numpy as np
import tritonclient.grpc as grpcclient
import logging
from logging.handlers import RotatingFileHandler

# Root-logger setup: every log line from this process goes both to a
# rotating file (up to 5 backups of 50 MB each) and to the console.
log_filename = '/model.log'
file_handler = RotatingFileHandler(log_filename, maxBytes=50 * 1024 * 1024, backupCount=5)
console_handler = logging.StreamHandler()

log_formatter = logging.Formatter('%(asctime)s - %(levelname)s - %(message)s')
file_handler.setFormatter(log_formatter)
console_handler.setFormatter(log_formatter)

logger = logging.getLogger()
logger.setLevel(logging.INFO)
# Only attach handlers if none are present yet: adding them unconditionally
# duplicates every log line when this module is imported/initialized more
# than once in the same process.
if not logger.handlers:
    logger.addHandler(file_handler)
    logger.addHandler(console_handler)


def add_log_separator():
    """Write a horizontal rule to the log to visually separate requests."""
    separator = "-" * 80
    logger.info(separator)


class TritonPythonModel:
    """Triton Python-backend model that proxies each request to a "base"
    detection model over gRPC and returns only the class-0 detections
    (their bounding boxes and confidence scores)."""

    def initialize(self, args):
        """Load-time setup.

        Parses the model config and caches the numpy dtypes declared for the
        'bboxes' and 'scores' outputs so results can be cast before returning.

        Args:
            args: dict supplied by Triton; must contain 'model_config' (JSON).

        Raises:
            ValueError: if args is None or lacks 'model_config'.
        """
        try:
            if args is None:
                raise ValueError("Received 'None' for 'args'")
            if 'model_config' not in args:
                raise ValueError("Expected 'model_config' in 'args', but not found")
            self.model_config = json.loads(args['model_config'])

            out_bboxes_config = pb_utils.get_output_config_by_name(self.model_config, "bboxes")
            out_scores_config = pb_utils.get_output_config_by_name(self.model_config, "scores")
            self.out_bboxes_dtype = pb_utils.triton_string_to_numpy(out_bboxes_config['data_type'])
            self.out_scores_dtype = pb_utils.triton_string_to_numpy(out_scores_config['data_type'])
            logger.info("loaded successfully!")
        except Exception as e:
            # Lazy %-style args avoid building the message unless it is emitted.
            logger.error('Failed to initialize model: %s', e)
            raise

    def execute(self, requests):
        """Handle a batch of inference requests.

        For each request: read 'image', 'base_model_url' and optional 'score'
        inputs, call the remote "base" model, keep only class-0 detections,
        and answer with 'bboxes' and 'scores' tensors.  A per-request failure
        is converted into an error response rather than aborting the batch.

        Returns:
            One pb_utils.InferenceResponse per incoming request.
        """
        responses = []
        for request in requests:
            try:
                add_log_separator()
                logger.info("Received request...")
                image = pb_utils.get_input_tensor_by_name(request, 'image').as_numpy()
                base_triton_url = pb_utils.get_input_tensor_by_name(request, 'base_model_url').as_numpy()

                # BUG FIX: get_input_tensor_by_name returns None when the
                # input is absent.  The old code called .as_numpy() on the
                # tensor before the None check, so a missing 'score' input
                # raised AttributeError instead of falling back to 0.3.
                score_tensor = pb_utils.get_input_tensor_by_name(request, 'score')
                score_np = score_tensor.as_numpy() if score_tensor is not None else None
                if score_np is None or np.isnan(score_np).any():
                    score = np.float32([0.3])
                    logger.info(f"No score input, use default value {score}")
                else:
                    score = score_np.astype(np.float32)
                logger.info(f"Input image shape: {image.shape}, score: {score}")

                # Build the request for the base model.
                # NOTE(review): 'score' is declared with shape [1]; a caller
                # sending a differently-shaped score tensor would mismatch —
                # confirm the base model's input contract.
                inputs = [
                    grpcclient.InferInput('image', image.shape, "UINT8"),
                    grpcclient.InferInput('score', [1], "FP16"),
                ]
                inputs[0].set_data_from_numpy(image)
                inputs[1].set_data_from_numpy(np.array(score, dtype=np.float16))

                outputs = [
                    grpcclient.InferRequestedOutput('classes'),
                    grpcclient.InferRequestedOutput('scores'),
                    grpcclient.InferRequestedOutput('bboxes'),
                    grpcclient.InferRequestedOutput("labels")
                ]
                base_triton_url = base_triton_url[0].decode('utf-8')
                # FIX: close the gRPC channel when done — the old code opened
                # a new InferenceServerClient per request and never closed it.
                triton_client = grpcclient.InferenceServerClient(url=base_triton_url)
                try:
                    infer_result = triton_client.infer("base", inputs=inputs, outputs=outputs)
                finally:
                    triton_client.close()

                bboxes = infer_result.as_numpy('bboxes')
                scores = infer_result.as_numpy('scores')
                classes = infer_result.as_numpy('classes')

                # Keep only detections whose class id is 0.
                index_list = [i for i, value in enumerate(classes) if value == 0]
                # Extract the corresponding boxes/scores by index.
                bboxes_ = [bboxes[i] for i in index_list]
                scores_ = [scores[i] for i in index_list]

                # No class-0 detections: answer with empty, correctly-typed tensors.
                if len(bboxes_) == 0:
                    logger.info("No object detected!!!")
                    e_bboxes = np.empty((0, 4), dtype=self.out_bboxes_dtype)
                    e_conf = np.empty((0, ), dtype=self.out_scores_dtype)
                    out_tensor_bboxes = pb_utils.Tensor('bboxes', e_bboxes)
                    out_tensor_scores = pb_utils.Tensor('scores', e_conf)
                    inference_response = pb_utils.InferenceResponse(
                        output_tensors=[out_tensor_bboxes, out_tensor_scores])
                    responses.append(inference_response)
                    continue
                scores_ = np.array(scores_)
                bboxes_ = np.array(bboxes_)
                logger.info("The number of detected targets: %d", len(bboxes_))

                out_tensor_bboxes = pb_utils.Tensor('bboxes', bboxes_.astype(self.out_bboxes_dtype))
                out_tensor_scores = pb_utils.Tensor('scores', scores_.astype(self.out_scores_dtype))

                inference_response = pb_utils.InferenceResponse(
                    output_tensors=[out_tensor_bboxes, out_tensor_scores])
                responses.append(inference_response)
                logger.info("Response executed successfully!")
            except Exception as e:
                # Per-request error isolation: report the failure for this
                # request and keep serving the rest of the batch.
                logger.error('Failed to execute request: %s', e)
                inference_response = pb_utils.InferenceResponse(output_tensors=[], error=pb_utils.TritonError(str(e)))
                responses.append(inference_response)
        return responses

    def finalize(self):
        """Called once at model unload; nothing to release beyond logging."""
        logger.info('Cleaning up...')