
# Copyright (c) 2023-2024, NVIDIA CORPORATION.  All rights reserved.
#
# NVIDIA CORPORATION and its licensors retain all intellectual property
# and proprietary rights in and to this software, related documentation
# and any modifications thereto.  Any use, reproduction, disclosure or
# distribution of this software and related documentation without an express
# license agreement from NVIDIA CORPORATION is strictly prohibited.

from __future__ import annotations

import logging
import os
import time
import uuid
from typing import Any, Union

import numpy as np
import requests

import nimlib
from nimlib.nim_inference_api_builder.http_api import HttpNIMApiInterface

import tritonclient.http as httpclient
from tritonclient.utils import *

from data_models import HTTPValidationError, MolecularDockingRequest

# Setup tritonserver interface

MODEL_INPUT = [
    ['ligand_file_bytes',         bytes,    '$ligand'         ],
    ['ligand_file_name',          bytes,    ''                ],
    ['protein_file_bytes',        bytes,    '$protein'        ],
    ['protein_file_name',         bytes,    'protein.pdb'     ],
    ['poses_to_generate',         np.int32, '$num_poses'      ],
    ['no_final_step_noise',       bool,     True              ],
    ['diffusion_steps',           np.int32, '$steps'          ],
    ['diffusion_time_divisions',  np.int32, '$time_divisions' ],
    ['save_diffusion_trajectory', bool,     '$save_trajectory'],
    ['skip_gen_conformer',        bool,     False             ],
    ['is_staged',                 bool,     '$is_staged'      ],
    ['context',                   bytes,    '{}'              ]
]

MODEL_OUTPUT = [
    ['visualizations_files',         str,   'trajectory'         ],
    ['docked_ligand_position_files', str,   'ligand_positions'   ],
    ['pose_confidence',              float, 'position_confidence'],
    ['status',                       str,   'status'             ],
]

MODEL_NAME = "diffdock"
TRITON_TIMEOUT = 4 * 60 * 60

with open(os.path.join(os.environ.get("NIM_DIR_PATH", "/opt/nim"), "VERSION"), "r") as f:
    MODEL_VER = f.read().strip()

class Interface(HttpNIMApiInterface):
    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)

        # Debugging info
        loglevel = os.environ.get("NIM_LOG_LEVEL", "INFO")
        loglevel = loglevel if loglevel in ["DEBUG", "INFO", "WARNING", "ERROR", "CRITICAL"] else "INFO"
        self.logger.setLevel(loglevel)
        self.logger.debug(f"Check [nimlib] path: {nimlib.__path__}")

        # Check tritonserver ready
        self.triton_http_port = os.environ.get("TRITON_HTTP_PORT", "8080")

        nwait = 0
        while not self.check_triton_backend():
            nwait += 1

            if nwait > 60:
                self.logger.error("Tritonserver backend checking is timeout. Exiting ...")
                exit(1)
            self.logger.info("Tritonserver backend is not ready yet. Wait for another 10 seconds ...")
            time.sleep(30)

        self.logger.info("Tritonserver backend is ready. Activate NIM APIs ...")

    @HttpNIMApiInterface.route("/molecular-docking/diffdock/generate", methods=["post"])
    def molecular_docking(self, body: MolecularDockingRequest) -> Any:
        self.logger.info("molecular_docking called")

        if body.steps > body.time_divisions:
            raise RequestValidationError('diffusion_steps should be less than or equal to diffusion_time_divisions')

        specs = [_.copy() for _ in MODEL_INPUT]

        for v in specs:
            if type(v[2]) == str and v[2].startswith('$'):
                v[2] = getattr(body, v[2][1:])

        specs[1][2] = f'ligand.{body.ligand_file_type}' # update ligand file name
        inputs = []            

        for v in specs:
            d = np.array([[v[2]]], dtype=v[1])
            inputs.append(httpclient.InferInput(v[0], d.shape, np_to_triton_dtype(d.dtype)))
            inputs[-1].set_data_from_numpy(d)
            self.logger.debug(v[0] + " = " + (str(v[2]) if len(str(v[2]))<24 else str(v[2]).replace('\n',' ')[:18] + "..."))

        outputs = [httpclient.InferRequestedOutput(v[0]) for v in MODEL_OUTPUT]

        with httpclient.InferenceServerClient("localhost:" + self.triton_http_port, network_timeout=TRITON_TIMEOUT, connection_timeout=TRITON_TIMEOUT) as client:
            def build_result_field(v, t):
                if t == str:
                    v = v.astype(str)

                return v.tolist()

            time_start = time.time()
            response = client.infer(MODEL_NAME, inputs, request_id=str(uuid.uuid1()), outputs=outputs, timeout=TRITON_TIMEOUT)
            self.logger.debug(f"Inference time: {time.time() - time_start} seconds")

            result = {v[2]: build_result_field(response.as_numpy(v[0]), v[1]) for v in MODEL_OUTPUT}
            result['protein'] = body.protein
            result['ligand'] = body.ligand
            return result

    def check_triton_backend(self):
        try:
            r = requests.get("http://localhost:"  + self.triton_http_port + "/v2/health/ready")
            return r.status_code == 200
        except requests.exceptions.ConnectionError:
            return False

    @HttpNIMApiInterface.route("/v1/health/ready", methods=["get"])
    def v1_health_ready_get(
        self,
    ) -> Any:
        self.logger.info("v1_health_ready_get called")
        """
        Provide your implementation for this "Health Ready" API
        """
        return self.check_triton_backend()
    
    @HttpNIMApiInterface.route("/v1/models", methods=["get"])
    def v1_models_get(
        self,
    ) -> Any:
        self.logger.info("v1_models_get called")
        return {'available_models': [MODEL_NAME + '-' + MODEL_VER]}

    async def health_live(self):
        """
        Provide your implementation of liveness to know when to restart container
        """
        return await super().health_live()

    async def health_ready(self):
        """
        Provide your implementation of readiness to know when container ready to accept traffic
        """
        return await super().health_ready()


def main():
    interface = Interface()
    interface.serve()


if __name__ == "__main__":
    main()
