# Copyright (c) 2024 Huawei Technologies Co., Ltd.
#
# openMind is licensed under Mulan PSL v2.
# You can use this software according to the terms and conditions of the Mulan PSL v2.
# You may obtain a copy of Mulan PSL v2 at:
#
#          http://license.coscl.org.cn/MulanPSL2
#
# THIS SOFTWARE IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OF ANY KIND,
# EITHER EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT,
# MERCHANTABILITY OR FIT FOR A PARTICULAR PURPOSE.
# See the Mulan PSL v2 for more details.

import os
import sys
from openmind.flow.deploy.mindie import DeployMindie
from openmind.flow.deploy.lmdeploy import DeployLMDeploy
from openmind.flow.arguments import initialize_openmind, get_args


def _torch_version_at_least(version, required):
    """Return True if torch version string *version* is >= the *required* tuple.

    Compares the leading numeric release segment numerically (so "2.10.0"
    correctly ranks above "2.5.1", unlike a plain string comparison) and
    ignores local/dev suffixes such as "+cu124" or ".dev20240101".

    Args:
        version (str): a version string like "2.5.1" or "2.5.1+cu124".
        required (tuple[int, ...]): minimum release, e.g. (2, 5, 1).

    Returns:
        bool: whether *version*'s release segment is >= *required*.
    """
    # Drop any PEP 440 local-version suffix ("2.5.1+cu124" -> "2.5.1").
    release = version.split("+")[0]
    parts = []
    for piece in release.split(".")[:len(required)]:
        # Keep only the leading digits of each piece so suffixes like
        # "1rc1" or "0dev" parse as 1 and 0 instead of raising ValueError.
        digits = ""
        for ch in piece:
            if not ch.isdigit():
                break
            digits += ch
        parts.append(int(digits) if digits else 0)
    # Pad short versions ("2.5" -> (2, 5, 0)) so tuple comparison is fair.
    while len(parts) < len(required):
        parts.append(0)
    return tuple(parts) >= tuple(required)


def run_deploy(**kwargs):
    """Entry point for the deploy subcommand.

    Handles three CLI shapes read from ``sys.argv``:
      * ``... deploy stop``         -> stop the running MindIE container;
      * ``... deploy config.yaml``  -> initialize from the given YAML file;
      * anything else               -> initialize from argv/kwargs,

    then dispatches to the backend selected by ``args.backend``
    (``mindie`` by default, or ``lmdeploy`` / ``vllm``).

    Args:
        **kwargs: forwarded to ``initialize_openmind`` when no YAML/path
            argument is present on the command line.

    Raises:
        ImportError: if the ``vllm`` backend is requested but the installed
            torch is older than 2.5.1.
        ValueError: if ``args.backend`` is not a supported backend name.
    """
    # stop Mindie container
    if len(sys.argv) == 3 and sys.argv[-1] == "stop":
        DeployMindie.stop_service(remind=True)
        return

    if len(sys.argv) == 3 and sys.argv[-1].endswith("yaml"):
        yaml_file = sys.argv[-1]
        initialize_openmind(yaml_file)
    elif len(sys.argv) > 2 and ("/" in sys.argv[2] or os.path.exists(sys.argv[2])):
        # A path-like positional argument: parse argv, tolerating extras.
        initialize_openmind(ignore_unknown_args=True)
    else:
        initialize_openmind(**kwargs)

    args = get_args()
    if args.backend is None or args.backend == "mindie":
        DeployMindie(args).deploy()
    elif args.backend == "lmdeploy":
        DeployLMDeploy(args).deploy()
    elif args.backend == "vllm":
        import torch

        # Numeric comparison: plain string compare would wrongly reject
        # e.g. torch "2.10.0" (since "2.10.0" < "2.5.1" lexicographically).
        if _torch_version_at_least(torch.__version__, (2, 5, 1)):
            from ..flow.deploy.vllm import DeployvLLM

            DeployvLLM(args).deploy()
        else:
            raise ImportError(f"Required torch version >= 2.5.1, but found {torch.__version__}")
    else:
        raise ValueError("backend only supports mindie, vllm and lmdeploy.")
