# Copyright (c) 2025 Huawei Technologies Co., Ltd.
#
# openMind is licensed under Mulan PSL v2.
# You can use this software according to the terms and conditions of the Mulan PSL v2.
# You may obtain a copy of Mulan PSL v2 at:
#
#          http://license.coscl.org.cn/MulanPSL2
#
# THIS SOFTWARE IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OF ANY KIND,
# EITHER EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT,
# MERCHANTABILITY OR FIT FOR A PARTICULAR PURPOSE.
# See the Mulan PSL v2 for more details.


import argparse
import ast
import math
import os
import shutil
import sys
from pathlib import Path

import yaml
from tabulate import tabulate

from openmind.utils.hub import snapshot_download, upload_folder
from openmind.utils.hub import OM_HUB_CACHE
from openmind.utils import logging
from openmind.utils.constants import (
    GB,
    GIT,
    GIT_LOGS_HEAD,
    OPENMIND_PREFIX,
    SNAPSHOTS,
    MODEL_CONFIG,
)
from openmind.utils.arguments_utils import str2bool, str2bool_or_auto

logger = logging.get_logger(__name__)


def parse_args():
    """
    Parse command-line arguments for `openmind-cli` (push/pull/rm/list).

    Three invocation shapes are supported:
    - `openmind-cli list`
    - `openmind-cli push repo_id`          (positional repo id)
    - `openmind-cli push --repo_id repo_id`

    If a YAML configuration file is provided via `--yaml_path`, its values are
    merged into the parsed namespace. Precedence: explicit CLI flags > YAML
    values > argparse defaults. YAML keys that do not correspond to any
    defined argument raise a ValueError.

    NOTE(review): this function mutates ``sys.argv`` in place (popping the
    sub-command and positional repo id), so it must only be called once per
    process invocation.

    Returns:
        argparse.Namespace: the fully resolved arguments.

    Raises:
        ValueError: if the YAML file contains keys that are not defined
            arguments of the parser.
    """
    parser = argparse.ArgumentParser(description="openMind Arguments")

    parser = _add_model_utils_args(parser)

    # Drop the sub-command token (pull/push/rm/list) so argparse only sees
    # the remaining options.
    if sys.argv[0].endswith("openmind-cli"):
        sys.argv.pop(1)

    # Support the positional form `openmind-cli push repo_id`: pop the bare
    # token and assign it to repo_id after parsing.
    if len(sys.argv) >= 2 and not sys.argv[1].startswith("--"):
        model_name = sys.argv.pop(1)
        args = parser.parse_args()
        args.repo_id = model_name
    else:
        args = parser.parse_args()

    if args.yaml_path is not None:
        yaml_path = args.yaml_path

        with open(yaml_path, "r") as f:
            yaml_args = yaml.safe_load(f)

        # All argument names argparse knows about (uses the private
        # parser._actions attribute to enumerate them).
        defined_params = {action.dest: action for action in parser._actions}
        known_args = {}
        extra_args = {}

        # Flags given explicitly on the CLI; these take precedence over YAML
        # values. NOTE(review): lstrip("--") strips every leading '-'
        # character rather than the literal "--" prefix — equivalent for the
        # "--flag value" tokens expected here.
        cli_args_dict = {
            sys.argv[i].lstrip("--"): sys.argv[i + 1]
            for i in range(1, len(sys.argv) - 1)
            if sys.argv[i].startswith("--") and sys.argv[i] != "--yaml_path"
        }

        for key, value in yaml_args.items():
            if key in defined_params:
                # Skip YAML values that were overridden on the command line.
                if key in cli_args_dict:
                    continue

                known_args[key] = value
            else:
                extra_args[key] = value

        if extra_args:
            raise ValueError(f"Currently {extra_args} is not supported.")

        vars(args).update(known_args)

    validata_args(args)

    return args


def _add_model_utils_args(parser):
    """Register the model push/pull/rm/list and hub upload options on *parser*.

    Args:
        parser (argparse.ArgumentParser): parser to extend.

    Returns:
        argparse.ArgumentParser: the same parser, for chaining.
    """
    model_group = parser.add_argument_group(title="model push/pull/rm/list")

    model_group.add_argument(
        "--repo_id",
        type=str,
        default=None,
        help=(
            "The local path of the model or its name in the hub, "
            "such as /home/models/Telechat-7B-pt or TeleAI/Telechat-7B-pt"
        ),
    )
    model_group.add_argument(
        "--cache_dir",
        type=str,
        default=None,
        help="Cache directory of downloaded models",
    )
    model_group.add_argument(
        "--revision",
        type=str,
        default="main",
        help="The specific model version to use (can be a branch name, tag name or commit id).",
    )
    model_group.add_argument(
        "--token",
        type=str,
        default=None,
        help="The modelers.cn token to download model from private repo.",
    )
    model_group.add_argument(
        "--repo_type",
        type=str,
        default=None,
        help="repository type, which can be 'model', 'dataset', or 'space'. The default value is None, which indicates 'model'.",
    )
    model_group.add_argument(
        "--local_dir",
        type=str,
        default=None,
        help="local path to which the file is downloaded. By default, the .symlink file is created only in the cache.",
    )
    model_group.add_argument(
        "--local_dir_use_symlinks",
        type=str2bool_or_auto,
        default="auto",
        help="Used together with local_dir. If the value is True, the system creates a .symlink file for all files. If the value is False, the system does not create a .symlink file for any file.",
    )
    model_group.add_argument(
        "--resume_download",
        type=str2bool,
        default=True,
        help="whether to resume the previously interrupted download.",
    )
    model_group.add_argument(
        "--force_download",
        type=str2bool,
        default=False,
        help="whether to forcibly download the file regardless the cache.",
    )
    model_group.add_argument(
        "--local_files_only",
        type=str2bool,
        default=False,
        help="If the value is True, the file is not downloaded and the cache path is returned only when the local cache exists. The default value is False.",
    )
    model_group.add_argument(
        "--allow_patterns",
        type=str,
        default=None,
        help="Only certain types of files can be downloaded. For example, allow_patterns='allowed_folder\*' indicates that only files in the allowed_folder directory are downloaded.",
    )
    model_group.add_argument(
        "--ignore_patterns",
        type=str,
        default=None,
        help="Ignore downloading a certain type of file. For example, ignore_patterns='*.log' indicates that all log files are ignored.",
    )
    model_group.add_argument(
        "--max_workers",
        type=int,
        default=8,
        help="Number of threads used for download. Defaults to 8.",
    )

    hub_group = parser.add_argument_group(title="hub")
    hub_group.add_argument(
        "--folder_path",
        type=str,
        default=None,
        help="Path of the directory to be uploaded. The uploaded content does not contain the directory itself. Supports character string or path, for example, 'demo/folder' or Path('demo/folder').",
    )
    hub_group.add_argument(
        "--path_in_repo",
        type=str,
        default=None,
        help="Path for uploading files to the repository, which cannot end with a backslash (). By default, the path is an empty string, indicating the root directory of the repository.",
    )
    hub_group.add_argument(
        "--commit_message",
        type=str,
        default=None,
        help="Commit message of the upload. The default message is Upload folder using openMind hub.",
    )
    hub_group.add_argument(
        "--commit_description",
        type=str,
        default=None,
        help="description of this commit.",
    )
    hub_group.add_argument(
        "--num_threads",
        type=int,
        default=5,
        help="Number of threads used for uploading",
    )
    hub_group.add_argument(
        "--yaml_path",
        type=str,
        help="The yaml path of arguments which will override existing arguments.",
    )
    return parser


def try_to_trans_to_list(patterns):
    """Convert *patterns* to a list when it encodes a Python list literal.

    allow_patterns/ignore_patterns may arrive as a plain string or as a
    string holding a list literal (e.g. "['a', 'b']"). Only a string that
    evaluates to a list is converted; anything else — including strings that
    evaluate to non-list literals or fail to parse — is returned unchanged.
    """
    try:
        parsed = ast.literal_eval(patterns)
    except Exception:
        return patterns
    return parsed if isinstance(parsed, list) else patterns


def validata_args(args):
    """Normalize pattern arguments in place.

    Converts truthy allow_patterns/ignore_patterns values that encode list
    literals into real lists via try_to_trans_to_list.
    """
    for attr_name in ("allow_patterns", "ignore_patterns"):
        value = getattr(args, attr_name)
        if value:
            setattr(args, attr_name, try_to_trans_to_list(value))


def run_pull():
    """
    Download a model/dataset/space from the hub.

    Parses CLI arguments, forwards them to snapshot_download, and logs the
    local path the files were saved to.
    """
    args = parse_args()
    download_kwargs = dict(
        repo_id=args.repo_id,
        revision=args.revision,
        repo_type=args.repo_type,
        cache_dir=args.cache_dir,
        token=args.token,
        local_dir=args.local_dir,
        local_dir_use_symlinks=args.local_dir_use_symlinks,
        resume_download=args.resume_download,
        force_download=args.force_download,
        local_files_only=args.local_files_only,
        allow_patterns=args.allow_patterns,
        ignore_patterns=args.ignore_patterns,
        max_workers=args.max_workers,
    )
    model_path = snapshot_download(**download_kwargs)
    logger.info_rank0(f"Pull {args.repo_id} finished, saved in {model_path}")


def run_push():
    """
    Upload a model/dataset/space folder to the hub.

    Parses CLI arguments and forwards them to upload_folder.
    """
    args = parse_args()
    upload_kwargs = dict(
        repo_id=args.repo_id,
        folder_path=args.folder_path,
        path_in_repo=args.path_in_repo,
        commit_message=args.commit_message,
        commit_description=args.commit_description,
        token=args.token,
        revision=args.revision,
        allow_patterns=args.allow_patterns,
        ignore_patterns=args.ignore_patterns,
        num_threads=args.num_threads,
    )
    upload_folder(**upload_kwargs)


def _get_folder_size(folder_path):
    """
    Calculate the total size of a folder, excluding symlinked files.

    Args:
        folder_path (str): Path to the folder.

    Returns:
        float: Folder size in GB, rounded up to one decimal place.
    """

    size_in_bytes = 0

    for dirpath, _, filenames in os.walk(folder_path):
        for name in filenames:
            full_path = os.path.join(dirpath, name)
            # Symlinks are cheap pointers; do not count the target's size.
            if os.path.islink(full_path):
                continue
            size_in_bytes += os.path.getsize(full_path)

    # Ceil to one decimal place so a non-empty folder never shows 0.0 GB.
    return math.ceil(size_in_bytes / GB * 10) / 10


def _get_local_folder_size(folder_path):
    """
    Calculate the total size of a folder, including all files (symlinks too).

    Args:
        folder_path (str): Path to the folder.

    Returns:
        float: Folder size in GB, rounded up to one decimal place.
    """

    size_in_bytes = sum(
        os.path.getsize(os.path.join(dirpath, name))
        for dirpath, _, filenames in os.walk(folder_path)
        for name in filenames
    )

    # Ceil to one decimal place so a non-empty folder never shows 0.0 GB.
    return math.ceil(size_in_bytes / GB * 10) / 10


def _check_file_exists(directory, filename):
    """
    Check if a file exists in a directory, considering soft links.

    Args:
        directory (str): Path to the directory.
        filename (str): Name of the file to check.

    Returns:
        bool: True if the file exists, False otherwise.
    """

    for root, _, files in os.walk(directory):
        target_file = os.path.join(root, filename)
        # Check whether the target file is a soft link
        if os.path.islink(target_file):
            target_path = os.readlink(target_file)
            absolute_target_path = os.path.realpath(os.path.join(os.path.dirname(target_file), target_path))
            if os.path.exists(absolute_target_path):
                return True
        elif filename in files:
            return True

    return False


def _check_git_om_model(model_path):
    """
    Check if a model is a git-tracked openMind model.

    Reads the git HEAD log under *model_path* and, when its last token
    contains the openMind hub prefix, extracts the model name from that URL
    and computes the folder size.

    Args:
        model_path (str): Path to the model directory.

    Returns:
        tuple: (model_name, model_size)
               - model_name (str): The extracted model name, or "" if not a
                 git-tracked openMind model.
               - model_size (float): Model size in GB (rounded up to one
                 decimal place), or 0 if not applicable.
    """

    model_name = ""
    model_size = 0

    git_head_path = os.path.join(model_path, GIT_LOGS_HEAD)
    if os.path.exists(git_head_path):
        with open(git_head_path, "r") as f:
            log_tokens = f.read().split()
        # Guard against an empty or whitespace-only HEAD log file, which
        # previously raised IndexError on log_tokens[-1].
        if log_tokens and OPENMIND_PREFIX in log_tokens[-1]:
            # URL shape: <OPENMIND_PREFIX><org/name><GIT suffix>
            model_name = log_tokens[-1].split(OPENMIND_PREFIX)[1].split(GIT)[0]
            model_size = _get_folder_size(model_path)

    return model_name, model_size


def _check_cache_om_model(model_path):
    """
    Check if a model is a cached openMind model.

    Recognizes two layouts: the hub cache layout (a SNAPSHOTS subdirectory
    containing a model config) and a plain local folder holding a model
    config without a git HEAD log.

    Args:
        model_path (str): Path to the model directory.

    Returns:
        tuple: (model_name, model_size)
               - model_name (str): Extracted model name if found, else "".
               - model_size (float): Model size in GB (rounded up to one
                 decimal place), else 0.
    """

    model_name = ""
    model_size = 0
    git_head_path = os.path.join(model_path, GIT_LOGS_HEAD)

    if not os.path.isdir(model_path):
        return model_name, model_size

    for entry in os.listdir(model_path):
        if entry == SNAPSHOTS:
            snapshots_dir = os.path.join(model_path, entry)
            if _check_file_exists(snapshots_dir, MODEL_CONFIG):
                # Cache dir names look like "models--org--name"; rebuild
                # "org/name" from the "--"-separated parts.
                model_name = "/".join(model_path.split("/")[-1].split("--")[1:])
                model_size = _get_folder_size(model_path)
                return model_name, model_size

        elif entry == MODEL_CONFIG and not os.path.exists(git_head_path):
            # Plain local folder: the directory name is the model name.
            model_name = model_path.split("/")[-1]
            model_size = _get_local_folder_size(model_path)
            return model_name, model_size

    return model_name, model_size


def _add_model_info(base_path, model_info):
    """
    Collect model information from the entries directly under *base_path*.

    Each entry is probed both as a git-tracked model and as a cached model;
    every successful probe contributes a (model_name, model_path, model_size)
    tuple.

    Args:
        base_path (str): Path where models are stored.
        model_info (set): A set to store model information.

    Modifies:
        model_info: Adds tuples of (model_name, model_path, model_size).
    """

    for entry in os.listdir(base_path):
        candidate_path = os.path.join(base_path, entry)

        for name, size in (
            _check_git_om_model(candidate_path),
            _check_cache_om_model(candidate_path),
        ):
            if name and size:
                model_info.add((name, candidate_path, size))


def _get_model_info(args: argparse.Namespace) -> set:
    """
    Retrieve model information from local and cache directories.

    Scans args.local_dir and/or args.cache_dir when given; falls back to the
    default hub cache when neither is provided.

    Args:
        args (argparse.Namespace): Parsed command-line arguments containing
            `local_dir` and `cache_dir`.

    Returns:
        set: A set of tuples (model_name, model_path, model_size).
    """

    model_info = set()
    search_roots = []

    if args.local_dir:
        search_roots.append(Path(args.local_dir).absolute())
    if args.cache_dir:
        search_roots.append(Path(args.cache_dir).absolute())
    if not search_roots:
        # Neither directory given: fall back to the default hub cache.
        search_roots.append(OM_HUB_CACHE)

    for root in search_roots:
        _add_model_info(root, model_info)

    return model_info


def run_list():
    """
    Print a plain-text table of all models downloaded locally.
    """

    args = parse_args()
    model_info = _get_model_info(args)
    print(
        tabulate(
            sorted(model_info),
            headers=["Model Name", "Model Path", "Model Size(GB)"],
            tablefmt="plain",
            numalign="left",
        )
    )


def run_rm():
    """
    Remove every locally stored model whose name matches --repo_id.

    Raises:
        ValueError: if no matching model exists in the given or default path.
    """

    args = parse_args()
    removed_paths = []

    for name, path, _ in _get_model_info(args):
        if name == args.repo_id:
            print(f"Deleted file path: {path}")
            shutil.rmtree(path)
            removed_paths.append(path)

    if len(removed_paths) > 1:
        print("Files deleted successfully.")
    elif len(removed_paths) == 1:
        print("File deleted successfully.")
    else:
        raise ValueError(f"model `{args.repo_id }` does not exist in the given or default path.")
