#!/usr/bin/env python3
#
# Copyright (c) 2010 - 2025, Fraunhofer-Gesellschaft zur Foerderung der angewandten Forschung e.V.
# All rights reserved.
#
# SPDX-License-Identifier: BSD-3-Clause
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice, this
#    list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright notice,
#    this list of conditions and the following disclaimer in the documentation
#    and/or other materials provided with the distribution.
#
# 3. Neither the name of the copyright holder nor the names of its
#    contributors may be used to endorse or promote products derived from
#    this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# We kindly request you to use one or more of the following phrases to refer to
# foxBMS in your hardware, software, documentation or advertising materials:
#
# - "This product uses parts of foxBMS®"
# - "This product includes parts of foxBMS®"
# - "This product is derived from foxBMS®"

"""Main Build Script: ``./wscript``
================================

This script defines how to configure and build the project. This includes
configuration the toolchain for building foxBMS binaries, the documentation
and running various checks on the source files.
"""

# pylint: disable=too-many-locals

import dataclasses
import linecache
import os
import pathlib
import shutil
import sys
from binascii import hexlify

from waflib import Build, Context, Logs, Options, Scripting, Utils
from waflib.Build import (
    BuildContext,
    CleanContext,
    ListContext,
    StepContext,
)
from waflib.Configure import ConfigurationContext

# column width waf uses to align the configuration check messages
Context.Context.line_just = 50

# waf's mandatory module-level settings: build output directory and project top
out = "build"  # pylint: disable=invalid-name
top = "."  # pylint: disable=invalid-name

APPNAME = "foxBMS"

# foxBMS version; this is included in the embedded binaries, as well as the
# documentation and fox.py
VERSION = "1.9.0"

# Binary build command variations that are supported. The commands are then
# generated by concatenating the command + the variant, e.g.,
# build_app_embedded
BIN_VARIANTS = [
    "app_embedded",
    "app_spa",
    "bootloader_embedded",
    "bootloader_spa",
]


# Additional commands, that do not need more contexts than build and clean
MISC_VARIANTS = [
    "app_doxygen",
    "app_doxygen_unit_test",
    "app_host_unit_test",
    "bootloader_doxygen",
    "bootloader_doxygen_unit_test",
    "bootloader_host_unit_test",
    "docs",
]

# all supported variants, grouped by whether they produce a binary artifact
ALL_VARIANTS = {"binary": BIN_VARIANTS, "misc": MISC_VARIANTS}

# directory containing the foxBMS-specific waf tools loaded below
TOOLDIR = os.path.join("tools", "waf-tools")

# central BMS configuration file; it drives the whole configure step
BMS_CONFIG = os.path.join("conf", "bms", "bms.json")


@dataclasses.dataclass
class FoxBMSDefine:
    """Container for a single C preprocessor define (name/value pair)."""

    # name of the define, e.g. 'FOXBMS_AFE_DRIVER_TYPE_FSM'
    name: str
    # value of the define; 'None' marks placeholder entries whose value is
    # filled in later during 'configure' (see 'AFE_SETUP')
    value: int | str | None = 0


# Analog front-end (AFE) preprocessor defines; the entries with an empty name
# and a 'None' value are placeholders that 'configure' fills in based on the
# 'slave-unit:analog-front-end' section of conf/bms/bms.json
AFE_SETUP = {
    "fsm": FoxBMSDefine("FOXBMS_AFE_DRIVER_TYPE_FSM", 0),
    "no-fsm": FoxBMSDefine("FOXBMS_AFE_DRIVER_TYPE_NO_FSM", 0),
    "afe-ic": FoxBMSDefine("", None),
    "afe-ic-family": FoxBMSDefine("", None),
    "afe-manufacturer": FoxBMSDefine("", None),
}

# Dynamically create one waf command per variant (e.g. 'build_app_embedded',
# 'clean_docs') by subclassing the relevant waf context classes. Defining a
# Context subclass with a 'cmd' attribute registers the command with waf.
# Binary variants additionally get 'list' and 'step' commands.
for target_type, target_val in ALL_VARIANTS.items():
    contexts: tuple = (BuildContext, CleanContext)
    if target_type == "binary":
        contexts += (ListContext, StepContext)
    for var in target_val:
        # fix: the previous save/restore of 'contexts' around this loop was
        # dead code; 'contexts' is never rebound inside the loop body
        for cont in contexts:
            # derive the command prefix, e.g. 'BuildContext' -> 'build'
            # pylint: disable-next=invalid-name
            name = cont.__name__.replace("Context", "").lower()

            # pylint:disable-next=invalid-name
            class tmp_1(cont):
                """Helper class to create the build variant commands"""

                # the class body executes immediately, so 'name' and 'var'
                # are bound at their current loop values (no late binding)
                if name == "build":
                    __doc__ = f"executes the {name} of {var}"
                elif name == "install":
                    __doc__ = f"flash {var} to the target"
                elif name == "clean":
                    __doc__ = f"cleans the project {var}"
                elif name == "list":
                    __doc__ = f"lists the targets to execute for {var}"
                elif name == "step":
                    __doc__ = f"executes tasks in a step-by-step fashion, for debugging of {var}"
                cmd = str(name) + "_" + var
                variant = var


# build and clean commands exist for every variant (binary and misc alike)
BUILD_VARIANTS = [
    f"build_{variant}"
    for variant_group in ALL_VARIANTS.values()
    for variant in variant_group
]
CLEAN_VARIANTS = [
    f"clean_{variant}"
    for variant_group in ALL_VARIANTS.values()
    for variant in variant_group
]

# Files and directories that are excluded when running dist commands
DIST_EXCLUDE = (
    # build output, git metadata and CI configuration
    f"{out}/** **/.git **/.gitignore .gitlab/** **/.gitattributes "
    # previously created archives and Python bytecode caches
    "**/*.tar.bz2 **/*.tar.gz **/*.pyc __pycache__ "
    # unpacked waf tool directories and waf lock files
    "tools/waf*.*.**-* .lock-* "
    # workspace/IDE settings and previously unpacked dist directories
    f".ws *eclipse* .vs* {APPNAME.lower()}/**"
)


def version_consistency_checker(ctx):
    """Checks that all version strings in the repository are synced.

    Two checks are performed:

    - the changelog must contain a section for the current ``VERSION``
    - every C source/header below 'docs', 'src' and 'tests' (minus vendored,
      generated and intentionally-invalid files) that carries a Doxygen
      ``@version`` line must state ``v<VERSION>`` on the same line number as
      'src/app/main/main.c' does

    :param ctx: waf context (used during configure and build); errors are
        reported through ``ctx.fatal``, which aborts the command
    """
    doc_dir = "docs"
    changelog_file = ctx.path.find_node(
        os.path.join(doc_dir, "general", "changelog.rst")
    )
    changelog_txt = changelog_file.read(encoding="utf-8")
    if changelog_txt.find(f"[{VERSION}]") < 0:
        ctx.fatal(
            f"The version information in {changelog_file} is different "
            f"from the specified version {VERSION}."
        )
    # fix: the glob previously listed 'src/**/*.c' and 'tests/**/*.c' twice
    # instead of also globbing '*.h', so headers in 'src' and 'tests' were
    # never version-checked
    all_c_sources = ctx.path.ant_glob(
        "docs/**/*.c docs/**/*.h src/**/*.c src/**/*.h tests/**/*.c tests/**/*.h",
        excl=[
            "tests/axivion/addon-test/**/*.c",
            "tests/axivion/addon-test/**/*.h",
            "tests/axivion/compiler-errata/ti-cgt-arm_20.2.6.lts/**/*.c",
            "tests/axivion/compiler-errata/ti-cgt-arm_20.2.6.lts/**/*.h",
            "tests/axivion/qualification-test/**/*.c",
            "tests/axivion/qualification-test/**/*.h",
            "tests/cli/pre_commit_scripts/test_check_doxygen/invalid-version.c",
            "tests/unit/build/**",
            "tests/unit/gen_hcg/**",
        ],
    )
    # determine the reference line number of the '@version' line from main.c
    version_line = -1
    main_txt = ctx.path.find_node("src/app/main/main.c").read()
    for i, line in enumerate(main_txt.splitlines()):
        if line.startswith(" * @version "):
            version_line = i + 1  # linecache line numbers are 1-based
            break
    if version_line < 0:
        # fix: previously a missing '@version' line in main.c made all
        # subsequent checks silently pass (linecache returns '' for line -1)
        ctx.fatal("No '@version' line found in 'src/app/main/main.c'.")
    expected_line = f"* @version v{VERSION}"
    for i in all_c_sources:
        version_line_txt = linecache.getline(i.abspath(), version_line)
        if version_line_txt.startswith(" * @version "):
            if version_line_txt.strip() != expected_line:
                ctx.fatal(
                    f"Version information in {i.abspath()}:{version_line} is "
                    f"not correct (expected '{expected_line}', but found "
                    f"'{version_line_txt.strip()}')."
                )


def options(opt):
    """Defines options that can be passed to waf"""
    opt.add_option(
        "--coverage",
        action="store_true",
        help="Builds a coverage report based on the unit test",
    )
    # load the option handling of the foxBMS-specific tools (including the
    # bootstrap-library-project-tool)
    for tool in (
        "f_axivion",
        "f_sphinx_build",
        "f_doxygen",
        "f_ti_arm_cgt",
        "f_bootstrap_library_project",
    ):
        opt.load(tool, tooldir=TOOLDIR)

    # remove waf default options that are meaningless for this project
    for flag in (
        "--targets",
        "--out",
        "--top",
        "--prefix",
        "--destdir",
        "--bindir",
        "--libdir",
        "--msvc_version",
        "--msvc_targets",
        "--no-msvc-lazy",
        "--force",
        "--check-c-compiler",
    ):
        if opt.parser.get_option(flag):
            opt.parser.remove_option(flag)

    # the project has no install/uninstall step
    for unused_context in (Build.InstallContext, Build.UninstallContext):
        Context.classes.remove(unused_context)

    opt.add_option(
        "--confcache",
        dest="confcache",
        default=0,
        action="count",
        help="Use a configuration cache",
    )

    opt.load("f_lauterbach", tooldir=TOOLDIR)

    # test function that shall not be communicate through the CLI
    opt.add_option(
        "--target-test",
        dest="target_test",
        default=False,
        action="store_true",
        help="SUPPRESS" + "HELP",
    )


# pylint: disable-next=too-many-branches,too-many-statements
def configure(conf: ConfigurationContext):
    """Configures the project"""
    conf.env.APPNAME = APPNAME
    conf.env.VERSION = VERSION

    if " " in conf.path.abspath():
        conf.fatal(f"Project path must not contain spaces ({conf.path}).")
    conf.env.append_unique("PROJECT_ROOT", pathlib.Path(conf.path.abspath()).as_posix())
    known_max_depth = 133
    expected_max_path_depth = len(conf.path.abspath()) + known_max_depth
    if Utils.is_win32 and expected_max_path_depth > 260:
        conf.fatal(
            "Build path length will exceed 260 characters.\nClone or move the "
            "repository into a shorter path."
        )
    else:
        Logs.debug(f"Expected max path depth: {expected_max_path_depth}")
    conf.msg("Checking project path", conf.path.abspath())

    version_consistency_checker(conf)

    conf.find_program("git", var="GIT")
    conf.load("f_node_helper", tooldir=TOOLDIR)
    conf.load("f_ti_arm_cgt", tooldir=TOOLDIR)
    fragment = "#include <stdint.h>\n\nint main() {\n    return 0;\n}\n"
    conf.check(
        features="c", fragment=fragment, msg="Checking for code snippet (object)"
    )

    fragment = "#include <stdint.h>\n\nint sum(int a, int b){\n    return (a + b);}\n"
    conf.check(
        features="c cstlib",
        fragment=fragment,
        msg="Checking for code snippet (library)",
    )

    def full_build(bld):
        bld.env.APPNAME = "TEST_BUILD"
        c_fragment = "#include <stdint.h>\n\nint main() {\n    return 0;\n}\n"
        h_fragment = (
            "#ifndef GENERAL_H_\n#define GENERAL_H_\n#include <stdbool.h>\n"
            "#include <stdint.h>\n#endif /* GENERAL_H_ */\n"
        )
        source = bld.srcnode.make_node("test.c")
        source.parent.mkdir()
        source.write(c_fragment, encoding="utf-8")
        include = bld.srcnode.make_node("general.h")
        include.write(h_fragment, encoding="utf-8")
        linker_script = bld.path.find_node("../../src/app/main/app.cmd")
        version_header = bld.path.find_node("../../src/version/version.h")
        cflags = []
        if bld.env.RTSV_missing:
            cflags = ["--diag_remark=10366"]
        linker_pulls = bld.path.find_or_declare("linker_pulls.json")
        linker_pulls.write("{}\n")
        bld.tiprogram(
            includes=[include.parent, version_header.parent],
            source=[source],
            cflags=cflags,
            linker_script=linker_script,
            no_version=True,
            linker_pulls=linker_pulls,
        )

    default_env = conf.env
    test_env = conf.env.derive()
    test_env.detach()

    conf.setenv("test_env", test_env)
    rtsv_lib = "rtsv7R4_A_be_v3D16_eabi.lib"
    rtsv_lib_path = os.path.join(
        pathlib.Path(conf.env.get_flat("CC")).parent.parent.absolute(),
        "lib",
        rtsv_lib,
    )
    if not os.path.isfile(rtsv_lib_path):
        Logs.warn(
            f"Runtime support library '{rtsv_lib}' missing. Need to build "
            "it first. The next step may take a while..."
        )
        conf.env.RTSV_missing = True
    else:
        conf.env.RTSV_missing = False
    conf.env.STLIB = ["c"]
    conf.env.TARGETLIB = []
    if "--undef_sym=resetEntry" in conf.env.LINKFLAGS:
        conf.env.LINKFLAGS.remove("--undef_sym=resetEntry")
    conf.check(msg="Checking for code snippet (program)", build_fun=full_build)
    conf.setenv("", default_env)

    conf.load("f_bootstrap_library_project", tooldir=TOOLDIR)

    # configure the documentation toolchain
    conf.load("f_sphinx_build", tooldir=TOOLDIR)
    conf.load("f_doxygen", tooldir=TOOLDIR)
    conf.load("f_unit_test", tooldir=TOOLDIR)
    conf.env.VSCODE_MK_DIRS = [
        os.path.join(out, "app_host_unit_test", "test", "mocks"),
        os.path.join(out, "app_embedded", "src", "app", "main"),
        os.path.join(out, "app_embedded", "src", "app", "hal", "include"),
        os.path.join(out, "app_embedded", "src", "app", "hal", "source"),
        os.path.join(out, "app_host_unit_test", "test", "mocks"),
        os.path.join(out, "app_embedded", "src", "app", "hal", "include"),
        os.path.join(out, "app_embedded", "src", "app", "hal", "source"),
        os.path.join(out, "app_host_unit_test", "test", "mocks"),
    ]
    conf.load("f_lauterbach", tooldir=TOOLDIR)
    conf.load("f_axivion", tooldir=TOOLDIR)

    # Configure the build for the correct RTOS
    bms_config_node = conf.path.find_node(BMS_CONFIG)
    conf.env.append_unique(
        "CONFIG_BMS_JSON_HASH", hexlify(bms_config_node.h_file()).decode("utf-8")
    )
    bms_config = bms_config_node.read_json()

    conf.load("bms_config_validator", tooldir=TOOLDIR)
    conf.validate_bms_configuration(bms_config)

    # parse conf/bms/bms.json to get all required defines, includes etc.
    # needs to be done, prior to loading the VS Code tool!
    # AFE on Slave unit: bms.json:slave-unit:analog-front-end
    slave_afe = bms_config["slave-unit"]["analog-front-end"]
    afe_man = slave_afe["manufacturer"]
    afe_ic = slave_afe["ic"]
    conf.env.afe_manufacturer = afe_man
    conf.env.afe_ic = afe_ic
    # vendor/ic includes and foxBMS specific driver adaptions
    afe_ic_inc = slave_afe["ic"]
    afe_driver_type = "fsm"
    afe_ic_d = ""
    if slave_afe["manufacturer"] == "ltc":
        if slave_afe["ic"] in ("6804-1", "6811-1", "6812-1"):
            afe_ic_inc = "6813-1"
        if slave_afe["ic"] == "6804-1":
            afe_ic_d = f"{afe_man.upper()}_LTC6804_1"
        elif slave_afe["ic"] == "6806":
            afe_ic_d = f"{afe_man.upper()}_LTC6806"
        elif slave_afe["ic"] == "6811-1":
            afe_ic_d = f"{afe_man.upper()}_LTC6811_1"
        elif slave_afe["ic"] == "6812-1":
            afe_ic_d = f"{afe_man.upper()}_LTC6812_1"
        elif slave_afe["ic"] == "6813-1":
            afe_ic_d = f"{afe_man.upper()}_LTC6813_1"
    elif slave_afe["manufacturer"] == "nxp":
        afe_driver_type = "no-fsm"
        if slave_afe["ic"] == "mc33775a":
            afe_ic_d = f"{afe_man.upper()}_MC33775A"
    elif slave_afe["manufacturer"] == "adi":
        afe_driver_type = "no-fsm"
        if slave_afe["ic"] == "ades1830":
            afe_ic_d = f"{afe_man.upper()}_ADES1830"
    elif slave_afe["manufacturer"] == "debug":
        if slave_afe["ic"] == "default":
            afe_ic_d = f"{afe_man.upper()}_DEFAULT"
        if slave_afe["ic"] == "can":
            afe_driver_type = "no-fsm"
            afe_ic_d = f"{afe_man.upper()}_CAN"
    elif slave_afe["manufacturer"] == "maxim":
        if slave_afe["ic"] == "max17852":
            afe_ic_d = f"{afe_man.upper()}_MAX17852"
    elif slave_afe["manufacturer"] == "ti":
        if slave_afe["ic"] == "dummy":
            afe_ic_d = "TI_DUMMY"

    if not afe_ic_d:
        conf.fatal("AFE IC specific define not set.")
    # set AFE configuration
    AFE_SETUP[afe_driver_type].value = 1
    AFE_SETUP["afe-ic"].name = "FOXBMS_AFE_DRIVER_" + afe_ic_d
    AFE_SETUP["afe-ic"].value = 1
    AFE_SETUP["afe-manufacturer"].name = "FOXBMS_AFE_DRIVER_" + afe_man.upper()
    AFE_SETUP["afe-manufacturer"].value = 1
    for _, i in AFE_SETUP.items():
        conf.define(i.name, i.value)

    # get AFE includes
    afe_base_path = os.path.join("src", "app", "driver", "afe")
    incs = os.path.join(
        afe_base_path, afe_man, afe_ic_inc, f"{afe_man}_{afe_ic_inc}.json"
    )
    afe_details = conf.path.find_node(incs).read_json()
    afe_includes = [
        os.path.join(afe_base_path, afe_man, afe_ic_inc, i)
        for i in afe_details["include"]
    ]
    for i in afe_includes:
        if not os.path.isdir(i):
            conf.fatal(f"'{i}' does not exist.")
    conf.env.append_unique(
        "INCLUDES_AFE", [conf.path.find_node(i).abspath() for i in afe_includes]
    )
    # temperature sensor on Slave unit: bms.json:slave-unit:temperature-sensor
    slave_temp = bms_config["slave-unit"]["temperature-sensor"]
    conf.env.temperature_sensor_manuf = slave_temp["manufacturer"]
    conf.env.temperature_sensor_model = slave_temp["model"]
    conf.env.temperature_sensor_meth = slave_temp["method"]

    # application setting: bms.json:application
    # state estimation
    app_cfg = bms_config["application"]
    state_estimators = app_cfg["algorithm"]["state-estimation"]
    conf.env.state_estimator_soc = state_estimators["soc"]
    conf.env.state_estimator_soe = state_estimators["soe"]
    conf.env.state_estimator_sof = state_estimators["sof"]
    conf.env.state_estimator_soh = state_estimators["soh"]

    # balancing strategy
    conf.env.balancing_strategy = app_cfg["balancing-strategy"]
    # ltc 6806 (fuel cell monitoring ic) has no balancing support
    if (
        afe_man == "ltc"
        and afe_ic == "6806"
        and not conf.env.balancing_strategy == "none"
    ):
        conf.fatal(f"{afe_man.upper()} {afe_ic} does not support balancing.")

    # insulation-monitoring-device
    imd_cfg = app_cfg["insulation-monitoring-device"]
    conf.env.imd_manufacturer = imd_cfg["manufacturer"]
    conf.env.imd_model = imd_cfg["model"]
    if conf.env.imd_manufacturer:
        conf.env.append_unique(
            "INCLUDES_IMD",
            [
                conf.path.find_node(i)
                for i in [
                    conf.env.imd_manufacturer + conf.env.imd_model,
                ]
            ],
        )

    # rtos: bms.json:rtos
    rtos = bms_config["rtos"]
    try:
        rtos_name = rtos["name"]
    except IndexError:
        conf.fatal("Could not determine operating system.")
    conf.env.append_unique("RTOS_NAME", rtos_name)
    conf.define(f"FOXBMS_USES_{rtos_name.upper()}", 1)

    # root directory of the RTOS
    rtos_base_path = f"src/os/{rtos_name}"
    # the kernel is in a directory with the same name as the RTOS
    kernel_base_path = f"{rtos_base_path}/{rtos_name}"

    rtos_kernel_details = conf.path.find_node(
        os.path.join(kernel_base_path, f"{rtos_name}_cfg.json")
    ).read_json()

    rtos_kernel_includes = [
        os.path.join(kernel_base_path, i) for i in rtos_kernel_details["include"]
    ]
    conf.env.append_unique(
        "INCLUDES_RTOS",  # TODO rename to 'INCLUDES_RTOS_KERNEL'
        [conf.path.find_node(i).abspath() for i in rtos_kernel_includes],
    )

    conf.env.append_unique("RTOS_ADDONS", rtos.get("addons", []))
    for addon in conf.env.RTOS_ADDONS:
        addon_base_path = f"{rtos_base_path}"
        if rtos_name == "freertos":
            addon_base_path = f"{addon_base_path}/freertos-plus"
        addon_base_path = f"{addon_base_path}/{addon}"
        rtos_addon_details = conf.path.find_node(
            os.path.join(addon_base_path, f"{addon}_cfg.json")
        ).read_json()
        rtos_addon_includes = [
            os.path.join(addon_base_path, addon)
            for addon in rtos_addon_details["include"]
        ]
        conf.env.append_unique(
            "INCLUDES_RTOS_ADDONS",
            [conf.path.find_node(i).abspath() for i in rtos_addon_includes],
        )
    conf.load("codegen_matlab", tooldir=TOOLDIR)

    # load VS Code setup as last foxBMS specific tool to ensure that all
    # variables have a meaningful value
    conf.load("f_vscode", tooldir=TOOLDIR)


# pylint: disable-next=too-many-branches,too-many-statements
def build(bld: BuildContext):
    """High level definition of the build details

    Validates the SPA command ordering constraint, checks that the
    configuration is still in sync with 'conf/bms/bms.json' and dispatches
    to the correct sub-build ('src' or 'docs') based on the build variant.
    """
    if not bld.variant:
        bld.fatal(
            f"A {bld.cmd} variant must be specified. The build variants are: "
            f"{', '.join(BUILD_VARIANTS)}.\nFor more details run 'python "
            f"tools{os.sep}waf --help'"
        )
    # we need to patch the build instructions for the SPA build, and by
    # that the "normal" build using TI ARM CGT gets broken (only in that
    # context!), therefore (build|clean)_app_spa must only be used as last
    # build commands if multiple commands are supplied.
    all_commands = [bld.cmd] + Options.commands  # current command + remaining
    if any(
        x in all_commands
        for x in [
            "build_app_spa",
            "clean_app_spa",
            "build_bootloader_spa",
            "clean_bootloader_spa",
        ]
    ):
        # index of the first SPA build command ('sys.maxsize': not present)
        b_idx = sys.maxsize
        try:
            b_idx = all_commands.index("build_app_spa")
        except ValueError:
            pass
        try:
            b_idx = min(all_commands.index("build_bootloader_spa"), b_idx)
        except ValueError:
            pass
        # index of the first SPA clean command ('sys.maxsize': not present)
        c_idx = sys.maxsize
        try:
            c_idx = all_commands.index("clean_app_spa")
        except ValueError:
            pass
        try:
            # fix: this previously assigned to 'b_idx', so a
            # 'clean_bootloader_spa' command clobbered the index of the first
            # SPA *build* command instead of updating the clean index, and
            # non-SPA commands before it could slip through the check below
            c_idx = min(all_commands.index("clean_bootloader_spa"), c_idx)
        except ValueError:
            pass
        min_idx = min([b_idx, c_idx])
        # everything from the first SPA command on must also be SPA-related
        ax_commands = all_commands[min_idx:]
        err = 0
        for i in ax_commands:
            if "_spa" not in i:
                err += 1
                Logs.error(f"'{i}' must not be used in that order {all_commands!r}.")
        if err:
            bld.fatal(
                "SPA related commands must be moved to the end of the "
                "command list, i.e., all other build commands must precede "
                "the SPA commands."
            )
    version_consistency_checker(bld)
    bld.env.append_unique(
        "CMD_FILES",
        [bld.path.find_node(os.path.join("conf", "cc", "remarks.txt")).abspath()],
    )
    # compare the current hash of bms.json with the one stored at configure
    # time; a mismatch means the configuration is stale
    if not bld.env.CONFIG_BMS_JSON_HASH[0] == hexlify(
        bld.path.find_node(BMS_CONFIG).h_file()
    ).decode("utf-8"):
        bld.fatal(f"{BMS_CONFIG} has changed. Please run the configure command again.")

    if bld.variant in (
        "app_embedded",
        "app_spa",
        "bootloader_embedded",
        "bootloader_spa",
    ):
        bld.recurse("src")

    if "unit_test" in bld.variant:
        # always verify that every source file has a matching test file
        Options.commands = ["check_test_files"] + Options.commands

        if bld.cmd.startswith("clean"):
            return

        base_dir = os.path.join("conf", "unit")
        if Utils.is_win32:
            project_infix = "win32"
        else:
            project_infix = "posix"

        if bld.cmd.startswith("build"):
            # verify the host unit test toolchain before doing anything
            dep_err = 0
            for i in ("ruby", "gcc", "gdb", "gcov", "gcovr", "ceedling"):
                if not bld.env.get_flat(i.upper()):
                    dep_err += 1
                    Logs.error(f"Can not run unit tests as '{i}' is missing.")
            if dep_err:
                bld.fatal("Can not run unit tests due to missing dependencies.")

        if bld.variant == "app_host_unit_test":
            project_file = bld.path.find_node(
                os.path.join(base_dir, f"app_project_{project_infix}.yml")
            )
            bld(
                features="subst",
                source=project_file,
                target="project.yml",
                is_copy=True,
            )
            if Utils.is_win32:
                # check comment in function '_cleanup_hcg_sources'
                remove = [
                    "source/HL_sys_main.c",
                    "source/HL_sys_startup.c",
                    "source/HL_sys_link.cmd",
                ]
                bld(
                    source=os.path.join("conf", "hcg", "app.hcg"),
                    unit_test=True,
                    startup_hash=bld.path.find_node(
                        os.path.join("src", "app", "hal", "app-startup.hash")
                    ),
                    remove=remove,
                )
            else:
                Logs.warn(
                    "HALCoGen not available. Assuming generated sources are available otherwise."
                )
            bld.add_group()
            bld(features="ceedling")

        if bld.variant == "bootloader_host_unit_test":
            project_file = bld.path.find_node(
                os.path.join(base_dir, f"bootloader_project_{project_infix}.yml")
            )
            bld(
                features="subst",
                source=project_file,
                target="project.yml",
                is_copy=True,
            )
            if Utils.is_win32:
                # check comment in function '_cleanup_hcg_sources'
                remove = [
                    "source/HL_sys_main.c",
                    "source/HL_sys_startup.c",
                    "source/HL_sys_link.cmd",
                ]
                bld(
                    source=os.path.join("conf", "hcg", "bootloader.hcg"),
                    unit_test=True,
                    startup_hash=bld.path.find_node(
                        os.path.join(
                            "src", "bootloader", "hal", "bootloader-startup.hash"
                        )
                    ),
                    remove=remove,
                )
            else:
                Logs.warn(
                    "HALCoGen not available. Assuming generated sources are available otherwise."
                )
            bld.add_group()
            bld(features="ceedling")

    # pylint: disable-next=consider-using-in
    if bld.variant in (
        "app_doxygen",
        "app_doxygen_unit_test",
        "bootloader_doxygen",
        "bootloader_doxygen_unit_test",
        "docs",
    ):
        bld.recurse("docs")


def build_all(ctx: Context.Context):  # pylint: disable=unused-argument
    """shortcut to build all variants"""
    # SPA commands must run after all other build commands (see 'build'),
    # so they are collected separately and appended at the end
    deferred_spa = []
    for build_cmd in BUILD_VARIANTS:
        if "app_spa" in build_cmd:
            deferred_spa.append("build_app_spa")
        elif "bootloader_spa" in build_cmd:
            deferred_spa.append("build_bootloader_spa")
        else:
            Options.commands.append(build_cmd)
    Options.commands.extend(deferred_spa)


def clean_all(ctx: Context.Context):  # pylint: disable=unused-argument
    """shortcut to clean all variants"""
    # SPA commands must run after all other clean commands (see 'build'),
    # so they are collected separately and appended at the end
    deferred_spa = []
    for clean_cmd in CLEAN_VARIANTS:
        if "app_spa" in clean_cmd:
            deferred_spa.append("clean_app_spa")
        elif "bootloader_spa" in clean_cmd:
            deferred_spa.append("clean_bootloader_spa")
        else:
            Options.commands.append(clean_cmd)
    Options.commands.extend(deferred_spa)


def make_distcheck_cmd(self: Scripting.DistCheck, tmpdir: str):
    """Assembles the command that 'distcheck' runs on the unpacked archive.

    The project has no install step, so the temporary installation directory
    waf created is removed; the archive is verified by configuring and
    building all variants instead.
    """
    shutil.rmtree(tmpdir)  # we do not need this temp dir as we do not install
    waf_script = os.path.relpath(sys.argv[0], self.path.abspath())
    return [
        sys.executable,
        os.path.join(self.path.abspath(), self.get_base_name(), waf_script),
        "configure",
        "build_all",
    ]


# configure waf's dist/distcheck commands: archive base name, archive format,
# exclusion patterns and the custom distcheck command defined above
Scripting.Dist.base_name = APPNAME.lower()
Scripting.Dist.algo = "tar.gz"
Scripting.Dist.excl = DIST_EXCLUDE
Scripting.DistCheck.make_distcheck_cmd = make_distcheck_cmd


def check_test_files(ctx: Context.Context):
    """Check if test files to corresponding source files exist.

    For every C file below 'src' (minus vendored/generated code) a matching
    'test_<name>.c' must exist below 'tests/unit' at the mirrored relative
    path. Additionally, every test function signature must have the exact
    form 'void test<TestName>(void) {'. Violations abort via ``ctx.fatal``.
    """
    # strip '<repo>/src' + separator from the absolute paths so that e.g.
    # '<repo>/src/app/x/y.c' becomes 'app/x/y.c'
    # fix: 'os.sep' is the directory separator; 'os.pathsep' (previously
    # used) separates entries of PATH-like variables -- this only worked by
    # accident because both happen to be one character long
    prefix = os.path.join(ctx.path.abspath(), "src") + os.sep
    sources = [
        i.abspath()[len(prefix) :]
        for i in ctx.path.ant_glob(
            [
                "src/app/**/*.c",
                "src/bootloader/**/*.c",
                "src/opt/**/*.c",
            ],
            excl=[
                "src/app/driver/sbc/fs8x_driver/**",
                "src/app/driver/afe/nxp/mc33775a/vendor/**",
                "src/app/hal/**",
                "src/os/**",
            ],
        )
    ]

    prefix = os.path.join(ctx.path.abspath(), "tests", "unit") + os.sep
    tests = [
        i.abspath()[len(prefix) :].replace("test_", "")
        for i in ctx.path.ant_glob("tests/unit/**/test_*.c")
    ]
    # source files without a corresponding test file
    diff = set(sources) - set(tests)
    err_msg = ""
    for i in diff:
        # fix: removed a dead assignment to 'test_file' that was immediately
        # overwritten by the one below
        test_file = os.path.join(
            "tests",
            "unit",
            i.replace(pathlib.Path(i).name, f"test_{pathlib.Path(i).name}"),
        )
        err_msg += f"Missing test file for:  {i} (should be in: {test_file})\n"
    if diff:
        ctx.fatal(f"{err_msg}\nTest files are missing.")

    err_msg = ""
    for test in ctx.path.ant_glob("tests/unit/**/test_*.c"):
        for i, line in enumerate(test.read(encoding="utf-8").splitlines()):
            if line.startswith("void test"):
                if not line.endswith("(void) {"):
                    # fix: error message had a missing closing parenthesis
                    err_msg += (
                        f"{test.abspath()}:{i + 1}: Test files need to have "
                        f"the form 'test<TestName>(void) {{' ({line})\n"
                    )
    if err_msg:
        # fix: message previously read "Tests are implement invalid."
        ctx.fatal(f"{err_msg}\nTests are implemented incorrectly.")


def get_deepest_src_file(ctx: Context.Context):
    """Prints every file below 'src' with its absolute path length, sorted
    from shortest to longest relative path (i.e., deepest files last)."""
    src_files = ctx.path.ant_glob("src/**")
    for node in sorted(src_files, key=lambda n: len(n.path_from(ctx.path))):
        print(len(node.abspath()), node.abspath())