import argparse
import fnmatch
import os
import re
import subprocess
import sys
import time
import xml.etree.ElementTree as ET
import chardet
import loguru
#from ruamel import yaml
from ruamel.yaml import YAML
import meson
import maven
from cmake_order_processor import CmakeOrderProcessor
from autotool_order_processor import AutotoolOrderProcessor
import mantiDB
from meson_order_processor import MesonOrderProcessor
from parse_spec import estimate_result
from parse_spec import merge_and_center_cells
from parse_spec import calculate_average_results
from parse_spec import extract_and_copy_files
import cmake
import autotools
import autoconf_depends
# Example of the known-dependencies dictionary shape:
# dependencies = {
#     'buildRequires': [
#         'find_package.PulseAudio',
#         'find_package.DBus1'
#     ]
# }
RESULT_RPM_YAML_NAME = "package-mapping-result.yaml"
CMAKE_DEPENDS_FILE_NAME = "cmake-mapping-depends.yaml"
AUTOTOOL_DEPENDS_FILE_NAME = "autotool-mapping-depends.yaml"
MESON_DEPENDS_FILE_NAME = "meson-mapping-depends.yaml"
MAVEN_DEPENDS_FILE_NAME = "maven-mapping-depends.yaml"

# Default path for the aggregated estimate results.
# NOTE(review): "PATG" in the constant name below looks like a typo for "PATH";
# the name is used consistently throughout this file, so it still works.
ALL_ESTIMATE_FILE_PATG = "../estimate.xlsx"

def set_style(libs_dict):
    """Normalise every value of libs_dict to a plain list, mutating in place.

    Returns the same dict object for convenience.
    """
    for key, value in libs_dict.items():
        libs_dict[key] = list(value)
    return libs_dict


def process_orders(depends):
    """Split compound orders into individual cleaned orders.

    E.g. 'find_program.wayland-scanner && find_package.PKG_WAYLAND' becomes
    two separate orders; surrounding whitespace and a leading '!' (negation)
    are stripped from every resulting order.
    """
    separator = re.compile(r'\s*&&\s*|\s*\|\|\s*')
    processed = {}
    for condition, orders in depends.items():
        cleaned = []
        for order in orders:
            # An order without '&&'/'||' passes through re.split unchanged,
            # so both cases reduce to: split, strip, drop leading '!'.
            for fragment in separator.split(order):
                cleaned.append(fragment.strip().lstrip('!'))
        processed[condition] = cleaned
    return processed

# def read_yaml_file(file_path):
#     with open(file_path, 'r', encoding='utf-8') as file:
#         data = yaml.safe_load(file)
#     set_style(data)
#     data = process_orders(data)
#     return data
def read_yaml_file(path):
    """Load a dependency-mapping YAML file and normalise its contents.

    Values are converted to plain lists (set_style) and compound orders
    such as 'a && b' are split into individual orders (process_orders).
    """
    # BUGFIX: read explicitly as UTF-8. The previous call relied on the
    # platform/locale default encoding, which breaks on non-UTF-8 locales;
    # the earlier (commented-out) implementation already used utf-8.
    with open(path, 'r', encoding='utf-8') as file:
        yaml = YAML(typ='safe', pure=True)
        data = yaml.load(file)
    set_style(data)
    data = process_orders(data)
    return data

# def write_yaml_file(content_dict, dir, file_name):
#     yml = yaml.YAML()
#     yml.width = 4096
#     set_style(content_dict)
#     fn = os.path.join(dir, file_name)
#     with open(fn, 'w', encoding='utf-8') as file:
#         yml.dump(content_dict, file)
#     return fn

def write_yaml_file(content_dict, dir, file_name):
    """Serialise content_dict as YAML into dir/file_name; return the full path.

    Values are normalised to plain lists (in place) before dumping.
    """
    set_style(content_dict)
    target = os.path.join(dir, file_name)
    serializer = YAML()
    serializer.width = 4096
    with open(target, 'w', encoding='utf-8') as file:
        serializer.dump(content_dict, file)
    return target






class PackageMapper:
    """Maps build-system dependency orders to rpm package names.

    One instance works on a single project directory. cmake, autotool and
    meson orders are resolved through their dedicated order processors;
    maven coordinates are wrapped directly as mvn(...) requirements.
    """

    def __init__(self, working_dir, os_version, special_rpms=None):
        # Per-order debug record: each processed order maps either to its rpm
        # list, or to a tuple (Find*.cmake file info, rpm list) where the file
        # info dict carries the keys:
        #     'config_path', 'version_path', 'depends_info', 'version_info'
        self.debug_map = {}
        self.rpm_set = set()    # union of every rpm name produced so far
        self.result = {}        # {condition (e.g. 'buildRequires'): set of rpm names}
        self.os_version = os_version
        self.cmake_order_processor = CmakeOrderProcessor(working_dir, os_version)
        self.autotool_order_processor = AutotoolOrderProcessor(working_dir, os_version)
        self.meson_order_processor = MesonOrderProcessor(working_dir, os_version)

        # User-defined search paths could be pulled from the environment here:
        # cmake_prefix_path = os.environ.get('CMAKE_PREFIX_PATH', '').split(';')
        # search_paths.extend(cmake_prefix_path)

        # Extra rpms implied by the presence of particular files
        # (e.g. *.am files imply libtool).
        # BUGFIX: the default used to be a shared mutable list ([]); use the
        # None sentinel so instances never share one default object.
        self.specal_rpms = [] if special_rpms is None else special_rpms

    def clear_result(self):
        """Reset every accumulated mapping result."""
        self.rpm_set.clear()
        self.result.clear()
        self.cmake_order_processor.clear_result()

    def single_mapping(self, mapping_type, order, rpm_set=None, visited=None):
        """Resolve one order (e.g. 'find_package.LibXml2') to rpm names.

        Recurses through dependencies declared by Find*.cmake files; `visited`
        guards against infinite recursion. Returns the accumulated rpm_set
        (or None when mapping_type is invalid).
        """
        # Mutable default arguments would be shared between all calls and
        # pollute each other's results, hence the None sentinels.
        if rpm_set is None:
            rpm_set = set()
        if visited is None:
            visited = set()
        visited.add(order)
        # From '!find_package.RiVLib && find_package.LibXml2' keep only the
        # part after the last '&&' (here: find_package.LibXml2).
        # TODO: 'find_path.png.h && find_package.ZLIB' used to keep only the
        # tail; consider merging the results of all parts instead.
        pattern = r'.*&&\s*(.*)'
        match = re.search(pattern, order)
        if match:
            order = match.group(1)

        OrderProcessor = None
        if mapping_type == 'cmake':
            OrderProcessor = self.cmake_order_processor
        elif mapping_type == 'autotool':
            OrderProcessor = self.autotool_order_processor
        elif mapping_type == 'meson':
            OrderProcessor = self.meson_order_processor
        else:
            loguru.logger.error(f"mapping_type {mapping_type} is not valid")
            return

        assert(OrderProcessor is not None)
        value, need_recursion = OrderProcessor.process(order)
        self.debug_map[order] = value
        if need_recursion:
            # Here value is a tuple: (Find*.cmake file info, rpm list).
            rpm_list = value[1]
            rpm_set.update(rpm_list)
            self.rpm_set.update(rpm_list)  # global accumulator
            value = value[0]
            # Recurse into the dependencies declared by the Find*.cmake file.
            if value['config_path'] is not None:
                new_depends = value['depends_info']
                # Split 'findxxx.yyy && findxxx.zzz' style orders in two.
                new_depends = process_orders(new_depends)
                for _, new_orders in new_depends.items():
                    for new_order in new_orders:
                        # Skip orders already seen to prevent infinite recursion.
                        if new_order not in visited:
                            self.single_mapping(mapping_type, new_order, rpm_set, visited)
        else:
            # Here value is a plain rpm list.
            rpms = value
            rpm_set.update(rpms)
            self.rpm_set.update(rpms)   # global accumulator
        return rpm_set

    def mapping(self, mapping_type, depends, rpm_set):
        """Map every order of a {condition: [orders]} dict into rpm_set."""
        if not depends:
            # Nothing to map.
            return
        for _, orders in depends.items():
            for order in orders:
                visited = set()
                self.single_mapping(mapping_type, order, rpm_set, visited)

    def _merge_default_rpms(self, default_rpms):
        # Fold one build system's always-required packages into the
        # 'buildRequires' bucket, creating it when absent.
        default_rpms_set = set(default_rpms)
        if self.result.get('buildRequires') is not None:
            self.result.get('buildRequires').update(default_rpms_set)
        else:
            self.result['buildRequires'] = default_rpms_set

    def sweep_mapping(self, cmake_enable, autotool_enable, meson_enable, maven_enable, cmake_depends, autotool_depends, meson_depends, maven_depends, output_dir, is_estimate = 'False', save_all=False):
        """Run the mapping for every enabled build system and persist the result.

        Results are written to package-mapping-result.yaml in output_dir;
        when is_estimate == 'True' an evaluation spreadsheet is produced as
        well (the shared one when save_all is True).
        """
        # The per-build-system bodies only differed in the default rpm list;
        # the shared merge logic now lives in _merge_default_rpms.
        if cmake_enable:
            for condition, orders in cmake_depends.items():
                if condition not in self.result.keys():
                    self.result[condition] = set()
                self.mapping('cmake', {condition:orders}, self.result[condition])
            self._merge_default_rpms(['cmake', 'gdb', 'make', 'gcc', 'gcc-c++', 'findutils', 'ninja-build','dbus-x11', 'xorg-x11-server-Xvfb'])
        if autotool_enable:
            for condition, orders in autotool_depends.items():
                if condition not in self.result.keys():
                    self.result[condition] = set()
                self.mapping('autotool', {condition: orders}, self.result[condition])
            self._merge_default_rpms(['gcc', 'gcc-c++', 'make'])
        if meson_enable:
            for condition, orders in meson_depends.items():
                if condition not in self.result.keys():
                    self.result[condition] = set()
                self.mapping('meson', {condition:orders}, self.result[condition])
            self._merge_default_rpms(['meson', 'gcc', 'gcc-c++', 'vala'])  # TODO to be completed
        if maven_enable:
            # Maven coordinates need no rpm resolution; wrap them as mvn(...).
            for condition, orders in maven_depends.items():
                if condition not in self.result.keys():
                    self.result[condition] = set()
                for order in orders:
                    self.result[condition].add('mvn(' + order + ')')
            self._merge_default_rpms(['maven-local', 'java', 'maven'])  # TODO to be completed
        # Finally add the special rpms implied by the file scan.
        self.result['buildRequires'].update(self.specal_rpms)
        # Persist to package-mapping-result.yaml.
        fn = write_yaml_file(self.result, output_dir, RESULT_RPM_YAML_NAME)
        maven_only = not cmake_enable and not autotool_enable and not meson_enable and maven_enable
        if is_estimate == 'True':
            spec_path = os.path.join(output_dir, "*.spec")
            if save_all:
                estimate_result(fn, spec_path, ALL_ESTIMATE_FILE_PATG, self.os_version, maven_only)
            else:
                save_path = os.path.join(output_dir, 'one_estimate.xlsx')
                estimate_result(fn, spec_path, save_path, self.os_version, maven_only)

    def printMap(self):
        """Dump debug_map for debugging."""
        for key,val in self.debug_map.items():
            print("==================")
            print(f"package_name:{key}")
            if isinstance(val,tuple):
                print(f"rpm_name:{val[1]}")
                print(f"config_path:{val[0]['config_path']}")
                # print(f"version_path:{val['version_path']}")
                print(f"depends_info:{val[0]['depends_info']}")
                # print(f"version_info:{val['version_info']}")
            else:
                print(f"rpm_name:{val}")

    def printRPM(self):
        """Dump the global rpm set for debugging."""
        print("=============================")
        print("============**RPM**==========")
        print("=============================")
        print(self.rpm_set)

    def printResult(self):
        """Dump the per-condition mapping result for debugging."""
        for condition, depends in self.result.items():
            print("==========================================================")
            print(condition)
            print("**********************************************************")
            for depend in depends:
                print(depend)
            print("==========================================================\n")


# Parse and locate the config.cmake file for each find_package order.


# Print the results.

def mapping_order(mapping_type, order, os_version):
    """Map a single build-system order to rpm names and print them."""
    # BUGFIX: use the portable '.' instead of the Windows-only '.\\' so the
    # mapper's working directory is a valid relative path on POSIX too.
    mapper = PackageMapper(".", os_version)
    rpm_set = mapper.single_mapping(mapping_type, order)
    print(rpm_set)


def windows_findstr(directory, keyword):
    """Return True when Windows 'findstr' locates keyword in any file under directory.

    Any failure (e.g. findstr unavailable) is reported and treated as "not found".
    """
    try:
        completed = subprocess.run(['findstr', '/S', '/I', keyword, '*.*'],
                                   cwd=directory,
                                   stdout=subprocess.PIPE,
                                   stderr=subprocess.PIPE,
                                   text=True,
                                   encoding='utf-8',
                                   errors='ignore')
    except Exception as e:
        print(f"Error while running findstr: {e}")
        return False
    return completed.stdout.strip() != ""



def linux_find(dir, keyword):
    """Return True if keyword occurs (case-insensitively) in any file under dir.

    Uses `grep -r -i`, skipping *.spec and *.yaml files. Any failure is
    reported and treated as "not found".
    """
    try:
        # BUGFIX: the old single flag '--exclude=*.{spec,yaml}' relied on
        # shell brace expansion, which never happens when subprocess.run is
        # given an argv list — so nothing was actually excluded. grep takes
        # one glob per --exclude flag instead.
        result = subprocess.run(['grep', '-r', '-i', '--exclude=*.spec', '--exclude=*.yaml', keyword, dir],
                                stdout=subprocess.PIPE,
                                stderr=subprocess.PIPE,
                                text=True,
                                encoding='utf-8',
                                errors='ignore')
        return result.stdout.strip() != ""  # non-empty output means a match
    except Exception as e:
        print(f"Error while running grep: {e}")
        return False

def linux_search_str(dir, str_rpm_map):
    """Scan dir for every keyword of str_rpm_map; collect the rpms of found keywords.

    str_rpm_map maps a search keyword to the list of rpm packages it implies.
    Uses findstr on Windows and grep on other systems.
    """
    matched_rpms = set()
    on_windows = os.name == 'nt'
    for keyword, rpm_packages in str_rpm_map.items():
        if on_windows:
            if windows_findstr(dir, keyword):
                print(f"keyword:{keyword}    rpm:{rpm_packages}")
                matched_rpms.update(rpm_packages)
        elif linux_find(dir, keyword):
            matched_rpms.update(rpm_packages)
    return matched_rpms


def mapping_file(directory, os_version, is_estimate = 'False', save_all = False):
    """Run the rpm package-name mapping for one project directory.

    Detects which build systems are present (CMakeLists.txt, configure.ac,
    meson.build, pom.xml), extracts their dependency orders, maps them to
    rpm package names and writes package-mapping-result.yaml into
    `directory`. When is_estimate == 'True' an evaluation spreadsheet is
    produced as well (appended to the shared file when save_all is True).
    """
    if not os.path.isdir(directory):
        loguru.logger.error(f"Error: {directory} is not a valid directory.")
        return

    files = os.listdir(directory)
    cmake_enable = 'CMakeLists.txt' in files
    autotool_enable = 'configure.ac' in files
    meson_enable = 'meson.build' in files
    maven_enable = 'pom.xml' in files

    # TODO: temporarily disabled
    # autotool_enable = False

    if not cmake_enable and not autotool_enable and not meson_enable and not maven_enable:
        loguru.logger.error(f"Error: {directory} is not a valid CMAKE/AUTOTOOL/MESON/MAVEN directory.")
        return
    # File patterns each build-system traversal should look at.
    cmake_file_list = ["CMakeLists.txt", "*.cmake"]
    autotool_file_list = ["*.ac", "*.m4"]
    meson_file_list = ["meson.build", "meson_options.txt"]
    maven_file_list = ["pom.xml"]
    # Extra rpms implied by the mere presence of matching files.
    special_rpm_map_from_file = { "*.am": ["libtool"],
                        'configure.ac': ['autoconf'],
                        'Makefile.in': ['automake'],
                        'Makefile.am': ['automake'],
                        '*.po': ['gettext'],
                        '*.desktop':['desktop-file-utils'],
                        'SConstruct':['python3-scons'],
                        '*.docbook':['kf5-kdoctools-devel'],
                        '*.qph':['qt5-linguist'],
                        '*.moc':['qt5-qttools-devel']
                        }
    # Walk every file of the project and process it by kind:
    # cmake: all CMakeLists.txt and *.cmake files
    # autotool: configure.ac
    # special files: e.g. when a *.am is seen, add libtool to the result
    # Return value: the special rpms that must be added.

    special_rpms = traverse_files(directory, cmake_file_list, autotool_file_list, meson_file_list, maven_file_list, special_rpm_map_from_file, os_version)
    # special_rpms = traverse_files(directory, cmake_file_list, autotool_file_list, meson_file_list, special_rpm_map_from_file, os_version)
    # TODO: use linux `grep -r` to search for special strings in the sources
    # and add specific rpm packages accordingly —
    # TODO: e.g. when 'Fortran' is found, add gcc-gfortran.
    special_rpm_map_from_str = {'Fortran':['gcc-gfortran'],
                                'include(ECM':['kf5-rpm-macros'],
                                'source-highlighter':['source-highlight'],
                                # 'ca.*certificates':['ca-certificates'],
                                '7zip':['p7zip'],
                                # 'install.*kernel-devel':['kernel-devel'],
                                'install kernel-devel': ['kernel-devel'],
                                'clang.h':['clang'],
                                'clang':['clang'],
                                '.clang-format':['clang'],
                                'Git':['git'],
                                # 'install.*llvm':['llvm'],
                                # 'install.*luajit':['luajit-devel','luajit'],
                                'install llvm':['llvm'],
                                'install luajit':['luajit-devel','luajit'],
                                # 'elfutils-devel \\':['elfutils-devel']
                                # 'install.*pkgconfig':['pkgconf'],
                                'install pkgconfig':['pkgconf'],
                                'KF5':['kf5-rpm-macros'],
                                'OPENSSL':['openssl','openssl-devel'],
                                'LZ4':['lz4','lz4-devel'],
                                'ZSTD':['zstd','zstd-devel'],
                                'cJSON':['cjson','cjson-devel'],
                                'LIBNSL':['libnsl2','libnsl2-devel'],
                                'use File::Path':['perl-File-Path', 'perl-generators'],
                                'use File::Basename':['perl', 'perl-generators'],
                                'use Text::ParseWords':['perl-Text-ParseWords', 'perl-generators'],
                                'use Term::ANSIColor':['perl-Term-ANSIColor', 'perl-generators'],
                                'use Digest::MD5':['perl-Digest-MD5', 'perl-generators'],
                                'use ExtUtils::MakeMaker':['perl-ExtUtils-MakeMaker', 'perl-generators'],
                                'use Data::Dumper':['perl-Data-Dumper', 'perl-generators'],
                                'use Encode':['perl-Encode', 'perl-generators'],
                                'sphinx-build':['python3-sphinx'],
                                # NOTE(review): 'python.h' appears again further
                                # down with the same value; the duplicate key is
                                # harmless but should be removed.
                                'python.h': ['python3-devel'],
                                'import recommonmark':['python3-recommonmark'],
                                'libncurses5-dev':['ncurses-devel'],
                                'qttools5-dev-tools':['qt5-qttools-devel'],
                                'icon-themes':['kf5-kiconthemes-devel'],
                                'libboost-dev':['boost-devel'],
                                'libgpgme11-dev':['gpgme-devel'],
                                'libgpgmepp-dev':['gpgme-devel'],
                                'qtdeclarative5-dev':['kf5-kdeclarative-devel'],
                                'Grantlee5':['grantlee-qt5-devel'],
                                # NOTE(review): 'KConfigGroup' is re-defined below
                                # with kf5-kconfigwidgets-devel; in a dict literal
                                # the later entry silently wins — confirm intent.
                                'KConfigGroup':['kf5-kconfig-devel'],
                                'ksharedconfig':['kf5-kconfig-devel'],
                                'kwindowconfig':['kf5-kconfig-devel'],
                                'kconfig ':['kf5-kconfig-devel'],
                                'kstandardshortcut':['kf5-kconfig-devel'],
                                'kfilter':['kf5-kitemviews-devel'],
                                'kwidgetsaddons':['kf5-kitemviews-devel','kf5-kwidgetsaddons-devel'],
                                'kio_version':['kf5-kio-devel'],
                                'kio_little':['kf5-kio-devel'],
                                'kdirwatch':['kf5-kio-devel'],
                                'kfileitem':['kf5-kio-devel'],
                                'plasmacore':['kf5-plasma-devel'],
                                'plasmacomponents':['kf5-plasma-devel'],
                                'kf5::plasma':['kf5-plasma-devel'],
                                'kf5plasma':['kf5-plasma-devel'],
                                'plasmoid':['kf5-plasma-devel'],
                                '&plasma':['kf5-plasma-devel'],
                                'dbusaddons':['kf5-kdbusaddons-devel'],
                                'QDBusConnection':['kf5-kdbusaddons-devel'],
                                'QDBusInteface':['kf5-kdbusaddons-devel'],
                                'KDBusService':['kf5-kdbusaddons-devel'],
                                'X-DBUS':['kf5-kdbusaddons-devel'],
                                # NOTE(review): 'KApplicationTrader' is re-defined
                                # below with libappstream-glib; the later entry
                                # silently overrides this one — confirm intent.
                                'KApplicationTrader':['kf5-kservice-devel'],
                                'DBusService':['kf5-kservice-devel'],
                                'KService':['kf5-kservice-devel'],
                                'KF5::NewStuff':['kf5-knewstuff-devel'],
                                'KNewStuff':['kf5-knewstuff-devel'],
                                'KNS3::':['kf5-knewstuff-devel'],
                                'libglib2':['glib2-devel'],
                                'glib_':['glib2-devel'],
                                'QGraphics':['qt5-qtsvg-devel'],
                                'Qt5Svg':['qt5-qtsvg-devel'],
                                'QSvg':['qt5-qtsvg-devel'],
                                'KAboutData':['kf5-kcoreaddons-devel'],
                                'KJob':['kf5-kcoreaddons-devel'],
                                'KCMUtils':['kf5-kcmutils-devel'],
                                'KPluginMetaData':['kf5-kcmutils-devel'],
                                'kcm_':['kf5-kcmutils-devel'],
                                'kScript':['qt5-qtscript-devel'],
                                'ScriptEngine':['qt5-qtscript-devel'],
                                'KColorScheme':['kf5-kconfigwidgets-devel'],
                                'KConfigGroup':['kf5-kconfigwidgets-devel'],
                                'kdoctools':['kf5-kdoctools-devel'],
                                '::EmailAddress':['kf5-kcodecs-devel'],
                                'QFile::encodeName':['kf5-kcodecs-devel'],
                                'kcodecs':['kf5-kcodecs-devel'],
                                'X-Text-Markup: kde4':['kf5-kdelibs4support-devel'],
                                'Kdelibs4ConfigMigrator':['kf5-kdelibs4support-devel'],
                                'X-systemd-skip':['systemd'],
                                'SYSTEMD_SERVICE':['systemd'],
                                'sd_address':['systemd'],
                                'systemd':['systemd'],
                                'libpython3-dev':['python3-devel'],
                                'distutils':['python3-devel'],
                                'python.h':['python3-devel'],
                                'lupdate':['qt5-linguist'],
                                'lrelease':['qt5-linguist'],
                                'X11':['libX11-devel','qt5-qtx11extras-devel'],
                                'qx11info':['qt5-qtx11extras-devel'],
                                'QApplication':['qt5-qtx11extras-devel'],
                                'KJobWidgets':['kf5-kjobwidgets-devel'],
                                'OpenFileManagerWindowJob':['kf5-kjobwidgets-devel'],
                                'JobUi':['kf5-kjobwidgets-devel'],
                                'KLineEdit':['kf5-kcompletion-devel','kf5-kwidgetsaddons-devel'],
                                'kcompletion':['kf5-kcompletion-devel'],
                                'KWallet':['kf5-kwallet-devel'],
                                'kde.wallet':['kf5-kwallet-devel'],
                                'org.doxygen':['doxygen'],
                                '[doxygen]':['doxygen'],
                                'doxygen.org':['doxygen'],
                                'LibXml2':['libxml2-devel'],
                                'setuptools':['python3-setuptools'],
                                'gmock.h':['gmock-devel'],
                                'GoogleTest':['gmock-devel'],
                                'wl_backend':['wayland-protocols-devel'],
                                'wl_display':['wayland-protocols-devel'],
                                'wl_drm':['wayland-protocols-devel'],
                                'wl_surface':['wayland-protocols-devel'],
                                'shared-mime-info':['shared-mime-info'],
                                'KFile::':['kf5-kwidgetsaddons-devel'],
                                'autoconf-archive':['autoconf-archive'],
                                'KApplicationTrader':['libappstream-glib'],
                                'gnome':['libappstream-glib'],
                                'flatpak':['libappstream-glib'],
                                'zlib.h':['zlib-devel'],
                                'bzip2':['zlib-devel'],
                                'KWindow':['kf5-kwindowsystem-devel'],
                                'boost::':['boost-devel']
                   }

    # Extra mappings that only apply to meson projects.
    if meson_enable:
        special_rpm_map_from_file['*.po'] = ['gettext', 'gettext-devel']
        special_rpm_map_from_file['Makefile'] = ['make']
        special_rpm_map_from_file['*.vala'] = ['vala']
        special_rpm_map_from_file['*.cmake'] = ['cmake']
        special_rpm_map_from_file['CMakeLists.txt'] = ['cmake']
        special_rpm_map_from_str['itstool'] = ['itstool']
        special_rpm_map_from_str['ninja'] = ['ninja-build']
        special_rpm_map_from_str['generate_gir'] = ['gobject-introspection-devel']
        special_rpm_map_from_str['gtk-doc'] = ['gtk-doc']
        special_rpm_map_from_str['desktop-file'] = ['desktop-file-utils']
        special_rpm_map_from_str['intltool-'] = ['intltool']
        special_rpm_map_from_str['docbook.xsl'] = ['docbook-style-xsl']
        special_rpm_map_from_str['bindtextdomain'] = ['gettext-devel']
        special_rpm_map_from_str['i18n.gettext'] = ['gettext-devel']
        special_rpm_map_from_str['libtool'] = ['libtool']
        special_rpm_map_from_str['ac_attr'] = ['gettext-devel']
        special_rpm_map_from_str['gnome.compile'] = ['gnome-desktop3-devel']
        special_rpm_map_from_str['appstream_'] = ['libappstream-glib-devel']
        special_rpm_map_from_str['libsoup'] = ['libsoup-devel']
        special_rpm_map_from_str['soup_server'] = ['libsoup-devel']
        special_rpm_map_from_str['pytest.mark'] = ['python3-pytest']
        special_rpm_map_from_str['DBus.Proper'] = ['dbus']
        special_rpm_map_from_str['docbook5'] = ['docbook5-style-xsl']

    # Extra mappings that only apply to maven projects.
    if maven_enable:
        special_rpm_map_from_str['javax.servlet'] = ['mvn(javax.servlet:javax.servlet-api)']
        special_rpm_map_from_str['wagon-http'] = ['mvn(org.apache.maven.wagon:wagon-http)']


    special_rpms.update(linux_search_str(directory, special_rpm_map_from_str))
    mapper = PackageMapper(directory, os_version, list(special_rpms))
    # Paths of the input depends files.
    cmake_depends_file_path = os.path.join(directory, CMAKE_DEPENDS_FILE_NAME)
    autotool_depends_file_path = os.path.join(directory, AUTOTOOL_DEPENDS_FILE_NAME)
    meson_depends_file_path = os.path.join(directory, MESON_DEPENDS_FILE_NAME)
    maven_depends_file_path = os.path.join(directory, MAVEN_DEPENDS_FILE_NAME)

    # Depends dictionaries read from those files.
    # NOTE(review): all four files are read unconditionally, even for build
    # systems that were not detected — this assumes traverse_files always
    # writes them; confirm, otherwise a missing file raises here.
    cmake_dependencies = read_yaml_file(cmake_depends_file_path)
    autotool_dependencies = read_yaml_file(autotool_depends_file_path)
    meson_dependencies = read_yaml_file(meson_depends_file_path)
    maven_dependencies = read_yaml_file(maven_depends_file_path)

    # Start the package-name mapping. Note: maven needs no rpm resolution.
    # mapper.sweep_mapping(True, True, cmake_dependencies, autotool_dependencies, directory, is_estimate, save_all)
    mapper.sweep_mapping(cmake_enable, autotool_enable, meson_enable, maven_enable, cmake_dependencies, autotool_dependencies, meson_dependencies, maven_dependencies, directory, is_estimate, save_all)
    mapper.printMap()
    # mapper.printRPM()
    # Print the final result:
    # mapper.printResult()

def should_open_subdir(dir_path):
    """Decide whether dir_path is merely a wrapper around one real directory.

    Returns (True, <path of the single subdirectory>) when dir_path contains
    exactly one subdirectory and no files other than archives; otherwise
    (False, None).
    """
    archive_suffixes = ('.zip', '.tar.gz', '.rar', '.7z', '.tar.xz')
    subdirs = []
    loose_files = []
    for name in os.listdir(dir_path):
        full_path = os.path.join(dir_path, name)
        if os.path.isdir(full_path):
            subdirs.append(name)
        elif os.path.isfile(full_path):
            loose_files.append(name)
    # Any file that is not an archive makes this a real working directory.
    has_non_archive = any(not f.endswith(archive_suffixes) for f in loose_files)
    if len(subdirs) == 1 and not has_non_archive:
        # Only one folder and nothing but archives: descend into it.
        return True, os.path.join(dir_path, subdirs[0])
    return False, None

def find_working_directory(dir_path):
    """Descend through wrapper directories until the real working directory is found.

    Iterative equivalent of the recursive descent: keep entering single
    wrapper folders (per should_open_subdir) until one is not a wrapper.
    """
    current = dir_path
    while True:
        should_open, subdir_path = should_open_subdir(current)
        if not should_open:
            loguru.logger.info(f"处理目录: {current}")
            return current
        loguru.logger.info(f"打开文件夹: {subdir_path}")
        current = subdir_path

# Test driver: map_dir is the working directory containing one project folder
# per test item. E.g. with dir='spec-cmake' every project under spec-cmake
# gets its dependency files mapped to rpm package names.
def mapping_dir(map_dir, os_version, is_estimate = 'False'):
    """Run mapping_file over every project directory directly under map_dir.

    Each immediate subdirectory is treated as one project; wrapper folders
    are unwrapped via find_working_directory. When is_estimate == 'True',
    the aggregated estimate spreadsheet is post-processed at the end.
    """
    # Projects that currently crash the mapper are skipped.
    bug_dirs = {'cryfs','czmq','libgdiplus','libgeotiff','lighttpd','SDL2','thrift','usbguard','vlc','zeromq'}
    items = os.listdir(map_dir)
    absolute_path = os.path.abspath(map_dir)
    dirs = [os.path.join(absolute_path, d) for d in items if os.path.isdir(os.path.join(absolute_path, d))]
    build_markers = ('CMakeLists.txt', 'configure.ac', 'meson.build', 'pom.xml')
    for dir in dirs:
        dir_name = os.path.basename(dir)
        if dir_name in bug_dirs:
            continue
        working_dir = find_working_directory(dir)
        working_dir_items = os.listdir(working_dir)
        files = [f for f in working_dir_items if os.path.isfile(os.path.join(working_dir, f))]
        # BUGFIX: the original test was
        #     if 'CMakeLists.txt' or 'configure.ac' or 'meson.build' or 'pom.xml' in files:
        # which is always true (a non-empty string literal is truthy); test
        # each marker for membership instead.
        if any(marker in files for marker in build_markers):
            try:
                mapping_file(working_dir, os_version, is_estimate, save_all=True)
            except Exception as e:
                loguru.logger.error(f"Error occured in mapping file. {e}")
                continue
    if is_estimate == 'True':
        merge_and_center_cells(ALL_ESTIMATE_FILE_PATG)
    # NOTE: os.walk is deliberately not used here — it would pick up non-cmake
    # projects just because some nested folder happens to contain a
    # CMakeLists.txt, which is not what we want.







def main():
    """Command-line entry point.

    Modes:
      mapping_order  map a single build-system order to rpm names
      mapping_file   map the dependency files of one project directory
      mapping_dir    map every project directory under a given directory
      db             database maintenance (list/create/download/delete tables)
    """
    parser = argparse.ArgumentParser(description='带有多个可选参数的脚本')
    subparsers = parser.add_subparsers(dest='mode', required=True, help='选择运行模式')

    # mapping_order sub-parser
    parser_order = subparsers.add_parser('mapping_order', help='命令映射模式')
    parser_order.add_argument('order', help='需要执行包名映射的命令')
    parser_order.add_argument('--os-version', help='指定操作系统的版本', required=True)
    # BUGFIX: mapping_order(mapping_type, order, os_version) takes three
    # arguments but used to be called with only two; expose the mapping type
    # on the CLI, defaulting to 'cmake' to keep old invocations working.
    parser_order.add_argument('--mapping-type', choices=['cmake', 'autotool', 'meson'],
                              default='cmake', help='命令所属的构建系统类型')

    # mapping_file sub-parser
    parser_file = subparsers.add_parser('mapping_file', help='文件映射模式')
    parser_file.add_argument('filepath', help='依赖文件的路径')
    parser_file.add_argument('--os-version', help='指定操作系统的版本', required=True)
    parser_file.add_argument('--estimate', help='有此项即可在包名映射后进行评估')

    # mapping_dir sub-parser
    parser_dir = subparsers.add_parser('mapping_dir', help='目录映射模式')
    parser_dir.add_argument('dir', help='要处理的工作目录')
    # Kept for CLI backward compatibility; mapping_dir() itself no longer
    # takes a depends_file_name parameter.
    parser_dir.add_argument('depends_file_name', help='目录中要处理的依赖文件名')
    parser_dir.add_argument('--os-version', help='指定操作系统的版本', required=True)
    parser_dir.add_argument('--estimate', help='有此项即可在包名映射后进行评估')

    # db sub-parser
    parser_db = subparsers.add_parser('db', help='数据库CRUD模式')
    parser_db.add_argument('--db-mode', choices=[ 'list', 'create','download', 'delete'], help='指定数据库操作类型', required=True)
    parser_db.add_argument('--os-version', help='指定操作系统的版本')
    # Parse the command line.
    args = parser.parse_args()

    # Dispatch according to the selected mode.
    if args.mode == 'db':
        if args.db_mode == 'list':
            tables = mantiDB.list_tables()
            print("数据库中所有TABLE的名称:")
            for name in tables:
                print(name)
        elif args.db_mode == 'create':
            mantiDB.create_tables(args.os_version)
        elif args.db_mode == 'download':
            tables = mantiDB.list_tables()
            table_name = "openeuler-" + args.os_version.lower()
            table_name = re.sub(r'[^a-zA-Z0-9]', '_', table_name)
            if table_name in tables:
                mantiDB.export_data_from_name(table_name)
            else:
                print(f"TABLE[{table_name}] is undefined")
        elif args.db_mode == 'delete':
            tables = mantiDB.list_tables()
            table_name = "openeuler-" + args.os_version.lower()
            table_name = re.sub(r'[^a-zA-Z0-9]', '_', table_name)
            if table_name in tables:
                mantiDB.delete_table(table_name)
            else:
                print(f"TABLE[{table_name}] is undefined")
    else:
        table_name = mantiDB.table_name_generator(args.os_version)
        tables = mantiDB.list_tables()
        if table_name not in tables:
            loguru.logger.error(f"Table named<{table_name}> was not Found!")
            return
        if args.mode == 'mapping_order':
            # BUGFIX: pass all three expected arguments (mapping_type was missing).
            mapping_order(args.mapping_type, args.order, args.os_version)
        elif args.mode == 'mapping_file':
            mapping_file(args.filepath, args.os_version, args.estimate)
        elif args.mode == 'mapping_dir':
            # BUGFIX: mapping_dir(map_dir, os_version, is_estimate) takes no
            # depends_file_name parameter; the old 4-argument call shifted
            # every argument by one position.
            mapping_dir(args.dir, args.os_version, args.estimate)


def merge_dict(dict1, dict2):
    """Merge two dicts of lists, deduplicating each merged value list.

    Values for keys present in both inputs are concatenated and then
    reduced to their unique elements.

    NOTE: deduplication goes through ``set()``, so the order of each
    result list is arbitrary (same as the original implementation).

    :param dict1: mapping of key -> list (or other iterable) of values.
    :param dict2: mapping of key -> list (or other iterable) of values.
    :return: new dict with the union of keys and deduplicated value lists.
    """
    merged_dict = {}
    # One pass over both inputs instead of two copy-pasted merge loops.
    for source in (dict1, dict2):
        for key, value in source.items():
            merged_dict.setdefault(key, []).extend(value)

    # Deduplicate each value list (order not preserved).
    for key, value in merged_dict.items():
        merged_dict[key] = list(set(value))
    return merged_dict





def traverse_files(directory, cmake_file_patterns, autotool_file_patterns, meson_file_patterns, maven_file_patterns, special_rpm_map, os_version):
    """Run every build-system walker over the project directory.

    Each traverse_* call extracts dependency-related directives from the
    files of one build system and writes its per-system depends YAML.
    Special files (scripts, .am files, pom.xml, etc.) additionally yield
    a set of rpm names.

    :return: set of "special" rpms detected by traverse_special_files.
    """
    traverse_cmake_files(directory, cmake_file_patterns)
    traverse_autotool_files(directory, autotool_file_patterns)
    traverse_meson_files(directory, meson_file_patterns)
    # Called for its side effect (writes the maven depends YAML); the
    # is_maven flag it returns is not needed here.
    traverse_maven_files(directory, maven_file_patterns)
    special_rpms = traverse_special_files(directory, special_rpm_map, os_version)
    return special_rpms


# TODO: 这三个 traverse 函数可以合并，就只用遍历一遍文件了，但是现在为了方便调试暂时分开
# TODO: the three traverse functions could be merged into a single walk;
# they are kept separate for now to ease debugging.
def traverse_cmake_files(directory, file_patterns):
    """Walk *directory*, parse every CMake file matching *file_patterns*
    with cmake.CmakeDepends, and merge all per-file dependency YAMLs into
    one CMAKE_DEPENDS_FILE_NAME file at the directory root.

    :param directory: project root to walk.
    :param file_patterns: fnmatch patterns selecting CMake files.
    """
    all_depends = {}
    # Where per-file intermediate parse results are stored.
    mid_depends_save_path = os.path.join(directory, 'cmake_mid_depends')
    os.makedirs(mid_depends_save_path, exist_ok=True)
    # Where the final parse output is stored.
    output_save_path = os.path.join(directory, 'cmake_output')
    os.makedirs(output_save_path, exist_ok=True)
    # Variable substitutions applied while resolving dependency names.
    VAR_MAP = {'QT_MAJOR_VERSION': ['5', '6']}
    cmake_lists_analysed = False
    for root, _, files in os.walk(directory):
        for file in files:
            if not any(fnmatch.fnmatch(file, pattern) for pattern in file_patterns):
                continue

            # Generated/templated files (foo.h.cmake, foo.pc.cmake, ...)
            # are too numerous to enumerate; skip anything with more than
            # one dot or a known special suffix.
            special_exts = ['INSTALL.CMAKE', 'ical2vcal.cmake', 'output.cmake', '.json.cmake','.py.cmake','.hpp.cmake', '.h.cmake', '.knsrc.cmake', '.pc.cmake', '.rc.cmake', 'Doxyfile.cmake', '.qhcp.cmake', '.ini.cmake']
            if file.count('.') > 1 or any(ext in file for ext in special_exts):
                continue

            file_path = os.path.join(root, file)
            loguru.logger.info(f"Processing File {file_path} ......")
            cmake_depends = cmake.CmakeDepends()
            cmake_depends.parse(file_path)

            # Build a (mostly) unique file name from the last two path
            # components joined with underscores.
            selected_components = file_path.split(os.path.sep)[-2:]
            unique_file_name = '_'.join(selected_components)

            # TODO: variable resolution
            cmake_depends.save_only_depends(mid_depends_save_path, VAR_MAP, f"{unique_file_name}_depends.yaml")

            # A project may contain several CMakeLists.txt files; only the
            # first one encountered is analysed in full.
            if file.lower() == "cmakelists.txt" and not cmake_lists_analysed:
                cmake_depends.save_cmake(output_save_path)
                cmake_lists_analysed = True

            path = os.path.join(mid_depends_save_path, f"{unique_file_name}_depends.yaml")
            all_depends = merge_dict(all_depends, read_yaml_file(path))
    write_yaml_file(all_depends, directory, CMAKE_DEPENDS_FILE_NAME)

def traverse_autotool_files(directory, file_patterns):
    """Walk *directory*, parse every autotools file (configure.ac etc.)
    matching *file_patterns* with autoconf_depends.AutoconfDepends, and
    merge all per-file dependency YAMLs into AUTOTOOL_DEPENDS_FILE_NAME.

    :param directory: project root to walk.
    :param file_patterns: fnmatch patterns selecting autotools files.
    """
    all_depends = {}
    # Where per-file intermediate parse results are stored.
    mid_depends_save_path = os.path.join(directory, 'autotool_mid_depends')
    os.makedirs(mid_depends_save_path, exist_ok=True)
    # Where the final parse output is stored.
    output_save_path = os.path.join(directory, 'autotool_output')
    os.makedirs(output_save_path, exist_ok=True)
    # Variable substitutions applied while resolving dependency names.
    VAR_MAP = {}
    for root, _, files in os.walk(directory):
        for file in files:
            if not any(fnmatch.fnmatch(file, pattern) for pattern in file_patterns):
                continue

            # Skip templated files like foo.in.ac; unlike the CMake walker
            # there is no special-suffix list for autotools (yet).
            if file.count('.') > 1:
                continue

            file_path = os.path.join(root, file)
            loguru.logger.info(f"Processing File {file_path} ......")
            auto_depends = autoconf_depends.AutoconfDepends()
            auto_depends.load_configure_ac(file_path)

            # Build a (mostly) unique file name from the last two path
            # components joined with underscores.
            selected_components = file_path.split(os.path.sep)[-2:]
            unique_file_name = '_'.join(selected_components)

            auto_depends.save_only_depends(mid_depends_save_path, VAR_MAP, f"{unique_file_name}_depends.yaml")
            auto_depends.save_autotool(output_save_path)
            path = os.path.join(mid_depends_save_path, f"{unique_file_name}_depends.yaml")
            all_depends = merge_dict(all_depends, read_yaml_file(path))
    write_yaml_file(all_depends, directory, AUTOTOOL_DEPENDS_FILE_NAME)

def traverse_meson_files(directory, file_patterns):
    """Detect whether *directory* is a meson project and, if so, parse it
    with meson.MesonDepends, writing the merged dependency YAML to
    MESON_DEPENDS_FILE_NAME (an empty mapping is written otherwise).

    :param directory: project root to walk.
    :param file_patterns: fnmatch patterns marking a meson project.
    """
    # Meson needs no intermediate per-file results; only the final output
    # directory is created here.
    output_save_path = os.path.join(directory, 'meson_output')
    os.makedirs(output_save_path, exist_ok=True)
    loguru.logger.info(f"Processing Project {directory} ......")
    # One matching file anywhere marks a meson project.  The generator
    # short-circuits, so the walk stops at the first hit (the original
    # `break` only left the inner loop and kept walking the whole tree).
    is_meson = any(
        fnmatch.fnmatch(file, pattern)
        for _, _, files in os.walk(directory)
        for file in files
        for pattern in file_patterns
    )
    all_depends = {}
    if is_meson:
        meson_depends = meson.MesonDepends()
        meson_depends.parseDir(directory)
        meson_depends.save_meson_with_only_depends(directory, MESON_DEPENDS_FILE_NAME)
        path = os.path.join(directory, MESON_DEPENDS_FILE_NAME)
        all_depends = merge_dict(all_depends, read_yaml_file(path))
    write_yaml_file(all_depends, directory, MESON_DEPENDS_FILE_NAME)


def traverse_maven_files(directory, file_patterns):
    """Detect whether *directory* is a maven project and, if so, parse it
    with maven.MavenDepends, writing the merged dependency YAML to
    MAVEN_DEPENDS_FILE_NAME (an empty mapping is written otherwise).

    :param directory: project root to walk.
    :param file_patterns: fnmatch patterns marking a maven project.
    :return: True if a matching file was found, else False.
    """
    # Maven needs no intermediate per-file results; only the final output
    # directory is created here.
    output_save_path = os.path.join(directory, 'maven_output')
    os.makedirs(output_save_path, exist_ok=True)
    loguru.logger.info(f"Processing Project {directory} ......")
    # One matching file anywhere marks a maven project.  The generator
    # short-circuits, so the walk stops at the first hit (the original
    # `break` only left the inner loop and kept walking the whole tree).
    is_maven = any(
        fnmatch.fnmatch(file, pattern)
        for _, _, files in os.walk(directory)
        for file in files
        for pattern in file_patterns
    )
    all_depends = {}
    if is_maven:
        maven_depends = maven.MavenDepends()
        maven_depends.parse_dir(directory)
        maven_depends.save_maven(directory, MAVEN_DEPENDS_FILE_NAME)
        path = os.path.join(directory, MAVEN_DEPENDS_FILE_NAME)
        all_depends = merge_dict(all_depends, read_yaml_file(path))
    write_yaml_file(all_depends, directory, MAVEN_DEPENDS_FILE_NAME)
    return is_maven

def contains_string(path, target_str):
    """Return True if any line of the file at *path* contains *target_str*.

    Missing or unreadable files are reported on stdout and treated as a
    non-match (False).
    """
    try:
        with open(path, 'r', encoding='utf-8') as handle:
            return any(target_str in line for line in handle)
    except FileNotFoundError:
        print(f"File {path} not found.")
        return False
    except IOError:
        print(f"An error occurred while reading the file {path}.")
        return False

def parse_pro_pri(file_path):
    """Extract pkgconfig-style dependency names from a qmake .pro/.pri file.

    Scans each line for ``CONFIG += ...`` and ``qtConfig(...)`` directives
    (case-insensitive) and renders every token as 'pkgconfig(<token>)'.

    :param file_path: path to the .pro or .pri file (UTF-8).
    :return: set of 'pkgconfig(...)' dependency strings.
    """
    config_pattern = re.compile(r'CONFIG\s*\+=\s*([^\n]*)', re.IGNORECASE)
    qtconfig_pattern = re.compile(r'qtConfig\(([^)]+)\)', re.IGNORECASE)
    # One C-level pass strips parentheses and quotes from every token,
    # replacing four chained .replace() calls.
    strip_table = str.maketrans('', '', '()"\'')

    dependencies = set()

    with open(file_path, 'r', encoding='utf-8') as file:
        lines = file.read().splitlines()

    for line in lines:
        line = line.strip()
        # Both directive kinds are handled identically once captured, so
        # a single loop covers CONFIG += and qtConfig(...).
        for pattern in (config_pattern, qtconfig_pattern):
            match = pattern.search(line)
            if match:
                for token in re.split(r'\s+', match.group(1).strip()):
                    token = token.translate(strip_table)
                    dependencies.add(f'pkgconfig({token})')

    return dependencies

def parse_pom(pom_file):
    """Extract Maven coordinates from a pom.xml as virtual rpm names.

    Collects <dependency>, <parent> and <plugin> entries and renders them
    as 'mvn(groupId:artifactId)' strings; the parent additionally yields
    an 'mvn(groupId:artifactId:pom:)' entry.  Entries without a groupId
    fall back to 'mvn(artifactId)'; entries without an artifactId are
    skipped (the original code crashed with AttributeError on them).

    :param pom_file: path to the pom.xml file.
    :return: set of 'mvn(...)' strings.
    :raises xml.etree.ElementTree.ParseError: if the file is not valid XML.
    """
    tree = ET.parse(pom_file)
    root = tree.getroot()
    namespace = {'maven': 'http://maven.apache.org/POM/4.0.0'}

    dependencies = set()

    def coords(element):
        """Return (groupId text or None, artifactId text or None) for *element*."""
        group = element.find('maven:groupId', namespace)
        artifact = element.find('maven:artifactId', namespace)
        return (group.text if group is not None else None,
                artifact.text if artifact is not None else None)

    # Extract from <dependency>
    for dependency in root.findall('.//maven:dependency', namespace):
        group_id, artifact_id = coords(dependency)
        if artifact_id is None:
            continue  # malformed: a dependency must name an artifactId
        if group_id is not None:
            dependencies.add(f'mvn({group_id}:{artifact_id})')
        else:
            dependencies.add(f'mvn({artifact_id})')

    # Extract from <parent>
    parent = root.find('.//maven:parent', namespace)
    if parent is not None:
        group_id, artifact_id = coords(parent)
        if artifact_id is not None:
            if group_id is not None:
                dependencies.add(f'mvn({group_id}:{artifact_id})')
                dependencies.add(f'mvn({group_id}:{artifact_id}:pom:)')
            else:
                dependencies.add(f'mvn({artifact_id})')
                dependencies.add(f'mvn({artifact_id}:pom:)')

    # Extract from <plugin>
    for plugin in root.findall('.//maven:plugin', namespace):
        group_id, artifact_id = coords(plugin)
        if group_id is not None and artifact_id is not None:
            dependencies.add(f'mvn({group_id}:{artifact_id})')

    return dependencies

def parse_h(file_path):
    """Return the set of header names referenced via ``include <...>``
    directives in the file at *file_path* (UTF-8)."""
    include_pattern = re.compile(r'include\s*<([^>]+)>')
    with open(file_path, 'r', encoding='utf-8') as source:
        text = source.read()
    # Set comprehension over finditer instead of set(re.findall(...)).
    return {match.group(1) for match in include_pattern.finditer(text)}

def virtual2real(virtual_rpms, os_version):
    """Resolve virtual rpm names to real package names via mantiDB.

    Each name (e.g. 'stdint.h', '/usr/include/stdint.h', 'pkgconfig(foo)')
    is looked up with mantiDB.search_rpm_from_virtual for *os_version*.
    Names resolving to zero or to multiple real packages are logged and
    dropped, so only unambiguous matches are returned.

    :param virtual_rpms: iterable of virtual rpm names.
    :param os_version: OS version passed through to the database lookup.
    :return: list of real rpm names.
    """
    real_rpms = []
    for name in virtual_rpms:
        matches = mantiDB.search_rpm_from_virtual(name, os_version)
        if not matches:
            loguru.logger.info(f"{name} is not valid")
            continue
        if len(matches) > 1:
            loguru.logger.info(f"{name} has many names")
            continue
        real_rpms.append(matches[0])
    return real_rpms

def traverse_special_files(directory, special_rpm_map, os_version):
    """Scan *directory* for 'special' files and return the rpm set they imply.

    Two mechanisms:
      1. Pattern-triggered: the first file matching a key of
         *special_rpm_map* ({"*.am": ["libtool"], ...}) contributes that
         pattern's rpm list; each pattern fires at most once.
      2. Content-based: known file kinds (pom.xml, debian control,
         qmake .pro/.pri, perl .pl, C/C++ .h) are parsed and their
         virtual dependencies resolved to real rpms.

    :param directory: project root to walk.
    :param special_rpm_map: mapping of fnmatch pattern -> list of rpms.
    :param os_version: OS version used for the database lookups.
    :return: set of real rpm names.
    """
    visited_patterns = set()  # set: O(1) membership vs. the old list scan
    special_rpms = set()
    file_patterns = special_rpm_map.keys()  # invariant; hoisted out of the walk
    for root, _, files in os.walk(directory):
        for file in files:
            extension = os.path.splitext(file)[1]
            file_path = os.path.join(root, file)

            # Mechanism 2: content-based detection of special rpms.
            if file.lower() == 'pom.xml':
                loguru.logger.info(f"Processing File {file_path} ......")
                special_rpms.update(virtual2real(parse_pom(file_path), os_version))
            if file.lower() == 'control':
                loguru.logger.info(f"Processing File {file_path} ......")
                special_rpms.update(virtual2real(parse_debian_control(file_path), os_version))
            if extension in ('.pro', '.pri'):
                loguru.logger.info(f"Processing File {file_path} ......")
                special_rpms.update(virtual2real(parse_pro_pri(file_path), os_version))
            if extension == '.pl':
                loguru.logger.info(f"Processing File {file_path} ......")
                special_rpms.update(parse_pl(file_path, os_version))
            if extension == '.h':
                loguru.logger.info(f"Processing File {file_path} ......")
                for header in parse_h(file_path):
                    name = header.partition(".")[0]
                    special_rpms.update(mantiDB.search(name, ".h", os_version))

            # Mechanism 1: pattern-triggered rpm additions.
            for pattern in file_patterns:
                if pattern not in visited_patterns and fnmatch.fnmatch(file, pattern):
                    loguru.logger.info(f"Processing file {file}, pattern {pattern}")
                    loguru.logger.info(f"Here <{file_path}>! There is a file named {file}")
                    add_special_rpms = special_rpm_map[pattern]
                    visited_patterns.add(pattern)
                    special_rpms.update(add_special_rpms)
                    loguru.logger.info(f"Add {add_special_rpms} to the result")

    return special_rpms


def parse_pl(pl_path, version):
    """Map the perl modules ``use``d by a .pl script to rpm packages.

    The script's encoding is detected with chardet (perl scripts in the
    wild are not reliably UTF-8), then every line-leading
    ``use Some::Module`` statement is extracted and looked up in mantiDB.

    :param pl_path: path to the perl script.
    :param version: OS version passed to the database lookup.
    :return: set of rpm names providing the used perl modules.
    """
    # Detect the encoding, then read the whole file with it.  (The old
    # nested helper and intermediate module list added no value.)
    with open(pl_path, 'rb') as raw:
        encoding = chardet.detect(raw.read())['encoding']
    with open(pl_path, 'r', encoding=encoding) as file:
        content = file.read()
    modules = re.findall(r'^use\s+([\w:]+)', content, re.MULTILINE)

    perl_rpms = set()
    for perl_module in modules:
        rpms = mantiDB.search(perl_module, 'perl', version)
        print(f'perl add:{rpms}')
        perl_rpms.update(rpms)

    return perl_rpms


def parse_debian_control(control_file):
    """Parse the Build-Depends field of a debian control file into a set
    of rpm-style dependency names.

    Transformations applied to each dependency entry:
      * version constraints in parentheses are removed;
      * the debian '-dev' suffix becomes the rpm '-devel' suffix;
      * names whose first dash-separated token ends in a Qt version digit
        (4/5/6) are rewritten with a 'qtN-' prefix;
      * for 'libfoo...' names, the 'lib'-less variant is also added.

    :param control_file: path to the debian control file (UTF-8).
    :return: set of processed names (empty if no Build-Depends field).
    """
    build_dep_start_pattern = re.compile(r'^Build-Depends:\s*', re.MULTILINE)
    section_start_pattern = re.compile(r'^\w+:\s*', re.MULTILINE)
    version_pattern = re.compile(r'\s*\([^)]*\)')
    dev_pattern = re.compile(r'-dev\b')
    QT_VERSIONS = ('4', '5', '6')  # loop-invariant; hoisted out of the loop

    with open(control_file, 'r', encoding='utf-8') as file:
        content = file.read()

    # Locate the start of the Build-Depends field.
    build_dep_start_match = build_dep_start_pattern.search(content)
    if not build_dep_start_match:
        return set()
    build_dep_start_pos = build_dep_start_match.end()

    # The field ends at the next 'Word:' line or at end of file.  Note
    # that \w+ does not match hyphenated field names like Build-Depends.
    next_section_match = section_start_pattern.search(content, build_dep_start_pos)
    if next_section_match:
        build_dep_end_pos = next_section_match.start()
    else:
        build_dep_end_pos = len(content)

    build_dep_section = content[build_dep_start_pos:build_dep_end_pos].strip()

    # Split on commas and line breaks, dropping empty entries.
    dependencies = [dep.strip() for dep in re.split(r',\s*|\s*\n\s*', build_dep_section) if dep.strip()]

    processed_dependencies = set()

    for dep in dependencies:
        # Remove version information in parentheses.
        dep = version_pattern.sub('', dep).strip()
        if not dep:
            # Entry consisted only of a version constraint; the original
            # code would crash on prefix[-1] here.
            continue
        dep = dev_pattern.sub('-devel', dep)
        prefix = dep.partition('-')[0]
        last_char = prefix[-1]
        if last_char in QT_VERSIONS:
            # NOTE(review): replace() strips EVERY occurrence of the digit
            # from the name, not just the one in the prefix — behavior kept
            # as-is, but confirm this is intended.
            dep = f'qt{last_char}-{dep.replace(last_char, "")}'

        processed_dependencies.add(dep)
        if dep[:3] == 'lib':
            processed_dependencies.add(dep[3:])

    return processed_dependencies


if __name__ == '__main__':
    # Usage:
    # 1. Map a single depends file:
    #  python package_mapping.py mapping_file --os-version 22.03-LTS-SP3  --estimate True D:\Iron\Work\spec-test\wireshark\depends.yaml
    # 2. Map a whole directory:
    # python package_mapping.py mapping_dir --os-version 22.03-LTS-SP3  --estimate True D:\Iron\Work\spec-test depends.yaml
    # Test fixtures live under the spec-test folder.
    main()

