import csv
import os
import shutil
import subprocess
import logging
from dotenv import load_dotenv # 新增

# --- Configuration ---
# CSV file listing repositories and their URL mappings (columns: 仓库名, URL, dst_url).
GIT_URLS_CSV_PATH = '/root/ws/2025/04/agiros_pkg_build_tools/repo_urls.csv'
# Name of a single repository to process; when None, every repository in the CSV is processed.
TARGET_REPO_NAME = None
# Base directory containing the repository checkouts.
# REPOS_BASE_DIR = 'agiros_src/agiros_partial_openEuler2403lts'
REPOS_BASE_DIR = 'agiros_src/repos_partial_ubuntu2403'
# Where generated patch files are stored (relative to each repository directory).
PATCHES_DIR_NAME = '.' # store directly inside the repository directory
# Path of the log file.
LOG_FILE_PATH = 'repos_partial_ubuntu2403.log'
# Name of the Git remote to sync with and push to.
GIT_REMOTE_NAME = 'origin'
# Default email suffix used for commits when GITEE_EMAIL is not set in .env.
# NOTE(review): this value is a full address rather than a bare domain — confirm
# the intended value, since get_gitee_config_for_push() combines it as "user@{suffix}".
DEFAULT_EMAIL_DOMAIN_FOR_COMMIT = "4384213@qq.com" 

# --- Logging Setup ---
def setup_logging():
    """Configure root logging with a UTF-8 file handler plus a console handler."""
    file_handler = logging.FileHandler(LOG_FILE_PATH, encoding='utf-8')
    console_handler = logging.StreamHandler()
    logging.basicConfig(
        format='%(asctime)s - %(name)s - %(levelname)s - %(message)s',
        level=logging.INFO,
        handlers=[file_handler, console_handler],
    )

# --- Git 辅助函数 ---
def run_git_command(command_list, cwd, check=True, capture_output=False, text=False, no_log_stdout_on_success=False):
    """
    Run a Git subcommand via subprocess.

    Returns the subprocess.CompletedProcess when capture_output is True.
    Otherwise returns a bool: True iff the command exited with code 0.
    When check is True, failures raise CalledProcessError (or
    FileNotFoundError if git itself is missing); when check is False,
    failures simply return False.
    """
    logger = logging.getLogger(__name__)
    cmd = ['git', *command_list]
    logger.debug(f"运行 Git 命令: {' '.join(cmd)} 在目录: {cwd}")
    try:
        result = subprocess.run(
            cmd,
            cwd=cwd,
            check=check,
            capture_output=capture_output,
            text=text,
            encoding='utf-8' if text else None,
        )
    except subprocess.CalledProcessError as err:
        logger.error(f"Git 命令执行失败: {err}")
        captured_out = getattr(err, 'stdout', None)
        if captured_out:
            logger.error(f"Git stdout: {captured_out.strip()}")
        captured_err = getattr(err, 'stderr', None)
        if captured_err:
            logger.error(f"Git stderr: {captured_err.strip()}")
        if check:
            raise
        return False
    except FileNotFoundError:
        logger.error("Git 命令未找到。请确保 Git 已安装并在您的 PATH 中。")
        if check:
            raise
        return False

    if not capture_output:
        return result.returncode == 0
    if result.stdout and not no_log_stdout_on_success:
        logger.debug(f"Git stdout: {result.stdout.strip()}")
    if result.stderr:  # stderr is always worth surfacing at debug level
        logger.debug(f"Git stderr: {result.stderr.strip()}")
    return result

def check_repo_status(repo_path_abs):
    """
    Ensure the local repository matches its remote. If local and remote have
    diverged, the remote state overwrites the local one (hard reset followed
    by a clean of untracked files).

    Returns:
        (bool, str | None): whether the sync succeeded and the repo is now in
        line with the remote, and the current branch name (or None when it
        cannot be determined).
    """
    logger = logging.getLogger(__name__)
    if not os.path.exists(os.path.join(repo_path_abs, '.git')):
        logger.warning(f"目录 {repo_path_abs} 不是一个 Git 仓库。跳过 Git 操作。")
        return False, None # Cannot proceed if not a git repo

    logger.info(f"正在准备同步仓库: {repo_path_abs} 与远程 '{GIT_REMOTE_NAME}'")

    # 1. Determine the current branch name.
    current_branch = None
    try:
        branch_proc = run_git_command(['rev-parse', '--abbrev-ref', 'HEAD'], cwd=repo_path_abs, capture_output=True, text=True, check=True, no_log_stdout_on_success=True)
        current_branch = branch_proc.stdout.strip()
        if not current_branch or current_branch == 'HEAD': # 'HEAD' means detached HEAD
            logger.error(f"  仓库 {repo_path_abs} 处于分离头指针状态或无法确定当前分支。无法安全同步。")
            return False, None
        logger.info(f"  当前本地分支: {current_branch}")
    except subprocess.CalledProcessError:
        logger.error(f"  无法获取 {repo_path_abs} 的当前分支名称。")
        return False, None

    # 2. Fetch the latest state from the remote.
    logger.info(f"  正在从远程 '{GIT_REMOTE_NAME}' 获取最新状态 (git fetch --prune)...")
    if not run_git_command(['fetch', GIT_REMOTE_NAME, '--prune'], cwd=repo_path_abs, check=False):
        logger.warning(f"  从远程 '{GIT_REMOTE_NAME}' 获取更新失败。同步操作可能基于过时数据或失败。")
        return False, current_branch # If fetch fails, cannot reliably sync

    remote_tracking_branch = f'{GIT_REMOTE_NAME}/{current_branch}'
    logger.info(f"  目标远程跟踪分支: {remote_tracking_branch}")

    # 3. Verify that the remote-tracking branch exists.
    try:
        # rev-parse --verify checks the remote-tracking ref without output side effects.
        run_git_command(['rev-parse', '--verify', f'refs/remotes/{remote_tracking_branch}'], cwd=repo_path_abs, check=True, capture_output=True, no_log_stdout_on_success=True)
        logger.info(f"  远程跟踪分支 '{remote_tracking_branch}' 存在。")
    except subprocess.CalledProcessError:
        logger.error(f"  远程跟踪分支 '{remote_tracking_branch}' 在远程 '{GIT_REMOTE_NAME}' 上未找到。无法与远程同步。")
        logger.error(f"  请确保本地分支 '{current_branch}' 已推送到远程或远程分支名称正确。")
        return False, current_branch

    # 4. Force-sync: overwrite the local branch with the remote branch.
    logger.info(f"  将强制使用远程分支 '{remote_tracking_branch}' 覆盖本地分支 '{current_branch}'...")
    try:
        # Reset the local branch to the remote-tracking branch. This discards
        # local commits that were never pushed and uncommitted changes to
        # tracked files.
        logger.info(f"    执行: git reset --hard {remote_tracking_branch}")
        run_git_command(['reset', '--hard', remote_tracking_branch], cwd=repo_path_abs, check=True)

        # Delete all untracked files and directories so the working tree
        # matches the remote state exactly.
        logger.info(f"    执行: git clean -fdx")
        run_git_command(['clean', '-fdx'], cwd=repo_path_abs, check=True)

        logger.info(f"  本地仓库 '{repo_path_abs}' (分支 '{current_branch}') 已成功强制与远程分支 '{remote_tracking_branch}' 同步。")
        return True, current_branch
    except subprocess.CalledProcessError as e:
        logger.error(f"  强制同步本地仓库与远程分支 '{remote_tracking_branch}' 失败: {e}")
        return False, current_branch
    except Exception as e: # catch any other unexpected error during the sync
        logger.error(f"  强制同步期间发生意外错误: {e}")
        return False, current_branch

# --- 辅助函数 ---
def get_repo_path(repo_name):
    """Return the checkout directory of *repo_name* under REPOS_BASE_DIR."""
    return os.path.join(REPOS_BASE_DIR, repo_name)

def get_cmakelists_path(repo_name):
    """Return the path of the repository's top-level CMakeLists.txt."""
    repo_dir = get_repo_path(repo_name)
    return os.path.join(repo_dir, 'CMakeLists.txt')

def get_spec_file_path(repo_name):
    """Return the RPM spec file path, assuming it is named <repo_name>.spec."""
    spec_name = f'{repo_name}.spec'
    return os.path.join(get_repo_path(repo_name), spec_name)

def get_patch_file_path(repo_name, patch_name):
    """
    Return where *patch_name* is stored: directly inside the repository
    directory (as configured by PATCHES_DIR_NAME = '.').
    """
    return os.path.join(get_repo_path(repo_name), patch_name)


def process_cmake_file(cmakelists_path, url_mappings):
    """
    Rewrite URLs inside a CMakeLists.txt according to *url_mappings* (an
    iterable of (old_url, new_url) pairs), keeping a ``.orig`` backup of the
    pre-modification content so a patch can be diffed later.

    Returns:
        bool: True when at least one URL was replaced in this run, False
        otherwise (including every error path).
    """
    logger = logging.getLogger(__name__)
    if not os.path.exists(cmakelists_path):
        logger.warning(f"  [!] CMakeLists.txt 未找到于: {cmakelists_path}")
        return False

    logger.info(f"  [*] 正在处理 CMakeLists.txt: {cmakelists_path}")

    backup_path = cmakelists_path + ".orig"
    # Always back up the *current* CMakeLists.txt: drop any stale .orig first.
    if os.path.exists(backup_path):
        os.remove(backup_path)
        logger.debug(f"  Removed existing .orig file: {backup_path}")

    try:
        shutil.copyfile(cmakelists_path, backup_path)
        logger.info(f"  [*] 已复制 {cmakelists_path} 到 {backup_path} 用于生成 diff。")
    except Exception as e:
        logger.error(f"  [!] 复制文件到 .orig 时出错: {e}")
        return False

    replaced_any = False
    try:
        with open(cmakelists_path, 'r', encoding='utf-8') as f:
            updated = f.read()

        for old_url, new_url in url_mappings:
            if old_url in updated:
                updated = updated.replace(old_url, new_url)
                logger.info(f"    - 已替换 URL: '{old_url}' -> '{new_url}'")
                replaced_any = True

        if replaced_any:
            with open(cmakelists_path, 'w', encoding='utf-8') as f:
                f.write(updated)
            logger.info(f"  [*] CMakeLists.txt 已更新。")
        else:
            logger.info(f"  [*] CMakeLists.txt 中没有需要替换的 URL。")
            # No replacement happened, so the .orig may not be needed for a
            # patch; generate_patch_file() removes it once it confirms there
            # is no difference.

    except Exception as e:
        logger.error(f"  [!] 处理 CMakeLists.txt 时出错: {e}")
        # Best effort: restore the original content from the backup.
        if os.path.exists(backup_path):
            try:
                shutil.copyfile(backup_path, cmakelists_path)
                logger.info(f"  [*] 由于错误，已从 {backup_path} 恢复 {cmakelists_path}。")
            except Exception as restore_e:
                logger.error(f"  [!] 从 .orig 恢复文件时出错: {restore_e}")
        return False

    return replaced_any

def generate_patch_file(repo_name, cmakelists_path, patch_file_name):
    """
    Generate a unified-diff patch by comparing the modified CMakeLists.txt
    against its .orig backup (created by process_cmake_file). On success the
    .orig backup is removed; if the files are identical, no patch is written
    and the backup is also cleaned up.

    Returns:
        (bool, int | None): whether a patch was generated, and the applicable
        strip level (or None).
    """
    logger = logging.getLogger(__name__)
    # cmakelists_path is the on-disk location of the CMakeLists.txt
    # (main() passes an absolute path); the .orig backup sits next to it.
    original_file_on_disk = cmakelists_path + ".orig"
    # modified_file_on_disk is cmakelists_path, not directly used by diff args but good to be aware

    patch_output_path = get_patch_file_path(repo_name, patch_file_name) # e.g., REPOS_BASE_DIR/repo_name/0001-fix.patch
    
    # cwd for diff should be REPOS_BASE_DIR, so the paths recorded in the
    # patch are of the form "repo_name/..." (matching strip level 1).
    # REPOS_BASE_DIR is a global constant, e.g., 'agiros_src/agiros_partial_openEuler2403lts'
    cwd_for_diff = REPOS_BASE_DIR

    # File paths for diff command, relative to cwd_for_diff
    # e.g., "repo_name/CMakeLists.txt.orig"
    diff_arg_original = os.path.join(repo_name, "CMakeLists.txt.orig")
    # e.g., "repo_name/CMakeLists.txt"
    diff_arg_modified = os.path.join(repo_name, "CMakeLists.txt")

    if not os.path.exists(original_file_on_disk):
        logger.warning(f"  [!] 未找到原始文件备份: {original_file_on_disk}。无法生成补丁。")
        return False, None

    logger.info(f"  [*] 尝试生成补丁: {patch_output_path}")
    logger.info(f"      diffing '{diff_arg_original}' and '{diff_arg_modified}' in cwd '{cwd_for_diff}'")
    try:
        # determined_strip_level should be 1 to match %autosetup -p1
        # This means the patch paths will be like "repo_name/file.txt"
        determined_strip_level = 1 
        process = subprocess.run(
            ['diff', '-u', diff_arg_original, diff_arg_modified],
            cwd=cwd_for_diff, # Run diff from the parent of repo_name directories
            capture_output=True,
            text=True,
            encoding='utf-8',
            check=False 
        )

        if process.returncode == 0: # files are identical
            logger.info(f"  [*] 文件 '{diff_arg_original}' 和 '{diff_arg_modified}' (在 {cwd_for_diff} 中) 相同。未为 {patch_file_name} 生成补丁。")
            if os.path.exists(original_file_on_disk):
                os.remove(original_file_on_disk)
                logger.info(f"  [*] 已清理 {original_file_on_disk}。")
            return False, None
        elif process.returncode == 1: # files differ: a patch was produced
            with open(patch_output_path, 'w', encoding='utf-8') as f:
                f.write(process.stdout)
            logger.info(f"  [*] 已生成补丁文件: {patch_output_path}")
            if os.path.exists(original_file_on_disk):
                os.remove(original_file_on_disk)
                logger.info(f"  [*] 已清理 {original_file_on_disk}。")
            return True, determined_strip_level
        else: # any other diff error
            logger.error(f"  [!] 生成 diff 时出错 (返回码 {process.returncode}): {process.stderr}")
            # If diff itself failed, keep the .orig so the user can inspect it manually.
            return False, None
    except FileNotFoundError:
        logger.error(f"  [!] 错误: 未找到 'diff' 命令。请确保已安装并在您的 PATH 中。")
        return False, None
    except Exception as e:
        logger.error(f"  [!] 生成补丁期间发生意外错误: {e}")
        return False, None

def get_top_dir_from_tar(tar_path):
    """
    Return the name of the top-level directory inside a tar archive.

    Only the first member's header is read (via ``TarFile.next()``), so the
    whole archive is no longer scanned as ``getmembers()[0]`` used to do.
    The compression is auto-detected (``'r:*'``), which stays compatible
    with the gzip archives this script previously assumed.

    Raises:
        ValueError: if the archive contains no members.
    """
    import tarfile  # local import kept: tarfile is only needed by this helper
    with tarfile.open(tar_path, 'r:*') as tar:
        first_member = tar.next()
        if first_member is None:
            # Previously this surfaced as an opaque IndexError.
            raise ValueError(f"empty tar archive: {tar_path}")
        return first_member.name.split('/')[0]
    
def test_get_top_dir_from_tar():
    """Ad-hoc smoke check: print the top-level folder of a known local archive."""
    archive = 'agiros_src/agiros_partial_openEuler2403lts/acado-vendor/agiros-loong-acado-vendor_1.0.0.orig.tar.gz'
    folder_name = get_top_dir_from_tar(archive)
    print(f"解压后的文件夹名称是: {folder_name}")


def update_spec_file(spec_file_path, patch_file_name_in_repo_dir, strip_level):
    """
    Register a patch in an RPM .spec file: add a ``PatchN:`` declaration near
    the existing Patch/Source lines and, when the spec does not use
    %autosetup, a matching ``%patchN -pX`` line after %setup in %prep.

    Args:
        spec_file_path: path of the .spec file, edited in place.
        patch_file_name_in_repo_dir: patch file name relative to the repo root.
        strip_level: ``-p`` level used when applying the patch; must not be None.

    Returns:
        bool: True on success, False on failure.
    """
    logger = logging.getLogger(__name__)
    if not os.path.exists(spec_file_path):
        logger.warning(f"  [!] Spec 文件未找到于: {spec_file_path}")
        return False

    if strip_level is None:
        logger.error(f"  [!] 无法更新 spec 文件，因为补丁的 strip_level 未确定。")
        return False

    logger.info(f"  [*] 正在处理 spec 文件: {spec_file_path} (使用 strip_level: {strip_level})")
    
    try:
        # Find the next free Patch number by scanning existing PatchN: lines.
        next_patch_num = 0
        try:
            with open(spec_file_path, 'r', encoding='utf-8') as f:
                for line in f:
                    if line.strip().startswith("Patch") and ":" in line:
                        try:
                            num_str = line.split(":")[0].strip()[5:]
                            if num_str.isdigit():
                                num = int(num_str)
                                if num >= next_patch_num:
                                    next_patch_num = num + 1
                        except ValueError:
                            logger.debug(f"    无法解析补丁编号: {line.strip()}")
        except IOError as e:
            logger.warning(f"    读取 spec 文件以确定 next_patch_num 时出错: {e}")
            # Keep going, falling back to patch_num = 0.

        patch_num = next_patch_num # use the computed next patch number
        patch_directive = f"Patch{patch_num}: {patch_file_name_in_repo_dir}"
        apply_patch_directive = f"%patch{patch_num} -p{strip_level}" 

        logger.info(f"    - 将添加到头文件部分 (靠近其他 Patch/Source 行):")
        logger.info(f"      {patch_directive}")
        logger.info(f"    - 将添加到 %prep 部分 (在 %setup 之后):")
        logger.info(f"      {apply_patch_directive}")
        
        spec_lines = []
        with open(spec_file_path, 'r', encoding='utf-8') as f:
            spec_lines = f.readlines()

        # Check whether the spec uses %autosetup (which applies patches itself).
        has_autosetup = any(
            line.strip().startswith("%autosetup") for line in spec_lines
        )

        # Determine whether the declaration / apply directives already exist.
        patch_already_exists = any(
            patch_file_name_in_repo_dir in line and line.strip().startswith("Patch") 
            for line in spec_lines
        )
        # NOTE(review): %patch lines normally do not mention the patch *file
        # name*, so this condition looks like it can never be True — confirm,
        # otherwise duplicate %patchN lines could be inserted on re-runs.
        apply_already_exists = any(
            f"%patch{patch_num}" in line.split()[0] and patch_file_name_in_repo_dir in line
            for line in spec_lines if line.strip().startswith("%patch")
        )

        if patch_already_exists:
            logger.info(f"    补丁声明 '{patch_file_name_in_repo_dir}' 已存在于 spec 文件中。跳过添加 Patch{patch_num} 行。")
        else:
            insert_patch_index = -1
            # Search bottom-up for the last PatchX: or SourceX: line.
            for i in range(len(spec_lines) - 1, -1, -1):
                line_stripped = spec_lines[i].strip()
                if line_stripped.startswith("Patch") and ":" in line_stripped or \
                   line_stripped.startswith("Source") and ":" in line_stripped:
                    insert_patch_index = i + 1
                    break
            if insert_patch_index == -1: # none found: try inserting after Name:
                for i, line in enumerate(spec_lines):
                    if line.strip().startswith("Name:"):
                        insert_patch_index = i + 1
                        break
            if insert_patch_index != -1:
                spec_lines.insert(insert_patch_index, patch_directive + "\n")
            else: # fallback: prepend to the file (the user may need to adjust)
                spec_lines.insert(0, patch_directive + "\n")
                logger.warning("    无法可靠地找到插入 Patch 指令的位置，已添加到文件开头。")

        if apply_already_exists:
            logger.info(f"    应用补丁指令 '{apply_patch_directive}' (或类似的针对此补丁文件) 已存在于 %prep 部分。跳过添加。")
        elif not has_autosetup:
            # Only insert a %patchX line when %autosetup is not used.
            insert_apply_patch_index = -1
            in_prep_section = False
            setup_line_index = -1
            for i, line in enumerate(spec_lines):
                line_stripped = line.strip()
                if line_stripped == "%prep":
                    in_prep_section = True
                if in_prep_section and (line_stripped.startswith("%setup") or line_stripped.startswith("%autosetup")):
                    setup_line_index = i
                # Insert after %setup/%autosetup but before the next %section or blank line.
                if in_prep_section and setup_line_index != -1 and i > setup_line_index:
                    if line_stripped == "" or line_stripped.startswith("%"): # next section or blank line
                        insert_apply_patch_index = i # insert before this line
                        break
                    insert_apply_patch_index = i + 1 # default: insert after the current line
            
            if setup_line_index != -1 and insert_apply_patch_index == -1 and in_prep_section: # %prep ends right after %setup
                 insert_apply_patch_index = len(spec_lines)


            if insert_apply_patch_index != -1:
                # Make sure we insert after the %setup or %autosetup line.
                if setup_line_index != -1 and insert_apply_patch_index <= setup_line_index:
                    insert_apply_patch_index = setup_line_index + 1
                
                # If %prep has no trailing blank line, append after the last line.
                if insert_apply_patch_index > len(spec_lines):
                    insert_apply_patch_index = len(spec_lines)

                spec_lines.insert(insert_apply_patch_index, apply_patch_directive + "\n")

            else: # fallback: the user must add it manually, or this logic needs improving
                logger.warning(f"  [!] 无法可靠地找到插入 {apply_patch_directive} 的位置。需要手动插入。")
                # Try inserting after %setup, or failing that after %prep.
                prep_idx = -1
                try:
                    prep_idx = next(i for i, line in enumerate(spec_lines) if line.strip() == "%prep")
                    if setup_line_index != -1:
                         spec_lines.insert(setup_line_index + 1, apply_patch_directive + "\n")
                    elif prep_idx != -1 :
                         spec_lines.insert(prep_idx + 1, apply_patch_directive + "\n")
                    else:
                        return False # cannot insert safely
                except StopIteration:
                     logger.error("  [!] 未找到 %prep 部分。无法自动添加应用补丁指令。")
                     return False

        with open(spec_file_path, 'w', encoding='utf-8') as f:
           f.writelines(spec_lines)
        logger.info(f"  [*] Spec 文件已更新: {spec_file_path}")
        return True
    except IOError as e:
        logger.error(f"  [!] 更新 spec 文件 {spec_file_path} 时发生 IOError: {e}")
        return False
    except Exception as e:
        logger.error(f"  [!] 更新 spec 文件 {spec_file_path} 时发生意外错误: {e}")
        return False


def git_commit_and_push(repo_path_abs, files_to_add, commit_message, current_branch):
    """
    Run git add / commit / push for the given repository.

    The Gitee configuration ('user', 'token', 'email', 'force_push') is
    obtained via get_gitee_config_for_push(). When both a token and a user
    name are available, the push rewrites the HTTPS remote URL to embed the
    credentials for authentication.

    Returns:
        bool: True on success (including the "nothing to commit" case),
        False on failure.
    """
    logger = logging.getLogger(__name__)
    if not files_to_add:
        logger.info(f"  在仓库 {repo_path_abs} 中没有文件需要提交。")
        return True # no file changes also counts as a "successful" completion

    logger.info(f"  准备为仓库 {os.path.basename(repo_path_abs)} 提交更改。文件: {files_to_add}")

    gitee_config = get_gitee_config_for_push()
    
    # Git Add
    for file_to_add in files_to_add:
        # Convert absolute paths to paths relative to the repository root.
        if os.path.isabs(file_to_add):
            try:
                file_to_add_relative = os.path.relpath(file_to_add, repo_path_abs)
            except ValueError: # different drive / not relativizable: use as-is (usually just the file name)
                logger.warning(f"    无法将 {file_to_add} 转换为相对于 {repo_path_abs} 的路径，将按原样添加。")
                file_to_add_relative = file_to_add
        else:
            file_to_add_relative = file_to_add

        if not run_git_command(['add', file_to_add_relative], cwd=repo_path_abs, check=False):
            logger.error(f"  [!] Git add 文件 '{file_to_add_relative}' 失败于仓库 {repo_path_abs}。")
            return False
        logger.info(f"    已添加文件到暂存区: {file_to_add_relative}")

    # Git Commit
    commit_cmd = ['commit', '-m', commit_message]
    if gitee_config.get('user') and gitee_config.get('email'):
        commit_cmd.extend(['--author', f"{gitee_config['user']} <{gitee_config['email']}>"])
        logger.info(f"    使用作者信息提交: {gitee_config['user']} <{gitee_config['email']}>")
    
    if not run_git_command(commit_cmd, cwd=repo_path_abs, check=False):
        # The commit may have failed simply because there was nothing to
        # commit (unlikely right after 'git add', but check to be safe).
        # NOTE(review): if this status call itself fails, run_git_command
        # returns False and the .stdout access below would raise — confirm
        # git availability is guaranteed at this point.
        status_proc = run_git_command(['status', '--porcelain'], cwd=repo_path_abs, capture_output=True, text=True, check=False)
        if not status_proc.stdout.strip(): # no staged changes
            logger.info(f"  Git commit 在仓库 {repo_path_abs} 中没有检测到更改。可能文件内容未实际变化。")
            return True # treat as completed
        logger.error(f"  [!] Git commit 失败于仓库 {repo_path_abs}。")
        return False
    logger.info(f"  [*] Git commit 成功: \"{commit_message}\"")

    # Git Push
    gitee_token = gitee_config.get('token')
    gitee_user = gitee_config.get('user')
    is_force_push = gitee_config.get('force_push', False)

    if gitee_token and current_branch and gitee_user: # gitee_user is also needed to build the authenticated URL
        push_cmd = ['push']
        if is_force_push:
            push_cmd.append('--force')
        
        # Build a remote URL with the token embedded, assuming the remote is
        # "origin" and the URL looks like https://gitee.com/user/repo.git.
        try:
            remote_url_proc = run_git_command(['config', '--get', f'remote.{GIT_REMOTE_NAME}.url'], cwd=repo_path_abs, capture_output=True, text=True, check=True)
            remote_url = remote_url_proc.stdout.strip()
            if remote_url.startswith('https://') and '@' not in remote_url: # avoid embedding the token twice
                authed_remote_url = remote_url.replace('https://', f'https://{gitee_user}:{gitee_token}@')
                push_cmd.extend([authed_remote_url, current_branch])
            else: # unexpected URL format or already authenticated: fall back to a default push
                logger.warning(f"    远程 URL '{remote_url}' 格式不支持自动添加 token 或已包含认证信息。将尝试默认推送。")
                push_cmd.extend([GIT_REMOTE_NAME, current_branch])

        except subprocess.CalledProcessError:
            logger.error(f"  [!] 无法获取远程 '{GIT_REMOTE_NAME}' 的 URL。将尝试默认推送。")
            push_cmd.extend([GIT_REMOTE_NAME, current_branch])
        
        logger.info(f"  [*] 正在推送到远程 '{GIT_REMOTE_NAME}' 分支 '{current_branch}' {'(强制推送)' if is_force_push else ''}...")
        if not run_git_command(push_cmd, cwd=repo_path_abs, check=False):
            logger.error(f"  [!] Git push 失败于仓库 {repo_path_abs}。")
            return False
        logger.info(f"  [*] Git push 成功。")
    elif not gitee_token:
        logger.info(f"  [*] 未提供 Gitee Token (GITEE_TOKEN)，跳过 Git push。")
    elif not current_branch:
        logger.info(f"  [*] 无法确定当前分支，跳过 Git push。")
    elif not gitee_user: # without a user name the authenticated push URL cannot be built
        logger.info(f"  [*] 未提供 Gitee 用户名，无法构造认证推送 URL，跳过 Git push。")
        
    return True

def get_gitee_config_for_push():
    """
    Build the Gitee credential/config dict used for commit and push.

    Values are read from the environment after loading a .env file via
    python-dotenv: ``gitee_user``, ``gitee_token``, ``GITEE_EMAIL`` and
    ``IS_FORCE_PUSH``. The mixed lower/upper-case names are kept as-is —
    they are the external contract of the .env file.

    Returns:
        dict: keys 'user', 'token', 'email', 'force_push'.
    """
    logger = logging.getLogger(__name__)
    # Load variables from a .env file, if one is present.
    load_dotenv()
    gitee_user_env = os.getenv('gitee_user') 
    gitee_token_env = os.getenv('gitee_token') 
    gitee_email_env = os.getenv('GITEE_EMAIL') 
    is_force_push_str = os.getenv('IS_FORCE_PUSH', 'FALSE').upper()
    is_force_push_env = is_force_push_str == 'TRUE'

    # Determine the email address used for commits.
    gitee_email_for_commit = None
    if gitee_email_env:
        gitee_email_for_commit = gitee_email_env
    elif gitee_user_env: 
        # Bug fix: DEFAULT_EMAIL_DOMAIN_FOR_COMMIT may hold a full address
        # ("4384213@qq.com"); appending it verbatim used to produce a
        # malformed "user@4384213@qq.com". Keep only the part after the last
        # '@' (a bare domain passes through unchanged).
        default_domain = DEFAULT_EMAIL_DOMAIN_FOR_COMMIT.rpartition('@')[2]
        gitee_email_for_commit = f"{gitee_user_env.split('@')[0]}@{default_domain}"
        logger.info(f"  未在 .env 中找到 GITEE_EMAIL，将使用构造的邮箱: {gitee_email_for_commit}")

    # Assemble the config dict consumed by git_commit_and_push().
    gitee_config_for_push = {
        "user": gitee_user_env,
        "token": gitee_token_env,
        "email": gitee_email_for_commit,
        "force_push": is_force_push_env
    }

    if gitee_config_for_push["user"]:
        logger.info(f"  [*] 将使用 Gitee 用户名 '{gitee_config_for_push['user']}' 进行 Git 操作。")
    if gitee_config_for_push["token"]:
        logger.info(f"  [*] 检测到 Gitee Token。将用于推送操作。")
    if gitee_config_for_push["force_push"]:
        logger.info(f"  [*] IS_FORCE_PUSH 设置为 TRUE，将执行强制推送。")
    return gitee_config_for_push



def copy_file_main():
    """
    Entry point for the folder-copy workflow: copy every repository listed in
    repo_urls.csv from the build output directory into the target tree.
    """
    logger = logging.getLogger(__name__)  # module logger for progress reporting

    source_directory = "build_tools/output/repo"  # adjust to your source tree
    target_directory = "agiros_src/agiros_partial_openEuler2403lts"  # adjust to your target tree
    # specified_names = ["acado-vendor"]  # or hard-code the folders to copy
    specified_names = get_repos_name()

    success_count, failed_folders = copy_specified_folders(source_directory, target_directory, specified_names)

    if failed_folders:
        # Partial failures are tolerated; the rest of the workflow may still proceed.
        logger.warning("部分文件夹复制失败，但继续执行其他操作")
        return True

    return True  # everything copied successfully

def copy_specified_folders(source_dir, target_dir, folder_names):
    """
    Copy each named folder from *source_dir* into *target_dir*, replacing any
    pre-existing target. A failure on one folder is recorded and the loop
    continues with the remaining folders.

    Returns:
        (int, list): number of folders copied successfully, and a list of
        (folder_name, error_description) pairs for the failures.
    """
    logger = logging.getLogger(__name__)
    success_count = 0
    failed_folders = []

    for name in folder_names:
        src = os.path.join(source_dir, name)
        dst = os.path.join(target_dir, name)

        try:
            if not os.path.exists(src):
                logger.warning(f"源文件夹不存在: {src}")
                failed_folders.append((name, "源文件夹不存在"))
                continue

            if os.path.exists(dst):
                # Remove any stale copy first so copytree gets a clean destination.
                try:
                    if os.path.isdir(dst):
                        shutil.rmtree(dst)
                    else:
                        os.remove(dst)
                except Exception as e:
                    logger.error(f"删除已存在的目标路径失败 {dst}: {e}")
                    failed_folders.append((name, str(e)))
                    continue

            shutil.copytree(src, dst)
            logger.info(f"已拷贝文件夹: {name}")
            success_count += 1
        except Exception as e:
            logger.error(f"复制文件夹 {name} 时出错: {e}")
            failed_folders.append((name, str(e)))

    # Summarise the outcome.
    total = len(folder_names)
    if failed_folders:
        logger.warning(f"复制完成。成功: {success_count}/{total}")
        logger.warning("失败的文件夹:")
        for folder, error in failed_folders:
            logger.warning(f"  - {folder}: {error}")
    else:
        logger.info(f"所有文件夹复制成功 ({success_count}/{total})")

    return success_count, failed_folders

def get_repos_name(filepath='repo_urls.csv'):
    """
    Read repository names from the URL-mapping CSV.

    Args:
        filepath: CSV file containing at least a '仓库名' (repository name)
                  column. Defaults to 'repo_urls.csv' in the current working
                  directory, preserving the previous hard-coded behaviour.

    Returns:
        list[str]: repository names in file order (duplicates preserved).
    """
    with open(filepath, mode='r', encoding='utf-8') as csvfile:
        reader = csv.DictReader(csvfile)
        # Only the repository-name column is needed here; the URL columns
        # are consumed by main().
        return [row['仓库名'] for row in reader]



def main():
    """
    Top-level workflow: read the repo/URL mapping CSV, then for each selected
    repository rewrite the URLs in its CMakeLists.txt, generate a patch from
    the changes, and register that patch in the repository's .spec file.
    """
    setup_logging()
    logger = logging.getLogger(__name__)

    # Read the CSV to build: repo name -> [(original URL, destination URL), ...]
    url_map_by_repo = {}
    if not os.path.exists(GIT_URLS_CSV_PATH):
        logger.error(f"[!] 错误: {GIT_URLS_CSV_PATH} 未找到。")
        return

    with open(GIT_URLS_CSV_PATH, mode='r', encoding='utf-8') as csvfile:
        reader = csv.DictReader(csvfile)
        for row in reader:
            repo_name = row['仓库名']
            original_url = row['URL']
            dst_url = row['dst_url']
            if repo_name not in url_map_by_repo:
                url_map_by_repo[repo_name] = []
            url_map_by_repo[repo_name].append((original_url, dst_url))

    # 2. Decide which repositories to process.
    repos_to_process = []
    if TARGET_REPO_NAME:
        if TARGET_REPO_NAME in url_map_by_repo:
            repos_to_process.append(TARGET_REPO_NAME)
            logger.info(f"[*] TARGET_REPO_NAME 设置为 '{TARGET_REPO_NAME}'。仅处理此仓库。")
        else:
            logger.error(f"[!] 目标仓库 '{TARGET_REPO_NAME}' 在 {GIT_URLS_CSV_PATH} 中未找到。")
            return
    else:
        repos_to_process = list(url_map_by_repo.keys())
        if not repos_to_process:
            logger.warning(f"[*] 在 {GIT_URLS_CSV_PATH} 中未找到要处理的仓库。")
            return
        logger.info(f"[*] TARGET_REPO_NAME 未设置。正在处理来自 {GIT_URLS_CSV_PATH} 的所有 {len(repos_to_process)} 个仓库。")


    for repo_name in repos_to_process:
        logger.info(f"\n--- 开始处理仓库: {repo_name} ---")
        repo_path_abs = os.path.abspath(get_repo_path(repo_name))

        if not os.path.isdir(repo_path_abs):
            logger.error(f"  [!] 未找到仓库 {repo_name} 的目录: {repo_path_abs}")
            continue

        current_repo_url_mappings = url_map_by_repo.get(repo_name, [])
        if not current_repo_url_mappings:
            logger.info(f"  [*] 在 {GIT_URLS_CSV_PATH} 中未找到 {repo_name} 的 URL 映射。")
            # continue # intentionally NOT skipped: existing patches may still need handling

        # Git pre-check (currently disabled).
        # repo_ok_for_commit, current_branch = check_repo_status(repo_path_abs)
        # if not repo_ok_for_commit:
        #     logger.warning(f"  仓库 {repo_name} 未通过预检查。将处理文件，但不会执行 Git 提交和推送。")
        
        files_changed_by_script = False
        
        # Rewrite URLs in CMakeLists.txt (a .orig backup is created for diffing).
        cmakelists_path = get_cmakelists_path(repo_name)
        cmakelists_path_abs = os.path.abspath(cmakelists_path)
        cmake_modified_by_script = process_cmake_file(cmakelists_path_abs, current_repo_url_mappings)
        if cmake_modified_by_script:
            files_changed_by_script = True

        # Generate the patch file. This runs even when cmake_modified_by_script
        # is False, to capture manual edits and to clean up the .orig backup.
        patch_file_name = f"{repo_name}-cmake-urls.patch"
        patch_generated_by_script,strip_level = generate_patch_file(repo_name, cmakelists_path_abs, patch_file_name)
        if patch_generated_by_script:
            files_changed_by_script = True
            
        # Update the .spec file (only when a patch was actually generated).
        spec_file_updated_by_script = False
        if patch_generated_by_script:
            spec_file_path = get_spec_file_path(repo_name)
            # patch_file_name_in_repo_dir is the patch file name relative to the repo root
            patch_file_name_in_repo_dir = os.path.basename(get_patch_file_path(repo_name, patch_file_name))
            if update_spec_file(spec_file_path, patch_file_name_in_repo_dir,strip_level):
                spec_file_updated_by_script = True
                files_changed_by_script = True
            else:
                logger.error(f"  未能更新 {repo_name} 的 spec 文件。")
        elif cmake_modified_by_script and not patch_generated_by_script:
            logger.info(f"  CMakeLists.txt 已修改但未生成补丁 (可能内容无实际差异), 跳过 spec 文件更新。")

            # (A commented-out "Git commit and push" step previously lived here;
            # see git_commit_and_push() for the current implementation.)


        logger.info(f"--- 完成处理仓库: {repo_name} ---")

    logger.info("\n所有仓库处理完毕。")

# Script entry point: run the URL-patching workflow.
if __name__ == '__main__':
    # copy_file_main()  # alternative entry point: bulk-copy repo folders instead
    main()