import os
import zipfile
import tempfile
import subprocess
import shutil
from pathlib import Path
import hashlib
import base64
from pathlib import Path
# from wheel.tool import unpack, pack
from zipfile import ZipInfo
import stat

def run_ldd(path):
    """Return the `ldd` output for *path*, or "" when no info is available.

    `ldd` exits non-zero for non-ELF or missing files; that case — and a
    system without `ldd` installed at all — is reported and treated as
    "no dependency information" rather than raising.
    """
    try:
        return subprocess.check_output(["ldd", str(path)], text=True)
    except subprocess.CalledProcessError as e:
        print(f"[!] ldd failed on {path}: {e}")
        return ""
    except FileNotFoundError:
        # The ldd binary itself is missing (e.g. non-glibc platform);
        # treat the same as a failed run instead of crashing.
        print(f"[!] ldd not available; skipping {path}")
        return ""

def parse_ldd_output(output):
    """Return the library names that an `ldd` dump reports as 'not found'."""
    unresolved = []
    for line in output.splitlines():
        name, sep, resolution = line.partition("=>")
        # Lines without '=>' (vdso, the dynamic loader) carry no resolution.
        if sep and "not found" in resolution:
            unresolved.append(name.strip())
    return unresolved

def lib_exists_in_dir(lib_name, lib_dir):
    """Return True if a directory entry named *lib_name* exists in *lib_dir*."""
    for entry in os.listdir(lib_dir):
        if Path(entry).name == lib_name:
            return True
    return False

def set_rpath(so_file, rpath='$ORIGIN'):
    """Rewrite the rpath of *so_file* via patchelf (default: its own dir)."""
    command = ['patchelf', '--set-rpath', rpath, str(so_file)]
    try:
        subprocess.check_call(command)
    except subprocess.CalledProcessError as e:
        print(f"[✗] Failed rpath: {so_file} - {e}")
    else:
        print(f"[✓] rpath set: {so_file}")

def get_ldd_lib_names(output):
    """Return the bare library names from the '=>' lines of an `ldd` dump.

    Pseudo-entries are excluded: the vdso and any entry given as an
    absolute path (e.g. the dynamic loader /lib64/ld-linux-*.so).
    """
    names = []
    for line in output.splitlines():
        name, sep, _resolution = line.partition("=>")
        if not sep:
            continue
        name = name.strip()
        if not name or name == "linux-vdso.so.1" or name.startswith("/"):
            continue
        names.append(name)
    return names

def fix_so_rpaths_in_lib_dir(lib_dir):
    """Set an $ORIGIN rpath on shared objects in *lib_dir* whose missing
    dependencies are actually present in that same directory.

    Symlinks are resolved first so each real file is patched at most once;
    a `.bak` copy is kept while patching and the patch is reverted if the
    post-patch dependency list no longer contains every pre-patch entry.
    Uses the module-level run_ldd/parse_ldd_output/get_ldd_lib_names/
    set_rpath helpers.
    """
    lib_dir = Path(lib_dir)

    so_files = list(lib_dir.glob("*.so*"))

    # Real files already handled — several symlinks may point at one target.
    resolved_targets = set()

    for so_file in so_files:
        try:
            real_target = so_file.resolve()
        # NOTE(review): Path.resolve() is non-strict by default (3.6+), so it
        # does not raise for dangling symlinks — this branch may be dead;
        # confirm whether strict=True was intended.
        except FileNotFoundError:
            print(f"[⚠️] Broken symlink skipped: {so_file}")
            continue  # dangling symlink — nothing to patch

        if real_target in resolved_targets:
            print(f"[↩] Already processed target: {real_target.name} (via {so_file.name})")
            continue
        resolved_targets.add(real_target)

        before_ldd = run_ldd(real_target)
        before_libs = get_ldd_lib_names(before_ldd)
        missing_deps = parse_ldd_output(before_ldd)

        for dep in missing_deps:
            if lib_exists_in_dir(dep, lib_dir):
                print(f"❗ [!] {real_target.name} missing {dep} — attempting fix...")

                # Keep a copy so the patch can be rolled back on mismatch.
                backup_path = real_target.with_suffix(real_target.suffix + ".bak")
                shutil.copy2(real_target, backup_path)
                print(f"📦  Backup created: {backup_path.name}")

                set_rpath(real_target)

                after_ldd = run_ldd(real_target)
                after_libs = get_ldd_lib_names(after_ldd)

                # Every pre-patch dependency must still be listed afterwards;
                # otherwise assume the patch corrupted the file and restore it.
                if not set(before_libs).issubset(set(after_libs)):
                    print(f"🔁  [✗] Dependency mismatch after patch, reverting 🚫 {real_target.name}")
                    shutil.move(backup_path, real_target)
                else:
                    print(f"✅  [✓] rpath fixed and dependencies verified: {real_target.name}")
                    backup_path.unlink(missing_ok=True)

                break  # patch each target file at most once

def write_record_file(wheel_root):
    """Regenerate the RECORD file inside the first *.dist-info dir under
    *wheel_root*.

    Each regular file is listed as ``path,sha256=<urlsafe-b64-digest>,<size>``
    per the wheel binary-distribution format; RECORD itself is listed with
    empty hash and size fields, as required by the spec.

    Accepts either a str or a Path for *wheel_root* (the original crashed
    on str input). Prints an error and returns if no *.dist-info exists.
    """
    wheel_root = Path(wheel_root)  # generalize: allow str callers
    dist_info_dirs = list(wheel_root.glob("*.dist-info"))
    if not dist_info_dirs:
        print("[✗] No .dist-info directory found.")
        return
    dist_info = dist_info_dirs[0]
    record_path = dist_info / "RECORD"

    print(f"[+] Generating new RECORD: {record_path.name}")
    with record_path.open('w', encoding='utf-8') as record_file:
        for file_path in wheel_root.rglob("*"):
            rel_path = file_path.relative_to(wheel_root).as_posix()

            if file_path == record_path:
                # RECORD must not hash itself: empty hash and size fields.
                record_file.write(f"{rel_path},,\n")
                continue

            if file_path.is_file():
                data = file_path.read_bytes()
                digest = hashlib.sha256(data).digest()
                # Wheel spec uses urlsafe base64 with '=' padding stripped.
                digest_b64 = base64.urlsafe_b64encode(digest).rstrip(b'=').decode('ascii')
                record_file.write(f"{rel_path},sha256={digest_b64},{len(data)}\n")

def process_whl(whl_path):
    """Patch rpaths inside a .whl archive in place.

    Pipeline: extract the wheel to a temp dir, re-apply executable bits to
    likely binaries under bin/, run the rpath fixer over bundled *.libs
    directories, regenerate RECORD, then repack over the original file.
    Requires `patchelf` on PATH; prints an error and returns otherwise.
    """
    if shutil.which("patchelf") is None:
        print("[✗] patchelf not found. Please install it first.")
        return

    whl_path = Path(whl_path).resolve()
    if not whl_path.exists() or not whl_path.suffix == ".whl":
        print(f"[✗] Not a valid .whl file: {whl_path}")
        return

    with tempfile.TemporaryDirectory() as tmpdir_str:
        tmpdir = Path(tmpdir_str)
        print(f"[+] Extracting {whl_path.name} ...")
        
        # zipfile.extractall does not restore unix permission bits, so
        # executable bits are re-applied explicitly after extraction.
        with zipfile.ZipFile(whl_path, 'r') as zip_ref:
            zip_ref.extractall(tmpdir)
        
        # Heuristic: treat any non-.py file under a 'bin' path component
        # as a binary that should be executable.
        print("[+] Checking for and setting executable permissions...")
        for root, _, files in os.walk(tmpdir):
            for file_name in files:
                full_path = Path(root) / file_name
                if 'bin' in full_path.parts and not file_name.endswith('.py'):
                    # Only chmod regular files.
                    if full_path.is_file():
                        current_stat = full_path.stat()
                        # Add execute for owner, group and others on top of
                        # whatever mode extraction left behind.
                        os.chmod(full_path, current_stat.st_mode | stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH)
                        print(f"  - Set executable permission on: {full_path.relative_to(tmpdir)}")

        # Sanity check: a valid wheel carries a *.dist-info directory.
        dist_info_dirs = list(tmpdir.glob("*.dist-info"))
        if not dist_info_dirs:
            print("[✗] No .dist-info directory found in the wheel.")
            return
        
        # Locate bundled shared-library dirs (auditwheel-style *.libs),
        # both nested one level down and at the archive root.
        lib_dirs = list(tmpdir.glob("*/.libs")) + list(tmpdir.glob("*.libs"))
        if lib_dirs:
            for lib_dir in lib_dirs:
                print(f"[+] Checking .so dependencies in {lib_dir} ...")
                fix_so_rpaths_in_lib_dir(lib_dir)

        # Regenerate RECORD so hashes/sizes match the patched files.
        write_record_file(tmpdir)

        # Repack over the original wheel (walk skips empty directories).
        print(f"[+] Repacking and overwriting: {whl_path.name} ...")
        with zipfile.ZipFile(whl_path, 'w', zipfile.ZIP_DEFLATED) as zipf:
            for root, _, files in os.walk(tmpdir):
                for file_name in files:
                    full_path = Path(root) / file_name
                    rel_path = full_path.relative_to(tmpdir)
                    zipf.write(full_path, rel_path)

        print(f"[✅] Original wheel overwritten: {whl_path}")


if __name__ == "__main__":
    import argparse

    cli = argparse.ArgumentParser(
        description="Fix rpath inside .whl package for embedded .libs/"
    )
    cli.add_argument("whl", help="Path to .whl file")
    options = cli.parse_args()
    process_whl(options.whl)

