import os
import zipfile
import tempfile
import subprocess
import shutil
from pathlib import Path
import hashlib
import base64
from pathlib import Path

def run_ldd(path):
    """Return the `ldd` output for *path*, or "" when inspection fails.

    Args:
        path: path-like pointing at an ELF shared object or executable.

    Returns:
        The raw ldd stdout as text, or "" if ldd exits non-zero (e.g. the
        file is not a dynamic executable) or ldd itself is not installed.
    """
    try:
        return subprocess.check_output(["ldd", str(path)], text=True)
    except (subprocess.CalledProcessError, FileNotFoundError):
        # FileNotFoundError: no `ldd` on PATH (musl-based or minimal
        # containers). Treat both cases as "no dependency info".
        return ""

def parse_ldd_output(output):
    """Return the names of shared libraries ldd reported as unresolved.

    Relevant ldd lines look like "libfoo.so.1 => not found"; the
    left-hand side of "=>" is collected whenever the right-hand side
    contains "not found". Lines without "=>" carry no resolution info
    and are ignored.
    """
    rows = [ln.split("=>") for ln in output.splitlines() if "=>" in ln]
    return [fields[0].strip() for fields in rows if "not found" in fields[1]]

def lib_exists_in_dir(lib_name, lib_dir):
    """Return True if any entry in *lib_dir* has *lib_name* as a prefix.

    Prefix matching (rather than exact) tolerates version suffixes such
    as "libfoo.so" matching "libfoo.so.1.2".
    """
    return any(entry.startswith(lib_name) for entry in os.listdir(lib_dir))

def set_rpath(so_file, rpath='$ORIGIN'):
    """Set the rpath of *so_file* via patchelf, best-effort.

    Args:
        so_file: path of the ELF shared object to patch.
        rpath: rpath value to write; defaults to '$ORIGIN' so the object
            resolves sibling libraries from its own directory.

    Failures are printed rather than raised, matching the best-effort
    style of the surrounding helpers.
    """
    try:
        subprocess.check_call(['patchelf', '--set-rpath', rpath, str(so_file)])
        print(f"[✓] rpath set: {so_file}")
    except subprocess.CalledProcessError as e:
        print(f"[✗] Failed rpath: {so_file} - {e}")
    except FileNotFoundError:
        # patchelf binary is not installed at all — report, don't crash.
        print(f"[✗] Failed rpath: {so_file} - patchelf not found in PATH")

def get_ldd_lib_names(output):
    """Collect dependency names (left of "=>") from ldd output.

    Pseudo-entries are skipped: the linux-vdso virtual library and any
    absolute-path entries (e.g. the ld-linux loader).
    """
    names = []
    for row in output.splitlines():
        if "=>" not in row:
            continue
        candidate = row.split("=>")[0].strip()
        # Skip linux-vdso, the dynamic loader, and other special entries.
        if not candidate or candidate == "linux-vdso.so.1" or candidate.startswith("/"):
            continue
        names.append(candidate)
    return names

def fix_so_rpaths_in_lib_dir(lib_dir):
    """Best-effort rpath repair for every .so file in *lib_dir*.

    For each shared object whose ldd output shows a "not found"
    dependency that is present (by name prefix) in the same directory,
    set its rpath to $ORIGIN via patchelf. A second ldd run then
    verifies that every previously-resolved dependency is still listed;
    on mismatch the original file is restored from a backup.
    """
    lib_dir = Path(lib_dir)
    so_files = list(lib_dir.glob("*.so*"))
    for so_file in so_files:
        # Snapshot the dependency state before patching.
        before_ldd = run_ldd(so_file)
        before_libs = get_ldd_lib_names(before_ldd)
        missing_deps = parse_ldd_output(before_ldd)
        for dep in missing_deps:
            if lib_exists_in_dir(dep, lib_dir):
                print(f"❗ [!] {so_file.name} missing {dep} — attempting fix...")

                # Back up the file so a bad patch can be reverted.
                backup_path = so_file.with_suffix(so_file.suffix + ".bak")
                shutil.copy2(so_file, backup_path)
                print(f"📦  Backup created: {backup_path.name}")
                
                set_rpath(so_file)

                # After patching, check that the dependency list is still
                # consistent with the pre-patch state.
                after_ldd = run_ldd(so_file)
                after_libs = get_ldd_lib_names(after_ldd)
                
                # Every dependency resolved before must still appear after;
                # otherwise the patch broke something — restore the backup.
                if not set(before_libs).issubset(set(after_libs)):
                    print(f"🔁  [✗] Dependency mismatch after patch, reverting 🚫 {so_file.name}")
                    shutil.move(backup_path, so_file)
                else:
                    print(f"✅  [✓] rpath fixed and dependencies verified: {so_file.name}")
                    backup_path.unlink(missing_ok=True)
                
                break  # patch each .so at most once

def write_record_file(wheel_root):
    """Regenerate the wheel's RECORD file under the first *.dist-info dir.

    Each regular file is listed as "path,sha256=<urlsafe-b64>,<size>";
    the RECORD file itself is listed with empty hash and size fields,
    per the wheel format. Directories are skipped. Prints an error and
    returns if no .dist-info directory exists.
    """
    dist_info_dirs = list(wheel_root.glob("*.dist-info"))
    if not dist_info_dirs:
        print("[✗] No .dist-info directory found.")
        return
    record_path = dist_info_dirs[0] / "RECORD"

    print(f"[+] Generating new RECORD: {record_path.name}")
    with record_path.open('w', encoding='utf-8') as record_file:
        for entry in wheel_root.rglob("*"):
            posix_rel = entry.relative_to(wheel_root).as_posix()

            if entry == record_path:
                # RECORD itself gets empty hash and size fields.
                record_file.write(f"{posix_rel},,\n")
            elif entry.is_file():
                payload = entry.read_bytes()
                checksum = base64.urlsafe_b64encode(
                    hashlib.sha256(payload).digest()
                ).rstrip(b'=').decode('ascii')
                record_file.write(f"{posix_rel},sha256={checksum},{len(payload)}\n")

def process_whl(whl_path):
    """Repair a wheel file in place.

    Extracts the archive into a temporary directory, fixes rpaths for
    every bundled .libs directory, regenerates RECORD, and overwrites
    the original .whl with the repacked contents. Prints an error and
    returns early when the path is missing, not a .whl, or contains no
    .libs directory.
    """
    whl_path = Path(whl_path).resolve()
    if not (whl_path.exists() and whl_path.suffix == ".whl"):
        print(f"[✗] Not a valid .whl file: {whl_path}")
        return

    with tempfile.TemporaryDirectory() as workdir:
        workdir = Path(workdir)
        print(f"[+] Extracting {whl_path.name} ...")
        with zipfile.ZipFile(whl_path, 'r') as archive:
            archive.extractall(workdir)

        # Locate bundled library dirs: nested "*/.libs" or top-level "*.libs".
        lib_dirs = list(workdir.glob("*/.libs")) + list(workdir.glob("*.libs"))
        if not lib_dirs:
            print("[!] No .libs directory found in the wheel")
            return

        for lib_dir in lib_dirs:
            print(f"[+] Checking .so dependencies in {lib_dir} ...")
            fix_so_rpaths_in_lib_dir(lib_dir)

        # Rebuild RECORD so hashes/sizes match the patched contents.
        write_record_file(workdir)

        # Repack everything over the original wheel.
        print(f"[+] Repacking and overwriting: {whl_path.name} ...")
        with zipfile.ZipFile(whl_path, 'w', zipfile.ZIP_DEFLATED) as archive:
            for parent, _, filenames in os.walk(workdir):
                for filename in filenames:
                    abs_path = Path(parent) / filename
                    archive.write(abs_path, arcname=str(abs_path.relative_to(workdir)))

        print(f"[✅] Original wheel overwritten: {whl_path}")

if __name__ == "__main__":
    import argparse

    # CLI entry point: takes a single positional path to the wheel to fix.
    cli = argparse.ArgumentParser(
        description="Fix rpath inside .whl package for embedded .libs/"
    )
    cli.add_argument("whl", help="Path to .whl file")
    namespace = cli.parse_args()

    process_whl(namespace.whl)


# auditwheel repair \
#     --no-update-tags \
#     --disable-isa-ext-check \
#     --exclude 'libgio*.so*' \
#     --exclude 'libgdk*.so*' \
#     --exclude 'libgtk*.so*' \
#     opencv_python-4.12.0.88-cp312-cp312-linux_x86_64.whl
