use std::env;
use std::path::PathBuf;

/// Cargo build-script entry point: dispatch to the platform-specific build
/// routine based on the compilation target triple.
fn main() {
    let target = env::var("TARGET").unwrap_or_default();
    // Any Apple triple (macOS or iOS) goes through the framework-based build.
    let is_apple = ["apple", "darwin", "ios"]
        .iter()
        .any(|needle| target.contains(needle));
    if is_apple {
        build_macos();
    } else {
        basebuild();
    }
}

/// Build configuration for non-Apple targets (desktop Linux and Android).
///
/// Locates the NCNN headers (via `NCNN_INCLUDE_DIR` or a hard-coded default),
/// emits the linker search path and link directives for NCNN, its glslang
/// dependencies, and the platform system libraries, then generates Rust
/// bindings for `c_api.h` (plus `gpu.h` on non-armv7 targets) into
/// `$OUT_DIR/bindings.rs`.
///
/// Panics if `OUT_DIR` is unset (cargo always sets it), if the headers cannot
/// be found, or if bindgen fails.
fn basebuild() {
    let out_path = PathBuf::from(env::var("OUT_DIR").unwrap());

    // Resolve the NCNN include path from the environment, falling back to a
    // default path kept consistent with the desktop library search path below.
    let ncnn_include_dir = env::var("NCNN_INCLUDE_DIR")
        .map(PathBuf::from)
        .unwrap_or_else(|_| {
            PathBuf::from("/mnt/f/work/openmmlab_wl/ncnn/build/install/include/ncnn")
        });

    // Fail fast with an actionable message if the required header is missing.
    if !ncnn_include_dir.join("c_api.h").exists() {
        panic!(
            "ERROR: NCNN include directory not found or invalid.\n\
             Tried: {}\n\
             Please either:\n\
             1. Set NCNN_INCLUDE_DIR environment variable: export NCNN_INCLUDE_DIR=/path/to/ncnn/include\n\
             2. Ensure default path exists: /mnt/f/work/openmmlab_wl/ncnn/build/install/include\n\
             3. Make sure c_api.h exists in the include directory",
            ncnn_include_dir.display()
        );
    }

    // NOTE(review): plain println! output from a build script is captured by
    // cargo and only shown with `-vv` or on failure; use `cargo:warning=` if
    // this message should always be visible.
    println!(
        "Using NCNN include directory: {}",
        ncnn_include_dir.display()
    );

    let target = env::var("TARGET").unwrap_or_default();

    if target.contains("android") {
        // All Android ABIs derive the library directory the same way, so the
        // former per-architecture if/else chain (identical in every branch)
        // is collapsed into a single expression.
        let ncnn_include_dir_str = ncnn_include_dir.to_str().unwrap();
        let ncnn_lib_path = ncnn_include_dir_str.replace("/include/ncnn", "/lib");
        println!("cargo:rustc-link-search=native={}", ncnn_lib_path);
    } else {
        // Desktop build: hard-coded library search path matching the default
        // include path above.
        println!("cargo:rustc-link-search=native=/mnt/f/work/openmmlab_wl/ncnn/build/install/lib");
    }

    // Link the NCNN main library.
    println!("cargo:rustc-link-lib=static=ncnn");

    // Link the glslang family of static libraries (Vulkan shader tooling
    // that NCNN depends on).
    println!("cargo:rustc-link-lib=static=glslang");
    println!("cargo:rustc-link-lib=static=MachineIndependent");
    println!("cargo:rustc-link-lib=static=OSDependent");
    println!("cargo:rustc-link-lib=static=SPIRV");
    println!("cargo:rustc-link-lib=static=GenericCodeGen");
    println!("cargo:rustc-link-lib=static=glslang-default-resource-limits");

    // System libraries — adjusted per platform.
    if target.contains("android") {
        println!("cargo:rustc-link-lib=dylib=m"); // math library
        println!("cargo:rustc-link-lib=dylib=c++_shared"); // C++ standard library
        // Android ships LLVM OpenMP (libomp), not GNU OpenMP (libgomp);
        // required when NCNN was compiled with OpenMP enabled.
        println!("cargo:rustc-link-lib=dylib=omp");

        // pthread is built into Android's libc — no explicit link needed.
    } else {
        // Desktop platform configuration.
        println!("cargo:rustc-link-lib=dylib=stdc++");
        println!("cargo:rustc-link-lib=dylib=m");
        println!("cargo:rustc-link-lib=dylib=gomp");
        println!("cargo:rustc-link-lib=pthread");
        println!("cargo:rustc-link-lib=dl");
    }

    // NOTE(review): emitting this would make cargo rerun the script when the
    // env var changes, but any `cargo:rerun-if-*` directive also disables the
    // default rerun-on-any-file-change behavior — confirm intent before
    // enabling.
    // println!("cargo:rerun-if-env-changed=NCNN_INCLUDE_DIR");
    let mut builder = bindgen::Builder::default()
        .header(format!("{}/c_api.h", ncnn_include_dir.display()))
        .clang_arg("-x")
        .clang_arg("c++")
        .clang_arg("-DNCNN_C_API=1")
        // NOTE(review): allowlisting a type literally named "regex" looks like
        // a leftover placeholder from the bindgen docs — confirm whether it
        // can be removed.
        .allowlist_type("regex")
        .allowlist_function("ncnn.*")
        .allowlist_var("NCNN.*")
        .allowlist_type("ncnn.*")
        .allowlist_function("ncnn_version")
        // Treat all std:: types as opaque and inject a minimal std_vector
        // stub so generated signatures referencing std::vector still compile.
        .opaque_type("std::.*")
        .blocklist_type("std::.*")
        .raw_line("use std::mem::ManuallyDrop;")
        .raw_line("#[repr(C)]")
        .raw_line("pub struct std_vector<T> {")
        .raw_line("    _unused: [u8; 0],")
        .raw_line("    _phantom: std::marker::PhantomData<T>,")
        .raw_line("}");

    // gpu.h is skipped on armv7 targets (no GPU bindings there).
    if !target.contains("armv7") {
        builder = builder.header(format!("{}/gpu.h", ncnn_include_dir.display()));
    }

    let bindings = builder.generate().expect("Unable to generate bindings");

    bindings
        .write_to_file(out_path.join("bindings.rs"))
        .expect("Couldn't write bindings!");
}

/// Build configuration for Apple targets (macOS / iOS).
///
/// Requires `NCNN_INCLUDE_DIR` to point at the NCNN framework headers — there
/// is no sensible default on macOS. Linking is handled elsewhere (ncnn_lib's
/// macos_build.rs), so this routine only generates the bindgen bindings for
/// `c_api.h` (and `gpu.h` unless the `cpu` feature is enabled) into
/// `$OUT_DIR/bindings.rs`.
///
/// Panics if `OUT_DIR` is unset, if `NCNN_INCLUDE_DIR` is unset or invalid,
/// or if bindgen fails.
fn build_macos() {
    let out_path = PathBuf::from(env::var("OUT_DIR").unwrap());
    // (The unused `TARGET` lookup from the original was removed — nothing in
    // this function branched on it.)

    // The environment variable is mandatory for framework-based builds.
    let ncnn_include_dir = env::var("NCNN_INCLUDE_DIR")
        .map(PathBuf::from)
        .unwrap_or_else(|_| {
            panic!(
                "ERROR: NCNN_INCLUDE_DIR environment variable not set.\n\
                 For macOS builds, please set NCNN_INCLUDE_DIR to point to the framework headers.\n\
                 Example: export NCNN_INCLUDE_DIR=/path/to/ncnn.framework/Headers/ncnn"
            );
        });

    // Fail fast with an actionable message if the required header is missing.
    if !ncnn_include_dir.join("c_api.h").exists() {
        panic!(
            "ERROR: NCNN include directory not found or invalid.\n\
             Tried: {}\n\
             Please set NCNN_INCLUDE_DIR to the correct framework headers path.\n\
             Make sure c_api.h exists in the include directory",
            ncnn_include_dir.display()
        );
    }

    println!(
        "Using NCNN include directory: {}",
        ncnn_include_dir.display()
    );

    // macOS links against frameworks; search paths and link directives are
    // emitted by ncnn_lib's macos_build.rs, not here.

    // Generate the bindings.
    let mut builder = bindgen::Builder::default()
        .header(format!("{}/c_api.h", ncnn_include_dir.display()))
        .clang_arg("-x")
        .clang_arg("c++")
        .clang_arg("-DNCNN_C_API=1")
        // NOTE(review): allowlisting a type literally named "regex" looks like
        // a leftover placeholder from the bindgen docs — confirm whether it
        // can be removed.
        .allowlist_type("regex")
        .allowlist_function("ncnn.*")
        .allowlist_var("NCNN.*")
        .allowlist_type("ncnn.*")
        .allowlist_function("ncnn_version")
        // Treat all std:: types as opaque and inject a minimal std_vector
        // stub so generated signatures referencing std::vector still compile.
        .opaque_type("std::.*")
        .blocklist_type("std::.*")
        .raw_line("use std::mem::ManuallyDrop;")
        .raw_line("#[repr(C)]")
        .raw_line("pub struct std_vector<T> {")
        .raw_line("    _unused: [u8; 0],")
        .raw_line("    _phantom: std::marker::PhantomData<T>,")
        .raw_line("}");

    // GPU headers are included by default on macOS; the `cpu` feature opts out.
    if !cfg!(feature = "cpu") {
        builder = builder.header(format!("{}/gpu.h", ncnn_include_dir.display()));
    }

    let bindings = builder.generate().expect("Unable to generate bindings");

    bindings
        .write_to_file(out_path.join("bindings.rs"))
        .expect("Couldn't write bindings!");

    eprintln!("✓ macOS NCNN bindings generated successfully");
}