//! Build script that generates tests from projects/ directory and benchmarks from benches/.
//! Each folder becomes a test module with comprehensive compiler phase tests.

use std::{
    env, fs,
    path::{Path, PathBuf},
};

use proc_macro2::TokenStream;
use quote::{format_ident, quote};
use walkdir::WalkDir;

/// Creates an `include_str!(r"path")` expression.
/// Since paths may contain backslashes on Windows, we need to be careful.
/// Using a regular string literal with the path works fine.
/// Builds an `include_str!("…")` token expression for `path`.
///
/// The path is emitted as an ordinary string literal; `syn::LitStr` handles
/// the escaping, so Windows backslash paths come out valid.
fn make_include_str(path: &str) -> TokenStream {
    let literal = syn::LitStr::new(path, proc_macro2::Span::call_site());
    quote! { include_str!(#literal) }
}

fn main() {
    // Re-run this build script whenever the test projects or benchmark
    // inputs change on disk.
    println!("cargo:rerun-if-changed=projects");
    println!("cargo:rerun-if-changed=benches");

    let out_dir = env::var("OUT_DIR").unwrap();
    let manifest_dir = env::var("CARGO_MANIFEST_DIR").unwrap();

    // Emit both generated sources into OUT_DIR.
    generate_tests(&out_dir, &manifest_dir);
    generate_benchmarks(&out_dir, &manifest_dir);
}

/// Writes `generated_tests.rs` into OUT_DIR: one test module per project
/// directory discovered under `projects/`.
fn generate_tests(out_dir: &str, manifest_dir: &str) {
    let projects_dir = Path::new(manifest_dir).join("projects");
    let dest_path = Path::new(out_dir).join("generated_tests.rs");

    // One module of tests per discovered project, concatenated.
    let test_modules: TokenStream = discover_projects(&projects_dir)
        .iter()
        .map(|project| generate_project_tests(project, manifest_dir))
        .collect();

    let header = "\
// Auto-generated tests from projects/
// Do not edit this file directly.
";

    write_formatted_code(&dest_path, test_modules, header);
}

/// Writes `generated_benchmarks.rs` into OUT_DIR from the `benches/` tree.
/// When `benches/` does not exist, a header-only file is written so the
/// downstream `include!` still succeeds.
fn generate_benchmarks(out_dir: &str, manifest_dir: &str) {
    let benches_dir = Path::new(manifest_dir).join("benches");
    let dest_path = Path::new(out_dir).join("generated_benchmarks.rs");

    let header = "\
// Auto-generated benchmarks from benches/
// Do not edit this file directly.
//
// Note: divan::Bencher, baml_db::*, and divan::black_box are already imported in the main file.
";

    if !benches_dir.exists() {
        // Nothing to benchmark — emit just the header.
        fs::write(&dest_path, header).unwrap();
        return;
    }

    let mut benchmarks = Vec::new();

    // `incremental/` holds before/after project pairs.
    let incremental_dir = benches_dir.join("incremental");
    if incremental_dir.exists() {
        discover_incremental_benchmarks(&incremental_dir, &mut benchmarks);
    }

    // `scale/` holds single large .baml inputs.
    let scale_dir = benches_dir.join("scale");
    if scale_dir.exists() {
        discover_scale_benchmarks(&scale_dir, &mut benchmarks);
    }

    let benchmark_fns: TokenStream = benchmarks.iter().map(generate_benchmark).collect();
    write_formatted_code(&dest_path, benchmark_fns, header);
}

/// Pretty-prints `code` with prettyplease and writes it to `path`, prefixed
/// with `header` (plain `//` header comments would be stripped if they went
/// through prettyplease, so the header is prepended after formatting).
///
/// # Panics
/// Panics with the offending path in the message if the generated tokens are
/// not valid Rust or the file cannot be written — both indicate a bug in
/// this build script or a broken build environment.
fn write_formatted_code(path: &Path, code: TokenStream, header: &str) {
    let code_string = code.to_string();
    let syntax_tree = syn::parse_file(&code_string).unwrap_or_else(|err| {
        panic!(
            "generated code for {} is not valid Rust: {err}",
            path.display()
        )
    });
    let formatted = prettyplease::unparse(&syntax_tree);

    // Prepend the header (doc comments that prettyplease would strip)
    let output = format!("{header}\n{formatted}");
    fs::write(path, output)
        .unwrap_or_else(|err| panic!("failed to write {}: {err}", path.display()));
}

// Test-related structures and functions
/// A directory under `projects/` that contains at least one `.baml` file;
/// each one becomes a generated `#[cfg(test)]` module.
struct TestProject {
    // Directory name; used for the module name (after `-` -> `_`) and the
    // snapshot subdirectory.
    name: String,
    #[allow(dead_code)]
    path: PathBuf,
    // All `.baml` files found under `path`, sorted by relative path.
    files: Vec<BamlFile>,
}

/// One `.baml` source file inside a test project.
struct BamlFile {
    // Identifier-safe name derived from the relative path (separators
    // replaced with `_`, ".baml" removed); used in generated test names.
    name: String,
    // Path relative to the project directory; used as the in-db file name.
    relative_path: PathBuf,
    // Absolute-ish path used for `include_str!` in the generated code.
    full_path: PathBuf,
}

/// Scans `projects_dir` for subdirectories containing `.baml` files and
/// returns them sorted by name. Missing directory yields an empty list.
fn discover_projects(projects_dir: &Path) -> Vec<TestProject> {
    if !projects_dir.exists() {
        return Vec::new();
    }

    let mut projects: Vec<TestProject> = fs::read_dir(projects_dir)
        .unwrap()
        .map(|entry| entry.unwrap().path())
        .filter(|path| path.is_dir())
        .filter_map(|path| {
            let name = path.file_name().unwrap().to_str().unwrap().to_string();
            let files = discover_baml_files(&path);
            // Directories without .baml sources are skipped entirely.
            (!files.is_empty()).then(|| TestProject { name, path, files })
        })
        .collect();

    // Deterministic ordering keeps the generated file stable across runs.
    projects.sort_by(|a, b| a.name.cmp(&b.name));
    projects
}

/// Recursively collects the `.baml` files under `dir`, sorted by relative
/// path for deterministic test generation.
///
/// Each file's `name` is made identifier-safe: the trailing `.baml`
/// extension is stripped and path separators become underscores.
fn discover_baml_files(dir: &Path) -> Vec<BamlFile> {
    let mut files = Vec::new();

    for entry in WalkDir::new(dir) {
        let entry = entry.unwrap();
        let path = entry.path();

        if path.extension().and_then(|s| s.to_str()) == Some("baml") {
            let relative_path = path.strip_prefix(dir).unwrap().to_path_buf();

            // Strip only the trailing ".baml" — a blanket replace would also
            // mangle any directory component that happens to contain ".baml".
            // The extension check above guarantees the suffix is present.
            let rel_str = relative_path.to_str().unwrap();
            let name = rel_str
                .strip_suffix(".baml")
                .unwrap_or(rel_str)
                .replace(['/', '\\'], "_");

            files.push(BamlFile {
                name,
                relative_path,
                full_path: path.to_path_buf(),
            });
        }
    }

    files.sort_by(|a, b| a.relative_path.cmp(&b.relative_path));
    files
}

/// Generates one `#[cfg(test)]` module for a project: per-file lexer and
/// parser tests plus whole-project HIR, THIR, diagnostics, and codegen
/// tests. Projects whose name starts with `parser_` additionally get
/// incremental-parsing, node-reuse, and lossless-tree tests.
fn generate_project_tests(project: &TestProject, manifest_dir: &str) -> TokenStream {
    // `-` is not valid in a Rust module identifier.
    let module_name = format_ident!("{}", project.name.replace("-", "_"));
    // Forward slashes keep the embedded snapshot path valid on Windows too.
    let snapshot_path = format!(
        "{}/snapshots/{}",
        manifest_dir.replace('\\', "/"),
        project.name
    );

    let lexer_tests: TokenStream = project.files.iter().map(generate_lexer_test).collect();

    let parser_tests: TokenStream = project.files.iter().map(generate_parser_test).collect();

    let hir_test = generate_hir_test(project);
    let thir_test = generate_thir_test(project);
    let diagnostics_test = generate_diagnostics_test(project);
    let codegen_test = generate_codegen_test(project);

    // Extra parser-robustness tests, only for parser_* fixture projects.
    let parser_specific_tests = if project.name.starts_with("parser_") {
        let incremental_tests: TokenStream = project
            .files
            .iter()
            .map(generate_incremental_parsing_test)
            .collect();

        let node_reuse_tests: TokenStream =
            project.files.iter().map(generate_node_reuse_test).collect();

        let tree_lossless_test = generate_tree_lossless_test(project);

        quote! {
            #incremental_tests
            #node_reuse_tests
            #tree_lossless_test
        }
    } else {
        quote! {}
    };

    quote! {
        #[cfg(test)]
        mod #module_name {
            use baml_db::*;
            use baml_db::baml_lexer;
            use baml_db::baml_parser;
            use baml_db::baml_hir;
            use baml_db::baml_thir;
            use baml_db::baml_codegen;
            use baml_hir::{function_body, function_signature};
            use baml_thir::{build_typing_context_from_files};
            use baml_thir::pretty::short_display;
            use baml_diagnostics::{render_name_error, render_parse_error, render_type_error};
            use std::collections::HashMap;
            use insta::{assert_snapshot, with_settings};
            use std::fmt::Write;
            use salsa::Setter;
            #[allow(unused_imports)]
            use crate::utils::*;
            const SNAPSHOT_PATH: &str = #snapshot_path;

            #lexer_tests
            #parser_tests
            #hir_test
            #thir_test
            #diagnostics_test
            #codegen_test
            #parser_specific_tests
        }
    }
}

/// Emits a per-file lexer test that snapshots every non-trivia token.
fn generate_lexer_test(baml_file: &BamlFile) -> TokenStream {
    let test_name = format_ident!("test_01_lexer_{}", baml_file.name);
    let snapshot_name = format!("01_lexer__{}", baml_file.name);
    let full_path = baml_file.full_path.display().to_string();
    let relative_path = baml_file.relative_path.display().to_string();
    let include_content = make_include_str(&full_path);

    quote! {
        #[test]
        fn #test_name() {
            let content = #include_content;
            // Normalize line endings for cross-platform compatibility
            let content = content.replace("\r\n", "\n");
            let mut db = RootDatabase::new();
            let source_file = db.add_file(#relative_path, &content);
            let tokens = baml_lexer::lex_file(&db, source_file);

            // Format tokens as readable text, skipping whitespace/newline trivia
            let mut output = String::new();
            for token in tokens.iter() {
                if !matches!(token.kind,
                    baml_lexer::TokenKind::Whitespace |
                    baml_lexer::TokenKind::Newline
                ) {
                    writeln!(output, "{:?} {:?}", token.kind, token.text).unwrap();
                }
            }

            with_settings!({snapshot_path => SNAPSHOT_PATH}, {
                assert_snapshot!(#snapshot_name, output);
            });
        }
    }
}

/// Emits a per-file parser test that snapshots the syntax tree together
/// with any parse errors (rendered without color).
fn generate_parser_test(baml_file: &BamlFile) -> TokenStream {
    let test_name = format_ident!("test_02_parser_{}", baml_file.name);
    let snapshot_name = format!("02_parser__{}", baml_file.name);
    let full_path = baml_file.full_path.display().to_string();
    let relative_path = baml_file.relative_path.display().to_string();
    let include_content = make_include_str(&full_path);

    quote! {
        #[test]
        fn #test_name() {
            let content = #include_content;
            // Normalize line endings for cross-platform compatibility
            let content = content.replace("\r\n", "\n");
            let mut db = RootDatabase::new();
            let mut sources = HashMap::new();
            let source_file = db.add_file(#relative_path, &content);
            // Error rendering needs the raw source keyed by file id.
            sources.insert(source_file.file_id(&db), content.clone());
            let tree = baml_parser::syntax_tree(&db, source_file);
            let errors = baml_parser::parse_errors(&db, source_file);

            let mut output = String::new();
            writeln!(output, "=== SYNTAX TREE ===").unwrap();
            write!(output, "{}", crate::format_syntax_tree(&tree)).unwrap();
            writeln!(output, "\n=== ERRORS ===").unwrap();
            if errors.is_empty() {
                writeln!(output, "None").unwrap();
            } else {
                for error in errors.iter() {
                    writeln!(output, "{}", render_parse_error(error, &sources, false)).unwrap();
                }
            }

            with_settings!({snapshot_path => SNAPSHOT_PATH}, {
                assert_snapshot!(#snapshot_name, output);
            });
        }
    }
}

/// Emits a single project-wide HIR test: loads every file into one database
/// and snapshots the formatted HIR items of each non-empty file.
fn generate_hir_test(project: &TestProject) -> TokenStream {
    // One block per file; each appends its formatted items to `output`.
    let file_loaders: TokenStream = project
        .files
        .iter()
        .map(|baml_file| {
            let full_path = baml_file.full_path.display().to_string();
            let relative_path = baml_file.relative_path.display().to_string();
            let include_content = make_include_str(&full_path);

            quote! {
                {
                    let content = #include_content;
                    let content = content.replace("\r\n", "\n");
                    let source_file = db.add_file(
                        #relative_path,
                        &content,
                    );
                    let items_struct = baml_hir::file_items(&db, source_file);
                    let items = items_struct.items(&db);
                    if !items.is_empty() {
                        let formatted = crate::format_hir_file(&db, source_file, items);
                        output.push_str(&formatted);
                    }
                }
            }
        })
        .collect();

    quote! {
        #[test]
        fn test_03_hir() {
            let mut db = RootDatabase::new();
            let mut output = String::new();
            writeln!(output, "=== HIR ITEMS ===").unwrap();

            #file_loaders

            // Only the header means no file contributed any items.
            if output.trim() == "=== HIR ITEMS ===" {
                writeln!(output, "No items found.").unwrap();
            }

            with_settings!({snapshot_path => SNAPSHOT_PATH}, {
                assert_snapshot!("03_hir", output);
            });
        }
    }
}

/// Emits a project-wide type-inference test: loads all files, registers them
/// on the project root, then snapshots each function's inferred return type
/// and any inference errors.
fn generate_thir_test(project: &TestProject) -> TokenStream {
    // One block per file; each pushes its SourceFile into `source_files`.
    let file_loaders: TokenStream = project
        .files
        .iter()
        .map(|baml_file| {
            let full_path = baml_file.full_path.display().to_string();
            let relative_path = baml_file.relative_path.display().to_string();
            let include_content = make_include_str(&full_path);

            quote! {
                {
                    let content = #include_content;
                    let content = content.replace("\r\n", "\n");
                    let sf = db.add_file(
                        #relative_path,
                        &content,
                    );
                    source_files.push(sf);
                }
            }
        })
        .collect();

    quote! {
        #[test]
        fn test_04_thir() {
            let mut db = RootDatabase::new();
            let root = db.set_project_root(std::path::PathBuf::from("."));
            let mut source_files = Vec::new();

            #file_loaders

            // Update project root with the list of files for proper Salsa tracking
            root.set_files(&mut db).to(source_files.clone());

            let mut output = String::new();
            writeln!(output, "=== TYPE INFERENCE ===").unwrap();

            // Build initial typing context with all function types
            let globals = build_typing_context_from_files(&db, &source_files);
            let class_fields = baml_thir::lower_project_class_fields(&db, root);

            // Iterate over files and their functions
            for source_file in &source_files {
                let items_struct = baml_hir::file_items(&db, *source_file);
                let items = items_struct.items(&db);
                for item in items.iter() {
                    if let baml_hir::ItemId::Function(func_id) = item {
                        let signature = function_signature(&db, *func_id);
                        let body = function_body(&db, *func_id);
                        let result = baml_thir::infer_function(&db, &signature, &body, Some(globals.clone()), Some(class_fields.clone()));

                        writeln!(output, "  Function {}:", signature.name).unwrap();
                        writeln!(output, "    Return: {:?}", result.return_type).unwrap();
                        if !result.errors.is_empty() {
                            writeln!(output, "    Errors:").unwrap();
                            for error in &result.errors {
                                writeln!(output, "      - {}", short_display(error)).unwrap();
                            }
                        }
                    }
                }
            }

            with_settings!({snapshot_path => SNAPSHOT_PATH}, {
                assert_snapshot!("04_thir", output);
            });
        }
    }
}

/// Emits a project-wide diagnostics test that gathers parse, name, and type
/// errors from every file and snapshots them tagged by phase.
fn generate_diagnostics_test(project: &TestProject) -> TokenStream {
    // One block per file: register the file, record its source text for
    // error rendering, and collect its parse errors immediately.
    let file_loaders: TokenStream = project
        .files
        .iter()
        .map(|baml_file| {
            let full_path = baml_file.full_path.display().to_string();
            let relative_path = baml_file.relative_path.display().to_string();
            let include_content = make_include_str(&full_path);

            quote! {
                {
                    let content = #include_content;
                    let content = content.replace("\r\n", "\n");
                    let source_file = db.add_file(
                        #relative_path,
                        &content,
                    );
                    sources.insert(source_file.file_id(&db), content.clone());
                    source_files.push(source_file);

                    let errors = baml_parser::parse_errors(&db, source_file);
                    for error in errors {
                        all_errors.push(("parse".to_string(), render_parse_error(&error, &sources, false)));
                    }
                }
            }
        })
        .collect();

    quote! {
        #[test]
        fn test_05_diagnostics() {
            let mut db = RootDatabase::new();
            let root = db.set_project_root(std::path::PathBuf::from("."));
            let mut sources = HashMap::new();
            let mut source_files = Vec::new();
            let mut all_errors = Vec::new();

            #file_loaders

            // Update project root with the list of files for proper Salsa tracking
            root.set_files(&mut db).to(source_files.clone());

            // Check for duplicate names
            for error in baml_hir::validate_duplicate_names(&db, root) {
                all_errors.push(("name".to_string(), render_name_error(&error, &sources, false)));
            }

            // Build typing context and run type inference
            let globals = build_typing_context_from_files(&db, &source_files);
            let class_fields = baml_thir::lower_project_class_fields(&db, root);
            for source_file in &source_files {
                let items_struct = baml_hir::file_items(&db, *source_file);
                let items = items_struct.items(&db);
                for item in items.iter() {
                    if let baml_hir::ItemId::Function(func_id) = item {
                        let signature = function_signature(&db, *func_id);
                        let body = function_body(&db, *func_id);
                        let result = baml_thir::infer_function(&db, &signature, &body, Some(globals.clone()), Some(class_fields.clone()));
                        for error in &result.errors {
                            all_errors.push(("type".to_string(), render_type_error(error, &sources, false)));
                        }
                    }
                }
            }

            let mut output = String::new();
            writeln!(output, "=== DIAGNOSTICS ===").unwrap();
            if all_errors.is_empty() {
                writeln!(output, "No errors found.").unwrap();
            } else {
                for (phase, message) in all_errors {
                    writeln!(output, "  [{}] {}", phase, message).unwrap();
                }
            }

            with_settings!({snapshot_path => SNAPSHOT_PATH}, {
                assert_snapshot!("05_diagnostics", output);
            });
        }
    }
}

/// Emits a project-wide codegen test: compiles all files to bytecode and
/// snapshots program stats plus the disassembly of every function, sorted
/// by name for stable output.
fn generate_codegen_test(project: &TestProject) -> TokenStream {
    // One block per file; each pushes its SourceFile into `source_files`.
    let file_loaders: TokenStream = project
        .files
        .iter()
        .map(|baml_file| {
            let full_path = baml_file.full_path.display().to_string();
            let relative_path = baml_file.relative_path.display().to_string();
            let include_content = make_include_str(&full_path);

            quote! {
                {
                    let content = #include_content;
                    let content = content.replace("\r\n", "\n");
                    let sf = db.add_file(
                        #relative_path,
                        &content,
                    );
                    source_files.push(sf);
                }
            }
        })
        .collect();

    quote! {
        #[test]
        fn test_06_codegen() {
            let mut db = RootDatabase::new();
            let _root = db.set_project_root(std::path::PathBuf::from("."));
            let mut source_files = Vec::new();

            #file_loaders

            let program = baml_codegen::compile_files(&db, &source_files);

            let mut output = String::new();
            writeln!(output, "=== BYTECODE ===").unwrap();
            writeln!(output, "Functions: {}", program.function_indices.len()).unwrap();
            writeln!(output, "Objects: {}", program.objects.len()).unwrap();
            writeln!(output, "Globals: {}", program.globals.len()).unwrap();

            // Show functions and their bytecode using debug formatting
            let mut func_names: Vec<_> = program.function_indices.keys().collect();
            func_names.sort();
            for func_name in func_names {
                if let Some(&idx) = program.function_indices.get(func_name)
                    && let Some(baml_codegen::Object::Function(func)) = program.objects.get(idx)
                {
                    writeln!(output, "\nFunction {} (arity: {}, kind: {:?}):", func_name, func.arity, func.kind).unwrap();
                    let bytecode_table = baml_vm::debug::display_bytecode(
                        func,
                        &[],  // Empty stack for static display
                        &program.objects,
                        &program.globals,
                    );
                    if bytecode_table.is_empty() {
                        writeln!(output, "  (no bytecode)").unwrap();
                    } else {
                        for line in bytecode_table.lines() {
                            writeln!(output, "  {}", line).unwrap();
                        }
                    }
                }
            }

            with_settings!({snapshot_path => SNAPSHOT_PATH}, {
                assert_snapshot!("06_codegen", output);
            });
        }
    }
}

// Parser-specific test generation functions
/// Emits a per-file test that parses the original source, then a copy with
/// one inserted character, and asserts both trees are structurally valid
/// (via `assert_no_panics` from the test utils).
fn generate_incremental_parsing_test(baml_file: &BamlFile) -> TokenStream {
    let test_name = format_ident!("test_07_incremental_{}", baml_file.name);
    let full_path = baml_file.full_path.display().to_string();
    let relative_path = baml_file.relative_path.display().to_string();
    let include_content = make_include_str(&full_path);

    quote! {
        #[test]
        fn #test_name() {
            let content = #include_content;
            let content = content.replace("\r\n", "\n");

            // Test single character edits maintain correctness
            let mut db = RootDatabase::new();
            let source_file = db.add_file(#relative_path, &content);
            let original_tree = baml_parser::syntax_tree(&db, source_file);

            // Test adding a character
            let modified = insert_char(&content, content.len() / 2, 'x');
            let modified_file = db.add_file("modified.baml", &modified);
            let modified_tree = baml_parser::syntax_tree(&db, modified_file);

            // Verify the trees are valid
            assert_no_panics(&original_tree);
            assert_no_panics(&modified_tree);
        }
    }
}

/// Emits a per-file node-reuse test. Currently a simplified check: it only
/// verifies both the original and single-character-edited trees parse
/// without panicking, not actual node reuse counts.
fn generate_node_reuse_test(baml_file: &BamlFile) -> TokenStream {
    let test_name = format_ident!("test_08_node_reuse_{}", baml_file.name);
    let full_path = baml_file.full_path.display().to_string();
    let relative_path = baml_file.relative_path.display().to_string();
    let include_content = make_include_str(&full_path);

    quote! {
        #[test]
        fn #test_name() {
            let content = #include_content;
            let content = content.replace("\r\n", "\n");

            // Measure node reuse for single character edit
            let mut db = RootDatabase::new();
            let source_file = db.add_file(#relative_path, &content);
            let original_tree = baml_parser::syntax_tree(&db, source_file);

            // Make a small edit
            let modified = insert_char(&content, content.len() / 2, 'a');
            let modified_file = db.add_file("modified.baml", &modified);
            let modified_tree = baml_parser::syntax_tree(&db, modified_file);

            // Measure reuse (this is a simplified check)
            // In a real implementation, you'd check actual node reuse
            assert_no_panics(&original_tree);
            assert_no_panics(&modified_tree);
        }
    }
}

/// Emits one test asserting every file's parse tree is lossless, i.e. the
/// tree reconstructs the original source text exactly.
fn generate_tree_lossless_test(project: &TestProject) -> TokenStream {
    // One check block per file, each with its own fresh database.
    let file_checks: TokenStream = project
        .files
        .iter()
        .map(|baml_file| {
            let full_path = baml_file.full_path.display().to_string();
            let relative_path = baml_file.relative_path.display().to_string();
            let include_content = make_include_str(&full_path);

            quote! {
                {
                    let content = #include_content;
                    let content = content.replace("\r\n", "\n");
                    let mut db = RootDatabase::new();
                    let source_file = db.add_file(#relative_path, &content);
                    let tree = baml_parser::syntax_tree(&db, source_file);
                    assert_tree_is_lossless(&tree, &content);
                }
            }
        })
        .collect();

    quote! {
        #[test]
        fn test_09_tree_lossless() {
            // Verify parse trees can reconstruct original source
            #file_checks
        }
    }
}

// Benchmark-related structures and functions
/// A benchmark discovered under `benches/`.
enum Benchmark {
    /// A `before/` + `after/` project pair measuring incremental
    /// recompilation after multi-file edits.
    IncrementalMultiFile {
        name: String,
        // All .baml files in before/ (initial project state).
        before_files: Vec<PathBuf>,
        // Updated/new .baml files in after/.
        after_files: Vec<PathBuf>,
        // File names marked via "*.baml.delete"; currently unused
        // (no remove_file API yet).
        delete_files: Vec<String>,
    },
    /// A single large .baml file measuring from-scratch compilation.
    Scale {
        name: String,
        path: PathBuf,
    },
}

/// Finds incremental benchmarks: subdirectories of `dir` containing both a
/// `before/` and an `after/` tree. `after/` may also hold `*.baml.delete`
/// marker files naming sources to remove.
fn discover_incremental_benchmarks(dir: &Path, benchmarks: &mut Vec<Benchmark>) {
    for entry in fs::read_dir(dir).unwrap() {
        let bench_dir = entry.unwrap().path();
        if !bench_dir.is_dir() {
            continue;
        }

        let before_dir = bench_dir.join("before");
        let after_dir = bench_dir.join("after");
        if !(before_dir.exists() && after_dir.exists()) {
            continue;
        }

        let name = bench_dir.file_name().unwrap().to_str().unwrap().to_string();

        // Everything under before/ forms the initial project state.
        let before_files = collect_baml_files(&before_dir);

        // after/ contributes updated/new sources plus deletion markers.
        let mut after_files = Vec::new();
        let mut delete_files = Vec::new();
        for entry in WalkDir::new(&after_dir) {
            let entry = entry.unwrap();
            let file_path = entry.path();

            if file_path.extension().and_then(|s| s.to_str()) == Some("baml") {
                after_files.push(file_path.to_path_buf());
            } else if let Some(file_name) = file_path.file_name().and_then(|s| s.to_str())
                && file_name.ends_with(".baml.delete")
            {
                // "<x>.baml.delete" marks "<x>.baml" for deletion.
                delete_files.push(file_name.strip_suffix(".delete").unwrap().to_string());
            }
        }

        benchmarks.push(Benchmark::IncrementalMultiFile {
            name,
            before_files,
            after_files,
            delete_files,
        });
    }
}

/// Registers one scale benchmark per `.baml` file found under `dir`,
/// named after the file's stem.
fn discover_scale_benchmarks(dir: &Path, benchmarks: &mut Vec<Benchmark>) {
    let baml_paths = WalkDir::new(dir)
        .into_iter()
        .map(|entry| entry.unwrap().into_path())
        .filter(|path| path.extension().and_then(|s| s.to_str()) == Some("baml"));

    for path in baml_paths {
        let name = path.file_stem().unwrap().to_str().unwrap().to_string();
        benchmarks.push(Benchmark::Scale { name, path });
    }
}

/// Recursively gathers every `.baml` file under `dir`, sorted so benchmark
/// generation is deterministic.
fn collect_baml_files(dir: &Path) -> Vec<PathBuf> {
    let mut files: Vec<PathBuf> = WalkDir::new(dir)
        .into_iter()
        .map(|entry| entry.unwrap().into_path())
        .filter(|path| path.extension().and_then(|s| s.to_str()) == Some("baml"))
        .collect();

    files.sort();
    files
}

/// Dispatches a discovered benchmark to the generator for its kind.
fn generate_benchmark(benchmark: &Benchmark) -> TokenStream {
    match benchmark {
        Benchmark::Scale { name, path } => generate_scale_benchmark(name, path),
        Benchmark::IncrementalMultiFile {
            name,
            before_files,
            after_files,
            delete_files,
        } => generate_incremental_benchmark(name, before_files, after_files, delete_files),
    }
}

/// Emits a divan benchmark measuring incremental recompilation: compile all
/// `before` files, re-add the `after` files with new content, and time the
/// second `project_items` call. `delete_files` is accepted but currently
/// ignored (no remove_file API yet).
fn generate_incremental_benchmark(
    name: &str,
    before_files: &[PathBuf],
    after_files: &[PathBuf],
    delete_files: &[String],
) -> TokenStream {
    let fn_name = format_ident!("bench_incremental_{}", name.replace("-", "_"));

    // Generate before file includes (raw + CRLF-normalized bindings)
    let before_includes: TokenStream = before_files
        .iter()
        .enumerate()
        .map(|(i, path)| {
            let var_raw = format_ident!("before_{}_raw", i);
            let var = format_ident!("before_{}", i);
            let path_str = path.display().to_string();
            let include_content = make_include_str(&path_str);
            quote! {
                let #var_raw = #include_content;
                let #var = #var_raw.replace("\r\n", "\n");
            }
        })
        .collect();

    // Generate after file includes (raw + CRLF-normalized bindings)
    let after_includes: TokenStream = after_files
        .iter()
        .enumerate()
        .map(|(i, path)| {
            let var_raw = format_ident!("after_{}_raw", i);
            let var = format_ident!("after_{}", i);
            let path_str = path.display().to_string();
            let include_content = make_include_str(&path_str);
            quote! {
                let #var_raw = #include_content;
                let #var = #var_raw.replace("\r\n", "\n");
            }
        })
        .collect();

    // Generate initial file loading; files are keyed by bare file name,
    // so before/ and after/ versions of the same file collide as intended.
    let initial_loads: TokenStream = before_files
        .iter()
        .enumerate()
        .map(|(i, path)| {
            let var = format_ident!("before_{}", i);
            let rel_path = path.file_name().unwrap().to_str().unwrap();
            quote! {
                db.add_file(#rel_path, &#var);
            }
        })
        .collect();

    // Generate incremental updates
    let incremental_updates: TokenStream = after_files
        .iter()
        .enumerate()
        .map(|(i, path)| {
            let var = format_ident!("after_{}", i);
            let filename = path.file_name().unwrap().to_str().unwrap();
            quote! {
                db.add_file(#filename, &#var);  // Updated/New file
            }
        })
        .collect();

    // Note: File deletions cannot be tested without remove_file API
    // so delete_files is unused but kept for future implementation
    let _ = delete_files;

    quote! {
        #[divan::bench]
        fn #fn_name(bencher: divan::Bencher) {
            #before_includes
            #after_includes

            bencher.bench_local(|| {
                let mut db = RootDatabase::new();
                let root = db.set_project_root(std::path::PathBuf::from("."));

                // Initial compilation
                #initial_loads
                let _ = baml_hir::project_items(&db, root);  // Full compilation

                // Apply incremental changes (re-add files with new content)
                #incremental_updates
                let _ = black_box(baml_hir::project_items(&db, root));  // Incremental compilation
            });
        }
    }
}

/// Emits a divan benchmark that compiles a single large `.baml` file from
/// scratch on every iteration (fresh database per run).
fn generate_scale_benchmark(name: &str, path: &Path) -> TokenStream {
    let fn_name = format_ident!("bench_scale_{}", name.replace("-", "_"));
    let path_str = path.display().to_string();
    // In-db file name; `name` is the file stem, so re-append the extension.
    let file_name = format!("{}.baml", name);
    let include_content = make_include_str(&path_str);

    quote! {
        #[divan::bench]
        fn #fn_name(bencher: divan::Bencher) {
            let content_raw = #include_content;
            let content = content_raw.replace("\r\n", "\n");

            bencher.bench_local(|| {
                let mut db = RootDatabase::new();
                let root = db.set_project_root(std::path::PathBuf::from("."));
                db.add_file(#file_name, &content);
                let _ = black_box(baml_hir::project_items(&db, root));
            });
        }
    }
}
