#![allow(clippy::disallowed_types)]

use std::env::current_dir;
use std::fs;
use std::io::Cursor;
use std::path::PathBuf;

use anyhow::{bail, Context, Result};
use assert_fs::prelude::*;
use flate2::write::GzEncoder;
use fs_err::File;
use indoc::indoc;
use url::Url;
use wiremock::matchers::{method, path};
use wiremock::{Mock, MockServer, ResponseTemplate};

use uv_fs::Simplified;
use uv_static::EnvVars;

use crate::common::{download_to_disk, packse_index_url, uv_snapshot, TestContext};

/// Resolve a specific version of `anyio` from a `requirements.in` file.
#[test]
fn compile_requirements_in() -> Result<()> {
    let context = TestContext::new("3.12");
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str("anyio==3.7.0")?;

    // The output pins the direct requirement plus its transitive dependencies,
    // each annotated with the package(s) that pulled it in.
    uv_snapshot!(context
        .pip_compile()
        .arg("requirements.in"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.in
    anyio==3.7.0
        # via -r requirements.in
    idna==3.6
        # via anyio
    sniffio==1.3.1
        # via anyio

    ----- stderr -----
    Resolved 3 packages in [TIME]
    "###);

    Ok(())
}

/// Resolve a specific version of `anyio` from a `requirements.in` file with a `--annotation-style=line` flag.
///
/// With line-style annotations, each `# via ...` provenance comment is appended
/// to the requirement line itself instead of being placed on indented follow-up lines.
#[test]
fn compile_requirements_in_annotation_line() -> Result<()> {
    let context = TestContext::new("3.12");
    context
        .temp_dir
        .child("requirements.in")
        .write_str("anyio==3.7.0")?;

    uv_snapshot!(context
        .pip_compile()
        .arg("--annotation-style=line")
        .arg("requirements.in"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] --annotation-style=line requirements.in
    anyio==3.7.0              # via -r requirements.in
    idna==3.6                 # via anyio
    sniffio==1.3.1            # via anyio

    ----- stderr -----
    Resolved 3 packages in [TIME]
    "###);

    Ok(())
}

/// Resolve a specific version of `anyio` from a `requirements.in` file on stdin
/// when passed a path of `-`.
#[test]
fn compile_requirements_in_stdin() -> Result<()> {
    let context = TestContext::new("3.12");
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str("anyio==3.7.0")?;

    // Feed the file's contents through stdin rather than as a path argument;
    // the `-` positional tells `pip compile` to read requirements from stdin.
    uv_snapshot!(context
        .pip_compile()
        .stdin(fs::File::open(requirements_in)?)
        .arg("-"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] -
    anyio==3.7.0
    idna==3.6
        # via anyio
    sniffio==1.3.1
        # via anyio

    ----- stderr -----
    Resolved 3 packages in [TIME]
    "###);

    Ok(())
}

/// Attempting to compile a `requirements.in` that doesn't exist should fail with a
/// clear error, and must not create the file as a side effect.
#[test]
fn missing_requirements_in() {
    let context = TestContext::new("3.12");
    let absent_input = context.temp_dir.child("requirements.in");

    uv_snapshot!(context.filters(), context.pip_compile()
            .arg("requirements.in"), @r###"
    success: false
    exit_code: 2
    ----- stdout -----

    ----- stderr -----
    error: File not found: `requirements.in`
    "###
    );

    // The failed invocation must not have created the input file.
    absent_input.assert(predicates::path::missing());
}

/// `pip compile` should succeed even when the project's virtual environment has been
/// removed, and should not recreate it.
#[test]
fn missing_venv() -> Result<()> {
    let context = TestContext::new("3.12");
    context.temp_dir.child("requirements.in").touch()?;
    // Delete the venv that `TestContext` created to simulate a missing environment.
    fs_err::remove_dir_all(context.venv.path())?;

    uv_snapshot!(context.filters(), context.pip_compile()
            .arg("requirements.in"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.in

    ----- stderr -----
    warning: Requirements file `requirements.in` does not contain any dependencies
    Resolved in [TIME]
    "###
    );

    // Compiling must not have recreated the environment.
    context.venv.assert(predicates::path::missing());

    Ok(())
}

/// Compile with `--output-file` pointing at a pre-existing (empty) `requirements.txt`;
/// the existing file should be overwritten rather than causing an error.
#[test]
fn empty_output() -> Result<()> {
    let context = TestContext::new("3.12");
    context
        .temp_dir
        .child("requirements.in")
        .write_str("iniconfig==1.1.1")?;
    // Pre-create an empty output file to exercise the overwrite path.
    context.temp_dir.child("requirements.txt").touch()?;

    uv_snapshot!(context.filters(), context.pip_compile()
            .arg("requirements.in")
            .arg("--output-file")
            .arg("requirements.txt"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.in --output-file requirements.txt
    iniconfig==1.1.1
        # via -r requirements.in

    ----- stderr -----
    Resolved 1 package in [TIME]
    "###
    );

    Ok(())
}

/// Resolve a specific version of `anyio` from a `pyproject.toml` file.
#[test]
fn compile_pyproject_toml() -> Result<()> {
    let context = TestContext::new("3.12");
    let pyproject_toml = context.temp_dir.child("pyproject.toml");
    pyproject_toml.write_str(
        r#"[build-system]
requires = ["setuptools>=42"]

[project]
name = "project"
version = "0.1.0"
dependencies = [
    "anyio==3.7.0",
]
"#,
    )?;

    // Requirements sourced from `pyproject.toml` are annotated with the project
    // name rather than `-r <file>`.
    uv_snapshot!(context.filters(), context.pip_compile()
            .arg("pyproject.toml"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] pyproject.toml
    anyio==3.7.0
        # via project (pyproject.toml)
    idna==3.6
        # via anyio
    sniffio==1.3.1
        # via anyio

    ----- stderr -----
    Resolved 3 packages in [TIME]
    "###
    );

    Ok(())
}

/// Resolve a specific version of `anyio` from a `pyproject.toml` file. Despite the version being
/// dynamic, we shouldn't need to build the package, since the requirements are static.
#[test]
fn compile_pyproject_toml_dynamic_version() -> Result<()> {
    let context = TestContext::new("3.12");
    let pyproject_toml = context.temp_dir.child("pyproject.toml");
    // `version` is declared dynamic, but `dependencies` is static, so the
    // metadata can be read without invoking the build backend.
    pyproject_toml.write_str(
        r#"[build-system]
requires = ["setuptools>=42"]

[project]
name = "project"
dynamic = ["version"]
dependencies = [
    "anyio==3.7.0",
]
"#,
    )?;

    uv_snapshot!(context.filters(), context.pip_compile()
            .arg("pyproject.toml"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] pyproject.toml
    anyio==3.7.0
        # via project (pyproject.toml)
    idna==3.6
        # via anyio
    sniffio==1.3.1
        # via anyio

    ----- stderr -----
    Resolved 3 packages in [TIME]
    "###
    );

    Ok(())
}

/// Resolve a specific version of `anyio` from a `pyproject.toml` file with `--annotation-style=line`.
#[test]
fn compile_pyproject_toml_with_line_annotation() -> Result<()> {
    let context = TestContext::new("3.12");
    let pyproject_toml = context.temp_dir.child("pyproject.toml");
    pyproject_toml.write_str(
        r#"[build-system]
requires = ["setuptools>=42"]

[project]
name = "project"
version = "0.1.0"
dependencies = [
    "anyio==3.7.0",
]
"#,
    )?;

    // With line-style annotations, the `# via ...` provenance is appended to
    // each requirement line instead of indented follow-up lines.
    uv_snapshot!(context.filters(), context.pip_compile()
            .arg("--annotation-style=line")
            .arg("pyproject.toml"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] --annotation-style=line pyproject.toml
    anyio==3.7.0              # via project (pyproject.toml)
    idna==3.6                 # via anyio
    sniffio==1.3.1            # via anyio

    ----- stderr -----
    Resolved 3 packages in [TIME]
    "###
    );

    Ok(())
}

/// An invalid `tool.uv.sources` entry (a `workspace = true` reference for a package that
/// isn't a workspace member) should be rejected eagerly, before any build is attempted.
#[test]
fn compile_pyproject_toml_eager_validation() -> Result<()> {
    let context = TestContext::new("3.12");
    let pyproject_toml = context.temp_dir.child("pyproject.toml");
    pyproject_toml.write_str(indoc! {r#"
        [project]
        name = "project"
        dynamic = ["version"]
        requires-python = ">=3.10"
        dependencies = ["anyio==4.7.0"]

        [tool.uv.sources]
        anyio = { workspace = true }
    "#})?;

    // This should fail without attempting to build the package.
    uv_snapshot!(context
        .pip_compile()
        .arg("pyproject.toml"), @r###"
    success: false
    exit_code: 2
    ----- stdout -----

    ----- stderr -----
    error: Failed to parse entry: `anyio`
      Caused by: `anyio` references a workspace in `tool.uv.sources` (e.g., `anyio = { workspace = true }`), but is not a workspace member
    "###);

    Ok(())
}

/// Resolve a package from a `requirements.in` file, with a `constraints.txt` file.
#[test]
fn compile_constraints_txt() -> Result<()> {
    let context = TestContext::new("3.12");
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str("anyio==3.7.0")?;

    // Without the constraint, `idna` would resolve to 3.6 (see `compile_requirements_in`);
    // the constraint caps it below 3.4.
    let constraints_txt = context.temp_dir.child("constraints.txt");
    constraints_txt.write_str("idna<3.4")?;

    uv_snapshot!(context.filters(), context.pip_compile()
            .arg("requirements.in")
            .arg("--constraint")
            .arg("constraints.txt"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.in --constraint constraints.txt
    anyio==3.7.0
        # via -r requirements.in
    idna==3.3
        # via
        #   -c constraints.txt
        #   anyio
    sniffio==1.3.1
        # via anyio

    ----- stderr -----
    Resolved 3 packages in [TIME]
    "###
    );

    Ok(())
}

/// Resolve a package from a `requirements.in` file, with an inline constraint.
#[test]
fn compile_constraints_inline() -> Result<()> {
    let context = TestContext::new("3.12");
    let requirements_in = context.temp_dir.child("requirements.in");
    // Both lines must go into a single `write_str` call: `write_str` replaces the
    // file contents, so two consecutive calls would discard the `anyio` requirement
    // and leave only the `-c` line, making the test vacuous.
    requirements_in.write_str("anyio==3.7.0\n-c constraints.txt")?;

    let constraints_txt = context.temp_dir.child("constraints.txt");
    constraints_txt.write_str("idna<3.4")?;

    // The inline `-c` should be honored, capping `idna` below 3.4 just as a
    // `--constraint` flag would (compare `compile_constraints_txt`).
    uv_snapshot!(context.filters(), context.pip_compile()
            .arg("requirements.in"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.in
    anyio==3.7.0
        # via -r requirements.in
    idna==3.3
        # via
        #   -c constraints.txt
        #   anyio
    sniffio==1.3.1
        # via anyio

    ----- stderr -----
    Resolved 3 packages in [TIME]
    "###
    );

    Ok(())
}

/// Resolve a package from a `requirements.in` file, with a `constraints.txt` file that
/// uses markers.
#[test]
fn compile_constraints_markers() -> Result<()> {
    let context = TestContext::new("3.12");
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str("anyio")?;

    // Constrain a transitive dependency based on the Python version.
    // If markers were ignored, these two constraints would conflict.
    //
    // Both constraints must be written in a single `write_str` call: `write_str`
    // replaces the file contents, so consecutive calls would keep only the last
    // line and the conflicting-marker scenario would never be exercised.
    let constraints_txt = context.temp_dir.child("constraints.txt");
    constraints_txt
        .write_str("sniffio==1.2.0;python_version<='3.7'\nsniffio==1.3.0;python_version>'3.7'")?;

    uv_snapshot!(context.filters(), context.pip_compile()
            .arg("requirements.in")
            .arg("--constraint")
            .arg("constraints.txt"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.in --constraint constraints.txt
    anyio==4.3.0
        # via -r requirements.in
    idna==3.6
        # via anyio
    sniffio==1.3.0
        # via
        #   -c constraints.txt
        #   anyio

    ----- stderr -----
    Resolved 3 packages in [TIME]
    "###
    );

    Ok(())
}

/// Resolve a package from a `requirements.in` file, with a `constraints.txt` file that uses an
/// extra. The constraint should be enforced, but the extra should _not_ be included in the output
/// (though it currently _is_ included).
#[test]
fn compile_constraint_extra() -> Result<()> {
    let context = TestContext::new("3.12");
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str("flask")?;

    // Constrain the requested package itself, via a constraint that names an extra.
    let constraints_txt = context.temp_dir.child("constraints.txt");
    constraints_txt.write_str("flask[dotenv]<24.3.0")?;

    uv_snapshot!(context.filters(), context.pip_compile()
            .arg("requirements.in")
            .arg("--constraint")
            .arg("constraints.txt"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.in --constraint constraints.txt
    blinker==1.7.0
        # via flask
    click==8.1.7
        # via flask
    flask==3.0.2
        # via
        #   -c constraints.txt
        #   -r requirements.in
    itsdangerous==2.1.2
        # via flask
    jinja2==3.1.3
        # via flask
    markupsafe==2.1.5
        # via
        #   jinja2
        #   werkzeug
    werkzeug==3.0.1
        # via flask

    ----- stderr -----
    Resolved 7 packages in [TIME]
    "###
    );

    Ok(())
}

/// This is a regression test for a case where `uv pip compile --universal`
/// would include dependencies with marker expressions that always evaluate
/// to false (for example, `python_version < '0'`).
///
/// See: <https://github.com/astral-sh/uv/issues/8676>
#[test]
fn compile_constraints_omit_impossible_dependencies() -> Result<()> {
    let context = TestContext::new("3.12");
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str(
        "\
apache-airflow[microsoft.azure]==2.3.4
",
    )?;

    let constraints_txt = context.temp_dir.child("constraints.txt");
    constraints_txt.write_str(
        "\
apache-airflow-providers-microsoft-azure==4.2.0
click==8.1.3
colorama==0.4.5
msal-extensions==1.0.0
portalocker==2.5.1
",
    )?;

    // Extra output filters on top of the context defaults, to keep the snapshot
    // stable across platforms and installed Python versions.
    let filters: Vec<_> = [
        // 3.10 may not be installed
        (
            "warning: The requested Python version 3.10 is not available; .* will be used to build dependencies instead.\n",
            "",
        ),
        // These aren't used on Windows, so we filter them out.
        (".*colorama==.*\n", ""),
        (".*tzdata==.*\n", ""),
    ]
        .into_iter()
        .chain(context.filters())
        .collect();

    // `--universal` resolves for all platforms at once; no always-false-marker
    // entries should appear in the output.
    uv_snapshot!(filters, context.pip_compile()
            .arg("requirements.in")
            .arg("--constraint")
            .arg("constraints.txt")
            .arg("--annotation-style")
            .arg("line")
            .arg("--python-version")
            .arg("3.10")
            .arg("--universal"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.in --constraint constraints.txt --annotation-style line --python-version 3.10 --universal
    a2wsgi==1.10.4            # via connexion
    adal==1.2.7               # via azure-kusto-data, msrestazure
    aiohttp==3.9.3            # via apache-airflow-providers-http
    aiosignal==1.3.1          # via aiohttp
    alembic==1.13.1           # via apache-airflow
    anyio==4.3.0              # via httpx, starlette
    apache-airflow==2.3.4     # via apache-airflow-providers-microsoft-azure, -r requirements.in
    apache-airflow-providers-common-sql==1.4.0  # via apache-airflow-providers-sqlite
    apache-airflow-providers-ftp==3.3.1  # via apache-airflow
    apache-airflow-providers-http==4.3.0  # via apache-airflow
    apache-airflow-providers-imap==3.1.1  # via apache-airflow
    apache-airflow-providers-microsoft-azure==4.2.0  # via apache-airflow, -c constraints.txt
    apache-airflow-providers-sqlite==3.3.2  # via apache-airflow
    apispec==3.3.2            # via flask-appbuilder
    argcomplete==3.2.3        # via apache-airflow
    asgiref==3.8.1            # via apache-airflow-providers-http, connexion, flask
    async-timeout==4.0.3 ; python_full_version < '3.11'  # via aiohttp
    attrs==23.2.0             # via aiohttp, cattrs, jsonschema, referencing
    azure-batch==14.1.0       # via apache-airflow-providers-microsoft-azure
    azure-common==1.1.28      # via azure-batch, azure-mgmt-containerinstance, azure-mgmt-datafactory, azure-mgmt-datalake-store, azure-mgmt-resource, azure-storage-common, azure-storage-file
    azure-core==1.30.1        # via azure-cosmos, azure-identity, azure-keyvault-secrets, azure-mgmt-core, azure-servicebus, azure-storage-blob, msrest
    azure-cosmos==4.6.0       # via apache-airflow-providers-microsoft-azure
    azure-datalake-store==0.0.53  # via apache-airflow-providers-microsoft-azure
    azure-identity==1.15.0    # via apache-airflow-providers-microsoft-azure
    azure-keyvault-secrets==4.8.0  # via apache-airflow-providers-microsoft-azure
    azure-kusto-data==0.0.45  # via apache-airflow-providers-microsoft-azure
    azure-mgmt-containerinstance==1.5.0  # via apache-airflow-providers-microsoft-azure
    azure-mgmt-core==1.4.0    # via azure-mgmt-datafactory, azure-mgmt-resource
    azure-mgmt-datafactory==1.1.0  # via apache-airflow-providers-microsoft-azure
    azure-mgmt-datalake-nspkg==3.0.1  # via azure-mgmt-datalake-store
    azure-mgmt-datalake-store==0.5.0  # via apache-airflow-providers-microsoft-azure
    azure-mgmt-nspkg==3.0.2   # via azure-mgmt-datalake-nspkg
    azure-mgmt-resource==23.0.1  # via apache-airflow-providers-microsoft-azure
    azure-nspkg==3.0.2        # via azure-mgmt-nspkg
    azure-servicebus==7.12.1 ; platform_machine != 'aarch64'  # via apache-airflow-providers-microsoft-azure
    azure-storage-blob==12.8.1  # via apache-airflow-providers-microsoft-azure
    azure-storage-common==2.1.0  # via apache-airflow-providers-microsoft-azure, azure-storage-file
    azure-storage-file==2.1.0  # via apache-airflow-providers-microsoft-azure
    babel==2.14.0             # via flask-babel
    blinker==1.7.0            # via apache-airflow
    cachelib==0.9.0           # via flask-caching
    cattrs==23.2.3            # via apache-airflow
    certifi==2024.2.2         # via httpcore, httpx, msrest, requests
    cffi==1.16.0              # via azure-datalake-store, cryptography
    charset-normalizer==3.3.2  # via requests
    click==8.1.3              # via flask, flask-appbuilder, -c constraints.txt
    colorlog==4.8.0           # via apache-airflow
    connexion==3.0.6          # via apache-airflow
    cron-descriptor==1.4.3    # via apache-airflow
    croniter==2.0.3           # via apache-airflow
    cryptography==42.0.5      # via adal, apache-airflow, azure-identity, azure-storage-blob, azure-storage-common, msal, pyjwt
    deprecated==1.2.14        # via apache-airflow
    dill==0.3.8               # via apache-airflow
    dnspython==2.6.1          # via email-validator
    docutils==0.20.1          # via python-daemon
    email-validator==1.3.1    # via flask-appbuilder
    exceptiongroup==1.2.0 ; python_full_version < '3.11'  # via anyio, cattrs
    flask==2.2.5              # via apache-airflow, connexion, flask-appbuilder, flask-babel, flask-caching, flask-jwt-extended, flask-login, flask-session, flask-sqlalchemy, flask-wtf
    flask-appbuilder==4.1.3   # via apache-airflow
    flask-babel==2.0.0        # via flask-appbuilder
    flask-caching==2.1.0      # via apache-airflow
    flask-jwt-extended==4.6.0  # via flask-appbuilder
    flask-login==0.6.3        # via apache-airflow, flask-appbuilder
    flask-session==0.7.0      # via apache-airflow
    flask-sqlalchemy==2.5.1   # via flask-appbuilder
    flask-wtf==0.15.1         # via apache-airflow, flask-appbuilder
    frozenlist==1.4.1         # via aiohttp, aiosignal
    graphviz==0.20.3          # via apache-airflow
    greenlet==3.0.3 ; platform_machine == 'AMD64' or platform_machine == 'WIN32' or platform_machine == 'aarch64' or platform_machine == 'amd64' or platform_machine == 'ppc64le' or platform_machine == 'win32' or platform_machine == 'x86_64'  # via sqlalchemy
    gunicorn==21.2.0          # via apache-airflow
    h11==0.14.0               # via httpcore
    httpcore==1.0.4           # via httpx
    httpx==0.27.0             # via apache-airflow, connexion
    idna==3.6                 # via anyio, email-validator, httpx, requests, yarl
    inflection==0.5.1         # via connexion
    isodate==0.6.1            # via azure-keyvault-secrets, azure-mgmt-resource, azure-servicebus, msrest
    itsdangerous==2.1.2       # via apache-airflow, flask, flask-wtf
    jinja2==3.1.3             # via apache-airflow, connexion, flask, flask-babel, python-nvd3, swagger-ui-bundle
    jsonschema==4.21.1        # via apache-airflow, connexion, flask-appbuilder
    jsonschema-specifications==2023.12.[X]  # via jsonschema
    lazy-object-proxy==1.10.0  # via apache-airflow
    linkify-it-py==2.0.3      # via apache-airflow
    lockfile==0.12.2          # via apache-airflow, python-daemon
    mako==1.3.2               # via alembic
    markdown==3.6             # via apache-airflow
    markdown-it-py==3.0.0     # via apache-airflow, mdit-py-plugins, rich
    markupsafe==2.1.5         # via apache-airflow, jinja2, mako, werkzeug, wtforms
    marshmallow==3.21.1       # via flask-appbuilder, marshmallow-enum, marshmallow-oneofschema, marshmallow-sqlalchemy
    marshmallow-enum==1.5.1   # via flask-appbuilder
    marshmallow-oneofschema==3.1.1  # via apache-airflow
    marshmallow-sqlalchemy==0.26.1  # via flask-appbuilder
    mdit-py-plugins==0.4.0    # via apache-airflow
    mdurl==0.1.2              # via markdown-it-py
    msal==1.28.0              # via azure-datalake-store, azure-identity, msal-extensions
    msal-extensions==1.0.0    # via azure-identity, -c constraints.txt
    msgspec==0.18.6           # via flask-session
    msrest==0.7.1             # via azure-mgmt-containerinstance, azure-mgmt-datafactory, azure-storage-blob, msrestazure
    msrestazure==0.6.4        # via azure-batch, azure-kusto-data, azure-mgmt-containerinstance, azure-mgmt-datalake-store
    multidict==6.0.5          # via aiohttp, yarl
    oauthlib==3.2.2           # via requests-oauthlib
    packaging==24.0           # via apache-airflow, gunicorn, marshmallow
    pathspec==0.9.0           # via apache-airflow
    pendulum==3.0.0           # via apache-airflow
    pluggy==1.4.0             # via apache-airflow
    portalocker==2.5.1        # via msal-extensions, -c constraints.txt
    prison==0.2.1             # via flask-appbuilder
    psutil==5.9.8             # via apache-airflow
    pycparser==2.21           # via cffi
    pygments==2.17.2          # via apache-airflow, rich
    pyjwt==2.8.0              # via adal, apache-airflow, flask-appbuilder, flask-jwt-extended, msal
    python-daemon==3.0.1      # via apache-airflow
    python-dateutil==2.9.0.post0  # via adal, apache-airflow, azure-kusto-data, azure-storage-common, croniter, flask-appbuilder, pendulum, time-machine
    python-multipart==0.0.9   # via connexion
    python-nvd3==0.15.0       # via apache-airflow
    python-slugify==8.0.4     # via apache-airflow, python-nvd3
    pytz==2024.1              # via croniter, flask-babel
    pywin32==306 ; sys_platform == 'win32'  # via portalocker
    pyyaml==6.0.1             # via apispec, connexion
    referencing==0.34.0       # via jsonschema, jsonschema-specifications
    requests==2.31.0          # via adal, apache-airflow-providers-http, azure-core, azure-datalake-store, azure-kusto-data, azure-storage-common, connexion, msal, msrest, requests-oauthlib, requests-toolbelt
    requests-oauthlib==2.0.0  # via msrest
    requests-toolbelt==1.0.0  # via apache-airflow-providers-http
    rich==13.7.1              # via apache-airflow
    rpds-py==0.18.0           # via jsonschema, referencing
    setproctitle==1.3.3       # via apache-airflow
    setuptools==69.2.0        # via python-daemon
    six==1.16.0               # via azure-core, isodate, msrestazure, prison, python-dateutil
    sniffio==1.3.1            # via anyio, httpx
    sqlalchemy==1.4.52        # via alembic, apache-airflow, flask-appbuilder, flask-sqlalchemy, marshmallow-sqlalchemy, sqlalchemy-jsonfield, sqlalchemy-utils
    sqlalchemy-jsonfield==1.0.2  # via apache-airflow
    sqlalchemy-utils==0.41.2  # via flask-appbuilder
    sqlparse==0.4.4           # via apache-airflow-providers-common-sql
    starlette==0.37.2         # via connexion
    swagger-ui-bundle==1.1.0  # via connexion
    tabulate==0.9.0           # via apache-airflow
    tenacity==8.2.3           # via apache-airflow
    termcolor==2.4.0          # via apache-airflow
    text-unidecode==1.3       # via python-slugify
    time-machine==2.14.1 ; implementation_name != 'pypy'  # via pendulum
    typing-extensions==4.10.0  # via alembic, anyio, apache-airflow, asgiref, azure-core, azure-cosmos, azure-keyvault-secrets, azure-servicebus, cattrs, connexion
    uc-micro-py==1.0.3        # via linkify-it-py
    unicodecsv==0.14.1        # via apache-airflow
    urllib3==2.2.1            # via requests
    werkzeug==3.0.1           # via apache-airflow, connexion, flask, flask-jwt-extended, flask-login
    wrapt==1.16.0             # via deprecated
    wtforms==2.3.3            # via flask-appbuilder, flask-wtf
    yarl==1.9.4               # via aiohttp

    ----- stderr -----
    Resolved 149 packages in [TIME]
    "###
    );

    Ok(())
}

/// Resolve a package from an optional extra in a `pyproject.toml` file.
#[test]
fn compile_pyproject_toml_extra() -> Result<()> {
    let context = TestContext::new("3.12");
    let pyproject_toml = context.temp_dir.child("pyproject.toml");
    // The project has no base dependencies; `anyio` is only reachable via the
    // `foo` extra.
    pyproject_toml.write_str(
        r#"[build-system]
requires = ["setuptools>=42"]

[project]
name = "project"
version = "0.1.0"
dependencies = []
optional-dependencies.foo = [
    "anyio==3.7.0",
]
"#,
    )?;

    uv_snapshot!(context.filters(), context.pip_compile()
            .arg("pyproject.toml")
            .arg("--extra")
            .arg("foo"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] pyproject.toml --extra foo
    anyio==3.7.0
        # via project (pyproject.toml)
    idna==3.6
        # via anyio
    sniffio==1.3.1
        # via anyio

    ----- stderr -----
    Resolved 3 packages in [TIME]
    "###
    );

    Ok(())
}

/// Resolve a package from an extra with non-normalized names in a `pyproject.toml` file.
#[test]
fn compile_pyproject_toml_extra_name_normalization() -> Result<()> {
    let context = TestContext::new("3.12");
    let pyproject_toml = context.temp_dir.child("pyproject.toml");
    pyproject_toml.write_str(
        r#"[build-system]
requires = ["setuptools>=42"]

[project]
name = "project"
version = "0.1.0"
dependencies = []
optional-dependencies."FrIeNdLy-._.-bArD" = [
    "anyio==3.7.0",
]
"#,
    )?;

    // The requested extra differs in case and separator runs from the declared
    // one, but both normalize to the same name, so the lookup should succeed.
    uv_snapshot!(context.filters(), context.pip_compile()
            .arg("pyproject.toml")
            .arg("--extra")
            .arg("FRiENDlY-...-_-BARd"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] pyproject.toml --extra FRiENDlY-...-_-BARd
    anyio==3.7.0
        # via project (pyproject.toml)
    idna==3.6
        # via anyio
    sniffio==1.3.1
        # via anyio

    ----- stderr -----
    Resolved 3 packages in [TIME]
    "###
    );

    Ok(())
}

/// Request an extra that does not exist in a `pyproject.toml` file.
#[test]
fn compile_pyproject_toml_extra_missing() -> Result<()> {
    let context = TestContext::new("3.12");
    let pyproject_toml = context.temp_dir.child("pyproject.toml");
    pyproject_toml.write_str(
        r#"[build-system]
requires = ["setuptools>=42"]

[project]
name = "project"
version = "0.1.0"
dependencies = []
optional-dependencies.foo = [
    "anyio==3.7.0",
]
"#,
    )?;

    // Only `foo` is declared; requesting `bar` should produce a hard error.
    uv_snapshot!(context.filters(), context.pip_compile()
            .arg("pyproject.toml")
            .arg("--extra")
            .arg("bar"), @r###"
    success: false
    exit_code: 2
    ----- stdout -----

    ----- stderr -----
    error: Requested extra not found: bar
    "###
    );

    Ok(())
}

/// Compile a `pyproject.toml` file with a `poetry` section.
#[test]
fn compile_pyproject_toml_poetry() -> Result<()> {
    let context = TestContext::new("3.12");
    let pyproject_toml = context.temp_dir.child("pyproject.toml");
    // No `[project]` table: metadata lives entirely under `[tool.poetry]`, so
    // the requirements must be derived via the poetry-core build backend.
    pyproject_toml.write_str(
        r#"[tool.poetry]
name = "poetry-editable"
version = "0.1.0"
description = ""
authors = ["Astral Software Inc. <hey@astral.sh>"]

[tool.poetry.dependencies]
python = "^3.10"
anyio = "^3"
pytest = { version = "*", optional = true }

[tool.poetry.extras]
test = ["pytest"]

[build-system]
requires = ["poetry-core"]
build-backend = "poetry.core.masonry.api"
"#,
    )?;

    uv_snapshot!(context.filters(), context.pip_compile()
            .arg("pyproject.toml")
            .arg("--extra")
            .arg("test"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] pyproject.toml --extra test
    anyio==3.7.1
        # via poetry-editable (pyproject.toml)
    idna==3.6
        # via anyio
    iniconfig==2.0.0
        # via pytest
    packaging==24.0
        # via pytest
    pluggy==1.4.0
        # via pytest
    pytest==8.1.1
        # via poetry-editable (pyproject.toml)
    sniffio==1.3.1
        # via anyio

    ----- stderr -----
    Resolved 7 packages in [TIME]
    "###
    );

    Ok(())
}

/// Compile a `pyproject.toml` file with a `poetry` section and a `project` section without a
/// `dependencies` field, which should be treated as an empty list.
#[test]
fn compile_pyproject_toml_poetry_empty_dependencies() -> Result<()> {
    let context = TestContext::new("3.12");
    let pyproject_toml = context.temp_dir.child("pyproject.toml");
    // `[project]` declares no `dependencies`; the `[tool.poetry.dependencies]`
    // table (via the poetry-core backend) supplies `anyio`.
    pyproject_toml.write_str(
        r#"[project]
name = "poetry-editable"
version = "0.1.0"
description = ""
authors = ["Astral Software Inc. <hey@astral.sh>"]

[tool.poetry]
name = "poetry-editable"
version = "0.1.0"
description = ""
authors = ["Astral Software Inc. <hey@astral.sh>"]

[tool.poetry.dependencies]
python = "^3.10"
anyio = "^3"

[build-system]
requires = ["poetry-core"]
build-backend = "poetry.core.masonry.api"
"#,
    )?;

    uv_snapshot!(context.filters(), context.pip_compile()
            .arg("pyproject.toml"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] pyproject.toml
    anyio==3.7.1
        # via poetry-editable (pyproject.toml)
    idna==3.6
        # via anyio
    sniffio==1.3.1
        # via anyio

    ----- stderr -----
    Resolved 3 packages in [TIME]
    "###
    );

    Ok(())
}

/// Compile a `pyproject.toml` file with a `poetry` section and a `project` section with an invalid
/// `dependencies` field.
#[test]
fn compile_pyproject_toml_poetry_invalid_dependencies() -> Result<()> {
    let context = TestContext::new("3.12");
    let pyproject_toml = context.temp_dir.child("pyproject.toml");
    // `[project.dependencies]` as a TOML table is invalid: PEP 621 requires
    // `dependencies` to be an array of requirement strings, so parsing must fail.
    pyproject_toml.write_str(
        r#"[project]
name = "poetry-editable"
version = "0.1.0"
description = ""
authors = ["Astral Software Inc. <hey@astral.sh>"]

[tool.poetry]
name = "poetry-editable"
version = "0.1.0"
description = ""
authors = ["Astral Software Inc. <hey@astral.sh>"]

[project.dependencies]
python = "^3.12"
msgspec = "^0.18.4"

[tool.poetry.dependencies]
python = "^3.10"
anyio = "^3"

[build-system]
requires = ["poetry-core"]
build-backend = "poetry.core.masonry.api"
"#,
    )?;

    uv_snapshot!(context.filters(), context.pip_compile()
            .arg("pyproject.toml"), @r###"
    success: false
    exit_code: 2
    ----- stdout -----

    ----- stderr -----
    error: Failed to parse: `pyproject.toml`
      Caused by: TOML parse error at line 13, column 1
       |
    13 | [project.dependencies]
       | ^^^^^^^^^^^^^^^^^^^^^^
    invalid type: map, expected a sequence

    "###
    );

    Ok(())
}

/// Compile a `pyproject.toml` file that uses setuptools as the build backend.
#[test]
fn compile_pyproject_toml_setuptools() -> Result<()> {
    let context = TestContext::new("3.12");
    // `pyproject.toml` only selects the build backend; all metadata lives in
    // `setup.cfg` below.
    let pyproject_toml = context.temp_dir.child("pyproject.toml");
    pyproject_toml.write_str(
        r#"[build-system]
requires = ["setuptools"]
build-backend = "setuptools.build_meta"
"#,
    )?;

    let setup_cfg = context.temp_dir.child("setup.cfg");
    setup_cfg.write_str(
        r#"[options]
packages = find:
install_requires=
    anyio

[options.extras_require]
dev =
    iniconfig; python_version >= "3.7"
    mypy; python_version <= "3.8"
"#,
    )?;

    let setup_py = context.temp_dir.child("setup.py");
    setup_py.write_str(
        r#"# setup.py
from setuptools import setup


setup(
    name="dummypkg",
    description="A dummy package",
)
"#,
    )?;

    // `mypy` is absent from the output: its marker (`python_version <= "3.8"`)
    // does not match the 3.12 interpreter.
    uv_snapshot!(context.filters(), context.pip_compile()
            .arg("pyproject.toml")
            .arg("--extra")
            .arg("dev"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] pyproject.toml --extra dev
    anyio==4.3.0
        # via dummypkg (pyproject.toml)
    idna==3.6
        # via anyio
    iniconfig==2.0.0
        # via dummypkg (pyproject.toml)
    sniffio==1.3.1
        # via anyio

    ----- stderr -----
    Resolved 4 packages in [TIME]
    "###
    );

    Ok(())
}

/// Compile a `setup.cfg` file.
#[test]
fn compile_setup_cfg() -> Result<()> {
    let context = TestContext::new("3.12");

    // Unlike `compile_pyproject_toml_setuptools`, `setup.cfg` itself is passed
    // directly on the command line (no `pyproject.toml` is written).
    let setup_cfg = context.temp_dir.child("setup.cfg");
    setup_cfg.write_str(
        r#"[options]
packages = find:
install_requires=
    anyio

[options.extras_require]
dev =
    iniconfig; python_version >= "3.7"
    mypy; python_version <= "3.8"
"#,
    )?;

    let setup_py = context.temp_dir.child("setup.py");
    setup_py.write_str(
        r#"# setup.py
from setuptools import setup


setup(
    name="dummypkg",
    description="A dummy package",
)
"#,
    )?;

    // `mypy` is filtered out by its `python_version <= "3.8"` marker on 3.12.
    uv_snapshot!(context.filters(), context.pip_compile()
            .arg("setup.cfg")
            .arg("--extra")
            .arg("dev"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] setup.cfg --extra dev
    anyio==4.3.0
        # via dummypkg (setup.cfg)
    idna==3.6
        # via anyio
    iniconfig==2.0.0
        # via dummypkg (setup.cfg)
    sniffio==1.3.1
        # via anyio

    ----- stderr -----
    Resolved 4 packages in [TIME]
    "###
    );

    Ok(())
}

/// Compile a `setup.py` file.
#[test]
fn compile_setup_py() -> Result<()> {
    let context = TestContext::new("3.12");

    // All metadata, including the `dev` extra, is declared inline in
    // `setup.py` rather than in `setup.cfg`/`pyproject.toml`.
    let setup_py = context.temp_dir.child("setup.py");
    setup_py.write_str(
        r#"# setup.py
from setuptools import setup


setup(
    name="dummypkg",
    description="A dummy package",
    install_requires=["anyio"],
    extras_require={
        "dev": ["iniconfig; python_version >= '3.7'", "mypy; python_version <= '3.8'"],
    },
)
"#,
    )?;

    // `mypy` is filtered out by its `python_version <= '3.8'` marker on 3.12.
    uv_snapshot!(context.filters(), context.pip_compile()
            .arg("setup.py")
            .arg("--extra")
            .arg("dev"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] setup.py --extra dev
    anyio==4.3.0
        # via dummypkg (setup.py)
    idna==3.6
        # via anyio
    iniconfig==2.0.0
        # via dummypkg (setup.py)
    sniffio==1.3.1
        # via anyio

    ----- stderr -----
    Resolved 4 packages in [TIME]
    "###
    );

    Ok(())
}

/// Resolve a `pyproject.toml` file with an invalid project name.
#[test]
fn compile_pyproject_toml_invalid_name() -> Result<()> {
    let context = TestContext::new("3.12");
    let pyproject_toml = context.temp_dir.child("pyproject.toml");
    // `!project` is rejected: package names must start and end with a letter
    // or digit, so parsing the `name` field fails before any resolution.
    pyproject_toml.write_str(
        r#"[build-system]
requires = ["setuptools>=42"]

[project]
name = "!project"
dependencies = [
    "anyio==3.7.0",
]
"#,
    )?;

    uv_snapshot!(context.filters(), context.pip_compile()
            .arg("pyproject.toml"), @r###"
    success: false
    exit_code: 2
    ----- stdout -----

    ----- stderr -----
    error: Failed to parse: `pyproject.toml`
      Caused by: TOML parse error at line 5, column 8
      |
    5 | name = "!project"
      |        ^^^^^^^^^^
    Not a valid package or extra name: "!project". Names must start and end with a letter or digit and may only contain -, _, ., and alphanumeric characters.

    "###
    );

    Ok(())
}

/// Request multiple extras that do not exist in a `pyproject.toml` file.
#[test]
fn compile_pyproject_toml_extras_missing() -> Result<()> {
    let context = TestContext::new("3.12");
    let pyproject_toml = context.temp_dir.child("pyproject.toml");
    pyproject_toml.write_str(
        r#"[build-system]
requires = ["setuptools>=42"]

[project]
name = "project"
version = "0.1.0"
dependencies = []
optional-dependencies.foo = [
    "anyio==3.7.0",
]
"#,
    )?;

    // Only the unknown extras (`bar`, `foobar`) are reported; `foo` exists in
    // the project and is not part of the error.
    uv_snapshot!(context.filters(), context.pip_compile()
            .arg("pyproject.toml")
            .arg("--extra")
            .arg("foo")
            .arg("--extra")
            .arg("bar")
            .arg("--extra")
            .arg("foobar"), @r###"
    success: false
    exit_code: 2
    ----- stdout -----

    ----- stderr -----
    error: Requested extras not found: bar, foobar
    "###
    );

    Ok(())
}

/// Request extras when using a `requirements.in` file which does not support extras.
#[test]
fn compile_requirements_file_extra() -> Result<()> {
    let context = TestContext::new("3.12");
    context
        .temp_dir
        .child("requirements.in")
        .write_str("anyio==3.7.0")?;

    // Extras only make sense for project metadata files, so requesting
    // `--all-extras` against a plain requirements file is a usage error.
    let mut command = context.pip_compile();
    command.arg("requirements.in").arg("--all-extras");
    uv_snapshot!(context.filters(), command,
            @r###"
    success: false
    exit_code: 2
    ----- stdout -----

    ----- stderr -----
    error: Requesting extras requires a `pyproject.toml`, `setup.cfg`, or `setup.py` file.
    "###
    );

    Ok(())
}

/// Request an extra with a name that does not conform to the specification.
#[test]
fn invalid_extra_name() -> Result<()> {
    let context = TestContext::new("3.12");
    let pyproject_toml = context.temp_dir.child("pyproject.toml");
    pyproject_toml.write_str(
        r#"[build-system]
requires = ["setuptools>=42"]

[project]
name = "project"
dependencies = []
optional-dependencies.foo = [
    "anyio==3.7.0",
]
"#,
    )?;

    // The malformed extra name is rejected during CLI argument parsing (note
    // the `--help` hint), before the project file is ever read.
    uv_snapshot!(context.filters(), context.pip_compile()
            .arg("pyproject.toml")
            .arg("--extra")
            .arg("invalid name!"), @r###"
    success: false
    exit_code: 2
    ----- stdout -----

    ----- stderr -----
    error: invalid value 'invalid name!' for '--extra <EXTRA>': Extra names must start and end with a letter or digit and may only contain -, _, ., and alphanumeric characters

    For more information, try '--help'.
    "###
    );

    Ok(())
}

/// Resolve a specific version of Black at Python 3.12.
///
/// Exercises all four spellings of the interpreter request: `--python-version`,
/// the short `-p` flag, `--python`, and the `UV_PYTHON` environment variable.
#[test]
fn compile_python_312() -> Result<()> {
    let context = TestContext::new("3.12");
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str("black==23.10.1")?;

    uv_snapshot!(context.filters(), context.pip_compile()
            .arg("requirements.in")
            .arg("--python-version")
            .arg("3.12"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.in --python-version 3.12
    black==23.10.1
        # via -r requirements.in
    click==8.1.7
        # via black
    mypy-extensions==1.0.0
        # via black
    packaging==24.0
        # via black
    pathspec==0.12.1
        # via black
    platformdirs==4.2.0
        # via black

    ----- stderr -----
    Resolved 6 packages in [TIME]
    "###
    );

    // This should work with the short flag
    uv_snapshot!(context.filters(), context.pip_compile()
        .arg("requirements.in")
        .arg("-p")
        .arg("3.12"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.in -p 3.12
    black==23.10.1
        # via -r requirements.in
    click==8.1.7
        # via black
    mypy-extensions==1.0.0
        # via black
    packaging==24.0
        # via black
    pathspec==0.12.1
        # via black
    platformdirs==4.2.0
        # via black

    ----- stderr -----
    Resolved 6 packages in [TIME]
    "###
    );

    // And `--python`
    uv_snapshot!(context.filters(), context.pip_compile()
        .arg("requirements.in")
        .arg("--python")
        .arg("3.12"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.in --python 3.12
    black==23.10.1
        # via -r requirements.in
    click==8.1.7
        # via black
    mypy-extensions==1.0.0
        # via black
    packaging==24.0
        # via black
    pathspec==0.12.1
        # via black
    platformdirs==4.2.0
        # via black

    ----- stderr -----
    Resolved 6 packages in [TIME]
    "###
    );

    // And `UV_PYTHON` (note: the env var does not appear in the generated
    // command header, unlike the CLI flags above)
    uv_snapshot!(context.filters(), context.pip_compile()
        .arg("requirements.in")
        .env(EnvVars::UV_PYTHON, "3.12"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.in
    black==23.10.1
        # via -r requirements.in
    click==8.1.7
        # via black
    mypy-extensions==1.0.0
        # via black
    packaging==24.0
        # via black
    pathspec==0.12.1
        # via black
    platformdirs==4.2.0
        # via black

    ----- stderr -----
    Resolved 6 packages in [TIME]
    "###
    );

    Ok(())
}

/// Resolve a specific version of Black at Python 3.12 with `--annotation-style=line`.
#[test]
fn compile_python_312_annotation_line() -> Result<()> {
    let context = TestContext::new("3.12");
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str("black==23.10.1")?;

    // With the `line` style, each `# via …` annotation is appended to the
    // requirement line itself (column-aligned) instead of on its own line.
    uv_snapshot!(context.filters(), context.pip_compile()
            .arg("--annotation-style=line")
            .arg("requirements.in")
            .arg("--python-version")
            .arg("3.12"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] --annotation-style=line requirements.in --python-version 3.12
    black==23.10.1            # via -r requirements.in
    click==8.1.7              # via black
    mypy-extensions==1.0.0    # via black
    packaging==24.0           # via black
    pathspec==0.12.1          # via black
    platformdirs==4.2.0       # via black

    ----- stderr -----
    Resolved 6 packages in [TIME]
    "###
    );

    Ok(())
}

/// Compile for 3.12 when only a different interpreter version is available.
///
/// The available 3.10 interpreter should be used to build dependencies (with a
/// warning), for each spelling of the request (`--python-version`, `-p`,
/// `UV_PYTHON`, `--python`). Requests that cannot possibly be satisfied (an
/// unavailable implementation, or a version range) fail instead.
#[test]
fn compile_fallback_interpreter() -> Result<()> {
    let context = TestContext::new("3.10").with_filtered_python_sources();
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str("black==23.10.1")?;

    uv_snapshot!(context.filters(), context.pip_compile()
            .arg("requirements.in")
            .arg("--python-version")
            .arg("3.12"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.in --python-version 3.12
    black==23.10.[X]
        # via -r requirements.in
    click==8.1.7
        # via black
    mypy-extensions==1.0.0
        # via black
    packaging==24.0
        # via black
    pathspec==0.12.1
        # via black
    platformdirs==4.2.0
        # via black

    ----- stderr -----
    warning: The requested Python version 3.12 is not available; 3.10.[X] will be used to build dependencies instead.
    Resolved 6 packages in [TIME]
    "###
    );

    // This should work for the short flag too
    uv_snapshot!(context.filters(), context.pip_compile()
            .arg("requirements.in")
            .arg("-p")
            .arg("3.12"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.in -p 3.12
    black==23.10.[X]
        # via -r requirements.in
    click==8.1.7
        # via black
    mypy-extensions==1.0.0
        # via black
    packaging==24.0
        # via black
    pathspec==0.12.1
        # via black
    platformdirs==4.2.0
        # via black

    ----- stderr -----
    warning: The requested Python version 3.12 is not available; 3.10.[X] will be used to build dependencies instead.
    Resolved 6 packages in [TIME]
    "###
    );

    // And for `UV_PYTHON`
    uv_snapshot!(context.filters(), context.pip_compile()
            .arg("requirements.in")
            .env(EnvVars::UV_PYTHON, "3.12"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.in
    black==23.10.[X]
        # via -r requirements.in
    click==8.1.7
        # via black
    mypy-extensions==1.0.0
        # via black
    packaging==24.0
        # via black
    pathspec==0.12.1
        # via black
    platformdirs==4.2.0
        # via black

    ----- stderr -----
    warning: The requested Python version 3.12 is not available; 3.10.[X] will be used to build dependencies instead.
    Resolved 6 packages in [TIME]
    "###
    );

    // And for `--python`
    uv_snapshot!(context.filters(), context.pip_compile()
            .arg("requirements.in")
            .arg("--python")
            .arg("3.12"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.in --python 3.12
    black==23.10.[X]
        # via -r requirements.in
    click==8.1.7
        # via black
    mypy-extensions==1.0.0
        # via black
    packaging==24.0
        # via black
    pathspec==0.12.1
        # via black
    platformdirs==4.2.0
        # via black

    ----- stderr -----
    warning: The requested Python version 3.12 is not available; 3.10.[X] will be used to build dependencies instead.
    Resolved 6 packages in [TIME]
    "###
    );

    // We also allow requesting alternative implementations, but we should fail if we can't find it
    uv_snapshot!(context.filters(), context.pip_compile()
        .arg("requirements.in")
        .arg("-p")
        .arg("pypy"), @r###"
    success: false
    exit_code: 2
    ----- stdout -----

    ----- stderr -----
    error: No interpreter found for PyPy in [PYTHON SOURCES]
    "###
    );

    // Similarly, we fail if we receive a range request that cannot be satisfied
    uv_snapshot!(context.filters(), context.pip_compile()
            .arg("requirements.in")
            .arg("-p")
            .arg(">=3.12"), @r###"
    success: false
    exit_code: 2
    ----- stdout -----

    ----- stderr -----
    error: No interpreter found for Python >=3.12 in [PYTHON SOURCES]
    "###
    );

    Ok(())
}

/// Interactions between the different interpreter-selection options: `-p` and
/// `--python` conflict (they are the same clap argument), while `UV_PYTHON` is
/// silently overridden by any of the CLI spellings.
#[test]
fn compile_python_conflicts() -> Result<()> {
    let context = TestContext::new("3.12");
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str("black==23.10.1")?;

    // `-p` and `--python` cannot be used together
    uv_snapshot!(context.filters(), context.pip_compile()
        .arg("requirements.in")
        .arg("--python")
        .arg("3.12")
        .arg("-p")
        .arg("3.12"), @r"
    success: false
    exit_code: 2
    ----- stdout -----

    ----- stderr -----
    error: the argument '--python <PYTHON>' cannot be used multiple times

    Usage: uv pip compile [OPTIONS] <SRC_FILE|--group <GROUP>>

    For more information, try '--help'.
    "
    );

    // `UV_PYTHON` should be usable with `-p` (the flag wins)
    uv_snapshot!(context.filters(), context.pip_compile()
        .arg("requirements.in")
        .arg("-p")
        .arg("3.12")
        .env(EnvVars::UV_PYTHON, "3.11"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.in -p 3.12
    black==23.10.1
        # via -r requirements.in
    click==8.1.7
        # via black
    mypy-extensions==1.0.0
        # via black
    packaging==24.0
        # via black
    pathspec==0.12.1
        # via black
    platformdirs==4.2.0
        # via black

    ----- stderr -----
    Resolved 6 packages in [TIME]
    "###
    );

    // `UV_PYTHON` should be usable with `--python` (the flag wins)
    uv_snapshot!(context.filters(), context.pip_compile()
        .arg("requirements.in")
        .arg("--python")
        .arg("3.12")
        .env(EnvVars::UV_PYTHON, "3.11"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.in --python 3.12
    black==23.10.1
        # via -r requirements.in
    click==8.1.7
        # via black
    mypy-extensions==1.0.0
        # via black
    packaging==24.0
        # via black
    pathspec==0.12.1
        # via black
    platformdirs==4.2.0
        # via black

    ----- stderr -----
    Resolved 6 packages in [TIME]
    "###
    );

    // `UV_PYTHON` should be usable with `--python-version` (the flag wins)
    uv_snapshot!(context.filters(), context.pip_compile()
        .arg("requirements.in")
        .arg("--python-version")
        .arg("3.12")
        .env(EnvVars::UV_PYTHON, "3.11"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.in --python-version 3.12
    black==23.10.1
        # via -r requirements.in
    click==8.1.7
        # via black
    mypy-extensions==1.0.0
        # via black
    packaging==24.0
        # via black
    pathspec==0.12.1
        # via black
    platformdirs==4.2.0
        # via black

    ----- stderr -----
    Resolved 6 packages in [TIME]
    "###
    );

    Ok(())
}

/// The interpreter used to build source distributions (`-p`) may differ from
/// the resolution target (`--python-version`), and `UV_PYTHON` is ignored
/// whenever `--python-version` is given.
#[test]
fn compile_python_build_version_different_than_target() -> Result<()> {
    let context =
        TestContext::new_with_versions(&["3.12", "3.10", "3.11"]).with_filtered_python_sources();
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str("black==23.10.1")?;

    // The build interpreter can differ from the target version
    uv_snapshot!(context.filters(), context.pip_compile()
        .arg("requirements.in")
        .arg("--python-version")
        .arg("3.12")
        .arg("-p")
        .arg("3.11")
        .env_remove(EnvVars::VIRTUAL_ENV), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.in --python-version 3.12 -p 3.11
    black==23.10.[X]
        # via -r requirements.in
    click==8.1.7
        # via black
    mypy-extensions==1.0.0
        # via black
    packaging==24.0
        # via black
    pathspec==0.12.1
        # via black
    platformdirs==4.2.0
        # via black

    ----- stderr -----
    Resolved 6 packages in [TIME]
    "###
    );

    // The implementation-qualified form (`cpython@3.11`) is accepted too
    uv_snapshot!(context.filters(), context.pip_compile()
        .arg("requirements.in")
        .arg("--python-version")
        .arg("3.12")
        .arg("-p")
        .arg("cpython@3.11")
        .env_remove(EnvVars::VIRTUAL_ENV), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.in --python-version 3.12 -p cpython@3.11
    black==23.10.[X]
        # via -r requirements.in
    click==8.1.7
        # via black
    mypy-extensions==1.0.0
        # via black
    packaging==24.0
        # via black
    pathspec==0.12.1
        # via black
    platformdirs==4.2.0
        # via black

    ----- stderr -----
    Resolved 6 packages in [TIME]
    "###
    );

    // If we can't find the interpreter, we fail
    uv_snapshot!(context.filters(), context.pip_compile()
        .arg("requirements.in")
        .arg("--python-version")
        .arg("3.12")
        .arg("-p")
        .arg("pypy@3.11")
        .env_remove(EnvVars::VIRTUAL_ENV), @r###"
    success: false
    exit_code: 2
    ----- stdout -----

    ----- stderr -----
    error: No interpreter found for PyPy 3.11 in [PYTHON SOURCES]
    "###
    );

    uv_snapshot!(context.filters(), context.pip_compile()
        .arg("requirements.in")
        .arg("--python-version")
        .arg("3.12")
        .arg("-p")
        .arg("3.13")
        .env_remove(EnvVars::VIRTUAL_ENV), @r###"
    success: false
    exit_code: 2
    ----- stdout -----

    ----- stderr -----
    error: No interpreter found for Python 3.13 in [PYTHON SOURCES]
    "###
    );

    // `UV_PYTHON` is ignored if `--python-version` is set
    uv_snapshot!(context.filters(), context.pip_compile()
        .arg("requirements.in")
        .arg("--python-version")
        .arg("3.12")
        .env(EnvVars::UV_PYTHON, "3.11")
        .env_remove(EnvVars::VIRTUAL_ENV), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.in --python-version 3.12
    black==23.10.[X]
        # via -r requirements.in
    click==8.1.7
        # via black
    mypy-extensions==1.0.0
        # via black
    packaging==24.0
        # via black
    pathspec==0.12.1
        # via black
    platformdirs==4.2.0
        # via black

    ----- stderr -----
    Resolved 6 packages in [TIME]
    "###
    );

    // `UV_PYTHON` is ignored if `--python-version` is set, even when it
    // requests a different (unavailable) implementation
    uv_snapshot!(context.filters(), context.pip_compile()
        .arg("requirements.in")
        .arg("--python-version")
        .arg("3.12")
        .env(EnvVars::UV_PYTHON, "pypy")
        .env_remove(EnvVars::VIRTUAL_ENV), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.in --python-version 3.12
    black==23.10.[X]
        # via -r requirements.in
    click==8.1.7
        # via black
    mypy-extensions==1.0.0
        # via black
    packaging==24.0
        # via black
    pathspec==0.12.1
        # via black
    platformdirs==4.2.0
        # via black

    ----- stderr -----
    Resolved 6 packages in [TIME]
    "###
    );

    Ok(())
}

/// Compile for 3.12 when only a different interpreter version is available, there's also
/// a broken interpreter in the PATH.
///
/// Discovery must skip the broken `python3` shim and fall back to the context's
/// 3.10 interpreter (with a warning), rather than erroring out.
#[test]
#[cfg(unix)]
fn compile_fallback_interpreter_broken_in_path() -> Result<()> {
    use std::os::unix::fs::PermissionsExt;

    let context = TestContext::new("3.10");
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str("black==23.10.1")?;

    // Create a "broken" Python executable in the test context `bin`
    let contents = r"#!/bin/sh
    echo 'error: intentionally broken python executable' >&2
    exit 1";
    let python = context
        .bin_dir
        .join(format!("python3{}", std::env::consts::EXE_SUFFIX));
    fs_err::write(&python, contents).unwrap();

    // Make the shim executable so discovery actually attempts to run it.
    let mut perms = fs_err::metadata(&python).unwrap().permissions();
    perms.set_mode(0o755);
    fs_err::set_permissions(&python, perms).unwrap();

    uv_snapshot!(context.filters(), context.pip_compile()
            .arg("requirements.in")
            .arg("--python-version")
            .arg("3.12")
            // In tests, we ignore `PATH` during Python discovery so we need to add the context `bin`
            .env(EnvVars::UV_TEST_PYTHON_PATH, context.bin_dir.as_os_str()), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.in --python-version 3.12
    black==23.10.[X]
        # via -r requirements.in
    click==8.1.7
        # via black
    mypy-extensions==1.0.0
        # via black
    packaging==24.0
        # via black
    pathspec==0.12.1
        # via black
    platformdirs==4.2.0
        # via black

    ----- stderr -----
    warning: The requested Python version 3.12 is not available; 3.10.[X] will be used to build dependencies instead.
    Resolved 6 packages in [TIME]
    "###
    );

    Ok(())
}

/// Resolve a specific version of Black at Python 3.12 without deps.
#[test]
fn compile_python_312_no_deps() -> Result<()> {
    let context = TestContext::new("3.12");
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str("black==23.10.1")?;

    // With `--no-deps`, only `black` itself appears in the output — none of
    // its transitive dependencies are resolved.
    uv_snapshot!(context.filters(), context.pip_compile()
            .arg("requirements.in")
            .arg("--no-deps")
            .arg("--python-version")
            .arg("3.12"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.in --no-deps --python-version 3.12
    black==23.10.1
        # via -r requirements.in

    ----- stderr -----
    Resolved 1 package in [TIME]
    "###
    );

    Ok(())
}

/// Resolve a specific version of Black at Python 3.7.
#[test]
fn compile_python_37() -> Result<()> {
    let context = TestContext::new("3.12");
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str("black==23.10.1")?;

    // Extra filters strip environment-dependent warnings so the snapshot is
    // stable regardless of whether a 3.7 interpreter exists on the machine.
    let filters: Vec<_> = [
        // 3.7 may not be installed
        (
            "warning: The requested Python version 3.7 is not available; .* will be used to build dependencies instead.\n",
            "",
        ),
        (r"warning: uv is only compatible with Python 3\.8\+, found Python 3\.7.*\n", "")
    ]
        .into_iter()
        .chain(context.filters())
        .collect();

    // black 23.10.1 requires Python >=3.8, so a 3.7 target is unsatisfiable.
    uv_snapshot!(filters, context.pip_compile()
            .arg("requirements.in")
            .arg("--python-version")
            .arg("3.7"), @r###"
    success: false
    exit_code: 1
    ----- stdout -----

    ----- stderr -----
      × No solution found when resolving dependencies:
      ╰─▶ Because the requested Python version (>=3.7) does not satisfy Python>=3.8 and black==23.10.1 depends on Python>=3.8, we can conclude that black==23.10.1 cannot be used.
          And because you require black==23.10.1, we can conclude that your requirements are unsatisfiable.

          hint: The `--python-version` value (>=3.7) includes Python versions that are not supported by your dependencies (e.g., black==23.10.1 only supports >=3.8). Consider using a higher `--python-version` value.
    "###);

    Ok(())
}

/// Resolve a source distribution with `--resolution=lowest-direct`, to ensure that the build
/// requirements aren't resolved at their lowest compatible version.
#[test]
fn compile_sdist_resolution_lowest() -> Result<()> {
    let context = TestContext::new("3.12");
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str("anyio @ https://files.pythonhosted.org/packages/2d/b8/7333d87d5f03247215d86a86362fd3e324111788c6cdd8d2e6196a6ba833/anyio-4.2.0.tar.gz")?;

    // The direct URL requirement is echoed back verbatim; the transitive
    // dependencies (`idna`, `sniffio`) resolve to recent versions, showing
    // that `lowest-direct` applies only to direct requirements.
    uv_snapshot!(context.filters(), context.pip_compile()
            .arg("requirements.in")
            .arg("--resolution=lowest-direct")
            .arg("--python-version")
            .arg("3.12"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.in --resolution=lowest-direct --python-version 3.12
    anyio @ https://files.pythonhosted.org/packages/2d/b8/7333d87d5f03247215d86a86362fd3e324111788c6cdd8d2e6196a6ba833/anyio-4.2.0.tar.gz
        # via -r requirements.in
    idna==3.6
        # via anyio
    sniffio==1.3.1
        # via anyio

    ----- stderr -----
    Resolved 3 packages in [TIME]
    "###
    );

    Ok(())
}

/// Resolve a specific version of Black against an invalid Python version.
#[test]
fn compile_python_invalid_version() -> Result<()> {
    let context = TestContext::new("3.12");
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str("black==23.10.1")?;

    // `3.7.x` is not a parsable Python version, so argument validation
    // rejects it before any resolution begins.
    uv_snapshot!(context.filters(), context.pip_compile()
            .args(["requirements.in", "--python-version", "3.7.x"]), @r###"
    success: false
    exit_code: 2
    ----- stdout -----

    ----- stderr -----
    error: invalid value '3.7.x' for '--python-version <PYTHON_VERSION>': Python version `3.7.x` could not be parsed: after parsing `3.7`, found `.x`, which is not part of a valid version

    For more information, try '--help'.
    "###
    );

    Ok(())
}

/// Resolve a specific version of Black against a development Python version,
/// which is rejected during argument validation.
#[test]
fn compile_python_dev_version() -> Result<()> {
    let context = TestContext::new("3.12");
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str("black==23.10.1")?;

    // `3.7-dev` parses as a version but dev releases are not accepted as a
    // `--python-version` target.
    uv_snapshot!(context.filters(), context.pip_compile()
            .arg("requirements.in")
            .arg("--python-version")
            .arg("3.7-dev"), @r###"
    success: false
    exit_code: 2
    ----- stdout -----

    ----- stderr -----
    error: invalid value '3.7-dev' for '--python-version <PYTHON_VERSION>': Python version `3.7-dev` is a development release

    For more information, try '--help'.
    "###
    );

    Ok(())
}

/// Omit the constraint annotation (e.g., `# from -c constraints.txt`) when the constraint is not
/// applicable due to a marker expression.
#[test]
fn omit_non_matching_annotation() -> Result<()> {
    let context = TestContext::new("3.12");
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str("anyio")?;

    // The constraint's marker (`python_version < '3.7'`) does not match the
    // 3.12 interpreter, so `idna`'s annotation below lists only `anyio` and no
    // `-c constraints.txt` entry.
    let constraints_txt = context.temp_dir.child("constraints.txt");
    constraints_txt.write_str("idna <3.7; python_version < '3.7'")?;

    uv_snapshot!(context.filters(), context.pip_compile()
            .arg("requirements.in")
            .arg("-c")
            .arg("constraints.txt"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.in -c constraints.txt
    anyio==4.3.0
        # via -r requirements.in
    idna==3.6
        # via anyio
    sniffio==1.3.1
        # via anyio

    ----- stderr -----
    Resolved 3 packages in [TIME]
    "###
    );

    Ok(())
}

/// Test that we select the last 3.8 compatible numpy version instead of trying to compile an
/// incompatible sdist <https://github.com/astral-sh/uv/issues/388>
#[test]
fn compile_numpy_py38() -> Result<()> {
    let context = TestContext::new("3.8");

    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str("numpy")?;

    // `--no-build` forbids building sdists, so the resolver must pick a version
    // with a prebuilt wheel for 3.8 (1.24.4 per the snapshot below).
    uv_snapshot!(context.filters(), context.pip_compile()
            .arg("requirements.in")
            .arg("--no-build"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.in --no-build
    numpy==1.24.4
        # via -r requirements.in

    ----- stderr -----
    Resolved 1 package in [TIME]
    "###
    );

    Ok(())
}

/// Resolve a specific Flask wheel via a URL dependency.
///
/// The URL requirement is echoed verbatim in the output, while its transitive
/// dependencies resolve from the registry.
#[test]
fn compile_wheel_url_dependency() -> Result<()> {
    let context = TestContext::new("3.12");
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str("flask @ https://files.pythonhosted.org/packages/36/42/015c23096649b908c809c69388a805a571a3bea44362fe87e33fc3afa01f/flask-3.0.0-py3-none-any.whl")?;

    uv_snapshot!(context.filters(), context.pip_compile()
            .arg("requirements.in"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.in
    blinker==1.7.0
        # via flask
    click==8.1.7
        # via flask
    flask @ https://files.pythonhosted.org/packages/36/42/015c23096649b908c809c69388a805a571a3bea44362fe87e33fc3afa01f/flask-3.0.0-py3-none-any.whl
        # via -r requirements.in
    itsdangerous==2.1.2
        # via flask
    jinja2==3.1.3
        # via flask
    markupsafe==2.1.5
        # via
        #   jinja2
        #   werkzeug
    werkzeug==3.0.1
        # via flask

    ----- stderr -----
    Resolved 7 packages in [TIME]
    "###
    );

    Ok(())
}

/// Resolve a specific Flask source distribution via a URL dependency.
///
/// Exercises the `prepare_metadata_for_build_wheel` hooks.
#[test]
fn compile_sdist_url_dependency() -> Result<()> {
    let context = TestContext::new("3.12");
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str("flask @ https://files.pythonhosted.org/packages/d8/09/c1a7354d3925a3c6c8cfdebf4245bae67d633ffda1ba415add06ffc839c5/flask-3.0.0.tar.gz")?;

    // Same resolution as the wheel-URL test, but metadata must be extracted from
    // the sdist rather than read from a wheel.
    uv_snapshot!(context.filters(), context.pip_compile()
            .arg("requirements.in"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.in
    blinker==1.7.0
        # via flask
    click==8.1.7
        # via flask
    flask @ https://files.pythonhosted.org/packages/d8/09/c1a7354d3925a3c6c8cfdebf4245bae67d633ffda1ba415add06ffc839c5/flask-3.0.0.tar.gz
        # via -r requirements.in
    itsdangerous==2.1.2
        # via flask
    jinja2==3.1.3
        # via flask
    markupsafe==2.1.5
        # via
        #   jinja2
        #   werkzeug
    werkzeug==3.0.1
        # via flask

    ----- stderr -----
    Resolved 7 packages in [TIME]
    "###
    );

    Ok(())
}

/// Resolve a specific source distribution via a Git HTTPS dependency.
///
/// No ref is given, so the default branch's HEAD is used; its commit is filtered
/// out of the snapshot since it changes over time.
#[test]
#[cfg(feature = "git")]
fn compile_git_https_dependency() -> Result<()> {
    let context = TestContext::new("3.12");
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str(
        "uv-public-pypackage @ git+https://github.com/astral-test/uv-public-pypackage",
    )?;

    // In addition to the standard filters, remove the `main` commit, which will change frequently.
    let filters: Vec<_> = [(r"@(\d|\w){40}", "@[COMMIT]")]
        .into_iter()
        .chain(context.filters())
        .collect();

    uv_snapshot!(filters, context.pip_compile()
            .arg("requirements.in"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.in
    uv-public-pypackage @ git+https://github.com/astral-test/uv-public-pypackage@[COMMIT]
        # via -r requirements.in

    ----- stderr -----
    Resolved 1 package in [TIME]
    "###);

    Ok(())
}

/// Resolve a specific branch via a Git HTTPS dependency.
///
/// The `test-branch` ref is resolved to a full commit SHA in the output, pinning
/// the resolution.
#[test]
#[cfg(feature = "git")]
fn compile_git_branch_https_dependency() -> Result<()> {
    let context = TestContext::new("3.12");
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str(
        "uv-public-pypackage @ git+https://github.com/astral-test/uv-public-pypackage@test-branch",
    )?;

    uv_snapshot!(context.filters(), context.pip_compile()
            .arg("requirements.in"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.in
    uv-public-pypackage @ git+https://github.com/astral-test/uv-public-pypackage@0dacfd662c64cb4ceb16e6cf65a157a8b715b979
        # via -r requirements.in

    ----- stderr -----
    Resolved 1 package in [TIME]
    "###
    );

    Ok(())
}

/// Resolve a specific tag via a Git HTTPS dependency.
///
/// The `test-tag` ref is resolved to a full commit SHA in the output, pinning
/// the resolution.
#[test]
#[cfg(feature = "git")]
fn compile_git_tag_https_dependency() -> Result<()> {
    let context = TestContext::new("3.12");
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str(
        "uv-public-pypackage @ git+https://github.com/astral-test/uv-public-pypackage@test-tag",
    )?;

    uv_snapshot!(context.filters(), context.pip_compile()
            .arg("requirements.in"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.in
    uv-public-pypackage @ git+https://github.com/astral-test/uv-public-pypackage@0dacfd662c64cb4ceb16e6cf65a157a8b715b979
        # via -r requirements.in

    ----- stderr -----
    Resolved 1 package in [TIME]
    "###
    );

    Ok(())
}

/// Resolve a specific tag via a Git HTTPS dependency.
///
/// In this case, the tag is a date, and thus could feasibly refer to a short commit hash.
/// It must still be treated as a tag and resolved to the tagged commit.
#[test]
#[cfg(feature = "git")]
fn compile_git_date_tag_https_dependency() -> Result<()> {
    let context = TestContext::new("3.12");
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str(
        "uv-public-pypackage @ git+https://github.com/astral-test/uv-public-pypackage@20240402",
    )?;

    uv_snapshot!(context.filters(), context.pip_compile()
            .arg("requirements.in"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.in
    uv-public-pypackage @ git+https://github.com/astral-test/uv-public-pypackage@0dacfd662c64cb4ceb16e6cf65a157a8b715b979
        # via -r requirements.in

    ----- stderr -----
    Resolved 1 package in [TIME]
    "###
    );

    Ok(())
}

/// Resolve a specific commit via a Git HTTPS dependency.
///
/// The full 40-character SHA is already precise, so it appears unchanged in the output.
#[test]
#[cfg(feature = "git")]
fn compile_git_long_commit_https_dependency() -> Result<()> {
    let context = TestContext::new("3.12");
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str(
        "uv-public-pypackage @ git+https://github.com/astral-test/uv-public-pypackage@0dacfd662c64cb4ceb16e6cf65a157a8b715b979",
    )?;

    uv_snapshot!(context.filters(), context.pip_compile()
            .arg("requirements.in"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.in
    uv-public-pypackage @ git+https://github.com/astral-test/uv-public-pypackage@0dacfd662c64cb4ceb16e6cf65a157a8b715b979
        # via -r requirements.in

    ----- stderr -----
    Resolved 1 package in [TIME]
    "###
    );

    Ok(())
}

/// Resolve a specific commit via a Git HTTPS dependency.
///
/// The short SHA `0dacfd6` is expanded to the full 40-character commit in the output.
#[test]
#[cfg(feature = "git")]
fn compile_git_short_commit_https_dependency() -> Result<()> {
    let context = TestContext::new("3.12");
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str(
        "uv-public-pypackage @ git+https://github.com/astral-test/uv-public-pypackage@0dacfd6",
    )?;

    uv_snapshot!(context.filters(), context.pip_compile()
            .arg("requirements.in"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.in
    uv-public-pypackage @ git+https://github.com/astral-test/uv-public-pypackage@0dacfd662c64cb4ceb16e6cf65a157a8b715b979
        # via -r requirements.in

    ----- stderr -----
    Resolved 1 package in [TIME]
    "###
    );

    Ok(())
}

/// Resolve a specific ref via a Git HTTPS dependency.
///
/// `refs/pull/4/head` is a non-branch, non-tag ref; it is resolved to its commit SHA.
#[test]
#[cfg(feature = "git")]
fn compile_git_refs_https_dependency() -> Result<()> {
    let context = TestContext::new("3.12");
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in
        .write_str("uv-public-pypackage @ git+https://github.com/astral-test/uv-public-pypackage@refs/pull/4/head")?;

    uv_snapshot!(context.filters(), context.pip_compile()
            .arg("requirements.in"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.in
    uv-public-pypackage @ git+https://github.com/astral-test/uv-public-pypackage@9d01a806f17ddacb9c7b66b1b68574adf790b63f
        # via -r requirements.in

    ----- stderr -----
    Resolved 1 package in [TIME]
    "###
    );

    Ok(())
}

/// Resolve a specific Git dependency with a subdirectory.
///
/// The `#subdirectory=...` fragment selects the package root within the repository
/// and is preserved verbatim in the output.
#[test]
#[cfg(feature = "git")]
fn compile_git_subdirectory_dependency() -> Result<()> {
    let context = TestContext::new("3.12");
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str("example-pkg-a @ git+https://github.com/pypa/sample-namespace-packages.git@df7530eeb8fa0cb7dbb8ecb28363e8e36bfa2f45#subdirectory=pkg_resources/pkg_a")?;

    uv_snapshot!(context.filters(), context.pip_compile()
            .arg("requirements.in"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.in
    example-pkg-a @ git+https://github.com/pypa/sample-namespace-packages.git@df7530eeb8fa0cb7dbb8ecb28363e8e36bfa2f45#subdirectory=pkg_resources/pkg_a
        # via -r requirements.in

    ----- stderr -----
    Resolved 1 package in [TIME]
    "###
    );

    Ok(())
}

/// Resolve two packages from a `requirements.in` file with the same Git HTTPS dependency.
///
/// Both requirements point at the same repository and commit (different subdirectories),
/// exercising concurrent access to a single checkout.
#[test]
#[cfg(feature = "git")]
fn compile_git_concurrent_access() -> Result<()> {
    let context = TestContext::new("3.12");
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in
        .write_str("example-pkg-a @ git+https://github.com/pypa/sample-namespace-packages.git@df7530eeb8fa0cb7dbb8ecb28363e8e36bfa2f45#subdirectory=pkg_resources/pkg_a\nexample-pkg-b @ git+https://github.com/pypa/sample-namespace-packages.git@df7530eeb8fa0cb7dbb8ecb28363e8e36bfa2f45#subdirectory=pkg_resources/pkg_b")?;

    uv_snapshot!(context.filters(), context.pip_compile()
            .arg("requirements.in"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.in
    example-pkg-a @ git+https://github.com/pypa/sample-namespace-packages.git@df7530eeb8fa0cb7dbb8ecb28363e8e36bfa2f45#subdirectory=pkg_resources/pkg_a
        # via -r requirements.in
    example-pkg-b @ git+https://github.com/pypa/sample-namespace-packages.git@df7530eeb8fa0cb7dbb8ecb28363e8e36bfa2f45#subdirectory=pkg_resources/pkg_b
        # via -r requirements.in

    ----- stderr -----
    Resolved 2 packages in [TIME]
    "###
    );

    Ok(())
}

/// Resolve two packages from a `requirements.in` file with the same Git HTTPS dependency.
///
/// Unlike `compile_git_concurrent_access`, the requirements are unnamed (bare URLs);
/// the output shows the names (`example-pkg-a`/`example-pkg-b`) discovered during resolution.
#[test]
#[cfg(feature = "git")]
fn compile_git_unnamed_concurrent_access() -> Result<()> {
    let context = TestContext::new("3.12");
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in
        .write_str("git+https://github.com/pypa/sample-namespace-packages.git@df7530eeb8fa0cb7dbb8ecb28363e8e36bfa2f45#subdirectory=pkg_resources/pkg_a\ngit+https://github.com/pypa/sample-namespace-packages.git@df7530eeb8fa0cb7dbb8ecb28363e8e36bfa2f45#subdirectory=pkg_resources/pkg_b")?;

    uv_snapshot!(context.filters(), context.pip_compile()
            .arg("requirements.in"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.in
    example-pkg-a @ git+https://github.com/pypa/sample-namespace-packages.git@df7530eeb8fa0cb7dbb8ecb28363e8e36bfa2f45#subdirectory=pkg_resources/pkg_a
        # via -r requirements.in
    example-pkg-b @ git+https://github.com/pypa/sample-namespace-packages.git@df7530eeb8fa0cb7dbb8ecb28363e8e36bfa2f45#subdirectory=pkg_resources/pkg_b
        # via -r requirements.in

    ----- stderr -----
    Resolved 2 packages in [TIME]
    "###
    );

    Ok(())
}

/// Resolve a Git dependency with a declared name that differs from the true name of the package.
///
/// `dask @ <flask repo>` must fail with a metadata-name mismatch (exit code 1).
#[test]
#[cfg(feature = "git")]
fn compile_git_mismatched_name() -> Result<()> {
    let context = TestContext::new("3.12");
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in
        .write_str("flask @ git+https://github.com/pallets/flask.git@2.0.0\ndask @ git+https://github.com/pallets/flask.git@3.0.0")?;

    uv_snapshot!(context.filters(), context.pip_compile()
            .arg("requirements.in"), @r###"
    success: false
    exit_code: 1
    ----- stdout -----

    ----- stderr -----
      × Failed to download and build `dask @ git+https://github.com/pallets/flask.git@3.0.0`
      ╰─▶ Package metadata name `flask` does not match given name `dask`
    "###
    );

    Ok(())
}

/// Resolve a specific Git dependency with a subdirectory, where the root directory contains a
/// static `pyproject.toml` file.
///
/// The package must be read from the subdirectory, not from the repository root's metadata.
#[test]
#[cfg(feature = "git")]
fn compile_git_subdirectory_static_metadata() -> Result<()> {
    let context = TestContext::new("3.12");

    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str("uv-public-pypackage @ git+https://github.com/astral-test/uv-workspace-pypackage#subdirectory=uv-public-pypackage")?;

    uv_snapshot!(context.filters(), context.pip_compile()
            .arg("requirements.in"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.in
    uv-public-pypackage @ git+https://github.com/astral-test/uv-workspace-pypackage@b8c4e192456d736c27f2c84c61175c896dba8373#subdirectory=uv-public-pypackage
        # via -r requirements.in

    ----- stderr -----
    Resolved 1 package in [TIME]
    "###
    );

    Ok(())
}

/// Request Flask, but include a URL dependency for Werkzeug, which should avoid adding a
/// duplicate dependency from `PyPI`.
///
/// The output lists `werkzeug` once, at the URL, annotated as coming from both
/// the requirements file and `flask`.
#[test]
fn mixed_url_dependency() -> Result<()> {
    let context = TestContext::new("3.12");
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str("flask==3.0.0\nwerkzeug @ https://files.pythonhosted.org/packages/c3/fc/254c3e9b5feb89ff5b9076a23218dafbc99c96ac5941e900b71206e6313b/werkzeug-3.0.1-py3-none-any.whl")?;

    uv_snapshot!(context.filters(), context.pip_compile()
            .arg("requirements.in"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.in
    blinker==1.7.0
        # via flask
    click==8.1.7
        # via flask
    flask==3.0.0
        # via -r requirements.in
    itsdangerous==2.1.2
        # via flask
    jinja2==3.1.3
        # via flask
    markupsafe==2.1.5
        # via
        #   jinja2
        #   werkzeug
    werkzeug @ https://files.pythonhosted.org/packages/c3/fc/254c3e9b5feb89ff5b9076a23218dafbc99c96ac5941e900b71206e6313b/werkzeug-3.0.1-py3-none-any.whl
        # via
        #   -r requirements.in
        #   flask

    ----- stderr -----
    Resolved 7 packages in [TIME]
    "###
    );

    Ok(())
}

/// Request Werkzeug via both a version and a URL dependency at a _different_ version, which
/// should result in a conflict.
///
/// The URL pins 2.0.0, so the `werkzeug==3.0.0` specifier is unsatisfiable (exit code 1).
#[test]
fn conflicting_direct_url_dependency() -> Result<()> {
    let context = TestContext::new("3.12");
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str("werkzeug==3.0.0\nwerkzeug @ https://files.pythonhosted.org/packages/ff/1d/960bb4017c68674a1cb099534840f18d3def3ce44aed12b5ed8b78e0153e/Werkzeug-2.0.0-py3-none-any.whl")?;

    uv_snapshot!(context.filters(), context.pip_compile()
            .arg("requirements.in"), @r###"
    success: false
    exit_code: 1
    ----- stdout -----

    ----- stderr -----
      × No solution found when resolving dependencies:
      ╰─▶ Because there is no version of werkzeug==3.0.0 and you require werkzeug==3.0.0, we can conclude that your requirements are unsatisfiable.
    "###
    );

    Ok(())
}

/// Request Werkzeug via both a version and a URL dependency at _the same_ version, which
/// should prefer the direct URL dependency.
///
/// Only the URL form appears in the output.
#[test]
fn compatible_direct_url_dependency() -> Result<()> {
    let context = TestContext::new("3.12");
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str("werkzeug==2.0.0\nwerkzeug @ https://files.pythonhosted.org/packages/ff/1d/960bb4017c68674a1cb099534840f18d3def3ce44aed12b5ed8b78e0153e/Werkzeug-2.0.0-py3-none-any.whl")?;

    uv_snapshot!(context.filters(), context.pip_compile()
            .arg("requirements.in"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.in
    werkzeug @ https://files.pythonhosted.org/packages/ff/1d/960bb4017c68674a1cb099534840f18d3def3ce44aed12b5ed8b78e0153e/Werkzeug-2.0.0-py3-none-any.whl
        # via -r requirements.in

    ----- stderr -----
    Resolved 1 package in [TIME]
    "###
    );

    Ok(())
}

/// Request Werkzeug via two different URLs at different versions, which should result in a conflict.
///
/// Conflicting URLs are rejected before resolution (exit code 2, not a resolver failure).
#[test]
fn conflicting_repeated_url_dependency_version_mismatch() -> Result<()> {
    let context = TestContext::new("3.12");
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str("werkzeug @ https://files.pythonhosted.org/packages/bd/24/11c3ea5a7e866bf2d97f0501d0b4b1c9bbeade102bb4b588f0d2919a5212/Werkzeug-2.0.1-py3-none-any.whl\nwerkzeug @ https://files.pythonhosted.org/packages/ff/1d/960bb4017c68674a1cb099534840f18d3def3ce44aed12b5ed8b78e0153e/Werkzeug-2.0.0-py3-none-any.whl")?;

    uv_snapshot!(context.filters(), context.pip_compile()
            .arg("requirements.in"), @r###"
    success: false
    exit_code: 2
    ----- stdout -----

    ----- stderr -----
    error: Requirements contain conflicting URLs for package `werkzeug`:
    - https://files.pythonhosted.org/packages/bd/24/11c3ea5a7e866bf2d97f0501d0b4b1c9bbeade102bb4b588f0d2919a5212/Werkzeug-2.0.1-py3-none-any.whl
    - https://files.pythonhosted.org/packages/ff/1d/960bb4017c68674a1cb099534840f18d3def3ce44aed12b5ed8b78e0153e/Werkzeug-2.0.0-py3-none-any.whl
    "###
    );

    Ok(())
}

/// Request Werkzeug via two different URLs at different versions. However, only one of the
/// URLs is compatible with the requested Python version, so there shouldn't be any conflict.
///
/// On 3.12 only the `python_version >= '3.10'` requirement applies, so only the 2.0.1
/// wheel appears in the output.
#[test]
fn conflicting_repeated_url_dependency_markers() -> Result<()> {
    let context = TestContext::new("3.12");
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str(indoc! {r"
        werkzeug @ https://files.pythonhosted.org/packages/bd/24/11c3ea5a7e866bf2d97f0501d0b4b1c9bbeade102bb4b588f0d2919a5212/Werkzeug-2.0.1-py3-none-any.whl ; python_version >= '3.10'
        werkzeug @ https://files.pythonhosted.org/packages/ff/1d/960bb4017c68674a1cb099534840f18d3def3ce44aed12b5ed8b78e0153e/Werkzeug-2.0.0-py3-none-any.whl ; python_version < '3.10'
    "})?;

    uv_snapshot!(context.filters(), context.pip_compile()
            .arg("requirements.in"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.in
    werkzeug @ https://files.pythonhosted.org/packages/bd/24/11c3ea5a7e866bf2d97f0501d0b4b1c9bbeade102bb4b588f0d2919a5212/Werkzeug-2.0.1-py3-none-any.whl
        # via -r requirements.in

    ----- stderr -----
    Resolved 1 package in [TIME]
    "###
    );

    Ok(())
}

/// Request Werkzeug via two different URLs at the same version. Despite mapping to the same
/// version, it should still result in a conflict.
///
/// URL identity — not the resolved version — determines the conflict (exit code 2).
#[test]
#[cfg(feature = "git")]
fn conflicting_repeated_url_dependency_version_match() -> Result<()> {
    let context = TestContext::new("3.12");
    let requirements_in = context.temp_dir.child("requirements.in");

    requirements_in.write_str("werkzeug @ git+https://github.com/pallets/werkzeug.git@2.0.0\nwerkzeug @ https://files.pythonhosted.org/packages/ff/1d/960bb4017c68674a1cb099534840f18d3def3ce44aed12b5ed8b78e0153e/Werkzeug-2.0.0-py3-none-any.whl")?;

    uv_snapshot!(context.filters(), context.pip_compile()
            .arg("requirements.in"), @r###"
    success: false
    exit_code: 2
    ----- stdout -----

    ----- stderr -----
    error: Requirements contain conflicting URLs for package `werkzeug`:
    - git+https://github.com/pallets/werkzeug.git@2.0.0
    - https://files.pythonhosted.org/packages/ff/1d/960bb4017c68674a1cb099534840f18d3def3ce44aed12b5ed8b78e0153e/Werkzeug-2.0.0-py3-none-any.whl
    "###
    );

    Ok(())
}

/// Request Flask, but include a URL dependency for a conflicting version of Werkzeug.
///
/// The URL pins werkzeug at 2.0.0, but flask 3.0.0 requires werkzeug>=3.0.0 — no solution.
#[test]
fn conflicting_transitive_url_dependency() -> Result<()> {
    let context = TestContext::new("3.12");
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str("flask==3.0.0\nwerkzeug @ https://files.pythonhosted.org/packages/ff/1d/960bb4017c68674a1cb099534840f18d3def3ce44aed12b5ed8b78e0153e/Werkzeug-2.0.0-py3-none-any.whl")?;

    uv_snapshot!(context.filters(), context.pip_compile()
            .arg("requirements.in"), @r###"
    success: false
    exit_code: 1
    ----- stdout -----

    ----- stderr -----
      × No solution found when resolving dependencies:
      ╰─▶ Because only werkzeug<3.0.0 is available and flask==3.0.0 depends on werkzeug>=3.0.0, we can conclude that flask==3.0.0 cannot be used.
          And because you require flask==3.0.0, we can conclude that your requirements are unsatisfiable.
    "###
    );

    Ok(())
}

/// Request `uv-public-pypackage` via two different URLs which resolve to the same canonical version.
///
/// The URLs differ only by the `.git` suffix; both map to the same repo/ref and so
/// collapse to a single pinned commit.
#[cfg(feature = "git")]
#[test]
fn compatible_repeated_url_dependency() -> Result<()> {
    let context = TestContext::new("3.12");
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str(indoc! {r"
        uv-public-pypackage @ git+https://github.com/astral-test/uv-public-pypackage.git@0.0.2
        uv-public-pypackage @ git+https://github.com/astral-test/uv-public-pypackage@0.0.2
    "})?;

    uv_snapshot!(context.filters(), context.pip_compile()
            .arg("requirements.in"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.in
    uv-public-pypackage @ git+https://github.com/astral-test/uv-public-pypackage@b270df1a2fb5d012294e9aaf05e7e0bab1e6a389
        # via -r requirements.in

    ----- stderr -----
    Resolved 1 package in [TIME]
    "###
    );

    Ok(())
}

/// Request `uv-public-pypackage` via two different URLs which resolve to the same repository, but
/// different commits.
///
/// Same repository, different refs (`0.0.1` vs. `0.0.2`) — rejected as conflicting URLs.
#[cfg(feature = "git")]
#[test]
fn conflicting_repeated_url_dependency() -> Result<()> {
    let context = TestContext::new("3.12");
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str(indoc! {r"
        uv-public-pypackage @ git+https://github.com/astral-test/uv-public-pypackage.git@0.0.2
        uv-public-pypackage @ git+https://github.com/astral-test/uv-public-pypackage.git@0.0.1
    "})?;

    uv_snapshot!(context.filters(), context.pip_compile()
            .arg("requirements.in"), @r###"
    success: false
    exit_code: 2
    ----- stdout -----

    ----- stderr -----
    error: Requirements contain conflicting URLs for package `uv-public-pypackage`:
    - git+https://github.com/astral-test/uv-public-pypackage.git@0.0.1
    - git+https://github.com/astral-test/uv-public-pypackage.git@0.0.2
    "###
    );

    Ok(())
}

/// Request `uv-public-pypackage` via three different URLs: `0.0.2`, a short SHA, and a precise SHA.
/// All three are compatible, since they resolve to the same canonical version.
///
/// The output uses the most precise form: the full commit SHA.
#[cfg(feature = "git")]
#[test]
fn compatible_narrowed_url_dependency() -> Result<()> {
    let context = TestContext::new("3.12");
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str(indoc! {r"
        uv-public-pypackage @ git+https://github.com/astral-test/uv-public-pypackage.git@0.0.2
        uv-public-pypackage @ git+https://github.com/astral-test/uv-public-pypackage.git@b270df1
        uv-public-pypackage @ git+https://github.com/astral-test/uv-public-pypackage.git@b270df1a2fb5d012294e9aaf05e7e0bab1e6a389
    "})?;

    uv_snapshot!(context.filters(), context.pip_compile()
            .arg("requirements.in"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.in
    uv-public-pypackage @ git+https://github.com/astral-test/uv-public-pypackage.git@b270df1a2fb5d012294e9aaf05e7e0bab1e6a389
        # via -r requirements.in

    ----- stderr -----
    Resolved 1 package in [TIME]
    "###
    );

    Ok(())
}

/// Request `uv-public-pypackage` via three different URLs: a precise SHA, a short SHA, and `0.0.2`.
/// All three are compatible, since they resolve to the same canonical version.
///
/// Same as `compatible_narrowed_url_dependency`, but with the precise form listed first.
#[cfg(feature = "git")]
#[test]
fn compatible_broader_url_dependency() -> Result<()> {
    let context = TestContext::new("3.12");
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str(indoc! {r"
        uv-public-pypackage @ git+https://github.com/astral-test/uv-public-pypackage.git@b270df1a2fb5d012294e9aaf05e7e0bab1e6a389
        uv-public-pypackage @ git+https://github.com/astral-test/uv-public-pypackage.git@b270df1
        uv-public-pypackage @ git+https://github.com/astral-test/uv-public-pypackage.git@0.0.2
    "})?;

    uv_snapshot!(context.filters(), context.pip_compile()
            .arg("requirements.in"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.in
    uv-public-pypackage @ git+https://github.com/astral-test/uv-public-pypackage.git@b270df1a2fb5d012294e9aaf05e7e0bab1e6a389
        # via -r requirements.in

    ----- stderr -----
    Resolved 1 package in [TIME]
    "###
    );

    Ok(())
}

/// Request `uv-public-pypackage` via two different URLs: `0.0.2`, and a precise SHA, followed by
/// `0.0.2` again. All three are compatible, since they resolve to the same canonical version.
///
/// The precise SHA narrows the `0.0.2` requests; all collapse to one pinned commit.
// Gated like every other `git+https` test in this file: without the `git` feature
// the Git source support under test isn't compiled in.
#[cfg(feature = "git")]
#[test]
fn compatible_repeated_narrowed_url_dependency() -> Result<()> {
    let context = TestContext::new("3.12");
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str(indoc! {r"
        uv-public-pypackage @ git+https://github.com/astral-test/uv-public-pypackage.git@0.0.2
        uv-public-pypackage @ git+https://github.com/astral-test/uv-public-pypackage.git@b270df1a2fb5d012294e9aaf05e7e0bab1e6a389
        uv-public-pypackage @ git+https://github.com/astral-test/uv-public-pypackage.git@0.0.2
    "})?;

    uv_snapshot!(context.filters(), context.pip_compile()
            .arg("requirements.in"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.in
    uv-public-pypackage @ git+https://github.com/astral-test/uv-public-pypackage.git@b270df1a2fb5d012294e9aaf05e7e0bab1e6a389
        # via -r requirements.in

    ----- stderr -----
    Resolved 1 package in [TIME]
    "###
    );

    Ok(())
}

/// Request `uv-public-pypackage` via three different URLs: `0.0.2`, a precise SHA, and
/// `test-branch`.
///
/// Although `0.0.2` and the precise SHA resolve to the same canonical version, `test-branch`
/// resolves to a different version, so there should be a conflict.
#[cfg(feature = "git")]
#[test]
fn incompatible_narrowed_url_dependency() -> Result<()> {
    let context = TestContext::new("3.12");
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str(indoc! {r"
        uv-public-pypackage @ git+https://github.com/astral-test/uv-public-pypackage.git@0.0.2
        uv-public-pypackage @ git+https://github.com/astral-test/uv-public-pypackage@b270df1a2fb5d012294e9aaf05e7e0bab1e6a389
        uv-public-pypackage @ git+https://github.com/astral-test/uv-public-pypackage@test-branch
    "})?;

    // Only the two irreconcilable URLs are reported (exit code 2).
    uv_snapshot!(context.filters(), context.pip_compile()
            .arg("requirements.in"), @r###"
    success: false
    exit_code: 2
    ----- stdout -----

    ----- stderr -----
    error: Requirements contain conflicting URLs for package `uv-public-pypackage`:
    - git+https://github.com/astral-test/uv-public-pypackage@b270df1a2fb5d012294e9aaf05e7e0bab1e6a389
    - git+https://github.com/astral-test/uv-public-pypackage@test-branch
    "###
    );

    Ok(())
}

/// Request `hatchling_editable`, which depends on `iniconfig` via a direct Git URL
/// (`git+https://github.com/pytest-dev/iniconfig@9cae43103df70bac6fde7b9f35ad11a9f1be0cb4`).
/// The transitive Git dependency should resolve successfully.
#[test]
#[cfg(feature = "git")]
fn allowed_transitive_git_dependency() -> Result<()> {
    let context = TestContext::new("3.12");

    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str("hatchling_editable @ https://github.com/astral-sh/uv/files/14762645/hatchling_editable.zip")?;

    uv_snapshot!(context.filters(), context.pip_compile()
        .arg("requirements.in"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.in
    hatchling-editable @ https://github.com/astral-sh/uv/files/14762645/hatchling_editable.zip
        # via -r requirements.in
    iniconfig @ git+https://github.com/pytest-dev/iniconfig@9cae43103df70bac6fde7b9f35ad11a9f1be0cb4
        # via hatchling-editable

    ----- stderr -----
    Resolved 2 packages in [TIME]
    "###
    );

    Ok(())
}

/// Request `hatchling_editable`, which depends on `iniconfig` via a direct Git URL.
/// Since that URL is also declared as a constraint, we should accept it.
#[test]
#[cfg(feature = "git")]
fn allowed_transitive_url_dependency() -> Result<()> {
    let context = TestContext::new("3.12");

    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str("hatchling_editable @ https://github.com/astral-sh/uv/files/14762645/hatchling_editable.zip")?;

    // The constraint pins `iniconfig` to the exact URL the transitive dependency uses.
    let constraints_txt = context.temp_dir.child("constraints.txt");
    constraints_txt.write_str("iniconfig @ git+https://github.com/pytest-dev/iniconfig@9cae43103df70bac6fde7b9f35ad11a9f1be0cb4")?;

    uv_snapshot!(context.filters(), context.pip_compile()
        .arg("requirements.in")
        .arg("--constraint")
        .arg("constraints.txt"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.in --constraint constraints.txt
    hatchling-editable @ https://github.com/astral-sh/uv/files/14762645/hatchling_editable.zip
        # via -r requirements.in
    iniconfig @ git+https://github.com/pytest-dev/iniconfig@9cae43103df70bac6fde7b9f35ad11a9f1be0cb4
        # via
        #   -c constraints.txt
        #   hatchling-editable

    ----- stderr -----
    Resolved 2 packages in [TIME]
    "###
    );

    Ok(())
}

/// Request `hatchling_editable`, which depends on
/// `iniconfig @ git+https://github.com/pytest-dev/iniconfig@9cae43103df70bac6fde7b9f35ad11a9f1be0cb4`.
/// The constraint uses the same repository with a `.git` suffix; since both map to the same
/// canonical URL, we should accept it.
#[test]
#[cfg(feature = "git")]
fn allowed_transitive_canonical_url_dependency() -> Result<()> {
    let context = TestContext::new("3.12");

    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str("hatchling_editable @ https://github.com/astral-sh/uv/files/14762645/hatchling_editable.zip")?;

    // Note the `.git` suffix: textually different from the transitive dependency's URL,
    // but canonically equivalent.
    let constraints_txt = context.temp_dir.child("constraints.txt");
    constraints_txt.write_str("iniconfig @ git+https://github.com/pytest-dev/iniconfig.git@9cae43103df70bac6fde7b9f35ad11a9f1be0cb4")?;

    uv_snapshot!(context.filters(), context.pip_compile()
        .arg("requirements.in")
        .arg("--constraint")
        .arg("constraints.txt"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.in --constraint constraints.txt
    hatchling-editable @ https://github.com/astral-sh/uv/files/14762645/hatchling_editable.zip
        # via -r requirements.in
    iniconfig @ git+https://github.com/pytest-dev/iniconfig.git@9cae43103df70bac6fde7b9f35ad11a9f1be0cb4
        # via
        #   -c constraints.txt
        #   hatchling-editable

    ----- stderr -----
    Resolved 2 packages in [TIME]
    "###
    );

    Ok(())
}

/// Request `hatchling_editable` as a local path dependency (supplied via the `HATCH_PATH`
/// environment variable). Its transitive Git dependency on `iniconfig` should be accepted,
/// since `hatchling_editable` is a path (local) dependency.
#[test]
#[cfg(feature = "git")]
fn allowed_transitive_url_path_dependency() -> Result<()> {
    let context = TestContext::new("3.12");

    let requirements_in = context.temp_dir.child("requirements.in");
    // `${HATCH_PATH}` is expanded from the environment at resolve time.
    requirements_in.write_str("hatchling_editable @ ${HATCH_PATH}")?;

    let hatchling_path = current_dir()?.join("../../scripts/packages/hatchling_editable");
    uv_snapshot!(context.filters(), context.pip_compile()
        .arg("requirements.in")
        .env(EnvVars::HATCH_PATH, hatchling_path.as_os_str()), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.in
    ${HATCH_PATH}
        # via -r requirements.in
    iniconfig @ git+https://github.com/pytest-dev/iniconfig@9cae43103df70bac6fde7b9f35ad11a9f1be0cb4
        # via hatchling-editable

    ----- stderr -----
    Resolved 2 packages in [TIME]
    "###
    );

    Ok(())
}

/// A dependency with conflicting URLs in `requirements.in` and `constraints.txt` should be ignored
/// if the dependency has an override.
#[test]
fn requirement_constraint_override_url() -> Result<()> {
    let context = TestContext::new("3.12");

    // Requirement: anyio as a source-distribution URL.
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str("anyio @ https://files.pythonhosted.org/packages/db/4d/3970183622f0330d3c23d9b8a5f52e365e50381fd484d08e3285104333d3/anyio-4.3.0.tar.gz")?;

    // Constraint: the same version, but as a (conflicting) wheel URL.
    let constraints_txt = context.temp_dir.child("constraints.txt");
    constraints_txt.write_str("anyio @ https://files.pythonhosted.org/packages/14/fd/2f20c40b45e4fb4324834aea24bd4afdf1143390242c0b33774da0e2e34f/anyio-4.3.0-py3-none-any.whl")?;

    // Override: a registry version, which supersedes both URLs.
    let overrides_txt = context.temp_dir.child("overrides.txt");
    overrides_txt.write_str("anyio==3.7.0")?;

    uv_snapshot!(context.filters(), context.pip_compile()
        .arg("requirements.in")
        .arg("--constraint")
        .arg("constraints.txt")
        .arg("--override")
        .arg("overrides.txt"), @r###"
    success: false
    exit_code: 1
    ----- stdout -----

    ----- stderr -----
      × No solution found when resolving dependencies:
      ╰─▶ Because there is no version of anyio==3.7.0 and you require anyio==3.7.0, we can conclude that your requirements are unsatisfiable.
    "###
    );

    Ok(())
}

/// A dependency that uses a pre-release marker in `requirements.in` should be overridden by a
/// non-pre-release version in `overrides.txt`. We should _not_ allow Flask to be resolved to
/// a pre-release version.
#[test]
fn requirement_override_prerelease() -> Result<()> {
    let context = TestContext::new("3.12");

    // `<2.0.0rc4` mentions a pre-release, which would normally opt Flask into pre-releases.
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str("flask<2.0.0rc4")?;

    // The override replaces that specifier with a release-only range.
    let overrides_txt = context.temp_dir.child("overrides.txt");
    overrides_txt.write_str("flask<2.0.1,!=2.0.0")?;

    uv_snapshot!(context.filters(), context.pip_compile()
        .arg("requirements.in")
        .arg("--override")
        .arg("overrides.txt"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.in --override overrides.txt
    click==7.1.2
        # via flask
    flask==1.1.4
        # via
        #   --override overrides.txt
        #   -r requirements.in
    itsdangerous==1.1.0
        # via flask
    jinja2==2.11.3
        # via flask
    markupsafe==2.1.5
        # via jinja2
    werkzeug==1.0.1
        # via flask

    ----- stderr -----
    Resolved 6 packages in [TIME]
    "###
    );

    Ok(())
}

/// Resolve packages from all extras in a `pyproject.toml` file.
#[test]
fn compile_pyproject_toml_all_extras() -> Result<()> {
    let context = TestContext::new("3.12");
    let pyproject_toml = context.temp_dir.child("pyproject.toml");
    // Two optional-dependency groups (`foo` and `bar`); `--all-extras` should pull in both.
    pyproject_toml.write_str(
        r#"[build-system]
requires = ["setuptools>=42"]

[project]
name = "project"
version = "0.1.0"
dependencies = ["anyio==3.7.0"]
optional-dependencies.foo = [
    "iniconfig==1.1.1",
]
optional-dependencies.bar = [
    "httpcore==0.18.0",
]
"#,
    )?;

    uv_snapshot!(context.filters(), context.pip_compile()
            .arg("pyproject.toml")
            .arg("--all-extras"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] pyproject.toml --all-extras
    anyio==3.7.0
        # via
        #   project (pyproject.toml)
        #   httpcore
    certifi==2024.2.2
        # via httpcore
    h11==0.14.0
        # via httpcore
    httpcore==0.18.0
        # via project (pyproject.toml)
    idna==3.6
        # via anyio
    iniconfig==1.1.1
        # via project (pyproject.toml)
    sniffio==1.3.1
        # via
        #   anyio
        #   httpcore

    ----- stderr -----
    Resolved 7 packages in [TIME]
    "###
    );

    Ok(())
}

/// Resolve packages from all extras in a `pyproject.toml` file, using the
/// `--annotation-style=line` output format.
#[test]
fn compile_pyproject_toml_all_extras_annotation_line() -> Result<()> {
    let context = TestContext::new("3.12");
    let pyproject_toml = context.temp_dir.child("pyproject.toml");
    pyproject_toml.write_str(
        r#"[build-system]
requires = ["setuptools>=42"]

[project]
name = "project"
version = "0.1.0"
dependencies = ["anyio==3.7.0"]
optional-dependencies.foo = [
    "iniconfig==1.1.1",
]
optional-dependencies.bar = [
    "httpcore==0.18.0",
]
"#,
    )?;

    uv_snapshot!(context.filters(), context.pip_compile()
            .arg("--annotation-style=line")
            .arg("pyproject.toml")
            .arg("--all-extras"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] --annotation-style=line pyproject.toml --all-extras
    anyio==3.7.0              # via httpcore, project (pyproject.toml)
    certifi==2024.2.2         # via httpcore
    h11==0.14.0               # via httpcore
    httpcore==0.18.0          # via project (pyproject.toml)
    idna==3.6                 # via anyio
    iniconfig==1.1.1          # via project (pyproject.toml)
    sniffio==1.3.1            # via anyio, httpcore

    ----- stderr -----
    Resolved 7 packages in [TIME]
    "###
    );

    Ok(())
}

/// `--all-extras` and `--extra` are mutually exclusive; passing both should fail with a
/// usage error.
#[test]
fn compile_does_not_allow_both_extra_and_all_extras() -> Result<()> {
    let context = TestContext::new("3.12");
    let pyproject_toml = context.temp_dir.child("pyproject.toml");
    pyproject_toml.write_str(
        r#"[build-system]
requires = ["setuptools>=42"]

[project]
name = "project"
dependencies = ["anyio==3.7.0"]
optional-dependencies.foo = [
    "iniconfig==1.1.1",
]
optional-dependencies.bar = [
    "httpcore==0.18.0",
]
"#,
    )?;

    uv_snapshot!(context.filters(), context.pip_compile()
            .arg("pyproject.toml")
            .arg("--all-extras")
            .arg("--extra")
            .arg("foo"),
            @r"
    success: false
    exit_code: 2
    ----- stdout -----

    ----- stderr -----
    error: the argument '--all-extras' cannot be used with '--extra <EXTRA>'

    Usage: uv pip compile --cache-dir [CACHE_DIR] --all-extras --exclude-newer <EXCLUDE_NEWER> <SRC_FILE|--group <GROUP>>

    For more information, try '--help'.
    "
    );

    Ok(())
}

/// Compile requirements that cannot be solved due to a conflict in a `pyproject.toml` file.
#[test]
fn compile_unsolvable_requirements() -> Result<()> {
    let context = TestContext::new("3.12");
    let pyproject_toml = context.temp_dir.child("pyproject.toml");
    // Two mutually-exclusive pins on the same package.
    pyproject_toml.write_str(
        r#"[build-system]
requires = ["setuptools>=42"]

[project]
name = "my-project"
version = "0.1.0"
dependencies = ["anyio==3.7.0", "anyio==4.0.0"]
"#,
    )?;

    uv_snapshot!(context.filters(), context.pip_compile()
            .arg("pyproject.toml"), @r###"
    success: false
    exit_code: 1
    ----- stdout -----

    ----- stderr -----
      × No solution found when resolving dependencies:
      ╰─▶ Because my-project depends on anyio==3.7.0 and anyio==4.0.0, we can conclude that your requirements are unsatisfiable.
    "###
    );

    Ok(())
}

/// Compile requirements in a `pyproject.toml` file that cannot be resolved due to
/// a requirement with a version that is not available online.
#[test]
fn compile_unsolvable_requirements_version_not_available() -> Result<()> {
    let context = TestContext::new("3.12");
    let pyproject_toml = context.temp_dir.child("pyproject.toml");
    // `anyio==300.1.4` is a deliberately nonexistent version.
    pyproject_toml.write_str(
        r#"[build-system]
requires = ["setuptools>=42"]

[project]
name = "my-project"
version = "0.1.0"
dependencies = ["anyio==300.1.4"]
"#,
    )?;

    uv_snapshot!(context.filters(), context.pip_compile()
            .arg("pyproject.toml"), @r###"
    success: false
    exit_code: 1
    ----- stdout -----

    ----- stderr -----
      × No solution found when resolving dependencies:
      ╰─▶ Because there is no version of anyio==300.1.4 and my-project depends on anyio==300.1.4, we can conclude that your requirements are unsatisfiable.
    "###
    );

    Ok(())
}

/// Resolve at a specific time in the past via `--exclude-newer`, covering full timestamps,
/// bare dates, and malformed inputs.
#[test]
fn compile_exclude_newer() -> Result<()> {
    let context = TestContext::new("3.12");
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str("tqdm")?;

    uv_snapshot!(context
        .pip_compile()
        .env_remove(EnvVars::UV_EXCLUDE_NEWER)
        .arg("requirements.in")
        .arg("--exclude-newer")
        // 4.64.0: 2022-04-04T01:48:46.194635Z
        // 4.64.1: 2022-09-03T11:10:27.148080Z
        .arg("2022-04-04T12:00:00Z"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.in --exclude-newer 2022-04-04T12:00:00Z
    tqdm==4.64.0
        # via -r requirements.in

    ----- stderr -----
    Resolved 1 package in [TIME]
    "###
    );

    // Use a date as input instead.
    // We interpret a date as including this day
    uv_snapshot!(context
        .pip_compile()
        .env_remove(EnvVars::UV_EXCLUDE_NEWER)
        .arg("requirements.in")
        .arg("--exclude-newer")
        .arg("2022-04-04"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.in --exclude-newer 2022-04-04
    tqdm==4.64.0
        # via -r requirements.in

    ----- stderr -----
    Resolved 1 package in [TIME]
    "###
    );

    // Check the error message for invalid datetime
    uv_snapshot!(context
        .pip_compile()
        .env_remove(EnvVars::UV_EXCLUDE_NEWER)
        .arg("requirements.in")
        .arg("--exclude-newer")
        .arg("2022-04-04+02:00"), @r###"
    success: false
    exit_code: 2
    ----- stdout -----

    ----- stderr -----
    error: invalid value '2022-04-04+02:00' for '--exclude-newer <EXCLUDE_NEWER>': `2022-04-04+02:00` could not be parsed as a valid date: parsed value '2022-04-04', but unparsed input "+02:00" remains (expected no unparsed input)

    For more information, try '--help'.
    "###
    );

    // Check the error message for the case of
    // an invalid timestamp that still has a
    // valid date.
    uv_snapshot!(context
        .pip_compile()
        .env_remove(EnvVars::UV_EXCLUDE_NEWER)
        .arg("requirements.in")
        .arg("--exclude-newer")
        .arg("2022-04-04T26:00:00+00"), @r###"
    success: false
    exit_code: 2
    ----- stdout -----

    ----- stderr -----
    error: invalid value '2022-04-04T26:00:00+00' for '--exclude-newer <EXCLUDE_NEWER>': `2022-04-04T26:00:00+00` could not be parsed as a valid date: failed to parse hour in time "26:00:00+00": hour is not valid: parameter 'hour' with value 26 is not in the required range of 0..=23

    For more information, try '--help'.
    "###
    );

    Ok(())
}

/// Resolve a local path dependency on a specific wheel, exercising every accepted spelling:
/// `file://` URL, relative `file:` and `file://` forms, a bare relative path, a bare absolute
/// path, and `file://localhost/`.
#[test]
fn compile_wheel_path_dependency() -> Result<()> {
    let context = TestContext::new("3.12");

    // Download a wheel.
    let flask_wheel = context.temp_dir.child("flask-3.0.0-py3-none-any.whl");
    download_to_disk(
        "https://files.pythonhosted.org/packages/36/42/015c23096649b908c809c69388a805a571a3bea44362fe87e33fc3afa01f/flask-3.0.0-py3-none-any.whl",
        &flask_wheel,
    );

    // Case 1: a canonical `file://` URL built from the absolute path.
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str(&format!(
        "flask @ {}",
        Url::from_file_path(flask_wheel.path()).unwrap()
    ))?;

    uv_snapshot!(context.filters(), context.pip_compile()
            .arg("requirements.in"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.in
    blinker==1.7.0
        # via flask
    click==8.1.7
        # via flask
    flask @ file://[TEMP_DIR]/flask-3.0.0-py3-none-any.whl
        # via -r requirements.in
    itsdangerous==2.1.2
        # via flask
    jinja2==3.1.3
        # via flask
    markupsafe==2.1.5
        # via
        #   jinja2
        #   werkzeug
    werkzeug==3.0.1
        # via flask

    ----- stderr -----
    Resolved 7 packages in [TIME]
    "###);

    // Run the same operation, but this time with a relative path, omitting the `//`.
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str("flask @ file:flask-3.0.0-py3-none-any.whl")?;

    uv_snapshot!(context.filters(), context.pip_compile()
            .arg("requirements.in"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.in
    blinker==1.7.0
        # via flask
    click==8.1.7
        # via flask
    flask @ file:flask-3.0.0-py3-none-any.whl
        # via -r requirements.in
    itsdangerous==2.1.2
        # via flask
    jinja2==3.1.3
        # via flask
    markupsafe==2.1.5
        # via
        #   jinja2
        #   werkzeug
    werkzeug==3.0.1
        # via flask

    ----- stderr -----
    Resolved 7 packages in [TIME]
    "###
    );

    // Run the same operation, but this time with a relative path, including the `//`.
    // (Note: per the snapshot, this form is emitted as a bare filename, without `flask @`.)
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str("flask @ file://flask-3.0.0-py3-none-any.whl")?;

    uv_snapshot!(context.filters(), context.pip_compile()
            .arg("requirements.in"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.in
    blinker==1.7.0
        # via flask
    click==8.1.7
        # via flask
    flask-3.0.0-py3-none-any.whl
        # via -r requirements.in
    itsdangerous==2.1.2
        # via flask
    jinja2==3.1.3
        # via flask
    markupsafe==2.1.5
        # via
        #   jinja2
        #   werkzeug
    werkzeug==3.0.1
        # via flask

    ----- stderr -----
    Resolved 7 packages in [TIME]
    "###
    );

    // Run the same operation, but this time with a relative path, exclusive of any scheme.
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str("flask @ ./flask-3.0.0-py3-none-any.whl")?;

    uv_snapshot!(context.filters(), context.pip_compile()
            .arg("requirements.in"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.in
    blinker==1.7.0
        # via flask
    click==8.1.7
        # via flask
    ./flask-3.0.0-py3-none-any.whl
        # via -r requirements.in
    itsdangerous==2.1.2
        # via flask
    jinja2==3.1.3
        # via flask
    markupsafe==2.1.5
        # via
        #   jinja2
        #   werkzeug
    werkzeug==3.0.1
        # via flask

    ----- stderr -----
    Resolved 7 packages in [TIME]
    "###
    );

    // Run the same operation, but this time with an absolute path (rather than a URL).
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str(&format!("flask @ {}", flask_wheel.path().display()))?;

    uv_snapshot!(context.filters(), context.pip_compile()
            .arg("requirements.in"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.in
    blinker==1.7.0
        # via flask
    click==8.1.7
        # via flask
    [TEMP_DIR]/flask-3.0.0-py3-none-any.whl
        # via -r requirements.in
    itsdangerous==2.1.2
        # via flask
    jinja2==3.1.3
        # via flask
    markupsafe==2.1.5
        # via
        #   jinja2
        #   werkzeug
    werkzeug==3.0.1
        # via flask

    ----- stderr -----
    Resolved 7 packages in [TIME]
    "###
    );

    // Run the same operation, but this time with an absolute path (rather than a URL), including
    // the `file://` prefix.
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str(&format!("flask @ file://{}", flask_wheel.path().display()))?;

    uv_snapshot!(context.filters(), context.pip_compile()
            .arg("requirements.in"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.in
    blinker==1.7.0
        # via flask
    click==8.1.7
        # via flask
    flask @ file://[TEMP_DIR]/flask-3.0.0-py3-none-any.whl
        # via -r requirements.in
    itsdangerous==2.1.2
        # via flask
    jinja2==3.1.3
        # via flask
    markupsafe==2.1.5
        # via
        #   jinja2
        #   werkzeug
    werkzeug==3.0.1
        # via flask

    ----- stderr -----
    Resolved 7 packages in [TIME]
    "###
    );

    // Run the same operation, but this time with an absolute path (rather than a URL), including
    // the `file://localhost/` prefix.
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str(&format!(
        "flask @ file://localhost/{}",
        flask_wheel.path().display()
    ))?;

    uv_snapshot!(context.filters(), context.pip_compile()
            .arg("requirements.in"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.in
    blinker==1.7.0
        # via flask
    click==8.1.7
        # via flask
    flask @ file://localhost/[TEMP_DIR]/flask-3.0.0-py3-none-any.whl
        # via -r requirements.in
    itsdangerous==2.1.2
        # via flask
    jinja2==3.1.3
        # via flask
    markupsafe==2.1.5
        # via
        #   jinja2
        #   werkzeug
    werkzeug==3.0.1
        # via flask

    ----- stderr -----
    Resolved 7 packages in [TIME]
    "###
    );

    Ok(())
}

/// Resolve a local path dependency on a specific source distribution.
#[test]
fn compile_source_distribution_path_dependency() -> Result<()> {
    let context = TestContext::new("3.12");

    // Download a source distribution. (Named `flask_sdist` — not `flask_wheel` — since this
    // is a `.tar.gz` sdist, unlike the wheel used in `compile_wheel_path_dependency`.)
    let flask_sdist = context.temp_dir.child("flask-3.0.0.tar.gz");
    download_to_disk(
        "https://files.pythonhosted.org/packages/d8/09/c1a7354d3925a3c6c8cfdebf4245bae67d633ffda1ba415add06ffc839c5/flask-3.0.0.tar.gz",
        &flask_sdist,
    );

    // Reference the sdist via a canonical `file://` URL.
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str(&format!(
        "flask @ {}",
        Url::from_file_path(flask_sdist.path()).unwrap()
    ))?;

    uv_snapshot!(context.filters(), context.pip_compile()
            .arg("requirements.in"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.in
    blinker==1.7.0
        # via flask
    click==8.1.7
        # via flask
    flask @ file://[TEMP_DIR]/flask-3.0.0.tar.gz
        # via -r requirements.in
    itsdangerous==2.1.2
        # via flask
    jinja2==3.1.3
        # via flask
    markupsafe==2.1.5
        # via
        #   jinja2
        #   werkzeug
    werkzeug==3.0.1
        # via flask

    ----- stderr -----
    Resolved 7 packages in [TIME]
    "###);

    Ok(())
}

/// Resolve a local path dependency to a non-existent file.
#[test]
fn compile_wheel_path_dependency_missing() -> Result<()> {
    let context = TestContext::new("3.12");
    // The wheel is never created on disk, so resolution should fail with a lookup error.
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str(&format!(
        "flask @ {}",
        context
            .temp_dir
            .join("flask-3.0.0-py3-none-any.whl")
            .simplified_display()
    ))?;

    uv_snapshot!(context.filters(), context.pip_compile()
            .arg("requirements.in"), @r###"
    success: false
    exit_code: 2
    ----- stdout -----

    ----- stderr -----
    error: Distribution not found at: file://[TEMP_DIR]/flask-3.0.0-py3-none-any.whl
    "###);

    Ok(())
}

/// Resolve a yanked version of `attrs` by specifying the version directly. Pinning a yanked
/// version succeeds, but emits a warning.
#[test]
fn compile_yanked_version_direct() -> Result<()> {
    let context = TestContext::new("3.12");
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str("attrs==21.1.0")?;

    uv_snapshot!(context.filters(), context.pip_compile()
            .arg("requirements.in"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.in
    attrs==21.1.0
        # via -r requirements.in

    ----- stderr -----
    Resolved 1 package in [TIME]
    warning: `attrs==21.1.0` is yanked (reason: "Installable but not importable on Python 3.4.")
    "###
    );

    Ok(())
}

/// Fail to resolve `attrs` due to the indirect use of a yanked version (`21.1.0`): the range
/// `>20.3.0,<21.2.0` admits only the yanked release, which is skipped for non-exact requests.
#[test]
fn compile_yanked_version_indirect() -> Result<()> {
    let context = TestContext::new("3.12");
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str("attrs>20.3.0,<21.2.0")?;

    uv_snapshot!(context.filters(), context.pip_compile()
            .arg("requirements.in"), @r###"
    success: false
    exit_code: 1
    ----- stdout -----

    ----- stderr -----
      × No solution found when resolving dependencies:
      ╰─▶ Because only the following versions of attrs are available:
              attrs<20.3.0
              attrs==21.1.0
              attrs>21.2.0
          and attrs==21.1.0 was yanked (reason: Installable but not importable on Python 3.4), we can conclude that attrs>20.3.0,<21.2.0 cannot be used.
          And because you require attrs>20.3.0,<21.2.0, we can conclude that your requirements are unsatisfiable.
    "###
    );

    Ok(())
}

/// Flask==3.0.0 depends on Werkzeug>=3.0.0. Demonstrate that we can override this
/// requirement with an incompatible version.
#[test]
fn override_dependency() -> Result<()> {
    let context = TestContext::new("3.12");
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str("flask==3.0.0")?;

    // `werkzeug==2.3.0` contradicts Flask's own `werkzeug>=3.0.0` requirement; the override
    // should win.
    let overrides_txt = context.temp_dir.child("overrides.txt");
    overrides_txt.write_str("werkzeug==2.3.0")?;

    uv_snapshot!(context.filters(), context.pip_compile()
            .arg("requirements.in")
            .arg("--override")
            .arg("overrides.txt"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.in --override overrides.txt
    blinker==1.7.0
        # via flask
    click==8.1.7
        # via flask
    flask==3.0.0
        # via -r requirements.in
    itsdangerous==2.1.2
        # via flask
    jinja2==3.1.3
        # via flask
    markupsafe==2.1.5
        # via
        #   jinja2
        #   werkzeug
    werkzeug==2.3.0
        # via
        #   --override overrides.txt
        #   flask

    ----- stderr -----
    Resolved 7 packages in [TIME]
    "###
    );

    Ok(())
}

/// Check that `tool.uv.override-dependencies` in `pyproject.toml` is respected.
#[test]
fn override_dependency_from_pyproject() -> Result<()> {
    let context = TestContext::new("3.12");
    let pyproject_toml = context.temp_dir.child("pyproject.toml");
    pyproject_toml.write_str(
        r#"[project]
    name = "example"
    version = "0.0.0"
    dependencies = [
      "flask==3.0.0"
    ]

    [tool.uv]
    override-dependencies = [
      "werkzeug==2.3.0"
    ]
    "#,
    )?;

    uv_snapshot!(context.filters(), context.pip_compile()
            .arg("pyproject.toml")
            .current_dir(&context.temp_dir)
            , @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] pyproject.toml
    blinker==1.7.0
        # via flask
    click==8.1.7
        # via flask
    flask==3.0.0
        # via example (pyproject.toml)
    itsdangerous==2.1.2
        # via flask
    jinja2==3.1.3
        # via flask
    markupsafe==2.1.5
        # via
        #   jinja2
        #   werkzeug
    werkzeug==2.3.0
        # via
        #   --override (workspace)
        #   flask

    ----- stderr -----
    Resolved 7 packages in [TIME]
    "###
    );

    Ok(())
}

/// Check that `tool.uv.constraint-dependencies` in `pyproject.toml` is respected: the
/// `idna<3.4` constraint should cap the transitive `idna` pin.
#[test]
fn constraint_dependency_from_pyproject() -> Result<()> {
    let context = TestContext::new("3.12");
    let pyproject_toml = context.temp_dir.child("pyproject.toml");
    pyproject_toml.write_str(
        r#"[project]
    name = "example"
    version = "0.0.0"
    dependencies = [
      "anyio==3.7.0"
    ]

    [tool.uv]
    constraint-dependencies = [
      "idna<3.4"
    ]
    "#,
    )?;

    uv_snapshot!(context.filters(), context.pip_compile()
            .arg("pyproject.toml"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] pyproject.toml
    anyio==3.7.0
        # via example (pyproject.toml)
    idna==3.3
        # via
        #   -c (workspace)
        #   anyio
    sniffio==1.3.1
        # via anyio

    ----- stderr -----
    Resolved 3 packages in [TIME]
    "###
    );

    Ok(())
}

/// Check that `override-dependencies` in a `uv.toml` supplied via `--config-file` is
/// respected, even when the config lives outside the project directory.
#[test]
fn override_dependency_from_specific_uv_toml() -> Result<()> {
    let context = TestContext::new("3.12");

    // Project in a subdirectory, with a Flask dependency.
    let _ = context.temp_dir.child("project").create_dir_all();
    let pyproject_toml = context.temp_dir.child("project/pyproject.toml");
    pyproject_toml.write_str(
        r#"[project]
    name = "example"
    version = "0.0.0"
    dependencies = [
      "flask==3.0.0"
    ]
    "#,
    )?;

    // The override lives in a sibling `uv/uv.toml`, referenced via `--config-file`.
    // (The explicit `ChildPath` annotation was redundant; `child()` already returns it.)
    let _ = context.temp_dir.child("uv").create_dir_all();
    let uv_toml = context.temp_dir.child("uv").child("uv.toml");
    uv_toml.write_str(
        r#"
        override-dependencies = [
          "werkzeug==2.3.0"
        ]
        "#,
    )?;

    uv_snapshot!(context.filters(), context.pip_compile()
        .arg("pyproject.toml")
        .arg("--config-file")
        .arg("../uv/uv.toml")
        .current_dir(context.temp_dir.child("project"))
            , @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] pyproject.toml --config-file ../uv/uv.toml
    blinker==1.7.0
        # via flask
    click==8.1.7
        # via flask
    flask==3.0.0
        # via example (pyproject.toml)
    itsdangerous==2.1.2
        # via flask
    jinja2==3.1.3
        # via flask
    markupsafe==2.1.5
        # via
        #   jinja2
        #   werkzeug
    werkzeug==2.3.0
        # via
        #   --override (workspace)
        #   flask

    ----- stderr -----
    Resolved 7 packages in [TIME]
    "###
    );

    Ok(())
}

/// Black==23.10.1 depends on tomli>=1.1.0 for Python versions below 3.11. Demonstrate that we can
/// override it with a multi-line override.
///
/// The overrides file contains two `tomli` entries with mutually exclusive markers; on
/// Python 3.12 only the `python_version >= '3.11'` entry applies.
#[test]
fn override_multi_dependency() -> Result<()> {
    let context = TestContext::new("3.12");
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str("black==23.10.1")?;

    // Two override lines for the same package, split by environment marker.
    let overrides_txt = context.temp_dir.child("overrides.txt");
    overrides_txt.write_str(
        "tomli>=1.1.0; python_version >= '3.11'\ntomli<1.0.0; python_version < '3.11'",
    )?;

    uv_snapshot!(context.filters(), context.pip_compile()
            .arg("requirements.in")
            .arg("--override")
            .arg("overrides.txt"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.in --override overrides.txt
    black==23.10.1
        # via -r requirements.in
    click==8.1.7
        # via black
    mypy-extensions==1.0.0
        # via black
    packaging==24.0
        # via black
    pathspec==0.12.1
        # via black
    platformdirs==4.2.0
        # via black
    tomli==2.0.1
        # via
        #   --override overrides.txt
        #   black

    ----- stderr -----
    Resolved 7 packages in [TIME]
    "###
    );

    Ok(())
}

/// `urllib3==2.2.2` has an optional dependency on `pysocks!=1.5.7,<2.0,>=1.5.6; extra == 'socks'`,
/// So we shouldn't apply the `pysocks==1.7.1` override without the `socks` extra.
///
/// The expected output contains only `urllib3` itself: the override must not pull in
/// a package that no activated dependency edge requires.
#[test]
fn dont_add_override_for_non_activated_extra() -> Result<()> {
    let context = TestContext::new("3.12");
    // Note: no `[socks]` extra is requested here.
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str("urllib3==2.2.1")?;

    let overrides_txt = context.temp_dir.child("overrides.txt");
    overrides_txt.write_str("pysocks==1.7.1")?;

    uv_snapshot!(context.filters(), context.pip_compile()
        .arg("requirements.in")
        .arg("--override")
        .arg("overrides.txt"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.in --override overrides.txt
    urllib3==2.2.1
        # via -r requirements.in

    ----- stderr -----
    Resolved 1 package in [TIME]
    "###
    );

    Ok(())
}

/// Check how invalid `tool.uv.override-dependencies` is handled in `pyproject.toml`.
///
/// `werkzeug=2.3.0` (single `=`) is not a valid PEP 508 specifier; the command should
/// fail with exit code 2, surfacing both a settings-discovery warning and a parse error.
#[test]
fn override_dependency_from_workspace_invalid_syntax() -> Result<()> {
    let context = TestContext::new("3.12");
    // `werkzeug=2.3.0` is deliberately malformed (missing the second `=`).
    let pyproject_toml = context.temp_dir.child("pyproject.toml");
    pyproject_toml.write_str(
        r#"[project]
    name = "example"
    version = "0.0.0"
    dependencies = [
      "flask==3.0.0"
    ]

    [tool.uv]
    override-dependencies = [
      "werkzeug=2.3.0"
    ]
    "#,
    )?;

    uv_snapshot!(context.filters(), context.pip_compile()
            .arg("pyproject.toml")
            .current_dir(&context.temp_dir)
            , @r###"
    success: false
    exit_code: 2
    ----- stdout -----

    ----- stderr -----
    warning: Failed to parse `pyproject.toml` during settings discovery:
      TOML parse error at line 10, column 7
         |
      10 |       "werkzeug=2.3.0"
         |       ^^^^^^^^^^^^^^^^
      no such comparison operator "=", must be one of ~= == != <= >= < > ===
      werkzeug=2.3.0
              ^^^^^^

    error: Failed to parse: `pyproject.toml`
      Caused by: TOML parse error at line 10, column 7
       |
    10 |       "werkzeug=2.3.0"
       |       ^^^^^^^^^^^^^^^^
    no such comparison operator "=", must be one of ~= == != <= >= < > ===
    werkzeug=2.3.0
            ^^^^^^
    "###
    );

    Ok(())
}

/// Flask==3.0.0 depends on Werkzeug>=3.0.0. Demonstrate that we can override this
/// requirement with a URL.
///
/// The override is a *named* direct-URL requirement (`werkzeug @ https://…`), pointing
/// at a Werkzeug 1.0.1 wheel that would otherwise violate Flask's lower bound.
#[test]
fn override_dependency_url() -> Result<()> {
    let context = TestContext::new("3.12");
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str("flask==3.0.0")?;

    let overrides_txt = context.temp_dir.child("overrides.txt");
    overrides_txt.write_str("werkzeug @ https://files.pythonhosted.org/packages/cc/94/5f7079a0e00bd6863ef8f1da638721e9da21e5bacee597595b318f71d62e/Werkzeug-1.0.1-py2.py3-none-any.whl")?;

    uv_snapshot!(context.filters(), context.pip_compile()
            .arg("requirements.in")
            .arg("--override")
            .arg("overrides.txt"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.in --override overrides.txt
    blinker==1.7.0
        # via flask
    click==8.1.7
        # via flask
    flask==3.0.0
        # via -r requirements.in
    itsdangerous==2.1.2
        # via flask
    jinja2==3.1.3
        # via flask
    markupsafe==2.1.5
        # via jinja2
    werkzeug @ https://files.pythonhosted.org/packages/cc/94/5f7079a0e00bd6863ef8f1da638721e9da21e5bacee597595b318f71d62e/Werkzeug-1.0.1-py2.py3-none-any.whl
        # via
        #   --override overrides.txt
        #   flask

    ----- stderr -----
    Resolved 7 packages in [TIME]
    "###
    );

    Ok(())
}

/// Flask==3.0.0 depends on Werkzeug>=3.0.0. Demonstrate that we can override this
/// requirement with an unnamed URL.
///
/// Unlike `override_dependency_url`, the override line is a bare URL with no
/// `werkzeug @` prefix; the package name must be inferred from the wheel filename.
#[test]
fn override_dependency_unnamed_url() -> Result<()> {
    let context = TestContext::new("3.12");
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str("flask==3.0.0")?;

    // Bare URL: the name is derived from `Werkzeug-1.0.1-py2.py3-none-any.whl`.
    let overrides_txt = context.temp_dir.child("overrides.txt");
    overrides_txt.write_str("https://files.pythonhosted.org/packages/cc/94/5f7079a0e00bd6863ef8f1da638721e9da21e5bacee597595b318f71d62e/Werkzeug-1.0.1-py2.py3-none-any.whl")?;

    uv_snapshot!(context.filters(), context.pip_compile()
            .arg("requirements.in")
            .arg("--override")
            .arg("overrides.txt"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.in --override overrides.txt
    blinker==1.7.0
        # via flask
    click==8.1.7
        # via flask
    flask==3.0.0
        # via -r requirements.in
    itsdangerous==2.1.2
        # via flask
    jinja2==3.1.3
        # via flask
    markupsafe==2.1.5
        # via jinja2
    werkzeug @ https://files.pythonhosted.org/packages/cc/94/5f7079a0e00bd6863ef8f1da638721e9da21e5bacee597595b318f71d62e/Werkzeug-1.0.1-py2.py3-none-any.whl
        # via
        #   --override overrides.txt
        #   flask

    ----- stderr -----
    Resolved 7 packages in [TIME]
    "###
    );

    Ok(())
}

/// Request an extra that doesn't exist on the specified package.
///
/// Requesting `black[tensorboard]` from the registry should still resolve successfully,
/// but emit a warning on stderr about the unknown extra.
#[test]
fn missing_registry_extra() -> Result<()> {
    let context = TestContext::new("3.12");
    // `tensorboard` is not an extra that black 23.10.1 provides.
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str("black[tensorboard]==23.10.1")?;

    uv_snapshot!(context.filters(), context.pip_compile()
            .arg("requirements.in"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.in
    black==23.10.1
        # via -r requirements.in
    click==8.1.7
        # via black
    mypy-extensions==1.0.0
        # via black
    packaging==24.0
        # via black
    pathspec==0.12.1
        # via black
    platformdirs==4.2.0
        # via black

    ----- stderr -----
    Resolved 6 packages in [TIME]
    warning: The package `black==23.10.1` does not have an extra named `tensorboard`
    "###
    );

    Ok(())
}

/// Request an extra that doesn't exist on the specified package.
///
/// Same as `missing_registry_extra`, but the requirement is a direct-URL wheel
/// (`flask[tensorboard] @ https://…`): resolution succeeds with a warning.
#[test]
fn missing_url_extra() -> Result<()> {
    let context = TestContext::new("3.12");
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str("flask[tensorboard] @ https://files.pythonhosted.org/packages/36/42/015c23096649b908c809c69388a805a571a3bea44362fe87e33fc3afa01f/flask-3.0.0-py3-none-any.whl")?;

    uv_snapshot!(context.filters(), context.pip_compile()
            .arg("requirements.in"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.in
    blinker==1.7.0
        # via flask
    click==8.1.7
        # via flask
    flask @ https://files.pythonhosted.org/packages/36/42/015c23096649b908c809c69388a805a571a3bea44362fe87e33fc3afa01f/flask-3.0.0-py3-none-any.whl
        # via -r requirements.in
    itsdangerous==2.1.2
        # via flask
    jinja2==3.1.3
        # via flask
    markupsafe==2.1.5
        # via
        #   jinja2
        #   werkzeug
    werkzeug==3.0.1
        # via flask

    ----- stderr -----
    Resolved 7 packages in [TIME]
    warning: The package `flask @ https://files.pythonhosted.org/packages/36/42/015c23096649b908c809c69388a805a571a3bea44362fe87e33fc3afa01f/flask-3.0.0-py3-none-any.whl` does not have an extra named `tensorboard`
    "###
    );

    Ok(())
}

/// Resolve a dependency from a URL, preserving the exact casing of the URL as specified in the
/// requirements file.
///
/// The input uses `files.PYTHONHOSTED.org` (upper-cased host segment); the compiled
/// output must echo the URL verbatim rather than normalizing its case.
#[test]
fn preserve_url() -> Result<()> {
    let context = TestContext::new("3.12");
    // Deliberately odd casing in the host name.
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str("flask @ https://files.PYTHONHOSTED.org/packages/36/42/015c23096649b908c809c69388a805a571a3bea44362fe87e33fc3afa01f/flask-3.0.0-py3-none-any.whl")?;

    uv_snapshot!(context.filters(), context.pip_compile()
            .arg("requirements.in"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.in
    blinker==1.7.0
        # via flask
    click==8.1.7
        # via flask
    flask @ https://files.PYTHONHOSTED.org/packages/36/42/015c23096649b908c809c69388a805a571a3bea44362fe87e33fc3afa01f/flask-3.0.0-py3-none-any.whl
        # via -r requirements.in
    itsdangerous==2.1.2
        # via flask
    jinja2==3.1.3
        # via flask
    markupsafe==2.1.5
        # via
        #   jinja2
        #   werkzeug
    werkzeug==3.0.1
        # via flask

    ----- stderr -----
    Resolved 7 packages in [TIME]
    "###
    );

    Ok(())
}

/// Resolve a dependency from a URL, preserving the unexpanded environment variable as specified in
/// the requirements file.
///
/// The requirement uses `file://${PROJECT_ROOT}/…`; the compiled output must keep the
/// `${PROJECT_ROOT}` placeholder literal instead of expanding it to an absolute path.
#[test]
fn preserve_project_root() -> Result<()> {
    let context = TestContext::new("3.12");
    // Download a wheel.
    let flask_wheel = context.temp_dir.child("flask-3.0.0-py3-none-any.whl");
    download_to_disk(
        "https://files.pythonhosted.org/packages/36/42/015c23096649b908c809c69388a805a571a3bea44362fe87e33fc3afa01f/flask-3.0.0-py3-none-any.whl",
        &flask_wheel,
    );

    // Reference the local wheel through the ${PROJECT_ROOT} placeholder.
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str("flask @ file://${PROJECT_ROOT}/flask-3.0.0-py3-none-any.whl")?;

    uv_snapshot!(context.filters(), context.pip_compile()
            .arg("requirements.in"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.in
    blinker==1.7.0
        # via flask
    click==8.1.7
        # via flask
    flask @ file://${PROJECT_ROOT}/flask-3.0.0-py3-none-any.whl
        # via -r requirements.in
    itsdangerous==2.1.2
        # via flask
    jinja2==3.1.3
        # via flask
    markupsafe==2.1.5
        # via
        #   jinja2
        #   werkzeug
    werkzeug==3.0.1
        # via flask

    ----- stderr -----
    Resolved 7 packages in [TIME]
    "###
    );

    Ok(())
}

/// Resolve a dependency from a URL, passing in the entire URL as an environment variable.
///
/// The requirement is `flask @ ${URL}` and `URL` is supplied via the process
/// environment; the output must keep the unexpanded `${URL}` placeholder.
#[test]
fn respect_http_env_var() -> Result<()> {
    let context = TestContext::new("3.12");

    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str("flask @ ${URL}")?;

    // `URL` is expanded for resolution but preserved verbatim in the output.
    uv_snapshot!(context.filters(), context.pip_compile()
            .arg("requirements.in")
            .env(EnvVars::URL, "https://files.pythonhosted.org/packages/36/42/015c23096649b908c809c69388a805a571a3bea44362fe87e33fc3afa01f/flask-3.0.0-py3-none-any.whl"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.in
    blinker==1.7.0
        # via flask
    click==8.1.7
        # via flask
    flask @ ${URL}
        # via -r requirements.in
    itsdangerous==2.1.2
        # via flask
    jinja2==3.1.3
        # via flask
    markupsafe==2.1.5
        # via
        #   jinja2
        #   werkzeug
    werkzeug==3.0.1
        # via flask

    ----- stderr -----
    Resolved 7 packages in [TIME]
    "###
    );

    Ok(())
}

/// A requirement defined as a single unnamed environment variable should be parsed as such.
///
/// The whole requirement line is just `${URL}` (no `name @` prefix); the package name
/// is inferred from the expanded wheel URL, and the output names it `flask @ ${URL}`.
#[test]
fn respect_unnamed_env_var() -> Result<()> {
    let context = TestContext::new("3.12");

    // The entire line is the placeholder; no package name is given.
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str("${URL}")?;

    uv_snapshot!(context.filters(), context.pip_compile()
            .arg("requirements.in")
            .env(EnvVars::URL, "https://files.pythonhosted.org/packages/36/42/015c23096649b908c809c69388a805a571a3bea44362fe87e33fc3afa01f/flask-3.0.0-py3-none-any.whl"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.in
    blinker==1.7.0
        # via flask
    click==8.1.7
        # via flask
    flask @ ${URL}
        # via -r requirements.in
    itsdangerous==2.1.2
        # via flask
    jinja2==3.1.3
        # via flask
    markupsafe==2.1.5
        # via
        #   jinja2
        #   werkzeug
    werkzeug==3.0.1
        # via flask

    ----- stderr -----
    Resolved 7 packages in [TIME]
    "###
    );

    Ok(())
}

/// A requirement defined as a single unnamed environment variable should error if the environment
/// variable is not set.
///
/// Without `URL` in the environment, the literal `${URL}` cannot be parsed as a
/// requirement and the command fails with exit code 2.
#[test]
fn error_missing_unnamed_env_var() -> Result<()> {
    let context = TestContext::new("3.12");

    // Note: unlike `respect_unnamed_env_var`, no `.env(...)` is provided below.
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str("${URL}")?;

    uv_snapshot!(context.filters(), context.pip_compile()
            .arg("requirements.in"), @r###"
    success: false
    exit_code: 2
    ----- stdout -----

    ----- stderr -----
    error: Couldn't parse requirement in `requirements.in` at position 0
      Caused by: Expected package name starting with an alphanumeric character, found `$`
    ${URL}
    ^
    "###
    );

    Ok(())
}

/// Resolve a dependency from a file path, passing in the entire path as an environment variable.
///
/// `FILE_PATH` points at a locally-downloaded wheel; the compiled output keeps the
/// unexpanded `${FILE_PATH}` placeholder on the flask line.
#[test]
fn respect_file_env_var() -> Result<()> {
    let context = TestContext::new("3.12");
    // Download a wheel.
    let flask_wheel = context.temp_dir.child("flask-3.0.0-py3-none-any.whl");
    download_to_disk(
        "https://files.pythonhosted.org/packages/36/42/015c23096649b908c809c69388a805a571a3bea44362fe87e33fc3afa01f/flask-3.0.0-py3-none-any.whl",
        &flask_wheel,
    );

    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str("flask @ ${FILE_PATH}")?;

    // `FILE_PATH` is set to the absolute path of the downloaded wheel.
    uv_snapshot!(context.filters(), context.pip_compile()
            .arg("requirements.in")
            .env(EnvVars::FILE_PATH, context.temp_dir.join("flask-3.0.0-py3-none-any.whl")), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.in
    blinker==1.7.0
        # via flask
    click==8.1.7
        # via flask
    ${FILE_PATH}
        # via -r requirements.in
    itsdangerous==2.1.2
        # via flask
    jinja2==3.1.3
        # via flask
    markupsafe==2.1.5
        # via
        #   jinja2
        #   werkzeug
    werkzeug==3.0.1
        # via flask

    ----- stderr -----
    Resolved 7 packages in [TIME]
    "###
    );

    Ok(())
}

/// Compile a mix of editable requirements (`-e` path and `-e file://` with an extra)
/// alongside a normal registry dependency.
///
/// Runs from the crate's working directory so the relative `../../scripts/packages/…`
/// editable paths resolve against the repository checkout.
#[test]
fn compile_editable() -> Result<()> {
    let context = TestContext::new("3.12");
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str(indoc! {r"
        -e ../../scripts/packages/poetry_editable
        -e file://../../scripts/packages/black_editable[dev]
        boltons # normal dependency for comparison
        "
    })?;

    // Run from the test binary's CWD so the relative editable paths resolve.
    uv_snapshot!(context.filters(), context.pip_compile()
        .arg(requirements_in.path())
        .current_dir(current_dir()?), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] [TEMP_DIR]/requirements.in
    -e ../../scripts/packages/poetry_editable
        # via -r [TEMP_DIR]/requirements.in
    -e file://../../scripts/packages/black_editable
        # via -r [TEMP_DIR]/requirements.in
    aiohttp==3.9.3
        # via black
    aiosignal==1.3.1
        # via aiohttp
    anyio==4.3.0
        # via poetry-editable
    attrs==23.2.0
        # via aiohttp
    boltons==23.1.1
        # via -r [TEMP_DIR]/requirements.in
    frozenlist==1.4.1
        # via
        #   aiohttp
        #   aiosignal
    idna==3.6
        # via
        #   anyio
        #   yarl
    multidict==6.0.5
        # via
        #   aiohttp
        #   yarl
    sniffio==1.3.1
        # via anyio
    uvloop==0.19.0
        # via black
    yarl==1.9.4
        # via aiohttp

    ----- stderr -----
    Resolved 13 packages in [TIME]
    "###);

    Ok(())
}

/// If an editable is repeated, it should only be built once.
///
/// The same editable package is listed three times under different spellings
/// (`file://` path, `${PROJECT_ROOT}` path, and `file://` with an extra); the
/// output contains a single `-e` entry.
#[test]
fn deduplicate_editable() -> Result<()> {
    let context = TestContext::new("3.12");
    // Three spellings of the same editable requirement.
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str(indoc! {r"
        -e file://../../scripts/packages/black_editable
        -e ${PROJECT_ROOT}/../../scripts/packages/black_editable
        -e file://../../scripts/packages/black_editable[dev]
        "
    })?;

    uv_snapshot!(context.filters(), context.pip_compile()
        .arg(requirements_in.path())
        .current_dir(current_dir()?), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] [TEMP_DIR]/requirements.in
    -e file://../../scripts/packages/black_editable
        # via -r [TEMP_DIR]/requirements.in
    aiohttp==3.9.3
        # via black
    aiosignal==1.3.1
        # via aiohttp
    attrs==23.2.0
        # via aiohttp
    frozenlist==1.4.1
        # via
        #   aiohttp
        #   aiosignal
    idna==3.6
        # via yarl
    multidict==6.0.5
        # via
        #   aiohttp
        #   yarl
    uvloop==0.19.0
        # via black
    yarl==1.9.4
        # via aiohttp

    ----- stderr -----
    Resolved 9 packages in [TIME]
    "###);

    Ok(())
}

/// An unnamed local-path requirement carrying an `#egg=` fragment should resolve,
/// and the fragment is preserved verbatim in the compiled output.
#[test]
fn strip_fragment_unnamed() -> Result<()> {
    let context = TestContext::new("3.12");
    // Path requirement with a legacy `#egg=` fragment and no explicit name.
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str(indoc! {r"
        ../../scripts/packages/black_editable#egg=black
        "
    })?;

    uv_snapshot!(context.filters(), context.pip_compile()
        .arg(requirements_in.path())
        .current_dir(current_dir()?), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] [TEMP_DIR]/requirements.in
    ../../scripts/packages/black_editable#egg=black
        # via -r [TEMP_DIR]/requirements.in

    ----- stderr -----
    Resolved 1 package in [TIME]
    "###);

    Ok(())
}

/// A *named* local-path requirement (`black @ path#egg=black`) with an `#egg=` fragment
/// should resolve the same way as the unnamed form in `strip_fragment_unnamed`.
#[test]
fn strip_fragment_named() -> Result<()> {
    let context = TestContext::new("3.12");
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str(indoc! {r"
        black @ ../../scripts/packages/black_editable#egg=black
        "
    })?;

    uv_snapshot!(context.filters(), context.pip_compile()
        .arg(requirements_in.path())
        .current_dir(current_dir()?), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] [TEMP_DIR]/requirements.in
    ../../scripts/packages/black_editable#egg=black
        # via -r [TEMP_DIR]/requirements.in

    ----- stderr -----
    Resolved 1 package in [TIME]
    "###);

    Ok(())
}

/// Resolve a direct-URL (local path) requirement with an extra (`black[dev] @ path`),
/// pulling in the extra's transitive dependencies (aiohttp, uvloop, etc.).
#[test]
fn recursive_extras_direct_url() -> Result<()> {
    let context = TestContext::new("3.12");
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str("black[dev] @ ../../scripts/packages/black_editable")?;

    // Run from the test binary's CWD so the relative path resolves.
    uv_snapshot!(context.filters(), context
        .pip_compile()
        .arg(requirements_in.path())
        .current_dir(current_dir().unwrap()), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] [TEMP_DIR]/requirements.in
    aiohttp==3.9.3
        # via black
    aiosignal==1.3.1
        # via aiohttp
    attrs==23.2.0
        # via aiohttp
    ../../scripts/packages/black_editable
        # via -r [TEMP_DIR]/requirements.in
    frozenlist==1.4.1
        # via
        #   aiohttp
        #   aiosignal
    idna==3.6
        # via yarl
    multidict==6.0.5
        # via
        #   aiohttp
        #   yarl
    uvloop==0.19.0
        # via black
    yarl==1.9.4
        # via aiohttp

    ----- stderr -----
    Resolved 9 packages in [TIME]
    "###);

    Ok(())
}

/// Compile an editable package with a direct URL requirement.
///
/// The editable `hatchling_editable` package declares a Git URL dependency on
/// `iniconfig`, so this test is gated behind the `git` feature.
#[test]
#[cfg(feature = "git")]
fn compile_editable_url_requirement() -> Result<()> {
    let context = TestContext::new("3.12");
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str("-e ../../scripts/packages/hatchling_editable")?;

    uv_snapshot!(context.filters(), context.pip_compile()
        .arg(requirements_in.path())
        .current_dir(current_dir()?), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] [TEMP_DIR]/requirements.in
    -e ../../scripts/packages/hatchling_editable
        # via -r [TEMP_DIR]/requirements.in
    iniconfig @ git+https://github.com/pytest-dev/iniconfig@9cae43103df70bac6fde7b9f35ad11a9f1be0cb4
        # via hatchling-editable

    ----- stderr -----
    Resolved 2 packages in [TIME]
    "###);

    Ok(())
}

/// Corrupting or making unreadable individual cache files must not break resolution:
/// the cache is best-effort, so each `check()` should still resolve the same set of
/// packages after the cache has been vandalized.
///
/// Marked `#[ignore]` — the cache paths are version-pinned and brittle.
#[test]
#[ignore]
fn cache_errors_are_non_fatal() -> Result<()> {
    let context = TestContext::new("3.12");
    let requirements_in = context.temp_dir.child("requirements.in");
    // No git dep, git has its own locking strategy
    requirements_in.write_str(indoc! {r"
        # pypi wheel
        pandas
        # url wheel
        flask @ https://files.pythonhosted.org/packages/36/42/015c23096649b908c809c69388a805a571a3bea44362fe87e33fc3afa01f/flask-3.0.0-py3-none-any.whl
        # url source dist
        werkzeug @ https://files.pythonhosted.org/packages/0d/cc/ff1904eb5eb4b455e442834dabf9427331ac0fa02853bf83db817a7dd53d/werkzeug-3.0.1.tar.gz
    "
    })?;

    // Pick a file from each kind of cache
    let interpreter_cache = context
        .cache_dir
        .path()
        .join("interpreter-v0")
        .read_dir()?
        .next()
        .context("Expected a python interpreter cache file")??
        .path();
    let cache_files = [
        PathBuf::from("simple-v0/pypi/numpy.msgpack"),
        PathBuf::from(
            "wheels-v0/pypi/python-dateutil/python_dateutil-2.8.2-py2.py3-none-any.msgpack",
        ),
        PathBuf::from("wheels-v0/url/4b8be67c801a7ecb/flask/flask-3.0.0-py3-none-any.msgpack"),
        PathBuf::from("built-wheels-v0/url/6781bd6440ae72c2/werkzeug/metadata.msgpack"),
        interpreter_cache,
    ];

    let check = || {
        // NOTE: `context.pip_compile()` already constructs the `pip compile` command;
        // the previous extra `.arg("pip").arg("compile")` would have been passed as
        // positional requirement-file arguments.
        uv_snapshot!(context.filters(), context.pip_compile()
                .arg(requirements_in.path())
                // It's sufficient to check that we resolve to a fixed number of packages
                .stdout(std::process::Stdio::null()), @r###"
            success: true
            exit_code: 0
            ----- stdout -----

            ----- stderr -----
            Resolved 13 packages in [TIME]
            "###
        );
    };

    insta::allow_duplicates! {
        // Baseline: a healthy cache resolves cleanly.
        check();

        // Replace some cache files with invalid contents
        for file in &cache_files {
            let file = context.cache_dir.join(file);
            if !file.is_file() {
                bail!("Missing cache file {}", file.user_display());
            }
            fs_err::write(file, "I borken you cache")?;
        }

        // Corrupted (undeserializable) cache entries must be tolerated.
        check();

        #[cfg(unix)]
        {
            use fs_err::os::unix::fs::OpenOptionsExt;

            // Make some files unreadable, so that the read instead of the deserialization will fail
            for file in cache_files {
                let file = context.cache_dir.join(file);
                if !file.is_file() {
                    bail!("Missing cache file {}", file.user_display());
                }

                fs_err::OpenOptions::new()
                    .create(true)
                    .write(true)
                    .mode(0o000)
                    .open(file)?;
            }
        }

        // Unreadable cache entries must also be tolerated.
        check();

        Ok(())
    }
}

/// Resolve a distribution from an HTML-only registry.
///
/// Uses the PyTorch CPU mirror, which serves a PEP 503 "simple" HTML index rather
/// than a JSON one, pinned to a fixed `--exclude-newer` date for reproducibility.
#[test]
#[cfg(not(target_env = "musl"))] // No musllinux wheels in the torch index
fn compile_html() -> Result<()> {
    let context = TestContext::new("3.12").with_exclude_newer("2025-01-30T00:00:00Z");
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str("jinja2<=3.1.2")?;

    uv_snapshot!(context.filters(), context.pip_compile()
        .arg("requirements.in")
        .arg("--index-url")
        .arg("https://astral-sh.github.io/pytorch-mirror/whl/cpu"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.in
    jinja2==3.1.2
        # via -r requirements.in
    markupsafe==2.1.5
        # via jinja2

    ----- stderr -----
    Resolved 2 packages in [TIME]
    "###
    );

    Ok(())
}

/// Resolve a distribution from a registry with and without a trailing slash.
///
/// Both forms of the index URL must behave identically and produce the same
/// resolution.
#[test]
fn trailing_slash() -> Result<()> {
    let context = TestContext::new("3.12");
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str("jinja2")?;

    // Without trailing slash.
    uv_snapshot!(context.filters(), context.pip_compile()
        .arg("requirements.in")
        .arg("--index-url")
        .arg("https://test.pypi.org/simple"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.in
    jinja2==3.1.3
        # via -r requirements.in
    markupsafe==2.1.5
        # via jinja2

    ----- stderr -----
    Resolved 2 packages in [TIME]
    "###
    );

    // With trailing slash — same result expected.
    uv_snapshot!(context.filters(), context.pip_compile()
        .arg("requirements.in")
        .arg("--index-url")
        .arg("https://test.pypi.org/simple/"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.in
    jinja2==3.1.3
        # via -r requirements.in
    markupsafe==2.1.5
        # via jinja2

    ----- stderr -----
    Resolved 2 packages in [TIME]
    "###
    );

    Ok(())
}

/// Resolve a project without a `pyproject.toml`, using the PEP 517 build backend.
///
/// `flake8-6.0.0.tar.gz` is a legacy source distribution; building it to extract
/// metadata should succeed and yield its three direct dependencies.
#[test]
fn compile_legacy_sdist_pep_517() -> Result<()> {
    let context = TestContext::new("3.12");
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str("flake8 @ https://files.pythonhosted.org/packages/66/53/3ad4a3b74d609b3b9008a10075c40e7c8909eae60af53623c3888f7a529a/flake8-6.0.0.tar.gz")?;

    uv_snapshot!(context.filters(), context.pip_compile()
            .arg("requirements.in"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.in
    flake8 @ https://files.pythonhosted.org/packages/66/53/3ad4a3b74d609b3b9008a10075c40e7c8909eae60af53623c3888f7a529a/flake8-6.0.0.tar.gz
        # via -r requirements.in
    mccabe==0.7.0
        # via flake8
    pycodestyle==2.10.0
        # via flake8
    pyflakes==3.0.1
        # via flake8

    ----- stderr -----
    Resolved 4 packages in [TIME]
    "###
    );

    Ok(())
}

/// Include hashes from the registry in the generated output.
///
/// With `--generate-hashes`, every pinned package is followed by `--hash=sha256:…`
/// continuation lines listing all known artifact hashes from the index.
#[test]
fn generate_hashes_registry() -> Result<()> {
    let context = TestContext::new("3.12");
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str("anyio==4.0.0")?;

    uv_snapshot!(context.filters(), context.pip_compile()
        .arg("requirements.in")
        .arg("--generate-hashes"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.in --generate-hashes
    anyio==4.0.0 \
        --hash=sha256:cfdb2b588b9fc25ede96d8db56ed50848b0b649dca3dd1df0b11f683bb9e0b5f \
        --hash=sha256:f7ed51751b2c2add651e5747c891b47e26d2a21be5d32d9311dfe9692f3e5d7a
        # via -r requirements.in
    idna==3.6 \
        --hash=sha256:9ecdbbd083b06798ae1e86adcbfe8ab1479cf864e4ee30fe4e46a003d12491ca \
        --hash=sha256:c05567e9c24a6b9faaa835c4821bad0590fbb9d5779e7caa6e1cc4978e7eb24f
        # via anyio
    sniffio==1.3.1 \
        --hash=sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2 \
        --hash=sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc
        # via anyio

    ----- stderr -----
    Resolved 3 packages in [TIME]
    "###
    );

    Ok(())
}

/// Include the hash of a direct-URL *source distribution* in the generated output.
///
/// The URL requirement gets a single `--hash` (the hash of that one archive),
/// while registry dependencies still get one hash per available file.
#[test]
fn generate_hashes_source_distribution_url() -> Result<()> {
    let context = TestContext::new("3.12");
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str("anyio @ https://files.pythonhosted.org/packages/2d/b8/7333d87d5f03247215d86a86362fd3e324111788c6cdd8d2e6196a6ba833/anyio-4.2.0.tar.gz")?;

    uv_snapshot!(context.filters(), context.pip_compile()
        .arg("requirements.in")
        .arg("--generate-hashes"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.in --generate-hashes
    anyio @ https://files.pythonhosted.org/packages/2d/b8/7333d87d5f03247215d86a86362fd3e324111788c6cdd8d2e6196a6ba833/anyio-4.2.0.tar.gz \
        --hash=sha256:e1875bb4b4e2de1669f4bc7869b6d3f54231cdced71605e6e64c9be77e3be50f
        # via -r requirements.in
    idna==3.6 \
        --hash=sha256:9ecdbbd083b06798ae1e86adcbfe8ab1479cf864e4ee30fe4e46a003d12491ca \
        --hash=sha256:c05567e9c24a6b9faaa835c4821bad0590fbb9d5779e7caa6e1cc4978e7eb24f
        # via anyio
    sniffio==1.3.1 \
        --hash=sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2 \
        --hash=sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc
        # via anyio

    ----- stderr -----
    Resolved 3 packages in [TIME]
    "###
    );

    Ok(())
}

/// Include the hash of a direct-URL *built distribution* (wheel) in the
/// generated output.
///
/// As with the sdist variant above in spirit: the URL requirement gets a single
/// `--hash` for the referenced wheel; registry dependencies get one per file.
#[test]
fn generate_hashes_built_distribution_url() -> Result<()> {
    let context = TestContext::new("3.12");
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str("anyio @ https://files.pythonhosted.org/packages/14/fd/2f20c40b45e4fb4324834aea24bd4afdf1143390242c0b33774da0e2e34f/anyio-4.3.0-py3-none-any.whl")?;

    uv_snapshot!(context.filters(), context.pip_compile()
        .arg("requirements.in")
        .arg("--generate-hashes"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.in --generate-hashes
    anyio @ https://files.pythonhosted.org/packages/14/fd/2f20c40b45e4fb4324834aea24bd4afdf1143390242c0b33774da0e2e34f/anyio-4.3.0-py3-none-any.whl \
        --hash=sha256:048e05d0f6caeed70d731f3db756d35dcc1f35747c8c403364a8332c630441b8
        # via -r requirements.in
    idna==3.6 \
        --hash=sha256:9ecdbbd083b06798ae1e86adcbfe8ab1479cf864e4ee30fe4e46a003d12491ca \
        --hash=sha256:c05567e9c24a6b9faaa835c4821bad0590fbb9d5779e7caa6e1cc4978e7eb24f
        # via anyio
    sniffio==1.3.1 \
        --hash=sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2 \
        --hash=sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc
        # via anyio

    ----- stderr -----
    Resolved 3 packages in [TIME]
    "###
    );

    Ok(())
}

/// Given a VCS dependency, include hashes for its dependencies, but not the repository itself.
///
/// The snapshot shows the git requirement pinned to a full commit SHA with no
/// `--hash` line, while registry dependencies still carry hashes.
#[test]
#[cfg(feature = "git")]
fn generate_hashes_git() -> Result<()> {
    let context = TestContext::new("3.12");
    let requirements_in = context.temp_dir.child("requirements.in");
    // The `@4.3.0` tag is resolved to an exact commit in the output.
    requirements_in.write_str("anyio @ git+https://github.com/agronholm/anyio@4.3.0")?;

    uv_snapshot!(context.filters(), context.pip_compile()
        .arg("requirements.in")
        .arg("--generate-hashes"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.in --generate-hashes
    anyio @ git+https://github.com/agronholm/anyio@437a7e310925a962cab4a58fcd2455fbcd578d51
        # via -r requirements.in
    idna==3.6 \
        --hash=sha256:9ecdbbd083b06798ae1e86adcbfe8ab1479cf864e4ee30fe4e46a003d12491ca \
        --hash=sha256:c05567e9c24a6b9faaa835c4821bad0590fbb9d5779e7caa6e1cc4978e7eb24f
        # via anyio
    sniffio==1.3.1 \
        --hash=sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2 \
        --hash=sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc
        # via anyio

    ----- stderr -----
    Resolved 3 packages in [TIME]
    "###
    );

    Ok(())
}

/// Given an unnamed URL, include hashes for the URL and its dependencies.
///
/// The bare URL (no `name @` prefix) is resolved to `anyio @ <url>` in the
/// output: the package name is recovered from the wheel filename.
#[test]
fn generate_hashes_unnamed_url() -> Result<()> {
    let context = TestContext::new("3.12");
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str("https://files.pythonhosted.org/packages/14/fd/2f20c40b45e4fb4324834aea24bd4afdf1143390242c0b33774da0e2e34f/anyio-4.3.0-py3-none-any.whl")?;

    uv_snapshot!(context.filters(), context.pip_compile()
        .arg("requirements.in")
        .arg("--generate-hashes"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.in --generate-hashes
    anyio @ https://files.pythonhosted.org/packages/14/fd/2f20c40b45e4fb4324834aea24bd4afdf1143390242c0b33774da0e2e34f/anyio-4.3.0-py3-none-any.whl \
        --hash=sha256:048e05d0f6caeed70d731f3db756d35dcc1f35747c8c403364a8332c630441b8
        # via -r requirements.in
    idna==3.6 \
        --hash=sha256:9ecdbbd083b06798ae1e86adcbfe8ab1479cf864e4ee30fe4e46a003d12491ca \
        --hash=sha256:c05567e9c24a6b9faaa835c4821bad0590fbb9d5779e7caa6e1cc4978e7eb24f
        # via anyio
    sniffio==1.3.1 \
        --hash=sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2 \
        --hash=sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc
        # via anyio

    ----- stderr -----
    Resolved 3 packages in [TIME]
    "###
    );

    Ok(())
}

/// Given a local directory, include hashes for its dependencies, but not the directory itself.
///
/// The directory requirement is echoed verbatim with no `--hash` line; its
/// resolved registry dependencies carry hashes as usual.
#[test]
fn generate_hashes_local_directory() -> Result<()> {
    // NOTE(review): `_context` is never read — presumably kept deliberately
    // (e.g. to hold an extra temp dir alive); confirm before removing.
    let _context = TestContext::new("3.12");

    let context = TestContext::new("3.12");
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str(indoc! {r"
        ../../scripts/packages/poetry_editable
        "
    })?;

    // Run from the test process's working directory so the relative fixture
    // path (`../../scripts/...`) resolves; the requirements file is passed by
    // absolute path instead.
    uv_snapshot!(context.filters(), context.pip_compile()
        .arg(requirements_in.path())
        .arg("--generate-hashes")
        .current_dir(current_dir()?), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] [TEMP_DIR]/requirements.in --generate-hashes
    anyio==4.3.0 \
        --hash=sha256:048e05d0f6caeed70d731f3db756d35dcc1f35747c8c403364a8332c630441b8 \
        --hash=sha256:f75253795a87df48568485fd18cdd2a3fa5c4f7c5be8e5e36637733fce06fed6
        # via poetry-editable
    idna==3.6 \
        --hash=sha256:9ecdbbd083b06798ae1e86adcbfe8ab1479cf864e4ee30fe4e46a003d12491ca \
        --hash=sha256:c05567e9c24a6b9faaa835c4821bad0590fbb9d5779e7caa6e1cc4978e7eb24f
        # via anyio
    ../../scripts/packages/poetry_editable
        # via -r [TEMP_DIR]/requirements.in
    sniffio==1.3.1 \
        --hash=sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2 \
        --hash=sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc
        # via anyio

    ----- stderr -----
    Resolved 4 packages in [TIME]
    "###);

    Ok(())
}

/// Given an editable dependency, include hashes for its dependencies, but not the directory itself.
///
/// The `-e` requirement is echoed verbatim with no `--hash` line; its resolved
/// registry dependencies carry hashes as usual.
#[test]
fn generate_hashes_editable() -> Result<()> {
    // NOTE(review): `_context` is never read — presumably kept deliberately
    // (e.g. to hold an extra temp dir alive); confirm before removing.
    let _context = TestContext::new("3.12");

    let context = TestContext::new("3.12");
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str(indoc! {r"
        -e ../../scripts/packages/poetry_editable
        "
    })?;

    // Run from the test process's working directory so the relative fixture
    // path resolves; the requirements file is passed by absolute path.
    uv_snapshot!(context.filters(), context.pip_compile()
        .arg(requirements_in.path())
        .arg("--generate-hashes")
        .current_dir(current_dir()?), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] [TEMP_DIR]/requirements.in --generate-hashes
    -e ../../scripts/packages/poetry_editable
        # via -r [TEMP_DIR]/requirements.in
    anyio==4.3.0 \
        --hash=sha256:048e05d0f6caeed70d731f3db756d35dcc1f35747c8c403364a8332c630441b8 \
        --hash=sha256:f75253795a87df48568485fd18cdd2a3fa5c4f7c5be8e5e36637733fce06fed6
        # via poetry-editable
    idna==3.6 \
        --hash=sha256:9ecdbbd083b06798ae1e86adcbfe8ab1479cf864e4ee30fe4e46a003d12491ca \
        --hash=sha256:c05567e9c24a6b9faaa835c4821bad0590fbb9d5779e7caa6e1cc4978e7eb24f
        # via anyio
    sniffio==1.3.1 \
        --hash=sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2 \
        --hash=sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc
        # via anyio

    ----- stderr -----
    Resolved 4 packages in [TIME]
    "###);

    Ok(())
}

/// Include hashes from a `--find-links` *directory* in the generated output.
///
/// `tqdm` resolves from the local `scripts/links` fixture directory in the
/// workspace; the single matching file yields a single `--hash` entry.
#[test]
fn generate_hashes_find_links_directory() -> Result<()> {
    let context = TestContext::new("3.12");
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str("tqdm")?;

    uv_snapshot!(context.filters(), context.pip_compile()
        .arg("requirements.in")
        .arg("--generate-hashes")
        .arg("--find-links")
        .arg(context.workspace_root.join("scripts").join("links")), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.in --generate-hashes
    tqdm==1000.0.0 \
        --hash=sha256:a34996d4bd5abb2336e14ff0a2d22b92cfd0f0ed344e6883041ce01953276a13
        # via -r requirements.in

    ----- stderr -----
    Resolved 1 package in [TIME]
    "###
    );

    Ok(())
}

/// Include hashes from a `--find-links` *URL* in the generated output.
///
/// `--no-index` disables the registry so `tqdm` can only come from the
/// flat-HTML find-links page; the exclude-newer cutoff pins the resolution.
#[test]
fn generate_hashes_find_links_url() -> Result<()> {
    let context = TestContext::new("3.12").with_exclude_newer("2025-01-30T00:00:00Z");
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str("tqdm")?;

    uv_snapshot!(context.filters(), context.pip_compile()
        .arg("requirements.in")
        .arg("--generate-hashes")
        .arg("--no-index")
        .arg("--find-links")
        .arg("https://astral-sh.github.io/pytorch-mirror/whl/torch_stable.html"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.in --generate-hashes --no-index
    tqdm==4.64.1 \
        --hash=sha256:6fee160d6ffcd1b1c68c65f14c829c22832bc401726335ce92c52d395944a6a1
        # via -r requirements.in

    ----- stderr -----
    Resolved 1 package in [TIME]
    "###
    );

    Ok(())
}

/// Compile using `--find-links` with a local directory.
///
/// Mixes three requirement kinds: a package only in the fixture directory
/// (`tqdm==1000.0.0`), a registry package (`numpy`), and a direct-URL wheel
/// (`werkzeug`), all resolving together.
#[test]
fn find_links_directory() -> Result<()> {
    let context = TestContext::new("3.12");
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str(indoc! {r"
        tqdm
        numpy
        werkzeug @ https://files.pythonhosted.org/packages/c3/fc/254c3e9b5feb89ff5b9076a23218dafbc99c96ac5941e900b71206e6313b/werkzeug-3.0.1-py3-none-any.whl
    "})?;

    uv_snapshot!(context.filters(), context.pip_compile()
            .arg("requirements.in")
            .arg("--find-links")
            .arg(context.workspace_root.join("scripts").join("links")), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.in
    markupsafe==2.1.5
        # via werkzeug
    numpy==1.26.4
        # via -r requirements.in
    tqdm==1000.0.0
        # via -r requirements.in
    werkzeug @ https://files.pythonhosted.org/packages/c3/fc/254c3e9b5feb89ff5b9076a23218dafbc99c96ac5941e900b71206e6313b/werkzeug-3.0.1-py3-none-any.whl
        # via -r requirements.in

    ----- stderr -----
    Resolved 4 packages in [TIME]
    "###);

    Ok(())
}

/// Compile using `--find-links` with a URL by resolving `tqdm` from the PyTorch wheels index.
///
/// `--no-index` ensures the package can only come from the find-links page;
/// the exclude-newer cutoff keeps the resolved version stable.
#[test]
fn find_links_url() -> Result<()> {
    let context = TestContext::new("3.12").with_exclude_newer("2025-01-30T00:00:00Z");
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str("tqdm")?;

    uv_snapshot!(context.filters(), context.pip_compile()
            .arg("requirements.in")
            .arg("--no-index")
            .arg("--find-links")
            .arg("https://astral-sh.github.io/pytorch-mirror/whl/torch_stable.html"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.in --no-index
    tqdm==4.64.1
        # via -r requirements.in

    ----- stderr -----
    Resolved 1 package in [TIME]
    "###
    );

    Ok(())
}

/// Compile using `--find-links` with a URL passed via an environment variable.
///
/// The requirements file itself contains `--find-links ${URL}`; the `${URL}`
/// placeholder is expanded from the environment when the file is parsed.
#[test]
fn find_links_env_var() -> Result<()> {
    let context = TestContext::new("3.12");
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str("tqdm\n--find-links ${URL}")?;

    uv_snapshot!(context.filters(), context.pip_compile()
            .arg("requirements.in")
            .arg("--no-index")
            .env(EnvVars::URL, "https://astral-sh.github.io/pytorch-mirror/whl/torch_stable.html"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.in --no-index
    tqdm==4.64.1
        # via -r requirements.in

    ----- stderr -----
    Resolved 1 package in [TIME]
    "###
    );

    Ok(())
}

/// Compile using `--find-links` with a URL by resolving `tqdm` from the PyTorch wheels index,
/// with the URL itself provided in a `requirements.txt` file.
///
/// `--emit-find-links` echoes the `-f` source back into the compiled output.
#[test]
fn find_links_requirements_txt() -> Result<()> {
    let context = TestContext::new("3.12");
    let requirements_in = context.temp_dir.child("requirements.in");
    // `-f` is the short form of `--find-links`, supplied inside the file.
    requirements_in
        .write_str("-f https://astral-sh.github.io/pytorch-mirror/whl/torch_stable.html\ntqdm")?;

    uv_snapshot!(context.filters(), context.pip_compile()
            .arg("requirements.in")
            .arg("--no-index")
            .arg("--emit-find-links"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.in --no-index --emit-find-links
    --find-links https://astral-sh.github.io/pytorch-mirror/whl/torch_stable.html

    tqdm==4.64.1
        # via -r requirements.in

    ----- stderr -----
    Resolved 1 package in [TIME]
    "###
    );

    Ok(())
}

/// Compile using the `UV_FIND_LINKS` environment variable.
///
/// Equivalent to passing `--find-links` on the command line; note the header in
/// the output does not record the env-var-provided source.
#[test]
fn find_links_uv_env_var() -> Result<()> {
    let context = TestContext::new("3.12");
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str("tqdm")?;

    uv_snapshot!(context.filters(), context.pip_compile()
            .arg("requirements.in")
            .arg("--no-index")
            .env(EnvVars::UV_FIND_LINKS, "https://astral-sh.github.io/pytorch-mirror/whl/torch_stable.html"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.in --no-index
    tqdm==4.64.1
        # via -r requirements.in

    ----- stderr -----
    Resolved 1 package in [TIME]
    "###
    );

    Ok(())
}

/// `extras==0.0.2` fails to build (i.e., it always throws). Since `extras==0.0.1` is pinned, we
/// should never even attempt to build `extras==0.0.2`, despite an unpinned `extras[dev]`
/// requirement.
///
/// The test passing at all is the assertion: if the resolver eagerly built
/// 0.0.2 for the extras lookup, the command would fail.
#[test]
fn avoid_irrelevant_extras() -> Result<()> {
    let context = TestContext::new("3.12");
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str(indoc! {r"
        extras==0.0.1
        extras[dev]
    "})?;

    // `extras` comes from the local `scripts/links` fixture directory.
    uv_snapshot!(context.filters(), context.pip_compile()
            .arg("requirements.in")
            .arg("--find-links")
            .arg(context.workspace_root.join("scripts").join("links")), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.in
    anyio==4.3.0
        # via extras
    extras==0.0.1
        # via -r requirements.in
    idna==3.6
        # via anyio
    iniconfig==2.0.0
        # via extras
    sniffio==1.3.1
        # via anyio

    ----- stderr -----
    Resolved 5 packages in [TIME]
    "###);

    Ok(())
}

/// `extras==0.0.2` fails to build (i.e., it always throws). `extras==0.0.1` is the only version
/// that resolves the constraints, but if we don't visit `example[test]` prior to `extras==0.0.2`,
/// we'll end up with a broken build.
///
/// Exercises recursive extras: `example[coverage]` depends on `example[test]`,
/// whose `extras<0.0.2` bound must be seen before `extras` is built.
#[test]
fn avoid_irrelevant_recursive_extras() -> Result<()> {
    let context = TestContext::new("3.12");

    // Create an editable package with an optional URL dependency.
    let pyproject_toml = context.temp_dir.child("pyproject.toml");
    pyproject_toml.write_str(
        r#"[project]
name = "example"
version = "0.0.0"
dependencies = []
requires-python = '>=3.8'

[project.optional-dependencies]
test = ["extras<0.0.2"]
coverage = ["example[test]", "extras>=0.0.1,<=0.0.2"]
"#,
    )?;

    // Write to a requirements file.
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str("-e .[test,coverage]")?;

    // `extras` comes from the local `scripts/links` fixture directory.
    uv_snapshot!(context.filters(), context.pip_compile()
        .arg("requirements.in")
        .arg("--find-links")
        .arg(context.workspace_root.join("scripts").join("links")), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.in
    -e .
        # via -r requirements.in
    extras==0.0.1
        # via example
    iniconfig==2.0.0
        # via extras

    ----- stderr -----
    Resolved 3 packages in [TIME]
    "###
    );

    Ok(())
}

/// Respect `requires-python` when prefetching.
///
/// `voluptuous==0.15.1` requires Python 3.9 or later, so we should resolve to an earlier version
/// and avoiding building 0.15.1 at all.
#[test]
fn requires_python_prefetch() -> Result<()> {
    // Python 3.8 context: 0.15.1 is incompatible and must be skipped.
    let context = TestContext::new("3.8").with_exclude_newer("2025-01-01T00:00:00Z");
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str("voluptuous<=0.15.1")?;

    uv_snapshot!(context
        .pip_compile()
        .arg("requirements.in"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.in
    voluptuous==0.14.2
        # via -r requirements.in

    ----- stderr -----
    Resolved 1 package in [TIME]
    "###);

    Ok(())
}

/// Use an existing resolution for `black==23.10.1`, with stale versions of `click` and `pathspec`.
/// Nothing should change.
///
/// Without `--upgrade`, the pins in the pre-existing output file are treated as
/// preferences, so the stale versions are kept verbatim.
#[test]
fn upgrade_none() -> Result<()> {
    let context = TestContext::new("3.12");
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str("black==23.10.1")?;

    // Pre-seed the output file with an older (but still valid) resolution.
    let requirements_txt = context.temp_dir.child("requirements.txt");
    requirements_txt.write_str(indoc! {r"
        black==23.10.1
        click==8.1.2
            # via black
        mypy-extensions==1.0.0
            # via black
        packaging==23.2
            # via black
        pathspec==0.11.0
            # via black
        platformdirs==4.0.0
            # via black
    "})?;

    uv_snapshot!(context.filters(), context.pip_compile()
        .arg("requirements.in")
        .arg("--output-file")
        .arg("requirements.txt"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.in --output-file requirements.txt
    black==23.10.1
        # via -r requirements.in
    click==8.1.2
        # via black
    mypy-extensions==1.0.0
        # via black
    packaging==23.2
        # via black
    pathspec==0.11.0
        # via black
    platformdirs==4.0.0
        # via black

    ----- stderr -----
    Resolved 6 packages in [TIME]
    "###
    );

    Ok(())
}

/// Use an existing resolution for `black==23.10.1`, with stale versions of `click` and `pathspec`.
/// Both packages should be upgraded.
///
/// `--upgrade` discards all pins from the existing output file, so every
/// transitive dependency moves to its latest compatible version.
#[test]
fn upgrade_all() -> Result<()> {
    let context = TestContext::new("3.12");
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str("black==23.10.1")?;

    // Pre-seed the output file with an older resolution (including its header).
    let requirements_txt = context.temp_dir.child("requirements.txt");
    requirements_txt.write_str(indoc! {r"
        # This file was autogenerated by uv via the following command:
        #    uv pip compile requirements.in --python-version 3.12 --cache-dir [CACHE_DIR]
        black==23.10.1
        click==8.1.2
            # via black
        mypy-extensions==1.0.0
            # via black
        packaging==23.2
            # via black
        pathspec==0.11.0
            # via black
        platformdirs==4.0.0
            # via black
    "})?;

    uv_snapshot!(context.filters(), context.pip_compile()
        .arg("requirements.in")
        .arg("--output-file")
        .arg("requirements.txt")
        .arg("--upgrade"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.in --output-file requirements.txt
    black==23.10.1
        # via -r requirements.in
    click==8.1.7
        # via black
    mypy-extensions==1.0.0
        # via black
    packaging==24.0
        # via black
    pathspec==0.12.1
        # via black
    platformdirs==4.2.0
        # via black

    ----- stderr -----
    Resolved 6 packages in [TIME]
    "###
    );

    Ok(())
}

/// Use an existing resolution for `black==23.10.1`, with stale versions of `click` and `pathspec`.
/// Only `click` should be upgraded.
///
/// `--upgrade-package click` discards only the `click` pin from the existing
/// output file; other stale pins (e.g. `pathspec==0.11.0`) are preserved.
#[test]
fn upgrade_package() -> Result<()> {
    let context = TestContext::new("3.12");
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str("black==23.10.1")?;

    // Pre-seed the output file with an older resolution.
    let requirements_txt = context.temp_dir.child("requirements.txt");
    requirements_txt.write_str(indoc! {r"
        # This file was autogenerated by uv via the following command:
        #    uv pip compile requirements.in --python-version 3.12 --cache-dir [CACHE_DIR]
        black==23.10.1
        click==8.1.2
            # via black
        mypy-extensions==1.0.0
            # via black
        packaging==23.2
            # via black
        pathspec==0.11.0
            # via black
        platformdirs==4.0.0
            # via black
    "})?;

    uv_snapshot!(context.filters(), context.pip_compile()
            .arg("requirements.in")
            .arg("--output-file")
            .arg("requirements.txt")
            .arg("--upgrade-package")
            .arg("click"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.in --output-file requirements.txt
    black==23.10.1
        # via -r requirements.in
    click==8.1.7
        # via black
    mypy-extensions==1.0.0
        # via black
    packaging==23.2
        # via black
    pathspec==0.11.0
        # via black
    platformdirs==4.0.0
        # via black

    ----- stderr -----
    Resolved 6 packages in [TIME]
    "###
    );

    Ok(())
}

/// Upgrade a package with a constraint on the allowed upgrade.
///
/// First run: `--upgrade-package "iniconfig<2"` upgrades only within the bound
/// (1.0.0 -> 1.1.1). Second run: unconstrained `-P iniconfig` (short form of
/// `--upgrade-package`) moves it to the latest version (2.0.0).
#[test]
fn upgrade_constraint() -> Result<()> {
    let context = TestContext::new("3.12");
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str("iniconfig")?;

    // Pre-seed the output file with an old pin to upgrade from.
    let requirements_txt = context.temp_dir.child("requirements.txt");
    requirements_txt.write_str(indoc! {r"
        # This file was autogenerated by uv via the following command:
        #    uv pip compile requirements.in --python-version 3.12 --cache-dir [CACHE_DIR]
        iniconfig==1.0.0
    "})?;

    uv_snapshot!(context.filters(), context.pip_compile()
            .arg("requirements.in")
            .arg("--output-file")
            .arg("requirements.txt")
            .arg("--upgrade-package")
            .arg("iniconfig<2"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.in --output-file requirements.txt
    iniconfig==1.1.1
        # via -r requirements.in

    ----- stderr -----
    Resolved 1 package in [TIME]
    "###
    );

    uv_snapshot!(context.filters(), context.pip_compile()
            .arg("requirements.in")
            .arg("--output-file")
            .arg("requirements.txt")
            .arg("-P")
            .arg("iniconfig"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.in --output-file requirements.txt
    iniconfig==2.0.0
        # via -r requirements.in

    ----- stderr -----
    Resolved 1 package in [TIME]
    "###
    );

    Ok(())
}

/// Attempt to resolve a requirement at a path that doesn't exist.
///
/// Uses a platform-specific `file://` URL and a filter to normalize the Windows
/// drive prefix so one snapshot covers both platforms.
#[test]
fn missing_path_requirement() -> Result<()> {
    let context = TestContext::new("3.12");
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str(if cfg!(windows) {
        "anyio @ file://C:/tmp/anyio-3.7.0.tar.gz"
    } else {
        "anyio @ file:///tmp/anyio-3.7.0.tar.gz"
    })?;

    // Strip the `/C:/` drive prefix so the Windows error message matches the
    // Unix snapshot below.
    let filters: Vec<_> = [(r"/C:/", "/")]
        .into_iter()
        .chain(context.filters())
        .collect();

    uv_snapshot!(filters, context.pip_compile()
            .arg("requirements.in"), @r###"
    success: false
    exit_code: 2
    ----- stdout -----

    ----- stderr -----
    error: Distribution not found at: file://tmp/anyio-3.7.0.tar.gz
    "###);

    Ok(())
}

/// Attempt to resolve an editable requirement that points at an archive file
/// (which also doesn't exist).
///
/// The asserted error is the "must refer to a local directory, not an archive"
/// rejection — editables must be directories, so this fails regardless of
/// whether the path exists.
#[test]
fn missing_editable_file() -> Result<()> {
    let context = TestContext::new("3.12");
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str("-e foo/anyio-3.7.0.tar.gz")?;

    uv_snapshot!(context.filters(), context.pip_compile()
            .arg("requirements.in"), @r###"
    success: false
    exit_code: 2
    ----- stdout -----

    ----- stderr -----
    error: Unsupported editable requirement in `requirements.in`
      Caused by: Editable must refer to a local directory, not an archive: `file://[TEMP_DIR]/foo/anyio-3.7.0.tar.gz`
    "###);

    Ok(())
}

/// Attempt to resolve an editable requirement at a directory path that doesn't exist.
///
/// The relative path passes the editable-must-be-a-directory check (no archive
/// extension), so the failure is "Distribution not found" at resolve time.
#[test]
fn missing_editable_directory() -> Result<()> {
    let context = TestContext::new("3.12");
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str("-e foo/bar")?;

    uv_snapshot!(context.filters(), context.pip_compile()
            .arg("requirements.in"), @r###"
    success: false
    exit_code: 2
    ----- stdout -----

    ----- stderr -----
    error: Distribution not found at: file://[TEMP_DIR]/foo/bar
    "###);

    Ok(())
}

/// Attempt to resolve a URL requirement without a package name. The package name can be extracted
/// from the URL.
///
/// The bare wheel URL is resolved to `flask @ <url>` in the output — the name
/// comes from the wheel filename — and its dependency tree is resolved from
/// the registry.
#[test]
fn unnamed_requirement_with_package_name() -> Result<()> {
    let context = TestContext::new("3.12");
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str("https://files.pythonhosted.org/packages/36/42/015c23096649b908c809c69388a805a571a3bea44362fe87e33fc3afa01f/flask-3.0.0-py3-none-any.whl")?;

    uv_snapshot!(context.filters(), context.pip_compile()
            .arg("requirements.in"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.in
    blinker==1.7.0
        # via flask
    click==8.1.7
        # via flask
    flask @ https://files.pythonhosted.org/packages/36/42/015c23096649b908c809c69388a805a571a3bea44362fe87e33fc3afa01f/flask-3.0.0-py3-none-any.whl
        # via -r requirements.in
    itsdangerous==2.1.2
        # via flask
    jinja2==3.1.3
        # via flask
    markupsafe==2.1.5
        # via
        #   jinja2
        #   werkzeug
    werkzeug==3.0.1
        # via flask

    ----- stderr -----
    Resolved 7 packages in [TIME]
    "###
    );

    Ok(())
}

/// Exclude annotations from the output.
///
/// With `--no-annotate`, the `# via ...` provenance comments are omitted; only
/// the pinned requirements remain.
#[test]
fn no_annotate() -> Result<()> {
    let context = TestContext::new("3.12");
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str("black==23.10.1")?;

    uv_snapshot!(context.filters(), context.pip_compile()
            .arg("requirements.in")
            .arg("--no-annotate"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.in --no-annotate
    black==23.10.1
    click==8.1.7
    mypy-extensions==1.0.0
    packaging==24.0
    pathspec==0.12.1
    platformdirs==4.2.0

    ----- stderr -----
    Resolved 6 packages in [TIME]
    "###
    );

    Ok(())
}

/// Exclude header from the output.
///
/// With `--no-header`, the "autogenerated by uv" comment banner is omitted;
/// the `# via ...` annotations are still emitted.
#[test]
fn no_header() -> Result<()> {
    let context = TestContext::new("3.12");
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str("black==23.10.1")?;

    uv_snapshot!(context.filters(), context.pip_compile()
            .arg("requirements.in")
            .arg("--no-header"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    black==23.10.1
        # via -r requirements.in
    click==8.1.7
        # via black
    mypy-extensions==1.0.0
        # via black
    packaging==24.0
        # via black
    pathspec==0.12.1
        # via black
    platformdirs==4.2.0
        # via black

    ----- stderr -----
    Resolved 6 packages in [TIME]
    "###
    );

    Ok(())
}

/// Include custom compile command in the header.
#[test]
fn custom_compile_command() -> Result<()> {
    let context = TestContext::new("3.12");
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str("black==23.10.1")?;

    // With the `--custom-compile-command` flag, the header should echo the custom
    // command instead of the actual `uv pip compile ...` invocation.
    uv_snapshot!(context.filters(), context.pip_compile()
            .arg("requirements.in")
            .arg("--custom-compile-command")
            .arg("./custom-uv-compile.sh"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    ./custom-uv-compile.sh
    black==23.10.1
        # via -r requirements.in
    click==8.1.7
        # via black
    mypy-extensions==1.0.0
        # via black
    packaging==24.0
        # via black
    pathspec==0.12.1
        # via black
    platformdirs==4.2.0
        # via black

    ----- stderr -----
    Resolved 6 packages in [TIME]
    "###
    );

    // with env var
    // The `UV_CUSTOM_COMPILE_COMMAND` environment variable should behave identically
    // to the flag.
    uv_snapshot!(context.filters(), context.pip_compile()
            .arg("requirements.in")
            .env(EnvVars::UV_CUSTOM_COMPILE_COMMAND, "./custom-uv-compile.sh"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    ./custom-uv-compile.sh
    black==23.10.1
        # via -r requirements.in
    click==8.1.7
        # via black
    mypy-extensions==1.0.0
        # via black
    packaging==24.0
        # via black
    pathspec==0.12.1
        # via black
    platformdirs==4.2.0
        # via black

    ----- stderr -----
    Resolved 6 packages in [TIME]
    "###
    );

    Ok(())
}

/// pip-compile's `--allow-unsafe` is accepted for compatibility but has no effect;
/// uv should warn about the redundant flag and resolve normally.
#[test]
fn allow_unsafe() -> Result<()> {
    let context = TestContext::new("3.12");

    let requirements_file = context.temp_dir.child("requirements.in");
    requirements_file.write_str("werkzeug==3.0.1")?;

    uv_snapshot!(context.filters(), context.pip_compile()
            .args(["requirements.in", "--allow-unsafe"]), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.in --allow-unsafe
    markupsafe==2.1.5
        # via werkzeug
    werkzeug==3.0.1
        # via -r requirements.in

    ----- stderr -----
    warning: pip-compile's `--allow-unsafe` has no effect (uv can safely pin `pip` and other packages)
    Resolved 2 packages in [TIME]
    "###
    );

    Ok(())
}

/// pip-compile's `--resolver=legacy` is unsupported: the command should hard-fail
/// with exit code 2 rather than fall back to a different resolution strategy.
#[test]
fn resolver_legacy() -> Result<()> {
    let context = TestContext::new("3.12");

    let requirements_file = context.temp_dir.child("requirements.in");
    requirements_file.write_str("werkzeug==3.0.1")?;

    uv_snapshot!(context.filters(), context.pip_compile()
            .args(["requirements.in", "--resolver=legacy"]), @r###"
    success: false
    exit_code: 2
    ----- stdout -----

    ----- stderr -----
    error: pip-compile's `--resolver=legacy` is unsupported (uv always backtracks)
    "###
    );

    Ok(())
}

/// Avoid including the `--index` and `-i` flags in the header.
#[test]
fn hide_index_urls() -> Result<()> {
    let context = TestContext::new("3.12");
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str("iniconfig")?;

    // Every spelling of an index flag below should be stripped from the generated
    // header (`uv pip compile --cache-dir [CACHE_DIR] requirements.in`), since the
    // header must not leak index URLs unless `--emit-index-url` is passed.

    // `--default-index <url>` (space-separated).
    uv_snapshot!(context.filters(), context.pip_compile()
            .arg("requirements.in")
            .arg("--default-index")
            .arg("https://test.pypi.org/simple/"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.in
    iniconfig==2.0.0
        # via -r requirements.in

    ----- stderr -----
    Resolved 1 package in [TIME]
    "###
    );

    // `--default-index=<url>` (`=`-separated).
    uv_snapshot!(context.filters(), context.pip_compile()
            .arg("requirements.in")
            .arg("--default-index=https://test.pypi.org/simple/"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.in
    iniconfig==2.0.0
        # via -r requirements.in

    ----- stderr -----
    Resolved 1 package in [TIME]
    "###
    );

    // `--index <url>` (space-separated).
    uv_snapshot!(context.filters(), context.pip_compile()
            .arg("requirements.in")
            .arg("--index")
            .arg("https://test.pypi.org/simple/"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.in
    iniconfig==2.0.0
        # via -r requirements.in

    ----- stderr -----
    Resolved 1 package in [TIME]
    "###
    );

    // `--index=<url>` (`=`-separated).
    uv_snapshot!(context.filters(), context.pip_compile()
            .arg("requirements.in")
            .arg("--index=https://test.pypi.org/simple/"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.in
    iniconfig==2.0.0
        # via -r requirements.in

    ----- stderr -----
    Resolved 1 package in [TIME]
    "###
    );

    // Short flag: `-i <url>`.
    uv_snapshot!(context.filters(), context.pip_compile()
            .arg("requirements.in")
            .arg("-i")
            .arg("https://test.pypi.org/simple/"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.in
    iniconfig==2.0.0
        # via -r requirements.in

    ----- stderr -----
    Resolved 1 package in [TIME]
    "###
    );

    // Short flag with `=`: `-i=<url>`.
    uv_snapshot!(context.filters(), context.pip_compile()
            .arg("requirements.in")
            .arg("-i=https://test.pypi.org/simple/"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.in
    iniconfig==2.0.0
        # via -r requirements.in

    ----- stderr -----
    Resolved 1 package in [TIME]
    "###
    );

    // pip-compatible `--index-url <url>` (space-separated).
    uv_snapshot!(context.filters(), context.pip_compile()
            .arg("requirements.in")
            .arg("--index-url")
            .arg("https://test.pypi.org/simple/"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.in
    iniconfig==2.0.0
        # via -r requirements.in

    ----- stderr -----
    Resolved 1 package in [TIME]
    "###
    );

    // pip-compatible `--index-url=<url>` (`=`-separated).
    uv_snapshot!(context.filters(), context.pip_compile()
            .arg("requirements.in")
            .arg("--index-url=https://test.pypi.org/simple/"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.in
    iniconfig==2.0.0
        # via -r requirements.in

    ----- stderr -----
    Resolved 1 package in [TIME]
    "###
    );

    // `--extra-index-url <url>` (space-separated).
    uv_snapshot!(context.filters(), context.pip_compile()
            .arg("requirements.in")
            .arg("--extra-index-url")
            .arg("https://test.pypi.org/simple/"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.in
    iniconfig==2.0.0
        # via -r requirements.in

    ----- stderr -----
    Resolved 1 package in [TIME]
    "###
    );

    // `--extra-index-url=<url>` (`=`-separated).
    uv_snapshot!(context.filters(), context.pip_compile()
            .arg("requirements.in")
            .arg("--extra-index-url=https://test.pypi.org/simple/"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.in
    iniconfig==2.0.0
        # via -r requirements.in

    ----- stderr -----
    Resolved 1 package in [TIME]
    "###
    );

    Ok(())
}

/// Emit the `--index-url` and `--extra-index-url` locations, and preserve the `--index-url` and
/// `--extra-index-url` flags in the command in the header.
#[test]
fn emit_index_urls() -> Result<()> {
    let context = TestContext::new("3.12");
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str("iniconfig")?;

    // With `--emit-index-url`, each flag spelling below should (a) remain visible in
    // the generated header and (b) produce `--index-url` / `--extra-index-url` lines
    // in the output body.

    // `--default-index` replaces the default index entirely.
    uv_snapshot!(context.filters(), context.pip_compile()
            .arg("requirements.in")
            .arg("--emit-index-url")
            .arg("--default-index")
            .arg("https://test.pypi.org/simple/"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.in --emit-index-url --default-index https://test.pypi.org/simple/
    --index-url https://test.pypi.org/simple/

    iniconfig==2.0.0
        # via -r requirements.in

    ----- stderr -----
    Resolved 1 package in [TIME]
    "###
    );

    uv_snapshot!(context.filters(), context.pip_compile()
            .arg("requirements.in")
            .arg("--emit-index-url")
            .arg("--default-index=https://test.pypi.org/simple/"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.in --emit-index-url --default-index=https://test.pypi.org/simple/
    --index-url https://test.pypi.org/simple/

    iniconfig==2.0.0
        # via -r requirements.in

    ----- stderr -----
    Resolved 1 package in [TIME]
    "###
    );

    // `--index` adds an index alongside the default, so the output carries both the
    // default PyPI `--index-url` and the extra index.
    uv_snapshot!(context.filters(), context.pip_compile()
            .arg("requirements.in")
            .arg("--emit-index-url")
            .arg("--index")
            .arg("https://test.pypi.org/simple/"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.in --emit-index-url --index https://test.pypi.org/simple/
    --index-url https://pypi.org/simple
    --extra-index-url https://test.pypi.org/simple/

    iniconfig==2.0.0
        # via -r requirements.in

    ----- stderr -----
    Resolved 1 package in [TIME]
    "###
    );

    uv_snapshot!(context.filters(), context.pip_compile()
            .arg("requirements.in")
            .arg("--emit-index-url")
            .arg("--index=https://test.pypi.org/simple/"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.in --emit-index-url --index=https://test.pypi.org/simple/
    --index-url https://pypi.org/simple
    --extra-index-url https://test.pypi.org/simple/

    iniconfig==2.0.0
        # via -r requirements.in

    ----- stderr -----
    Resolved 1 package in [TIME]
    "###
    );

    // Short flag `-i` behaves like `--index-url` (replaces the default index).
    uv_snapshot!(context.filters(), context.pip_compile()
            .arg("requirements.in")
            .arg("--emit-index-url")
            .arg("-i")
            .arg("https://test.pypi.org/simple/"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.in --emit-index-url -i https://test.pypi.org/simple/
    --index-url https://test.pypi.org/simple/

    iniconfig==2.0.0
        # via -r requirements.in

    ----- stderr -----
    Resolved 1 package in [TIME]
    "###
    );

    uv_snapshot!(context.filters(), context.pip_compile()
            .arg("requirements.in")
            .arg("--emit-index-url")
            .arg("-i=https://test.pypi.org/simple/"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.in --emit-index-url -i=https://test.pypi.org/simple/
    --index-url https://test.pypi.org/simple/

    iniconfig==2.0.0
        # via -r requirements.in

    ----- stderr -----
    Resolved 1 package in [TIME]
    "###
    );

    // pip-compatible `--index-url` replaces the default index.
    uv_snapshot!(context.filters(), context.pip_compile()
            .arg("requirements.in")
            .arg("--emit-index-url")
            .arg("--index-url")
            .arg("https://test.pypi.org/simple/"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.in --emit-index-url --index-url https://test.pypi.org/simple/
    --index-url https://test.pypi.org/simple/

    iniconfig==2.0.0
        # via -r requirements.in

    ----- stderr -----
    Resolved 1 package in [TIME]
    "###
    );

    uv_snapshot!(context.filters(), context.pip_compile()
            .arg("requirements.in")
            .arg("--emit-index-url")
            .arg("--index-url=https://test.pypi.org/simple/"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.in --emit-index-url --index-url=https://test.pypi.org/simple/
    --index-url https://test.pypi.org/simple/

    iniconfig==2.0.0
        # via -r requirements.in

    ----- stderr -----
    Resolved 1 package in [TIME]
    "###
    );

    // `--extra-index-url` supplements the default index.
    uv_snapshot!(context.filters(), context.pip_compile()
            .arg("requirements.in")
            .arg("--emit-index-url")
            .arg("--extra-index-url")
            .arg("https://test.pypi.org/simple/"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.in --emit-index-url --extra-index-url https://test.pypi.org/simple/
    --index-url https://pypi.org/simple
    --extra-index-url https://test.pypi.org/simple/

    iniconfig==2.0.0
        # via -r requirements.in

    ----- stderr -----
    Resolved 1 package in [TIME]
    "###
    );

    uv_snapshot!(context.filters(), context.pip_compile()
            .arg("requirements.in")
            .arg("--emit-index-url")
            .arg("--extra-index-url=https://test.pypi.org/simple/"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.in --emit-index-url --extra-index-url=https://test.pypi.org/simple/
    --index-url https://pypi.org/simple
    --extra-index-url https://test.pypi.org/simple/

    iniconfig==2.0.0
        # via -r requirements.in

    ----- stderr -----
    Resolved 1 package in [TIME]
    "###
    );

    Ok(())
}

/// Emit the `--find-links` locations.
#[test]
fn emit_find_links() -> Result<()> {
    let context = TestContext::new("3.12");
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str("black==23.10.1")?;

    // With `--emit-find-links`, the `--find-links` location given on the command line
    // should be written into the output body (and preserved verbatim in the header).
    uv_snapshot!(context.filters(), context.pip_compile()
            .arg("requirements.in")
            .arg("--emit-find-links")
            .arg("--find-links")
            .arg("./"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.in --emit-find-links --find-links ./
    --find-links ./

    black==23.10.1
        # via -r requirements.in
    click==8.1.7
        # via black
    mypy-extensions==1.0.0
        # via black
    packaging==24.0
        # via black
    pathspec==0.12.1
        # via black
    platformdirs==4.2.0
        # via black

    ----- stderr -----
    Resolved 6 packages in [TIME]
    "###
    );

    Ok(())
}

/// A relative `--find-links` path declared inside the requirements file (via `-f ./`)
/// should be emitted verbatim when `--emit-find-links` is passed, not resolved to an
/// absolute path.
#[test]
fn emit_find_links_relative() -> Result<()> {
    let context = TestContext::new("3.12");

    let requirements_file = context.temp_dir.child("requirements.in");
    requirements_file.write_str("-f ./\niniconfig")?;

    uv_snapshot!(context.filters(), context.pip_compile()
            .args(["requirements.in", "--emit-find-links"]), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.in --emit-find-links
    --find-links ./

    iniconfig==2.0.0
        # via -r requirements.in

    ----- stderr -----
    Resolved 1 package in [TIME]
    "###
    );

    Ok(())
}

/// Emit the `--no-binary` and `--only-binary` options.
#[test]
fn emit_build_options() -> Result<()> {
    let context = TestContext::new("3.12");
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str("black==23.10.1")?;

    // With `--emit-build-options`, both build-option flags should be echoed into the
    // output body (after the header) so that downstream `pip install` runs see them.
    uv_snapshot!(context.filters(), context.pip_compile()
            .arg("requirements.in")
            .arg("--emit-build-options")
            .arg("--only-binary")
            .arg("black")
            .arg("--no-binary")
            .arg(":all:"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.in --emit-build-options --only-binary black --no-binary :all:
    --no-binary :all:
    --only-binary black

    black==23.10.1
        # via -r requirements.in
    click==8.1.7
        # via black
    mypy-extensions==1.0.0
        # via black
    packaging==24.0
        # via black
    pathspec==0.12.1
        # via black
    platformdirs==4.2.0
        # via black

    ----- stderr -----
    Resolved 6 packages in [TIME]
    "###
    );

    Ok(())
}

/// A `--no-index` directive inside the requirements file itself should disable
/// registry lookups, so resolution fails with a hint about providing `--find-links`.
#[test]
fn no_index_requirements_txt() -> Result<()> {
    let context = TestContext::new("3.12");

    let requirements_file = context.temp_dir.child("requirements.in");
    requirements_file.write_str("--no-index\ntqdm")?;

    uv_snapshot!(context.filters(), context.pip_compile()
            .arg("requirements.in"), @r###"
    success: false
    exit_code: 1
    ----- stdout -----

    ----- stderr -----
      × No solution found when resolving dependencies:
      ╰─▶ Because tqdm was not found in the provided package locations and you require tqdm, we can conclude that your requirements are unsatisfiable.

          hint: Packages were unavailable because index lookups were disabled and no additional package locations were provided (try: `--find-links <uri>`)
    "###
    );

    Ok(())
}

/// A command-line `--index-url` should take precedence over the `--index-url`
/// declared inside the requirements file, and neither should appear in the output
/// (index URLs are hidden unless `--emit-index-url` is passed).
#[test]
fn index_url_requirements_txt() -> Result<()> {
    let context = TestContext::new("3.12");

    // The in-file index URL is bogus; resolution only succeeds if the command-line
    // index wins.
    let requirements_file = context.temp_dir.child("requirements.in");
    requirements_file.write_str("--index-url https://google.com\ntqdm")?;

    uv_snapshot!(context.filters(), context.pip_compile()
            .args(["requirements.in", "--index-url", "https://pypi.org/simple"]), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.in
    tqdm==4.66.2
        # via -r requirements.in

    ----- stderr -----
    Resolved 1 package in [TIME]
    "###
    );

    Ok(())
}

/// When the requirements file and a constraints file declare different `--index-url`
/// values, resolution should abort with an error naming both URLs.
#[test]
fn conflicting_index_urls_requirements_txt() -> Result<()> {
    let context = TestContext::new("3.12");

    // Two input files, each pinning a different index URL.
    let requirements_file = context.temp_dir.child("requirements.in");
    requirements_file.write_str("--index-url https://google.com\ntqdm")?;

    let constraints_file = context.temp_dir.child("constraints.in");
    constraints_file.write_str("--index-url https://wikipedia.org\nflask")?;

    uv_snapshot!(context.filters(), context.pip_compile()
            .args(["requirements.in", "--constraint", "constraints.in"]), @r###"
    success: false
    exit_code: 2
    ----- stdout -----

    ----- stderr -----
    error: Multiple index URLs specified: `https://google.com/` vs. `https://wikipedia.org/`
    "###
    );

    Ok(())
}

/// Identical `--index-url` declarations across the requirements and constraints
/// files are not a conflict: resolution should succeed (trivially, with no packages).
#[test]
fn matching_index_urls_requirements_txt() -> Result<()> {
    let context = TestContext::new("3.12");

    // Both files declare the same index URL and no requirements.
    let requirements_file = context.temp_dir.child("requirements.in");
    requirements_file.write_str("--index-url https://pypi.org/simple")?;

    let constraints_file = context.temp_dir.child("constraints.in");
    constraints_file.write_str("--index-url https://pypi.org/simple")?;

    uv_snapshot!(context.filters(), context.pip_compile()
            .args(["requirements.in", "--constraint", "constraints.in"]), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.in --constraint constraints.in

    ----- stderr -----
    Resolved in [TIME]
    "###
    );

    Ok(())
}

/// Resolve a registry package without network access via the `--offline` flag.
#[test]
fn offline_registry() -> Result<()> {
    let context = TestContext::new("3.12");
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str("black==23.10.1")?;

    // Resolve with `--offline` with an empty cache.
    // Nothing is cached yet, so resolution must fail with the offline-specific hint.
    uv_snapshot!(context.filters(), context.pip_compile()
            .arg("requirements.in")
            .arg("--offline"), @r###"
    success: false
    exit_code: 1
    ----- stdout -----

    ----- stderr -----
      × No solution found when resolving dependencies:
      ╰─▶ Because black was not found in the cache and you require black==23.10.1, we can conclude that your requirements are unsatisfiable.

          hint: Packages were unavailable because the network was disabled. When the network is disabled, registry packages may only be read from the cache.
    "###
    );

    // Populate the cache.
    // An online resolution warms the cache with metadata for `black` and friends.
    uv_snapshot!(context.filters(), context.pip_compile()
            .arg("requirements.in"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.in
    black==23.10.1
        # via -r requirements.in
    click==8.1.7
        # via black
    mypy-extensions==1.0.0
        # via black
    packaging==24.0
        # via black
    pathspec==0.12.1
        # via black
    platformdirs==4.2.0
        # via black

    ----- stderr -----
    Resolved 6 packages in [TIME]
    "###
    );

    // Resolve with `--offline` with a populated cache.
    // Now the same offline resolution should succeed entirely from the cache.
    uv_snapshot!(context.filters(), context.pip_compile()
            .arg("requirements.in")
            .arg("--offline"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.in --offline
    black==23.10.1
        # via -r requirements.in
    click==8.1.7
        # via black
    mypy-extensions==1.0.0
        # via black
    packaging==24.0
        # via black
    pathspec==0.12.1
        # via black
    platformdirs==4.2.0
        # via black

    ----- stderr -----
    Resolved 6 packages in [TIME]
    "###
    );

    Ok(())
}

/// When resolving offline, uv should backtrack to the newest version available in
/// the cache rather than failing outright.
#[test]
fn offline_registry_backtrack() -> Result<()> {
    let context = TestContext::new("3.12");
    let requirements_file = context.temp_dir.child("requirements.in");
    requirements_file.write_str("iniconfig==1.1.1")?;

    // Warm the cache with an exact pin of `iniconfig==1.1.1`.
    uv_snapshot!(context.filters(), context.pip_compile()
            .arg("requirements.in"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.in
    iniconfig==1.1.1
        # via -r requirements.in

    ----- stderr -----
    Resolved 1 package in [TIME]
    "###
    );

    // Loosen the requirement to an unconstrained `iniconfig`, then resolve offline:
    // only `1.1.1` is cached, so the resolver should settle on it.
    requirements_file.write_str("iniconfig")?;

    uv_snapshot!(context.filters(), context.pip_compile()
            .args(["requirements.in", "--offline"]), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.in --offline
    iniconfig==1.1.1
        # via -r requirements.in

    ----- stderr -----
    Resolved 1 package in [TIME]
    "###
    );

    Ok(())
}

/// Resolve a package without network access via the `--offline` flag, using `--find-links` for an
/// HTML registry.
#[test]
fn offline_find_links() -> Result<()> {
    // Pin `exclude-newer` so the resolution is stable over time.
    let context = TestContext::new("3.12").with_exclude_newer("2025-01-30T00:00:00Z");
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str("tqdm")?;

    // Resolve with `--offline` and `--find-links`. We indicate that the network was disabled,
    // since both the `--find-links` and the registry lookups fail (but, importantly, we don't error
    // when failing to fetch the `--find-links` URL).
    uv_snapshot!(context.filters(), context.pip_compile()
            .arg("requirements.in")
            .arg("--find-links")
            .arg("https://astral-sh.github.io/pytorch-mirror/whl/torch_stable.html")
            .arg("--offline"), @r###"
    success: false
    exit_code: 1
    ----- stdout -----

    ----- stderr -----
      × No solution found when resolving dependencies:
      ╰─▶ Because tqdm was not found in the cache and you require tqdm, we can conclude that your requirements are unsatisfiable.

          hint: Packages were unavailable because the network was disabled. When the network is disabled, registry packages may only be read from the cache.
    "###
    );

    // Resolve with `--offline`, `--find-links`, and `--no-index`.
    // Even with the registry disabled, the failure mode (and message) is the same.
    uv_snapshot!(context.filters(), context.pip_compile()
            .arg("requirements.in")
            .arg("--find-links")
            .arg("https://astral-sh.github.io/pytorch-mirror/whl/torch_stable.html")
            .arg("--no-index")
            .arg("--offline"), @r###"
    success: false
    exit_code: 1
    ----- stdout -----

    ----- stderr -----
      × No solution found when resolving dependencies:
      ╰─▶ Because tqdm was not found in the cache and you require tqdm, we can conclude that your requirements are unsatisfiable.

          hint: Packages were unavailable because the network was disabled. When the network is disabled, registry packages may only be read from the cache.
    "###
    );

    Ok(())
}

/// Resolve a direct URL package without network access via the `--offline` flag.
#[test]
fn offline_direct_url() -> Result<()> {
    let context = TestContext::new("3.12");
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str("iniconfig @ https://files.pythonhosted.org/packages/ef/a6/62565a6e1cf69e10f5727360368e451d4b7f58beeac6173dc9db836a5b46/iniconfig-2.0.0-py3-none-any.whl")?;

    // Resolve with `--offline` with an empty cache.
    // Unlike registry packages, a missing direct-URL download is reported as a fetch
    // failure (not as an unsatisfiable resolution).
    uv_snapshot!(context.filters(), context.pip_compile()
            .arg("requirements.in")
            .arg("--offline"), @r###"
    success: false
    exit_code: 1
    ----- stdout -----

    ----- stderr -----
      × Failed to download `iniconfig @ https://files.pythonhosted.org/packages/ef/a6/62565a6e1cf69e10f5727360368e451d4b7f58beeac6173dc9db836a5b46/iniconfig-2.0.0-py3-none-any.whl`
      ╰─▶ Network connectivity is disabled, but the requested data wasn't found in the cache for: `https://files.pythonhosted.org/packages/ef/a6/62565a6e1cf69e10f5727360368e451d4b7f58beeac6173dc9db836a5b46/iniconfig-2.0.0-py3-none-any.whl`
    "###
    );

    // Populate the cache.
    uv_snapshot!(context.filters(), context.pip_compile()
            .arg("requirements.in"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.in
    iniconfig @ https://files.pythonhosted.org/packages/ef/a6/62565a6e1cf69e10f5727360368e451d4b7f58beeac6173dc9db836a5b46/iniconfig-2.0.0-py3-none-any.whl
        # via -r requirements.in

    ----- stderr -----
    Resolved 1 package in [TIME]
    "###
    );

    // Resolve with `--offline` with a populated cache.
    uv_snapshot!(context.filters(), context.pip_compile()
            .arg("requirements.in")
            .arg("--offline"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.in --offline
    iniconfig @ https://files.pythonhosted.org/packages/ef/a6/62565a6e1cf69e10f5727360368e451d4b7f58beeac6173dc9db836a5b46/iniconfig-2.0.0-py3-none-any.whl
        # via -r requirements.in

    ----- stderr -----
    Resolved 1 package in [TIME]
    "###
    );

    Ok(())
}

/// A distribution with an unparseable `Requires-Python` in its `METADATA` should be
/// surfaced as invalid metadata, making an exact pin on it unsatisfiable.
#[test]
fn invalid_metadata_requires_python() -> Result<()> {
    let context = TestContext::new("3.12");

    let requirements_file = context.temp_dir.child("requirements.in");
    requirements_file.write_str("validation==2.0.0")?;

    // `validation==2.0.0` (from the local test fixtures) carries invalid metadata.
    let links_dir = context.workspace_root.join("scripts").join("links");
    uv_snapshot!(context.filters(), context.pip_compile()
            .arg("requirements.in")
            .args(["--no-index", "--find-links"])
            .arg(links_dir), @r###"
    success: false
    exit_code: 1
    ----- stdout -----

    ----- stderr -----
      × No solution found when resolving dependencies:
      ╰─▶ Because validation==2.0.0 has invalid metadata and you require validation==2.0.0, we can conclude that your requirements are unsatisfiable.

          hint: Metadata for `validation` (v2.0.0) could not be parsed:
            Failed to parse version: Unexpected end of version specifier, expected operator:
            12
            ^^
    "###
    );

    Ok(())
}

/// Resolve a package with multiple `.dist-info` directories.
#[test]
fn invalid_metadata_multiple_dist_info() -> Result<()> {
    let context = TestContext::new("3.12");
    // Pin to the malformed wheel so the resolver can't select another version.
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str("validation==3.0.0")?;

    // `3.0.0` has an invalid structure (multiple `.dist-info` directories).
    // Offline resolution against the local fixtures; the hint should name both directories.
    uv_snapshot!(context.filters(), context.pip_compile()
            .arg("requirements.in")
            .arg("--no-index")
            .arg("--find-links")
            .arg(context.workspace_root.join("scripts").join("links")), @r###"
    success: false
    exit_code: 1
    ----- stdout -----

    ----- stderr -----
      × No solution found when resolving dependencies:
      ╰─▶ Because validation==3.0.0 has an invalid package format and you require validation==3.0.0, we can conclude that your requirements are unsatisfiable.

          hint: The structure of `validation` (v3.0.0) was invalid:
            Multiple .dist-info directories found: validation-2.0.0, validation-3.0.0
    "###
    );

    Ok(())
}

/// Resolve a package, but backtrack past versions with invalid metadata.
#[test]
fn invalid_metadata_backtrack() -> Result<()> {
    let context = TestContext::new("3.12");
    // No version pin: the resolver is free to skip past the broken releases.
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str("validation")?;

    // `2.0.0` and `3.0.0` have invalid metadata. We should backtrack to `1.0.0` (the preceding
    // version, which has valid metadata).
    // Note: the generated header echoes `--no-index` but omits the local `--find-links` path.
    uv_snapshot!(context.filters(), context.pip_compile()
            .arg("requirements.in")
            .arg("--no-index")
            .arg("--find-links")
            .arg(context.workspace_root.join("scripts").join("links")), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.in --no-index
    validation==1.0.0
        # via -r requirements.in

    ----- stderr -----
    Resolved 1 package in [TIME]
    "###
    );

    Ok(())
}

/// Resolve nested `-r` requirements files with relative paths.
#[test]
fn compile_relative_subfile() -> Result<()> {
    let context = TestContext::new("3.12");

    // The top-level file pulls in a file from a subdirectory...
    let root_requirements = context.temp_dir.child("requirements.in");
    root_requirements.write_str("-r subdir/requirements.in")?;

    // ...which, in turn, references a sibling file via a path relative to itself.
    let subdir = context.temp_dir.child("subdir");
    let nested_requirements = subdir.child("requirements.in");
    nested_requirements.write_str("-r requirements-dev.in")?;

    let dev_requirements = subdir.child("requirements-dev.in");
    dev_requirements.write_str("anyio")?;

    // The annotation should attribute `anyio` to the innermost file, relative to the root.
    uv_snapshot!(context.pip_compile().arg("requirements.in"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.in
    anyio==4.3.0
        # via -r subdir/requirements-dev.in
    idna==3.6
        # via anyio
    sniffio==1.3.1
        # via anyio

    ----- stderr -----
    Resolved 3 packages in [TIME]
    "###);

    Ok(())
}

/// Resolve a package with an invalid extra named `.none`.
#[test]
fn compile_none_extra() -> Result<()> {
    let context = TestContext::new("3.12");

    // `entrypoints==0.3` is the fixture package exercising the invalid extra name.
    let requirements_file = context.temp_dir.child("requirements.in");
    requirements_file.write_str("entrypoints==0.3")?;

    uv_snapshot!(context.pip_compile().arg("requirements.in"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.in
    entrypoints==0.3
        # via -r requirements.in

    ----- stderr -----
    Resolved 1 package in [TIME]
    "###);

    Ok(())
}

/// Resolve a package (`pytz`) with a preference that omits a trailing zero.
///
/// See: <https://github.com/astral-sh/uv/issues/1536>
#[test]
fn compile_types_pytz() -> Result<()> {
    let context = TestContext::new("3.12");

    let input_file = context.temp_dir.child("requirements.in");
    input_file.write_str("types-pytz")?;

    // The existing output pins `2021.1` (no trailing `.0`); the preference should still be
    // honored and re-emitted in normalized form.
    let output_file = context.temp_dir.child("requirements.txt");
    output_file.write_str("types-pytz==2021.1")?;

    uv_snapshot!(context
        .pip_compile()
        .arg("requirements.in")
        .arg("-o")
        .arg("requirements.txt"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.in -o requirements.txt
    types-pytz==2021.1.0
        # via -r requirements.in

    ----- stderr -----
    Resolved 1 package in [TIME]
    "###);

    Ok(())
}

/// Resolve a registry package (`black`) with an unnamed URL preference. The preference should be
/// ignored.
#[test]
fn compile_unnamed_preference() -> Result<()> {
    let context = TestContext::new("3.12");
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str("black")?;

    // An unnamed, path-based entry in the existing output: it can't be matched to the registry
    // `black` requirement, so it should have no effect on the resolution.
    let requirements_txt = context.temp_dir.child("requirements.txt");
    requirements_txt.write_str("./scripts/packages/black_editable")?;

    uv_snapshot!(context
        .pip_compile()
        .arg("requirements.in")
        .arg("-o")
        .arg("requirements.txt"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.in -o requirements.txt
    black==24.3.0
        # via -r requirements.in
    click==8.1.7
        # via black
    mypy-extensions==1.0.0
        # via black
    packaging==24.0
        # via black
    pathspec==0.12.1
        # via black
    platformdirs==4.2.0
        # via black

    ----- stderr -----
    Resolved 6 packages in [TIME]
    "###);

    Ok(())
}

/// Resolve a package from a `requirements.in` file, with a `constraints.txt` pinning that package
/// to a specific URL.
#[test]
fn compile_constraints_compatible_url() -> Result<()> {
    let context = TestContext::new("3.12");
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str("anyio>4")?;

    // The constrained URL carries `anyio-4.2.0`, which satisfies `anyio>4`, so the URL should be
    // adopted as the resolved distribution (annotated with both the `-c` and `-r` sources).
    let constraints_txt = context.temp_dir.child("constraints.txt");
    constraints_txt.write_str("anyio @ https://files.pythonhosted.org/packages/bf/cd/d6d9bb1dadf73e7af02d18225cbd2c93f8552e13130484f1c8dcfece292b/anyio-4.2.0-py3-none-any.whl")?;

    uv_snapshot!(context.filters(), context.pip_compile()
            .arg("requirements.in")
            .arg("--constraint")
            .arg("constraints.txt"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.in --constraint constraints.txt
    anyio @ https://files.pythonhosted.org/packages/bf/cd/d6d9bb1dadf73e7af02d18225cbd2c93f8552e13130484f1c8dcfece292b/anyio-4.2.0-py3-none-any.whl
        # via
        #   -c constraints.txt
        #   -r requirements.in
    idna==3.6
        # via anyio
    sniffio==1.3.1
        # via anyio

    ----- stderr -----
    Resolved 3 packages in [TIME]
    "###
    );

    Ok(())
}

/// Resolve a direct URL package from a `requirements.in` file, with a `constraints.txt` file
/// pinning it to a specific version.
#[test]
fn compile_constraints_compatible_url_version() -> Result<()> {
    let context = TestContext::new("3.12");
    // Mirror image of `compile_constraints_compatible_url`: here the URL is the requirement and
    // the version range is the constraint.
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str("anyio @ https://files.pythonhosted.org/packages/bf/cd/d6d9bb1dadf73e7af02d18225cbd2c93f8552e13130484f1c8dcfece292b/anyio-4.2.0-py3-none-any.whl")?;

    // The URL's `4.2.0` satisfies `anyio>4`, so the resolution should succeed with the URL.
    let constraints_txt = context.temp_dir.child("constraints.txt");
    constraints_txt.write_str("anyio>4")?;

    uv_snapshot!(context.filters(), context.pip_compile()
            .arg("requirements.in")
            .arg("--constraint")
            .arg("constraints.txt"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.in --constraint constraints.txt
    anyio @ https://files.pythonhosted.org/packages/bf/cd/d6d9bb1dadf73e7af02d18225cbd2c93f8552e13130484f1c8dcfece292b/anyio-4.2.0-py3-none-any.whl
        # via
        #   -c constraints.txt
        #   -r requirements.in
    idna==3.6
        # via anyio
    sniffio==1.3.1
        # via anyio

    ----- stderr -----
    Resolved 3 packages in [TIME]
    "###
    );

    Ok(())
}

/// Resolve a package from a `requirements.in` file, with a `constraints.txt` file pinning it to
/// a specific URL with an incompatible version.
#[test]
fn compile_constraints_incompatible_url() -> Result<()> {
    let context = TestContext::new("3.12");
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str("anyio<4")?;

    // The constrained URL carries `anyio-4.2.0`, which conflicts with `anyio<4`; since the URL
    // constraint limits the available candidates, resolution should fail.
    let constraints_txt = context.temp_dir.child("constraints.txt");
    constraints_txt.write_str("anyio @ https://files.pythonhosted.org/packages/bf/cd/d6d9bb1dadf73e7af02d18225cbd2c93f8552e13130484f1c8dcfece292b/anyio-4.2.0-py3-none-any.whl")?;

    uv_snapshot!(context.filters(), context.pip_compile()
            .arg("requirements.in")
            .arg("--constraint")
            .arg("constraints.txt"), @r###"
    success: false
    exit_code: 1
    ----- stdout -----

    ----- stderr -----
      × No solution found when resolving dependencies:
      ╰─▶ Because only anyio>=4 is available and you require anyio<4, we can conclude that your requirements are unsatisfiable.
    "###
    );

    Ok(())
}

/// Resolve a package from a `requirements.in` file, respecting the `--index-url` in a
/// `requirements.in` file. The resolution should fail, since the package doesn't exist at the
/// given index.
#[test]
fn index_url_in_requirements() -> Result<()> {
    let context = TestContext::new("3.12");

    // Point the file itself at an index that doesn't serve `anyio`.
    let requirements_file = context.temp_dir.child("requirements.in");
    requirements_file
        .write_str("--index-url https://astral-sh.github.io/pytorch-mirror/whl\nanyio<4")?;

    uv_snapshot!(context.filters(), context.pip_compile().arg("requirements.in"), @r###"
    success: false
    exit_code: 1
    ----- stdout -----

    ----- stderr -----
      × No solution found when resolving dependencies:
      ╰─▶ Because anyio was not found in the package registry and you require anyio<4, we can conclude that your requirements are unsatisfiable.
    "###);

    Ok(())
}

/// Resolve a package from a `requirements.in` file, respecting the `--index-url` passed via the
/// command line over that in a `requirements.in` file.
#[test]
fn index_url_from_command_line() -> Result<()> {
    let context = TestContext::new("3.12");
    // The file points at an index without `anyio`; the CLI flag should win and resolution succeed.
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in
        .write_str("--index-url https://astral-sh.github.io/pytorch-mirror/whl\nanyio<4")?;

    // Note: the `--index-url` flag is not echoed in the generated header below.
    uv_snapshot!(context.filters(), context.pip_compile()
            .arg("requirements.in")
            .arg("--index-url")
            .arg("https://pypi.org/simple"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.in
    anyio==3.7.1
        # via -r requirements.in
    idna==3.6
        # via anyio
    sniffio==1.3.1
        # via anyio

    ----- stderr -----
    Resolved 3 packages in [TIME]
    "###
    );

    Ok(())
}

/// Resolve a package from a `requirements.in` file with a dependency that uses an unsupported
/// scheme.
#[test]
fn unsupported_scheme() -> Result<()> {
    let context = TestContext::new("3.12");
    // Bazaar (`bzr+`) URLs are rejected at parse time, before any resolution happens.
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str("anyio @ bzr+https://example.com/anyio")?;

    // Exit code 2 (usage/parse error) rather than 1 (resolution failure).
    uv_snapshot!(context.filters(), context.pip_compile()
            .arg("requirements.in"), @r###"
    success: false
    exit_code: 2
    ----- stdout -----

    ----- stderr -----
    error: Couldn't parse requirement in `requirements.in` at position 0
      Caused by: Unsupported URL prefix `bzr` in URL: `bzr+https://example.com/anyio` (Bazaar is not supported)
    anyio @ bzr+https://example.com/anyio
            ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
    "###
    );

    Ok(())
}

/// Resolve a package with `--no-deps`, including a valid extra.
#[test]
fn no_deps_valid_extra() -> Result<()> {
    let context = TestContext::new("3.12");

    // With `--no-deps`, only `flask` itself is pinned; the extra's dependencies are skipped.
    let requirements_file = context.temp_dir.child("requirements.in");
    requirements_file.write_str("flask[dotenv]")?;

    uv_snapshot!(context.filters(), context.pip_compile().arg("requirements.in").arg("--no-deps"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.in --no-deps
    flask==3.0.2
        # via -r requirements.in

    ----- stderr -----
    Resolved 1 package in [TIME]
    "###);

    Ok(())
}

/// Resolve a package with `--no-deps`, including an invalid extra. We don't warn here.
#[test]
fn no_deps_invalid_extra() -> Result<()> {
    let context = TestContext::new("3.12");

    // `empty` is not an extra of `flask`; with `--no-deps`, extras are never expanded, so no
    // warning is emitted and the resolution succeeds.
    let requirements_file = context.temp_dir.child("requirements.in");
    requirements_file.write_str("flask[empty]")?;

    uv_snapshot!(context.filters(), context.pip_compile().arg("requirements.in").arg("--no-deps"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.in --no-deps
    flask==3.0.2
        # via -r requirements.in

    ----- stderr -----
    Resolved 1 package in [TIME]
    "###);

    Ok(())
}

/// Resolve a package with `--no-deps` in which the requirements have a conflict in their
/// transitive dependencies. The resolution should succeed, since `--no-deps` ignores the
/// transitive dependencies.
#[test]
fn no_deps_transitive_conflict() -> Result<()> {
    let context = TestContext::new("3.12");

    // Create an editable with a dependency on `anyio` at a dedicated URL.
    let editable_dir1 = context.temp_dir.child("editable1");
    editable_dir1.create_dir_all()?;

    let pyproject_toml = editable_dir1.child("pyproject.toml");
    pyproject_toml.write_str(
        r#"
[project]
name = "editable1"
version = "0.0.1"
dependencies = [
    "anyio @ https://files.pythonhosted.org/packages/bf/cd/d6d9bb1dadf73e7af02d18225cbd2c93f8552e13130484f1c8dcfece292b/anyio-4.2.0-py3-none-any.whl"
]
"#,
    )?;

    // Create an editable with a dependency on `anyio` at a different, dedicated URL.
    // The two `anyio` URLs (4.2.0 vs. 4.3.0) would conflict if dependencies were resolved.
    let editable_dir2 = context.temp_dir.child("editable2");
    editable_dir2.create_dir_all()?;

    let pyproject_toml = editable_dir2.child("pyproject.toml");
    pyproject_toml.write_str(
        r#"
[project]
name = "editable2"
version = "0.0.1"
dependencies = [
    "anyio @ https://files.pythonhosted.org/packages/14/fd/2f20c40b45e4fb4324834aea24bd4afdf1143390242c0b33774da0e2e34f/anyio-4.3.0-py3-none-any.whl"
]
"#,
    )?;

    // Write to a requirements file.
    // Both editables are installed via `-e`, with absolute paths interpolated at runtime.
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str(&indoc::formatdoc! {r"
        -e {}
        -e {}
    ",
        editable_dir1.path().display(),
        editable_dir2.path().display()
    })?;

    uv_snapshot!(context.filters(), context.pip_compile()
            .arg("requirements.in")
            .arg("--no-deps"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.in --no-deps
    -e [TEMP_DIR]/editable1
        # via -r requirements.in
    -e [TEMP_DIR]/editable2
        # via -r requirements.in

    ----- stderr -----
    Resolved 2 packages in [TIME]
    "###
    );

    Ok(())
}

/// Resolve an editable package with an invalid extra.
#[test]
fn editable_invalid_extra() -> Result<()> {
    let context = TestContext::new("3.12");
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str("-e ../../scripts/packages/black_editable[empty]")?;

    // Run from the test crate's working directory so the relative editable path resolves;
    // the missing extra produces a warning, not a hard failure.
    uv_snapshot!(context.filters(), context.pip_compile()
            .arg(requirements_in.path())
            .current_dir(current_dir()?), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] [TEMP_DIR]/requirements.in
    -e ../../scripts/packages/black_editable
        # via -r [TEMP_DIR]/requirements.in

    ----- stderr -----
    Resolved 1 package in [TIME]
    warning: The package `black @ file://[WORKSPACE]/scripts/packages/black_editable` does not have an extra named `empty`
    "###);

    Ok(())
}

/// Resolve a package with `--no-strip-extras`.
#[test]
fn no_strip_extra() -> Result<()> {
    let context = TestContext::new("3.12");
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str("flask[dotenv]")?;

    // With `--no-strip-extras`, the output retains `flask[dotenv]` (rather than plain `flask`)
    // and includes the extra's dependency, `python-dotenv`.
    uv_snapshot!(context.filters(), context.pip_compile()
            .arg("requirements.in")
            .arg("--no-strip-extras"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.in --no-strip-extras
    blinker==1.7.0
        # via flask
    click==8.1.7
        # via flask
    flask[dotenv]==3.0.2
        # via -r requirements.in
    itsdangerous==2.1.2
        # via flask
    jinja2==3.1.3
        # via flask
    markupsafe==2.1.5
        # via
        #   jinja2
        #   werkzeug
    python-dotenv==1.0.1
        # via flask
    werkzeug==3.0.1
        # via flask

    ----- stderr -----
    Resolved 8 packages in [TIME]
    "###
    );

    Ok(())
}

/// Resolve a package with `--no-strip-extras`.
// NOTE(review): disabled on Windows — presumably the snapshot differs there; confirm.
#[test]
#[cfg(not(windows))]
fn no_strip_extras() -> Result<()> {
    let context = TestContext::new("3.12");
    // Two extras on the same package; the output should merge them as `anyio[doc, trio]`.
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str("anyio[trio]\nanyio[doc]")?;

    uv_snapshot!(context.filters(), context.pip_compile()
            .arg("requirements.in")
            .arg("--no-strip-extras"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.in --no-strip-extras
    alabaster==0.7.16
        # via sphinx
    anyio[doc, trio]==4.3.0
        # via -r requirements.in
    attrs==23.2.0
        # via
        #   outcome
        #   trio
    babel==2.14.0
        # via sphinx
    certifi==2024.2.2
        # via requests
    charset-normalizer==3.3.2
        # via requests
    docutils==0.20.1
        # via
        #   sphinx
        #   sphinx-rtd-theme
    idna==3.6
        # via
        #   anyio
        #   requests
        #   trio
    imagesize==1.4.1
        # via sphinx
    jinja2==3.1.3
        # via sphinx
    markupsafe==2.1.5
        # via jinja2
    outcome==1.3.0.post0
        # via trio
    packaging==24.0
        # via
        #   anyio
        #   sphinx
    pygments==2.17.2
        # via sphinx
    requests==2.31.0
        # via sphinx
    sniffio==1.3.1
        # via
        #   anyio
        #   trio
    snowballstemmer==2.2.0
        # via sphinx
    sortedcontainers==2.4.0
        # via trio
    sphinx==7.2.6
        # via
        #   anyio
        #   sphinx-autodoc-typehints
        #   sphinx-rtd-theme
        #   sphinxcontrib-jquery
    sphinx-autodoc-typehints==2.0.0
        # via anyio
    sphinx-rtd-theme==2.0.0
        # via anyio
    sphinxcontrib-applehelp==1.0.8
        # via sphinx
    sphinxcontrib-devhelp==1.0.6
        # via sphinx
    sphinxcontrib-htmlhelp==2.0.5
        # via sphinx
    sphinxcontrib-jquery==4.1
        # via sphinx-rtd-theme
    sphinxcontrib-jsmath==1.0.1
        # via sphinx
    sphinxcontrib-qthelp==1.0.7
        # via sphinx
    sphinxcontrib-serializinghtml==1.1.10
        # via sphinx
    trio==0.25.0
        # via anyio
    urllib3==2.2.1
        # via requests

    ----- stderr -----
    Resolved 30 packages in [TIME]
    "###
    );

    Ok(())
}

/// Resolve a package with `--no-strip-markers`.
#[test]
fn no_strip_markers() -> Result<()> {
    let context = TestContext::new("3.12");
    // The marker is always true for the 3.12 interpreter, so no marker appears in the output.
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str("anyio ; python_version > '3.11'")?;

    uv_snapshot!(context.filters(), context.pip_compile()
            .arg("requirements.in")
            .arg("--no-strip-markers")
            .arg("--python-platform")
            .arg("linux"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.in --no-strip-markers --python-platform linux
    anyio==4.3.0
        # via -r requirements.in
    idna==3.6
        # via anyio
    sniffio==1.3.1
        # via anyio

    ----- stderr -----
    Resolved 3 packages in [TIME]
    "###
    );

    Ok(())
}

/// Resolve a package with `--no-strip-markers`. In this case, a single package is included with
/// multiple markers.
#[test]
fn no_strip_markers_multiple_markers() -> Result<()> {
    let context = TestContext::new("3.12");
    // The same package appears twice with different markers; transitive markers (e.g. on `cffi`)
    // should be retained in the output.
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str(indoc::indoc! {r"
        trio ; python_version > '3.11'
        trio ; sys_platform == 'win32'
    "})?;

    uv_snapshot!(context.filters(), context.pip_compile()
        .arg("requirements.in")
        .arg("--no-strip-markers")
        .arg("--python-platform")
        .arg("windows"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.in --no-strip-markers --python-platform windows
    attrs==23.2.0
        # via
        #   outcome
        #   trio
    cffi==1.16.0 ; implementation_name != 'pypy' and os_name == 'nt'
        # via trio
    idna==3.6
        # via trio
    outcome==1.3.0.post0
        # via trio
    pycparser==2.21 ; implementation_name != 'pypy' and os_name == 'nt'
        # via cffi
    sniffio==1.3.1
        # via trio
    sortedcontainers==2.4.0
        # via trio
    trio==0.25.0
        # via -r requirements.in

    ----- stderr -----
    Resolved 8 packages in [TIME]
    "###
    );

    Ok(())
}

/// Resolve a package with `--no-strip-markers`. In this case, one of the dependencies has markers
/// on its own requirements.
#[test]
fn no_strip_markers_transitive_marker() -> Result<()> {
    let context = TestContext::new("3.12");
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str("trio ; python_version > '3.11'")?;

    // `cffi` and `pycparser` carry markers inherited from `trio`'s own requirements; those
    // transitive markers should survive in the output.
    uv_snapshot!(context.filters(), context.pip_compile()
            .arg("requirements.in")
            .arg("--no-strip-markers")
            .arg("--python-platform")
            .arg("windows"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.in --no-strip-markers --python-platform windows
    attrs==23.2.0
        # via
        #   outcome
        #   trio
    cffi==1.16.0 ; implementation_name != 'pypy' and os_name == 'nt'
        # via trio
    idna==3.6
        # via trio
    outcome==1.3.0.post0
        # via trio
    pycparser==2.21 ; implementation_name != 'pypy' and os_name == 'nt'
        # via cffi
    sniffio==1.3.1
        # via trio
    sortedcontainers==2.4.0
        # via trio
    trio==0.25.0
        # via -r requirements.in

    ----- stderr -----
    Resolved 8 packages in [TIME]
    "###
    );

    Ok(())
}

/// Perform a universal resolution with a package that has a marker.
#[test]
fn universal() -> Result<()> {
    let context = TestContext::new("3.12");
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str(indoc::indoc! {r"
        trio ; python_version > '3.11'
        trio ; sys_platform == 'win32'
    "})?;

    // `windows_filters=false`: NOTE(review) — presumably keeps platform-specific snapshot
    // rewriting off so the universal output is identical on all hosts; confirm.
    uv_snapshot!(context.filters(), windows_filters=false, context.pip_compile()
            .arg("requirements.in")
            .arg("--universal"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.in --universal
    attrs==23.2.0
        # via
        #   outcome
        #   trio
    cffi==1.16.0 ; implementation_name != 'pypy' and os_name == 'nt'
        # via trio
    idna==3.6
        # via trio
    outcome==1.3.0.post0
        # via trio
    pycparser==2.21 ; implementation_name != 'pypy' and os_name == 'nt'
        # via cffi
    sniffio==1.3.1
        # via trio
    sortedcontainers==2.4.0
        # via trio
    trio==0.25.0
        # via -r requirements.in

    ----- stderr -----
    Resolved 8 packages in [TIME]
    "###
    );

    Ok(())
}

/// Perform a universal resolution with conflicting versions and markers.
#[test]
fn universal_conflicting() -> Result<()> {
    let context = TestContext::new("3.12");
    // Two disjoint-marker pins of `trio`: universal mode should fork and emit both versions,
    // each guarded by its own marker, rather than erroring on the conflict.
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str(indoc::indoc! {r"
        trio==0.25.0 ; sys_platform == 'darwin'
        trio==0.10.0 ; sys_platform == 'win32'
    "})?;

    uv_snapshot!(context.filters(), windows_filters=false, context.pip_compile()
            .arg("requirements.in")
            .arg("--universal"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.in --universal
    async-generator==1.10 ; sys_platform == 'win32'
        # via trio
    attrs==23.2.0 ; sys_platform == 'darwin' or sys_platform == 'win32'
        # via
        #   outcome
        #   trio
    cffi==1.16.0 ; os_name == 'nt' and sys_platform == 'win32'
        # via trio
    idna==3.6 ; sys_platform == 'darwin' or sys_platform == 'win32'
        # via trio
    outcome==1.3.0.post0 ; sys_platform == 'darwin' or sys_platform == 'win32'
        # via trio
    pycparser==2.21 ; os_name == 'nt' and sys_platform == 'win32'
        # via cffi
    sniffio==1.3.1 ; sys_platform == 'darwin' or sys_platform == 'win32'
        # via trio
    sortedcontainers==2.4.0 ; sys_platform == 'darwin' or sys_platform == 'win32'
        # via trio
    trio==0.10.0 ; sys_platform == 'win32'
        # via -r requirements.in
    trio==0.25.0 ; sys_platform == 'darwin'
        # via -r requirements.in

    ----- stderr -----
    Resolved 10 packages in [TIME]
    "###
    );

    Ok(())
}

/// Perform a universal resolution with a package that contains cycles in its dependency graph.
#[test]
fn universal_cycles() -> Result<()> {
    let context = TestContext::new("3.12");
    // `testtools` and `fixtures` depend on each other (see the mutual `# via` annotations in the
    // expected output); universal resolution must handle the cycle without diverging.
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str(indoc::indoc! {r"
        testtools==2.3.0
        fixtures==3.0.0
    "})?;

    uv_snapshot!(context.filters(), windows_filters=false, context.pip_compile()
            .arg("requirements.in")
            .arg("--universal"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.in --universal
    argparse==1.4.0
        # via unittest2
    extras==1.0.0
        # via testtools
    fixtures==3.0.0
        # via
        #   -r requirements.in
        #   testtools
    linecache2==1.0.0
        # via traceback2
    pbr==6.0.0
        # via
        #   fixtures
        #   testtools
    python-mimeparse==1.6.0
        # via testtools
    six==1.16.0
        # via
        #   fixtures
        #   testtools
        #   unittest2
    testtools==2.3.0
        # via
        #   -r requirements.in
        #   fixtures
    traceback2==1.4.0
        # via
        #   testtools
        #   unittest2
    unittest2==1.1.0
        # via testtools

    ----- stderr -----
    Resolved 10 packages in [TIME]
    "###
    );

    Ok(())
}

/// Perform a universal resolution with a constraint.
#[test]
fn universal_constraint() -> Result<()> {
    let context = TestContext::new("3.12");
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str(indoc::indoc! {r"
        anyio ; sys_platform == 'win32'
    "})?;

    // The unconditional constraint pins `anyio` to 3.0.0; the requirement's `win32` marker
    // should propagate to the constrained package and its dependencies.
    let constraints_txt = context.temp_dir.child("constraints.txt");
    constraints_txt.write_str(indoc::indoc! {r"
        anyio==3.0.0
    "})?;

    uv_snapshot!(context.filters(), windows_filters=false, context.pip_compile()
            .arg("requirements.in")
            .arg("-c")
            .arg("constraints.txt")
            .arg("--universal"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.in -c constraints.txt --universal
    anyio==3.0.0 ; sys_platform == 'win32'
        # via
        #   -c constraints.txt
        #   -r requirements.in
    idna==3.6 ; sys_platform == 'win32'
        # via anyio
    sniffio==1.3.1 ; sys_platform == 'win32'
        # via anyio

    ----- stderr -----
    Resolved 3 packages in [TIME]
    "###
    );

    Ok(())
}

/// Perform a universal resolution with a constraint, where the constraint itself has a marker.
#[test]
fn universal_constraint_marker() -> Result<()> {
    let context = TestContext::new("3.12");
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str(indoc::indoc! {r"
        anyio ; sys_platform == 'win32'
    "})?;

    // The constraint's own `os_name == 'nt'` marker still applies the 3.0.0 pin; the output
    // carries the requirement's `win32` marker.
    let constraints_txt = context.temp_dir.child("constraints.txt");
    constraints_txt.write_str(indoc::indoc! {r"
        anyio==3.0.0 ; os_name == 'nt'
    "})?;

    uv_snapshot!(context.filters(), windows_filters=false, context.pip_compile()
            .arg("requirements.in")
            .arg("-c")
            .arg("constraints.txt")
            .arg("--universal"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.in -c constraints.txt --universal
    anyio==3.0.0 ; sys_platform == 'win32'
        # via
        #   -c constraints.txt
        #   -r requirements.in
    idna==3.6 ; sys_platform == 'win32'
        # via anyio
    sniffio==1.3.1 ; sys_platform == 'win32'
        # via anyio

    ----- stderr -----
    Resolved 3 packages in [TIME]
    "###
    );

    Ok(())
}

/// Perform a universal resolution with a divergent requirement, and a third requirement that's
/// compatible with both forks.
///
/// The resolution should succeed, emitting one pinned `iniconfig` version per fork.
///
/// See: <https://github.com/astral-sh/uv/issues/4640>
#[test]
fn universal_multi_version() -> Result<()> {
    let context = TestContext::new("3.12");
    // The bare `iniconfig` is compatible with both marker-specific pins; universal mode should
    // fork on `python_version` and collapse to two version entries.
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str(indoc::indoc! {r"
        iniconfig
        iniconfig==2.0.0 ; python_version > '3.12'
        iniconfig==1.0.0 ; python_version == '3.12'
    "})?;

    // The `anyio` constraint is inert here: `anyio` is never required, so it doesn't appear in
    // the output.
    let constraints_txt = context.temp_dir.child("constraints.txt");
    constraints_txt.write_str(indoc::indoc! {r"
        anyio==3.0.0 ; os_name == 'nt'
    "})?;

    uv_snapshot!(context.filters(), windows_filters=false, context.pip_compile()
            .arg("requirements.in")
            .arg("-c")
            .arg("constraints.txt")
            .arg("--universal"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.in -c constraints.txt --universal
    iniconfig==1.0.0 ; python_full_version < '3.13'
        # via -r requirements.in
    iniconfig==2.0.0 ; python_full_version >= '3.13'
        # via -r requirements.in

    ----- stderr -----
    Resolved 2 packages in [TIME]
    "###
    );

    Ok(())
}

#[test]
fn universal_platform_fork() -> Result<()> {
    let context = TestContext::new("3.12").with_exclude_newer("2025-01-30T00:00:00Z");
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str(indoc::indoc! {r"
        --index-url https://astral-sh.github.io/pytorch-mirror/whl/cpu

        torch==2.5.1
    "})?;

    let constraints_txt = context.temp_dir.child("constraints.txt");
    constraints_txt.write_str(indoc::indoc! {r"
        jinja2<3.1.4
        typing-extensions<4.12.2
    "})?;

    uv_snapshot!(context.filters(), windows_filters=false, context.pip_compile()
        .arg("requirements.in")
        .arg("--universal")
        .arg("-c")
        .arg("constraints.txt"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.in --universal -c constraints.txt
    filelock==3.13.1
        # via torch
    fsspec==2024.6.1
        # via torch
    jinja2==3.1.3
        # via
        #   -c constraints.txt
        #   torch
    markupsafe==3.0.2
        # via jinja2
    mpmath==1.3.0
        # via sympy
    networkx==3.3
        # via torch
    setuptools==70.2.0
        # via torch
    sympy==1.13.1
        # via torch
    torch==2.5.1 ; (platform_machine == 'aarch64' and sys_platform == 'linux') or sys_platform == 'darwin'
        # via -r requirements.in
    torch==2.5.1+cpu ; (platform_machine != 'aarch64' and sys_platform == 'linux') or (sys_platform != 'darwin' and sys_platform != 'linux')
        # via -r requirements.in
    typing-extensions==4.9.0
        # via
        #   -c constraints.txt
        #   torch

    ----- stderr -----
    Resolved 11 packages in [TIME]
    "###
    );

    Ok(())
}

/// Requested distinct local versions with disjoint markers.
///
/// The two `torch` requirements (`+cu118` vs. `+cpu`) can never apply simultaneously, so the
/// resolution should fork and keep both local versions, each under its own marker.
#[test]
fn universal_disjoint_locals() -> Result<()> {
    let context = TestContext::new("3.12").with_exclude_newer("2025-01-30T00:00:00Z");
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str(indoc::indoc! {r"
        --find-links https://astral-sh.github.io/pytorch-mirror/whl/torch_stable.html

        torch==2.0.0+cu118 ; platform_machine == 'x86_64'
        torch==2.0.0+cpu ; platform_machine != 'x86_64'
    "})?;

    uv_snapshot!(context.filters(), windows_filters=false, context.pip_compile()
            .arg("requirements.in")
            .arg("--universal"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.in --universal
    cmake==3.31.4 ; platform_machine == 'x86_64' and sys_platform == 'linux'
        # via triton
    filelock==3.17.0
        # via
        #   torch
        #   triton
    jinja2==3.1.5
        # via torch
    lit==18.1.8 ; platform_machine == 'x86_64' and sys_platform == 'linux'
        # via triton
    markupsafe==3.0.2
        # via jinja2
    mpmath==1.3.0
        # via sympy
    networkx==3.4.2
        # via torch
    sympy==1.13.3
        # via torch
    torch==2.0.0+cpu ; platform_machine != 'x86_64'
        # via -r requirements.in
    torch==2.0.0+cu118 ; platform_machine == 'x86_64'
        # via
        #   -r requirements.in
        #   triton
    triton==2.0.0 ; platform_machine == 'x86_64' and sys_platform == 'linux'
        # via torch
    typing-extensions==4.12.2
        # via torch

    ----- stderr -----
    Resolved 12 packages in [TIME]
    "###
    );

    Ok(())
}

/// Requested distinct local versions with disjoint markers of a package
/// that is also present as a transitive dependency.
///
/// `torchvision` also depends on `torch`, so the forked local versions must be honored even
/// where `torch` is reached transitively (and `torchvision` itself forks on local versions).
#[test]
fn universal_transitive_disjoint_locals() -> Result<()> {
    let context = TestContext::new("3.12").with_exclude_newer("2025-01-30T00:00:00Z");
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str(indoc::indoc! {r"
        --find-links https://astral-sh.github.io/pytorch-mirror/whl/torch_stable.html

        torch==2.0.0+cu118 ; platform_machine == 'x86_64'
        torch==2.0.0+cpu ; platform_machine != 'x86_64'
        torchvision==0.15.1
    "})?;

    // Some marker expressions on the output here are missing due to https://github.com/astral-sh/uv/issues/5086,
    // but the local versions are still respected correctly.
    uv_snapshot!(context.filters(), windows_filters=false, context.pip_compile()
            .arg("requirements.in")
            .arg("--universal"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.in --universal
    certifi==2024.12.14
        # via requests
    charset-normalizer==3.4.1
        # via requests
    cmake==3.31.4 ; platform_machine == 'x86_64' and sys_platform == 'linux'
        # via triton
    filelock==3.17.0
        # via
        #   torch
        #   triton
    idna==3.10
        # via requests
    jinja2==3.1.5
        # via torch
    lit==18.1.8 ; platform_machine == 'x86_64' and sys_platform == 'linux'
        # via triton
    markupsafe==3.0.2
        # via jinja2
    mpmath==1.3.0
        # via sympy
    networkx==3.4.2
        # via torch
    numpy==2.2.2
        # via torchvision
    pillow==11.1.0
        # via torchvision
    requests==2.32.3
        # via torchvision
    sympy==1.13.3
        # via torch
    torch==2.0.0+cpu ; platform_machine != 'x86_64'
        # via
        #   -r requirements.in
        #   torchvision
    torch==2.0.0+cu118 ; platform_machine == 'x86_64'
        # via
        #   -r requirements.in
        #   torchvision
        #   triton
    torchvision==0.15.1 ; platform_machine != 'x86_64' or sys_platform == 'darwin'
        # via -r requirements.in
    torchvision==0.15.1+rocm5.4.2 ; platform_machine == 'x86_64' and sys_platform != 'darwin'
        # via -r requirements.in
    triton==2.0.0 ; platform_machine == 'x86_64' and sys_platform == 'linux'
        # via torch
    typing-extensions==4.12.2
        # via torch
    urllib3==2.3.0
        # via requests

    ----- stderr -----
    Resolved 21 packages in [TIME]
    "###
    );

    Ok(())
}

/// Prefer local versions for dependencies of path requirements.
///
/// `requirements.in` asks for `torch==2.0.0` and the local project (`.`), whose
/// `pyproject.toml` pins `torch==2.0.0+cu118`; the `+cu118` local version should be chosen
/// for the shared requirement rather than the base version.
#[test]
fn universal_local_path_requirement() -> Result<()> {
    let context = TestContext::new("3.12").with_exclude_newer("2025-01-30T00:00:00Z");

    let pyproject_toml = context.temp_dir.child("pyproject.toml");
    pyproject_toml.write_str(indoc! {r#"
        [project]
        name = "example"
        version = "0.0.0"
        dependencies = [
            "torch==2.0.0+cu118"
        ]
        requires-python = ">=3.11"
    "#})?;

    // `.` is the path requirement pointing at the `example` project above.
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str(indoc! {"
        torch==2.0.0
        .
    "})?;

    uv_snapshot!(context.filters(), context.pip_compile()
        .arg("requirements.in")
        .arg("--universal")
        .arg("--find-links")
        .arg("https://astral-sh.github.io/pytorch-mirror/whl/torch_stable.html"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.in --universal
    cmake==3.31.4 ; platform_machine == 'x86_64' and sys_platform == 'linux'
        # via triton
    .
        # via -r requirements.in
    filelock==3.17.0
        # via
        #   torch
        #   triton
    jinja2==3.1.5
        # via torch
    lit==18.1.8 ; platform_machine == 'x86_64' and sys_platform == 'linux'
        # via triton
    markupsafe==3.0.2
        # via jinja2
    mpmath==1.3.0
        # via sympy
    networkx==3.4.2
        # via torch
    sympy==1.13.3
        # via torch
    torch==2.0.0+cu118
        # via
        #   -r requirements.in
        #   example
        #   triton
    triton==2.0.0 ; platform_machine == 'x86_64' and sys_platform == 'linux'
        # via torch
    typing-extensions==4.12.2
        # via torch

    ----- stderr -----
    Resolved 12 packages in [TIME]
    "###
    );

    Ok(())
}

/// If a dependency requests a local version with an overlapping marker expression,
/// we should prefer the local in both forks.
///
/// The project pins `torch==2.0.0+cu118` only for `platform_machine == 'x86_64'`, which
/// overlaps (rather than partitions) the unconditional `torch==2.0.0` requirement; the
/// resolution should still collapse to the single `+cu118` local version.
#[test]
fn universal_overlapping_local_requirement() -> Result<()> {
    let context = TestContext::new("3.12").with_exclude_newer("2025-01-30T00:00:00Z");

    let pyproject_toml = context.temp_dir.child("pyproject.toml");
    pyproject_toml.write_str(indoc! {r#"
        [project]
        name = "example"
        version = "0.0.0"
        dependencies = [
            "torch==2.0.0+cu118 ; platform_machine == 'x86_64'"
        ]
        requires-python = ">=3.11"
    "#})?;

    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str(indoc! {"
        torch==2.0.0
        .
    "})?;

    uv_snapshot!(context.filters(), context.pip_compile()
        .arg("requirements.in")
        .arg("--universal")
        .arg("--find-links")
        .arg("https://astral-sh.github.io/pytorch-mirror/whl/torch_stable.html"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.in --universal
    cmake==3.31.4 ; platform_machine == 'x86_64' and sys_platform == 'linux'
        # via triton
    .
        # via -r requirements.in
    filelock==3.17.0
        # via
        #   torch
        #   triton
    jinja2==3.1.5
        # via torch
    lit==18.1.8 ; platform_machine == 'x86_64' and sys_platform == 'linux'
        # via triton
    markupsafe==3.0.2
        # via jinja2
    mpmath==1.3.0
        # via sympy
    networkx==3.4.2
        # via torch
    sympy==1.13.3
        # via torch
    torch==2.0.0+cu118
        # via
        #   -r requirements.in
        #   example
        #   triton
    triton==2.0.0 ; platform_machine == 'x86_64' and sys_platform == 'linux'
        # via torch
    typing-extensions==4.12.2
        # via torch

    ----- stderr -----
    Resolved 12 packages in [TIME]
    "###
    );

    Ok(())
}

/// If a dependency requests distinct local versions with disjoint marker expressions,
/// we should fork the root requirement.
///
/// Here the disjoint `+cu118`/`+cpu` pins come from the path project's dependencies rather
/// than `requirements.in` directly, so the fork is induced by the path requirement (`.`).
#[test]
fn universal_disjoint_local_requirement() -> Result<()> {
    let context = TestContext::new("3.12").with_exclude_newer("2025-01-30T00:00:00Z");

    let pyproject_toml = context.temp_dir.child("pyproject.toml");
    pyproject_toml.write_str(indoc! {r#"
        [project]
        name = "example"
        version = "0.0.0"
        dependencies = [
            "torch==2.0.0+cu118 ; platform_machine == 'x86_64'",
            "torch==2.0.0+cpu ; platform_machine != 'x86_64'"
        ]
        requires-python = ">=3.11"
    "#})?;

    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str(indoc! {"
        torch==2.0.0
        .
    "})?;

    // Some marker expressions on the output here are missing due to https://github.com/astral-sh/uv/issues/5086,
    // but the local versions are still respected correctly.
    uv_snapshot!(context.filters(), context.pip_compile()
        .arg("requirements.in")
        .arg("--universal")
        .arg("--find-links")
        .arg("https://astral-sh.github.io/pytorch-mirror/whl/torch_stable.html"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.in --universal
    cmake==3.31.4 ; platform_machine == 'x86_64' and sys_platform == 'linux'
        # via triton
    .
        # via -r requirements.in
    filelock==3.17.0
        # via
        #   torch
        #   triton
    jinja2==3.1.5
        # via torch
    lit==18.1.8 ; platform_machine == 'x86_64' and sys_platform == 'linux'
        # via triton
    markupsafe==3.0.2
        # via jinja2
    mpmath==1.3.0
        # via sympy
    networkx==3.4.2
        # via torch
    sympy==1.13.3
        # via torch
    torch==2.0.0+cpu ; platform_machine != 'x86_64'
        # via
        #   -r requirements.in
        #   example
    torch==2.0.0+cu118 ; platform_machine == 'x86_64'
        # via
        #   -r requirements.in
        #   example
        #   triton
    triton==2.0.0 ; platform_machine == 'x86_64' and sys_platform == 'linux'
        # via torch
    typing-extensions==4.12.2
        # via torch

    ----- stderr -----
    Resolved 13 packages in [TIME]
    "###
    );

    Ok(())
}

/// If a dependency requests distinct local versions and non-local versions with disjoint marker
/// expressions, we should fork the root requirement.
///
/// Three disjoint Python-version ranges pin the base version, `+cu118`, and `+cpu`
/// respectively; all three variants must survive in the universal output. Uses Python 3.10 so
/// the `python_version < '3.11'` branch is reachable under `requires-python = ">=3.10"`.
#[test]
fn universal_disjoint_base_or_local_requirement() -> Result<()> {
    let context = TestContext::new("3.10").with_exclude_newer("2025-01-30T00:00:00Z");

    let pyproject_toml = context.temp_dir.child("pyproject.toml");
    pyproject_toml.write_str(indoc! {r#"
        [project]
        name = "example"
        version = "0.0.0"
        dependencies = [
            "torch==2.0.0; python_version < '3.11'",
            "torch==2.0.0+cu118 ; python_version >= '3.11' and python_version <= '3.12'",
            "torch==2.0.0+cpu ; python_version > '3.12'"
        ]
        requires-python = ">=3.10"
    "#})?;

    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str(indoc! {"
        torch==2.0.0
        .
    "})?;

    // Some marker expressions on the output here are missing due to https://github.com/astral-sh/uv/issues/5086,
    // but the local versions are still respected correctly.
    uv_snapshot!(context.filters(), context.pip_compile()
        .arg("requirements.in")
        .arg("--universal")
        .arg("--find-links")
        .arg("https://astral-sh.github.io/pytorch-mirror/whl/torch_stable.html"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.in --universal
    cmake==3.31.4 ; python_full_version < '3.13' and platform_machine == 'x86_64' and sys_platform == 'linux'
        # via triton
    .
        # via -r requirements.in
    filelock==3.17.0
        # via
        #   torch
        #   triton
    jinja2==3.1.5
        # via torch
    lit==18.1.8 ; python_full_version < '3.13' and platform_machine == 'x86_64' and sys_platform == 'linux'
        # via triton
    markupsafe==3.0.2
        # via jinja2
    mpmath==1.3.0
        # via sympy
    networkx==3.4.2
        # via torch
    sympy==1.13.3
        # via torch
    torch==2.0.0 ; (python_full_version < '3.11' and platform_machine == 'aarch64' and sys_platform == 'linux') or (python_full_version < '3.11' and sys_platform == 'darwin')
        # via
        #   -r requirements.in
        #   example
    torch==2.0.0+cpu ; python_full_version >= '3.13'
        # via
        #   -r requirements.in
        #   example
    torch==2.0.0+cu118 ; (python_full_version >= '3.11' and python_full_version < '3.13' and sys_platform == 'darwin') or (python_full_version >= '3.11' and python_full_version < '3.13' and sys_platform == 'linux') or (python_full_version < '3.13' and platform_machine != 'aarch64' and sys_platform == 'linux') or (python_full_version < '3.13' and sys_platform != 'darwin' and sys_platform != 'linux')
        # via
        #   -r requirements.in
        #   example
        #   triton
    triton==2.0.0 ; python_full_version < '3.13' and platform_machine == 'x86_64' and sys_platform == 'linux'
        # via torch
    typing-extensions==4.12.2
        # via torch

    ----- stderr -----
    Resolved 14 packages in [TIME]
    "###
    );

    Ok(())
}

/// If a dependency requests a local version with an overlapping marker expression
/// that form a nested fork, we should prefer the local in both children of the outer
/// fork.
///
/// Two phases: first the nested marker lives on the project's `torch` dependency; then the
/// files are rewritten so the nested marker lives on the path requirement (`.`) itself.
#[test]
fn universal_nested_overlapping_local_requirement() -> Result<()> {
    let context = TestContext::new("3.12").with_exclude_newer("2025-01-30T00:00:00Z");

    let pyproject_toml = context.temp_dir.child("pyproject.toml");
    pyproject_toml.write_str(indoc! {r#"
        [project]
        name = "example"
        version = "0.0.0"
        dependencies = [
            "torch==2.0.0+cu118 ; implementation_name == 'cpython' and os_name == 'Linux'"
        ]
        requires-python = ">=3.11"
    "#})?;

    // The outer fork is on `implementation_name`; the project dependency above nests an
    // additional `os_name` condition inside the `cpython` branch.
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str(indoc! {"
        torch==2.0.0 ; implementation_name == 'cpython'
        torch==2.3.0 ; implementation_name != 'cpython'
        .
    "})?;

    uv_snapshot!(context.filters(), context.pip_compile()
        .arg("requirements.in")
        .arg("--universal")
        .arg("--find-links")
        .arg("https://astral-sh.github.io/pytorch-mirror/whl/torch_stable.html"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.in --universal
    cmake==3.31.4 ; implementation_name == 'cpython' and platform_machine == 'x86_64' and sys_platform == 'linux'
        # via triton
    .
        # via -r requirements.in
    filelock==3.17.0
        # via
        #   pytorch-triton-rocm
        #   torch
        #   triton
    fsspec==2024.12.0 ; implementation_name != 'cpython'
        # via torch
    intel-openmp==2021.4.0 ; implementation_name != 'cpython' and sys_platform == 'win32'
        # via mkl
    jinja2==3.1.5
        # via torch
    lit==18.1.8 ; implementation_name == 'cpython' and platform_machine == 'x86_64' and sys_platform == 'linux'
        # via triton
    markupsafe==3.0.2
        # via jinja2
    mkl==2021.4.0 ; implementation_name != 'cpython' and sys_platform == 'win32'
        # via torch
    mpmath==1.3.0
        # via sympy
    networkx==3.4.2
        # via torch
    pytorch-triton-rocm==2.3.0 ; (implementation_name != 'cpython' and platform_machine != 'aarch64' and sys_platform == 'linux') or (implementation_name != 'cpython' and sys_platform != 'darwin' and sys_platform != 'linux' and sys_platform != 'win32')
        # via torch
    sympy==1.13.3
        # via torch
    tbb==2021.13.1 ; implementation_name != 'cpython' and sys_platform == 'win32'
        # via mkl
    torch==2.0.0+cu118 ; implementation_name == 'cpython'
        # via
        #   -r requirements.in
        #   example
        #   triton
    torch==2.3.0 ; (implementation_name != 'cpython' and platform_machine == 'aarch64' and sys_platform == 'linux') or (implementation_name != 'cpython' and sys_platform == 'darwin') or (implementation_name != 'cpython' and sys_platform == 'win32')
        # via -r requirements.in
    torch==2.3.0+rocm6.0 ; (implementation_name != 'cpython' and platform_machine != 'aarch64' and sys_platform == 'linux') or (implementation_name != 'cpython' and sys_platform != 'darwin' and sys_platform != 'linux' and sys_platform != 'win32')
        # via -r requirements.in
    triton==2.0.0 ; implementation_name == 'cpython' and platform_machine == 'x86_64' and sys_platform == 'linux'
        # via torch
    typing-extensions==4.12.2
        # via torch

    ----- stderr -----
    Resolved 19 packages in [TIME]
    "###
    );

    // A similar case, except the nested marker is now on the path requirement.
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str(indoc! {"
        torch==2.0.0 ; platform_machine == 'x86_64'
        torch==2.3.0 ; platform_machine != 'x86_64'
        . ; os_name == 'Linux'
    "})?;

    let pyproject_toml = context.temp_dir.child("pyproject.toml");
    pyproject_toml.write_str(indoc! {r#"
        [project]
        name = "example"
        version = "0.0.0"
        dependencies = [
            "torch==2.0.0+cu118 ; platform_machine == 'x86_64'",
        ]
        requires-python = ">=3.11"
    "#})?;

    uv_snapshot!(context.filters(), context.pip_compile()
        .arg("requirements.in")
        .arg("--universal")
        .arg("--find-links")
        .arg("https://astral-sh.github.io/pytorch-mirror/whl/torch_stable.html"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.in --universal
    cmake==3.31.4 ; platform_machine == 'x86_64' and sys_platform == 'linux'
        # via triton
    . ; os_name == 'Linux'
        # via -r requirements.in
    filelock==3.17.0
        # via
        #   torch
        #   triton
    fsspec==2024.12.0 ; platform_machine != 'x86_64'
        # via torch
    intel-openmp==2021.4.0 ; platform_machine != 'x86_64' and sys_platform == 'win32'
        # via mkl
    jinja2==3.1.5
        # via torch
    lit==18.1.8 ; platform_machine == 'x86_64' and sys_platform == 'linux'
        # via triton
    markupsafe==3.0.2
        # via jinja2
    mkl==2021.4.0 ; platform_machine != 'x86_64' and sys_platform == 'win32'
        # via torch
    mpmath==1.3.0
        # via sympy
    networkx==3.4.2
        # via torch
    sympy==1.13.3
        # via torch
    tbb==2021.13.1 ; platform_machine != 'x86_64' and sys_platform == 'win32'
        # via mkl
    torch==2.0.0+cu118 ; platform_machine == 'x86_64'
        # via
        #   -r requirements.in
        #   example
        #   triton
    torch==2.3.0 ; platform_machine != 'x86_64'
        # via -r requirements.in
    triton==2.0.0 ; platform_machine == 'x86_64' and sys_platform == 'linux'
        # via torch
    typing-extensions==4.12.2
        # via torch

    ----- stderr -----
    Resolved 17 packages in [TIME]
    "###
    );

    Ok(())
}

/// If a dependency requests distinct local versions with disjoint marker expressions
/// that form a nested fork, we should create a nested fork.
///
/// The outer fork is on `os_name`; within the `os_name == 'Linux'` branch, the path project's
/// dependencies fork again on `implementation_name` between `+cu118` and `+cpu`.
#[test]
fn universal_nested_disjoint_local_requirement() -> Result<()> {
    let context = TestContext::new("3.12").with_exclude_newer("2025-01-30T00:00:00Z");

    let pyproject_toml = context.temp_dir.child("pyproject.toml");
    pyproject_toml.write_str(indoc! {r#"
        [project]
        name = "example"
        version = "0.0.0"
        dependencies = [
            "torch==2.0.0+cu118 ; implementation_name == 'cpython'",
            "torch==2.0.0+cpu ; implementation_name != 'cpython'"
        ]
        requires-python = ">=3.11"
    "#})?;

    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str(indoc! {"
        torch==2.0.0 ; os_name == 'Linux'
        torch==2.3.0 ; os_name != 'Linux'
        . ; os_name == 'Linux'
    "})?;

    // Some marker expressions on the output here are missing due to https://github.com/astral-sh/uv/issues/5086,
    // but the local versions are still respected correctly.
    uv_snapshot!(context.filters(), context.pip_compile()
        .arg("requirements.in")
        .arg("--universal")
        .arg("--find-links")
        .arg("https://astral-sh.github.io/pytorch-mirror/whl/torch_stable.html"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.in --universal
    cmake==3.31.4 ; implementation_name == 'cpython' and os_name == 'Linux' and platform_machine == 'x86_64' and sys_platform == 'linux'
        # via triton
    . ; os_name == 'Linux'
        # via -r requirements.in
    filelock==3.17.0
        # via
        #   pytorch-triton-rocm
        #   torch
        #   triton
    fsspec==2024.12.0 ; os_name != 'Linux'
        # via torch
    intel-openmp==2021.4.0 ; os_name != 'Linux' and sys_platform == 'win32'
        # via mkl
    jinja2==3.1.5
        # via torch
    lit==18.1.8 ; implementation_name == 'cpython' and os_name == 'Linux' and platform_machine == 'x86_64' and sys_platform == 'linux'
        # via triton
    markupsafe==3.0.2
        # via jinja2
    mkl==2021.4.0 ; os_name != 'Linux' and sys_platform == 'win32'
        # via torch
    mpmath==1.3.0
        # via sympy
    networkx==3.4.2
        # via torch
    pytorch-triton-rocm==2.3.0 ; (os_name != 'Linux' and platform_machine != 'aarch64' and sys_platform == 'linux') or (os_name != 'Linux' and sys_platform != 'darwin' and sys_platform != 'linux' and sys_platform != 'win32')
        # via torch
    sympy==1.13.3
        # via torch
    tbb==2021.13.1 ; os_name != 'Linux' and sys_platform == 'win32'
        # via mkl
    torch==2.0.0+cpu ; implementation_name != 'cpython' and os_name == 'Linux'
        # via
        #   -r requirements.in
        #   example
    torch==2.0.0+cu118 ; implementation_name == 'cpython' and os_name == 'Linux'
        # via
        #   -r requirements.in
        #   example
        #   triton
    torch==2.3.0 ; (os_name != 'Linux' and platform_machine == 'aarch64' and sys_platform == 'linux') or (os_name != 'Linux' and sys_platform == 'darwin') or (os_name != 'Linux' and sys_platform == 'win32')
        # via -r requirements.in
    torch==2.3.0+rocm6.0 ; (os_name != 'Linux' and platform_machine != 'aarch64' and sys_platform == 'linux') or (os_name != 'Linux' and sys_platform != 'darwin' and sys_platform != 'linux' and sys_platform != 'win32')
        # via -r requirements.in
    triton==2.0.0 ; implementation_name == 'cpython' and os_name == 'Linux' and platform_machine == 'x86_64' and sys_platform == 'linux'
        # via torch
    typing-extensions==4.12.2
        # via torch

    ----- stderr -----
    Resolved 20 packages in [TIME]
    "###
    );

    Ok(())
}

/// Respect an existing pre-release preference, even if preferences aren't enabled.
///
/// The pre-existing `requirements.txt` (passed via `-o`) pins `cffi==1.17.0rc1`; the resolver
/// should keep that pre-release rather than downgrading to the latest stable.
#[test]
fn existing_prerelease_preference() -> Result<()> {
    let context = TestContext::new("3.12").with_exclude_newer("2024-07-17T00:00:00Z");
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str(indoc::indoc! {r"
        cffi
    "})?;

    // NOTE(review): `pyparser` (vs. the resolved `pycparser` below) looks like a typo in the
    // fixture; it never matches a package in the graph, so the preference is inert — confirm
    // whether the misspelling is intentional.
    let requirements_txt = context.temp_dir.child("requirements.txt");
    requirements_txt.write_str(indoc::indoc! {r"
        cffi==1.17.0rc1
        pyparser==2.22
    "})?;

    uv_snapshot!(context.filters(), windows_filters=false, context.pip_compile()
            .arg("requirements.in")
            .arg("-o")
            .arg("requirements.txt"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.in -o requirements.txt
    cffi==1.17.0rc1
        # via -r requirements.in
    pycparser==2.22
        # via cffi

    ----- stderr -----
    Resolved 2 packages in [TIME]
    "###
    );

    Ok(())
}

/// Requested distinct pre-release strategies with disjoint markers.
///
/// Both disjoint branches admit `1.16.0rc1`, so the resolution collapses to a single
/// pre-release version rather than forking.
#[test]
fn universal_disjoint_prereleases() -> Result<()> {
    let context = TestContext::new("3.12").with_exclude_newer("2024-07-17T00:00:00Z");
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str(indoc::indoc! {r"
        cffi >= 1.16.0rc1 ; os_name != 'linux'
        cffi >= 1.16.0rc1, <1.16.0rc2 ; os_name == 'linux'
    "})?;

    uv_snapshot!(context.filters(), windows_filters=false, context.pip_compile()
            .arg("requirements.in")
            .arg("--universal"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.in --universal
    cffi==1.16.0rc1
        # via -r requirements.in
    pycparser==2.22
        # via cffi

    ----- stderr -----
    Resolved 2 packages in [TIME]
    "###
    );

    Ok(())
}

/// Requested distinct pre-release strategies with disjoint markers.
///
/// The existing output file pins `cffi==1.17.0rc1`; that preference should be honored across
/// both disjoint branches, yielding a single pre-release version.
#[test]
fn universal_disjoint_prereleases_preference() -> Result<()> {
    let context = TestContext::new("3.12").with_exclude_newer("2024-07-17T00:00:00Z");
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str(indoc::indoc! {r"
        cffi ; os_name != 'linux'
        cffi > 1.16.0 ; os_name == 'linux'
    "})?;

    // NOTE(review): `pyparser` matches nothing in the graph (the resolved package is
    // `pycparser`), so that preference line is inert — possibly a fixture typo; confirm.
    let requirements_txt = context.temp_dir.child("requirements.txt");
    requirements_txt.write_str(indoc::indoc! {r"
        cffi==1.17.0rc1
        pyparser==2.22
    "})?;

    uv_snapshot!(context.filters(), windows_filters=false, context.pip_compile()
            .arg("requirements.in")
            .arg("-o")
            .arg("requirements.txt")
            .arg("--universal"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.in -o requirements.txt --universal
    cffi==1.17.0rc1
        # via -r requirements.in
    pycparser==2.22
        # via cffi

    ----- stderr -----
    Resolved 2 packages in [TIME]
    "###
    );

    Ok(())
}

/// Requested distinct pre-release strategies with disjoint markers.
///
/// The existing output file carries marker-specific preferences (`1.16.0` vs `1.16.0rc1`);
/// today the resolver collapses to the single stable `1.16.0`.
///
/// TODO(charlie): This should resolve to two different `cffi` versions, one for each fork.
#[test]
fn universal_disjoint_prereleases_preference_marker() -> Result<()> {
    let context = TestContext::new("3.12").with_exclude_newer("2024-07-17T00:00:00Z");
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str(indoc::indoc! {r"
        cffi ; os_name != 'linux'
        cffi >= 1.16.0rc1 ; os_name == 'linux'
    "})?;

    let requirements_txt = context.temp_dir.child("requirements.txt");
    requirements_txt.write_str(indoc::indoc! {r"
        cffi==1.16.0 ; os_name != 'linux'
        cffi==1.16.0rc1 ; os_name == 'linux'
        pyparser==2.22
    "})?;

    uv_snapshot!(context.filters(), windows_filters=false, context.pip_compile()
            .arg("requirements.in")
            .arg("-o")
            .arg("requirements.txt")
            .arg("--universal"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.in -o requirements.txt --universal
    cffi==1.16.0
        # via -r requirements.in
    pycparser==2.22
        # via cffi

    ----- stderr -----
    Resolved 2 packages in [TIME]
    "###
    );

    Ok(())
}

/// Resolve to a single version as `--prerelease=allow` is provided, even though the first branch
/// doesn't include a pre-release marker.
///
/// With pre-releases globally allowed, the non-linux branch may pick `1.16.0rc2` while the
/// linux branch stays on stable `1.16.0` — two versions, each under its own marker.
#[test]
fn universal_disjoint_prereleases_allow() -> Result<()> {
    let context = TestContext::new("3.12").with_exclude_newer("2024-07-17T00:00:00Z");
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str(indoc::indoc! {r"
        cffi >= 1.15.0, < 1.17.0 ; os_name == 'linux'
        cffi >= 1.15.0, <= 1.16.0rc2 ; os_name != 'linux'
    "})?;

    uv_snapshot!(context.filters(), windows_filters=false, context.pip_compile()
            .arg("requirements.in")
            .arg("--universal")
            .arg("--prerelease")
            .arg("allow"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.in --universal --prerelease allow
    cffi==1.16.0rc2 ; os_name != 'linux'
        # via -r requirements.in
    cffi==1.16.0 ; os_name == 'linux'
        # via -r requirements.in
    pycparser==2.22
        # via cffi

    ----- stderr -----
    Resolved 3 packages in [TIME]
    "###
    );

    Ok(())
}

/// Requested distinct pre-release strategies with disjoint markers for a package
/// that is also present as a transitive dependency.
#[test]
fn universal_transitive_disjoint_prerelease_requirement() -> Result<()> {
    // Pin the registry view to a fixed timestamp so resolved versions stay stable.
    let context = TestContext::new("3.12").with_exclude_newer("2024-07-17T00:00:00Z");
    let requirements_in = context.temp_dir.child("requirements.in");
    // `cffi` is requested directly under disjoint markers AND pulled in
    // transitively via `cryptography` (see the snapshot's `# via` annotations).
    requirements_in.write_str(indoc::indoc! {r"
        cffi ; os_name == 'linux'
        cffi >= 1.17.0rc1 ; os_name != 'linux'
        cryptography
    "})?;

    uv_snapshot!(context.filters(), windows_filters=false, context.pip_compile()
            .arg("requirements.in")
            .arg("--universal"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.in --universal
    cffi==1.16.0 ; os_name == 'linux'
        # via
        #   -r requirements.in
        #   cryptography
    cffi==1.17.0rc1 ; os_name != 'linux'
        # via
        #   -r requirements.in
        #   cryptography
    cryptography==42.0.8
        # via -r requirements.in
    pycparser==2.22
        # via cffi

    ----- stderr -----
    Resolved 4 packages in [TIME]
    "###
    );

    Ok(())
}

/// Ensure that the global pre-release mode is respected across forks.
#[test]
fn universal_prerelease_mode() -> Result<()> {
    // Pin the registry view to a fixed timestamp so resolved versions stay stable.
    let context = TestContext::new("3.12").with_exclude_newer("2024-07-17T00:00:00Z");
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str(indoc::indoc! {r"
        cffi ; os_name == 'linux'
        cffi >= 1.17.0rc1 ; os_name != 'linux'
    "})?;

    // With the global `--prerelease=allow`, both marker branches settle on the
    // same pre-release version, so the output is a single unconditional pin.
    uv_snapshot!(context.filters(), windows_filters=false, context.pip_compile()
            .arg("--prerelease=allow")
            .arg("requirements.in")
            .arg("--universal"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] --prerelease=allow requirements.in --universal
    cffi==1.17.0rc1
        # via -r requirements.in
    pycparser==2.22
        # via cffi

    ----- stderr -----
    Resolved 2 packages in [TIME]
    "###
    );

    Ok(())
}

/// If a dependency requests a pre-release version with an overlapping marker expression,
/// we should prefer the pre-release version in both forks.
#[test]
fn universal_overlapping_prerelease_requirement() -> Result<()> {
    // Pin the registry view to a fixed timestamp so resolved versions stay stable.
    let context = TestContext::new("3.12").with_exclude_newer("2024-07-17T00:00:00Z");

    // Local project whose dependency on `cffi` is conditional on a marker that
    // overlaps with the unconditional `cffi` requirement below.
    let pyproject_toml = context.temp_dir.child("pyproject.toml");
    pyproject_toml.write_str(indoc! {r#"
        [project]
        name = "example"
        version = "0.0.0"
        dependencies = [
            "cffi >= 1.17.0rc1 ; os_name == 'Linux'"
        ]
        requires-python = ">=3.11"
    "#})?;

    let requirements_in = context.temp_dir.child("requirements.in");
    // `.` is the local project defined by the `pyproject.toml` above.
    requirements_in.write_str(indoc! {"
        cffi
        .
    "})?;

    uv_snapshot!(context.filters(), windows_filters=false, context.pip_compile()
            .arg("requirements.in")
            .arg("--universal"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.in --universal
    cffi==1.17.0rc1
        # via
        #   -r requirements.in
        #   example
    .
        # via -r requirements.in
    pycparser==2.22
        # via cffi

    ----- stderr -----
    Resolved 3 packages in [TIME]
    "###
    );

    Ok(())
}

/// If a dependency requests distinct pre-release strategies with disjoint marker expressions,
/// we should fork the root requirement.
#[test]
fn universal_disjoint_prerelease_requirement() -> Result<()> {
    // Pin the registry view to a fixed timestamp so resolved versions stay stable.
    let context = TestContext::new("3.12").with_exclude_newer("2024-07-17T00:00:00Z");

    // Local project with disjoint `os_name` markers: one branch allows a
    // pre-release, the other pins a final release.
    let pyproject_toml = context.temp_dir.child("pyproject.toml");
    pyproject_toml.write_str(indoc! {r#"
        [project]
        name = "example"
        version = "0.0.0"
        dependencies = [
            "cffi >= 1.17.0rc1 ; os_name == 'Linux'",
            "cffi==1.15.0 ; os_name != 'Linux'"
        ]
        requires-python = ">=3.11"
    "#})?;

    let requirements_in = context.temp_dir.child("requirements.in");
    // `.` is the local project defined by the `pyproject.toml` above.
    requirements_in.write_str(indoc! {"
        cffi
        .
    "})?;

    // Some marker expressions on the output here are missing due to https://github.com/astral-sh/uv/issues/5086,
    // but the pre-release versions are still respected correctly.
    uv_snapshot!(context.filters(), windows_filters=false, context.pip_compile()
            .arg("requirements.in")
            .arg("--universal"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.in --universal
    cffi==1.15.0 ; os_name != 'Linux'
        # via
        #   -r requirements.in
        #   example
    cffi==1.17.0rc1 ; os_name == 'Linux'
        # via
        #   -r requirements.in
        #   example
    .
        # via -r requirements.in
    pycparser==2.22
        # via cffi

    ----- stderr -----
    Resolved 4 packages in [TIME]
    "###
    );

    Ok(())
}

/// Perform a universal resolution that requires narrowing the supported Python range in one of the
/// fork branches.
#[test]
fn universal_requires_python() -> Result<()> {
    let context = TestContext::new("3.12");
    let requirements_in = context.temp_dir.child("requirements.in");
    // Disjoint `python_version` markers force a fork; resolving with `-p 3.8`
    // exercises both branches. Note the output normalizes `python_version` to
    // `python_full_version` markers.
    requirements_in.write_str(indoc::indoc! {r"
        numpy >=1.26 ; python_version >= '3.9'
        numpy <1.26 ; python_version < '3.9'
    "})?;

    uv_snapshot!(context.filters(), windows_filters=false, context.pip_compile()
            .arg("requirements.in")
            .arg("-p")
            .arg("3.8")
            .arg("--universal"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.in -p 3.8 --universal
    numpy==1.24.4 ; python_full_version < '3.9'
        # via -r requirements.in
    numpy==1.26.4 ; python_full_version >= '3.9'
        # via -r requirements.in

    ----- stderr -----
    warning: The requested Python version 3.8 is not available; 3.12.[X] will be used to build dependencies instead.
    Resolved 2 packages in [TIME]
    "###
    );

    Ok(())
}

/// Perform a universal resolution that requires narrowing the supported Python range in a non-fork.
#[test]
fn universal_requires_python_incomplete() -> Result<()> {
    let context = TestContext::new("3.12");
    let requirements_in = context.temp_dir.child("requirements.in");
    // A single conditional requirement (no disjoint counterpart): the marker only
    // covers part of the requested `-p 3.7` range, so the Python range must be
    // narrowed without forking.
    requirements_in.write_str(indoc::indoc! {r"
        uv; python_version >= '3.8'
    "})?;

    uv_snapshot!(context.filters(), windows_filters=false, context.pip_compile()
            .arg("requirements.in")
            .arg("-p")
            .arg("3.7")
            .arg("--universal"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.in -p 3.7 --universal
    uv==0.1.24 ; python_full_version >= '3.8'
        # via -r requirements.in

    ----- stderr -----
    warning: The requested Python version 3.7 is not available; 3.12.[X] will be used to build dependencies instead.
    Resolved 1 package in [TIME]
    "###
    );

    Ok(())
}

/// This test captures a case[1] that was broken by Requires-Python version
/// narrowing[2] in the universal resolver, and was later fixed by [3].
///
/// The key property asserted by the snapshot: packages resolved to a single
/// version (e.g. `pylint`, `babel`) appear exactly once and unconditionally,
/// while genuinely forked packages (e.g. `sphinx`, `alabaster`) appear once per
/// disjoint `python_full_version` branch.
///
/// [1]: https://github.com/astral-sh/uv/issues/4885
/// [2]: https://github.com/astral-sh/uv/pull/4707
/// [3]: https://github.com/astral-sh/uv/pull/5597
#[test]
fn universal_no_repeated_unconditional_distributions_1() -> Result<()> {
    let context = TestContext::new("3.12");
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str(indoc::indoc! {r"
        pylint
        sphinx
    "})?;

    uv_snapshot!(context.filters(), windows_filters=false, context.pip_compile()
            .arg("requirements.in")
            .arg("-p")
            .arg("3.8")
            .arg("--universal"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.in -p 3.8 --universal
    alabaster==0.7.13 ; python_full_version < '3.9'
        # via sphinx
    alabaster==0.7.16 ; python_full_version >= '3.9'
        # via sphinx
    astroid==3.1.0
        # via pylint
    babel==2.14.0
        # via sphinx
    certifi==2024.2.2
        # via requests
    charset-normalizer==3.3.2
        # via requests
    colorama==0.4.6 ; sys_platform == 'win32'
        # via
        #   pylint
        #   sphinx
    dill==0.3.8
        # via pylint
    docutils==0.20.1
        # via sphinx
    idna==3.6
        # via requests
    imagesize==1.4.1
        # via sphinx
    importlib-metadata==7.1.0 ; python_full_version < '3.10'
        # via sphinx
    isort==5.13.2
        # via pylint
    jinja2==3.1.3
        # via sphinx
    markupsafe==2.1.5
        # via jinja2
    mccabe==0.7.0
        # via pylint
    packaging==24.0
        # via sphinx
    platformdirs==4.2.0
        # via pylint
    pygments==2.17.2
        # via sphinx
    pylint==3.1.0
        # via -r requirements.in
    pytz==2024.1 ; python_full_version < '3.9'
        # via babel
    requests==2.31.0
        # via sphinx
    snowballstemmer==2.2.0
        # via sphinx
    sphinx==7.1.2 ; python_full_version < '3.9'
        # via -r requirements.in
    sphinx==7.2.6 ; python_full_version >= '3.9'
        # via -r requirements.in
    sphinxcontrib-applehelp==1.0.4 ; python_full_version < '3.9'
        # via sphinx
    sphinxcontrib-applehelp==1.0.8 ; python_full_version >= '3.9'
        # via sphinx
    sphinxcontrib-devhelp==1.0.2 ; python_full_version < '3.9'
        # via sphinx
    sphinxcontrib-devhelp==1.0.6 ; python_full_version >= '3.9'
        # via sphinx
    sphinxcontrib-htmlhelp==2.0.1 ; python_full_version < '3.9'
        # via sphinx
    sphinxcontrib-htmlhelp==2.0.5 ; python_full_version >= '3.9'
        # via sphinx
    sphinxcontrib-jsmath==1.0.1
        # via sphinx
    sphinxcontrib-qthelp==1.0.3 ; python_full_version < '3.9'
        # via sphinx
    sphinxcontrib-qthelp==1.0.7 ; python_full_version >= '3.9'
        # via sphinx
    sphinxcontrib-serializinghtml==1.1.5 ; python_full_version < '3.9'
        # via sphinx
    sphinxcontrib-serializinghtml==1.1.10 ; python_full_version >= '3.9'
        # via sphinx
    tomli==2.0.1 ; python_full_version < '3.11'
        # via pylint
    tomlkit==0.12.4
        # via pylint
    typing-extensions==4.10.0 ; python_full_version < '3.11'
        # via
        #   astroid
        #   pylint
    urllib3==2.2.1
        # via requests
    zipp==3.18.1 ; python_full_version < '3.10'
        # via importlib-metadata

    ----- stderr -----
    warning: The requested Python version 3.8 is not available; 3.12.[X] will be used to build dependencies instead.
    Resolved 41 packages in [TIME]
    "###
    );

    Ok(())
}

/// This test captures a case[1] that was broken by marker normalization.
///
/// The snapshot asserts that `dill==0.3.1.1` appears exactly once and
/// unconditionally, even though it is both a direct requirement and a
/// `pylint` dependency.
///
/// [1]: https://github.com/astral-sh/uv/issues/6064
#[test]
fn universal_no_repeated_unconditional_distributions_2() -> Result<()> {
    let context = TestContext::new("3.12");
    let requirements_in = context.temp_dir.child("requirements.in");
    // The `dill` pin constrains which `pylint` version can be selected.
    requirements_in.write_str(indoc::indoc! {r"
        pylint
        dill==0.3.1.1
    "})?;

    uv_snapshot!(context.filters(), windows_filters=false, context.pip_compile()
            .arg("requirements.in")
            .arg("-p")
            .arg("3.11")
            .arg("--universal"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.in -p 3.11 --universal
    astroid==2.13.5
        # via pylint
    colorama==0.4.6 ; sys_platform == 'win32'
        # via pylint
    dill==0.3.1.1
        # via
        #   -r requirements.in
        #   pylint
    isort==5.13.2
        # via pylint
    lazy-object-proxy==1.10.0
        # via astroid
    mccabe==0.7.0
        # via pylint
    platformdirs==4.2.0
        # via pylint
    pylint==2.15.8
        # via -r requirements.in
    tomlkit==0.12.4
        # via pylint
    wrapt==1.16.0
        # via astroid

    ----- stderr -----
    warning: The requested Python version 3.11 is not available; 3.12.[X] will be used to build dependencies instead.
    Resolved 10 packages in [TIME]
    "###
    );

    Ok(())
}

/// Solve for upper bounds before solving for lower bounds. A solution that satisfies `pylint < 3`
/// can also work for `pylint > 2`, but the inverse isn't true (due to maximum version selection).
#[test]
fn universal_prefer_upper_bounds() -> Result<()> {
    let context = TestContext::new("3.12");
    let requirements_in = context.temp_dir.child("requirements.in");
    // Disjoint `sys_platform` markers: both branches can be satisfied by a single
    // `pylint` in the `(2, 3)` range, so no fork is needed in the output.
    requirements_in.write_str(indoc::indoc! {r"
        pylint < 3 ; sys_platform == 'darwin'
        pylint > 2 ; sys_platform != 'darwin'
    "})?;

    uv_snapshot!(context.filters(), windows_filters=false, context.pip_compile()
            .arg("requirements.in")
            .arg("-p")
            .arg("3.8")
            .arg("--universal"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.in -p 3.8 --universal
    astroid==2.15.8
        # via pylint
    colorama==0.4.6 ; sys_platform == 'win32'
        # via pylint
    dill==0.3.8
        # via pylint
    isort==5.13.2
        # via pylint
    lazy-object-proxy==1.10.0
        # via astroid
    mccabe==0.7.0
        # via pylint
    platformdirs==4.2.0
        # via pylint
    pylint==2.17.7
        # via -r requirements.in
    tomli==2.0.1 ; python_full_version < '3.11'
        # via pylint
    tomlkit==0.12.4
        # via pylint
    typing-extensions==4.10.0 ; python_full_version < '3.11'
        # via
        #   astroid
        #   pylint
    wrapt==1.16.0
        # via astroid

    ----- stderr -----
    warning: The requested Python version 3.8 is not available; 3.12.[X] will be used to build dependencies instead.
    Resolved 12 packages in [TIME]
    "###
    );

    Ok(())
}

/// Remove `python_version` markers that are always true.
#[test]
fn universal_unnecessary_python() -> Result<()> {
    let context = TestContext::new("3.12");
    let requirements_in = context.temp_dir.child("requirements.in");
    // With `-p 3.8`, `python_version >= '3.7'` is always true, so the marker
    // should be stripped from the output entirely.
    requirements_in.write_str(indoc::indoc! {r"
        iniconfig ; python_version >= '3.7'
    "})?;

    uv_snapshot!(context.filters(), windows_filters=false, context.pip_compile()
            .arg("requirements.in")
            .arg("-p")
            .arg("3.8")
            .arg("--universal"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.in -p 3.8 --universal
    iniconfig==2.0.0
        # via -r requirements.in

    ----- stderr -----
    warning: The requested Python version 3.8 is not available; 3.12.[X] will be used to build dependencies instead.
    Resolved 1 package in [TIME]
    "###
    );

    Ok(())
}

/// Tests the markers are propagated correctly.
///
/// The main issue here is that we depend on `torch` and `torchvision`
/// directly, but our dependency on the specific `torch` version is conditional
/// on the marker environment. This is usually fine, except in this case,
/// `torchvision` has an unconditional dependency on `torch`, and this resulted
/// in the markers being dropped. As a result, we'd previously write a
/// `requirements.txt` file that unconditionally depended on two different
/// versions of `torch`.
///
/// See: <https://github.com/astral-sh/uv/issues/5086>
#[test]
fn universal_marker_propagation() -> Result<()> {
    let context = TestContext::new("3.12");
    let requirements_in = context.temp_dir.child("requirements.in");
    // `--find-links` points at a PyTorch wheel mirror so platform-specific
    // variants (e.g. `+rocm5.4.2` local versions) are available to the resolver.
    requirements_in.write_str(indoc::indoc! {r"
        --find-links https://astral-sh.github.io/pytorch-mirror/whl/torch_stable.html

        torch==2.0.0 ; platform_machine == 'x86_64'
        torch==2.2.0 ; platform_machine != 'x86_64'
        torchvision
    "})?;

    // The snapshot asserts every `torch`/`torchvision` entry carries a marker —
    // no unconditional duplicate pins.
    uv_snapshot!(context.filters(), windows_filters=false, context.pip_compile()
            .arg("requirements.in")
            .arg("-p")
            .arg("3.8")
            .arg("--universal"), @r"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.in -p 3.8 --universal
    certifi==2024.2.2
        # via requests
    charset-normalizer==3.3.2
        # via requests
    cmake==3.28.4 ; platform_machine == 'x86_64' and sys_platform != 'darwin' and sys_platform != 'win32'
        # via pytorch-triton-rocm
    filelock==3.13.1
        # via
        #   pytorch-triton-rocm
        #   torch
    fsspec==2024.3.1 ; platform_machine != 'x86_64'
        # via torch
    idna==3.6
        # via requests
    jinja2==3.1.3
        # via torch
    lit==18.1.2 ; platform_machine == 'x86_64' and sys_platform != 'darwin' and sys_platform != 'win32'
        # via pytorch-triton-rocm
    markupsafe==2.1.5
        # via jinja2
    mpmath==1.3.0
        # via sympy
    networkx==3.1 ; python_full_version < '3.9'
        # via torch
    networkx==3.2 ; python_full_version >= '3.9'
        # via torch
    numpy==1.24.4 ; python_full_version < '3.9'
        # via torchvision
    numpy==1.26.4 ; python_full_version >= '3.9'
        # via torchvision
    pillow==10.2.0
        # via torchvision
    pytorch-triton-rocm==2.0.2 ; platform_machine == 'x86_64' and sys_platform != 'darwin' and sys_platform != 'win32'
        # via torch
    requests==2.31.0
        # via torchvision
    sympy==1.12
        # via torch
    torch==2.0.0 ; (platform_machine == 'x86_64' and sys_platform == 'darwin') or (platform_machine == 'x86_64' and sys_platform == 'win32')
        # via
        #   -r requirements.in
        #   torchvision
    torch==2.0.0+rocm5.4.2 ; platform_machine == 'x86_64' and sys_platform != 'darwin' and sys_platform != 'win32'
        # via
        #   -r requirements.in
        #   pytorch-triton-rocm
        #   torchvision
    torch==2.2.0 ; platform_machine != 'x86_64'
        # via
        #   -r requirements.in
        #   torchvision
    torchvision==0.15.1 ; (platform_machine == 'x86_64' and sys_platform == 'darwin') or (platform_machine == 'x86_64' and sys_platform == 'win32')
        # via -r requirements.in
    torchvision==0.15.1+rocm5.4.2 ; platform_machine == 'x86_64' and sys_platform != 'darwin' and sys_platform != 'win32'
        # via -r requirements.in
    torchvision==0.17.0 ; platform_machine != 'x86_64'
        # via -r requirements.in
    typing-extensions==4.10.0
        # via torch
    urllib3==2.2.1
        # via requests

    ----- stderr -----
    warning: The requested Python version 3.8 is not available; 3.12.[X] will be used to build dependencies instead.
    Resolved 26 packages in [TIME]
    "
    );

    Ok(())
}

/// Request the same package with different extras under disjoint markers.
/// With default extra-stripping, a single `flask` entry is emitted whose marker
/// is the union of both branches; each extra's own dependency (`asgiref`,
/// `python-dotenv`) keeps its branch-specific marker.
#[test]
fn universal_disjoint_extra() -> Result<()> {
    let context = TestContext::new("3.12");
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str(indoc::indoc! {r"
        flask[async]; sys_platform == 'linux'
        flask[dotenv]; sys_platform == 'darwin'
    "})?;

    uv_snapshot!(context.filters(), windows_filters=false, context.pip_compile()
            .arg("requirements.in")
            .arg("--universal"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.in --universal
    asgiref==3.8.1 ; sys_platform == 'linux'
        # via flask
    blinker==1.7.0 ; sys_platform == 'darwin' or sys_platform == 'linux'
        # via flask
    click==8.1.7 ; sys_platform == 'darwin' or sys_platform == 'linux'
        # via flask
    flask==3.0.2 ; sys_platform == 'darwin' or sys_platform == 'linux'
        # via -r requirements.in
    itsdangerous==2.1.2 ; sys_platform == 'darwin' or sys_platform == 'linux'
        # via flask
    jinja2==3.1.3 ; sys_platform == 'darwin' or sys_platform == 'linux'
        # via flask
    markupsafe==2.1.5 ; sys_platform == 'darwin' or sys_platform == 'linux'
        # via
        #   jinja2
        #   werkzeug
    python-dotenv==1.0.1 ; sys_platform == 'darwin'
        # via flask
    werkzeug==3.0.1 ; sys_platform == 'darwin' or sys_platform == 'linux'
        # via flask

    ----- stderr -----
    Resolved 9 packages in [TIME]
    "###
    );

    Ok(())
}

/// Same inputs as `universal_disjoint_extra`, but with `--no-strip-extras`:
/// in addition to the bare `flask` entry, each extra-qualified requirement
/// (`flask[async]`, `flask[dotenv]`) is emitted with its own marker.
#[test]
fn universal_disjoint_extra_no_strip() -> Result<()> {
    let context = TestContext::new("3.12");
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str(indoc::indoc! {r"
        flask[async]; sys_platform == 'linux'
        flask[dotenv]; sys_platform == 'darwin'
    "})?;

    uv_snapshot!(context.filters(), windows_filters=false, context.pip_compile()
            .arg("requirements.in")
            .arg("--universal")
            .arg("--no-strip-extras"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.in --universal --no-strip-extras
    asgiref==3.8.1 ; sys_platform == 'linux'
        # via flask
    blinker==1.7.0 ; sys_platform == 'darwin' or sys_platform == 'linux'
        # via flask
    click==8.1.7 ; sys_platform == 'darwin' or sys_platform == 'linux'
        # via flask
    flask==3.0.2 ; sys_platform == 'darwin' or sys_platform == 'linux'
        # via -r requirements.in
    flask[async]==3.0.2 ; sys_platform == 'linux'
        # via -r requirements.in
    flask[dotenv]==3.0.2 ; sys_platform == 'darwin'
        # via -r requirements.in
    itsdangerous==2.1.2 ; sys_platform == 'darwin' or sys_platform == 'linux'
        # via flask
    jinja2==3.1.3 ; sys_platform == 'darwin' or sys_platform == 'linux'
        # via flask
    markupsafe==2.1.5 ; sys_platform == 'darwin' or sys_platform == 'linux'
        # via
        #   jinja2
        #   werkzeug
    python-dotenv==1.0.1 ; sys_platform == 'darwin'
        # via flask
    werkzeug==3.0.1 ; sys_platform == 'darwin' or sys_platform == 'linux'
        # via flask

    ----- stderr -----
    Resolved 9 packages in [TIME]
    "###
    );

    Ok(())
}

/// Request a package unconditionally and again with an extra under a marker.
/// With default extra-stripping, a single unconditional `flask` entry is
/// emitted; only the extra's own dependency (`python-dotenv`) keeps the marker.
#[test]
fn universal_overlap_extra_base() -> Result<()> {
    let context = TestContext::new("3.12");
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str(indoc::indoc! {r"
        flask
        flask[dotenv]; sys_platform == 'darwin'
    "})?;

    uv_snapshot!(context.filters(), windows_filters=false, context.pip_compile()
            .arg("requirements.in")
            .arg("--universal"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.in --universal
    blinker==1.7.0
        # via flask
    click==8.1.7
        # via flask
    colorama==0.4.6 ; sys_platform == 'win32'
        # via click
    flask==3.0.2
        # via -r requirements.in
    itsdangerous==2.1.2
        # via flask
    jinja2==3.1.3
        # via flask
    markupsafe==2.1.5
        # via
        #   jinja2
        #   werkzeug
    python-dotenv==1.0.1 ; sys_platform == 'darwin'
        # via flask
    werkzeug==3.0.1
        # via flask

    ----- stderr -----
    Resolved 9 packages in [TIME]
    "###
    );

    Ok(())
}

/// Same inputs as `universal_overlap_extra_base`, but with `--no-strip-extras`:
/// both the unconditional `flask` and the marker-gated `flask[dotenv]` entries
/// are emitted.
#[test]
fn universal_overlap_extra_base_no_strip() -> Result<()> {
    let context = TestContext::new("3.12");
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str(indoc::indoc! {r"
        flask
        flask[dotenv]; sys_platform == 'darwin'
    "})?;

    uv_snapshot!(context.filters(), windows_filters=false, context.pip_compile()
            .arg("requirements.in")
            .arg("--universal")
            .arg("--no-strip-extras"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.in --universal --no-strip-extras
    blinker==1.7.0
        # via flask
    click==8.1.7
        # via flask
    colorama==0.4.6 ; sys_platform == 'win32'
        # via click
    flask==3.0.2
        # via -r requirements.in
    flask[dotenv]==3.0.2 ; sys_platform == 'darwin'
        # via -r requirements.in
    itsdangerous==2.1.2
        # via flask
    jinja2==3.1.3
        # via flask
    markupsafe==2.1.5
        # via
        #   jinja2
        #   werkzeug
    python-dotenv==1.0.1 ; sys_platform == 'darwin'
        # via flask
    werkzeug==3.0.1
        # via flask

    ----- stderr -----
    Resolved 9 packages in [TIME]
    "###
    );

    Ok(())
}

/// Request the same package with different extras under overlapping markers
/// (`darwin` is covered by both). With default extra-stripping, a single
/// `flask` entry is emitted under the union marker; each extra's dependency
/// keeps the marker of the branch(es) that requested it.
#[test]
fn universal_overlap_extras() -> Result<()> {
    let context = TestContext::new("3.12");
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str(indoc::indoc! {r"
        flask[async]; sys_platform == 'linux' or sys_platform == 'darwin'
        flask[dotenv]; sys_platform == 'darwin'
    "})?;

    uv_snapshot!(context.filters(), windows_filters=false, context.pip_compile()
            .arg("requirements.in")
            .arg("--universal"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.in --universal
    asgiref==3.8.1 ; sys_platform == 'darwin' or sys_platform == 'linux'
        # via flask
    blinker==1.7.0 ; sys_platform == 'darwin' or sys_platform == 'linux'
        # via flask
    click==8.1.7 ; sys_platform == 'darwin' or sys_platform == 'linux'
        # via flask
    flask==3.0.2 ; sys_platform == 'darwin' or sys_platform == 'linux'
        # via -r requirements.in
    itsdangerous==2.1.2 ; sys_platform == 'darwin' or sys_platform == 'linux'
        # via flask
    jinja2==3.1.3 ; sys_platform == 'darwin' or sys_platform == 'linux'
        # via flask
    markupsafe==2.1.5 ; sys_platform == 'darwin' or sys_platform == 'linux'
        # via
        #   jinja2
        #   werkzeug
    python-dotenv==1.0.1 ; sys_platform == 'darwin'
        # via flask
    werkzeug==3.0.1 ; sys_platform == 'darwin' or sys_platform == 'linux'
        # via flask

    ----- stderr -----
    Resolved 9 packages in [TIME]
    "###
    );

    Ok(())
}

/// Same inputs as `universal_overlap_extras`, but with `--no-strip-extras`:
/// the extra-qualified entries (`flask[async]`, `flask[dotenv]`) are emitted
/// under their respective markers instead of a single bare `flask` entry.
#[test]
fn universal_overlap_extras_no_strip() -> Result<()> {
    let context = TestContext::new("3.12");
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str(indoc::indoc! {r"
        flask[async]; sys_platform == 'linux' or sys_platform == 'darwin'
        flask[dotenv]; sys_platform == 'darwin'
    "})?;

    uv_snapshot!(context.filters(), windows_filters=false, context.pip_compile()
            .arg("requirements.in")
            .arg("--universal")
            .arg("--no-strip-extras"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.in --universal --no-strip-extras
    asgiref==3.8.1 ; sys_platform == 'darwin' or sys_platform == 'linux'
        # via flask
    blinker==1.7.0 ; sys_platform == 'darwin' or sys_platform == 'linux'
        # via flask
    click==8.1.7 ; sys_platform == 'darwin' or sys_platform == 'linux'
        # via flask
    flask[async]==3.0.2 ; sys_platform == 'darwin' or sys_platform == 'linux'
        # via -r requirements.in
    flask[dotenv]==3.0.2 ; sys_platform == 'darwin'
        # via -r requirements.in
    itsdangerous==2.1.2 ; sys_platform == 'darwin' or sys_platform == 'linux'
        # via flask
    jinja2==3.1.3 ; sys_platform == 'darwin' or sys_platform == 'linux'
        # via flask
    markupsafe==2.1.5 ; sys_platform == 'darwin' or sys_platform == 'linux'
        # via
        #   jinja2
        #   werkzeug
    python-dotenv==1.0.1 ; sys_platform == 'darwin'
        # via flask
    werkzeug==3.0.1 ; sys_platform == 'darwin' or sys_platform == 'linux'
        # via flask

    ----- stderr -----
    Resolved 9 packages in [TIME]
    "###
    );

    Ok(())
}

/// Request the same package with different extras under an identical marker.
/// With default extra-stripping, a single `flask` entry is emitted under that
/// marker, and each extra's dependency carries the same marker.
#[test]
fn universal_identical_extras() -> Result<()> {
    let context = TestContext::new("3.12");
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str(indoc::indoc! {r"
        flask[async]; sys_platform == 'darwin'
        flask[dotenv]; sys_platform == 'darwin'
    "})?;

    uv_snapshot!(context.filters(), windows_filters=false, context.pip_compile()
            .arg("requirements.in")
            .arg("--universal"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.in --universal
    asgiref==3.8.1 ; sys_platform == 'darwin'
        # via flask
    blinker==1.7.0 ; sys_platform == 'darwin'
        # via flask
    click==8.1.7 ; sys_platform == 'darwin'
        # via flask
    flask==3.0.2 ; sys_platform == 'darwin'
        # via -r requirements.in
    itsdangerous==2.1.2 ; sys_platform == 'darwin'
        # via flask
    jinja2==3.1.3 ; sys_platform == 'darwin'
        # via flask
    markupsafe==2.1.5 ; sys_platform == 'darwin'
        # via
        #   jinja2
        #   werkzeug
    python-dotenv==1.0.1 ; sys_platform == 'darwin'
        # via flask
    werkzeug==3.0.1 ; sys_platform == 'darwin'
        # via flask

    ----- stderr -----
    Resolved 9 packages in [TIME]
    "###
    );

    Ok(())
}

/// Same inputs as `universal_identical_extras`, but with `--no-strip-extras`:
/// the two extras share an identical marker, so they are merged into a single
/// `flask[async, dotenv]` entry.
#[test]
fn universal_identical_extras_no_strip() -> Result<()> {
    let context = TestContext::new("3.12");
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str(indoc::indoc! {r"
        flask[async]; sys_platform == 'darwin'
        flask[dotenv]; sys_platform == 'darwin'
    "})?;

    uv_snapshot!(context.filters(), windows_filters=false, context.pip_compile()
            .arg("requirements.in")
            .arg("--universal")
            .arg("--no-strip-extras"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.in --universal --no-strip-extras
    asgiref==3.8.1 ; sys_platform == 'darwin'
        # via flask
    blinker==1.7.0 ; sys_platform == 'darwin'
        # via flask
    click==8.1.7 ; sys_platform == 'darwin'
        # via flask
    flask[async, dotenv]==3.0.2 ; sys_platform == 'darwin'
        # via -r requirements.in
    itsdangerous==2.1.2 ; sys_platform == 'darwin'
        # via flask
    jinja2==3.1.3 ; sys_platform == 'darwin'
        # via flask
    markupsafe==2.1.5 ; sys_platform == 'darwin'
        # via
        #   jinja2
        #   werkzeug
    python-dotenv==1.0.1 ; sys_platform == 'darwin'
        # via flask
    werkzeug==3.0.1 ; sys_platform == 'darwin'
        # via flask

    ----- stderr -----
    Resolved 9 packages in [TIME]
    "###
    );

    Ok(())
}

/// Resolve a package from a `requirements.in` file, with a `constraints.txt` file pinning one of
/// its transitive dependencies to a specific version.
#[test]
fn compile_constraints_compatible_version() -> Result<()> {
    let context = TestContext::new("3.12");
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str("virtualenv")?;

    // Pin the transitive dependency `filelock` via the constraints file; the constraint source
    // is then surfaced in the `# via` annotations.
    let constraints_txt = context.temp_dir.child("constraints.txt");
    constraints_txt.write_str("filelock==3.8.0")?;

    uv_snapshot!(context.filters(), context.pip_compile()
            .arg("requirements.in")
            .arg("--constraint")
            .arg("constraints.txt"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.in --constraint constraints.txt
    distlib==0.3.8
        # via virtualenv
    filelock==3.8.0
        # via
        #   -c constraints.txt
        #   virtualenv
    platformdirs==3.11.0
        # via virtualenv
    virtualenv==20.21.1
        # via -r requirements.in

    ----- stderr -----
    Resolved 4 packages in [TIME]
    "###
    );

    Ok(())
}

/// Resolve a package from a `requirements.in` file, with a `constraints.txt` file pinning one of
/// its direct dependencies to an incompatible version.
#[test]
fn compile_constraints_incompatible_version() -> Result<()> {
    let context = TestContext::new("3.12");
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str("filelock==1.0.0")?;

    // The direct requirement (==1.0.0) and the constraint (==3.8.0) are mutually exclusive.
    let constraints_txt = context.temp_dir.child("constraints.txt");
    constraints_txt.write_str("filelock==3.8.0")?;

    uv_snapshot!(context.filters(), context.pip_compile()
            .arg("requirements.in")
            .arg("--constraint")
            .arg("constraints.txt"), @r###"
    success: false
    exit_code: 1
    ----- stdout -----

    ----- stderr -----
      × No solution found when resolving dependencies:
      ╰─▶ Because you require filelock==1.0.0 and filelock==3.8.0, we can conclude that your requirements are unsatisfiable.
    "###
    );

    Ok(())
}

/// NOTE(review): despite its name, this test exercises no URL markers — its body (and its former
/// doc comment) is an exact duplicate of `compile_constraints_incompatible_version` above: a
/// `filelock` version conflict between `requirements.in` and `constraints.txt`. This looks like a
/// copy-paste placeholder; confirm the intended conflicting-URL-marker scenario.
#[test]
fn conflicting_url_markers() -> Result<()> {
    let context = TestContext::new("3.12");
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str("filelock==1.0.0")?;

    let constraints_txt = context.temp_dir.child("constraints.txt");
    constraints_txt.write_str("filelock==3.8.0")?;

    uv_snapshot!(context.filters(), context.pip_compile()
            .arg("requirements.in")
            .arg("--constraint")
            .arg("constraints.txt"), @r###"
    success: false
    exit_code: 1
    ----- stdout -----

    ----- stderr -----
      × No solution found when resolving dependencies:
      ╰─▶ Because you require filelock==1.0.0 and filelock==3.8.0, we can conclude that your requirements are unsatisfiable.
    "###
    );

    Ok(())
}

/// Override a regular package with an editable. This should resolve to the editable package.
#[test]
fn editable_override() -> Result<()> {
    let context = TestContext::new("3.12");

    // Add a non-editable requirement.
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str("black")?;

    // Add an editable override.
    let overrides_txt = context.temp_dir.child("overrides.txt");
    overrides_txt.write_str("-e ../../scripts/packages/black_editable")?;

    // Run from the current (crate) directory so the relative editable path resolves.
    uv_snapshot!(context.filters(), context.pip_compile()
        .arg(requirements_in.path())
        .arg("--override")
        .arg(overrides_txt.path())
        .current_dir(current_dir()?), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] [TEMP_DIR]/requirements.in --override [TEMP_DIR]/overrides.txt
    -e ../../scripts/packages/black_editable
        # via
        #   --override [TEMP_DIR]/overrides.txt
        #   -r [TEMP_DIR]/requirements.in

    ----- stderr -----
    Resolved 1 package in [TIME]
    "###
    );

    Ok(())
}

/// Override an editable with a regular package. This should resolve to the regular package.
#[test]
fn override_editable() -> Result<()> {
    let context = TestContext::new("3.12");
    // Request the editable package directly.
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str("-e ../../scripts/packages/black_editable")?;

    // Override it with a pinned registry release.
    let overrides_txt = context.temp_dir.child("overrides.txt");
    overrides_txt.write_str("black==23.10.1")?;

    uv_snapshot!(context.filters(), context.pip_compile()
            .arg(requirements_in.path())
            .arg("--override")
            .arg(overrides_txt.path())
            .current_dir(current_dir()?), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] [TEMP_DIR]/requirements.in --override [TEMP_DIR]/overrides.txt
    black==23.10.1
        # via
        #   --override [TEMP_DIR]/overrides.txt
        #   -r [TEMP_DIR]/requirements.in
    click==8.1.7
        # via black
    mypy-extensions==1.0.0
        # via black
    packaging==24.0
        # via black
    pathspec==0.12.1
        # via black
    platformdirs==4.2.0
        # via black

    ----- stderr -----
    Resolved 6 packages in [TIME]
    "###);

    Ok(())
}

/// Resolve a package with both a constraint _and_ an override. The override and the constraint are
/// compatible: exactly one version (`anyio==3.0.0`) satisfies both `<=3.0.0` and `>=3.0.0`.
#[test]
fn override_with_compatible_constraint() -> Result<()> {
    let context = TestContext::new("3.12");
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str("anyio")?;

    let constraints_txt = context.temp_dir.child("constraints.txt");
    constraints_txt.write_str("anyio<=3.0.0")?;

    let overrides_txt = context.temp_dir.child("overrides.txt");
    overrides_txt.write_str("anyio>=3.0.0")?;

    uv_snapshot!(context.filters(), context.pip_compile()
            .arg("requirements.in")
            .arg("--constraint")
            .arg("constraints.txt")
            .arg("--override")
            .arg("overrides.txt"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.in --constraint constraints.txt --override overrides.txt
    anyio==3.0.0
        # via
        #   -c constraints.txt
        #   --override overrides.txt
        #   -r requirements.in
    idna==3.6
        # via anyio
    sniffio==1.3.1
        # via anyio

    ----- stderr -----
    Resolved 3 packages in [TIME]
    "###
    );

    Ok(())
}

/// Resolve a package with both a constraint _and_ an override. The override and the constraint are
/// incompatible, and so should error. (The correctness of this behavior is subject to debate.)
#[test]
fn override_with_incompatible_constraint() -> Result<()> {
    let context = TestContext::new("3.12");
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str("anyio")?;

    // `anyio<3.0.0` (constraint) and `anyio>=3.0.0` (override) are disjoint ranges.
    let constraints_txt = context.temp_dir.child("constraints.txt");
    constraints_txt.write_str("anyio<3.0.0")?;

    let overrides_txt = context.temp_dir.child("overrides.txt");
    overrides_txt.write_str("anyio>=3.0.0")?;

    uv_snapshot!(context.filters(), context.pip_compile()
            .arg("requirements.in")
            .arg("--constraint")
            .arg("constraints.txt")
            .arg("--override")
            .arg("overrides.txt"), @r###"
    success: false
    exit_code: 1
    ----- stdout -----

    ----- stderr -----
      × No solution found when resolving dependencies:
      ╰─▶ Because you require anyio>=3.0.0 and anyio<3.0.0, we can conclude that your requirements are unsatisfiable.
    "###
    );

    Ok(())
}

/// Resolve a package, marking a dependency as unsafe.
#[test]
fn unsafe_package() -> Result<()> {
    let context = TestContext::new("3.12");
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str("flask")?;

    // `pydantic` is not part of the resolution, so only `jinja2` appears in the excluded list;
    // the excluded package still counts toward the "Resolved 7 packages" total.
    uv_snapshot!(context.filters(), context.pip_compile()
            .arg("requirements.in")
            .arg("--unsafe-package")
            .arg("jinja2")
            .arg("--unsafe-package")
            .arg("pydantic"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.in --unsafe-package jinja2 --unsafe-package pydantic
    blinker==1.7.0
        # via flask
    click==8.1.7
        # via flask
    flask==3.0.2
        # via -r requirements.in
    itsdangerous==2.1.2
        # via flask
    markupsafe==2.1.5
        # via
        #   jinja2
        #   werkzeug
    werkzeug==3.0.1
        # via flask

    # The following packages were excluded from the output:
    # jinja2

    ----- stderr -----
    Resolved 7 packages in [TIME]
    "###
    );

    Ok(())
}

/// Resolve a package with a strict upper bound, allowing pre-releases. Per PEP 440, pre-releases
/// that match the bound (e.g., `2.0.0rc1`) should _not_ be allowed.
#[test]
fn prerelease_upper_bound_exclude() -> Result<()> {
    let context = TestContext::new("3.12");
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str("flask<2.0.0")?;

    uv_snapshot!(context.filters(), context.pip_compile()
            .arg("requirements.in")
            .arg("--prerelease=allow"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.in --prerelease=allow
    click==7.1.2
        # via flask
    flask==1.1.4
        # via -r requirements.in
    itsdangerous==1.1.0
        # via flask
    jinja2==2.11.3
        # via flask
    markupsafe==2.1.5
        # via jinja2
    werkzeug==1.0.1
        # via flask

    ----- stderr -----
    Resolved 6 packages in [TIME]
    "###
    );

    Ok(())
}

/// Resolve a package with a strict upper bound that includes a pre-release. Per PEP 440,
/// pre-releases _should_ be allowed.
#[test]
fn prerelease_upper_bound_include() -> Result<()> {
    let context = TestContext::new("3.12");
    // The upper bound is itself a pre-release (`2.0.0rc4`), so `2.0.0rc2` is within range.
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str("flask<2.0.0rc4")?;

    uv_snapshot!(context.filters(), context.pip_compile()
            .arg("requirements.in")
            .arg("--prerelease=allow"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.in --prerelease=allow
    click==8.1.7
        # via flask
    flask==2.0.0rc2
        # via -r requirements.in
    itsdangerous==2.1.2
        # via flask
    jinja2==3.1.3
        # via flask
    markupsafe==2.1.5
        # via
        #   jinja2
        #   werkzeug
    werkzeug==3.0.1
        # via flask

    ----- stderr -----
    Resolved 6 packages in [TIME]
    "###
    );

    Ok(())
}

/// Allow `--pre` as an alias for `--prerelease=allow`.
#[test]
fn pre_alias() -> Result<()> {
    let context = TestContext::new("3.12");
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str("flask<2.0.0")?;

    // Same input as `prerelease_upper_bound_exclude`; `--pre` should produce identical output.
    uv_snapshot!(context.filters(), context.pip_compile()
            .arg("requirements.in")
            .arg("--pre"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.in --pre
    click==7.1.2
        # via flask
    flask==1.1.4
        # via -r requirements.in
    itsdangerous==1.1.0
        # via flask
    jinja2==2.11.3
        # via flask
    markupsafe==2.1.5
        # via jinja2
    werkzeug==1.0.1
        # via flask

    ----- stderr -----
    Resolved 6 packages in [TIME]
    "###
    );

    Ok(())
}

/// Allow a pre-release for a version specifier in a constraint file.
#[test]
fn prerelease_constraint() -> Result<()> {
    let context = TestContext::new("3.12");
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str("flask")?;

    // The constraint's upper bound is a pre-release, making `flask==2.0.0rc2` selectable.
    let constraints_txt = context.temp_dir.child("constraints.txt");
    constraints_txt.write_str("flask<=2.0.0rc2")?;

    uv_snapshot!(context.filters(), context.pip_compile()
            .arg("requirements.in")
            .arg("--constraint")
            .arg("constraints.txt"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.in --constraint constraints.txt
    click==8.1.7
        # via flask
    flask==2.0.0rc2
        # via
        #   -c constraints.txt
        #   -r requirements.in
    itsdangerous==2.1.2
        # via flask
    jinja2==3.1.3
        # via flask
    markupsafe==2.1.5
        # via
        #   jinja2
        #   werkzeug
    werkzeug==3.0.1
        # via flask

    ----- stderr -----
    Resolved 6 packages in [TIME]
    "###
    );

    Ok(())
}

/// Resolve from a `pyproject.toml` file with a mutually recursive extra.
#[test]
fn compile_pyproject_toml_mutually_recursive_extra() -> Result<()> {
    let context = TestContext::new("3.12");
    let pyproject_toml = context.temp_dir.child("pyproject.toml");
    pyproject_toml.write_str(
        r#"
[project]
name = "project"
version = "0.0.1"
dependencies = [
    "anyio"
]

[project.optional-dependencies]
test = [
    "iniconfig",
    "project[dev]"
]
dev = [
    "project[test]",
]
"#,
    )?;

    // `test` and `dev` reference each other; resolution must terminate despite the cycle.
    uv_snapshot!(context.filters(), context.pip_compile()
            .arg("pyproject.toml")
            .arg("--extra")
            .arg("dev"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] pyproject.toml --extra dev
    anyio==4.3.0
        # via project (pyproject.toml)
    idna==3.6
        # via anyio
    iniconfig==2.0.0
        # via project (pyproject.toml)
    sniffio==1.3.1
        # via anyio

    ----- stderr -----
    Resolved 4 packages in [TIME]
    "###
    );

    Ok(())
}

/// Resolve from a `pyproject.toml` file with a recursive extra.
#[test]
fn compile_pyproject_toml_recursive_extra() -> Result<()> {
    let context = TestContext::new("3.12");
    let pyproject_toml = context.temp_dir.child("pyproject.toml");
    pyproject_toml.write_str(
        r#"
[project]
name = "project"
version = "0.0.1"
dependencies = [
    "anyio"
]

[project.optional-dependencies]
test = [
    "iniconfig",
]
dev = [
    "project[test]",
]
"#,
    )?;

    // Enabling `dev` pulls in `project[test]`, which contributes `iniconfig`.
    uv_snapshot!(context.filters(), context.pip_compile()
            .arg("pyproject.toml")
            .arg("--extra")
            .arg("dev"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] pyproject.toml --extra dev
    anyio==4.3.0
        # via project (pyproject.toml)
    idna==3.6
        # via anyio
    iniconfig==2.0.0
        # via project (pyproject.toml)
    sniffio==1.3.1
        # via anyio

    ----- stderr -----
    Resolved 4 packages in [TIME]
    "###
    );

    Ok(())
}

/// Resolve from a `pyproject.toml` file with a recursive extra, with a marker attached.
#[test]
fn compile_pyproject_toml_recursive_extra_marker() -> Result<()> {
    let context = TestContext::new("3.12");
    let pyproject_toml = context.temp_dir.child("pyproject.toml");
    pyproject_toml.write_str(
        r#"
[project]
name = "project"
version = "0.0.1"
dependencies = [
    "anyio"
]

[project.optional-dependencies]
test = [
    "iniconfig",
]
dev = [
    "project[test] ; sys_platform == 'darwin'",
]
"#,
    )?;

    // The marker on the recursive extra propagates to `iniconfig` in the universal output.
    uv_snapshot!(context.filters(), context.pip_compile()
            .arg("pyproject.toml")
            .arg("--extra")
            .arg("dev")
            .arg("--universal"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] pyproject.toml --extra dev --universal
    anyio==4.3.0
        # via project (pyproject.toml)
    idna==3.6
        # via anyio
    iniconfig==2.0.0 ; sys_platform == 'darwin'
        # via project (pyproject.toml)
    sniffio==1.3.1
        # via anyio

    ----- stderr -----
    Resolved 4 packages in [TIME]
    "###
    );

    Ok(())
}

/// Resolve from a `pyproject.toml` file with multiple recursive extras.
#[test]
fn compile_pyproject_toml_deeply_recursive_extra() -> Result<()> {
    let context = TestContext::new("3.12");
    let pyproject_toml = context.temp_dir.child("pyproject.toml");
    pyproject_toml.write_str(
        r#"
[project]
name = "project"
version = "0.0.1"
dependencies = []

[project.optional-dependencies]
foo = ["iniconfig"]
bar = ["project[foo]"]
baz = ["project[bar]"]
bop = ["project[bar] ; sys_platform == 'darwin'"]
qux = ["project[bop] ; python_version == '3.12'"]
"#,
    )?;

    // Markers accumulate along the extra chain: qux -> bop -> bar -> foo.
    uv_snapshot!(context.filters(), context.pip_compile()
            .arg("pyproject.toml")
            .arg("--universal")
            .arg("--extra")
            .arg("qux"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] pyproject.toml --universal --extra qux
    iniconfig==2.0.0 ; python_full_version < '3.13' and sys_platform == 'darwin'
        # via project (pyproject.toml)

    ----- stderr -----
    Resolved 1 package in [TIME]
    "###
    );

    // Enabling `bar` directly makes `iniconfig` unconditional, dropping the marker that would
    // otherwise be contributed via `bop`.
    uv_snapshot!(context.filters(), context.pip_compile()
            .arg("pyproject.toml")
            .arg("--universal")
            .arg("--extra")
            .arg("bop")
            .arg("--extra")
            .arg("bar"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] pyproject.toml --universal --extra bop --extra bar
    iniconfig==2.0.0
        # via project (pyproject.toml)

    ----- stderr -----
    Resolved 1 package in [TIME]
    "###
    );

    // With every extra enabled, `iniconfig` is likewise unconditional.
    uv_snapshot!(context.filters(), context.pip_compile()
            .arg("pyproject.toml")
            .arg("--universal")
            .arg("--all-extras"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] pyproject.toml --universal --all-extras
    iniconfig==2.0.0
        # via project (pyproject.toml)

    ----- stderr -----
    Resolved 1 package in [TIME]
    "###
    );

    Ok(())
}

/// The dependencies of a local editable dependency should be considered "direct" dependencies.
#[test]
fn editable_direct_dependency() -> Result<()> {
    let context = TestContext::new("3.12");
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str("-e ../../scripts/packages/setuptools_editable")?;

    // `lowest-direct` applies to the editable's own dependencies, pinning `iniconfig` to 0.1.
    uv_snapshot!(context.filters(), context.pip_compile()
            .arg(requirements_in.path())
            .arg("--resolution")
            .arg("lowest-direct")
            .current_dir(current_dir()?), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] [TEMP_DIR]/requirements.in --resolution lowest-direct
    -e ../../scripts/packages/setuptools_editable
        # via -r [TEMP_DIR]/requirements.in
    iniconfig==0.1
        # via setuptools-editable

    ----- stderr -----
    Resolved 2 packages in [TIME]
    "###);

    Ok(())
}

/// Setting `UV_INDEX_URL` to the empty string should treat it as "unset".
#[test]
fn empty_index_url_env_var() -> Result<()> {
    let context = TestContext::new("3.12");
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str("anyio")?;

    // An empty `UV_INDEX_URL` falls back to the default PyPI index, as shown by the emitted URL.
    uv_snapshot!(context.filters(), context.pip_compile()
            .arg("requirements.in")
            .arg("--emit-index-url")
            .env(EnvVars::UV_INDEX_URL, ""), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.in --emit-index-url
    --index-url https://pypi.org/simple

    anyio==4.3.0
        # via -r requirements.in
    idna==3.6
        # via anyio
    sniffio==1.3.1
        # via anyio

    ----- stderr -----
    Resolved 3 packages in [TIME]
    "###
    );

    Ok(())
}

/// Setting `UV_EXTRA_INDEX_URL` to the empty string should treat it as "unset".
#[test]
fn empty_extra_index_url_env_var() -> Result<()> {
    let context = TestContext::new("3.12");
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str("anyio")?;

    uv_snapshot!(context.filters(), context.pip_compile()
            .arg("requirements.in")
            .arg("--emit-index-url")
            .env(EnvVars::UV_EXTRA_INDEX_URL, ""), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.in --emit-index-url
    --index-url https://pypi.org/simple

    anyio==4.3.0
        # via -r requirements.in
    idna==3.6
        # via anyio
    sniffio==1.3.1
        # via anyio

    ----- stderr -----
    Resolved 3 packages in [TIME]
    "###
    );

    Ok(())
}

/// Setting `UV_INDEX_URL` to the empty string should treat it as "unset", and so should be
/// overridden by an `--index-url` in a requirements file.
#[test]
fn empty_index_url_env_var_override() -> Result<()> {
    let context = TestContext::new("3.12");
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str("--index-url https://test.pypi.org/simple\nidna")?;

    // The `--index-url` inside `requirements.in` wins over the empty environment variable.
    uv_snapshot!(context.filters(), context.pip_compile()
            .arg("requirements.in")
            .arg("--emit-index-url")
            .env(EnvVars::UV_INDEX_URL, ""), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.in --emit-index-url
    --index-url https://test.pypi.org/simple

    idna==2.7
        # via -r requirements.in

    ----- stderr -----
    Resolved 1 package in [TIME]
    "###
    );

    Ok(())
}

/// The `UV_INDEX_URL` should override an `--index-url` in a requirements file.
#[test]
fn index_url_env_var_override() -> Result<()> {
    let context = TestContext::new("3.12");
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str("--index-url https://pypi.org/simple\nidna")?;

    // A non-empty env var takes precedence over the `--index-url` in the requirements file.
    uv_snapshot!(context.filters(), context.pip_compile()
            .arg("requirements.in")
            .arg("--emit-index-url")
            .env(EnvVars::UV_INDEX_URL, "https://test.pypi.org/simple"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.in --emit-index-url
    --index-url https://test.pypi.org/simple

    idna==2.7
        # via -r requirements.in

    ----- stderr -----
    Resolved 1 package in [TIME]
    "###
    );

    Ok(())
}

/// Expand an environment variable in a `-r` path within a `requirements.in` file.
#[test]
fn expand_env_var_requirements_txt() -> Result<()> {
    let context = TestContext::new("3.12");

    // `${PROJECT_ROOT}` is expanded when reading the `-r` path — presumably provided by the
    // test context/working directory; confirm where it is set.
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str("-r ${PROJECT_ROOT}/requirements-dev.in")?;

    let requirements_dev_in = context.temp_dir.child("requirements-dev.in");
    requirements_dev_in.write_str("anyio")?;

    uv_snapshot!(context.filters(), context.pip_compile()
            .arg("requirements.in"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.in
    anyio==4.3.0
        # via -r requirements-dev.in
    idna==3.6
        # via anyio
    sniffio==1.3.1
        # via anyio

    ----- stderr -----
    Resolved 3 packages in [TIME]
    "###
    );

    Ok(())
}

/// Raise an error when an editable's `Requires-Python` constraint is not met.
#[test]
fn requires_python_editable() -> Result<()> {
    let context = TestContext::new("3.12");

    // Create an editable package with a `Requires-Python` constraint that is not met.
    let editable_dir = context.temp_dir.child("editable");
    editable_dir.create_dir_all()?;
    let pyproject_toml = editable_dir.child("pyproject.toml");
    pyproject_toml.write_str(
        r#"[project]
name = "example"
version = "0.0.0"
dependencies = [
  "anyio==4.0.0"
]
requires-python = ">=3.13"
"#,
    )?;

    // Write to a requirements file.
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str(&format!("-e {}", editable_dir.path().display()))?;

    // Resolution fails: the editable requires Python >=3.13 but the environment is 3.12.
    uv_snapshot!(context.filters(), context.pip_compile()
        .arg("requirements.in"), @r###"
    success: false
    exit_code: 1
    ----- stdout -----

    ----- stderr -----
      × No solution found when resolving dependencies:
      ╰─▶ Because the current Python version (3.12.[X]) does not satisfy Python>=3.13 and example==0.0.0 depends on Python>=3.13, we can conclude that example==0.0.0 cannot be used.
          And because only example==0.0.0 is available and you require example, we can conclude that your requirements are unsatisfiable.
    "###
    );

    Ok(())
}

/// Raise an error when an editable's `Requires-Python` constraint is not met, even when a
/// `--python-version` target is supplied on the command line.
#[test]
fn requires_python_editable_target_version() -> Result<()> {
    let context = TestContext::new("3.12");

    // An editable package whose `Requires-Python` (>=3.13) excludes the current interpreter.
    let editable_dir = context.temp_dir.child("editable");
    editable_dir.create_dir_all()?;
    editable_dir.child("pyproject.toml").write_str(
        r#"[project]
name = "example"
version = "0.0.0"
dependencies = [
  "anyio==4.0.0"
]
requires-python = ">=3.13"
"#,
    )?;

    // Reference the editable from the input file.
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str(&format!("-e {}", editable_dir.path().display()))?;

    // Prepend a filter that strips the warning emitted when Python 3.11 is not installed,
    // keeping the snapshot stable across machines.
    let filters: Vec<_> = std::iter::once((
        "warning: The requested Python version 3.11 is not available; .* will be used to build dependencies instead.\n",
        "",
    ))
    .chain(context.filters())
    .collect();

    uv_snapshot!(filters, context.pip_compile()
        .arg("requirements.in")
        .arg("--python-version=3.11"), @r###"
    success: false
    exit_code: 1
    ----- stdout -----

    ----- stderr -----
      × No solution found when resolving dependencies:
      ╰─▶ Because the current Python version (3.12.[X]) does not satisfy Python>=3.13 and example==0.0.0 depends on Python>=3.13, we can conclude that example==0.0.0 cannot be used.
          And because only example==0.0.0 is available and you require example, we can conclude that your requirements are unsatisfiable.
    "###
    );

    Ok(())
}

/// Resolve an editable whose optional (`dev`) extra carries a direct-URL dependency, requested
/// via `-e .[dev]`.
#[test]
fn editable_optional_url() -> Result<()> {
    let context = TestContext::new("3.12");

    // Create an editable package with an optional URL dependency.
    let pyproject_toml = context.temp_dir.child("pyproject.toml");
    pyproject_toml.write_str(
        r#"[project]
name = "example"
version = "0.0.0"
dependencies = []
requires-python = '>=3.8'

[project.optional-dependencies]
dev = [
  "anyio @ https://files.pythonhosted.org/packages/bf/cd/d6d9bb1dadf73e7af02d18225cbd2c93f8552e13130484f1c8dcfece292b/anyio-4.2.0-py3-none-any.whl"
]
"#,
    )?;

    // Write to a requirements file.
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str("-e .[dev]")?;

    uv_snapshot!(context.filters(), context.pip_compile()
        .arg("requirements.in"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.in
    -e .
        # via -r requirements.in
    anyio @ https://files.pythonhosted.org/packages/bf/cd/d6d9bb1dadf73e7af02d18225cbd2c93f8552e13130484f1c8dcfece292b/anyio-4.2.0-py3-none-any.whl
        # via example
    idna==3.6
        # via anyio
    sniffio==1.3.1
        # via anyio

    ----- stderr -----
    Resolved 4 packages in [TIME]
    "###
    );

    Ok(())
}

/// Under `--resolution=lowest-direct`, ignore optional dependencies.
///
/// In the below example, ensure that `setuptools` does not resolve to the lowest-available version.
#[test]
fn editable_optional_lowest_direct() -> Result<()> {
    let context = TestContext::new("3.12");

    // Create an editable package with an optional URL dependency.
    let pyproject_toml = context.temp_dir.child("pyproject.toml");
    pyproject_toml.write_str(
        r#"[project]
name = "example"
version = "0.0.0"
dependencies = ["setuptools-scm>=8.0.0"]
requires-python = '>=3.8'

[project.optional-dependencies]
dev = ["setuptools"]
"#,
    )?;

    // Write to a requirements file.
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str("-e .")?;

    // `setuptools` enters the graph via `setuptools-scm`, not the (unrequested) `dev` extra,
    // so it is not forced to its lowest available version.
    uv_snapshot!(context.filters(), context.pip_compile()
        .arg("requirements.in")
        .arg("--resolution=lowest-direct"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.in --resolution=lowest-direct
    -e .
        # via -r requirements.in
    packaging==24.0
        # via setuptools-scm
    setuptools==69.2.0
        # via setuptools-scm
    setuptools-scm==8.0.1
        # via example

    ----- stderr -----
    Resolved 4 packages in [TIME]
    "###
    );

    Ok(())
}

/// Resolve a source distribution that leverages Metadata 2.2.
///
/// With Metadata 2.2 (PEP 643), the sdist's `PKG-INFO` can declare its dependencies as
/// non-dynamic, so the resolver can read them without building the distribution.
#[test]
fn metadata_2_2() -> Result<()> {
    let context = TestContext::new("3.12");
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str("pyo3-mixed @ https://files.pythonhosted.org/packages/2b/b8/e04b783d3569d5b61b1dcdfda683ac2e3617340539aecd0f099fbade0b4a/pyo3_mixed-2.1.5.tar.gz")?;

    uv_snapshot!(context.filters(), context.pip_compile()
            .arg("requirements.in"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.in
    boltons==23.1.1
        # via pyo3-mixed
    pyo3-mixed @ https://files.pythonhosted.org/packages/2b/b8/e04b783d3569d5b61b1dcdfda683ac2e3617340539aecd0f099fbade0b4a/pyo3_mixed-2.1.5.tar.gz
        # via -r requirements.in
    
    ----- stderr -----
    Resolved 2 packages in [TIME]
    "###
    );

    Ok(())
}

/// Resolve a direct URL package with a URL that doesn't exist (i.e., returns a 404).
///
/// The failure should surface as a user-facing resolution error (exit code 1) with the
/// full error chain, rather than a panic or internal error.
#[test]
fn not_found_direct_url() -> Result<()> {
    let context = TestContext::new("3.12");
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str("iniconfig @ https://files.pythonhosted.org/packages/ef/a6/fake/iniconfig-2.0.0-py3-none-any.whl")?;

    uv_snapshot!(context.filters(), context.pip_compile()
            .arg("requirements.in"), @r###"
    success: false
    exit_code: 1
    ----- stdout -----
    
    ----- stderr -----
      × Failed to download `iniconfig @ https://files.pythonhosted.org/packages/ef/a6/fake/iniconfig-2.0.0-py3-none-any.whl`
      ├─▶ Failed to fetch: `https://files.pythonhosted.org/packages/ef/a6/fake/iniconfig-2.0.0-py3-none-any.whl`
      ╰─▶ HTTP status client error (404 Not Found) for url (https://files.pythonhosted.org/packages/ef/a6/fake/iniconfig-2.0.0-py3-none-any.whl)
    "###
    );

    Ok(())
}

/// Raise an error when a direct URL dependency's `Requires-Python` constraint is not met.
#[test]
fn requires_python_direct_url() -> Result<()> {
    let context = TestContext::new("3.12");

    // Create a local package with a `Requires-Python` constraint that is not met by the
    // current interpreter (3.12 < 3.13). (The directory is named `editable`, but it's
    // consumed below as a non-editable path requirement.)
    let editable_dir = context.temp_dir.child("editable");
    editable_dir.create_dir_all()?;
    let pyproject_toml = editable_dir.child("pyproject.toml");
    pyproject_toml.write_str(
        r#"[project]
name = "example"
version = "0.0.0"
dependencies = [
  "anyio==4.0.0"
]
requires-python = ">=3.13"
"#,
    )?;

    // Write to a requirements file, referencing the package by its local path.
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str(&format!("example @ {}", editable_dir.path().display()))?;

    uv_snapshot!(context.filters(), context.pip_compile()
        .arg("requirements.in"), @r###"
    success: false
    exit_code: 1
    ----- stdout -----
    
    ----- stderr -----
      × No solution found when resolving dependencies:
      ╰─▶ Because the current Python version (3.12.[X]) does not satisfy Python>=3.13 and example==0.0.0 depends on Python>=3.13, we can conclude that example==0.0.0 cannot be used.
          And because only example==0.0.0 is available and you require example, we can conclude that your requirements are unsatisfiable.
    "###
    );

    Ok(())
}

/// Build an editable package with Hatchling's {root:uri} feature.
///
/// The requirement is written with an environment-variable placeholder (`${ROOT_PATH}`),
/// which should be expanded at resolution time but preserved verbatim in the output.
#[test]
fn compile_root_uri_editable() -> Result<()> {
    let context = TestContext::new("3.12");

    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str("-e ${ROOT_PATH}")?;

    // `root_editable` declares a relative-path dependency on its sibling `black_editable`
    // via Hatchling's {root:uri} — TODO confirm against the fixture's pyproject.toml.
    let root_path = current_dir()?.join("../../scripts/packages/root_editable");
    uv_snapshot!(context.filters(), context.pip_compile()
        .arg("requirements.in")
        .env(EnvVars::ROOT_PATH, root_path.as_os_str()), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.in
    -e ${ROOT_PATH}
        # via -r requirements.in
    black @ file://[WORKSPACE]/scripts/packages/root_editable/../black_editable
        # via root-editable
    
    ----- stderr -----
    Resolved 2 packages in [TIME]
    "###
    );

    Ok(())
}

/// Build a non-editable package with Hatchling's {root:uri} feature.
///
/// Both requirements use environment-variable placeholders; the output should retain the
/// placeholders (`${BLACK_PATH}`, `${ROOT_PATH}`) rather than the expanded paths.
#[test]
fn compile_root_uri_non_editable() -> Result<()> {
    let context = TestContext::new("3.12");

    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str("${ROOT_PATH}\n${BLACK_PATH}")?;

    let root_path = current_dir()?.join("../../scripts/packages/root_editable");
    let black_path = current_dir()?.join("../../scripts/packages/black_editable");
    uv_snapshot!(context.filters(), context.pip_compile()
        .arg("requirements.in")
        .env(EnvVars::ROOT_PATH, root_path.as_os_str())
        .env(EnvVars::BLACK_PATH, black_path.as_os_str()), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.in
    ${BLACK_PATH}
        # via
        #   -r requirements.in
        #   root-editable
    ${ROOT_PATH}
        # via -r requirements.in
    
    ----- stderr -----
    Resolved 2 packages in [TIME]
    "###
    );

    Ok(())
}

/// Request a local wheel with a mismatched package name.
///
/// The requested name (`dateutil`) differs from the name encoded in the wheel filename
/// (`python_dateutil`); this should be rejected with a hard error (exit code 2).
#[test]
fn requirement_wheel_name_mismatch() -> Result<()> {
    let context = TestContext::new("3.12");

    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str("dateutil @ https://files.pythonhosted.org/packages/ec/57/56b9bcc3c9c6a792fcbaf139543cee77261f3651ca9da0c93f5c1221264b/python_dateutil-2.9.0.post0-py2.py3-none-any.whl")?;

    uv_snapshot!(context.filters(), context.pip_compile()
        .arg("requirements.in"), @r###"
    success: false
    exit_code: 2
    ----- stdout -----
    
    ----- stderr -----
    error: Requested package name `dateutil` does not match `python-dateutil` in the distribution filename: https://files.pythonhosted.org/packages/ec/57/56b9bcc3c9c6a792fcbaf139543cee77261f3651ca9da0c93f5c1221264b/python_dateutil-2.9.0.post0-py2.py3-none-any.whl
    "###
    );

    Ok(())
}

/// `--generate-hashes` should not update the hashes in the "lockfile" if the package is not
/// upgraded.
///
/// The pre-existing output file pins `markupsafe==2.1.2` with only three hashes; without
/// `--upgrade`, recompiling should preserve exactly that subset rather than regenerating
/// the full hash list.
#[test]
fn preserve_hashes_no_upgrade() -> Result<()> {
    let context = TestContext::new("3.12");
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str("markupsafe")?;

    // Write a subset of the hashes to the "lockfile".
    let requirements_txt = context.temp_dir.child("requirements.txt");
    requirements_txt.write_str(indoc! {r"
        # This file was autogenerated by uv via the following command:
        #    uv pip compile requirements.in --python-version 3.12 --cache-dir [CACHE_DIR]
        markupsafe==2.1.2 \
            --hash=sha256:0576fe974b40a400449768941d5d0858cc624e3249dfd1e0c33674e5c7ca7aed \
            --hash=sha256:085fd3201e7b12809f9e6e9bc1e5c96a368c8523fad5afb02afe3c051ae4afcc \
            --hash=sha256:090376d812fb6ac5f171e5938e82e7f2d7adc2b629101cec0db8b267815c85e2
    "})?;

    // Avoid adding any additional hashes to the "lockfile".
    uv_snapshot!(context.filters(), context.pip_compile()
            .arg("requirements.in")
            .arg("--output-file")
            .arg("requirements.txt")
            .arg("--generate-hashes"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.in --output-file requirements.txt --generate-hashes
    markupsafe==2.1.2 \
        --hash=sha256:0576fe974b40a400449768941d5d0858cc624e3249dfd1e0c33674e5c7ca7aed \
        --hash=sha256:085fd3201e7b12809f9e6e9bc1e5c96a368c8523fad5afb02afe3c051ae4afcc \
        --hash=sha256:090376d812fb6ac5f171e5938e82e7f2d7adc2b629101cec0db8b267815c85e2
        # via -r requirements.in
    
    ----- stderr -----
    Resolved 1 package in [TIME]
    "###
    );

    Ok(())
}

/// `--generate-hashes` should update the hashes in the "lockfile" if the package is upgraded via
/// `--upgrade`.
///
/// Even though the resolved version is unchanged (2.1.2), `--upgrade` invalidates the pinned
/// entry, so the full hash set is regenerated instead of preserving the three-hash subset.
#[test]
fn preserve_hashes_upgrade() -> Result<()> {
    let context = TestContext::new("3.12");
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str("markupsafe==2.1.2")?;

    // Write a subset of the hashes to the "lockfile".
    let requirements_txt = context.temp_dir.child("requirements.txt");
    requirements_txt.write_str(indoc! {r"
        # This file was autogenerated by uv via the following command:
        #    uv pip compile requirements.in --python-version 3.12 --cache-dir [CACHE_DIR]
        markupsafe==2.1.2 \
            --hash=sha256:0576fe974b40a400449768941d5d0858cc624e3249dfd1e0c33674e5c7ca7aed \
            --hash=sha256:085fd3201e7b12809f9e6e9bc1e5c96a368c8523fad5afb02afe3c051ae4afcc \
            --hash=sha256:090376d812fb6ac5f171e5938e82e7f2d7adc2b629101cec0db8b267815c85e2
    "})?;

    // Requesting an upgrade should update the hashes, even if the version didn't change.
    uv_snapshot!(context.filters(), context.pip_compile()
            .arg("requirements.in")
            .arg("--output-file")
            .arg("requirements.txt")
            .arg("--generate-hashes")
            .arg("--upgrade"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.in --output-file requirements.txt --generate-hashes
    markupsafe==2.1.2 \
        --hash=sha256:0576fe974b40a400449768941d5d0858cc624e3249dfd1e0c33674e5c7ca7aed \
        --hash=sha256:085fd3201e7b12809f9e6e9bc1e5c96a368c8523fad5afb02afe3c051ae4afcc \
        --hash=sha256:090376d812fb6ac5f171e5938e82e7f2d7adc2b629101cec0db8b267815c85e2 \
        --hash=sha256:0b462104ba25f1ac006fdab8b6a01ebbfbce9ed37fd37fd4acd70c67c973e460 \
        --hash=sha256:137678c63c977754abe9086a3ec011e8fd985ab90631145dfb9294ad09c102a7 \
        --hash=sha256:1bea30e9bf331f3fef67e0a3877b2288593c98a21ccb2cf29b74c581a4eb3af0 \
        --hash=sha256:22152d00bf4a9c7c83960521fc558f55a1adbc0631fbb00a9471e097b19d72e1 \
        --hash=sha256:22731d79ed2eb25059ae3df1dfc9cb1546691cc41f4e3130fe6bfbc3ecbbecfa \
        --hash=sha256:2298c859cfc5463f1b64bd55cb3e602528db6fa0f3cfd568d3605c50678f8f03 \
        --hash=sha256:28057e985dace2f478e042eaa15606c7efccb700797660629da387eb289b9323 \
        --hash=sha256:2e7821bffe00aa6bd07a23913b7f4e01328c3d5cc0b40b36c0bd81d362faeb65 \
        --hash=sha256:2ec4f2d48ae59bbb9d1f9d7efb9236ab81429a764dedca114f5fdabbc3788013 \
        --hash=sha256:340bea174e9761308703ae988e982005aedf427de816d1afe98147668cc03036 \
        --hash=sha256:40627dcf047dadb22cd25ea7ecfe9cbf3bbbad0482ee5920b582f3809c97654f \
        --hash=sha256:40dfd3fefbef579ee058f139733ac336312663c6706d1163b82b3003fb1925c4 \
        --hash=sha256:4cf06cdc1dda95223e9d2d3c58d3b178aa5dacb35ee7e3bbac10e4e1faacb419 \
        --hash=sha256:50c42830a633fa0cf9e7d27664637532791bfc31c731a87b202d2d8ac40c3ea2 \
        --hash=sha256:55f44b440d491028addb3b88f72207d71eeebfb7b5dbf0643f7c023ae1fba619 \
        --hash=sha256:608e7073dfa9e38a85d38474c082d4281f4ce276ac0010224eaba11e929dd53a \
        --hash=sha256:63ba06c9941e46fa389d389644e2d8225e0e3e5ebcc4ff1ea8506dce646f8c8a \
        --hash=sha256:65608c35bfb8a76763f37036547f7adfd09270fbdbf96608be2bead319728fcd \
        --hash=sha256:665a36ae6f8f20a4676b53224e33d456a6f5a72657d9c83c2aa00765072f31f7 \
        --hash=sha256:6d6607f98fcf17e534162f0709aaad3ab7a96032723d8ac8750ffe17ae5a0666 \
        --hash=sha256:7313ce6a199651c4ed9d7e4cfb4aa56fe923b1adf9af3b420ee14e6d9a73df65 \
        --hash=sha256:7668b52e102d0ed87cb082380a7e2e1e78737ddecdde129acadb0eccc5423859 \
        --hash=sha256:7df70907e00c970c60b9ef2938d894a9381f38e6b9db73c5be35e59d92e06625 \
        --hash=sha256:7e007132af78ea9df29495dbf7b5824cb71648d7133cf7848a2a5dd00d36f9ff \
        --hash=sha256:835fb5e38fd89328e9c81067fd642b3593c33e1e17e2fdbf77f5676abb14a156 \
        --hash=sha256:8bca7e26c1dd751236cfb0c6c72d4ad61d986e9a41bbf76cb445f69488b2a2bd \
        --hash=sha256:8db032bf0ce9022a8e41a22598eefc802314e81b879ae093f36ce9ddf39ab1ba \
        --hash=sha256:99625a92da8229df6d44335e6fcc558a5037dd0a760e11d84be2260e6f37002f \
        --hash=sha256:9cad97ab29dfc3f0249b483412c85c8ef4766d96cdf9dcf5a1e3caa3f3661cf1 \
        --hash=sha256:a4abaec6ca3ad8660690236d11bfe28dfd707778e2442b45addd2f086d6ef094 \
        --hash=sha256:a6e40afa7f45939ca356f348c8e23048e02cb109ced1eb8420961b2f40fb373a \
        --hash=sha256:a6f2fcca746e8d5910e18782f976489939d54a91f9411c32051b4aab2bd7c513 \
        --hash=sha256:a806db027852538d2ad7555b203300173dd1b77ba116de92da9afbc3a3be3eed \
        --hash=sha256:abcabc8c2b26036d62d4c746381a6f7cf60aafcc653198ad678306986b09450d \
        --hash=sha256:b8526c6d437855442cdd3d87eede9c425c4445ea011ca38d937db299382e6fa3 \
        --hash=sha256:bb06feb762bade6bf3c8b844462274db0c76acc95c52abe8dbed28ae3d44a147 \
        --hash=sha256:c0a33bc9f02c2b17c3ea382f91b4db0e6cde90b63b296422a939886a7a80de1c \
        --hash=sha256:c4a549890a45f57f1ebf99c067a4ad0cb423a05544accaf2b065246827ed9603 \
        --hash=sha256:ca244fa73f50a800cf8c3ebf7fd93149ec37f5cb9596aa8873ae2c1d23498601 \
        --hash=sha256:cf877ab4ed6e302ec1d04952ca358b381a882fbd9d1b07cccbfd61783561f98a \
        --hash=sha256:d9d971ec1e79906046aa3ca266de79eac42f1dbf3612a05dc9368125952bd1a1 \
        --hash=sha256:da25303d91526aac3672ee6d49a2f3db2d9502a4a60b55519feb1a4c7714e07d \
        --hash=sha256:e55e40ff0cc8cc5c07996915ad367fa47da6b3fc091fdadca7f5403239c5fec3 \
        --hash=sha256:f03a532d7dee1bed20bc4884194a16160a2de9ffc6354b3878ec9682bb623c54 \
        --hash=sha256:f1cd098434e83e656abf198f103a8207a8187c0fc110306691a2e94a78d0abb2 \
        --hash=sha256:f2bfb563d0211ce16b63c7cb9395d2c682a23187f54c3d79bfec33e6705473c6 \
        --hash=sha256:f8ffb705ffcf5ddd0e80b65ddf7bed7ee4f5a441ea7d3419e861a12eaf41af58
        # via -r requirements.in
    
    ----- stderr -----
    Resolved 1 package in [TIME]
    "###
    );

    Ok(())
}

/// `--generate-hashes` should update the hashes in the "lockfile" if the package does not have
/// hashes, even if `--upgrade` is _not_ specified.
#[test]
fn preserve_hashes_no_existing_hashes() -> Result<()> {
    let context = TestContext::new("3.12");
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str("markupsafe")?;

    // Write the pinned package to the "lockfile" without any hashes.
    let requirements_txt = context.temp_dir.child("requirements.txt");
    requirements_txt.write_str(indoc! {r"
        # This file was autogenerated by uv via the following command:
        #    uv pip compile requirements.in --python-version 3.12 --cache-dir [CACHE_DIR]
        markupsafe==2.1.2
    "})?;

    // Since the existing entry has no hashes, the full hash set should be generated
    // even without `--upgrade`.
    uv_snapshot!(context.filters(), context.pip_compile()
            .arg("requirements.in")
            .arg("--output-file")
            .arg("requirements.txt")
            .arg("--generate-hashes"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.in --output-file requirements.txt --generate-hashes
    markupsafe==2.1.2 \
        --hash=sha256:0576fe974b40a400449768941d5d0858cc624e3249dfd1e0c33674e5c7ca7aed \
        --hash=sha256:085fd3201e7b12809f9e6e9bc1e5c96a368c8523fad5afb02afe3c051ae4afcc \
        --hash=sha256:090376d812fb6ac5f171e5938e82e7f2d7adc2b629101cec0db8b267815c85e2 \
        --hash=sha256:0b462104ba25f1ac006fdab8b6a01ebbfbce9ed37fd37fd4acd70c67c973e460 \
        --hash=sha256:137678c63c977754abe9086a3ec011e8fd985ab90631145dfb9294ad09c102a7 \
        --hash=sha256:1bea30e9bf331f3fef67e0a3877b2288593c98a21ccb2cf29b74c581a4eb3af0 \
        --hash=sha256:22152d00bf4a9c7c83960521fc558f55a1adbc0631fbb00a9471e097b19d72e1 \
        --hash=sha256:22731d79ed2eb25059ae3df1dfc9cb1546691cc41f4e3130fe6bfbc3ecbbecfa \
        --hash=sha256:2298c859cfc5463f1b64bd55cb3e602528db6fa0f3cfd568d3605c50678f8f03 \
        --hash=sha256:28057e985dace2f478e042eaa15606c7efccb700797660629da387eb289b9323 \
        --hash=sha256:2e7821bffe00aa6bd07a23913b7f4e01328c3d5cc0b40b36c0bd81d362faeb65 \
        --hash=sha256:2ec4f2d48ae59bbb9d1f9d7efb9236ab81429a764dedca114f5fdabbc3788013 \
        --hash=sha256:340bea174e9761308703ae988e982005aedf427de816d1afe98147668cc03036 \
        --hash=sha256:40627dcf047dadb22cd25ea7ecfe9cbf3bbbad0482ee5920b582f3809c97654f \
        --hash=sha256:40dfd3fefbef579ee058f139733ac336312663c6706d1163b82b3003fb1925c4 \
        --hash=sha256:4cf06cdc1dda95223e9d2d3c58d3b178aa5dacb35ee7e3bbac10e4e1faacb419 \
        --hash=sha256:50c42830a633fa0cf9e7d27664637532791bfc31c731a87b202d2d8ac40c3ea2 \
        --hash=sha256:55f44b440d491028addb3b88f72207d71eeebfb7b5dbf0643f7c023ae1fba619 \
        --hash=sha256:608e7073dfa9e38a85d38474c082d4281f4ce276ac0010224eaba11e929dd53a \
        --hash=sha256:63ba06c9941e46fa389d389644e2d8225e0e3e5ebcc4ff1ea8506dce646f8c8a \
        --hash=sha256:65608c35bfb8a76763f37036547f7adfd09270fbdbf96608be2bead319728fcd \
        --hash=sha256:665a36ae6f8f20a4676b53224e33d456a6f5a72657d9c83c2aa00765072f31f7 \
        --hash=sha256:6d6607f98fcf17e534162f0709aaad3ab7a96032723d8ac8750ffe17ae5a0666 \
        --hash=sha256:7313ce6a199651c4ed9d7e4cfb4aa56fe923b1adf9af3b420ee14e6d9a73df65 \
        --hash=sha256:7668b52e102d0ed87cb082380a7e2e1e78737ddecdde129acadb0eccc5423859 \
        --hash=sha256:7df70907e00c970c60b9ef2938d894a9381f38e6b9db73c5be35e59d92e06625 \
        --hash=sha256:7e007132af78ea9df29495dbf7b5824cb71648d7133cf7848a2a5dd00d36f9ff \
        --hash=sha256:835fb5e38fd89328e9c81067fd642b3593c33e1e17e2fdbf77f5676abb14a156 \
        --hash=sha256:8bca7e26c1dd751236cfb0c6c72d4ad61d986e9a41bbf76cb445f69488b2a2bd \
        --hash=sha256:8db032bf0ce9022a8e41a22598eefc802314e81b879ae093f36ce9ddf39ab1ba \
        --hash=sha256:99625a92da8229df6d44335e6fcc558a5037dd0a760e11d84be2260e6f37002f \
        --hash=sha256:9cad97ab29dfc3f0249b483412c85c8ef4766d96cdf9dcf5a1e3caa3f3661cf1 \
        --hash=sha256:a4abaec6ca3ad8660690236d11bfe28dfd707778e2442b45addd2f086d6ef094 \
        --hash=sha256:a6e40afa7f45939ca356f348c8e23048e02cb109ced1eb8420961b2f40fb373a \
        --hash=sha256:a6f2fcca746e8d5910e18782f976489939d54a91f9411c32051b4aab2bd7c513 \
        --hash=sha256:a806db027852538d2ad7555b203300173dd1b77ba116de92da9afbc3a3be3eed \
        --hash=sha256:abcabc8c2b26036d62d4c746381a6f7cf60aafcc653198ad678306986b09450d \
        --hash=sha256:b8526c6d437855442cdd3d87eede9c425c4445ea011ca38d937db299382e6fa3 \
        --hash=sha256:bb06feb762bade6bf3c8b844462274db0c76acc95c52abe8dbed28ae3d44a147 \
        --hash=sha256:c0a33bc9f02c2b17c3ea382f91b4db0e6cde90b63b296422a939886a7a80de1c \
        --hash=sha256:c4a549890a45f57f1ebf99c067a4ad0cb423a05544accaf2b065246827ed9603 \
        --hash=sha256:ca244fa73f50a800cf8c3ebf7fd93149ec37f5cb9596aa8873ae2c1d23498601 \
        --hash=sha256:cf877ab4ed6e302ec1d04952ca358b381a882fbd9d1b07cccbfd61783561f98a \
        --hash=sha256:d9d971ec1e79906046aa3ca266de79eac42f1dbf3612a05dc9368125952bd1a1 \
        --hash=sha256:da25303d91526aac3672ee6d49a2f3db2d9502a4a60b55519feb1a4c7714e07d \
        --hash=sha256:e55e40ff0cc8cc5c07996915ad367fa47da6b3fc091fdadca7f5403239c5fec3 \
        --hash=sha256:f03a532d7dee1bed20bc4884194a16160a2de9ffc6354b3878ec9682bb623c54 \
        --hash=sha256:f1cd098434e83e656abf198f103a8207a8187c0fc110306691a2e94a78d0abb2 \
        --hash=sha256:f2bfb563d0211ce16b63c7cb9395d2c682a23187f54c3d79bfec33e6705473c6 \
        --hash=sha256:f8ffb705ffcf5ddd0e80b65ddf7bed7ee4f5a441ea7d3419e861a12eaf41af58
        # via -r requirements.in
    
    ----- stderr -----
    Resolved 1 package in [TIME]
    "###
    );

    Ok(())
}

/// `--generate-hashes` should update the hashes in the "lockfile" if the package is upgraded due
/// to a change in requirements.
///
/// Here the `requirements.in` pin (`==2.1.3`) no longer matches the locked version (`2.1.2`),
/// so the entry is re-resolved and a fresh, full hash set is emitted for the new version.
#[test]
fn preserve_hashes_newer_version() -> Result<()> {
    let context = TestContext::new("3.12");
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str("markupsafe==2.1.3")?;

    // Write a subset of the hashes to the "lockfile".
    let requirements_txt = context.temp_dir.child("requirements.txt");
    requirements_txt.write_str(indoc! {r"
        # This file was autogenerated by uv via the following command:
        #    uv pip compile requirements.in --python-version 3.12 --cache-dir [CACHE_DIR]
        markupsafe==2.1.2 \
            --hash=sha256:0576fe974b40a400449768941d5d0858cc624e3249dfd1e0c33674e5c7ca7aed \
            --hash=sha256:085fd3201e7b12809f9e6e9bc1e5c96a368c8523fad5afb02afe3c051ae4afcc \
            --hash=sha256:090376d812fb6ac5f171e5938e82e7f2d7adc2b629101cec0db8b267815c85e2
    "})?;

    // Requesting a more specific version should update the hashes.
    uv_snapshot!(context.filters(), context.pip_compile()
            .arg("requirements.in")
            .arg("--output-file")
            .arg("requirements.txt")
            .arg("--generate-hashes"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.in --output-file requirements.txt --generate-hashes
    markupsafe==2.1.3 \
        --hash=sha256:05fb21170423db021895e1ea1e1f3ab3adb85d1c2333cbc2310f2a26bc77272e \
        --hash=sha256:0a4e4a1aff6c7ac4cd55792abf96c915634c2b97e3cc1c7129578aa68ebd754e \
        --hash=sha256:10bbfe99883db80bdbaff2dcf681dfc6533a614f700da1287707e8a5d78a8431 \
        --hash=sha256:134da1eca9ec0ae528110ccc9e48041e0828d79f24121a1a146161103c76e686 \
        --hash=sha256:14ff806850827afd6b07a5f32bd917fb7f45b046ba40c57abdb636674a8b559c \
        --hash=sha256:1577735524cdad32f9f694208aa75e422adba74f1baee7551620e43a3141f559 \
        --hash=sha256:1b40069d487e7edb2676d3fbdb2b0829ffa2cd63a2ec26c4938b2d34391b4ecc \
        --hash=sha256:1b8dd8c3fd14349433c79fa8abeb573a55fc0fdd769133baac1f5e07abf54aeb \
        --hash=sha256:1f67c7038d560d92149c060157d623c542173016c4babc0c1913cca0564b9939 \
        --hash=sha256:282c2cb35b5b673bbcadb33a585408104df04f14b2d9b01d4c345a3b92861c2c \
        --hash=sha256:2c1b19b3aaacc6e57b7e25710ff571c24d6c3613a45e905b1fde04d691b98ee0 \
        --hash=sha256:2ef12179d3a291be237280175b542c07a36e7f60718296278d8593d21ca937d4 \
        --hash=sha256:338ae27d6b8745585f87218a3f23f1512dbf52c26c28e322dbe54bcede54ccb9 \
        --hash=sha256:3c0fae6c3be832a0a0473ac912810b2877c8cb9d76ca48de1ed31e1c68386575 \
        --hash=sha256:3fd4abcb888d15a94f32b75d8fd18ee162ca0c064f35b11134be77050296d6ba \
        --hash=sha256:42de32b22b6b804f42c5d98be4f7e5e977ecdd9ee9b660fda1a3edf03b11792d \
        --hash=sha256:47d4f1c5f80fc62fdd7777d0d40a2e9dda0a05883ab11374334f6c4de38adffd \
        --hash=sha256:504b320cd4b7eff6f968eddf81127112db685e81f7e36e75f9f84f0df46041c3 \
        --hash=sha256:525808b8019e36eb524b8c68acdd63a37e75714eac50e988180b169d64480a00 \
        --hash=sha256:56d9f2ecac662ca1611d183feb03a3fa4406469dafe241673d521dd5ae92a155 \
        --hash=sha256:5bbe06f8eeafd38e5d0a4894ffec89378b6c6a625ff57e3028921f8ff59318ac \
        --hash=sha256:65c1a9bcdadc6c28eecee2c119465aebff8f7a584dd719facdd9e825ec61ab52 \
        --hash=sha256:68e78619a61ecf91e76aa3e6e8e33fc4894a2bebe93410754bd28fce0a8a4f9f \
        --hash=sha256:69c0f17e9f5a7afdf2cc9fb2d1ce6aabdb3bafb7f38017c0b77862bcec2bbad8 \
        --hash=sha256:6b2b56950d93e41f33b4223ead100ea0fe11f8e6ee5f641eb753ce4b77a7042b \
        --hash=sha256:715d3562f79d540f251b99ebd6d8baa547118974341db04f5ad06d5ea3eb8007 \
        --hash=sha256:787003c0ddb00500e49a10f2844fac87aa6ce977b90b0feaaf9de23c22508b24 \
        --hash=sha256:7ef3cb2ebbf91e330e3bb937efada0edd9003683db6b57bb108c4001f37a02ea \
        --hash=sha256:8023faf4e01efadfa183e863fefde0046de576c6f14659e8782065bcece22198 \
        --hash=sha256:8758846a7e80910096950b67071243da3e5a20ed2546e6392603c096778d48e0 \
        --hash=sha256:8afafd99945ead6e075b973fefa56379c5b5c53fd8937dad92c662da5d8fd5ee \
        --hash=sha256:8c41976a29d078bb235fea9b2ecd3da465df42a562910f9022f1a03107bd02be \
        --hash=sha256:8e254ae696c88d98da6555f5ace2279cf7cd5b3f52be2b5cf97feafe883b58d2 \
        --hash=sha256:8f9293864fe09b8149f0cc42ce56e3f0e54de883a9de90cd427f191c346eb2e1 \
        --hash=sha256:9402b03f1a1b4dc4c19845e5c749e3ab82d5078d16a2a4c2cd2df62d57bb0707 \
        --hash=sha256:962f82a3086483f5e5f64dbad880d31038b698494799b097bc59c2edf392fce6 \
        --hash=sha256:9aad3c1755095ce347e26488214ef77e0485a3c34a50c5a5e2471dff60b9dd9c \
        --hash=sha256:9dcdfd0eaf283af041973bff14a2e143b8bd64e069f4c383416ecd79a81aab58 \
        --hash=sha256:aa57bd9cf8ae831a362185ee444e15a93ecb2e344c8e52e4d721ea3ab6ef1823 \
        --hash=sha256:aa7bd130efab1c280bed0f45501b7c8795f9fdbeb02e965371bbef3523627779 \
        --hash=sha256:ab4a0df41e7c16a1392727727e7998a467472d0ad65f3ad5e6e765015df08636 \
        --hash=sha256:ad9e82fb8f09ade1c3e1b996a6337afac2b8b9e365f926f5a61aacc71adc5b3c \
        --hash=sha256:af598ed32d6ae86f1b747b82783958b1a4ab8f617b06fe68795c7f026abbdcad \
        --hash=sha256:b076b6226fb84157e3f7c971a47ff3a679d837cf338547532ab866c57930dbee \
        --hash=sha256:b7ff0f54cb4ff66dd38bebd335a38e2c22c41a8ee45aa608efc890ac3e3931bc \
        --hash=sha256:bfce63a9e7834b12b87c64d6b155fdd9b3b96191b6bd334bf37db7ff1fe457f2 \
        --hash=sha256:c011a4149cfbcf9f03994ec2edffcb8b1dc2d2aede7ca243746df97a5d41ce48 \
        --hash=sha256:c9c804664ebe8f83a211cace637506669e7890fec1b4195b505c214e50dd4eb7 \
        --hash=sha256:ca379055a47383d02a5400cb0d110cef0a776fc644cda797db0c5696cfd7e18e \
        --hash=sha256:cb0932dc158471523c9637e807d9bfb93e06a95cbf010f1a38b98623b929ef2b \
        --hash=sha256:cd0f502fe016460680cd20aaa5a76d241d6f35a1c3350c474bac1273803893fa \
        --hash=sha256:ceb01949af7121f9fc39f7d27f91be8546f3fb112c608bc4029aef0bab86a2a5 \
        --hash=sha256:d080e0a5eb2529460b30190fcfcc4199bd7f827663f858a226a81bc27beaa97e \
        --hash=sha256:dd15ff04ffd7e05ffcb7fe79f1b98041b8ea30ae9234aed2a9168b5797c3effb \
        --hash=sha256:df0be2b576a7abbf737b1575f048c23fb1d769f267ec4358296f31c2479db8f9 \
        --hash=sha256:e09031c87a1e51556fdcb46e5bd4f59dfb743061cf93c4d6831bf894f125eb57 \
        --hash=sha256:e4dd52d80b8c83fdce44e12478ad2e85c64ea965e75d66dbeafb0a3e77308fcc \
        --hash=sha256:f698de3fd0c4e6972b92290a45bd9b1536bffe8c6759c62471efaa8acb4c37bc \
        --hash=sha256:fec21693218efe39aa7f8599346e90c705afa52c5b31ae019b2e57e8f6542bb2 \
        --hash=sha256:ffcc3f7c66b5f5b7931a5aa68fc9cecc51e685ef90282f4a82f0f5e9b704ad11
        # via -r requirements.in
    
    ----- stderr -----
    Resolved 1 package in [TIME]
    "###
    );

    Ok(())
}

/// Detect the package name from metadata sources from local directories.
///
/// Each requirement is an unnamed local path; uv must read the project metadata
/// (`pyproject.toml`, `setup.py`, or `setup.cfg`, per the fixture names) to determine
/// the package names.
#[test]
fn unnamed_path_requirement() -> Result<()> {
    let context = TestContext::new("3.12");
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str(indoc! {r"
        ../../scripts/packages/poetry_editable
        ../../scripts/packages/black_editable
        ../../scripts/packages/setup_py_editable
        ../../scripts/packages/setup_cfg_editable
        "
    })?;

    // Run from the crate directory so the relative paths resolve against the repository.
    uv_snapshot!(context.filters(), context.pip_compile()
        .arg(requirements_in.path())
        .current_dir(current_dir()?), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] [TEMP_DIR]/requirements.in
    anyio==4.3.0
        # via
        #   httpx
        #   poetry-editable
    ../../scripts/packages/black_editable
        # via -r [TEMP_DIR]/requirements.in
    certifi==2024.2.2
        # via
        #   httpcore
        #   httpx
        #   requests
    charset-normalizer==3.3.2
        # via requests
    h11==0.14.0
        # via httpcore
    httpcore==1.0.4
        # via httpx
    httpx==0.27.0
        # via setup-py-editable
    idna==3.6
        # via
        #   anyio
        #   httpx
        #   requests
    ../../scripts/packages/poetry_editable
        # via -r [TEMP_DIR]/requirements.in
    requests==2.31.0
        # via setup-cfg-editable
    ../../scripts/packages/setup_cfg_editable
        # via -r [TEMP_DIR]/requirements.in
    ../../scripts/packages/setup_py_editable
        # via -r [TEMP_DIR]/requirements.in
    sniffio==1.3.1
        # via
        #   anyio
        #   httpx
    urllib3==2.2.1
        # via requests

    ----- stderr -----
    Resolved 14 packages in [TIME]
    "###);

    Ok(())
}

/// Detect the package name from an unnamed Git requirement.
///
/// The requirement gives only a Git URL and tag; the resolved output pins the requirement
/// to the exact commit SHA for that tag.
#[test]
#[cfg(feature = "git")]
fn unnamed_git_requirement() -> Result<()> {
    let context = TestContext::new("3.12");
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str("git+https://github.com/pallets/flask.git@3.0.0")?;

    uv_snapshot!(context.filters(), context.pip_compile()
        .arg("requirements.in"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.in
    blinker==1.7.0
        # via flask
    click==8.1.7
        # via flask
    flask @ git+https://github.com/pallets/flask.git@735a4701d6d5e848241e7d7535db898efb62d400
        # via -r requirements.in
    itsdangerous==2.1.2
        # via flask
    jinja2==3.1.3
        # via flask
    markupsafe==2.1.5
        # via
        #   jinja2
        #   werkzeug
    werkzeug==3.0.1
        # via flask

    ----- stderr -----
    Resolved 7 packages in [TIME]
    "###);

    Ok(())
}

/// Detect the package name from an unnamed HTTPS requirement.
#[test]
fn unnamed_https_requirement() -> Result<()> {
    // Given the filename `3.0.2.tar.gz`, we need to download the file to determine the package name.
    let context = TestContext::new("3.12");
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str("https://github.com/pallets/flask/archive/refs/tags/3.0.2.tar.gz")?;

    uv_snapshot!(context.filters(), context.pip_compile()
        .arg("requirements.in"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.in
    blinker==1.7.0
        # via flask
    click==8.1.7
        # via flask
    flask @ https://github.com/pallets/flask/archive/refs/tags/3.0.2.tar.gz
        # via -r requirements.in
    itsdangerous==2.1.2
        # via flask
    jinja2==3.1.3
        # via flask
    markupsafe==2.1.5
        # via
        #   jinja2
        #   werkzeug
    werkzeug==3.0.1
        # via flask

    ----- stderr -----
    Resolved 7 packages in [TIME]
    "###);

    Ok(())
}

/// Resolve a local package whose metadata must be built to discover its dependencies
/// (the `hatchling_dynamic` fixture — presumably declares `dependencies` as dynamic;
/// verify against the fixture's `pyproject.toml`).
#[test]
fn dynamic_dependencies() -> Result<()> {
    let context = TestContext::new("3.12");
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str("hatchling-dynamic @ ../../scripts/packages/hatchling_dynamic")?;

    // Run from the crate directory so the relative path resolves against the repository.
    uv_snapshot!(context.filters(), context.pip_compile()
        .arg(requirements_in.path())
        .current_dir(current_dir()?), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] [TEMP_DIR]/requirements.in
    anyio==4.3.0
        # via hatchling-dynamic
    ../../scripts/packages/hatchling_dynamic
        # via -r [TEMP_DIR]/requirements.in
    idna==3.6
        # via anyio
    sniffio==1.3.1
        # via anyio

    ----- stderr -----
    Resolved 4 packages in [TIME]
    "###);

    Ok(())
}

/// This tests the marker expressions emitted when depending on a package with
/// exciting markers like 'anyio'.
///
/// Pinned to Python 3.12.9 (hence the `python-patch` feature gate) so the emitted
/// `python_full_version` marker is stable across runs.
#[cfg(feature = "python-patch")]
#[test]
fn emit_marker_expression_exciting_linux() -> Result<()> {
    let context = TestContext::new("3.12.9");
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str("anyio")?;

    uv_snapshot!(context
        .pip_compile()
        .arg("requirements.in")
        .arg("--python-platform")
        .arg("linux")
        .arg("--emit-marker-expression"), @r"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.in --python-platform linux --emit-marker-expression
    # Pinned dependencies known to be valid for:
    #    python_full_version == '3.12.9' and platform_python_implementation == 'CPython' and sys_platform == 'linux'
    anyio==4.3.0
        # via -r requirements.in
    idna==3.6
        # via anyio
    sniffio==1.3.1
        # via anyio

    ----- stderr -----
    Resolved 3 packages in [TIME]
    ");

    Ok(())
}

/// This tests that the marker expression emitted accounts for markers directly
/// in `requirements.in` (here, `sys_platform == 'linux'` on the requirement itself).
#[cfg(feature = "python-patch")]
#[test]
fn emit_marker_expression_direct() -> Result<()> {
    let context = TestContext::new("3.12.9");
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str("anyio ; sys_platform == 'linux'")?;

    uv_snapshot!(context
        .pip_compile()
        .arg("requirements.in")
        .arg("--python-platform")
        .arg("linux")
        .arg("--emit-marker-expression"), @r"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.in --python-platform linux --emit-marker-expression
    # Pinned dependencies known to be valid for:
    #    python_full_version == '3.12.9' and platform_python_implementation == 'CPython' and sys_platform == 'linux'
    anyio==4.3.0
        # via -r requirements.in
    idna==3.6
        # via anyio
    sniffio==1.3.1
        # via anyio

    ----- stderr -----
    Resolved 3 packages in [TIME]
    ");

    Ok(())
}

/// This tests that the marker expression emitted accounts for markers directly
/// in `requirements.in`, even when the marker evaluates to false on the
/// current platform. In this case, we set `sys_platform == 'macos'` so that on
/// Linux, this dependency is ignored. But the marker expression generated must
/// have `sys_platform == 'linux'`, since the locked set of packages might be
/// different (and indeed are different) on other platforms.
#[test]
fn emit_marker_expression_conditional() -> Result<()> {
    let context = TestContext::new("3.12");
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str("anyio ; sys_platform == 'macos'")?;

    // The resolution is empty on Linux, but the marker expression is still emitted.
    uv_snapshot!(context
        .pip_compile()
        .arg("requirements.in")
        .arg("--python-platform")
        .arg("linux")
        .arg("--emit-marker-expression"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.in --python-platform linux --emit-marker-expression
    # Pinned dependencies known to be valid for:
    #    sys_platform == 'linux'

    ----- stderr -----
    Resolved in [TIME]
    "###);

    Ok(())
}

/// This tests the marker expressions emitted when depending on a package with
/// a non-pypy dependency. Specifically, `pendulum` depends on `time-machine`,
/// but not when using pypy — hence the `implementation_name == 'cpython'` marker
/// in the emitted expression below.
#[cfg(feature = "python-patch")]
#[test]
fn emit_marker_expression_pypy() -> Result<()> {
    let context = TestContext::new("3.12.9");
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str("pendulum")?;

    uv_snapshot!(context
        .pip_compile()
        .arg("requirements.in")
        .arg("--python-platform")
        .arg("linux")
        .arg("--emit-marker-expression"), @r"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.in --python-platform linux --emit-marker-expression
    # Pinned dependencies known to be valid for:
    #    python_full_version == '3.12.9' and implementation_name == 'cpython'
    pendulum==3.0.0
        # via -r requirements.in
    python-dateutil==2.9.0.post0
        # via
        #   pendulum
        #   time-machine
    six==1.16.0
        # via python-dateutil
    time-machine==2.14.1
        # via pendulum
    tzdata==2024.1
        # via pendulum

    ----- stderr -----
    Resolved 5 packages in [TIME]
    ");

    Ok(())
}

/// A local version of a package shadowing a remote package is installed.
///
/// `uv pip compile` should ignore the installed local version (`4.3.0+foo`) in all of the
/// scenarios below, since it resolves without access to the environment.
#[test]
fn local_version_of_remote_package() -> Result<()> {
    let context = TestContext::new("3.12");
    let root_path = context.workspace_root.join("scripts/packages");

    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str("anyio")?;

    // Baseline: resolve against the registry before anything is installed.
    uv_snapshot!(context.filters(), context.pip_compile()
        .arg(requirements_in.canonicalize()?), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] [TEMP_DIR]/requirements.in
    anyio==4.3.0
        # via -r requirements.in
    idna==3.6
        # via anyio
    sniffio==1.3.1
        # via anyio

    ----- stderr -----
    Resolved 3 packages in [TIME]
    "###);

    // Actually install the local dependency
    let mut command = context.pip_install();
    command.arg(root_path.join("anyio_local"));
    uv_snapshot!(
        context.filters(),
        command, @r###"
    success: true
    exit_code: 0
    ----- stdout -----

    ----- stderr -----
    Resolved 1 package in [TIME]
    Prepared 1 package in [TIME]
    Installed 1 package in [TIME]
     + anyio==4.3.0+foo (from file://[WORKSPACE]/scripts/packages/anyio_local)
    "###
    );

    // The local version should _not_ be included in the resolution
    uv_snapshot!(context.filters(), context.pip_compile()
        .arg(requirements_in.canonicalize()?), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] [TEMP_DIR]/requirements.in
    anyio==4.3.0
        # via -r requirements.in
    idna==3.6
        # via anyio
    sniffio==1.3.1
        # via anyio

    ----- stderr -----
    Resolved 3 packages in [TIME]
    "###);

    // Write a lockfile with the local version
    let requirements_txt = context.temp_dir.child("requirements.txt");
    requirements_txt.write_str(&indoc::formatdoc! {r"
            anyio @ {workspace_root}/scripts/packages/anyio_local
        ",
        workspace_root = context.workspace_root.simplified_display(),
    })?;

    // The local version is _still_ excluded from the resolution
    // `uv pip compile` does not have access to an environment and cannot consider installed packages
    // We may want to allow the lockfile to be preserved in this case in the future, but right now
    // we require the URL to always be in the input file.
    uv_snapshot!(context.filters(), context.pip_compile()
        .arg(requirements_in.canonicalize()?)
        .arg("--output-file")
        .arg(requirements_txt.canonicalize()?), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] [TEMP_DIR]/requirements.in --output-file [TEMP_DIR]/requirements.txt
    anyio==4.3.0
        # via -r requirements.in
    idna==3.6
        # via anyio
    sniffio==1.3.1
        # via anyio

    ----- stderr -----
    Resolved 3 packages in [TIME]
    "###);

    Ok(())
}

/// Resolve `pendulum`, whose dependency tree includes `tzdata`.
///
/// NOTE(review): the test name suggests `tzdata` carries a non-Windows marker; the snapshot
/// disables the default Windows filters (`windows_filters=false`) so the platform-specific
/// output is asserted verbatim — confirm against the `uv_snapshot!` filter behavior.
#[test]
fn pendulum_no_tzdata_on_windows() -> Result<()> {
    let context = TestContext::new("3.12");
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str("pendulum")?;

    uv_snapshot!(
        context.filters(),
        windows_filters=false,
        context.pip_compile().arg("requirements.in"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.in
    pendulum==3.0.0
        # via -r requirements.in
    python-dateutil==2.9.0.post0
        # via
        #   pendulum
        #   time-machine
    six==1.16.0
        # via python-dateutil
    time-machine==2.14.1
        # via pendulum
    tzdata==2024.1
        # via pendulum

    ----- stderr -----
    Resolved 5 packages in [TIME]
    "###);

    Ok(())
}

/// Allow URL dependencies recursively for local source trees.
///
/// Layout: `app` depends on `lib` (via a `file://` URL), which depends on `anyio`
/// (via a `file://` URL); the input requires `./app`.
#[test]
fn allow_recursive_url_local_path() -> Result<()> {
    let context = TestContext::new("3.12");

    // Create a standalone library named "anyio".
    let anyio = context.temp_dir.child("anyio");
    anyio.create_dir_all()?;
    let pyproject_toml = anyio.child("pyproject.toml");
    pyproject_toml.write_str(
        r#"[project]
name = "anyio"
version = "0.0.0"
dependencies = [
    "idna"
]
requires-python = ">3.8"
"#,
    )?;

    // Create a library that depends on the standalone library.
    let lib = context.temp_dir.child("lib");
    lib.create_dir_all()?;
    let pyproject_toml = lib.child("pyproject.toml");
    pyproject_toml.write_str(&format!(
        r#"[project]
name = "lib"
version = "0.0.0"
dependencies = [
    "anyio @ {}"
]
requires-python = ">3.8"
"#,
        Url::from_directory_path(anyio.path()).unwrap().as_str(),
    ))?;

    // Create an application that depends on the library.
    let app = context.temp_dir.child("app");
    app.create_dir_all()?;
    let pyproject_toml = app.child("pyproject.toml");
    pyproject_toml.write_str(&format!(
        r#"[project]
name = "example"
version = "0.0.0"
dependencies = [
  "lib @ {}"
]
requires-python = ">3.8"
"#,
        Url::from_directory_path(lib.path()).unwrap().as_str(),
    ))?;

    // Write to a requirements file.
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str("./app")?;

    uv_snapshot!(context.filters(), context.pip_compile()
        .arg("requirements.in"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.in
    anyio @ file://[TEMP_DIR]/anyio/
        # via lib
    ./app
        # via -r requirements.in
    idna==3.6
        # via anyio
    lib @ file://[TEMP_DIR]/lib/
        # via example

    ----- stderr -----
    Resolved 4 packages in [TIME]
    "###
    );

    Ok(())
}

/// Allow URL dependencies recursively for local source trees, but respect overrides.
///
/// Same `app -> lib -> anyio` layout as above, but an `--override` pins `anyio` to a
/// registry version, which should win over the recursive `file://` URL.
#[test]
fn allow_recursive_url_local_path_override() -> Result<()> {
    let context = TestContext::new("3.12");

    // Create a standalone library named "anyio".
    let anyio = context.temp_dir.child("anyio");
    anyio.create_dir_all()?;
    let pyproject_toml = anyio.child("pyproject.toml");
    pyproject_toml.write_str(
        r#"[project]
name = "anyio"
version = "0.0.0"
dependencies = [
    "idna"
]
requires-python = ">3.8"
"#,
    )?;

    // Create a library that depends on the standalone library.
    let lib = context.temp_dir.child("lib");
    lib.create_dir_all()?;
    let pyproject_toml = lib.child("pyproject.toml");
    pyproject_toml.write_str(&format!(
        r#"[project]
name = "lib"
version = "0.0.0"
dependencies = [
    "anyio @ {}"
]
requires-python = ">3.8"
"#,
        Url::from_directory_path(anyio.path()).unwrap().as_str(),
    ))?;

    // Create an application that depends on the library.
    let app = context.temp_dir.child("app");
    app.create_dir_all()?;
    let pyproject_toml = app.child("pyproject.toml");
    pyproject_toml.write_str(&format!(
        r#"[project]
name = "example"
version = "0.0.0"
dependencies = [
  "lib @ {}"
]
requires-python = ">3.8"
"#,
        Url::from_directory_path(lib.path()).unwrap().as_str(),
    ))?;

    // Write to a requirements file.
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str("./app")?;

    // Create an override that pulls `anyio` from PyPI.
    let overrides_txt = context.temp_dir.child("overrides.txt");
    overrides_txt.write_str("anyio==3.7.0")?;

    uv_snapshot!(context.filters(), context.pip_compile()
        .arg("requirements.in")
        .arg("--override")
        .arg("overrides.txt"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.in --override overrides.txt
    anyio==3.7.0
        # via
        #   --override overrides.txt
        #   lib
    ./app
        # via -r requirements.in
    idna==3.6
        # via anyio
    lib @ file://[TEMP_DIR]/lib/
        # via example
    sniffio==1.3.1
        # via anyio

    ----- stderr -----
    Resolved 5 packages in [TIME]
    "###
    );

    Ok(())
}

/// Allow URL dependencies recursively for local source trees, but respect both overrides _and_
/// constraints.
///
/// We have app -> lib -> anyio and root has a directory requirement on app.
#[test]
fn allow_recursive_url_local_path_override_constraint() -> Result<()> {
    let context = TestContext::new("3.12");

    // Create a standalone library named "anyio".
    let anyio = context.temp_dir.child("anyio");
    anyio.create_dir_all()?;
    let pyproject_toml = anyio.child("pyproject.toml");
    pyproject_toml.write_str(
        r#"[project]
name = "anyio"
version = "0.0.0"
dependencies = [
    "idna"
]
requires-python = ">3.8"
"#,
    )?;

    // Create a library that depends on the standalone library.
    let lib = context.temp_dir.child("lib");
    lib.create_dir_all()?;
    let pyproject_toml = lib.child("pyproject.toml");
    pyproject_toml.write_str(&format!(
        r#"[project]
name = "lib"
version = "0.0.0"
dependencies = [
    "anyio @ {}"
]
requires-python = ">3.8"
"#,
        Url::from_directory_path(anyio.path()).unwrap().as_str(),
    ))?;

    // Create an application that depends on the library.
    let app = context.temp_dir.child("app");
    app.create_dir_all()?;
    let pyproject_toml = app.child("pyproject.toml");
    pyproject_toml.write_str(&format!(
        r#"[project]
name = "example"
version = "0.0.0"
dependencies = [
  "lib @ {}"
]
requires-python = ">3.8"
"#,
        Url::from_directory_path(lib.path()).unwrap().as_str(),
    ))?;

    // Write to a requirements file.
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str("./app")?;

    // Create an override that pulls `anyio` from PyPI.
    let overrides_txt = context.temp_dir.child("overrides.txt");
    overrides_txt.write_str("anyio==0.0.0")?;

    // Ensure that resolution fails, since `0.0.0` does not exist on PyPI.
    uv_snapshot!(context.filters(), context.pip_compile()
        .arg("requirements.in")
        .arg("--override")
        .arg("overrides.txt"), @r###"
    success: false
    exit_code: 1
    ----- stdout -----

    ----- stderr -----
      × No solution found when resolving dependencies:
      ╰─▶ Because there is no version of anyio==0.0.0 and lib==0.0.0 depends on anyio==0.0.0, we can conclude that lib==0.0.0 cannot be used.
          And because only lib==0.0.0 is available and example==0.0.0 depends on lib, we can conclude that example==0.0.0 cannot be used.
          And because only example==0.0.0 is available and you require example, we can conclude that your requirements are unsatisfiable.
    "###
    );

    // Now constrain `anyio` to the local version.
    let constraints_txt = context.temp_dir.child("constraints.txt");
    constraints_txt.write_str("anyio @ ./anyio")?;

    // With the constraint in place, the override is satisfied by the local directory.
    uv_snapshot!(context.filters(), context.pip_compile()
        .arg("requirements.in")
        .arg("--override")
        .arg("overrides.txt")
        .arg("--constraint")
        .arg("constraints.txt"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.in --override overrides.txt --constraint constraints.txt
    ./anyio
        # via
        #   -c constraints.txt
        #   --override overrides.txt
        #   lib
    ./app
        # via -r requirements.in
    idna==3.6
        # via anyio
    lib @ file://[TEMP_DIR]/lib/
        # via example

    ----- stderr -----
    Resolved 4 packages in [TIME]
    "###
    );

    Ok(())
}

/// Allow pre-releases for dependencies of source path requirements.
#[test]
fn prerelease_path_requirement() -> Result<()> {
    let context = TestContext::new("3.12");

    // Create a package that requires a pre-release version of `flask`.
    let pyproject_toml = context.temp_dir.child("pyproject.toml");
    pyproject_toml.write_str(
        r#"[project]
name = "example"
version = "0.0.0"
dependencies = [
  "flask==2.0.0rc1"
]
requires-python = ">3.8"
"#,
    )?;

    // Write to a requirements file.
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str(".")?;

    // The pre-release `flask==2.0.0rc1` should be accepted without an explicit pre-release flag.
    uv_snapshot!(context.filters(), context.pip_compile()
        .arg("requirements.in"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.in
    click==8.1.7
        # via flask
    .
        # via -r requirements.in
    flask==2.0.0rc1
        # via example
    itsdangerous==2.1.2
        # via flask
    jinja2==3.1.3
        # via flask
    markupsafe==2.1.5
        # via
        #   jinja2
        #   werkzeug
    werkzeug==3.0.1
        # via flask

    ----- stderr -----
    Resolved 7 packages in [TIME]
    "###
    );

    Ok(())
}

/// Allow pre-releases for dependencies of editable requirements.
#[test]
fn prerelease_editable_requirement() -> Result<()> {
    let context = TestContext::new("3.12");

    // Create a package that requires a pre-release version of `flask`.
    let pyproject_toml = context.temp_dir.child("pyproject.toml");
    pyproject_toml.write_str(
        r#"[project]
name = "example"
version = "0.0.0"
dependencies = [
  "flask==2.0.0rc1"
]
requires-python = ">3.8"
"#,
    )?;

    // Write to a requirements file.
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str("-e .")?;

    // The editable install (`-e .`) should likewise allow the pre-release dependency.
    uv_snapshot!( context.pip_compile()
        .arg("requirements.in"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.in
    -e .
        # via -r requirements.in
    click==8.1.7
        # via flask
    flask==2.0.0rc1
        # via example
    itsdangerous==2.1.2
        # via flask
    jinja2==3.1.3
        # via flask
    markupsafe==2.1.5
        # via
        #   jinja2
        #   werkzeug
    werkzeug==3.0.1
        # via flask

    ----- stderr -----
    Resolved 7 packages in [TIME]
    "###
    );

    Ok(())
}

/// Install a package via `--extra-index-url`.
///
/// If the package exists on the "extra" index, but at an incompatible version, the resolution
/// should fail by default (even though a compatible version exists on the "primary" index).
/// This is the default `first-match` index strategy, guarding against dependency confusion.
#[test]
fn compile_index_url_first_match_base() -> Result<()> {
    let context = TestContext::new("3.12").with_exclude_newer("2025-01-30T00:00:00Z");

    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str("jinja2==3.1.0")?;

    uv_snapshot!(context.filters(), context.pip_compile()
        .arg("--index-url")
        .arg("https://pypi.org/simple")
        .arg("--extra-index-url")
        .arg("https://astral-sh.github.io/pytorch-mirror/whl/cpu")
        .arg("requirements.in")
        .arg("--no-deps"), @r###"
    success: false
    exit_code: 1
    ----- stdout -----

    ----- stderr -----
      × No solution found when resolving dependencies:
      ╰─▶ Because there is no version of jinja2==3.1.0 and you require jinja2==3.1.0, we can conclude that your requirements are unsatisfiable.

          hint: `jinja2` was found on https://astral-sh.github.io/pytorch-mirror/whl/cpu, but not at the requested version (jinja2==3.1.0). A compatible version may be available on a subsequent index (e.g., https://pypi.org/simple). By default, uv will only consider versions that are published on the first index that contains a given package, to avoid dependency confusion attacks. If all indexes are equally trusted, use `--index-strategy unsafe-best-match` to consider all versions from all indexes, regardless of the order in which they were defined.
    "###
    );

    Ok(())
}

/// Install a package via `--extra-index-url`.
///
/// If the package exists on the "extra" index, but at an incompatible version, the resolution
/// should fail by default (even though a compatible version exists on the "primary" index).
/// As `compile_index_url_first_match_base`, but the requirement carries an environment marker
/// and resolution is `--universal`, so the marker appears in the error output.
#[test]
fn compile_index_url_first_match_marker() -> Result<()> {
    let context = TestContext::new("3.12").with_exclude_newer("2025-01-30T00:00:00Z");

    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str("jinja2==3.1.0 ; sys_platform == 'linux'")?;

    uv_snapshot!(context.filters(), context.pip_compile()
        .arg("--universal")
        .arg("--index-url")
        .arg("https://pypi.org/simple")
        .arg("--extra-index-url")
        .arg("https://astral-sh.github.io/pytorch-mirror/whl/cpu")
        .arg("requirements.in")
        .arg("--no-deps"), @r###"
    success: false
    exit_code: 1
    ----- stdout -----

    ----- stderr -----
      × No solution found when resolving dependencies:
      ╰─▶ Because there is no version of jinja2{sys_platform == 'linux'}==3.1.0 and you require jinja2{sys_platform == 'linux'}==3.1.0, we can conclude that your requirements are unsatisfiable.

          hint: `jinja2` was found on https://astral-sh.github.io/pytorch-mirror/whl/cpu, but not at the requested version (jinja2==3.1.0). A compatible version may be available on a subsequent index (e.g., https://pypi.org/simple). By default, uv will only consider versions that are published on the first index that contains a given package, to avoid dependency confusion attacks. If all indexes are equally trusted, use `--index-strategy unsafe-best-match` to consider all versions from all indexes, regardless of the order in which they were defined.
    "###
    );

    Ok(())
}

/// Install a package via `--extra-index-url`.
///
/// If the package "exists" on the "extra" index, but without any versions, the resolution
/// should fail by default (even though a compatible version exists on the "primary" index).
#[test]
fn compile_index_url_first_match_all_versions() -> Result<()> {
    let context = TestContext::new("3.12");

    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str("pandas")?;

    uv_snapshot!(context.filters(), context.pip_compile()
        .arg("--index-url")
        .arg("https://pypi.org/simple")
        .arg("--extra-index-url")
        .arg("https://test.pypi.org/simple")
        .arg("requirements.in")
        .arg("--no-deps"), @r###"
    success: false
    exit_code: 1
    ----- stdout -----

    ----- stderr -----
      × No solution found when resolving dependencies:
      ╰─▶ Because there are no versions of pandas and you require pandas, we can conclude that your requirements are unsatisfiable.
    "###
    );

    Ok(())
}

/// Install a package via `--extra-index-url`.
///
/// If the package exists on the "extra" index, but at an incompatible version, the
/// resolution should fallback to the "primary" index when `--index-strategy unsafe-any-match`
/// is provided.
#[test]
fn compile_index_url_fallback() -> Result<()> {
    let context = TestContext::new("3.12").with_exclude_newer("2025-01-30T00:00:00Z");

    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str("jinja2==3.1.0")?;

    uv_snapshot!(context.filters(), context.pip_compile()
        .arg("--index-strategy")
        .arg("unsafe-any-match")
        .arg("--index-url")
        .arg("https://pypi.org/simple")
        .arg("--extra-index-url")
        .arg("https://astral-sh.github.io/pytorch-mirror/whl/cpu")
        .arg("requirements.in")
        .arg("--no-deps"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] --index-strategy unsafe-any-match requirements.in --no-deps
    jinja2==3.1.0
        # via -r requirements.in

    ----- stderr -----
    Resolved 1 package in [TIME]
    "###
    );

    Ok(())
}

/// Install a package via `--extra-index-url`.
///
/// If the package exists on the "extra" index at a compatible version, the resolver should
/// prefer it, even if newer versions exist on the "primary" index.
///
/// In this case, anyio 3.5.0 is hosted on the "extra" index, but newer versions are available on
/// the "primary" index. We should prefer the older version from the "extra" index, since it's the
/// preferred index.
#[test]
fn compile_index_url_fallback_prefer_primary() -> Result<()> {
    let context = TestContext::new("3.12");

    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str("anyio")?;

    uv_snapshot!(context.filters(), context.pip_compile()
        .arg("--index-strategy")
        .arg("unsafe-any-match")
        .arg("--index-url")
        .arg("https://pypi.org/simple")
        .arg("--extra-index-url")
        .arg("https://test.pypi.org/simple")
        .arg("requirements.in")
        .arg("--no-deps"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] --index-strategy unsafe-any-match requirements.in --no-deps
    anyio==3.5.0
        # via -r requirements.in

    ----- stderr -----
    Resolved 1 package in [TIME]
    "###
    );

    Ok(())
}

/// Install a package via `--extra-index-url`.
///
/// With `unsafe-best-match`, the resolver should prefer the highest compatible version,
/// regardless of which index it comes from.
///
/// In this case, anyio 3.5.0 is hosted on the "extra" index, but newer versions are available on
/// the "primary" index. We should prefer the newer version from the "primary" index, despite the
/// "extra" index being the preferred index.
#[test]
fn compile_index_url_unsafe_highest() -> Result<()> {
    let context = TestContext::new("3.12");

    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str("anyio")?;

    uv_snapshot!(context.filters(), context.pip_compile()
        .arg("--index-strategy")
        .arg("unsafe-best-match")
        .arg("--index-url")
        .arg("https://pypi.org/simple")
        .arg("--extra-index-url")
        .arg("https://test.pypi.org/simple")
        .arg("requirements.in")
        .arg("--no-deps"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] --index-strategy unsafe-best-match requirements.in --no-deps
    anyio==4.3.0
        # via -r requirements.in

    ----- stderr -----
    Resolved 1 package in [TIME]
    "###
    );

    Ok(())
}

/// Install a package via `--extra-index-url`.
///
/// With `unsafe-best-match` and `--resolution lowest`, the resolver should prefer the lowest
/// compatible version, regardless of which index it comes from.
///
/// In this case, anyio 3.5.0 is hosted on the "extra" index, but older versions are available on
/// the "primary" index. We should prefer the older version from the "primary" index, despite the
/// "extra" index being the preferred index.
///
/// We also test here that a warning is raised for missing lower bounds on direct dependencies with
/// `--resolution lowest`.
#[test]
fn compile_index_url_unsafe_lowest() -> Result<()> {
    let context = TestContext::new("3.12");

    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str("anyio<100")?;

    uv_snapshot!(context.filters(), context.pip_compile()
        .arg("--resolution")
        .arg("lowest")
        .arg("--index-strategy")
        .arg("unsafe-best-match")
        .arg("--index-url")
        .arg("https://pypi.org/simple")
        .arg("--extra-index-url")
        .arg("https://test.pypi.org/simple")
        .arg("requirements.in")
        .arg("--no-deps"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] --resolution lowest --index-strategy unsafe-best-match requirements.in --no-deps
    anyio==1.0.0
        # via -r requirements.in

    ----- stderr -----
    warning: The direct dependency `anyio` is unpinned. Consider setting a lower bound when using `--resolution lowest` to avoid using outdated versions.
    Resolved 1 package in [TIME]
    "###
    );

    Ok(())
}

/// Ensure that the username and the password are omitted when
/// index annotations are displayed via `--emit-index-annotation`.
///
/// The index URL is supplied with embedded credentials via `UV_INDEX_URL`; the emitted
/// `# from ...` annotations must show the bare URL only.
#[test]
fn emit_index_annotation_hide_password() -> Result<()> {
    let context = TestContext::new("3.12");

    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str("requests")?;

    uv_snapshot!(context.filters(), context.pip_compile()
        .arg("requirements.in")
        .arg("--emit-index-annotation")
        .env(EnvVars::UV_INDEX_URL, "https://test-user:test-password@pypi.org/simple"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.in --emit-index-annotation
    certifi==2024.2.2
        # via requests
        # from https://pypi.org/simple
    charset-normalizer==3.3.2
        # via requests
        # from https://pypi.org/simple
    idna==3.6
        # via requests
        # from https://pypi.org/simple
    requests==2.31.0
        # via -r requirements.in
        # from https://pypi.org/simple
    urllib3==2.2.1
        # via requests
        # from https://pypi.org/simple

    ----- stderr -----
    Resolved 5 packages in [TIME]
    "###
    );

    Ok(())
}

/// Ensure that `--emit-index-annotation` prints the index URL for each package.
/// Every resolved package should carry a `# from <index>` comment under its entry.
#[test]
fn emit_index_annotation_pypi_org_simple() -> Result<()> {
    let context = TestContext::new("3.12");

    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str("requests")?;

    uv_snapshot!(context.filters(), context.pip_compile()
        .arg("requirements.in")
        .arg("--emit-index-annotation"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.in --emit-index-annotation
    certifi==2024.2.2
        # via requests
        # from https://pypi.org/simple
    charset-normalizer==3.3.2
        # via requests
        # from https://pypi.org/simple
    idna==3.6
        # via requests
        # from https://pypi.org/simple
    requests==2.31.0
        # via -r requirements.in
        # from https://pypi.org/simple
    urllib3==2.2.1
        # via requests
        # from https://pypi.org/simple

    ----- stderr -----
    Resolved 5 packages in [TIME]
    "###
    );

    Ok(())
}

/// Ensure that `--emit-index-annotation` plays nicely with `--no-annotate`.
///
/// For now, `--no-annotate` doesn't affect `--emit-index-annotation`, in that we still emit the
/// index annotation, and leave `--no-annotate` to only affect the package _source_ annotations.
#[test]
fn emit_index_annotation_no_annotate() -> Result<()> {
    let context = TestContext::new("3.12");

    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str("requests")?;

    // Expect `# from <index>` lines to remain while `# via <package>` lines are suppressed.
    uv_snapshot!(context.filters(), context.pip_compile()
        .arg("requirements.in")
        .arg("--emit-index-annotation")
        .arg("--no-annotate"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.in --emit-index-annotation --no-annotate
    certifi==2024.2.2
        # from https://pypi.org/simple
    charset-normalizer==3.3.2
        # from https://pypi.org/simple
    idna==3.6
        # from https://pypi.org/simple
    requests==2.31.0
        # from https://pypi.org/simple
    urllib3==2.2.1
        # from https://pypi.org/simple

    ----- stderr -----
    Resolved 5 packages in [TIME]
    "###
    );

    Ok(())
}

/// Ensure that `--emit-index-annotation` plays nicely with `--annotation-style=line`.
///
/// The `# via` annotations move onto the requirement line, while the `# from <index>`
/// annotations stay on their own indented line below each entry.
#[test]
fn emit_index_annotation_line() -> Result<()> {
    let context = TestContext::new("3.12");

    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str("requests")?;

    uv_snapshot!(context.filters(), context.pip_compile()
        .arg("requirements.in")
        .arg("--emit-index-annotation")
        .arg("--annotation-style")
        .arg("line"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.in --emit-index-annotation --annotation-style line
    certifi==2024.2.2         # via requests
        # from https://pypi.org/simple
    charset-normalizer==3.3.2  # via requests
        # from https://pypi.org/simple
    idna==3.6                 # via requests
        # from https://pypi.org/simple
    requests==2.31.0          # via -r requirements.in
        # from https://pypi.org/simple
    urllib3==2.2.1            # via requests
        # from https://pypi.org/simple

    ----- stderr -----
    Resolved 5 packages in [TIME]
    "###
    );

    Ok(())
}

/// `--emit-index-annotation` where packages are pulled from two distinct indexes.
///
/// `requests` resolves from the extra index (test.pypi.org) and `uv` from the default
/// index (pypi.org); each entry's `# from` annotation must name its own index.
#[test]
fn emit_index_annotation_multiple_indexes() -> Result<()> {
    let context = TestContext::new("3.12");

    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str("uv\nrequests")?;

    uv_snapshot!(context.filters(), context.pip_compile()
        .arg("requirements.in")
        .arg("--extra-index-url")
        .arg("https://test.pypi.org/simple")
        .arg("--emit-index-annotation"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.in --emit-index-annotation
    requests==2.5.4.1
        # via -r requirements.in
        # from https://test.pypi.org/simple
    uv==0.1.24
        # via -r requirements.in
        # from https://pypi.org/simple

    ----- stderr -----
    Resolved 2 packages in [TIME]
    "###
    );

    Ok(())
}

/// Test error message when direct dependency is an empty set.
///
/// `pypyp==1,>=1.2` is self-contradictory (no version can be both `==1` and `>=1.2`),
/// so resolution must fail with a "you require pypyp ∅" error.
#[test]
fn no_version_for_direct_dependency() -> Result<()> {
    let context = TestContext::new("3.12");

    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str("pypyp==1,>=1.2")?;

    uv_snapshot!(context.filters(), context.pip_compile()
        .arg("requirements.in")
        // Must error before we make any network requests
        .arg("--offline"), @r###"
    success: false
    exit_code: 1
    ----- stdout -----

    ----- stderr -----
      × No solution found when resolving dependencies:
      ╰─▶ you require pypyp ∅
    "###
    );

    Ok(())
}

/// Compile against a dedicated platform, which may differ from the current platform.
///
/// Resolving `black` for Linux aarch64 omits `colorama`, while resolving for Windows
/// x86_64 includes it (pulled in by `click` on Windows), regardless of the host OS.
#[test]
fn python_platform() -> Result<()> {
    let context = TestContext::new("3.12");

    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str("black")?;

    // `windows_filters=false`: the output is pinned by `--python-platform`, so the
    // snapshot should be identical on every host — presumably the Windows-specific
    // filters would otherwise rewrite it (NOTE(review): confirm against `uv_snapshot!`).
    uv_snapshot!(context.filters(),
        windows_filters=false,
        context.pip_compile()
        .arg("requirements.in")
        .arg("--python-platform")
        .arg("aarch64-unknown-linux-gnu"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.in --python-platform aarch64-unknown-linux-gnu
    black==24.3.0
        # via -r requirements.in
    click==8.1.7
        # via black
    mypy-extensions==1.0.0
        # via black
    packaging==24.0
        # via black
    pathspec==0.12.1
        # via black
    platformdirs==4.2.0
        # via black

    ----- stderr -----
    Resolved 6 packages in [TIME]
    "###
    );

    uv_snapshot!(context.filters(),
        windows_filters=false,
        context.pip_compile()
        .arg("requirements.in")
        .arg("--python-platform")
        .arg("x86_64-pc-windows-msvc"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.in --python-platform x86_64-pc-windows-msvc
    black==24.3.0
        # via -r requirements.in
    click==8.1.7
        # via black
    colorama==0.4.6
        # via click
    mypy-extensions==1.0.0
        # via black
    packaging==24.0
        # via black
    pathspec==0.12.1
        # via black
    platformdirs==4.2.0
        # via black

    ----- stderr -----
    Resolved 7 packages in [TIME]
    "###
    );

    Ok(())
}

/// Resolve a specific source distribution via a Git HTTPS dependency.
///
/// No `rev`/`tag`/`branch` is given, so uv resolves the repository's default branch;
/// the resulting commit is unstable and therefore filtered out of the snapshot.
#[test]
#[cfg(feature = "git")]
fn git_source_default_branch() -> Result<()> {
    let context = TestContext::new("3.12");

    let pyproject_toml = context.temp_dir.child("pyproject.toml");
    pyproject_toml.write_str(indoc! {r#"
        [project]
        name = "project"
        version = "0.0.0"
        dependencies = [
          "uv-public-pypackage",
        ]

        [tool.uv.sources]
        uv-public-pypackage = { git = "https://github.com/astral-test/uv-public-pypackage" }
    "#})?;

    // In addition to the standard filters, remove the `main` commit, which will change frequently.
    let filters: Vec<_> = [(r"@(\d|\w){40}", "@[COMMIT]")]
        .into_iter()
        .chain(context.filters())
        .collect();

    uv_snapshot!(filters, context.pip_compile()
        .arg("pyproject.toml"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] pyproject.toml
    uv-public-pypackage @ git+https://github.com/astral-test/uv-public-pypackage@[COMMIT]
        # via project (pyproject.toml)

    ----- stderr -----
    Resolved 1 package in [TIME]
    "###);

    Ok(())
}

/// Resolve a specific branch via a Git HTTPS dependency.
///
/// The `test-branch` branch is expected to be pinned at commit `0dacfd6…`, so the
/// resolved commit can be snapshotted verbatim (unlike the default-branch test).
#[test]
#[cfg(feature = "git")]
fn git_source_branch() -> Result<()> {
    let context = TestContext::new("3.12");

    let pyproject_toml = context.temp_dir.child("pyproject.toml");
    pyproject_toml.write_str(indoc! {r#"
        [project]
        name = "project"
        version = "0.0.0"
        dependencies = [
          "uv-public-pypackage",
        ]

        [tool.uv.sources]
        uv-public-pypackage = { git = "https://github.com/astral-test/uv-public-pypackage", branch = "test-branch" }
    "#})?;

    uv_snapshot!(context.filters(), context.pip_compile()
        .arg("pyproject.toml"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] pyproject.toml
    uv-public-pypackage @ git+https://github.com/astral-test/uv-public-pypackage@0dacfd662c64cb4ceb16e6cf65a157a8b715b979
        # via project (pyproject.toml)

    ----- stderr -----
    Resolved 1 package in [TIME]
    "###);

    Ok(())
}

/// Resolve a specific tag via a Git HTTPS dependency.
///
/// The `test-tag` tag points at commit `0dacfd6…`; uv must resolve the tag to that
/// full commit SHA in the output.
#[test]
#[cfg(feature = "git")]
fn git_source_tag() -> Result<()> {
    let context = TestContext::new("3.12");

    let pyproject_toml = context.temp_dir.child("pyproject.toml");
    pyproject_toml.write_str(indoc! {r#"
        [project]
        name = "project"
        version = "0.0.0"
        dependencies = [
          "uv-public-pypackage",
        ]

        [tool.uv.sources]
        uv-public-pypackage = { git = "https://github.com/astral-test/uv-public-pypackage", tag = "test-tag" }
    "#})?;

    uv_snapshot!(context.filters(), context.pip_compile()
        .arg("pyproject.toml"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] pyproject.toml
    uv-public-pypackage @ git+https://github.com/astral-test/uv-public-pypackage@0dacfd662c64cb4ceb16e6cf65a157a8b715b979
        # via project (pyproject.toml)

    ----- stderr -----
    Resolved 1 package in [TIME]
    "###);

    Ok(())
}

/// Resolve a specific commit via a Git HTTPS dependency.
///
/// A full 40-character SHA is given as `rev`; the resolved URL must echo it back
/// unchanged.
#[test]
#[cfg(feature = "git")]
fn git_source_long_commit() -> Result<()> {
    let context = TestContext::new("3.12");

    let pyproject_toml = context.temp_dir.child("pyproject.toml");
    pyproject_toml.write_str(indoc! {r#"
        [project]
        name = "project"
        version = "0.0.0"
        dependencies = [
          "uv-public-pypackage",
        ]

        [tool.uv.sources]
        uv-public-pypackage = { git = "https://github.com/astral-test/uv-public-pypackage", rev = "0dacfd662c64cb4ceb16e6cf65a157a8b715b979" }
    "#})?;

    uv_snapshot!(context.filters(), context.pip_compile()
        .arg("pyproject.toml"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] pyproject.toml
    uv-public-pypackage @ git+https://github.com/astral-test/uv-public-pypackage@0dacfd662c64cb4ceb16e6cf65a157a8b715b979
        # via project (pyproject.toml)

    ----- stderr -----
    Resolved 1 package in [TIME]
    "###);

    Ok(())
}

/// Resolve a specific commit via a Git HTTPS dependency.
///
/// A short (7-character) SHA prefix is given as `rev`; uv must expand it to the full
/// 40-character commit in the output.
#[test]
#[cfg(feature = "git")]
fn git_source_short_commit() -> Result<()> {
    let context = TestContext::new("3.12");

    let pyproject_toml = context.temp_dir.child("pyproject.toml");
    pyproject_toml.write_str(indoc! {r#"
        [project]
        name = "project"
        version = "0.0.0"
        dependencies = [
          "uv-public-pypackage",
        ]

        [tool.uv.sources]
        uv-public-pypackage = { git = "https://github.com/astral-test/uv-public-pypackage", rev = "0dacfd6" }
    "#})?;

    uv_snapshot!(context.filters(), context.pip_compile()
        .arg("pyproject.toml"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] pyproject.toml
    uv-public-pypackage @ git+https://github.com/astral-test/uv-public-pypackage@0dacfd662c64cb4ceb16e6cf65a157a8b715b979
        # via project (pyproject.toml)

    ----- stderr -----
    Resolved 1 package in [TIME]
    "###);

    Ok(())
}

/// Resolve a specific ref via a Git HTTPS dependency.
///
/// Uses a non-branch, non-tag ref (`refs/pull/4/head`, a GitHub pull-request head),
/// which resolves to its own distinct commit.
#[test]
#[cfg(feature = "git")]
fn git_source_refs() -> Result<()> {
    let context = TestContext::new("3.12");

    let pyproject_toml = context.temp_dir.child("pyproject.toml");
    pyproject_toml.write_str(indoc! {r#"
        [project]
        name = "project"
        version = "0.0.0"
        dependencies = [
          "uv-public-pypackage",
        ]

        [tool.uv.sources]
        uv-public-pypackage = { git = "https://github.com/astral-test/uv-public-pypackage", rev = "refs/pull/4/head" }
    "#})?;

    uv_snapshot!(context.filters(), context.pip_compile()
        .arg("pyproject.toml"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] pyproject.toml
    uv-public-pypackage @ git+https://github.com/astral-test/uv-public-pypackage@9d01a806f17ddacb9c7b66b1b68574adf790b63f
        # via project (pyproject.toml)

    ----- stderr -----
    Resolved 1 package in [TIME]
    "###);

    Ok(())
}

/// Request a non-existent tag via a Git HTTPS dependency.
///
/// The fetch of `tag = "missing"` must fail, and the error chain should surface the
/// underlying `git fetch` failure.
#[test]
#[cfg(feature = "git")]
#[cfg_attr(windows, ignore = "Git error messages differ on Windows")]
fn git_source_missing_tag() -> Result<()> {
    let context = TestContext::new("3.12");

    // Strip the host-specific path to the `git` executable from the error output so
    // the snapshot is stable across machines.
    let mut filters = context.filters();
    filters.push(("`.*/git fetch (.*)`", "`git fetch $1`"));

    let pyproject_toml = context.temp_dir.child("pyproject.toml");
    pyproject_toml.write_str(indoc! {r#"
        [project]
        name = "project"
        version = "0.0.0"
        dependencies = [
          "uv-public-pypackage",
        ]

        [tool.uv.sources]
        uv-public-pypackage = { git = "https://github.com/astral-test/uv-public-pypackage", tag = "missing" }
    "#})?;

    uv_snapshot!(filters, context.pip_compile()
        .arg("pyproject.toml"), @r###"
    success: false
    exit_code: 1
    ----- stdout -----

    ----- stderr -----
      × Failed to download and build `uv-public-pypackage @ git+https://github.com/astral-test/uv-public-pypackage@missing`
      ├─▶ Git operation failed
      ├─▶ failed to clone into: [CACHE_DIR]/git-v0/db/8dab139913c4b566
      ├─▶ failed to fetch tag `missing`
      ╰─▶ process didn't exit successfully: `git fetch --force --update-head-ok 'https://github.com/astral-test/uv-public-pypackage' '+refs/tags/missing:refs/remotes/origin/tags/missing'` (exit status: 128)
          --- stderr
          fatal: couldn't find remote ref refs/tags/missing
    "###);

    Ok(())
}

/// Compile a `pyproject.toml` that pins `anyio` via a `[tool.uv.sources]` URL source
/// without a matching version constraint in `dependencies` beyond `anyio==4.3.0`.
///
/// NOTE(review): the test name suggests a warning should be emitted for a missing
/// constraint, but the snapshot's stderr contains none — confirm whether the warning
/// was removed upstream or the name is stale.
#[test]
fn warn_missing_constraint() -> Result<()> {
    let context = TestContext::new("3.12");

    let pyproject_toml = context.temp_dir.child("pyproject.toml");
    pyproject_toml.write_str(indoc! {r#"
        [project]
        name = "foo"
        version = "0.0.0"
        dependencies = [
          "tqdm",
          "anyio==4.3.0",
        ]

        [tool.uv.sources]
        anyio = { url = "https://files.pythonhosted.org/packages/14/fd/2f20c40b45e4fb4324834aea24bd4afdf1143390242c0b33774da0e2e34f/anyio-4.3.0-py3-none-any.whl" }
    "#})?;

    uv_snapshot!(context.filters(), context.pip_compile()
        .arg("pyproject.toml"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] pyproject.toml
    anyio @ https://files.pythonhosted.org/packages/14/fd/2f20c40b45e4fb4324834aea24bd4afdf1143390242c0b33774da0e2e34f/anyio-4.3.0-py3-none-any.whl
        # via foo (pyproject.toml)
    idna==3.6
        # via anyio
    sniffio==1.3.1
        # via anyio
    tqdm==4.66.2
        # via foo (pyproject.toml)

    ----- stderr -----
    Resolved 4 packages in [TIME]
    "###);

    Ok(())
}

/// Ensure that no missing-constraint warning is emitted when the project defines no
/// `[tool.uv.sources]` table — the same dependencies as the companion test above, but
/// resolved purely from the registry.
#[test]
fn dont_warn_missing_constraint_without_sources() -> Result<()> {
    let context = TestContext::new("3.12");

    let pyproject_toml = context.temp_dir.child("pyproject.toml");
    pyproject_toml.write_str(indoc! {r#"
        [project]
        name = "foo"
        version = "0.0.0"
        dependencies = [
          "tqdm",
          "anyio==4.3.0",
        ]
    "#})?;

    uv_snapshot!(context.filters(), context.pip_compile()
        .arg("pyproject.toml"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] pyproject.toml
    anyio==4.3.0
        # via foo (pyproject.toml)
    idna==3.6
        # via anyio
    sniffio==1.3.1
        # via anyio
    tqdm==4.66.2
        # via foo (pyproject.toml)

    ----- stderr -----
    Resolved 4 packages in [TIME]
    "###);

    Ok(())
}

/// Exercise `[tool.uv.sources]` end-to-end: URL, Git, and editable-path sources, plus
/// optional-dependency extras. Only the `utils` extra is enabled, so the broken
/// `dont_install_me` extra must not be resolved.
#[test]
#[cfg(feature = "git")]
fn tool_uv_sources() -> Result<()> {
    let context = TestContext::new("3.12");
    // Use a subdir to test path normalization.
    let require_path = "some_dir/pyproject.toml";
    let pyproject_toml = context.temp_dir.child(require_path);
    pyproject_toml.write_str(indoc! {r#"
        [project]
        name = "project"
        version = "0.0.0"
        dependencies = [
          "tqdm>4,<=5",
          "packaging @ git+https://github.com/pypa/packaging@32deafe8668a2130a3366b98154914d188f3718e",
          "poetry_editable",
          "urllib3 @ https://files.pythonhosted.org/packages/a2/73/a68704750a7679d0b6d3ad7aa8d4da8e14e151ae82e6fee774e6e0d05ec8/urllib3-2.2.1-py3-none-any.whl",
        ]

        [project.optional-dependencies]
        utils = [
            "boltons==24.0.0"
        ]
        dont_install_me = [
            "broken @ https://example.org/does/not/exist.tar.gz"
        ]

        [tool.uv.sources]
        tqdm = { url = "https://files.pythonhosted.org/packages/a5/d6/502a859bac4ad5e274255576cd3e15ca273cdb91731bc39fb840dd422ee9/tqdm-4.66.0-py3-none-any.whl" }
        boltons = { git = "https://github.com/mahmoud/boltons", rev = "57fbaa9b673ed85b32458b31baeeae230520e4a0" }
        poetry_editable = { path = "../poetry_editable", editable = true }
    "#})?;

    // Copy the `poetry_editable` fixture from the repository's `scripts/packages` tree
    // into the temp dir so the relative `../poetry_editable` source resolves.
    let project_root = fs_err::canonicalize(current_dir()?.join("../.."))?;
    fs_err::create_dir_all(context.temp_dir.join("poetry_editable/poetry_editable"))?;
    fs_err::copy(
        project_root.join("scripts/packages/poetry_editable/pyproject.toml"),
        context.temp_dir.join("poetry_editable/pyproject.toml"),
    )?;
    fs_err::copy(
        project_root.join("scripts/packages/poetry_editable/poetry_editable/__init__.py"),
        context
            .temp_dir
            .join("poetry_editable/poetry_editable/__init__.py"),
    )?;

    // Install the editable packages.
    uv_snapshot!(context.filters(), context.pip_compile()
        .arg(require_path)
        .arg("--extra")
        .arg("utils"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] some_dir/pyproject.toml --extra utils
    -e ../poetry_editable
        # via project (some_dir/pyproject.toml)
    anyio==4.3.0
        # via poetry-editable
    boltons @ git+https://github.com/mahmoud/boltons@57fbaa9b673ed85b32458b31baeeae230520e4a0
        # via project (some_dir/pyproject.toml)
    idna==3.6
        # via anyio
    packaging @ git+https://github.com/pypa/packaging@32deafe8668a2130a3366b98154914d188f3718e
        # via project (some_dir/pyproject.toml)
    sniffio==1.3.1
        # via anyio
    tqdm @ https://files.pythonhosted.org/packages/a5/d6/502a859bac4ad5e274255576cd3e15ca273cdb91731bc39fb840dd422ee9/tqdm-4.66.0-py3-none-any.whl
        # via project (some_dir/pyproject.toml)
    urllib3 @ https://files.pythonhosted.org/packages/a2/73/a68704750a7679d0b6d3ad7aa8d4da8e14e151ae82e6fee774e6e0d05ec8/urllib3-2.2.1-py3-none-any.whl
        # via project (some_dir/pyproject.toml)

    ----- stderr -----
    Resolved 8 packages in [TIME]
    "###
    );

    Ok(())
}

/// Reject unsupported file extensions in direct URLs, both as a PEP 508 dependency URL
/// and as a `[tool.uv.sources]` URL — the two paths produce different error contexts.
#[test]
fn invalid_tool_uv_sources() -> Result<()> {
    let context = TestContext::new("3.12");

    // Write an invalid extension on a PEP 508 URL.
    let pyproject_toml = context.temp_dir.child("pyproject.toml");
    pyproject_toml.write_str(indoc! {r#"
        [project]
        name = "project"
        version = "0.0.0"
        dependencies = [
          "urllib3 @ https://files.pythonhosted.org/packages/a2/73/a68704750a7679d0b6d3ad7aa8d4da8e14e151ae82e6fee774e6e0d05ec8/urllib3-2.2.1-py3-none-any.tar.baz",
        ]
    "#})?;

    uv_snapshot!(context.filters(), context.pip_compile()
        .arg(context.temp_dir.join("pyproject.toml")), @r###"
    success: false
    exit_code: 2
    ----- stdout -----

    ----- stderr -----
    error: Failed to parse metadata from built wheel
      Caused by: Expected direct URL (`https://files.pythonhosted.org/packages/a2/73/a68704750a7679d0b6d3ad7aa8d4da8e14e151ae82e6fee774e6e0d05ec8/urllib3-2.2.1-py3-none-any.tar.baz`) to end in a supported file extension: `.whl`, `.tar.gz`, `.zip`, `.tar.bz2`, `.tar.lz`, `.tar.lzma`, `.tar.xz`, `.tar.zst`, `.tar`, `.tbz`, `.tgz`, `.tlz`, or `.txz`
    urllib3 @ https://files.pythonhosted.org/packages/a2/73/a68704750a7679d0b6d3ad7aa8d4da8e14e151ae82e6fee774e6e0d05ec8/urllib3-2.2.1-py3-none-any.tar.baz
              ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
    "###
    );

    // Write an invalid extension on a source.
    let pyproject_toml = context.temp_dir.child("pyproject.toml");
    pyproject_toml.write_str(indoc! {r#"
        [project]
        name = "project"
        version = "0.0.0"
        dependencies = [
          "urllib3",
        ]

        [tool.uv.sources]
        urllib3 = { url = "https://files.pythonhosted.org/packages/a2/73/a68704750a7679d0b6d3ad7aa8d4da8e14e151ae82e6fee774e6e0d05ec8/urllib3-2.2.1-py3-none-any.tar.baz" }
    "#})?;

    uv_snapshot!(context.filters(), context.pip_compile()
        .arg(context.temp_dir.join("pyproject.toml")), @r###"
    success: false
    exit_code: 2
    ----- stdout -----

    ----- stderr -----
    error: Failed to parse entry: `urllib3`
      Caused by: Expected direct URL (`https://files.pythonhosted.org/packages/a2/73/a68704750a7679d0b6d3ad7aa8d4da8e14e151ae82e6fee774e6e0d05ec8/urllib3-2.2.1-py3-none-any.tar.baz`) to end in a supported file extension: `.whl`, `.tar.gz`, `.zip`, `.tar.bz2`, `.tar.lz`, `.tar.lzma`, `.tar.xz`, `.tar.zst`, `.tar`, `.tbz`, `.tgz`, `.tlz`, or `.txz`
    "###
    );

    Ok(())
}

/// Check that a dynamic `pyproject.toml` is supported as a compile input file.
///
/// `dependencies` is declared dynamic, so uv must build the project (via hatchling)
/// to discover that the dependency list is empty.
#[test]
fn dynamic_pyproject_toml() -> Result<()> {
    let context = TestContext::new("3.12");
    let pyproject_toml = context.temp_dir.child("pyproject.toml");
    pyproject_toml.write_str(indoc! {r#"
        [project]
        name = "bird-feeder"
        version = "1.0.0"
        dynamic = ["dependencies"]

        [build-system]
        requires = ["hatchling"]
        build-backend = "hatchling.build"
    "#})?;
    // A minimal package body so the hatchling build succeeds.
    let bird_feeder = context.temp_dir.child("bird_feeder/__init__.py");
    bird_feeder.write_str("__all__= []")?;

    uv_snapshot!(context.filters(), context.pip_compile().arg("pyproject.toml"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] pyproject.toml

    ----- stderr -----
    Resolved in [TIME]
    "###);

    Ok(())
}

/// Accept `file://` URLs as installation sources.
///
/// The requirements filename contains a space, exercising percent-encoding in the
/// `file://` URL (`requirements%20file.txt`).
#[test]
fn file_url() -> Result<()> {
    let context = TestContext::new("3.12");

    let requirements_txt = context.temp_dir.child("requirements file.txt");
    requirements_txt.write_str("iniconfig")?;

    let url = Url::from_file_path(requirements_txt.simple_canonicalize()?).expect("valid file URL");

    uv_snapshot!(context.filters(), context.pip_compile().arg(url.to_string()), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] file://[TEMP_DIR]/requirements%20file.txt
    iniconfig==2.0.0
        # via -r requirements file.txt

    ----- stderr -----
    Resolved 1 package in [TIME]
    "###);

    Ok(())
}

/// Allow `--no-binary` to override `--only-binary`, to allow select source distributions.
///
/// First run: `--only-binary :all:` alone must fail, since `source-distribution` ships
/// no wheels. Second run: adding `--no-binary source-distribution` carves out an
/// exception and the resolution succeeds.
#[test]
fn no_binary_only_binary() -> Result<()> {
    // Pin the package index view to a fixed date so the available versions are stable.
    let context = TestContext::new("3.12").with_exclude_newer("2025-01-29T00:00:00Z");
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str("source-distribution<=0.0.1")?;

    uv_snapshot!(context
        .pip_compile()
        .arg("requirements.in")
        .arg("--only-binary")
        .arg(":all:"), @r###"
    success: false
    exit_code: 1
    ----- stdout -----

    ----- stderr -----
      × No solution found when resolving dependencies:
      ╰─▶ Because only source-distribution>=0.0.1 is available and source-distribution==0.0.1 has no usable wheels, we can conclude that source-distribution<=0.0.1 cannot be used.
          And because you require source-distribution<=0.0.1, we can conclude that your requirements are unsatisfiable.

          hint: Wheels are required for `source-distribution` because building from source is disabled for all packages (i.e., with `--no-build`)
    "###
    );

    uv_snapshot!(context
        .pip_compile()
        .arg("requirements.in")
        .arg("--only-binary")
        .arg(":all:")
        .arg("--no-binary")
        .arg("source-distribution"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.in --only-binary :all: --no-binary source-distribution
    source-distribution==0.0.1
        # via -r requirements.in

    ----- stderr -----
    Resolved 1 package in [TIME]
    "###
    );

    Ok(())
}

/// `gunicorn` only depends on `eventlet` via an extra, so the resolution should succeed despite
/// the nonsensical extra.
///
/// The constraint pins `eventlet` to a version that doesn't exist, but since `eventlet`
/// is never pulled into the resolution, the constraint must be ignored.
#[test]
fn ignore_invalid_constraint() -> Result<()> {
    let context = TestContext::new("3.12");

    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str("gunicorn>=20")?;

    let constraints_txt = context.temp_dir.child("constraints.txt");
    constraints_txt.write_str("eventlet==9999.0.1.2.3.4.5")?;

    uv_snapshot!(context
        .pip_compile()
        .arg("requirements.in")
        .arg("-c")
        .arg("constraints.txt"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.in -c constraints.txt
    gunicorn==21.2.0
        # via -r requirements.in
    packaging==24.0
        # via gunicorn

    ----- stderr -----
    Resolved 2 packages in [TIME]
    "###
    );

    Ok(())
}

/// Include a `build_constraints.txt` file with an incompatible constraint.
///
/// Building `requests==1.2` from source requires `setuptools>=40.8.0`; constraining
/// the build environment to `setuptools==1` must make the build resolution fail.
#[test]
fn incompatible_build_constraint() -> Result<()> {
    // Python 3.8 — presumably required for the legacy `setup.py`-based build of
    // requests 1.2 (NOTE(review): confirm).
    let context = TestContext::new("3.8");
    let requirements_txt = context.temp_dir.child("requirements.txt");
    requirements_txt.write_str("requests==1.2")?;

    let constraints_txt = context.temp_dir.child("build_constraints.txt");
    constraints_txt.write_str("setuptools==1")?;

    uv_snapshot!(context.pip_compile()
        .arg("requirements.txt")
        .arg("--build-constraint")
        .arg("build_constraints.txt"), @r###"
    success: false
    exit_code: 1
    ----- stdout -----

    ----- stderr -----
      × Failed to download and build `requests==1.2.0`
      ├─▶ Failed to resolve requirements from `setup.py` build
      ├─▶ No solution found when resolving: `setuptools>=40.8.0`
      ╰─▶ Because you require setuptools>=40.8.0 and setuptools==1, we can conclude that your requirements are unsatisfiable.
    "###
    );

    Ok(())
}

/// Include a `build_constraints.txt` file with a compatible constraint.
///
/// `setuptools>=40` is satisfiable alongside the build requirement `setuptools>=40.8.0`,
/// so the source build of `requests==1.2` succeeds.
#[test]
fn compatible_build_constraint() -> Result<()> {
    let context = TestContext::new("3.8");
    let requirements_txt = context.temp_dir.child("requirements.txt");
    requirements_txt.write_str("requests==1.2")?;

    let constraints_txt = context.temp_dir.child("build_constraints.txt");
    constraints_txt.write_str("setuptools>=40")?;

    uv_snapshot!(context.pip_compile()
        .arg("requirements.txt")
        .arg("--build-constraint")
        .arg("build_constraints.txt"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.txt --build-constraint build_constraints.txt
    requests==1.2.0
        # via -r requirements.txt

    ----- stderr -----
    Resolved 1 package in [TIME]
    "###
    );

    Ok(())
}

/// Include `build-constraint-dependencies` in pyproject.toml with an incompatible constraint.
///
/// Same failure mode as `incompatible_build_constraint`, but the constraint comes from
/// `[tool.uv] build-constraint-dependencies` instead of a `--build-constraint` file.
#[test]
fn incompatible_build_constraint_in_pyproject_toml() -> Result<()> {
    let context = TestContext::new("3.8");
    let pyproject_toml = context.temp_dir.child("pyproject.toml");
    pyproject_toml.write_str(
        r#"[build-system]
requires = ["setuptools"]

[project]
name = "project"
version = "0.1.0"
dependencies = [
    "requests==1.2",
]

[tool.uv]
build-constraint-dependencies = [
    "setuptools==1",
]
"#,
    )?;

    uv_snapshot!(context.pip_compile()
        .arg("pyproject.toml"), @r###"
    success: false
    exit_code: 1
    ----- stdout -----

    ----- stderr -----
      × Failed to download and build `requests==1.2.0`
      ├─▶ Failed to resolve requirements from `setup.py` build
      ├─▶ No solution found when resolving: `setuptools>=40.8.0`
      ╰─▶ Because you require setuptools>=40.8.0 and setuptools==1, we can conclude that your requirements are unsatisfiable.
    "###
    );

    Ok(())
}

/// Include `build-constraint-dependencies` in pyproject.toml with a compatible constraint.
///
/// `setuptools>=40` from `[tool.uv] build-constraint-dependencies` is satisfiable, so
/// the source build of `requests==1.2` succeeds.
#[test]
fn compatible_build_constraint_in_pyproject_toml() -> Result<()> {
    let context = TestContext::new("3.8");
    let pyproject_toml = context.temp_dir.child("pyproject.toml");
    pyproject_toml.write_str(
        r#"[build-system]
requires = ["setuptools"]

[project]
name = "project"
version = "0.1.0"
dependencies = [
    "requests==1.2",
]

[tool.uv]
build-constraint-dependencies = [
    "setuptools>=40",
]
"#,
    )?;

    uv_snapshot!(context.pip_compile()
        .arg("pyproject.toml"), @r"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] pyproject.toml
    requests==1.2.0
        # via project (pyproject.toml)

    ----- stderr -----
    Resolved 1 package in [TIME]
    "
    );

    Ok(())
}

/// Merge `build_constraints.txt` with `build-constraint-dependencies` in pyproject.toml with an incompatible constraint.
///
/// Constraints from both sources are merged: if either side pins `setuptools==1`, the
/// `setup.py` build of `requests==1.2` fails, regardless of which source carries the
/// incompatible pin.
#[test]
fn incompatible_build_constraint_merged_with_pyproject_toml() -> Result<()> {
    let context = TestContext::new("3.8");

    // incompatible setuptools version in pyproject.toml, compatible in build_constraints.txt
    let constraints_txt = context.temp_dir.child("build_constraints.txt");
    constraints_txt.write_str("setuptools>=40")?;
    let pyproject_toml = context.temp_dir.child("pyproject.toml");
    pyproject_toml.write_str(
        r#"[build-system]
requires = ["setuptools"]

[project]
name = "project"
version = "0.1.0"
dependencies = [
    "requests==1.2",
]

[tool.uv]
build-constraint-dependencies = [
    "setuptools==1",
]
"#,
    )?;

    uv_snapshot!(context.pip_compile()
        .arg("pyproject.toml")
        .arg("--build-constraint")
        .arg("build_constraints.txt"), @r###"
    success: false
    exit_code: 1
    ----- stdout -----

    ----- stderr -----
      × Failed to download and build `requests==1.2.0`
      ├─▶ Failed to resolve requirements from `setup.py` build
      ├─▶ No solution found when resolving: `setuptools>=40.8.0`
      ╰─▶ Because you require setuptools>=40 and setuptools==1, we can conclude that your requirements are unsatisfiable.
    "###
    );

    // compatible setuptools version in pyproject.toml, incompatible in build_constraints.txt
    let constraints_txt = context.temp_dir.child("build_constraints.txt");
    constraints_txt.write_str("setuptools==1")?;

    let pyproject_toml = context.temp_dir.child("pyproject.toml");
    pyproject_toml.write_str(
        r#"[build-system]
requires = ["setuptools"]

[project]
name = "project"
version = "0.1.0"
dependencies = [
    "requests==1.2",
]

[tool.uv]
build-constraint-dependencies = [
    "setuptools>=40",
]
"#,
    )?;

    uv_snapshot!(context.pip_compile()
        .arg("pyproject.toml")
        .arg("--build-constraint")
        .arg("build_constraints.txt"), @r###"
    success: false
    exit_code: 1
    ----- stdout -----

    ----- stderr -----
      × Failed to download and build `requests==1.2.0`
      ├─▶ Failed to resolve requirements from `setup.py` build
      ├─▶ No solution found when resolving: `setuptools>=40.8.0`
      ╰─▶ Because you require setuptools==1 and setuptools>=40, we can conclude that your requirements are unsatisfiable.
    "###
    );

    Ok(())
}

/// Merge CLI args `build_constraints.txt` with `build-constraint-dependencies` in pyproject.toml with a compatible constraint.
///
/// Both scenarios below use mutually compatible constraints (`setuptools>=1` and
/// `setuptools>=40`, in either order), so the merged constraint set is satisfiable and
/// the build succeeds either way.
#[test]
fn compatible_build_constraint_merged_with_pyproject_toml() -> Result<()> {
    let context = TestContext::new("3.8");

    // compatible setuptools constraints in both sources: pyproject.toml has `>=1`,
    // build_constraints.txt has `>=40`
    let constraints_txt = context.temp_dir.child("build_constraints.txt");
    constraints_txt.write_str("setuptools>=40")?;
    let pyproject_toml = context.temp_dir.child("pyproject.toml");
    pyproject_toml.write_str(
        r#"[build-system]
requires = ["setuptools"]

[project]
name = "project"
version = "0.1.0"
dependencies = [
    "requests==1.2",
]

[tool.uv]
build-constraint-dependencies = [
    "setuptools>=1",
]
"#,
    )?;

    uv_snapshot!(context.pip_compile()
        .arg("pyproject.toml")
        .arg("--build-constraint")
        .arg("build_constraints.txt"), @r"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] pyproject.toml --build-constraint build_constraints.txt
    requests==1.2.0
        # via project (pyproject.toml)

    ----- stderr -----
    Resolved 1 package in [TIME]
    "
    );

    // swapped: pyproject.toml has `>=40`, build_constraints.txt has `>=1` — still compatible
    let constraints_txt = context.temp_dir.child("build_constraints.txt");
    constraints_txt.write_str("setuptools>=1")?;

    let pyproject_toml = context.temp_dir.child("pyproject.toml");
    pyproject_toml.write_str(
        r#"[build-system]
requires = ["setuptools"]

[project]
name = "project"
version = "0.1.0"
dependencies = [
    "requests==1.2",
]

[tool.uv]
build-constraint-dependencies = [
    "setuptools>=40",
]
"#,
    )?;

    uv_snapshot!(context.pip_compile()
        .arg("pyproject.toml")
        .arg("--build-constraint")
        .arg("build_constraints.txt"), @r"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] pyproject.toml --build-constraint build_constraints.txt
    requests==1.2.0
        # via project (pyproject.toml)

    ----- stderr -----
    Resolved 1 package in [TIME]
    "
    );

    Ok(())
}

/// Ensure that we treat invalid extra markers as `false`, i.e., in projects that define
/// non-spec-compliant extras.
///
/// The `setup.py` below declares an extra named `_anyio`, whose leading underscore is not
/// a valid extra name per the packaging spec.
#[test]
fn invalid_extra() -> Result<()> {
    let context = TestContext::new("3.12");

    let setup_py = context.temp_dir.child("setup.py");
    setup_py.write_str(indoc! {r#"
        from setuptools import setup

        extras_require = {
            "_anyio": ["anyio"],
            "config": ["jsonschema>=2.6.0"],
            "encryption": ["iniconfig"],
        }

        setup(name="project", install_requires=[], extras_require=extras_require)
    "#})?;

    // Compile with the `encryption` extra. `anyio` should be omitted.
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str(".[encryption]")?;

    uv_snapshot!(context.filters(), context.pip_compile()
        .arg("requirements.in"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.in
    iniconfig==2.0.0
        # via project
    .
        # via -r requirements.in

    ----- stderr -----
    Resolved 2 packages in [TIME]
    "###);

    // Request the `_anyio` extra. We should reject it: the name fails to parse.
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str(".[_anyio]")?;

    uv_snapshot!(context.filters(), context.pip_compile()
        .arg("requirements.in"), @r###"
    success: false
    exit_code: 2
    ----- stdout -----

    ----- stderr -----
    error: Couldn't parse requirement in `requirements.in` at position 0
      Caused by: Expected an alphanumeric character starting the extra name, found `_`
    .[_anyio]
      ^
    "###);

    // Request the `anyio` extra. It's a valid name but the project doesn't define it,
    // so the resolution succeeds with a warning rather than failing.
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str(".[anyio]")?;

    uv_snapshot!(context.filters(), context.pip_compile()
        .arg("requirements.in"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.in
    .
        # via -r requirements.in

    ----- stderr -----
    Resolved 1 package in [TIME]
    warning: The package `project @ file://[TEMP_DIR]/` does not have an extra named `anyio`
    "###);

    Ok(())
}

/// Respect symlinks of output files.
///
/// Writing the compiled output through a symlinked `--output-file` must write to the
/// symlink's target and leave the symlink itself intact (not replace it with a regular
/// file). Unix-only: symlink creation differs on Windows.
#[test]
#[cfg(not(windows))]
fn symlink() -> Result<()> {
    let context = TestContext::new("3.8");
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str("anyio")?;

    // Create an output file.
    let requirements_txt = context.temp_dir.child("requirements.txt");
    requirements_txt.write_str("anyio")?;

    // Create a symlink to the output file.
    let symlink = context.temp_dir.child("requirements-symlink.txt");
    symlink.symlink_to_file(requirements_txt.path())?;

    // Write to the symlink.
    uv_snapshot!(context.pip_compile()
        .arg("requirements.in")
        .arg("--output-file")
        .arg("requirements-symlink.txt"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.in --output-file requirements-symlink.txt
    anyio==4.3.0
        # via -r requirements.in
    exceptiongroup==1.2.0
        # via anyio
    idna==3.6
        # via anyio
    sniffio==1.3.1
        # via anyio
    typing-extensions==4.10.0
        # via anyio

    ----- stderr -----
    Resolved 5 packages in [TIME]
    "###
    );

    // The symlink should still be a symlink.
    assert!(symlink.path().symlink_metadata()?.file_type().is_symlink());

    // The destination of the symlink should be the same as the output file.
    assert_eq!(symlink.path().read_link()?, requirements_txt.path());

    Ok(())
}

/// Resolve with `--universal`, applying user-provided constraints to the space of supported
/// environments.
///
/// `tool.uv.environments` restricts the universal resolution to non-Windows platforms; every
/// resolved package accordingly carries a `sys_platform != 'win32'` marker (note the
/// `platform_system` constraint is reflected as a normalized `sys_platform` marker).
#[test]
fn universal_constrained_environment() -> Result<()> {
    let context = TestContext::new("3.12");

    let pyproject_toml = context.temp_dir.child("pyproject.toml");
    pyproject_toml.write_str(
        r#"
        [project]
        name = "project"
        version = "0.1.0"
        requires-python = ">=3.12"
        dependencies = ["black"]

        [tool.uv]
        environments = "platform_system != 'Windows'"
        "#,
    )?;

    uv_snapshot!(context.filters(), context.pip_compile()
    .arg("pyproject.toml")
    .arg("--universal"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] pyproject.toml --universal
    black==24.3.0 ; sys_platform != 'win32'
        # via project (pyproject.toml)
    click==8.1.7 ; sys_platform != 'win32'
        # via black
    mypy-extensions==1.0.0 ; sys_platform != 'win32'
        # via black
    packaging==24.0 ; sys_platform != 'win32'
        # via black
    pathspec==0.12.1 ; sys_platform != 'win32'
        # via black
    platformdirs==4.2.0 ; sys_platform != 'win32'
        # via black

    ----- stderr -----
    Resolved 6 packages in [TIME]
    "###
    );

    Ok(())
}

/// Resolve a package that has no versions that satisfy the current Python version.
///
/// `rooster-blue` requires Python >=3.11, so resolving it on Python 3.10 must fail with a
/// message that enumerates the incompatible versions.
#[test]
fn compile_enumerate_no_versions() -> Result<()> {
    let context = TestContext::new("3.10").with_exclude_newer("2024-12-01");
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str("rooster-blue")?;

    uv_snapshot!(context.filters(), context.pip_compile()
        .arg("requirements.in"),
    @r###"
    success: false
    exit_code: 1
    ----- stdout -----

    ----- stderr -----
      × No solution found when resolving dependencies:
      ╰─▶ Because the current Python version (3.10.[X]) does not satisfy Python>=3.11,<4.0 and rooster-blue<=0.0.8 depends on Python>=3.11,<4.0, we can conclude that rooster-blue<=0.0.8 cannot be used.
          And because you require rooster-blue, we can conclude that your requirements are unsatisfiable.
    "###);

    Ok(())
}

/// Resolve a version of Flask that ships a `requires.txt` file in an `egg-info` directory, but
/// otherwise doesn't include static metadata.
///
/// The Flask 1.0.4 sdist is pinned by URL; its dependencies must still be discovered and
/// resolved despite the legacy metadata format.
#[test]
fn compile_requires_txt() -> Result<()> {
    let context = TestContext::new("3.12");
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str("flask @ https://files.pythonhosted.org/packages/36/70/2234ee8842148cef44261c2cebca3a6384894bce6112b73b18693cdcc62f/Flask-1.0.4.tar.gz")?;

    uv_snapshot!(context
        .pip_compile()
        .arg("requirements.in"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.in
    click==8.1.7
        # via flask
    flask @ https://files.pythonhosted.org/packages/36/70/2234ee8842148cef44261c2cebca3a6384894bce6112b73b18693cdcc62f/Flask-1.0.4.tar.gz
        # via -r requirements.in
    itsdangerous==2.1.2
        # via flask
    jinja2==3.1.3
        # via flask
    markupsafe==2.1.5
        # via
        #   jinja2
        #   werkzeug
    werkzeug==3.0.1
        # via flask

    ----- stderr -----
    Resolved 6 packages in [TIME]
    "###);

    Ok(())
}

/// Regression test for: <https://github.com/astral-sh/uv/issues/6269>
///
/// A universal resolution targeting Python 3.10 with constraints must not emit duplicate
/// `astroid` entries: the output should contain exactly one `astroid` (and one `pylint`)
/// per Python-version fork, each guarded by a `python_full_version` marker.
#[test]
fn astroid_not_repeated() -> Result<()> {
    let context = TestContext::new("3.12");

    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str(
        "\
alembic==1.8.1
ipython>=8.4.0
pylint>=2.14.5
    ",
    )?;

    let constraints_txt = context.temp_dir.child("constraints.txt");
    constraints_txt.write_str(
        "\
dill==0.3.1.1
exceptiongroup==1.0.0rc8
    ",
    )?;

    let filters: Vec<_> = [
        // 3.10 may not be installed
        (
            "warning: The requested Python version 3.10 is not available; .* will be used to build dependencies instead.\n",
            "",
        ),
    ]
        .into_iter()
        .chain(context.filters())
        .collect();

    uv_snapshot!(
        filters,
        context
            .pip_compile()
            .arg("requirements.in")
            .arg("-c").arg("constraints.txt")
            .arg("--universal")
            .arg("-p").arg("3.10"),
        @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.in -c constraints.txt --universal -p 3.10
    alembic==1.8.1
        # via -r requirements.in
    astroid==2.13.5 ; python_full_version >= '3.11'
        # via pylint
    astroid==3.1.0 ; python_full_version < '3.11'
        # via pylint
    asttokens==2.4.1
        # via stack-data
    colorama==0.4.6 ; sys_platform == 'win32'
        # via
        #   ipython
        #   pylint
    decorator==5.1.1
        # via ipython
    dill==0.3.1.1
        # via
        #   -c constraints.txt
        #   pylint
    exceptiongroup==1.0.0rc8 ; python_full_version < '3.11'
        # via
        #   -c constraints.txt
        #   ipython
    executing==2.0.1
        # via stack-data
    greenlet==3.0.3 ; platform_machine == 'AMD64' or platform_machine == 'WIN32' or platform_machine == 'aarch64' or platform_machine == 'amd64' or platform_machine == 'ppc64le' or platform_machine == 'win32' or platform_machine == 'x86_64'
        # via sqlalchemy
    ipython==8.22.2
        # via -r requirements.in
    isort==5.13.2
        # via pylint
    jedi==0.19.1
        # via ipython
    lazy-object-proxy==1.10.0 ; python_full_version >= '3.11'
        # via astroid
    mako==1.3.2
        # via alembic
    markupsafe==2.1.5
        # via mako
    matplotlib-inline==0.1.6
        # via ipython
    mccabe==0.7.0
        # via pylint
    parso==0.8.3
        # via jedi
    pexpect==4.9.0 ; sys_platform != 'emscripten' and sys_platform != 'win32'
        # via ipython
    platformdirs==4.2.0
        # via pylint
    prompt-toolkit==3.0.43
        # via ipython
    ptyprocess==0.7.0 ; sys_platform != 'emscripten' and sys_platform != 'win32'
        # via pexpect
    pure-eval==0.2.2
        # via stack-data
    pygments==2.17.2
        # via ipython
    pylint==2.15.8 ; python_full_version >= '3.11'
        # via -r requirements.in
    pylint==3.1.0 ; python_full_version < '3.11'
        # via -r requirements.in
    six==1.16.0
        # via asttokens
    sqlalchemy==2.0.29
        # via alembic
    stack-data==0.6.3
        # via ipython
    tomli==2.0.1 ; python_full_version < '3.11'
        # via pylint
    tomlkit==0.12.4
        # via pylint
    traitlets==5.14.2
        # via
        #   ipython
        #   matplotlib-inline
    typing-extensions==4.10.0
        # via
        #   astroid
        #   sqlalchemy
    wcwidth==0.2.13
        # via prompt-toolkit
    wrapt==1.16.0 ; python_full_version >= '3.11'
        # via astroid

    ----- stderr -----
    Resolved 36 packages in [TIME]
    "###);

    Ok(())
}

/// Regression test for: <https://github.com/astral-sh/uv/issues/6412>
///
/// In a universal resolution targeting Python 3.8, `tomli` (a conditional dependency of
/// `coverage[toml]`) must appear with a `python_full_version <= '3.11'` marker rather than
/// being dropped or duplicated.
#[test]
fn tomli_less_than_python311() -> Result<()> {
    let context = TestContext::new("3.12");

    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str(
        "\
coverage[toml]
pandas
matplotlib
    ",
    )?;

    let filters: Vec<_> = [
        // 3.8 may not be installed
        (
            "warning: The requested Python version 3.8 is not available; .* will be used to build dependencies instead.\n",
            "",
        ),
        // For Windows, `tzdata` isn't included in the resolution.
        //
        // This should probably be investigated. It is possible this
        // is a correct/expected result. For example, if there is a
        // dependency that is sdist-only and has dynamic platform
        // dependent dependencies. But if not, `tzdata` should still
        // show up in the lock file.
        //
        // In any case, we filter `tzdata` out of the snapshot entirely
        // on all platforms for this reason.
        (r"( ?[-+~] ?)?tzdata==\d+(\.\d+)+(\s+[-+~]?\s+# via .*)?\n", ""),
        // And because tzdata is omitted on Windows, the number of deps
        // is different too. So filter that out too.
        (r"Resolved 19 packages", "Resolved [NUM] packages"),
    ]
        .into_iter()
        .chain(context.filters())
        .collect();

    uv_snapshot!(
        filters,
        context
            .pip_compile()
            .arg("requirements.in")
            .arg("--universal")
            .arg("-p").arg("3.8"),
        @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.in --universal -p 3.8
    contourpy==1.1.1 ; python_full_version < '3.9'
        # via matplotlib
    contourpy==1.2.0 ; python_full_version >= '3.9'
        # via matplotlib
    coverage==7.4.4
        # via -r requirements.in
    cycler==0.12.1
        # via matplotlib
    fonttools==4.50.0
        # via matplotlib
    importlib-resources==6.4.0 ; python_full_version < '3.10'
        # via matplotlib
    kiwisolver==1.4.5
        # via matplotlib
    matplotlib==3.7.5 ; python_full_version < '3.9'
        # via -r requirements.in
    matplotlib==3.8.3 ; python_full_version >= '3.9'
        # via -r requirements.in
    numpy==1.24.4 ; python_full_version < '3.9'
        # via
        #   contourpy
        #   matplotlib
        #   pandas
    numpy==1.26.4 ; python_full_version >= '3.9'
        # via
        #   contourpy
        #   matplotlib
        #   pandas
    packaging==24.0
        # via matplotlib
    pandas==2.0.3 ; python_full_version < '3.9'
        # via -r requirements.in
    pandas==2.2.1 ; python_full_version >= '3.9'
        # via -r requirements.in
    pillow==10.2.0
        # via matplotlib
    pyparsing==3.1.2
        # via matplotlib
    python-dateutil==2.9.0.post0
        # via
        #   matplotlib
        #   pandas
    pytz==2024.1
        # via pandas
    six==1.16.0
        # via python-dateutil
    tomli==2.0.1 ; python_full_version <= '3.11'
        # via coverage
    zipp==3.18.1 ; python_full_version < '3.10'
        # via importlib-resources

    ----- stderr -----
    Resolved 22 packages in [TIME]
    "###);

    Ok(())
}

/// Regression test for: <https://github.com/astral-sh/uv/issues/6836>
///
/// With `environments` in `uv.toml` splitting the supported Python range into three
/// segments, a universal resolution of `build` must not repeat `importlib-metadata`
/// within a segment: each version appears once, guarded by a `python_full_version` marker.
#[test]
fn importlib_metadata_not_repeated() -> Result<()> {
    let context = TestContext::new("3.12");

    let uv_toml = context.temp_dir.child("uv.toml");
    uv_toml.write_str(r#"environments = ["python_version >= '3.10'", "python_version >= '3.8' and python_version < '3.10'", "python_version < '3.8'"]"#)?;
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str("build")?;

    let filters: Vec<_> = [
        // 3.7 may not be installed
        (
            "warning: The requested Python version 3.7 is not available; .* will be used to build dependencies instead.\n",
            "",
        ),
    ]
        .into_iter()
        .chain(context.filters())
        .collect();

    uv_snapshot!(
        filters,
        context
            .pip_compile()
            .arg("requirements.in")
            .arg("--universal")
            .arg("-p").arg("3.7"),
        @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.in --universal -p 3.7
    build==1.1.1
        # via -r requirements.in
    colorama==0.4.6 ; os_name == 'nt'
        # via build
    importlib-metadata==6.7.0 ; python_full_version < '3.8'
        # via build
    importlib-metadata==7.1.0 ; python_full_version >= '3.8' and python_full_version < '3.10.2'
        # via build
    packaging==24.0
        # via build
    pyproject-hooks==1.0.0
        # via build
    tomli==2.0.1 ; python_full_version < '3.11'
        # via
        #   build
        #   pyproject-hooks
    typing-extensions==4.7.1 ; python_full_version < '3.8'
        # via importlib-metadata
    zipp==3.15.0 ; python_full_version < '3.8'
        # via importlib-metadata
    zipp==3.18.1 ; python_full_version >= '3.8' and python_full_version < '3.10.2'
        # via importlib-metadata

    ----- stderr -----
    Resolved 10 packages in [TIME]
    "###);

    Ok(())
}

/// Regression test for: <https://github.com/astral-sh/uv/issues/6836>
///
/// A requirement gated on `python_version >= '3.8'` resolved with `-p 3.7` should prune the
/// unreachable `< 3.8` branch: the output contains only the marker-guarded entry, and no
/// resolution is attempted for the excluded range.
#[test]
fn prune_unreachable() -> Result<()> {
    let context = TestContext::new("3.12");

    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str("argcomplete ; python_version >= '3.8'")?;

    let filters: Vec<_> = [
        // 3.7 may not be installed
        (
            "warning: The requested Python version 3.7 is not available; .* will be used to build dependencies instead.\n",
            "",
        ),
    ]
        .into_iter()
        .chain(context.filters())
        .collect();

    uv_snapshot!(
        filters,
        context
            .pip_compile()
            .arg("requirements.in")
            .arg("--universal")
            .arg("-p")
            .arg("3.7"),
        @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.in --universal -p 3.7
    argcomplete==3.2.3 ; python_full_version >= '3.8'
        # via -r requirements.in

    ----- stderr -----
    Resolved 1 package in [TIME]
    "###);

    Ok(())
}

/// Allow resolving a package that requires a Python version that is not available, as long as it
/// includes static metadata.
///
/// The requirement is gated on `python_version >= '3.13'` while the interpreter is 3.11; with
/// static metadata no build is needed, so the universal resolution still succeeds.
///
/// See: <https://github.com/astral-sh/uv/issues/8767>
#[test]
fn unsupported_requires_python_static_metadata() -> Result<()> {
    let context = TestContext::new("3.11").with_exclude_newer("2024-11-04T00:00:00Z");
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str("interpreters-pep-734 <= 0.4.1 ; python_version >= '3.13'")?;

    uv_snapshot!(context.filters(), context
        .pip_compile()
        .arg("--universal")
        .arg("requirements.in"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] --universal requirements.in
    interpreters-pep-734==0.4.1 ; python_full_version >= '3.13'
        # via -r requirements.in

    ----- stderr -----
    Resolved 1 package in [TIME]
    "###);

    Ok(())
}

/// Disallow resolving a package that requires a Python version that is not available, if it uses
/// dynamic metadata.
///
/// Generating metadata for the sdist requires Python >=3.10 but only 3.8 is installed, so the
/// resolution fails with a hint explaining the missing static metadata.
///
/// See: <https://github.com/astral-sh/uv/issues/8767>
#[test]
fn unsupported_requires_python_dynamic_metadata() -> Result<()> {
    let context = TestContext::new("3.8").with_exclude_newer("2024-11-04T00:00:00Z");
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str("source-distribution==0.0.3 ; python_version >= '3.10'")?;

    uv_snapshot!(context.filters(), context
        .pip_compile()
        .arg("--universal")
        .arg("requirements.in"), @r###"
    success: false
    exit_code: 1
    ----- stdout -----

    ----- stderr -----
      × No solution found when resolving dependencies for split (python_full_version >= '3.10'):
      ╰─▶ Because source-distribution==0.0.3 requires Python >=3.10 and you require source-distribution{python_full_version >= '3.10'}==0.0.3, we can conclude that your requirements are unsatisfiable.

          hint: The source distribution for `source-distribution` (v0.0.3) does not include static metadata. Generating metadata for this package requires Python >=3.10, but Python 3.8.[X] is installed.
    "###);

    Ok(())
}

/// Excluding a specific prerelease (`!=2.0.0rc1`) must not make the resolver opt into
/// prereleases: `flask<2.0.1, !=2.0.0rc1` resolves to the stable `2.0.0`.
#[test]
fn negation_not_imply_prerelease() -> Result<()> {
    let context = TestContext::new("3.12");

    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str("flask<2.0.1, !=2.0.0rc1")?;
    uv_snapshot!(context
        .pip_compile()
        .arg("requirements.in"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.in
    click==8.1.7
        # via flask
    flask==2.0.0
        # via -r requirements.in
    itsdangerous==2.1.2
        # via flask
    jinja2==3.1.3
        # via flask
    markupsafe==2.1.5
        # via
        #   jinja2
        #   werkzeug
    werkzeug==3.0.1
        # via flask

    ----- stderr -----
    Resolved 6 packages in [TIME]
    "###);

    Ok(())
}

/// `--resolution lowest-direct` with a universal resolution that forks on
/// `python_version >= '3.12'`: the direct `setuptools` requirement takes its lowest version
/// (50.0.0) in the `>= 3.12` fork, while the other fork resolves it as a transitive dependency.
#[test]
fn lowest_direct_fork_min_python() -> Result<()> {
    let context = TestContext::new("3.10");
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str(indoc::indoc! {r"
        pycountry >= 22.1.10
        setuptools >= 50.0.0 ; python_version >= '3.12'
    "})?;

    uv_snapshot!(context.filters(), windows_filters=false, context.pip_compile()
            .arg("requirements.in")
            .arg("--universal")
            .arg("--resolution")
            .arg("lowest-direct"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.in --universal --resolution lowest-direct
    pycountry==22.1.10
        # via -r requirements.in
    setuptools==50.0.0 ; python_full_version >= '3.12'
        # via
        #   -r requirements.in
        #   pycountry
    setuptools==69.2.0 ; python_full_version < '3.12'
        # via
        #   -r requirements.in
        #   pycountry

    ----- stderr -----
    Resolved 3 packages in [TIME]
    "###
    );

    Ok(())
}

/// `--resolution lowest` with a universal resolution that forks on
/// `python_version >= '3.12'`: both direct and transitive requirements take their lowest
/// compatible versions, yielding a different `idna` per fork.
#[test]
fn lowest_fork_min_python() -> Result<()> {
    let context = TestContext::new("3.10");
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str(indoc::indoc! {r"
        anyio >= 3.0.0
        idna >= 3.0.0 ; python_version >= '3.12'
    "})?;

    uv_snapshot!(context.filters(), windows_filters=false, context.pip_compile()
            .arg("requirements.in")
            .arg("--universal")
            .arg("--resolution")
            .arg("lowest"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.in --universal --resolution lowest
    anyio==3.0.0
        # via -r requirements.in
    idna==2.8 ; python_full_version < '3.12'
        # via
        #   -r requirements.in
        #   anyio
    idna==3.0 ; python_full_version >= '3.12'
        # via
        #   -r requirements.in
        #   anyio
    sniffio==1.1.0
        # via anyio

    ----- stderr -----
    Resolved 4 packages in [TIME]
    "###
    );

    Ok(())
}

/// `--resolution lowest-direct` with a requirement gated on `python_version < '3.12'`:
/// the forks collapse to a single unconditional `setuptools==50.0.0` entry (no marker in
/// the output).
#[test]
fn lowest_direct_fork_max_python() -> Result<()> {
    let context = TestContext::new("3.10");
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str(indoc::indoc! {r"
        pycountry >= 22.1.10
        setuptools >= 50.0.0 ; python_version < '3.12'
    "})?;

    uv_snapshot!(context.filters(), windows_filters=false, context.pip_compile()
            .arg("requirements.in")
            .arg("--universal")
            .arg("--resolution")
            .arg("lowest-direct"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.in --universal --resolution lowest-direct
    pycountry==22.1.10
        # via -r requirements.in
    setuptools==50.0.0
        # via
        #   -r requirements.in
        #   pycountry

    ----- stderr -----
    Resolved 2 packages in [TIME]
    "###
    );

    Ok(())
}

/// `--resolution lowest` variant of [`lowest_direct_fork_max_python`]: same inputs, same
/// expected collapse to a single unconditional `setuptools==50.0.0` entry.
#[test]
fn lowest_fork_max_python() -> Result<()> {
    let context = TestContext::new("3.10");
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str(indoc::indoc! {r"
        pycountry >= 22.1.10
        setuptools >= 50.0.0 ; python_version < '3.12'
    "})?;

    uv_snapshot!(context.filters(), windows_filters=false, context.pip_compile()
            .arg("requirements.in")
            .arg("--universal")
            .arg("--resolution")
            .arg("lowest"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.in --universal --resolution lowest
    pycountry==22.1.10
        # via -r requirements.in
    setuptools==50.0.0
        # via
        #   -r requirements.in
        #   pycountry

    ----- stderr -----
    Resolved 2 packages in [TIME]
    "###
    );

    Ok(())
}

/// See: <https://github.com/astral-sh/uv/issues/8922>
#[test]
fn same_version_multi_index_incompatibility() -> Result<()> {
    let context = TestContext::new("3.10");
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str("cffi==1.15.1")?;

    // `cffi` is present on Test PyPI, but only as a single wheel: `cffi-1.15.1-cp311-cp311-win_arm64.whl`.
    // If we don't check PyPI for the same version, we'll fail.
    uv_snapshot!(context
        .pip_compile()
        .arg("requirements.in")
        .arg("--extra-index-url")
        .arg("https://test.pypi.org/simple")
        .arg("--index-strategy")
        .arg("unsafe-best-match")
        .arg("--python-platform")
        .arg("linux")
        .arg("--python-version")
        .arg("3.10"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.in --index-strategy unsafe-best-match --python-platform linux --python-version 3.10
    cffi==1.15.1
        # via -r requirements.in
    pycparser==2.21
        # via cffi

    ----- stderr -----
    Resolved 2 packages in [TIME]
    "###);

    Ok(())
}

/// Show the derivation chain on build failure.
///
/// The root `pyproject.toml` depends on a local `child` project, which in turn
/// depends on `wsgiref` — a source distribution whose `setup.py` uses Python 2
/// `print` statements and therefore fails to build. The error output should
/// include a `help:` line explaining _why_ `wsgiref` was pulled into the
/// resolution (via `child`).
#[test]
fn compile_derivation_chain() -> Result<()> {
    let context = TestContext::new("3.12");

    // A local project that introduces the failing dependency.
    let child = context.temp_dir.child("child");
    child.child("pyproject.toml").write_str(
        r#"
        [project]
        name = "child"
        version = "0.1.0"
        requires-python = ">=3.12"
        dependencies = ["wsgiref"]
        "#,
    )?;

    // The root project points at `child` via a `file://` URL.
    let pyproject_toml = context.temp_dir.child("pyproject.toml");
    pyproject_toml.write_str(&indoc::formatdoc! {r#"
        [build-system]
        requires = ["setuptools>=42"]

        [project]
        name = "project"
        version = "0.1.0"
        dependencies = [
            "child @ {}",
        ]
    "#, Url::from_file_path(child).unwrap()})?;

    // Normalize platform-specific exit-status phrasing and the temporary build
    // source path so the snapshot is stable across platforms.
    let filters = context
        .filters()
        .into_iter()
        .chain([
            (r"exit code: 1", "exit status: 1"),
            (r"/.*/src", "/[TMP]/src"),
        ])
        .collect::<Vec<_>>();

    uv_snapshot!(filters, context.pip_compile().arg("pyproject.toml"), @r###"
    success: false
    exit_code: 1
    ----- stdout -----

    ----- stderr -----
      × Failed to build `wsgiref==0.1.2`
      ├─▶ The build backend returned an error
      ╰─▶ Call to `setuptools.build_meta:__legacy__.build_wheel` failed (exit status: 1)

          [stderr]
          Traceback (most recent call last):
            File "<string>", line 14, in <module>
            File "[CACHE_DIR]/builds-v0/[TMP]/build_meta.py", line 325, in get_requires_for_build_wheel
              return self._get_build_requires(config_settings, requirements=['wheel'])
                     ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
            File "[CACHE_DIR]/builds-v0/[TMP]/build_meta.py", line 295, in _get_build_requires
              self.run_setup()
            File "[CACHE_DIR]/builds-v0/[TMP]/build_meta.py", line 487, in run_setup
              super().run_setup(setup_script=setup_script)
            File "[CACHE_DIR]/builds-v0/[TMP]/build_meta.py", line 311, in run_setup
              exec(code, locals())
            File "<string>", line 5, in <module>
            File "[CACHE_DIR]/[TMP]/src/ez_setup/__init__.py", line 170
              print "Setuptools version",version,"or greater has been installed."
              ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
          SyntaxError: Missing parentheses in call to 'print'. Did you mean print(...)?

          hint: This usually indicates a problem with the package or the build environment.
      help: `wsgiref` (v0.1.2) was included because `child` (v0.1.0) depends on `wsgiref`
    "###
    );

    Ok(())
}

/// Surface helpful hints when no wheel matches the requested platform.
///
/// Resolving `open3d` for `--python-platform linux` on CPython 3.10 fails:
/// older versions lack a matching ABI tag and newer versions lack a matching
/// platform tag. The error should include hints listing the ABI tags and
/// platforms for which wheels *do* exist.
///
/// See: <https://github.com/astral-sh/uv/issues/7553>
#[test]
fn invalid_platform() -> Result<()> {
    let context = TestContext::new("3.10");
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str("open3d")?;

    uv_snapshot!(context
        .pip_compile()
        .arg("--python-platform")
        .arg("linux")
        .arg("requirements.in"), @r###"
    success: false
    exit_code: 1
    ----- stdout -----

    ----- stderr -----
      × No solution found when resolving dependencies:
      ╰─▶ Because only open3d<=0.18.0 is available and open3d<=0.15.2 has no wheels with a matching Python ABI tag (e.g., `cp310`), we can conclude that open3d<=0.15.2 cannot be used.
          And because open3d>=0.16.0,<=0.18.0 has no wheels with a matching platform tag (e.g., `manylinux_2_17_x86_64`) and you require open3d, we can conclude that your requirements are unsatisfiable.

          hint: You require CPython 3.10 (`cp310`), but we only found wheels for `open3d` (v0.15.2) with the following Python ABI tags: `cp36m`, `cp37m`, `cp38`, `cp39`

          hint: Wheels are available for `open3d` (v0.18.0) on the following platforms: `manylinux_2_27_aarch64`, `manylinux_2_27_x86_64`, `macosx_11_0_x86_64`, `macosx_13_0_arm64`, `win_amd64`
    "###);

    Ok(())
}

/// Treat `sys_platform` and `sys.platform` as equivalent markers in the marker algebra.
///
/// The dotted forms (`sys.platform`, `platform.python_implementation`) are
/// deprecated spellings of the underscore forms. The marker algebra should
/// simplify `sys_platform == 'win32' and sys.platform == 'win32'` to a single
/// condition, and recognize that `platform.python_implementation == 'CPython'
/// and python_implementation != 'CPython'` is unsatisfiable (so
/// `typing-extensions` drops out of the output entirely).
#[test]
fn universal_disjoint_deprecated_markers() -> Result<()> {
    let context = TestContext::new("3.12");
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str(indoc::indoc! {r"
        anyio ; sys_platform == 'win32' and sys.platform == 'win32'
        typing-extensions ; platform.python_implementation == 'CPython' and python_implementation != 'CPython'
    "})?;

    uv_snapshot!(context.filters(), context.pip_compile()
            .arg("requirements.in")
            .arg("--universal"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.in --universal
    anyio==4.3.0 ; sys_platform == 'win32'
        # via -r requirements.in
    idna==3.6 ; sys_platform == 'win32'
        # via anyio
    sniffio==1.3.1 ; sys_platform == 'win32'
        # via anyio

    ----- stderr -----
    Resolved 3 packages in [TIME]
    "###
    );

    Ok(())
}

/// Allow multiple URL overrides for the same package in a universal resolution,
/// as long as their markers are disjoint.
///
/// `sniffio` is overridden with two different wheel URLs, one gated on
/// `sys_platform == 'win32'` and one on `sys_platform == 'darwin'`. Since the
/// markers don't overlap, both pins should appear in the output.
#[test]
fn universal_disjoint_override_urls() -> Result<()> {
    let context = TestContext::new("3.12");
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str(indoc::indoc! {r"
        anyio
    "})?;

    let overrides_txt = context.temp_dir.child("overrides.txt");
    overrides_txt.write_str(indoc::indoc! {r"
        sniffio @ https://files.pythonhosted.org/packages/e9/44/75a9c9421471a6c4805dbf2356f7c181a29c1879239abab1ea2cc8f38b40/sniffio-1.3.1-py3-none-any.whl ; sys_platform == 'win32'
        sniffio @ https://files.pythonhosted.org/packages/c3/a0/5dba8ed157b0136607c7f2151db695885606968d1fae123dc3391e0cfdbf/sniffio-1.3.0-py3-none-any.whl ; sys_platform == 'darwin'
    "})?;

    uv_snapshot!(context.filters(), context.pip_compile()
            .arg("requirements.in")
            .arg("--overrides")
            .arg("overrides.txt")
            .arg("--universal"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.in --overrides overrides.txt --universal
    anyio==4.3.0
        # via -r requirements.in
    idna==3.6
        # via anyio
    sniffio @ https://files.pythonhosted.org/packages/c3/a0/5dba8ed157b0136607c7f2151db695885606968d1fae123dc3391e0cfdbf/sniffio-1.3.0-py3-none-any.whl ; sys_platform == 'darwin'
        # via
        #   --override overrides.txt
        #   anyio
    sniffio @ https://files.pythonhosted.org/packages/e9/44/75a9c9421471a6c4805dbf2356f7c181a29c1879239abab1ea2cc8f38b40/sniffio-1.3.1-py3-none-any.whl ; sys_platform == 'win32'
        # via
        #   --override overrides.txt
        #   anyio

    ----- stderr -----
    Resolved 4 packages in [TIME]
    "###
    );

    Ok(())
}

/// Reject URL overrides for the same package whose markers overlap.
///
/// Companion to `universal_disjoint_override_urls`: here the second override's
/// marker (`sys_platform == 'darwin' or sys_platform == 'win32'`) intersects
/// the first (`sys_platform == 'win32'`), so two different URLs would apply in
/// the `win32` split. That is a hard error (exit code 2) naming both URLs.
#[test]
fn universal_conflicting_override_urls() -> Result<()> {
    let context = TestContext::new("3.12");
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str(indoc::indoc! {r"
        anyio
    "})?;

    let overrides_txt = context.temp_dir.child("overrides.txt");
    overrides_txt.write_str(indoc::indoc! {r"
        sniffio @ https://files.pythonhosted.org/packages/e9/44/75a9c9421471a6c4805dbf2356f7c181a29c1879239abab1ea2cc8f38b40/sniffio-1.3.1-py3-none-any.whl ; sys_platform == 'win32'
        sniffio @ https://files.pythonhosted.org/packages/c3/a0/5dba8ed157b0136607c7f2151db695885606968d1fae123dc3391e0cfdbf/sniffio-1.3.0-py3-none-any.whl ; sys_platform == 'darwin' or sys_platform == 'win32'
    "})?;

    uv_snapshot!(context.filters(), context.pip_compile()
            .arg("requirements.in")
            .arg("--overrides")
            .arg("overrides.txt")
            .arg("--universal"), @r###"
    success: false
    exit_code: 2
    ----- stdout -----

    ----- stderr -----
    error: Requirements contain conflicting URLs for package `sniffio` in split `sys_platform == 'win32'`:
    - https://files.pythonhosted.org/packages/c3/a0/5dba8ed157b0136607c7f2151db695885606968d1fae123dc3391e0cfdbf/sniffio-1.3.0-py3-none-any.whl
    - https://files.pythonhosted.org/packages/e9/44/75a9c9421471a6c4805dbf2356f7c181a29c1879239abab1ea2cc8f38b40/sniffio-1.3.1-py3-none-any.whl
    "###
    );

    Ok(())
}

/// Warn on unpinned direct dependencies when using `--resolution lowest`.
///
/// `all-extras-required-a` (requested with extras, from the packse test index)
/// has no lower bound, so the resolver should emit a warning for it — but not
/// for `all-extras-required-b`/`-c`, which are pinned with `==1`.
#[test]
fn compile_lowest_extra_unpinned_warning() -> Result<()> {
    let context = TestContext::new("3.12");

    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str(indoc::indoc! {r"
        all-extras-required-a[extra_b,extra_c]
        all-extras-required-b==1
        all-extras-required-c==1
    "})?;

    // The packse scenario index requires unsetting the exclude-newer cutoff.
    uv_snapshot!(context.filters(), context.pip_compile()
        .arg("--resolution")
        .arg("lowest")
        .arg("--index-url")
        .arg(packse_index_url())
        .arg(requirements_in.path())
        .env_remove(EnvVars::UV_EXCLUDE_NEWER), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] --resolution lowest [TEMP_DIR]/requirements.in
    all-extras-required-a==1.0.0
        # via -r requirements.in
    all-extras-required-b==1.0.0
        # via
        #   -r requirements.in
        #   all-extras-required-a
    all-extras-required-c==1.0.0
        # via
        #   -r requirements.in
        #   all-extras-required-a

    ----- stderr -----
    warning: The direct dependency `all-extras-required-a` is unpinned. Consider setting a lower bound when using `--resolution lowest` to avoid using outdated versions.
    Resolved 3 packages in [TIME]
    "###
    );

    Ok(())
}

/// Fork a universal resolution on Python version when versions of a package
/// have disjoint `requires-python` bounds.
///
/// On a 3.8 interpreter, `coverage` is expected to fork into `7.6.1` for
/// `python_full_version < '3.9'` and `7.6.10` for `>= '3.9'`, while the
/// marker-gated `iniconfig` requirement keeps its (normalized) marker.
#[test]
fn disjoint_requires_python() -> Result<()> {
    let context = TestContext::new("3.8").with_exclude_newer("2025-01-29T00:00:00Z");

    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str(indoc::indoc! {r"
        iniconfig ; platform_python_implementation == 'CPython' and python_version >= '3.10'
        coverage
    "})?;

    uv_snapshot!(context.filters(), context.pip_compile()
        .arg("--universal")
        .arg(requirements_in.path()), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] --universal [TEMP_DIR]/requirements.in
    coverage==7.6.1 ; python_full_version < '3.9'
        # via -r requirements.in
    coverage==7.6.10 ; python_full_version >= '3.9'
        # via -r requirements.in
    iniconfig==2.0.0 ; python_full_version >= '3.10' and platform_python_implementation == 'CPython'
        # via -r requirements.in

    ----- stderr -----
    Resolved 3 packages in [TIME]
    "###
    );

    Ok(())
}

/// Test that we use the version in the source distribution filename for compiling, even if the
/// version is declared as dynamic.
///
/// `test_dynamic_version_sdist_wrong_version` checks that this version must be correct.
#[test]
fn dynamic_version_source_dist() -> Result<()> {
    let context = TestContext::new("3.12");

    // The sdist declares a dynamic version in `pyproject.toml`, while its
    // `setup.py` would fail loudly if executed — so a successful resolution
    // proves the version was taken from the filename without building.
    let pyproject_toml = r#"
    [project]
    name = "foo"
    requires-python = ">=3.9"
    dependencies = []
    dynamic = ["version"]
    "#;

    let setup_py = "boom()";

    let source_dist = context.temp_dir.child("foo-1.2.3.tar.gz");
    // Scope the writers so the gzip stream is flushed and closed before use.
    {
        let encoder = GzEncoder::new(
            File::create(source_dist.path())?,
            flate2::Compression::default(),
        );
        let mut archive = tar::Builder::new(encoder);

        let entries = [
            ("foo-1.2.3/pyproject.toml", pyproject_toml),
            ("foo-1.2.3/setup.py", setup_py),
        ];
        for (entry_path, body) in entries {
            let mut header = tar::Header::new_gnu();
            header.set_size(body.len() as u64);
            header.set_mode(0o644);
            header.set_cksum();
            archive.append_data(&mut header, entry_path, Cursor::new(body))?;
        }
        archive.finish()?;
    }

    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str(indoc::indoc! {r"
        foo
    "})?;

    uv_snapshot!(context.filters(), context
        .pip_compile()
        .arg(requirements_in.path())
        .arg("--no-index")
        .arg("--find-links")
        .arg(context.temp_dir.path()), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] [TEMP_DIR]/requirements.in --no-index
    foo==1.2.3
        # via -r requirements.in

    ----- stderr -----
    Resolved 1 package in [TIME]
    "###
    );

    Ok(())
}

/// Exercise `--fork-strategy` when the requested `--python-version` (3.7) is
/// below the available interpreter (3.8).
///
/// With `requires-python`, the resolution forks per Python-version boundary
/// and picks the latest compatible version in each fork; with `fewest`, the
/// resolver minimizes forks by preferring versions that cover the whole range.
#[test]
fn max_python_requirement() -> Result<()> {
    let context = TestContext::new("3.8").with_exclude_newer("2024-12-18T00:00:00Z");
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str(indoc::indoc! {r"
        nox >=2024.04.15
        nox[uv] >=2024.03.02; python_version >= '3.8'
    "})?;

    // Strategy 1: `requires-python` — fork aggressively at version boundaries.
    uv_snapshot!(context.filters(), windows_filters=false, context.pip_compile()
            .arg("requirements.in")
            .arg("--fork-strategy")
            .arg("requires-python")
            .arg("--universal")
            .arg("--python-version")
            .arg("3.7"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.in --fork-strategy requires-python --universal --python-version 3.7
    argcomplete==3.1.2 ; python_full_version < '3.8'
        # via nox
    argcomplete==3.5.2 ; python_full_version >= '3.8'
        # via nox
    colorama==0.4.6 ; sys_platform == 'win32'
        # via colorlog
    colorlog==6.9.0
        # via nox
    distlib==0.3.9
        # via virtualenv
    filelock==3.12.2 ; python_full_version < '3.8'
        # via virtualenv
    filelock==3.16.1 ; python_full_version >= '3.8'
        # via virtualenv
    importlib-metadata==6.7.0 ; python_full_version < '3.8'
        # via
        #   argcomplete
        #   nox
        #   virtualenv
    nox==2024.4.15 ; python_full_version < '3.8'
        # via -r requirements.in
    nox==2024.10.9 ; python_full_version >= '3.8'
        # via -r requirements.in
    packaging==24.0 ; python_full_version < '3.8'
        # via nox
    packaging==24.2 ; python_full_version >= '3.8'
        # via nox
    platformdirs==4.0.0 ; python_full_version < '3.8'
        # via virtualenv
    platformdirs==4.3.6 ; python_full_version >= '3.8'
        # via virtualenv
    tomli==2.0.1 ; python_full_version < '3.8'
        # via nox
    tomli==2.2.1 ; python_full_version >= '3.8' and python_full_version < '3.11'
        # via nox
    typing-extensions==4.7.1 ; python_full_version < '3.8'
        # via
        #   importlib-metadata
        #   nox
        #   platformdirs
    uv==0.5.10 ; python_full_version >= '3.8'
        # via nox
    virtualenv==20.26.6 ; python_full_version < '3.8'
        # via nox
    virtualenv==20.28.0 ; python_full_version >= '3.8'
        # via nox
    zipp==3.15.0 ; python_full_version < '3.8'
        # via importlib-metadata

    ----- stderr -----
    warning: The requested Python version 3.7 is not available; 3.8.[X] will be used to build dependencies instead.
    Resolved 21 packages in [TIME]
    "###
    );

    // Strategy 2: `fewest` — prefer single versions spanning the whole range
    // (14 packages instead of 21, with far fewer per-version splits).
    uv_snapshot!(context.filters(), windows_filters=false, context.pip_compile()
            .arg("requirements.in")
            .arg("--fork-strategy")
            .arg("fewest")
            .arg("--universal")
            .arg("--python-version")
            .arg("3.7"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.in --fork-strategy fewest --universal --python-version 3.7
    argcomplete==3.1.2
        # via nox
    colorama==0.4.6 ; sys_platform == 'win32'
        # via colorlog
    colorlog==6.9.0
        # via nox
    distlib==0.3.9
        # via virtualenv
    filelock==3.12.2
        # via virtualenv
    importlib-metadata==6.7.0 ; python_full_version < '3.8'
        # via
        #   argcomplete
        #   nox
        #   virtualenv
    nox==2024.4.15
        # via -r requirements.in
    packaging==24.0
        # via nox
    platformdirs==4.0.0
        # via virtualenv
    tomli==2.0.1 ; python_full_version < '3.11'
        # via nox
    typing-extensions==4.7.1 ; python_full_version < '3.8'
        # via
        #   importlib-metadata
        #   nox
        #   platformdirs
    uv==0.5.10 ; python_full_version >= '3.8'
        # via nox
    virtualenv==20.26.6
        # via nox
    zipp==3.15.0 ; python_full_version < '3.8'
        # via importlib-metadata

    ----- stderr -----
    warning: The requested Python version 3.7 is not available; 3.8.[X] will be used to build dependencies instead.
    Resolved 14 packages in [TIME]
    "###
    );

    Ok(())
}

/// Respect existing pins in the output file when a package is routed to an
/// explicit named index via `tool.uv.sources`.
///
/// The pre-existing `requirements.txt` pins `iniconfig==1.1.1` and
/// `typing-extensions==4.6.0`; recompiling should keep those preferences
/// rather than upgrading, even though `iniconfig` comes from the explicit
/// `pypi` index entry.
///
/// See: <https://github.com/astral-sh/uv/issues/10383>
#[test]
fn respect_index_preference() -> Result<()> {
    let context = TestContext::new("3.12");
    let pyproject_toml = context.temp_dir.child("pyproject.toml");
    pyproject_toml.write_str(indoc::indoc! {r#"
        [project]
        name = "project"
        version = "0.1.0"
        dependencies = ["iniconfig>=1", "typing-extensions>=4"]

        [[tool.uv.index]]
        name = "pypi"
        url = "https://pypi.org/simple"
        explicit = true

        [tool.uv.sources]
        iniconfig = { index = "pypi" }
    "#})?;

    // The output file doubles as the preference source for the resolution.
    let requirements_txt = context.temp_dir.child("requirements.txt");
    requirements_txt.write_str(indoc::indoc! {r"
        iniconfig==1.1.1
        typing-extensions==4.6.0
    "})?;

    uv_snapshot!(context
        .pip_compile()
        .arg("pyproject.toml")
        .arg("-o")
        .arg("requirements.txt"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] pyproject.toml -o requirements.txt
    iniconfig==1.1.1
        # via project (pyproject.toml)
    typing-extensions==4.6.0
        # via project (pyproject.toml)

    ----- stderr -----
    Resolved 2 packages in [TIME]
    "###);

    Ok(())
}

/// `uv pip compile --group` with a single `pyproject.toml`: groups can be
/// selected by name (resolved against `./pyproject.toml`), by explicit
/// `<path>:<group>` syntax, combined with each other, and combined with the
/// project's own dependencies.
#[test]
fn dependency_group() -> Result<()> {
    // uv pip compile --group tests, with a single pyproject.toml
    //
    // Each scenario gets a fresh context/project so the cases stay independent.
    fn new_context() -> Result<TestContext> {
        let context = TestContext::new("3.12");

        let pyproject_toml = context.temp_dir.child("pyproject.toml");
        pyproject_toml.write_str(
            r#"
            [project]
            name = "myproject"
            version = "0.1.0"
            requires-python = ">=3.12"
            dependencies = ["typing-extensions"]
            [dependency-groups]
            foo = ["sortedcontainers"]
            bar = ["iniconfig"]
            dev = ["sniffio"]
            "#,
        )?;

        Ok(context)
    }

    let mut context;

    // Passing --group should add just that group's contents from ./pyproject.toml
    context = new_context()?;
    uv_snapshot!(context.filters(), context.pip_compile()
        .arg("--group").arg("bar"), @r"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] --group bar
    iniconfig==2.0.0
        # via myproject (pyproject.toml:bar)

    ----- stderr -----
    Resolved 1 package in [TIME]
    ");

    // Passing a pyproject.toml and --group should include both its deps and the group
    // (This is a "try to confuse the internals" test, as this file is logically
    // imported twice, but with two different semantics.)
    context = new_context()?;
    uv_snapshot!(context.filters(), context.pip_compile()
        .arg("pyproject.toml")
        .arg("--group").arg("bar"), @r"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] pyproject.toml --group bar
    iniconfig==2.0.0
        # via myproject (pyproject.toml:bar)
    typing-extensions==4.10.0
        # via myproject (pyproject.toml)

    ----- stderr -----
    Resolved 2 packages in [TIME]
    ");

    // Another "try to confuse the internals" test with an absolute path
    context = new_context()?;
    uv_snapshot!(context.filters(), context.pip_compile()
        .arg(context.temp_dir.child("pyproject.toml").path())
        .arg("--group").arg("bar"), @r"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] [TEMP_DIR]/pyproject.toml --group bar
    iniconfig==2.0.0
        # via myproject (pyproject.toml:bar)
    typing-extensions==4.10.0
        # via myproject (pyproject.toml)

    ----- stderr -----
    Resolved 2 packages in [TIME]
    ");

    // An explicit use of `<path>:<group>` syntax, here using what the default is anyway
    context = new_context()?;
    uv_snapshot!(context.filters(), context.pip_compile()
        .arg("--group").arg("pyproject.toml:bar"), @r"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] --group pyproject.toml:bar
    iniconfig==2.0.0
        # via myproject (pyproject.toml:bar)

    ----- stderr -----
    Resolved 1 package in [TIME]
    ");

    // "try to confuse the internals" with an explicit path for the group
    context = new_context()?;
    uv_snapshot!(context.filters(), context.pip_compile()
        .arg("pyproject.toml")
        .arg("--group").arg("pyproject.toml:bar"), @r"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] pyproject.toml --group pyproject.toml:bar
    iniconfig==2.0.0
        # via myproject (pyproject.toml:bar)
    typing-extensions==4.10.0
        # via myproject (pyproject.toml)

    ----- stderr -----
    Resolved 2 packages in [TIME]
    ");

    // Let's check that the other group works fine individually
    context = new_context()?;
    uv_snapshot!(context.filters(), context.pip_compile()
        .arg("--group").arg("foo"), @r"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] --group foo
    sortedcontainers==2.4.0
        # via myproject (pyproject.toml:foo)

    ----- stderr -----
    Resolved 1 package in [TIME]
    ");

    // Now let's do both of the groups together
    context = new_context()?;
    uv_snapshot!(context.filters(), context.pip_compile()
        .arg("--group").arg("foo")
        .arg("--group").arg("bar"), @r"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] --group foo --group bar
    iniconfig==2.0.0
        # via myproject (pyproject.toml:bar)
    sortedcontainers==2.4.0
        # via myproject (pyproject.toml:foo)

    ----- stderr -----
    Resolved 2 packages in [TIME]
    ");

    // And finally put it all together
    context = new_context()?;
    uv_snapshot!(context.filters(), context.pip_compile()
        .arg("pyproject.toml")
        .arg("--group").arg("foo")
        .arg("--group").arg("bar"), @r"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] pyproject.toml --group foo --group bar
    iniconfig==2.0.0
        # via myproject (pyproject.toml:bar)
    sortedcontainers==2.4.0
        # via myproject (pyproject.toml:foo)
    typing-extensions==4.10.0
        # via myproject (pyproject.toml)

    ----- stderr -----
    Resolved 3 packages in [TIME]
    ");

    Ok(())
}

/// `uv pip compile --group` with multiple `pyproject.toml` files at once:
/// `<path>:<group>` selects a group from a specific file, and two files may
/// each contribute a group — even one with the same name — without confusion.
#[test]
fn many_pyproject_group() -> Result<()> {
    // uv pip compile --group tests, with multiple pyproject.tomls at once
    //
    // Sets up `./pyproject.toml` (group `foo`) and `./subdir/pyproject.toml`
    // (groups `foo` and `bar`) with *different* contents for the shared name.
    fn new_context() -> Result<TestContext> {
        let context = TestContext::new("3.12");

        let pyproject_toml = context.temp_dir.child("pyproject.toml");
        pyproject_toml.write_str(
            r#"
            [project]
            name = "myproject"
            version = "0.1.0"
            requires-python = ">=3.12"
            dependencies = ["typing-extensions"]
            [dependency-groups]
            foo = ["sortedcontainers"]
            "#,
        )?;

        let subdir = context.temp_dir.child("subdir");
        subdir.create_dir_all()?;
        let pyproject_toml2 = subdir.child("pyproject.toml");
        pyproject_toml2.write_str(
            r#"
            [project]
            name = "mysubproject"
            version = "0.1.0"
            requires-python = ">=3.12"
            dependencies = ["typing-extensions"]
            [dependency-groups]
            foo = ["iniconfig"]
            bar = ["sniffio"]
            "#,
        )?;

        Ok(context)
    }

    let mut context;

    // Use the 'foo' group from the main toml
    context = new_context()?;
    uv_snapshot!(context.filters(), context.pip_compile()
        .arg("--group").arg("pyproject.toml:foo"), @r"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] --group pyproject.toml:foo
    sortedcontainers==2.4.0
        # via myproject (pyproject.toml:foo)

    ----- stderr -----
    Resolved 1 package in [TIME]
    ");

    // Use the 'foo' group from the subtoml
    uv_snapshot!(context.filters(), context.pip_compile()
        .arg("--group").arg("subdir/pyproject.toml:foo"), @r"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] --group subdir/pyproject.toml:foo
    iniconfig==2.0.0
        # via mysubproject (subdir/pyproject.toml:foo)

    ----- stderr -----
    Resolved 1 package in [TIME]
    ");

    // Now try both together, where they happen to define a group with the same name
    // (This does nothing special but the code shouldn't get confused.)
    context = new_context()?;
    uv_snapshot!(context.filters(), context.pip_compile()
        .arg("--group").arg("pyproject.toml:foo")
        .arg("--group").arg("subdir/pyproject.toml:foo"), @r"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] --group pyproject.toml:foo --group subdir/pyproject.toml:foo
    iniconfig==2.0.0
        # via mysubproject (subdir/pyproject.toml:foo)
    sortedcontainers==2.4.0
        # via myproject (pyproject.toml:foo)

    ----- stderr -----
    Resolved 2 packages in [TIME]
    ");

    Ok(())
}

/// `uv pip compile --group` invocations that are "suspicious" but valid:
/// mixing the bare-name sugar (resolved against `./pyproject.toml`) with
/// explicit `<path>:<group>` syntax or another file's dependencies. The bare
/// name must always resolve against the CWD toml, never the other file —
/// these cases pin that behavior (and are candidates for future warnings).
#[test]
fn suspicious_group() -> Result<()> {
    // uv pip compile --group tests, where the invocations are suspicious
    // and we might want to add warnings
    //
    // Same two-toml layout as `many_pyproject_group`: `./pyproject.toml`
    // (group `foo`) and `./subdir/pyproject.toml` (groups `foo` and `bar`).
    fn new_context() -> Result<TestContext> {
        let context = TestContext::new("3.12");

        let pyproject_toml = context.temp_dir.child("pyproject.toml");
        pyproject_toml.write_str(
            r#"
            [project]
            name = "myproject"
            version = "0.1.0"
            requires-python = ">=3.12"
            dependencies = ["typing-extensions"]
            [dependency-groups]
            foo = ["sortedcontainers"]
            "#,
        )?;

        let subdir = context.temp_dir.child("subdir");
        subdir.create_dir_all()?;
        let pyproject_toml2 = subdir.child("pyproject.toml");
        pyproject_toml2.write_str(
            r#"
            [project]
            name = "mysubproject"
            version = "0.1.0"
            requires-python = ">=3.12"
            dependencies = ["typing-extensions"]
            [dependency-groups]
            foo = ["iniconfig"]
            bar = ["sniffio"]
            "#,
        )?;

        Ok(context)
    }

    let mut context;

    // Another variant of "both" but with the path sugar applied to the one in cwd
    context = new_context()?;
    uv_snapshot!(context.filters(), context.pip_compile()
        .arg("--group").arg("foo")
        .arg("--group").arg("subdir/pyproject.toml:foo"), @r"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] --group foo --group subdir/pyproject.toml:foo
    iniconfig==2.0.0
        # via mysubproject (subdir/pyproject.toml:foo)
    sortedcontainers==2.4.0
        # via myproject (pyproject.toml:foo)

    ----- stderr -----
    Resolved 2 packages in [TIME]
    ");

    // Using the path sugar for "foo" but requesting "bar" for the subtoml
    // Although you would be forgiven for thinking "foo" should be used from
    // the subtoml, that's not what should happen.
    context = new_context()?;
    uv_snapshot!(context.filters(), context.pip_compile()
        .arg("--group").arg("foo")
        .arg("--group").arg("subdir/pyproject.toml:bar"), @r"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] --group foo --group subdir/pyproject.toml:bar
    sniffio==1.3.1
        # via mysubproject (subdir/pyproject.toml:bar)
    sortedcontainers==2.4.0
        # via myproject (pyproject.toml:foo)

    ----- stderr -----
    Resolved 2 packages in [TIME]
    ");

    // Using the path sugar to request pyproject.toml:foo
    // while also importing subdir/pyproject.toml's dependencies
    // Although you would be forgiven for thinking "foo" should be used from
    // the subtoml, that's not what should happen.
    context = new_context()?;
    uv_snapshot!(context.filters(), context.pip_compile()
        .arg("subdir/pyproject.toml")
        .arg("--group").arg("foo"), @r"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] subdir/pyproject.toml --group foo
    sortedcontainers==2.4.0
        # via myproject (pyproject.toml:foo)
    typing-extensions==4.10.0
        # via mysubproject (subdir/pyproject.toml)

    ----- stderr -----
    Resolved 2 packages in [TIME]
    ");

    // An inversion of the previous -- this one isn't terribly ambiguous
    // but we should have it in the suite too in case it should be distinguished!
    context = new_context()?;
    uv_snapshot!(context.filters(), context.pip_compile()
        .arg("pyproject.toml")
        .arg("--group").arg("subdir/pyproject.toml:foo"), @r"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] pyproject.toml --group subdir/pyproject.toml:foo
    iniconfig==2.0.0
        # via mysubproject (subdir/pyproject.toml:foo)
    typing-extensions==4.10.0
        # via myproject (pyproject.toml)

    ----- stderr -----
    Resolved 2 packages in [TIME]
    ");

    Ok(())
}

/// `uv pip compile --group` invocations that should fail with a clear error.
///
/// Exercises malformed `--group` values (a bare path, and `<path>:<group>`
/// forms whose path doesn't end in `pyproject.toml`) as well as requesting a
/// group name that the root manifest doesn't define.
#[test]
fn invalid_group() -> Result<()> {
    // uv pip compile --group tests, where the invocations should fail
    fn new_context() -> Result<TestContext> {
        let context = TestContext::new("3.12");

        // Root manifest: only defines the 'foo' dependency group.
        let pyproject_toml = context.temp_dir.child("pyproject.toml");
        pyproject_toml.write_str(
            r#"
            [project]
            name = "myproject"
            version = "0.1.0"
            requires-python = ">=3.12"
            dependencies = ["typing-extensions"]
            [dependency-groups]
            foo = ["sortedcontainers"]
            "#,
        )?;

        // Nested manifest: defines both 'foo' and 'bar'.
        let subdir = context.temp_dir.child("subdir");
        subdir.create_dir_all()?;
        let pyproject_toml2 = subdir.child("pyproject.toml");
        pyproject_toml2.write_str(
            r#"
            [project]
            name = "mysubproject"
            version = "0.1.0"
            requires-python = ">=3.12"
            dependencies = ["typing-extensions"]
            [dependency-groups]
            foo = ["iniconfig"]
            bar = ["sniffio"]
            "#,
        )?;

        Ok(context)
    }

    let context = new_context()?;

    // Hey you passed a path and not a group!
    // (Rejected at argument-parsing time: 'subdir/' is not a valid group name.)
    uv_snapshot!(context.filters(), context.pip_compile()
        .arg("--group").arg("subdir/"), @r#"
    success: false
    exit_code: 2
    ----- stdout -----

    ----- stderr -----
    error: invalid value 'subdir/' for '--group <GROUP>': Not a valid package or extra name: "subdir/". Names must start and end with a letter or digit and may only contain -, _, ., and alphanumeric characters.

    For more information, try '--help'.
    "#);

    // Hey this path needs to end with "pyproject.toml"!
    uv_snapshot!(context.filters(), context.pip_compile()
        .arg("--group").arg("./:foo"), @r"
    success: false
    exit_code: 2
    ----- stdout -----

    ----- stderr -----
    error: invalid value './:foo' for '--group <GROUP>': The `--group` path is required to end in 'pyproject.toml' for compatibility with pip; got: ./

    For more information, try '--help'.
    ");

    // Hey this path needs to end with "pyproject.toml"!
    uv_snapshot!(context.filters(), context.pip_compile()
        .arg("--group").arg("subdir/:foo"), @r"
    success: false
    exit_code: 2
    ----- stdout -----

    ----- stderr -----
    error: invalid value 'subdir/:foo' for '--group <GROUP>': The `--group` path is required to end in 'pyproject.toml' for compatibility with pip; got: subdir/

    For more information, try '--help'.
    ");

    // Another invocation that Looks Weird but is asking for bar from two
    // different tomls. In this case the main one doesn't define it and
    // we should error!
    uv_snapshot!(context.filters(), context.pip_compile()
        .arg("--group").arg("bar")
        .arg("--group").arg("subdir/pyproject.toml:bar"), @r"
    success: false
    exit_code: 2
    ----- stdout -----

    ----- stderr -----
    error: The dependency group 'bar' was not found in the project: pyproject.toml
    ");

    Ok(())
}

/// Checks that `--project` is handled properly with `--group`.
///
/// Bare ("implicit-sugar") group names resolve against the `--project`
/// directory's `pyproject.toml`, while explicit `<path>:<group>` forms are
/// resolved relative to the invocation cwd and are NOT affected by
/// `--project` (contrast with `--directory`).
#[test]
fn project_and_group() -> Result<()> {
    // Checking that --project is handled properly with --group
    fn new_context() -> Result<TestContext> {
        let context = TestContext::new("3.12");

        // Root manifest: defines the 'foo' group only.
        let pyproject_toml = context.temp_dir.child("pyproject.toml");
        pyproject_toml.write_str(
            r#"
            [project]
            name = "myproject"
            version = "0.1.0"
            requires-python = ">=3.12"
            dependencies = ["typing-extensions"]
            [dependency-groups]
            foo = ["sortedcontainers"]
            "#,
        )?;

        // Nested manifest: defines 'foo' and 'bar' (no project dependencies).
        let subdir = context.temp_dir.child("subdir");
        subdir.create_dir_all()?;
        let pyproject_toml2 = subdir.child("pyproject.toml");
        pyproject_toml2.write_str(
            r#"
            [project]
            name = "mysubproject"
            version = "0.1.0"
            requires-python = ">=3.12"
            [dependency-groups]
            foo = ["iniconfig"]
            bar = ["sniffio"]
            "#,
        )?;

        Ok(context)
    }

    // A fresh context per case so no state carries over between invocations.
    let mut context;

    // 'foo' from subtoml, by implicit-sugar + --project
    context = new_context()?;
    uv_snapshot!(context.filters(), context.pip_compile()
        .arg("--project").arg("subdir")
        .arg("--group").arg("foo"), @r"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] --project subdir --group foo
    iniconfig==2.0.0
        # via mysubproject (subdir/pyproject.toml:foo)

    ----- stderr -----
    Resolved 1 package in [TIME]
    ");

    // 'foo' from subtoml, by implicit-sugar + --project
    // 'bar' from subtoml, by explicit relpath from cwd
    // (explicit relpaths are not affected by --project)
    context = new_context()?;
    uv_snapshot!(context.filters(), context.pip_compile()
        .arg("--project").arg("subdir")
        .arg("--group").arg("subdir/pyproject.toml:bar")
        .arg("--group").arg("foo"), @r"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] --project subdir --group subdir/pyproject.toml:bar --group foo
    iniconfig==2.0.0
        # via mysubproject (subdir/pyproject.toml:foo)
    sniffio==1.3.1
        # via mysubproject (subdir/pyproject.toml:bar)

    ----- stderr -----
    Resolved 2 packages in [TIME]
    ");

    // 'bar' from subtoml, by implicit-sugar + --project
    // 'foo' from main toml, by explicit relpath from cwd
    // (explicit relpaths are not affected by --project)
    context = new_context()?;
    uv_snapshot!(context.filters(), context.pip_compile()
        .arg("--project").arg("subdir")
        .arg("--group").arg("bar")
        .arg("--group").arg("pyproject.toml:foo"), @r"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] --project subdir --group bar --group pyproject.toml:foo
    sniffio==1.3.1
        # via mysubproject (subdir/pyproject.toml:bar)
    sortedcontainers==2.4.0
        # via myproject (pyproject.toml:foo)

    ----- stderr -----
    Resolved 2 packages in [TIME]
    ");

    // 'bar' from subtoml, by explicit relpath from cwd
    // 'foo' from main toml, by explicit relpath from cwd
    // (explicit relpaths are not affected by --project)
    context = new_context()?;
    uv_snapshot!(context.filters(), context.pip_compile()
        .arg("--project").arg("subdir")
        .arg("--group").arg("subdir/pyproject.toml:bar")
        .arg("--group").arg("pyproject.toml:foo"), @r"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] --project subdir --group subdir/pyproject.toml:bar --group pyproject.toml:foo
    sniffio==1.3.1
        # via mysubproject (subdir/pyproject.toml:bar)
    sortedcontainers==2.4.0
        # via myproject (pyproject.toml:foo)

    ----- stderr -----
    Resolved 2 packages in [TIME]
    ");

    Ok(())
}

/// Checks that `--directory` is handled properly with `--group`.
///
/// Unlike `--project` (see `project_and_group`), `--directory` changes the
/// effective working directory, so explicit `<path>:<group>` values ARE
/// resolved relative to it — and relative paths in the output (e.g.
/// `../pyproject.toml:foo`) reflect that.
#[test]
fn directory_and_group() -> Result<()> {
    // Checking that --directory is handled properly with --group
    fn new_context() -> Result<TestContext> {
        let context = TestContext::new("3.12");

        // Root manifest: defines the 'foo' group only.
        let pyproject_toml = context.temp_dir.child("pyproject.toml");
        pyproject_toml.write_str(
            r#"
            [project]
            name = "myproject"
            version = "0.1.0"
            requires-python = ">=3.12"
            dependencies = ["typing-extensions"]
            [dependency-groups]
            foo = ["sortedcontainers"]
            "#,
        )?;

        // Nested manifest: defines 'foo' and 'bar' (no project dependencies).
        let subdir = context.temp_dir.child("subdir");
        subdir.create_dir_all()?;
        let pyproject_toml2 = subdir.child("pyproject.toml");
        pyproject_toml2.write_str(
            r#"
            [project]
            name = "mysubproject"
            version = "0.1.0"
            requires-python = ">=3.12"
            [dependency-groups]
            foo = ["iniconfig"]
            bar = ["sniffio"]
            "#,
        )?;

        Ok(context)
    }

    // A fresh context per case so no state carries over between invocations.
    let mut context;

    // 'bar' from subtoml, by implicit-sugar + --directory
    // 'foo' from main toml, by explicit relpath from --directory
    // (explicit relpaths ARE affected by --directory)
    context = new_context()?;
    uv_snapshot!(context.filters(), context.pip_compile()
        .arg("--directory").arg("subdir")
        .arg("--group").arg("bar")
        .arg("--group").arg("../pyproject.toml:foo"), @r"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] --directory subdir --group bar --group ../pyproject.toml:foo
    sniffio==1.3.1
        # via mysubproject (pyproject.toml:bar)
    sortedcontainers==2.4.0
        # via myproject (../pyproject.toml:foo)

    ----- stderr -----
    Resolved 2 packages in [TIME]
    ");

    // 'bar' from subtoml, by explicit relpath from --directory
    // 'foo' from main toml, by explicit relpath from --directory
    // (explicit relpaths ARE affected by --directory)
    context = new_context()?;
    uv_snapshot!(context.filters(), context.pip_compile()
        .arg("--directory").arg("subdir")
        .arg("--group").arg("pyproject.toml:bar")
        .arg("--group").arg("../pyproject.toml:foo"), @r"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] --directory subdir --group pyproject.toml:bar --group ../pyproject.toml:foo
    sniffio==1.3.1
        # via mysubproject (pyproject.toml:bar)
    sortedcontainers==2.4.0
        # via myproject (../pyproject.toml:foo)

    ----- stderr -----
    Resolved 2 packages in [TIME]
    ");

    // 'bar' from subtoml, by explicit relpath from --directory
    // 'foo' from main toml, by implicit path + --project + --directory
    // (explicit relpaths ARE affected by --directory)
    context = new_context()?;
    uv_snapshot!(context.filters(), context.pip_compile()
        .arg("--directory").arg("subdir")
        .arg("--project").arg("../")
        .arg("--group").arg("pyproject.toml:bar")
        .arg("--group").arg("foo"), @r"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] --directory subdir --project ../ --group pyproject.toml:bar --group foo
    sniffio==1.3.1
        # via mysubproject (pyproject.toml:bar)
    sortedcontainers==2.4.0
        # via myproject ([TEMP_DIR]/pyproject.toml:foo)

    ----- stderr -----
    Resolved 2 packages in [TIME]
    ");

    Ok(())
}

/// See: <https://github.com/astral-sh/uv/issues/10957>
///
/// A `--universal` resolution that forks on `requires-python` (two `zipp`
/// pins split at the Python 3.9 boundary) must be stable: re-running with
/// the previous output file present yields identical output.
#[test]
fn compile_preserve_requires_python_split() -> Result<()> {
    let context = TestContext::new("3.8").with_exclude_newer("2025-01-01T00:00:00Z");
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str("zipp")?;

    // First run: produces the fork on `python_full_version` at 3.9.
    uv_snapshot!(context
        .pip_compile()
        .arg("--python-version")
        .arg("3.8")
        .arg("--universal")
        .arg("requirements.in")
        .arg("-o")
        .arg("requirements.txt"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] --python-version 3.8 --universal requirements.in -o requirements.txt
    zipp==3.20.2 ; python_full_version < '3.9'
        # via -r requirements.in
    zipp==3.21.0 ; python_full_version >= '3.9'
        # via -r requirements.in

    ----- stderr -----
    Resolved 2 packages in [TIME]
    "###);

    // Re-running shouldn't change the output.
    uv_snapshot!(context
        .pip_compile()
        .arg("--python-version")
        .arg("3.8")
        .arg("--universal")
        .arg("requirements.in")
        .arg("-o")
        .arg("requirements.txt"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] --python-version 3.8 --universal requirements.in -o requirements.txt
    zipp==3.20.2 ; python_full_version < '3.9'
        # via -r requirements.in
    zipp==3.21.0 ; python_full_version >= '3.9'
        # via -r requirements.in

    ----- stderr -----
    Resolved 2 packages in [TIME]
    "###);

    Ok(())
}

/// Regression test for <https://github.com/astral-sh/uv/issues/11279#issuecomment-2640270189>
///
/// `psycopg[binary]` is requested only off-PyPy and bare `psycopg` only on
/// PyPy; the extra's `psycopg-binary` entry must carry the combined markers
/// (rather than being emitted unconditionally).
#[test]
fn markers_on_extra_packages() -> Result<()> {
    let context = TestContext::new("3.12");
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str(indoc! {"
        psycopg[binary]; platform_python_implementation != 'PyPy'
        psycopg; platform_python_implementation == 'PyPy'
    "})?;

    uv_snapshot!(context
        .pip_compile()
        .arg("--universal")
        .arg("requirements.in"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] --universal requirements.in
    psycopg==3.1.18
        # via -r requirements.in
    psycopg-binary==3.1.18 ; implementation_name != 'pypy' and platform_python_implementation != 'PyPy'
        # via psycopg
    typing-extensions==4.10.0
        # via psycopg
    tzdata==2024.1 ; sys_platform == 'win32'
        # via psycopg

    ----- stderr -----
    Resolved 4 packages in [TIME]
    "###);

    Ok(())
}

/// An existing `requirements.txt` pinning `torch==2.6.0` is passed as the
/// output file, so it acts as a preference during re-resolution. With the
/// PyTorch CPU mirror supplied via `--index`, the universal resolution keeps
/// 2.6.0, forking into plain `2.6.0` on macOS and local `2.6.0+cpu`
/// elsewhere — i.e. the non-local preference is respected.
#[test]
fn respect_non_local_preference() -> Result<()> {
    let context = TestContext::new("3.12").with_exclude_newer("2025-01-30T00:00:00Z");
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str("torch")?;

    // The pre-existing output file supplies the `torch==2.6.0` preference.
    let requirements_txt = context.temp_dir.child("requirements.txt");
    requirements_txt.write_str("torch==2.6.0")?;

    // NOTE(review): the autogenerated header below omits the `--index` flag —
    // presumably uv redacts index URLs from the emitted command; confirm.
    uv_snapshot!(context
        .pip_compile()
        .arg("requirements.in")
        .arg("-o")
        .arg("requirements.txt")
        .arg("--universal")
        .arg("--index")
        .arg("https://astral-sh.github.io/pytorch-mirror/whl/cpu"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.in -o requirements.txt --universal
    filelock==3.13.1
        # via torch
    fsspec==2024.6.1
        # via torch
    jinja2==3.1.4
        # via torch
    markupsafe==3.0.2
        # via jinja2
    mpmath==1.3.0
        # via sympy
    networkx==3.3
        # via torch
    setuptools==70.2.0
        # via torch
    sympy==1.13.1
        # via torch
    torch==2.6.0 ; sys_platform == 'darwin'
        # via -r requirements.in
    torch==2.6.0+cpu ; sys_platform != 'darwin'
        # via -r requirements.in
    typing-extensions==4.12.2
        # via torch

    ----- stderr -----
    Resolved 11 packages in [TIME]
    "###);

    Ok(())
}

/// Compiles `pillow-avif-plugin` with `--generate-hashes` under an
/// `--exclude-newer` cutoff; the emitted entry lists the full set of hashes
/// for the artifacts that survive the cutoff.
#[test]
fn omit_wheels_exclude_newer() -> Result<()> {
    let context = TestContext::new("3.12").with_exclude_newer("2024-08-01T00:00:00Z");
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str("pillow-avif-plugin")?;

    uv_snapshot!(context
        .pip_compile()
        .arg("requirements.in")
        .arg("--universal")
        .arg("--generate-hashes"), @r"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.in --universal --generate-hashes
    pillow-avif-plugin==1.4.6 \
        --hash=sha256:0014a215e197c52520d3946f3704c8c0932a170cc5783f96d2385f55191dce29 \
        --hash=sha256:07372b7740439cc26346d8e3995de1fd5c49a92ab307321b74b3e6305a7e0e49 \
        --hash=sha256:09a7e4b00b18df55b9f34d4f031060ca46d8f5f5e0ba347dda600dcb5172e5f2 \
        --hash=sha256:0e699ca8dcfee82732495e101401567184fed6ba10f17e7fb872c46415606ec7 \
        --hash=sha256:2347399f2457e5efacec8fc9e446a5a90252b8723c6a47dc61e2353aa97e3e2e \
        --hash=sha256:25d1dea0c496a49b17a336b271263ea76a4a0af19553565e95c4bb03281a4113 \
        --hash=sha256:323804efe752cf4d15fdcf770749ba23d727f8ea94b95cfe42bec597f3b9bbbb \
        --hash=sha256:334e1d39e8b3b4548db690df3735039378e96e1497fd8ba0e25a5e21561b7cf5 \
        --hash=sha256:41a8c41b56a891adbcff30933009d475fdd649f2025d62ba59885975ed4379c0 \
        --hash=sha256:450b34d19d88443e39b011e84b54433f7ccd6cf8774ed626e433ec3cc7d52924 \
        --hash=sha256:56be2604b734caf23788922dbcc92d880d241d02b444c7a8367a65bb25b16aac \
        --hash=sha256:584469ea7dedd8ca4f579917cf22f25e8ab980e1b98bbe212cbd7395f881cd42 \
        --hash=sha256:5bacc0802516f054f98d9f218ada17b2e8a756e35cb71e7401bb8422848fe796 \
        --hash=sha256:5c5e6575e0ca0cd292d459cf627a27a505f38a6edad6f35fd9c4bce4a2cccef3 \
        --hash=sha256:5d3c1202e9e03b93ef5e385fcee917d73e23833618472e6416c0fc58b53ba8b8 \
        --hash=sha256:60699d10679c8361690703b79abde4a2e7b8047540f0c58fd5da0ac672a15321 \
        --hash=sha256:6556cbee2d755dc99a99a5a85c302393e58bcbbf675bc93fa9ab283904dadbfc \
        --hash=sha256:6bc73ea62605c8725aba2422de1b546a5c4a6e5e73dcf66f9e22102249342d6b \
        --hash=sha256:7d2e933e9b197e9a51c3fbfce389a70201fbce1b7c60172f790760217d7927f8 \
        --hash=sha256:855cf50d03f6fc16e1fd5e364b3cea0b79f4bf90d39ff2123969735d851e08ba \
        --hash=sha256:91537935612d8fb4b8f621a912ce0eb4e363fdf615d472b20a043a7a18efb461 \
        --hash=sha256:963ce7b93340f235db5c7f16b46835c72681896052dcbf1652a01946e7b9103e \
        --hash=sha256:a6f97ffc84cdce0926f86a2f4bee088e661f5f93bec9112adca281341f463479 \
        --hash=sha256:b37e1314500cec3457210f4c8a7583afe35751f076efa8122faa0f205403d645 \
        --hash=sha256:b4f08c341d8aed2d7762589fdd99c4d3e191d4976dab59516b522704a67a281d \
        --hash=sha256:b7c2e4adcdf7341dc05f31f13d85b6c4eed0e08daafc836e7b3317df41074bab \
        --hash=sha256:b95c477fc619a82a68800ff18599e2704aec6fcf9aa65898b02f0240feeb0af5 \
        --hash=sha256:c1cd659136fca622a9324fa7efa56f711f2e576206754c284b80aa5504fb96e4 \
        --hash=sha256:c8b9347a91acd183db302e198cf582127eb3de98ad185bf9aff773c99e415320 \
        --hash=sha256:c96ee1d1b504a2efa80c9d6d3b71a9884c724dc34d6e67131a64678e09c7a81c \
        --hash=sha256:ce89c26671cd0fcb7967e4be4098ae8775b93cc6376ecd523c815cb5a2146298 \
        --hash=sha256:d643db246d6c07994fbb98b5fa6c6ae8f9b19b4ed24566bc06942b7dad10ad47 \
        --hash=sha256:dec8a348e46266dd0bf20a6edd01b96b0a11042e8654d701444e4a5cebf7f44b \
        --hash=sha256:df9a1e569543006abe0c534a3fa66ee1d72393644fd0d5bc74de57bfdb619573 \
        --hash=sha256:e2087daa49881421a5e703fcff80aa2cbcb5a455cf73114ed5f0ea2a697794c8 \
        --hash=sha256:e74e53951228c3e6ff5141121bd2876e8aecdb27d5f12d01cc519258e0073d8b \
        --hash=sha256:f262547edeec00ad287c8845ac6c9d7d822ef4b00d1832175c4c8fd692e34eba \
        --hash=sha256:fdd6ee615d948a2b68fd293f74a1a73d22e9d075f5d714b95a90ec2cb8da8de0
        # via -r requirements.in

    ----- stderr -----
    Resolved 1 package in [TIME]
    ");

    Ok(())
}

/// See: <https://github.com/astral-sh/uv/issues/12260>
///
/// Compiles `tbump` with `--universal` and snapshots the full output.
// NOTE(review): `windows_filters=false` presumably keeps platform-specific
// output (quoting per issue #12260) visible in the snapshot — confirm.
#[test]
fn compile_quotes() -> Result<()> {
    let context = TestContext::new("3.12");
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str("tbump")?;

    uv_snapshot!(context.filters(), windows_filters=false, context.pip_compile()
        .arg("requirements.in")
        .arg("--universal"), @r"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.in --universal
    cli-ui==0.17.2
        # via tbump
    colorama==0.4.6
        # via cli-ui
    contextlib2==21.6.0
        # via schema
    docopt==0.6.2
        # via tbump
    schema==0.7.5
        # via tbump
    tabulate==0.8.10
        # via cli-ui
    tbump==6.11.0
        # via -r requirements.in
    tomlkit==0.11.8
        # via tbump
    unidecode==1.3.8
        # via cli-ui

    ----- stderr -----
    Resolved 9 packages in [TIME]
    ");

    Ok(())
}

/// Writing compile output to `pyproject.toml` must be rejected: only
/// `requirements.txt`-style destinations are supported by `uv pip compile`.
#[test]
fn compile_invalid_output_file() -> Result<()> {
    let context = TestContext::new("3.12");

    // A minimal input file; its contents are irrelevant to the failure mode.
    context
        .temp_dir
        .child("requirements.in")
        .write_str("anyio==3.7.0")?;

    uv_snapshot!(context
        .pip_compile()
        .arg("requirements.in")
        .arg("-o")
        .arg("pyproject.toml"), @r"
    success: false
    exit_code: 2
    ----- stdout -----

    ----- stderr -----
    error: `pyproject.toml` is not a supported output format for `uv pip compile` (only `requirements.txt`-style output is supported)
    ");

    Ok(())
}

/// The `pylock.toml` output format enforces its filename convention: a
/// destination that doesn't match `pylock.*.toml` (e.g. `test.toml`) is
/// rejected, since later commands wouldn't recognize it as a lockfile.
#[test]
fn pep_751_filename() -> Result<()> {
    let context = TestContext::new("3.12");

    // Input contents don't matter; the failure is about the output name.
    context
        .temp_dir
        .child("requirements.txt")
        .write_str("iniconfig")?;

    uv_snapshot!(context.filters(), context
        .pip_compile()
        .arg("requirements.txt")
        .arg("--universal")
        .arg("--format")
        .arg("pylock.toml")
        .arg("-o")
        .arg("test.toml"), @r"
    success: false
    exit_code: 2
    ----- stdout -----

    ----- stderr -----
    error: Expected the output filename to start with `pylock.` and end with `.toml` (e.g., `pylock.toml`, `pylock.dev.toml`); `test.toml` won't be recognized as a `pylock.toml` file in subsequent commands
    ");

    Ok(())
}

/// Compiling with `-o pylock.toml` emits a PEP 751 lockfile: a registry
/// package with both sdist and wheel entries. The lockfile is then consumed
/// by `uv pip sync --preview` to install the package.
#[test]
fn pep_751_compile_registry_wheel() -> Result<()> {
    let context = TestContext::new("3.12");

    let requirements_txt = context.temp_dir.child("requirements.txt");
    requirements_txt.write_str("iniconfig")?;

    uv_snapshot!(context.filters(), context
        .pip_compile()
        .arg("requirements.txt")
        .arg("--universal")
        .arg("-o")
        .arg("pylock.toml"), @r#"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.txt --universal -o pylock.toml
    lock-version = "1.0"
    created-by = "uv"
    requires-python = ">=3.12.[X]"

    [[packages]]
    name = "iniconfig"
    version = "2.0.0"
    sdist = { url = "https://files.pythonhosted.org/packages/d7/4b/cbd8e699e64a6f16ca3a8220661b5f83792b3017d0f79807cb8708d33913/iniconfig-2.0.0.tar.gz", upload-time = 2023-01-07T11:08:11Z, size = 4646, hashes = { sha256 = "2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3" } }
    wheels = [{ url = "https://files.pythonhosted.org/packages/ef/a6/62565a6e1cf69e10f5727360368e451d4b7f58beeac6173dc9db836a5b46/iniconfig-2.0.0-py3-none-any.whl", upload-time = 2023-01-07T11:08:09Z, size = 5892, hashes = { sha256 = "b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374" } }]

    ----- stderr -----
    Resolved 1 package in [TIME]
    "#);

    // Round-trip: the generated lockfile installs cleanly.
    uv_snapshot!(context.filters(), context.pip_sync()
        .arg("--preview")
        .arg("pylock.toml"), @r"
    success: true
    exit_code: 0
    ----- stdout -----

    ----- stderr -----
    Prepared 1 package in [TIME]
    Installed 1 package in [TIME]
     + iniconfig==2.0.0
    "
    );

    Ok(())
}

/// Compiling a package that only ships an sdist into a PEP 751 lockfile:
/// the entry carries `sdist` but no `wheels`. The lockfile is then consumed
/// by `uv pip sync --preview` to build and install from source.
#[test]
fn pep_751_compile_registry_sdist() -> Result<()> {
    let context = TestContext::new("3.12").with_exclude_newer("2025-01-29T00:00:00Z");

    let requirements_txt = context.temp_dir.child("requirements.txt");
    requirements_txt.write_str("source-distribution")?;

    uv_snapshot!(context.filters(), context
        .pip_compile()
        .arg("requirements.txt")
        .arg("--universal")
        .arg("-o")
        .arg("pylock.toml"), @r#"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.txt --universal -o pylock.toml
    lock-version = "1.0"
    created-by = "uv"
    requires-python = ">=3.12.[X]"

    [[packages]]
    name = "source-distribution"
    version = "0.0.3"
    sdist = { url = "https://files.pythonhosted.org/packages/1f/e5/5b016c945d745f8b108e759d428341488a6aee8f51f07c6c4e33498bb91f/source_distribution-0.0.3.tar.gz", upload-time = 2024-11-03T02:35:36Z, size = 2166, hashes = { sha256 = "be5895c175dbca2d91709a6ab7d5f28e1794272db551ae9a5faf3ae2ed74c3d8" } }

    ----- stderr -----
    Resolved 1 package in [TIME]
    "#);

    // Round-trip: the generated lockfile installs cleanly from the sdist.
    uv_snapshot!(context.filters(), context.pip_sync()
        .arg("--preview")
        .arg("pylock.toml"), @r"
    success: true
    exit_code: 0
    ----- stdout -----

    ----- stderr -----
    Prepared 1 package in [TIME]
    Installed 1 package in [TIME]
     + source-distribution==0.0.3
    "
    );

    Ok(())
}

/// A local directory dependency (via `tool.uv.sources` and a `./foo` entry in
/// `requirements.txt`) appears in the PEP 751 lockfile as a `directory`
/// package with a relative path; the lockfile then installs via
/// `uv pip sync --preview`, building the local project in place.
#[test]
fn pep_751_compile_directory() -> Result<()> {
    let context = TestContext::new("3.12");

    // Create a local dependency in a subdirectory.
    let pyproject_toml = context.temp_dir.child("foo").child("pyproject.toml");
    pyproject_toml.write_str(
        r#"
        [project]
        name = "foo"
        version = "1.0.0"
        dependencies = ["anyio"]

        [build-system]
        requires = ["hatchling"]
        build-backend = "hatchling.build"
        "#,
    )?;
    // Minimal package layout so the hatchling build succeeds.
    context
        .temp_dir
        .child("foo")
        .child("src")
        .child("foo")
        .child("__init__.py")
        .touch()?;

    let requirements_txt = context.temp_dir.child("requirements.txt");
    requirements_txt.write_str("./foo")?;

    let pyproject_toml = context.temp_dir.child("pyproject.toml");
    pyproject_toml.write_str(
        r#"
        [project]
        name = "project"
        version = "0.1.0"
        requires-python = ">=3.12"
        dependencies = ["foo"]

        [tool.uv.sources]
        foo = { path = "foo" }
        "#,
    )?;

    uv_snapshot!(context.filters(), context
        .pip_compile()
        .arg("requirements.txt")
        .arg("--universal")
        .arg("-o")
        .arg("pylock.toml"), @r#"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.txt --universal -o pylock.toml
    lock-version = "1.0"
    created-by = "uv"
    requires-python = ">=3.12.[X]"

    [[packages]]
    name = "anyio"
    version = "4.3.0"
    sdist = { url = "https://files.pythonhosted.org/packages/db/4d/3970183622f0330d3c23d9b8a5f52e365e50381fd484d08e3285104333d3/anyio-4.3.0.tar.gz", upload-time = 2024-02-19T08:36:28Z, size = 159642, hashes = { sha256 = "f75253795a87df48568485fd18cdd2a3fa5c4f7c5be8e5e36637733fce06fed6" } }
    wheels = [{ url = "https://files.pythonhosted.org/packages/14/fd/2f20c40b45e4fb4324834aea24bd4afdf1143390242c0b33774da0e2e34f/anyio-4.3.0-py3-none-any.whl", upload-time = 2024-02-19T08:36:26Z, size = 85584, hashes = { sha256 = "048e05d0f6caeed70d731f3db756d35dcc1f35747c8c403364a8332c630441b8" } }]

    [[packages]]
    name = "foo"
    directory = { path = "foo" }

    [[packages]]
    name = "idna"
    version = "3.6"
    sdist = { url = "https://files.pythonhosted.org/packages/bf/3f/ea4b9117521a1e9c50344b909be7886dd00a519552724809bb1f486986c2/idna-3.6.tar.gz", upload-time = 2023-11-25T15:40:54Z, size = 175426, hashes = { sha256 = "9ecdbbd083b06798ae1e86adcbfe8ab1479cf864e4ee30fe4e46a003d12491ca" } }
    wheels = [{ url = "https://files.pythonhosted.org/packages/c2/e7/a82b05cf63a603df6e68d59ae6a68bf5064484a0718ea5033660af4b54a9/idna-3.6-py3-none-any.whl", upload-time = 2023-11-25T15:40:52Z, size = 61567, hashes = { sha256 = "c05567e9c24a6b9faaa835c4821bad0590fbb9d5779e7caa6e1cc4978e7eb24f" } }]

    [[packages]]
    name = "sniffio"
    version = "1.3.1"
    sdist = { url = "https://files.pythonhosted.org/packages/a2/87/a6771e1546d97e7e041b6ae58d80074f81b7d5121207425c964ddf5cfdbd/sniffio-1.3.1.tar.gz", upload-time = 2024-02-25T23:20:04Z, size = 20372, hashes = { sha256 = "f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc" } }
    wheels = [{ url = "https://files.pythonhosted.org/packages/e9/44/75a9c9421471a6c4805dbf2356f7c181a29c1879239abab1ea2cc8f38b40/sniffio-1.3.1-py3-none-any.whl", upload-time = 2024-02-25T23:20:01Z, size = 10235, hashes = { sha256 = "2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2" } }]

    ----- stderr -----
    Resolved 4 packages in [TIME]
    "#);

    // Round-trip: the lockfile installs, including the local directory build.
    uv_snapshot!(context.filters(), context.pip_sync()
        .arg("--preview")
        .arg("pylock.toml"), @r"
    success: true
    exit_code: 0
    ----- stdout -----

    ----- stderr -----
    Prepared 4 packages in [TIME]
    Installed 4 packages in [TIME]
     + anyio==4.3.0
     + foo==1.0.0 (from file://[TEMP_DIR]/foo)
     + idna==3.6
     + sniffio==1.3.1
    "
    );

    Ok(())
}

/// A Git requirement appears in the PEP 751 lockfile as a `vcs` package with
/// the requested revision pinned to a concrete commit id; the lockfile then
/// installs from that exact commit via `uv pip sync --preview`.
#[test]
#[cfg(feature = "git")]
fn pep_751_compile_git() -> Result<()> {
    let context = TestContext::new("3.12");

    let requirements_txt = context.temp_dir.child("requirements.txt");
    requirements_txt.write_str(
        "uv-public-pypackage @ git+https://github.com/astral-test/uv-public-pypackage.git@0.0.1",
    )?;

    uv_snapshot!(context.filters(), context
        .pip_compile()
        .arg("requirements.txt")
        .arg("--universal")
        .arg("-o")
        .arg("pylock.toml"), @r#"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.txt --universal -o pylock.toml
    lock-version = "1.0"
    created-by = "uv"
    requires-python = ">=3.12.[X]"

    [[packages]]
    name = "uv-public-pypackage"
    version = "0.1.0"
    vcs = { type = "git", url = "https://github.com/astral-test/uv-public-pypackage.git", requested-revision = "0.0.1", commit-id = "0dacfd662c64cb4ceb16e6cf65a157a8b715b979" }

    ----- stderr -----
    Resolved 1 package in [TIME]
    "#);

    // Round-trip: installation resolves the pinned commit, not the tag.
    uv_snapshot!(context.filters(), context.pip_sync()
        .arg("--preview")
        .arg("pylock.toml"), @r"
    success: true
    exit_code: 0
    ----- stdout -----

    ----- stderr -----
    Prepared 1 package in [TIME]
    Installed 1 package in [TIME]
     + uv-public-pypackage==0.1.0 (from git+https://github.com/astral-test/uv-public-pypackage.git@0dacfd662c64cb4ceb16e6cf65a157a8b715b979)
    "
    );

    Ok(())
}

/// Lock a direct-URL wheel requirement into `pylock.toml`, then install from it.
///
/// The direct-URL dependency is recorded as an `archive` table (URL + hash), while
/// transitive registry dependencies get `sdist`/`wheels` entries.
#[test]
fn pep_751_compile_url_wheel() -> Result<()> {
    let context = TestContext::new("3.12");

    let requirements_txt = context.temp_dir.child("requirements.txt");
    requirements_txt.write_str(
        "anyio @ https://files.pythonhosted.org/packages/14/fd/2f20c40b45e4fb4324834aea24bd4afdf1143390242c0b33774da0e2e34f/anyio-4.3.0-py3-none-any.whl",
    )?;

    uv_snapshot!(context.filters(), context
        .pip_compile()
        .arg("requirements.txt")
        .arg("--universal")
        .arg("-o")
        .arg("pylock.toml"), @r#"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.txt --universal -o pylock.toml
    lock-version = "1.0"
    created-by = "uv"
    requires-python = ">=3.12.[X]"

    [[packages]]
    name = "anyio"
    version = "4.3.0"
    archive = { url = "https://files.pythonhosted.org/packages/14/fd/2f20c40b45e4fb4324834aea24bd4afdf1143390242c0b33774da0e2e34f/anyio-4.3.0-py3-none-any.whl", hashes = { sha256 = "048e05d0f6caeed70d731f3db756d35dcc1f35747c8c403364a8332c630441b8" } }

    [[packages]]
    name = "idna"
    version = "3.6"
    sdist = { url = "https://files.pythonhosted.org/packages/bf/3f/ea4b9117521a1e9c50344b909be7886dd00a519552724809bb1f486986c2/idna-3.6.tar.gz", upload-time = 2023-11-25T15:40:54Z, size = 175426, hashes = { sha256 = "9ecdbbd083b06798ae1e86adcbfe8ab1479cf864e4ee30fe4e46a003d12491ca" } }
    wheels = [{ url = "https://files.pythonhosted.org/packages/c2/e7/a82b05cf63a603df6e68d59ae6a68bf5064484a0718ea5033660af4b54a9/idna-3.6-py3-none-any.whl", upload-time = 2023-11-25T15:40:52Z, size = 61567, hashes = { sha256 = "c05567e9c24a6b9faaa835c4821bad0590fbb9d5779e7caa6e1cc4978e7eb24f" } }]

    [[packages]]
    name = "sniffio"
    version = "1.3.1"
    sdist = { url = "https://files.pythonhosted.org/packages/a2/87/a6771e1546d97e7e041b6ae58d80074f81b7d5121207425c964ddf5cfdbd/sniffio-1.3.1.tar.gz", upload-time = 2024-02-25T23:20:04Z, size = 20372, hashes = { sha256 = "f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc" } }
    wheels = [{ url = "https://files.pythonhosted.org/packages/e9/44/75a9c9421471a6c4805dbf2356f7c181a29c1879239abab1ea2cc8f38b40/sniffio-1.3.1-py3-none-any.whl", upload-time = 2024-02-25T23:20:01Z, size = 10235, hashes = { sha256 = "2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2" } }]

    ----- stderr -----
    Resolved 3 packages in [TIME]
    "#);

    // Install from the lockfile; the direct-URL wheel is reported with its source URL.
    uv_snapshot!(context.filters(), context.pip_sync()
        .arg("--preview")
        .arg("pylock.toml"), @r"
    success: true
    exit_code: 0
    ----- stdout -----

    ----- stderr -----
    Prepared 2 packages in [TIME]
    Installed 3 packages in [TIME]
     + anyio==4.3.0 (from https://files.pythonhosted.org/packages/14/fd/2f20c40b45e4fb4324834aea24bd4afdf1143390242c0b33774da0e2e34f/anyio-4.3.0-py3-none-any.whl)
     + idna==3.6
     + sniffio==1.3.1
    "
    );

    Ok(())
}

/// Lock a direct-URL source-distribution requirement into `pylock.toml`, then install from it.
///
/// Mirrors `pep_751_compile_url_wheel`, but the direct URL points at an sdist
/// (`.tar.gz`), which is still recorded as an `archive` table with its hash.
#[test]
fn pep_751_compile_url_sdist() -> Result<()> {
    let context = TestContext::new("3.12");

    let requirements_txt = context.temp_dir.child("requirements.txt");
    requirements_txt.write_str(
        "anyio @ https://files.pythonhosted.org/packages/db/4d/3970183622f0330d3c23d9b8a5f52e365e50381fd484d08e3285104333d3/anyio-4.3.0.tar.gz",
    )?;

    uv_snapshot!(context.filters(), context
        .pip_compile()
        .arg("requirements.txt")
        .arg("--universal")
        .arg("-o")
        .arg("pylock.toml"), @r#"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.txt --universal -o pylock.toml
    lock-version = "1.0"
    created-by = "uv"
    requires-python = ">=3.12.[X]"

    [[packages]]
    name = "anyio"
    version = "4.3.0"
    archive = { url = "https://files.pythonhosted.org/packages/db/4d/3970183622f0330d3c23d9b8a5f52e365e50381fd484d08e3285104333d3/anyio-4.3.0.tar.gz", hashes = { sha256 = "f75253795a87df48568485fd18cdd2a3fa5c4f7c5be8e5e36637733fce06fed6" } }

    [[packages]]
    name = "idna"
    version = "3.6"
    sdist = { url = "https://files.pythonhosted.org/packages/bf/3f/ea4b9117521a1e9c50344b909be7886dd00a519552724809bb1f486986c2/idna-3.6.tar.gz", upload-time = 2023-11-25T15:40:54Z, size = 175426, hashes = { sha256 = "9ecdbbd083b06798ae1e86adcbfe8ab1479cf864e4ee30fe4e46a003d12491ca" } }
    wheels = [{ url = "https://files.pythonhosted.org/packages/c2/e7/a82b05cf63a603df6e68d59ae6a68bf5064484a0718ea5033660af4b54a9/idna-3.6-py3-none-any.whl", upload-time = 2023-11-25T15:40:52Z, size = 61567, hashes = { sha256 = "c05567e9c24a6b9faaa835c4821bad0590fbb9d5779e7caa6e1cc4978e7eb24f" } }]

    [[packages]]
    name = "sniffio"
    version = "1.3.1"
    sdist = { url = "https://files.pythonhosted.org/packages/a2/87/a6771e1546d97e7e041b6ae58d80074f81b7d5121207425c964ddf5cfdbd/sniffio-1.3.1.tar.gz", upload-time = 2024-02-25T23:20:04Z, size = 20372, hashes = { sha256 = "f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc" } }
    wheels = [{ url = "https://files.pythonhosted.org/packages/e9/44/75a9c9421471a6c4805dbf2356f7c181a29c1879239abab1ea2cc8f38b40/sniffio-1.3.1-py3-none-any.whl", upload-time = 2024-02-25T23:20:01Z, size = 10235, hashes = { sha256 = "2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2" } }]

    ----- stderr -----
    Resolved 3 packages in [TIME]
    "#);

    // Install from the lockfile; the sdist requires a build, so all three packages are prepared.
    uv_snapshot!(context.filters(), context.pip_sync()
        .arg("--preview")
        .arg("pylock.toml"), @r"
    success: true
    exit_code: 0
    ----- stdout -----

    ----- stderr -----
    Prepared 3 packages in [TIME]
    Installed 3 packages in [TIME]
     + anyio==4.3.0 (from https://files.pythonhosted.org/packages/db/4d/3970183622f0330d3c23d9b8a5f52e365e50381fd484d08e3285104333d3/anyio-4.3.0.tar.gz)
     + idna==3.6
     + sniffio==1.3.1
    "
    );

    Ok(())
}

/// Lock a local wheel (referenced by relative path) into `pylock.toml`, then install from it.
///
/// Also verifies that the `archive.path` entry is written relative to the output
/// `pylock.toml`, not the working directory, when `-o` points into a subdirectory.
#[test]
fn pep_751_compile_path_wheel() -> Result<()> {
    let context = TestContext::new("3.12");

    // Download the source.
    let archive = context.temp_dir.child("iniconfig-2.0.0-py3-none-any.whl");
    download_to_disk(
        "https://files.pythonhosted.org/packages/ef/a6/62565a6e1cf69e10f5727360368e451d4b7f58beeac6173dc9db836a5b46/iniconfig-2.0.0-py3-none-any.whl",
        &archive,
    );

    let requirements_txt = context.temp_dir.child("requirements.txt");
    requirements_txt.write_str("./iniconfig-2.0.0-py3-none-any.whl")?;

    // Compile with the output file alongside the wheel: the path is emitted as-is.
    uv_snapshot!(context.filters(), context
        .pip_compile()
        .arg("requirements.txt")
        .arg("--universal")
        .arg("-o")
        .arg("pylock.toml"), @r#"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.txt --universal -o pylock.toml
    lock-version = "1.0"
    created-by = "uv"
    requires-python = ">=3.12.[X]"

    [[packages]]
    name = "iniconfig"
    version = "2.0.0"
    archive = { path = "iniconfig-2.0.0-py3-none-any.whl", hashes = { sha256 = "b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374" } }

    ----- stderr -----
    Resolved 1 package in [TIME]
    "#);

    // Install from the generated lockfile; the local wheel is installed from its file path.
    uv_snapshot!(context.filters(), context.pip_sync()
        .arg("--preview")
        .arg("pylock.toml"), @r"
    success: true
    exit_code: 0
    ----- stdout -----

    ----- stderr -----
    Installed 1 package in [TIME]
     + iniconfig==2.0.0 (from file://[TEMP_DIR]/iniconfig-2.0.0-py3-none-any.whl)
    "
    );

    // Ensure that the path is relative to the output `pylock.toml` file.
    uv_snapshot!(context.filters(), context
        .pip_compile()
        .arg("requirements.txt")
        .arg("--universal")
        .arg("-o")
        .arg("nested/pylock.toml"), @r#"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.txt --universal -o nested/pylock.toml
    lock-version = "1.0"
    created-by = "uv"
    requires-python = ">=3.12.[X]"

    [[packages]]
    name = "iniconfig"
    version = "2.0.0"
    archive = { path = "../iniconfig-2.0.0-py3-none-any.whl", hashes = { sha256 = "b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374" } }

    ----- stderr -----
    Resolved 1 package in [TIME]
    "#);

    Ok(())
}

/// Lock a local source distribution (referenced by relative path) into `pylock.toml`,
/// then install from it.
///
/// Mirrors `pep_751_compile_path_wheel`, but with an sdist (`.tar.gz`); also verifies
/// the `archive.path` is written relative to the output `pylock.toml` location.
#[test]
fn pep_751_compile_path_sdist() -> Result<()> {
    let context = TestContext::new("3.12");

    // Download the source.
    let archive = context.temp_dir.child("iniconfig-2.0.0.tar.gz");
    download_to_disk(
        "https://files.pythonhosted.org/packages/d7/4b/cbd8e699e64a6f16ca3a8220661b5f83792b3017d0f79807cb8708d33913/iniconfig-2.0.0.tar.gz",
        &archive,
    );

    let requirements_txt = context.temp_dir.child("requirements.txt");
    requirements_txt.write_str("./iniconfig-2.0.0.tar.gz")?;

    // Compile with the output file alongside the sdist: the path is emitted as-is.
    uv_snapshot!(context.filters(), context
        .pip_compile()
        .arg("requirements.txt")
        .arg("--universal")
        .arg("-o")
        .arg("pylock.toml"), @r#"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.txt --universal -o pylock.toml
    lock-version = "1.0"
    created-by = "uv"
    requires-python = ">=3.12.[X]"

    [[packages]]
    name = "iniconfig"
    version = "2.0.0"
    archive = { path = "iniconfig-2.0.0.tar.gz", hashes = { sha256 = "2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3" } }

    ----- stderr -----
    Resolved 1 package in [TIME]
    "#);

    // Install from the generated lockfile; the sdist is built, then installed from its file path.
    uv_snapshot!(context.filters(), context.pip_sync()
        .arg("--preview")
        .arg("pylock.toml"), @r"
    success: true
    exit_code: 0
    ----- stdout -----

    ----- stderr -----
    Prepared 1 package in [TIME]
    Installed 1 package in [TIME]
     + iniconfig==2.0.0 (from file://[TEMP_DIR]/iniconfig-2.0.0.tar.gz)
    "
    );

    // Ensure that the path is relative to the output `pylock.toml` file.
    uv_snapshot!(context.filters(), context
        .pip_compile()
        .arg("requirements.txt")
        .arg("--universal")
        .arg("-o")
        .arg("nested/pylock.toml"), @r#"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.txt --universal -o nested/pylock.toml
    lock-version = "1.0"
    created-by = "uv"
    requires-python = ">=3.12.[X]"

    [[packages]]
    name = "iniconfig"
    version = "2.0.0"
    archive = { path = "../iniconfig-2.0.0.tar.gz", hashes = { sha256 = "2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3" } }

    ----- stderr -----
    Resolved 1 package in [TIME]
    "#);

    Ok(())
}

/// Verify that an existing `pylock.toml` output file acts as a preference source
/// on re-compilation.
///
/// After loosening a pinned requirement, the previously locked versions should be
/// retained — unless `--upgrade-package` (per-package) or `--upgrade` (all) is passed.
#[test]
fn pep_751_compile_preferences() -> Result<()> {
    let context = TestContext::new("3.12");

    let requirements_txt = context.temp_dir.child("requirements.txt");
    requirements_txt.write_str(indoc::indoc! {r"
        anyio==3.0.0
        idna==3.0.0
    "})?;

    // Initial compile: pins `anyio==3.0.0` and `idna==3.0` in the lockfile.
    uv_snapshot!(context.filters(), context
        .pip_compile()
        .arg("requirements.txt")
        .arg("--universal")
        .arg("-o")
        .arg("pylock.toml"), @r#"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.txt --universal -o pylock.toml
    lock-version = "1.0"
    created-by = "uv"
    requires-python = ">=3.12.[X]"

    [[packages]]
    name = "anyio"
    version = "3.0.0"
    sdist = { url = "https://files.pythonhosted.org/packages/99/0d/65165f99e5f4f3b4c43a5ed9db0fb7aa655f5a58f290727a30528a87eb45/anyio-3.0.0.tar.gz", upload-time = 2021-04-20T14:02:14Z, size = 116952, hashes = { sha256 = "b553598332c050af19f7d41f73a7790142f5bc3d5eb8bd82f5e515ec22019bd9" } }
    wheels = [{ url = "https://files.pythonhosted.org/packages/3b/49/ebee263b69fe243bd1fd0a88bc6bb0f7732bf1794ba3273cb446351f9482/anyio-3.0.0-py3-none-any.whl", upload-time = 2021-04-20T14:02:13Z, size = 72182, hashes = { sha256 = "e71c3d9d72291d12056c0265d07c6bbedf92332f78573e278aeb116f24f30395" } }]

    [[packages]]
    name = "idna"
    version = "3.0"
    sdist = { url = "https://files.pythonhosted.org/packages/2f/2e/bfe821bd26194fb474e0932df8ed82e24bd312ba628a8644d93c5a28b5d4/idna-3.0.tar.gz", upload-time = 2021-01-01T05:58:25Z, size = 180786, hashes = { sha256 = "c9a26e10e5558412384fac891eefb41957831d31be55f1e2c98ed97a70abb969" } }
    wheels = [{ url = "https://files.pythonhosted.org/packages/0f/6b/3a878f15ef3324754bf4780f8f047d692d9860be894ff8fb3135cef8bed8/idna-3.0-py2.py3-none-any.whl", upload-time = 2021-01-01T05:58:22Z, size = 58618, hashes = { sha256 = "320229aadbdfc597bc28876748cc0c9d04d476e0fe6caacaaddea146365d9f63" } }]

    [[packages]]
    name = "sniffio"
    version = "1.3.1"
    sdist = { url = "https://files.pythonhosted.org/packages/a2/87/a6771e1546d97e7e041b6ae58d80074f81b7d5121207425c964ddf5cfdbd/sniffio-1.3.1.tar.gz", upload-time = 2024-02-25T23:20:04Z, size = 20372, hashes = { sha256 = "f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc" } }
    wheels = [{ url = "https://files.pythonhosted.org/packages/e9/44/75a9c9421471a6c4805dbf2356f7c181a29c1879239abab1ea2cc8f38b40/sniffio-1.3.1-py3-none-any.whl", upload-time = 2024-02-25T23:20:01Z, size = 10235, hashes = { sha256 = "2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2" } }]

    ----- stderr -----
    Resolved 3 packages in [TIME]
    "#);

    // Modify the requirements to loosen the `anyio` version.
    requirements_txt.write_str("anyio")?;

    // The `anyio` version should be retained, since we respect the existing preferences.
    uv_snapshot!(context.filters(), context
        .pip_compile()
        .arg("requirements.txt")
        .arg("--universal")
        .arg("-o")
        .arg("pylock.toml"), @r#"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.txt --universal -o pylock.toml
    lock-version = "1.0"
    created-by = "uv"
    requires-python = ">=3.12.[X]"

    [[packages]]
    name = "anyio"
    version = "3.0.0"
    sdist = { url = "https://files.pythonhosted.org/packages/99/0d/65165f99e5f4f3b4c43a5ed9db0fb7aa655f5a58f290727a30528a87eb45/anyio-3.0.0.tar.gz", upload-time = 2021-04-20T14:02:14Z, size = 116952, hashes = { sha256 = "b553598332c050af19f7d41f73a7790142f5bc3d5eb8bd82f5e515ec22019bd9" } }
    wheels = [{ url = "https://files.pythonhosted.org/packages/3b/49/ebee263b69fe243bd1fd0a88bc6bb0f7732bf1794ba3273cb446351f9482/anyio-3.0.0-py3-none-any.whl", upload-time = 2021-04-20T14:02:13Z, size = 72182, hashes = { sha256 = "e71c3d9d72291d12056c0265d07c6bbedf92332f78573e278aeb116f24f30395" } }]

    [[packages]]
    name = "idna"
    version = "3.0"
    sdist = { url = "https://files.pythonhosted.org/packages/2f/2e/bfe821bd26194fb474e0932df8ed82e24bd312ba628a8644d93c5a28b5d4/idna-3.0.tar.gz", upload-time = 2021-01-01T05:58:25Z, size = 180786, hashes = { sha256 = "c9a26e10e5558412384fac891eefb41957831d31be55f1e2c98ed97a70abb969" } }
    wheels = [{ url = "https://files.pythonhosted.org/packages/0f/6b/3a878f15ef3324754bf4780f8f047d692d9860be894ff8fb3135cef8bed8/idna-3.0-py2.py3-none-any.whl", upload-time = 2021-01-01T05:58:22Z, size = 58618, hashes = { sha256 = "320229aadbdfc597bc28876748cc0c9d04d476e0fe6caacaaddea146365d9f63" } }]

    [[packages]]
    name = "sniffio"
    version = "1.3.1"
    sdist = { url = "https://files.pythonhosted.org/packages/a2/87/a6771e1546d97e7e041b6ae58d80074f81b7d5121207425c964ddf5cfdbd/sniffio-1.3.1.tar.gz", upload-time = 2024-02-25T23:20:04Z, size = 20372, hashes = { sha256 = "f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc" } }
    wheels = [{ url = "https://files.pythonhosted.org/packages/e9/44/75a9c9421471a6c4805dbf2356f7c181a29c1879239abab1ea2cc8f38b40/sniffio-1.3.1-py3-none-any.whl", upload-time = 2024-02-25T23:20:01Z, size = 10235, hashes = { sha256 = "2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2" } }]

    ----- stderr -----
    Resolved 3 packages in [TIME]
    "#);

    // Unless we pass `--upgrade-package`: `idna` is upgraded, `anyio` stays pinned.
    uv_snapshot!(context.filters(), context
        .pip_compile()
        .arg("requirements.txt")
        .arg("--universal")
        .arg("-o")
        .arg("pylock.toml")
        .arg("--upgrade-package")
        .arg("idna"), @r#"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.txt --universal -o pylock.toml
    lock-version = "1.0"
    created-by = "uv"
    requires-python = ">=3.12.[X]"

    [[packages]]
    name = "anyio"
    version = "3.0.0"
    sdist = { url = "https://files.pythonhosted.org/packages/99/0d/65165f99e5f4f3b4c43a5ed9db0fb7aa655f5a58f290727a30528a87eb45/anyio-3.0.0.tar.gz", upload-time = 2021-04-20T14:02:14Z, size = 116952, hashes = { sha256 = "b553598332c050af19f7d41f73a7790142f5bc3d5eb8bd82f5e515ec22019bd9" } }
    wheels = [{ url = "https://files.pythonhosted.org/packages/3b/49/ebee263b69fe243bd1fd0a88bc6bb0f7732bf1794ba3273cb446351f9482/anyio-3.0.0-py3-none-any.whl", upload-time = 2021-04-20T14:02:13Z, size = 72182, hashes = { sha256 = "e71c3d9d72291d12056c0265d07c6bbedf92332f78573e278aeb116f24f30395" } }]

    [[packages]]
    name = "idna"
    version = "3.6"
    sdist = { url = "https://files.pythonhosted.org/packages/bf/3f/ea4b9117521a1e9c50344b909be7886dd00a519552724809bb1f486986c2/idna-3.6.tar.gz", upload-time = 2023-11-25T15:40:54Z, size = 175426, hashes = { sha256 = "9ecdbbd083b06798ae1e86adcbfe8ab1479cf864e4ee30fe4e46a003d12491ca" } }
    wheels = [{ url = "https://files.pythonhosted.org/packages/c2/e7/a82b05cf63a603df6e68d59ae6a68bf5064484a0718ea5033660af4b54a9/idna-3.6-py3-none-any.whl", upload-time = 2023-11-25T15:40:52Z, size = 61567, hashes = { sha256 = "c05567e9c24a6b9faaa835c4821bad0590fbb9d5779e7caa6e1cc4978e7eb24f" } }]

    [[packages]]
    name = "sniffio"
    version = "1.3.1"
    sdist = { url = "https://files.pythonhosted.org/packages/a2/87/a6771e1546d97e7e041b6ae58d80074f81b7d5121207425c964ddf5cfdbd/sniffio-1.3.1.tar.gz", upload-time = 2024-02-25T23:20:04Z, size = 20372, hashes = { sha256 = "f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc" } }
    wheels = [{ url = "https://files.pythonhosted.org/packages/e9/44/75a9c9421471a6c4805dbf2356f7c181a29c1879239abab1ea2cc8f38b40/sniffio-1.3.1-py3-none-any.whl", upload-time = 2024-02-25T23:20:01Z, size = 10235, hashes = { sha256 = "2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2" } }]

    ----- stderr -----
    Resolved 3 packages in [TIME]
    "#);

    // Or `--upgrade`: all preferences are discarded, so `anyio` moves to 4.3.0.
    uv_snapshot!(context.filters(), context
        .pip_compile()
        .arg("requirements.txt")
        .arg("--universal")
        .arg("-o")
        .arg("pylock.toml")
        .arg("--upgrade"), @r#"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.txt --universal -o pylock.toml
    lock-version = "1.0"
    created-by = "uv"
    requires-python = ">=3.12.[X]"

    [[packages]]
    name = "anyio"
    version = "4.3.0"
    sdist = { url = "https://files.pythonhosted.org/packages/db/4d/3970183622f0330d3c23d9b8a5f52e365e50381fd484d08e3285104333d3/anyio-4.3.0.tar.gz", upload-time = 2024-02-19T08:36:28Z, size = 159642, hashes = { sha256 = "f75253795a87df48568485fd18cdd2a3fa5c4f7c5be8e5e36637733fce06fed6" } }
    wheels = [{ url = "https://files.pythonhosted.org/packages/14/fd/2f20c40b45e4fb4324834aea24bd4afdf1143390242c0b33774da0e2e34f/anyio-4.3.0-py3-none-any.whl", upload-time = 2024-02-19T08:36:26Z, size = 85584, hashes = { sha256 = "048e05d0f6caeed70d731f3db756d35dcc1f35747c8c403364a8332c630441b8" } }]

    [[packages]]
    name = "idna"
    version = "3.6"
    sdist = { url = "https://files.pythonhosted.org/packages/bf/3f/ea4b9117521a1e9c50344b909be7886dd00a519552724809bb1f486986c2/idna-3.6.tar.gz", upload-time = 2023-11-25T15:40:54Z, size = 175426, hashes = { sha256 = "9ecdbbd083b06798ae1e86adcbfe8ab1479cf864e4ee30fe4e46a003d12491ca" } }
    wheels = [{ url = "https://files.pythonhosted.org/packages/c2/e7/a82b05cf63a603df6e68d59ae6a68bf5064484a0718ea5033660af4b54a9/idna-3.6-py3-none-any.whl", upload-time = 2023-11-25T15:40:52Z, size = 61567, hashes = { sha256 = "c05567e9c24a6b9faaa835c4821bad0590fbb9d5779e7caa6e1cc4978e7eb24f" } }]

    [[packages]]
    name = "sniffio"
    version = "1.3.1"
    sdist = { url = "https://files.pythonhosted.org/packages/a2/87/a6771e1546d97e7e041b6ae58d80074f81b7d5121207425c964ddf5cfdbd/sniffio-1.3.1.tar.gz", upload-time = 2024-02-25T23:20:04Z, size = 20372, hashes = { sha256 = "f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc" } }
    wheels = [{ url = "https://files.pythonhosted.org/packages/e9/44/75a9c9421471a6c4805dbf2356f7c181a29c1879239abab1ea2cc8f38b40/sniffio-1.3.1-py3-none-any.whl", upload-time = 2024-02-25T23:20:01Z, size = 10235, hashes = { sha256 = "2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2" } }]

    ----- stderr -----
    Resolved 3 packages in [TIME]
    "#);

    Ok(())
}

/// Verify that `--emit-index-url` produces a warning (but still succeeds) when the
/// output target is a `pylock.toml` file, since that option only applies to
/// `requirements.txt`-style output.
#[test]
fn pep_751_compile_warn() -> Result<()> {
    let context = TestContext::new("3.12");

    let requirements_txt = context.temp_dir.child("requirements.txt");
    requirements_txt.write_str("iniconfig")?;

    uv_snapshot!(context.filters(), context
        .pip_compile()
        .arg("requirements.txt")
        .arg("--universal")
        .arg("-o")
        .arg("pylock.toml")
        .arg("--emit-index-url"), @r#"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.txt --universal -o pylock.toml --emit-index-url
    lock-version = "1.0"
    created-by = "uv"
    requires-python = ">=3.12.[X]"

    [[packages]]
    name = "iniconfig"
    version = "2.0.0"
    sdist = { url = "https://files.pythonhosted.org/packages/d7/4b/cbd8e699e64a6f16ca3a8220661b5f83792b3017d0f79807cb8708d33913/iniconfig-2.0.0.tar.gz", upload-time = 2023-01-07T11:08:11Z, size = 4646, hashes = { sha256 = "2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3" } }
    wheels = [{ url = "https://files.pythonhosted.org/packages/ef/a6/62565a6e1cf69e10f5727360368e451d4b7f58beeac6173dc9db836a5b46/iniconfig-2.0.0-py3-none-any.whl", upload-time = 2023-01-07T11:08:09Z, size = 5892, hashes = { sha256 = "b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374" } }]

    ----- stderr -----
    Resolved 1 package in [TIME]
    warning: The `--emit-index-url` option is not supported for `pylock.toml` output
    "#);

    Ok(())
}

/// Compile to a PEP 751 `pylock.toml` without `--universal`, pinning the
/// target platform via `--python-platform` instead.
///
/// Every wheel for each resolved version is still listed in the lockfile (the
/// Linux run includes macOS and Windows wheels for `black`), but the chosen
/// platform determines which *packages* appear at all: `colorama`, which
/// carries a `sys_platform == 'win32'` marker, is omitted for Linux and
/// included for Windows.
#[test]
fn pep_751_compile_non_universal() -> Result<()> {
    let context = TestContext::new("3.12");

    let requirements_txt = context.temp_dir.child("requirements.txt");
    requirements_txt.write_str("black")?;

    // `colorama` should be excluded, since we're on Linux.
    uv_snapshot!(context.filters(), context
        .pip_compile()
        .arg("requirements.txt")
        .arg("--python-platform")
        .arg("linux")
        .arg("-o")
        .arg("pylock.toml"), @r#"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.txt --python-platform linux -o pylock.toml
    lock-version = "1.0"
    created-by = "uv"
    requires-python = ">=3.12.[X]"

    [[packages]]
    name = "black"
    version = "24.3.0"
    sdist = { url = "https://files.pythonhosted.org/packages/8f/5f/bac24a952668c7482cfdb4ebf91ba57a796c9da8829363a772040c1a3312/black-24.3.0.tar.gz", upload-time = 2024-03-15T19:35:43Z, size = 634292, hashes = { sha256 = "a0c9c4a0771afc6919578cec71ce82a3e31e054904e7197deacbc9382671c41f" } }
    wheels = [
        { url = "https://files.pythonhosted.org/packages/3b/32/1a25d1b83147ca128797a627f429f9dc390eb066805c6aa319bea3ffffa5/black-24.3.0-cp310-cp310-macosx_10_9_x86_64.whl", upload-time = 2024-03-15T19:43:32Z, size = 1587891, hashes = { sha256 = "7d5e026f8da0322b5662fa7a8e752b3fa2dac1c1cbc213c3d7ff9bdd0ab12395" } },
        { url = "https://files.pythonhosted.org/packages/c4/91/6cb204786acc693edc4bf1b9230ffdc3cbfaeb7cd04d3a12fb4b13882a53/black-24.3.0-cp310-cp310-macosx_11_0_arm64.whl", upload-time = 2024-03-15T19:41:59Z, size = 1434886, hashes = { sha256 = "9f50ea1132e2189d8dff0115ab75b65590a3e97de1e143795adb4ce317934995" } },
        { url = "https://files.pythonhosted.org/packages/ef/e4/53b5d07117381f7d5e946a54dd4c62617faad90713649619bbc683769dfe/black-24.3.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", upload-time = 2024-03-15T19:38:22Z, size = 1747400, hashes = { sha256 = "e2af80566f43c85f5797365077fb64a393861a3730bd110971ab7a0c94e873e7" } },
        { url = "https://files.pythonhosted.org/packages/13/9c/f2e7532d11b05add5ab383a9f90be1a49954bf510803f98064b45b42f98e/black-24.3.0-cp310-cp310-win_amd64.whl", upload-time = 2024-03-15T19:39:43Z, size = 1363816, hashes = { sha256 = "4be5bb28e090456adfc1255e03967fb67ca846a03be7aadf6249096100ee32d0" } },
        { url = "https://files.pythonhosted.org/packages/68/df/ceea5828be9c4931cb5a75b7e8fb02971f57524da7a16dfec0d4d575327f/black-24.3.0-cp311-cp311-macosx_10_9_x86_64.whl", upload-time = 2024-03-15T19:45:27Z, size = 1571235, hashes = { sha256 = "4f1373a7808a8f135b774039f61d59e4be7eb56b2513d3d2f02a8b9365b8a8a9" } },
        { url = "https://files.pythonhosted.org/packages/46/5f/30398c5056cb72f883b32b6520ad00042a9d0454b693f70509867db03a80/black-24.3.0-cp311-cp311-macosx_11_0_arm64.whl", upload-time = 2024-03-15T19:43:52Z, size = 1414926, hashes = { sha256 = "aadf7a02d947936ee418777e0247ea114f78aff0d0959461057cae8a04f20597" } },
        { url = "https://files.pythonhosted.org/packages/6b/59/498885b279e890f656ea4300a2671c964acb6d97994ea626479c2e5501b4/black-24.3.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", upload-time = 2024-03-15T19:38:13Z, size = 1725920, hashes = { sha256 = "65c02e4ea2ae09d16314d30912a58ada9a5c4fdfedf9512d23326128ac08ac3d" } },
        { url = "https://files.pythonhosted.org/packages/8f/b0/4bef40c808cc615187db983b75bacdca1c110a229d41ba9887549fac529c/black-24.3.0-cp311-cp311-win_amd64.whl", upload-time = 2024-03-15T19:39:34Z, size = 1372608, hashes = { sha256 = "bf21b7b230718a5f08bd32d5e4f1db7fc8788345c8aea1d155fc17852b3410f5" } },
        { url = "https://files.pythonhosted.org/packages/b6/c6/1d174efa9ff02b22d0124c73fc5f4d4fb006d0d9a081aadc354d05754a13/black-24.3.0-cp312-cp312-macosx_10_9_x86_64.whl", upload-time = 2024-03-15T19:45:20Z, size = 1600822, hashes = { sha256 = "2818cf72dfd5d289e48f37ccfa08b460bf469e67fb7c4abb07edc2e9f16fb63f" } },
        { url = "https://files.pythonhosted.org/packages/d9/ed/704731afffe460b8ff0672623b40fce9fe569f2ee617c15857e4d4440a3a/black-24.3.0-cp312-cp312-macosx_11_0_arm64.whl", upload-time = 2024-03-15T19:45:00Z, size = 1429987, hashes = { sha256 = "4acf672def7eb1725f41f38bf6bf425c8237248bb0804faa3965c036f7672d11" } },
        { url = "https://files.pythonhosted.org/packages/a8/05/8dd038e30caadab7120176d4bc109b7ca2f4457f12eef746b0560a583458/black-24.3.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", upload-time = 2024-03-15T19:38:24Z, size = 1755319, hashes = { sha256 = "c7ed6668cbbfcd231fa0dc1b137d3e40c04c7f786e626b405c62bcd5db5857e4" } },
        { url = "https://files.pythonhosted.org/packages/71/9d/e5fa1ff4ef1940be15a64883c0bb8d2fcf626efec996eab4ae5a8c691d2c/black-24.3.0-cp312-cp312-win_amd64.whl", upload-time = 2024-03-15T19:39:37Z, size = 1385180, hashes = { sha256 = "56f52cfbd3dabe2798d76dbdd299faa046a901041faf2cf33288bc4e6dae57b5" } },
        { url = "https://files.pythonhosted.org/packages/37/76/1f85c4349d6b3424c7672dbc6c4b39ab89372b575801ffdc23d34b023c6f/black-24.3.0-cp38-cp38-macosx_10_9_x86_64.whl", upload-time = 2024-03-15T19:47:26Z, size = 1579568, hashes = { sha256 = "79dcf34b33e38ed1b17434693763301d7ccbd1c5860674a8f871bd15139e7837" } },
        { url = "https://files.pythonhosted.org/packages/ba/24/6d82cde63c1340ea55cb74fd697f62b94b6d6fa7069a1aa216475dfd2a30/black-24.3.0-cp38-cp38-macosx_11_0_arm64.whl", upload-time = 2024-03-15T19:46:18Z, size = 1423188, hashes = { sha256 = "e19cb1c6365fd6dc38a6eae2dcb691d7d83935c10215aef8e6c38edee3f77abd" } },
        { url = "https://files.pythonhosted.org/packages/71/61/48664319cee4f8e22633e075ff101ec6253195b056cb23e0c5f8a5086e87/black-24.3.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", upload-time = 2024-03-15T19:38:15Z, size = 1730623, hashes = { sha256 = "65b76c275e4c1c5ce6e9870911384bff5ca31ab63d19c76811cb1fb162678213" } },
        { url = "https://files.pythonhosted.org/packages/3b/95/ed26a160d7a13d6afb3e94448ec079fb4e37bbedeaf408b6b6dbf67d6cd2/black-24.3.0-cp38-cp38-win_amd64.whl", upload-time = 2024-03-15T19:39:43Z, size = 1370465, hashes = { sha256 = "b5991d523eee14756f3c8d5df5231550ae8993e2286b8014e2fdea7156ed0959" } },
        { url = "https://files.pythonhosted.org/packages/62/f5/78881e9b1c340ccc02d5d4ebe61cfb9140452b3d11272a896b405033511b/black-24.3.0-cp39-cp39-macosx_10_9_x86_64.whl", upload-time = 2024-03-15T19:48:33Z, size = 1587504, hashes = { sha256 = "c45f8dff244b3c431b36e3224b6be4a127c6aca780853574c00faf99258041eb" } },
        { url = "https://files.pythonhosted.org/packages/17/cc/67ba827fe23b39d55e8408937763b2ad21d904d63ca1c60b47d608ee7fb2/black-24.3.0-cp39-cp39-macosx_11_0_arm64.whl", upload-time = 2024-03-15T19:47:39Z, size = 1434037, hashes = { sha256 = "6905238a754ceb7788a73f02b45637d820b2f5478b20fec82ea865e4f5d4d9f7" } },
        { url = "https://files.pythonhosted.org/packages/fa/aa/6a2493c7d3506e9b64edbd0782e21637c376da005eecc546904e47b5cdbf/black-24.3.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", upload-time = 2024-03-15T19:38:16Z, size = 1745481, hashes = { sha256 = "d7de8d330763c66663661a1ffd432274a2f92f07feeddd89ffd085b5744f85e7" } },
        { url = "https://files.pythonhosted.org/packages/18/68/9e86e73b58819624af6797ffe68dd7d09ed90fa1f9eb8d4d675f8c5e6ab0/black-24.3.0-cp39-cp39-win_amd64.whl", upload-time = 2024-03-15T19:39:15Z, size = 1363531, hashes = { sha256 = "7bb041dca0d784697af4646d3b62ba4a6b028276ae878e53f6b4f74ddd6db99f" } },
        { url = "https://files.pythonhosted.org/packages/4d/ea/31770a7e49f3eedfd8cd7b35e78b3a3aaad860400f8673994bc988318135/black-24.3.0-py3-none-any.whl", upload-time = 2024-03-15T19:35:41Z, size = 201493, hashes = { sha256 = "41622020d7120e01d377f74249e677039d20e6344ff5851de8a10f11f513bf93" } },
    ]

    [[packages]]
    name = "click"
    version = "8.1.7"
    sdist = { url = "https://files.pythonhosted.org/packages/96/d3/f04c7bfcf5c1862a2a5b845c6b2b360488cf47af55dfa79c98f6a6bf98b5/click-8.1.7.tar.gz", upload-time = 2023-08-17T17:29:11Z, size = 336121, hashes = { sha256 = "ca9853ad459e787e2192211578cc907e7594e294c7ccc834310722b41b9ca6de" } }
    wheels = [{ url = "https://files.pythonhosted.org/packages/00/2e/d53fa4befbf2cfa713304affc7ca780ce4fc1fd8710527771b58311a3229/click-8.1.7-py3-none-any.whl", upload-time = 2023-08-17T17:29:10Z, size = 97941, hashes = { sha256 = "ae74fb96c20a0277a1d615f1e4d73c8414f5a98db8b799a7931d1582f3390c28" } }]

    [[packages]]
    name = "mypy-extensions"
    version = "1.0.0"
    sdist = { url = "https://files.pythonhosted.org/packages/98/a4/1ab47638b92648243faf97a5aeb6ea83059cc3624972ab6b8d2316078d3f/mypy_extensions-1.0.0.tar.gz", upload-time = 2023-02-04T12:11:27Z, size = 4433, hashes = { sha256 = "75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782" } }
    wheels = [{ url = "https://files.pythonhosted.org/packages/2a/e2/5d3f6ada4297caebe1a2add3b126fe800c96f56dbe5d1988a2cbe0b267aa/mypy_extensions-1.0.0-py3-none-any.whl", upload-time = 2023-02-04T12:11:25Z, size = 4695, hashes = { sha256 = "4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d" } }]

    [[packages]]
    name = "packaging"
    version = "24.0"
    sdist = { url = "https://files.pythonhosted.org/packages/ee/b5/b43a27ac7472e1818c4bafd44430e69605baefe1f34440593e0332ec8b4d/packaging-24.0.tar.gz", upload-time = 2024-03-10T09:39:28Z, size = 147882, hashes = { sha256 = "eb82c5e3e56209074766e6885bb04b8c38a0c015d0a30036ebe7ece34c9989e9" } }
    wheels = [{ url = "https://files.pythonhosted.org/packages/49/df/1fceb2f8900f8639e278b056416d49134fb8d84c5942ffaa01ad34782422/packaging-24.0-py3-none-any.whl", upload-time = 2024-03-10T09:39:25Z, size = 53488, hashes = { sha256 = "2ddfb553fdf02fb784c234c7ba6ccc288296ceabec964ad2eae3777778130bc5" } }]

    [[packages]]
    name = "pathspec"
    version = "0.12.1"
    sdist = { url = "https://files.pythonhosted.org/packages/ca/bc/f35b8446f4531a7cb215605d100cd88b7ac6f44ab3fc94870c120ab3adbf/pathspec-0.12.1.tar.gz", upload-time = 2023-12-10T22:30:45Z, size = 51043, hashes = { sha256 = "a482d51503a1ab33b1c67a6c3813a26953dbdc71c31dacaef9a838c4e29f5712" } }
    wheels = [{ url = "https://files.pythonhosted.org/packages/cc/20/ff623b09d963f88bfde16306a54e12ee5ea43e9b597108672ff3a408aad6/pathspec-0.12.1-py3-none-any.whl", upload-time = 2023-12-10T22:30:43Z, size = 31191, hashes = { sha256 = "a0d503e138a4c123b27490a4f7beda6a01c6f288df0e4a8b79c7eb0dc7b4cc08" } }]

    [[packages]]
    name = "platformdirs"
    version = "4.2.0"
    sdist = { url = "https://files.pythonhosted.org/packages/96/dc/c1d911bf5bb0fdc58cc05010e9f3efe3b67970cef779ba7fbc3183b987a8/platformdirs-4.2.0.tar.gz", upload-time = 2024-01-31T01:00:36Z, size = 20055, hashes = { sha256 = "ef0cc731df711022c174543cb70a9b5bd22e5a9337c8624ef2c2ceb8ddad8768" } }
    wheels = [{ url = "https://files.pythonhosted.org/packages/55/72/4898c44ee9ea6f43396fbc23d9bfaf3d06e01b83698bdf2e4c919deceb7c/platformdirs-4.2.0-py3-none-any.whl", upload-time = 2024-01-31T01:00:34Z, size = 17717, hashes = { sha256 = "0614df2a2f37e1a662acbd8e2b25b92ccf8632929bc6d43467e17fe89c75e068" } }]

    ----- stderr -----
    Resolved 6 packages in [TIME]
    "#);

    // `colorama` should be included, since we're on Windows.
    uv_snapshot!(context.filters(), context
        .pip_compile()
        .arg("requirements.txt")
        .arg("--python-platform")
        .arg("windows")
        .arg("-o")
        .arg("pylock.toml"), @r#"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.txt --python-platform windows -o pylock.toml
    lock-version = "1.0"
    created-by = "uv"
    requires-python = ">=3.12.[X]"

    [[packages]]
    name = "black"
    version = "24.3.0"
    sdist = { url = "https://files.pythonhosted.org/packages/8f/5f/bac24a952668c7482cfdb4ebf91ba57a796c9da8829363a772040c1a3312/black-24.3.0.tar.gz", upload-time = 2024-03-15T19:35:43Z, size = 634292, hashes = { sha256 = "a0c9c4a0771afc6919578cec71ce82a3e31e054904e7197deacbc9382671c41f" } }
    wheels = [
        { url = "https://files.pythonhosted.org/packages/3b/32/1a25d1b83147ca128797a627f429f9dc390eb066805c6aa319bea3ffffa5/black-24.3.0-cp310-cp310-macosx_10_9_x86_64.whl", upload-time = 2024-03-15T19:43:32Z, size = 1587891, hashes = { sha256 = "7d5e026f8da0322b5662fa7a8e752b3fa2dac1c1cbc213c3d7ff9bdd0ab12395" } },
        { url = "https://files.pythonhosted.org/packages/c4/91/6cb204786acc693edc4bf1b9230ffdc3cbfaeb7cd04d3a12fb4b13882a53/black-24.3.0-cp310-cp310-macosx_11_0_arm64.whl", upload-time = 2024-03-15T19:41:59Z, size = 1434886, hashes = { sha256 = "9f50ea1132e2189d8dff0115ab75b65590a3e97de1e143795adb4ce317934995" } },
        { url = "https://files.pythonhosted.org/packages/ef/e4/53b5d07117381f7d5e946a54dd4c62617faad90713649619bbc683769dfe/black-24.3.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", upload-time = 2024-03-15T19:38:22Z, size = 1747400, hashes = { sha256 = "e2af80566f43c85f5797365077fb64a393861a3730bd110971ab7a0c94e873e7" } },
        { url = "https://files.pythonhosted.org/packages/13/9c/f2e7532d11b05add5ab383a9f90be1a49954bf510803f98064b45b42f98e/black-24.3.0-cp310-cp310-win_amd64.whl", upload-time = 2024-03-15T19:39:43Z, size = 1363816, hashes = { sha256 = "4be5bb28e090456adfc1255e03967fb67ca846a03be7aadf6249096100ee32d0" } },
        { url = "https://files.pythonhosted.org/packages/68/df/ceea5828be9c4931cb5a75b7e8fb02971f57524da7a16dfec0d4d575327f/black-24.3.0-cp311-cp311-macosx_10_9_x86_64.whl", upload-time = 2024-03-15T19:45:27Z, size = 1571235, hashes = { sha256 = "4f1373a7808a8f135b774039f61d59e4be7eb56b2513d3d2f02a8b9365b8a8a9" } },
        { url = "https://files.pythonhosted.org/packages/46/5f/30398c5056cb72f883b32b6520ad00042a9d0454b693f70509867db03a80/black-24.3.0-cp311-cp311-macosx_11_0_arm64.whl", upload-time = 2024-03-15T19:43:52Z, size = 1414926, hashes = { sha256 = "aadf7a02d947936ee418777e0247ea114f78aff0d0959461057cae8a04f20597" } },
        { url = "https://files.pythonhosted.org/packages/6b/59/498885b279e890f656ea4300a2671c964acb6d97994ea626479c2e5501b4/black-24.3.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", upload-time = 2024-03-15T19:38:13Z, size = 1725920, hashes = { sha256 = "65c02e4ea2ae09d16314d30912a58ada9a5c4fdfedf9512d23326128ac08ac3d" } },
        { url = "https://files.pythonhosted.org/packages/8f/b0/4bef40c808cc615187db983b75bacdca1c110a229d41ba9887549fac529c/black-24.3.0-cp311-cp311-win_amd64.whl", upload-time = 2024-03-15T19:39:34Z, size = 1372608, hashes = { sha256 = "bf21b7b230718a5f08bd32d5e4f1db7fc8788345c8aea1d155fc17852b3410f5" } },
        { url = "https://files.pythonhosted.org/packages/b6/c6/1d174efa9ff02b22d0124c73fc5f4d4fb006d0d9a081aadc354d05754a13/black-24.3.0-cp312-cp312-macosx_10_9_x86_64.whl", upload-time = 2024-03-15T19:45:20Z, size = 1600822, hashes = { sha256 = "2818cf72dfd5d289e48f37ccfa08b460bf469e67fb7c4abb07edc2e9f16fb63f" } },
        { url = "https://files.pythonhosted.org/packages/d9/ed/704731afffe460b8ff0672623b40fce9fe569f2ee617c15857e4d4440a3a/black-24.3.0-cp312-cp312-macosx_11_0_arm64.whl", upload-time = 2024-03-15T19:45:00Z, size = 1429987, hashes = { sha256 = "4acf672def7eb1725f41f38bf6bf425c8237248bb0804faa3965c036f7672d11" } },
        { url = "https://files.pythonhosted.org/packages/a8/05/8dd038e30caadab7120176d4bc109b7ca2f4457f12eef746b0560a583458/black-24.3.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", upload-time = 2024-03-15T19:38:24Z, size = 1755319, hashes = { sha256 = "c7ed6668cbbfcd231fa0dc1b137d3e40c04c7f786e626b405c62bcd5db5857e4" } },
        { url = "https://files.pythonhosted.org/packages/71/9d/e5fa1ff4ef1940be15a64883c0bb8d2fcf626efec996eab4ae5a8c691d2c/black-24.3.0-cp312-cp312-win_amd64.whl", upload-time = 2024-03-15T19:39:37Z, size = 1385180, hashes = { sha256 = "56f52cfbd3dabe2798d76dbdd299faa046a901041faf2cf33288bc4e6dae57b5" } },
        { url = "https://files.pythonhosted.org/packages/37/76/1f85c4349d6b3424c7672dbc6c4b39ab89372b575801ffdc23d34b023c6f/black-24.3.0-cp38-cp38-macosx_10_9_x86_64.whl", upload-time = 2024-03-15T19:47:26Z, size = 1579568, hashes = { sha256 = "79dcf34b33e38ed1b17434693763301d7ccbd1c5860674a8f871bd15139e7837" } },
        { url = "https://files.pythonhosted.org/packages/ba/24/6d82cde63c1340ea55cb74fd697f62b94b6d6fa7069a1aa216475dfd2a30/black-24.3.0-cp38-cp38-macosx_11_0_arm64.whl", upload-time = 2024-03-15T19:46:18Z, size = 1423188, hashes = { sha256 = "e19cb1c6365fd6dc38a6eae2dcb691d7d83935c10215aef8e6c38edee3f77abd" } },
        { url = "https://files.pythonhosted.org/packages/71/61/48664319cee4f8e22633e075ff101ec6253195b056cb23e0c5f8a5086e87/black-24.3.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", upload-time = 2024-03-15T19:38:15Z, size = 1730623, hashes = { sha256 = "65b76c275e4c1c5ce6e9870911384bff5ca31ab63d19c76811cb1fb162678213" } },
        { url = "https://files.pythonhosted.org/packages/3b/95/ed26a160d7a13d6afb3e94448ec079fb4e37bbedeaf408b6b6dbf67d6cd2/black-24.3.0-cp38-cp38-win_amd64.whl", upload-time = 2024-03-15T19:39:43Z, size = 1370465, hashes = { sha256 = "b5991d523eee14756f3c8d5df5231550ae8993e2286b8014e2fdea7156ed0959" } },
        { url = "https://files.pythonhosted.org/packages/62/f5/78881e9b1c340ccc02d5d4ebe61cfb9140452b3d11272a896b405033511b/black-24.3.0-cp39-cp39-macosx_10_9_x86_64.whl", upload-time = 2024-03-15T19:48:33Z, size = 1587504, hashes = { sha256 = "c45f8dff244b3c431b36e3224b6be4a127c6aca780853574c00faf99258041eb" } },
        { url = "https://files.pythonhosted.org/packages/17/cc/67ba827fe23b39d55e8408937763b2ad21d904d63ca1c60b47d608ee7fb2/black-24.3.0-cp39-cp39-macosx_11_0_arm64.whl", upload-time = 2024-03-15T19:47:39Z, size = 1434037, hashes = { sha256 = "6905238a754ceb7788a73f02b45637d820b2f5478b20fec82ea865e4f5d4d9f7" } },
        { url = "https://files.pythonhosted.org/packages/fa/aa/6a2493c7d3506e9b64edbd0782e21637c376da005eecc546904e47b5cdbf/black-24.3.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", upload-time = 2024-03-15T19:38:16Z, size = 1745481, hashes = { sha256 = "d7de8d330763c66663661a1ffd432274a2f92f07feeddd89ffd085b5744f85e7" } },
        { url = "https://files.pythonhosted.org/packages/18/68/9e86e73b58819624af6797ffe68dd7d09ed90fa1f9eb8d4d675f8c5e6ab0/black-24.3.0-cp39-cp39-win_amd64.whl", upload-time = 2024-03-15T19:39:15Z, size = 1363531, hashes = { sha256 = "7bb041dca0d784697af4646d3b62ba4a6b028276ae878e53f6b4f74ddd6db99f" } },
        { url = "https://files.pythonhosted.org/packages/4d/ea/31770a7e49f3eedfd8cd7b35e78b3a3aaad860400f8673994bc988318135/black-24.3.0-py3-none-any.whl", upload-time = 2024-03-15T19:35:41Z, size = 201493, hashes = { sha256 = "41622020d7120e01d377f74249e677039d20e6344ff5851de8a10f11f513bf93" } },
    ]

    [[packages]]
    name = "click"
    version = "8.1.7"
    sdist = { url = "https://files.pythonhosted.org/packages/96/d3/f04c7bfcf5c1862a2a5b845c6b2b360488cf47af55dfa79c98f6a6bf98b5/click-8.1.7.tar.gz", upload-time = 2023-08-17T17:29:11Z, size = 336121, hashes = { sha256 = "ca9853ad459e787e2192211578cc907e7594e294c7ccc834310722b41b9ca6de" } }
    wheels = [{ url = "https://files.pythonhosted.org/packages/00/2e/d53fa4befbf2cfa713304affc7ca780ce4fc1fd8710527771b58311a3229/click-8.1.7-py3-none-any.whl", upload-time = 2023-08-17T17:29:10Z, size = 97941, hashes = { sha256 = "ae74fb96c20a0277a1d615f1e4d73c8414f5a98db8b799a7931d1582f3390c28" } }]

    [[packages]]
    name = "colorama"
    version = "0.4.6"
    marker = "sys_platform == 'win32'"
    sdist = { url = "https://files.pythonhosted.org/packages/d8/53/6f443c9a4a8358a93a6792e2acffb9d9d5cb0a5cfd8802644b7b1c9a02e4/colorama-0.4.6.tar.gz", upload-time = 2022-10-25T02:36:22Z, size = 27697, hashes = { sha256 = "08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44" } }
    wheels = [{ url = "https://files.pythonhosted.org/packages/d1/d6/3965ed04c63042e047cb6a3e6ed1a63a35087b6a609aa3a15ed8ac56c221/colorama-0.4.6-py2.py3-none-any.whl", upload-time = 2022-10-25T02:36:20Z, size = 25335, hashes = { sha256 = "4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6" } }]

    [[packages]]
    name = "mypy-extensions"
    version = "1.0.0"
    sdist = { url = "https://files.pythonhosted.org/packages/98/a4/1ab47638b92648243faf97a5aeb6ea83059cc3624972ab6b8d2316078d3f/mypy_extensions-1.0.0.tar.gz", upload-time = 2023-02-04T12:11:27Z, size = 4433, hashes = { sha256 = "75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782" } }
    wheels = [{ url = "https://files.pythonhosted.org/packages/2a/e2/5d3f6ada4297caebe1a2add3b126fe800c96f56dbe5d1988a2cbe0b267aa/mypy_extensions-1.0.0-py3-none-any.whl", upload-time = 2023-02-04T12:11:25Z, size = 4695, hashes = { sha256 = "4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d" } }]

    [[packages]]
    name = "packaging"
    version = "24.0"
    sdist = { url = "https://files.pythonhosted.org/packages/ee/b5/b43a27ac7472e1818c4bafd44430e69605baefe1f34440593e0332ec8b4d/packaging-24.0.tar.gz", upload-time = 2024-03-10T09:39:28Z, size = 147882, hashes = { sha256 = "eb82c5e3e56209074766e6885bb04b8c38a0c015d0a30036ebe7ece34c9989e9" } }
    wheels = [{ url = "https://files.pythonhosted.org/packages/49/df/1fceb2f8900f8639e278b056416d49134fb8d84c5942ffaa01ad34782422/packaging-24.0-py3-none-any.whl", upload-time = 2024-03-10T09:39:25Z, size = 53488, hashes = { sha256 = "2ddfb553fdf02fb784c234c7ba6ccc288296ceabec964ad2eae3777778130bc5" } }]

    [[packages]]
    name = "pathspec"
    version = "0.12.1"
    sdist = { url = "https://files.pythonhosted.org/packages/ca/bc/f35b8446f4531a7cb215605d100cd88b7ac6f44ab3fc94870c120ab3adbf/pathspec-0.12.1.tar.gz", upload-time = 2023-12-10T22:30:45Z, size = 51043, hashes = { sha256 = "a482d51503a1ab33b1c67a6c3813a26953dbdc71c31dacaef9a838c4e29f5712" } }
    wheels = [{ url = "https://files.pythonhosted.org/packages/cc/20/ff623b09d963f88bfde16306a54e12ee5ea43e9b597108672ff3a408aad6/pathspec-0.12.1-py3-none-any.whl", upload-time = 2023-12-10T22:30:43Z, size = 31191, hashes = { sha256 = "a0d503e138a4c123b27490a4f7beda6a01c6f288df0e4a8b79c7eb0dc7b4cc08" } }]

    [[packages]]
    name = "platformdirs"
    version = "4.2.0"
    sdist = { url = "https://files.pythonhosted.org/packages/96/dc/c1d911bf5bb0fdc58cc05010e9f3efe3b67970cef779ba7fbc3183b987a8/platformdirs-4.2.0.tar.gz", upload-time = 2024-01-31T01:00:36Z, size = 20055, hashes = { sha256 = "ef0cc731df711022c174543cb70a9b5bd22e5a9337c8624ef2c2ceb8ddad8768" } }
    wheels = [{ url = "https://files.pythonhosted.org/packages/55/72/4898c44ee9ea6f43396fbc23d9bfaf3d06e01b83698bdf2e4c919deceb7c/platformdirs-4.2.0-py3-none-any.whl", upload-time = 2024-01-31T01:00:34Z, size = 17717, hashes = { sha256 = "0614df2a2f37e1a662acbd8e2b25b92ccf8632929bc6d43467e17fe89c75e068" } }]

    ----- stderr -----
    Resolved 7 packages in [TIME]
    "#);

    Ok(())
}

/// `--no-emit-package` should omit the named package from the `pylock.toml`
/// output entirely, recording the exclusion in a trailing comment block.
#[test]
fn pep_751_compile_no_emit_package() -> Result<()> {
    let context = TestContext::new("3.12");

    let requirements = context.temp_dir.child("requirements.txt");
    requirements.write_str("anyio")?;

    // `idna` is resolved (three packages total) but excluded from the lockfile.
    uv_snapshot!(context.filters(), context
        .pip_compile()
        .arg("requirements.txt")
        .arg("--universal")
        .args(["-o", "pylock.toml"])
        .args(["--no-emit-package", "idna"]), @r#"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.txt --universal -o pylock.toml --no-emit-package idna
    lock-version = "1.0"
    created-by = "uv"
    requires-python = ">=3.12.[X]"

    [[packages]]
    name = "anyio"
    version = "4.3.0"
    sdist = { url = "https://files.pythonhosted.org/packages/db/4d/3970183622f0330d3c23d9b8a5f52e365e50381fd484d08e3285104333d3/anyio-4.3.0.tar.gz", upload-time = 2024-02-19T08:36:28Z, size = 159642, hashes = { sha256 = "f75253795a87df48568485fd18cdd2a3fa5c4f7c5be8e5e36637733fce06fed6" } }
    wheels = [{ url = "https://files.pythonhosted.org/packages/14/fd/2f20c40b45e4fb4324834aea24bd4afdf1143390242c0b33774da0e2e34f/anyio-4.3.0-py3-none-any.whl", upload-time = 2024-02-19T08:36:26Z, size = 85584, hashes = { sha256 = "048e05d0f6caeed70d731f3db756d35dcc1f35747c8c403364a8332c630441b8" } }]

    [[packages]]
    name = "sniffio"
    version = "1.3.1"
    sdist = { url = "https://files.pythonhosted.org/packages/a2/87/a6771e1546d97e7e041b6ae58d80074f81b7d5121207425c964ddf5cfdbd/sniffio-1.3.1.tar.gz", upload-time = 2024-02-25T23:20:04Z, size = 20372, hashes = { sha256 = "f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc" } }
    wheels = [{ url = "https://files.pythonhosted.org/packages/e9/44/75a9c9421471a6c4805dbf2356f7c181a29c1879239abab1ea2cc8f38b40/sniffio-1.3.1-py3-none-any.whl", upload-time = 2024-02-25T23:20:01Z, size = 10235, hashes = { sha256 = "2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2" } }]

    # The following packages were excluded from the output:
    # idna

    ----- stderr -----
    Resolved 3 packages in [TIME]
    "#);

    Ok(())
}

/// Check that we reject versions that have an incompatible `Requires-Python`, but don't
/// have a `data-requires-python` key on the index page.
#[tokio::test]
async fn index_has_no_requires_python() -> Result<()> {
    let context = TestContext::new_with_versions(&["3.9", "3.12"]);
    let server = MockServer::start().await;

    // Unlike PyPI, https://download.pytorch.org/whl/cpu/networkx/ does not contain the
    // `data-requires-python` key on its anchor tags.
    let index_page = r#"
    <!DOCTYPE html>
    <html>
        <body>
        <h1>Links for networkx</h1>
        <a href="https://download.pytorch.org/whl/networkx-3.0-py3-none-any.whl#sha256=58058d66b1818043527244fab9d41a51fcd7dcc271748015f3c181b8a90c8e2e">networkx-3.0-py3-none-any.whl</a><br/>
        <a href="https://download.pytorch.org/whl/networkx-3.2.1-py3-none-any.whl#sha256=f18c69adc97877c42332c170849c96cefa91881c99a7cb3e95b7c659ebdc1ec2">networkx-3.2.1-py3-none-any.whl</a><br/>
        <a href="https://download.pytorch.org/whl/networkx-3.3-py3-none-any.whl#sha256=28575580c6ebdaf4505b22c6256a2b9de86b316dc63ba9e93abde3d78dfdbcf2">networkx-3.3-py3-none-any.whl</a><br/>
    </body>
    </html>
    "#;
    Mock::given(method("GET"))
        .and(path("/networkx/"))
        .respond_with(ResponseTemplate::new(200).set_body_raw(index_page, "text/html"))
        .mount(&server)
        .await;

    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str("networkx >3.0,<=3.3")?;

    let index_url = server.uri();

    // On Python 3.9, the resolver must settle on 3.2.1 rather than 3.3.
    uv_snapshot!(context
        .pip_compile()
        .env_remove(EnvVars::UV_EXCLUDE_NEWER)
        .args(["--python", "3.9"])
        .args(["--index-url", index_url.as_str()])
        .arg("requirements.in"), @r"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] --python 3.9 requirements.in
    networkx==3.2.1
        # via -r requirements.in

    ----- stderr -----
    Resolved 1 package in [TIME]
    ");

    // On Python 3.12, the latest candidate (3.3) is acceptable.
    uv_snapshot!(context
        .pip_compile()
        .env_remove(EnvVars::UV_EXCLUDE_NEWER)
        .args(["--python", "3.12"])
        .args(["--index-url", index_url.as_str()])
        .arg("requirements.in"), @r"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] --python 3.12 requirements.in
    networkx==3.3
        # via -r requirements.in

    ----- stderr -----
    Resolved 1 package in [TIME]
    ");

    Ok(())
}

/// Disallow resolving to multiple different PyTorch indexes.
///
/// The two requirements pin local versions built against different CUDA
/// releases, which the resolver surfaces as conflicting `system:cuda`
/// dependencies.
#[test]
fn incompatible_cuda() -> Result<()> {
    let context = TestContext::new("3.11");

    // One +cu126 build and one +cu121 build: mutually incompatible.
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str(indoc! {r"
        torch==2.6.0+cu126
        torchvision==0.16.0+cu121
    "})?;

    uv_snapshot!(context
        .pip_compile()
        .env_remove(EnvVars::UV_EXCLUDE_NEWER)
        .env(EnvVars::UV_TORCH_BACKEND, "auto")
        .env(EnvVars::UV_CUDA_DRIVER_VERSION, "525.60.13")
        .arg("--preview")
        .arg("requirements.in")
        .args(["--python-platform", "x86_64-manylinux_2_28"])
        .args(["--python-version", "3.11"]), @r"
    success: false
    exit_code: 1
    ----- stdout -----

    ----- stderr -----
      × No solution found when resolving dependencies:
      ╰─▶ Because torchvision==0.16.0+cu121 depends on system:cuda==12.1 and torch==2.6.0+cu126 depends on system:cuda==12.6, we can conclude that torch==2.6.0+cu126 and torchvision==0.16.0+cu121 are incompatible.
          And because you require torch==2.6.0+cu126 and torchvision==0.16.0+cu121, we can conclude that your requirements are unsatisfiable.
    ");

    Ok(())
}
