# Copyright 2024 Pex project contributors.
# Licensed under the Apache License, Version 2.0 (see LICENSE).

from __future__ import absolute_import

import hashlib
import os.path
from argparse import Namespace, _ActionsContainer

from pex import requirements, sdist, toml
from pex.build_system import pep_517
from pex.common import pluralize, safe_mkdtemp
from pex.compatibility import string
from pex.dependency_configuration import DependencyConfiguration
from pex.dist_metadata import DistMetadata, Requirement, RequirementParseError, is_wheel
from pex.fingerprinted_distribution import FingerprintedDistribution
from pex.interpreter import PythonInterpreter
from pex.jobs import Job, Retain, SpawnedJob, execute_parallel
from pex.orderedset import OrderedSet
from pex.pep_427 import InstallableType
from pex.pep_503 import ProjectName
from pex.pip.tool import PackageIndexConfiguration
from pex.pip.version import PipVersionValue
from pex.requirements import LocalProjectRequirement, ParseError, URLRequirement
from pex.resolve.configured_resolve import resolve
from pex.resolve.configured_resolver import ConfiguredResolver
from pex.resolve.requirement_configuration import RequirementConfiguration
from pex.resolve.resolver_configuration import PipConfiguration
from pex.resolve.resolvers import Resolver
from pex.resolver import BuildAndInstallRequest, BuildRequest, InstallRequest
from pex.result import Error, ResultError
from pex.sorted_tuple import SortedTuple
from pex.targets import LocalInterpreter, Target, Targets
from pex.tracer import TRACER
from pex.typing import TYPE_CHECKING
from pex.util import CacheHelper

if TYPE_CHECKING:
    from typing import Any, Dict, Iterable, Iterator, List, Mapping, Optional, Set, Tuple, Union

    import attr  # vendor:skip

    from pex.requirements import ParsedRequirement
else:
    from pex.third_party import attr


def _iter_requirements(
    target,  # type: Target
    dist_metadata,  # type: DistMetadata
    extras,  # type: Iterable[str]
):
    # type: (...) -> Iterator[Requirement]
    """Yield the distribution requirements that apply to `target` given the active `extras`.

    Environment markers are stripped from each yielded requirement since applicability has
    already been decided here.
    """
    applicable = (
        requirement
        for requirement in dist_metadata.requires_dists
        if target.requirement_applies(requirement=requirement, extras=extras)
    )
    for requirement in applicable:
        # The marker has served its purpose in the applicability check above; drop it.
        yield attr.evolve(requirement, marker=None)


@attr.s(frozen=True)
class BuiltProject(object):
    """The result of building one local project for one target."""

    target = attr.ib()  # type: Target
    fingerprinted_distribution = attr.ib()  # type: FingerprintedDistribution
    satisfied_direct_requirements = attr.ib()  # type: SortedTuple[Requirement]

    def iter_requirements(self):
        # type: () -> Iterator[Requirement]
        """Yield the built distribution's applicable requirements, de-duplicated."""
        emitted = set()  # type: Set[Requirement]
        metadata = self.fingerprinted_distribution.distribution.metadata
        for direct_requirement in self.satisfied_direct_requirements:
            for requirement in _iter_requirements(
                target=self.target,
                dist_metadata=metadata,
                extras=direct_requirement.extras,
            ):
                if requirement in emitted:
                    continue
                emitted.add(requirement)
                yield requirement


@attr.s(frozen=True)
class ProjectDirectory(object):
    """A local Python project source directory given as a `--project` specifier."""

    requirement = attr.ib()  # type: LocalProjectRequirement

    @property
    def path(self):
        # type: () -> str
        """The path of the local project directory."""
        return self.requirement.path

    @property
    def requirement_str(self):
        # type: () -> str
        """The requirement string exactly as given on the command line."""
        # N.B.: Requirements are ASCII text. See: https://peps.python.org/pep-0508/#grammar
        processed_text = self.requirement.line.processed_text
        return str(processed_text)


@attr.s(frozen=True)
class ProjectArchive(object):
    """A local project archive (sdist or wheel) given as a `--project` file: URL specifier."""

    requirement = attr.ib()  # type: URLRequirement

    @property
    def path(self):
        # type: () -> str
        """The local file path of the archive."""
        return self.requirement.url.path

    @property
    def is_wheel(self):
        # type: () -> bool
        """Whether this archive is a pre-built wheel as opposed to an sdist."""
        return is_wheel(self.path)

    @property
    def subdirectory(self):
        # type: () -> Optional[str]
        """An optional subdirectory of the archive containing the project, if specified."""
        return self.requirement.subdirectory


@attr.s(frozen=True)
class Projects(object):
    """A collection of local projects - source directories and archives - to build or inspect.

    Typically gathered from `--project` command line options via `get_projects`.
    """

    # Local project source directories.
    project_directories = attr.ib(default=())  # type: Tuple[ProjectDirectory, ...]
    # Local project archive files (sdists or wheels).
    project_archives = attr.ib(default=())  # type: Tuple[ProjectArchive, ...]

    def build(
        self,
        targets,  # type: Targets
        pip_configuration,  # type: PipConfiguration
        compile_pyc=False,  # type: bool
        ignore_errors=False,  # type: bool
        result_type=InstallableType.INSTALLED_WHEEL_CHROOT,  # type: InstallableType.Value
        dependency_config=DependencyConfiguration(),  # type: DependencyConfiguration
    ):
        # type: (...) -> Iterator[BuiltProject]
        """Build (and, depending on `result_type`, install) every project for the given targets.

        Project directories go through a non-transitive resolve; project archives are turned
        into build requests directly (wheels skip the build step and are only installed).

        Yields one `BuiltProject` per built (project, target) combination.
        """

        if self.project_directories:
            # A non-transitive resolve builds just the named projects themselves without
            # pulling in their dependencies.
            resolve_result = resolve(
                targets=targets,
                requirement_configuration=RequirementConfiguration(
                    requirements=[project.requirement_str for project in self.project_directories]
                ),
                resolver_configuration=attr.evolve(pip_configuration, transitive=False),
                compile_pyc=compile_pyc,
                ignore_errors=ignore_errors,
                result_type=result_type,
                dependency_configuration=dependency_config,
            )
            for resolved_distribution in resolve_result.distributions:
                yield BuiltProject(
                    target=resolved_distribution.target,
                    fingerprinted_distribution=resolved_distribution.fingerprinted_distribution,
                    satisfied_direct_requirements=resolved_distribution.direct_requirements,
                )

        if self.project_archives:
            build_requests = []  # type: List[BuildRequest]
            install_requests = []  # type: List[InstallRequest]
            direct_requirements = []  # type: List[ParsedRequirement]
            for project_archive in self.project_archives:
                # Fingerprint the archive file contents so build / install results can be keyed
                # by them.
                fingerprint = CacheHelper.hash(project_archive.path, hasher=hashlib.sha256)
                direct_requirements.append(project_archive.requirement)
                for target in targets.unique_targets():
                    if project_archive.is_wheel:
                        # Wheels are pre-built; they only need an install request.
                        install_requests.append(
                            InstallRequest(
                                download_target=target,
                                wheel_path=project_archive.path,
                                fingerprint=fingerprint,
                            )
                        )
                    else:
                        build_requests.append(
                            BuildRequest(
                                download_target=target,
                                source_path=project_archive.path,
                                fingerprint=fingerprint,
                                subdirectory=project_archive.subdirectory,
                            )
                        )

            build_and_install_request = BuildAndInstallRequest(
                build_requests=build_requests,
                install_requests=install_requests,
                direct_requirements=direct_requirements,
                package_index_configuration=PackageIndexConfiguration.create(
                    pip_version=pip_configuration.version,
                    resolver_version=pip_configuration.resolver_version,
                    repos_configuration=pip_configuration.repos_configuration,
                    network_configuration=pip_configuration.network_configuration,
                    use_pip_config=pip_configuration.use_pip_config,
                    extra_pip_requirements=pip_configuration.extra_requirements,
                    keyring_provider=pip_configuration.keyring_provider,
                ),
                compile=compile_pyc,
                build_configuration=pip_configuration.build_configuration,
                pip_version=pip_configuration.version,
                resolver=ConfiguredResolver(pip_configuration=pip_configuration),
                dependency_configuration=dependency_config,
            )

            # This checks the resolve, but we're not doing a full resolve here - we're installing
            # projects to gather their requirements and _then_ perform a resolve of those
            # requirements.
            ignore_errors = True

            if result_type is InstallableType.INSTALLED_WHEEL_CHROOT:
                resolved_distributions = build_and_install_request.install_distributions(
                    max_parallel_jobs=pip_configuration.max_jobs, ignore_errors=ignore_errors
                )
            else:
                resolved_distributions = build_and_install_request.build_distributions(
                    max_parallel_jobs=pip_configuration.max_jobs,
                    ignore_errors=ignore_errors,
                )

            for resolved_distribution in resolved_distributions:
                yield BuiltProject(
                    target=resolved_distribution.target,
                    fingerprinted_distribution=resolved_distribution.fingerprinted_distribution,
                    satisfied_direct_requirements=resolved_distribution.direct_requirements,
                )

    def collect_requirements(
        self,
        resolver,  # type: Resolver
        interpreter=None,  # type: Optional[PythonInterpreter]
        pip_version=None,  # type: Optional[PipVersionValue]
        max_jobs=None,  # type: Optional[int]
    ):
        # type: (...) -> Iterator[Requirement]
        """Yield the de-duplicated direct requirements of all the projects.

        Wheel archives have requirements read straight from their metadata; source projects
        (directories and sdists) have metadata prepared via PEP-517 in parallel, falling back
        to a full wheel build for projects whose build backend lacks the prepare-metadata hook.

        :raises ResultError: If metadata preparation fails for any project (other than the
            recoverable missing-hook case).
        """

        target = LocalInterpreter.create(interpreter)
        seen = set()  # type: Set[Requirement]

        source_projects = list(
            self.project_directories
        )  # type: List[Union[ProjectDirectory, ProjectArchive]]
        for project_archive in self.project_archives:
            if project_archive.is_wheel:
                # Wheel metadata is immediately available; no build step is needed.
                for req in DistMetadata.load(project_archive.path).requires_dists:
                    if req not in seen:
                        seen.add(req)
                        yield req
            else:
                source_projects.append(project_archive)

        wheels_to_build = []  # type: List[str]
        prepare_metadata_errors = {}  # type: Dict[str, str]

        def spawn_prepare_metadata_func(project):
            # type: (Union[ProjectDirectory, ProjectArchive]) -> SpawnedJob[DistMetadata]
            # Kick off asynchronous PEP-517 metadata preparation for one project, extracting
            # sdist tarballs to a temporary directory first.

            if isinstance(project, ProjectDirectory):
                project_dir = project.path
            else:
                project_dir = sdist.extract_tarball(
                    tarball_path=project.path, dest_dir=safe_mkdtemp()
                )

            return pep_517.spawn_prepare_metadata(
                project_directory=project_dir,
                target=target,
                resolver=resolver,
                pip_version=pip_version,
            )

        # N.B.: `execute_parallel` preserves input order here - TODO confirm - so zipping its
        # results back against `source_projects` pairs each result with its project.
        for project_directory, dist_metadata_result in zip(
            source_projects,
            execute_parallel(
                source_projects,
                # MyPy just can't figure out the next two args types; they're OK.
                spawn_func=spawn_prepare_metadata_func,  # type: ignore[arg-type]
                error_handler=Retain["Union[ProjectDirectory, ProjectArchive]"](),  # type: ignore[arg-type]
                max_jobs=max_jobs,
            ),
        ):
            if isinstance(dist_metadata_result, DistMetadata):
                for req in _iter_requirements(
                    target=target,
                    dist_metadata=dist_metadata_result,
                    extras=project_directory.requirement.extras,
                ):
                    if req not in seen:
                        seen.add(req)
                        yield req
            else:
                # On failure, the `Retain` error handler hands back an (item, error) pair.
                _item, error = dist_metadata_result
                if isinstance(error, Job.Error) and pep_517.is_hook_unavailable_error(error):
                    # The build backend does not expose the prepare-metadata hook; fall back
                    # to building a wheel below to obtain metadata.
                    TRACER.log(
                        "Failed to prepare metadata for {project}, trying to build a wheel "
                        "instead: {err}".format(
                            project=project_directory.path, err=dist_metadata_result
                        ),
                        V=3,
                    )
                    wheels_to_build.append(project_directory.path)
                else:
                    prepare_metadata_errors[project_directory.path] = str(error)

        if wheels_to_build:
            resolve_result = resolver.resolve_requirements(
                requirements=wheels_to_build,
                targets=Targets.from_target(target),
                pip_version=pip_version,
            )
            for resolved_distribution in resolve_result.distributions:
                for req in resolved_distribution.distribution.requires():
                    if req not in seen:
                        seen.add(req)
                        yield req

        if prepare_metadata_errors:
            raise ResultError(
                Error(
                    "Encountered {count} {errors} collecting project requirements:\n"
                    "{error_items}".format(
                        count=len(prepare_metadata_errors),
                        errors=pluralize(prepare_metadata_errors, "error"),
                        error_items="\n".join(
                            "{index}. {path}: {error}".format(index=index, path=path, error=error)
                            for index, (path, error) in enumerate(
                                prepare_metadata_errors.items(), start=1
                            )
                        ),
                    )
                )
            )

    def __len__(self):
        # type: () -> int
        # The total count of projects: directories plus archives.
        return len(self.project_directories) + len(self.project_archives)


@attr.s(frozen=True)
class GroupName(ProjectName):
    """The canonicalized name of a PEP-735 dependency group."""

    # N.B.: A dependency group name follows the same rules, including canonicalization, as project
    # names.
    pass


@attr.s(frozen=True)
class DependencyGroup(object):
    """A PEP-735 `[dependency-groups]` group selected from a project's pyproject.toml."""

    @classmethod
    def parse(cls, spec):
        # type: (str) -> DependencyGroup
        """Parse a dependency group spec of the form `<group name>[@<project directory>]`.

        When the `@<project directory>` suffix is omitted or is a bare `@`, the current working
        directory is used as the project directory.

        :raises ValueError: If the project directory is not a directory, contains no
            pyproject.toml file, or its `[dependency-groups]` metadata cannot be read.
        :raises KeyError: If the named group does not exist in `[dependency-groups]`.
        """

        group, _, project_dir = spec.partition("@")
        # N.B.: realpath("") resolves to the current working directory, which handles both the
        # missing `@<dir>` suffix and the bare `@` suffix cases.
        abs_project_dir = os.path.realpath(project_dir)
        if not os.path.isdir(abs_project_dir):
            raise ValueError(
                "The project directory specified by '{spec}' is not a directory".format(spec=spec)
            )

        pyproject_toml = os.path.join(abs_project_dir, "pyproject.toml")
        if not os.path.isfile(pyproject_toml):
            raise ValueError(
                "The project directory specified by '{spec}' does not contain a pyproject.toml "
                "file".format(spec=spec)
            )

        group_name = GroupName(group)
        try:
            dependency_groups = {
                GroupName(name): members
                for name, members in toml.load(pyproject_toml)["dependency-groups"].items()
            }  # type: Mapping[GroupName, Any]
        except (IOError, OSError, KeyError, ValueError, AttributeError) as e:
            raise ValueError(
                "Failed to read `[dependency-groups]` metadata from {pyproject_toml} when parsing "
                "dependency group spec '{spec}': {err}".format(
                    pyproject_toml=pyproject_toml, spec=spec, err=e
                )
            )
        if group_name not in dependency_groups:
            raise KeyError(
                "The dependency group '{group}' specified by '{spec}' does not exist in "
                "{pyproject_toml}".format(group=group, spec=spec, pyproject_toml=pyproject_toml)
            )

        return cls(project_dir=abs_project_dir, name=group_name, groups=dependency_groups)

    # The absolute (real) path of the project directory the group was read from.
    project_dir = attr.ib()  # type: str
    # The name of the selected group.
    name = attr.ib()  # type: GroupName
    # All groups defined in the project's `[dependency-groups]` table, by name.
    _groups = attr.ib()  # type: Mapping[GroupName, Any]

    def _parse_group_items(
        self,
        group,  # type: GroupName
        required_by=None,  # type: Optional[GroupName]
    ):
        # type: (...) -> Iterator[Union[GroupName, Requirement]]
        """Yield the raw items of `group`: requirements and the names of included groups.

        :raises KeyError: If `group` does not exist in the project (`required_by` identifies the
            including group, if any, for the error message).
        :raises ValueError: If the group value or any item in it is malformed.
        """

        members = self._groups.get(group)
        if not members:
            if not required_by:
                raise KeyError(
                    "The dependency group '{group}' does not exist in the project at "
                    "{project_dir}.".format(group=group, project_dir=self.project_dir)
                )
            else:
                raise KeyError(
                    "The dependency group '{group}' required by dependency group '{required_by}' "
                    "does not exist in the project at {project_dir}.".format(
                        group=group, required_by=required_by, project_dir=self.project_dir
                    )
                )

        if not isinstance(members, list):
            # Bug fix: the `.format(...)` call was missing here, so the error text was emitted
            # with its `{group}` and `{project_dir}` placeholders unfilled.
            raise ValueError(
                "Invalid dependency group '{group}' in the project at {project_dir}.\n"
                "The value must be a list containing dependency specifiers or dependency group "
                "includes.\n"
                "See https://peps.python.org/pep-0735/#specification for the specification "
                "of [dependency-groups] syntax.".format(group=group, project_dir=self.project_dir)
            )

        for index, item in enumerate(members, start=1):
            if isinstance(item, string):
                # A plain string item is a PEP-508 dependency specifier.
                try:
                    yield Requirement.parse(item)
                except RequirementParseError as e:
                    raise ValueError(
                        "Invalid [dependency-group] entry '{name}'.\n"
                        "Item {index}: '{req}', is an invalid dependency specifier: {err}".format(
                            name=group.raw, index=index, req=item, err=e
                        )
                    )
            elif isinstance(item, dict):
                # A table item must be an `include-group` include of another group.
                try:
                    yield GroupName(item["include-group"])
                except KeyError:
                    raise ValueError(
                        "Invalid [dependency-group] entry '{name}'.\n"
                        "Item {index} is a non 'include-group' table and only dependency "
                        "specifiers and single entry 'include-group' tables are allowed in group "
                        "dependency lists.\n"
                        "See https://peps.python.org/pep-0735/#specification for the specification "
                        "of [dependency-groups] syntax.\n"
                        "Given: {item}".format(name=group.raw, index=index, item=item)
                    )
            else:
                raise ValueError(
                    "Invalid [dependency-group] entry '{name}'.\n"
                    "Item {index} is not a dependency specifier or a dependency group include.\n"
                    "See https://peps.python.org/pep-0735/#specification for the specification "
                    "of [dependency-groups] syntax.\n"
                    "Given: {item}".format(name=group.raw, index=index, item=item)
                )

    def iter_requirements(self):
        # type: () -> Iterator[Requirement]
        """Yield all requirements of this group, recursively expanding group includes.

        Each group is expanded at most once, which both de-duplicates shared includes and
        guards against include cycles.
        """

        visited_groups = set()  # type: Set[GroupName]

        def iter_group(
            group,  # type: GroupName
            required_by=None,  # type: Optional[GroupName]
        ):
            # type: (...) -> Iterator[Requirement]
            if group not in visited_groups:
                visited_groups.add(group)
                for item in self._parse_group_items(group, required_by=required_by):
                    if isinstance(item, Requirement):
                        yield item
                    else:
                        for req in iter_group(item, required_by=group):
                            yield req

        return iter_group(self.name)


def register_options(
    parser,  # type: _ActionsContainer
    project_help,  # type: str
):
    # type: (...) -> None
    """Register the `--project` and `--group` options on the given parser."""

    parser.add_argument(
        "--project",
        dest="projects",
        metavar="DIR",
        default=[],
        type=str,
        action="append",
        help=project_help,
    )

    group_help = (
        "Pull requirements from the specified PEP-735 dependency group. Dependency groups are "
        "specified by referencing the group name in a given project's pyproject.toml in the "
        "form `<group name>@<project directory>`; e.g.: `test@local/project/directory`. If "
        "either the `@<project directory>` suffix is not present or the suffix is just `@`, "
        "the current working directory is assumed to be the project directory to read the "
        "dependency group information from. Multiple dependency groups across any number of "
        "projects can be specified. Read more about dependency groups at "
        "https://peps.python.org/pep-0735/."
    )
    parser.add_argument(
        "--group",
        "--dependency-group",
        dest="dependency_groups",
        metavar="GROUP[@DIR]",
        default=[],
        type=DependencyGroup.parse,
        action="append",
        help=group_help,
    )


def get_projects(options):
    # type: (Namespace) -> Projects
    """Parse the `--project` option values into a `Projects` collection.

    :raises ValueError: If any `--project` value is not a valid local project directory or
        local project archive specifier.
    """

    project_directories = []  # type: List[ProjectDirectory]
    project_archives = []  # type: List[ProjectArchive]
    errors = []  # type: List[str]
    for spec in getattr(options, "projects", ()):
        try:
            parsed_requirement = requirements.parse_requirement_string(spec)
        except (ParseError, ValueError) as e:
            errors.append(
                "The --project {project} is not a valid local project requirement: {err}".format(
                    project=spec, err=e
                )
            )
            continue

        if not isinstance(parsed_requirement, (LocalProjectRequirement, URLRequirement)):
            errors.append(
                "The --project {project} does not appear to point to a directory containing a "
                "Python project or a project archive (sdist or whl).".format(project=spec)
            )
            continue

        if parsed_requirement.marker:
            errors.append(
                "The --project {project} has a marker, which is not supported. "
                "Remove marker: ;{marker}".format(
                    project=spec, marker=parsed_requirement.marker
                )
            )
        elif isinstance(parsed_requirement, LocalProjectRequirement):
            project_directories.append(ProjectDirectory(requirement=parsed_requirement))
        elif parsed_requirement.url.scheme != "file":
            errors.append(
                "The --project {project} URL must be a local file: URL.".format(
                    project=spec
                )
            )
        else:
            project_archives.append(ProjectArchive(requirement=parsed_requirement))

    if errors:
        numbered_errors = "\n".join(
            "{index}. {error}".format(index=index, error=error)
            for index, error in enumerate(errors, start=1)
        )
        raise ValueError(
            "Found {count} invalid --project {specifiers}:\n{errors}".format(
                count=len(errors),
                specifiers=pluralize(errors, "specifier"),
                errors=numbered_errors,
            )
        )

    return Projects(
        project_directories=tuple(project_directories), project_archives=tuple(project_archives)
    )


def get_group_requirements(options):
    # type: (Namespace) -> Iterable[Requirement]
    """Collect the de-duplicated requirements of all requested dependency groups, in order."""

    collected = OrderedSet()  # type: OrderedSet[Requirement]
    for group in getattr(options, "dependency_groups", ()):
        for group_requirement in group.iter_requirements():
            collected.add(group_requirement)
    return collected
