import yaml
from pathlib import Path
from tools.logger import logger


class SDFWriter:
    """
    Builds the SDF (slice definition file) data structure for a package
    and writes it to ``<output>/<package_name>.yaml``.
    """

    def __init__(
        self,
        output: str,
        package_name: str,
        classified_slices: dict[str, set[str]],
        slice_deps: dict[str, set[str]],
    ):
        """
        Args:
            output: Directory in which the YAML file will be created.
            package_name: Package name; also used as the YAML file stem
                and stripped from slice names to form their short names.
            classified_slices: Maps full slice names (e.g. ``pkg_libs``)
                to the set of file paths belonging to that slice.
            slice_deps: Maps full slice names to the set of slice names
                they depend on.
        """
        self.output_path = Path(output) / f"{package_name}.yaml"
        self.package_name = package_name
        self.classified_slices = classified_slices
        self.slice_deps = slice_deps

    def write(self) -> None:
        """
        Main entry point: build the SDF data and write it to
        ``self.output_path``, creating parent directories as needed.
        """
        sdf_data = self._build_sdf_structure()

        self.output_path.parent.mkdir(parents=True, exist_ok=True)
        with open(self.output_path, "w", encoding="utf-8") as f:
            # safe_dump serializes only plain Python types (str/list/dict),
            # which is all we produce, and refuses arbitrary objects — it
            # cannot emit executable !!python tags like the default Dumper.
            yaml.safe_dump(
                sdf_data, f, indent=2, sort_keys=False, default_flow_style=False
            )

        logger.info(f"SDF file written to: {self.output_path}")

    def _build_sdf_structure(self) -> dict:
        """
        Assemble the final SDF data dictionary.

        Returns:
            A dict with the package name, a top-level ``deps`` list
            (containing the copyright slice iff one was classified), and
            a ``slices`` mapping of short slice name -> slice content.
        """
        copyright_slice_name = f"{self.package_name}_copyright"
        sdf_data = {
            "package": self.package_name,
            # Every slice implicitly needs the copyright slice, so it is
            # declared once at package level — but only if it exists.
            "deps": (
                [copyright_slice_name]
                if copyright_slice_name in self.classified_slices
                else []
            ),
            "slices": {},
        }

        for slice_name, files in sorted(self.classified_slices.items()):

            # Slices are keyed by their short name ("pkg_libs" -> "libs").
            short_slice_name = slice_name.replace(f"{self.package_name}_", "", 1)
            slice_content = {}

            # Omit the "deps" key entirely for slices with no dependencies.
            if self.slice_deps.get(slice_name):
                slice_content["deps"] = sorted(self.slice_deps[slice_name])

            # Apply path compression before adding to the structure.
            compressed_files = self._compress_paths(files)
            slice_content["contents"] = {"common": sorted(compressed_files)}

            sdf_data["slices"][short_slice_name] = slice_content

        return sdf_data

    def _compress_paths(self, file_set: set[str]) -> set[str]:
        """
        Perform a robust path compression for versioned shared libraries
        using direct prefix matching: any library path that is a strict
        prefix of another (e.g. ``libfoo.so`` vs ``libfoo.so.1``) is
        replaced by a single wildcard entry (``libfoo.so*``).

        Args:
            file_set: File paths belonging to one slice.

        Returns:
            The path set with versioned-library families collapsed into
            wildcard entries; non-library files pass through unchanged.
        """
        # Only files that look like shared libraries are candidates for
        # compression; everything else is passed through untouched.
        libs = sorted([f for f in file_set if ".so" in f], key=len)
        other_files = {f for f in file_set if ".so" not in f}

        if not libs:
            return other_files

        # Core idea: walk the libs from shortest to longest. If a lib is
        # a prefix of any later (longer) lib, it becomes a wildcard and
        # the longer paths are consumed by it.
        compressed_libs = set()

        # Marks libraries already covered by an earlier wildcard prefix.
        consumed = [False] * len(libs)

        for i in range(len(libs)):
            if consumed[i]:
                continue

            # The current (shortest unconsumed) library is a candidate prefix.
            prefix = libs[i]
            is_prefix_for_others = False

            for j in range(i + 1, len(libs)):
                if libs[j].startswith(prefix):
                    # At least one longer path starts with this prefix,
                    # confirming a valid compression case.
                    is_prefix_for_others = True
                    consumed[j] = True  # Mark the longer path as consumed

            if is_prefix_for_others:
                # Replace the whole family with a single wildcard entry.
                compressed_libs.add(f"{prefix}*")
            else:
                # Not a prefix of anything else: keep the path as-is.
                compressed_libs.add(prefix)

        return compressed_libs.union(other_files)
