import logging
import subprocess
from typing import Dict, Any

from benchmarks.base_benchmark import BaseBenchmark
from benchmarks.command import Command, CommandResult
from benchmarks.cargo_helper import build_example
from benchmarks.config_utils import get_s3_keys
from omegaconf import DictConfig

# Module-level logger named after this module (standard logging convention).
log = logging.getLogger(__name__)


class PrefetchBenchmark(BaseBenchmark):
    """Benchmark driver for the `prefetch_benchmark` cargo example.

    Compiles the example binary, assembles its command-line invocation from
    the run configuration, and validates/records the result of a run.
    """

    # Single source of truth for the results file name: get_command() passes
    # it via --output-file and post_process() records it in the metadata.
    # (Previously this literal was duplicated in both methods.)
    OUTPUT_FILE = "prefetch-output.json"

    def __init__(self, cfg: DictConfig, metadata: Dict[str, Any]):
        """Store the benchmark configuration and the shared run-metadata dict."""
        self.cfg = cfg
        self.metadata = metadata

    def setup(self, with_flamegraph: bool = False) -> Dict[str, Any]:
        """Compile the `prefetch_benchmark` example executable.

        Args:
            with_flamegraph: Build with flamegraph instrumentation when True.

        Returns:
            The (unmodified) run-metadata dict.
        """
        log.info("Compiling prefetch_benchmark example...")
        self.executable_path = build_example("prefetch_benchmark", with_flamegraph=with_flamegraph)
        # Lazy %-style args so formatting cost is skipped when INFO is disabled.
        log.info("Prefetch benchmark executable ready at: %s", self.executable_path)

        return self.metadata

    def get_command(self) -> Command:
        """Build the Command (argv + environment) for one benchmark run.

        Returns:
            A Command with the executable, bucket, one S3 key per application
            worker, and the optional tuning flags taken from the config.

        Raises:
            ValueError: If fewer S3 keys are available than application workers.
        """
        app_workers = self.cfg.application_workers
        size_gib = str(self.cfg.object_size_in_gib)

        # Check if objects are specified or if we have to fall back to objects
        # generated by fio.
        objects = get_s3_keys(self.cfg.s3_keys, app_workers, size_gib)

        # Guard clause: we need at least one object key per application worker.
        if len(objects) < app_workers:
            raise ValueError("Seeing fewer objects than app workers. So cannot proceed with the run.")

        subprocess_args = [
            self.executable_path,
            self.cfg.s3_bucket,
            # Exactly one object key per worker; any extra keys are ignored.
            *objects[:app_workers],
        ]

        subprocess_args.extend(["--region", self.cfg.region])

        if (max_throughput := self.cfg.network.maximum_throughput_gbps) is not None:
            subprocess_args.extend(["--maximum-throughput-gbps", str(max_throughput)])

        if (max_memory_target := self.cfg.benchmarks.prefetch.max_memory_target) is not None:
            subprocess_args.extend(["--max-memory-target", str(max_memory_target)])

        if (read_part_size := self.cfg.read_part_size) is not None:
            subprocess_args.extend(["--part-size", str(read_part_size)])

        subprocess_args.extend(["--read-size", str(self.cfg.read_size)])

        # Bind the benchmark to every configured network interface.
        for interface in self.cfg.network.interface_names:
            subprocess_args.extend(["--bind", interface])

        if (run_time := self.cfg.run_time) is not None:
            subprocess_args.extend(["--max-duration", str(run_time)])

        subprocess_args.extend(["--output-file", self.OUTPUT_FILE])

        prefetch_env = {}
        # NOTE(review): the variable name suggests this disables download
        # checksum validation in the benchmark — confirm against the example.
        if not self.cfg.download_checksums:
            prefetch_env["EXPERIMENTAL_MOUNTPOINT_NO_DOWNLOAD_INTEGRITY_VALIDATION"] = "ON"

        if (crt_eventloop_threads := self.cfg.crt_eventloop_threads) is not None:
            prefetch_env["UNSTABLE_CRT_EVENTLOOP_THREADS"] = str(crt_eventloop_threads)

        log.info("Prefetch benchmark command prepared with args: %s", subprocess_args)

        return Command(args=subprocess_args, env=prefetch_env)

    def post_process(self, result: CommandResult) -> Dict[str, Any]:
        """Check the run result and record the output file in the metadata.

        Args:
            result: The completed benchmark process result.

        Returns:
            The run-metadata dict, with "prefetch_output_file" added.

        Raises:
            subprocess.CalledProcessError: If the benchmark exited non-zero.
        """
        if result.returncode != 0:
            log.error("Prefetch benchmark failed with exit code %s", result.returncode)
            if result.stderr:
                log.error("Error output: %s", result.stderr)
            raise subprocess.CalledProcessError(result.returncode, ["prefetch_benchmark"])

        log.info("Prefetch benchmark completed successfully.")
        self.metadata["prefetch_output_file"] = self.OUTPUT_FILE
        return self.metadata
