import os
import argparse
import numpy as np
import torch as th

from Squidiff import dist_util, logger
from Squidiff.hic_script_util import (
    get_all_defaults,
    create_model_and_diffusion,
    args_to_dict,
    add_dict_to_argparser,
)
from Squidiff.hic_inference import (
    load_model_from_checkpoint,
    sample_hic_batch,
    compute_semantic_embedding,
    interpolate_semantic_embeddings,
    spherical_interpolate_semantic_embeddings,
)


def parse_args():
    """
    Minimal CLI for Hi-C diffusion inference.
    - Denoise from pure noise
    - Optional semantic guidance via zsem (early/late -> interpolate)

    Returns:
        dict: All parsed arguments, with ``window_size`` forced to equal
        ``matrix_size`` and ``output_dir`` created on disk.
    """
    defaults = get_all_defaults()

    parser = argparse.ArgumentParser(
        description="Minimal Hi-C Diffusion Inference (with optional semantic guidance)",
        conflict_handler='resolve'  # Allow overriding duplicate arguments
    )

    # Core required args
    parser.add_argument("--checkpoint_path", type=str, required=True,
                        help="Path to model checkpoint file or directory")
    parser.add_argument("--output_dir", type=str, required=True,
                        help="Directory to save generated results")

    # Pull model defaults first; then override below
    add_dict_to_argparser(parser, defaults)

    # Override inference controls (these will replace defaults)
    parser.add_argument("--timepoints", type=str, default="0.5",
                        help="Comma-separated biological times in [0,1], e.g. 0.25,0.5,0.75")
    parser.add_argument("--matrix_size", type=int, default=256,
                        help="Generated Hi-C matrix size (also used as window_size)")
    parser.add_argument("--num_samples", type=int, default=1,
                        help="Number of samples per timepoint")
    parser.add_argument("--ddim_steps", type=int, default=50,
                        help="Number of denoising steps for sampling. "
                             "Fewer steps = faster but lower quality. "
                             "Common values: 20 (fast), 50 (balanced), 100 (high quality), "
                             "1000 (full, slowest). Must be <= diffusion_steps (default 1000)")
    parser.add_argument("--use_fp16", action="store_true",
                        help="Enable mixed precision inference")
    # BUG FIX: the original used action="store_true" with default=True, which
    # made it impossible to ever disable EMA from the CLI (the flag could only
    # set True, and True was already the default). BooleanOptionalAction keeps
    # --use_ema working with the same default and adds --no-use_ema to opt out.
    parser.add_argument("--use_ema", action=argparse.BooleanOptionalAction, default=True,
                        help="Prefer EMA weights when loading checkpoints "
                             "(use --no-use_ema to disable)")

    # Semantic guidance
    parser.add_argument("--use_semantic_guidance", action="store_true",
                        help="Use zsem guidance (requires early/late matrices)")
    parser.add_argument("--early_matrix_path", type=str, default="",
                        help="Path to early timepoint matrix (npy/npz->array)")
    parser.add_argument("--late_matrix_path", type=str, default="",
                        help="Path to late timepoint matrix (npy/npz->array)")
    parser.add_argument("--use_slerp", action="store_true",
                        help="Use SLERP interpolation for zsem (default linear)")

    args = parser.parse_args()
    args = vars(args)

    # Ensure window_size follows matrix_size for model creation & sampling
    args["window_size"] = int(args.get("matrix_size", 256))

    # Output dir
    os.makedirs(args["output_dir"], exist_ok=True)
    return args


def maybe_load_matrix(path):
    """Load a matrix from a .npy/.npz file, or return None for an empty path.

    Args:
        path (str): Path ending in ``.npy`` or ``.npz``. Falsy values (empty
            string, None) mean "no matrix supplied".

    Returns:
        np.ndarray | None: The loaded array (for ``.npz``, the first stored
        array), or None when *path* is falsy.

    Raises:
        ValueError: If the file extension is neither .npy nor .npz.
    """
    if not path:
        return None
    if path.endswith(".npy"):
        return np.load(path)
    if path.endswith(".npz"):
        # BUG FIX: np.load on an .npz returns an NpzFile that keeps the file
        # handle open; the original never closed it. Use it as a context
        # manager and read the first stored array before it closes.
        with np.load(path) as data:
            return data[data.files[0]]
    raise ValueError(f"Unsupported matrix format: {path}")


def _save_visualization(avg, t, output_dir):
    """Best-effort: save a quick-look heatmap PNG of *avg*; never raises."""
    try:
        import matplotlib.pyplot as plt
    except ImportError:
        logger.log("matplotlib not available; skipped PNG visualization.")
        return
    try:
        fig = plt.figure(figsize=(6, 6))
        ax = fig.add_subplot(111)
        im = ax.imshow(avg, cmap="Reds", origin="lower")
        ax.set_title(f"Hi-C @ t={t:.3f}")
        plt.colorbar(im, ax=ax, shrink=0.8)
        fig.tight_layout()
        fig.savefig(os.path.join(output_dir, f"hic_t{t:.3f}.png"), dpi=150, bbox_inches="tight")
        plt.close(fig)
    except Exception as e:
        # BUG FIX: the original wrapped both the import and the plotting in
        # one broad `except Exception` and reported every failure as
        # "matplotlib not available", hiding real plotting/save errors.
        # Report the actual failure instead (still non-fatal).
        logger.log(f"[WARN] PNG visualization failed: {e}")


def main():
    """Run Hi-C diffusion inference for each requested biological timepoint.

    For each timepoint t: optionally interpolate a semantic embedding (zsem)
    between precomputed early/late embeddings, sample a batch of Hi-C
    matrices, save them as float32 .npy, and write a quick-look PNG.
    """
    args = parse_args()

    # Setup distributed & logging
    dist_util.setup_dist()
    logger.configure(dir=args["output_dir"])

    # Parse timepoints (comma-separated floats; blank entries are ignored)
    timepoints = [float(t) for t in args["timepoints"].split(",") if t.strip() != ""]

    # Create model & diffusion with args; load weights (prefer EMA if requested)
    model, diffusion, ckpt_info = load_model_from_checkpoint(
        args["checkpoint_path"], args, use_ema=args["use_ema"]
    )

    # Enforce matrix size (parse_args unified window_size with matrix_size)
    matrix_size = int(args["window_size"])

    # Optional semantic guidance: embed early/late matrices into zsem space.
    zsem_early = None
    zsem_late = None
    if args["use_semantic_guidance"]:
        early_mat = maybe_load_matrix(args["early_matrix_path"]) if args["early_matrix_path"] else None
        late_mat = maybe_load_matrix(args["late_matrix_path"]) if args["late_matrix_path"] else None
        if early_mat is None or late_mat is None:
            # Best-effort: guidance is optional, so degrade gracefully.
            logger.log("[WARN] use_semantic_guidance enabled but early/late matrices missing. Disabling guidance.")
            args["use_semantic_guidance"] = False
        else:
            logger.log("Computing zsem for early/late matrices...")
            zsem_early = compute_semantic_embedding(model, early_mat)
            zsem_late = compute_semantic_embedding(model, late_mat)

    # Generate for each timepoint
    for t in timepoints:
        if args["use_semantic_guidance"]:
            # Interpolate zsem at fraction t between the early/late embeddings
            # (t doubles as both the biological time and the mixing weight).
            alphas = np.array([t], dtype=np.float32)
            if args["use_slerp"]:
                zsem_interp = spherical_interpolate_semantic_embeddings(zsem_early, zsem_late, alphas)[0]
            else:
                zsem_interp = interpolate_semantic_embeddings(zsem_early, zsem_late, alphas)[0]
            # Repeat the single interpolated embedding for every sample in the batch
            sem_batch = np.tile(zsem_interp[None, :], (args["num_samples"], 1))
        else:
            sem_batch = None

        bio_times = [t] * args["num_samples"]
        samples = sample_hic_batch(
            model=model,
            diffusion=diffusion,
            bio_times=bio_times,
            matrix_size=matrix_size,
            ddim_steps=args["ddim_steps"],
            eta=0.0,
            clip_mode="nonneg",
            use_fp16=args["use_fp16"],
            device=dist_util.dev(),
            sem_cond=sem_batch,
        )  # [B,1,H,W] (cpu,float32)

        arr = samples.numpy()[:, 0]  # [B,H,W]
        np.save(os.path.join(args["output_dir"], f"hic_t{t:.3f}_fp32.npy"), arr.astype(np.float32))

        # Save average for quick glance
        avg = arr.mean(axis=0) if arr.shape[0] > 1 else arr[0]
        _save_visualization(avg, t, args["output_dir"])

        logger.log(
            f"Generated t={t:.3f}: shape={arr.shape}, mean={float(avg.mean()):.4f}, max={float(avg.max()):.4f}"
        )

    logger.log("Inference complete.")


# Script entry point: only run inference when executed directly, not on import.
if __name__ == "__main__":
    main() 