# -*- coding: utf-8 -*-
import os
import sys
import typing
from glob import glob
from typing import Union

import h5py
import hdf5plugin  # noqa: F401
import numpy as np
from astropy.io import fits
from jsonargparse import auto_cli
from returns.pipeline import is_successful
from returns.result import Failure, Success, safe

if typing.TYPE_CHECKING:
    from os import PathLike

    from astropy.io.fits.column import Column
    from astropy.io.fits.hdu.hdulist import HDUList
    from astropy.io.fits.hdu.image import PrimaryHDU
    from astropy.io.fits.hdu.table import BinTableHDU
    from h5py import Group
    from jsonargparse.typing import Path_fr, path_type

    Path_dr = path_type("dr", docstring="path to a directory that exists and is readable")


def main():
    """CLI entry point: dispatch to the ``file`` or ``dir`` sub-command."""
    subcommands = {
        "file": check_file_cli,
        "dir": check_dir_cli,
    }
    auto_cli(subcommands, as_positional=False)


def check_dir_cli(
        h5_dir: "Path_dr", fits_dir: "Path_dr",
        include_globs: str | list[str] = "*.h5",
        exclude_globs: str | list[str] | None = None,
) -> None:
    """check all the hdf5 files in `h5_dir` with the fits files in `fits_dir`

    :param h5_dir: a directory to the HDF5 files to be read from
    :param fits_dir: a directory to the FITS files to be read from
    :param include_globs: only files matching one of this glob pattens are included as candidate for checking.
    :param exclude_globs: files matching any of these patterns are excluded for checking.
    """

    if isinstance(include_globs, str):
        include_globs = [include_globs]

    candidate_files = set()
    for include_glob in include_globs:
        candidate_files.update(glob(os.path.join(h5_dir, include_glob)))

    if exclude_globs is not None:
        if isinstance(exclude_globs, str):
            exclude_globs = [exclude_globs]
        exclude_files = set()
        for exclude_glob in exclude_globs:
            exclude_files.update(glob(os.path.join(h5_dir, exclude_glob)))
        checking_files = candidate_files - exclude_files

        suffix = "s" if len(exclude_globs) > 1 else ""
        print(f"Found {len(candidate_files)} files for checking. "
              f"{len(exclude_files)} files are excluded by glob pattern{suffix} {exclude_globs}. "
              f"{len(checking_files)} files are remaining for checking. ")
    else:
        checking_files = candidate_files
        print(f"Found {len(candidate_files)} files for checking.")

    all_failures = []
    checked_keys = set()
    for h5_file in sorted(checking_files):
        print(f"Checking {h5_file}")
        status = check_file(h5_file, fits_dir, verbose=True)
        match status:
            case Success(checked):
                checked_keys.update(checked)
                print(f"check {h5_file} passed")
                continue
            case Failure(msg):
                print(msg)
                all_failures.append(h5_file)

    if all_failures:
        print(f"checking on the following {len(all_failures)} files failed")
        for h5_file in all_failures:
            print(f"  - {h5_file}")
        sys.exit(1)

    found_fits = set(glob(os.path.join(fits_dir, "*.fits")))
    checked_fits = set(os.path.join(fits_dir, f"{x}.fits") for x in checked_keys)
    num_fits_files = len(found_fits)
    num_h5_group = len(checked_fits)
    if num_h5_group != num_fits_files:
        print(f"Error: {num_h5_group} HDF5 groups checked, {num_fits_files} FITS files found", file=sys.stderr)
        unchecked = found_fits.difference(checked_fits)
        print(f"{len(unchecked)} FITS files are found but not checked:")
        for x in unchecked:
            print(f"  - {x}")
        sys.exit(2)

    print(f"All files ({len(checking_files)} HDF5 files and {num_fits_files} FITS files) are checked and passed")


def check_file_cli(h5_file: "Path_fr", fits_dir: "Path_dr"):
    """check a hdf5 file with the fits files in `fits_dir`

    :param h5_file: path to the existing HDF5 file
    :param fits_dir: a directory to the FITS files to be read from
    """
    result = check_file(h5_file, fits_dir, verbose=True)
    if not is_successful(result):
        # surface the captured failure as a hard error for the CLI
        raise RuntimeError(result.failure())
    print(f"check {h5_file} passed")


@safe
def check_file(
        h5_file: Union[str, "PathLike[str]"],
        fits_dir: Union[str, "PathLike[str]"],
        verbose: bool = False
) -> list[str]:
    """check a hdf5 file with the fits files in `fits_dir`

    :param h5_file: path to the existing HDF5 file
    :param fits_dir: a directory to the FITS files to be read from
    :param verbose: print checking information or not
    :return: the names of the top-level groups that were checked
    """
    verbose_print = get_verbose_print(verbose)
    with h5py.File(h5_file, "r") as root:
        group_names = list(root.keys())
        # each top-level group corresponds to one FITS file named after the group
        for name, group in root.items():
            fits_file = os.path.join(fits_dir, f"{name}.fits")
            with fits.open(fits_file) as fits_fp:
                verbose_print(f"Checking '{h5_file}:{name}' with {fits_file!r}")
                result = _check_opened_fits_file(fits_fp, group)
                if not is_successful(result):
                    raise ValueError(f"check '{h5_file}/{name}' with {fits_file!r} failed: {result.failure()}")
    return group_names


@safe
def check_fits_file(
        h5_file: Union[str, "PathLike[str]"],
        group_name: str,
        fits_file: Union[str, "PathLike[str]"],
        verbose: bool = False
) -> None:
    """check a single group of a hdf5 file against one fits file

    :param h5_file: path to the existing HDF5 file
    :param group_name: name of the top-level HDF5 group to compare
    :param fits_file: path to the FITS file to compare against
    :param verbose: print checking information or not
    :raises ValueError: when the comparison fails (captured as ``Failure`` by ``@safe``)
    """
    verbose_print = get_verbose_print(verbose)
    verbose_print(f"Checking {h5_file!r}: {group_name!r} <==> {fits_file!r}: ", end="")

    with h5py.File(h5_file, "r") as h5_root, fits.open(fits_file) as fits_fp:
        group = h5_root[group_name]
        # BUG FIX: _check_opened_fits_file is @safe, so it returns a Result instead
        # of raising. The previous code discarded that Result, silently reporting
        # "[OK]" even when the comparison failed. Propagate the failure explicitly.
        status = _check_opened_fits_file(fits_fp, group)
        if not is_successful(status):
            raise ValueError(
                f"check '{h5_file}/{group_name}' with {fits_file!r} failed: {status.failure()}")
    verbose_print("[OK]")


def get_verbose_print(verbose: bool):
    """Return a ``print``-like callable that is a no-op unless *verbose* is true."""
    if not verbose:
        # swallow everything; same signature shape as the verbose variant
        def _silent(_text, *_args, **_kwargs):
            return None
        return _silent

    def _verbose(text: str, *args, **kwargs):
        print(text, *args, **kwargs)
    return _verbose


@safe
def _check_opened_fits_file(fits_hdus: "HDUList", h5_root: "Group") -> None:
    """Compare every HDU of an opened FITS file with its HDF5 counterpart group.

    :param fits_hdus: the opened fits file
    :param h5_root: the HDF5 group that is a representation of a converted fits file.
    :raises ValueError: when attributes, dataset counts, units, or data differ
    """
    for hdu in fits_hdus:
        hdu: Union["BinTableHDU", "PrimaryHDU"]
        fits_attrs = dict(hdu.header)
        # COMMENT may span multiple header cards; normalize to str for comparison
        if (comment := fits_attrs.pop("COMMENT", None)) is not None:
            fits_attrs["COMMENT"] = str(comment)
        h5_grp = h5_root[hdu.name]
        # BUG FIX: skip dunder-style bookkeeping attributes ("__...__"). The original
        # condition tested `k.startswith("__")` twice (a copy-paste slip), which
        # excluded every attribute merely *starting* with "__" instead.
        h5_attrs = {k: v for k, v in h5_grp.attrs.items() if not (k.startswith("__") and k.endswith("__"))}
        if h5_attrs != fits_attrs:
            raise ValueError(f"attributes of h5:{h5_grp.name!r} and fits:{hdu.name} not equal")

        # table HDUs must expose exactly one HDF5 dataset per FITS column
        if hasattr(hdu, "columns") and len(h5_grp) != len(hdu.columns):
            raise ValueError(f"number of datasets of h5:{h5_grp.name!r} and fits:{hdu.name} not equal")

        for ds_name in h5_grp:
            # BUG FIX: keep the h5py Dataset handle. The original bound `h5_ds` to
            # `h5_grp[ds_name][...]` — a NumPy ndarray, which has no `.name` — so
            # every error branch below raised AttributeError instead of ValueError.
            h5_dataset = h5_grp[ds_name]
            fits_ds = hdu.data.field(ds_name)
            h5_ds = h5_dataset[...]

            col: "Column" = hdu.columns[ds_name]
            # NOTE(review): a column with no unit compares as the string "None";
            # presumably the converter stores str(unit) — confirm against the writer
            if str(col.unit) != h5_dataset.attrs["unit"]:
                raise ValueError(f"unit of h5:{h5_dataset.name!r} and fits:'{hdu.name}/{ds_name}' not equal")

            if np.issubdtype(h5_ds.dtype, np.floating):
                # floats: tolerate representation round-off
                if not np.allclose(fits_ds, h5_ds):
                    raise ValueError(f"data of h5:{h5_dataset.name!r} and fits:'{hdu.name}/{ds_name}' not equal")
            else:
                if not np.all(np.equal(fits_ds.ravel(), h5_ds.ravel())):
                    raise ValueError(f"data of h5:{h5_dataset.name!r} and fits:'{hdu.name}/{ds_name}' not equal")


# run the CLI only when executed as a script, not when imported
if __name__ == "__main__":
    main()
