"""Module wrapping raster data capability of gdal."""

from __future__ import annotations
from typing import (
    Optional,
    Union,
    Dict,
    Iterator,
    Tuple,
    List,
    Any,
)

import os
import uuid
from contextlib import contextmanager
from math import log2
from enum import IntEnum
from collections import Counter
import threading

import numpy as np
import numpy.typing as npt
from osgeo import gdal, gdal_array, ogr, osr
from osgeo.gdal_array import BandRasterIONumPy
from pyproj import CRS

from common.logger import logger
from common.config import DEFAULT_CACHE_DIR
from common.io_utils.temps import temp_dir
from common.typing_utils import numeric
from common.geometry.geom import Geom
from common.geometry.bbox import BoundingBox

from common.geoimage.utils import (
    SUPPORTED_OVERVIEW_METHODS,
    SUPPORTED_WARP_RESAMPLE_ALGS,
    SUPPORTED_TRANSLATE_RESAMPLE_ALGS,
    polyfill_resample_alg_for_gdal_vrt,
)
from common.geoimage.scene_meta import (
    SceneMeta,
    ValueInterpretationMeta,
    SourceWindow,
    calc_window_from_aoi,
)

# Raise Python exceptions instead of returning error codes / printing to stderr.
gdal.UseExceptions()
ogr.UseExceptions()


# NOTE: we could enable more drivers if needed
# for example: DERIVED, JPEG, PNG, WEBP, GPKG, Rasterlite, MBTiles, WMS, WCS, etc.
ALLOWED_GDAL_DRIVERS = ["GTiff", "VRT", "MEM", "HDF4", "HFA"]
# Optional drivers are probed once at import time; availability depends on the
# GDAL build this process is running against.
IS_KAKADU_AVAILABLE = gdal.GetDriverByName("JP2KAK") is not None
IS_COG_AVAILABLE = gdal.GetDriverByName("COG") is not None
if IS_KAKADU_AVAILABLE:
    ALLOWED_GDAL_DRIVERS.append("JP2KAK")
    # NOTE default to use 8 threads for JP2KAK
    # should be a good compromise for our usage and our workstations' resources
    gdal.SetConfigOption("JP2KAK_THREADS", "8")
else:
    # fall back to the open-source JPEG2000 driver when Kakadu is absent
    ALLOWED_GDAL_DRIVERS.append("JP2OpenJPEG")
if IS_COG_AVAILABLE:
    ALLOWED_GDAL_DRIVERS.append("COG")

# Default creation options for tiled, LZW-compressed GeoTIFF output.
DEFAULT_GEOTIFF_PROFILE = {
    "TILED": "YES",
    "BLOCKXSIZE": 256,
    "BLOCKYSIZE": 256,
    "COMPRESS": "LZW",
    "ZLEVEL": 6,
    "NUM_THREADS": 2,
    "BIGTIFF": "IF_SAFER",
}
# Overrides applied on top of the GeoTIFF profile when LERC_ZSTD compression
# is requested (MAX_Z_ERROR == 0 keeps LERC lossless).
ADDITIONAL_GEOTIFF_LERC_ZSTD_PROFILE = {
    "COMPRESS": "LERC_ZSTD",
    "ZLEVEL": 9,
    "ZSTD_LEVEL": 9,
    "MAX_Z_ERROR": 0,
}
# Default creation options for the native COG driver (when available).
DEFAULT_COG_PROFILE = {
    "BLOCKSIZE": 512,
    "COMPRESS": "LZW",
    "LEVEL": 6,
    "NUM_THREADS": 2,
    "BIGTIFF": "IF_SAFER",
}
# LERC_ZSTD overrides for the COG driver.
ADDITIONAL_COG_LERC_ZSTD_PROFILE = {
    "COMPRESS": "LERC_ZSTD",
    "LEVEL": 9,
    "MAX_Z_ERROR": 0,
}
# Lossless JPEG2000 defaults for the JP2OpenJPEG driver.
DEFAULT_JPEG2000_PROFILE = {
    "QUALITY": 100,
    "REVERSIBLE": "YES",
    "YCBCR420": "NO",
    "PROGRESSION": "RPCL",
}
# Lossless JPEG2000 defaults for the proprietary JP2KAK (Kakadu) driver.
DEFAULT_JPEG2000_KAKADU_PROFILE = {"QUALITY": 100, "Corder": "RPCL"}

class GDALNumeric(IntEnum):
    """Enum for GDAL Numeric DataTypes.

    Mirrors the ``gdal.GDT_*`` integer constants so the data types can be
    passed around as a typed enum while remaining interchangeable with the
    raw ints the gdal API expects (IntEnum compares equal to its int value).
    """

    Byte = gdal.GDT_Byte
    Int16 = gdal.GDT_Int16
    UInt16 = gdal.GDT_UInt16
    Int32 = gdal.GDT_Int32
    UInt32 = gdal.GDT_UInt32
    Float32 = gdal.GDT_Float32
    Float64 = gdal.GDT_Float64


@contextmanager
def gdal_env(**kwargs: Any) -> Iterator[None]:
    """Temporarily apply GDAL configuration options within a context.

    On entry every keyword argument is installed via `gdal.SetConfigOption`
    (keys upper-cased, values stringified with `str`); on exit each option is
    restored to the value it had before, including `None` for options that
    were previously unset.

    Parameters
    ----------
    kwargs : Any
        GDAL configuration options as key/value pairs; values must be
        convertible to string via `str`.

    Examples
    --------
    >>> with gdal_env(CPL_DEBUG="ON"):
            # do something you want
            # while gdal's full debugging output has been turn on
            # same as running a gdal binary command with `--config CPL_DEBUG=ON`
            pass
    """
    saved: Dict[str, Optional[str]] = {}
    # Record the previous value of each option before overwriting it.
    for name, value in kwargs.items():
        key = name.upper()
        saved[key] = gdal.GetConfigOption(key, None)
        gdal.SetConfigOption(key, str(value))

    try:
        yield
    finally:
        # Restore every option we touched, even if the body raised.
        for key, previous in saved.items():
            gdal.SetConfigOption(key, previous)


def convert_dtype_np2gdal(np_dtype: npt.DTypeLike) -> GDALNumeric:
    """Convert numpy datatype to gdal numeric datatype.

    Parameters
    ----------
    np_dtype : npt.DTypeLike
        numpy dtype to convert.

    Returns
    -------
    GDALNumeric
        GDAL numeric datatype corresponding to the given numpy dtype.

    Raises
    ------
    ValueError
        When a matching conversion cannot be found, or the given dtype is not supported.
    """
    # BUGFIX: the `np.bool` alias was deprecated in numpy 1.20 and removed in
    # numpy 1.24, so accessing it raises AttributeError on modern numpy.
    # `np.bool_` / builtin `bool` cover the same inputs; booleans are stored
    # as bytes because gdal exposes no boolean data type here.
    if (np_dtype == np.bool_) or (np_dtype == bool):
        res = gdal.GDT_Byte
    elif np_dtype == np.uint8:
        res = gdal.GDT_Byte
    elif np_dtype == np.uint16:
        res = gdal.GDT_UInt16
    elif np_dtype == np.int16:
        res = gdal.GDT_Int16
    elif np_dtype == np.float32:
        res = gdal.GDT_Float32
    elif np_dtype == np.float64:
        res = gdal.GDT_Float64
    elif np_dtype == np.int32:
        res = gdal.GDT_Int32
    elif np_dtype == np.uint32:
        res = gdal.GDT_UInt32
    else:
        raise ValueError(
            (
                "Currently we have not supported numpy dtype {}, "
                "only np.bool_, np.uint8, np.uint16, np.int16, np.int32, np.uint32, np.float32, np.float64 supported"
            ).format(np_dtype)
        )

    return GDALNumeric(res)


def convert_dtype_gdal2np(dtype_gdal: Union[int, GDALNumeric]) -> npt.DTypeLike:
    """Convert gdal numeric datatype to numpy datatype.

    Parameters
    ----------
    dtype_gdal : Union[GDALNumeric, int]
        GDAL numeric datatype to convert.

    Returns
    -------
    npt.DTypeLike
        numpy dtype corresponding to the given GDAL numeric datatype.

    Raises
    ------
    ValueError
        When a matching conversion cannot be found, or the given dtype is not supported.
    """
    # Dispatch table instead of an if/elif chain; GDALNumeric is an IntEnum,
    # so it hashes and compares like the raw gdal.GDT_* ints used as keys.
    gdal_to_np = {
        gdal.GDT_Byte: np.uint8,
        gdal.GDT_UInt16: np.uint16,
        gdal.GDT_Int16: np.int16,
        gdal.GDT_Int32: np.int32,
        gdal.GDT_UInt32: np.uint32,
        gdal.GDT_Float32: np.float32,
        gdal.GDT_Float64: np.float64,
    }
    if dtype_gdal not in gdal_to_np:
        raise ValueError(
            (
                "Currently we have not supported GDAL dtype {}, "
                "only gdal.GDT_Byte, gdal.GDT_UInt16, gdal.GDT_Int16, gdal.GDT_Int32,"
                "gdal.GDT_UInt32, gdal.GDT_Float32, gdal.GDT_Float64 supported"
            ).format(dtype_gdal)
        )
    return gdal_to_np[dtype_gdal]


def srs_to_epsg_str(srs: osr.SpatialReference) -> str:
    """Render a GDAL SpatialReference as an ``<authority>:<code>`` string (e.g. "EPSG:4326")."""
    # GetAuthorityName/GetAuthorityCode with key=None query the root node.
    return "{}:{}".format(srs.GetAuthorityName(None), srs.GetAuthorityCode(None))


def read_meta_from_ds(
    ds: gdal.Dataset, read_value_interpretation: bool = False, read_dtype: bool = False
) -> Union[Tuple[SceneMeta, Tuple[npt.DTypeLike, GDALNumeric]], SceneMeta]:
    """Extract a `SceneMeta` (and optionally the data type) from a `gdal.Dataset`.

    Parameters
    ----------
    ds : gdal.Dataset
        Dataset whose metadata to be read.
    read_value_interpretation : bool, optional
        Whether to read the per-band scale/offset/nodata metadata, by default False.
    read_dtype: bool, optional
        Whether to determine the data type of the dataset, by default False.
        The dataset is assumed homogeneous, so only the first band's type is inspected.

    Returns
    -------
    SceneMeta or Tuple[SceneMeta, Tuple[npt.DTypeLike, GDALNumeric]]
        Metadata for the dataset; when `read_dtype` is True, also a
        (`npt.DTypeLike`, `GDALNumeric`) pair describing the data type.
    """
    width, height, band_count = ds.RasterXSize, ds.RasterYSize, ds.RasterCount
    projection = ds.GetProjection()
    transform = ds.GetGeoTransform()

    interpretations = None
    if read_value_interpretation:
        interpretations = []
        # gdal band indices are 1-based
        for idx in range(1, band_count + 1):
            band = ds.GetRasterBand(idx)
            interpretations.append(
                ValueInterpretationMeta(
                    band.GetScale(), band.GetOffset(), band.GetNoDataValue()
                )
            )

    meta = SceneMeta(width, height, band_count, projection, transform, interpretations)
    if not read_dtype:
        return meta

    gdal_type = GDALNumeric(ds.GetRasterBand(1).DataType)
    return meta, (convert_dtype_gdal2np(gdal_type), gdal_type)


class GDALRaster(object):
    """Class wrapping a gdal.Dataset object.

    Instances act as context managers: the underlying `gdal.Dataset` is only
    created/opened in `__enter__` and released (and any temporary `/vsimem`
    file removed) in `__exit__`.  Construct instances via `GDALRaster.open`,
    `GDALRaster.open_ndarray` or `GDALRaster.wrap_gdal_dataset`.
    """

    def __init__(self):
        """Initialize a dummy GDALRaster object.

        Note
        ----
        Expected to be used by classes and functions within `common.geoimage.gdal_sys`
        to create a dummy object, before further population.
        If you want to create a `GDALRaster` object, Please use `GDALRaster.open`, `GDALRaster.open_ndarray`
        or `GDALRaster.wrap_gdal_dataset` instead.
        """
        # origin tag set by the factory methods: "file", "ndarray" or "gdaldataset"
        self._from = None
        # per-origin bookkeeping: path, mode, creation parameters, vsimem flag, ...
        self._info = {}
        # the wrapped gdal.Dataset; populated in __enter__, dropped in __exit__
        self.ds = None

    @staticmethod
    def open(
        fpath_raster: str,
        mode: str = "r",
        out_format: Optional[str] = None,
        n_bands: Optional[int] = None,
        n_cols: Optional[int] = None,
        n_rows: Optional[int] = None,
        data_type: Optional[Union[GDALNumeric, int]] = None,
        options: Optional[Dict[str, str]] = None,
    ) -> GDALRaster:
        """Create a `GDALRaster` by opening a geo-raster file.

        Parameters
        ----------
        fpath_raster : str
            File path to the geo raster.
        mode : str, optional
            Mode for opening the file, valid choice include "r" (read), "u" (update) and "w" (write),
            by default "r".
            Notice that JPEG2000 file format is only with "r" support.
        out_format : Optional[str], optional
            File format for the output, only valid when in "w" mode, by default None.
            Currently support two options: `tif` (geotiff) or `cog` (cloud optimized geotiff).
        n_bands : Optional[int], optional
            Number of bands, only valid when in "w" mode, by default None.
        n_cols : Optional[int], optional
            Number of columns, only valid when in "w" mode, by default None.
        n_rows : Optional[int], optional
            Number of rows, only valid when in "w" mode, by default None.
        data_type : Optional[Union[GDALNumeric, int]], optional
            Data type for the output file, only valid when in "w" mode, by default None.
        options : Optional[Dict[str, str]], optional
            Additional options passed into gdal, only valid when in "w" mode, by default None.

        Returns
        -------
        GDALRaster
            Created `GDALRaster` object.

        Raises
        ------
        NotImplementedError
            Raised when the given `fpath_raster` is a `vsi` type path other than the `vsimem`.
        ValueError
            Raised when the given `mode` is not one of "r", "u", and "w".
        """
        # Note: no gdal call happens here; the dataset is opened lazily in __enter__.
        rst = GDALRaster()
        rst._from = "file"
        rst._info["fpath_raster"] = fpath_raster
        rst._info["flag_mem"] = False
        if fpath_raster.startswith("/vsi"):
            if fpath_raster.startswith("/vsimem"):
                # vsimem has its own memory management requirements, should be more careful
                rst._info["flag_mem"] = True
            else:
                # NOTE check and prohibit vsi for now
                # vsizip, vsigzip, vsitar have their own memory management considerations
                # vsicurl and the derived vsis3, vsigcs, etc. have their own security considerations
                raise NotImplementedError("vsi support not implemented yet")

        if mode not in ("r", "u", "w"):
            raise ValueError("only support mode `r`, `w` and `u`.")
        rst._info["mode"] = mode

        if mode == "w":
            # creation parameters are validated in __enter__, not here
            rst._info["n_bands"] = n_bands
            rst._info["n_cols"] = n_cols
            rst._info["n_rows"] = n_rows
            rst._info["out_format"] = out_format
            rst._info["data_type"] = data_type
            rst._info["options"] = options
        return rst

    @staticmethod
    def open_ndarray(data: np.ndarray, meta: SceneMeta, mode: str = "r") -> GDALRaster:
        """Create a GDALRaster by opening a `np.ndarray`

        Parameters
        ----------
        data : np.ndarray
            A numpy array to be opened, should be a 2D array in the shape of $(n_{rows} X n_{cols})$,
            or a 3D array in the shape of $(n_{bands} X n_{rows} X n_{cols})$
        meta : SceneMeta
            Metadata for the opened raster.
        mode : str, optional
            Mode for opening the numpy array, could be "r" (read) or "u" (update), by default "r".

        Returns
        -------
        GDALRaster
            A `GDALRaster` object representing the given numpy array and meta data.

        Raises
        ------
        ValueError
            Raised when the `mode` is not one of "r" or "u".
        """
        if mode not in ("r", "u"):
            raise ValueError("only support mode `r` and `u`.")

        # normalize 2D input into the (band, row, col) layout expected downstream
        if len(data.shape) == 2:
            data = data[np.newaxis, :, :]
        elif len(data.shape) == 3:
            # deliberately do nothing
            pass
        else:
            raise ValueError("Only support 2D or 3D numpy array")

        rst = GDALRaster()
        rst._from = "ndarray"
        rst._info["ndarray"] = data
        rst._info["meta"] = meta
        rst._info["mode"] = mode

        return rst

    @staticmethod
    def wrap_gdal_dataset(ds: gdal.Dataset) -> GDALRaster:
        """Create a `GDALRaster` by wrapping a `gdal.Dataset`

        Parameters
        ----------
        ds : gdal.Dataset
            A `gdal.Dataset` object to be wrapped.

        Returns
        -------
        GDALRaster
            Created `GDALRaster` object.
        """
        # lifetime of the wrapped dataset stays with the caller;
        # __enter__/__exit__ deliberately do not touch it
        rst = GDALRaster()
        rst._from = "gdaldataset"
        rst.ds = ds
        return rst

    def __enter__(self) -> GDALRaster:  # noqa:C901
        """Materialize `self.ds` according to how this raster was constructed."""
        if self._from == "file":
            mode = self._info["mode"]
            fpath_raster = self._info["fpath_raster"]
            if mode == "r":
                ds = gdal.Open(fpath_raster, gdal.GA_ReadOnly)
            elif mode == "u":
                ds = gdal.Open(fpath_raster, gdal.GA_Update)
            elif mode == "w":
                out_format = self._info["out_format"]
                n_bands = self._info["n_bands"]
                n_cols = self._info["n_cols"]
                n_rows = self._info["n_rows"]
                data_type = self._info["data_type"]
                options = self._info["options"]
                # all creation parameters are mandatory for "w" mode
                if (
                    (out_format is None)
                    or (n_bands is None)
                    or (n_cols is None)
                    or (n_rows is None)
                    or (data_type is None)
                ):
                    raise ValueError(
                        "Must specify `out_format`, `n_bands`, `n_cols`, `n_rows` and `data_type` in `w` mode."
                    )

                out_format = out_format.lower()
                if out_format not in ("tif",):
                    raise ValueError("only support `tif` in `out_format` for now.")

                if out_format == "tif":
                    driver_name = "GTiff"
                    creation_options = DEFAULT_GEOTIFF_PROFILE.copy()

                # user options override the default profile; keys/values are
                # upper-cased to match gdal creation-option conventions
                if options is not None:
                    options = {
                        key.upper(): str(value).upper()
                        for key, value in options.items()
                    }
                    creation_options.update(options)
                lst_creation_option = [
                    f"{key}={value}" for key, value in creation_options.items()
                ]

                driver = gdal.GetDriverByName(driver_name)
                ds = driver.Create(
                    fpath_raster,
                    n_cols,
                    n_rows,
                    n_bands,
                    data_type,
                    options=lst_creation_option,
                )

            self.ds = ds
        elif self._from == "ndarray":
            data = self._info["ndarray"]
            meta = self._info["meta"]
            if self._info["mode"] == "r":
                ds = gdal_array.OpenNumPyArray(data, True)  # assume band first
                # self._info["fpath_raster"] = gdal_array.GetArrayFilename(data)
            elif self._info["mode"] == "u":
                # "u" mode copies the array into a temporary in-memory GTiff.
                # NOTE(review): updates made through this dataset land in the
                # /vsimem copy, not back in the original numpy array, and the
                # copy is unlinked in __exit__ — confirm this is intended.
                n_bands, n_rows, n_cols = data.shape
                fpath_raster = "/vsimem/from_ndarray_{}.tif".format(uuid.uuid4())
                lst_creation_option = [
                    f"{key}={value}" for key, value in DEFAULT_GEOTIFF_PROFILE.items()
                ]
                driver = gdal.GetDriverByName("GTiff")
                ds = driver.Create(
                    fpath_raster,
                    n_cols,
                    n_rows,
                    n_bands,
                    convert_dtype_np2gdal(data.dtype),
                    options=lst_creation_option,
                )
                for band_ind in range(data.shape[0]):
                    ds.GetRasterBand(band_ind + 1).WriteArray(data[band_ind, :, :])
                self._info["flag_mem"] = True
                self._info["fpath_raster"] = fpath_raster

            # apply georeferencing and optional per-band scale/offset/nodata
            ds.SetProjection(meta.prj_wkt)
            ds.SetGeoTransform(meta.geotrans)
            if (
                hasattr(meta, "value_interpretations")
                and meta.value_interpretations is not None
            ):
                for band_ind, vi in enumerate(meta.value_interpretations, start=1):
                    bd = ds.GetRasterBand(band_ind)
                    if vi.nodata is not None:
                        bd.SetNoDataValue(vi.nodata)
                    if vi.scale is not None:
                        bd.SetScale(vi.scale)
                    if vi.offset is not None:
                        bd.SetOffset(vi.offset)
                    bd = None
            self.ds = ds
        elif self._from == "gdaldataset":
            # explicitly do NOTHING
            pass
        return self

    def __exit__(self, *args):
        """Release the dataset reference and unlink any /vsimem temp file we created."""
        # dropping the reference triggers gdal's flush/close of the dataset
        self.ds = None
        if (
            self._from == "file"
            and "flag_mem" in self._info
            and self._info["flag_mem"]
            and self._info["mode"] == "w"
            and (gdal.VSIStatL(self._info["fpath_raster"]) is not None)
        ):
            gdal.Unlink(self._info["fpath_raster"])
        elif (
            self._from == "ndarray"
            and "flag_mem" in self._info
            and self._info["flag_mem"]
            and self._info["mode"] == "u"
            and (gdal.VSIStatL(self._info["fpath_raster"]) is not None)
        ):
            gdal.Unlink(self._info["fpath_raster"])


def _cross_check_data_and_meta(data, meta):
    """cross check to make sure given meta and data array are compatible"""
    if len(data.shape) == 2:
        if meta.n_bands == 1:
            data = data[np.newaxis, :, :]
        else:
            raise ValueError("Given `data` and `meta` have mismatched dimensions")
    elif len(data.shape) == 3:
        # deliberately do nothing here
        pass
    else:
        raise ValueError("Only support 2D or 3D numpy array")

    if data.shape != (meta.n_bands, meta.n_rows, meta.n_cols):
        raise ValueError("Given `data` and `meta` have mismatched dimensions")
    return data, meta


def write_ndarray_to_geotiff(
    data: np.ndarray,
    meta: SceneMeta,
    fpath_output: str,
    use_lerc_zstd: bool = False,
    max_z_error: numeric = 0,
    options: Optional[Dict[str, str]] = None,
):
    """Write given numpy array into a geotiff file.

    Parameters
    ----------
    data : np.ndarray
        A numpy array to be written, should be a 2D or 3D array, with the same dimension as specified in `meta`.
    meta : SceneMeta
        Metadata object for the raster file to write.
    fpath_output : str
        File path for the output.
    use_lerc_zstd : bool, optional
        Whether to use the LERC_ZSTD compression method, by default False not to use.
        Notice this option and the `max_z_error` is only valid
        when you are using our own provisioned gdal with LERC_ZSTD support.
    max_z_error : numeric, optional
        Maximum tolerated compression error when using LERC_ZSTD compression in lossy mode.
        By default 0 to use the lossless mode.
    options : Optional[Dict[str, str]], optional
        Additional options passing to gdal for writing out the file, by default None.

    Raises
    ------
    ValueError
        Raised when the given `data` and `meta` have mismatched dimensions.
        Raised when the given `data` is not a 2-D or 3-D numpy array.
    """
    data, meta = _cross_check_data_and_meta(data, meta)
    dtype_gdal = convert_dtype_np2gdal(data.dtype)

    if not use_lerc_zstd:
        # plain path: hand user options straight through (GDALRaster.open
        # merges them with the default GeoTIFF profile)
        creation_options = options
    else:
        creation_options = ADDITIONAL_GEOTIFF_LERC_ZSTD_PROFILE.copy()
        if max_z_error != 0:
            creation_options["MAX_Z_ERROR"] = max_z_error
        if options is not None:
            creation_options.update(
                {key.upper(): str(value).upper() for key, value in options.items()}
            )

    with GDALRaster.open(
        fpath_output,
        mode="w",
        out_format="tif",
        n_bands=meta.n_bands,
        n_cols=meta.n_cols,
        n_rows=meta.n_rows,
        data_type=dtype_gdal,
        options=creation_options,
    ) as gr:
        if meta.geotrans is not None:
            gr.ds.SetGeoTransform(meta.geotrans)
        if meta.prj_wkt is not None:
            gr.ds.SetProjection(meta.prj_wkt)

        # meta does not change inside the loop, so evaluate this once
        has_value_interpretations = (
            hasattr(meta, "value_interpretations")
            and meta.value_interpretations is not None
        )
        for idx in range(meta.n_bands):
            band = gr.ds.GetRasterBand(idx + 1)
            # use gdal_array's direct IO for some performance gain, which is
            # equivalent to band.WriteArray(data[idx, :, :])
            BandRasterIONumPy(
                band,
                1,
                0,
                0,
                meta.n_cols,
                meta.n_rows,
                data[idx, :, :],
                dtype_gdal,
                gdal.GRIORA_NearestNeighbour,
            )
            if has_value_interpretations:
                vi = meta.value_interpretations[idx]
                if vi.nodata is not None:
                    band.SetNoDataValue(vi.nodata)
                if vi.scale is not None:
                    band.SetScale(vi.scale)
                if vi.offset is not None:
                    band.SetOffset(vi.offset)
            band = None
        gr.ds.FlushCache()


def write_ndarray_to_jpeg2000(
    data: np.ndarray,
    meta: SceneMeta,
    fpath_output: str,
    use_kakadu: bool = False,
    kakadu_nthreads: int = 1,
    options: Optional[Dict[str, str]] = None,
):
    """Write given numpy array into a jpeg2000 file.

    Parameters
    ----------
    data : np.ndarray
        A numpy array to be written, should be a 2D or 3D array, with the same dimension as specified in `meta`.
    meta : SceneMeta
        Metadata object for the raster file to write.
    fpath_output : str
        File path for the output.
    use_kakadu : bool, optional
        Whether to use JP2KAK driver, only valid when there is JP2KAK driver detected,
        by default False not to use (so use JP2OpenJPEG driver).
    kakadu_nthreads : int, optional
        A positive integer for the number of threads to use by Kakadu, only valid if `use_kakadu` is True,
        by default 1 to use only 1 thread.
        Notice it is the caller's duty to make sure that a reasonable number is provided here,
        a too large number might lead to worse performance or even program halting.
    options : Optional[Dict[str, str]], optional
        Additional options passing to gdal for writing out the jpeg2000 file, by default None.

    Raises
    ------
    ValueError
        Raised when the given `data` and `meta` have mismatched dimensions.
        Raised when the given `data` is not a 2-D or 3-D numpy array.
        Raised when the given `kakadu_nthreads` is not a positive integer.
    """
    data, meta = _cross_check_data_and_meta(data, meta)
    if IS_KAKADU_AVAILABLE and use_kakadu:
        driver_name = "JP2KAK"
        # BUGFIX: the previous `< 0` check let 0 through, although the
        # documented contract requires a strictly positive thread count
        if kakadu_nthreads < 1:
            raise ValueError("`kakadu_nthreads` must be a positive integer.")
        creation_options = DEFAULT_JPEG2000_KAKADU_PROFILE.copy()
    else:
        driver_name = "JP2OpenJPEG"
        creation_options = DEFAULT_JPEG2000_PROFILE.copy()

    # user options override the driver's default profile
    if options is not None:
        options = {key.upper(): str(value).upper() for key, value in options.items()}
        creation_options.update(options)
    lst_creation_option = [f"{key}={value}" for key, value in creation_options.items()]

    with GDALRaster.open_ndarray(data, meta) as rst:
        driver = gdal.GetDriverByName(driver_name)
        # JP2KAK_THREADS is read only by the JP2KAK driver; setting it while
        # writing with JP2OpenJPEG has no effect and is reverted on exit
        with gdal_env(JP2KAK_THREADS=kakadu_nthreads):
            _ = driver.CreateCopy(fpath_output, rst.ds, options=lst_creation_option)


def convert_to_cloud_optimized_geotiff(
    fpath_or_ds_input: Union[str, gdal.Dataset, GDALRaster],
    fpath_output: str,
    method: str = "nearest",
    overviews: Optional[List[int]] = None,
    use_lerc_zstd: bool = False,
    max_z_error: numeric = 0,
    options: Optional[Dict[str, str]] = None,
):
    """Convert a geotiff file or `gdal.Dataset` or GDALRaster` to a cloud optimized geotiff file.

    Parameters
    ----------
    fpath_or_ds_input : Union[str, gdal.Dataset, GDALRaster]
        Input geotiff file, `gdal.Dataset` object, or `GDALRaster` object to convert to cloud optimized geotiff.
    fpath_output : str
        File path for the output.
    method : str, optional
        Method used to create the overviews, can be one of "nearest", "mode", "average", "gauss",
        or their upper case version, by default "nearest".
    overviews : Optional[List[int]], optional
        Overview levels to be created, by default None to calculate the overview levels by the data's size.
    use_lerc_zstd : bool, optional
        Whether to use the LERC_ZSTD compression method, by default False not to use.
        Notice this option and the `max_z_error` is only valid
        when you are using our own provisioned gdal with LERC_ZSTD support.
    max_z_error : numeric, optional
        Maximum tolerated compression error when using LERC_ZSTD compression in lossy mode.
        By default 0 to use the lossless mode.
    options : Optional[Dict[str, str]], optional
        Additional options passed into gdal for creating the output file, by default None.

    Raises
    ------
    ValueError
        Raised when the given `method` is not one of the four supported methods.
    """

    method = method.upper()
    if method not in SUPPORTED_OVERVIEW_METHODS:
        raise ValueError(
            f"Unsupported overview sampling method {method}, must be one of {SUPPORTED_OVERVIEW_METHODS}"
        )

    def _convert(
        ds_input, fpath_output, method, overviews, use_lerc_zstd, max_z_error, options
    ):
        # The COG driver builds overviews itself; without it we fall back to
        # GTiff + manually built overviews + COPY_SRC_OVERVIEWS.
        if IS_COG_AVAILABLE:
            creation_options = DEFAULT_COG_PROFILE.copy()
            if use_lerc_zstd:
                creation_options.update(ADDITIONAL_COG_LERC_ZSTD_PROFILE)
        else:
            if overviews is None:
                n_rows, n_cols = ds_input.RasterYSize, ds_input.RasterXSize
                # BUGFIX: clamp the ratio to at least 1 — for rasters smaller
                # than 512 px the integer division yields 0 and log2(0)
                # raises a math domain error
                size_ratio = max(max(n_rows, n_cols) // 512, 1)
                n_levels = int(log2(size_ratio)) + 1
                overviews = [2 ** i for i in range(1, n_levels + 1)]
            creation_options = DEFAULT_GEOTIFF_PROFILE.copy()
            if use_lerc_zstd:
                creation_options.update(ADDITIONAL_GEOTIFF_LERC_ZSTD_PROFILE)
            creation_options["COPY_SRC_OVERVIEWS"] = "YES"

        if use_lerc_zstd and max_z_error != 0:
            creation_options["MAX_Z_ERROR"] = max_z_error

        if options is not None:
            options = {
                key.upper(): str(value).upper() for key, value in options.items()
            }
            creation_options.update(options)

        lst_creation_option = [
            f"{key}={value}" for key, value in creation_options.items()
        ]

        if IS_COG_AVAILABLE:
            with gdal_env(OVERVIEW_COMPRESS="LERC_ZSTD" if use_lerc_zstd else "LZW"):
                driver = gdal.GetDriverByName("COG")
                _ = driver.CreateCopy(
                    fpath_output, ds_input, options=lst_creation_option
                )
        else:
            with gdal_env(
                COMPRESS_OVERVIEW="LERC_ZSTD" if use_lerc_zstd else "LZW",
                TILED_OVERVIEW="YES",
            ):
                ds_input.BuildOverviews(method, overviews)
                driver = gdal.GetDriverByName("GTiff")
                _ = driver.CreateCopy(
                    fpath_output, ds_input, options=lst_creation_option
                )
                ds_input.BuildOverviews(method)  # remove overviews in input file

    # dispatch on the input flavour; file paths and GDALRaster objects are
    # opened/entered here so their lifetime is bounded by this function
    if isinstance(fpath_or_ds_input, gdal.Dataset):
        _convert(
            fpath_or_ds_input,
            fpath_output,
            method,
            overviews,
            use_lerc_zstd,
            max_z_error,
            options,
        )
    elif isinstance(fpath_or_ds_input, GDALRaster):
        with fpath_or_ds_input as rst:
            _convert(
                rst.ds,
                fpath_output,
                method,
                overviews,
                use_lerc_zstd,
                max_z_error,
                options,
            )
    else:
        with GDALRaster.open(fpath_or_ds_input) as rst:
            _convert(
                rst.ds,
                fpath_output,
                method,
                overviews,
                use_lerc_zstd,
                max_z_error,
                options,
            )


def _unwrap_ds(ds: Union[gdal.Dataset, GDALRaster, str]) -> Tuple[gdal.Dataset, bool]:
    """Return the underlying `gdal.Dataset` plus a "caller must close it" flag.

    Only datasets opened here (from a path string) are flagged for closing:
    a `GDALRaster` manages its own lifetime via `__exit__`, and a raw
    `gdal.Dataset` is assumed to be managed by whoever created it.
    """
    if isinstance(ds, str):
        # we opened this one, so the caller is responsible for closing it
        return gdal.Open(ds), True
    if isinstance(ds, GDALRaster):
        return ds.ds, False
    return ds, False


def _precheck_output_option(fpath_dest):
    if (
        fpath_dest is None
        or fpath_dest.endswith(".tif")
        or fpath_dest.endswith(".tiff")
        or fpath_dest.endswith(".jp2")
        or fpath_dest.endswith(".vrt")
    ):
        pass
    else:
        raise ValueError(
            "Only support GTiff (.tif or .tiff) or JPEG2000 (.jp2) or GDAL VRT (.vrt) formats."
        )


def _precheck_vector_output_option(fpath_dest) -> str:
    """determine driver name for vector output based on destination file's extension"""
    if fpath_dest is not None and (
        fpath_dest.endswith(".shp")
        or fpath_dest.endswith(".gpkg")
        or fpath_dest.endswith(".geojson")
    ):
        if fpath_dest.endswith(".shp"):
            driver_name = "ESRI Shapefile"
        elif fpath_dest.endswith(".gpkg"):
            driver_name = "GPKG"
        elif fpath_dest.endswith(".geojson"):
            driver_name = "GeoJSON"
        else:
            logger.warn(
                "Unknown file format for output polygon file, use default driver: ESRI Shapefile"
            )
            driver_name = "ESRI Shapefile"
        return driver_name
    else:
        raise ValueError(
            "Only support Shape (.shp) or GeoJSON (.geojson) or GPKG (.gpkg) formats."
        )


def _check_creation_options(fpath_dest: str, options: Optional[Dict] = None):
    """Determine the GDAL driver and file-creation options for `fpath_dest`.

    Parameters
    ----------
    fpath_dest : str
        Destination file path; driver and default creation profile are chosen
        from its extension (.tif/.tiff -> GTiff, .jp2 -> JP2OpenJPEG,
        .vrt -> VRT).
    options : Optional[Dict], optional
        Extra creation options overriding the default profile, by default None.
        The given dict is not mutated.

    Returns
    -------
    Tuple[str, Optional[List[str]]]
        The driver name and the creation options as a list of "KEY=VALUE"
        strings (None for the VRT driver, or when no option applies).

    Raises
    ------
    ValueError
        If the extension of `fpath_dest` is not supported.
    """
    # work on a copy so the caller's dict is never mutated
    options = dict(options) if options else {}

    if fpath_dest.endswith(".tif") or fpath_dest.endswith(".tiff"):
        driver_name = "GTiff"
        creation_options = DEFAULT_GEOTIFF_PROFILE.copy()
    elif fpath_dest.endswith(".jp2"):
        driver_name = "JP2OpenJPEG"
        creation_options = DEFAULT_JPEG2000_PROFILE.copy()
        # NOTE driver JP2OpenJPEG does not support creation option NUM_THREADS.
        # (The previous `if "NUM_THREADS" in options` crashed with a TypeError
        # when `options` was None.)
        options.pop("NUM_THREADS", None)
    elif fpath_dest.endswith(".vrt"):
        # VRT takes no creation options here
        return "VRT", None
    else:
        raise ValueError(
            "Only support GTiff (.tif or .tiff) or JPEG2000 (jp2) formats."
        )

    # gdal creation options are conventionally upper-case "KEY=VALUE" strings
    creation_options.update(
        {key.upper(): str(value).upper() for key, value in options.items()}
    )

    lst_creation_option = [f"{key}={value}" for key, value in creation_options.items()]
    if not lst_creation_option:
        lst_creation_option = None

    return driver_name, lst_creation_option


def _to_gdal_raster_or_file(
    ds_output: gdal.Dataset,
    fpath_dest: Optional[str] = None,
    options: Optional[Dict] = None,
):
    """Wrap `ds_output` as a `GDALRaster`, or persist it to `fpath_dest`.

    With no destination path, a `GDALRaster` taking ownership of the dataset
    is returned.  Otherwise the dataset is copied to the file (driver and
    creation options deduced from the extension) and None is returned;
    write failures are logged rather than raised.
    """
    if fpath_dest is None:
        # the returned GDALRaster keeps the reference to the gdal.Dataset alive
        return GDALRaster.wrap_gdal_dataset(ds_output)

    drv_name, creation_opts = _check_creation_options(fpath_dest, options=options)

    drv = gdal.GetDriverByName(drv_name)
    try:
        if drv_name == "VRT":
            gdal.Translate(fpath_dest, ds_output)
        else:
            _ = drv.CreateCopy(fpath_dest, ds_output, options=creation_opts)
    except Exception as exc:
        # best-effort write: report the failure but do not propagate it
        logger.error(f"Fail to write to {fpath_dest} due to err `{str(exc)}`")
    finally:
        drv = None
    return None


def _check_translate_params(
    ds: Union[gdal.Dataset, GDALRaster, str],
    bands: Optional[Union[List[int], int]] = None,
    window: Optional[SourceWindow] = None,
    extent: Optional[BoundingBox] = None,
    aoi: Optional[Geom] = None,
    dest_res: Optional[numeric] = None,
    resample_alg: str = "nearest",
    fpath_dest: Optional[str] = None,
    options: Optional[Dict] = None,
):
    """Precheck parameters for translating input dataset, convert parameters if necessary.

    Validates `resample_alg`, the output path extension and the mutual
    exclusivity of `window` / `extent` / `aoi`, then converts the arguments
    into the form expected by `gdal.Translate`: 0-based band indices become
    gdal's 1-based ones, an `aoi` geometry is turned into a pixel `window`,
    a `SourceWindow` into a `srcWin` list and a `BoundingBox` into a gdal
    projected window.

    Returns
    -------
    tuple
        `(ds_tmp, to_close, bands, srcWin, projWin, dest_res, resample_alg, options)`;
        `to_close` tells the caller whether it is responsible for closing
        `ds_tmp` (True when `ds` was given as a file path).

    Raises
    ------
    ValueError
        If `resample_alg` is unknown or `fpath_dest` has an unsupported extension.
    RuntimeError
        If more than one of `window` / `extent` / `aoi` is given,
        or the `aoi` does not intersect the dataset.
    """
    # accept gdal.Warp's spelling of the nearest-neighbour algorithm too
    if resample_alg == "near":
        resample_alg = "nearest"
    if resample_alg not in SUPPORTED_TRANSLATE_RESAMPLE_ALGS:
        raise ValueError(f"Unknown `resample_alg` {resample_alg}")

    # `window`, `extent` and `aoi` are mutually exclusive ways to subset
    n_not_none = sum([1 for to_check in (window, extent, aoi) if to_check is not None])
    if n_not_none >= 2:
        raise RuntimeError("You can only specify one of `window` or `aoi` or `extent`.")

    _precheck_output_option(fpath_dest)

    # convert 0-based band indices (our convention) to gdal's 1-based ones
    if bands is not None:
        if isinstance(bands, int):
            bands = [bands + 1]
        else:
            bands = [v + 1 for v in bands]

    ds_tmp, to_close = _unwrap_ds(ds)

    if aoi is not None:
        # derive a pixel-space window from the AOI geometry
        meta = read_meta_from_ds(ds_tmp, False)
        window = calc_window_from_aoi(meta, aoi)
        if window is None:
            # close the dataset we opened above before bailing out
            if to_close:
                ds_tmp = None
            raise RuntimeError("The given `aoi` does not intersect with the dataset")

    if window is None:
        srcWin = None
    else:
        # gdal.Translate expects srcWin as [xoff, yoff, xsize, ysize]
        srcWin = [
            window.x_off,
            window.y_off,
            window.x_size,
            window.y_size,
        ]

    if extent is not None:
        projWin = extent.to_gdal_projected_window()
    else:
        projWin = None

    # 0 is gdal.Translate's "not set" value for xRes/yRes
    if dest_res is None:
        dest_res = 0

    if options is None:
        options = {}

    return ds_tmp, to_close, bands, srcWin, projWin, dest_res, resample_alg, options


def translate_ds(
    ds: Union[gdal.Dataset, GDALRaster, str],
    bands: Optional[Union[List[int], int]] = None,
    window: Optional[SourceWindow] = None,
    extent: Optional[BoundingBox] = None,
    aoi: Optional[Geom] = None,
    dest_res: Optional[numeric] = None,
    nodata: Optional[numeric] = None,
    resample_alg: str = "nearest",
    n_threads: int = 4,
    progress: Optional[bool] = False,
    fpath_dest: Optional[str] = None,
    options: Optional[Dict] = None,
) -> Optional[GDALRaster]:
    """Subset a given `gdal.Dataset` or `GDALRaster` spatially and spectrally,
    by calling `gdal.Translate`.

    Parameters
    ----------
    ds : Union[gdal.Dataset, GDALRaster, str]
        A given `gdal.Dataset` or `GDALRaster` object, or a gdal raster file to be subset.
    bands : Optional[Union[List[int], int]], optional
        Bands (a list of integers) or band (a single integer) to be selected,
        notice the band index here is expected to be 0-based,
        by default None to use all bands.
    window : Optional[SourceWindow], optional
        Source window to be subset in image pixel space, by default None
    aoi : Optional[Geom], optional
        Area of interest to be subset, by default None
        NOTE aoi is used to calculate the window, not a true aoi for cropping.
    extent : Optional[BoundingBox], optional
        Extent of the output, specified in the dataset's own spatial reference system,
        by default None.
    dest_res : Optional[numeric], optional
        Destination resolution, by default None to let `gdal` decide the output resolution.
    nodata : Optional[numeric], optional
        Destination nodata value(s), by default None to let `gdal` decide the right nodata value to be used.
    resample_alg : Optional[str], optional
        Resampling algorithm to be used,
        should be one of "nearest", "bilinear", "cubic", "cubicspline", "lanczos", "average", "rms", "mode".
        Default: "nearest".
    n_threads: Optional[int]
        Number of threads used for creating the output file, if specified. Default: 4
    progress: Optional[bool]
        If showing the progress of processing. Default: False
    fpath_dest : Optional[str], optional
        Output file path, by default None not to output.
        Notice it only support file path ends with ".tif", ".tiff" (GeoTiff format),
        or with ".jp2" (JPEG2000 format).
    options : Optional[Dict[str, str]], optional
        Additional options passed into gdal for creating the output file, by default None.
        Only used when `fpath_dest` is provided.

    Returns
    -------
    Optional[GDALRaster]
        A `GDALRaster` object that is the subset of the given object,
        or None if it is instructed to write to a file path.

    Raises
    ------
    RuntimeError
        Raised when the more than one of the `window`, `aoi`, `extent` are specified.
        Raised when the given `aoi` does not intersect with the given `gdal.Dataset` or `GDALRaster`.
    """

    # validate arguments and convert them into gdal.Translate's expected form
    (
        ds_tmp,
        to_close,
        bands,
        srcWin,
        projWin,
        dest_res,
        resample_alg,
        options,
    ) = _check_translate_params(
        ds,
        bands=bands,
        window=window,
        extent=extent,
        aoi=aoi,
        dest_res=dest_res,
        resample_alg=resample_alg,
        fpath_dest=fpath_dest,
        options=options,
    )

    kwargs = {
        "bandList": bands,
        "srcWin": srcWin,
        "projWin": projWin,
        "xRes": dest_res,
        "yRes": dest_res,
        "noData": nodata,
        "resampleAlg": resample_alg,
        "callback": gdal.TermProgress_nocb if progress else None,  # progress bar
    }

    try:
        if fpath_dest is None:
            # translate into an in-memory dataset which is wrapped below
            kwargs["format"] = "MEM"
            ds_translated = gdal.Translate("", ds_tmp, **kwargs)
        elif fpath_dest is not None and isinstance(fpath_dest, str):
            # write straight to the destination file using the driver and
            # creation options deduced from its extension
            options.update({"NUM_THREADS": n_threads})
            driver_name, lst_creation_option = _check_creation_options(
                fpath_dest, options=options
            )
            kwargs["format"] = driver_name
            kwargs["creationOptions"] = lst_creation_option
            gdal.Translate(fpath_dest, ds_tmp, **kwargs)
        else:
            raise ValueError("Invalid input of `fpath_dest`, accepts a file path str.")
    except Exception as err:
        # make sure no reference to a (possibly partial) MEM result survives
        if fpath_dest is None:
            ds_translated = None
        raise err
    else:
        if fpath_dest is None:
            # hand ownership of the MEM dataset over to a GDALRaster
            ret = _to_gdal_raster_or_file(ds_translated, fpath_dest, options)
            ds_translated = None
        else:
            ret = None
        return ret
    finally:
        # close the dataset only if we opened it (i.e. `ds` was a file path)
        if to_close:
            ds_tmp = None


def _check_warp_params(
    ds: Union[gdal.Dataset, GDALRaster, str],
    dest_extent: Optional[BoundingBox] = None,
    cutline: Optional[Geom] = None,
    resample_alg: str = "near",
    ovr_to_use: Optional[str] = "AUTO-1",
    n_threads: int = 4,
    mem: Optional[int] = None,
    fpath_dest: Optional[str] = None,
    options: Optional[Dict] = None,
):
    """Precheck parameters for warp input dataset, convert parameters if necessary.

    Validates `ovr_to_use`, `n_threads`, `resample_alg` and the output path
    extension, then converts the user-facing arguments into the form expected
    by `gdal.Warp`: the `dest_extent` bounding box becomes a
    `[minx, miny, maxx, maxy]` list, the `cutline` geometry is materialized
    as an in-memory OGR datasource, and `mem` is normalized for gdal's
    warp-memory option.

    Returns
    -------
    tuple
        `(ds_tmp, to_close, output_bounds, cutline_ds_name, resample_alg, mem, options)`;
        `to_close` tells the caller whether it must close `ds_tmp`, and
        `cutline_ds_name` (when not None) must be unlinked by the caller.

    Raises
    ------
    ValueError
        If `ovr_to_use`, `n_threads`, `resample_alg` or `fpath_dest` is invalid.
    """
    if ovr_to_use not in ("AUTO", "AUTO-1", None):
        raise ValueError(f"Not supported method of `ovr_to_use` {ovr_to_use}")

    if not (isinstance(n_threads, int) and (n_threads >= 1)):
        # NOTE: gdal support the use of "ALL" to use all available CPUs
        # we deliberately prohibit this usage and require caller to specify number of threads
        raise ValueError("`n_threads` must be an integer greater or equal to 1.")

    # accept gdal.Translate's spelling of the nearest-neighbour algorithm too
    if resample_alg == "nearest":
        resample_alg = "near"
    if resample_alg not in SUPPORTED_WARP_RESAMPLE_ALGS:
        raise ValueError(f"Unknown `resample_alg` {resample_alg}")

    _precheck_output_option(fpath_dest)

    if dest_extent is not None:
        output_bounds = [
            dest_extent.left,
            dest_extent.bottom,
            dest_extent.right,
            dest_extent.upper,
        ]
    else:
        output_bounds = None

    # open/unwrap the source *before* materializing the cutline, so that a
    # failure to open `ds` cannot leak the in-memory cutline datasource
    # (previously the cutline was created first and leaked on an open failure,
    # because the caller's cleanup only runs after this function returns)
    ds_tmp, to_close = _unwrap_ds(ds)

    if cutline is not None:
        cutline_ds_name = cutline.to_ogr_mem_ds()
    else:
        cutline_ds_name = None

    if mem is not None and mem >= 10000:
        # gdal interprets warp-memory values >= 10000 as bytes rather than MB,
        # so convert the caller's MB figure into bytes
        mem *= 1e6

    if options is None:
        options = {}

    return ds_tmp, to_close, output_bounds, cutline_ds_name, resample_alg, mem, options


def warp_ds(
    ds: Union[gdal.Dataset, GDALRaster, str],
    dest_srs: str,
    dest_extent: Optional[BoundingBox] = None,
    dest_res: Optional[numeric] = None,
    dest_nodata: Optional[Union[numeric, List[numeric]]] = None,
    src_srs: Optional[str] = None,
    src_nodata: Optional[numeric] = None,
    cutline: Optional[Geom] = None,
    crop_to_cutline: bool = False,
    resample_alg: str = "near",
    ovr_to_use: Optional[str] = "AUTO-1",
    n_threads: int = 4,
    mem: Optional[int] = None,
    progress: Optional[bool] = False,
    fpath_dest: Optional[str] = None,
    options: Optional[Dict] = None,
) -> Optional[GDALRaster]:
    """Warp a given `gdal.Dataset` or `GDALRaster` by calling `gdal.Warp`.

    Parameters
    ----------
    ds : Union[gdal.Dataset, GDALRaster, str]
        A given `gdal.Dataset` or `GDALRaster` object, or a geoimage file path to be warped.
    dest_srs : str
        Destination spatial reference system as a epsg string in the format of "EPSG:{EPSG-code}".
    dest_extent : Optional[BoundingBox], optional
        A `BoundingBox` object describing the output extent,
        the coordinates are under the `dest_srs`,
        by default None to let `gdal` decide the extent.
    dest_res : Optional[numeric], optional
        Destination resolution, by default None to let `gdal` decide the output resolution.
    dest_nodata : Optional[Union[numeric, List[numeric]]], optional
        Destination nodata value(s), by default None to let `gdal` decide the right nodata value to be used.
    src_srs : Optional[str]
        Source spatial reference system as a epsg string in the format of "EPSG:{EPSG-code}".
    src_nodata : Optional[numeric], optional
        Source nodata value, by default None to let `gdal` decide the right nodata value to be used.
    cutline : Optional[Geom], optional
        A `common.geometry.geom.Geom` representing the cutline, by default None not to use a cutline.
    crop_to_cutline : bool, optional
        Whether to crop to cutline geometry, only used when `cutline` is specified, by default False.
    resample_alg : str, optional
        Resampling algorithm to be used,
        should be one of "near", "bilinear", "cubic", "cubicspline", "lanczos",
        "average", "mode", "max", "min", "med", "q1", "q3", "sum", by default "near".
    ovr_to_use : Optional[str], optional
        Overview level to be used, could be "AUTO" to use the overview that is closest to the output resolution,
        or "AUTO-1" to use the overview that is one level finer than the output resolution,
        or None to use the full resolution, by default "AUTO-1".
    n_threads : int, optional
        Number of threads to be used in warping, must be a positive integer greater than or equal to 1, by default 4.
    mem: int, optional
        Size of working buffer in MB. By default to None, set by GDAL itself.
    progress: Optional[bool]
        If showing the progress of processing. Default: False
    fpath_dest : Optional[str], optional
        Output file path, by default None not to output.
        Notice it only support file path ends with ".tif", ".tiff" (GeoTiff format),
        or with ".jp2" (JPEG2000 format).
    options : Optional[Dict[str, str]], optional
        Additional options passed into gdal for creating the output file, by default None.
        Only used when `fpath_dest` is provided.

    Returns
    -------
    Optional[GDALRaster]
        A `GDALRaster` object for the warped data,
        or None if it is instructed to write to a file path.

    Raises
    ------
    ValueError
        Raised when the given `ovr_to_use` is not a supported option,
        Raised when the given `resample_alg` is not a supported option.
        Raised when the given `n_threads` is not a valid one.
    Exception
        Raised when `gdal.Warp` call failed.
    """
    # validate arguments and convert them into gdal.Warp's expected form;
    # `cutline_ds_name` (if any) is an in-memory datasource unlinked below
    (
        ds_tmp,
        to_close,
        output_bounds,
        cutline_ds_name,
        resample_alg,
        mem,
        options,
    ) = _check_warp_params(
        ds,
        dest_extent=dest_extent,
        cutline=cutline,
        resample_alg=resample_alg,
        ovr_to_use=ovr_to_use,
        n_threads=n_threads,
        mem=mem,
        fpath_dest=fpath_dest,
        options=options,
    )

    kwargs = {
        "srcSRS": src_srs,
        "dstSRS": dest_srs,
        "xRes": dest_res,
        "yRes": dest_res,
        "outputBounds": output_bounds,
        "resampleAlg": resample_alg,
        "cutlineDSName": cutline_ds_name,
        "cropToCutline": crop_to_cutline,
        "overviewLevel": ovr_to_use,
        "srcNodata": src_nodata,
        "dstNodata": dest_nodata,
        "warpMemoryLimit": mem,
        "warpOptions": [
            "SOURCE_EXTRA=2",
            "NUM_THREADS={}".format(n_threads),
            # NOTE, maybe in the future we could expose some advanced options
            # so users could tune these two options when they really need to, for example
            # "SAMPLE_GRID=YES",
            # "SAMPLE_STEPS=21",
        ],  # always reach out for extra source pixels
        "multithread": True,  # always use an additional thread for reading/writing
        "callback": gdal.TermProgress_nocb if progress else None,  # progress bar
    }

    try:
        if fpath_dest is None:
            # warp into an in-memory dataset which is wrapped below
            kwargs["format"] = "MEM"
            ds_warped = gdal.Warp("", ds_tmp, **kwargs)
            # NOTE per https://github.com/OSGeo/gdal/issues/3232
            # in gdal 3.0 (including gdal 3.0.4 as we are currently using),
            # there is a bug that gdalwarp ignore scale/offset in source dataset
            # this has been fixed in newer gdal but we are not upgrading our gdal so soon,
            # so add the following quick duct tape fix
            for i in range(ds_tmp.RasterCount):
                bd_tmp_src = ds_tmp.GetRasterBand(i + 1)
                bd_tmp_dest = ds_warped.GetRasterBand(i + 1)
                scale_tmp = bd_tmp_src.GetScale()
                offset_tmp = bd_tmp_src.GetOffset()
                if scale_tmp is not None:
                    bd_tmp_dest.SetScale(scale_tmp)
                if offset_tmp is not None:
                    bd_tmp_dest.SetOffset(offset_tmp)
                bd_tmp_src = bd_tmp_dest = None
        elif fpath_dest is not None and isinstance(fpath_dest, str):
            # write straight to the destination file using the driver and
            # creation options deduced from its extension
            options.update({"NUM_THREADS": n_threads})
            driver_name, lst_creation_option = _check_creation_options(
                fpath_dest, options=options
            )
            kwargs["format"] = driver_name
            kwargs["creationOptions"] = lst_creation_option
            gdal.Warp(fpath_dest, ds_tmp, **kwargs)
        else:
            raise ValueError("Invalid input of `fpath_dest`, accepts a file path str.")
    except Exception as err:
        # make sure no reference to a (possibly partial) MEM result survives
        if fpath_dest is None:
            ds_warped = None
        raise err
    else:
        if fpath_dest is None:
            # hand ownership of the MEM dataset over to a GDALRaster
            ret = _to_gdal_raster_or_file(ds_warped, fpath_dest, options)
            ds_warped = None
        else:
            ret = None
        return ret
    finally:
        # close the dataset only if we opened it (i.e. `ds` was a file path)
        if to_close:
            ds_tmp = None
        # drop the in-memory cutline datasource created by _check_warp_params
        if cutline is not None:
            # cutline_ds = None
            if gdal.VSIStatL(cutline_ds_name) is not None:
                gdal.Unlink(cutline_ds_name)


def rasterize_ds(
    ds: Union[gdal.Dataset, GDALRaster, str],
    geoms: Geom,
    burn_values: Union[numeric, List[numeric]],
    bands: Optional[Union[int, List[int]]],
    inverse: bool = False,
    all_touched: bool = False,
    fpath_dest: Optional[str] = None,
    options: Optional[Dict] = None,
) -> Optional[GDALRaster]:
    """Rasterize a given geometry to a given `gdal.Dataset` or `GDALRaster`.

    Parameters
    ----------
    ds : Union[gdal.Dataset, GDALRaster, str]
        The `gdal.dataset` or `GDALRaster`, or a geoimage file path, where the geometry to be burned to.
    geoms : Geom
        A `common.geometry.geom.Geom` to be burned.
    burn_values : Union[numeric, List[numeric]]
        Values to be set for the geometry.
        Could be a numeric value or a list of numeric values.
        Either inside the geometry or outside the geometry, see argument `inverse` for how to control.
    bands : Optional[Union[int, List[int]]]
        Bands to be set with given values. Could be an integer for the designated band,
        or a list of integers for a list of bands. Notice the band index is expected to be 0-based.
    inverse : bool, optional
        Whether to burn values inside the geometry (False) or outside (True), by default False.
    all_touched : bool, optional
        Option controlling how to determine if a pixel should be burned with the given value,
        False to use the normal rendering mode to only set pixels of which the central points are within the geometry,
        True to instruct all pixels touched with the geometry to be set with the given value,
        by default False.
    fpath_dest : Optional[str], optional
        Output file path, by default None not to output.
        Notice it only support file path ends with ".tif", ".tiff" (GeoTiff format),
        or with ".jp2" (JPEG2000 format).
    options : Optional[Dict[str, str]], optional
        Additional options passed into gdal for creating the output file, by default None.
        Only used when `fpath_dest` is provided.


    Returns
    -------
    Optional[GDALRaster]
        The resulting `GDALRaster` with the given geometry burned into it as designated values,
        or None if the result is written to the given destination file path.

    Raises
    ------
    ValueError
        Raised when the length of `burn_values` does not match with `bands`.
    err
        Raised when `gdal.Rasterize` call failed.
    """
    # NOTE(review): when `ds` is a file path, `_unwrap_ds` opens it with plain
    # gdal.Open (read-only), while gdal.Rasterize writes into the dataset in
    # place — verify whether str inputs need an update-mode open here.
    ds_tmp, to_close = _unwrap_ds(ds)

    # list of bands to be painted (converted to gdal's 1-based indices)
    if bands is None:
        n_bands = ds_tmp.RasterCount
        bands_for_gdal = list(range(1, n_bands + 1))
    elif isinstance(bands, list):
        n_bands = len(bands)
        bands_for_gdal = [b + 1 for b in bands]
    else:
        n_bands = 1
        bands_for_gdal = [bands + 1]

    # deduce the burn_values: a scalar is replicated across all target bands,
    # a list must match the number of target bands exactly
    if isinstance(burn_values, list):
        if n_bands != len(burn_values):
            if to_close:
                ds_tmp = None
            raise ValueError(
                "`burn_values` should be a list containing values to burn for each band to be burned."
            )
    else:
        burn_values = [burn_values] * n_bands

    # materialize the geometry as a single-feature in-memory GeoJSON
    # datasource; it is unlinked in the finally-block below
    tmp_uuid = str(uuid.uuid4())
    shape_ds_name = f"/vsimem/{tmp_uuid}.json"
    shape_ds = ogr.GetDriverByName("GeoJSON").CreateDataSource(shape_ds_name)
    shape_lyr = shape_ds.CreateLayer("shape")
    f = ogr.Feature(shape_lyr.GetLayerDefn())
    f.SetGeometry(ogr.CreateGeometryFromWkt(geoms.to_wkt()))
    shape_lyr.CreateFeature(f)
    # release the ogr handles so the datasource is flushed before rasterizing
    f = None
    shape_lyr = None
    shape_ds = None

    try:
        _ = gdal.Rasterize(
            ds_tmp,
            shape_ds_name,
            options=gdal.RasterizeOptions(
                allTouched=all_touched,
                inverse=inverse,
                burnValues=burn_values,
                bands=bands_for_gdal,
            ),
        )
    except Exception as err:
        raise err
    else:
        # rasterization happens in place on ds_tmp; wrap it or write it out
        ret = _to_gdal_raster_or_file(ds_tmp, fpath_dest, options)
        return ret
    finally:
        if to_close:
            ds_tmp = None
        if gdal.VSIStatL(shape_ds_name) is not None:
            gdal.Unlink(shape_ds_name)


def mosaic_ds(
    lst_ds: List[Union[GDALRaster, gdal.Dataset, str]],
    resample_alg: str = "nearest",
    src_nodata: Optional[Union[numeric, List[numeric]]] = None,
    dest_extent: Optional[BoundingBox] = None,
    dest_res: Optional[numeric] = None,
    dest_nodata: Optional[Union[numeric, List[numeric]]] = None,
    fpath_dest: Optional[str] = None,
    options: Optional[dict] = None,
) -> Optional[GDALRaster]:
    """Spatially mosaic a list of datasets or image files via `gdal.BuildVRT`,
    assuming they all share the same spatial reference system.

    Parameters
    ----------
    lst_ds : List[Union[GDALRaster, gdal.Dataset, str]]
        The `GDALRaster`s, `gdal.Dataset`s or gdal-readable file paths to mosaic.
    resample_alg : str, optional
        Resampling algorithm, one of "nearest", "bilinear", "cubic",
        "cubicspline", "lanczos", "average" and "mode", by default "nearest".
    src_nodata : Optional[Union[numeric, List[numeric]]], optional
        Nodata value(s) of the sources — a single value for all bands or one
        value per band, by default None to let gdal determine them.
    dest_extent : Optional[BoundingBox], optional
        Output extent in the result's spatial reference system, by default None.
    dest_res: Optional[numeric], optional
        Output resolution, by default None.
    dest_nodata : Optional[Union[numeric, List[numeric]]], optional
        Nodata value(s) of the output — a single value for all bands or one
        value per band, by default None to reuse the sources' nodata.
    fpath_dest : Optional[str], optional
        Output file path, by default None not to write a file.
        Only ".tif"/".tiff" (GeoTiff) and ".jp2" (JPEG2000) are supported.
    options : Optional[Dict[str, str]], optional
        Extra gdal creation options for the output file, by default None.
        Only used when `fpath_dest` is provided.

    Returns
    -------
    Optional[GDALRaster]
        The mosaiced `GDALRaster`, or None when `fpath_dest` is provided.

    Raises
    ------
    err
        Raised when the call to `gdal.BuildVRT` failed.

    Notes
    -----
    All inputs are expected to share one spatial reference system and the
    same number of bands; a provided `dest_extent` / `dest_res` must be
    compatible with the sources' coordinate system.
    """
    _precheck_output_option(fpath_dest)

    # the VRT lives in gdal's in-memory filesystem and is unlinked at the end
    vrt_name = f"/vsimem/mosaiced_{uuid.uuid4()}.vrt"

    resample_alg = polyfill_resample_alg_for_gdal_vrt(resample_alg)
    bounds = None if dest_extent is None else dest_extent.to_gdal_target_extent()

    vrt_options = gdal.BuildVRTOptions(
        # "user" resolution requires explicit xRes/yRes
        resolution="user" if dest_res is not None else "highest",
        resampleAlg=resample_alg,
        separate=False,
        allowProjectionDifference=False,
        srcNodata=src_nodata,
        VRTNodata=dest_nodata,
        outputSRS=None,
        outputBounds=bounds,
        xRes=dest_res,
        yRes=dest_res,
    )

    try:
        vrt_ds = gdal.BuildVRT(vrt_name, lst_ds, options=vrt_options)
    except Exception as exc:
        vrt_ds = None
        raise exc
    else:
        result = _to_gdal_raster_or_file(vrt_ds, fpath_dest, options)
        vrt_ds = None
        return result
    finally:
        if gdal.VSIStatL(vrt_name) is not None:
            gdal.Unlink(vrt_name)


def mosaic_and_crop_ds(
    lst_ds: List[Union[GDALRaster, gdal.Dataset, str]],
    resample_alg: str = "nearest",
    src_nodata: Optional[Union[numeric, List[numeric]]] = None,
    dest_extent: Optional[BoundingBox] = None,
    dest_aoi: Optional[Geom] = None,
    crop_to_aoi: bool = False,
    dest_res: Optional[numeric] = None,
    dest_nodata: Optional[Union[numeric, List[numeric]]] = None,
    fpath_dest: Optional[str] = None,
    n_threads: int = 4,
    options: Optional[dict] = None,
) -> Optional[GDALRaster]:
    """Spatially mosaic and crop a list of `gdal.dataset` or image files,
    assuming they are under the Same Spatial Reference System.

    Parameters
    ----------
    lst_ds : List[Union[GDALRaster, gdal.Dataset, str]]
        A list of `GDALRaster`, `gdal.Dataset` or file paths to gdal supported images to be mosaiced.
    resample_alg : Optional[str]
        Resampling algorithm to be used, could be one of
        "nearest", "bilinear", "cubic", "cubicspline", "lanczos", "average", and "mode",
        by default "nearest".
    src_nodata : Optional[Union[numeric, List[numeric]]]
        Source data's nodata value(s),
        could be a single value for all bands, or a list of values with each value for one band,
        by default None to let `gdal` determine the nodata value(s).
    dest_extent : Optional[BoundingBox]
        Destination extent as under the result spatial reference system, by default None
    dest_aoi: Optional[Geom]
        AOI geometry in WGS84, by default None
    crop_to_aoi: bool
        If crop the mosaicked dataset to the AOI, by default False
    dest_res: Optional[numeric]
        Destination resolution, by default None
    dest_nodata : Optional[Union[numeric, List[numeric]]]
        Destination data's nodata value(s),
        could be a single value for all bands, or a list of values with each value for one band,
        by default None to use the source data's nodata value(s).
    fpath_dest : Optional[str]
        Output file path, by default None not to output.
        Notice it only support file path ends with ".tif", ".tiff" (GeoTiff format),
        or with ".jp2" (JPEG2000 format).
    n_threads : int
        Number of threads to be used in warping, must be a positive integer greater than or equal to 1, by default 4.
    options : Optional[Dict[str, str]]
        Additional options passed into gdal for creating the output file, by default None.
        Only used when `fpath_dest` is provided.

    Returns
    -------
    Optional[GDALRaster]
        Mosaiced `GDALRaster` object, or None if `fpath_dest` is provided.

    Raises
    ------
    ValueError
        Raised when `fpath_dest` has an unsupported extension.
    err
        Raised when the call to `gdal.Warp` failed.

    Notes
    -----
    This function expects all given raster files or datasets are in the same spatial reference system,
    and expects they have the same number of bands; if the `dest_extent` and `dest_res` are provided,
    those are expected to be compatible with the sources' spatial reference system.
    """
    # validate the destination extension early, as the sibling mosaic/stack
    # helpers do, before any expensive work happens
    _precheck_output_option(fpath_dest)

    # pre-initialize everything the cleanup block touches, so an early failure
    # (e.g. while building the AOI datasource) cannot trigger a NameError in
    # `finally` that would mask the original exception
    mosaicked_ds = None
    aoi_ds_name = None
    fpath_mosaic = None
    try:
        if dest_extent is not None:
            # gdal.Warp expects outputBounds as (minx, miny, maxx, maxy)
            dest_extent = (
                dest_extent.left,
                dest_extent.bottom,
                dest_extent.right,
                dest_extent.upper,
            )

        # the AOI cutline (if any) and the mosaic itself live in gdal's
        # in-memory filesystem and are unlinked in the finally-block
        aoi_ds_name = dest_aoi.to_ogr_mem_ds() if dest_aoi is not None else None
        fpath_mosaic = f"/vsimem/tmp_mosaic_{uuid.uuid4()}.tif"
        mosaicked_ds = gdal.Warp(
            fpath_mosaic,
            lst_ds,
            outputBounds=dest_extent,
            xRes=dest_res,
            yRes=dest_res,
            resampleAlg=resample_alg,
            srcNodata=src_nodata,
            dstNodata=dest_nodata,
            cutlineDSName=aoi_ds_name,
            cropToCutline=crop_to_aoi,
            multithread=True,
            options=[
                f"NUM_THREADS={n_threads}",
            ],
        )
    except Exception as e:
        # chain the original exception for easier debugging
        raise Exception(f"Failed to mosaic multiple geoimages due to `{e}`.") from e
    else:
        if mosaicked_ds is None:
            raise Exception("GDAL failed to mosaic multiple geoimages.")
        ret = _to_gdal_raster_or_file(mosaicked_ds, fpath_dest, options)
    finally:
        mosaicked_ds = None
        if fpath_mosaic is not None and gdal.VSIStatL(fpath_mosaic) is not None:
            gdal.Unlink(fpath_mosaic)
        if isinstance(aoi_ds_name, str) and gdal.VSIStatL(aoi_ds_name) is not None:
            gdal.Unlink(aoi_ds_name)

    return ret


def stack_ds(
    lst_ds: List[Union[GDALRaster, gdal.Dataset, str]],
    resample_alg: str = "nearest",
    fpath_dest: Optional[str] = None,
    options: Optional[dict] = None,
) -> Optional[GDALRaster]:
    """Stack multiple datasets or raster files along the band axis via `gdal.BuildVRT`.

    Parameters
    ----------
    lst_ds : List[Union[GDALRaster, gdal.Dataset, str]]
        The `GDALRaster`s, `gdal.Dataset`s or gdal-readable file paths to stack.
    resample_alg : str, optional
        Resampling algorithm, one of "nearest", "bilinear", "cubic",
        "cubicspline", "lanczos", "average" and "mode", by default "nearest".
    fpath_dest : Optional[str], optional
        Output file path, by default None not to write a file.
        Only ".tif"/".tiff" (GeoTiff) and ".jp2" (JPEG2000) are supported.
    options : Optional[Dict[str, str]], optional
        Extra gdal creation options for the output file, by default None.
        Only used when `fpath_dest` is provided.

    Returns
    -------
    Optional[GDALRaster]
        The stacked `GDALRaster`, or None when written to `fpath_dest`.

    Raises
    ------
    err
        Raised when the call to `gdal.BuildVRT` failed.

    Notes
    -----
    All inputs are expected to share one spatial reference system.
    """
    # the VRT lives in gdal's in-memory filesystem and is unlinked at the end
    vrt_name = f"/vsimem/stacked_{uuid.uuid4()}.vrt"
    resample_alg = polyfill_resample_alg_for_gdal_vrt(resample_alg)

    _precheck_output_option(fpath_dest)

    build_opts = gdal.BuildVRTOptions(
        resolution="highest",
        resampleAlg=resample_alg,
        # separate=True makes each input contribute its own band(s)
        separate=True,
        allowProjectionDifference=False,
    )

    try:
        stacked = gdal.BuildVRT(vrt_name, lst_ds, options=build_opts)
    except Exception as exc:
        stacked = None
        raise exc
    else:
        result = _to_gdal_raster_or_file(stacked, fpath_dest, options)
        stacked = None
        return result
    finally:
        if gdal.VSIStatL(vrt_name) is not None:
            gdal.Unlink(vrt_name)


def raster_to_polygon(
    ds: Union[GDALRaster, gdal.Dataset, str],
    fpath_polygon: str,
    band_ind: int = 0,
    mask_band: Optional[Union[int, np.ndarray, gdal.Band]] = None,
    eight_connected: bool = True,
    no_need_to_wgs84: bool = False,
) -> bool:
    """Convert a raster to a polygon.

    Parameters
    ----------
    ds : Union[GDALRaster, gdal.Dataset, str]
        The raster dataset to be converted.
    fpath_polygon : str
        The output polygon file path, could be a `shp` file, a `gpkg` file or a `geojson` file.
    band_ind : int, optional
        The band index for the band within the raster to be converted, by default 0 to use the first band.
        Notice the index is 0-based.
    mask_band : Optional[Union[int, np.ndarray, gdal.Band]], optional
        Mask band to be used,
        could be 1) None not to use a mask,
        or 2) an integer as the band index for the band in the given `ds` to be used,
        or 3) a gdal.Band object for the band to be used,
        or 4) a np.ndarray for the mask to be used.
        If 2), it is the user's duty to ensure the index (0-based) is within the raster's band range.
        If 3), it is the user's duty to ensure the band is with the same number of rows and columns as the given ds,
        and all non-0 pixels would be considered as valid data and included in the polygon transformation.
        If 4), it is the user's duty to ensure the mask is a 2-D array
        with the same number of rows and columns as the given ds,
        and all non-0 pixels would be considered as valid data and included in the polygon transformation.
        By default None.
    eight_connected : bool, optional
        Whether to use eight-connected or four-connected, by default True.
    no_need_to_wgs84 : bool, optional
        If the given destination file path is a geojson path,
        while the source image is with a coordinate system but not WGS84,
        we will convert the resulting geojson to be under WGS84 so to be compatible with the GeoJSON specification.
        This keyword argument gives the user the option to disable this conversion, by default False.

    Returns
    -------
    bool
        Whether the conversion is successful.

    Raises
    ------
    err
        Raised when the call to `gdal.Polygonize` or `gdal.VectorTranslate` failed.
    """

    ds_tmp, to_close = _unwrap_ds(ds)
    driver_name = _precheck_vector_output_option(fpath_polygon)

    if eight_connected:
        options = ["8CONNECTED=8"]
    else:
        options = ["8CONNECTED=4"]

    prj_wkt = ds_tmp.GetProjection()
    if prj_wkt == "":
        srs = None
    else:
        srs = osr.SpatialReference()
        srs.ImportFromWkt(prj_wkt)

    fpath_polygon_work = fpath_polygon
    if driver_name == "GeoJSON" and srs is not None:
        # if the output is GeoJSON, while the srs is not None and not WGS84
        # we need to later convert it to WGS84 to be standard compliant
        # thus we create a temp in-memory gpkg at this step instead of a on-disk geojson file
        epsg_str = srs_to_epsg_str(srs)
        if epsg_str != "EPSG:4326" and (not no_need_to_wgs84):
            fpath_polygon_work = "/vsimem/tmp_polygon_{}.gpkg".format(str(uuid.uuid4()))
            driver_name = "GPKG"

    try:
        # derive the mask_band if not None
        if mask_band is not None:
            if isinstance(mask_band, int):
                mask_band = ds_tmp.GetRasterBand(mask_band + 1)
            elif isinstance(mask_band, gdal.Band):
                # deliberately do nothing, it's the caller's duty to make sure the band is compatible
                pass
            elif isinstance(mask_band, np.ndarray):
                # no check, it is the caller's duty to make sure the array is compatible
                if mask_band.dtype == np.bool:
                    # NOTE: gdal_array does not accept bool array so have to convert it
                    mask_band = mask_band.astype(np.uint8)
                ds_mask = gdal_array.OpenNumPyArray(
                    mask_band, True
                )  # assume band first and only 1 band
                mask_band = ds_mask.GetRasterBand(1)
            else:
                raise ValueError(f"Unsupported `mask_band` type {type(mask_band)}")

        # create the output file (with a single field `VAL` to hold the pixel value)
        drv = gdal.GetDriverByName(driver_name)
        ds_polygon = drv.Create(fpath_polygon_work, 0, 0, 0, gdal.GDT_Unknown)
        out_layer = ds_polygon.CreateLayer("polygon", srs, ogr.wkbPolygon)
        field_defn = ogr.FieldDefn("VAL", ogr.OFTInteger)
        out_layer.CreateField(field_defn)

        # call Polygonize
        ret = gdal.Polygonize(
            ds_tmp.GetRasterBand(band_ind + 1), mask_band, out_layer, 0, options=options
        )
        out_layer.SyncToDisk()
    except Exception as err:
        logger.error(f"Fail to convert to polygon due to {str(err)}")
        return False
    else:
        if ret == 0:
            # make sure the output GeoJSON is in WGS84 coordinate system to follow the standard
            if fpath_polygon_work.startswith("/vsimem"):
                return _post_process_to_wgs84(
                    fpath_polygon_work, fpath_polygon, epsg_str
                )
            return True
        else:
            logger.error(
                "Fail to convert to polygon due to errors in calling `gdal.Polygonize`"
            )
            return False
    finally:
        del ds_polygon
        out_layer = None
        if to_close:
            ds_tmp = None
        ds_mask = mask_band = None
        if fpath_polygon_work.startswith("/vsimem") and gdal.VSIStatL(
            fpath_polygon_work
        ):
            gdal.Unlink(fpath_polygon_work)


def _post_process_to_wgs84(
    fpath_polygon_work: str, fpath_polygon: str, src_epsg_str: str
) -> bool:
    """Reproject a working vector file to WGS84 and write it to the final path.

    Returns True on success; on failure logs the error and returns False
    instead of raising.
    """
    translate_opts = gdal.VectorTranslateOptions(
        srcSRS=src_epsg_str, dstSRS="EPSG:4326"
    )
    try:
        gdal.VectorTranslate(fpath_polygon, fpath_polygon_work, options=translate_opts)
    except Exception as err:
        logger.error(f"Fail to convert the output polygon to WGS84 due to {str(err)}")
        return False
    return True


def get_epsg_from_proj_wkt(proj_wkt):
    """Get EPSG code from the proj wkt string.

    Parameters
    ----------
    proj_wkt: str
        WKT string of the spatial reference system (SRS)

    Returns
    -------
    epsg_code: int
        EPSG code associated with the proj_wkt, or None if no match is found.

    Raises
    ------
    Exception
        When the osr fallback matches more than one distinct EPSG code.
    """
    crs = CRS(proj_wkt)
    epsg_code = crs.to_epsg()

    # if the pyproj.CRS package cannot find the EPSG code, use osr to find it.
    if epsg_code is None:
        srs = osr.SpatialReference(wkt=proj_wkt)
        # collect DISTINCT codes: FindMatches may return several matches that
        # all map to the same EPSG code, which is not an ambiguity
        matched_codes = set()
        for match in srs.FindMatches():
            # each match is a (SpatialReference, confidence) tuple
            if match[0].GetAttrValue("AUTHORITY", 0) == "EPSG":
                matched_codes.add(int(match[0].GetAttrValue("AUTHORITY", 1)))
        if len(matched_codes) > 1:
            raise Exception("Matched too many EPSG codes.")
        if matched_codes:
            epsg_code = matched_codes.pop()

    return epsg_code


def get_epsg_from_geoimage(geoimage_fpath):
    """Get EPSG code from geoimage.

    Parameters
    ----------
    geoimage_fpath: str
        Path to the geoimage.

    Returns
    -------
    epsg_code: int
        EPSG code associated with the geoimage.
    """
    try:
        img_ds = gdal.Open(geoimage_fpath, gdal.GA_ReadOnly)
    except Exception as e:
        raise Exception(
            f"GDAL failed to open geoimage: {geoimage_fpath} due to err `{e}`"
        )
    # grab the projection string, then drop the handle before decoding it
    proj_wkt = img_ds.GetProjection()
    img_ds = None
    return get_epsg_from_proj_wkt(proj_wkt)


def get_common_epsg_from_geoimages(geoimage_fpaths):
    """Get common EPSG code from multiple geoimages.

    Parameters
    ----------
    geoimage_fpaths: List[str]
        A list of geoimage paths.

    Returns
    -------
    epsg_code: int
        Common EPSG code for the geoimage list.
    """
    # majority vote across all images' EPSG codes
    votes = Counter(get_epsg_from_geoimage(fp) for fp in geoimage_fpaths)
    return votes.most_common(1)[0][0]


def _read_gdal_band_ndarray(fpath_band, bands_nda, band_idx, exceptions):
    """Thread worker: read one single-band file into its slot of a shared array.

    Never raises — on any failure it flags `exceptions[band_idx]` instead,
    so the caller can detect it and fall back to a serial read.
    """
    ds = None
    try:
        ds = gdal.Open(fpath_band)
        bands_nda[band_idx] = ds.ReadAsArray()
    except Exception:
        exceptions[band_idx] = True
    finally:
        # drop the reference so GDAL closes the dataset
        ds = None


def parallel_read_array_from_ds(ds: gdal.Dataset) -> np.ndarray:
    """Parallel reading multi-bands array from GDAL Dataset.

    NOTE This module is developed for faster reading of GDAL dataset
    compared to GDAl's own ReadAsArray().

    Each band is exposed through a temporary per-band VRT and read by its own
    thread; if any band read fails, falls back to a serial `ReadAsArray()`.
    """
    n_rows, n_cols, n_bands = ds.RasterYSize, ds.RasterXSize, ds.RasterCount

    # in-memory / virtual / single-band datasets gain nothing from threading
    desc = ds.GetDescription()
    if (
        desc.startswith("/vsi")
        or desc == ""
        or ds.GetDriver().ShortName == "MEM"
        or n_bands == 1
    ):
        return ds.ReadAsArray()

    # allocate the destination array using the first band's pixel type
    first_band = ds.GetRasterBand(1)
    np_dtype = convert_dtype_gdal2np(first_band.DataType)
    first_band = None
    stacked = np.zeros((n_bands, n_rows, n_cols), dtype=np_dtype)

    failed = [False] * n_bands
    dir_cache_base = os.path.join(DEFAULT_CACHE_DIR, "common", "geoimage", "gdal_sys")
    with temp_dir(
        dir_name=f"parallel_read_array_from_ds_{uuid.uuid4()}",
        base_dir=dir_cache_base,
    ) as dir_cache:
        workers = []
        for band_idx in range(n_bands):
            # one single-band VRT per thread so reads do not share a handle
            fp_band = os.path.join(dir_cache, f"B{band_idx}.vrt")
            gdal.BuildVRT(fp_band, ds, bandList=[band_idx + 1])
            worker = threading.Thread(
                target=_read_gdal_band_ndarray,
                args=(fp_band, stacked, band_idx, failed),
            )
            worker.start()
            workers.append(worker)

        # all joins must finish before temp_dir removes the VRT files
        for worker in workers:
            worker.join()

    if any(failed):
        # at least one band read failed: fall back to the serial path
        return ds.ReadAsArray()

    return stacked
