from __future__ import annotations

import os
import math
import uuid
from typing import List, Dict, Optional, Iterator, Union, Tuple
from typing_extensions import Literal
from collections import Counter

from numba import njit
import numpy as np
import numpy.typing as npd
from osgeo import gdal, gdal_array
import geopandas as gpd
import pyproj
from tqdm import tqdm

import matplotlib.cm as cm
from matplotlib.colors import Colormap

from common.typing_utils import numeric
from common.config import DEFAULT_CACHE_DIR
from common.logger import logger
from common.io_utils.temps import temp_dir

from common.img_utils.img_color import (
    color_stretch_linear,
    color_stretch_normal,
    color_stretch_percentage,
)
from common.img_utils.img_shape import (
    iter_image_by_block,
    iter_image_by_block_with_buffer,
    BBoxInfo,
)
from common.geometry.bbox import BoundingBox, nudge_bbox
from common.geometry.geom import Geom
from common.geometry.srs import geom_to_epsg_under_utm, epsg_to_wkt, wkt_to_epsg
from common.geometry.tools import project_geom

from common.geoimage.gdal_sys import (
    SceneMeta,
    ValueInterpretationMeta,
    SourceWindow,
    calc_window_from_aoi,
    get_epsg_from_geoimage,
    get_common_epsg_from_geoimages,
    parallel_read_array_from_ds,
)
from common.geoimage.gdal_sys import (
    convert_to_cloud_optimized_geotiff,
    mosaic_and_crop_ds,
    rasterize_ds,
    read_meta_from_ds,
    GDALRaster,
    write_ndarray_to_geotiff,
    write_ndarray_to_jpeg2000,
    translate_ds,
    warp_ds,
    raster_to_polygon,
)
from common.geoimage.virtual_raster import VRTScene, VRTSceneCollection


class RasterDatasetPerBlockTraverser(object):
    """Traverse a raster file or in-memory `RasterDataset` block by block.

    The source is partitioned into rectangular blocks of
    ``block_x_size`` x ``block_y_size`` pixels, optionally padded with a
    buffer zone; `traverse` yields one `RasterDataset` per block together
    with the bounding-box info of the valid (and, if any, buffered) zone.
    """

    def __init__(
        self,
        source_file_path: Union[str, RasterDataset],
        block_x_size: int,
        block_y_size: int,
        buffer_x_size: int = 0,
        buffer_y_size: int = 0,
        bands: Optional[Union[List[int], int]] = None,
        boundary_treatment: str = "shrink",
    ):
        """Initialize a block traverser for a given raster image file

        Parameters
        ----------
        source_file_path : Union[str, RasterDataset]
            File path to a raster image file to be traversed through,
            or a `RasterDataset` object to be traversed through.
        block_x_size : int
            Traversing block size in X direction.
        block_y_size : int
            Traversing block size in Y direction.
        buffer_x_size : int
            Traversing block buffer size in X direction. Default is 0.
        buffer_y_size : int
            Traversing block buffer size in Y direction. Default is 0.
        bands: Optional[Union[List[int], int]]
            Bands or a single band to be selected, start from 0. By default None to select all bands.
        boundary_treatment : str, optional
            How to treat the boundary condition when the moving block meets the end of rows/columns.
            Either "shrink" to shrink the block so to be within the valid zone but with smaller size than specified,
            or "shift" to shift the block to the valid zone and keep the size to be the same as specified.
            By default "shrink".
        """
        self.source_file_path = source_file_path
        if isinstance(self.source_file_path, RasterDataset):
            # In-memory source: metadata is already attached to the object.
            self.source_meta = self.source_file_path.meta
        else:
            # File source: read metadata once up front, including the numpy
            # and GDAL dtypes which `_read_file_subset` needs later on.
            with GDALRaster.open(self.source_file_path) as gr:
                source_meta, (source_dtype_np, source_dtype_gdal) = read_meta_from_ds(
                    gr.ds, read_value_interpretation=True, read_dtype=True
                )
                self.source_meta = source_meta
                self.source_dtype_np = source_dtype_np
                self.source_dtype_gdal = source_dtype_gdal
        self.block_x_size = block_x_size
        self.block_y_size = block_y_size

        ALL_BANDS = tuple(range(self.source_meta.n_bands))
        if bands is None:
            self.bands_list = ALL_BANDS
        elif isinstance(bands, int):
            self.bands_list = tuple([bands])
        else:
            self.bands_list = tuple(bands)
        # When every band is selected (in natural order), per-block band
        # slicing can be skipped entirely.
        self.to_slice = True
        if self.bands_list == ALL_BANDS:
            self.to_slice = False

        self.buffer_x_size = buffer_x_size
        self.buffer_y_size = buffer_y_size
        if self.buffer_x_size == 0 and self.buffer_y_size == 0:
            self.buffered = False
        else:
            self.buffered = True

        # self.blocks: Union[List[BBoxInfo], Generator[BBoxInfo, None, None]] = (
        # `self.blocks` lazily yields `(valid_block, buffered_block)` pairs;
        # in the unbuffered case the second item is always None, so that
        # `traverse` can unpack both cases uniformly.
        if self.buffered:
            self.blocks = (
                block_info
                for block_info in iter_image_by_block_with_buffer(
                    self.source_meta.n_rows,
                    self.source_meta.n_cols,
                    block_nrows=block_y_size,
                    block_ncols=block_x_size,
                    buffer_nrows=buffer_y_size,
                    buffer_ncols=buffer_x_size,
                    boundary_treatment=boundary_treatment,
                )
            )
        else:
            self.blocks = (
                (block_info, None)
                for block_info in iter_image_by_block(
                    self.source_meta.n_rows,
                    self.source_meta.n_cols,
                    block_nrows=block_y_size,
                    block_ncols=block_x_size,
                    boundary_treatment=boundary_treatment,
                )
            )
        # -1 is a "not computed yet" sentinel for both counters below.
        self._estimated_n_blocks = -1
        self._n_blocks = -1

    @property
    def estimated_n_blocks(self) -> int:
        """estimated number of blocks that should be traversed through"""
        # Prefer the exact count if it has already been materialized.
        if self._n_blocks > 0:
            return self._n_blocks
        if self._estimated_n_blocks < 0:
            self._estimated_n_blocks = int(
                math.ceil(self.source_meta.n_cols / self.block_x_size)
                * math.ceil(self.source_meta.n_rows / self.block_y_size)
            )
        return self._estimated_n_blocks

    @property
    def n_blocks(self) -> int:
        """number of blocks that should be traversed through

        Note: accessing this property materializes the lazy `self.blocks`
        generator into a list so it can be counted (and re-iterated).
        """
        if self._n_blocks < 0:
            self.blocks = list(self.blocks)
            self._n_blocks = len(self.blocks)
        return self._n_blocks

    def _read_file_subset(
        self, gr: GDALRaster, n_bands: int, window: SourceWindow
    ) -> RasterDataset:
        """an efficient way of reading subset from gdal using low level gdal_array interface"""
        x_off, y_off = window.x_off, window.y_off
        x_size, y_size = window.x_size, window.y_size
        # Read the selected bands for the window directly into a
        # pre-allocated band-interleaved array (band_list is 1-based in GDAL).
        data = np.empty((n_bands, y_size, x_size), dtype=self.source_dtype_np)
        gdal_array.DatasetIONumPy(
            gr.ds,
            0,
            x_off,
            y_off,
            x_size,
            y_size,
            data,
            self.source_dtype_gdal,
            resample_alg=gdal.GRIORA_NearestNeighbour,
            binterleave=True,
            band_list=[v + 1 for v in self.bands_list],
        )

        # Shift the geotransform origin to the window's upper-left corner.
        geotrans = list(self.source_meta.geotrans)
        geotrans[0] = geotrans[0] + x_off * geotrans[1] + y_off * geotrans[2]
        geotrans[3] = geotrans[3] + x_off * geotrans[4] + y_off * geotrans[5]
        meta_new = self.source_meta.update(
            n_cols=x_size,
            n_rows=y_size,
            n_bands=n_bands,
            geotrans=tuple(geotrans),
        )
        rds = RasterDataset.from_ndarray(data, meta_new)
        return rds

    @staticmethod
    def _get_source_window(
        valid_block_info: BBoxInfo, buffered_block_info: Optional[BBoxInfo]
    ) -> SourceWindow:
        """deduce a source window for reading from the given valid block and optional buffered block"""
        # The buffered zone, when present, supersedes the valid zone since it
        # fully contains it.
        if buffered_block_info is not None:
            x_off, y_off = (
                buffered_block_info.start_col,
                buffered_block_info.start_row,
            )
            x_size = buffered_block_info.end_col - x_off
            y_size = buffered_block_info.end_row - y_off
        else:
            x_off, y_off = valid_block_info.start_col, valid_block_info.start_row
            x_size = valid_block_info.end_col - x_off
            y_size = valid_block_info.end_row - y_off
        window = SourceWindow(x_off, y_off, x_size, y_size)
        return window

    def traverse(
        self, copy_ndarray: bool = False
    ) -> Iterator[Tuple[RasterDataset, BBoxInfo, Optional[BBoxInfo]]]:
        """Traverse through the underlying raster file or dataset

        Parameters
        ----------
        copy_ndarray : bool, optional
            Only used when the source is a `RasterDataset` object.
            By default ``copy_ndarray=False``: the yielded subset
            `RasterDataset` objects share the same in-memory ndarray with the
            source object to reduce memory usage, so any in-place change would
            modify the source `RasterDataset` object's ndarray too.
            Pass True to force per-block copies instead.

        Yields
        -------
        Iterator[Tuple[RasterDataset, BBoxInfo, Optional[BBoxInfo]]]
            The first item is a `RasterDataset` for each block visited.
            The second item is a `BBoxInfo` for the valid block zone represented by the yielded `RasterDataset`.
            The last item is either None (when there is no buffer) or a `BBoxInfo` representing the buffered zone.
        """
        if isinstance(self.source_file_path, RasterDataset):
            # In-memory path: subset the shared ndarray per block.
            for valid_block_info, buffered_block_info in self.blocks:
                window = RasterDatasetPerBlockTraverser._get_source_window(
                    valid_block_info, buffered_block_info
                )
                source_rds = self.source_file_path
                rds = source_rds.subset(
                    window=window,
                    bands=None if not self.to_slice else self.bands_list,
                    copy_ndarray=copy_ndarray,
                )
                yield rds, valid_block_info, buffered_block_info
        else:
            # File path: keep a single GDAL handle open for all block reads.
            with GDALRaster.open(self.source_file_path) as gr:
                n_bands = len(self.bands_list)
                for valid_block_info, buffered_block_info in self.blocks:
                    window = RasterDatasetPerBlockTraverser._get_source_window(
                        valid_block_info, buffered_block_info
                    )
                    rds = self._read_file_subset(gr, n_bands, window)
                    yield rds, valid_block_info, buffered_block_info


class RasterDataset(object):
    """An in-memory raster dataset

    Attributes
    ----------
    data: np.ndarray
        raster dataset's data represented as a 3D array ($n_{bands} X n_{rows} X n_{cols}$)
    meta: common.geoimage.gdal_sys.SceneMeta
        metadata for the raster dataset
    """

    # NOTE this is the currently recommended way of type annotating **an instance variable**
    # as noted by PEP-0526: https://www.python.org/dev/peps/pep-0526/#class-and-instance-variable-annotations
    # and supported by the BDFL himself: https://gitter.im/python/typing?at=595f9f5b2723db8d5ee9c892
    # this is kind of confusing as it is with the same syntax as the traditional way of defining a class variable
    # TODO, add a "Developer Note" section in our `skywalker-docs` and collect more notes there
    data: Optional[np.ndarray] = None
    meta: Optional[SceneMeta] = None

    def __init__(self):
        """Initialize an empty RasterDataset.

        Notes
        ------
        Reserved for internal use only. Please use other methods, e.g,
        `RasterDataset.from_gdal_raster`, `RasterDataset.from_file` instead.
        """
        self.data: Optional[np.ndarray] = None
        self.meta: Optional[SceneMeta] = None

    def _check_validity(self):
        if (self.data is None) or (self.meta is None):
            raise ValueError("Not a correctly initialized RasterDataset, cannot write.")

    @staticmethod
    def read_meta(fpath_raster: str) -> SceneMeta:
        """Return the scene metadata of the raster file at `fpath_raster`."""
        with GDALRaster.open(fpath_raster) as raster:
            return read_meta_from_ds(raster.ds)

    @staticmethod
    def from_gdal_raster(
        ds: GDALRaster,
        bands: Optional[Union[List[int], int]] = None,
        window: Optional[SourceWindow] = None,
        extent: Optional[BoundingBox] = None,
        aoi: Optional[Geom] = None,
    ) -> RasterDataset:
        """Create a `RasterDataset` object from a given `GDALRaster` object.

        Parameters
        ----------
        ds : GDALRaster
            A `GDALRaster` object to read from.
        bands : Optional[Union[List[int], int]], optional
            List of band index or a band index to read, by default None to read all bands.
        window : Optional[SourceWindow], optional
            Output extent specified by source extent in image space coordinate, by default None.
        extent : Optional[BoundingBox], optional
            Output extent specified by coordinates under destination SRS, by default None
        aoi : Optional[Geom], optional
            Output extent specified by a geometry object (coordinates assumed
            to be in WGS84, see the projection below), by default None

        Returns
        -------
        RasterDataset
            Extracted in-memory raster dataset object.

        Note
        ----
        Only one of `window`, `extent` and `aoi` could be specified.
        """
        # Accept either the `GDALRaster` wrapper or a raw gdal Dataset.
        ds_toread = ds.ds if isinstance(ds, GDALRaster) else ds
        if (
            (bands is not None)
            or (window is not None)
            or (aoi is not None)
            or (extent is not None)
        ):
            # Destination SRS and pixel resolution come from the source itself.
            dest_srs = wkt_to_epsg(ds_toread.GetProjection())
            dest_res = abs(ds_toread.GetGeoTransform()[1])
            if (aoi is not None) and (extent is None):
                # Fold `aoi` into `extent`: project from WGS84 to the dataset
                # SRS, then snap the bounds to the pixel grid.
                projected_aoi = project_geom(
                    aoi, src_epsg="EPSG:4326", dest_epsg=dest_srs
                )
                extent_tmp = projected_aoi.bounds
                extent = nudge_bbox(extent_tmp, dest_res)
            elif extent is not None:
                extent = nudge_bbox(extent, dest_res)

            # `aoi` is intentionally not forwarded: it has been converted to
            # `extent` above.
            with translate_ds(ds, bands=bands, window=window, extent=extent) as gdr:
                # data = gdr.ds.ReadAsArray()
                data = parallel_read_array_from_ds(gdr.ds)
                meta = read_meta_from_ds(gdr.ds, True)
        else:
            # No subsetting requested: read the full dataset directly.
            # data = ds_toread.ReadAsArray()
            data = parallel_read_array_from_ds(ds_toread)
            meta = read_meta_from_ds(ds_toread, True)
            ds_toread = None  # drop the local reference to the raw dataset

        # Promote a single-band 2-D read to the canonical 3-D layout.
        if len(data.shape) == 2:
            data = data[np.newaxis, :, :]
        rds = RasterDataset()
        rds.data = data
        rds.meta = meta
        return rds

    @staticmethod
    def from_file(
        fpath_input: str,
        bands: Optional[Union[List[int], int]] = None,
        window: Optional[SourceWindow] = None,
        extent: Optional[BoundingBox] = None,
        aoi: Optional[Geom] = None,
    ) -> RasterDataset:
        """Create a `RasterDataset` object by reading from a given raster image file path.

        Parameters
        ----------
        fpath_input : str
            File path to a raster image file to be read from.
        bands : Optional[Union[List[int], int]], optional
            List of band index or a band index to read, by default None to read all bands.
        window : Optional[SourceWindow], optional
            Output extent specified by source extent in image space coordinate, by default None.
        extent : Optional[BoundingBox], optional
            Output extent specified by coordinates under destination SRS, by default None
        aoi : Optional[Geom], optional
            Output extent specified by a geometry object, by default None

        Returns
        -------
        RasterDataset
            Extracted in-memory raster dataset object

        Note
        ----
        Only one of `window`, `extent` and `aoi` could be specified.
        """
        with GDALRaster.open(fpath_input) as gr:
            needs_translate = any(
                arg is not None for arg in (bands, window, extent, aoi)
            )
            if not needs_translate:
                # Fast path: no subsetting, hand the dataset over directly.
                return RasterDataset.from_gdal_raster(gr.ds)
            # Let GDAL translate apply the band/extent subsetting first.
            with translate_ds(
                gr.ds, bands=bands, window=window, extent=extent, aoi=aoi
            ) as translated:
                return RasterDataset.from_gdal_raster(translated)

    @staticmethod
    def from_vrt_scene(
        vrt_scene: VRTScene,
        bands: Optional[Union[List[int], int]] = None,
        window: Optional[SourceWindow] = None,
        extent: Optional[BoundingBox] = None,
        aoi: Optional[Geom] = None,
    ) -> RasterDataset:
        """Create a `RasterDataset` object from a given `VRTScene` object.

        Parameters
        ----------
        vrt_scene : VRTScene
            A `VRTScene` object to read from.
        bands : Optional[Union[List[int], int]], optional
            List of band index or a single band index to read, by default None to read all bands.
        window : Optional[SourceWindow], optional
            Output extent specified by source extent in image space coordinate, by default None.
        extent : Optional[BoundingBox], optional
            Output extent specified by coordinates under destination SRS, by default None
        aoi : Optional[Geom], optional
            Output extent specified by a geometry object, by default None

        Returns
        -------
        RasterDataset
            Extracted in-memory raster dataset object

        Note
        ----
        Only one of `window`, `extent` and `aoi` could be specified.
        """
        # Open the backing VRT file and delegate to the GDALRaster reader.
        with GDALRaster.open(vrt_scene._vrt_file_path) as raster:
            return RasterDataset.from_gdal_raster(
                raster, bands=bands, window=window, extent=extent, aoi=aoi
            )

    @staticmethod
    def iter_from_file(
        fpath_input: str,
        block_x_size: int = 1024,
        block_y_size: int = 1024,
        buffer_x_size: int = 0,
        buffer_y_size: int = 0,
        bands: Optional[Union[List[int], int]] = None,
        boundary_treatment="shrink",
    ) -> RasterDatasetPerBlockTraverser:
        """Iterate read through a given raster image file as a series of `RasterDataset`s by a given block size,
        optionally with buffered paddings.

        Parameters
        ----------
        fpath_input : str
            File path to the raster image file to iterate through.
        block_x_size : int, optional
            Block size in the X direction, by default 1024
        block_y_size : int, optional
            Block size in the Y direction, by default 1024
        buffer_x_size : int, optional
            Buffer padding size in the X direction, by default 0
        buffer_y_size : int, optional
            Buffer padding size in the Y direction, by default 0
        bands : Optional[Union[List[int], int]], optional
            List of band index or a single band index to read, by default None to read all bands.
        boundary_treatment : str, optional
            How to treat the boundary condition when the moving block meets the end of rows/columns.
            Either "shrink" to shrink the block so to be within the valid zone but with smaller size than specified,
            or "shift" to shift the block to the valid zone and keep the size to be the same as specified.
            By default "shrink".

        Returns
        -------
        RasterDatasetPerBlockTraverser
            A traversing object to go through the given file.
        """
        # Thin delegation: all the heavy lifting lives in the traverser class.
        return RasterDatasetPerBlockTraverser(
            fpath_input,
            block_x_size=block_x_size,
            block_y_size=block_y_size,
            buffer_x_size=buffer_x_size,
            buffer_y_size=buffer_y_size,
            bands=bands,
            boundary_treatment=boundary_treatment,
        )

    @staticmethod
    def from_ndarray(data: np.ndarray, meta: SceneMeta) -> RasterDataset:
        """Create a `RasterDataset` from a given 2D or 3D array and companion metadata.

        Parameters
        ----------
        data : np.ndarray
            A 2-D ($n_{rows} X n_{cols}$) or 3-D ($n_{bands} X n_{cols} X n_{rows}$) array.
        meta : SceneMeta
            The companion metadata object.

        Returns
        -------
        RasterDataset
            The created `RasterDataset` object.

        Raises
        ------
        ValueError
            Raised when the given `data` is not a 2-D or 3-D array.
            Raised when the given `data` and `meta` do not have matching dimensions.
        """
        if len(data.shape) == 2:
            n_rows, n_cols = data.shape
            n_bands = 1
            data = data[np.newaxis, :, :]
        elif len(data.shape) == 3:
            n_bands, n_rows, n_cols = data.shape
        else:
            raise ValueError("Only support 2-D or 3-D ndarray")

        if (
            (n_rows != meta.n_rows)
            or (n_cols != meta.n_cols)
            or (n_bands != meta.n_bands)
        ):
            raise ValueError(
                "Provided ndarray `data` does not have compatible shape with the provided SceneMeta `meta`. "
                + f"data: [{n_bands,n_rows,n_cols}], meta: [{meta.n_bands},{meta.n_rows},{meta.n_cols}]"
            )

        rds = RasterDataset()
        rds.data = data
        rds.meta = meta
        return rds

    @staticmethod
    def create_dummy_raster(
        n_rows: int,
        n_cols: int,
        prj_wkt: str,
        geotrans: Tuple[numeric, numeric, numeric, numeric, numeric, numeric],
        dtype: npd.DTypeLike = np.uint8,
        fill_value: numeric = 0,
        nodata_val: Optional[numeric] = None,
    ) -> RasterDataset:
        """Create a dummy single band `RasterDataset` with given size, data type, and fill value

        Parameters
        ----------
        n_rows : int
            Number of rows.
        n_cols : int
            Number of columns.
        prj_wkt : str
            Projection as in WKT.
        geotrans : Tuple[numeric, numeric, numeric, numeric, numeric, numeric]
            Geotransform as a 6-element tuple.
        dtype : npd.DTypeLike, optional
            Data type for the resulting `RasterDataset`, by default np.uint8
        fill_value : numeric, optional
            Fill value used in the resulting `RasterDataset`, by default 0.
            Notice it is the caller's duty to make sure the `dtype` and `fill_value` are compatible.
        nodata_val : Optional[numeric], optional
            Nodata value for the resulting `RasterDataset`, by default None not to set.
            Notice it is the caller's duty to make sure `nodata_val` is compatible with the `dtype`.
            One typical use is to set the `nodata_val` same as the `fill_value`.

        Returns
        -------
        `RasterDataset`
            The created single band dummy raster.
        """
        data = np.full((1, n_rows, n_cols), fill_value=fill_value, dtype=dtype)
        meta = SceneMeta(
            n_cols=n_cols,
            n_rows=n_rows,
            n_bands=1,
            prj_wkt=prj_wkt,
            geotrans=geotrans,
            value_interpretations=[
                # Identity interpretation: scale 1, offset 0. The previous
                # `offset=1` shifted every interpreted value by one and looked
                # like a typo for the identity transform.
                ValueInterpretationMeta(scale=1, offset=0, nodata=nodata_val)
            ],
        )
        return RasterDataset.from_ndarray(data, meta)

    @staticmethod
    def create_dummy_raster_like(
        meta: SceneMeta,
        dtype: npd.DTypeLike = np.uint8,
        fill_value: numeric = 0,
        nodata_val: Optional[numeric] = None,
    ) -> RasterDataset:
        """Create a dummy single band `RasterDataset` mirroring the footprint of `meta`.

        Parameters
        ----------
        meta : SceneMeta
            Scene metadata; the result shares its `n_cols`, `n_rows`,
            `prj_wkt` and `geotrans` (but is always single band).
        dtype, fill_value, nodata_val
            See `RasterDataset.create_dummy_raster` for details.

        Returns
        -------
        `RasterDataset`
            The created single band dummy raster.
        """
        # Only the spatial footprint is borrowed from `meta`; everything else
        # is delegated to `create_dummy_raster`.
        return RasterDataset.create_dummy_raster(
            meta.n_rows,
            meta.n_cols,
            meta.prj_wkt,
            meta.geotrans,
            dtype=dtype,
            fill_value=fill_value,
            nodata_val=nodata_val,
        )

    @staticmethod
    def from_geom(
        geom: Geom,
        dest_res: numeric,
        dest_srs: Optional[str] = None,
        dtype: npd.DTypeLike = np.uint8,
        value_within_geom: numeric = 1,
        value_outside_geom: numeric = 0,
        nodata_val: Optional[numeric] = None,
    ) -> RasterDataset:
        """Create a single band mask RasterDataset from a given geometry.

        Parameters
        ----------
        geom : Geom
            Given geometry, with coordinates in WGS84 coordinate system.
        dest_res : numeric
            Destination resolution for the resulting raster,
            notice it is the caller's duty to make sure the `dest_res` is compatible with `dest_srs`.
        dest_srs : Optional[str], optional
            Destination srs as an EPSG string, by default None to
            deduce the EPSG string for a coordinate system under UTM system.
        dtype : npd.DTypeLike, optional
            Data type for the created mask raster dataset, by default np.uint8
        value_within_geom : numeric, optional
            Value within the given geometry, by default 1
        value_outside_geom : numeric, optional
            Value outside the given geometry, by default 0
        nodata_val : Optional[numeric], optional
            Nodata value for the resulting `RasterDataset`, by default None not to set.
            Notice it is the caller's duty to make sure `nodata_val` is compatible with the `dtype`.

        Returns
        -------
        `RasterDataset`
            The created single band mask raster.
        """
        # Default destination SRS: the UTM zone covering the geometry.
        if dest_srs is None:
            dest_srs = f"EPSG:{geom_to_epsg_under_utm(geom)}"

        # Footprint of the geometry under the destination SRS.
        if dest_srs == "EPSG:4326":
            raw_bbox = geom.bounds
        else:
            raw_bbox = project_geom(geom, "EPSG:4326", dest_srs).bounds

        # Snap the bounds to the pixel grid, then derive the raster geometry.
        snapped = nudge_bbox(raw_bbox, dest_res)
        width = int((snapped.right - snapped.left) / dest_res)
        height = int((snapped.upper - snapped.bottom) / dest_res)
        geotrans = [snapped.left, dest_res, 0, snapped.upper, 0, -dest_res]

        canvas = RasterDataset.create_dummy_raster(
            height,
            width,
            epsg_to_wkt(dest_srs),
            geotrans,
            dtype=dtype,
            fill_value=value_outside_geom,
            nodata_val=nodata_val,
        )
        # Burn the geometry interior onto the blank canvas.
        return canvas.paint(geom, painted_values=value_within_geom, inverse=False)

    def slice(self, bands: List[int], copy_ndarray: bool = False) -> RasterDataset:
        """Slice the raster dataset by selecting bands of interest.

        Parameters
        ----------
        bands : List[int]
            List of bands' index to be selected. Notice the band's index is 0-based.
        copy_ndarray : bool, optional
            Whether to enforce creating a copy of the sliced ndarray (True)
            or to use a view to the original ndarray (False), by default False.

        Returns
        -------
        RasterDataset
            Created sliced RasterDataset object.
        """
        self._check_validity()
        rds = RasterDataset()
        updated_data = self.data[bands, :, :]
        if copy_ndarray:
            updated_data = np.array(updated_data, copy=True)
        updated_meta = self.meta._asdict()
        updated_meta["n_bands"] = len(bands)
        if self.meta.value_interpretations is not None:
            updated_value_interpretations = []
            for band_ind in bands:
                updated_value_interpretations.append(
                    self.meta.value_interpretations[band_ind]
                )
            updated_meta["value_interpretations"] = updated_value_interpretations
        updated_meta = SceneMeta(**updated_meta)
        rds.data = updated_data
        rds.meta = updated_meta
        return rds

    def subset(
        self,
        window: Optional[SourceWindow] = None,
        aoi: Optional[Geom] = None,
        bands: Optional[List[int]] = None,
        copy_ndarray: bool = False,
    ) -> RasterDataset:
        """Subset the raster dataset by selecting only spatial extent of interest.

        Parameters
        ----------
        window : Optional[SourceWindow], optional
            Output extent specified by source extent in image space coordinate, by default None.
        aoi : Optional[Geom], optional
            Output extent specified by a geometry object, by default None.
        bands : Optional[List[int]], optional
            List of bands' index to be selected. Notice the band's index is 0-based.
            By default None, not to subset by bands.
        copy_ndarray : bool, optional
            Whether to enforce creating a copy of the sliced ndarray (True) or
            to allow using a view to the original ndarray (False), by default False.
            Notice underlying we call numpy for the subset and relying on its judgement whether to use a copy or a view.
            Setting this argument to True could enforce numpy to use the copy,
            but setting it as False would not guarantee a view to be used.

        Returns
        -------
        RasterDataset
            Subsetted `RasterDataset` object.

        Raises
        ------
        ValueError
            Raised when both `window` and `aoi` are specified, or neither of the two specified.
            Raised when the specified `aoi` does not intersect with the source `RasterDataset`.
        """
        self._check_validity()
        if window is None:
            if aoi is None:
                raise ValueError("One of `window` or `aoi` should be specified.")
            else:
                window = calc_window_from_aoi(self.meta, aoi)
                if window is None:
                    raise ValueError(
                        "Given `aoi` does not intersect with the RasterDataset."
                    )
        else:
            if aoi is not None:
                raise ValueError("Please specified only one of `window` or `aoi`.")

        rds = RasterDataset()
        if bands is None:
            updated_data = self.data[
                :,
                window.y_off : window.y_off + window.y_size,
                window.x_off : window.x_off + window.x_size,
            ]
        else:
            updated_data = self.data[
                bands,
                window.y_off : window.y_off + window.y_size,
                window.x_off : window.x_off + window.x_size,
            ]

        if copy_ndarray:
            updated_data = np.array(updated_data, copy=True)

        new_n_cols = int(window.x_size)
        new_n_rows = int(window.y_size)
        new_geotrans = list(self.meta.geotrans)
        new_geotrans[0] = (
            self.meta.geotrans[0]
            + window.x_off * self.meta.geotrans[1]
            + window.y_off * self.meta.geotrans[2]
        )
        new_geotrans[3] = (
            self.meta.geotrans[3]
            + window.x_off * self.meta.geotrans[4]
            + window.y_off * self.meta.geotrans[5]
        )

        if bands is None:
            new_n_bands = self.meta.n_bands
            new_value_interpretations = self.meta.value_interpretations
        else:
            new_n_bands = len(bands)
            if self.meta.value_interpretations is not None:
                new_value_interpretations = []
                for band_ind in bands:
                    new_value_interpretations.append(
                        self.meta.value_interpretations[band_ind]
                    )
            else:
                new_value_interpretations = None

        updated_meta = self.meta.update(
            n_cols=new_n_cols,
            n_rows=new_n_rows,
            geotrans=new_geotrans,
            n_bands=new_n_bands,
            value_interpretations=new_value_interpretations,
        )
        rds.data = updated_data
        rds.meta = updated_meta
        return rds

    def iter_by_block(
        self,
        block_x_size: int = 1024,
        block_y_size: int = 1024,
        buffer_x_size: int = 0,
        buffer_y_size: int = 0,
        bands: Optional[Union[List[int], int]] = None,
        boundary_treatment: str = "shrink",
    ) -> RasterDatasetPerBlockTraverser:
        """Iterate read through the `RasterDataset` object as a series of `RasterDataset`s by a given block size,
        optionally with buffered paddings.

        Parameters
        ----------
        block_x_size : int, optional
            Block size in the X direction, by default 1024
        block_y_size : int, optional
            Block size in the Y direction, by default 1024
        buffer_x_size : int, optional
            Buffer padding size in the X direction, by default 0
        buffer_y_size : int, optional
            Buffer padding size in the Y direction, by default 0
        bands : Optional[Union[List[int], int]], optional
            List of band index or a single band index to read, by default None to read all bands.
        boundary_treatment : str, optional
            How to treat the boundary condition when the moving block meets the end of rows/columns.
            Either "shrink" to shrink the block so to be within the valid zone but with smaller size than specified,
            or "shift" to shift the block to the valid zone and keep the size to be the same as specified.
            By default "shrink".

        Returns
        -------
        RasterDatasetPerBlockTraverser
            A traversing object to go through the given file.
        """
        # Thin wrapper: all traversal logic lives in RasterDatasetPerBlockTraverser,
        # which accepts a RasterDataset directly as its source.
        return RasterDatasetPerBlockTraverser(
            self,
            block_x_size=block_x_size,
            block_y_size=block_y_size,
            buffer_x_size=buffer_x_size,
            buffer_y_size=buffer_y_size,
            bands=bands,
            boundary_treatment=boundary_treatment,
        )

    def paint(
        self,
        geoms: Geom,
        painted_values: Union[numeric, List[numeric]],
        bands: Optional[Union[int, List[int]]] = None,
        inverse: bool = False,
        all_touched: bool = False,
    ) -> RasterDataset:
        """Paint the given geometry onto the raster dataset.

        Parameters
        ----------
        geoms : Geom
            The geometry to be painted onto the raster dataset.
        painted_values : Union[numeric, List[numeric]]
            Value(s) used to paint the geometry.
            If a single value is given, the geometry is going to be painted to all specified `bands` by that value.
            If a list of values is given, it is expected to have the same number of
            values as the number of `bands` to be painted.
        bands : Optional[Union[int, List[int]]], optional
            Bands' index for those to be painted,
            could be a single band index, or a list of band index, and notice they are 0-based,
            or None to include all bands, by default None.
        inverse : bool, optional
            Whether to paint values inside the geometry (False) or outside (True), by default False.
        all_touched : bool, optional
            Option controlling how to determine if a pixel should be burned with the given value.
            False (the default) uses the normal rendering mode that only sets pixels whose
            central points are within the geometry;
            True instructs all pixels touched by the geometry to be set with the given value.
            NOTE(review): the previous docstring described the two modes swapped; the wording
            above follows GDAL's ALL_TOUCHED semantics — confirm against `rasterize_ds`.

        Returns
        -------
        RasterDataset
            The result `RasterDataset` with geometry painted.
        """
        self._check_validity()
        if bands is None:
            bands = list(range(0, self.meta.n_bands))
            # Fix: `isinstance` instead of `type(...) in (int, float)` so that
            # numeric subclasses (e.g. bool, numpy float subclasses) are also
            # recognized as scalar burn values.
            if isinstance(painted_values, (int, float)):
                painted_values = self.meta.n_bands * [painted_values]
            elif len(painted_values) != self.meta.n_bands:
                raise ValueError(
                    "If a list of values is given for `painted_values`, "
                    "it should have same number of values as number of bands."
                )
        with self.to_gdal_raster(mode="u") as gdal_raster_dataset_input:
            with rasterize_ds(
                gdal_raster_dataset_input,
                geoms,
                burn_values=painted_values,
                inverse=inverse,
                all_touched=all_touched,
                bands=bands,
            ) as gdal_raster_dataset_output:
                return self.from_gdal_raster(gdal_raster_dataset_output)

    def crop(
        self, aoi: Geom, nodata_values: Optional[Union[numeric, List[numeric]]] = None
    ) -> RasterDataset:
        """Clip the raster dataset to a geometry.

        The result is spatially subset to the geometry's extent, and every pixel
        falling outside the geometry is overwritten with the nodata value(s).

        Parameters
        ----------
        aoi : Geom
            Geometry to clip against.
        nodata_values : Optional[Union[numeric, List[numeric]]], optional
            Nodata value(s) to burn outside the geometry: a single numeric applied
            to every band, a per-band list, or None (default) to take the nodata
            values recorded in `meta.value_interpretations`.

        Returns
        -------
        RasterDataset
            The clipped `RasterDataset`.

        Raises
        ------
        ValueError
            If `nodata_values` is omitted and `meta.value_interpretations` is absent,
            so no nodata value can be deduced.
        """
        self._check_validity()
        if nodata_values is None:
            interpretations = self.meta.value_interpretations
            if interpretations is None:
                raise ValueError(
                    "No `nodata_values` provided and cannot deduced from the `meta.value_interpretations`"
                )
            nodata_values = [vi.nodata for vi in interpretations]
        # Subset to the AOI extent first, then blank out everything outside the AOI.
        subsetted = self.subset(aoi=aoi)
        return subsetted.paint(aoi, painted_values=nodata_values, inverse=True)

    def warp(
        self,
        dest_srs: str,
        dest_extent: Optional[BoundingBox] = None,
        dest_res: Optional[numeric] = None,
        dest_nodata: Optional[Union[numeric, List[numeric]]] = None,
        cutline: Optional[Geom] = None,
        crop_to_cutline: bool = False,
        resample_alg: str = "near",
    ) -> RasterDataset:
        """Reproject the raster dataset into a target SRS.

        Parameters
        ----------
        dest_srs : str
            Target spatial reference system as an EPSG string ("EPSG:{code}").
        dest_extent : Optional[BoundingBox], optional
            Output extent; None (default) lets `gdal` choose it.
        dest_res : Optional[numeric], optional
            Output resolution; None (default) lets `gdal` choose it.
        dest_nodata : Optional[Union[numeric, List[numeric]]], optional
            Output nodata value(s) — a single value for all bands or a per-band
            list; None (default) keeps the source nodata value(s).
        cutline : Optional[Geom], optional
            Cutline geometry; None (default) disables the cutline.
        crop_to_cutline : bool, optional
            Whether to crop the output to the cutline (only meaningful when
            `cutline` is given), by default False.
        resample_alg : str, optional
            Resampling algorithm, one of "near", "bilinear", "cubic",
            "cubicspline", "lanczos", "average", "mode", "max", "min", "med",
            "q1", "q3", "sum"; by default "near".

        Returns
        -------
        RasterDataset
            The reprojected `RasterDataset`.

        Note
        -----
        Unlike `slice`, `subset`, `paint` and `crop` — which keep the source
        gridding so every output pixel maps to exactly one source pixel — `warp`
        generally resamples onto a new grid, even when `dest_srs` equals the
        source SRS, so an output pixel may blend several source pixels.
        """
        self._check_validity()
        warp_kwargs = {
            "dest_extent": dest_extent,
            "dest_res": dest_res,
            "dest_nodata": dest_nodata,
            "cutline": cutline,
            "crop_to_cutline": crop_to_cutline,
            "resample_alg": resample_alg,
        }
        with self.to_gdal_raster() as source_raster:
            with warp_ds(source_raster.ds, dest_srs, **warp_kwargs) as warped_raster:
                return self.from_gdal_raster(warped_raster)

    @staticmethod
    def from_npz_and_json(dir_input: str, fname_base: str) -> RasterDataset:
        """Create a `RasterDataset` object from a pair of npz and json file.

        Parameters
        ----------
        dir_input : str
            Path to the input directory.
        fname_base : str
            Base name for the input files.
            Thus the npz file would be `<fname_base>.npz` and the json file would be `<fname_base>.json`.

        Returns
        -------
        RasterDataset
            The created `RasterDataset` object.
        """
        fpath_data = os.path.join(dir_input, fname_base + ".npz")
        fpath_meta = os.path.join(dir_input, fname_base + ".json")
        with open(fpath_meta) as fin:
            meta = SceneMeta.from_json(fin.read())
        # Fix: `np.load` on an .npz returns an NpzFile that keeps the archive
        # open; use it as a context manager so the file handle is released.
        with np.load(fpath_data) as npz:
            data = npz["data"]
        return RasterDataset.from_ndarray(data, meta)

    def to_npz_and_json(self, dir_output: str, fname_base: str):
        """Persist this dataset as a compressed npz (array) plus a json (metadata).

        Parameters
        ----------
        dir_output : str
            Directory to write into.
        fname_base : str
            Base file name; the outputs are `<fname_base>.npz` and `<fname_base>.json`.
        """
        npz_path = os.path.join(dir_output, fname_base + ".npz")
        json_path = os.path.join(dir_output, fname_base + ".json")

        np.savez_compressed(npz_path, data=self.data)

        meta_json = self.meta.to_json()
        with open(json_path, "w") as fout:
            fout.write(meta_json)

    def to_geotiff(
        self,
        fpath_output: str,
        cloud_optimized: bool = False,
        method: str = "nearest",
        overviews: Optional[List[int]] = None,
        use_lerc_zstd: bool = False,
        max_z_error: numeric = 0,
        options: Optional[Dict[str, str]] = None,
    ):
        """Save the RasterDataset object to a geotiff file.

        Parameters
        ----------
        fpath_output : str
            File path for the output.
        cloud_optimized : bool, optional
            Whether to write to cloud-optimized-geotiff directly, by default False.
        method : str, optional
            Method used to create the overviews, can be one of "nearest", "mode", "average", "gauss",
            or their upper case version, only valid when `cloud_optimized` is True, by default "nearest".
        overviews : Optional[List[int]], optional
            Overview levels to be created, only valid when `cloud_optimized` is True,
            by default None to calculate the overview levels by the data's size.
        use_lerc_zstd : bool, optional
            Whether to use the LERC_ZSTD compression method, by default False not to use.
            Notice this option and the `max_z_error` is only valid
            when you are using our own provisioned gdal with LERC_ZSTD support.
        max_z_error : numeric, optional
            Maximum tolerated compression error when using LERC_ZSTD compression in lossy mode.
            By default 0 to use the lossless mode.
        options : Optional[Dict[str, str]], optional
            Additional geotiff writing options, by default None
        """
        self._check_validity()
        if not cloud_optimized:
            write_ndarray_to_geotiff(
                self.data,
                self.meta,
                fpath_output,
                use_lerc_zstd=use_lerc_zstd,
                max_z_error=max_z_error,
                options=options,
            )
        else:
            # Fix: open the GDALRaster inside a `with` block (as `paint`/`warp` do)
            # so the in-memory raster is released even if the conversion raises.
            with self.to_gdal_raster(mode="u") as gdal_raster:
                convert_to_cloud_optimized_geotiff(
                    gdal_raster,
                    fpath_output,
                    method=method,
                    overviews=overviews,
                    use_lerc_zstd=use_lerc_zstd,
                    max_z_error=max_z_error,
                )

    def to_jp2(
        self,
        fpath_output: str,
        use_kakadu=False,
        kakadu_nthreads: int = 1,
        options: Optional[Dict[str, str]] = None,
    ):
        """Write this dataset out as a JPEG2000 file.

        Parameters
        ----------
        fpath_output : str
            Output file path.
        use_kakadu : bool, optional
            True to write with the JP2KAK driver, False (default) to use JP2OpenJPEG.
        kakadu_nthreads : int, optional
            Positive thread count for Kakadu (only when `use_kakadu` is True),
            by default 1. Callers are responsible for choosing a sane value —
            an excessive count can degrade performance or hang the program.
        options : Optional[Dict[str, str]], optional
            Extra JPEG2000 writer options, by default None.
        """
        self._check_validity()
        writer_kwargs = {
            "use_kakadu": use_kakadu,
            "kakadu_nthreads": kakadu_nthreads,
            "options": options,
        }
        write_ndarray_to_jpeg2000(self.data, self.meta, fpath_output, **writer_kwargs)

    def to_gdal_raster(self, mode="r") -> GDALRaster:
        """Expose this dataset's in-memory array and metadata as a `GDALRaster`.

        Parameters
        ----------
        mode : str, optional
            Opening mode for the exposed raster: "r" for read-only or "u" for
            update, by default "r".

        Returns
        -------
        GDALRaster
            A `GDALRaster` backed by this dataset's ndarray and metadata.
        """
        self._check_validity()
        raster = GDALRaster.open_ndarray(self.data, self.meta, mode=mode)
        return raster

    def derive_nodata_mask(
        self,
        bands: Optional[Union[int, List[int]]] = None,
        nodata_vals: Optional[Union[numeric, List[numeric]]] = None,
    ) -> Optional[np.ndarray]:
        """Derive a nodata mask from the RasterDataset object.

        Parameters
        ----------
        bands : Optional[Union[int, List[int]]], optional
            Selected bands' indices to be involved in deriving the nodata mask, by default None to include all bands.
            Notice the index of band is 0-based.
        nodata_vals : Optional[Union[numeric, List[numeric]]], optional
            A nodata value or a list of nodata values to be used,
            by default None to use the nodata value provided by the `RasterDataset` object's
            metadata's `value_interpretations` field.

        Returns
        -------
        Optional[np.ndarray]
            None if no `nodata_values` is provided or found,
            otherwise a boolean ndarray with the same number of columns and rows as the data array,
            with True indicating the corresponding data value is nodata.

        Raises
        ------
        ValueError
            Raised if the `bands` and `nodata_vals` are both provided but with different lengths.
            Raised if the `nodata_vals` are not provided as a list or a single numeric value or None.
        """
        if bands is None:
            bands = list(range(self.meta.n_bands))
        elif isinstance(bands, int):
            bands = [bands]

        # derive the nodata_vals (either None then no need for nda_mask, or a list of nodata for each band)
        if isinstance(nodata_vals, int) or isinstance(nodata_vals, float):
            nodata_vals = [nodata_vals] * len(bands)
        elif isinstance(nodata_vals, list):
            if len(nodata_vals) != len(bands):
                raise ValueError(
                    "`nodata_vals`, if provided as a list of numerics, must have the same length as `bands`."
                )
        elif nodata_vals is None:
            # use nodata value defined in its `meta.value_interpretations`
            if self.meta.value_interpretations is not None:
                nodata_vals = [
                    self.meta.value_interpretations[band_ind].nodata
                    for band_ind in bands
                ]
        else:
            raise ValueError(
                "`nodata_vals` must be a numeric, or a list of numerics with a same length as bands, or None."
            )

        # only keep the bands that are with a valid nodata value
        if all([item is None for item in nodata_vals]):
            nodata_vals = None
        else:
            bands_included = []
            nodata_vals_included = []
            for ind, val in zip(bands, nodata_vals):
                if val is not None:
                    bands_included.append(ind)
                    nodata_vals_included.append(val)
            bands = bands_included
            nodata_vals = nodata_vals_included

        if nodata_vals is not None:
            # derive the nda_mask as long as it is possible
            if np.isnan(nodata_vals[0]):
                nda_mask = np.isnan(self.data[bands[0], :, :])
            else:
                nda_mask = self.data[bands[0], :, :] == nodata_vals[0]
            for ind in range(len(bands) - 1):
                if np.isnan(nodata_vals[ind]):
                    nda_mask |= np.isnan(self.data[bands[ind + 1], :, :])
                else:
                    nda_mask |= self.data[bands[ind + 1], :, :] == nodata_vals[ind + 1]
        else:
            nda_mask = None

        return nda_mask

    def stretch(
        self,
        bands: Union[int, List[int]] = 0,
        nodata_vals: Optional[Union[numeric, List[numeric]]] = None,
        scaling_strategy: Literal["linear", "percentage", "normal", "none"] = "linear",
        src_min: Optional[numeric] = None,
        src_max: Optional[numeric] = None,
        dest_min: numeric = 0,
        dest_max: numeric = 255,
        output_dtype: npd.DTypeLike = np.uint8,
        percentage: numeric = 2,
        sigma: numeric = 2,
    ):
        """Scale selected band(s) into a plottable value range.

        Parameters
        ----------
        bands : Union[int, List[int]], optional
            A 0-based band index, or a list of indices, to select and stretch;
            by default 0 (first band).
        nodata_vals : Optional[Union[numeric, List[numeric]]], optional
            Nodata value(s); by default None to take them from the metadata's
            `value_interpretations` field.
        scaling_strategy : Literal["linear", "percentage", "normal", "none"], optional
            Scaling strategy, by default "linear". With "none" the data is
            returned unscaled, and the caller must ensure the raw value range is
            plottable.
        src_min : Optional[numeric], optional
            Source value mapped to `dest_min` for "linear"; by default None to
            use each band's minimum.
        src_max : Optional[numeric], optional
            Source value mapped to `dest_max` for "linear"; by default None to
            use each band's maximum.
        dest_min : numeric, optional
            Lower bound of the output range, by default 0.
        dest_max : numeric, optional
            Upper bound of the output range, by default 255.
        output_dtype : npd.DTypeLike
            Output dtype, by default np.uint8. The caller must keep `dest_min`,
            `dest_max` and `output_dtype` mutually compatible (common combos:
            0/255/np.uint8, 0/65535/np.uint16, 0/1.0/np.float32).
        percentage : numeric, optional
            Percentile used by the "percentage" strategy, by default 2.
        sigma : numeric, optional
            Standard-deviation count used by the "normal" strategy, by default 2.

        Returns
        -------
        Tuple of the stretched ndarray and the nodata mask (or None when no
        mask could be derived).

        Raises
        ------
        ValueError
            If `scaling_strategy` is not one of the supported names.
        """
        nda_mask = self.derive_nodata_mask(bands, nodata_vals)
        nda_stretched = np.squeeze(self.data[[bands], :, :])

        valid_mask = None if nda_mask is None else ~nda_mask

        # Output-range arguments shared by every stretch flavor.
        shared_kwargs = {
            "dest_min": dest_min,
            "dest_max": dest_max,
            "output_dtype": output_dtype,
            "valid_mask": valid_mask,
        }

        if scaling_strategy == "linear":
            nda_stretched = color_stretch_linear(
                nda_stretched, src_min=src_min, src_max=src_max, **shared_kwargs
            )
        elif scaling_strategy == "normal":
            nda_stretched = color_stretch_normal(
                nda_stretched, sigma=sigma, **shared_kwargs
            )
        elif scaling_strategy == "percentage":
            nda_stretched = color_stretch_percentage(
                nda_stretched, percentage=percentage, **shared_kwargs
            )
        elif scaling_strategy != "none":
            # "none" deliberately leaves the data untouched.
            raise ValueError("Unknown type of `scaling_strategy`.")

        return nda_stretched, nda_mask

    def to_leafmap(self, bands, zoom=5, layer_name="GeoTIFF"):
        # Placeholder for rendering the dataset on an interactive leafmap view;
        # intentionally unimplemented.
        raise NotImplementedError

    def to_plottable(
        self,
        bands: Optional[
            Union[int, Tuple[int, int, int], Tuple[int, int, int, int]]
        ] = None,
        nodata_vals: Optional[Union[numeric, List[numeric]]] = None,
        scaling_strategy: Literal["linear", "percentage", "normal", "none"] = "linear",
        src_min: Optional[numeric] = None,
        src_max: Optional[numeric] = None,
        percentage: numeric = 2,
        sigma: numeric = 2,
        with_mask: bool = False,
        nodata_color: Optional[
            Union[Tuple[int, int, int], Tuple[int, int, int, int]]
        ] = None,
    ) -> Union[np.ndarray, Tuple[np.ndarray, Optional[np.ndarray]]]:
        """Convert the RasterDataset object to a plottable Gray, RGB or RGBA ndarray.

        Parameters
        ----------
        bands : Optional[Union[int, Tuple[int, int, int], Tuple[int, int, int, int]]], optional
            A band index (Gray), or a list of 3 (RGB) or 4 (RGBA) bands to be selected,
            by default None to use the first 3 bands for a RGB ndarray.
        nodata_vals : Optional[Union[numeric, numeric, List[numeric]]], optional
            A nodata value or a list of nodata values to be used,
            by default None to use the nodata value provided by the `RasterDataset` object's
            metadata's `value_interpretations` field.
        scaling_strategy : Literal["linear", "normal", "percentage", "none"], optional
            Strategy to be used to scale the `RasterDataset`'s data array, by default "linear",
            other options are "normal", "percentage" and "none".
            Notice when "none" is used, no scaling is applied, and it is the caller's duty to make sure
            the actual value range in your selected bands are compatible with a plottable RGB or RGBA ndarray.
        src_min : Optional[numeric], optional
            Minimum value to be scaled to 0, if using `scaling_strategy` "linear",
            by default None to use the minimum value of each selected band for the data array.
        src_max : Optional[numeric], optional
            Maximum value to be scaled to 255, if using `scaling_strategy` "linear",
            by default None to use the maximum value of each selected band for the data array.
        percentage : numeric, optional
            Percentile for stretching, if using `scaling_strategy` "percentage", by default 2.
        sigma : numeric, optional
            Number of standard deviation, if using `scaling_strategy` "normal", by default 2.
        with_mask : bool, optional
            Whether to return the nodata mask, by default False not to return.
        nodata_color : Optional[Union[Tuple[int, int, int], Tuple[int, int, int, int]]], optional
            A color to be used for nodata pixels. By default None not to specify a color.
            Could be a tuple of 3 integers (RGB) if the given `bands` contains three bands,
            or 4 integers (RGBA) if 4 `bands` provided.
            All the color values for each of R/G/B/A should be in the range of 0 - 255.

        Returns
        -------
        Union[np.ndarray, Tuple[np.ndarray, Optional[np.ndarray]]]
            The scaled plottable Gray, RGBA ndarray, in the shape of [n_rows X n_cols] or [n_rows X n_cols X 3 or 4].
            Notice if plotting for 3 or 4 bands, the alpha band is automatically applied to only show the valid zone,
            while when working in the Gray mode, the nodata area would not be masked,
            and the users are recommended to call with the `with_mask` option to
            get the nodata mask and proceed by themselves.
            If `with_mask` is True, also return the nodata mask as a boolean array of shape [n_rows X n_cols]
            or None if the nodata mask cannot be derived.

        Raises
        ------
        ValueError
            Raised if unknown `scaling_strategy` is provided.
            Raised if incompatible `bands` or `nodata_vals` are provided.
        """
        # Normalize `bands`: default to the first three bands (RGB), and wrap a
        # single index into a one-element list (Gray).
        if bands is None:
            bands = (0, 1, 2)
        elif isinstance(bands, int):
            bands = [bands]
        if len(bands) not in (1, 3, 4):
            raise ValueError("You should only specify 1, or 3 or 4 bands.")

        # Stretch the selected bands into 0-255 uint8 for plotting.
        nda_toplot, nda_mask = self.stretch(
            bands,
            nodata_vals=nodata_vals,
            scaling_strategy=scaling_strategy,
            src_min=src_min,
            src_max=src_max,
            dest_min=0,
            dest_max=255,
            output_dtype=np.uint8,
            percentage=percentage,
            sigma=sigma,
        )

        if len(bands) > 1:
            # Move the bands axis last: [bands, rows, cols] -> [rows, cols, bands].
            nda_toplot = np.moveaxis(nda_toplot, 0, -1)
            if nda_mask is not None:
                if len(bands) == 3:
                    if nodata_color is not None:
                        # Paint nodata pixels with the given RGB color and append a
                        # fully-opaque alpha band so the painted color stays visible.
                        nda_toplot[nda_mask, :] = np.array(nodata_color)
                        nda_toplot = np.dstack(
                            (
                                nda_toplot,
                                np.full(nda_mask.shape, fill_value=255, dtype=np.uint8),
                            )
                        )
                    else:
                        # Append an alpha band that is transparent exactly on nodata pixels.
                        nda_toplot = np.dstack((nda_toplot, (~nda_mask) * 255))
                elif len(bands) == 4:
                    if nodata_color is not None:
                        # Overwrite all 4 channels of nodata pixels; `nodata_color`
                        # is expected to carry 4 values (RGBA) in this branch.
                        nda_toplot[nda_mask, :] = np.array(nodata_color)
                    else:
                        # Zero out alpha on nodata pixels (bool multiplies as 0/1).
                        nda_toplot[:, :, -1] *= ~nda_mask
        if with_mask:
            return nda_toplot, nda_mask
        else:
            return nda_toplot

    def to_plottable_cmap(
        self,
        band: int = 0,
        nodata_val: Optional[numeric] = None,
        cmap: Union[str, Colormap] = "viridis",
        scaling_strategy: Literal["linear", "percentage", "normal", "none"] = "linear",
        src_min: Optional[numeric] = None,
        src_max: Optional[numeric] = None,
        percentage: numeric = 2,
        sigma: numeric = 2,
        with_mask: bool = False,
        nodata_color: Optional[Tuple[int, int, int]] = None,
    ) -> Union[np.ndarray, Tuple[np.ndarray, Optional[np.ndarray]]]:
        """Render a single band as an RGBA ndarray through a matplotlib colormap.

        Parameters
        ----------
        band : int, optional
            0-based index of the band to render, by default 0.
        nodata_val : Optional[numeric], optional
            Nodata value; by default None to take it from the metadata's
            `value_interpretations` field.
        cmap : Union[str, Colormap]
            A `matplotlib.colors.Colormap` or the name of any matplotlib
            colormap, by default "viridis".
        scaling_strategy : Literal["linear", "percentage", "normal", "none"], optional
            Strategy used to scale the band values, by default "linear". With
            "none" no scaling happens and the caller must guarantee a plottable
            value range.
        src_min : Optional[numeric], optional
            Source value mapped to the low end for "linear"; by default None to
            use the band's minimum.
        src_max : Optional[numeric], optional
            Source value mapped to the high end for "linear"; by default None to
            use the band's maximum.
        percentage : numeric, optional
            Percentile used by the "percentage" strategy, by default 2.
        sigma : numeric, optional
            Standard-deviation count used by the "normal" strategy, by default 2.
        with_mask : bool, optional
            Whether to also return the nodata mask, by default False.
        nodata_color : Optional[Tuple[int, int, int]], optional
            RGB color (each channel 0-255) painted on nodata pixels; by default
            None, which instead makes nodata pixels fully transparent.

        Returns
        -------
        Union[np.ndarray, Tuple[np.ndarray, Optional[np.ndarray]]]
            The RGBA ndarray of shape [n_rows X n_cols X 4]; when `with_mask`
            is True, also the boolean nodata mask of shape [n_rows X n_cols]
            (or None if it could not be derived).

        Raises
        ------
        ValueError
            Raised if unknown `scaling_strategy` is provided.
            Raised if incompatible `bands` or `nodata_vals` are provided.
        """
        # Scale to 0.0-1.0 float32 — the range a matplotlib colormap expects.
        nda_scaled, nda_mask = self.stretch(
            band,
            nodata_vals=nodata_val,
            scaling_strategy=scaling_strategy,
            src_min=src_min,
            src_max=src_max,
            dest_min=0,
            dest_max=1.0,
            output_dtype=np.float32,
            percentage=percentage,
            sigma=sigma,
        )

        # bytes=True yields uint8 RGBA channels in 0-255.
        mappable = cm.ScalarMappable(norm=None, cmap=cmap)
        nda_rgba = mappable.to_rgba(nda_scaled, bytes=True)

        if nda_mask is not None:
            if nodata_color is None:
                # Make nodata pixels fully transparent via the alpha channel.
                nda_rgba[:, :, -1] = ~nda_mask * 255
            else:
                # Paint the RGB channels of nodata pixels with the given color.
                nda_rgba[nda_mask, :-1] = np.array(nodata_color)

        return (nda_rgba, nda_mask) if with_mask else nda_rgba

    def to_polygon(
        self,
        fpath_polygon: str,
        band_ind: int = 0,
        mask_band: Optional[Union[int, np.ndarray]] = None,
        eight_connected: bool = True,
    ) -> bool:
        """Polygonize one band of this raster into a vector file.

        The output format is inferred from the file extension and may be
        `shp`, `gpkg` or `geojson`.

        Parameters
        ----------
        fpath_polygon : str
            Path of the output vector file (`shp`, `gpkg` or `geojson`).
        band_ind : int, optional
            0-based index of the band to polygonize, by default 0 (first band).
        mask_band : Optional[Union[int, np.ndarray, gdal.Band]], optional
            Optional mask controlling which pixels participate:
            1) None — no mask is applied;
            2) an integer — 0-based band index within this raster, which the
               caller must ensure is within the raster's band range;
            3) a np.ndarray — a 2-D array with the same number of rows and
               columns as this raster, where every non-0 pixel is treated as
               valid data and included in the polygon transformation.
            By default None.
        eight_connected : bool, optional
            Use eight-connectedness when True, four-connectedness when False.
            By default True.

        Returns
        -------
        bool
            True when the conversion succeeded.

        Raises
        ------
        err
            Propagated when the underlying `gdal.Polygonize` call fails.

        Notes
        -----
        The algorithm will generally produce very dense polygon geometries,
        with edges that follow exactly on pixel boundaries for all non-interior pixels.
        For non-thematic raster data (such as satellite images) the result will
        essentially be one small polygon per pixel, and memory and output layer sizes will be substantial.
        The algorithm is primarily intended for relatively simple thematic imagery, masks, and classification results.
        """
        # Materialize a GDAL-backed view of this dataset, polygonize it, and
        # make sure the temporary raster handle is released afterwards.
        raster_handle = self.to_gdal_raster()
        with raster_handle as gdal_rst:
            succeeded = raster_to_polygon(
                gdal_rst,
                fpath_polygon,
                band_ind=band_ind,
                mask_band=mask_band,
                eight_connected=eight_connected,
            )
        return succeeded

    def to_simple_footprint(self, lon_th=180) -> Optional[dict]:
        """Compute the simplified convex footprint of the RasterDataset

        The footprint is derived from the valid-data mask of the FIRST band
        only, reprojected to EPSG:4326 when needed, then simplified to the
        convex hull of the (possibly antimeridian-split) boundary polygon.

        Parameters
        ----------
        lon_th: int
            Longitude threshold indicating if footprint crosses Antimeridian.
            Default: 180.

        Returns
        -------
        Optional[dict]
            A dictionary of the simplified convex footprint of the RasterDataset,
            or None when the raster is entirely nodata or no bounding
            coordinates could be derived.
            {
                "black_ratio": black_ratio, "geom": geom_sim, "bbox": bounds
            }
        """
        # Valid-data mask from the first band's nodata value only;
        # other bands are not consulted.
        nodata_val = self.meta.value_interpretations[0].nodata
        nda_alpha = self.data[0] != nodata_val
        # Fraction of nodata ("black") pixels over the whole scene.
        black_ratio = 1 - nda_alpha.sum() / self.meta.n_size

        if black_ratio == 1:
            # Entirely nodata -- no footprint exists.
            footprint = None
        else:
            # Trace candidate boundary coordinates of the valid-data region.
            coords = _get_possible_bounding_coords(nda_alpha, self.meta.geotrans)
            if len(coords) == 0:
                footprint = None
            else:
                geom = Geom.make_polygon(coords)
                # Footprints are always expressed in WGS84 (EPSG:4326).
                if self.meta.prj_epsg != "EPSG:4326":
                    geom = project_geom(geom, self.meta.prj_epsg, "EPSG:4326")
                bounds = geom.bounds

                if geom.check_if_antimeridian_crossing(lon_th=lon_th):
                    geom = geom.fix_antimeridian_crossing()
                    # NOTE(review): left/right are swapped so the bbox reads
                    # west-side -> east-side across the antimeridian; confirm
                    # downstream consumers expect this convention.
                    bounds.left, bounds.right = bounds.right, bounds.left
                    # Get the convex hull of each polygon for simplification
                    geom_sim = Geom.create_an_empty_geom("MultiPolygon")
                    for poly_coords in geom.to_geometry()["coordinates"]:
                        poly_hull = Geom.from_shapely(
                            Geom({"type": "Polygon", "coordinates": poly_coords})
                            .to_shapely()
                            .convex_hull
                        )
                        geom_sim = geom_sim.union(poly_hull)
                else:
                    # Single polygon: its convex hull is the simplified footprint.
                    geom_sim = Geom.from_shapely(geom.to_shapely().convex_hull)
                footprint = {
                    "black_ratio": black_ratio,
                    "geom": geom_sim,
                    "bbox": bounds,
                }
        return footprint

    def to_grids(
        self,
        bands_selected: Optional[Dict[str, int]] = None,
        geom_type: Literal["polygon", "point"] = "point",
        coord_loc: Literal["upper-left", "center"] = "upper-left",
    ) -> gpd.GeoDataFrame:
        """Explode the RasterDataset into a GeoDataFrame with one row per grid cell.

        Parameters
        ----------
        bands_selected : Optional[Dict[str, int]], optional
            Mapping from output column name to 0-based band index for the band
            values to carry into the GeoDataFrame, e.g. `{"field0": 0}`.
            By default None, which selects only the first band under the
            column name `Field0`.
        geom_type : Literal["polygon", "point"], optional
            Geometry used to represent each grid cell, either "point" or
            "polygon", by default "point".
        coord_loc : Literal["upper-left", "center"], optional
            Where a pixel's (x, y) coordinate sits within that pixel,
            either "upper-left" or "center", by default "upper-left".

        Returns
        -------
        gpd.GeoDataFrame
            GeoDataFrame with columns:
            `xs` for the x coordinates;
            `ys` for the y coordinates;
            `geometry` for the cell geometry (point or polygon);
            plus one column per entry in `bands_selected`.

        Notes
        -----
        The whole raster is materialized as an in-memory GeoDataFrame, costing
        roughly 2 - 3 times the original memory footprint. Intended for small
        RasterDataset objects only — use with caution.
        """
        bounds = self.meta.bounds
        x_res, y_res = self.meta.x_res, self.meta.y_res
        grid_xs = bounds.left + np.arange(0, self.meta.n_cols) * x_res
        grid_ys = bounds.upper + np.arange(0, self.meta.n_rows) * y_res
        mesh_xs, mesh_ys = np.meshgrid(grid_xs, grid_ys)
        flat_xs = mesh_xs.flatten()
        flat_ys = mesh_ys.flatten()

        half_x = 0.5 * x_res
        half_y = 0.5 * y_res

        def _cell_geom(x, y):
            # Build one geometry for the cell anchored at (x, y), honoring
            # geom_type / coord_loc; only called for the two known geom types.
            if geom_type == "point":
                if coord_loc == "upper-left":
                    return Geom.make_point((x + half_x, y + half_y))
                return Geom.make_point((x, y))
            if coord_loc == "upper-left":
                return Geom.make_polygon(
                    [
                        [x, y + y_res],
                        [x + x_res, y + y_res],
                        [x + x_res, y],
                        [x, y],
                    ]
                )
            return Geom.make_polygon(
                [
                    [x - half_x, y + half_y],
                    [x + half_x, y + half_y],
                    [x + half_x, y - half_y],
                    [x - half_x, y - half_y],
                ]
            )

        # TODO, could make this more efficient by using primitives from geopandas but let us keep things simple for now
        lst_geoms = []
        if geom_type in ("point", "polygon"):
            lst_geoms = [
                _cell_geom(x, y)._shapely_geom for x, y in zip(flat_xs, flat_ys)
            ]

        tmp_data = {
            "xs": flat_xs,
            "ys": flat_ys,
            "geom": lst_geoms,
        }
        if bands_selected is None:
            bands_selected = {"Field0": 0}
        for field_name, band_ind in bands_selected.items():
            tmp_data[field_name] = self.data[band_ind].flatten()

        df_output = gpd.GeoDataFrame(data=tmp_data)
        df_output = df_output.set_geometry(df_output.geom)
        del df_output["geom"]
        return df_output.set_crs(self.meta.prj_epsg)

    def get_coords(self, enforce_wgs84: bool = False) -> Tuple[np.ndarray, np.ndarray]:
        """Get coordinates for each pixel in the RasterDataset object

        Parameters
        ----------
        enforce_wgs84 : bool, optional
            Whether to enforce using WGS84 for the returned coordinates,
            by default False not to enforce and stay within the `RasterDataset` object's own coordinate system.
            When True and the dataset is already in EPSG:4326, the native
            coordinates are returned unchanged.

        Returns
        -------
        Tuple[np.ndarray, np.ndarray]
            A tuple of two ndarray, each ndarray is with a shape of [n_rows X n_cols].
            The first one representing the x (or longitude) coordinates,
            and the second one representing the y (or latitude) coordinates.
        """
        bbox_tmp = self.meta.bounds
        x_res = self.meta.x_res
        y_res = self.meta.y_res
        xs = bbox_tmp.left + np.arange(0, self.meta.n_cols) * x_res
        ys = bbox_tmp.upper + np.arange(0, self.meta.n_rows) * y_res
        nda_xs, nda_ys = np.meshgrid(xs, ys)
        if enforce_wgs84:
            try:
                prj_epsg_no = self.meta.prj_epsg_no
            except ValueError:
                # EPSG code could not be determined; treat as "not 4326"
                # and reproject via the WKT below.
                prj_epsg_no = None
            if prj_epsg_no != 4326:
                transformer = pyproj.Transformer.from_proj(
                    self.meta.prj_wkt, "EPSG:4326", always_xy=True
                )
                nda_lons, nda_lats = transformer.transform(nda_xs, nda_ys)
                return (nda_lons, nda_lats)
        # BUG FIX: previously, `enforce_wgs84=True` on a dataset already in
        # EPSG:4326 fell through every branch and returned None. Both the
        # non-enforcing path and the already-WGS84 path now return the
        # native coordinate grids.
        return (nda_xs, nda_ys)


def mosaic_raster_files(
    lst_files: List[str],
    resample_alg: str = "nearest",
    src_nodata: Optional[Union[numeric, List[numeric]]] = None,
    dest_extent: Optional[BoundingBox] = None,
    dest_aoi: Optional[Geom] = None,
    crop_to_aoi: bool = False,
    dest_srs: Optional[str] = None,
    dest_res: Optional[numeric] = None,
    dest_nodata: Optional[Union[numeric, List[numeric]]] = None,
    n_threads: int = 4,
    mem: Optional[int] = None,
    progress: Optional[bool] = False,
    fpath_dest: Optional[str] = None,
    options: Optional[Dict] = None,
) -> Optional[RasterDataset]:
    # BUG FIX: the return annotation was `Optional(RasterDataset)` -- a call
    # expression, not a subscript. It only "worked" because annotations are
    # lazy (`from __future__ import annotations`); any runtime introspection
    # (e.g. typing.get_type_hints) would have raised.
    """Mosaic multiple raster files from different spatial reference systems.

    Parameters
    ----------
    lst_files: List[str]
        A list of raster file paths
    resample_alg : Optional[str]
        Resampling algorithm to be used, could be one of
        "nearest", "bilinear", "cubic", "cubicspline", "lanczos", "average", and "mode",
        by default "nearest".
    src_nodata : Optional[Union[numeric, List[numeric]]]
        Source data's nodata value(s),
        could be a single value for all bands, or a list of values with each value for one band,
        by default None
    dest_extent : Optional[BoundingBox]
        Destination extent as under the result spatial reference system, by default None
    dest_aoi: Optional[Geom]
        Geometry of the output AOI, by default None
    crop_to_aoi: bool
        If crop the output raster to AOI. Default: False
    dest_srs : Optional[str]
        Destination spatial reference system as a EPSG string, eg. "EPSG:4326", "EPSG:32601", etc.,
        by default None
    dest_res: Optional[numeric]
        Destination resolution, by default None
    dest_nodata : Optional[Union[numeric, List[numeric]]]
        Destination data's nodata value(s),
        could be a single value for all bands, or a list of values with each value for one band,
        by default None to use the source data's nodata value(s).
    n_threads : int, optional
        Number of threads to be used in warping. By default 4.
    mem: int, optional
        Size of working buffer in MB. By default to None, set by GDAL itself.
    progress: Optional[bool]
        If showing the progress of processing. Default: False
    fpath_dest: Optional[str]
        Destination output file path of the mosaic geoimage. Default: None
        NOTE if write to a file, no raster dataset will be returned.
    options : Optional[Dict[str, str]], optional
        Additional options passed into gdal for creating the output file, by default None.
        Only used when `fpath_dest` is provided.

    Returns
    -------
    Optional[RasterDataset]
        The mosaicked `RasterDataset` when `fpath_dest` is None and an
        in-memory result was produced; otherwise None (the result, if any,
        is written to `fpath_dest`).

    Notes
    -----
    Support input raster files from different spatial reference systems.
    """

    if dest_srs is None:
        # Fall back to the EPSG most common among the inputs.
        epsg_code = get_common_epsg_from_geoimages(lst_files)
        dest_srs = f"EPSG:{epsg_code}"
        logger.warning(f"`dest_srs` not specified, use the most common `{dest_srs}`")

    # All intermediate VRTs live in a throw-away directory cleaned up on exit.
    with temp_dir(
        dir_name=f"mosaic_raster_files_{uuid.uuid4()}",
        base_dir=os.path.join(
            DEFAULT_CACHE_DIR,
            "common",
            "geoimage",
            "raster_dataset",
        ),
    ) as tmp_dir:
        vrt_scenes = []
        for fp in tqdm(lst_files, desc="Preparing source VRTs", disable=not progress):
            # Sources already in the target SRS are used as-is; everything
            # else is warped into a temporary VRT first.
            if f"EPSG:{get_epsg_from_geoimage(fp)}" == dest_srs:
                fp_vrt = fp
            else:
                fp_vrt = os.path.join(tmp_dir, f"{uuid.uuid4()}.vrt")
                warp_ds(
                    fp,
                    dest_srs=dest_srs,
                    resample_alg=resample_alg,
                    src_nodata=src_nodata,
                    dest_nodata=dest_nodata,
                    n_threads=n_threads,
                    mem=mem,
                    fpath_dest=fp_vrt,
                )
            vs = VRTScene.from_vrt_file_path(fp_vrt)
            vrt_scenes.append(vs)

        vrt_scene = VRTSceneCollection(vrt_scenes).mosaic_vrt()

        with GDALRaster.open(vrt_scene._vrt_file_path) as gdal_rst:
            # A full warp is only needed when cutting to an AOI or when the
            # nodata value actually changes; otherwise a cheaper translate
            # suffices.
            if (dest_aoi is not None) or (
                src_nodata is not None
                and dest_nodata is not None
                and src_nodata != dest_nodata
            ):
                out_ds = warp_ds(
                    gdal_rst,
                    dest_srs=dest_srs,
                    dest_extent=dest_extent,
                    dest_nodata=dest_nodata,
                    src_nodata=src_nodata,
                    cutline=dest_aoi,
                    crop_to_cutline=crop_to_aoi,
                    dest_res=dest_res,
                    resample_alg=resample_alg,
                    n_threads=n_threads,
                    mem=mem,
                    progress=progress,
                    fpath_dest=fpath_dest,
                    options=options,
                )
            else:
                if src_nodata is not None:
                    nodata = src_nodata
                else:
                    nodata = dest_nodata
                out_ds = translate_ds(
                    gdal_rst,
                    extent=dest_extent,
                    dest_res=dest_res,
                    nodata=nodata,
                    resample_alg=resample_alg,
                    n_threads=n_threads,
                    progress=progress,
                    fpath_dest=fpath_dest,
                    options=options,
                )
            # Only materialize an in-memory RasterDataset when no output file
            # was requested; otherwise the result lives at `fpath_dest`.
            if fpath_dest is None and out_ds is not None:
                with out_ds:
                    rds = RasterDataset.from_gdal_raster(out_ds)
                    return rds


def get_common_epsg_from_raster_datasets(lst_rst: List[RasterDataset]) -> str:
    """Return the EPSG code shared by the largest number of raster datasets.

    Ties are broken by first-encountered order, per `Counter.most_common`.
    """
    # NOTE(review): the annotation says `str`, but the value returned is
    # whatever `meta.prj_epsg_no` yields (elsewhere formatted as
    # f"EPSG:{code}", suggesting a number) -- confirm the intended type.
    tally = Counter(rst.meta.prj_epsg_no for rst in lst_rst)
    most_common_epsg, _ = tally.most_common(1)[0]
    return most_common_epsg


def mosaic_raster_datasets(
    lst_rst: List[RasterDataset],
    resample_alg: str = "nearest",
    src_nodata: Optional[Union[numeric, List[numeric]]] = None,
    dest_extent: Optional[BoundingBox] = None,
    dest_aoi: Optional[Geom] = None,
    crop_to_aoi: bool = False,
    dest_srs: Optional[str] = None,
    dest_res: Optional[numeric] = None,
    dest_nodata: Optional[Union[numeric, List[numeric]]] = None,
) -> RasterDataset:
    """Mosaic a list of raster datasets from different spatial reference systems.

    Parameters
    ----------
    lst_rst : List[RasterDataset]
        List of RasterDataset to be mosaiced.
    resample_alg : Optional[str]
        Resampling algorithm to be used, could be one of
        "nearest", "bilinear", "cubic", "cubicspline", "lanczos", "average", and "mode",
        by default "nearest".
    src_nodata : Optional[Union[numeric, List[numeric]]]
        Source data's nodata value(s),
        could be a single value for all bands, or a list of values with each value for one band,
        by default None
    dest_extent : Optional[BoundingBox]
        Destination extent as under the result spatial reference system, by default None
    dest_aoi: Optional[Geom]
        Geometry of the output AOI, by default None
    crop_to_aoi: bool
        If crop the output raster to AOI. Default: False
    dest_srs : Optional[str]
        Destination spatial reference system as a EPSG string, eg. "EPSG:4326", "EPSG:32601", etc.,
        by default None
    dest_res: Optional[numeric]
        Destination resolution, by default None
    dest_nodata : Optional[Union[numeric, List[numeric]]]
        Destination data's nodata value(s),
        could be a single value for all bands, or a list of values with each value for one band,
        by default None to use the source data's nodata value(s).

    Returns
    -------
    RasterDataset
        Mosaicked RasterDataset.

    Raises
    ------
    Exception
        Raised when mosaicking the reprojected datasets fails; the original
        failure is attached as the cause.

    Notes
    -----
    Support input raster datasets from different spatial reference systems.
    """
    if dest_srs is None:
        # Fall back to the EPSG most common among the inputs.
        epsg_code = get_common_epsg_from_raster_datasets(lst_rst)
        dest_srs = f"EPSG:{epsg_code}"
        logger.warning(f"`dest_srs` not specified, use the most common `{dest_srs}`")

    # reproject all rasters to the same SRS
    dss = []
    for rst in lst_rst:
        if rst.meta.prj_epsg != dest_srs:
            rst = rst.warp(dest_srs=dest_srs)
        # NOTE(review): the underlying dataset handle is collected while the
        # GDALRaster context is still open and used after it exits; this
        # assumes GDALRaster.__exit__ keeps `ds` alive -- confirm.
        with rst.to_gdal_raster() as grst:
            dss.append(grst.ds)

    # mosaic reprojected rasters
    try:
        with mosaic_and_crop_ds(
            dss,
            resample_alg=resample_alg,
            src_nodata=src_nodata,
            dest_extent=dest_extent,
            dest_aoi=dest_aoi,
            crop_to_aoi=crop_to_aoi,
            dest_res=dest_res,
            dest_nodata=dest_nodata,
        ) as gdal_raster:
            rst_result = RasterDataset.from_gdal_raster(gdal_raster)
    except Exception as e:
        # FIX: chain explicitly so the original traceback is preserved as
        # __cause__ rather than only implicitly as __context__.
        raise Exception(
            f"Failed to mosaic the list of raster datasets due to `{e}`"
        ) from e
    finally:
        # Release GDAL dataset handles regardless of success.
        for i in range(len(dss)):
            dss[i] = None
        dss = None
    return rst_result


def stack_raster_datasets(
    lst_rst: List[RasterDataset],
    skip_check: bool = False,
) -> RasterDataset:
    """Stack multiple RasterDataset together

    Parameters
    ----------
    lst_rst : List[RasterDataset]
        A list of `RasterDataset`s to be stacked together.
        They must be under the same coordinate system,
        which means they have the same projection string and geo-transformation array,
        and with the the same size (which means they have the same number of columns and rows),
        and the data must be with the same dtype.
    skip_check : bool, optional
        Whether to skip the coordinate/size/dtype compatibility check,
        by default False not to skip.
        If you are sure the provided list of `RasterDataset`s are good to be stacked,
        you can skip the check for better performance.

    Returns
    -------
    RasterDataset
        The stacked `RasterDataset`.

    Raises
    ------
    ValueError
        Raised when `lst_rst` is empty,
        or when the list of `RasterDataset` are not compatible to be stacked.
    """
    # ROBUSTNESS FIX: an empty list previously fell into the `< 2` branch and
    # crashed with a bare IndexError on `lst_rst[0]`; fail loudly instead.
    if not lst_rst:
        raise ValueError("`lst_rst` must contain at least one `RasterDataset`.")
    if len(lst_rst) == 1:
        logger.info("Only 1 `RasterDataset` provided, so just return the same thing.")
        return lst_rst[0]

    def _compatible_meta(meta1: SceneMeta, meta2: SceneMeta):
        # Two scenes are stackable only when they share grid size,
        # projection, and geo-transform.
        if meta1.n_cols != meta2.n_cols:
            return False
        if meta1.n_rows != meta2.n_rows:
            return False
        if meta1.prj_wkt != meta2.prj_wkt:
            return False
        if meta1.geotrans != meta2.geotrans:
            return False
        return True

    if not skip_check:
        tpl_rst = lst_rst[0]
        for rst_tmp in lst_rst[1:]:
            if not _compatible_meta(tpl_rst.meta, rst_tmp.meta):
                raise ValueError(
                    "All input must in the same coordinate system and with the same number of cols/rows."
                )
            if tpl_rst.data.dtype != rst_tmp.data.dtype:
                raise ValueError("All input must with the same dtype.")

    # Concatenate along the band axis and merge per-band metadata.
    data = np.concatenate([rst.data for rst in lst_rst])
    total_bands = sum(rst.meta.n_bands for rst in lst_rst)
    all_value_interpretations = [
        vi for rst in lst_rst for vi in rst.meta.value_interpretations
    ]
    meta = lst_rst[0].meta.update(
        n_bands=total_bands, value_interpretations=all_value_interpretations
    )
    return RasterDataset.from_ndarray(data, meta)


@njit
def _get_possible_bounding_coords(nda_alpha, geotrans):
    """get all possible bounding point geographic coordinates
    given a numpy array and corresponding geo-transform

    For each row, the first valid pixel from the left contributes two points
    on its LEFT edge (top-down scan), and the last valid pixel from the right
    contributes two points on its RIGHT edge (bottom-up scan), yielding a
    counter-clockwise-ish ring of candidate boundary coordinates.

    Compiled with numba's `@njit`, so only numba-supported constructs are used.

    Parameters: `nda_alpha` is a 2-D mask where values > 0 mark valid pixels;
    `geotrans` is a GDAL-style 6-element geo-transform.

    NOTE Each pixel should have 2 bounding points in geographic
    coordinate system. 1 point is not enough.
    """

    lst_coords = []
    n_rows, n_cols = nda_alpha.shape
    gt = geotrans
    # Pixel sizes; gt[5] is presumably negative for north-up rasters -- TODO confirm.
    x_res, y_res = gt[1], gt[5]

    # upper -> down, left side
    # NOTE Add 2 geographic points on the LEFT boundary of the first valid
    # pixel in each row; `break` stops the scan at that first hit.
    for i in range(n_rows):
        for j in range(n_cols):
            if nda_alpha[i, j] > 0:
                # Pixel's upper-left corner via the full affine transform.
                x = gt[0] + j * gt[1] + i * gt[2]
                y = gt[3] + j * gt[4] + i * gt[5]
                # Skip an exact duplicate of the previously appended point.
                if len(lst_coords) == 0 or (x, y) != lst_coords[-1]:
                    lst_coords.append((x, y))
                lst_coords.append((x, y + y_res))
                break

    # down -> upper, right side
    # NOTE Add 2 geographic points on the RIGHT boundary of the last valid
    # pixel in each row, scanning rows bottom-up and columns right-to-left.
    for i in range(n_rows - 1, 0 - 1, -1):
        for j in range(n_cols - 1, 0 - 1, -1):
            if nda_alpha[i, j] > 0:
                x = gt[0] + j * gt[1] + i * gt[2]
                y = gt[3] + j * gt[4] + i * gt[5]
                # Skip an exact duplicate of the previously appended point.
                if len(lst_coords) == 0 or (x + x_res, y + y_res) != lst_coords[-1]:
                    lst_coords.append((x + x_res, y + y_res))
                lst_coords.append((x + x_res, y))
                break
    return lst_coords
