import datetime
import multiprocessing as mp
from typing import List, Optional, Tuple

import numpy as np
import torch
from torch.utils.data.dataloader import DataLoader
from torch.utils.data.dataset import Dataset

import analytics.crop_recognition.common_settings.model_config as config
from analytics.crop_identifier_system.crop_sys_common.DateRangeGenerator import (
    DateRangeGenerator,
)
from analytics.crop_recognition.common_settings.data_config import (
    ASSET_NAMES,
    S2_STATS_MEAN,
    S2_STATS_STD,
    VI_STATS_MEAN,
    VI_STATS_STD,
    S2_ORI_RANGE,
    S2_TGT_RANGE,
    SCL_INVALID_MASK,
)
from analytics.crop_recognition.data_utils.data_extraction_utils import (
    tile_data_extraction_intervals,
    filter_enduser_item_list,
)
from catalog.models.mgrs import MGRSRecord
from catalog.models.sentinel2_l2a import Sentinel2L2AScene
from common.geoimage.raster_dataset import iter_image_by_block_with_buffer
from common.geoimage.scene_meta import SceneMeta, SourceWindow
from common.geometry.geom import Geom
from common.geometry.srs import mgrs_tile_to_epsg
from common.geometry.tools import project_geom
from common.rs_utils.derived import compute_ndxi, ndxi_collection


# Collection identifier for Sentinel-2 L2A scenes. Unused in this module's
# visible code — presumably imported by other modules; verify before removing.
s2_collection_id = "COPERNICUS/SENTINEL2/L2A"


class TileDataset(Dataset):
    """
    Map-style dataset over the patch windows of one MGRS tile.

    Each sample is a temporally composited Sentinel-2 L2A pixel stack for one
    patch window, optionally augmented with derived vegetation indices
    (when ``config.feature == "optical_vi"``).
    """

    def __init__(
        self,
        tileid: str,
        temporal: Tuple[datetime.datetime, datetime.datetime],
        patch_size: int = 1024,
        delta_date: int = 4,
        geom_aoi: Optional[Geom] = None,
        asset_names: List[str] = ASSET_NAMES,
        dest_resolution: float = 10,
        thread_num: int = 15,
    ) -> None:
        """
        Initialization of TileDataset.

        TileDataset iterates over the blocks of the selected tile given
        ``temporal`` and ``delta_date``; the composite method is based on the
        SCL cloud mask and fills cloud-covered areas as much as possible.

        If geom_aoi is given, then only intersected patches will be taken into
        consideration. Patch source window can be retrieved by using
        get_src_window. Mgrs_tile meta can be retrieved by using
        get_dataset_meta.

        Parameters
        ----------
        tileid : str
            MGRS tile id.
        temporal : Tuple[datetime.datetime, datetime.datetime]
            Tuple of start datetime and end datetime.
        patch_size : int, optional
            Patch size to use for iterating over the raster, by default 1024.
        delta_date : int, optional
            How many days to use per composite interval, by default 4.
        geom_aoi : Geom, optional
            If given, only intersecting blocks will be taken into
            consideration (WGS84 / EPSG:4326 expected), by default None.
        asset_names : List[str], optional
            S2 asset names, by default ASSET_NAMES. The list is only read,
            never mutated, so the shared module-level default is safe.
        dest_resolution : float, optional
            Patch resolution in meters, by default 10.
        thread_num : int, optional
            Thread number for data extraction, by default 15.
        """
        self.tileid = tileid
        self.thread_num = thread_num
        self.dest_res = dest_resolution
        self.asset_names = asset_names
        self.source_window_list: List[SourceWindow] = []

        # Tile-wide raster metadata at the requested resolution.
        self.mgrs_record: MGRSRecord = MGRSRecord.get_by_id(tileid)
        self.mgrs_meta = SceneMeta.from_dict(
            self.mgrs_record.get_mgrs_metadict_with_resolution(self.dest_res)
        )

        # Project the AOI into the tile's CRS so patch bounding boxes can be
        # intersected in the same coordinate system.
        projected_aoi_geom = None
        if geom_aoi is not None:
            dest_srs = mgrs_tile_to_epsg(self.tileid)
            projected_aoi_geom = project_geom(geom_aoi, "EPSG:4326", dest_srs)

        for blk_valid_info, _ in iter_image_by_block_with_buffer(
            self.mgrs_meta.n_rows,
            self.mgrs_meta.n_cols,
            patch_size,
            patch_size,
            boundary_treatment="shrink",
            buffer_ncols=0,
            buffer_nrows=0,
        ):
            add_flag = True

            if projected_aoi_geom is not None:
                # Shift the tile geotransform origin to the block's top-left
                # corner to derive the block's bounding box.
                x_off = blk_valid_info.start_col
                y_off = blk_valid_info.start_row
                geotrans = list(self.mgrs_meta.geotrans)
                geotrans[0] = geotrans[0] + x_off * geotrans[1] + y_off * geotrans[2]
                geotrans[3] = geotrans[3] + x_off * geotrans[4] + y_off * geotrans[5]
                meta_new = self.mgrs_meta.update(
                    n_cols=blk_valid_info.end_col - blk_valid_info.start_col,
                    n_rows=blk_valid_info.end_row - blk_valid_info.start_row,
                    geotrans=tuple(geotrans),
                )

                bbox_geom = Geom.from_bbox(meta_new.bounds)

                # Skip blocks that do not touch the AOI.
                if not bbox_geom.intersects(projected_aoi_geom):
                    add_flag = False

            if add_flag:
                self.source_window_list.append(
                    SourceWindow(
                        blk_valid_info.start_col,
                        blk_valid_info.start_row,
                        blk_valid_info.end_col - blk_valid_info.start_col,
                        blk_valid_info.end_row - blk_valid_info.start_row,
                    )
                )

        # One composite interval per delta_date days across the temporal span.
        self.daterange_list = DateRangeGenerator.generate_daterange_with_target_range(
            temporal, delta_date, drop_last=True
        )

        query = Sentinel2L2AScene.query_many_items(
            temporal=temporal,
            mgrs_tile=tileid,
            sort_field="start_datetime",
        )

        self.items = list(query)
        self.items = filter_enduser_item_list(self.items)

        self.time_len = len(self.daterange_list)
        self.channel_len = len(self.asset_names)

    def __len__(self) -> int:
        """Number of patch windows (one sample per window)."""
        return len(self.source_window_list)

    def __getitem__(self, index: int) -> Tuple[np.ndarray, np.ndarray]:
        """
        Get patch sample data based on index.

        Reflectance is linearly rescaled from S2_ORI_RANGE to S2_TGT_RANGE
        and then standardized with S2_STATS_MEAN / S2_STATS_STD. When
        ``config.feature == "optical_vi"``, standardized vegetation-index
        channels are appended along the channel axis.

        __getitem__ is a required function for pytorch Dataset Class.
        Basically get a sample based on sample index.
        For more information, check pytorch docs: Dataset

        Parameters
        ----------
        index : int
            The index of sample.

        Returns
        -------
        Tuple[np.ndarray, np.ndarray]
            Model input data of shape (N, T, C) as float32, where N is the
            number of pixels, T the number of composite intervals and C the
            number of channels, and a boolean validity mask of shape (N, T).
        """
        source_window: SourceWindow = self.source_window_list[index]
        batch_len = source_window.y_size * source_window.x_size

        rst_arr_list, valid_mask_list = tile_data_extraction_intervals(
            self.items,
            self.asset_names,
            source_window,
            self.daterange_list,
            des_res=self.dest_res,
            thread_num=self.thread_num,
        )
        stack_arr = np.stack(rst_arr_list, axis=0).astype(np.float32)
        # np.bool8 was removed in NumPy 1.24; np.bool_ is the supported alias.
        stack_mask = np.stack(valid_mask_list, axis=0).astype(np.bool_)
        stack_arr = stack_arr.reshape(self.time_len, self.channel_len, batch_len)
        stack_mask = stack_mask.reshape(self.time_len, batch_len)
        stack_arr = np.moveaxis(stack_arr, -1, 0)  # N, T, C
        stack_mask = np.moveaxis(stack_mask, -1, 0)  # N, T
        # NOTE(review): this discards the extracted validity mask and marks
        # every pixel/time step valid — looks deliberate (e.g. to let the
        # model see interpolated values) but confirm with the training code.
        stack_mask = np.ones_like(stack_mask, dtype=stack_mask.dtype)
        if config.interpolate:
            # Fill zero-valued (missing) time steps per pixel, in place.
            for i in range(stack_arr.shape[0]):
                self.interpolate(stack_arr, i)
        # Linear rescale from the sensor's original range to the target range.
        stack_arr = (stack_arr - S2_ORI_RANGE[0]) / (
            S2_ORI_RANGE[1] - S2_ORI_RANGE[0]
        ) * (S2_TGT_RANGE[1] - S2_TGT_RANGE[0]) + S2_TGT_RANGE[0]
        # Per-channel standardization.
        stack_arr = (stack_arr - np.array(S2_STATS_MEAN)) / np.array(S2_STATS_STD)
        if config.feature == "optical_vi":
            data_type = "L2A"
            metric_arr = []
            for metric in config.metrics:
                metric_config = ndxi_collection["Sentinel-2"][metric]
                if "dehaze" in data_type:
                    asset_names_vi = [
                        band + "_DEHAZE" for band in metric_config["bands"]
                    ]
                else:
                    asset_names_vi = metric_config["bands"]
                asset_index_list = self.__get_asset_index_list(
                    asset_names_vi, exact_mode=False
                )
                if asset_index_list is None:
                    # Previously a missing band yielded a None index, which
                    # numpy treats as np.newaxis — silently feeding the wrong
                    # bands to compute_ndxi. Fail loudly instead.
                    raise ValueError(
                        "Bands {} required by metric {} are not all present "
                        "in asset_names {}".format(
                            asset_names_vi, metric, self.asset_names
                        )
                    )
                stack_arr_vi = stack_arr[:, :, asset_index_list].astype(np.float32)
                derived_metric, _ = compute_ndxi(
                    np.moveaxis(stack_arr_vi, -1, 0),
                    ndxi_target=metric,
                    satellite_name="Sentinel-2",
                )
                metric_arr.append(derived_metric)
            metric_arr = np.moveaxis(np.array(metric_arr), 0, -1)
            # Standardize each VI channel with its configured statistics.
            VI_STATS_MEAN_reorg = [VI_STATS_MEAN[vi] for vi in config.metrics]
            VI_STATS_STD_reorg = [VI_STATS_STD[vi] for vi in config.metrics]
            metric_arr = (metric_arr - VI_STATS_MEAN_reorg) / VI_STATS_STD_reorg
            stack_arr = np.concatenate([stack_arr, metric_arr], axis=-1)

        return stack_arr.astype(np.float32), stack_mask

    def get_dataset_meta(self):
        """
        Get meta for the current mgrs_tile.

        Returns
        -------
        SceneMeta
            Raster metadata of the whole tile at ``dest_resolution``.
        """
        return self.mgrs_meta

    def get_src_window(self, index: int):
        """
        Get the source window for a patch.

        Parameters
        ----------
        index : int
            The index of patch in the dataset.

        Returns
        -------
        SourceWindow
            The window (x_offset, y_offset, x_size, y_size) of the patch
            within the tile raster.
        """
        return self.source_window_list[index]

    def __get_asset_index_list(
        self, cur_asset_names: List[str], exact_mode: bool = True
    ) -> Optional[List[int]]:
        """
        Map asset names to their indices in ``self.asset_names``.

        In exact mode, every dataset asset must appear in ``cur_asset_names``;
        otherwise, every requested asset must appear in the dataset's assets.
        Returns None when the containment requirement is not met.
        """
        asset_index_list = []
        if exact_mode:
            for val in self.asset_names:
                if val not in cur_asset_names:
                    return None
                asset_index_list.append(self.asset_names.index(val))
        else:
            for val in cur_asset_names:
                if val not in self.asset_names:
                    return None
                asset_index_list.append(self.asset_names.index(val))
        return asset_index_list

    def interpolate(self, stack_arr: np.ndarray, i: int) -> None:
        """
        Fill zero-valued (missing) time steps of sample ``i`` in place.

        For each interior time index t (1 <= t <= T - 3) containing a zero,
        the step is replaced by the nanmean over the window [t-1, t+3),
        with zeros in the window treated as NaN.
        """
        for missing_time in np.unique(np.where(stack_arr[i] == 0)[0]):
            if missing_time >= 1 and missing_time <= len(stack_arr[i]) - 3:
                candi_arr = stack_arr[i][missing_time - 1 : missing_time + 3]
                # NOTE(review): candi_arr is a view, so this NaN assignment
                # also writes NaN into neighboring time steps of stack_arr —
                # confirm this is intended.
                candi_arr[candi_arr == 0] = float("nan")
                stack_arr[i][missing_time] = np.nanmean(candi_arr, axis=0)


if __name__ == "__main__":
    from tqdm import tqdm
    from catalog.place import PlaceAPI

    # AOI geometry for the province; currently unused because the geom_aoi
    # argument below is commented out — kept for easy re-enabling.
    geom_aoi = PlaceAPI.get_geom_list_with_place_name(name_cn="黑龙江", region_level=1)[0]

    tileid_list = sorted(
        PlaceAPI.get_mgrs_set_with_place_name(name_cn="黑龙江", region_level=1)
    )

    temporal = (datetime.datetime(2019, 3, 1), datetime.datetime(2019, 10, 1))

    for patch_size in [1024]:
        for num_wks in [10]:
            for thread_num in [15]:
                for tileid in tileid_list:
                    tiledataset = TileDataset(
                        tileid,
                        temporal,
                        patch_size,
                        delta_date=8,
                        # geom_aoi=geom_aoi,
                        thread_num=thread_num,
                    )
                    print(len(tiledataset))

                    test_loader = DataLoader(
                        tiledataset,
                        batch_size=1,
                        num_workers=num_wks,
                        shuffle=False,
                    )
                    pbar = tqdm(total=len(test_loader))
                    # __getitem__ returns a (data, mask) tuple, so the default
                    # collate yields a pair of tensors per batch. The old code
                    # called .shape / torch.max on that pair, which raises —
                    # unpack the tuple and report on the data tensor.
                    for idx, (arr, mask) in enumerate(test_loader):
                        pbar.set_description(
                            "{}, {}, {}".format(
                                str(arr.shape),
                                torch.max(arr),
                                torch.min(arr),
                            )
                        )
                        pbar.update(1)
