import datetime
import random
from multiprocessing.pool import ThreadPool
from typing import Dict, List, Literal, Optional, Tuple

import numpy as np

from catalog.models.sentinel2_l2a import Sentinel2L2AScene
from catalog.raster.raster import retrieve_by_item
from catalog.raster.strategy_s2decloud_v1 import VALS_TO_MASK
from common.geoimage.scene_meta import SourceWindow


def datetime_index_selection(
    time_seq: List[datetime.datetime],
    valid_mask_seq: List[bool],
    date_range_list: List[Tuple[datetime.datetime, datetime.datetime]],
    ignore_year: bool = True,
    seed: int = 1,
) -> List[Optional[int]]:
    """
    Given time sequence and date range list, select one index from time_seq
    for each date range if at least one valid observation falls inside it.
    Otherwise, None is returned at the corresponding location.

    Both time_seq and date_range_list should be in non-decreasing order, and
    the date ranges must be seamless (each range starts where the previous
    one ends).

    Parameters
    ----------
    time_seq : List[datetime.datetime]
        Time sequence to be selected from. If multiple valid times locate in
        the same date range, one of them is selected uniformly at random
        (reservoir sampling; deterministic for a fixed ``seed``).
    valid_mask_seq : List[bool]
        The array denoting whether the value of a specific date is valid
        based on its SCL value. Invalid dates are never selected.
    date_range_list : List[Tuple[datetime.datetime, datetime.datetime]]
        A list of [start, end) date ranges. This range is generated based on
        the "delta_date" parameter in model_config.py, e.g., if delta_date is
        30 and the sample is from 2023, date_range_list should be a list of
        30-day periods in 2023.
    ignore_year : bool, optional
        Whether to ignore the year of data. If set to True, we will use the
        day of year to evaluate the time sequence and date_range_list,
        ignoring which year the time_seq and date_range_list locate in.
        If set to False, then when time_seq and date_range_list are in
        different years, no index will be selected.
        For most of training time, this trigger should be True, unless you
        are extremely sure the year of time_seq and date_range_list are the
        same.
    seed : int, optional
        Seed for the local random generator, by default 1.

    Returns
    -------
    result_list : List[Optional[int]]
        The length is the same as date_range_list.
        For each date range, which index will be chosen from time_seq, or
        None when no valid observation falls inside it.
    """
    # Handle trivial inputs up front; the scanning loop below assumes both
    # sequences are non-empty (the original crashed with IndexError here).
    if not date_range_list:
        return []
    if not time_seq:
        return [None] * len(date_range_list)

    # Local RNG instead of random.seed(): do not disturb global random state.
    rng = random.Random(seed)
    time_seq = time_seq.copy()
    valid_mask_seq = valid_mask_seq.copy()
    date_range_list = date_range_list.copy()

    # to ensure dates are incremental (required by the single forward scan)
    for idx in range(len(time_seq) - 1):
        assert (
            time_seq[idx + 1] >= time_seq[idx]
        ), "Please make sure dates should be in non-decreasing order"
    # to ensure the target date range is seamless
    for idx in range(len(date_range_list) - 1):
        assert (
            date_range_list[idx + 1][0] == date_range_list[idx][1]
        ), "The date range contains inconsistent time period. Please check."

    if ignore_year:
        # Convert both sequences to integer day offsets from Jan 1 of their
        # own first year. NOTE(review): offsets can drift by one day across
        # leap years -- presumably acceptable for coarse (e.g. 30-day) ranges.
        time_seq_start_date = datetime.datetime(time_seq[0].year, 1, 1)
        for idx_time_seq in range(len(time_seq)):
            time_seq[idx_time_seq] = (time_seq[idx_time_seq] - time_seq_start_date).days

        date_range_start_date = datetime.datetime(date_range_list[0][0].year, 1, 1)
        for idx_date_range in range(len(date_range_list)):
            date_s, date_e = date_range_list[idx_date_range]
            date_range_list[idx_date_range] = [
                (date_s - date_range_start_date).days,
                (date_e - date_range_start_date).days,
            ]

    result_list: List[Optional[int]] = [None] * len(date_range_list)
    cur_time_seq_idx = 0
    # Skip observations that fall before the first date range.
    while (
        cur_time_seq_idx < len(time_seq)
        and time_seq[cur_time_seq_idx] < date_range_list[0][0]
    ):
        cur_time_seq_idx += 1
    for date_range_idx, (date_s, date_e) in enumerate(date_range_list):
        # Reservoir sampling of size 1: the k-th valid candidate replaces the
        # current pick with probability 1/k, which yields a uniform choice
        # among all valid candidates in this range.
        candidate_cnt = 0
        while (
            cur_time_seq_idx < len(time_seq)
            and date_s <= time_seq[cur_time_seq_idx] < date_e
        ):
            if valid_mask_seq[cur_time_seq_idx]:
                # Bug fix: the candidate counter must grow for EVERY valid
                # candidate, not only when a replacement happens; otherwise
                # later candidates are over-weighted.
                candidate_cnt += 1
                if rng.random() < 1.0 / candidate_cnt:
                    result_list[date_range_idx] = cur_time_seq_idx

            cur_time_seq_idx += 1

    return result_list


def tile_data_extraction(
    items: List[Sentinel2L2AScene],
    asset_names: List[str],
    source_window: SourceWindow,
    des_res,
    thread_num: int = 15,
) -> List[np.ndarray]:
    """
    Extract raster data based on item and source window.
    A list of 3D arrays will be returned with shape [C, H, W].
        C: Channels based on asset_names.
        H: Height of the patch.
        W: Width of the patch.
    The list order is the same as input items.

    Parameters
    ----------
    items : List[Sentinel2L2AScene]
        Scenes to extract patches from.
    asset_names : List[str]
        Same definition as Sentinel2L2AScene.
    source_window : SourceWindow
        Patch window to be extracted.
    des_res : _type_
        Destination resolution.
    thread_num : int, optional
        Thread number to be used, by default 15.

    Returns
    -------
    List[np.ndarray]
        A list of 3D arrays with shape [C, H, W].

    Raises
    ------
    RuntimeError
        If every extraction job failed (each failure is printed via the
        pool's error_callback).
    """
    # ThreadPool(0) raises ValueError, so bail out early on empty input.
    if not items:
        return []

    tmp_result = []
    pool = ThreadPool(min(thread_num, len(items)))
    try:
        for idx, item in enumerate(items):
            pool.apply_async(
                retrieve_rst_patch_with_jobid,
                args=[item, asset_names, source_window, idx, des_res],
                callback=tmp_result.append,
                error_callback=print,
            )
        pool.close()
        pool.join()
    finally:
        # Release worker threads even if submission raised mid-loop.
        pool.terminate()

    if not tmp_result:
        # Every job hit error_callback; zip(*[]) below would fail with a
        # confusing unpack error, so raise something actionable instead.
        raise RuntimeError("tile_data_extraction: all extraction jobs failed")

    # Jobs complete out of order; restore the original item order by job id.
    tmp_result.sort(key=lambda pair: pair[1])
    rst_arr_list = [rst_arr for rst_arr, _ in tmp_result]

    return rst_arr_list


def tile_data_extraction_intervals(
    items: List[Sentinel2L2AScene],
    asset_names: List[str],
    source_window: SourceWindow,
    temporal_list: List[Tuple[datetime.datetime, datetime.datetime]],
    des_res: float,
    thread_num: int = 15,
) -> Tuple[List[np.ndarray], List[np.ndarray]]:
    """
    Extract list of rasters based on given items, asset_names, source_window and temporal_list.
    For each start_date, end_date in temporal_list, all items in the same temporal window will
    be used to composite a raster, based on SCL cloud related mask.

    Parameters
    ----------
    items : List[Sentinel2L2AScene]
        Items to be used for data extraction.
    asset_names : List[str]
        Asset names to be used for Sentinel-2 data
    source_window : SourceWindow
        Window to be used.
    temporal_list : List[Tuple[datetime.datetime, datetime.datetime]]
        Date range list.
    des_res : float
        Resolution for the extracted patch
    thread_num : int, optional
        Thread number to be used, by default 15

    Returns
    -------
    List[np.ndarray]
        list of raster patch, sorted by item start_datetime
    """

    tmp_result = []
    pool = ThreadPool(min(thread_num, len(temporal_list)))
    items = sorted(items, key=lambda x: x.start_datetime)

    item_idx = 0
    for idx, (date_s, date_e) in enumerate(temporal_list):
        tmp_item_list = []
        while item_idx < len(items) and items[item_idx].start_datetime < date_e:
            if date_s <= items[item_idx].start_datetime < date_e:
                tmp_item_list.append(items[item_idx])

            item_idx += 1

        #     for item in tmp_item_list:
        #         export = retrieve_by_item(
        #         item, asset_names=asset_names, dest_res=10
        # )
        #         export.to_geotiff(f"/NAS6/Members/linchenxi/projects/crop_recognition/inference/innerMongolia/model_4_test/{item.uid}.tif")

        pool.apply_async(
            retrieve_composite_decloud_with_jobid,
            args=[tmp_item_list, asset_names, source_window, idx, des_res, "median"],
            callback=tmp_result.append,
            error_callback=print,
        )

    pool.close()
    pool.join()

    tmp_result = sorted(tmp_result, key=lambda x: x[0])
    _, rst_arr_list, valid_mask_list = zip(*tmp_result)

    return rst_arr_list, valid_mask_list


def retrieve_composite_decloud_with_jobid(
    items: List[Sentinel2L2AScene],
    asset_names: List[str],
    source_window: SourceWindow,
    job_idx: int,
    dest_res: Optional[float] = None,
    mode: Literal["median", "incremental"] = "incremental",
):
    """
    Retrieve composite result given items.
    In this algorithm, we fill the holes with data that is not covered by
    the cloud: pixels whose SCL value is in VALS_TO_MASK are treated as
    invalid per scene.

    Parameters
    ----------
    items : List[Sentinel2L2AScene]
        List of Sentinel2L2AScene items.
    asset_names : List[str]
        Which assets to use for Sentinel 2 rasters.
    source_window : SourceWindow
        Patch location and size.
    job_idx : int
        Job id, used for data combination after returning.
    dest_res : float, optional
        Generated patch resolution, by default None.
    mode : Literal["median", "incremental"], optional
        "incremental": fill still-empty pixels with the first clear
        observation. "median": per-pixel nan-median over the clear pixels of
        all scenes. By default "incremental".

    Returns
    -------
    Tuple[int, np.ndarray, np.ndarray]
        (job_idx, composite array with shape [C, H, W], boolean mask of
        pixels filled so far with shape [H, W]).
        NOTE(review): in "median" mode the mask is never updated and is
        returned all-False -- confirm whether callers rely on it.
    """
    dummy_result = np.zeros(
        (len(asset_names), source_window.y_size, source_window.x_size),
        dtype=np.float32,
    )

    # np.bool_ replaces np.bool8, which was removed in NumPy 1.24.
    cur_valid_mask = np.zeros(
        (source_window.y_size, source_window.x_size), dtype=np.bool_
    )
    if len(items) == 0:
        return job_idx, dummy_result, cur_valid_mask

    cur_arr_lst = []
    for item in items:
        try:
            cur_scl = retrieve_by_item(
                item, asset_names=["SCL"], window=source_window, dest_res=dest_res
            ).data

            # Clear-sky pixels: SCL value NOT in the cloud-related set.
            valid_mask = ~np.squeeze(
                np.isin(cur_scl, VALS_TO_MASK),
            )

            # Pixels that are still unfilled AND clear in this scene.
            selected_mask = (~cur_valid_mask) & valid_mask
            if not np.any(selected_mask) and mode == "incremental":
                continue
            cur_arr = retrieve_by_item(
                item, asset_names=asset_names, window=source_window, dest_res=dest_res
            ).data.astype(np.float32)
        except Exception:
            # Best-effort per item: a failed retrieval contributes nothing
            # ("incremental") or an all-NaN layer ("median") instead of
            # aborting the whole composite. (Was a bare `except:`, which also
            # swallowed KeyboardInterrupt/SystemExit.)
            selected_mask = np.zeros(
                (source_window.y_size, source_window.x_size), dtype=np.bool_
            )
            cur_arr = np.full(
                (len(asset_names), source_window.y_size, source_window.x_size),
                np.nan,
                dtype=np.float32,
            )
        if mode == "incremental":
            dummy_result[:, selected_mask] = cur_arr[:, selected_mask]
            cur_valid_mask[selected_mask] = True
        else:
            # "median": keep only clear pixels (NaN elsewhere) and defer the
            # per-pixel nan-median until all scenes are collected.
            cur_arr[:, ~selected_mask] = float("nan")
            cur_arr_lst.append(cur_arr)
    if mode == "median":
        # nanmedian emits a benign RuntimeWarning for all-NaN pixel stacks;
        # those pixels come out as NaN and are zeroed below.
        dummy_result = np.nanmedian(np.array(cur_arr_lst), axis=0)
        dummy_result[np.isnan(dummy_result)] = 0

    return job_idx, dummy_result, cur_valid_mask


def retrieve_rst_patch_with_jobid(
    item: Sentinel2L2AScene,
    asset_names: List[str],
    source_window: SourceWindow,
    job_idx: int,
    dest_res=None,
) -> Tuple[np.ndarray, int]:
    """
    Fetch one raster patch for *item* and tag it with its job index.

    Thin wrapper around retrieve_by_item, used by the thread-pool fan-out in
    tile_data_extraction so results can be re-ordered after completion.

    Parameters
    ----------
    item : Sentinel2L2AScene
        Scene to extract the patch from.
    asset_names : List[str]
        Same as defined in Sentinel2L2AScene.
    source_window : SourceWindow
        Patch source window.
    job_idx : int
        Job index, used to define the order.
    dest_res : _type_, optional
        Destination resolution of raster, by default None.

    Returns
    -------
    Tuple[np.ndarray, int]
        The extracted nd array and the job id.
    """
    patch = retrieve_by_item(
        item, asset_names=asset_names, window=source_window, dest_res=dest_res
    )
    return patch.data, job_idx


def filter_enduser_item_list(
    item_lists: List[Sentinel2L2AScene],
) -> List[Sentinel2L2AScene]:
    """
    Given item list, filter out same data with different end users.
    Only the latest end user product (greatest ``processed`` timestamp) is
    preserved for each distinct ``start_datetime``; ties keep the first
    occurrence.

    Parameters
    ----------
    item_lists : List[Sentinel2L2AScene]
        List of items to be taken into consideration.

    Returns
    -------
    List[Sentinel2L2AScene]
        Deduplicated items, sorted by start_datetime.
    """
    latest_by_time: Dict[datetime.datetime, Sentinel2L2AScene] = {}
    for candidate in item_lists:
        kept = latest_by_time.get(candidate.start_datetime)
        # Replace only on a strictly newer processing time, so the first
        # occurrence wins on ties (same as the original loop).
        if kept is None or candidate.processed > kept.processed:
            latest_by_time[candidate.start_datetime] = candidate

    return sorted(latest_by_time.values(), key=lambda itm: itm.start_datetime)
