import datetime
import utm
import numpy as np
import multiprocessing as mp
from typing import List, Tuple
from common.logger import logger
from common.geoimage.raster_dataset import iter_image_by_block_with_buffer

from shapely.geometry import Point

from catalog.models.base_model import init_db
from common.geoimage.raster_dataset import RasterDataset
from common.geoimage.scene_meta import SceneMeta, SourceWindow
from common.geometry.geom import Geom
from catalog.models.mgrs import MGRSRecord
from catalog.models.sentinel2_l2a import Sentinel2L2AScene
from catalog.place.place_api import PlaceAPI
from catalog.raster.raster import retrieve_by_item

from analytics.crop_recognition.common_settings.data_config import (
    S2_CHANNEL_LIST,
)
from common.img_utils.img_shape import BBoxInfo

s2_collection_id = "COPERNICUS/SENTINEL2/L2A"
ASSET_NAMES = [name.split("_")[1] for name in S2_CHANNEL_LIST]


def extract_point_data(
    lon_lat_geom: Geom, date_range: Tuple[datetime.datetime, datetime.datetime]
) -> Tuple[np.ndarray, np.ndarray, np.ndarray]:
    """
    Extract time series data based on given location geom and date range.

    Parameters
    ----------
    lon_lat_geom : Geom
        Point geometry (lon/lat) at which the time series is sampled.
    date_range : Tuple[datetime.datetime, datetime.datetime]
        Start date and End date of the datetime.

    Returns
    -------
    Tuple[np.ndarray, np.ndarray, np.ndarray]
        raster numpy array, scl numpy array, time acquired array.

    Raises
    ------
    ValueError
        If an MGRS tile carries a non-UTM EPSG code, or if no valid scene
        was found in the given date range.
    """
    mgrs_list = sorted(PlaceAPI.get_mgrs_set_by_geom(lon_lat_geom))

    lon = lon_lat_geom._shapely_geom.x
    lat = lon_lat_geom._shapely_geom.y

    # Maps (row, col) pixel position within a tile -> scenes covering it.
    item_dict = {}

    for tileid in mgrs_list:
        MGRSRecord.init_connection()
        mgrs_instance: MGRSRecord = MGRSRecord.get_by_id(tileid)
        MGRSRecord.close_connection()

        mgrs_meta = SceneMeta.from_dict(
            mgrs_instance.get_mgrs_metadict_with_resolution()
        )
        epsg_num = mgrs_meta.prj_epsg_no
        zone_number = epsg_num % 100
        # UTM EPSG codes: 326xx = northern hemisphere, 327xx = southern.
        if epsg_num // 100 == 326:
            zone_letter = "N"
        elif epsg_num // 100 == 327:
            zone_letter = "S"
        else:
            # Previously this fell through and reused the prior iteration's
            # zone_letter (or raised NameError on the first tile); fail loudly.
            raise ValueError(
                f"Unexpected non-UTM EPSG code {epsg_num} for tile {tileid}"
            )

        # Invert the tile's geo-transform to map UTM coords to pixel coords.
        affine = np.array(mgrs_meta.affine).reshape(3, 3)
        affine_inv = np.linalg.inv(affine)

        east, north, _, _ = utm.from_latlon(lat, lon, zone_number, zone_letter)
        pos_tmp = affine_inv @ np.array([[east], [north], [1]])
        col = int(np.floor(pos_tmp[0, 0]))
        row = int(np.floor(pos_tmp[1, 0]))

        items = Sentinel2L2AScene.query_many_items(
            temporal=date_range, sort_field="end_datetime", mgrs_tile=tileid
        )
        item_dict.setdefault((row, col), []).extend(items)

    # Group single-pixel reads by capture date so results can be emitted in
    # chronological order even when they come from multiple MGRS tiles.
    datetime_dict = {}
    item: Sentinel2L2AScene
    for (row, col), item_list in item_dict.items():
        for item in item_list:
            rst = retrieve_by_item(
                item,
                asset_names=ASSET_NAMES,
                window=SourceWindow(col, row, 1, 1),
                resample_alg="bilinear",
            )
            if rst is None:
                continue

            # Scene classification layer, nearest-neighbour resampled to 10m
            # so it stays aligned with the spectral bands.
            scl_rst = retrieve_by_item(
                item,
                asset_names=["SCL"],
                window=SourceWindow(col, row, 1, 1),
                dest_res=10,
                resample_alg="near",
            )

            capture_date = item.end_datetime
            datetime_dict.setdefault(capture_date, []).append(
                (np.squeeze(rst.data), np.squeeze(scl_rst.data))
            )

    rst_arr = []
    scl_arr = []
    time_arr = []
    for datetime_key in sorted(datetime_dict):
        for cur_rst_data, cur_scl_data in datetime_dict[datetime_key]:
            rst_arr.append(cur_rst_data)
            scl_arr.append(cur_scl_data)
            time_arr.append(datetime_key)

    if not rst_arr:
        # np.stack on an empty list would raise a cryptic ValueError.
        raise ValueError("No valid scenes found for the given point and date range.")

    return np.stack(rst_arr, axis=0), np.array(scl_arr), np.array(time_arr)


def extract_polygon_data(polygon_geom, date_range):
    """
    Extract 4D datapatch based on given polygon.

    # TODO: Currently not fully finished. Because the same tile may have more
    than one item at the same time due to different end use. Will need to be
    updated using a dictionary, like function extract_point_data.

    Parameters
    ----------
    polygon_geom : Geom
        AoI Geom
    date_range : Tuple[datetime.datetime, datetime.datetime]
        start date (inclusive) and end date (exclusive) of interest. All data between
        the two dates will be extracted and merged.

    Returns
    -------
    Tuple[np.ndarray, List[datetime.datetime]]
        Stacked array (first axis is time) and the matching capture times,
        sorted chronologically.

    Raises
    ------
    ValueError
        If no valid scene was retrieved in the given date range.
    """
    Sentinel2L2AScene._meta.database = init_db()
    Sentinel2L2AScene._meta.database.connect()
    mgrs_list = sorted(PlaceAPI.get_mgrs_set_by_geom(polygon_geom))
    # Use the first covering tile's projection as the common destination SRS.
    mgrs_item: MGRSRecord = MGRSRecord.get_by_id(mgrs_list[0])
    dest_meta = SceneMeta.from_dict(mgrs_item.get_mgrs_metadict_with_resolution())
    dest_srs = dest_meta.prj_epsg

    query, _ = Sentinel2L2AScene.construct_query(polygon_geom, date_range)
    item: Sentinel2L2AScene
    items: List[Sentinel2L2AScene] = sorted(query, key=lambda x: x.end_datetime)

    arr_list = []
    time_list = []
    for item in items:
        rst = retrieve_by_item(
            item,
            asset_names=ASSET_NAMES,
            aoi=polygon_geom,
            resample_alg="bilinear",
            crop_to_aoi=True,
            dest_srs=dest_srs,
        )
        if rst is None:
            # Same guard as extract_point_data: skip scenes that could not be
            # retrieved instead of failing on rst.data below.
            continue

        arr_list.append(rst.data)
        time_list.append(item.end_datetime)

    if not arr_list:
        # np.stack on an empty list would raise a cryptic ValueError.
        raise ValueError("No valid scenes found for the given polygon and date range.")

    arr_result = np.stack(arr_list, axis=0)
    return arr_result, time_list


def retrieve_rst_patch_with_time(item: Sentinel2L2AScene, source_window, dest_res=None):
    """
    Retrieve a raster patch for one scene, paired with its capture time.

    Returning a (raster, end_datetime) tuple lets results from parallel
    workers be sorted chronologically afterwards.
    """
    patch = retrieve_by_item(
        item,
        asset_names=ASSET_NAMES,
        window=source_window,
        dest_res=dest_res,
    )
    return patch, item.end_datetime


def tile_data_generator(tileid, date_range, patch_size, des_res=10, process_num=10):
    """
    Yield stacked time-series patches for a full MGRS tile, block by block.

    Parameters
    ----------
    tileid : str
        MGRS tile id, e.g. "48RVV".
    date_range : Tuple[datetime.datetime, datetime.datetime]
        Temporal query window.
    patch_size : int
        Block size (rows and cols) used to walk the 10980x10980 tile grid.
        Should be chosen so one block of all scenes fits in memory.
    des_res : int, optional
        Destination resolution forwarded to retrieve_by_item, by default 10.
    process_num : int, optional
        Upper bound on the number of worker processes, by default 10.

    Yields
    ------
    Tuple[np.ndarray, tuple, SceneMeta]
        Stacked array (first axis is time, sorted chronologically), the
        matching capture times, and a single-band meta for the patch.
    """
    query, _ = Sentinel2L2AScene.construct_query(temporal=date_range, mgrs_tile=tileid)
    query = list(query)

    if not query:
        logger.warning("No scenes found for tile %s in the given date range.", tileid)
        return None

    blk_valid_info: BBoxInfo
    for blk_valid_info, _ in iter_image_by_block_with_buffer(
        10980,
        10980,
        patch_size,
        patch_size,
        boundary_treatment="shrink",
        buffer_ncols=0,
        buffer_nrows=0,
    ):
        source_window = SourceWindow(
            blk_valid_info.start_col,
            blk_valid_info.start_row,
            blk_valid_info.end_col - blk_valid_info.start_col,
            blk_valid_info.end_row - blk_valid_info.start_row,
        )
        # Use the module logger (previously a bare print with a typo).
        logger.info(
            "Using source window: %s %s %s %s",
            source_window.x_off,
            source_window.y_off,
            source_window.x_size,
            source_window.y_size,
        )

        tmp_result = []
        # Context manager guarantees the pool is terminated even if a
        # retrieval raises; close()/join() preserve the original semantics
        # of waiting for all pending tasks.
        with mp.Pool(min(process_num, len(query))) as pool:
            for item in query:
                pool.apply_async(
                    retrieve_rst_patch_with_time,
                    args=[item, source_window, des_res],
                    callback=tmp_result.append,
                )
            pool.close()
            pool.join()

        # Sort by capture time so the stacked axis is chronological.
        tmp_result.sort(key=lambda x: x[1])
        rst_list: List[RasterDataset]
        rst_list, time_list = zip(*tmp_result)
        rst_meta = rst_list[0].meta.update(n_bands=1, value_interpretations=None)
        stack_arr = np.stack([rst.data for rst in rst_list], axis=0)

        yield (stack_arr, time_list, rst_meta)


if __name__ == "__main__":
    # Extract time series channel values given location and temporal information.
    temporal = (datetime.datetime(2018, 1, 1), datetime.datetime(2018, 5, 1))
    # extract_point_data returns three arrays (raster, SCL, times); the old
    # two-name unpacking raised "too many values to unpack".
    arr_result, scl_result, time_arr = extract_point_data(
        Geom.from_shapely(Point(-122.182448, 39.635437)), temporal
    )

    # Extract time series channel value given polygon and temporal information. The polygon should be
    # small enough so it won't exceed memory limit.
    test_polygon_path = (
        "/media/workspace/chendu/data/temp_folder/test_polygon/test_polygon.geojson"
    )
    test_geom = Geom.from_geojson_fpath(test_polygon_path)

    arr_result, time_list = extract_polygon_data(test_geom, temporal)

    # Extract time series channel array, meta, and time list given tileid and patch_size.
    # patch size should be carefully selected so that it won't be out of memory.
    # tile_data_generator yields (array, time_list, meta) in that order; the
    # old unpacking swapped meta and time.
    for test_arr, test_time, test_meta in tile_data_generator("48RVV", temporal, 4000):
        print(test_arr.shape, test_meta, test_time)
