import pickle
import geopandas as gpd
from typing import Tuple
import os

import numpy as np
import datetime
from multiprocessing.pool import ThreadPool
from tqdm import tqdm as tqdm
from common.geometry.geom import Geom
from analytics.crop_recognition.data_utils.extract_time_series_data import (
    extract_point_data,
)
from analytics.crop_recognition.data_utils.location_utils import (
    get_lon_lat_from_tile_position,
    get_s2_unique_location_id,
)

from analytics.crop_identifier_system.crop_sys_common.ChannelOrder import ASSET_NAMES
from catalog.place import PlaceRecord


def extract_and_save_point_data(
    tmp_geom: Geom,
    year: int,
    dest_filepath: str,
    skip_exist: bool = True,
) -> None:
    """Extract the per-point time series for ``tmp_geom`` and pickle it.

    The temporal window covers the full label year plus the first half of
    the following year (Jan 1 of *year* through Jul 1 of *year* + 1).

    Args:
        tmp_geom: point geometry to sample.
        year: label year; also determines the extraction window.
        dest_filepath: path of the output pickle file.
        skip_exist: if True and ``dest_filepath`` already exists, return
            without doing any work; otherwise the existing file is replaced.
    """
    if os.path.exists(dest_filepath):
        if skip_exist:
            return
        os.remove(dest_filepath)

    # 18-month window: the label year plus the first half of the next year.
    temporal = (datetime.datetime(year, 1, 1), datetime.datetime(year + 1, 7, 1))

    data_val, scl_val, data_time = extract_point_data(tmp_geom, temporal)

    payload = {
        "geom": tmp_geom._shapely_geom,
        "asset_names": ASSET_NAMES,
        "year": year,
        "datetime": np.array(data_time),
        "channel_val": data_val,
        "scl_val": scl_val,
    }

    with open(dest_filepath, "wb") as f:
        pickle.dump(payload, f)


if __name__ == "__main__":
    save_path = "/NAS6/Members/linchenxi/projects/crop_recognition/test"
    gpkg_root = "/NAS6/Members/linchenxi/projects/crop_recognition/dataset/shandong_area/processed"
    os.makedirs(save_path, exist_ok=True)

    # Country boundary used to discard sample points that fall outside China.
    china_geom: Geom = PlaceRecord.query_many_items(
        target_name="China", region_level=0
    )[0].geom

    # Collect every .gpkg file that lives under a "processed" directory.
    filepath_list = []
    for root, _, filenames in os.walk(gpkg_root):
        for filename in filenames:
            if "processed" in root and filename.endswith(".gpkg"):
                # if you do not want to generate pickle files for all gpkg in the folder, uncomment the following code and specify the gpkg noqa:E501
                # if filename not in [
                #     "Heilongjiang_all_2018.gpkg",  # noqa:E501
                #     "Heilongjiang_all_2019.gpkg",  # noqa:E501
                # ]:
                #     continue
                filepath_list.append(os.path.join(root, filename))

    for data_path in filepath_list:
        print("convert data from:\n{}\nto:\n{}".format(data_path, save_path))
        gdf = gpd.read_file(data_path)
        visited_set = set()
        pbar = tqdm(total=len(gdf))

        def update_pbar(arg=None):
            # Reads the module-level ``pbar`` at call time, so it always
            # advances the most recently created progress bar.
            pbar.update(1)

        pool = ThreadPool(1)

        def schedule_point(point, year):
            """Snap ``point`` to its S2 tile position and queue extraction.

            Skips points outside China and destinations that were already
            scheduled in this run. NOTE: uses ``tile_row``/``tile_col`` so
            the outer ``row`` (the GeoDataFrame row) is never shadowed.
            """
            tmp_geom = Geom.from_shapely(point)
            tileid, tile_row, tile_col = get_s2_unique_location_id(tmp_geom)
            # Replace the raw point with the canonical tile-grid position.
            tmp_geom = get_lon_lat_from_tile_position(tileid, tile_row, tile_col)

            if not china_geom.intersects(tmp_geom):
                update_pbar()
                return

            cur_save_folder = os.path.join(save_path, tileid, str(year))
            os.makedirs(cur_save_folder, exist_ok=True)

            dest_filepath = os.path.join(
                cur_save_folder, f"{tileid}_{tile_row}_{tile_col}_{year}.p"
            )
            if dest_filepath in visited_set:
                update_pbar()
                return
            visited_set.add(dest_filepath)

            pool.apply_async(
                extract_and_save_point_data,
                args=[tmp_geom, year, dest_filepath, True],
                callback=update_pbar,
                error_callback=print,
            )

        for _, row in gdf.iterrows():
            year = int(row["year"])
            # A MultiPoint row may contain many points; give it its own
            # progress bar sized by the number of member points.
            if row.geometry.geom_type == "MultiPoint":
                pbar = tqdm(total=len(row.geometry.geoms))
                for point in row.geometry.geoms:
                    schedule_point(point, year)
            else:
                schedule_point(row.geometry, year)

        pool.close()
        pool.join()
