import asyncio
import json
import os
import random
import uuid
from glob import glob
from typing import AsyncGenerator, Optional

import aiofiles
import nest_asyncio
from motor.motor_asyncio import AsyncIOMotorClient, AsyncIOMotorCollection
from pymongo.server_api import ServerApi
from pyproj import CRS, Proj, Transformer
from pyproj.exceptions import CRSError
from shapely.errors import GEOSException
from shapely.geometry import Polygon

# Allow re-entrant event loops so this module also works inside notebooks
nest_asyncio.apply()


async def safe_insert_many(
    collection: AsyncIOMotorCollection,
    documents: list[dict],
) -> None:
    if len(documents) > 0:
        try:
            # Attempt to insert the whole batch at once
            await collection.insert_many(documents)
        except OverflowError:
            # OverflowError means some document holds a value BSON cannot
            # encode (e.g. an integer wider than 8 bytes). Bisect the batch
            # to isolate the offending document(s).
            if len(documents) > 1:
                mid = len(documents) // 2
                # Recursively attempt to insert each half
                await safe_insert_many(collection, documents[:mid])
                await safe_insert_many(collection, documents[mid:])
            else:
                # A single document triggered the error; surface it clearly
                raise ValueError(
                    "A document contains a value that cannot be encoded as BSON "
                    "(e.g. an out-of-range integer).",
                )
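
# A minimal usage sketch (hypothetical collection and documents; assumes an
# open AsyncIOMotorClient like the one created in main() below):
#
#     docs = [{"id": create_uuid(str(i)), "n": i} for i in range(1000)]
#     await safe_insert_many(db.layers, docs)
#
# Bad batches are split recursively, so one unencodable document fails on
# its own instead of aborting the entire insert.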


def create_uuid(input_str: str) -> str:
    # Derive a deterministic UUID from the input string: the same URL always
    # maps to the same id across runs, keeping re-ingests idempotent.
    # https://nathanielknight.ca/articles/consistent_random_uuids_in_python.html
    rng = random.Random(input_str)  # a local RNG avoids reseeding the global one
    return str(
        uuid.UUID(bytes=bytes(rng.getrandbits(8) for _ in range(16)), version=4),
    )
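
# Determinism sketch (hypothetical URL): repeated calls with the same input
# return the same UUID, so ids stay stable across crawls:
#
#     create_uuid("https://example.com/arcgis/rest/services")
#     # -> identical UUID string on every run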


def validate_coordinate(lon: float, lat: float) -> tuple[float, float]:
    """
    Clamp longitude and latitude values to their valid WGS84 ranges.

    Parameters:
    lon (float): Longitude value.
    lat (float): Latitude value.

    Returns:
    tuple[float, float]: Validated and potentially clamped (longitude, latitude) pair.
    """
    # Clamp out-of-range values rather than rejecting them outright
    lon = min(max(lon, -180), 180)
    lat = min(max(lat, -90), 90)

    return lon, lat
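
# Clamping sketch:
#
#     validate_coordinate(-181.5, 91.0)  # -> (-180, 90)
#     validate_coordinate(10.0, 45.0)    # -> (10.0, 45.0), unchanged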


def reproject_to_4326_and_convert_to_geojson(
    bbox: dict,
) -> dict:
    """
    Reprojects a bounding box from EPSG:102100 (3857) to EPSG:4326 and converts it to GeoJSON.

    Parameters:
    bbox (dict): A dictionary containing the keys 'xmin', 'ymin', 'xmax', 'ymax', and 'spatialReference'.

    Returns:
    dict: A GeoJSON object representing the reprojected bounding box.
    """

    # Resolve the source CRS: try latestWkid/wkid against the EPSG and ESRI
    # authorities, then fall back to a WKT definition if one is present
    def get_src_proj() -> Proj:
        sr = bbox["spatialReference"]
        for wkid in ["latestWkid", "wkid"]:
            if (sr_wkid := sr.get(wkid)) is not None:
                for authority in ["EPSG", "ESRI"]:
                    try:
                        return Proj(f"{authority}:{sr_wkid}")
                    except CRSError:
                        pass
        if (sr_wkt := sr.get("wkt")) is not None:
            return Proj(CRS.from_wkt(sr_wkt))
        raise ValueError("no usable CRS found in spatialReference")

    src_proj = get_src_proj()
    dst_proj = Proj("epsg:4326")
    transformer = Transformer.from_proj(src_proj, dst_proj, always_xy=True)

    # Extract coordinates
    xmin, ymin, xmax, ymax = bbox["xmin"], bbox["ymin"], bbox["xmax"], bbox["ymax"]

    # Transform the coordinates
    xmin_trans, ymin_trans = validate_coordinate(*transformer.transform(xmin, ymin))
    xmax_trans, ymax_trans = validate_coordinate(*transformer.transform(xmax, ymax))

    # Create a polygon from the transformed coordinates
    # Ensure that the polygon is closed by repeating the first point at the end
    coords = [
        (xmin_trans, ymin_trans),
        (xmax_trans, ymin_trans),
        (xmax_trans, ymax_trans),
        (xmin_trans, ymax_trans),
        (xmin_trans, ymin_trans),
    ]

    # Fewer than 3 distinct corners means a degenerate (point or line) extent
    if len(set(coords)) < 3:
        raise ValueError("invalid extent")

    polygon = Polygon(coords)
    # Convert the polygon to GeoJSON format
    geojson = {
        "type": "Polygon",
        "coordinates": [list(polygon.exterior.coords)],
    }

    return geojson
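
# A minimal usage sketch (hypothetical Web Mercator extent):
#
#     bbox = {
#         "xmin": -8242894.0, "ymin": 4965204.0,
#         "xmax": -8227290.0, "ymax": 4994963.0,
#         "spatialReference": {"wkid": 102100, "latestWkid": 3857},
#     }
#     reproject_to_4326_and_convert_to_geojson(bbox)
#     # -> {"type": "Polygon", "coordinates": [[(lon, lat), ...]]}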


# Whitelist of metadata keys retained by process_metadata
keepkeys = {
    "id",
    "original_id",
    "associatedlayers",
    "domains",
    "copyrighttext",
    "description",
    "documentinfo",
    "fields",
    "mapname",
    "name",
    "parentlayer",
    "servicedescription",
    "subLayers",
    "tables",
    "version",
    "currentversion",
    "geometrytype",
    "extent",
    "type",
    "url",
    "server",
    "layers",
    "service",
}


async def process_metadata(
    metadata: dict,
    additional_fields: Optional[dict] = None,
) -> dict:
    additional_fields = additional_fields or {}
    # Drop nested collections (processed separately), then merge in any
    # additional fields such as "url" and "hash"
    processed_md = {
        k: v for k, v in metadata.items() if k not in ["folders", "services", "layers"]
    }
    processed_md.update(additional_fields)
    # Preserve the source id, then replace it with the deterministic hash id
    processed_md["original_id"] = processed_md.get("id", None)
    processed_md["id"] = processed_md["hash"]
    del processed_md["hash"]

    processed_md = {k: v for k, v in processed_md.items() if k in keepkeys}

    # Reproject the extent when it has a usable CRS and no null-ish values;
    # otherwise drop it rather than store an invalid geometry
    if (
        (extent := processed_md.get("extent")) is not None
        and extent.get("spatialReference") is not None
        and not any(
            str(v).lower() in {"nan", "none", "null"} for v in extent.values()
        )
    ):
        try:
            processed_md["extent"] = reproject_to_4326_and_convert_to_geojson(
                extent,
            )
        except (ValueError, GEOSException, CRSError):
            del processed_md["extent"]
    elif "extent" in processed_md:
        del processed_md["extent"]
    return processed_md
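
# A minimal sketch of the metadata flow (hypothetical values; note that the
# callers below always supply "hash" via additional_fields):
#
#     md = {"name": "Parcels", "id": 3, "type": "Feature Layer", "folders": []}
#     await process_metadata(
#         md,
#         {"url": "https://example.com/rest", "hash": create_uuid("https://example.com/rest")},
#     )
#     # -> keys filtered to keepkeys; "id" is now the deterministic hash,
#     #    and the source id is kept under "original_id"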


def get_type(layer: dict) -> str:
    # Normalize the layer type, e.g. " Feature Layer " -> "feature_layer";
    # strip whitespace first so edge spaces don't become underscores
    return layer.get("type", "unknown").strip().lower().replace(" ", "_")


async def read_data(jsonfiles: list[str]) -> AsyncGenerator[dict, None]:
    # Async generator yielding one parsed JSON document per file
    for f in jsonfiles:
        async with aiofiles.open(f, "r") as infile:
            content = await infile.read()
            yield json.loads(content)


# Process one server document: flatten its services and bulk-insert all layers
async def process_server(
    server: dict,
    layers_collection: AsyncIOMotorCollection,
) -> None:
    server_services = server.pop("services")
    server_md = await process_metadata(
        server["metadata"],
        {
            "url": server["metadata"]["url"],
            "hash": create_uuid(server["metadata"]["url"]),
        },
    )

    # Fan out per-layer processing concurrently within this server
    layer_tasks = []
    for service in server_services:
        service_md = await process_metadata(
            service["metadata"],
            {
                "url": service["url"],
                "hash": create_uuid(service["url"]),
                "server": server_md,
            },
        )
        for layer in service["metadata"].pop("layers"):
            task = asyncio.create_task(process_layer(layer, service_md))
            layer_tasks.append(task)

    layers_md = await asyncio.gather(*layer_tasks)
    await safe_insert_many(layers_collection, layers_md)
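
# Expected input shape for process_server (abridged and hypothetical): each
# server document carries its own metadata plus a list of services, and each
# service's metadata contains the "layers" list popped above:
#
#     {
#         "metadata": {"url": "https://example.com/arcgis/rest/services", ...},
#         "services": [
#             {"url": "...", "metadata": {"url": "...", "layers": [...]}},
#         ],
#     }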


async def process_layer(layer: dict, service: dict) -> dict:
    # Embed the parent service metadata into the layer document
    layer_md = await process_metadata(
        layer,
        {
            "url": layer["url"],
            "hash": create_uuid(layer["url"]),
            "service": {**service},
        },
    )
    return layer_md


async def main() -> None:
    output_dir = os.path.abspath("/home/appuser/restgdf_api/lab/output_tryagain")
    jsonfiles = glob(os.path.join(output_dir, "*.json"))

    client = AsyncIOMotorClient(
        r"mongodb://root:example@mongo:27017/",
        server_api=ServerApi("1"),
    )
    db = client["govgis-nov2023-slim-spatial"]
    layers = db.layers

    # Servers are processed one at a time; layers within each server run
    # concurrently (see process_server)
    async for server in read_data(jsonfiles):
        await process_server(server, layers)


if __name__ == "__main__":
    asyncio.run(main())