from typing import Dict, List, Any, Optional, Union, Tuple
import os
import tempfile
import zipfile
import shutil
import logging
from pathlib import Path
import json

import numpy as np
import pandas as pd

# Optional geospatial imports
try:
    import geopandas as gpd
    from shapely.geometry import Point, Polygon, LineString, MultiPolygon
    from shapely.ops import transform
    import pyproj
    from fiona import supported_drivers
    GEOSPATIAL_AVAILABLE = True
    RASTERIO_AVAILABLE = False  # Disable rasterio due to numpy compatibility issues
except ImportError as e:
    logging.warning(f"Geospatial dependencies not available: {e}")
    GEOSPATIAL_AVAILABLE = False
    RASTERIO_AVAILABLE = False

# Disable rasterio entirely due to numpy compatibility issues
RASTERIO_AVAILABLE = False

from pyspark.sql import SparkSession, DataFrame
from pyspark.sql.functions import col, udf, lit, array
from pyspark.sql.types import StructType, StructField, FloatType, IntegerType, StringType, ArrayType
from pyspark.ml.linalg import VectorUDT, Vectors


class GeospatialEngine:
    """Engine for processing geospatial data including GDB files."""

    def __init__(self, spark_session: SparkSession):
        """
        Initialize the geospatial engine.

        Args:
            spark_session: Spark session used for distributed processing
        """
        self.spark = spark_session
        self.logger = logging.getLogger(__name__)

        # Feature flag: geospatial processing only works when the optional
        # stack (geopandas/fiona/shapely/pyproj) imported at module load.
        self.geospatial_enabled = GEOSPATIAL_AVAILABLE
        if GEOSPATIAL_AVAILABLE:
            self._check_geospatial_dependencies()
        else:
            self.logger.warning("Geospatial dependencies not available. Some features will be disabled.")

    def _check_geospatial_dependencies(self):
        """Verify the geospatial stack is importable; raise ImportError if not."""
        try:
            import geopandas
            import fiona
            import shapely
            import pyproj
        except ImportError as e:
            # Log before raising so the failure is visible even if the
            # caller swallows the exception.
            self.logger.warning(f"Missing geospatial dependency: {e}")
            raise ImportError("Required geospatial dependencies not installed. "
                           "Please install: geopandas, fiona, shapely, pyproj")
        self.logger.info("All geospatial dependencies are available")

    def read_gdb(self,
                 gdb_path: str,
                 layer_name: Optional[str] = None,
                 feature_classes: Optional[List[str]] = None) -> Dict[str, gpd.GeoDataFrame]:
        """
        Read data from a File Geodatabase.

        Args:
            gdb_path: Path to the GDB file or directory
            layer_name: Specific layer to read (if None, reads all layers)
            feature_classes: List of feature classes to read

        Returns:
            Dictionary mapping layer names to GeoDataFrames
        """
        try:
            self.logger.info(f"Reading GDB from: {gdb_path}")

            # Dispatch on the path suffix; fall back to auto-detection.
            if gdb_path.endswith('.gdb'):
                # ESRI File Geodatabase
                result = self._read_esri_gdb(gdb_path, layer_name, feature_classes)
            elif gdb_path.endswith('.gpkg'):
                # GeoPackage
                result = self._read_geopackage(gdb_path, layer_name)
            elif gdb_path.endswith('.shp') or gdb_path.endswith('.geojson'):
                # Shapefile or GeoJSON
                result = self._read_vector_file(gdb_path)
            else:
                # Unknown suffix: try to detect the format from the path.
                result = self._auto_detect_and_read(gdb_path, layer_name)

            self.logger.info(f"Successfully read {len(result)} layers from GDB")
            return result

        except Exception as e:
            self.logger.error(f"Failed to read GDB: {str(e)}")
            raise

    def _read_esri_gdb(self, gdb_path: str, layer_name: Optional[str],
                       feature_classes: Optional[List[str]]) -> Dict[str, gpd.GeoDataFrame]:
        """
        Read an ESRI File Geodatabase.

        Args:
            gdb_path: Path to the .gdb directory.
            layer_name: Single layer to read; takes precedence over
                feature_classes when provided.
            feature_classes: Feature classes to read when layer_name is None.
                If both are None, all layers are read.

        Returns:
            Mapping of layer name to GeoDataFrame. Layers that fail to load
            are skipped with a warning instead of aborting the whole read.

        Raises:
            Exception: Re-raises any error from listing layers.
        """
        try:
            # BUG FIX: the module top only does `from fiona import
            # supported_drivers`, so the bare name `fiona` was unbound here
            # and this method raised NameError. Import it locally.
            import fiona

            available_layers = fiona.listlayers(gdb_path)
            self.logger.info(f"Available layers in GDB: {available_layers}")

            # Decide which layers to load: one explicit layer, a subset of
            # feature classes, or everything available.
            if layer_name:
                layers_to_read = [layer_name] if layer_name in available_layers else []
            elif feature_classes:
                layers_to_read = [fc for fc in feature_classes if fc in available_layers]
            else:
                layers_to_read = available_layers

            layers = {}
            for layer in layers_to_read:
                try:
                    gdf = gpd.read_file(gdb_path, layer=layer)
                    layers[layer] = gdf
                    self.logger.info(f"Read layer '{layer}' with {len(gdf)} features")
                except Exception as e:
                    # Best-effort: a single bad layer should not sink the rest.
                    self.logger.warning(f"Failed to read layer '{layer}': {str(e)}")
                    continue

            return layers

        except Exception as e:
            self.logger.error(f"Failed to read ESRI GDB: {str(e)}")
            raise

    def _read_geopackage(self, gpkg_path: str, layer_name: Optional[str]) -> Dict[str, gpd.GeoDataFrame]:
        """
        Read a GeoPackage file.

        Args:
            gpkg_path: Path to the .gpkg file.
            layer_name: Layer to read; when None, every layer is read.

        Returns:
            Mapping of layer name to GeoDataFrame.

        Raises:
            Exception: Re-raises any read error after logging it.
        """
        try:
            layers = {}

            if layer_name:
                layers[layer_name] = gpd.read_file(gpkg_path, layer=layer_name)
            else:
                # BUG FIX: `fiona` was never bound at module scope (only
                # `supported_drivers` is imported from it), so listing layers
                # raised NameError. Import it locally.
                import fiona

                for layer in fiona.listlayers(gpkg_path):
                    layers[layer] = gpd.read_file(gpkg_path, layer=layer)

            return layers

        except Exception as e:
            self.logger.error(f"Failed to read GeoPackage: {str(e)}")
            raise

    def _read_vector_file(self, file_path: str) -> Dict[str, gpd.GeoDataFrame]:
        """Read a single vector file (shapefile, GeoJSON, ...) keyed by its stem."""
        try:
            # The file's stem (name without extension) serves as the layer name.
            return {Path(file_path).stem: gpd.read_file(file_path)}
        except Exception as e:
            self.logger.error(f"Failed to read vector file: {str(e)}")
            raise

    def _auto_detect_and_read(self, path: str, layer_name: Optional[str]) -> Dict[str, gpd.GeoDataFrame]:
        """Auto-detect the format at *path* and read it, or raise ValueError."""
        target = Path(path)

        if target.is_file():
            # Any regular file is treated as a single-layer vector file.
            return self._read_vector_file(str(target))

        if target.is_dir():
            # Heuristic GDB check: internal 'gdb/gdb' marker or a .gdb suffix.
            if (target / 'gdb' / 'gdb').exists() or target.suffix == '.gdb':
                return self._read_esri_gdb(str(target), layer_name, None)
            raise ValueError(f"Directory is not a valid geodatabase: {path}")

        raise ValueError(f"Path does not exist: {path}")

    def extract_vector_features(self, gdf: gpd.GeoDataFrame,
                               feature_type: str = "centroid") -> pd.DataFrame:
        """
        Extract vector features from geometries for spatial analysis.

        Args:
            gdf: GeoDataFrame with geometries
            feature_type: Type of features to extract
                         ("centroid", "vertices", "bounds", "area_perimeter",
                          "geometry_coords")

        Returns:
            DataFrame with extracted vector features

        Raises:
            ValueError: If feature_type is not one of the supported values.
        """
        try:
            self.logger.info(f"Extracting {feature_type} features from {len(gdf)} geometries")

            # Dispatch to the matching private extractor.
            if feature_type == "centroid":
                return self._extract_centroids(gdf)
            elif feature_type == "vertices":
                return self._extract_vertices(gdf)
            elif feature_type == "bounds":
                return self._extract_bounds(gdf)
            elif feature_type == "area_perimeter":
                return self._extract_area_perimeter(gdf)
            elif feature_type == "geometry_coords":
                return self._extract_geometry_coordinates(gdf)
            else:
                raise ValueError(f"Unsupported feature type: {feature_type}")

        except Exception as e:
            # Note: the ValueError above is also logged here before re-raising.
            self.logger.error(f"Failed to extract features: {str(e)}")
            raise

    def _extract_centroids(self, gdf: gpd.GeoDataFrame) -> pd.DataFrame:
        """Extract centroid x/y coordinates plus all non-geometry attributes."""
        centroids = gdf.geometry.centroid
        result = pd.DataFrame({
            'centroid_x': centroids.x,
            'centroid_y': centroids.y,
        })

        # Carry over every attribute column except the geometry itself.
        # (Loop variable renamed from `col` to avoid shadowing pyspark's `col`.)
        for attr in gdf.columns:
            if attr != 'geometry':
                result[attr] = gdf[attr]

        return result

    def _extract_vertices(self, gdf: gpd.GeoDataFrame, max_vertices: int = 100) -> pd.DataFrame:
        """
        Extract vertex coordinate lists from geometries.

        Args:
            gdf: GeoDataFrame whose geometries are sampled.
            max_vertices: Upper bound on vertices kept per geometry; overly
                complex geometries are subsampled with a uniform stride.

        Returns:
            DataFrame with 'vertices' (list of (x, y) tuples), 'vertex_count',
            and all non-geometry attribute columns. Multi-part geometries get
            an empty vertex list.
        """
        vertices_list = []

        for geom in gdf.geometry:
            if geom.geom_type == 'Point':
                # BUG FIX: store one (x, y) vertex instead of a flat [x, y]
                # pair, which made vertex_count report 2 for a single point.
                vertices_list.append([(geom.x, geom.y)])
            elif geom.geom_type in ('LineString', 'Polygon'):
                # BUG FIX: shapely Polygons do not expose .coords (it raises);
                # use the exterior ring for polygons.
                ring = geom.exterior if geom.geom_type == 'Polygon' else geom
                coords = list(ring.coords)
                # Subsample very complex geometries to bound output size.
                if len(coords) > max_vertices:
                    step = len(coords) // max_vertices
                    coords = coords[::step]
                vertices_list.append(list(coords))
            else:
                # Multi-part and other geometry types are not decomposed here.
                vertices_list.append([])

        df = pd.DataFrame({
            'vertices': vertices_list,
            'vertex_count': [len(v) for v in vertices_list]
        })

        # Carry over original attributes (excluding geometry).
        for attr in gdf.columns:
            if attr != 'geometry':
                df[attr] = gdf[attr]

        return df

    def _extract_bounds(self, gdf: gpd.GeoDataFrame) -> pd.DataFrame:
        """Extract per-feature bounding boxes plus all non-geometry attributes."""
        bbox = gdf.geometry.bounds
        result = pd.DataFrame({
            'min_x': bbox['minx'],
            'min_y': bbox['miny'],
            'max_x': bbox['maxx'],
            'max_y': bbox['maxy'],
        })

        # Carry over the original attribute columns.
        for attr in gdf.columns:
            if attr != 'geometry':
                result[attr] = gdf[attr]

        return result

    def _extract_area_perimeter(self, gdf: gpd.GeoDataFrame) -> pd.DataFrame:
        """
        Extract area, perimeter, and their ratio for each geometry.

        Geographic (unprojected) data is first reprojected to Web Mercator so
        the metrics come out in linear units.
        """
        needs_projection = not gdf.crs or gdf.crs.is_geographic
        gdf_proj = gdf.to_crs('EPSG:3857') if needs_projection else gdf

        areas = gdf_proj.geometry.area
        perimeters = gdf_proj.geometry.length

        result = pd.DataFrame({
            'area': areas,
            'perimeter': perimeters,
            # Zero perimeters are substituted with 1 to avoid division by zero.
            'area_perimeter_ratio': areas / perimeters.replace(0, 1),
        })

        # Carry over the original attribute columns.
        for attr in gdf.columns:
            if attr != 'geometry':
                result[attr] = gdf[attr]

        return result

    def _extract_geometry_coordinates(self, gdf: gpd.GeoDataFrame,
                                   max_points: int = 100) -> pd.DataFrame:
        """
        Extract fixed-length flattened coordinate sequences from geometries.

        Each geometry is flattened to exactly max_points float values
        ('coord_0'..'coord_{max_points-1}'), truncated or zero-padded as
        needed, with non-geometry attributes appended.

        Args:
            gdf: GeoDataFrame with geometries.
            max_points: Number of coordinate values (not points) per row.

        Returns:
            DataFrame of standardized coordinate columns plus original
            attributes.
        """
        coords_list = []

        for geom in gdf.geometry:
            if geom.geom_type == 'Point':
                # BUG FIX: wrap in a single (x, y) tuple; the old flat [x, y]
                # list broke the flattening below (floats are not iterable).
                coords_list.append([(geom.x, geom.y)])
            elif geom.geom_type == 'LineString':
                coords_list.append(list(geom.coords))
            elif geom.geom_type == 'Polygon':
                # Exterior ring only; interior rings (holes) are ignored.
                coords_list.append(list(geom.exterior.coords))
            else:
                # Multi-part geometries: concatenate coordinates of all parts.
                all_coords = []
                for part in getattr(geom, 'geoms', []):
                    if part.geom_type == 'Polygon':
                        # BUG FIX: polygon parts have no .coords, so MultiPolygon
                        # rows silently came out empty; use each exterior ring.
                        all_coords.extend(list(part.exterior.coords))
                    elif hasattr(part, 'coords'):
                        all_coords.extend(list(part.coords))
                coords_list.append(all_coords)

        # Standardize every row to exactly max_points values: flatten each
        # (x, y) sequence, truncate, then zero-pad. (Empty geometries now pad
        # to max_points zeros instead of producing ragged rows.)
        standardized_coords = []
        for coords in coords_list:
            flat = [value for point in coords for value in point]
            if len(flat) < max_points:
                flat.extend([0.0] * (max_points - len(flat)))
            standardized_coords.append(flat[:max_points])

        df = pd.DataFrame(standardized_coords)
        df.columns = [f'coord_{i}' for i in range(df.shape[1])]

        # Carry over original attributes (excluding geometry).
        for attr in gdf.columns:
            if attr != 'geometry':
                df[attr] = gdf[attr]

        return df

    def create_spatial_grid(self, gdf: gpd.GeoDataFrame, grid_size: float = 1000.0,
                          crs: str = "EPSG:3857") -> gpd.GeoDataFrame:
        """
        Create a regular grid covering the extent of geospatial data.

        Args:
            gdf: Input GeoDataFrame
            grid_size: Grid cell size in CRS units
            crs: Projected CRS to use when the input is geographic/missing CRS.
                 Already-projected input keeps its own CRS.

        Returns:
            GeoDataFrame with grid polygons, clipped to the data extent
            buffered by one cell size.
        """
        try:
            # Ensure data is in a projected CRS (grid_size is in CRS units).
            if not gdf.crs or gdf.crs.is_geographic:
                gdf_proj = gdf.to_crs(crs)
            else:
                gdf_proj = gdf

            # BUG FIX: the grid must carry the CRS its coordinates were
            # computed in. Previously an already-projected input (e.g. a UTM
            # zone) produced a grid mislabeled with the `crs` argument.
            grid_crs = gdf_proj.crs

            min_x, min_y, max_x, max_y = gdf_proj.total_bounds

            # Cell origin coordinates along each axis.
            x_coords = np.arange(min_x, max_x + grid_size, grid_size)
            y_coords = np.arange(min_y, max_y + grid_size, grid_size)

            grid_polygons = []
            grid_ids = []

            for i, x in enumerate(x_coords[:-1]):
                for j, y in enumerate(y_coords[:-1]):
                    grid_polygons.append(Polygon([
                        (x, y),
                        (x + grid_size, y),
                        (x + grid_size, y + grid_size),
                        (x, y + grid_size),
                        (x, y)
                    ]))
                    grid_ids.append(f"grid_{i}_{j}")

            grid_gdf = gpd.GeoDataFrame({
                'grid_id': grid_ids,
                'geometry': grid_polygons
            }, crs=grid_crs)

            # Keep only cells near the data: clip to the union of all
            # geometries buffered by one cell size.
            grid_gdf = gpd.clip(grid_gdf, gdf_proj.unary_union.buffer(grid_size))

            self.logger.info(f"Created spatial grid with {len(grid_gdf)} cells")
            return grid_gdf

        except Exception as e:
            self.logger.error(f"Failed to create spatial grid: {str(e)}")
            raise

    def spatial_join_analysis(self,
                           points_gdf: gpd.GeoDataFrame,
                           polygons_gdf: gpd.GeoDataFrame,
                           operation: str = "within") -> gpd.GeoDataFrame:
        """
        Perform a spatial join between point and polygon layers.

        Args:
            points_gdf: GeoDataFrame with point geometries
            polygons_gdf: GeoDataFrame with polygon geometries
            operation: Spatial operation ("within", "contains", "intersects")

        Returns:
            GeoDataFrame with spatial join results
        """
        try:
            self.logger.info(f"Performing spatial join: {operation}")
            self.logger.info(f"Points: {len(points_gdf)}, Polygons: {len(polygons_gdf)}")

            # Reproject the points if the two layers disagree on CRS.
            if points_gdf.crs != polygons_gdf.crs:
                points_gdf = points_gdf.to_crs(polygons_gdf.crs)

            if operation == "contains":
                # 'contains' is evaluated from the polygon side.
                result = gpd.sjoin(polygons_gdf, points_gdf, how="inner", predicate="contains")
            elif operation in ("within", "intersects"):
                # Points against polygons; predicate matches the operation name.
                result = gpd.sjoin(points_gdf, polygons_gdf, how="inner", predicate=operation)
            else:
                raise ValueError(f"Unsupported spatial operation: {operation}")

            self.logger.info(f"Spatial join result: {len(result)} features")
            return result

        except Exception as e:
            self.logger.error(f"Spatial join failed: {str(e)}")
            raise

    def to_spark_dataframe(self, df: pd.DataFrame, vector_columns: List[str]) -> DataFrame:
        """
        Convert a pandas DataFrame with vector features to a Spark DataFrame.

        Args:
            df: Input pandas DataFrame.
            vector_columns: Columns to combine into a single "features" vector
                column. Multiple columns go through a VectorAssembler; a single
                column (assumed to already hold vectors) is renamed.

        Returns:
            Spark DataFrame; contains a "features" column when vector_columns
            is non-empty.
        """
        try:
            # (Removed an unused `from pyspark.sql import SparkSession` here.)
            spark_df = self.spark.createDataFrame(df)

            if len(vector_columns) > 1:
                from pyspark.ml.feature import VectorAssembler

                # Rows with invalid values are dropped rather than erroring.
                assembler = VectorAssembler(
                    inputCols=vector_columns,
                    outputCol="features",
                    handleInvalid="skip"
                )
                spark_df = assembler.transform(spark_df)
            elif len(vector_columns) == 1:
                # Single column assumed to already contain vector values.
                spark_df = spark_df.withColumnRenamed(vector_columns[0], "features")
            else:
                # Previously this case fell through silently; make it visible.
                self.logger.warning("No vector_columns provided; returning DataFrame without a 'features' column")

            return spark_df

        except Exception as e:
            self.logger.error(f"Failed to convert to Spark DataFrame: {str(e)}")
            raise

    def get_geospatial_metadata(self, gdb_path: str) -> Dict[str, Any]:
        """
        Collect metadata about geospatial data in a GDB.

        Args:
            gdb_path: Path to GDB file

        Returns:
            Dictionary with per-layer info (feature count, columns, geometry
            type of the first feature, CRS, bounds), aggregate feature count,
            CRS per layer, observed geometry types, and attribute statistics.
            All values use native Python types so the dict is JSON-serializable.
        """
        try:
            metadata = {
                "path": gdb_path,
                "layers": [],
                "total_features": 0,
                "crs_info": {},
                "geometry_types": set(),
                "attribute_info": {}
            }

            # Read every layer of the GDB.
            layers_dict = self.read_gdb(gdb_path)

            for layer_name, gdf in layers_dict.items():
                # NOTE: geometry_type samples only the first feature; mixed
                # layers are reported by their first geometry.
                layer_info = {
                    "name": layer_name,
                    "feature_count": len(gdf),
                    "columns": list(gdf.columns),
                    "geometry_type": str(gdf.geometry.geom_type.iloc[0]) if len(gdf) > 0 else "None",
                    "crs": str(gdf.crs) if gdf.crs else "None",
                    "bounds": gdf.total_bounds.tolist() if len(gdf) > 0 else None
                }

                metadata["layers"].append(layer_info)
                metadata["total_features"] += len(gdf)

                if gdf.crs:
                    metadata["crs_info"][layer_name] = str(gdf.crs)

                if len(gdf) > 0:
                    metadata["geometry_types"].add(str(gdf.geometry.geom_type.iloc[0]))

                # Attribute statistics for every non-geometry column.
                attr_info = {}
                for column in gdf.columns:
                    if column != 'geometry':
                        attr_info[column] = {
                            "type": str(gdf[column].dtype),
                            # BUG FIX: cast numpy scalars (isnull().sum() returns
                            # np.int64) to native int for JSON serializability.
                            "null_count": int(gdf[column].isnull().sum()),
                            "unique_count": int(gdf[column].nunique())
                        }
                metadata["attribute_info"][layer_name] = attr_info

            # Sets are not JSON-serializable; expose as a list.
            metadata["geometry_types"] = list(metadata["geometry_types"])

            return metadata

        except Exception as e:
            self.logger.error(f"Failed to get geospatial metadata: {str(e)}")
            raise