#!/usr/bin/env python3
"""
Script to generate sample geospatial data for testing the Vector Analytics Platform GDB support.
"""

import numpy as np
import pandas as pd
import geopandas as gpd
from shapely.geometry import Point, Polygon, LineString
from shapely.ops import unary_union
import os
import json
import tempfile
import zipfile
from pathlib import Path


def create_sample_points_gdf(n_points=1000, seed=42):
    """Generate random point features with synthetic attributes.

    Points fall in a lon/lat box roughly covering the SF Bay Area
    (lon -122.5..-121.5, lat 37.7..38.3), CRS EPSG:4326.

    Args:
        n_points: Number of points to generate.
        seed: Seed for numpy's global RNG (reproducible output).

    Returns:
        geopandas.GeoDataFrame with id/category/value/elevation/
        population/temperature columns and point geometry.
    """
    np.random.seed(seed)

    # Draw coordinates first, then attributes — order matters for
    # reproducibility with the global RNG.
    lons = np.random.uniform(-122.5, -121.5, n_points)
    lats = np.random.uniform(37.7, 38.3, n_points)

    data = {
        'id': range(n_points),
        'category': np.random.choice(['A', 'B', 'C', 'D'], n_points),
        'value': np.random.uniform(0, 100, n_points),
        'elevation': np.random.uniform(0, 500, n_points),
        'population': np.random.randint(10, 1000, n_points),
        'temperature': np.random.uniform(15, 25, n_points)
    }

    points = [Point(lon, lat) for lon, lat in zip(lons, lats)]
    return gpd.GeoDataFrame(data, geometry=points, crs='EPSG:4326')


def create_sample_polygons_gdf(n_polygons=200, seed=42):
    """Generate random star-shaped polygon features with attributes.

    Each polygon is built by sweeping sorted angles around a random
    center with random radii, which guarantees a simple (non-self-
    intersecting) ring.

    Args:
        n_polygons: Number of polygons to generate.
        seed: Seed for numpy's global RNG (reproducible output).

    Returns:
        geopandas.GeoDataFrame with zoning-style attributes and polygon
        geometry, CRS EPSG:4326.
    """
    np.random.seed(seed)

    geoms = []
    records = []

    for idx in range(n_polygons):
        # Random center inside the study area; RNG call order is kept
        # identical per feature for reproducibility.
        cx = np.random.uniform(-122.5, -121.5)
        cy = np.random.uniform(37.7, 38.3)

        vertex_count = np.random.randint(4, 8)
        angles = np.sort(np.random.uniform(0, 2 * np.pi, vertex_count))
        radii = np.random.uniform(0.001, 0.01, vertex_count)

        ring = [(cx + r * np.cos(a), cy + r * np.sin(a))
                for a, r in zip(angles, radii)]
        ring.append(ring[0])  # explicitly close the ring

        geoms.append(Polygon(ring))

        records.append({
            'id': idx,
            'zone_type': np.random.choice(['residential', 'commercial', 'industrial', 'park']),
            'area_value': np.random.uniform(1000, 50000),
            'population_density': np.random.uniform(10, 1000),
            'year_built': np.random.randint(1900, 2023),
            'property_count': np.random.randint(1, 50)
        })

    return gpd.GeoDataFrame(records, geometry=geoms, crs='EPSG:4326')


def create_sample_lines_gdf(n_lines=300, seed=42):
    """Create sample line (road) data with attributes.

    Each line is a random walk of 3-9 vertices, shifted so it starts
    within the study area (lon -122.5..-121.5, lat 37.7..38.3).

    Args:
        n_lines: Number of lines to generate.
        seed: Seed for numpy's global RNG (reproducible output).

    Returns:
        geopandas.GeoDataFrame with road-style attributes and line
        geometry, CRS EPSG:4326.
    """
    np.random.seed(seed)

    lines = []
    attributes = []

    for i in range(n_lines):
        # Build a random walk, then translate it into the study area.
        n_points = np.random.randint(3, 10)
        x_coords = np.cumsum(np.random.uniform(-0.01, 0.01, n_points))
        y_coords = np.cumsum(np.random.uniform(-0.01, 0.01, n_points))

        x_coords = x_coords - x_coords.min() + np.random.uniform(-122.5, -121.5)
        y_coords = y_coords - y_coords.min() + np.random.uniform(37.7, 38.3)

        coords = [(x, y) for x, y in zip(x_coords, y_coords)]
        line = LineString(coords)
        lines.append(line)

        # Add attributes
        attributes.append({
            'id': i,
            # BUG FIX: 'arterial' previously had a stray leading space
            # (' arterial'), creating an inconsistent category value that
            # would break grouping/filtering on road_type.
            'road_type': np.random.choice(['highway', 'arterial', 'collector', 'local']),
            'length_km': np.random.uniform(0.1, 5.0),
            'traffic_volume': np.random.randint(100, 50000),
            'speed_limit': np.random.choice([25, 35, 45, 55, 65]),
            'condition': np.random.choice(['excellent', 'good', 'fair', 'poor'])
        })

    gdf = gpd.GeoDataFrame(attributes, geometry=lines, crs='EPSG:4326')
    return gdf


def create_geopackage(output_path):
    """Write point, polygon, and line layers into a single GeoPackage.

    Args:
        output_path: Destination .gpkg file path; layers are named
            sample_points, sample_polygons, and sample_lines.
    """
    print("Creating GeoPackage...")

    layers = [
        ('sample_points', create_sample_points_gdf(500)),
        ('sample_polygons', create_sample_polygons_gdf(100)),
        ('sample_lines', create_sample_lines_gdf(150)),
    ]

    # GPKG supports multiple layers in one file, unlike shapefiles.
    for layer_name, gdf in layers:
        gdf.to_file(output_path, layer=layer_name, driver='GPKG')

    print(f"GeoPackage created: {output_path}")
    print(f"  - Points: {len(layers[0][1])}")
    print(f"  - Polygons: {len(layers[1][1])}")
    print(f"  - Lines: {len(layers[2][1])}")


def create_shapefile_set(output_dir):
    """Write point, polygon, and line layers as separate shapefiles.

    Args:
        output_dir: Directory that will receive the .shp files
            (created if missing).

    NOTE(review): the shapefile format truncates field names to 10
    characters, so attributes like 'population_density' will be
    shortened on write — presumably acceptable for test data; verify
    against downstream consumers.
    """
    print("Creating shapefiles...")
    os.makedirs(output_dir, exist_ok=True)

    points_path = os.path.join(output_dir, 'sample_points.shp')
    polygons_path = os.path.join(output_dir, 'sample_polygons.shp')
    lines_path = os.path.join(output_dir, 'sample_lines.shp')

    # Generate and write in the same order as the paths above.
    create_sample_points_gdf(300).to_file(points_path)
    create_sample_polygons_gdf(80).to_file(polygons_path)
    create_sample_lines_gdf(120).to_file(lines_path)

    print(f"Shapefiles created in: {output_dir}")
    print(f"  - Points: {points_path}")
    print(f"  - Polygons: {polygons_path}")
    print(f"  - Lines: {lines_path}")


def create_file_geodatabase(output_dir):
    """Create a mock File Geodatabase plus a JSON metadata sidecar.

    A real ESRI File Geodatabase requires ArcPy or ESRI tooling, so a
    GeoPackage is written instead as an open-format stand-in.

    Args:
        output_dir: Existing directory that receives the .gpkg and the
            metadata JSON file.
    """
    print("Creating mock File Geodatabase...")

    gdb_path = os.path.join(output_dir, 'sample_geodatabase.gpkg')
    create_geopackage(gdb_path)

    def layer_entry(name, geom_type, count, fields):
        # One layer descriptor in the GDB-style metadata document.
        return {
            "name": name,
            "geometry_type": geom_type,
            "feature_count": count,
            "srs": "EPSG:4326",
            "fields": [{"name": n, "type": t} for n, t in fields]
        }

    metadata = {
        "type": "file_geodatabase",
        "version": "1.0",
        "layers": [
            layer_entry("sample_points", "POINT", 500, [
                ("id", "INTEGER"), ("category", "STRING"),
                ("value", "DOUBLE"), ("elevation", "DOUBLE"),
                ("population", "INTEGER"), ("temperature", "DOUBLE"),
            ]),
            layer_entry("sample_polygons", "POLYGON", 100, [
                ("id", "INTEGER"), ("zone_type", "STRING"),
                ("area_value", "DOUBLE"), ("population_density", "DOUBLE"),
                ("year_built", "INTEGER"), ("property_count", "INTEGER"),
            ]),
            layer_entry("sample_lines", "LINESTRING", 150, [
                ("id", "INTEGER"), ("road_type", "STRING"),
                ("length_km", "DOUBLE"), ("traffic_volume", "INTEGER"),
                ("speed_limit", "INTEGER"), ("condition", "STRING"),
            ]),
        ]
    }

    metadata_path = os.path.join(output_dir, 'sample_geodatabase_metadata.json')
    with open(metadata_path, 'w') as f:
        json.dump(metadata, f, indent=2)

    print(f"Mock File Geodatabase created: {gdb_path}")
    print(f"Metadata saved: {metadata_path}")


def create_csv_from_geospatial_data(output_dir):
    """Export point and polygon data as flat CSV files.

    Points are flattened to lon/lat columns; polygons to centroid,
    area, and perimeter (computed in EPSG:3857 so units are meters).

    Args:
        output_dir: Destination directory (created if missing).
    """
    print("Creating CSV files from geospatial data...")
    os.makedirs(output_dir, exist_ok=True)

    # --- Points: lon/lat plus the original attribute columns ---
    points_gdf = create_sample_points_gdf(800)
    points_df = pd.DataFrame({
        'longitude': points_gdf.geometry.x,
        'latitude': points_gdf.geometry.y,
        'category': points_gdf['category'],
        'value': points_gdf['value'],
        'elevation': points_gdf['elevation'],
        'population': points_gdf['population'],
        'temperature': points_gdf['temperature']
    })

    points_csv_path = os.path.join(output_dir, 'geospatial_points.csv')
    points_df.to_csv(points_csv_path, index=False)
    print(f"Points CSV created: {points_csv_path}")

    # --- Polygons: centroid + shape metrics in a projected CRS ---
    polygons_gdf = create_sample_polygons_gdf(150)
    # Reproject so area/length come out in meters, not degrees.
    polygons_proj = polygons_gdf.to_crs('EPSG:3857')

    polygons_df = pd.DataFrame({
        'centroid_x': polygons_proj.geometry.centroid.x,
        'centroid_y': polygons_proj.geometry.centroid.y,
        'area': polygons_proj.geometry.area,
        'perimeter': polygons_proj.geometry.length,
        'zone_type': polygons_gdf['zone_type'],
        'area_value': polygons_gdf['area_value'],
        'population_density': polygons_gdf['population_density'],
        'year_built': polygons_gdf['year_built'],
        'property_count': polygons_gdf['property_count']
    })

    polygons_csv_path = os.path.join(output_dir, 'geospatial_polygons.csv')
    polygons_df.to_csv(polygons_csv_path, index=False)
    print(f"Polygons CSV created: {polygons_csv_path}")


def create_vector_datasets_for_testing(output_dir):
    """Build three specialized CSV vector datasets for feature testing.

    Produces centroid-based, area/perimeter-based, and bounding-box-based
    feature tables. NOTE: the extra attribute columns are drawn from the
    global numpy RNG, so the order of np.random calls here is kept
    exactly as before to preserve reproducible output.

    Args:
        output_dir: Destination directory (created if missing).
    """
    print("Creating vector datasets for testing...")
    os.makedirs(output_dir, exist_ok=True)

    # --- 1. Centroid dataset with extra synthetic weather columns ---
    points_gdf = create_sample_points_gdf(2000, seed=123)
    points_proj = points_gdf.to_crs('EPSG:3857')
    n_pts = len(points_gdf)

    centroid_df = pd.DataFrame({
        'centroid_x': points_proj.geometry.centroid.x,
        'centroid_y': points_proj.geometry.centroid.y,
        'category': points_gdf['category'],
        'value': points_gdf['value'],
        'elevation': points_gdf['elevation'],
        'temperature': points_gdf['temperature'],
        'humidity': np.random.uniform(30, 90, n_pts),
        'pressure': np.random.uniform(980, 1040, n_pts),
        'wind_speed': np.random.uniform(0, 20, n_pts),
        'solar_radiation': np.random.uniform(0, 1000, n_pts)
    })

    centroid_csv_path = os.path.join(output_dir, 'centroid_vectors.csv')
    centroid_df.to_csv(centroid_csv_path, index=False)
    print(f"Centroid vectors CSV created: {centroid_csv_path}")

    # --- 2. Area/perimeter dataset with synthetic assessment columns ---
    polygons_gdf = create_sample_polygons_gdf(300, seed=456)
    polygons_proj = polygons_gdf.to_crs('EPSG:3857')
    n_polys = len(polygons_gdf)
    areas = polygons_proj.geometry.area
    perimeters = polygons_proj.geometry.length

    area_perimeter_df = pd.DataFrame({
        'area': areas,
        'perimeter': perimeters,
        'area_perimeter_ratio': areas / perimeters,
        'zone_type': polygons_gdf['zone_type'],
        'area_value': polygons_gdf['area_value'],
        'year_built': polygons_gdf['year_built'],
        'property_count': polygons_gdf['property_count'],
        'assessment_value': polygons_gdf['area_value'] * np.random.uniform(0.8, 1.2, n_polys),
        'tax_rate': np.random.uniform(0.01, 0.03, n_polys)
    })

    area_perimeter_csv_path = os.path.join(output_dir, 'area_perimeter_vectors.csv')
    area_perimeter_df.to_csv(area_perimeter_csv_path, index=False)
    print(f"Area/perimeter vectors CSV created: {area_perimeter_csv_path}")

    # --- 3. Bounding-box dataset derived from the same polygons ---
    bounds = polygons_proj.geometry.bounds
    bounds_df = pd.DataFrame({
        'min_x': bounds['minx'],
        'min_y': bounds['miny'],
        'max_x': bounds['maxx'],
        'max_y': bounds['maxy'],
        'width': bounds['maxx'] - bounds['minx'],
        'height': bounds['maxy'] - bounds['miny'],
        'zone_type': polygons_gdf['zone_type'],
        'property_count': polygons_gdf['property_count'],
        'year_built': polygons_gdf['year_built']
    })

    bounds_csv_path = os.path.join(output_dir, 'bounds_vectors.csv')
    bounds_df.to_csv(bounds_csv_path, index=False)
    print(f"Bounds vectors CSV created: {bounds_csv_path}")


def main():
    """Drive generation of every sample geospatial dataset under ./data."""
    output_dir = "data"
    os.makedirs(output_dir, exist_ok=True)

    banner = "=" * 60
    print("Generating sample geospatial data for Vector Analytics Platform...")
    print(banner)

    # Multi-layer GeoPackage
    create_geopackage(os.path.join(output_dir, "sample_geospatial.gpkg"))

    # Individual shapefiles
    create_shapefile_set(os.path.join(output_dir, "shapefiles"))

    # Mock File Geodatabase (GeoPackage + metadata JSON)
    gdb_dir = os.path.join(output_dir, "file_geodatabases")
    os.makedirs(gdb_dir, exist_ok=True)
    create_file_geodatabase(gdb_dir)

    # Flat CSV exports of the geospatial layers
    create_csv_from_geospatial_data(os.path.join(output_dir, "geospatial_csv"))

    # Specialized feature-vector CSVs
    create_vector_datasets_for_testing(os.path.join(output_dir, "geospatial_vectors"))

    print("\n" + banner)
    print("All sample geospatial data generated successfully!")
    print(f"Output directory: {output_dir}")
    print("\nGenerated files:")
    print("1. sample_geospatial.gpkg - Multi-layer GeoPackage")
    print("2. shapefiles/ - Directory with shapefile layers")
    print("3. file_geodatabases/ - Mock File Geodatabase (GeoPackage)")
    print("4. geospatial_csv/ - CSV files derived from geospatial data")
    print("5. geospatial_vectors/ - Specialized vector datasets")

    print("\nUsage:")
    print("1. Upload geospatial files through the web interface")
    print("2. Select feature type (centroid, geometry_coords, bounds, etc.)")
    print("3. Run spatial clustering, similarity, or statistics")
    print("4. Visualize results in the dashboard")


if __name__ == "__main__":
    main()