#!/usr/bin/env python3
"""
Integration script to handle Arctic GRIB files in your wave puller
Automatically detects polar stereographic errors and uses alternative extraction
"""

import os
import shutil
import sys
import tempfile

import pandas as pd
from pathlib import Path

# Import our Arctic extractor
from arctic_grib_extractor import ArcticGRIBExtractor


def _value_column(df):
    """Return the name of the data column: 'value' if present, else 'val'."""
    return 'value' if 'value' in df.columns else 'val'


def _derived_path(grib_file, suffix):
    """Build an output path by swapping the extension of *grib_file* for *suffix*.

    Uses splitext instead of str.replace('.grib2', ...) so that inputs whose
    extension is not literally '.grib2' (e.g. '.grb2', '.grib') still get a
    distinct output name instead of silently overwriting the source file.
    """
    stem, _ext = os.path.splitext(grib_file)
    return stem + suffix


def process_arctic_grib_safe(grib_file, output_format='dataframe'):
    """
    Safe processing of Arctic GRIB files that bypasses ECCODES polar
    stereographic errors.

    Args:
        grib_file (str): Path to Arctic GRIB file
        output_format (str): 'dataframe', 'csv', or 'netcdf'

    Returns:
        pandas.DataFrame or str: Extracted data, or path to the saved file.

    Raises:
        FileNotFoundError: if *grib_file* does not exist.
        RuntimeError: if every extraction method fails.
        ValueError: if *output_format* is not recognized.
    """
    print(f"🌊 Processing Arctic GRIB file: {grib_file}")

    if not os.path.exists(grib_file):
        raise FileNotFoundError(f"GRIB file not found: {grib_file}")

    # Try every extraction strategy the extractor knows about.
    extractor = ArcticGRIBExtractor()
    data, method = extractor.extract_all_methods(grib_file)

    if data is None:
        raise RuntimeError("Failed to extract data from Arctic GRIB file using all methods")

    print(f"āœ… Successfully extracted {len(data)} points using {method}")

    if output_format == 'dataframe':
        return data

    if output_format == 'csv':
        csv_file = _derived_path(grib_file, '_extracted.csv')
        data.to_csv(csv_file, index=False)
        print(f"šŸ’¾ Saved to CSV: {csv_file}")
        return csv_file

    if output_format == 'netcdf':
        try:
            import xarray as xr
        except ImportError:
            # xarray is optional; degrade gracefully to CSV output.
            print("āš ļø xarray not available, falling back to CSV")
            return process_arctic_grib_safe(grib_file, 'csv')

        if 'latitude' in data.columns and 'longitude' in data.columns:
            val_col = _value_column(data)
            ds = xr.Dataset({
                'wave_data': (['point'], data[val_col]),
                'latitude': (['point'], data['latitude']),
                'longitude': (['point'], data['longitude'])
            })
            ds.attrs['extraction_method'] = method
            ds.attrs['source_file'] = grib_file
            nc_file = _derived_path(grib_file, '_extracted.nc')
            ds.to_netcdf(nc_file)
            print(f"šŸ’¾ Saved to NetCDF: {nc_file}")
            return nc_file

        # Without coordinate columns we cannot build the dataset; fall back.
        return process_arctic_grib_safe(grib_file, 'csv')

    raise ValueError(f"Unknown output format: {output_format}")


def arctic_grib_handler(grib_file_path):
    """
    Drop-in replacement for problematic Arctic GRIB processing.

    Use this function when you encounter ECCODES polar stereographic errors.

    Args:
        grib_file_path (str): Path to the Arctic GRIB file.

    Returns:
        dict: coordinates, data values, total_points, lat/lon ranges and an
        'extraction_method' tag of 'arctic_bypass'.

    Raises:
        Exception: re-raises whatever the extraction pipeline raised, after
        logging it.
    """
    try:
        print("šŸ”„ Attempting Arctic GRIB extraction due to ECCODES error...")

        # Extract all data points as a DataFrame.
        df = process_arctic_grib_safe(grib_file_path, 'dataframe')

        # Convert to the format the calling application expects.
        val_col = _value_column(df)
        result = {
            'coordinates': df[['latitude', 'longitude']].values.tolist(),
            'data': df[val_col].values.tolist(),
            'total_points': len(df),
            'lat_range': [df['latitude'].min(), df['latitude'].max()],
            'lon_range': [df['longitude'].min(), df['longitude'].max()],
            'extraction_method': 'arctic_bypass'
        }
        return result

    except Exception as e:
        # Log for visibility, then let the caller decide how to recover.
        print(f"āŒ Arctic GRIB handler failed: {e}")
        raise


def integrate_with_wave_puller():
    """
    Print an example of how to wire the Arctic handler into an existing
    wave-puller's error path.
    """
    sample_code = '''
# In your grib_wave_puller, replace the problematic Arctic processing with:

def process_arctic_region(grib_file):
    """Modified Arctic processing that handles polar stereographic errors"""
    try:
        # Your original processing code here
        # ... existing Arctic processing ...
        pass
    except Exception as e:
        if "Polar stereographic" in str(e) or "spherical earth" in str(e):
            print("ECCODES polar stereographic error detected, using alternative extraction...")
            # Use our Arctic handler
            from arctic_integration import arctic_grib_handler
            result = arctic_grib_handler(grib_file)
            print(f"Extracted {result['total_points']} Arctic data points")
            return result
        else:
            raise  # Re-raise if it's a different error
'''
    print("Integration example:")
    print(sample_code)


def batch_process_arctic_files(grib_directory, output_directory=None):
    """
    Batch process multiple Arctic GRIB files.

    Args:
        grib_directory (str): Directory scanned (non-recursively) for *.grib2.
        output_directory (str, optional): Destination for extracted CSVs;
            defaults to *grib_directory*.

    Returns:
        list[dict]: one record per input file with 'source', 'output',
        'status' and (on failure) 'error' keys.
    """
    if output_directory is None:
        output_directory = grib_directory

    grib_files = list(Path(grib_directory).glob("*.grib2"))
    print(f"Found {len(grib_files)} GRIB files to process")

    results = []
    for grib_file in grib_files:
        print(f"\nšŸ“ Processing: {grib_file.name}")
        try:
            output_file = os.path.join(output_directory, f"{grib_file.stem}_extracted.csv")
            csv_file = process_arctic_grib_safe(str(grib_file), 'csv')

            # Move to output directory if different
            if output_directory != grib_directory:
                shutil.move(csv_file, output_file)
                csv_file = output_file

            results.append({
                'source': str(grib_file),
                'output': csv_file,
                'status': 'success'
            })
        except Exception as e:
            # Keep going: one bad file must not abort the batch.
            print(f"āŒ Failed to process {grib_file.name}: {e}")
            results.append({
                'source': str(grib_file),
                'output': None,
                'status': 'failed',
                'error': str(e)
            })

    # Summary
    successful = sum(1 for r in results if r['status'] == 'success')
    print(f"\nšŸ“Š Batch processing complete:")
    print(f"   āœ… Successful: {successful}")
    print(f"   āŒ Failed: {len(results) - successful}")

    return results


if __name__ == "__main__":
    # Command line usage
    if len(sys.argv) > 1:
        grib_file = sys.argv[1]
        output_format = sys.argv[2] if len(sys.argv) > 2 else 'csv'

        try:
            result = process_arctic_grib_safe(grib_file, output_format)
            print(f"šŸŽ‰ Success! Result: {result}")
        except Exception as e:
            print(f"āŒ Error: {e}")
            sys.exit(1)
    else:
        print("Usage:")
        print("  python arctic_integration.py <grib_file> [output_format]")
        print("  output_format: 'dataframe', 'csv', or 'netcdf'")
        print("")
        print("Example:")
        print("  python arctic_integration.py /tmp/tmp0cvj_act.grib2 csv")

        # Show integration example
        integrate_with_wave_puller()