#!/usr/bin/env python3
"""
Analyze GSI diagnostic binary file structure.
This script reverse-engineers the Fortran binary format.
"""

import struct
import sys
import numpy as np
from collections import defaultdict

def read_fortran_record(f):
    """Read one Fortran sequential unformatted record from *f*.

    Fortran sequential files frame each record with a 4-byte length
    marker before AND after the payload; this reader assumes big-endian
    markers (matching the GSI build this file targets).

    Parameters
    ----------
    f : binary file object positioned at the start of a record.

    Returns
    -------
    bytes or None
        The record payload, or None on clean EOF, a truncated record,
        or a corrupt (negative) length marker.
    """
    # Read opening record marker
    marker_bytes = f.read(4)
    if len(marker_bytes) < 4:
        return None  # clean EOF (or trailing junk shorter than a marker)

    marker = struct.unpack('>i', marker_bytes)[0]  # Big-endian int32

    # A negative length cannot occur in a well-formed file; treat as corrupt
    # rather than passing it to f.read() (which would read to EOF for -1).
    if marker < 0:
        return None

    # Read data; detect truncation explicitly instead of relying on the
    # trailing-marker read to fail.
    data = f.read(marker)
    if len(data) < marker:
        return None

    # Read closing record marker
    marker2_bytes = f.read(4)
    if len(marker2_bytes) < 4:
        return None

    marker2 = struct.unpack('>i', marker2_bytes)[0]

    if marker != marker2:
        print(f"Warning: Record markers don't match: {marker} != {marker2}")

    return data

def analyze_header(filename):
    """Analyze the header record of GSI diagnostic file.

    Prints the header size plus several candidate decodings (hex, ASCII,
    raw chars), then peeks at the second record interpreted as both
    int32 and float32 values.  Purely diagnostic; returns None.
    """
    with open(filename, 'rb') as f:
        # Read first record (header)
        header_data = read_fortran_record(f)

        if header_data is None:
            print("Failed to read header")
            return

        print(f"Header size: {len(header_data)} bytes")
        print(f"Header hex: {header_data[:100].hex()}")

        # Decode header as text.  With errors='ignore' this never raises,
        # so the original bare `try/except: pass` was dead code.
        header_str = header_data.decode('ascii', errors='ignore')
        print(f"Header ASCII: {header_str[:80]}")

        # Look for recognizable patterns
        # Common GSI pattern: variable name (char*10)
        print(f"\nFirst 38 bytes as chars: {repr(header_data[:38])}")

        # Read second record (grid info or first obs?)
        second_data = read_fortran_record(f)
        if second_data:
            print(f"\nSecond record size: {len(second_data)} bytes")

            # We don't know a priori whether the record holds ints or
            # floats, so show the leading 16 bytes both ways.
            if len(second_data) >= 16:
                ivals = struct.unpack('>4i', second_data[:16])
                print(f"First 4 int32 values: {ivals}")
                fvals = struct.unpack('>4f', second_data[:16])
                print(f"First 4 float32 values: {fvals}")

def count_observations(filename):
    """Count observation records in file.

    Returns a ``(count, sizes)`` tuple: the number of records after the
    header, and the payload size of each.  Returns ``(0, [])`` when even
    the header cannot be read.
    """
    sizes = []

    with open(filename, 'rb') as f:
        # The first record is the header; it is read only to advance
        # past it.
        if read_fortran_record(f) is None:
            return 0, []

        # Consume every remaining record, collecting payload sizes.
        while (record := read_fortran_record(f)) is not None:
            sizes.append(len(record))

    return len(sizes), sizes

def analyze_observation_record(filename, record_num=2):
    """Analyze a specific observation record.

    Reads ``record_num`` records from the start of the file (the first
    is the header) and dumps the last one read under several candidate
    interpretations: float64 array, float32 array, a common GSI
    8-double layout, and printable-ASCII chunks (to spot station IDs).

    Parameters
    ----------
    filename : path to the diagnostic file.
    record_num : number of records to read; the last one is analyzed.
        The default of 2 shows the first record after the header.
    """
    with open(filename, 'rb') as f:
        # Bug fix: `data` was unbound when record_num <= 0, making the
        # guard below raise NameError instead of returning quietly.
        data = None

        # Skip to desired record
        for i in range(record_num):
            data = read_fortran_record(f)
            if data is None:
                print(f"Could not reach record {record_num}")
                return

        if data is None:
            return

        print(f"\nObservation record {record_num}:")
        print(f"Size: {len(data)} bytes")

        # Try different interpretations
        print("\nAs doubles (8 bytes each):")
        n_doubles = min(10, len(data) // 8)
        doubles = struct.unpack(f'>{n_doubles}d', data[:n_doubles*8])
        for i, val in enumerate(doubles):
            print(f"  [{i}] {val:20.6f}")

        print("\nAs floats (4 bytes each):")
        n_floats = min(20, len(data) // 4)
        floats = struct.unpack(f'>{n_floats}f', data[:n_floats*4])
        for i, val in enumerate(floats):
            print(f"  [{i}] {val:15.6f}")

        print("\nMixed interpretation (common GSI pattern):")
        # Typical pattern: lat(8), lon(8), pres(8), obs(8), err(8), etc.
        # NOTE(review): the field labels are guesses at the layout --
        # confirm against the GSI setup/diag writer before relying on them.
        if len(data) >= 64:
            values = struct.unpack('>8d', data[:64])
            print(f"  Lat:  {values[0]:10.4f}")
            print(f"  Lon:  {values[1]:10.4f}")
            print(f"  Pres: {values[2]:10.4f}")
            print(f"  Val1: {values[3]:10.4f}")
            print(f"  Val2: {values[4]:10.4f}")
            print(f"  Val3: {values[5]:10.4f}")
            print(f"  Val4: {values[6]:10.4f}")
            print(f"  Val5: {values[7]:10.4f}")

        # Look for string data (station ID): show 20-byte windows with
        # non-printable bytes rendered as '.'.
        print("\nString data:")
        for i in range(0, min(len(data), 200), 20):
            chunk = data[i:i+20]
            ascii_str = ''.join(chr(b) if 32 <= b < 127 else '.' for b in chunk)
            print(f"  [{i:3d}] {ascii_str}")

def main():
    """Run the full diagnostic-file analysis and print a report.

    The target file may be given as the first command-line argument;
    otherwise the historical hard-coded path is used (backward
    compatible with the original script).
    """
    import os
    from collections import Counter

    default_path = "/home/linden/comGSI/run/job/basic/diag_obs_setup_anl.2018081212"
    filename = sys.argv[1] if len(sys.argv) > 1 else default_path

    print("="*80)
    print("GSI Diagnostic Binary File Analysis")
    print("="*80)
    # Bug fix: this previously printed the literal text "(unknown)"
    # instead of interpolating the filename.
    print(f"File: {filename}\n")

    # Basic file info
    size = os.path.getsize(filename)
    print(f"File size: {size:,} bytes\n")

    # Analyze header
    print("="*80)
    print("HEADER ANALYSIS")
    print("="*80)
    analyze_header(filename)

    # Count observations
    print("\n" + "="*80)
    print("OBSERVATION COUNT")
    print("="*80)
    obs_count, record_sizes = count_observations(filename)
    print(f"Total records (excluding header): {obs_count}")

    if record_sizes:
        size_counts = Counter(record_sizes)
        print(f"\nRecord size distribution:")
        # `rec_size` (not `size`) so the file-size variable above isn't
        # shadowed inside the loop.
        for rec_size, count in sorted(size_counts.items()):
            print(f"  {rec_size:4d} bytes: {count:5d} records")

        print(f"\nMost common record size: {max(size_counts, key=size_counts.get)} bytes")

    # Analyze sample observation record
    print("\n" + "="*80)
    print("SAMPLE OBSERVATION RECORD")
    print("="*80)
    analyze_observation_record(filename, record_num=2)

# Run the analysis only when executed as a script (not on import).
if __name__ == '__main__':
    main()
