#!/usr/bin/env python3
"""
Database Migration Script: Add directory_id foreign key to metrics table
Migrates from file_path string to directory_id + filename structure
"""

import sqlite3
import sys
import os
from pathlib import Path
from typing import Dict, List, Tuple
import re

def main():
    """Migrate uma_insight.db from a flat ``metrics.file_path`` string to a
    normalized ``directory_id`` + ``filename`` structure.

    Steps:
      1. Create a ``directories_v2`` table (self-referential parent_id tree).
      2-3. Extract every unique directory path from ``metrics.file_path`` and
           insert them parent-first so ``parent_id`` can be resolved in memory.
      4-5. Add ``directory_id``/``filename`` columns to ``metrics`` and
           back-fill them from each row's ``file_path``.
      6. Rebuild ``metrics`` with the final schema (FK to directories) and
         copy the data across.
      7-8. Swap ``directories_v2`` in as ``directories`` and drop the backup.
      9. Run sanity checks before committing.

    Exits with status 1 if the database file is missing or the migration
    raises; on failure any uncommitted work is rolled back.
    """
    db_path = Path(__file__).parent.parent / "data" / "uma_insight.db"

    if not db_path.exists():
        print(f"❌ Database not found: {db_path}")
        sys.exit(1)

    print("🚀 Starting database migration to directory_id architecture...")

    # Initialize to None so the except-handler below can tell whether a
    # connection was ever established (connect() itself may raise).
    conn = None
    try:
        conn = sqlite3.connect(str(db_path))
        # FK enforcement is disabled while tables are dropped/renamed so the
        # intermediate states don't trip constraint checks.
        conn.execute("PRAGMA foreign_keys = OFF")
        cursor = conn.cursor()

        # Step 1: Create directories_v2 table with improved structure
        print("\n📋 Step 1: Creating directories_v2 table...")
        cursor.execute("""
            CREATE TABLE IF NOT EXISTS directories_v2 (
                id INTEGER PRIMARY KEY AUTOINCREMENT,
                path TEXT NOT NULL UNIQUE,
                name TEXT NOT NULL,
                parent_id INTEGER,
                level INTEGER DEFAULT 0,
                team_id TEXT,
                assignment_confidence INTEGER DEFAULT 0,
                assigned_at DATETIME,
                assignment_method TEXT,
                created_at DATETIME DEFAULT CURRENT_TIMESTAMP,
                updated_at DATETIME DEFAULT CURRENT_TIMESTAMP,
                FOREIGN KEY (parent_id) REFERENCES directories_v2(id) ON DELETE SET NULL,
                FOREIGN KEY (team_id) REFERENCES teams(id) ON DELETE SET NULL
            )
        """)

        # Create indexes
        cursor.execute("CREATE INDEX IF NOT EXISTS idx_directories_v2_parent_id ON directories_v2(parent_id)")
        cursor.execute("CREATE INDEX IF NOT EXISTS idx_directories_v2_path ON directories_v2(path)")
        cursor.execute("CREATE INDEX IF NOT EXISTS idx_directories_v2_team_id ON directories_v2(team_id)")

        # Step 2: Extract unique directories from metrics.file_path
        # (informational only — the full hierarchy is rebuilt in Step 3)
        print("\n📂 Step 2: Extracting directories from metrics...")
        cursor.execute("""
            SELECT DISTINCT
                CASE
                    WHEN instr(file_path, '/') > 0
                    THEN substr(file_path, 1, instr(file_path, '/') - 1)
                    ELSE file_path
                END as directory_path
            FROM metrics
            WHERE file_path IS NOT NULL AND file_path != ''
        """)

        directories = cursor.fetchall()
        print(f"   Found {len(directories)} unique root directories")

        # Step 3: Build directory hierarchy
        print("\n🌳 Step 3: Building directory hierarchy...")

        # Get all unique file paths to extract full directory structure
        cursor.execute("""
            SELECT DISTINCT file_path FROM metrics
            WHERE file_path IS NOT NULL AND file_path != ''
            ORDER BY file_path
        """)
        file_paths = [row[0] for row in cursor.fetchall()]

        # Extract all unique directory paths (including subdirectories).
        # For "a/b/c.xml" this adds "a" and "a/b"; a path with no '/' at all
        # contributes nothing (see the rootless-file note in Step 5).
        all_dirs = set()
        for file_path in file_paths:
            parts = file_path.split('/')
            for i in range(len(parts) - 1):  # Exclude the filename
                all_dirs.add('/'.join(parts[:i + 1]))

        print(f"   Found {len(all_dirs)} unique directory paths")

        # Sort directories by depth so parents are created before children,
        # guaranteeing dir_map already holds every parent_id we look up.
        sorted_dirs = sorted(all_dirs, key=lambda x: (x.count('/'), x))

        # Insert directories with parent relationships
        dir_map = {}  # Maps path -> directories_v2.id

        for dir_path in sorted_dirs:
            parts = dir_path.split('/')
            dir_name = parts[-1]

            # Find parent directory
            parent_path = '/'.join(parts[:-1]) if len(parts) > 1 else None
            parent_id = dir_map.get(parent_path) if parent_path else None
            # BUGFIX: was `parts.count('/') - 1`, which counted the literal
            # string '/' among the split components (always 0) and stored
            # level = -1 for every row. Depth is the component count minus 1,
            # so root directories get level 0.
            level = len(parts) - 1

            # Insert directory
            cursor.execute("""
                INSERT INTO directories_v2 (path, name, parent_id, level)
                VALUES (?, ?, ?, ?)
            """, (dir_path, dir_name, parent_id, level))

            dir_map[dir_path] = cursor.lastrowid

        print(f"   ✅ Inserted {len(dir_map)} directories into directories_v2")

        # Step 4: Add new columns to metrics table
        print("\n📝 Step 4: Adding new columns to metrics table...")
        try:
            cursor.execute("ALTER TABLE metrics ADD COLUMN directory_id INTEGER")
            print("   ✅ Added directory_id column")
        except sqlite3.OperationalError as e:
            # Re-running the migration is tolerated; any other ALTER failure
            # is fatal.
            if "duplicate column name" in str(e):
                print("   ⚠️ directory_id column already exists")
            else:
                raise

        try:
            cursor.execute("ALTER TABLE metrics ADD COLUMN filename TEXT")
            print("   ✅ Added filename column")
        except sqlite3.OperationalError as e:
            if "duplicate column name" in str(e):
                print("   ⚠️ filename column already exists")
            else:
                raise

        # Step 5: Update metrics with directory_id and filename
        print("\n🔄 Step 5: Updating metrics with directory_id and filename...")

        # Get all metrics
        cursor.execute("SELECT id, file_path FROM metrics WHERE file_path IS NOT NULL AND file_path != ''")
        metrics_to_update = cursor.fetchall()

        updated_count = 0
        error_count = 0

        for metric_id, file_path in metrics_to_update:
            try:
                # Extract directory path and filename
                parts = file_path.split('/')
                if len(parts) >= 2:
                    directory_path = '/'.join(parts[:-1])
                    filename = parts[-1]
                else:
                    # NOTE(review): a path with no '/' was never added to
                    # dir_map in Step 3, so this branch always lands in the
                    # "Directory not found" case below and the row keeps a
                    # NULL directory_id — which will violate the NOT NULL
                    # constraint when copied into the new table in Step 6.
                    # Preserved as-is; confirm no such rows exist upstream.
                    directory_path = file_path
                    filename = file_path

                # Get directory_id
                if directory_path in dir_map:
                    directory_id = dir_map[directory_path]

                    # Update metric
                    cursor.execute("""
                        UPDATE metrics
                        SET directory_id = ?, filename = ?
                        WHERE id = ?
                    """, (directory_id, filename, metric_id))

                    updated_count += 1
                else:
                    print(f"   ⚠️ Directory not found for: {file_path}")
                    error_count += 1

            except Exception as e:
                print(f"   ❌ Error updating metric {metric_id}: {e}")
                error_count += 1

        print(f"   ✅ Updated {updated_count} metrics")
        if error_count > 0:
            print(f"   ⚠️ {error_count} metrics had errors")

        # Step 6: Create metrics_v2 table (rename old metrics)
        print("\n📋 Step 6: Creating optimized metrics table...")

        # Backup original metrics table
        cursor.execute("ALTER TABLE metrics RENAME TO metrics_backup")

        # Create new optimized metrics table
        cursor.execute("""
            CREATE TABLE metrics (
                id INTEGER PRIMARY KEY AUTOINCREMENT,
                name TEXT NOT NULL UNIQUE,
                type TEXT NOT NULL CHECK(type IN ('histogram', 'enum', 'ukm_event', 'ukm_metric')),
                summary TEXT,
                units TEXT,
                expires_after TEXT,
                component TEXT,
                singular BOOLEAN DEFAULT FALSE,
                directory_id INTEGER NOT NULL,
                filename TEXT NOT NULL,
                line_number INTEGER,
                analysis_status TEXT DEFAULT 'NOT_ANALYZED',
                analysis_notes TEXT,
                analyzed_by TEXT,
                analyzed_at DATETIME,
                created_at DATETIME DEFAULT CURRENT_TIMESTAMP,
                updated_at DATETIME DEFAULT CURRENT_TIMESTAMP,
                FOREIGN KEY (directory_id) REFERENCES directories_v2(id) ON DELETE CASCADE
            )
        """)

        # Create indexes
        cursor.execute("CREATE INDEX idx_metrics_directory_id ON metrics(directory_id)")
        cursor.execute("CREATE INDEX idx_metrics_analysis_status ON metrics(analysis_status)")
        cursor.execute("CREATE INDEX idx_metrics_component ON metrics(component)")
        cursor.execute("CREATE UNIQUE INDEX idx_metrics_name ON metrics(name)")

        # Copy data from backup (ids preserved so external references hold)
        cursor.execute("""
            INSERT INTO metrics (
                id, name, type, summary, units, expires_after, component, singular,
                directory_id, filename, line_number, analysis_status, analysis_notes,
                analyzed_by, analyzed_at, created_at, updated_at
            )
            SELECT
                id, name, type, summary, units, expires_after, component, singular,
                directory_id, filename, line_number, analysis_status, analysis_notes,
                analyzed_by, analyzed_at, created_at, updated_at
            FROM metrics_backup
        """)

        print(f"   ✅ Created new metrics table with {cursor.rowcount} records")

        # Step 7: Replace old directories table
        print("\n🔄 Step 7: Replacing directories table...")
        cursor.execute("DROP TABLE IF EXISTS directories")
        cursor.execute("ALTER TABLE directories_v2 RENAME TO directories")

        # Step 8: Clean up
        print("\n🧹 Step 8: Cleaning up...")
        cursor.execute("DROP TABLE metrics_backup")

        # Step 9: Verify migration
        print("\n✅ Step 9: Verifying migration...")

        # Check metrics count
        cursor.execute("SELECT COUNT(*) FROM metrics")
        metrics_count = cursor.fetchone()[0]
        print(f"   Total metrics: {metrics_count}")

        # Check directories count
        cursor.execute("SELECT COUNT(*) FROM directories")
        directories_count = cursor.fetchone()[0]
        print(f"   Total directories: {directories_count}")

        # Check null directory_id
        cursor.execute("SELECT COUNT(*) FROM metrics WHERE directory_id IS NULL")
        null_directory_count = cursor.fetchone()[0]
        if null_directory_count > 0:
            print(f"   ⚠️ {null_directory_count} metrics have null directory_id")
        else:
            print("   ✅ All metrics have valid directory_id")

        # Check orphaned metrics
        cursor.execute("""
            SELECT COUNT(*) FROM metrics m
            LEFT JOIN directories d ON m.directory_id = d.id
            WHERE d.id IS NULL
        """)
        orphaned_count = cursor.fetchone()[0]
        if orphaned_count > 0:
            print(f"   ⚠️ {orphaned_count} metrics have invalid directory_id")
        else:
            print("   ✅ All metrics have valid directory references")

        # Test a sample query against a known directory
        cursor.execute("""
            SELECT d.path, COUNT(m.id) as metric_count
            FROM directories d
            LEFT JOIN metrics m ON d.id = m.directory_id
            WHERE d.path = 'tools/metrics/histograms/metadata/accessibility'
            GROUP BY d.id, d.path
        """)
        result = cursor.fetchone()
        if result:
            print(f"   ✅ Test query successful: accessibility directory has {result[1]} metrics")
        else:
            print("   ⚠️ Test query failed for accessibility directory")

        # Commit all changes
        conn.commit()

        print("\n🎉 Migration completed successfully!")
        print(f"   - Migrated {metrics_count} metrics")
        print(f"   - Created {directories_count} directories")
        print(f"   - Updated {updated_count} metrics with directory_id")

        # Enable foreign key checks
        conn.execute("PRAGMA foreign_keys = ON")

        conn.close()

    except Exception as e:
        print(f"\n❌ Migration failed: {e}")
        # BUGFIX: conn may never have been assigned if connect() raised;
        # the old code hit a NameError here and masked the real failure.
        if conn is not None:
            conn.rollback()
            conn.close()
        sys.exit(1)

# Run the migration only when executed as a script, not on import.
if __name__ == "__main__":
    main()