"""
Direct executor that converts MySQL to KWDB and executes in memory without file I/O.
"""
from __future__ import annotations

import logging
import time
from pathlib import Path
from typing import Optional, Dict
import re

from .config import MigratorConfig
from .pipeline import run_pipeline
from .db_executor import KWDBExecutor

logger = logging.getLogger(__name__)


class DirectMigrationExecutor:
    """Executor that converts MySQL SQL and executes directly without intermediate files."""
    
    def __init__(
        self,
        connection_string: Optional[str] = None,
        host: Optional[str] = None,
        port: Optional[int] = None,
        user: Optional[str] = None,
        password: Optional[str] = None,
        database: Optional[str] = None,
        on_error: str = "stop",
    ):
        """
        Set up a direct migration executor backed by a KWDB connection.

        Args:
            connection_string: PostgreSQL connection string (takes precedence)
            host: Database host
            port: Database port
            user: Database user
            password: Database password
            database: Database name
            on_error: Error handling mode ("stop" or "skip")
        """
        # Collect the connection parameters once and hand them to the
        # executor in a single unpacked call.
        connection_kwargs = {
            "connection_string": connection_string,
            "host": host,
            "port": port,
            "user": user,
            "password": password,
            "database": database,
        }
        self.db_executor = KWDBExecutor(**connection_kwargs)
        self.on_error = on_error
        # Tracks whether a rollback has already been performed for this run.
        self._rollback_executed = False
    
    def execute_sql_text(self, sql_text: str, description: str = "") -> tuple[float, int]:
        """
        Execute SQL text directly (from memory).
        
        Args:
            sql_text: SQL text to execute
            description: Description for logging
            
        Returns:
            Tuple of (execution_time, statement_count)
        """
        start_time = time.time()
        
        # For INSERT statements (DML), split by ');' to avoid issues with semicolons in string values
        # For other statements (DDL, constraints), split by ';'
        is_dml = "INSERT" in description.upper() or "DATA" in description.upper()
        
        if is_dml:
            # Split by ');' to better align with multi-row INSERT endings
            statements = []
            for part in sql_text.split(');'):
                stmt = part.strip()
                if not stmt:
                    continue
                # Add back ');' for INSERT statements
                if stmt.upper().startswith('INSERT'):
                    statements.append(stmt + ');')
                else:
                    # Handle any non-INSERT statements that might be mixed in
                    # Split by ';' for these
                    for sub_stmt in stmt.split(';'):
                        sub_stmt = sub_stmt.strip()
                        if sub_stmt:
                            statements.append(sub_stmt + ';')
        else:
            # Split by semicolon for DDL and other statements
            statements = [stmt.strip() + ';' for stmt in sql_text.split(';') if stmt.strip()]
        
        executed_count = 0
        for stmt in statements:
            try:
                self.db_executor.execute_sql(stmt)
                executed_count += 1
            except Exception as e:
                logger.error(f"Error executing statement: {e}")
                logger.error(f"Statement: {stmt[:200]}...")
                if self.on_error == "stop":
                    raise
                # If on_error is "skip", continue to next statement
        
        elapsed_time = time.time() - start_time
        
        if description:
            logger.info(f"✓ {description}: executed {executed_count} statements in {elapsed_time:.2f}s")
        
        return elapsed_time, executed_count
    
    def execute_sql_file_streaming(self, sql_file_path: Path | str, description: str = "", chunk_size: int = 1024 * 1024) -> tuple[float, int]:
        """
        Execute SQL file in streaming mode (line by line) to avoid loading entire file into memory.
        
        Args:
            sql_file_path: Path to SQL file
            description: Description for logging
            chunk_size: Size of chunks to read from file (default 1MB)
            
        Returns:
            Tuple of (execution_time, statement_count)
        """
        start_time = time.time()
        sql_file_path = Path(sql_file_path)
        
        if not sql_file_path.exists():
            raise FileNotFoundError(f"SQL file not found: {sql_file_path}")
        
        executed_count = 0
        buffer = ""
        
        # For INSERT statements (DML), split by ');' to avoid issues with semicolons in string values
        # For other statements (DDL, constraints), split by ';'
        is_dml = "INSERT" in description.upper() or "DATA" in description.upper()
        
        with open(sql_file_path, 'r', encoding='utf-8') as f:
            while True:
                chunk = f.read(chunk_size)
                if not chunk:
                    break
                
                buffer += chunk
                
                # Process complete statements
                while True:
                    if is_dml:
                        # For INSERT statements, look for ');' as statement terminator
                        end_pos = buffer.find(');')
                        if end_pos == -1:
                            # Check if buffer is too large (might indicate a very long statement)
                            if len(buffer) > 10 * 1024 * 1024:  # 10MB buffer limit
                                # Try to find last complete statement
                                last_split = buffer.rfind(');')
                                if last_split > 0:
                                    buffer = buffer[last_split + 2:]
                                else:
                                    # No complete statement found, clear buffer to avoid OOM
                                    logger.warning(f"Very long statement detected (>10MB), truncating buffer")
                                    buffer = ""
                            break
                        
                        stmt = buffer[:end_pos + 2].strip()
                        buffer = buffer[end_pos + 2:]
                    else:
                        # For DDL/constraints, look for ';' as statement terminator
                        end_pos = buffer.find(';')
                        if end_pos == -1:
                            if len(buffer) > 10 * 1024 * 1024:  # 10MB buffer limit
                                last_split = buffer.rfind(';')
                                if last_split > 0:
                                    buffer = buffer[last_split + 1:]
                                else:
                                    buffer = ""
                            break
                        
                        stmt = buffer[:end_pos + 1].strip()
                        buffer = buffer[end_pos + 1:]
                    
                    if not stmt:
                        continue
                    
                    # Execute statement
                    try:
                        self.db_executor.execute_sql(stmt)
                        executed_count += 1
                    except Exception as e:
                        logger.error(f"Error executing statement: {e}")
                        logger.error(f"Statement: {stmt[:200]}...")
                        if self.on_error == "stop":
                            raise
                        # If on_error is "skip", continue to next statement
            
            # Process any remaining buffer
            if buffer.strip():
                stmt = buffer.strip()
                if is_dml and not stmt.endswith(');'):
                    stmt += ');'
                elif not is_dml and not stmt.endswith(';'):
                    stmt += ';'
                
                try:
                    self.db_executor.execute_sql(stmt)
                    executed_count += 1
                except Exception as e:
                    logger.error(f"Error executing final statement: {e}")
                    logger.error(f"Statement: {stmt[:200]}...")
                    if self.on_error == "stop":
                        raise
        
        elapsed_time = time.time() - start_time
        
        if description:
            logger.info(f"✓ {description}: executed {executed_count} statements in {elapsed_time:.2f}s")
        
        return elapsed_time, executed_count
    
    def _log_diagnostic_errors(self) -> None:
        """
        Emit a summary of diagnostic errors collected by the DB executor.

        The executor may accumulate detailed diagnostics in a private
        ``_diagnostic_errors`` list; if present and non-empty, log them all
        and clear the list so the next run starts fresh.
        """
        if hasattr(self.db_executor, '_diagnostic_errors') and self.db_executor._diagnostic_errors:
            logger.info("\n" + "="*60)
            logger.info("Diagnostic Errors Summary")
            logger.info("="*60)
            for idx, diag in enumerate(self.db_executor._diagnostic_errors, 1):
                logger.error(f"\n[{idx}] {diag}")
            logger.info("="*60 + "\n")
            # Clear diagnostic errors after output
            self.db_executor._diagnostic_errors = []

    def execute_from_mysql_file(
        self,
        mysql_file: Path | str,
        dialect_read: str = "mysql",
    ) -> dict:
        """
        Convert MySQL file to KWDB and execute directly in memory.

        Runs four stages: conversion (via run_pipeline), schema DDL, data
        INSERTs (streamed through a temporary file when large), and
        constraints/indexes.

        Args:
            mysql_file: Path to MySQL dump file
            dialect_read: Input dialect (default: mysql)

        Returns:
            Dictionary with execution results and timings

        Raises:
            FileNotFoundError: if mysql_file does not exist
            RuntimeError: on conversion errors when on_error == "stop"
        """
        mysql_path = Path(mysql_file)
        if not mysql_path.exists():
            raise FileNotFoundError(f"MySQL file not found: {mysql_path}")
        
        total_start_time = time.time()
        
        results = {
            "conversion_time": 0.0,
            "schema_time": 0.0,
            "data_time": 0.0,
            "constraints_time": 0.0,
            "total_time": 0.0,
            "schema_statements": 0,
            "data_statements": 0,
            "constraints_statements": 0,
            "conversion_warnings": [],
            "errors": [],
        }
        
        # Clear diagnostic errors from previous runs
        if hasattr(self.db_executor, '_diagnostic_errors'):
            self.db_executor._diagnostic_errors = []
        
        try:
            # Stage 0: Convert MySQL to KWDB (in memory)
            logger.info("="*60)
            logger.info("Stage 0: Converting MySQL to KWDB SQL")
            logger.info("="*60)
            
            conversion_start = time.time()
            
            # Create config for conversion
            cfg = MigratorConfig(
                input_sql_path=mysql_path,
                output_sql_path=None,  # No output file
                on_error=self.on_error,
                dialect_read=dialect_read,
            )
            
            # Run conversion pipeline
            pipeline_result = run_pipeline(cfg)
            
            results["conversion_time"] = time.time() - conversion_start
            results["conversion_warnings"] = pipeline_result.warnings
            
            if pipeline_result.errors:
                results["errors"].extend(pipeline_result.errors)
                if self.on_error == "stop":
                    raise RuntimeError(f"Conversion errors: {pipeline_result.errors}")
            
            logger.info(f"✓ Conversion completed in {results['conversion_time']:.2f}s")
            logger.info(f"  - Schema statements: {len([s for s in pipeline_result.ddl_sql.split(';') if s.strip()])}")
            logger.info(f"  - Data statements: {len([s for s in pipeline_result.dml_sql.split(');') if s.strip() and s.strip().upper().startswith('INSERT')])}")
            logger.info(f"  - Constraint statements: {len([s for s in pipeline_result.constraints_sql.split(';') if s.strip()])}")
            
            if pipeline_result.warnings:
                logger.warning(f"Conversion warnings: {len(pipeline_result.warnings)}")
                for warning in pipeline_result.warnings[:5]:  # Show first 5
                    logger.warning(f"  - {warning}")
                if len(pipeline_result.warnings) > 5:
                    logger.warning(f"  ... and {len(pipeline_result.warnings) - 5} more warnings")
            
            # Stage 1: Execute Schema (DDL)
            logger.info("\n" + "="*60)
            logger.info("Stage 1: Executing Schema (DDL)")
            logger.info("="*60)
            
            if pipeline_result.ddl_sql.strip():
                results["schema_time"], results["schema_statements"] = self.execute_sql_text(
                    pipeline_result.ddl_sql,
                    "Schema (DDL)"
                )
            else:
                logger.warning("No schema SQL to execute")
            
            # Stage 2: Execute Data (INSERT statements)
            logger.info("\n" + "="*60)
            logger.info("Stage 2: Executing Data (INSERT statements)")
            logger.info("="*60)
            
            # Check if pipeline already created a temporary file for INSERT statements
            # (happens when file is large > 100MB)
            temp_insert_file = None
            use_file_for_inserts = False
            
            if pipeline_result.dml_file_path and pipeline_result.dml_file_path.exists():
                # Pipeline already created a temporary file for streaming
                temp_insert_file = pipeline_result.dml_file_path
                use_file_for_inserts = True
                logger.info(f"Using pipeline-created INSERT file: {temp_insert_file} ({temp_insert_file.stat().st_size / 1024 / 1024:.1f}MB)")
            elif pipeline_result.dml_sql and pipeline_result.dml_sql.strip():
                # Write insert_sql to temporary file for streaming processing
                # This avoids loading large INSERT statements into memory
                import tempfile
                temp_insert_fd, temp_insert_path = tempfile.mkstemp(suffix='.sql', prefix='inserts_', text=True)
                temp_insert_file = Path(temp_insert_path)
                try:
                    # open() takes ownership of the fd and closes it on exit
                    with open(temp_insert_fd, 'w', encoding='utf-8') as f:
                        f.write(pipeline_result.dml_sql)
                    use_file_for_inserts = True
                    logger.info(f"Created temporary INSERT file: {temp_insert_file} ({len(pipeline_result.dml_sql) / 1024 / 1024:.1f}MB)")
                except Exception as e:
                    logger.error(f"Error: Failed to create temporary INSERT file: {e}")
                    if temp_insert_file and temp_insert_file.exists():
                        try:
                            temp_insert_file.unlink()
                        except Exception:
                            pass
                    temp_insert_file = None
                    use_file_for_inserts = False
            
            try:
                if use_file_for_inserts and temp_insert_file:
                    # Use streaming execution for file-based INSERT statements
                    results["data_time"], results["data_statements"] = self.execute_sql_file_streaming(
                        temp_insert_file,
                        "Data (INSERT)"
                    )
                elif pipeline_result.dml_sql and pipeline_result.dml_sql.strip():
                    # Fallback: Use in-memory execution for small files
                    results["data_time"], data_stmt_count = self.execute_sql_text(
                        pipeline_result.dml_sql,
                        "Data (INSERT)"
                    )
                    # Count only INSERT statements (split by ');' to match actual statement endings)
                    results["data_statements"] = len(
                        [s for s in pipeline_result.dml_sql.split(');') 
                         if s.strip() and s.strip().upper().startswith('INSERT')]
                    )
                else:
                    logger.warning("No data SQL to execute")
                    results["data_time"] = 0.0
                    results["data_statements"] = 0
            finally:
                # Clean up temporary file
                # We clean up both our created files and pipeline-created files since
                # this is a direct execution mode and the file is no longer needed after execution
                if temp_insert_file and temp_insert_file.exists():
                    try:
                        temp_insert_file.unlink()
                        logger.debug(f"Cleaned up temporary INSERT file: {temp_insert_file}")
                    except Exception as e:
                        logger.warning(f"Failed to clean up temporary INSERT file {temp_insert_file}: {e}")
            
            # Stage 3: Execute Constraints (Indexes and Constraints)
            logger.info("\n" + "="*60)
            logger.info("Stage 3: Executing Constraints (Indexes and Constraints)")
            logger.info("="*60)
            
            if pipeline_result.constraints_sql.strip():
                results["constraints_time"], results["constraints_statements"] = self.execute_sql_text(
                    pipeline_result.constraints_sql,
                    "Constraints (Indexes)"
                )
            else:
                logger.warning("No constraints SQL to execute")
            
            results["total_time"] = time.time() - total_start_time
            
            # Print summary
            logger.info("\n" + "="*60)
            logger.info("Execution Summary:")
            logger.info("="*60)
            logger.info(f"Conversion:       {results['conversion_time']:.2f}s")
            logger.info(f"Schema (DDL):     {results['schema_time']:.2f}s ({results['schema_statements']} statements)")
            logger.info(f"Data (INSERT):    {results['data_time']:.2f}s ({results['data_statements']} statements)")
            logger.info(f"Constraints:      {results['constraints_time']:.2f}s ({results['constraints_statements']} statements)")
            logger.info(f"{'-'*60}")
            logger.info(f"Total Time:       {results['total_time']:.2f}s")
            logger.info("="*60)
            
        except Exception as e:
            logger.error(f"Migration failed: {e}", exc_info=True)
            results["errors"].append(str(e))
            results["total_time"] = time.time() - total_start_time
            # Output diagnostic errors before raising
            self._log_diagnostic_errors()
            raise
        
        # Output all diagnostic errors at the end
        self._log_diagnostic_errors()
        
        return results
    
    def close(self):
        """Close database connections."""
        if self.db_executor:
            self.db_executor.close()





