import os
import pytest
import tempfile
from pathlib import Path

from sqlmigration.transformer import transform_statements_split
from sqlmigration.parser import iter_parse_sql_text
from sqlmigration.kafka_controller import KafkaMigrationController
from sqlmigration.validation import (
    parse_mysql_source,
    validate_counts_against_source,
    validate_structure_against_source,
    render_report_text,
)


def _sep(title: str, start: bool):
    print("\n" + "-" * 72)
    print(f"{title} — {'START' if start else 'END'}")
    print("-" * 72)


def _require_env(var_name: str) -> str:
    val = os.environ.get(var_name)
    if not val:
        pytest.skip(f"Missing required environment variable: {var_name}")
    return val


def _split(mysql_sql: str):
    """Parse MySQL SQL text and split the statements into DDL/DML/constraint groups.

    Parse failures (entries whose ``ast`` is None) are silently dropped
    before handing the ASTs to the transformer.
    """
    parsed_asts = []
    for parsed in iter_parse_sql_text(mysql_sql, read_dialect="mysql"):
        if parsed.ast is not None:
            parsed_asts.append(parsed.ast)
    return transform_statements_split(parsed_asts)


def test_e2e_kwdb_kafka():
    """End-to-end test: migrate a minimal MySQL schema into KWDB via Kafka.

    Requires the ``KWDB_DSN`` and ``KAFKA_BOOTSTRAP`` environment variables;
    the test is skipped when either is missing or Kafka is unreachable.

    Flow: transform the MySQL source, pre-clean the target schema,
    run the Kafka-based migration controller (DDL + streamed INSERTs +
    constraints), then validate row counts against the parsed source.
    """
    _sep("[E2E] KWDB+Kafka 集成", True)
    dsn = _require_env("KWDB_DSN")  # e.g., postgresql://user:pass@host:port/db
    bootstrap = _require_env("KAFKA_BOOTSTRAP")  # e.g., host:9092
    topic_prefix = os.environ.get("TOPIC_PREFIX", "sql_migration_insert_native")

    # Preflight: check Kafka availability, otherwise skip to avoid hard failure during producer init
    try:
        from kafka import KafkaAdminClient
        _adm = KafkaAdminClient(bootstrap_servers=bootstrap, client_id="e2e_native_probe")
        _ = _adm.list_topics()
    except Exception as e:
        pytest.skip(f"Kafka not available at {bootstrap}: {e}")

    # Minimal MySQL source with constraints and data.
    # NOTE(fix): t2 previously had a trailing comma after its last column
    # ("pid int,"), which is invalid MySQL DDL and could make the parser
    # drop or mangle the statement; removed so the input is well-formed.
    mysql_src = (
        "CREATE TABLE `t1`(\n"
        "  id int NOT NULL AUTO_INCREMENT,\n"
        "  v  int,\n"
        "  PRIMARY KEY (id),\n"
        "  UNIQUE KEY uq_v (v)\n"
        ");\n"
        "CREATE TABLE `t2`(\n"
        "  id int PRIMARY KEY,\n"
        "  pid int\n"
        ");\n"
        "INSERT INTO t1 VALUES (1, 10), (2, 20);\n"
        "INSERT INTO t2 VALUES (1, 1);\n"
    )

    # Show original MySQL source
    print("[ORIGIN] MySQL source SQL:\n" + mysql_src.strip())

    # Transform
    split = _split(mysql_src)
    print("[INPUT] MySQL source DDL/DML size:", len(split.ddl_sql), len(split.dml_sql), len(split.constraints_sql))
    print("[CONVERT] Syntax conversion completed")
    print("[DDL] Converted DDL:\n" + split.ddl_sql.strip())
    print("[CONS] Converted Constraints:\n" + (split.constraints_sql.strip() or "<empty>"))

    # Pre-clean target schema to ensure idempotent runs
    try:
        from sqlmigration.db_executor import KWDBExecutor
        _cleanup_exec = KWDBExecutor(connection_string=dsn, min_connections=1, max_connections=2)
        for stmt in [
            'DROP TABLE IF EXISTS "t2" CASCADE',
            'DROP TABLE IF EXISTS "t1" CASCADE',
            'DROP SEQUENCE IF EXISTS t1_id_seq',
        ]:
            try:
                _cleanup_exec.execute_sql(stmt + ';')
            except Exception:
                pass
        _cleanup_exec.close()
    except Exception:
        # best-effort cleanup; continue even if it fails
        pass

    # If the DML split is empty, fall back to extracting INSERT lines from the raw text.
    # Note: transform_statements_split may classify INSERTs as another statement
    # type, so this manual extraction acts as a safety net.
    if not split.dml_sql.strip():
        inserts = []
        for line in mysql_src.splitlines():
            if line.strip().upper().startswith("INSERT INTO"):
                inserts.append(line if line.strip().endswith(';') else (line + ';'))
        split.dml_sql = "\n".join(inserts) + ("\n" if inserts else "")
    print("[DML] Final DML (from transformer or fallback):\n" + (split.dml_sql.strip() or "<empty>"))

    # Run migration via Kafka controller (native connections)
    ctrl = KafkaMigrationController(
        db_connection_string=dsn,
        kafka_servers=[bootstrap],
        kafka_topic_prefix=topic_prefix,
        max_message_size=64 * 1024 * 1024,
        topic_partitions=3,
        consumers_per_table=2,
        db_min_connections=2,
        db_max_connections=10,
    )

    # Create temporary file for INSERT statements to test streaming processing
    # This matches the current Kafka streaming logic that processes from file
    insert_file_path = None
    try:
        print("[MIGRATE] Starting database migration (DDL + Kafka INSERT + INDEX)...")

        # Write INSERT statements to temporary file for streaming processing
        if split.dml_sql.strip():
            with tempfile.NamedTemporaryFile(mode='w', suffix='.sql', delete=False, encoding='utf-8') as f:
                f.write(split.dml_sql)
                insert_file_path = Path(f.name)
            print(f"[MIGRATE] Created temporary INSERT file: {insert_file_path}")

        # Execute the full migration flow: DDL + INSERT (via file streaming) + constraints/indexes.
        # insert_file_path is passed so the controller uses its streaming code path.
        results = ctrl.execute_migration_from_sql(
            ddl_sql=split.ddl_sql,
            insert_sql=split.dml_sql,  # Fallback if file doesn't exist
            constraints_sql=split.constraints_sql,
            insert_file_path=insert_file_path,  # Use file for streaming processing
        )
        print("[RESULT] Migration flags:", {k: results.get(k) for k in ("ddl_executed", "inserts_completed", "constraints_executed")})
        assert results.get("ddl_executed") is True
        assert results.get("inserts_completed") is True
        assert results.get("constraints_executed") is True, "Constraints/Indexes should be executed"
        print("[MIGRATE] Migration completed successfully")

        # Validate vs MySQL source using a fresh executor (controller may have cleaned pool)
        from sqlmigration.db_executor import KWDBExecutor
        src_meta = parse_mysql_source(mysql_src)
        fresh_exec = KWDBExecutor(connection_string=dsn, min_connections=1, max_connections=2)
        rpt_counts = validate_counts_against_source(fresh_exec, src_meta)
        report = render_report_text([rpt_counts])
        print("[RESULT] Validation Report:\n" + report)
        assert rpt_counts.ok is True
    finally:
        # Clean up temporary INSERT file
        if insert_file_path and insert_file_path.exists():
            try:
                insert_file_path.unlink()
                print(f"[CLEANUP] Deleted temporary INSERT file: {insert_file_path}")
            except Exception as e:
                print(f"[CLEANUP] Warning: Failed to delete temporary file {insert_file_path}: {e}")

        # Ensure resources are closed even if the test fails midway
        try:
            ctrl.close()
        except Exception:
            pass

    _sep("[E2E] KWDB+Kafka 集成", False)


