#!/usr/bin/env python3
"""
Flink Agents 集群模式 - 流式处理
使用 Table API 定义 Kinesis Source（Connector 5.0.0），转换为 DataStream 后应用 Agent

特点：
- ✅ 真正的流式处理（LATEST 模式，持续消费新数据）
- ✅ Flink 分布式执行（JobManager + TaskManager）
- ✅ 复用现有 BehaviorAnalysisAgent（Agent 代码完全不变）
- ✅ 官方推荐方案（Table API → DataStream）

使用场景：
- 生产环境实时用户行为分析
- 需要低延迟（秒级）处理
- 需要分布式并行能力
"""
import os
import logging
from pyflink.datastream import StreamExecutionEnvironment, RuntimeExecutionMode
from pyflink.table import StreamTableEnvironment
from pyflink.common import Configuration
from flink_agents.api.execution_environment import AgentsExecutionEnvironment

# 导入共享的 Agent 和 Sink
from agents.behavior_agent import BehaviorAnalysisAgent
from utils.postgres_sink import write_to_postgres

# Configure root logging: INFO level, with timestamp / logger name / level
# prefixed to every message so cluster-side logs are easy to correlate.
logging.basicConfig(
    level=logging.INFO,
    format='%(asctime)s - %(name)s - %(levelname)s - %(message)s'
)
# Module-level logger used by every step of the job below.
logger = logging.getLogger(__name__)


def _parse_jobmanager_address(address):
    """Split a ``host[:port]`` JobManager address into ``(host, port)``.

    The port defaults to ``'8081'`` (Flink's default REST port) when the
    address carries no colon. Both values are returned as strings because
    Flink's ``Configuration.set_string`` expects strings.
    """
    parts = address.split(':')
    return parts[0], (parts[1] if len(parts) > 1 else '8081')


def _log_banner():
    """Log the startup banner with the effective environment configuration."""
    logger.info("============================================================")
    logger.info("Flink Agents - 集群模式 (Streaming via Table API)")
    logger.info("============================================================")
    logger.info("运行模式: STREAMING（持续流式处理）")
    logger.info(f"Flink JobManager: {os.getenv('FLINK_JOBMANAGER', 'flink-jobmanager:8081')}")
    logger.info(f"Kinesis Stream: {os.getenv('KINESIS_STREAM_NAME', 'click-events-stream')}")
    logger.info(f"PostgreSQL: {os.getenv('POSTGRES_HOST', 'postgres')}:{os.getenv('POSTGRES_PORT', 5432)}")
    logger.info(f"LLM: {os.getenv('LLM_PROVIDER', 'deepseek')} - {os.getenv('LLM_MODEL', 'deepseek-chat')}")
    logger.info("============================================================")


def _build_cluster_config():
    """Build the Flink ``Configuration`` pointing at the remote JobManager."""
    logger.info("📝 Step 1: 配置 Flink 集群连接...")

    config = Configuration()

    jobmanager_address = os.getenv('FLINK_JOBMANAGER', 'flink-jobmanager:8081')
    jobmanager_host, jobmanager_port = _parse_jobmanager_address(jobmanager_address)

    config.set_string("rest.address", jobmanager_host)
    config.set_string("rest.port", jobmanager_port)
    config.set_string("jobmanager.rpc.address", jobmanager_host)
    # RPC port is fixed by the cluster deployment; only the REST port varies.
    config.set_string("jobmanager.rpc.port", "6123")

    logger.info(f"📝 JobManager REST: {jobmanager_host}:{jobmanager_port}")
    return config


def _create_stream_env(config):
    """Create the STREAMING-mode ``StreamExecutionEnvironment`` on the cluster.

    Parallelism is taken from ``FLINK_PARALLELISM`` (default 4).
    Raises on failure after logging the error.
    """
    logger.info("📝 Step 2: 创建 StreamExecutionEnvironment...")
    try:
        env = StreamExecutionEnvironment.get_execution_environment(config)
        env.set_runtime_mode(RuntimeExecutionMode.STREAMING)  # ✅ 流式模式！
        env.set_parallelism(int(os.getenv('FLINK_PARALLELISM', 4)))
        logger.info("✅ StreamExecutionEnvironment 已创建（流式模式）")
        return env
    except Exception as e:
        logger.error(f"❌ 创建 StreamExecutionEnvironment 失败: {e}")
        raise


def _create_table_env(env):
    """Create a ``StreamTableEnvironment`` bound to the streaming environment."""
    logger.info("📝 Step 3: 创建 StreamTableEnvironment...")
    try:
        t_env = StreamTableEnvironment.create(stream_execution_environment=env)
        # The Kinesis connector JAR is expected under /opt/flink/lib/, which
        # Flink loads automatically — no extra configuration needed here.
        logger.info("✅ StreamTableEnvironment 已创建")
        return t_env
    except Exception as e:
        logger.error(f"❌ 创建 StreamTableEnvironment 失败: {e}")
        raise


def _create_kinesis_table(t_env):
    """Register the ``kinesis_source`` table via SQL DDL (Table API).

    Connection parameters come from KINESIS_STREAM_NAME / AWS_REGION /
    AWS_ENDPOINT (LocalStack defaults). Raises on DDL failure.
    """
    logger.info("📝 Step 4: 创建 Kinesis Table Source...")

    kinesis_stream_name = os.getenv('KINESIS_STREAM_NAME', 'click-events-stream')
    aws_region = os.getenv('AWS_REGION', 'us-east-1')
    aws_endpoint = os.getenv('AWS_ENDPOINT', 'http://localstack:4566')

    # LocalStack uses the fixed account id 000000000000 in stream ARNs.
    kinesis_arn = f"arn:aws:kinesis:{aws_region}:000000000000:stream/{kinesis_stream_name}"

    try:
        t_env.execute_sql(f"""
        CREATE TABLE kinesis_source (
            userId STRING,
            eventType STRING,
            url STRING,
            `timestamp` BIGINT,
            sessionId STRING,
            interactionContext ROW<
                targetNodeId STRING,
                selector STRING,
                boundingRect ROW<x DOUBLE, y DOUBLE, width DOUBLE, height DOUBLE>
            >,
            globalStructure ROW<
                semanticTree STRING
            >,
            viewport ROW<width INT, height INT>,
            eventData ROW<clickX INT, clickY INT>
        ) WITH (
            'connector' = 'kinesis',
            'stream.arn' = '{kinesis_arn}',
            'aws.region' = '{aws_region}',
            'aws.endpoint' = '{aws_endpoint}',
            'aws.credentials.provider' = 'BASIC',
            'aws.credentials.basic.accesskeyid' = 'test',
            'aws.credentials.basic.secretkey' = 'test',
            'format' = 'json',
            'json.fail-on-missing-field' = 'false',
            'json.ignore-parse-errors' = 'true'
        )
        """)
        logger.info(f"✅ Kinesis Table Source 已创建: {kinesis_stream_name}")
    except Exception as e:
        logger.error(f"❌ 创建 Kinesis Table 失败: {e}")
        raise


def _table_to_datastream(t_env):
    """Convert the registered ``kinesis_source`` table into a DataStream."""
    logger.info("📝 Step 5: 将 Table 转换为 DataStream...")
    try:
        table = t_env.from_path('kinesis_source')
        stream = t_env.to_data_stream(table)
        logger.info("✅ DataStream 已创建")
        return stream
    except Exception as e:
        logger.error(f"❌ Table → DataStream 转换失败: {e}")
        raise


def _create_agents_env(env, t_env):
    """Create the Flink Agents execution environment over ``env``/``t_env``."""
    logger.info("📝 Step 6: 创建 Flink Agents 环境...")
    try:
        agents_env = AgentsExecutionEnvironment.get_execution_environment(
            env=env,
            t_env=t_env  # 传入 TableEnvironment
        )
        logger.info("✅ Flink Agents 环境已创建")
        return agents_env
    except Exception as e:
        logger.error(f"❌ 创建 Flink Agents 环境失败: {e}")
        raise


def _extract_user_id(row):
    """Key selector: best-effort extraction of ``userId`` from a Row.

    Falls back to ``'unknown'`` so a malformed record never fails the job.
    Module-level (not a closure) so Flink can serialize it cleanly.
    """
    try:
        if hasattr(row, 'userId'):
            return row.userId
        if hasattr(row, 'as_dict'):
            return row.as_dict().get('userId', 'unknown')
        return 'unknown'
    except Exception as e:
        logger.warning(f"提取 userId 失败: {e}")
        return 'unknown'


def _apply_agent(agents_env, kinesis_stream):
    """Key the stream by userId, apply ``BehaviorAnalysisAgent``, return output."""
    logger.info("📝 Step 7: 应用 BehaviorAnalysisAgent...")
    try:
        output_stream = (
            agents_env
            .from_datastream(
                input=kinesis_stream,
                key_selector=_extract_user_id  # 按 userId 分组
            )
            .apply(BehaviorAnalysisAgent())  # 复用现有 Agent！
            .to_datastream()
        )
        logger.info("✅ Agent 已应用到 DataStream")
        return output_stream
    except Exception as e:
        logger.error(f"❌ 应用 Agent 失败: {e}")
        raise


def _add_postgres_sink(output_stream):
    """Attach the PostgreSQL writer to the agent output stream."""
    logger.info("📝 Step 8: 添加 PostgreSQL Sink...")
    try:
        output_stream.map(write_to_postgres)
        logger.info("✅ PostgreSQL Sink 已添加")
    except Exception as e:
        logger.error(f"❌ 添加 Sink 失败: {e}")
        raise


def main():
    """主函数 - 流式集群模式

    Orchestrates the pipeline end-to-end: banner → cluster config →
    stream/table environments → Kinesis source → agent application →
    PostgreSQL sink → job submission. Each helper logs its own step and
    re-raises on failure, so any error aborts the submission.
    """
    _log_banner()

    config = _build_cluster_config()
    env = _create_stream_env(config)
    t_env = _create_table_env(env)

    _create_kinesis_table(t_env)
    kinesis_stream = _table_to_datastream(t_env)

    agents_env = _create_agents_env(env, t_env)
    output_stream = _apply_agent(agents_env, kinesis_stream)
    _add_postgres_sink(output_stream)

    logger.info("🚀 Step 9: 提交流式作业到 Flink 集群...")
    logger.info("📝 调用 agents_env.execute()...")
    try:
        agents_env.execute()  # flink-agents 的 execute() 不接受参数
        logger.info("✅ 流式作业已启动（持续运行）")
    except Exception as e:
        logger.error(f"❌ agents_env.execute() 失败: {e}")
        import traceback
        traceback.print_exc()
        raise


# Script entry point: submit the streaming job when run directly
# (no effect when this module is imported).
if __name__ == '__main__':
    main()
