package com.example.flinkcourse.lesson4.table;

import com.example.flinkcourse.lesson4.model.Event;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Table API processor.
 *
 * <p>Aggregates an {@link Event} stream with Flink SQL (per-type counts/sums over
 * 1-minute tumbling windows) and publishes the results to an upsert-kafka table.
 */
public class TableProcessor {
    private static final Logger LOG = LoggerFactory.getLogger(TableProcessor.class);

    /** Utility class — static methods only, no instances. */
    private TableProcessor() {
    }

    /**
     * Aggregates the event stream per event type over 1-minute tumbling windows.
     *
     * <p>NOTE(review): {@code TUMBLE} requires {@code timestamp} to be an event-time
     * (rowtime) attribute. {@code fromDataStream} is called here without an explicit
     * schema/watermark, so the attribute may not exist — confirm the {@code Event}
     * stream carries a rowtime attribute, otherwise the windowed query fails at runtime.
     *
     * @param env         stream execution environment used to create the table environment
     * @param inputStream input event stream
     * @return table with per-type windowed event counts, amount sums and last timestamps
     */
    public static Table processEvents(StreamExecutionEnvironment env, DataStream<Event> inputStream) {
        LOG.info("Starting Table API processing");

        // Create the table environment and expose the stream as a temporary view.
        StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env);
        Table eventsTable = tableEnv.fromDataStream(inputStream);
        tableEnv.createTemporaryView("events", eventsTable);

        // `timestamp` is a reserved keyword in Flink SQL (Calcite); it must be
        // escaped with backticks or the query fails to parse.
        String sql = "SELECT " +
                    "  type, " +
                    "  COUNT(*) as event_count, " +
                    "  SUM(amount) as total_amount, " +
                    "  MAX(`timestamp`) as last_timestamp, " +
                    "  TUMBLE_START(`timestamp`, INTERVAL '1' MINUTE) as window_start, " +
                    "  TUMBLE_END(`timestamp`, INTERVAL '1' MINUTE) as window_end " +
                    "FROM events " +
                    "GROUP BY type, TUMBLE(`timestamp`, INTERVAL '1' MINUTE)";

        LOG.info("Executing SQL query: {}", sql);

        Table resultTable = tableEnv.sqlQuery(sql);

        // Removed dead code: QueryOperation exposes no getParallelism() accessor in
        // Flink's Table API, and the previous ifPresent block only logged a value
        // without configuring anything.

        LOG.info("Table API processing completed");

        return resultTable;
    }

    /**
     * Creates the {@code event_stats} sink table backed by an upsert-kafka connector.
     *
     * <p>The primary key {@code (type, window_start)} makes repeated window emissions
     * upsert rather than append.
     *
     * @param tableEnv table environment to register the table in
     * @return a {@link Table} handle for the newly created {@code event_stats} table
     */
    public static Table createEventStatsTable(StreamTableEnvironment tableEnv) {
        LOG.info("Creating event statistics table");

        String createTableSql = "CREATE TABLE event_stats (" +
                              "  type STRING, " +
                              "  event_count BIGINT, " +
                              "  total_amount DOUBLE, " +
                              "  last_timestamp TIMESTAMP(3), " +
                              "  window_start TIMESTAMP(3), " +
                              "  window_end TIMESTAMP(3), " +
                              "  PRIMARY KEY (type, window_start) NOT ENFORCED" +
                              ") WITH (" +
                              "  'connector' = 'upsert-kafka', " +
                              "  'topic' = 'event-stats', " +
                              "  'properties.bootstrap.servers' = 'localhost:9092', " +
                              "  'key.format' = 'json', " +
                              "  'value.format' = 'json'" +
                              ")";

        LOG.info("Executing create table SQL: {}", createTableSql);

        tableEnv.executeSql(createTableSql);

        LOG.info("Event statistics table created successfully");

        return tableEnv.from("event_stats");
    }

    /**
     * Inserts the aggregation results into the {@code event_stats} table.
     *
     * <p>NOTE(review): {@code executeInsert} submits an asynchronous job; the returned
     * {@code TableResult} is discarded here, so failures surface only in the cluster,
     * not to the caller — confirm this fire-and-forget behavior is intended.
     *
     * @param tableEnv    table environment (unused directly; kept for API compatibility)
     * @param resultTable result table whose rows are written to {@code event_stats}
     */
    public static void insertStats(StreamTableEnvironment tableEnv, Table resultTable) {
        LOG.info("Inserting statistics into event_stats table");

        resultTable.executeInsert("event_stats");

        LOG.info("Statistics inserted successfully");
    }
}