package com.example.flinkcourse.lesson2.table;

import com.example.flinkcourse.lesson2.model.Event;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Event table processor.
 *
 * <p>Uses the Flink Table API / SQL to compute per-type event counts over
 * one-minute tumbling windows.
 */
public class EventTableProcessor {
    private static final Logger LOG = LoggerFactory.getLogger(EventTableProcessor.class);

    /**
     * Processes the event stream with a windowed SQL aggregation.
     *
     * @param env         the stream execution environment
     * @param inputStream the input event stream
     * @return a table with per-type counts, the latest event timestamp, and
     *         the tumbling-window start/end per one-minute window
     */
    public static Table processEvents(StreamExecutionEnvironment env, DataStream<Event> inputStream) {
        LOG.info("Processing events with Table API...");

        // Create the Table environment bound to the streaming environment.
        StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env);

        // Convert the DataStream into a Table.
        // NOTE(review): TUMBLE(...) below requires `timestamp` to be an
        // event-time attribute with a watermark. If Event#timestamp is a plain
        // field, a rowtime column must be declared here via
        // Schema.newBuilder().columnByExpression(...).watermark(...) — confirm
        // against the Event model and the upstream watermark strategy.
        Table inputTable = tableEnv.fromDataStream(inputStream);

        // Register the table as a temporary view so SQL can reference it by name.
        tableEnv.createTemporaryView("events", inputTable);

        // Windowed aggregation. BUG FIX: `timestamp` and `count` are reserved
        // keywords in Flink SQL and must be escaped with backticks; the
        // original query failed to parse.
        String sql = "SELECT " +
                    "  type, " +
                    "  COUNT(*) as `count`, " +
                    "  MAX(`timestamp`) as last_timestamp, " +
                    "  TUMBLE_START(`timestamp`, INTERVAL '1' MINUTE) as window_start, " +
                    "  TUMBLE_END(`timestamp`, INTERVAL '1' MINUTE) as window_end " +
                    "FROM events " +
                    "GROUP BY " +
                    "  type, " +
                    "  TUMBLE(`timestamp`, INTERVAL '1' MINUTE)";

        LOG.info("Executing SQL query: {}", sql);
        Table resultTable = tableEnv.sqlQuery(sql);

        // BUG FIX: the original called resultTable.setParallelism(4), but
        // org.apache.flink.table.api.Table has no such method — it did not
        // compile. Operator parallelism is configured on the
        // StreamExecutionEnvironment (env.setParallelism) or via the option
        // "table.exec.resource.default-parallelism", not on a Table instance.

        LOG.info("Table API processing completed");
        return resultTable;
    }
}