package com.example.flinkcourse.lesson1;

import com.example.flinkcourse.lesson1.sink.EsSinkFactory;
import com.example.flinkcourse.lesson1.sink.PgSinkFactory;
import com.example.flinkcourse.lesson1.source.HBaseSourceFactory;
import com.example.flinkcourse.lesson1.source.KafkaSourceFactory;
import com.example.flinkcourse.lesson1.state.StateAndCheckpointManager;
import com.example.flinkcourse.lesson1.time.TimeAssigner;
import com.example.flinkcourse.lesson1.transform.*;
import com.example.flinkcourse.lesson1.window.WindowProcessor;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.configuration.MemorySize;
import org.apache.flink.configuration.RestOptions;
import org.apache.flink.configuration.TaskManagerOptions;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.windowing.assigners.TumblingEventTimeWindows;
import org.apache.flink.streaming.api.windowing.time.Time;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Flink Lesson 1 main job class.
 *
 * <p>Builds a dual-source streaming ETL pipeline: reads from Kafka and HBase,
 * merges and transforms the records, applies a 1-minute tumbling event-time
 * window, and fans the windowed results out to both Elasticsearch and
 * PostgreSQL.
 */
public class FlinkLesson1Job {
    private static final Logger LOG = LoggerFactory.getLogger(FlinkLesson1Job.class);

    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = createEnvironment();

        // State backend and checkpointing are configured by a dedicated manager.
        StateAndCheckpointManager.configureStateAndCheckpoint(env);

        DataStream<String> windowedStream = buildWindowedStream(env);
        attachSinks(windowedStream);

        LOG.info("Starting Flink Lesson 1 Job...");
        env.execute("Flink Lesson 1 Job");
    }

    /**
     * Creates the execution environment with Web UI port, TaskManager memory,
     * slot count, and default/max parallelism settings.
     *
     * @return a configured {@link StreamExecutionEnvironment}
     */
    private static StreamExecutionEnvironment createEnvironment() {
        Configuration configuration = new Configuration();
        // Web UI port.
        configuration.set(RestOptions.PORT, 8081);
        // TaskManager managed memory. MANAGED_MEMORY_SIZE is typed as
        // ConfigOption<MemorySize>, so a raw String would not compile —
        // parse it into a MemorySize instead.
        configuration.set(TaskManagerOptions.MANAGED_MEMORY_SIZE, MemorySize.parse("1024m"));
        // Task slots per TaskManager.
        configuration.set(TaskManagerOptions.NUM_TASK_SLOTS, 4);

        StreamExecutionEnvironment env =
            StreamExecutionEnvironment.getExecutionEnvironment(configuration);
        env.setParallelism(4);     // default operator parallelism
        env.setMaxParallelism(16); // upper bound (key-group count) for rescaling
        return env;
    }

    /**
     * Builds the source &rarr; transform &rarr; window part of the pipeline.
     *
     * @param env the execution environment to attach sources to
     * @return the windowed result stream, ready for sinks
     */
    private static DataStream<String> buildWindowedStream(StreamExecutionEnvironment env) {
        // Kafka source (parallelism 2). Watermarks are assigned later, after
        // the union, so the sources run with none.
        DataStream<String> kafkaStream = env.fromSource(
                KafkaSourceFactory.createKafkaSource(),
                WatermarkStrategy.noWatermarks(),
                "Kafka Source")
            .setParallelism(2);

        // HBase source (parallelism 2).
        DataStream<String> hbaseStream = env.fromSource(
                HBaseSourceFactory.createHBaseSource(),
                WatermarkStrategy.noWatermarks(),
                "HBase Source")
            .setParallelism(2);

        // Merge both sources into a single stream.
        DataStream<String> mergedStream = kafkaStream.union(hbaseStream);

        // Transformation chain: map -> flatMap -> filter -> keyed process.
        DataStream<String> transformedStream = mergedStream
            .map(new MyMapFunction()).setParallelism(4)
            .flatMap(new MyFlatMapFunction()).setParallelism(4)
            .filter(new MyFilterFunction()).setParallelism(4)
            .keyBy(new MyKeySelector())
            .process(new MyRichFunction()).setParallelism(4);

        // 1-minute tumbling event-time window over the whole (non-keyed) stream.
        // NOTE: windowAll() is a non-parallel operator — Flink rejects any
        // parallelism other than 1 at job-graph construction, so the previous
        // setParallelism(2) here made the job fail to start. It must stay at 1.
        return transformedStream
            .assignTimestampsAndWatermarks(new TimeAssigner())
            .windowAll(TumblingEventTimeWindows.of(Time.minutes(1)))
            .process(new WindowProcessor())
            .setParallelism(1);
    }

    /**
     * Fans the windowed stream out to the Elasticsearch and PostgreSQL sinks.
     *
     * @param windowedStream the stream produced by {@link #buildWindowedStream}
     */
    private static void attachSinks(DataStream<String> windowedStream) {
        windowedStream.sinkTo(EsSinkFactory.createEsSink())
            .setParallelism(2);
        windowedStream.sinkTo(PgSinkFactory.createPgSink())
            .setParallelism(2);
    }
}