package com.lm.flink.component;

import com.lm.flink.entry.LogEvent;
import com.lm.flink.entry.LogLevelCount;
import com.lm.flink.log.LogLevelCountAgg;
import com.lm.flink.log.LogLevelWindowResult;
import com.lm.flink.log.LogParser;
import com.lm.flink.log.RedisLevelCountSink;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.restartstrategy.RestartStrategies;
import org.apache.flink.streaming.api.datastream.*;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.windowing.assigners.TumblingProcessingTimeWindows;
import org.apache.flink.streaming.api.windowing.time.Time;
import org.apache.flink.connector.kafka.source.KafkaSource;
import org.apache.flink.connector.kafka.source.enumerator.initializer.OffsetsInitializer;
import org.apache.flink.api.common.serialization.SimpleStringSchema;

import java.time.Duration;

/**
 * Flink streaming job: consumes raw log lines from Kafka, parses them into
 * {@link LogEvent}s, counts events per log level in 10-second
 * processing-time tumbling windows, and writes the per-level counts to Redis.
 *
 * <p>Usage: {@code FlinkKafkaToRedisApp [bootstrapServers] [topic]}
 * Both arguments are optional; omitted arguments fall back to the defaults
 * below (the values that were previously hard-coded).
 */
public class FlinkKafkaToRedisApp {

    /** Default Kafka broker list, used when no CLI argument is supplied. */
    private static final String DEFAULT_BOOTSTRAP_SERVERS = "10.2.0.230:9092";
    /** Default source topic, used when no CLI argument is supplied. */
    private static final String DEFAULT_TOPIC = "logtest-topic";
    /**
     * Stable consumer group id. Without an explicit group id, Flink generates
     * a random one per run, which breaks Kafka-side offset tracking/monitoring.
     */
    private static final String CONSUMER_GROUP_ID = "flink-group";

    public static void main(String[] args) throws Exception {
        String bootstrapServers = args.length > 0 ? args[0] : DEFAULT_BOOTSTRAP_SERVERS;
        String topic = args.length > 1 ? args[1] : DEFAULT_TOPIC;

        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(2);

        // On failure, restart the job up to 3 times with a 10-second delay
        // between attempts before failing permanently.
        env.setRestartStrategy(RestartStrategies.fixedDelayRestart(
                3,
                org.apache.flink.api.common.time.Time.seconds(10)
        ));

        KafkaSource<String> kafkaSource = KafkaSource.<String>builder()
                .setBootstrapServers(bootstrapServers)
                .setTopics(topic)
                .setGroupId(CONSUMER_GROUP_ID)
                // Read from the beginning of the topic when no committed
                // offsets exist for this group.
                .setStartingOffsets(OffsetsInitializer.earliest())
                .setValueOnlyDeserializer(new SimpleStringSchema())
                .build();

        // This pipeline windows on PROCESSING time, so event-time watermarks
        // are never consulted. noWatermarks() makes that explicit — the
        // previous bounded-out-of-orderness strategy was dead configuration.
        DataStream<String> kafkaStream = env.fromSource(
                kafkaSource,
                WatermarkStrategy.noWatermarks(),
                "Kafka Source"
        );

        // Parse each raw line into a LogEvent; flatMap lets LogParser drop
        // unparseable records (presumably — confirm against LogParser).
        DataStream<LogEvent> parsedStream = kafkaStream.flatMap(new LogParser());

        // Count events per log level over 10-second tumbling windows.
        SingleOutputStreamOperator<LogLevelCount> levelCountStream = parsedStream
                .keyBy(LogEvent::getLevel)
                .window(TumblingProcessingTimeWindows.of(Time.seconds(10)))
                .aggregate(new LogLevelCountAgg(), new LogLevelWindowResult());

        // Emit the per-window, per-level counts to Redis.
        levelCountStream.addSink(new RedisLevelCountSink());

        env.execute("Flink Kafka → Redis Log Processing");
    }
}
