package org.example.flink.windows;


import org.apache.commons.lang3.RandomStringUtils;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.connector.kafka.source.KafkaSource;
import org.apache.flink.connector.kafka.source.enumerator.initializer.OffsetsInitializer;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.windowing.assigners.TumblingEventTimeWindows;
import org.apache.flink.streaming.api.windowing.time.Time;
import org.apache.flink.util.OutputTag;
import org.example.flink.util.KafkaUtil;

import java.time.Duration;

/**
 * Flink demo: event-time tumbling windows with a bounded-out-of-orderness watermark,
 * allowed lateness, and a side output that captures records arriving too late even
 * for the lateness grace period.
 *
 * <p>A background producer thread pushes messages of the form
 * {@code <user>:<seq>:<epochMillis>} to the Kafka topic "yjxxt"; every 10th record is
 * 13 s late and every 5th is 8 s late, so the job demonstrates all three paths:
 * on-time aggregation, late-but-allowed updates, and side-output late data.
 */
public class Hello13WatermarkLateData {
    public static void main(String[] args) throws Exception {
        // Start a producer thread so the job has data to consume.
        Thread producer = new Thread(() -> {
            String uname = RandomStringUtils.randomAlphabetic(8);
            for (int i = 100; i < 200; i++) {
                if (i % 10 == 0) {
                    // Very late record: 13 s behind — beyond watermark delay (1 s)
                    // plus allowed lateness (2 s), so it lands in the side output.
                    KafkaUtil.sendMsg("yjxxt", uname + i % 2 + ":" + i + ":" + (System.currentTimeMillis() - 13000));
                } else if (i % 5 == 0) {
                    // Moderately late record: 8 s behind — may still update a window
                    // within the allowed-lateness grace period.
                    KafkaUtil.sendMsg("yjxxt", uname + i % 2 + ":" + i + ":" + (System.currentTimeMillis() - 8000));
                } else {
                    KafkaUtil.sendMsg("yjxxt", uname + i % 2 + ":" + i + ":" + System.currentTimeMillis());
                }
                try {
                    Thread.sleep(500);
                } catch (InterruptedException e) {
                    // Restore the interrupt flag and stop producing instead of
                    // swallowing the interruption and looping on.
                    Thread.currentThread().interrupt();
                    break;
                }
            }
        });
        // Daemon thread: the producer must not keep the JVM alive on its own.
        producer.setDaemon(true);
        producer.start();
        // Set up the execution environment; parallelism 1 keeps output ordering readable.
        StreamExecutionEnvironment environment = StreamExecutionEnvironment.getExecutionEnvironment();
        environment.setParallelism(1);
        // Configure the Kafka source connector.
        KafkaSource<String> source = KafkaSource.<String>builder()
                .setBootstrapServers("192.168.101.100:9092,192.168.101.101:9092,192.168.101.102:9092")
                .setTopics("yjxxt")
                .setGroupId("flink_KafkaConnector")
                .setStartingOffsets(OffsetsInitializer.latest())
                .setValueOnlyDeserializer(new SimpleStringSchema())
                .build();
        // Declare the side-output tag for late data. The anonymous subclass is required
        // so Flink can capture the generic type parameter at runtime.
        OutputTag<String> outputTag = new OutputTag<String>("sideOutputLateData") {
        };
        // Read from the source; watermarks are assigned downstream, not at the source.
        DataStreamSource<String> kafkaSource =
                environment.fromSource(source, WatermarkStrategy.noWatermarks(), "KafkaSource");
        SingleOutputStreamOperator<String> reduceDataStream =
                kafkaSource.assignTimestampsAndWatermarks(
                                // Tolerate up to 1 s of out-of-orderness; the event time is
                                // the third colon-separated field of the message.
                                WatermarkStrategy.<String>forBoundedOutOfOrderness(Duration.ofSeconds(1))
                                        .withTimestampAssigner((element, recordTimestamp) ->
                                                Long.parseLong(element.split(":")[2]))
                        ).keyBy(t -> t.split(":")[0])
                        .window(TumblingEventTimeWindows.of(Time.seconds(5)))
                        // Keep windows open 2 s past the watermark to absorb late updates.
                        .allowedLateness(Time.seconds(2))
                        // Records later than watermark + allowed lateness go to the side output.
                        .sideOutputLateData(outputTag)
                        .reduce((t1, t2) -> {
                            // Append [seq,timestamp] of each merged record; split once.
                            String[] parts = t2.split(":");
                            return t1 + "[" + parts[1] + "," + parts[2] + "]";
                        });
        // Print the windowed aggregation results.
        reduceDataStream.map(t -> "[" + System.currentTimeMillis() + "][" + t + "]").print("WatermarkLateData");
        // Print the late records captured by the side output.
        reduceDataStream.getSideOutput(outputTag).print();
        // Launch the job (blocks until the job terminates).
        environment.execute();
    }
}
