package org.example.flink.windows;


import org.apache.commons.lang3.RandomStringUtils;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.api.common.typeinfo.Types;
import org.apache.flink.api.java.tuple.Tuple3;
import org.apache.flink.connector.kafka.source.KafkaSource;
import org.apache.flink.connector.kafka.source.enumerator.initializer.OffsetsInitializer;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.KeyedStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.windowing.assigners.TumblingEventTimeWindows;
import org.apache.flink.streaming.api.windowing.time.Time;
import org.example.flink.util.KafkaUtil;

import java.time.LocalDateTime;
import java.time.format.DateTimeFormatter;

/**
 * Demonstrates a 5-second tumbling EVENT-TIME window over a Kafka stream.
 *
 * <p>A background producer thread writes 100 messages of the form
 * {@code <user><0|1>:<counter>:<timestamp>} to topic {@code yjxxt}; the Flink job
 * consumes them, keys by the user field, concatenates the counters seen in each
 * 5-second event-time window, stamps the emission time, and prints the result.
 */
public class Hello07EventTimeWindow {

    // DateTimeFormatter is immutable and thread-safe — build it once instead of
    // re-parsing the pattern for every produced/emitted record.
    private static final DateTimeFormatter TS_FORMAT =
            DateTimeFormatter.ofPattern("yyyy年MM月dd日HH时mm分ss秒SSS");

    public static void main(String[] args) throws Exception {
        // Start a dedicated producer thread so the job has data to consume.
        new Thread(() -> {
            String uname = RandomStringUtils.randomAlphabetic(8);
            for (int i = 100; i < 200; i++) {
                String date = LocalDateTime.now().format(TS_FORMAT);
                // i % 2 yields two distinct keys per user so keyBy() has
                // something to partition on.
                KafkaUtil.sendMsg("yjxxt", uname + i % 2 + ":" + i + ":" + date);
                try {
                    Thread.sleep(495);
                } catch (InterruptedException e) {
                    // Restore the interrupt flag and stop producing rather than
                    // swallowing the interruption with printStackTrace().
                    Thread.currentThread().interrupt();
                    return;
                }
            }
        }).start();
        // Obtain the streaming execution environment.
        StreamExecutionEnvironment environment = StreamExecutionEnvironment.getExecutionEnvironment();
        // Configure the Kafka connector.
        KafkaSource<String> source = KafkaSource.<String>builder()
                .setBootstrapServers("node01:9092,node02:9092,node03:9092")
                .setTopics("yjxxt")
                .setGroupId("flink_KafkaConnector")
                .setStartingOffsets(OffsetsInitializer.latest())
                .setValueOnlyDeserializer(new SimpleStringSchema())
                .build();
        // FIX: the original passed WatermarkStrategy.noWatermarks(), but event-time
        // windows only fire when a watermark passes the window end — with no
        // watermarks the TumblingEventTimeWindows below would never emit anything.
        // KafkaSource already assigns the Kafka record timestamp as the event
        // timestamp, so forMonotonousTimestamps() is enough to generate watermarks.
        DataStreamSource<String> kafkaSource = environment.fromSource(
                source, WatermarkStrategy.<String>forMonotonousTimestamps(), "KafkaSource");
        // Parse "user:counter:timestamp" into a Tuple3 and key by the user field.
        KeyedStream<Tuple3<String, String, String>, String> keyedStream = kafkaSource.map(word -> {
                    String[] split = word.split(":");
                    return Tuple3.of(split[0], split[1], split[2]);
                }, Types.TUPLE(Types.STRING, Types.STRING, Types.STRING))
                .keyBy(t -> t.f0);
        // TimeWindow -- Tumbling (event time, 5-second windows).
        keyedStream.window(TumblingEventTimeWindows.of(Time.seconds(5)))
                // Concatenate the counters of all records in the window.
                .reduce((t1, t2) -> {
                    t1.f1 = t1.f1 + "-" + t2.f1;
                    return t1;
                }).map(t -> {
                    // Overwrite f2 with the wall-clock emission time so the output
                    // shows when the window fired.
                    t.f2 = LocalDateTime.now().format(TS_FORMAT);
                    return t;
                }, Types.TUPLE(Types.STRING, Types.STRING, Types.STRING))
                .print("[TimeWindow--Tumbling--EventTime]").setParallelism(1);
        // Launch the job.
        environment.execute();
    }
}

