package com.yanxu;

import com.google.gson.Gson;
import com.yanxu.domain.Event2;
import org.apache.flink.api.common.eventtime.SerializableTimestampAssigner;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.functions.AggregateFunction;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.KeyedStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.windowing.assigners.TumblingEventTimeWindows;
import org.apache.flink.streaming.api.windowing.time.Time;
import org.apache.flink.util.OutputTag;

import java.time.Duration;
import java.util.Collections;
import java.util.LinkedHashMap;
import java.util.Map;

/**
 * @author 折戟沉沙铁未销
 * @version V1.0
 * @date 2025/7/27-2025
 * @Description: Flink demo: event-time tumbling windows with allowedLateness and a
 *               side-output stream ("delay" tag) that captures data arriving too late
 *               even for the lateness allowance.
 */
public class Api_18_DelayDataSample {
    public static void main(String[] args) throws Exception {
        // Execution environment; parallelism 1 keeps console output ordered for the demo.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);

        // Source: raw comma-separated lines from a socket, e.g. "user,url,respTime,timestamp".
        DataStreamSource<String> dataSource = env.socketTextStream("192.168.1.30", 6666);

        // Map each CSV line into an Event2(user, url, responseTime, timestamp).
        // A malformed line (wrong field count / non-numeric fields) will throw and fail the task,
        // matching the original fail-fast behavior of this demo.
        SingleOutputStreamOperator<Event2> mapOperator = dataSource.map(new MapFunction<String, Event2>() {
            @Override
            public Event2 map(String s) throws Exception {
                String[] arrays = s.split(",");
                return new Event2(arrays[0], arrays[1], Integer.parseInt(arrays[2]), Long.parseLong(arrays[3]));
            }
        });

        // Assign event-time timestamps and watermarks.
        // Duration.ZERO means no out-of-orderness tolerance at the watermark level:
        // any element older than the current max timestamp is already "late".
        SingleOutputStreamOperator<Event2> operator = mapOperator.assignTimestampsAndWatermarks(
                WatermarkStrategy.<Event2>forBoundedOutOfOrderness(Duration.ZERO)
                        .withTimestampAssigner(new SerializableTimestampAssigner<Event2>() {
                            @Override
                            public long extractTimestamp(Event2 event2, long recordTimestamp) {
                                return event2.getTimestamp();
                            }
                        })
        );

        // keyBy(data -> true) routes every element to the same key,
        // effectively no partitioning — all data shares one window state.
        KeyedStream<Event2, Boolean> keyByStream = operator.keyBy(data -> true);

        // Side-output tag for data that arrives after the window has fully closed.
        // The anonymous subclass ({}) preserves the generic type against erasure.
        OutputTag<Event2> outputTag = new OutputTag<Event2>("delay") {};

        // Event-time window + allowed lateness + side output + custom aggregate.
        SingleOutputStreamOperator<String> aggregateOperator = keyByStream
                // Tumbling event-time window of 5 seconds.
                .window(TumblingEventTimeWindows.of(Time.seconds(5)))

                // Keep window state for 1 extra minute after the watermark passes window end.
                // Watermarks decide WHEN a window first fires; allowedLateness decides whether
                // (and for how long) late arrivals can still update the already-fired result.
                .allowedLateness(Time.minutes(1))

                // Data later than watermark + allowedLateness goes to the side output
                // instead of being dropped.
                .sideOutputLateData(outputTag)

                // Custom aggregate: sum responseTime per URL within the window,
                // emitted as a JSON string.
                .aggregate(new AggregateFunction<Event2, Map<String, Integer>, String>() {
                    @Override
                    public Map<String, Integer> createAccumulator() {
                        // LinkedHashMap keeps URLs in first-seen order for stable JSON output.
                        return new LinkedHashMap<>();
                    }

                    @Override
                    public Map<String, Integer> add(Event2 event2, Map<String, Integer> accumulator) {
                        // Idiomatic upsert: insert the value, or sum with the existing one.
                        accumulator.merge(event2.getUrl(), event2.getResponseTime(), Integer::sum);
                        return accumulator;
                    }

                    @Override
                    public String getResult(Map<String, Integer> accumulator) {
                        return new Gson().toJson(accumulator);
                    }

                    @Override
                    public Map<String, Integer> merge(Map<String, Integer> acc0, Map<String, Integer> acc1) {
                        // Combine both partial accumulators.
                        // FIX: the previous implementation returned Collections.emptyMap(),
                        // which would silently discard all data if merge were ever invoked
                        // (it is only called for merging windows such as session windows,
                        // but the contract must still be honored).
                        acc1.forEach((url, total) -> acc0.merge(url, total, Integer::sum));
                        return acc0;
                    }
                });

        // Main (on-time + within-lateness) results.
        aggregateOperator.print(" result >>>> ");
        // Data that missed even the 1-minute lateness allowance.
        aggregateOperator.getSideOutput(outputTag).print("side_out_print");

        env.execute();
    }
}