package edu.nepu.flink.api.window;

import edu.nepu.flink.api.bean.WaterSensor;
import org.apache.commons.lang3.time.DateFormatUtils;
import org.apache.flink.api.common.functions.AggregateFunction;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.windowing.ProcessWindowFunction;
import org.apache.flink.streaming.api.windowing.assigners.TumblingProcessingTimeWindows;
import org.apache.flink.streaming.api.windowing.time.Time;
import org.apache.flink.streaming.api.windowing.windows.TimeWindow;
import org.apache.flink.util.Collector;

import java.text.SimpleDateFormat;

/**
 * @Date 2024/2/29 21:49
 * @Created by chenshuaijun
 */
/**
 * Demo of a full-window function ({@link ProcessWindowFunction}) on a keyed,
 * 10-second tumbling processing-time window.
 *
 * <p>Reads lines of the form {@code id,ts,vc} from a socket, keys by sensor id,
 * and on window close prints the key, the window bounds, the exact number of
 * buffered elements, and the elements themselves.
 */
public class ProcessWindow {

    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);
        // Parse "id,ts,vc" lines into WaterSensor records.
        SingleOutputStreamOperator<WaterSensor> source = env.socketTextStream("hadoop102", 9999).map(new MapFunction<String, WaterSensor>() {
            @Override
            public WaterSensor map(String value) throws Exception {
                String[] split = value.split(",");
                return new WaterSensor(split[0], Long.valueOf(split[1]), Integer.valueOf(split[2]));
            }
        });
        /**
         * Why aggregate exists in addition to reduce:
         * reduce requires the input, intermediate, and output types to all be the same,
         * which limits flexibility; aggregate allows all three to differ.
         */
        source.keyBy(WaterSensor::getId)
                        .window(TumblingProcessingTimeWindows.of(Time.seconds(10)))
                        /**
                         * Type parameters: input type, output type, key type, window type.
                         * How process differs from reduce/aggregate:
                         * (1) process is the low-level API: it exposes the Context (window metadata,
                         *     timers, state) during computation and supports lifecycle control.
                         * (2) process is a full-window function: instead of computing per arriving
                         *     element, it buffers all elements and evaluates once when the window fires.
                         * (3) reduce/aggregate compute incrementally, keeping only one accumulated value.
                         */
                        .process(new ProcessWindowFunction<WaterSensor, String, String, TimeWindow>() {
                            @Override
                            public void process(String s, ProcessWindowFunction<WaterSensor, String, String, TimeWindow>.Context context, Iterable<WaterSensor> elements, Collector<String> out) throws Exception {
                                TimeWindow window = context.window();
                                String windowStart = DateFormatUtils.format(window.getStart(), "yyyy-MM-dd HH:mm:ss");
                                String windowEnd = DateFormatUtils.format(window.getEnd(), "yyyy-MM-dd HH:mm:ss");
                                // Count exactly by iterating: Spliterator.estimateSize() is only an
                                // estimate and may return Long.MAX_VALUE when the size is unknown.
                                long size = 0L;
                                for (WaterSensor ignored : elements) {
                                    size++;
                                }
                                System.out.println("key: " + s + " 窗口的范围:["+windowStart+"-->"+ windowEnd + "]" + "包含的数据条数 " + size + " 数据为"+ elements);
                            }
                        }).print();

        env.execute();
    }
}
