package com.zyh.flink.day07.trigger;

import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.windowing.ProcessAllWindowFunction;
import org.apache.flink.streaming.api.windowing.assigners.GlobalWindows;
import org.apache.flink.streaming.api.windowing.triggers.CountTrigger;
import org.apache.flink.streaming.api.windowing.windows.GlobalWindow;
import org.apache.flink.util.Collector;

public class CountTriggerTest {
    /**
     * Demonstrates a {@link CountTrigger} on a global window: the window fires once
     * for every three elements received from a socket text stream, emitting the
     * space-joined contents of the window.
     *
     * <p>NOTE: because the plain {@code CountTrigger} is used (no {@code PurgingTrigger}
     * wrapper), the window contents are NOT cleared after each firing — every emission
     * contains all elements received since the job started.
     *
     * @param args optional overrides: {@code args[0]} = socket host (default {@code "hadoop10"}),
     *             {@code args[1]} = socket port (default {@code 9998})
     * @throws Exception if the Flink job fails to execute
     */
    public static void main(String[] args) throws Exception {
        // Allow host/port to be supplied on the command line; fall back to the
        // original hard-coded values so existing invocations keep working.
        final String host = args.length > 0 ? args[0] : "hadoop10";
        final int port = args.length > 1 ? Integer.parseInt(args[1]) : 9998;

        StreamExecutionEnvironment environment = StreamExecutionEnvironment.getExecutionEnvironment();

        DataStreamSource<String> dataStreamSource = environment.socketTextStream(host, port);

        SingleOutputStreamOperator<String> result = dataStreamSource.windowAll(GlobalWindows.create())
                .trigger(CountTrigger.of(3)) // fire the window once for every 3 elements
                //.trigger(PurgingTrigger.of(CountTrigger.of(3))) // variant: purge window contents after each firing
                .process(new ProcessAllWindowFunction<String, String, GlobalWindow>() {
                    @Override
                    public void process(Context context, Iterable<String> iterable, Collector<String> collector) throws Exception {
                        // Join every buffered element with a trailing space and emit as one record.
                        StringBuilder sb = new StringBuilder();
                        for (String s : iterable) {
                            sb.append(s).append(" ");
                        }
                        collector.collect(sb.toString());
                    }
                });

        result.print();
        environment.execute("Job");
    }
}
