package Countwindow;

import bean.WaterSensor;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.windowing.ProcessWindowFunction;
import org.apache.flink.streaming.api.windowing.windows.GlobalWindow;
import org.apache.flink.util.Collector;

/**
 * @author Spring_Hu
 * @date 2021/10/13 20:43
 */
/**
 * Demo of a count-based sliding window (size 5, slide 2) over a keyed socket stream.
 *
 * <p>Count windows are driven by element count per key, not by event/processing time:
 * a window fires every 2 elements (the slide) and covers at most the last 5 elements
 * (the size). Note the first firings may contain fewer than 5 elements, since windows
 * are not aligned to the first record.
 *
 * <p>Input lines are expected as CSV: {@code id,ts,vc}
 * (e.g. {@code sensor_1,1000,10}).
 */
public class CountWindow {

    /**
     * Entry point.
     *
     * @param args optional: {@code args[0]} = socket host (default "hadoop102"),
     *             {@code args[1]} = socket port (default 8888)
     * @throws Exception if the Flink job fails to execute
     */
    public static void main(String[] args) throws Exception {
        // Generalized: host/port can be supplied on the command line;
        // defaults keep the original behavior.
        final String host = args.length > 0 ? args[0] : "hadoop102";
        final int port = args.length > 1 ? Integer.parseInt(args[1]) : 8888;

        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        // env.setParallelism(1);

        env.socketTextStream(host, port)
                // Parse "id,ts,vc" into a WaterSensor POJO.
                .map(new MapFunction<String, WaterSensor>() {
                    @Override
                    public WaterSensor map(String value) throws Exception {
                        String[] split = value.split(",");
                        return new WaterSensor(split[0], Long.parseLong(split[1]), Integer.parseInt(split[2]));
                    }
                })
                // Key by sensor id so each sensor gets its own window state.
                .keyBy(WaterSensor::getId)
                // Sliding count window: fire every 2 elements, keep up to 5.
                .countWindow(5, 2)
                // GlobalWindow is the window type Flink uses for count windows.
                .process(new ProcessWindowFunction<WaterSensor, String, String, GlobalWindow>() {
                    @Override
                    public void process(String s, Context context, Iterable<WaterSensor> elements, Collector<String> out) throws Exception {
                        // Debug aid: show how many elements this firing contains
                        // (may be < 5 for the first firings of each key).
                        System.out.println(elements.spliterator().estimateSize());
                        for (WaterSensor element : elements) {
                            out.collect("采集的值：" + element);
                        }
                    }
                }).print();

        env.execute();
    }
}
