package com.atguigu.flink.chapter7;

import org.apache.flink.api.common.functions.FlatMapFunction;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.windowing.ProcessWindowFunction;
import org.apache.flink.streaming.api.windowing.assigners.SlidingProcessingTimeWindows;
import org.apache.flink.streaming.api.windowing.time.Time;
import org.apache.flink.streaming.api.windowing.windows.TimeWindow;
import org.apache.flink.util.Collector;

/**
 * @Author lizhenchao@atguigu.cn
 * @Date 2020/12/22 11:36
 */
/**
 * Demo: word counting over sliding processing-time windows.
 *
 * <p>Reads whitespace-separated words from a socket ({@code hadoop162:9999}),
 * keys the stream by word, and for every 10-second window (sliding every
 * 5 seconds) prints the window bounds and emits the number of elements seen
 * for that key in that window.
 */
public class Flink02_Chapter7_Window_Slide {
    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        // Single parallelism so console output from print() stays in one stream.
        env.setParallelism(1);
        env
          .socketTextStream("hadoop162", 9999)
          // Tokenize each line on single spaces and emit (word, 1) pairs.
          .flatMap(new FlatMapFunction<String, Tuple2<String, Long>>() {
              @Override
              public void flatMap(String value, Collector<Tuple2<String, Long>> out) throws Exception {
                  for (String word : value.split(" ")) {
                      out.collect(Tuple2.of(word, 1L));
                  }
              }
          })
          .keyBy(t -> t.f0)
          // Window size 10s, slide 5s — each element belongs to two windows.
          .window(SlidingProcessingTimeWindows.of(Time.seconds(10), Time.seconds(5)))
          .process(new ProcessWindowFunction<Tuple2<String, Long>, String, String, TimeWindow>() {
              @Override
              public void process(String key, Context context, Iterable<Tuple2<String, Long>> elements, Collector<String> out) throws Exception {
                  TimeWindow window = context.window();
                  // Window bounds in epoch seconds; end is exclusive: [start, end).
                  System.out.println("[" + window.getStart() / 1000 + "," + window.getEnd() / 1000 + ")");
                  // Count by iterating: Spliterator.estimateSize() is only an
                  // estimate and may return Long.MAX_VALUE when the spliterator
                  // is not SIZED, so it must not be used as an element count.
                  long count = 0L;
                  for (Tuple2<String, Long> ignored : elements) {
                      count++;
                  }
                  out.collect(String.valueOf(count));
              }
          })
          .print();

        env.execute();
    }
}
