package com.zyh.flink.day06.assignor;

import org.apache.flink.api.common.functions.FlatMapFunction;
import org.apache.flink.api.common.functions.ReduceFunction;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.KeyedStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.windowing.assigners.TumblingProcessingTimeWindows;
import org.apache.flink.streaming.api.windowing.time.Time;
import org.apache.flink.util.Collector;

/**
 * Word-count over a socket text stream using a 5-second tumbling
 * processing-time window.
 *
 * <p>Reads whitespace-separated words from a socket, keys the stream by word,
 * and every 5 seconds emits the per-word count accumulated within that window.
 *
 * <p>Usage: {@code TumblingWindowTest [host [port]]} — defaults to
 * {@code hadoop10:9999} for backward compatibility.
 */
public class TumblingWindowTest {
    public static void main(String[] args) throws Exception {
        // Allow host/port override from the command line; keep the original
        // defaults so existing invocations behave identically.
        String host = args.length > 0 ? args[0] : "hadoop10";
        int port = args.length > 1 ? Integer.parseInt(args[1]) : 9999;

        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

        DataStreamSource<String> source = env.socketTextStream(host, port);

        // Tokenize each line into (word, 1) pairs and key the stream by word.
        // An anonymous class (rather than a lambda) is used deliberately:
        // with a lambda, type erasure would force an explicit .returns() hint
        // for the Tuple2 output type.
        KeyedStream<Tuple2<String, Integer>, String> keyedStream = source.flatMap(new FlatMapFunction<String, Tuple2<String, Integer>>() {
            @Override
            public void flatMap(String line, Collector<Tuple2<String, Integer>> collector) throws Exception {
                String[] words = line.split("\\s+");
                for (String word : words) {
                    collector.collect(Tuple2.of(word, 1));
                }
            }
        }).keyBy(t -> t.f0);

        /*
            Compute counts with a tumbling window:
            the unbounded stream is sliced into fixed, non-overlapping
            5-second windows (processing time).
         */
        SingleOutputStreamOperator<Tuple2<String, Integer>> result = keyedStream
                // Assign each element to a 5s tumbling processing-time window.
                .window(TumblingProcessingTimeWindows.of(Time.seconds(5)))
                // Incrementally aggregate the elements of each window.
                .reduce(new ReduceFunction<Tuple2<String, Integer>>() {
                    /*
                     * value1, value2:
                     *   on the first call, value1 is the window's first element
                     *   and value2 the second;
                     *   on subsequent calls, value1 is the previous partial
                     *   result and value2 the next element.
                     */
                    @Override
                    public Tuple2<String, Integer> reduce(Tuple2<String, Integer> value1, Tuple2<String, Integer> value2) throws Exception {
                        // Debug trace of each incremental aggregation step.
                        System.out.println("value1 = " + value1 + ",value2 = " + value2);
                        return Tuple2.of(value1.f0, value1.f1 + value2.f1);
                    }
                });
        result.print();

        env.execute("Job");
    }
}
