package com.ry.flink.job5;
import org.apache.flink.api.common.functions.FlatMapFunction;
import org.apache.flink.api.common.functions.ReduceFunction;
import org.apache.flink.api.common.state.ReducingState;
import org.apache.flink.api.common.state.ReducingStateDescriptor;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.windowing.assigners.GlobalWindows;
import org.apache.flink.streaming.api.windowing.triggers.Trigger;
import org.apache.flink.streaming.api.windowing.triggers.TriggerResult;
import org.apache.flink.streaming.api.windowing.windows.GlobalWindow;
import org.apache.flink.util.Collector;

/**
 * Demonstrates a custom count-based {@link Trigger} on a global window:
 * for each key (word) the window fires and purges once that key has
 * accumulated {@code maxCount} (= 3) elements, i.e. every word is
 * emitted with a fresh count after each batch of three occurrences.
 */
public class GlobalWindowTest1 {
    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        DataStreamSource<String> dataStream = env.socketTextStream("hadoop5", 9999);

        // Split each comma-separated line into (word, 1) pairs.
        SingleOutputStreamOperator<Tuple2<String, Integer>> stream = dataStream.flatMap(new FlatMapFunction<String, Tuple2<String, Integer>>() {
            @Override
            public void flatMap(String line, Collector<Tuple2<String, Integer>> collector) throws Exception {
                for (String word : line.split(",")) {
                    collector.collect(Tuple2.of(word, 1));
                }
            }
        });

        stream.keyBy(0)
                // A GlobalWindow never fires on its own — it MUST be paired with a
                // trigger, otherwise the window function would never run.
                // Note: .countWindow(3) is shorthand for this same assigner plus a
                // built-in purging count trigger; the explicit form is used here
                // because this example is about supplying the trigger ourselves.
                .window(GlobalWindows.create())
                .trigger(new CountTrigger(3))
                .sum(1)
                .print();

        // Job name matches the class (was "SessionWindowTest", a copy-paste leftover).
        env.execute("GlobalWindowTest1");
    }

    /**
     * Fires (and purges the window contents) every time a key has seen
     * {@code maxCount} elements. Closely mirrors Flink's built-in
     * {@code CountTrigger} wrapped in a {@code PurgingTrigger}.
     */
    private static class CountTrigger extends Trigger<Tuple2<String, Integer>, GlobalWindow> {

        /** Number of elements per key after which the window fires. */
        private final long maxCount;

        /** Per-key, per-window element counter held in Flink managed state. */
        private final ReducingStateDescriptor<Long> descriptor = new ReducingStateDescriptor<Long>(
                "count",
                new ReduceFunction<Long>() {
                    @Override
                    public Long reduce(Long value1, Long value2) throws Exception {
                        return value1 + value2;
                    }
                }, Long.class);

        public CountTrigger(long maxCount) {
            this.maxCount = maxCount;
        }

        /**
         * Invoked for every element that enters the window.
         *
         * Possible results:
         * - TriggerResult.CONTINUE: do nothing with the window
         * - TriggerResult.FIRE: evaluate the window function
         * - TriggerResult.PURGE: drop the current window contents
         * - TriggerResult.FIRE_AND_PURGE: evaluate, then drop the contents
         */
        @Override
        public TriggerResult onElement(Tuple2<String, Integer> element, long timestamp, GlobalWindow window, TriggerContext ctx) throws Exception {
            ReducingState<Long> partitionedState = ctx.getPartitionedState(descriptor);
            partitionedState.add(1L);
            // ">=" rather than "==" so the trigger still fires even if the counter
            // ever overshoots the threshold (defensive; with +1 per element the two
            // comparisons behave identically).
            if (partitionedState.get() >= maxCount) {
                partitionedState.clear();
                return TriggerResult.FIRE_AND_PURGE;
            }
            return TriggerResult.CONTINUE;
        }

        /** Purely count-based trigger: processing-time timers are ignored. */
        @Override
        public TriggerResult onProcessingTime(long time, GlobalWindow window, TriggerContext ctx) throws Exception {
            return TriggerResult.CONTINUE;
        }

        /** Purely count-based trigger: event-time timers are ignored. */
        @Override
        public TriggerResult onEventTime(long time, GlobalWindow window, TriggerContext ctx) throws Exception {
            return TriggerResult.CONTINUE;
        }

        /** Discard the per-key counter when the window is disposed. */
        @Override
        public void clear(GlobalWindow window, TriggerContext ctx) throws Exception {
            ctx.getPartitionedState(descriptor).clear();
        }
    }

}
