package com.gy.flink.window;

import org.apache.flink.api.common.functions.FlatMapFunction;
import org.apache.flink.api.common.functions.ReduceFunction;
import org.apache.flink.api.common.state.ReducingState;
import org.apache.flink.api.common.state.ReducingStateDescriptor;
import org.apache.flink.api.java.tuple.Tuple;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.datastream.WindowedStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.windowing.ProcessWindowFunction;
import org.apache.flink.streaming.api.windowing.assigners.GlobalWindows;
import org.apache.flink.streaming.api.windowing.triggers.Trigger;
import org.apache.flink.streaming.api.windowing.triggers.TriggerResult;
import org.apache.flink.streaming.api.windowing.windows.GlobalWindow;
import org.apache.flink.util.Collector;

import javax.lang.model.element.ElementVisitor;
import java.util.Arrays;
import java.util.Map;

/**
 * 自定义窗口处理逻辑
 */
/**
 * Custom window processing logic: counts words using a {@link GlobalWindows}
 * assigner driven entirely by a custom count-based trigger, which fires (and
 * purges) the window every time a key has accumulated a fixed number of elements.
 */
public class MyWindow {

    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        // Local environment with the web UI enabled so the running job can be inspected.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.createLocalEnvironmentWithWebUI(conf);

        DataStreamSource<String> dataStream = env.fromElements(
                "hadoop,hadoop",
                "hive,hadoop",
                "hive,hadoop",
                "spark,hadoop",
                "spark,hadoop",
                "hive,hadoop",
                "hive,hadoop"
        );
        // Alternative source for interactive testing:
        // DataStreamSource<String> dataStream = env.socketTextStream("localhost", 9999);

        // Split each comma-separated line into (word, 1) tuples.
        SingleOutputStreamOperator<Tuple2<String, Integer>> flatMapStream = dataStream.flatMap(new FlatMapFunction<String, Tuple2<String, Integer>>() {
            @Override
            public void flatMap(String value, Collector<Tuple2<String, Integer>> out) throws Exception {
                for (String word : value.split(",")) {
                    out.collect(Tuple2.of(word, 1));
                }
            }
        });

        // Key by the word (tuple field 0); the global window never closes on its
        // own, so evaluation is controlled entirely by the custom trigger below.
        WindowedStream<Tuple2<String, Integer>, Tuple, GlobalWindow> windowStream = flatMapStream.keyBy(0)
                .window(GlobalWindows.create())
                .trigger(myCountTrigger(3L));

        windowStream.sum(1).print().name("print").setParallelism(1);

        env.execute(MyWindow.class.getCanonicalName());
    }

    /**
     * Builds a custom window trigger that fires and purges the window once a key
     * has accumulated {@code maxCount} elements.
     *
     * @param maxCount number of elements per key that triggers a window evaluation
     * @return a count-based {@link Trigger} for {@link GlobalWindow}s
     */
    private static Trigger<Tuple2<String, Integer>, GlobalWindow> myCountTrigger(final Long maxCount) {
        return new Trigger<Tuple2<String, Integer>, GlobalWindow>() {

            // Per-key element count, kept in partitioned (keyed) reducing state.
            private final ReducingStateDescriptor<Long> descriptor = new ReducingStateDescriptor<>(
                    "count",
                    new ReduceFunction<Long>() {
                        @Override
                        public Long reduce(Long value1, Long value2) throws Exception {
                            return value1 + value2;
                        }
                    },
                    Long.class
            );

            /**
             * Called for every element that enters the window.
             *
             * @param element   the arriving element
             * @param timestamp the element's timestamp
             * @param window    the window the element belongs to
             * @param ctx       trigger context giving access to partitioned state
             * @return {@link TriggerResult#FIRE_AND_PURGE} once the key has reached
             *         {@code maxCount} elements; {@link TriggerResult#CONTINUE} otherwise
             * @throws Exception if state access fails
             */
            @Override
            public TriggerResult onElement(Tuple2<String, Integer> element, long timestamp, GlobalWindow window, TriggerContext ctx) throws Exception {
                // Fetch this key's running count and increment it.
                ReducingState<Long> count = ctx.getPartitionedState(descriptor);
                count.add(1L);
                // Use >= rather than equals(): if the stored count ever overshoots the
                // threshold (e.g. state restored from a run with a larger maxCount),
                // an exact-equality check would never fire again for that key.
                if (count.get() >= maxCount) {
                    // Reset the key's count, then evaluate and purge the window.
                    count.clear();
                    return TriggerResult.FIRE_AND_PURGE;
                }
                // Threshold not reached yet: keep accumulating, no action.
                return TriggerResult.CONTINUE;
            }

            /** Processing-time timers are not used by this trigger. */
            @Override
            public TriggerResult onProcessingTime(long time, GlobalWindow window, TriggerContext ctx) throws Exception {
                return TriggerResult.CONTINUE;
            }

            /** Event-time timers are not used by this trigger. */
            @Override
            public TriggerResult onEventTime(long time, GlobalWindow window, TriggerContext ctx) throws Exception {
                return TriggerResult.CONTINUE;
            }

            /** Clears this key's count state when the window is disposed. */
            @Override
            public void clear(GlobalWindow window, TriggerContext ctx) throws Exception {
                ctx.getPartitionedState(descriptor).clear();
            }
        };
    }
}
