package com.chief.watermark;

import org.apache.commons.lang3.time.DateUtils;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.common.state.ValueState;
import org.apache.flink.api.common.state.ValueStateDescriptor;
import org.apache.flink.api.java.tuple.Tuple3;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.KeyedStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.KeyedProcessFunction;
import org.apache.flink.util.Collector;

import java.time.Duration;
import java.time.LocalDateTime;
import java.time.ZoneId;
import java.time.format.DateTimeFormatter;

/**
 * Demo Flink job exercising {@link KeyedProcessFunction} with event-time timers.
 *
 * <p>Reads lines of the form {@code key,yyyy-MM-dd HH:mm:ss,value} from a socket,
 * assigns event-time watermarks from the parsed timestamp, keys by the first field,
 * counts per key how many values exceed 5, and registers an event-time timer
 * 10 s after each element. When a timer fires and the count exceeds 3, a marker
 * tuple is emitted downstream.
 */
public class KeyedProcessFunctionTest {

    // DateTimeFormatter is immutable and thread-safe, unlike SimpleDateFormat,
    // so it is cached once per JVM (static fields are not captured by Flink
    // closure serialization).
    private static final DateTimeFormatter TS_FORMAT =
            DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss");

    /**
     * Parses a {@code yyyy-MM-dd HH:mm:ss} timestamp in the system default zone
     * to epoch milliseconds (same semantics as the previous
     * {@code DateUtils.parseDate(...).getTime()}).
     *
     * @param text timestamp text, e.g. {@code "2022-03-29 10:00:00"}
     * @return epoch milliseconds in the JVM's default time zone
     */
    private static long toEpochMillis(String text) {
        return LocalDateTime.parse(text, TS_FORMAT)
                .atZone(ZoneId.systemDefault())
                .toInstant()
                .toEpochMilli();
    }

    public static void main(String[] args) throws Exception {
        // Host/port are overridable from the command line; defaults preserve
        // the original hard-coded endpoint.
        final String host = args.length > 0 ? args[0] : "hadoop55";
        final int port = args.length > 1 ? Integer.parseInt(args[1]) : 7777;

        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        // Parallelism 1 keeps the console output ordered for this demo.
        env.setParallelism(1);

        DataStreamSource<String> socketTextStream = env.socketTextStream(host, port);

        // "key,timestamp,value" -> (key, epochMillis, intValue)
        DataStream<Tuple3<String, Long, Integer>> parsed = socketTextStream.map(new MapFunction<String, Tuple3<String, Long, Integer>>() {
            @Override
            public Tuple3<String, Long, Integer> map(String value) throws Exception {
                String[] split = value.split(",");
                return new Tuple3<>(split[0], toEpochMillis(split[1]), Integer.parseInt(split[2]));
            }
        });

        // Zero-lag bounded out-of-orderness: the watermark tracks the max seen
        // event timestamp exactly (strictly ascending input assumed).
        SingleOutputStreamOperator<Tuple3<String, Long, Integer>> withWatermarks =
                parsed.assignTimestampsAndWatermarks(
                        WatermarkStrategy.<Tuple3<String, Long, Integer>>forBoundedOutOfOrderness(Duration.ofMillis(0))
                                .withTimestampAssigner((e, t) -> e.f1));

        KeyedStream<Tuple3<String, Long, Integer>, String> keyed = withWatermarks.keyBy(t -> t.f0);

        SingleOutputStreamOperator<Tuple3<String, Long, Integer>> process = keyed.process(new KeyedProcessFunction<String, Tuple3<String, Long, Integer>, Tuple3<String, Long,
                Integer>>() {
            // Per-key count of elements whose value (f2) is greater than 5.
            private ValueState<Integer> cnt;

            @Override
            public void open(Configuration parameters) throws Exception {
                cnt = getRuntimeContext().getState(new ValueStateDescriptor<>("cnt", Integer.class));
            }

            @Override
            public void onTimer(long timestamp, OnTimerContext ctx, Collector<Tuple3<String, Long, Integer>> out) throws Exception {
                // ValueState may legitimately be null (e.g. after state TTL/clear);
                // guard before unboxing to avoid an NPE inside the timer callback.
                Integer count = cnt.value();
                System.out.println("onTime:" + ctx.timestamp() + ",cnt = " + count);
                if (count != null && count > 3) {
                    out.collect(new Tuple3<>(ctx.getCurrentKey(), toEpochMillis("2022-03-29 10:00:00"), 2));
                }
            }

            @Override
            public void processElement(Tuple3<String, Long, Integer> value, Context ctx, Collector<Tuple3<String, Long, Integer>> out) throws Exception {
                System.out.println("processElement:" + ctx.timestamp());
                if (null == cnt.value()) {
                    cnt.update(0);
                }
                if (value.f2 > 5) {
                    cnt.update(cnt.value() + 1);
                }
                // One timer per element, 10 s (event time) after its timestamp.
                // Duplicate registrations for the same (key, time) coalesce.
                ctx.timerService().registerEventTimeTimer(ctx.timestamp() + 10000);
                System.out.println(value + " currentWatermark :" + ctx.timerService().currentWatermark());
            }
        });

        process.print("tuple3StringKeyedStream");

        env.execute();
    }

}
