package com.chukun.flink.stream.operator.base;

import com.chukun.flink.stream.bean.Trade;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.IterativeStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.ProcessFunction;
import org.apache.flink.util.Collector;
import org.apache.flink.util.OutputTag;

import java.util.ArrayList;
import java.util.concurrent.TimeUnit;

/**
 * Demonstrates Flink's {@code DataStream.iterate} operator: each element's
 * amount is repeatedly reduced by 20 inside the loop until it drops below 100,
 * at which point it leaves the iteration through a side output and is printed.
 *
 * @author chukun
 * @version 1.0.0
 * @createTime 2022-05-10 00:06:00
 */
public class IterateOperator {

    /**
     * Tags elements whose amount has dropped below 100 — they exit the loop.
     */
    private static final OutputTag<Tuple2<String,Integer>> SMALL_AMOUNT_TAG = new OutputTag<Tuple2<String,Integer>>("small_amount_tag"){};

    /**
     * Tags elements whose amount is still &gt;= 100 — they are fed back into the loop.
     */
    private static final OutputTag<Tuple2<String,Integer>> LARGE_AMOUNT_TAG = new OutputTag<Tuple2<String,Integer>>("large_amount_tag") {};

    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        // Single parallel instance keeps the console output deterministic for the demo.
        env.setParallelism(1);

        DataStream<Tuple2<String,Integer>> dataStream = env.fromCollection(new ArrayList<Tuple2<String,Integer>>(){
            {
                add(new Tuple2<>("12300986754",899));
                add(new Tuple2<>("14500986754",699));
                add(new Tuple2<>("18800986754",88));
            }
        });

        // Maximum time (ms) the iteration head waits for feedback records
        // before it considers the iteration finished and terminates.
        IterativeStream<Tuple2<String, Integer>> it = dataStream.iterate(5000);

        // Loop body: subtract 20 from the amount on every pass.
        DataStream<Tuple2<String, Integer>> mapStreams = it.map(new MapFunction<Tuple2<String, Integer>, Tuple2<String, Integer>>() {
            @Override
            public Tuple2<String, Integer> map(Tuple2<String, Integer> tuple) throws Exception {
                TimeUnit.SECONDS.sleep(1);
                System.out.println("迭代流处理： " + tuple);
                return new Tuple2<>(tuple.f0, tuple.f1 - 20);
            }
        });

        // Split the loop output: amounts below 100 are done, larger amounts
        // still need more iterations.
        SingleOutputStreamOperator<Tuple2<String,Integer>> processStream = mapStreams.process(new ProcessFunction<Tuple2<String,Integer>, Tuple2<String,Integer>>() {
            @Override
            public void processElement(Tuple2<String,Integer> tuple, ProcessFunction<Tuple2<String,Integer>, Tuple2<String,Integer>>.Context context, Collector<Tuple2<String,Integer>> collector) throws Exception {
                if (tuple.f1 < 100) {
                    context.output(SMALL_AMOUNT_TAG, new Tuple2<>(tuple.f0, tuple.f1));
                } else {
                    context.output(LARGE_AMOUNT_TAG, new Tuple2<>(tuple.f0, tuple.f1));
                }
            }
        });

        // Elements still >= 100 are fed back into the iteration for another pass.
        // BUG FIX: the original closed the loop with the SMALL stream, which made
        // already-finished elements shrink forever (88 -> 68 -> 48 -> ...); the
        // constant feedback traffic also prevented the 5000 ms iteration timeout
        // from ever firing, so the job never terminated.
        DataStream<Tuple2<String,Integer>> largeAmountStream = processStream.getSideOutput(LARGE_AMOUNT_TAG);
        it.closeWith(largeAmountStream);

        // Elements that dropped below 100 leave the loop and are printed.
        DataStream<Tuple2<String,Integer>> smallAmountStream = processStream.getSideOutput(SMALL_AMOUNT_TAG);
        smallAmountStream.print("small amount");

        // Trigger job execution.
        env.execute("iterate stream");
    }
}
