package com.mxnavi5.example.DataStream;

import org.apache.flink.api.common.functions.FlatMapFunction;
import org.apache.flink.api.common.typeinfo.TypeHint;
import org.apache.flink.api.common.typeinfo.TypeInformation;
import org.apache.flink.api.java.tuple.Tuple;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.api.java.tuple.Tuple3;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.configuration.RestOptions;
import org.apache.flink.streaming.api.datastream.*;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.ProcessFunction;
import org.apache.flink.streaming.api.functions.co.CoMapFunction;
import org.apache.flink.streaming.api.functions.co.KeyedCoProcessFunction;
import org.apache.flink.util.Collector;
import org.apache.flink.util.OutputTag;

/**
 * Demo of basic Flink DataStream transformations: map, flatMap, filter,
 * process with side outputs, union, keyBy, and connect/CoMap.
 * Runs on a local environment with the web UI exposed on port 8081.
 */
public class MyTransformTest {
    /** Tag for routing non-matching elements to a side output; the anonymous
     *  subclass keeps the generic type available to Flink at runtime. */
    private static final OutputTag<Integer> outputTag = new OutputTag<Integer>("OutputTag") {
    };

    public static void main(String[] args) throws Exception {
        //    StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        Configuration configuration = new Configuration();
        configuration.setInteger(RestOptions.PORT, 8081);
        StreamExecutionEnvironment env = StreamExecutionEnvironment.createLocalEnvironmentWithWebUI(configuration);
        env.setParallelism(2);
        // Keep each operator as a separate task so the chain is visible in the web UI.
        env.disableOperatorChaining();

        DataStreamSource<Integer> integerDataStreamSource = env.fromElements(1, 2, 3);

        // map: 1,2,3 -> 2,4,6
        SingleOutputStreamOperator<Integer> mapStream = integerDataStreamSource.map(x -> x * 2);
//        mapStream.print("map");

        // flatMap: emit each incremented value twice (2 -> 3,3; 4 -> 5,5; 6 -> 7,7)
        SingleOutputStreamOperator<Integer> flatMapStream = mapStream.flatMap(new FlatMapFunction<Integer, Integer>() {
            @Override
            public void flatMap(Integer integer, Collector<Integer> collector) throws Exception {
                collector.collect(integer + 1);
                collector.collect(integer + 1);
            }
        });

//        flatMapStream.print("flatMap");

        // filter: keeps 3,3,5,5 (drops the 7s)
        SingleOutputStreamOperator<Integer> filterStream = flatMapStream.filter(x -> x < 7);

//        filterStream.print("filter");

        // Stream splitting via independent filters.
        SingleOutputStreamOperator<Integer> filter1Stream = filterStream.filter(x -> x == 3);
        SingleOutputStreamOperator<Integer> filter2Stream = filterStream.filter(x -> x == 5);

//        filter1Stream.print("f1");
//        filter2Stream.print("f2");

        // Stream splitting via ProcessFunction + side output: 3s go to the main
        // output, everything else to the side output tagged by outputTag.
        SingleOutputStreamOperator<Integer> processStream = filterStream.process(new ProcessFunction<Integer, Integer>() {
            @Override
            public void processElement(Integer integer, ProcessFunction<Integer, Integer>.Context context, Collector<Integer> collector) throws Exception {
                if (integer == 3) {
                    collector.collect(integer);
                } else {
                    context.output(outputTag, integer);
                }
            }
        });

        DataStream<Integer> sideOutputStream = processStream.getSideOutput(outputTag);
//        processStream.print("f3");
//        sideOutputStream.print("f4");

        // union merges streams of the same element type.
        DataStream<Integer> unionStream = processStream.union(sideOutputStream);
//        unionStream.print("union");

        // Custom source (project-local). Parameterized type instead of the raw
        // SingleOutputStreamOperator the original used.
        SingleOutputStreamOperator<Integer> returns1 = env.addSource(new UdfSourceTest()).returns(Integer.class);
        SingleOutputStreamOperator<Tuple2<Integer, Integer>> returns = returns1
                .map(x -> Tuple2.of(x, x))
                // Lambda type erasure hides the tuple's generics; declare them explicitly.
                .returns(TypeInformation.of(new TypeHint<Tuple2<Integer, Integer>>() {
                }));
        returns.print("re");
        // keyBy(int) is deprecated; key by field f1 with a KeySelector instead.
        returns.uid("flinkKeyedTest").keyBy(t -> t.f1).print("key:");


        // Keyed aggregations demo.
        DataStreamSource<Tuple3<Integer, Integer, Integer>> tuple3DataStreamSource
                = env.fromElements(Tuple3.of(1, 1, 1), Tuple3.of(1, 2, 0),
                Tuple3.of(2, 1, 7), Tuple3.of(2, 2, 6));

        // KeySelector replaces deprecated keyBy(1); the key type is now Integer.
        KeyedStream<Tuple3<Integer, Integer, Integer>, Integer> keyedStream = tuple3DataStreamSource
                .keyBy(t -> t.f1);

//        keyedStream.print();

//        keyedStream.sum(1).print("sum");
//        keyedStream.reduce((x,y)->Tuple3.of(x.f0,x.f1+y.f1,x.f2+y.f2)).print("reduce");
//        keyedStream.max(1).print("max");
//        keyedStream.maxBy(2).print("maxBy");

        // Connecting two streams (connect/CoMap) demo.
        DataStreamSource<Tuple2<Integer, Integer>> tuple2_1DataStreamSource
                = env.fromElements(Tuple2.of(1, 1), Tuple2.of(1, 2),
                Tuple2.of(2, 1), Tuple2.of(2, 2));

        KeyedStream<Tuple2<Integer, Integer>, Integer> keyed1Stream = tuple2_1DataStreamSource
                .keyBy(t -> t.f0);

        DataStreamSource<Tuple2<Integer, Integer>> tuple2_2DataStreamSource
                = env.fromElements(Tuple2.of(1, 3), Tuple2.of(1, 4),
                Tuple2.of(2, 5));

        KeyedStream<Tuple2<Integer, Integer>, Integer> keyed2Stream = tuple2_2DataStreamSource
                .keyBy(t -> t.f0);


        // CoMapFunction tags each element with which input stream it came from
        // (10 for the first stream, 20 for the second).
        SingleOutputStreamOperator<Tuple3<Integer, Integer, Integer>> coMapStream = tuple2_1DataStreamSource.connect(tuple2_2DataStreamSource).map(new CoMapFunction<Tuple2<Integer, Integer>, Tuple2<Integer, Integer>, Tuple3<Integer, Integer, Integer>>() {
            @Override
            public Tuple3<Integer, Integer, Integer> map1(Tuple2<Integer, Integer> t2) throws Exception {
                return Tuple3.of(t2.f0, t2.f1, 10);
            }

            @Override
            public Tuple3<Integer, Integer, Integer> map2(Tuple2<Integer, Integer> t2) throws Exception {
                return Tuple3.of(t2.f0, t2.f1, 20);
            }
        });
//        coMapStream.print("coMap");

        env.execute();


    }

}
