package cn.itcast.flink.transformation;

import org.apache.flink.streaming.api.datastream.ConnectedStreams;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.co.CoMapFunction;

/**
 * Demonstrates two ways of combining streams in Flink:
 * <ul>
 *   <li>{@code union}: merge two streams of the <em>same</em> type (String + String);</li>
 *   <li>{@code connect}: pair two streams of <em>different</em> types (String + Integer)
 *       and process them with a {@link CoMapFunction}.</li>
 * </ul>
 *
 * @author lilulu
 */
public class TransformationUnionConnectDemo {

    /**
     * Builds and runs the demo pipeline: unions two String streams and prints
     * them to stdout; connects a String stream with an Integer stream, maps
     * both sides to a common String type, and prints the result to stderr.
     *
     * @param args unused command-line arguments
     * @throws Exception if job submission or execution fails
     */
    public static void main(String[] args) throws Exception {
        // 1. Execution environment (parallelism 1 keeps the printed output ordered per stream)
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);

        // 2. Sources: two String streams and one Integer stream
        DataStream<String> dataStream01 = env.fromElements("A", "B", "C", "D");
        DataStream<String> dataStream02 = env.fromElements("aa", "bb", "cc", "dd");
        DataStream<Integer> dataStream03 = env.fromElements(1, 2, 3, 4);

        // 3. Transformations
        // union: requires both streams to have the same element type; yields one merged stream.
        DataStream<String> unionData = dataStream01.union(dataStream02);
        // Sink for the union demo goes to stdout (the connect demo below uses stderr,
        // so the two outputs are easy to tell apart in the console).
        unionData.print();

        // connect: allows differing element types; each side keeps its own type
        // until the CoMapFunction maps both to a common output type (String here).
        ConnectedStreams<String, Integer> connectData = dataStream01.connect(dataStream03);
        SingleOutputStreamOperator<String> connectDataStream = connectData.map(
                new CoMapFunction<String, Integer, String>() {
                    // Invoked for elements of the first (String) stream.
                    @Override
                    public String map1(String value) throws Exception {
                        return "map1: left -> " + value;
                    }

                    // Invoked for elements of the second (Integer) stream.
                    @Override
                    public String map2(Integer value) throws Exception {
                        return "map2: right -> " + value;
                    }
                }
        );

        // 4. Sink: stderr, to distinguish connect output from the union output above
        connectDataStream.printToErr();

        // 5. Trigger execution (Flink pipelines are lazy until execute() is called)
        env.execute("TransformationUnionConnectDemo");
    }
}