
/*
 * Copyright © 2021 https://www.cestc.cn/ All rights reserved.
 */

package com.zx.learn.flink.transform;

import org.apache.flink.api.common.RuntimeExecutionMode;
import org.apache.flink.streaming.api.datastream.ConnectedStreams;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.co.CoMapFunction;

/**
 * Demonstrates merging two String streams with {@code union}
 * and combining a String stream with a Long stream via {@code connect}.
 */
public class UnionAndConnectDemo {
    /**
     * Runs a small Flink job contrasting two stream-combination operators:
     * {@code union} (requires identical element types, merges without
     * de-duplication) and {@code connect} (allows two different element
     * types, each handled by its own map function).
     *
     * @param args unused command-line arguments
     * @throws Exception if the Flink job fails to execute
     */
    public static void main(String[] args) throws Exception {
        // 1. Execution environment.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setRuntimeMode(RuntimeExecutionMode.AUTOMATIC);
        // Parallelism 1 keeps the printed output order deterministic for the demo.
        env.setParallelism(1);

        // 2. Sources: two String streams and one Long stream.
        DataStream<String> ds1 = env.fromElements("hadoop", "spark", "flink");
        DataStream<String> ds2 = env.fromElements("hadoop", "spark", "flink");
        DataStream<Long> ds3 = env.fromElements(1L, 2L, 3L);

        // 3. Transformations.
        // 3.1 union: both input streams must share the same element type;
        //     elements are merged without de-duplication.
        DataStream<String> union = ds1.union(ds2);
        union.print("union:");

        // 3.2 connect: the two streams may have different element types;
        //     the CoMapFunction handles each side separately.
        ConnectedStreams<String, Long> connect = ds1.connect(ds3);
        SingleOutputStreamOperator<String> connected = connect.map(new CoMapFunction<String, Long, String>() {
            @Override
            public String map1(String value) throws Exception {
                return "string->string:" + value;
            }

            @Override
            public String map2(Long value) throws Exception {
                // Fixed label: this side maps Long -> String; the old prefix
                // "Long->Long:" misdescribed the output type.
                return "Long->string:" + value;
            }
        });
        connected.print("connect:");

        // 4. Trigger execution with an explicit job name for easier
        //    identification in the Flink UI/logs.
        env.execute("UnionAndConnectDemo");
    }
}
