package com.wudl.core;

import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;

import java.util.Arrays;

/**
 * Demonstrates the {@code union} operator: merges two streams into a single
 * new stream. Note that both input streams must have the same element type.
 *
 * @author wudl
 * @version 1.0
 * @date 2020/12/23 17:37
 */
public class TransformUnion {
    public static void main(String[] args) throws Exception {

        // Obtain the streaming execution environment.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        // Parallelism 1 keeps the printed output on a single task for readability.
        env.setParallelism(1);
        // Two bounded sources. union requires both streams to share the same element type.
        DataStreamSource<String> inputDs01 = env.fromCollection(Arrays.asList("hbase", "clickhouse"));
        DataStreamSource<String> inputDs02 = env.fromCollection(Arrays.asList("hadoop", "spark", "flink"));
        // union merges the two streams into one without transforming elements.
        DataStream<String> union = inputDs01.union(inputDs02);
        union.print();
        // Flink builds the pipeline lazily; execute() submits and runs the job.
        // A job name makes the run easier to identify in logs / the web UI.
        env.execute("TransformUnion");
    }
}
