package cn.itcast.flink.transformation;

import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;

/**
 * Demonstrates the DataStream {@code union} transformation: two bounded
 * element streams of the same type are merged into a single stream and
 * printed to stderr.
 *
 * <p>Steps:
 * <ol>
 *   <li>Initialize the Flink stream execution environment.</li>
 *   <li>Create the two source streams.</li>
 *   <li>Union them into one stream.</li>
 *   <li>Print the merged stream (to stderr).</li>
 *   <li>Submit and execute the job.</li>
 * </ol>
 */
public class UnionDemo {
    public static void main(String[] args) throws Exception {
        // 1) Obtain the streaming execution environment.
        final StreamExecutionEnvironment env =
                StreamExecutionEnvironment.getExecutionEnvironment();

        // 2) Build two in-memory sources sharing the element type String.
        DataStreamSource<String> firstStream =
                env.fromElements("hadoop", "hive", "flume");
        DataStreamSource<String> secondStream =
                env.fromElements("hadoop", "hive", "spark");

        // 3) Merge both streams; union keeps duplicates and requires
        //    identical element types.
        DataStream<String> merged = firstStream.union(secondStream);

        // 4) Emit each element to stderr (printToErr, as in the demo).
        merged.printToErr();

        // 5) Trigger job execution; nothing runs until execute() is called.
        env.execute();
    }
}