package com.atguigu.flink.day04;

import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;

/**
 * @author Felix
 * @date 2023/12/4
 * Demonstrates stream merging with {@code union}.
 * Requirement: all streams participating in the union must have the same element type.
 */
public class Flink03_union {
    public static void main(String[] args) throws Exception {
        // Local environment with the Flink web UI enabled so the job graph can be inspected.
        StreamExecutionEnvironment env =
            StreamExecutionEnvironment.createLocalEnvironmentWithWebUI(new Configuration());

        // Three sources that all produce String elements — matching element types
        // are a precondition for union.
        DataStreamSource<String> socketStreamA = env.socketTextStream("hadoop102", 8888);
        SingleOutputStreamOperator<String> socketStreamB = env.socketTextStream("hadoop102", 8889);
        DataStreamSource<String> boundedStream = env.fromElements("a", "b", "c");

        // union is variadic: all three streams are merged in a single call,
        // equivalent to chaining socketStreamA.union(socketStreamB).union(boundedStream).
        DataStream<String> merged = socketStreamA.union(socketStreamB, boundedStream);

        merged.print();
        env.execute();
    }
}
