package com.hu.flink12.api.transformation;

import org.apache.flink.streaming.api.datastream.ConnectedStreams;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.co.CoMapFunction;

/**
 * Demonstrates Flink's two stream-combination operators:
 * {@code union} merges any number of streams of the SAME element type,
 * while {@code connect} pairs exactly two streams of possibly DIFFERENT
 * types, which are then reconciled through a {@link CoMapFunction}.
 *
 * @Author: hujianjun
 * @Date: 2021/2/4 0:17
 * @Describe:
 */
public class UnionAndConnStream {
    public static void main(String[] args) throws Exception {
        // 1. Obtain the streaming execution environment.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

        // 2. Build the bounded source streams.
        DataStream<String> ds1 = env.fromElements("spark", "flink hive", "hue");
        DataStream<String> ds2 = env.fromElements("spark", "flink hive", "hue");
        DataStream<String> ds3 = env.fromElements("spark", "flink hive", "hue2");
        DataStream<Long> ds4 = env.fromSequence(1, 10);

        // 3. Transformations.
        // union() demands identical element types, so ds4 (Long) could not
        // be unioned with the String streams — connect() is used for that.
        DataStream<String> union = ds1.union(ds2).union(ds3);

        ConnectedStreams<String, Long> connect = ds1.connect(ds4);
        SingleOutputStreamOperator<String> connectRes =
                connect.map(new CoMapFunction<String, Long, String>() {
                    @Override
                    public String map1(String value) throws Exception {
                        // Elements arriving from the first (String) stream.
                        return "ds1:" + value;
                    }

                    @Override
                    public String map2(Long value) throws Exception {
                        // Elements arriving from the second (Long) stream.
                        return "ds4:" + value;
                    }
                });

        // 4. Sink: print both result streams to stdout.
        union.print();
        connectRes.print();

        // 5. Trigger job execution.
        env.execute();
    }
}
