package com.atguigu.flink.day04;

import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.datastream.ConnectedStreams;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.co.CoMapFunction;

/**
 * @author Felix
 * @date 2023/12/4
 * Demonstrates stream merging via connect:
 *      the two connected streams may have different element types;
 *      connect can only join exactly two streams.
 */
public class Flink04_connect {
    public static void main(String[] args) throws Exception {
        // Local environment with the web UI enabled so the job graph can be inspected.
        StreamExecutionEnvironment env =
            StreamExecutionEnvironment.createLocalEnvironmentWithWebUI(new Configuration());

        // Single parallel instance and no operator chaining, so each operator
        // shows up separately in the web UI.
        env.setParallelism(1);
        env.disableOperatorChaining();

        // First input: raw text lines from port 8888.
        DataStreamSource<String> stringSource =
            env.socketTextStream("hadoop102", 8888);

        // Second input: text lines from port 8889, parsed into Integers.
        SingleOutputStreamOperator<Integer> integerSource =
            env.socketTextStream("hadoop102", 8889)
               .map(Integer::valueOf);

        // connect() pairs the two streams while keeping their element types distinct.
        ConnectedStreams<String, Integer> connected = stringSource.connect(integerSource);

        // CoMapFunction supplies one mapper per input stream; both produce String,
        // so the result is a single unified String stream.
        SingleOutputStreamOperator<String> unified = connected.map(
            new CoMapFunction<String, Integer, String>() {

                // Invoked for elements of the first (String) stream.
                @Override
                public String map1(String value) throws Exception {
                    return "字符串流：" + value;
                }

                // Invoked for elements of the second (Integer) stream.
                @Override
                public String map2(Integer value) throws Exception {
                    return "数字流：" + value;
                }
            }
        );

        unified.print();

        env.execute();
    }
}
