package com.atguigu.flink.splitAndUnionStream;

import jdk.nashorn.internal.runtime.regexp.joni.Config;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.datastream.ConnectedStreams;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.co.CoMapFunction;

public class ConnectStreamExample {

    /**
     * Demonstrates joining two streams of <em>different</em> element types with
     * {@link ConnectedStreams} and unifying them into a single {@code String}
     * stream via a {@link CoMapFunction}: {@code map1} handles elements from the
     * first (String) stream, {@code map2} elements from the second (Integer) stream.
     *
     * @param args optional overrides: {@code args[0]} host (default "hadoop102"),
     *             {@code args[1]} first socket port (default 8888),
     *             {@code args[2]} second socket port (default 8889)
     * @throws Exception if the Flink job fails to start or execute
     */
    public static void main(String[] args) throws Exception {

        // Allow host/ports to be supplied on the command line; fall back to the
        // original hard-coded values so existing zero-arg invocations still work.
        final String host = args.length > 0 ? args[0] : "hadoop102";
        final int port1 = args.length > 1 ? Integer.parseInt(args[1]) : 8888;
        final int port2 = args.length > 2 ? Integer.parseInt(args[2]) : 8889;

        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

        // Parallelism 1 keeps console output in a single, readable stream for the demo.
        env.setParallelism(1);

        DataStreamSource<String> ds1 = env.socketTextStream(host, port1);
        SingleOutputStreamOperator<Integer> ds2 =
                env.socketTextStream(host, port2).map(Integer::parseInt);

        // connect() pairs the two streams while letting each side keep its own
        // element type until the Co* function below maps both to a common type.
        ConnectedStreams<String, Integer> connectDS = ds1.connect(ds2);

        // Map both sides of the connected stream to String output.
        SingleOutputStreamOperator<String> mapDS = connectDS.map(
                new CoMapFunction<String, Integer, String>() {
                    // Invoked for elements of the first (String) stream.
                    @Override
                    public String map1(String value) throws Exception {
                        return "字符串 : " + value;
                    }

                    // Invoked for elements of the second (Integer) stream.
                    @Override
                    public String map2(Integer value) throws Exception {
                        return "数字 : " + value;
                    }
                }
        );

        mapDS.print();

        env.execute();

    }
}
