package com.atguigu.flink.day04;

import org.apache.flink.configuration.Configuration;
import org.apache.flink.configuration.RestOptions;
import org.apache.flink.streaming.api.datastream.ConnectedStreams;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.co.CoMapFunction;
import org.apache.flink.streaming.api.functions.co.CoProcessFunction;
import org.apache.flink.util.Collector;

/**
 * @author Felix
 * @date 2024/8/13
 * Demonstrates the stream-merging operator: connect
 */
public class Flink06_Connect {
    public static void main(String[] args) throws Exception {
        // 1. Build the stream execution environment, exposing the Flink web UI on port 8088
        Configuration config = new Configuration();
        config.set(RestOptions.PORT, 8088);
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment(config);
        env.setParallelism(1);

        // 2. Read two socket streams: raw strings on 8888, and strings parsed to Integer on 8889
        DataStreamSource<String> strStream = env.socketTextStream("hadoop102", 8888);
        SingleOutputStreamOperator<Integer> numStream = env
                .socketTextStream("hadoop102", 8889)
                .map(Integer::valueOf);

        // 3. connect pairs the two streams while keeping their element types distinct
        ConnectedStreams<String, Integer> connected = strStream.connect(numStream);

        // 4. CoMapFunction provides one map method per input stream; both produce a String
        SingleOutputStreamOperator<String> unified = connected.map(new CoMapFunction<String, Integer, String>() {
            @Override
            public String map1(String value) throws Exception {
                // Handles elements arriving from the first (String) stream
                return "字符串:" + value;
            }

            @Override
            public String map2(Integer value) throws Exception {
                // Handles elements arriving from the second (Integer) stream
                return "数字:" + value;
            }
        });

        unified.print();

        env.execute();
    }
}
