package com.atguigu.day04;

import org.apache.flink.configuration.Configuration;
import org.apache.flink.configuration.RestOptions;
import org.apache.flink.streaming.api.datastream.ConnectedStreams;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.co.CoMapFunction;

/**
 * @author Felix
 * @date 2024/7/12
 * This example demonstrates {@code connect}:
 *      connect joins two streams into one ConnectedStreams
 *      the two participating streams may have different element types
 *      after connecting, each of the two streams' elements must be processed separately
 */
public class Flink06_Connect {
    public static void main(String[] args) throws Exception {
        //TODO 1. Create the stream execution environment, exposing the Flink REST UI on port 8088
        Configuration conf = new Configuration();
        conf.set(RestOptions.PORT, 8088);
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment(conf);

        //TODO 2. Read raw strings from socket port 8888
        DataStreamSource<String> strStream = env.socketTextStream("hadoop102", 8888);

        //TODO 3. Read lines from socket port 8889 and parse them as integers
        SingleOutputStreamOperator<Integer> numStream =
                env.socketTextStream("hadoop102", 8889).map(Integer::parseInt);

        //TODO 4. Connect the two streams — element types may differ across the two inputs
        ConnectedStreams<String, Integer> connected = strStream.connect(numStream);

        //TODO 5. Process each side of the connected stream with its own map function
        SingleOutputStreamOperator<String> labeled = connected.map(
                new CoMapFunction<String, Integer, String>() {
                    // Called for every element of the first (String) input stream.
                    @Override
                    public String map1(String value) throws Exception {
                        return "字符串:" + value;
                    }

                    // Called for every element of the second (Integer) input stream.
                    @Override
                    public String map2(Integer value) throws Exception {
                        return "数字:" + value;
                    }
                }
        );

        //TODO 6. Print the unified result stream
        labeled.print();

        //TODO 7. Submit the job for execution
        env.execute();
    }
}
