package com.atguigu.day04;

import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;

/**
 * @author Felix
 * @date 2024/4/2
 * 分流
 */
public class Flink03_Split_Stream {
    /**
     * Splits one integer stream into odd and even sub-streams using {@code filter()}.
     *
     * <p>Reads lines from a socket, parses each line to an {@code Integer}, then
     * applies two independent filters on the same upstream operator and prints
     * both resulting streams.
     *
     * @param args optional overrides: {@code args[0]} = source host
     *             (default {@code "hadoop102"}), {@code args[1]} = source port
     *             (default {@code 8888}); defaults preserve the original
     *             hard-coded behavior so existing launch commands keep working
     * @throws Exception if the Flink job fails to build or execute
     */
    public static void main(String[] args) throws Exception {
        // Generalization: host/port can now be supplied on the command line.
        String host = args.length > 0 ? args[0] : "hadoop102";
        int port = args.length > 1 ? Integer.parseInt(args[1]) : 8888;

        //TODO 1. Create the stream execution environment
        //StreamExecutionEnvironment env = StreamExecutionEnvironment.createLocalEnvironmentWithWebUI(new Configuration());
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);

        //TODO 2. Read from the socket and convert each line to an Integer
        SingleOutputStreamOperator<Integer> intDS = env
                .socketTextStream(host, port)
                .map(Integer::parseInt);

        //TODO 3. Filter out odd numbers
        // setParallelism(2)/(3) below deliberately differ from the env-level
        // parallelism of 1 to demonstrate per-operator parallelism.
        SingleOutputStreamOperator<Integer> ds1 = intDS.filter(num -> num % 2 != 0).setParallelism(2);
        //TODO 4. Filter out even numbers
        SingleOutputStreamOperator<Integer> ds2 = intDS.filter(num -> num % 2 == 0).setParallelism(3);
        //TODO 5. Print both streams (labels identify which sub-stream each record came from)
        ds1.print("奇数");
        ds2.print("偶数");
        //TODO 6. Submit the job
        env.execute();

    }
}
