package com.atguigu.chapter04;

import org.apache.flink.api.common.functions.FlatMapFunction;
import org.apache.flink.api.common.typeinfo.Types;
import org.apache.flink.api.java.tuple.Tuple;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.KeyedStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.util.Collector;

/**
 * Demonstrates Flink operator chaining: globally disabling chaining via
 * {@code env.disableOperatorChaining()}, and the per-operator alternatives
 * {@code disableChaining()} / {@code startNewChain()}, using a socket word count.
 *
 * @author cjp
 * @version 1.0
 * @date 2021/3/2 10:01
 */
public class Flink02_OperatorChain {
    public static void main(String[] args) throws Exception {
        // 1. Get the execution environment.
//        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        // For running inside the IDE: use a local environment with the web UI enabled,
        // so the resulting operator chains can be inspected at http://localhost:8081.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.createLocalEnvironmentWithWebUI(new Configuration());
        env.setParallelism(1);

        // Globally disable operator chaining so every operator shows up as its own task
        // in the web UI (this is the point of the demo).
        env.disableOperatorChaining();

        // 2. Read lines of text from a socket source.
        DataStreamSource<String> socketDS = env.socketTextStream("localhost", 9999);
//        DataStreamSource<String> socketDS = env.socketTextStream("hadoop1", 9999);

        // 3. Process the data
        // 3.1 Flatten: split each line on spaces and emit the individual words.
        SingleOutputStreamOperator<String> wordDS = socketDS
                .flatMap(new FlatMapFunction<String, String>() {
                    @Override
                    public void flatMap(String value, Collector<String> out) throws Exception {
                        String[] words = value.split(" ");
                        for (String word : words) {
                            out.collect(word);
                        }
                    }
                });
//                .disableChaining(); // exclude this operator from any chain (cut both before and after it)
//                .startNewChain();   // start a new chain at this operator (cut only before it)

        // 3.2 Map each word to a (word, 1) tuple.
        // returns(...) is required because type erasure hides the lambda's output type.
        SingleOutputStreamOperator<Tuple2<String, Long>> wordAndOneDS = wordDS
                .map(value -> Tuple2.of(value, 1L))
                .returns(Types.TUPLE(Types.STRING, Types.LONG));

        // 3.3 Key by word. Use a KeySelector lambda instead of the deprecated
        // index-based keyBy(0), which loses the key type (Tuple instead of String)
        // and was removed in newer Flink versions.
        KeyedStream<Tuple2<String, Long>, String> wordAndOneKS =
                wordAndOneDS.keyBy(value -> value.f0);

        // 3.4 Sum the counts (tuple field 1) within each key group.
        SingleOutputStreamOperator<Tuple2<String, Long>> resultDS = wordAndOneKS.sum(1);

        // 4. Print the running counts to stdout.
        resultDS.print();

        // 5. Trigger job execution (blocks until the job is cancelled).
        env.execute();
    }
}

/*
    算子.startNewChain();      以当前算子为起点，开启新的链条（与前面切开）
    算子.disableChaining();    当前算子不加入链条（与前后都切开）

    env.disableOperatorChaining()   全局禁用操作链


    打断链条的意义： 有些算子很忙，压力太大，有些算子比较闲，操作链可能会把它们串在一起；为了让忙的任务能更好地专心执行，把它们拆散

 */