package com.shujia.flink.transform;

import org.apache.flink.api.common.functions.FlatMapFunction;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.KeyedStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.util.Collector;

public class Demo04KeyBy {
    public static void main(String[] args) throws Exception {
        /*
         * keyBy: partitions the stream by key.
         * Records sharing the same key are routed to the same parallel Task
         * (thread); a single Task may still handle several distinct keys.
         */

        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

        // Read text lines from a socket source.
        DataStream<String> lines = env.socketTextStream("master", 8888);

        // Split each comma-separated line into words and emit (word, 1) pairs.
        // An anonymous class (rather than a lambda) keeps the generic type
        // information available to Flink's type extraction.
        DataStream<Tuple2<String, Integer>> wordPairs = lines.flatMap(
                new FlatMapFunction<String, Tuple2<String, Integer>>() {
                    @Override
                    public void flatMap(String line, Collector<Tuple2<String, Integer>> out) throws Exception {
                        String[] words = line.split(",");
                        for (int i = 0; i < words.length; i++) {
                            out.collect(Tuple2.of(words[i], 1));
                        }
                    }
                });

        // Without keyBy, records are distributed round-robin across the Tasks.
//        wordPairs.print();

        /*
         * Group by key (hash partitioning by default).
         * KeyedStream is itself a DataStream subclass that additionally
         * offers aggregation operators such as max, min and sum.
         */
        KeyedStream<Tuple2<String, Integer>, String> keyedPairs = wordPairs.keyBy(pair -> pair.f0);
//        keyedPairs.print();

        // Running per-key sum over the count field (tuple position 1).
        keyedPairs.sum(1).print();

        env.execute();
    }
}
