package com.shujia.flink.tf;

import org.apache.flink.api.common.functions.ReduceFunction;
import org.apache.flink.api.common.typeinfo.Types;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.KeyedStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;

public class Demo5Reduce {
    public static void main(String[] args) throws Exception {
        // Set up the Flink streaming execution environment.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

        // Read a stream of text lines from a socket on host "master", port 8888.
        DataStreamSource<String> lines = env.socketTextStream("master", 8888);

        // Split each comma-separated line into (word, 1) pairs.
        // The explicit TypeInformation hint is required because the lambda's
        // generic types are erased at compile time.
        DataStream<Tuple2<String, Integer>> wordPairs = lines.flatMap((line, out) -> {
            String[] words = line.split(",");
            for (String word : words) {
                out.collect(Tuple2.of(word, 1));
            }
        }, Types.TUPLE(Types.STRING, Types.INT));

        // Group the pairs by word (the first tuple field).
        KeyedStream<Tuple2<String, Integer>, String> byWord = wordPairs.keyBy(pair -> pair.f0);

        /*
         * reduce: incrementally aggregates the values of records that share the
         * same key. Demonstrated twice below — first with an anonymous inner
         * class, then with an equivalent lambda.
         */
        DataStream<Tuple2<String, Integer>> reducedVerbose = byWord.reduce(new ReduceFunction<Tuple2<String, Integer>>() {
            /**
             * Invoked once for every incoming record of a key.
             *
             * @param acc  the running aggregate for this key so far
             * @param next the newly arrived record
             * @return the updated aggregate for this key
             */
            @Override
            public Tuple2<String, Integer> reduce(Tuple2<String, Integer> acc,
                                                  Tuple2<String, Integer> next) throws Exception {
                // Sum the counts; both inputs carry the same key in f0.
                return Tuple2.of(acc.f0, acc.f1 + next.f1);
            }
        });

        // The same aggregation expressed as a lambda.
        DataStream<Tuple2<String, Integer>> reducedLambda = byWord.reduce((acc, next) -> {
            // Sum the counts of the running aggregate and the new record.
            int total = acc.f1 + next.f1;
            return Tuple2.of(acc.f0, total);
        });

        // Only the lambda variant is printed; the anonymous-class variant above
        // exists purely to show the alternative API style.
        reducedLambda.print();

        // Launch the streaming job (blocks until the job terminates).
        env.execute();

    }
}
