package com.shujia.flink.tf;

import org.apache.flink.api.common.functions.ReduceFunction;
import org.apache.flink.api.common.typeinfo.Types;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.KeyedStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;

/**
 * Flink streaming word-count demo showing two equivalent ways to use
 * {@code reduce} on a keyed stream: an anonymous {@link ReduceFunction}
 * and a lambda expression.
 */
public class Demo5Reduce {
    public static void main(String[] args) throws Exception {
        // Set up the streaming execution environment.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

        // Read raw text lines from a socket (host "master", port 8888).
        DataStream<String> lines = env.socketTextStream("master", 8888);

        // Split each comma-separated line into (word, 1) pairs.
        // The explicit TypeInformation is required because the lambda's
        // generic types are erased at compile time.
        DataStream<Tuple2<String, Integer>> pairs = lines
                .flatMap((line, out) -> {
                    for (String token : line.split(",")) {
                        out.collect(Tuple2.of(token, 1));
                    }
                }, Types.TUPLE(Types.STRING, Types.INT));

        // Group the pairs by the word (first tuple field).
        KeyedStream<Tuple2<String, Integer>, String> keyed = pairs.keyBy(pair -> pair.f0);

        // Variant 1: reduce implemented as an anonymous class.
        DataStream<Tuple2<String, Integer>> wordCountDS = keyed
                .reduce(new ReduceFunction<Tuple2<String, Integer>>() {
                    /**
                     * Invoked once per incoming record.
                     *
                     * @param acc the previous aggregation result, kept as
                     *            keyed state (stateful computation)
                     * @param cur the record currently being processed; its
                     *            key equals {@code acc}'s key because the
                     *            stream was keyed upstream
                     */
                    @Override
                    public Tuple2<String, Integer> reduce(Tuple2<String, Integer> acc,
                                                          Tuple2<String, Integer> cur) throws Exception {
                        // Debug output of the running state and the new record.
                        System.out.println(acc);
                        System.out.println(cur);
                        // Emit the updated running count for this word.
                        return Tuple2.of(acc.f0, acc.f1 + cur.f1);
                    }
                });

        // Variant 1's sink is intentionally disabled in this demo.
        //wordCountDS.print();

        // Variant 2: the same aggregation expressed as a lambda.
        DataStream<Tuple2<String, Integer>> countDS = keyed
                .reduce((left, right) -> Tuple2.of(left.f0, left.f1 + right.f1));

        countDS.print();

        // Flink programs are lazy — nothing runs until execute() is called.
        env.execute();


    }
}
