package com.pengheng.transformation.aggregation;

import com.pengheng.bean.WaterSensor;
import org.apache.flink.api.common.functions.ReduceFunction;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.KeyedStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.LocalStreamEnvironment;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;

/**
 * Keyed aggregation via {@code reduce} (related operators: min / minBy / max / maxBy / sum).
 * 1. Can only be called after {@code keyBy}.
 * 2. Aggregation happens per group: records sharing the same key are reduced together.
 * 3. The two parameters of the {@code reduce} method:
 *      v1: the previously accumulated result (the running state for this key)
 *      v2: the current incoming record
 */
public class TransKeyByReduce {
    public static void main(String[] args) throws Exception {
        LocalStreamEnvironment env = StreamExecutionEnvironment.createLocalEnvironment();
        // Single parallelism so printed output is deterministic and ordered.
        env.setParallelism(1);
        DataStreamSource<WaterSensor> stream = env.fromElements(
                new WaterSensor("s1", 1L, 1),
                new WaterSensor("s1", 11L, 12),
                new WaterSensor("s2", 2L, 2),
                new WaterSensor("s3", 3L, 3)
        );
        // keyBy routes records with the same id to the same logical partition.
        KeyedStream<WaterSensor, String> keyedStream = stream.keyBy(e -> e.id);
        // Reduce contract: v1 is the accumulated state for this key, v2 is the
        // incoming record. The Flink docs require that a reduce on a KeyedStream
        // does NOT change the key field — the previous code concatenated the ids
        // (v1.id + "-" + v2.id), producing records whose id no longer matched the
        // partition key and breaking downstream keyed semantics. Keep v1.id.
        SingleOutputStreamOperator<WaterSensor> reduce = keyedStream.reduce(new ReduceFunction<WaterSensor>() {
            @Override
            public WaterSensor reduce(WaterSensor v1, WaterSensor v2) throws Exception {
                // Preserve the key, keep the first-seen timestamp, sum the water levels.
                return new WaterSensor(v1.id, v1.ts, v1.vc + v2.vc);
            }
        });
        reduce.print();
        // Triggers the actual job execution (the calls above only build the graph).
        env.execute();
    }
}
