package com.atguigu.aggregate;

import com.atguigu.bean.WaterSensor;
import org.apache.flink.api.common.functions.ReduceFunction;
import org.apache.flink.api.java.functions.KeySelector;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.KeyedStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;

// NOTE(review): class name should be UpperCamelCase (ReduceDemo); kept as-is so the
// filename / entry-point contract is not broken.
public class reduceDemo {

    /**
     * Demonstrates a keyed rolling reduce: elements are partitioned by sensor id,
     * and within each key the water levels (vc) are summed incrementally.
     */
    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(2);

        // Include a duplicate key ("s1") so the ReduceFunction actually fires:
        // with all-distinct keys every group holds a single element and
        // reduce() would never be invoked, hiding any bug in it.
        DataStreamSource<WaterSensor> sensorDS = env.fromElements(
                new WaterSensor("s1", 1L, 11),
                new WaterSensor("s1", 11L, 21),
                new WaterSensor("s2", 2L, 22),
                new WaterSensor("s3", 3L, 33)
        );

        // Group by sensor id.
        // Key points:
        //   1. keyBy returns a KeyedStream ("keyed"/partitioned stream).
        //   2. keyBy is not a transformation operator — it only repartitions the
        //      data by key hash, so no parallelism can be set on it.
        KeyedStream<WaterSensor, String> sensorKeyDS = sensorDS.keyBy(WaterSensor::getId);

        // Rolling reduce per key: the first element of a key is emitted as-is;
        // every subsequent element is merged with the running aggregate.
        SingleOutputStreamOperator<WaterSensor> reduceDS = sensorKeyDS.reduce(
                new ReduceFunction<WaterSensor>() {
                    @Override
                    public WaterSensor reduce(WaterSensor value1, WaterSensor value2) throws Exception {
                        // Preserve the key (id) and the latest timestamp in the output
                        // record; the original code constructed WaterSensor from the vc
                        // sum alone, losing the key of the group it belongs to.
                        return new WaterSensor(
                                value2.getId(),
                                value2.getTs(),
                                value1.getVc() + value2.getVc());
                    }
                });

        reduceDS.print();
        env.execute();
    }
}
