package edu.nepu.flink.api.aggregation;

import edu.nepu.flink.api.bean.WaterSensor;
import org.apache.flink.api.common.functions.ReduceFunction;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.KeyedStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;

/**
 * @Date 2024/2/28 21:31
 * @Created by chenshuaijun
 */
public class ReduceOperator {

    /**
     * Demonstrates the keyed {@code reduce} transformation on a bounded stream of
     * {@link WaterSensor} records: elements are keyed by sensor id and the {@code vc}
     * values are summed per key, keeping the id/ts of the running aggregate.
     *
     * @param args unused command-line arguments
     * @throws Exception if the Flink job fails to execute
     */
    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

        // Parallelism 1 so the printed output order is deterministic.
        env.setParallelism(1);

        DataStreamSource<WaterSensor> elementSource = env.fromElements(
                new WaterSensor("s1", 1L, 7),
                new WaterSensor("s1", 3L, 9),
                new WaterSensor("s2", 2L, 8),
                new WaterSensor("s2", 4L, 10),
                new WaterSensor("s3", 5L, 11)
        );

        KeyedStream<WaterSensor, String> keyedStream = elementSource.keyBy(WaterSensor::getId);

        /*
         * Key points about the reduce operator:
         * (1) The first record of a key is forwarded as-is and does NOT pass
         *     through the reduce method.
         * (2) The running aggregate per key is stored in Flink keyed state.
         */
        // ReduceFunction is a functional interface, so a lambda replaces the
        // anonymous inner class: keep the accumulator's id/ts, sum the vc values.
        keyedStream
                .reduce((ReduceFunction<WaterSensor>) (value1, value2) ->
                        new WaterSensor(value1.id, value1.ts, value1.vc + value2.vc))
                .print();

        env.execute();
    }
}
