package com.atguigu.transform;

import com.atguigu.been.WaterSensor;

import org.apache.flink.api.common.functions.ReduceFunction;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.KeyedStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.aggregation.AggregationFunction;

import java.util.ArrayList;

/**
 * @author wky
 * @create 2021-07-14-15:04
 */
public class Transform_reduce {

    /**
     * Demonstrates the {@code reduce} transformation: for each key, Flink keeps a
     * rolling aggregate and combines every incoming element with it, emitting the
     * updated aggregate downstream.
     *
     * @param args unused command-line arguments
     * @throws Exception if the Flink job fails to execute
     */
    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment senv = StreamExecutionEnvironment.getExecutionEnvironment();
        // Parallelism 1 so the printed output order is deterministic for this demo.
        senv.setParallelism(1);

        ArrayList<WaterSensor> waterSensors = new ArrayList<>();
        waterSensors.add(new WaterSensor("sensor_1", 1607527992000L, 20));
        waterSensors.add(new WaterSensor("sensor_1", 1607527994000L, 50));
        waterSensors.add(new WaterSensor("sensor_1", 1607527996000L, 40));
        waterSensors.add(new WaterSensor("sensor_2", 1607527993000L, 10));
        waterSensors.add(new WaterSensor("sensor_2", 1607527995000L, 30));
        DataStreamSource<WaterSensor> streamSource = senv.fromCollection(waterSensors);

        // Partition by sensor id; reduce state is kept independently per key.
        KeyedStream<WaterSensor, String> keyedStream = streamSource.keyBy(WaterSensor::getId);
//        KeyedStream<WaterSensor, String> keyedStream = streamSource.keyBy(r -> r.getId());

        // reduce is the low-level primitive underlying the built-in aggregation
        // operators (sum/min/max). Implement the public ReduceFunction interface
        // rather than extending Flink's internal AggregationFunction base class.
        keyedStream.reduce(new ReduceFunction<WaterSensor>() {
            /**
             * @param value1 the previously aggregated value for this key
             * @param value2 the newly arrived element
             * @return aggregate keeping the key, the latest timestamp, and the summed water level (vc)
             */
            @Override
            public WaterSensor reduce(WaterSensor value1, WaterSensor value2) throws Exception {
                return new WaterSensor(value1.getId(), value2.getTs(), value1.getVc() + value2.getVc());
            }
        }).print();

        senv.execute();
    }
}
