package com.chencong.transform;

import com.chencong.bean.WaterSensor;
import com.chencong.env.FlinkTableEnv;
import org.apache.flink.api.common.functions.ReduceFunction;
import org.apache.flink.streaming.api.datastream.KeyedStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;

import java.util.ArrayList;

/**
 * Demonstrates the keyed {@code reduce} transformation on a DataStream.
 *
 * @author chencong
 * @since 2021-08-18 20:26
 **/
public class Reduce {
    /**
     * Builds and runs a small Flink job: for each sensor id, incrementally
     * sums the water level ({@code vc}) of all records seen so far and prints
     * each partial result.
     *
     * @param args unused command-line arguments
     * @throws Exception if the Flink job fails to execute
     */
    public static void main(String[] args) throws Exception {
        // This is a StreamExecutionEnvironment (despite the factory's name),
        // so name the local accordingly.
        StreamExecutionEnvironment env = FlinkTableEnv.getStreamTableEnvironment();
        // Parallelism 1 keeps the printed output in a deterministic order.
        env.setParallelism(1);

        ArrayList<WaterSensor> waterSensors = new ArrayList<>();
        waterSensors.add(new WaterSensor("sensor_1", 1607527992000L, 20));
        waterSensors.add(new WaterSensor("sensor_1", 1607527994000L, 50));
        waterSensors.add(new WaterSensor("sensor_1", 1607527996000L, 50));
        waterSensors.add(new WaterSensor("sensor_2", 1607527993000L, 10));
        waterSensors.add(new WaterSensor("sensor_2", 1607527995000L, 30));

        // Partition the stream by sensor id; reduce is applied per key.
        KeyedStream<WaterSensor, String> keyedStream = env
                .fromCollection(waterSensors)
                .keyBy(WaterSensor::getId);

        // In Flink's reduce, the first argument is the previously accumulated
        // value: keep its id and ts, and add the incoming record's vc.
        keyedStream
                .reduce((ReduceFunction<WaterSensor>) (acc, cur) ->
                        new WaterSensor(acc.getId(), acc.getTs(), acc.getVc() + cur.getVc()))
                .print("reduce....");

        env.execute();
    }
}
