package com.atguigu.agg;

import com.atguigu.bean.WaterSensor;
import com.atguigu.function.WaterSensorFunction;
import org.apache.flink.api.common.functions.ReduceFunction;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;

/**
 * @author yhm
 * @create 2024-04-02 14:07
 */
/**
 * Demonstrates {@code KeyedStream.reduce} by re-implementing the built-in
 * {@code maxBy("vc")} aggregation: for each sensor id, keep the whole record
 * with the highest vc seen so far.
 *
 * <p>Contrast with {@code max("vc")}: {@code max} would keep only the max vc
 * value and take every other field from the first record of the key, whereas
 * this {@code maxBy}-style reduce returns the entire winning record.
 */
public class Test02_Reduce {
    public static void main(String[] args) throws Exception {
        // 1. Create the execution environment (expose the Flink Web UI on 8081).
        Configuration conf = new Configuration();
        conf.setInteger("rest.port",8081);

        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment(conf);
        // Single parallelism so the printed output is deterministic and ordered.
        env.setParallelism(1);

        // 2. Read the source: one text line per event from a socket.
        DataStreamSource<String> dataStreamSource = env.socketTextStream("hadoop102", 7777);

        // 3. Process: parse lines into WaterSensor records, key by sensor id,
        //    then reduce to the record with the largest vc per key.
        //    ReduceFunction is a functional interface, so a lambda suffices;
        //    on ties (>=) the earlier record wins, matching maxBy semantics.
        SingleOutputStreamOperator<WaterSensor> maxReduceStream = dataStreamSource.flatMap(new WaterSensorFunction())
                .keyBy(WaterSensor::getId)
                .reduce((ReduceFunction<WaterSensor>) (value1, value2) ->
                        value1.vc >= value2.vc ? value1 : value2);

        // 4. Sink: print each updated per-key maximum to stdout.
        maxReduceStream.print();

        // 5. Trigger job execution (blocks until the job terminates).
        env.execute();
    }
}
