package com.xqianli.bigdata.flink.transform;

import com.xqianli.bigdata.flink.utils.SensorReading;
import org.apache.flink.client.program.StreamContextEnvironment;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.KeyedStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;

/**
 * Flink transform example: per-sensor rolling maximum temperature via {@code reduce}.
 *
 * <p>Reads CSV lines ({@code id,timestamp,temperature}) from {@code data/sensor.txt},
 * keys the stream by sensor id, and emits — for every incoming reading — a
 * {@link SensorReading} holding the running maximum temperature seen so far for
 * that sensor, stamped with the latest reading's timestamp.
 */
public class ReduceApp01 {
    public static void main(String[] args) throws Exception {
        // Use the public factory; StreamContextEnvironment is an internal class
        // intended for CLI-submitted jobs, not for direct application use.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

        String inputPath = "data/sensor.txt";
        DataStream<String> dataStream = env.readTextFile(inputPath);

        // Parse each CSV line into a SensorReading.
        // parseLong/parseDouble avoid the deprecated boxing constructors.
        DataStream<SensorReading> mapStream = dataStream.map(line -> {
            String[] fields = line.split(",");
            return new SensorReading(fields[0], Long.parseLong(fields[1]), Double.parseDouble(fields[2]));
        });

        // Partition the stream so each sensor id is reduced independently.
        KeyedStream<SensorReading, String> keyedStream = mapStream.keyBy(SensorReading::getId);

        // Rolling reduce: keep the max temperature per key, carrying the
        // NEWEST reading's timestamp so the output reflects the latest event.
        DataStream<SensorReading> reduce = keyedStream.reduce((curSensor, newSensor) ->
                new SensorReading(
                        curSensor.getId(),
                        newSensor.getTimestamp(),
                        Math.max(curSensor.getTemperature(), newSensor.getTemperature())));

        reduce.print();

        env.execute();
    }
}
