package com.atguigu.chapter5.transfrom;

import com.atguigu.chapter5.source.WaterSensor;
import org.apache.flink.api.common.functions.ReduceFunction;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.KeyedStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.KeyedProcessFunction;
import org.apache.flink.streaming.api.functions.ProcessFunction;
import org.apache.flink.util.Collector;

import java.util.ArrayList;

import static org.apache.hadoop.metrics2.impl.MsInfo.Context;

/**
 * @ClassName: roll_Transfrom
 * @Description:
 * @Author: kele
 * @Date: 2021/4/5 16:39
 **/
/**
 * Tutorial: rolling (incremental) aggregations on a keyed stream.
 *
 * <p>Builds a small bounded stream of {@link WaterSensor} readings, keys it by
 * sensor id, and demonstrates several aggregation styles (sum/max/maxBy/reduce
 * are kept as commented-out alternatives). The live example computes a running
 * sum of {@code vc} per key with a {@link KeyedProcessFunction}.
 */
public class roll_Transfrom {

    public static void main(String[] args) throws Exception {

        // REST/web-UI port for local runs.
        Configuration conf = new Configuration();
        conf.setInteger("rest.port", 20000);

        // FIX: the original created `conf` but never passed it to the
        // environment, so the rest.port setting was silently ignored.
        // (getExecutionEnvironment(Configuration) is available since Flink 1.12.)
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment(conf);

        env.setParallelism(2);

        // Bounded sample data: two sensors with timestamped vc readings.
        ArrayList<WaterSensor> waterSensors = new ArrayList<>();
        waterSensors.add(new WaterSensor("sensor_1", 1607527992000L, 20));
        waterSensors.add(new WaterSensor("sensor_1", 1607527994000L, 50));
        waterSensors.add(new WaterSensor("sensor_1", 1607527996000L, 30));
        waterSensors.add(new WaterSensor("sensor_2", 1607527993000L, 10));
        waterSensors.add(new WaterSensor("sensor_2", 1607527995000L, 30));

        DataStreamSource<WaterSensor> ds = env.fromCollection(waterSensors);

        // Partition by sensor id: all records for one id reach the same subtask.
        KeyedStream<WaterSensor, String> ds1 = ds.keyBy(WaterSensor::getId);

//        ds1.sum("vc").print();

        /*ds1.max("vc").print();

        ds1.maxBy("vc",false).print();
*/

        // Rolling aggregations: input and output types must be identical.

       /* ds1.reduce(new ReduceFunction<WaterSensor>() {
            @Override
            // Works like the reduce operator in Spark: pairwise fold per key.
            public WaterSensor reduce(WaterSensor value1,
                                      WaterSensor value2) throws Exception {

                return new WaterSensor(value1.getId(),value1.getTs(),value1.getVc()+ value2.getVc());
            }
        }).print();*/


        /**
         * Sum the vc field of WaterSensor records, emitting only the totals.
         */

       /* ds1.process(new ProcessFunction<WaterSensor, Integer>() {

            int sum = 0; // One accumulator per parallel subtask, not per key.
            @Override
            public void processElement(WaterSensor value, //input
                                       Context ctx,
                                       Collector<Integer> out) throws Exception {

                sum += value.getVc();

                out.collect(sum);

*//*   With parallelism 2 the accumulation happens per partition, so the
                 printed values are per-subtask running sums, not a global sum:
                1> 10
                1> 40
                2> 20
                2> 70
                2> 100
*//*


            }
        }).print();*/


        // Per-key running sum of vc.
        // FIX: the original called keyBy(WaterSensor::getId) on ds1 again,
        // re-shuffling a stream already keyed by the same selector; applying
        // process() to ds1 directly produces the same result without the
        // redundant network exchange.
        ds1.process(new KeyedProcessFunction<String, WaterSensor, Integer>() {

                    // WARNING(review): a plain instance field is per-subtask,
                    // not per-key, and is not checkpointed. With parallelism 2
                    // the two keys here land on different subtasks, so the demo
                    // output looks per-key, but a true per-key sum should use
                    // Flink keyed ValueState instead.
                    int sum = 0;

                    @Override
                    public void processElement(WaterSensor value,
                                               KeyedProcessFunction<String, WaterSensor, Integer>.Context ctx,
                                               Collector<Integer> out) throws Exception {

                        sum += value.getVc();
                        out.collect(sum);

                    }
                }).print();

        env.execute();

    }

}
