package com.hkbigdata.transfer;

import com.hkbigdata.bean.WaterSensor;

import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.streaming.api.datastream.KeyedStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.KeyedProcessFunction;
import org.apache.flink.streaming.api.functions.ProcessFunction;
import org.apache.flink.util.Collector;

import java.util.ArrayList;
import java.util.List;

/**
 * @author liuanbo (2194550857@qq.com)
 * @since 2024-04-12 17:28
 */
/**
 * Flink transformation example: applying a {@link KeyedProcessFunction} to a
 * keyed stream via an anonymous inner class.
 *
 * <p>A bounded collection of {@code WaterSensor} readings is keyed by sensor id,
 * and each element is emitted as a {@code (key, vc)} tuple. Unlike a plain
 * {@code ProcessFunction}, the keyed variant exposes the current key through
 * {@code Context#getCurrentKey()}, so the key does not have to be re-read from
 * the element itself.
 */
public class Flink10_TransForm_Process_Anonymous {
    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        // Parallelism 1 keeps the printed output in a single, deterministic order.
        env.setParallelism(1);

        // Bounded sample data: three readings for sensor_1 and two for sensor_2.
        List<WaterSensor> waterSensors = new ArrayList<>();
        waterSensors.add(new WaterSensor("sensor_1", 1607527992000L, 20.0));
        waterSensors.add(new WaterSensor("sensor_1", 1607527994000L, 50.0));
        waterSensors.add(new WaterSensor("sensor_1", 1607527996000L, 50.0));
        waterSensors.add(new WaterSensor("sensor_2", 1607527993000L, 10.2));
        waterSensors.add(new WaterSensor("sensor_2", 1607527995000L, 30.2));

        env.fromCollection(waterSensors)
                .keyBy(WaterSensor::getId)
                // KeyedProcessFunction<KEY, IN, OUT>: the key type (String) must
                // match what keyBy extracts; the current key comes from the context.
                .process(new KeyedProcessFunction<String, WaterSensor, Tuple2<String, Double>>() {
                    @Override
                    public void processElement(WaterSensor value, Context ctx, Collector<Tuple2<String, Double>> out) throws Exception {
                        out.collect(new Tuple2<>(ctx.getCurrentKey(), value.getVc()));
                    }
                })
                .print();

        // A Flink job only runs when execute() is called.
        env.execute();
    }
}
