package com.atguigu.day10;

import com.atguigu.bean.WaterSensor;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;
import org.apache.flink.table.functions.AggregateFunction;
import org.apache.flink.table.functions.TableAggregateFunction;
import org.apache.flink.util.Collector;
import org.apache.hadoop.yarn.webapp.hamlet2.Hamlet;

import static org.apache.flink.table.api.Expressions.$;
import static org.apache.flink.table.api.Expressions.call;

/**
 * Demo of a user-defined table aggregate function (UDTAF): for each sensor id,
 * emit the two largest water-level values (vc) seen so far, ranked "1" and "2".
 *
 * <p>Input lines on localhost:9999 are expected as {@code id,ts,vc},
 * e.g. {@code sensor_1,1000,10}.
 */
public class Flink06_UDF_TableAggFun {
    public static void main(String[] args) {
        // 1. Create the stream execution environment.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

        // Parallelism 1 keeps the printed output ordering deterministic for the demo.
        env.setParallelism(1);

        // 2. Read "id,ts,vc" text lines from a socket and parse them into WaterSensor POJOs.
        SingleOutputStreamOperator<WaterSensor> waterSensorStream = env.socketTextStream("localhost", 9999)
                .map(new MapFunction<String, WaterSensor>() {
                    @Override
                    public WaterSensor map(String value) throws Exception {
                        // NOTE(review): malformed lines will throw and fail the task;
                        // acceptable for a teaching demo, not for production.
                        String[] fields = value.split(",");
                        return new WaterSensor(fields[0], Long.parseLong(fields[1]), Integer.parseInt(fields[2]));
                    }
                });

        // 3. Create the table environment on top of the stream environment.
        StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env);

        // 4. Convert the DataStream into a Table.
        Table table = tableEnv.fromDataStream(waterSensorStream);

        // Register the table aggregate function under a name, then call it by that name.
        // (The unregistered form would be: call(MyUDTAF.class, $("vc")).)
        tableEnv.createTemporarySystemFunction("MyTop2", MyUDTAF.class);

        table
                .groupBy($("id"))
                .flatAggregate(call("MyTop2", $("vc")))
                // Tuple2 output columns surface as f0/f1; rename them for readability.
                .select($("id"), $("f0").as("vcValue"), $("f1").as("rank"))
                .execute().print();
    }

    /**
     * Accumulator holding the two largest vc values seen so far for one group.
     * Fields are {@code null} until a value has been accumulated, so every int
     * input — including Integer.MIN_VALUE — is handled correctly.
     */
    public static class MyTopAcc {
        public Integer first;   // largest vc seen so far, or null if none yet
        public Integer second;  // second-largest vc seen so far, or null if fewer than two updates
    }

    /**
     * Table aggregate function (multi-in, multi-out): per group, emits up to two
     * rows — (largestVc, "1") and (secondLargestVc, "2").
     */
    public static class MyUDTAF extends TableAggregateFunction<Tuple2<Integer, String>, MyTopAcc> {

        @Override
        public MyTopAcc createAccumulator() {
            // Fields start as null ("no value yet") — no magic sentinel needed.
            return new MyTopAcc();
        }

        /**
         * Folds one vc value into the accumulator, keeping the top two.
         * Called reflectively by Flink for each input row of the group.
         */
        public void accumulate(MyTopAcc acc, Integer value) {
            if (value == null) {
                return; // ignore SQL NULL inputs rather than NPE on unboxing
            }
            if (acc.first == null || value > acc.first) {
                // New maximum: previous maximum becomes the runner-up.
                acc.second = acc.first;
                acc.first = value;
            } else if (acc.second == null || value > acc.second) {
                acc.second = value;
            }
        }

        /**
         * Emits the current top-two rows for the group. Called reflectively by
         * Flink whenever the group's result should be (re-)materialized.
         */
        public void emitValue(MyTopAcc acc, Collector<Tuple2<Integer, String>> out) {
            if (acc.first != null) {
                out.collect(Tuple2.of(acc.first, "1"));
            }
            if (acc.second != null) {
                out.collect(Tuple2.of(acc.second, "2"));
            }
        }
    }
}
