package com.atguigu.day10;

import com.atguigu.bean.WaterSensor;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;
import org.apache.flink.table.functions.ScalarFunction;

import static org.apache.flink.table.api.Expressions.$;
import static org.apache.flink.table.api.Expressions.call;

/**
 * Demonstrates registering and invoking a user-defined scalar function (UDF)
 * in the Flink Table API / SQL: {@code StrLen(id)} returns the length of the
 * sensor id string. A scalar function maps one row's input to exactly one
 * output value ("one in, one out").
 */
public class Flink03_UDF_ScalarFun {
    public static void main(String[] args) {
        // 1. Obtain the stream execution environment.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

        // Single parallelism so the printed output order is deterministic for the demo.
        env.setParallelism(1);

        // 2. Build a bounded DataStream from in-memory test elements.
        DataStreamSource<WaterSensor> waterSensorDataStreamSource = env.fromElements(new WaterSensor("sensor_1", 1000L, 10),
                new WaterSensor("sensor_1", 2000L, 20),
                new WaterSensor("sensor_2", 3000L, 30),
                new WaterSensor("sensor_1", 4000L, 40),
                new WaterSensor("sensor_1", 5000L, 50),
                new WaterSensor("sensor_2", 6000L, 60));

        // 3. Obtain the table execution environment bridged to the stream env.
        StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env);

        // 4. Convert the DataStream into a Table (columns derived from WaterSensor's fields).
        Table table = tableEnv.fromDataStream(waterSensorDataStreamSource);

        // Option A: call the function inline without registering it first.
//        table.select($("id"),call(MyUDF.class,$("id"))).execute().print();


        // Option B: register the function under a name, then call it by that name.
        tableEnv.createTemporarySystemFunction("StrLen", MyUDF.class);

        // Table API invocation of the registered function:
//        table.select($("id"),call("StrLen",$("id"))).execute().print();

        // SQL invocation. Concatenating the Table object into the query string
        // implicitly registers it as an inline view; print() triggers execution,
        // so no explicit env.execute() is needed here.
        tableEnv.executeSql("select id,StrLen(id) from "+table).print();

    }

    /**
     * Custom scalar function: one value in, one value out — returns the
     * character length of the id string.
     *
     * <p>Flink discovers the evaluation method by reflection: it must be
     * {@code public} and named {@code eval}.
     */
    public static class MyUDF extends ScalarFunction{
        /**
         * @param value the id string; may be SQL NULL
         * @return the string length, or {@code null} for NULL input
         *         (preserves SQL NULL semantics instead of throwing NPE)
         */
        public Integer eval(String value){
            return value == null ? null : value.length();
        }
    }
}
