package org.example.udf;

import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.annotation.DataTypeHint;
import org.apache.flink.table.annotation.InputGroup;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;
import org.apache.flink.table.functions.ScalarFunction;
import org.example.data.WaterSensor;

import static org.apache.flink.table.api.Expressions.$;
import static org.apache.flink.table.api.Expressions.call;

/**
 * 自定义标量函数（一进一出）
 */
public class MyScalarFunctionDemo {
    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

        DataStreamSource<WaterSensor> sensorDS = env.fromElements(new WaterSensor("s1", 1L, 1),
                new WaterSensor("s2", 2L, 2),
                new WaterSensor("s3", 3L, 3),
                new WaterSensor("s4", 4L, 4),
                new WaterSensor("s4", 5L, 5));

        StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env);
        //指定字段
        //Table table = tableEnv.fromDataStream(sensorDS,$("id"));
        Table sensorTable = tableEnv.fromDataStream(sensorDS);
        tableEnv.createTemporaryView("sensor", sensorTable);

        //注册函数
        tableEnv.createTemporarySystemFunction("HashFunction", HashFunction.class);

        //tableEnv.sqlQuery("select id,HashFunction(id) from sensor").execute().print();

        sensorTable.select(call("HashFunction", $("id")))
                .execute()
                .print();

    }

    public static class HashFunction extends ScalarFunction {
        //接入任何类型的输入，返回int类型输出
        public int eval(@DataTypeHint(inputGroup = InputGroup.ANY) Object o) {
            return o.hashCode();
        }
    }
}
