package com.xzx.flink.tableapi;

import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.annotation.DataTypeHint;
import org.apache.flink.table.annotation.InputGroup;
import org.apache.flink.table.api.Schema;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;
import org.apache.flink.table.functions.ScalarFunction;

import static org.apache.flink.table.api.Expressions.$;

/**
 * Demonstrates a user-defined scalar function (UDF) registered with the Table API.
 *
 * @version 1.0
 * @author xinzhixuan
 * @date 2022/6/5 21:58
 */
public class TableAPI_05_UDF_ScalarFunction {
    public static void main(String[] args) throws Exception {

        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);

        // Source stream of plain integers; exposed to the Table API as column "age".
        DataStreamSource<Integer> stream = env.fromElements(23, 24, 20);
        StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env);
        Table table = tableEnv.fromDataStream(stream, Schema.newBuilder()
                .columnByExpression("age", $("f0"))
                .build());

        // Register the UDF under the name "hash" so it is callable from SQL.
        tableEnv.createTemporarySystemFunction("hash", HashFunction.class);

        // Concatenating the Table object into the SQL string registers it as an inline view.
        tableEnv.toDataStream(tableEnv.sqlQuery("select hash(age) from " + table)).print();

        env.execute();

    }

    /**
     * Scalar UDF that accepts a value of any type and returns its hash code.
     *
     * <p>The evaluation method must be named {@code eval} — this is a framework
     * convention, similar to UDFs in Spark.
     */
    public static class HashFunction extends ScalarFunction {

        /**
         * Returns the {@link Object#hashCode()} of the given value.
         *
         * @param o any value ({@link InputGroup#ANY}); may be null
         * @return the value's hash code, or 0 for null
         */
        public int eval(@DataTypeHint(inputGroup = InputGroup.ANY) Object o) {
            // Guard against null: InputGroup.ANY admits null values, and
            // calling hashCode() on null would throw a NullPointerException.
            // (Fixed: the previous "+ 1" contradicted the documented contract
            // of returning the value's hash code.)
            return o == null ? 0 : o.hashCode();
        }
    }
}


