package com.rem.flink.flink10Sql;

import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;
import org.apache.flink.table.functions.ScalarFunction;

/**
 * Scalar function demo (one-to-one mapping: one input row produces one output value).
 * <p>
 * Shows how to implement, register, and invoke a user-defined scalar function.
 *
 * @author Rem
 * @date 2022-11-08
 */

public class UdfTest_ScalarFunction {

    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);

        StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env);

        // 1. Define the event-time attribute directly in the CREATE TABLE DDL.
        String createDdl = "CREATE TABLE clickTable (" +
                " user_name STRING, " +
                " url STRING, " +
                " ts BIGINT, " +
                // Convert the BIGINT epoch-millisecond column to a TIMESTAMP attribute.
                " et AS TO_TIMESTAMP( FROM_UNIXTIME(ts / 1000) ), " +
                // Watermark with a 1-second out-of-orderness bound on the event time.
                " WATERMARK FOR et AS et - INTERVAL '1' SECOND " +
                ") WITH (" +
                " 'connector' = 'filesystem', " +
                " 'path' = 'input/clicks.csv', " +
                " 'format' =  'csv' " +
                ")";

        tableEnv.executeSql(createDdl);

        // 2. Register the custom scalar function under the SQL name "MyHash".
        tableEnv.createTemporarySystemFunction("MyHash", MyHash.class);

        // 3. Invoke the registered function from a SQL query.
        Table table = tableEnv.sqlQuery("select user_name,MyHash(user_name) from clickTable");

        tableEnv.toDataStream(table).print();
        env.execute();
    }

    /**
     * Custom scalar function that maps a string to its hash code.
     * <p>
     * Flink resolves the evaluation method by reflection: it must be a public
     * method named {@code eval}.
     */
    public static class MyHash extends ScalarFunction {
        /**
         * Returns the hash code of the input string, or {@code null} when the
         * input is {@code null}.
         * <p>
         * The boxed {@code Integer} return type (instead of primitive {@code int})
         * makes the function null-safe: a NULL {@code user_name} row would
         * otherwise trigger a NullPointerException on {@code str.hashCode()}.
         *
         * @param str the input string; may be {@code null}
         * @return the string's hash code, or {@code null} for null input
         */
        public Integer eval(String str) {
            return str == null ? null : str.hashCode();
        }
    }
}
