package com.study.chapter11;

import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;
import org.apache.flink.table.functions.ScalarFunction;

/**
 * @Description:
 * @Author: LiuQun
 * @Date: 2022/8/25 20:17
 */
public class UdfScalarFunction {
    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);

        StreamTableEnvironment tabEnv = StreamTableEnvironment.create(env);

        // 1. Define the event-time attribute directly in the CREATE TABLE DDL.
        String createDDL = " CREATE TABLE click_table (" +
                " `user` STRING, " +                // `user` is a reserved keyword — must be escaped
                " url STRING, " +
                " ts BIGINT, " +
                // Convert the epoch-millisecond ts column into a TIMESTAMP attribute.
                " event_time AS TO_TIMESTAMP( FROM_UNIXTIME( ts / 1000 ) ), " +
                // Watermark lags event_time by 1 second (bounded out-of-orderness).
                " WATERMARK FOR event_time AS event_time - INTERVAL '1' SECOND " +
                " ) WITH ( " +
                " 'connector' = 'filesystem', " +   // filesystem source connector
                " 'path' = 'input/cart.txt', " +    // input file path
                " 'format' = 'csv' " +              // CSV row format
                " ) ";
        tabEnv.executeSql(createDDL);

        // 2. Register the custom scalar function under the SQL name "MyHash".
        tabEnv.createTemporarySystemFunction("MyHash", MyHashFunction.class);

        // 3. Invoke the UDF in a SQL query.
        // FIX: `user` is a reserved keyword in Flink SQL (Calcite), so it must be
        // backtick-escaped in the SELECT just as it is in the DDL above; the
        // original unescaped "select user, ..." fails to parse at runtime.
        Table table = tabEnv.sqlQuery("select `user`, MyHash(`user`) from click_table");

        // 4. Convert the result table to a DataStream and print it.
        tabEnv.toDataStream(table).print("result：");

        env.execute();
    }

    /**
     * Custom scalar function that maps a string to its Java {@code hashCode}.
     * Must be public and carry a public {@code eval} method so the Flink
     * runtime can generate the call to it.
     */
    public static class MyHashFunction extends ScalarFunction {
        /**
         * Evaluation method invoked per row by the Flink runtime.
         *
         * @param str input string; may be null since the source column is a
         *            nullable STRING — a null previously crashed the job with NPE
         * @return the 32-bit Java hash code of the input, or 0 for null input
         */
        public int eval(String str) {
            return str == null ? 0 : str.hashCode();
        }
    }
}
