package com.rem.flink.flink10Sql;

import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;
import org.apache.flink.table.functions.TableFunction;

/**
 * Table-function demo: expands each input row into multiple output rows
 * (a one-to-many conversion) via a LATERAL TABLE join.
 *
 * @author Rem
 * @date 2022-11-08
 */

public class UdfTest_TableFunction {
    public static void main(String[] args) throws Exception {
        // Single-task streaming environment so the printed rows come out in a
        // deterministic order.
        StreamExecutionEnvironment environment = StreamExecutionEnvironment.getExecutionEnvironment();
        environment.setParallelism(1);
        StreamTableEnvironment streamTableEnv = StreamTableEnvironment.create(environment);

        // DDL for the CSV-backed source table. It derives an event-time
        // attribute from the epoch-millisecond column and attaches a
        // watermark allowing 1 second of out-of-orderness.
        String ddl = "CREATE TABLE clickTable (" +
                " user_name STRING, " +
                " url STRING, " +
                " ts BIGINT, " +
                " et AS TO_TIMESTAMP( FROM_UNIXTIME(ts / 1000) ), " +  // BIGINT epoch millis -> TIMESTAMP
                " WATERMARK FOR et AS et - INTERVAL '1' SECOND " +     // 1s-delayed watermark on et
                ") WITH (" +
                " 'connector' = 'filesystem', " +
                " 'path' = 'input/clicks.csv', " +
                " 'format' =  'csv' " +
                ")";
        streamTableEnv.executeSql(ddl);

        // Register the UDF, then cross-join every source row with the rows
        // the table function emits for its url (LATERAL TABLE).
        streamTableEnv.createTemporarySystemFunction("MySplit", MySplit.class);
        Table result = streamTableEnv.sqlQuery("select user_name, url, word, length " +
                "from clickTable , LATERAL TABLE( MySplit(url) ) as T(word, length)");

        streamTableEnv.toDataStream(result).print();
        environment.execute();
    }

    /**
     * Splits the input string on '?' and emits one (fragment, length) pair
     * per piece. The method name {@code eval} is required by Flink's
     * {@link TableFunction} contract.
     */
    public static class MySplit extends TableFunction<Tuple2<String, Integer>> {
        public void eval(String input) {
            // '?' is a regex metacharacter, hence the escaped pattern.
            for (String piece : input.split("\\?")) {
                collect(Tuple2.of(piece, piece.length()));
            }
        }
    }
}
