package com.zhang.sql2;

import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;
import org.apache.flink.table.functions.TableFunction;

/**
 * @title:
 * @author: zhang
 * @date: 2022/3/26 16:50
 */
/**
 * Demo of a user-defined table function (UDTF): reads lines from a CSV-backed
 * filesystem table, splits each line into words via a LATERAL TABLE join, and
 * prints (line, word, length) rows to stdout.
 */
public class MyTableFunction {
    public static void main(String[] args) throws Exception {

        // Set up the streaming environment and its table-API bridge.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);
        StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env);

        // Register a connector-backed source table over input/test.txt.
        String ddl =
                "create table clickTable (" +
                " line STRING" +
                " ) WITH ( " +
                " 'connector' = 'filesystem'," +
                " 'path' = 'input/test.txt'," +
                " 'format' = 'csv'" +
                " )";
        tableEnv.executeSql(ddl);

        // Make the UDTF callable from SQL under the name "MySplit".
        tableEnv.createTemporarySystemFunction("MySplit", MySplit.class);

        // Cross-join every input row with the rows produced by MySplit(line),
        // exposing the UDTF's output columns as t(word, len).
        Table result = tableEnv.sqlQuery(
                "select line,word ,len from clickTable ," +
                " LATERAL TABLE(MySplit(line)) t(word,len)");

        // Convert back to a DataStream, print, and launch the job.
        tableEnv.toDataStream(result).print();
        env.execute();

    }

    /**
     * Splits an input string on single spaces and emits one
     * (word, word-length) tuple per token.
     */
    public static class MySplit extends TableFunction<Tuple2<String, Integer>> {
        public void eval(String input) {
            String[] tokens = input.split(" ");
            for (int i = 0; i < tokens.length; i++) {
                collect(Tuple2.of(tokens[i], tokens[i].length()));
            }
        }
    }
}
