package com.hhf.rrd.udf.two;

import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

/**
 * Runnable example job: registers a user-defined scalar function and invokes it
 * from Flink SQL, both producing a custom ROW type as output and consuming that
 * same custom type as input (nested UDF call).
 *
 * @author huanghaifeng15
 * @date 2022/2/15 20:07
 **/
public class UserTestSqlJob {

    public static void main(String[] args) {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env);

        // 1. Register the scalar UDF so the SQL statements below can call it by name.
        tableEnv.createTemporarySystemFunction("user_scalar_func", UserScalarFunction.class);

        // 2. Source table: the datagen connector emits one random user_id (1..10) per second.
        String sourceDdl = "CREATE TABLE source_table (\n" +
                "    user_id BIGINT NOT NULL COMMENT '用户 id'\n" +
                ") WITH (\n" +
                "  'connector' = 'datagen',\n" +
                "  'rows-per-second' = '1',\n" +
                "  'fields.user_id.min' = '1',\n" +
                "  'fields.user_id.max' = '10'\n" +
                ")\n";
        tableEnv.executeSql(sourceDdl);

        // 3. Sink table: the print connector writes each result row to stdout.
        String sinkDdl = "CREATE TABLE sink_table (\n" +
                "    result_row_1 ROW<age INT, name STRING, totalBalance DECIMAL(10, 2)>,\n" +
                "    result_row_2 STRING\n" +
                ") WITH (\n" +
                "  'connector' = 'print'\n" +
                ")\n";
        tableEnv.executeSql(sinkDdl);

        // 4. Query: exercise the UDF with a custom ROW type as its output (4.a)
        //    and as both output and input via a nested call (4.b).
        String insertDml = "INSERT INTO sink_table\n" +
                "select\n" +
                "    -- 4.a. 用户自定义类型作为输出\n" +
                "    user_scalar_func(user_id) as result_row_1,\n" +
                "    -- 4.b. 用户自定义类型作为输出及输入\n" +
                "    user_scalar_func(user_scalar_func(user_id)) as result_row_2\n" +
                "from source_table\n";
        // executeSql submits the INSERT job; print() blocks and renders its result.
        tableEnv.executeSql(insertDml).print();
    }
}
