package com.softfly.flink;

import org.apache.flink.table.api.EnvironmentSettings;
import org.apache.flink.table.api.TableEnvironment;

// Demo of the Flink Table API (SQL): streams JSON records from a Kafka topic
// into a partitioned Hudi MERGE_ON_READ table.
public class TableDemo {

    public static void main(String[] args) {
        // Blink has been the default planner since Flink 1.11 and the only one
        // since 1.14, so the deprecated useBlinkPlanner() call is omitted.
        TableEnvironment tEnv =
                TableEnvironment.create(
                        EnvironmentSettings.newInstance().inStreamingMode().build());

        tEnv.executeSql(kafkaSourceDdl());
        tEnv.executeSql(hudiSinkDdl());
        tEnv.executeSql(insertDml());
    }

    /**
     * DDL for the Kafka source table: JSON records consumed from the latest
     * offset of topic HYF_FLINK_KAFKA1.
     */
    private static String kafkaSourceDdl() {
        return "CREATE TABLE sourceT (\n" +
                "  uuid varchar(20),\n" +
                "  name varchar(10),\n" +
                "  age int,\n" +
                "  ts timestamp(3),\n" +
                "  `partition` varchar(20)\n" +
                ") WITH (\n" +
                "  'connector' = 'kafka',\n" +
                "  'topic' = 'HYF_FLINK_KAFKA1',\n" +
                "  'properties.bootstrap.servers' = '10.30.200.155:15386',\n" +
                "  'properties.group.id' = 'testGroup',\n" +
                "  'scan.startup.mode' = 'latest-offset',\n" +
                "  'format' = 'json'\n" +
                ")";
    }

    /**
     * DDL for the Hudi sink table t3, partitioned by dt, MERGE_ON_READ type,
     * with 2 parallel bucket-assign and write tasks.
     */
    private static String hudiSinkDdl() {
        return "create table t3(\n" +
                "  uuid varchar(20),\n" +
                "  name varchar(10),\n" +
                "  age int,\n" +
                "  ts timestamp(3),\n" +
                "  dt bigint\n" +
                ")PARTITIONED BY (`dt`)\n" +
                "with (\n" +
                "  'connector' = 'hudi',\n" +
                "  'path' = 'hdfs:///user/hudi/t3', -- replace $HUDI_DEMO with the absolute path\n" +
                "  'table.type' = 'MERGE_ON_READ',\n" +
                "  'write.bucket_assign.tasks' = '2',\n" +
                "  'write.tasks' = '2'\n" +
                ")";
    }

    /**
     * Continuous INSERT pipeline from the Kafka source into the Hudi table,
     * writing all rows into the fixed partition dt=20220110.
     *
     * <p>NOTE: the original used INSERT OVERWRITE, but Flink only supports
     * INSERT OVERWRITE in batch execution mode; with an unbounded Kafka source
     * in streaming mode the statement is rejected at submission, so INSERT
     * INTO is required here.
     */
    private static String insertDml() {
        return "insert into t3\n" +
                "select \n" +
                "    uuid\n" +
                "    ,name\n" +
                "    ,age\n" +
                "    ,ts\n" +
                "    ,20220110 as dt\n" +
                "from sourceT";
    }

}
