package com.yifeng.repo.flink.data.transport.bootstrap;

import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

/**
 * 通过SQL写入数据（验证）
 * @author wangzhi
 * @since 2023-06-12
 */
public class FlinkSqlWriteToIceberg {

    /**
     * Creates (if needed) an Iceberg table backed by a Hive catalog and inserts
     * three sample rows via Flink SQL.
     *
     * @param args unused command-line arguments
     * @throws Exception if the insert job fails or is interrupted while awaiting completion
     */
    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

        StreamTableEnvironment tblEnv = StreamTableEnvironment.create(env);
        // Checkpointing is required for Iceberg commits: data files are committed
        // to the table on each successful checkpoint.
        env.enableCheckpointing(1000);

        // 1. Create the Hive-backed Iceberg catalog.
        tblEnv.executeSql("CREATE CATALOG hive_catalog WITH (" +
                "  'type'='iceberg'," +
                "  'catalog-type'='hive'," +
                "  'uri'='thrift://192.168.253.132:9083'," +
                "  'clients'='5'," +
                "  'property-version'='1'," +
                "  'warehouse'='/warehouse/tablespace/managed/iceberg'" +
                ")");

        // 2. Switch to the newly registered catalog.
        tblEnv.useCatalog("hive_catalog");

        // 3. Create the database (one-time setup; kept for reference).
        // tblEnv.executeSql("create database iceberg_db");

        // 4. Use the target database.
        tblEnv.useDatabase("default");

        // 5. Create the Iceberg table `sample3` if it does not exist yet.
        //    Note: Flink SQL does not support column-level UNIQUE constraints;
        //    uniqueness is declared via PRIMARY KEY ... NOT ENFORCED instead.
        //    format-version=2 + write.upsert.enabled are required for upsert writes.
        tblEnv.executeSql("CREATE TABLE IF NOT EXISTS `hive_catalog`.`default`.`sample3` (" +
                "  `id`  INT COMMENT 'unique id'," +
                "  `name` STRING NOT NULL," +
                "  `age` INT NOT NULL," +
                "  `loc` STRING," +
                " PRIMARY KEY(`id`) NOT ENFORCED" +
                ") with ('format-version'='2', 'write.upsert.enabled'='true')");

        // 6. Insert sample rows into `sample3`. executeSql submits the job
        //    asynchronously; await() blocks until the insert has completed so the
        //    JVM does not exit before the data is committed.
        tblEnv.executeSql("insert into `hive_catalog`.`default`.`sample3` values " +
                "(1,'zs',18,'beijing'),(2,'ls',19,'shanghai'),(3,'ww',20,'guangzhou')")
                .await();
    }
}
