package com.hrt.flink.icebergoperate;

import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.TableResult;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

import java.util.Arrays;

/**
 *  SQL API 操作Iceberg
 */
/**
 * Demonstrates operating on Iceberg via the Flink SQL API.
 *
 * <p>The active code registers a Hadoop-type Iceberg catalog named
 * {@code hadoop_iceberg} whose warehouse lives on HDFS. Typical follow-up
 * statements (run against the same table environment) would be:
 *
 * <pre>{@code
 * CREATE DATABASE IF NOT EXISTS iceberg_db;
 * CREATE TABLE IF NOT EXISTS hadoop_iceberg.iceberg_db.flink_iceberg_tbl
 *     (id INT, name STRING, age INT);
 * INSERT INTO hadoop_iceberg.iceberg_db.flink_iceberg_tbl
 *     VALUES (1,'zs',18),(2,'ls',19),(3,'ww',20);
 * SELECT * FROM hadoop_iceberg.iceberg_db.flink_iceberg_tbl;
 * }</pre>
 *
 * <p>Note: {@code executeSql} submits each statement as its own job, so no
 * explicit {@code env.execute()} call is required for the DDL/DML shown here.
 */
public class SQLAPIWithIceberg {

    /**
     * Entry point: sets up the streaming/table environments and creates the
     * Iceberg catalog.
     *
     * @param args unused command-line arguments
     * @throws Exception if environment setup or SQL submission fails
     */
    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env =
                StreamExecutionEnvironment.getExecutionEnvironment();

        // Iceberg commits data on checkpoints; without checkpointing enabled,
        // streaming writes to Iceberg tables would never become visible.
        env.enableCheckpointing(10000);

        StreamTableEnvironment tblEnv = StreamTableEnvironment.create(env);

        // Register a Hadoop-catalog-backed Iceberg catalog; table metadata and
        // data files are stored under the given HDFS warehouse path.
        tblEnv.executeSql("CREATE CATALOG hadoop_iceberg WITH (" +
                "'type'='iceberg'," +
                "'catalog-type'='hadoop'," +
                "'warehouse'='hdfs://mycluster/flink_iceberg')");
    }
}
