package com.dada.flink;

import org.apache.flink.api.common.RuntimeExecutionMode;
import org.apache.flink.runtime.state.filesystem.FsStateBackend;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.TableConfig;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

/**
 * Example job that reads an Iceberg table ({@code hadoop_catalog.iceberg.sample2})
 * through the Flink Table API and prints the rows to stdout.
 *
 * <p>Flow: create a Hadoop-type Iceberg catalog, switch to it, then execute a
 * plain {@code SELECT} and print the result.
 */
public class IcebergRead {

    public static void main(String[] args) {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);
        env.setRuntimeMode(RuntimeExecutionMode.STREAMING);
        // Checkpoint every 60s; Iceberg sinks/sources rely on checkpoints for
        // exactly-once progress tracking in streaming mode.
        env.enableCheckpointing(60000);
        // NOTE(review): FsStateBackend is deprecated since Flink 1.13 — consider
        // HashMapStateBackend + CheckpointConfig.setCheckpointStorage(...) once the
        // target Flink version is confirmed.
        env.setStateBackend(new FsStateBackend("file:///opt/soft/checkpoint/iceberg"));
        StreamTableEnvironment tEnv = StreamTableEnvironment.create(env);

        // DDL to register the Iceberg catalog backed by a Hadoop warehouse on HDFS.
        // executeSql() takes exactly ONE statement and rejects a trailing ';',
        // so none of these strings end with a semicolon.
        String createCatalog = "CREATE CATALOG hadoop_catalog WITH (\n" +
                "  'type' = 'iceberg',\n" +
                "  'catalog-type' = 'hadoop',\n" +
                "  'warehouse' = 'hdfs://dongpengdeAir:8020/user/warehouse/iceberg',\n" +
                "  'property-version' = '1' \n" +
                ")";
        String useCatalog = "USE CATALOG hadoop_catalog";
        String useDatabase = "USE iceberg";

        // For a continuous (unbounded) read, add the streaming hint instead:
        // "select * from hadoop_catalog.iceberg.sample2 " +
        //     "/*+ OPTIONS('streaming'='true', 'monitor-interval'='1s') */"
        String select = "select * from hadoop_catalog.iceberg.sample2";

        tEnv.executeSql(createCatalog);
        tEnv.executeSql(useCatalog);
        tEnv.executeSql(useDatabase);

        // Triggers the job and prints the result rows to the client's stdout.
        tEnv.executeSql(select).print();
    }

}
