package com.codejiwei.flink.table;

import org.apache.flink.connector.jdbc.catalog.JdbcCatalog;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

import java.util.Arrays;

/**
 * author: codejiwei
 * date: 2023/8/7
 * desc: flink table api with catalog
 **/
public class Flink_Table_API_02 {

    /**
     * Registers a JDBC (PostgreSQL) catalog with the Flink table environment,
     * lists its tables, and runs a sample SELECT against one of them.
     */
    public static void main(String[] args) {

        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        StreamTableEnvironment tEnv = StreamTableEnvironment.create(env);

        // SECURITY NOTE(review): database credentials were hardcoded in source.
        // Prefer supplying them via environment variables; the literals remain
        // only as fallbacks so existing runs keep working.
        String username = System.getenv().getOrDefault("PG_CATALOG_USER", "datacube_sdses_test");
        String password = System.getenv().getOrDefault("PG_CATALOG_PASSWORD", "WzG6_VG9on");

        // JdbcCatalog(catalogName/defaultDatabaseAlias, defaultDatabase, username, password, baseUrl)
        tEnv.registerCatalog("custom_catalog", new JdbcCatalog(
                "default",
                "db_datacube_sdses_test",
                username,
                password,
                "jdbc:postgresql://192.168.102.154:5866"
        ));

        // Use the custom catalog as the current catalog for unqualified names.
        tEnv.useCatalog("custom_catalog");
//        tEnv.useDatabase("mydb");

        // Show which tables the catalog exposes.
        String[] strings = tEnv.listTables();
        System.out.println(Arrays.toString(strings));

        // PostgreSQL tables are addressed as `schema.table` inside backticks.
        // The original code discarded the TableResult, so the query produced
        // no visible output; print() consumes and displays the rows.
        tEnv.executeSql("SELECT * FROM custom_catalog.db_datacube_sdses_test.`test_schema.data_etl`")
                .print();
    }
}
