package com.z.demo.catalog.pg

import org.apache.flink.connector.jdbc.catalog._
import org.apache.flink.table.api._
import org.apache.flink.table.catalog.{CatalogDatabase, CatalogDatabaseImpl}

import java.util
import scala.collection.mutable

/**
 * @author wenzheng.ma
 * @date 2021-11-27 13:53
 * @desc Postgres Catalog demo / test case
 */
/**
 * Demo entry point: registers a Flink `PostgresCatalog` backed by a local
 * Postgres instance, makes it the session's current catalog, and creates a
 * database through it.
 *
 * NOTE(review): requires a reachable Postgres at `baseUrl` with the given
 * credentials — this main does external I/O only and produces no return value.
 */
object PGCatalogDemo {
  def main(args: Array[String]): Unit = {
    // Pure Table API session in streaming mode; no DataStream env is needed
    // for catalog operations.
    val settings = EnvironmentSettings.newInstance().inStreamingMode().build()
    val tableEnv = TableEnvironment.create(settings)

    // Catalog registration name and Postgres connection parameters.
    val name            = "catalog"
    val defaultDatabase = "pg_flink_catalog"
    val username        = "postgres"
    val password        = "postgres"
    val baseUrl         = "jdbc:postgresql://192.168.1.113:5432"

    val catalog = new PostgresCatalog(name, defaultDatabase, username, password, baseUrl)
    tableEnv.registerCatalog(name, catalog)

    // Set the JdbcCatalog as the current catalog of the session.
    // Use `name` rather than repeating the "catalog" literal so the
    // registration name and the lookup name cannot drift apart.
    tableEnv.useCatalog(name)

    // Create "db01" with no extra properties; the final argument is
    // ignoreIfExists = true, which makes repeated runs idempotent.
    val prop = new util.HashMap[String, String]()
    catalog.createDatabase("db01", new CatalogDatabaseImpl(prop, "test db01"), true)

    // Example DDL kept for reference: a Postgres catalog is read-only for
    // non-JDBC connectors, so this Kafka table would be rejected if executed.
    /*tableEnv.executeSql(
      """
       create table catalog.pg_flink_catalog.student (
        |id int ,
        |name string
        |)with(
        | 'connector' = 'kafka',
        |  'topic' = 'test-topic',
        |  'properties.bootstrap.servers' = 'server110:9092',
        |  'properties.group.id' = 'testGroup',
        |  'scan.startup.mode' = 'latest-offset',
        |  'format' = 'csv'
        |)
        |""".stripMargin)*/
  }
}
