package com.gabry.flink

import java.util.Properties

import org.apache.flink.streaming.api.scala.StreamExecutionEnvironment
import org.apache.flink.table.api.scala.StreamTableEnvironment
import com.ersansiwu.bigdata.flink.catalog.FlinkCatalogPhoenixBase
object PhoenixMain {

  /** Default ZooKeeper quorum for the Phoenix JDBC connection (original hard-coded value). */
  private val DefaultZkQuorum = "hadoop02,hadoop03,hadoop05:2181"

  /** Default Phoenix upsert batch size (original hard-coded value). */
  private val DefaultUpsertBatchSize = "1000"

  /**
   * Registers a Phoenix-backed external catalog on a Flink streaming table
   * environment, runs a query against one of its tables, and prints the
   * resolved schema as a smoke test before executing the job.
   *
   * @param args optional overrides — args(0): ZooKeeper quorum,
   *             args(1): upsert batch size. With no arguments the behavior
   *             is identical to the previous hard-coded configuration.
   */
  def main(args: Array[String]): Unit = {
    // `args` was previously ignored; it now optionally overrides the
    // connection settings while keeping the old defaults.
    val zkQuorum = args.lift(0).getOrElse(DefaultZkQuorum)
    val upsertBatchSize = args.lift(1).getOrElse(DefaultUpsertBatchSize)

    val senv = StreamExecutionEnvironment.getExecutionEnvironment
    val stenv = StreamTableEnvironment.create(senv)

    val jdbcProps = new Properties
    jdbcProps.setProperty("UpsertBatchSize", upsertBatchSize)
    // NOTE(review): presumably required because Phoenix schemas are mapped
    // to HBase namespaces on this cluster — confirm against server config.
    jdbcProps.setProperty("phoenix.schema.isNamespaceMappingEnabled", "true")

    // Expose Phoenix tables under the "phoenix" catalog name, so queries can
    // address them as catalog.database.table.
    stenv.registerExternalCatalog("phoenix", new FlinkCatalogPhoenixBase(zkQuorum, jdbcProps))

    val table = stenv.sqlQuery(
      "select * from phoenix.phoenix.skynet__skynet_e_strategy_decision_result")
    table.printSchema()

    // Name the job so it is identifiable in the Flink UI (was a bare execute()).
    senv.execute("PhoenixMain")
  }
}
