package com.shujia.sql

import org.apache.flink.configuration.Configuration
import org.apache.flink.streaming.api.scala.StreamExecutionEnvironment
import org.apache.flink.table.api.EnvironmentSettings
import org.apache.flink.table.api.bridge.scala.StreamTableEnvironment
import org.apache.flink.table.catalog.hive.HiveCatalog

object Demo7FlinkOnHIve {

  /**
    * Streaming Flink SQL job backed by the Hive metastore.
    *
    * Registers a [[HiveCatalog]] so tables defined in Hive can be queried
    * directly, then continuously aggregates the per-class student count from
    * the `student` table and inserts the result into `mysql_clazz_num`.
    */
  def main(args: Array[String]): Unit = {
    val bsEnv: StreamExecutionEnvironment = StreamExecutionEnvironment.getExecutionEnvironment

    val bsSettings: EnvironmentSettings = EnvironmentSettings
      .newInstance()
      .useBlinkPlanner() // use the Blink planner
      .inStreamingMode() // run in streaming mode
      .build()

    // Streaming table environment
    val bsTableEnv: StreamTableEnvironment = StreamTableEnvironment.create(bsEnv, bsSettings)

    val configuration = new Configuration()
    // If a NOT NULL sink column receives null, silently drop the row
    // instead of failing the job.
    configuration.setString("table.exec.sink.not-null-enforcer", "drop")
    // Required for the /*+ OPTIONS(...) */ hint in the SQL below to be
    // honored; without it dynamic table options raise a validation error.
    configuration.setString("table.dynamic-table-options.enabled", "true")
    bsTableEnv.getConfig.addConfiguration(configuration)

    /**
      * Register the Hive metastore as a catalog so Hive-managed tables
      * are directly readable from Flink SQL.
      */
    val name = "myhive"
    val defaultDatabase = "flink"
    val hiveConfDir = "/usr/local/soft/hive-1.2.1/conf" // directory containing hive-site.xml

    val hive = new HiveCatalog(name, defaultDatabase, hiveConfDir)
    // Register the catalog — reuse `name` rather than repeating the
    // "myhive" literal, so renaming the catalog is a one-line change.
    bsTableEnv.registerCatalog(name, hive)

    // Switch the current catalog so unqualified table names
    // (student, mysql_clazz_num) resolve against Hive.
    bsTableEnv.useCatalog(name)

    /**
      * Continuous query over the Hive tables. The OPTIONS hint overrides
      * the `student` source connector to start from the latest offset.
      */
    bsTableEnv.executeSql(
      """
        |insert into mysql_clazz_num
        |select clazz,count(1) as num from
        |student  /*+ OPTIONS('scan.startup.mode'='latest-offset') */
        |group by clazz
        |
      """.stripMargin)
  }

}
