package com.shujia.flink.table

import org.apache.flink.streaming.api.scala.StreamExecutionEnvironment
import org.apache.flink.table.api.EnvironmentSettings
import org.apache.flink.table.api.bridge.scala.StreamTableEnvironment
import org.apache.flink.table.catalog.hive.HiveCatalog

object Demo12FlinkOnHIve {

  /**
    * Flink-on-Hive demo: uses the Hive Metastore as Flink's catalog so that
    * tables already defined in Hive can be queried directly, without
    * re-declaring them in Flink DDL.
    *
    * NOTE(review): this job reads the Hive config from a cluster-local path,
    * so it cannot run on a developer machine — submit it to the cluster.
    */
  def main(args: Array[String]): Unit = {
    // Create the Flink streaming environment.
    val bsEnv: StreamExecutionEnvironment = StreamExecutionEnvironment.getExecutionEnvironment
    bsEnv.setParallelism(1)

    // Table environment settings: Blink planner, streaming mode.
    val bsSettings: EnvironmentSettings = EnvironmentSettings.newInstance()
      .useBlinkPlanner() // use the Blink planner
      .inStreamingMode() // streaming (not batch) mode
      .build()

    // Create the Flink table environment on top of the stream environment.
    val bsTableEnv: StreamTableEnvironment = StreamTableEnvironment.create(bsEnv, bsSettings)

    /**
      * Use Hive as Flink's metadata store (catalog).
      */

    // Catalog name and the default database to use inside it.
    val name = "myhive"
    val defaultDatabase = "flink"
    // Location of the Hive configuration files on the cluster nodes.
    val hiveConfDir = "/usr/local/soft/hive-1.2.1/conf"

    val hive = new HiveCatalog(name, defaultDatabase, hiveConfDir)
    // Register the Hive catalog under `name`.
    // (Fixed: reuse the `name` val instead of repeating the "myhive" literal,
    // so renaming the catalog only requires one change.)
    bsTableEnv.registerCatalog(name, hive)

    // Switch the session to the Hive catalog.
    bsTableEnv.useCatalog(name)

    /**
      * Tables that already exist in Hive can be used directly; no need to
      * re-create them. This code cannot run locally — submit it to the cluster.
      */

    bsTableEnv.executeSql(
      """
        |insert into city_num
        |select
        |city,
        |count(distinct mdn) as num
        |from dianxin
        |group by city
        |
      """.stripMargin)


  }

}
