package com.shujia.flink.sql

import org.apache.flink.streaming.api.TimeCharacteristic
import org.apache.flink.streaming.api.scala._
import org.apache.flink.table.api._
import org.apache.flink.table.api.bridge.scala._
import org.apache.flink.table.catalog.hive.HiveCatalog

/**
  * Flink SQL job that reads from a Hive-managed table and writes 5-second
  * tumbling-window word counts into a MySQL sink table.
  *
  * Table metadata (source and sink definitions) is resolved through the Hive
  * metastore via a registered [[HiveCatalog]].
  */
object Demo12OnHIve {

  def main(args: Array[String]): Unit = {
    // Bootstrap the streaming execution environment.
    val env: StreamExecutionEnvironment = StreamExecutionEnvironment.getExecutionEnvironment

    // Switch the job to event-time semantics so TUMBLE windows are driven
    // by the records' own timestamps rather than processing time.
    env.setStreamTimeCharacteristic(TimeCharacteristic.EventTime)

    // Blink planner in streaming mode — required for the SQL features used below.
    val settings: EnvironmentSettings =
      EnvironmentSettings
        .newInstance()
        .useBlinkPlanner()
        .inStreamingMode()
        .build()

    // Table environment layered on top of the streaming environment.
    val tableEnv: StreamTableEnvironment = StreamTableEnvironment.create(env, settings)

    // Hive catalog: catalog name, default database, and the Hive conf directory
    // (must contain hive-site.xml pointing at the metastore).
    val hiveCatalog = new HiveCatalog(
      "myhive",
      "flink",
      "/usr/local/soft/hive-1.2.1/conf")

    // Register the catalog and make it the current one so unqualified table
    // names below resolve against Hive's metastore.
    tableEnv.registerCatalog("myhive", hiveCatalog)
    tableEnv.useCatalog("myhive")

    // Submit the insert job: per-word counts over 5-second tumbling
    // event-time windows, written to the pre-defined MySQL sink table.
    tableEnv.executeSql(
      """
        |insert into mysql_sink
        |select
        | word,
        | TUMBLE_END(user_action_time, INTERVAL '5' SECOND) as window_end,
        | count(1) as c
        |from
        | words_event_time
        |group by
        | word,
        | TUMBLE(user_action_time, INTERVAL '5' SECOND)
        |
      """.stripMargin)
  }

}
