package com.shujia.sql

import org.apache.flink.streaming.api.scala.StreamExecutionEnvironment
import org.apache.flink.table.api.EnvironmentSettings
import org.apache.flink.table.api.bridge.scala.StreamTableEnvironment
import org.apache.flink.table.catalog.hive.HiveCatalog

object Demo08MatchStockDesc {

  /**
   * Flink SQL MATCH_RECOGNIZE demo.
   *
   * Reads the `stock` table from a Hive catalog and, per symbol, detects a
   * "price falls then rebounds" sequence: a starting row (A), one or more
   * rows with strictly decreasing price (B+), then the first row whose price
   * rises again (C). Each match is written into the `stock_desc` table.
   *
   * Requires a running Hive metastore reachable via the config dir below;
   * tables `stock` (with a `rowtime` event-time attribute) and `stock_desc`
   * must already exist in the `flink` database.
   */
  def main(args: Array[String]): Unit = {
    val bsEnv: StreamExecutionEnvironment = StreamExecutionEnvironment.getExecutionEnvironment
    val bsSettings: EnvironmentSettings = EnvironmentSettings
      .newInstance()
      .useBlinkPlanner() // use the Blink SQL planner
      .inStreamingMode() // unbounded streaming mode
      .build()
    // Build the Table environment on top of the streaming environment
    val bsTableEnv: StreamTableEnvironment = StreamTableEnvironment.create(bsEnv, bsSettings)

    val name = "myhive"
    val defaultDatabase = "flink"
    val hiveConfDir = "/usr/local/soft/hive-1.2.1/conf" // directory containing hive-site.xml

    val hive = new HiveCatalog(name, defaultDatabase, hiveConfDir)
    // Register and switch using the `name` val (was a duplicated "myhive"
    // literal) so the catalog name is defined in exactly one place.
    bsTableEnv.registerCatalog(name, hive)

    // Switch the active catalog so unqualified table names resolve in Hive
    bsTableEnv.useCatalog(name)

    // PATTERN (A B+ C): A = start row, B+ = strictly falling prices,
    // C = first rebound. AFTER MATCH SKIP TO FIRST C lets the rebound row
    // start the next match (a C can become the next pattern's A).
    bsTableEnv
      .executeSql(
        """
          |INSERT INTO stock_desc
          |SELECT  T.symbol
          |        ,T.start_time
          |        ,T.end_time
          |        ,T.first_price
          |        ,T.last_price
          |FROM stock
          |    MATCH_RECOGNIZE (
          |      PARTITION BY symbol
          |      ORDER BY rowtime
          |      MEASURES
          |       A.price as first_price,
          |       last(B.price) as last_price,
          |       FIRST(rowtime) AS start_time,
          |       LAST(B.rowtime) AS end_time
          |      AFTER MATCH SKIP TO FIRST C
          |      PATTERN (A B+ C)
          |      DEFINE
          |        B as (last(B.price,1) is null AND B.price < A.price) OR B.price < last(B.price,1),
          |        C as C.price > B.price
          |) AS T
          |""".stripMargin)
  }

}
