package com.shujia.gma.test


import org.apache.flink.streaming.api.scala._
import org.apache.flink.table.api._
import org.apache.flink.table.api.bridge.scala._
import org.apache.flink.types.Row

object Test1 {

  /**
    * Streaming enrichment job.
    *
    * Reads change-log records of the `user` table from Kafka (canal-json
    * format) and, for every incoming row, performs a processing-time
    * temporal join (`FOR SYSTEM_TIME AS OF`) against an HBase dimension
    * table to attach the latest `age` value. Results are printed as a
    * retract stream.
    */
  def main(args: Array[String]): Unit = {
    // Blink planner, streaming mode.
    val settings: EnvironmentSettings = EnvironmentSettings
      .newInstance()
      .useBlinkPlanner()
      .inStreamingMode()
      .build()

    val env: StreamExecutionEnvironment = StreamExecutionEnvironment.getExecutionEnvironment
    env.setParallelism(1)

    val tableEnv: StreamTableEnvironment = StreamTableEnvironment.create(env, settings)

    // Kafka source: canal-json change log of the `user` table.
    // `ts` is a processing-time attribute, required by the temporal join below.
    val userSourceDdl: String =
      """
        |
        |CREATE TABLE `user`(
        |  `id` BIGINT,
        |  name string,
        |  ts AS PROCTIME()
        |) WITH(
        | 'connector' = 'kafka',
        | 'topic' = 'gma.user',
        | 'properties.bootstrap.servers' = 'master:9092,node1:9092,node2:9092',
        | 'properties.group.id' = 'testGroup1',
        | 'format' = 'canal-json' ,
        | 'scan.startup.mode' = 'earliest-offset',
        | 'canal-json.ignore-parse-errors'='true'
        |)
        |
      """.stripMargin
    tableEnv.executeSql(userSourceDdl)

    // HBase dimension table: row key `id`, column family `i` with qualifier `age`.
    val dimAgeDdl: String =
      """
        |CREATE TABLE dim_age(
        | id BIGINT,
        | i ROW<age int>,
        | PRIMARY KEY (id) NOT ENFORCED
        |) WITH (
        | 'connector' = 'hbase-1.4',
        | 'table-name' = 'dim:age',
        | 'zookeeper.quorum' = 'master:2181,node1:2181,node2:2181',
        | 'sink.buffer-flush.max-rows'='1'
        |)
        |
      """.stripMargin
    tableEnv.executeSql(dimAgeDdl)

    // Temporal join with `FOR SYSTEM_TIME AS OF a.ts`: every incoming record
    // triggers a lookup against HBase, so the freshest dimension data is used.
    val enriched = tableEnv.sqlQuery(
      """
        |
        |select a.id,a.name,b.age
        |from `user` as a
        |left join dim_age FOR SYSTEM_TIME AS OF a.ts as b
        | on a.id=b.id
        |
        |""".stripMargin)

    enriched.toRetractStream[Row].print()

    env.execute()
  }
}
