package com.shujia.gma.dim

import org.apache.flink.configuration.Configuration
import org.apache.flink.streaming.api.scala._
import org.apache.flink.table.api._
import org.apache.flink.table.api.bridge.scala._
import org.apache.flink.types.Row


object Demo1RegionDIm {

  /**
    * Region dimension-table job.
    *
    * Reads incremental change data (canal-json) for `base_province` and
    * `base_region` from Kafka, and (when the insert statement is enabled)
    * joins them into a denormalized region dimension table stored in HBase
    * at `dim:base_region`, column family `i`.
    */
  def main(args: Array[String]): Unit = {

    // Blink planner in streaming mode — incremental dimension-data import.
    val fsSettings: EnvironmentSettings = EnvironmentSettings
      .newInstance()
      .useBlinkPlanner() // planner
      .inStreamingMode()
      .build()

    val configuration: Configuration = new Configuration

    // Raise the akka ask timeout so slow local startup doesn't kill RPC calls.
    configuration.setString("akka.ask.timeout", "1000s")

    // Local environment with the Flink web UI enabled for debugging.
    val fsEnv: StreamExecutionEnvironment = StreamExecutionEnvironment.createLocalEnvironmentWithWebUI(configuration)

    // Single parallelism: keeps ordering simple and output readable locally.
    fsEnv.setParallelism(1)
    val fsTableEnv: StreamTableEnvironment = StreamTableEnvironment.create(fsEnv, fsSettings)

    // Source: province change stream captured by canal into Kafka.
    fsTableEnv.executeSql(
      """
        |CREATE TABLE ods_base_province (
        |  id int,
        |  name STRING,
        |  region_id int,
        |  area_code STRING
        |) WITH (
        | 'connector' = 'kafka',
        | 'topic' = 'gma.base_province',
        | 'properties.bootstrap.servers' = 'master:9092,node1:9092,node2:9092',
        | 'properties.group.id' = 'testGroup',
        | 'format' = 'canal-json',
        | 'scan.startup.mode' = 'earliest-offset',
        | 'canal-json.ignore-parse-errors'='true'
        |)
        |
      """.stripMargin)

    // Source: region change stream captured by canal into Kafka.
    fsTableEnv.executeSql(
      """
        |CREATE TABLE ods_base_region (
        |  id int,
        |  region_name STRING
        |) WITH (
        | 'connector' = 'kafka',
        | 'topic' = 'gma.base_region',
        | 'properties.bootstrap.servers' = 'master:9092,node1:9092,node2:9092',
        | 'properties.group.id' = 'testGroup',
        | 'format' = 'canal-json',
        | 'scan.startup.mode' = 'earliest-offset',
        | 'canal-json.ignore-parse-errors'='true'
        |)
        |
      """.stripMargin)

    /**
      * Create the dimension table in HBase first:
      *   create_namespace 'dim'
      *   create 'dim:base_region' ,'i'
      */

    // Sink/lookup: HBase dimension table. All non-key columns live in
    // column family 'i'. Flush after every row so data is visible
    // immediately during development.
    // NOTE: quorum hosts must be colon-separated host:port pairs
    // (previously 'node1,2181' — a malformed entry).
    fsTableEnv.executeSql(
      """
        |CREATE TABLE dim_base_region (
        | id int,
        | i ROW<name STRING,region_name STRING,area_code STRING>,
        | PRIMARY KEY (id) NOT ENFORCED
        |) WITH (
        | 'connector' = 'hbase-1.4',
        | 'table-name' = 'dim:base_region',
        | 'zookeeper.quorum' = 'master:2181,node1:2181,node2:2181',
        | 'sink.buffer-flush.max-rows'='1'
        |)
        |
      """.stripMargin)

    /**
      * Join the two source streams and persist the dimension rows to HBase.
      * (Disabled while verifying the HBase table contents below.)
      */

//    fsTableEnv.executeSql(
//      """
//        |insert into dim_base_region
//        |select a.id as id, ROW(name,region_name,area_code) as i  from
//        |ods_base_province as a
//        |join ods_base_region  as b
//        |on a.region_id=b.id
//        |
//      """.stripMargin)


//    fsEnv.execute()


    // Debug: scan the HBase dimension table and print the result stream.
    fsTableEnv.sqlQuery(
      """
        |
        |select * from dim_base_region
        |
      """.stripMargin).toAppendStream[Row].print()

    fsEnv.execute()

  }
}
