package com.shujia.search

import org.apache.flink.streaming.api.scala.StreamExecutionEnvironment
import org.apache.flink.table.api.EnvironmentSettings
import org.apache.flink.table.api.bridge.scala.StreamTableEnvironment

/**
  * Flink SQL batch-style streaming job: loads card coordinates from a local
  * CSV file and upserts them into an HBase table keyed by card id.
  *
  * Prerequisite — the target table must already exist in HBase:
  * {{{
  *   create 'cards' ,'info'
  * }}}
  */
object CardsToHbase {

  def main(args: Array[String]): Unit = {
    // Streaming execution environment backing the SQL runtime.
    val streamEnv: StreamExecutionEnvironment =
      StreamExecutionEnvironment.getExecutionEnvironment

    // Blink planner, streaming mode.
    val settings: EnvironmentSettings =
      EnvironmentSettings
        .newInstance()
        .useBlinkPlanner()
        .inStreamingMode()
        .build()

    // Table environment that executes the SQL statements below.
    val tableEnv: StreamTableEnvironment =
      StreamTableEnvironment.create(streamEnv, settings)

    // Source table: CSV file with card id and its lon/lat coordinates.
    val sourceDdl: String =
      """
        |CREATE TABLE cards_file (
        |  card_id BIGINT,
        |  lon DOUBLE,
        |  lat DOUBLE
        |)  WITH (
        |  'connector' = 'filesystem',
        |  'path' = 'data/cards.csv',
        |  'format' = 'csv'
        |)
        |
      """.stripMargin

    // Sink table: HBase 'cards' table; the ROW type maps onto the
    // 'info' column family, the primary key becomes the HBase row key.
    val sinkDdl: String =
      """
        |
        |CREATE TABLE cards_hbase (
        | card_id STRING,
        | info ROW<lat DOUBLE,lon DOUBLE>,
        | PRIMARY KEY (card_id) NOT ENFORCED
        |) WITH (
        | 'connector' = 'hbase-1.4',
        | 'table-name' = 'cards',
        | 'zookeeper.quorum' = 'master:2181,node1:2181,node2:2181'
        |)
        |
      """.stripMargin

    // INSERT: row key is the stringified card id; lat/lon are packed into a
    // ROW in the same field order as the sink's 'info' declaration.
    // NOTE(review): executeSql submits an INSERT job asynchronously; if the
    // client must outlive the job (e.g. local runs), consider calling
    // .await() on the returned TableResult — confirm against deployment mode.
    val insertDml: String =
      """
        |insert into cards_hbase
        |select cast(card_id as STRING) as card_id,ROW(lat,lon) as info from cards_file
      """.stripMargin

    tableEnv.executeSql(sourceDdl)
    tableEnv.executeSql(sinkDdl)
    tableEnv.executeSql(insertDml)
  }

}
