package com.shujia.seach

import org.apache.flink.streaming.api.scala.StreamExecutionEnvironment
import org.apache.flink.table.api.EnvironmentSettings
import org.apache.flink.table.api.bridge.scala.StreamTableEnvironment

// NOTE(review): object name ("Hbasse") and package ("seach") contain typos,
// but renaming would break callers / the compiled class name, so both are kept.
object CardToHbasse {

  /**
    * Reads checkpoint (卡口) longitude/latitude records from a CSV file and
    * writes them into the HBase table `card` (column family `info`) using
    * Flink SQL in streaming mode.
    *
    * Prerequisites:
    *  - the HBase table must exist beforehand:  create 'card','info'
    *  - ZooKeeper reachable at master:2181
    *  - input file available at data/cards.csv (card STRING, lon DOUBLE, lat DOUBLE)
    */
  def main(args: Array[String]): Unit = {

    val env: StreamExecutionEnvironment = StreamExecutionEnvironment.getExecutionEnvironment

    // Build the table-environment settings: Blink planner, streaming mode.
    val bsSettings: EnvironmentSettings = EnvironmentSettings
      .newInstance()
      .useBlinkPlanner() // use the Blink planner
      .inStreamingMode() // streaming (unbounded) execution mode
      .build()

    // Create the Flink SQL (Table API) environment on top of the streaming env.
    val bsTableEnv: StreamTableEnvironment = StreamTableEnvironment.create(env, bsSettings)

    // Source table: checkpoint id plus longitude/latitude, read from CSV.
    bsTableEnv.executeSql(
      """
        |CREATE TABLE cards_source (
        |  card STRING,
        |  lon DOUBLE,
        |  lat DOUBLE
        |) WITH (
        |  'connector' = 'filesystem',
        |  'path' = 'data/cards.csv',
        |  'format' = 'csv'
        |)
      """.stripMargin)

    // Sink table: HBase table 'card'; row key is the card id, columns go into
    // the 'info' column family (lon, lat). The HBase table must already exist:
    //   create 'card','info'
    bsTableEnv.executeSql(
      """
        |CREATE TABLE hbase_sink (
        | card STRING,
        | info ROW<lon DOUBLE,lat DOUBLE>,
        | PRIMARY KEY (card) NOT ENFORCED
        |) WITH (
        | 'connector' = 'hbase-1.4',
        | 'table-name' = 'card',
        | 'zookeeper.quorum' = 'master:2181'
        |)
        |
      """.stripMargin)

    // Submit the INSERT job. executeSql returns immediately with a TableResult
    // while the job runs asynchronously; without await() the JVM may exit
    // before the local job finishes writing to HBase, so block until done.
    bsTableEnv.executeSql(
      """
        |insert into hbase_sink
        |select card,ROW(lon,lat) as info from cards_source
        |
      """.stripMargin)
      .await()
  }

}
