package com.shujia.ads

import org.apache.flink.streaming.api.scala.StreamExecutionEnvironment
import org.apache.flink.table.api.EnvironmentSettings
import org.apache.flink.table.api.bridge.scala.StreamTableEnvironment
import org.apache.flink.table.catalog.hive.HiveCatalog

object Demo3SourceIndex {

  /** Streaming ADS-layer job: counts weibo messages per `source` in the DWD
    * Kafka table and continuously inserts the aggregation into the
    * MySQL-backed sink table `ads.ads_mysql_source_index`.
    *
    * NOTE(review): the HiveCatalog config dir is a relative path — it assumes
    * the job is launched from the project root; confirm for cluster deploys.
    */
  def main(args: Array[String]): Unit = {

    val streamEnv: StreamExecutionEnvironment =
      StreamExecutionEnvironment.getExecutionEnvironment

    // Blink planner, unbounded (streaming) execution mode.
    val settings: EnvironmentSettings =
      EnvironmentSettings
        .newInstance()
        .useBlinkPlanner()
        .inStreamingMode()
        .build()

    // Table environment bridging the DataStream API and Flink SQL.
    val tableEnv: StreamTableEnvironment =
      StreamTableEnvironment.create(streamEnv, settings)

    // Register the Hive catalog (catalog name, default database,
    // directory holding hive-site.xml) and make it the current catalog
    // so the dwd.* / ads.* tables below resolve against Hive metadata.
    val hiveCatalog = new HiveCatalog("myHive", "sent", "sentcompute/src/main/resources")
    tableEnv.registerCatalog("myHive", hiveCatalog)
    tableEnv.useCatalog("myHive")

    // Continuous query: message count per source, written to the ADS sink.
    tableEnv.executeSql(
      """
        |
        |insert into ads.ads_mysql_source_index
        |select source ,count(id) as c from dwd.dwd_kafka_weibo_msk group by source
        |
        |
      """.stripMargin)
  }
}
