package com.shujia.sql

import org.apache.flink.streaming.api.scala._
import org.apache.flink.table.api._
import org.apache.flink.table.api.bridge.scala._
import org.apache.flink.types.Row

/**
  * Flink Table API demo: a continuous word-count over a socket text stream.
  *
  * Pipeline: socket source -> dynamic table -> continuous SQL aggregation
  * -> retract stream -> stdout.
  */
object Demo1TableApi {

  def main(args: Array[String]): Unit = {
    // Streaming execution environment (DataStream layer).
    val env: StreamExecutionEnvironment = StreamExecutionEnvironment.getExecutionEnvironment

    // Table environment configured with the Blink planner in streaming mode.
    val settings: EnvironmentSettings = EnvironmentSettings
      .newInstance()
      .useBlinkPlanner() // use the Blink planner
      .inStreamingMode() // streaming (not batch) mode
      .build()
    val tableEnv: StreamTableEnvironment = StreamTableEnvironment.create(env, settings)

    // Unbounded source: one word per line from a socket.
    // NOTE(review): assumes a text server is listening on master:8888 — confirm before running.
    val lines: DataStream[String] = env.socketTextStream("master", 8888)

    // Convert the stream into a dynamic table with a single column `word`.
    // A Table is similar to a Spark DataFrame and supports a DSL-style API.
    val wordsTable: Table = tableEnv.fromDataStream(lines, $"word")

    // Register the table so it can be referenced by name in SQL.
    tableEnv.createTemporaryView("words", wordsTable)

    // Continuous query: the result updates as new rows arrive on the dynamic table.
    val wordCounts: Table = tableEnv.sqlQuery(
      """
        |select word,count(1) from words group by word

      """.stripMargin)

    // A grouped aggregation produces updates, so a retract stream is required:
    // each record is (flag, row) where flag=false retracts a previous result.
    val retractStream: DataStream[(Boolean, Row)] = wordCounts.toRetractStream[Row]
    retractStream.print()

    // Trigger job execution (lazy until this point).
    env.execute()
  }
}
