package com.shujia.flink.sql

import org.apache.flink.streaming.api.scala._
import org.apache.flink.table.api._
import org.apache.flink.table.api.bridge.scala._
import org.apache.flink.types.Row

object Demo1TableApi {

  /**
    * Demo entry point: registers a Kafka-backed `student` table via SQL DDL,
    * then uses the Table API (DSL) to count students per class and prints the
    * continuously updating result as a retract stream.
    */
  def main(args: Array[String]): Unit = {

    // Streaming execution environment — the runtime the job is submitted to.
    val env: StreamExecutionEnvironment = StreamExecutionEnvironment.getExecutionEnvironment

    // Planner settings: use the Blink planner in streaming mode.
    val settings: EnvironmentSettings = EnvironmentSettings
      .newInstance()
      .useBlinkPlanner()
      .inStreamingMode()
      .build()

    // Build the Flink table environment on top of the stream environment.
    val tableEnv: StreamTableEnvironment = StreamTableEnvironment.create(env, settings)

    // DDL: source table reading CSV-formatted records from the 'stu' Kafka topic,
    // starting from the earliest offset.
    tableEnv.executeSql(
      """
        |
        |CREATE TABLE student (
        |  id STRING,
        |  name STRING,
        |  age INT,
        |  gender STRING,
        |  clazz STRING
        |) WITH (
        |  'connector' = 'kafka',
        |  'topic' = 'stu',
        |  'properties.bootstrap.servers' = 'master:9092,node1:9092,node2:9092',
        |  'properties.group.id' = 'testGroup',
        |  'scan.startup.mode' = 'earliest-offset',
        |  'format' = 'csv'
        |)
        |
        |
      """.stripMargin)

    // A Table is the DSL handle for the registered table — analogous to a
    // DataFrame in Spark.
    val student: Table = tableEnv.from("student")

    // DSL aggregation: number of students per class.
    val countsPerClazz: Table = student
      .groupBy($"clazz")
      .select($"clazz", $"clazz".count() as "num")

    // Grouped aggregation on an unbounded stream emits updates, so the result
    // must be converted to a retract stream (Boolean flag marks add/retract).
    countsPerClazz.toRetractStream[Row].print()

    // Trigger job execution.
    env.execute()

  }

}
